fix(flags): Do the expected for numeric comparisons - FINAL FINAL edition (#18443)
This commit is contained in:
parent f6ce07f3b4
commit aa6bac9c84
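For orientation, a minimal sketch (not part of the commit) of the behaviour this change targets: gt/gte/lt/lte flag conditions now follow the type of the value being matched, so numeric overrides compare numerically while string overrides still compare as strings. The example mirrors the match_property test cases added further down; the import paths are the ones used in this diff's tests.

from posthog.models.property.property import Property
from posthog.queries.base import match_property

prop = Property(key="key", value="30", operator="lt")

match_property(prop, {"key": "100"})  # True: string override, "100" < "30" lexicographically
match_property(prop, {"key": 100})    # False: numeric override, 100 < 30.0 is False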
@ -610,6 +610,42 @@ class TestFiltering(ClickhouseTestMixin, property_to_Q_test_factory(_filter_pers
|
||||
events = _filter_events(filter, self.team)
|
||||
self.assertEqual(events[0]["id"], event1_uuid)
|
||||
|
||||
def test_numerical_person_properties(self):
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"$a_number": 4})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"$a_number": 5})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"$a_number": 6})
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$a_number",
|
||||
"value": 4,
|
||||
"operator": "gt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(_filter_persons(filter, self.team)), 2)
|
||||
|
||||
filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 5}]})
|
||||
self.assertEqual(len(_filter_persons(filter, self.team)), 1)
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$a_number",
|
||||
"value": 6,
|
||||
"operator": "lt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(_filter_persons(filter, self.team)), 2)
|
||||
|
||||
def test_contains(self):
|
||||
_create_event(team=self.team, distinct_id="test", event="$pageview")
|
||||
event2_uuid = _create_event(
|
||||
|
@ -3,14 +3,14 @@ from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
import time
|
||||
import structlog
|
||||
from typing import Dict, List, Optional, Tuple, Union
|
||||
from typing import Dict, List, Optional, Tuple, Union, cast
|
||||
|
||||
from prometheus_client import Counter
|
||||
from django.conf import settings
|
||||
from django.db import DatabaseError, IntegrityError, OperationalError
|
||||
from django.db.models.expressions import ExpressionWrapper, RawSQL
|
||||
from django.db.models.fields import BooleanField
|
||||
from django.db.models import Q
|
||||
from django.db.models import Q, Func, F, CharField
|
||||
from django.db.models.query import QuerySet
|
||||
from sentry_sdk.api import capture_exception, start_span
|
||||
from posthog.metrics import LABEL_TEAM_ID
|
||||
@ -24,7 +24,7 @@ from posthog.models.property import GroupTypeIndex, GroupTypeName
|
||||
from posthog.models.property.property import Property
|
||||
from posthog.models.cohort import Cohort
|
||||
from posthog.models.utils import execute_with_timeout
|
||||
from posthog.queries.base import match_property, properties_to_Q
|
||||
from posthog.queries.base import match_property, properties_to_Q, sanitize_property_key
|
||||
from posthog.database_healthcheck import (
|
||||
postgres_healthcheck,
|
||||
DATABASE_FOR_FLAG_MATCHING,
|
||||
@ -396,6 +396,11 @@ class FeatureFlagMatcher:
|
||||
annotate_query = True
|
||||
nonlocal person_query
|
||||
|
||||
property_list = Filter(data=condition).property_groups.flat
|
||||
properties_with_math_operators = get_all_properties_with_math_operators(
|
||||
property_list, self.cohorts_cache
|
||||
)
|
||||
|
||||
if len(condition.get("properties", {})) > 0:
|
||||
# Feature Flags don't support OR filtering yet
|
||||
target_properties = self.property_value_overrides
|
||||
@ -404,8 +409,9 @@ class FeatureFlagMatcher:
|
||||
self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index],
|
||||
{},
|
||||
)
|
||||
|
||||
expr = properties_to_Q(
|
||||
Filter(data=condition).property_groups.flat,
|
||||
property_list,
|
||||
override_property_values=target_properties,
|
||||
cohorts_cache=self.cohorts_cache,
|
||||
using_database=DATABASE_FOR_FLAG_MATCHING,
|
||||
@ -428,13 +434,24 @@ class FeatureFlagMatcher:
|
||||
|
||||
if annotate_query:
|
||||
if feature_flag.aggregation_group_type_index is None:
|
||||
# :TRICKY: Flag matching depends on type of property when doing >, <, >=, <= comparisons.
|
||||
# This requires a generated field to query in Q objects, which sadly don't allow inlining fields,
|
||||
# hence we need to annotate the query here, even though these annotations are used much deeper,
|
||||
# in properties_to_q, in empty_or_null_with_value_q
|
||||
# These need to come in before the expr so they're available to use inside the expr.
|
||||
# Same holds for the group queries below.
|
||||
type_property_annotations = {
|
||||
prop_key: Func(F(prop_field), function="JSONB_TYPEOF", output_field=CharField())
|
||||
for prop_key, prop_field in properties_with_math_operators
|
||||
}
|
||||
person_query = person_query.annotate(
|
||||
**type_property_annotations,
|
||||
**{
|
||||
key: ExpressionWrapper(
|
||||
expr if expr else RawSQL("true", []),
|
||||
output_field=BooleanField(),
|
||||
)
|
||||
}
|
||||
),
|
||||
},
|
||||
)
|
||||
person_fields.append(key)
|
||||
else:
|
||||
@ -445,13 +462,18 @@ class FeatureFlagMatcher:
|
||||
group_query,
|
||||
group_fields,
|
||||
) = group_query_per_group_type_mapping[feature_flag.aggregation_group_type_index]
|
||||
type_property_annotations = {
|
||||
prop_key: Func(F(prop_field), function="JSONB_TYPEOF", output_field=CharField())
|
||||
for prop_key, prop_field in properties_with_math_operators
|
||||
}
|
||||
group_query = group_query.annotate(
|
||||
**type_property_annotations,
|
||||
**{
|
||||
key: ExpressionWrapper(
|
||||
expr if expr else RawSQL("true", []),
|
||||
output_field=BooleanField(),
|
||||
)
|
||||
}
|
||||
},
|
||||
)
|
||||
group_fields.append(key)
|
||||
group_query_per_group_type_mapping[feature_flag.aggregation_group_type_index] = (
|
||||
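A condensed sketch of the annotation pattern above, written outside the matcher (alias names here are illustrative; the real ones come from key_and_field_for_property and carry a hash suffix): the JSONB type of each property used with a math operator is exposed as an annotated field, so the Q built deeper in empty_or_null_with_value_q can branch on string vs. number.

from django.db.models import F, Func, CharField, BooleanField, Q
from django.db.models.expressions import ExpressionWrapper

from posthog.models import Person

person_query = Person.objects.annotate(
    # JSONB_TYPEOF exposes whether properties->'number' is stored as a JSON string or a number...
    properties_number_type=Func(F("properties__number"), function="JSONB_TYPEOF", output_field=CharField()),
    # ...so the flag condition can branch on it and still be selected as a single boolean column.
    flag_X_condition_0=ExpressionWrapper(
        Q(properties__number__gt="100", properties_number_type="string")
        | Q(properties__number__gt=100.0, properties_number_type="number"),
        output_field=BooleanField(),
    ),
)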
@ -881,3 +903,35 @@ def parse_exception_for_error_message(err: Exception):
        reason = "query_wait_timeout"

    return reason


def key_and_field_for_property(property: Property) -> Tuple[str, str]:
    column = "group_properties" if property.type == "group" else "properties"
    key = property.key
    sanitized_key = sanitize_property_key(key)

    return (
        f"{column}_{sanitized_key}_type",
        f"{column}__{key}",
    )


def get_all_properties_with_math_operators(
    properties: List[Property], cohorts_cache: Dict[int, Cohort]
) -> List[Tuple[str, str]]:
    all_keys_and_fields = []

    for prop in properties:
        if prop.type == "cohort":
            cohort_id = int(cast(Union[str, int], prop.value))
            if cohorts_cache.get(cohort_id) is None:
                cohorts_cache[cohort_id] = Cohort.objects.using(DATABASE_FOR_FLAG_MATCHING).get(pk=cohort_id)
            cohort = cohorts_cache[cohort_id]
            if cohort:
                all_keys_and_fields.extend(
                    get_all_properties_with_math_operators(cohort.properties.flat, cohorts_cache)
                )
        elif prop.operator in ["gt", "lt", "gte", "lte"] and prop.type in ("person", "group"):
            all_keys_and_fields.append(key_and_field_for_property(prop))

    return all_keys_and_fields

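As a quick illustration (hypothetical values, not part of the diff) of what the two helpers above return for a person property filtered with a math operator; the matcher then turns each pair into a JSONB_TYPEOF annotation keyed by the collision-safe alias. The helpers' import path is assumed here, since the diff does not show the file name.

from posthog.models.property.property import Property

prop = Property(key="$a_number", value="4", operator="gt", type="person")

annotation_alias, json_field = key_and_field_for_property(prop)
# annotation_alias -> "properties_anumber_<hash>_type"  (alias used for the JSONB_TYPEOF annotation)
# json_field       -> "properties__$a_number"           (Django JSON key lookup path)

get_all_properties_with_math_operators([prop], cohorts_cache={})
# -> [(annotation_alias, json_field)]; cohort properties are expanded recursively.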
@ -306,6 +306,36 @@
|
||||
ORDER BY "posthog_persondistinctid"."id" ASC
|
||||
'
|
||||
---
|
||||
# name: TestDjangoPropertiesToQ.test_icontains_with_array_value
|
||||
'
|
||||
SELECT "posthog_person"."uuid"
|
||||
FROM "posthog_person"
|
||||
WHERE (UPPER(("posthog_person"."properties" ->> '$key')::text) LIKE UPPER('%[''red'']%')
|
||||
AND "posthog_person"."properties" ? '$key'
|
||||
AND NOT (("posthog_person"."properties" -> '$key') = 'null')
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestDjangoPropertiesToQ.test_icontains_with_array_value.1
|
||||
'
|
||||
SELECT "posthog_person"."uuid"
|
||||
FROM "posthog_person"
|
||||
WHERE (UPPER(("posthog_person"."properties" ->> '$key')::text) LIKE UPPER('%red%')
|
||||
AND "posthog_person"."properties" ? '$key'
|
||||
AND NOT (("posthog_person"."properties" -> '$key') = 'null')
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestDjangoPropertiesToQ.test_icontains_with_array_value.2
|
||||
'
|
||||
SELECT "posthog_person"."uuid"
|
||||
FROM "posthog_person"
|
||||
WHERE (("posthog_person"."properties" -> '$key') > '["2"]'
|
||||
AND "posthog_person"."properties" ? '$key'
|
||||
AND NOT (("posthog_person"."properties" -> '$key') = 'null')
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_cohorts
|
||||
'
|
||||
SELECT "posthog_cohort"."id",
|
||||
|
@ -2,7 +2,7 @@ import datetime
|
||||
import json
|
||||
from typing import Any, Callable, Dict, List, Optional, cast
|
||||
|
||||
from django.db.models import Q
|
||||
from django.db.models import Q, Func, F, CharField
|
||||
|
||||
from posthog.constants import FILTER_TEST_ACCOUNTS
|
||||
from posthog.models import Cohort, Filter, Person, Team
|
||||
@ -219,42 +219,6 @@ def property_to_Q_test_factory(filter_persons: Callable, person_factory):
|
||||
)
|
||||
self.assertListEqual(filter.property_groups.values, [])
|
||||
|
||||
def test_numerical_person_properties(self):
|
||||
person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"$a_number": 4})
|
||||
person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"$a_number": 5})
|
||||
person_factory(team_id=self.team.pk, distinct_ids=["p3"], properties={"$a_number": 6})
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$a_number",
|
||||
"value": 4,
|
||||
"operator": "gt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons(filter, self.team)), 2)
|
||||
|
||||
filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 5}]})
|
||||
self.assertEqual(len(filter_persons(filter, self.team)), 1)
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$a_number",
|
||||
"value": 6,
|
||||
"operator": "lt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons(filter, self.team)), 2)
|
||||
|
||||
def test_contains_persons(self):
|
||||
person_factory(
|
||||
team_id=self.team.pk,
|
||||
@ -819,6 +783,117 @@ class TestDjangoPropertiesToQ(property_to_Q_test_factory(_filter_persons, _creat
|
||||
|
||||
return Filter(data=data)
|
||||
|
||||
def test_numerical_person_properties(self):
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"$a_number": 4})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"$a_number": 5})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"$a_number": 6})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p4"], properties={"$a_number": 14})
|
||||
|
||||
flush_persons_and_events()
|
||||
|
||||
def filter_persons_with_annotation(filter: Filter, team: Team):
|
||||
persons = Person.objects.annotate(
|
||||
**{
|
||||
"properties_anumber_27b11200b8ed4fb_type": Func(
|
||||
F("properties__$a_number"), function="JSONB_TYPEOF", output_field=CharField()
|
||||
)
|
||||
}
|
||||
).filter(properties_to_Q(filter.property_groups.flat))
|
||||
persons = persons.filter(team_id=team.pk)
|
||||
return [str(uuid) for uuid in persons.values_list("uuid", flat=True)]
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$a_number",
|
||||
"value": "4",
|
||||
"operator": "gt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 3)
|
||||
|
||||
filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 5}]})
|
||||
self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 1)
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$a_number",
|
||||
"value": 6,
|
||||
"operator": "lt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 2)
|
||||
|
||||
@snapshot_postgres_queries
|
||||
def test_icontains_with_array_value(self):
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"$key": "red-123"})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"$key": "blue-123"})
|
||||
_create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"$key": 6})
|
||||
|
||||
flush_persons_and_events()
|
||||
|
||||
def filter_persons_with_annotation(filter: Filter, team: Team):
|
||||
persons = Person.objects.annotate(
|
||||
**{
|
||||
"properties_$key_type": Func(
|
||||
F("properties__$key"), function="JSONB_TYPEOF", output_field=CharField()
|
||||
)
|
||||
}
|
||||
).filter(properties_to_Q(filter.property_groups.flat))
|
||||
persons = persons.filter(team_id=team.pk)
|
||||
return [str(uuid) for uuid in persons.values_list("uuid", flat=True)]
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$key",
|
||||
"value": ["red"],
|
||||
"operator": "icontains",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 0)
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$key",
|
||||
"value": "red",
|
||||
"operator": "icontains",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 1)
|
||||
|
||||
filter = Filter(
|
||||
data={
|
||||
"properties": [
|
||||
{
|
||||
"type": "person",
|
||||
"key": "$key",
|
||||
"value": ["2"],
|
||||
"operator": "gt",
|
||||
}
|
||||
]
|
||||
}
|
||||
)
|
||||
self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 0)
|
||||
|
||||
|
||||
def filter_persons_with_property_group(
|
||||
filter: Filter, team: Team, property_overrides: Dict[str, Any] = {}
|
||||
|
@ -1,4 +1,5 @@
|
||||
import datetime
|
||||
import hashlib
|
||||
import re
|
||||
from typing import (
|
||||
Any,
|
||||
@ -12,7 +13,7 @@ from typing import (
|
||||
)
|
||||
|
||||
from dateutil import parser
|
||||
from django.db.models import Exists, OuterRef, Q
|
||||
from django.db.models import Exists, OuterRef, Q, Value
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
from posthog.constants import PropertyOperatorType
|
||||
@ -20,7 +21,6 @@ from posthog.models.cohort import Cohort, CohortPeople
|
||||
from posthog.models.filters.filter import Filter
|
||||
from posthog.models.filters.path_filter import PathFilter
|
||||
from posthog.models.property import (
|
||||
CLICKHOUSE_ONLY_PROPERTY_TYPES,
|
||||
Property,
|
||||
PropertyGroup,
|
||||
)
|
||||
@ -29,10 +29,10 @@ from posthog.models.team import Team
|
||||
from posthog.queries.util import convert_to_datetime_aware
|
||||
from posthog.utils import get_compare_period_dates, is_valid_regex
|
||||
|
||||
F = TypeVar("F", Filter, PathFilter)
|
||||
FilterType = TypeVar("FilterType", Filter, PathFilter)
|
||||
|
||||
|
||||
def determine_compared_filter(filter: F) -> F:
|
||||
def determine_compared_filter(filter: FilterType) -> FilterType:
|
||||
if not filter.date_to or not filter.date_from:
|
||||
raise ValidationError("You need date_from and date_to to compare")
|
||||
date_from, date_to = get_compare_period_dates(
|
||||
@ -142,17 +142,34 @@ def match_property(property: Property, override_property_values: Dict[str, Any])
        except re.error:
            return False

-   if operator == "gt":
-       return type(override_value) == type(value) and override_value > value
+   if operator in ("gt", "gte", "lt", "lte"):
+       # :TRICKY: We adjust comparison based on the override value passed in,
+       # to make sure we handle both numeric and string comparisons appropriately.
+       def compare(lhs, rhs, operator):
+           if operator == "gt":
+               return lhs > rhs
+           elif operator == "gte":
+               return lhs >= rhs
+           elif operator == "lt":
+               return lhs < rhs
+           elif operator == "lte":
+               return lhs <= rhs
+           else:
+               raise ValueError(f"Invalid operator: {operator}")

-   if operator == "gte":
-       return type(override_value) == type(value) and override_value >= value
+       parsed_value = None
+       try:
+           parsed_value = float(value)  # type: ignore
+       except Exception:
+           pass

-   if operator == "lt":
-       return type(override_value) == type(value) and override_value < value
-
-   if operator == "lte":
-       return type(override_value) == type(value) and override_value <= value
+       if parsed_value is not None and override_value is not None:
+           if isinstance(override_value, str):
+               return compare(override_value, str(value), operator)
+           else:
+               return compare(override_value, parsed_value, operator)
+       else:
+           return compare(str(override_value), str(value), operator)

    if operator in ["is_date_before", "is_date_after"]:
        try:

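A small example of the fallback path above, mirroring the property_f test case added later in this diff: when the flag's value cannot be parsed as a number, both sides are compared as strings.

from posthog.models.property.property import Property
from posthog.queries.base import match_property

prop = Property(key="key", value="123aloha", operator="gt")  # "123aloha" cannot be parsed as a float

match_property(prop, {"key": "123"})  # False: "123" > "123aloha" is False
match_property(prop, {"key": 122})    # False: str(122) = "122" > "123aloha" is False
match_property(prop, {"key": 129})    # True:  "129" > "123aloha" lexicographically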
@ -207,7 +224,24 @@ def empty_or_null_with_value_q(
            f"{column}__{key}", value_as_coerced_to_number
        )
    else:
-       target_filter = Q(**{f"{column}__{key}__{operator}": value})
+       parsed_value = None
+       if operator in ("gt", "gte", "lt", "lte"):
+           try:
+               # try to parse even if arrays can't be parsed, the catch will handle it
+               parsed_value = float(value)  # type: ignore
+           except Exception:
+               pass
+
+       if parsed_value is not None:
+           # When we can coerce given value to a number, check whether the value in DB is a number
+           # and do a numeric comparison. Otherwise, do a string comparison.
+           sanitized_key = sanitize_property_key(key)
+           target_filter = Q(
+               Q(**{f"{column}__{key}__{operator}": str(value), f"{column}_{sanitized_key}_type": Value("string")})
+               | Q(**{f"{column}__{key}__{operator}": parsed_value, f"{column}_{sanitized_key}_type": Value("number")})
+           )
+       else:
+           target_filter = Q(**{f"{column}__{key}__{operator}": value})

    query_filter = Q(target_filter & Q(**{f"{column}__has_key": key}) & ~Q(**{f"{column}__{key}": None}))

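Concretely, for a person property with key "number", value "100" and operator gt, the branch above builds a Q of roughly this shape (the _type alias is abbreviated here; the real one carries a hash suffix from sanitize_property_key), which compiles to the OR of a string comparison and a numeric JSONB comparison seen in the query snapshots elsewhere in this diff.

from django.db.models import Q, Value

target_filter = Q(
    Q(properties__number__gt="100", properties_number_xxxxx_type=Value("string"))
    | Q(properties__number__gt=100.0, properties_number_xxxxx_type=Value("number"))
)
# SQL shape (per the snapshots):
#   ((properties -> 'number') > '"100"'  AND JSONB_TYPEOF(properties -> 'number') = 'string')
#    OR ((properties -> 'number') > '100.0' AND JSONB_TYPEOF(properties -> 'number') = 'number')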
@ -229,7 +263,8 @@ def property_to_Q(
|
||||
cohorts_cache: Optional[Dict[int, Cohort]] = None,
|
||||
using_database: str = "default",
|
||||
) -> Q:
|
||||
if property.type in CLICKHOUSE_ONLY_PROPERTY_TYPES:
|
||||
if property.type not in ["person", "group", "cohort", "event"]:
|
||||
# We need to support event type for backwards compatibility, even though it's treated as a person property type
|
||||
raise ValueError(f"property_to_Q: type is not supported: {repr(property.type)}")
|
||||
|
||||
value = property._parse_value(property.value)
|
||||
@ -381,3 +416,15 @@ def is_truthy_or_falsy_property_value(value: Any) -> bool:
        or value is True
        or value is False
    )


def sanitize_property_key(key: Any) -> str:
    string_key = str(key)
    # remove all but a-zA-Z0-9 characters from the key
    substitute = re.sub(r"[^a-zA-Z0-9]", "", string_key)

    # :TRICKY: We also want to prevent clashes between key1_ and key1, or key1 and key2 so we add
    # a salt based on hash of the key
    # This is because we don't want to overwrite the value of key1 when we're trying to read key2
    hash_value = hashlib.sha1(string_key.encode("utf-8")).hexdigest()[:15]
    return f"{substitute}_{hash_value}"

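For reference, the expected outputs asserted in test_sanitize_keys later in this diff:

from posthog.queries.base import sanitize_property_key

sanitize_property_key("test_key")    # -> "testkey_00942f4668670f3"
sanitize_property_key("test-!!key")  # -> "testkey_007a0fef83e9d2f"
sanitize_property_key(None)          # -> "None_6eef6648406c333"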
@ -4,11 +4,12 @@ from unittest.mock import patch
|
||||
|
||||
from dateutil import parser, tz
|
||||
from django.test import TestCase
|
||||
import pytest
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
from posthog.models.filters.path_filter import PathFilter
|
||||
from posthog.models.property.property import Property
|
||||
from posthog.queries.base import match_property
|
||||
from posthog.queries.base import match_property, sanitize_property_key
|
||||
from posthog.test.base import APIBaseTest
|
||||
|
||||
|
||||
@ -154,7 +155,8 @@ class TestMatchProperties(TestCase):
|
||||
|
||||
self.assertFalse(match_property(property_a, {"key": 0}))
|
||||
self.assertFalse(match_property(property_a, {"key": -1}))
|
||||
self.assertFalse(match_property(property_a, {"key": "23"}))
|
||||
# now we handle type mismatches so this should be true
|
||||
self.assertTrue(match_property(property_a, {"key": "23"}))
|
||||
|
||||
property_b = Property(key="key", value=1, operator="lt")
|
||||
self.assertTrue(match_property(property_b, {"key": 0}))
|
||||
@ -171,16 +173,32 @@ class TestMatchProperties(TestCase):
|
||||
|
||||
self.assertFalse(match_property(property_c, {"key": 0}))
|
||||
self.assertFalse(match_property(property_c, {"key": -1}))
|
||||
self.assertFalse(match_property(property_c, {"key": "3"}))
|
||||
# now we handle type mismatches so this should be true
|
||||
self.assertTrue(match_property(property_c, {"key": "3"}))
|
||||
|
||||
property_d = Property(key="key", value="43", operator="lt")
|
||||
self.assertTrue(match_property(property_d, {"key": "41"}))
|
||||
self.assertTrue(match_property(property_d, {"key": "42"}))
|
||||
self.assertTrue(match_property(property_d, {"key": 42}))
|
||||
|
||||
self.assertFalse(match_property(property_d, {"key": "43"}))
|
||||
self.assertFalse(match_property(property_d, {"key": "44"}))
|
||||
self.assertFalse(match_property(property_d, {"key": 44}))
|
||||
|
||||
property_e = Property(key="key", value="30", operator="lt")
|
||||
self.assertTrue(match_property(property_e, {"key": "29"}))
|
||||
|
||||
# depending on the type of override, we adjust type comparison
|
||||
self.assertTrue(match_property(property_e, {"key": "100"}))
|
||||
self.assertFalse(match_property(property_e, {"key": 100}))
|
||||
|
||||
property_f = Property(key="key", value="123aloha", operator="gt")
|
||||
self.assertFalse(match_property(property_f, {"key": "123"}))
|
||||
self.assertFalse(match_property(property_f, {"key": 122}))
|
||||
|
||||
# this turns into a string comparison
|
||||
self.assertTrue(match_property(property_f, {"key": 129}))
|
||||
|
||||
def test_match_property_date_operators(self):
|
||||
property_a = Property(key="key", value="2022-05-01", operator="is_date_before")
|
||||
self.assertTrue(match_property(property_a, {"key": "2022-03-01"}))
|
||||
@ -227,3 +245,66 @@ class TestMatchProperties(TestCase):
|
||||
self.assertTrue(match_property(property_d, {"key": "2022-04-05 12:34:11 CET"}))
|
||||
|
||||
self.assertFalse(match_property(property_d, {"key": "2022-04-05 12:34:13 CET"}))
|
||||
|
||||
def test_none_property_value_with_all_operators(self):
|
||||
property_a = Property(key="key", value="none", operator="is_not")
|
||||
self.assertFalse(match_property(property_a, {"key": None}))
|
||||
self.assertTrue(match_property(property_a, {"key": "non"}))
|
||||
|
||||
property_b = Property(key="key", value=None, operator="is_set")
|
||||
self.assertTrue(match_property(property_b, {"key": None}))
|
||||
|
||||
property_c = Property(key="key", value="no", operator="icontains")
|
||||
self.assertTrue(match_property(property_c, {"key": None}))
|
||||
self.assertFalse(match_property(property_c, {"key": "smh"}))
|
||||
|
||||
property_d = Property(key="key", value="No", operator="regex")
|
||||
self.assertTrue(match_property(property_d, {"key": None}))
|
||||
|
||||
property_d_lower_case = Property(key="key", value="no", operator="regex")
|
||||
self.assertFalse(match_property(property_d_lower_case, {"key": None}))
|
||||
|
||||
property_e = Property(key="key", value=1, operator="gt")
|
||||
self.assertTrue(match_property(property_e, {"key": None}))
|
||||
|
||||
property_f = Property(key="key", value=1, operator="lt")
|
||||
self.assertFalse(match_property(property_f, {"key": None}))
|
||||
|
||||
property_g = Property(key="key", value="xyz", operator="gte")
|
||||
self.assertFalse(match_property(property_g, {"key": None}))
|
||||
|
||||
property_h = Property(key="key", value="Oo", operator="lte")
|
||||
self.assertTrue(match_property(property_h, {"key": None}))
|
||||
|
||||
property_i = Property(key="key", value="2022-05-01", operator="is_date_before")
|
||||
self.assertFalse(match_property(property_i, {"key": None}))
|
||||
|
||||
property_j = Property(key="key", value="2022-05-01", operator="is_date_after")
|
||||
self.assertFalse(match_property(property_j, {"key": None}))
|
||||
|
||||
property_k = Property(key="key", value="2022-05-01", operator="is_date_before")
|
||||
self.assertFalse(match_property(property_k, {"key": "random"}))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"key,expected",
|
||||
[
|
||||
("test_key", "testkey_00942f4668670f3"),
|
||||
("test-key", "testkey_3acfb2c2b433c0e"),
|
||||
("test-!!key", "testkey_007a0fef83e9d2f"),
|
||||
("test-key-1", "testkey1_1af855c78902ffc"),
|
||||
("test-key-1-2", "testkey12_2f0c347f439af5c"),
|
||||
("test-key-1-2-3-4", "testkey1234_0332a83ad5c75ee"),
|
||||
("only_nums!!!;$£hebfjhvd", "onlynumshebfjhvd_5a1514bfab83040"),
|
||||
(" ", "_b858cb282617fb0"),
|
||||
("", "_da39a3ee5e6b4b0"),
|
||||
("readme.md", "readmemd_275d783e2982285"),
|
||||
("readme≥md", "readmemd_8857015efe59db9"),
|
||||
(None, "None_6eef6648406c333"),
|
||||
(12, "12_7b52009b64fd0a2"),
|
||||
],
|
||||
)
|
||||
def test_sanitize_keys(key, expected):
|
||||
sanitized_key = sanitize_property_key(key)
|
||||
|
||||
assert sanitized_key == expected
|
||||
|
@ -345,6 +345,114 @@
|
||||
AND "posthog_group"."group_type_index" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_numeric_operator_with_cohorts_and_nested_cohorts
|
||||
'
|
||||
SELECT "posthog_cohort"."id",
|
||||
"posthog_cohort"."name",
|
||||
"posthog_cohort"."description",
|
||||
"posthog_cohort"."team_id",
|
||||
"posthog_cohort"."deleted",
|
||||
"posthog_cohort"."filters",
|
||||
"posthog_cohort"."version",
|
||||
"posthog_cohort"."pending_version",
|
||||
"posthog_cohort"."count",
|
||||
"posthog_cohort"."created_by_id",
|
||||
"posthog_cohort"."created_at",
|
||||
"posthog_cohort"."is_calculating",
|
||||
"posthog_cohort"."last_calculation",
|
||||
"posthog_cohort"."errors_calculating",
|
||||
"posthog_cohort"."is_static",
|
||||
"posthog_cohort"."groups"
|
||||
FROM "posthog_cohort"
|
||||
WHERE (NOT "posthog_cohort"."deleted"
|
||||
AND "posthog_cohort"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_numeric_operator_with_cohorts_and_nested_cohorts.1
|
||||
'
|
||||
SELECT (((("posthog_person"."properties" -> 'number') > '"100"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'number') > '100.0'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'number'
|
||||
AND NOT (("posthog_person"."properties" -> 'number') = 'null')) AS "flag_X_condition_0",
|
||||
(((("posthog_person"."properties" -> 'version') > '"1.05"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'version') > '1.05'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'version'
|
||||
AND NOT (("posthog_person"."properties" -> 'version') = 'null')) AS "flag_X_condition_0",
|
||||
(((("posthog_person"."properties" -> 'number') < '"31"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'number') < '31.0'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'number'
|
||||
AND NOT (("posthog_person"."properties" -> 'number') = 'null')
|
||||
AND ((("posthog_person"."properties" -> 'nested_prop') > '"20"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'nested_prop')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'nested_prop') > '20.0'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'nested_prop')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'nested_prop'
|
||||
AND NOT (("posthog_person"."properties" -> 'nested_prop') = 'null')) AS "flag_X_condition_0"
|
||||
FROM "posthog_person"
|
||||
INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
|
||||
WHERE ("posthog_persondistinctid"."distinct_id" = '307'
|
||||
AND "posthog_persondistinctid"."team_id" = 2
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_numeric_operator_with_groups_and_person_flags
|
||||
'
|
||||
SELECT "posthog_grouptypemapping"."id",
|
||||
"posthog_grouptypemapping"."team_id",
|
||||
"posthog_grouptypemapping"."group_type",
|
||||
"posthog_grouptypemapping"."group_type_index",
|
||||
"posthog_grouptypemapping"."name_singular",
|
||||
"posthog_grouptypemapping"."name_plural"
|
||||
FROM "posthog_grouptypemapping"
|
||||
WHERE "posthog_grouptypemapping"."team_id" = 2
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_numeric_operator_with_groups_and_person_flags.1
|
||||
'
|
||||
SELECT (((("posthog_person"."properties" -> 'number') >= '"20"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'number') >= '20.0'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'number'
|
||||
AND NOT (("posthog_person"."properties" -> 'number') = 'null')) AS "flag_X_condition_0"
|
||||
FROM "posthog_person"
|
||||
INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
|
||||
WHERE ("posthog_persondistinctid"."distinct_id" = '307'
|
||||
AND "posthog_persondistinctid"."team_id" = 2
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_numeric_operator_with_groups_and_person_flags.2
|
||||
'
|
||||
SELECT (((("posthog_group"."group_properties" -> 'number') > '"100"'
|
||||
AND JSONB_TYPEOF(("posthog_group"."group_properties" -> 'number')) = 'string')
|
||||
OR (("posthog_group"."group_properties" -> 'number') > '100.0'
|
||||
AND JSONB_TYPEOF(("posthog_group"."group_properties" -> 'number')) = 'number'))
|
||||
AND "posthog_group"."group_properties" ? 'number'
|
||||
AND NOT (("posthog_group"."group_properties" -> 'number') = 'null')) AS "flag_X_condition_0"
|
||||
FROM "posthog_group"
|
||||
WHERE ("posthog_group"."team_id" = 2
|
||||
AND "posthog_group"."group_key" = 'foo'
|
||||
AND "posthog_group"."group_type_index" = 0)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_numeric_operator_with_groups_and_person_flags.3
|
||||
'
|
||||
SELECT (("posthog_group"."group_properties" -> 'number') > '"100b2c"'
|
||||
AND "posthog_group"."group_properties" ? 'number'
|
||||
AND NOT (("posthog_group"."group_properties" -> 'number') = 'null')) AS "flag_X_condition_0"
|
||||
FROM "posthog_group"
|
||||
WHERE ("posthog_group"."team_id" = 2
|
||||
AND "posthog_group"."group_key" = 'foo-project'
|
||||
AND "posthog_group"."group_type_index" = 1)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_super_condition_matches_string
|
||||
'
|
||||
SELECT ((("posthog_person"."properties" -> 'is_enabled') = 'true'
|
||||
@ -365,6 +473,145 @@
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases
|
||||
'
|
||||
SELECT "posthog_team"."id",
|
||||
"posthog_team"."uuid",
|
||||
"posthog_team"."organization_id",
|
||||
"posthog_team"."api_token",
|
||||
"posthog_team"."app_urls",
|
||||
"posthog_team"."name",
|
||||
"posthog_team"."slack_incoming_webhook",
|
||||
"posthog_team"."created_at",
|
||||
"posthog_team"."updated_at",
|
||||
"posthog_team"."anonymize_ips",
|
||||
"posthog_team"."completed_snippet_onboarding",
|
||||
"posthog_team"."has_completed_onboarding_for",
|
||||
"posthog_team"."ingested_event",
|
||||
"posthog_team"."autocapture_opt_out",
|
||||
"posthog_team"."autocapture_exceptions_opt_in",
|
||||
"posthog_team"."autocapture_exceptions_errors_to_ignore",
|
||||
"posthog_team"."session_recording_opt_in",
|
||||
"posthog_team"."session_recording_sample_rate",
|
||||
"posthog_team"."session_recording_minimum_duration_milliseconds",
|
||||
"posthog_team"."session_recording_linked_flag",
|
||||
"posthog_team"."capture_console_log_opt_in",
|
||||
"posthog_team"."capture_performance_opt_in",
|
||||
"posthog_team"."surveys_opt_in",
|
||||
"posthog_team"."session_recording_version",
|
||||
"posthog_team"."signup_token",
|
||||
"posthog_team"."is_demo",
|
||||
"posthog_team"."access_control",
|
||||
"posthog_team"."week_start_day",
|
||||
"posthog_team"."inject_web_apps",
|
||||
"posthog_team"."test_account_filters",
|
||||
"posthog_team"."test_account_filters_default_checked",
|
||||
"posthog_team"."path_cleaning_filters",
|
||||
"posthog_team"."timezone",
|
||||
"posthog_team"."data_attributes",
|
||||
"posthog_team"."person_display_name_properties",
|
||||
"posthog_team"."live_events_columns",
|
||||
"posthog_team"."recording_domains",
|
||||
"posthog_team"."primary_dashboard_id",
|
||||
"posthog_team"."extra_settings",
|
||||
"posthog_team"."correlation_config",
|
||||
"posthog_team"."session_recording_retention_period_days",
|
||||
"posthog_team"."plugins_opt_in",
|
||||
"posthog_team"."opt_out_capture",
|
||||
"posthog_team"."event_names",
|
||||
"posthog_team"."event_names_with_usage",
|
||||
"posthog_team"."event_properties",
|
||||
"posthog_team"."event_properties_with_usage",
|
||||
"posthog_team"."event_properties_numerical",
|
||||
"posthog_team"."external_data_workspace_id"
|
||||
FROM "posthog_team"
|
||||
WHERE "posthog_team"."id" = 2
|
||||
LIMIT 21
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases.1
|
||||
'
|
||||
SELECT "posthog_featureflag"."id",
|
||||
"posthog_featureflag"."key",
|
||||
"posthog_featureflag"."name",
|
||||
"posthog_featureflag"."filters",
|
||||
"posthog_featureflag"."rollout_percentage",
|
||||
"posthog_featureflag"."team_id",
|
||||
"posthog_featureflag"."created_by_id",
|
||||
"posthog_featureflag"."created_at",
|
||||
"posthog_featureflag"."deleted",
|
||||
"posthog_featureflag"."active",
|
||||
"posthog_featureflag"."rollback_conditions",
|
||||
"posthog_featureflag"."performed_rollback",
|
||||
"posthog_featureflag"."ensure_experience_continuity",
|
||||
"posthog_featureflag"."usage_dashboard_id",
|
||||
"posthog_featureflag"."has_enriched_analytics"
|
||||
FROM "posthog_featureflag"
|
||||
WHERE ("posthog_featureflag"."active"
|
||||
AND NOT "posthog_featureflag"."deleted"
|
||||
AND "posthog_featureflag"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases.2
|
||||
'
|
||||
SELECT "posthog_cohort"."id",
|
||||
"posthog_cohort"."name",
|
||||
"posthog_cohort"."description",
|
||||
"posthog_cohort"."team_id",
|
||||
"posthog_cohort"."deleted",
|
||||
"posthog_cohort"."filters",
|
||||
"posthog_cohort"."version",
|
||||
"posthog_cohort"."pending_version",
|
||||
"posthog_cohort"."count",
|
||||
"posthog_cohort"."created_by_id",
|
||||
"posthog_cohort"."created_at",
|
||||
"posthog_cohort"."is_calculating",
|
||||
"posthog_cohort"."last_calculation",
|
||||
"posthog_cohort"."errors_calculating",
|
||||
"posthog_cohort"."is_static",
|
||||
"posthog_cohort"."groups"
|
||||
FROM "posthog_cohort"
|
||||
WHERE (NOT "posthog_cohort"."deleted"
|
||||
AND "posthog_cohort"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases.3
|
||||
'
|
||||
SELECT (((("posthog_person"."properties" -> 'number space') > '"100"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number space')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'number space') > '100.0'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number space')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'number space'
|
||||
AND NOT (("posthog_person"."properties" -> 'number space') = 'null')
|
||||
AND ((JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = 'string'
|
||||
AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '"100"')
|
||||
OR (JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = 'number'
|
||||
AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '100.0'))
|
||||
AND "posthog_person"."properties" ? ';''" SELECT 1; DROP TABLE posthog_featureflag;'
|
||||
AND NOT (("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') = 'null')) AS "flag_X_condition_0",
|
||||
(((JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = 'string'
|
||||
AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '"100"')
|
||||
OR (JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = 'number'
|
||||
AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '100.0'))
|
||||
AND "posthog_person"."properties" ? ';''" SELECT 1; DROP TABLE posthog_featureflag;'
|
||||
AND NOT (("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') = 'null')) AS "flag_X_condition_1",
|
||||
(((("posthog_person"."properties" -> 'version!!!') > '"1.05"'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version!!!')) = 'string')
|
||||
OR (("posthog_person"."properties" -> 'version!!!') > '1.05'
|
||||
AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version!!!')) = 'number'))
|
||||
AND "posthog_person"."properties" ? 'version!!!'
|
||||
AND NOT (("posthog_person"."properties" -> 'version!!!') = 'null')) AS "flag_X_condition_2",
|
||||
((("posthog_person"."properties" -> 'nested_prop --random #comment //test') = '"21"'
|
||||
OR ("posthog_person"."properties" -> 'nested_prop --random #comment //test') = '21')
|
||||
AND "posthog_person"."properties" ? 'nested_prop --random #comment //test'
|
||||
AND NOT (("posthog_person"."properties" -> 'nested_prop --random #comment //test') = 'null')) AS "flag_X_condition_3"
|
||||
FROM "posthog_person"
|
||||
INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
|
||||
WHERE ("posthog_persondistinctid"."distinct_id" = '307'
|
||||
AND "posthog_persondistinctid"."team_id" = 2
|
||||
AND "posthog_person"."team_id" = 2)
|
||||
'
|
||||
---
|
||||
# name: TestHashKeyOverridesRaceConditions.test_hash_key_overrides_with_simulated_error_race_conditions_on_person_merging
|
||||
'
|
||||
|
||||
|
@ -460,13 +460,14 @@ class QueryMatchingTest:
        if replace_all_numbers:
            query = re.sub(r"(\"?) = \d+", r"\1 = 2", query)
            query = re.sub(r"(\"?) IN \(\d+(, \d+)*\)", r"\1 IN (1, 2, 3, 4, 5 /* ... */)", query)
-           # feature flag conditions use primary keys as columns in queries, so replace those too
-           query = re.sub(r"flag_\d+_condition", r"flag_X_condition", query)
-           query = re.sub(r"flag_\d+_super_condition", r"flag_X_super_condition", query)
        else:
            query = re.sub(r"(team|cohort)_id(\"?) = \d+", r"\1_id\2 = 2", query)
            query = re.sub(r"\d+ as (team|cohort)_id(\"?)", r"2 as \1_id\2", query)

+       # feature flag conditions use primary keys as columns in queries, so replace those always
+       query = re.sub(r"flag_\d+_condition", r"flag_X_condition", query)
+       query = re.sub(r"flag_\d+_super_condition", r"flag_X_super_condition", query)

        # hog ql checks team ids differently
        query = re.sub(
            r"equals\(([^.]+\.)?team_id?, \d+\)",

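Flag primary keys show up as column aliases in the captured queries (e.g. flag_42_condition_0), which is why the substitution now runs in both branches; a minimal illustration:

import re

query = 'SELECT ... AS "flag_42_condition_0" FROM "posthog_person"'
re.sub(r"flag_\d+_condition", r"flag_X_condition", query)
# -> 'SELECT ... AS "flag_X_condition_0" FROM "posthog_person"'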
@ -4139,6 +4139,694 @@ class TestFeatureFlagMatcher(BaseTest, QueryMatchingTest):
|
||||
def create_feature_flag(self, key="beta-feature", **kwargs):
|
||||
return FeatureFlag.objects.create(team=self.team, name="Beta feature", key=key, created_by=self.user, **kwargs)
|
||||
|
||||
def test_numeric_operator(self):
|
||||
Person.objects.create(
|
||||
team=self.team,
|
||||
distinct_ids=["307"],
|
||||
properties={"number": 30, "string_number": "30", "version": "1.24"},
|
||||
)
|
||||
|
||||
feature_flag1 = self.create_feature_flag(
|
||||
key="random1",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "100",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag2 = self.create_feature_flag(
|
||||
key="random2",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "100b2c",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag3 = self.create_feature_flag(
|
||||
key="random3",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "3.1x00b2c",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag4 = self.create_feature_flag(
|
||||
key="random4",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "20",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag5 = self.create_feature_flag(
|
||||
key="random5",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.05",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.15",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.1200",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag6 = self.create_feature_flag(
|
||||
key="random6",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.206.0",
|
||||
"operator": "lt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag1, "307"),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag2, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag3, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag4, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
# even though we can parse as a number, only do string comparison
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag5, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag6, "307"),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
def test_numeric_operator_with_groups_and_person_flags(self):
|
||||
Person.objects.create(
|
||||
team=self.team,
|
||||
distinct_ids=["307"],
|
||||
properties={"number": 30, "string_number": "30", "version": "1.24"},
|
||||
)
|
||||
GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
|
||||
GroupTypeMapping.objects.create(team=self.team, group_type="project", group_type_index=1)
|
||||
|
||||
Group.objects.create(
|
||||
team=self.team,
|
||||
group_type_index=0,
|
||||
group_key="foo",
|
||||
group_properties={"name": "foo.inc", "number": 50, "string_number": "50"},
|
||||
version=1,
|
||||
)
|
||||
Group.objects.create(
|
||||
team=self.team,
|
||||
group_type_index=1,
|
||||
group_key="foo-project",
|
||||
group_properties={"name": "foo-project", "number": 20, "string_number": "20"},
|
||||
version=1,
|
||||
)
|
||||
|
||||
feature_flag1 = self.create_feature_flag(
|
||||
key="random1",
|
||||
filters={
|
||||
"aggregation_group_type_index": 0,
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "100",
|
||||
"operator": "gt",
|
||||
"group_type_index": 0,
|
||||
"type": "group",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag2 = self.create_feature_flag(
|
||||
key="random2",
|
||||
filters={
|
||||
"aggregation_group_type_index": 1,
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "100b2c",
|
||||
"operator": "gt",
|
||||
"group_type_index": 1,
|
||||
"type": "group",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag3 = self.create_feature_flag(
|
||||
key="random3",
|
||||
filters={
|
||||
"aggregation_group_type_index": 0,
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "3.1x00b2c",
|
||||
"operator": "gte",
|
||||
"type": "person",
|
||||
"group_type_index": 0,
|
||||
"type": "group",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag4 = self.create_feature_flag(
|
||||
key="random4",
|
||||
filters={
|
||||
"aggregation_group_type_index": 0,
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "20",
|
||||
"operator": "gt",
|
||||
"group_type_index": 0,
|
||||
"type": "group",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag4_person = self.create_feature_flag(
|
||||
key="random4_person",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "20",
|
||||
"operator": "gte",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag5 = self.create_feature_flag(
|
||||
key="random5",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.05",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.15",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.1200",
|
||||
"operator": "gte",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
feature_flag6 = self.create_feature_flag(
|
||||
key="random6",
|
||||
filters={
|
||||
"aggregation_group_type_index": 0,
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.206.0",
|
||||
"operator": "lt",
|
||||
"group_type_index": 0,
|
||||
"type": "group",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag1, "307", groups={"organization": "foo", "project": "foo-project"}),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag2, "307", groups={"organization": "foo", "project": "foo-project"}),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag3, "307", groups={"organization": "foo", "project": "foo-project"}),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag4, "307", groups={"organization": "foo", "project": "foo-project"}),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
# even though we can parse as a number, only do string comparison
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag5, "307", groups={"organization": "foo", "project": "foo-project"}),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag6, "307", groups={"organization": "foo", "project": "foo-project"}),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
# Make sure clashes on property name doesn't affect computation
|
||||
with snapshot_postgres_queries_context(self, replace_all_numbers=False):
|
||||
self.assertEqual(
|
||||
FeatureFlagMatcher(
|
||||
[feature_flag1, feature_flag2, feature_flag4_person],
|
||||
"307",
|
||||
groups={"organization": "foo", "project": "foo-project"},
|
||||
).get_matches()[1],
|
||||
{
|
||||
"random1": {
|
||||
"condition_index": 0,
|
||||
"reason": FeatureFlagMatchReason.NO_CONDITION_MATCH,
|
||||
},
|
||||
"random2": {
|
||||
"condition_index": 0,
|
||||
"reason": FeatureFlagMatchReason.CONDITION_MATCH,
|
||||
},
|
||||
"random4_person": {
|
||||
"condition_index": 0,
|
||||
"reason": FeatureFlagMatchReason.CONDITION_MATCH,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
# handle overrides in group properties
|
||||
self.assertEqual(
|
||||
self.match_flag(
|
||||
feature_flag1,
|
||||
"307",
|
||||
groups={"organization": "foo", "project": "foo-project"},
|
||||
group_property_value_overrides={"organization": {"number": 200}, "project": {"number": 1}},
|
||||
),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
# string '30' > string '100' (lexicographically)
|
||||
self.assertEqual(
|
||||
self.match_flag(
|
||||
feature_flag1,
|
||||
"307",
|
||||
groups={"organization": "foo", "project": "foo-project"},
|
||||
group_property_value_overrides={"organization": {"number": "30"}, "project": {"number": 1}},
|
||||
),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(
|
||||
feature_flag1,
|
||||
"307",
|
||||
groups={"organization": "foo", "project": "foo-project"},
|
||||
group_property_value_overrides={"organization": {"number": "01323"}, "project": {"number": 1}},
|
||||
),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(
|
||||
feature_flag1,
|
||||
"307",
|
||||
groups={"organization": "foo", "project": "foo-project"},
|
||||
group_property_value_overrides={"organization": {"number": 0}, "project": {"number": 1}},
|
||||
),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
self.assertEqual(
|
||||
self.match_flag(
|
||||
feature_flag2,
|
||||
"307",
|
||||
groups={"organization": "foo", "project": "foo-project"},
|
||||
group_property_value_overrides={"organization": {"number": "0"}, "project": {"number": 19.999999}},
|
||||
),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
def test_numeric_operator_with_cohorts_and_nested_cohorts(self):
|
||||
Person.objects.create(
|
||||
team=self.team,
|
||||
distinct_ids=["307"],
|
||||
properties={"number": 30, "string_number": "30", "version": "1.24", "nested_prop": 21},
|
||||
)
|
||||
cohort1 = Cohort.objects.create(
|
||||
team=self.team,
|
||||
groups=[
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "100",
|
||||
"type": "person",
|
||||
"operator": "gt",
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
)
|
||||
feature_flag1 = self.create_feature_flag(
|
||||
key="random1",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "id",
|
||||
"value": cohort1.pk,
|
||||
"type": "cohort",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag1, "307"),
|
||||
FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
cohort2 = Cohort.objects.create(
|
||||
team=self.team,
|
||||
groups=[
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version",
|
||||
"value": "1.05",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
)
|
||||
feature_flag2 = self.create_feature_flag(
|
||||
key="random2",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "id",
|
||||
"value": cohort2.pk,
|
||||
"type": "cohort",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag2, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
cohort_nest = Cohort.objects.create(
|
||||
team=self.team,
|
||||
groups=[
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "nested_prop",
|
||||
"value": "20",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
)
|
||||
|
||||
cohort3 = Cohort.objects.create(
|
||||
team=self.team,
|
||||
groups=[
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number",
|
||||
"value": "31",
|
||||
"operator": "lt",
|
||||
"type": "person",
|
||||
},
|
||||
{
|
||||
"key": "id",
|
||||
"value": str(cohort_nest.pk),
|
||||
"type": "cohort",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
)
|
||||
feature_flag3 = self.create_feature_flag(
|
||||
key="random3",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "id",
|
||||
"value": str(cohort3.pk),
|
||||
"type": "cohort",
|
||||
},
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag3, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
|
||||
)
|
||||
|
||||
# Make sure clashes on property name doesn't affect computation
|
||||
with snapshot_postgres_queries_context(self, replace_all_numbers=False):
|
||||
self.assertEqual(
|
||||
FeatureFlagMatcher(
|
||||
[feature_flag1, feature_flag2, feature_flag3],
|
||||
"307",
|
||||
).get_matches()[1],
|
||||
{
|
||||
"random1": {
|
||||
"condition_index": 0,
|
||||
"reason": FeatureFlagMatchReason.NO_CONDITION_MATCH,
|
||||
},
|
||||
"random2": {
|
||||
"condition_index": 0,
|
||||
"reason": FeatureFlagMatchReason.CONDITION_MATCH,
|
||||
},
|
||||
"random3": {
|
||||
"condition_index": 0,
|
||||
"reason": FeatureFlagMatchReason.CONDITION_MATCH,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
@snapshot_postgres_queries
|
||||
def test_with_sql_injection_properties_and_other_aliases(self):
|
||||
Person.objects.create(
|
||||
team=self.team,
|
||||
distinct_ids=["307"],
|
||||
properties={
|
||||
"number space": 30,
|
||||
";'\" SELECT 1; DROP TABLE posthog_featureflag;": "30",
|
||||
"version!!!": "1.24",
|
||||
"nested_prop --random #comment //test": 21,
|
||||
},
|
||||
)
|
||||
cohort1 = Cohort.objects.create(
|
||||
team=self.team,
|
||||
groups=[
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "number space",
|
||||
"value": "100",
|
||||
"type": "person",
|
||||
"operator": "gt",
|
||||
},
|
||||
{
|
||||
"key": ";'\" SELECT 1; DROP TABLE posthog_featureflag;",
|
||||
"value": "100",
|
||||
"type": "person",
|
||||
"operator": "gt",
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
)
|
||||
feature_flag1 = self.create_feature_flag(
|
||||
key="random1",
|
||||
filters={
|
||||
"groups": [
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "id",
|
||||
"value": cohort1.pk,
|
||||
"type": "cohort",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": ";'\" SELECT 1; DROP TABLE posthog_featureflag;",
|
||||
"value": "100",
|
||||
"type": "person",
|
||||
"operator": "gt",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "version!!!",
|
||||
"value": "1.05",
|
||||
"operator": "gt",
|
||||
"type": "person",
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
"properties": [
|
||||
{
|
||||
"key": "nested_prop --random #comment //test",
|
||||
"value": "21",
|
||||
"type": "person",
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
self.match_flag(feature_flag1, "307"),
|
||||
FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 1),
|
||||
)
|
||||
|
||||
|
||||
class TestFeatureFlagHashKeyOverrides(BaseTest, QueryMatchingTest):
|
||||
person: Person
|
||||
|