import datetime
import pickle

import django
from django.db import models
from django.test import TestCase

from .models import (
    BinaryFieldModel,
    Container,
    Event,
    Group,
    Happening,
    M2MModel,
    MyEvent,
)
|
|
class PickleabilityTestCase(TestCase):
    """QuerySets, models, managers, and related exception classes pickle
    and unpickle without losing their results or identity."""

    @classmethod
    def setUpTestData(cls):
        cls.happening = (
            Happening.objects.create()
        )  # make sure the defaults are working (#20158)

    def assert_pickles(self, qs):
        # Round-tripping a queryset through pickle must yield the same rows.
        self.assertEqual(list(pickle.loads(pickle.dumps(qs))), list(qs))

    def test_binaryfield(self):
        BinaryFieldModel.objects.create(data=b"binary data")
        self.assert_pickles(BinaryFieldModel.objects.all())

    def test_related_field(self):
        g = Group.objects.create(name="Ponies Who Own Maybachs")
        self.assert_pickles(Event.objects.filter(group=g.id))

    def test_datetime_callable_default_all(self):
        self.assert_pickles(Happening.objects.all())

    def test_datetime_callable_default_filter(self):
        self.assert_pickles(Happening.objects.filter(when=datetime.datetime.now()))

    def test_string_as_default(self):
        self.assert_pickles(Happening.objects.filter(name="test"))

    def test_standalone_method_as_default(self):
        self.assert_pickles(Happening.objects.filter(number1=1))

    def test_staticmethod_as_default(self):
        self.assert_pickles(Happening.objects.filter(number2=1))

    def test_filter_reverse_fk(self):
        self.assert_pickles(Group.objects.filter(event=1))

    def test_doesnotexist_exception(self):
        # Ticket #17776
        original = Event.DoesNotExist("Doesn't exist")
        unpickled = pickle.loads(pickle.dumps(original))

        # Exceptions are not equal to equivalent instances of themselves, so
        # can't just use assertEqual(original, unpickled)
        self.assertEqual(original.__class__, unpickled.__class__)
        self.assertEqual(original.args, unpickled.args)

    def test_doesnotexist_class(self):
        klass = Event.DoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_multipleobjectsreturned_class(self):
        klass = Event.MultipleObjectsReturned
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_forward_relatedobjectdoesnotexist_class(self):
        # ForwardManyToOneDescriptor
        klass = Event.group.RelatedObjectDoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)
        # ForwardOneToOneDescriptor
        klass = Happening.event.RelatedObjectDoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_reverse_one_to_one_relatedobjectdoesnotexist_class(self):
        klass = Event.happening.RelatedObjectDoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_manager_pickle(self):
        pickle.loads(pickle.dumps(Happening.objects))

    def test_model_pickle(self):
        """
        A model not defined on module level is picklable.
        """
        original = Container.SomeModel(pk=1)
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)
        # Also, deferred dynamic model works
        Container.SomeModel.objects.create(somefield=1)
        original = Container.SomeModel.objects.defer("somefield")[0]
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)
        self.assertEqual(original.somefield, reloaded.somefield)

    def test_model_pickle_m2m(self):
        """
        Test intentionally the automatically created through model.
        """
        m1 = M2MModel.objects.create()
        g1 = Group.objects.create(name="foof")
        m1.groups.add(g1)
        m2m_through = M2MModel._meta.get_field("groups").remote_field.through
        original = m2m_through.objects.get()
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)

    def test_model_pickle_dynamic(self):
        class Meta:
            proxy = True

        dynclass = type(
            "DynamicEventSubclass",
            (Event,),
            {"Meta": Meta, "__module__": Event.__module__},
        )
        original = dynclass(pk=1)
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)
        self.assertIs(reloaded.__class__, dynclass)

    def test_specialized_queryset(self):
        self.assert_pickles(Happening.objects.values("name"))
        self.assert_pickles(Happening.objects.values("name").dates("when", "year"))
        # With related field (#14515)
        self.assert_pickles(
            Event.objects.select_related("group")
            .order_by("title")
            .values_list("title", "group__name")
        )

    def test_pickle_prefetch_related_idempotence(self):
        g = Group.objects.create(name="foo")
        groups = Group.objects.prefetch_related("event_set")

        # First pickling
        groups = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups, [g])

        # Second pickling
        groups = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups, [g])

    def test_pickle_prefetch_queryset_usable_outside_of_prefetch(self):
        # Prefetch shouldn't affect the fetch-on-pickle behavior of the
        # queryset passed to it.
        Group.objects.create(name="foo")
        events = Event.objects.order_by("id")
        Group.objects.prefetch_related(models.Prefetch("event_set", queryset=events))
        with self.assertNumQueries(1):
            events2 = pickle.loads(pickle.dumps(events))
        with self.assertNumQueries(0):
            list(events2)

    def test_pickle_prefetch_queryset_still_usable(self):
        g = Group.objects.create(name="foo")
        groups = Group.objects.prefetch_related(
            models.Prefetch("event_set", queryset=Event.objects.order_by("id"))
        )
        groups2 = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups2.filter(id__gte=0), [g])

    def test_pickle_prefetch_queryset_not_evaluated(self):
        Group.objects.create(name="foo")
        groups = Group.objects.prefetch_related(
            models.Prefetch("event_set", queryset=Event.objects.order_by("id"))
        )
        list(groups)  # evaluate QuerySet
        with self.assertNumQueries(0):
            pickle.loads(pickle.dumps(groups))

    def test_pickle_prefetch_related_with_m2m_and_objects_deletion(self):
        """
        #24831 -- Cached properties on ManyToOneRel created in QuerySet.delete()
        caused subsequent QuerySet pickling to fail.
        """
        g = Group.objects.create(name="foo")
        m2m = M2MModel.objects.create()
        m2m.groups.add(g)
        Group.objects.all().delete()

        m2ms = M2MModel.objects.prefetch_related("groups")
        m2ms = pickle.loads(pickle.dumps(m2ms))
        self.assertSequenceEqual(m2ms, [m2m])

    def test_pickle_boolean_expression_in_Q__queryset(self):
        group = Group.objects.create(name="group")
        Event.objects.create(title="event", group=group)
        groups = Group.objects.filter(
            models.Q(
                models.Exists(
                    Event.objects.filter(group_id=models.OuterRef("id")),
                )
            ),
        )
        groups2 = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups2, [group])

    def test_pickle_exists_queryset_still_usable(self):
        group = Group.objects.create(name="group")
        Event.objects.create(title="event", group=group)
        groups = Group.objects.annotate(
            has_event=models.Exists(
                Event.objects.filter(group_id=models.OuterRef("id")),
            ),
        )
        groups2 = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups2.filter(has_event=True), [group])

    def test_pickle_exists_queryset_not_evaluated(self):
        group = Group.objects.create(name="group")
        Event.objects.create(title="event", group=group)
        groups = Group.objects.annotate(
            has_event=models.Exists(
                Event.objects.filter(group_id=models.OuterRef("id")),
            ),
        )
        list(groups)  # evaluate QuerySet.
        with self.assertNumQueries(0):
            self.assert_pickles(groups)

    def test_pickle_exists_kwargs_queryset_not_evaluated(self):
        group = Group.objects.create(name="group")
        Event.objects.create(title="event", group=group)
        groups = Group.objects.annotate(
            has_event=models.Exists(
                queryset=Event.objects.filter(group_id=models.OuterRef("id")),
            ),
        )
        list(groups)  # evaluate QuerySet.
        with self.assertNumQueries(0):
            self.assert_pickles(groups)

    def test_pickle_subquery_queryset_not_evaluated(self):
        group = Group.objects.create(name="group")
        Event.objects.create(title="event", group=group)
        groups = Group.objects.annotate(
            event_title=models.Subquery(
                Event.objects.filter(group_id=models.OuterRef("id")).values("title"),
            ),
        )
        list(groups)  # evaluate QuerySet.
        with self.assertNumQueries(0):
            self.assert_pickles(groups)

    def test_pickle_filteredrelation(self):
        group = Group.objects.create(name="group")
        event_1 = Event.objects.create(title="Big event", group=group)
        event_2 = Event.objects.create(title="Small event", group=group)
        Happening.objects.bulk_create(
            [
                Happening(event=event_1, number1=5),
                Happening(event=event_2, number1=3),
            ]
        )
        groups = Group.objects.annotate(
            big_events=models.FilteredRelation(
                "event",
                condition=models.Q(event__title__startswith="Big"),
            ),
        ).annotate(sum_number=models.Sum("big_events__happening__number1"))
        groups_query = pickle.loads(pickle.dumps(groups.query))
        groups = Group.objects.all()
        groups.query = groups_query
        self.assertEqual(groups.get().sum_number, 5)

    def test_pickle_filteredrelation_m2m(self):
        group = Group.objects.create(name="group")
        m2mmodel = M2MModel.objects.create(added=datetime.date(2020, 1, 1))
        m2mmodel.groups.add(group)
        groups = Group.objects.annotate(
            first_m2mmodels=models.FilteredRelation(
                "m2mmodel",
                condition=models.Q(m2mmodel__added__year=2020),
            ),
        ).annotate(count_groups=models.Count("first_m2mmodels__groups"))
        groups_query = pickle.loads(pickle.dumps(groups.query))
        groups = Group.objects.all()
        groups.query = groups_query
        self.assertEqual(groups.get().count_groups, 1)

    def test_annotation_with_callable_default(self):
        # Happening.when has a callable default of datetime.datetime.now.
        qs = Happening.objects.annotate(latest_time=models.Max("when"))
        self.assert_pickles(qs)

    def test_annotation_values(self):
        qs = Happening.objects.values("name").annotate(latest_time=models.Max("when"))
        reloaded = Happening.objects.all()
        reloaded.query = pickle.loads(pickle.dumps(qs.query))
        self.assertEqual(
            reloaded.get(),
            {"name": "test", "latest_time": self.happening.when},
        )

    def test_annotation_values_list(self):
        # values_list() is reloaded to values() when using a pickled query.
        tests = [
            Happening.objects.values_list("name"),
            Happening.objects.values_list("name", flat=True),
            Happening.objects.values_list("name", named=True),
        ]
        for qs in tests:
            with self.subTest(qs._iterable_class.__name__):
                reloaded = Happening.objects.all()
                reloaded.query = pickle.loads(pickle.dumps(qs.query))
                self.assertEqual(reloaded.get(), {"name": "test"})

    def test_filter_deferred(self):
        qs = Happening.objects.all()
        qs._defer_next_filter = True
        qs = qs.filter(id=0)
        self.assert_pickles(qs)

    def test_missing_django_version_unpickling(self):
        """
        #21430 -- Verifies a warning is raised for querysets that are
        unpickled without a Django version
        """
        qs = Group.missing_django_version_objects.all()
        msg = "Pickled queryset instance's Django version is not specified."
        with self.assertRaisesMessage(RuntimeWarning, msg):
            pickle.loads(pickle.dumps(qs))

    def test_unsupported_unpickle(self):
        """
        #21430 -- Verifies a warning is raised for querysets that are
        unpickled with a different Django version than the current
        """
        qs = Group.previous_django_version_objects.all()
        msg = (
            "Pickled queryset instance's Django version 1.0 does not match "
            "the current version %s." % django.__version__
        )
        with self.assertRaisesMessage(RuntimeWarning, msg):
            pickle.loads(pickle.dumps(qs))

    def test_order_by_model_with_abstract_inheritance_and_meta_ordering(self):
        group = Group.objects.create(name="test")
        event = MyEvent.objects.create(title="test event", group=group)
        event.edition_set.create()
        self.assert_pickles(event.edition_set.order_by("event"))
|
|
|
|
|
2016-08-31 20:16:39 +02:00
|
|
|
|
|
|
|
class InLookupTests(TestCase):
    """Pickling a query containing an __in lookup must not evaluate the
    inner queryset/query (it is serialized as SQL, not as values)."""

    @classmethod
    def setUpTestData(cls):
        for i in range(1, 3):
            group = Group.objects.create(name="Group {}".format(i))
        cls.e1 = Event.objects.create(title="Event 1", group=group)

    def test_in_lookup_queryset_evaluation(self):
        """
        Neither pickling nor unpickling a QuerySet.query with an __in=inner_qs
        lookup should evaluate inner_qs.
        """
        events = Event.objects.filter(group__in=Group.objects.all())

        with self.assertNumQueries(0):
            dumped = pickle.dumps(events.query)

        with self.assertNumQueries(0):
            reloaded = pickle.loads(dumped)
            reloaded_events = Event.objects.none()
            reloaded_events.query = reloaded

        self.assertSequenceEqual(reloaded_events, [self.e1])

    def test_in_lookup_query_evaluation(self):
        events = Event.objects.filter(group__in=Group.objects.values("id").query)

        with self.assertNumQueries(0):
            dumped = pickle.dumps(events.query)

        with self.assertNumQueries(0):
            reloaded = pickle.loads(dumped)
            reloaded_events = Event.objects.none()
            reloaded_events.query = reloaded

        self.assertSequenceEqual(reloaded_events, [self.e1])