
Fixed #33124 -- Avoided accessing the database connections when not necessary.

Follow up to bf5abf1bdc.

This also caches the __getitem__ access.
Keryn Knight 2021-08-14 19:30:58 +01:00 committed by Mariusz Felisiak
parent 7fe9b6f6df
commit 06c50cee0f
4 changed files with 19 additions and 18 deletions
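
A note on the pattern this commit applies throughout: django.db.connections is a connection handler whose __getitem__ validates the alias and returns the per-alias connection wrapper, creating it on first use, so every connections[alias] subscript repeats that lookup. A minimal sketch of what hoisting the subscript into a local buys, using a counting stand-in rather than Django's actual handler:

    class ConnectionHandler:
        """Stand-in for django.db.connections that counts __getitem__ calls."""
        def __init__(self):
            self.lookups = 0
            self._wrappers = {}

        def __getitem__(self, alias):
            # The real handler validates the alias and lazily builds a
            # per-alias connection wrapper at this point.
            self.lookups += 1
            return self._wrappers.setdefault(alias, object())

    connections = ConnectionHandler()

    # Before: every use pays for a fresh lookup.
    for _ in range(3):
        connections['default'].__class__
    print(connections.lookups)  # 3

    # After: one lookup, cached in a local and reused freely.
    connection = connections['default']
    connection.__class__
    connection.__class__
    print(connections.lookups)  # 4, however often `connection` is used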

django/contrib/gis/utils/layermapping.py

@@ -101,7 +101,8 @@ class LayerMapping:
         self.layer = self.ds[layer]
         self.using = using if using is not None else router.db_for_write(model)
-        self.spatial_backend = connections[self.using].ops
+        connection = connections[self.using]
+        self.spatial_backend = connection.ops
         # Setting the mapping & model attributes.
         self.mapping = mapping
@@ -113,7 +114,7 @@ class LayerMapping:
             # Getting the geometry column associated with the model (an
             # exception will be raised if there is no geometry column).
-            if connections[self.using].features.supports_transform:
+            if connection.features.supports_transform:
                 self.geo_field = self.geometry_field()
             else:
                 transform = False

django/db/models/fields/related_descriptors.py

@@ -1100,8 +1100,8 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
             # user-defined intermediary models as they could have other fields
             # causing conflicts which must be surfaced.
             can_ignore_conflicts = (
-                connections[db].features.supports_ignore_conflicts and
-                self.through._meta.auto_created is not False
+                self.through._meta.auto_created is not False and
+                connections[db].features.supports_ignore_conflicts
             )
             # Don't send the signal when inserting duplicate data rows
             # for symmetrical reverse entries.
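
The swap above is behavioral, not cosmetic: Python's `and` short-circuits, so evaluating the plain self.through._meta.auto_created attribute first means connections[db].features, and with it the connection handler, is only consulted for auto-created intermediary models. A small sketch of the effect, with a hypothetical features probe standing in for the real flags:

    class Features:
        def __init__(self):
            self.consulted = False

        @property
        def supports_ignore_conflicts(self):
            self.consulted = True
            return True

    features = Features()
    auto_created = False  # a user-defined intermediary model

    # New ordering: the cheap check settles the result, the flag is never read.
    can_ignore_conflicts = (
        auto_created is not False and
        features.supports_ignore_conflicts
    )
    print(can_ignore_conflicts, features.consulted)  # False False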

django/db/models/query.py

@@ -498,7 +498,6 @@ class QuerySet:
         if not objs:
             return objs
         self._for_write = True
-        connection = connections[self.db]
         opts = self.model._meta
         fields = opts.concrete_fields
         objs = list(objs)
@@ -521,6 +520,7 @@ class QuerySet:
                 returned_columns = self._batched_insert(
                     objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
                 )
+                connection = connections[self.db]
                 if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
                     assert len(returned_columns) == len(objs_without_pk)
                 for obj_without_pk, results in zip(objs_without_pk, returned_columns):
@@ -551,9 +551,10 @@ class QuerySet:
             return 0
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
-        max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
+        connection = connections[self.db]
+        max_batch_size = connection.ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
-        requires_casting = connections[self.db].features.requires_casted_case_in_updates
+        requires_casting = connection.features.requires_casted_case_in_updates
         batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
         updates = []
         for batch_objs in batches:
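
The comment in this hunk carries the batch-size arithmetic: each row of the generated UPDATE spends two placeholders on the primary key (one in the filter, one in the WHEN) plus one per updated field. A hedged illustration of how a batch size could fall out of a parameter limit, assuming 999 in the spirit of SQLite's historical default; the real figure comes from connection.ops.bulk_batch_size and varies by backend:

    def approx_bulk_update_batch_size(num_fields, max_params=999):
        # 'pk' is counted twice (filter + WHEN), then one placeholder per field.
        per_row = 2 + num_fields
        return max(max_params // per_row, 1)

    print(approx_bulk_update_batch_size(3))  # 199 rows per batch under these assumptions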
@@ -1308,13 +1309,14 @@ class QuerySet:
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
-        if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
+        connection = connections[self.db]
+        if ignore_conflicts and not connection.features.supports_ignore_conflicts:
             raise NotSupportedError('This database backend does not support ignoring conflicts.')
-        ops = connections[self.db].ops
+        ops = connection.ops
         max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         inserted_rows = []
-        bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
+        bulk_return = connection.features.can_return_rows_from_bulk_insert
         for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
             if bulk_return and not ignore_conflicts:
                 inserted_rows.extend(self._insert(
@@ -1523,10 +1525,8 @@ class RawQuerySet:
     def iterator(self):
         # Cache some things for performance reasons outside the loop.
         db = self.db
-        compiler = connections[db].ops.compiler('SQLCompiler')(
-            self.query, connections[db], db
-        )
+        connection = connections[db]
+        compiler = connection.ops.compiler('SQLCompiler')(self.query, connection, db)
         query = iter(self.query)
         try:

django/db/models/sql/query.py

@@ -2332,10 +2332,10 @@ class Query(BaseExpression):
     # used. The proper fix would be to defer all decisions where
     # is_nullable() is needed to the compiler stage, but that is not easy
     # to do currently.
-    return (
-        connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
-        field.empty_strings_allowed
-    ) or field.null
+    return field.null or (
+        field.empty_strings_allowed and
+        connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
+    )
 
 
 def get_order_dir(field, default='ASC'):
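
This final hunk mirrors the can_ignore_conflicts reordering: field.null is a plain attribute, so testing it first lets is_nullable() return for nullable fields without ever subscripting connections. A sketch with a stand-in handler that would complain if touched:

    class LoudHandler:
        def __getitem__(self, alias):
            raise AssertionError('the connection handler should not be touched')

    connections = LoudHandler()
    DEFAULT_DB_ALIAS = 'default'
    field_null, empty_strings_allowed = True, True

    # `or` short-circuits on field_null, so the subscript never happens.
    result = field_null or (
        empty_strings_allowed and
        connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
    )
    print(result)  # True, and the AssertionError never fires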