Mirror of https://github.com/django/django.git
Fixed #33124 -- Avoided accessing the database connections when not necessary.

Follow up to bf5abf1bdc.

This also caches the __getitem__ access.
committed by Mariusz Felisiak

parent 7fe9b6f6df
commit 06c50cee0f

django/contrib/gis/utils/layermapping.py
@@ -101,7 +101,8 @@ class LayerMapping:
         self.layer = self.ds[layer]
 
         self.using = using if using is not None else router.db_for_write(model)
-        self.spatial_backend = connections[self.using].ops
+        connection = connections[self.using]
+        self.spatial_backend = connection.ops
 
         # Setting the mapping & model attributes.
         self.mapping = mapping
@@ -113,7 +114,7 @@ class LayerMapping:
 
         # Getting the geometry column associated with the model (an
         # exception will be raised if there is no geometry column).
-        if connections[self.using].features.supports_transform:
+        if connection.features.supports_transform:
             self.geo_field = self.geometry_field()
         else:
             transform = False

django/db/models/fields/related_descriptors.py
@@ -1100,8 +1100,8 @@ def create_forward_many_to_many_manager(superclass, rel, reverse):
             # user-defined intermediary models as they could have other fields
             # causing conflicts which must be surfaced.
             can_ignore_conflicts = (
-                connections[db].features.supports_ignore_conflicts and
-                self.through._meta.auto_created is not False
+                self.through._meta.auto_created is not False and
+                connections[db].features.supports_ignore_conflicts
             )
             # Don't send the signal when inserting duplicate data row
             # for symmetrical reverse entries.
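
Swapping the operands above matters because `and` evaluates left to right: with the patched order, an add through a user-defined intermediary model (auto_created is False) resolves can_ignore_conflicts without ever subscripting connections. A toy demonstration of the short-circuit (names are illustrative):

def feature_check():
    print('connection touched')  # stands in for connections[db].features...
    return True

auto_created = False  # a user-defined through model

# Old order: always pays for the connection access.
can_ignore = feature_check() and auto_created is not False  # prints

# New order: the cheap check fails first, so nothing else runs.
can_ignore = auto_created is not False and feature_check()  # silent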

django/db/models/query.py
@@ -498,7 +498,6 @@ class QuerySet:
         if not objs:
             return objs
         self._for_write = True
-        connection = connections[self.db]
         opts = self.model._meta
         fields = opts.concrete_fields
         objs = list(objs)
@@ -521,6 +520,7 @@ class QuerySet:
                 returned_columns = self._batched_insert(
                     objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
                 )
+                connection = connections[self.db]
                 if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
                     assert len(returned_columns) == len(objs_without_pk)
                 for obj_without_pk, results in zip(objs_without_pk, returned_columns):
@@ -551,9 +551,10 @@ class QuerySet:
             return 0
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
-        max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
+        connection = connections[self.db]
+        max_batch_size = connection.ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
-        requires_casting = connections[self.db].features.requires_casted_case_in_updates
+        requires_casting = connection.features.requires_casted_case_in_updates
         batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
         updates = []
         for batch_objs in batches:
@@ -1308,13 +1309,14 @@ class QuerySet:
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
-        if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
+        connection = connections[self.db]
+        if ignore_conflicts and not connection.features.supports_ignore_conflicts:
             raise NotSupportedError('This database backend does not support ignoring conflicts.')
-        ops = connections[self.db].ops
+        ops = connection.ops
         max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         inserted_rows = []
-        bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
+        bulk_return = connection.features.can_return_rows_from_bulk_insert
         for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
             if bulk_return and not ignore_conflicts:
                 inserted_rows.extend(self._insert(
@@ -1523,10 +1525,8 @@ class RawQuerySet:
     def iterator(self):
         # Cache some things for performance reasons outside the loop.
         db = self.db
-        compiler = connections[db].ops.compiler('SQLCompiler')(
-            self.query, connections[db], db
-        )
-
+        connection = connections[db]
+        compiler = connection.ops.compiler('SQLCompiler')(self.query, connection, db)
         query = iter(self.query)
 
         try:
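
The QuerySet hunks above follow the same two patterns: bulk_create() defers the connections[self.db] lookup into the one branch that consults a backend feature flag, while bulk_update(), _batched_insert(), and RawQuerySet.iterator() hoist repeated subscripts into a local. A sketch of the hoisting, shaped like _batched_insert (batch_plan is a hypothetical helper):

from django.db import connections

def batch_plan(db_alias, fields, objs):
    # One __getitem__ instead of three; features and ops come from a local.
    connection = connections[db_alias]
    max_batch_size = max(connection.ops.bulk_batch_size(fields, objs), 1)
    return (
        max_batch_size,
        connection.features.supports_ignore_conflicts,
        connection.features.can_return_rows_from_bulk_insert,
    )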

django/db/models/sql/query.py
@@ -2332,10 +2332,10 @@ class Query(BaseExpression):
         # used. The proper fix would be to defer all decisions where
         # is_nullable() is needed to the compiler stage, but that is not easy
         # to do currently.
-        return (
-            connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
-            field.empty_strings_allowed
-        ) or field.null
+        return field.null or (
+            field.empty_strings_allowed and
+            connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
+        )
 
 
 def get_order_dir(field, default='ASC'):
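
Reordering is_nullable() gives `or` the same short-circuit benefit: a field with null=True answers immediately, and a field that disallows empty strings (non-string fields, typically) fails the inner `and` before connections[DEFAULT_DB_ALIAS] is reached. Restated outside Query for illustration:

from django.db import DEFAULT_DB_ALIAS, connections

def is_nullable(field):
    # Evaluation order: field.null (no connection access), then
    # field.empty_strings_allowed (no connection access), and only for a
    # NOT NULL string-type field the default backend's flag (True on Oracle).
    return field.null or (
        field.empty_strings_allowed and
        connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
    )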