diff --git a/django/contrib/gis/db/backends/postgis/adapter.py b/django/contrib/gis/db/backends/postgis/adapter.py
index 9161e25f16..c95f903253 100644
--- a/django/contrib/gis/db/backends/postgis/adapter.py
+++ b/django/contrib/gis/db/backends/postgis/adapter.py
@@ -1,8 +1,6 @@
 """
  This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
 """
-from psycopg2.extensions import ISQLQuote
-
 from django.contrib.gis.db.backends.postgis.pgraster import to_pgraster
 from django.contrib.gis.geos import GEOSGeometry
 from django.db.backends.postgresql.psycopg_any import sql
@@ -27,6 +25,8 @@ class PostGISAdapter:
 
     def __conform__(self, proto):
         """Does the given protocol conform to what Psycopg2 expects?"""
+        from psycopg2.extensions import ISQLQuote
+
         if proto == ISQLQuote:
             return self
         else:
diff --git a/django/contrib/gis/db/backends/postgis/base.py b/django/contrib/gis/db/backends/postgis/base.py
index 98c2813aa2..23ec0553f8 100644
--- a/django/contrib/gis/db/backends/postgis/base.py
+++ b/django/contrib/gis/db/backends/postgis/base.py
@@ -1,17 +1,93 @@
-from django.db.backends.base.base import NO_DB_ALIAS
-from django.db.backends.postgresql.base import (
-    DatabaseWrapper as Psycopg2DatabaseWrapper,
-)
+from functools import lru_cache
+
+from django.db.backends.base.base import NO_DB_ALIAS
+from django.db.backends.postgresql.base import DatabaseWrapper as PsycopgDatabaseWrapper
+from django.db.backends.postgresql.psycopg_any import is_psycopg3
+
+from .adapter import PostGISAdapter
 from .features import DatabaseFeatures
 from .introspection import PostGISIntrospection
 from .operations import PostGISOperations
 from .schema import PostGISSchemaEditor
 
+if is_psycopg3:
+    from psycopg.adapt import Dumper
+    from psycopg.pq import Format
+    from psycopg.types import TypeInfo
+    from psycopg.types.string import TextBinaryLoader, TextLoader
 
-class DatabaseWrapper(Psycopg2DatabaseWrapper):
+    class GeometryType:
+        pass
+
+    class GeographyType:
+        pass
+
+    class RasterType:
+        pass
+
+    class BaseTextDumper(Dumper):
+        def dump(self, obj):
+            # Return bytes as hex for text formatting
+            return obj.ewkb.hex().encode()
+
+    class BaseBinaryDumper(Dumper):
+        format = Format.BINARY
+
+        def dump(self, obj):
+            return obj.ewkb
+
+    @lru_cache
+    def postgis_adapters(geo_oid, geog_oid, raster_oid):
+        class BaseDumper(Dumper):
+            def __init_subclass__(cls, base_dumper):
+                super().__init_subclass__()
+
+                cls.GeometryDumper = type(
+                    "GeometryDumper", (base_dumper,), {"oid": geo_oid}
+                )
+                cls.GeographyDumper = type(
+                    "GeographyDumper", (base_dumper,), {"oid": geog_oid}
+                )
+                cls.RasterDumper = type(
+                    "RasterDumper", (BaseTextDumper,), {"oid": raster_oid}
+                )
+
+            def get_key(self, obj, format):
+                if obj.is_geometry:
+                    return GeographyType if obj.geography else GeometryType
+                else:
+                    return RasterType
+
+            def upgrade(self, obj, format):
+                if obj.is_geometry:
+                    if obj.geography:
+                        return self.GeographyDumper(GeographyType)
+                    else:
+                        return self.GeometryDumper(GeometryType)
+                else:
+                    return self.RasterDumper(RasterType)
+
+            def dump(self, obj):
+                raise NotImplementedError
+
+        class PostGISTextDumper(BaseDumper, base_dumper=BaseTextDumper):
+            pass
+
+        class PostGISBinaryDumper(BaseDumper, base_dumper=BaseBinaryDumper):
+            format = Format.BINARY
+
+        return PostGISTextDumper, PostGISBinaryDumper
+
+
+class DatabaseWrapper(PsycopgDatabaseWrapper):
     SchemaEditorClass = PostGISSchemaEditor
 
+    _type_infos = {
+        "geometry": {},
+        "geography": {},
+        "raster": {},
+    }
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         if kwargs.get("alias", "") != NO_DB_ALIAS:
@@ -27,3 +103,45 @@ class DatabaseWrapper(Psycopg2DatabaseWrapper):
             if bool(cursor.fetchone()):
                 return
             cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis")
+            if is_psycopg3:
+                # Ensure adapters are registered if PostGIS is used within this
+                # connection.
+                self.register_geometry_adapters(self.connection, True)
+
+    def get_new_connection(self, conn_params):
+        connection = super().get_new_connection(conn_params)
+        if is_psycopg3:
+            self.register_geometry_adapters(connection)
+        return connection
+
+    if is_psycopg3:
+
+        def _register_type(self, pg_connection, typename):
+            registry = self._type_infos[typename]
+            try:
+                info = registry[self.alias]
+            except KeyError:
+                info = TypeInfo.fetch(pg_connection, typename)
+                registry[self.alias] = info
+
+            if info:  # Can be None if the type does not exist (yet).
+                info.register(pg_connection)
+                pg_connection.adapters.register_loader(info.oid, TextLoader)
+                pg_connection.adapters.register_loader(info.oid, TextBinaryLoader)
+
+            return info.oid if info else None
+
+        def register_geometry_adapters(self, pg_connection, clear_caches=False):
+            if clear_caches:
+                for typename in self._type_infos:
+                    self._type_infos[typename].pop(self.alias, None)
+
+            geo_oid = self._register_type(pg_connection, "geometry")
+            geog_oid = self._register_type(pg_connection, "geography")
+            raster_oid = self._register_type(pg_connection, "raster")
+
+            PostGISTextDumper, PostGISBinaryDumper = postgis_adapters(
+                geo_oid, geog_oid, raster_oid
+            )
+            pg_connection.adapters.register_dumper(PostGISAdapter, PostGISTextDumper)
+            pg_connection.adapters.register_dumper(PostGISAdapter, PostGISBinaryDumper)
diff --git a/django/contrib/gis/db/backends/postgis/features.py b/django/contrib/gis/db/backends/postgis/features.py
index 29a1079631..d96e939db3 100644
--- a/django/contrib/gis/db/backends/postgis/features.py
+++ b/django/contrib/gis/db/backends/postgis/features.py
@@ -1,10 +1,10 @@
 from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
 from django.db.backends.postgresql.features import (
-    DatabaseFeatures as Psycopg2DatabaseFeatures,
+    DatabaseFeatures as PsycopgDatabaseFeatures,
 )
 
 
-class DatabaseFeatures(BaseSpatialFeatures, Psycopg2DatabaseFeatures):
+class DatabaseFeatures(BaseSpatialFeatures, PsycopgDatabaseFeatures):
     supports_geography = True
     supports_3d_storage = True
     supports_3d_functions = True
diff --git a/django/contrib/gis/db/backends/postgis/operations.py b/django/contrib/gis/db/backends/postgis/operations.py
index 31ad31e2e5..070f670a0b 100644
--- a/django/contrib/gis/db/backends/postgis/operations.py
+++ b/django/contrib/gis/db/backends/postgis/operations.py
@@ -11,6 +11,7 @@ from django.contrib.gis.measure import Distance
 from django.core.exceptions import ImproperlyConfigured
 from django.db import NotSupportedError, ProgrammingError
 from django.db.backends.postgresql.operations import DatabaseOperations
+from django.db.backends.postgresql.psycopg_any import is_psycopg3
 from django.db.models import Func, Value
 from django.utils.functional import cached_property
 from django.utils.version import get_version_tuple
@@ -161,7 +162,8 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
 
     unsupported_functions = set()
 
-    select = "%s::bytea"
+    select = "%s" if is_psycopg3 else "%s::bytea"
+
     select_extent = None
 
     @cached_property
@@ -407,6 +409,8 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
         geom_class = expression.output_field.geom_class
 
         def converter(value, expression, connection):
+            if isinstance(value, str):  # Coming from hex strings.
+                value = value.encode("ascii")
             return None if value is None else GEOSGeometryBase(read(value), geom_class)
 
         return converter
diff --git a/django/contrib/postgres/fields/array.py b/django/contrib/postgres/fields/array.py
index eaff032465..8477dd9fff 100644
--- a/django/contrib/postgres/fields/array.py
+++ b/django/contrib/postgres/fields/array.py
@@ -237,7 +237,7 @@ class ArrayField(CheckFieldDefaultMixin, Field):
 
 class ArrayRHSMixin:
     def __init__(self, lhs, rhs):
-        # Don't wrap arrays that contains only None values, psycopg2 doesn't
+        # Don't wrap arrays that contain only None values, psycopg doesn't
         # allow this.
         if isinstance(rhs, (tuple, list)) and any(self._rhs_not_none_values(rhs)):
             expressions = []
diff --git a/django/contrib/postgres/fields/ranges.py b/django/contrib/postgres/fields/ranges.py
index d5c438dbdc..fbb6012660 100644
--- a/django/contrib/postgres/fields/ranges.py
+++ b/django/contrib/postgres/fields/ranges.py
@@ -9,6 +9,7 @@ from django.db.backends.postgresql.psycopg_any import (
     NumericRange,
     Range,
 )
+from django.db.models.functions import Cast
 from django.db.models.lookups import PostgresOperatorLookup
 
 from .utils import AttributeSetter
@@ -208,7 +209,14 @@ class DateRangeField(RangeField):
         return "daterange"
 
 
-RangeField.register_lookup(lookups.DataContains)
+class RangeContains(lookups.DataContains):
+    def get_prep_lookup(self):
+        if not isinstance(self.rhs, (list, tuple, Range)):
+            return Cast(self.rhs, self.lhs.field.base_field)
+        return super().get_prep_lookup()
+
+
+RangeField.register_lookup(RangeContains)
 RangeField.register_lookup(lookups.ContainedBy)
 RangeField.register_lookup(lookups.Overlap)
diff --git a/django/contrib/postgres/operations.py b/django/contrib/postgres/operations.py
index 9dbd491773..5ac396bedf 100644
--- a/django/contrib/postgres/operations.py
+++ b/django/contrib/postgres/operations.py
@@ -35,6 +35,10 @@ class CreateExtension(Operation):
         # installed, otherwise a subsequent data migration would use the same
         # connection.
         register_type_handlers(schema_editor.connection)
+        if hasattr(schema_editor.connection, "register_geometry_adapters"):
+            schema_editor.connection.register_geometry_adapters(
+                schema_editor.connection.connection, True
+            )
 
     def database_backwards(self, app_label, schema_editor, from_state, to_state):
         if not router.allow_migrate(schema_editor.connection.alias, app_label):
diff --git a/django/contrib/postgres/search.py b/django/contrib/postgres/search.py
index 05c8f72f6f..4e370aa167 100644
--- a/django/contrib/postgres/search.py
+++ b/django/contrib/postgres/search.py
@@ -39,6 +39,11 @@ class SearchQueryField(Field):
         return "tsquery"
 
 
+class _Float4Field(Field):
+    def db_type(self, connection):
+        return "float4"
+
+
 class SearchConfig(Expression):
     def __init__(self, config):
         super().__init__()
@@ -138,7 +143,11 @@ class SearchVector(SearchVectorCombinable, Func):
         if clone.weight:
             weight_sql, extra_params = compiler.compile(clone.weight)
             sql = "setweight({}, {})".format(sql, weight_sql)
-        return sql, config_params + params + extra_params
+
+        # These parameters must be bound on the client side because we may
+        # want to create an index on this expression.
+        sql = connection.ops.compose_sql(sql, config_params + params + extra_params)
+        return sql, []
 
 
 class CombinedSearchVector(SearchVectorCombinable, CombinedExpression):
@@ -244,6 +253,8 @@ class SearchRank(Func):
         normalization=None,
         cover_density=False,
     ):
+        from .fields.array import ArrayField
+
         if not hasattr(vector, "resolve_expression"):
             vector = SearchVector(vector)
         if not hasattr(query, "resolve_expression"):
@@ -252,6 +263,7 @@ class SearchRank(Func):
         if weights is not None:
             if not hasattr(weights, "resolve_expression"):
                 weights = Value(weights)
+            weights = Cast(weights, ArrayField(_Float4Field()))
             expressions = (weights,) + expressions
         if normalization is not None:
             if not hasattr(normalization, "resolve_expression"):
diff --git a/django/contrib/postgres/signals.py b/django/contrib/postgres/signals.py
index 5c6ca3687a..a3816d3d30 100644
--- a/django/contrib/postgres/signals.py
+++ b/django/contrib/postgres/signals.py
@@ -1,10 +1,8 @@
 import functools
 
-import psycopg2
-from psycopg2.extras import register_hstore
-
 from django.db import connections
 from django.db.backends.base.base import NO_DB_ALIAS
+from django.db.backends.postgresql.psycopg_any import is_psycopg3
 
 
 def get_type_oids(connection_alias, type_name):
@@ -32,30 +30,51 @@ def get_citext_oids(connection_alias):
     return get_type_oids(connection_alias, "citext")
 
 
-def register_type_handlers(connection, **kwargs):
-    if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS:
-        return
+if is_psycopg3:
+    from psycopg.types import TypeInfo, hstore
 
-    oids, array_oids = get_hstore_oids(connection.alias)
-    # Don't register handlers when hstore is not available on the database.
-    #
-    # If someone tries to create an hstore field it will error there. This is
-    # necessary as someone may be using PSQL without extensions installed but
-    # be using other features of contrib.postgres.
-    #
-    # This is also needed in order to create the connection in order to install
-    # the hstore extension.
-    if oids:
-        register_hstore(
-            connection.connection, globally=True, oid=oids, array_oid=array_oids
-        )
+    def register_type_handlers(connection, **kwargs):
+        if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS:
+            return
 
-    oids, citext_oids = get_citext_oids(connection.alias)
-    # Don't register handlers when citext is not available on the database.
-    #
-    # The same comments in the above call to register_hstore() also apply here.
-    if oids:
-        array_type = psycopg2.extensions.new_array_type(
-            citext_oids, "citext[]", psycopg2.STRING
-        )
-        psycopg2.extensions.register_type(array_type, None)
+        oids, array_oids = get_hstore_oids(connection.alias)
+        for oid, array_oid in zip(oids, array_oids):
+            ti = TypeInfo("hstore", oid, array_oid)
+            hstore.register_hstore(ti, connection.connection)
+
+        _, citext_oids = get_citext_oids(connection.alias)
+        for array_oid in citext_oids:
+            ti = TypeInfo("citext", 0, array_oid)
+            ti.register(connection.connection)
+
+else:
+    import psycopg2
+    from psycopg2.extras import register_hstore
+
+    def register_type_handlers(connection, **kwargs):
+        if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS:
+            return
+
+        oids, array_oids = get_hstore_oids(connection.alias)
+        # Don't register handlers when hstore is not available on the database.
+        #
+        # If someone tries to create an hstore field it will error there. This is
+        # necessary as someone may be using PSQL without extensions installed but
+        # be using other features of contrib.postgres.
+        #
+        # This is also needed in order to create the connection in order to install
+        # the hstore extension.
+        if oids:
+            register_hstore(
+                connection.connection, globally=True, oid=oids, array_oid=array_oids
+            )
+
+        oids, citext_oids = get_citext_oids(connection.alias)
+        # Don't register handlers when citext is not available on the database.
+        #
+        # The same comments in the above call to register_hstore() also apply here.
+        if oids:
+            array_type = psycopg2.extensions.new_array_type(
+                citext_oids, "citext[]", psycopg2.STRING
+            )
+            psycopg2.extensions.register_type(array_type, None)
diff --git a/django/core/management/commands/loaddata.py b/django/core/management/commands/loaddata.py
index 6fae255407..618c6c29b6 100644
--- a/django/core/management/commands/loaddata.py
+++ b/django/core/management/commands/loaddata.py
@@ -207,7 +207,7 @@ class Command(BaseCommand):
                     self.models.add(obj.object.__class__)
                     try:
                         obj.save(using=self.using)
-                    # psycopg2 raises ValueError if data contains NUL chars.
+                    # psycopg raises ValueError if data contains NUL chars.
                     except (DatabaseError, IntegrityError, ValueError) as e:
                         e.args = (
                             "Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s"
diff --git a/django/db/backends/base/features.py b/django/db/backends/base/features.py
index a1d38d3530..190f728bba 100644
--- a/django/db/backends/base/features.py
+++ b/django/db/backends/base/features.py
@@ -164,6 +164,8 @@ class BaseDatabaseFeatures:
     # Can we roll back DDL in a transaction?
     can_rollback_ddl = False
 
+    schema_editor_uses_clientside_param_binding = False
+
     # Does it support operations requiring references rename in a transaction?
     supports_atomic_references_rename = True
 
@@ -335,6 +337,9 @@ class BaseDatabaseFeatures:
     # Does the backend support the logical XOR operator?
     supports_logical_xor = False
 
+    # Set to (exception, message) if null characters in text are disallowed.
+    prohibits_null_characters_in_text_exception = None
+
     # Collation names for use by the Django test suite.
     test_collations = {
         "ci": None,  # Case-insensitive.
diff --git a/django/db/backends/base/operations.py b/django/db/backends/base/operations.py
index 407681a418..4ee73c0734 100644
--- a/django/db/backends/base/operations.py
+++ b/django/db/backends/base/operations.py
@@ -525,6 +525,9 @@ class BaseDatabaseOperations:
         else:
             return value
 
+    def adapt_integerfield_value(self, value, internal_type):
+        return value
+
     def adapt_datefield_value(self, value):
         """
         Transform a date value to an object compatible with what is expected
diff --git a/django/db/backends/postgresql/base.py b/django/db/backends/postgresql/base.py
index 0aee39aa5c..ceea1bebad 100644
--- a/django/db/backends/postgresql/base.py
+++ b/django/db/backends/postgresql/base.py
@@ -1,7 +1,7 @@
 """
 PostgreSQL database backend for Django.
 
-Requires psycopg 2: https://www.psycopg.org/
+Requires psycopg2 >= 2.8.4 or psycopg >= 3.1
 """
 
 import asyncio
@@ -21,48 +21,63 @@ from django.utils.safestring import SafeString
 from django.utils.version import get_version_tuple
 
 try:
-    import psycopg2 as Database
-    import psycopg2.extensions
-    import psycopg2.extras
-except ImportError as e:
-    raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
+    try:
+        import psycopg as Database
+    except ImportError:
+        import psycopg2 as Database
+except ImportError:
+    raise ImproperlyConfigured("Error loading psycopg2 or psycopg module")
 
 
-def psycopg2_version():
-    version = psycopg2.__version__.split(" ", 1)[0]
+def psycopg_version():
+    version = Database.__version__.split(" ", 1)[0]
     return get_version_tuple(version)
 
 
-PSYCOPG2_VERSION = psycopg2_version()
-
-if PSYCOPG2_VERSION < (2, 8, 4):
+if psycopg_version() < (2, 8, 4):
     raise ImproperlyConfigured(
-        "psycopg2 version 2.8.4 or newer is required; you have %s"
-        % psycopg2.__version__
+        f"psycopg2 version 2.8.4 or newer is required; you have {Database.__version__}"
+    )
+if (3,) <= psycopg_version() < (3, 1):
+    raise ImproperlyConfigured(
+        f"psycopg version 3.1 or newer is required; you have {Database.__version__}"
     )
 
-# Some of these import psycopg2, so import them after checking if it's installed.
-from .client import DatabaseClient  # NOQA
-from .creation import DatabaseCreation  # NOQA
-from .features import DatabaseFeatures  # NOQA
-from .introspection import DatabaseIntrospection  # NOQA
-from .operations import DatabaseOperations  # NOQA
-from .psycopg_any import IsolationLevel  # NOQA
-from .schema import DatabaseSchemaEditor  # NOQA
+from .psycopg_any import IsolationLevel, is_psycopg3  # NOQA isort:skip
 
-psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
-psycopg2.extras.register_uuid()
+if is_psycopg3:
+    from psycopg import adapters, sql
+    from psycopg.pq import Format
 
-# Register support for inet[] manually so we don't have to handle the Inet()
-# object on load all the time.
-INETARRAY_OID = 1041
-INETARRAY = psycopg2.extensions.new_array_type(
-    (INETARRAY_OID,),
-    "INETARRAY",
-    psycopg2.extensions.UNICODE,
-)
-psycopg2.extensions.register_type(INETARRAY)
+    from .psycopg_any import get_adapters_template, register_tzloader
+
+    TIMESTAMPTZ_OID = adapters.types["timestamptz"].oid
+
+else:
+    import psycopg2.extensions
+    import psycopg2.extras
+
+    psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
+    psycopg2.extras.register_uuid()
+
+    # Register support for inet[] manually so we don't have to handle the Inet()
+    # object on load all the time.
+    INETARRAY_OID = 1041
+    INETARRAY = psycopg2.extensions.new_array_type(
+        (INETARRAY_OID,),
+        "INETARRAY",
+        psycopg2.extensions.UNICODE,
+    )
+    psycopg2.extensions.register_type(INETARRAY)
+
+# Some of these import psycopg, so import them after checking if it's installed.
+from .client import DatabaseClient  # NOQA isort:skip
+from .creation import DatabaseCreation  # NOQA isort:skip
+from .features import DatabaseFeatures  # NOQA isort:skip
+from .introspection import DatabaseIntrospection  # NOQA isort:skip
+from .operations import DatabaseOperations  # NOQA isort:skip
+from .schema import DatabaseSchemaEditor  # NOQA isort:skip
 
 
 class DatabaseWrapper(BaseDatabaseWrapper):
@@ -209,6 +224,15 @@ class DatabaseWrapper(BaseDatabaseWrapper):
             conn_params["host"] = settings_dict["HOST"]
         if settings_dict["PORT"]:
             conn_params["port"] = settings_dict["PORT"]
+        if is_psycopg3:
+            conn_params["context"] = get_adapters_template(
+                settings.USE_TZ, self.timezone
+            )
+            # Disable prepared statements by default to keep connection poolers
+            # working. Can be reenabled via OPTIONS in the settings dict.
+            conn_params["prepare_threshold"] = conn_params.pop(
+                "prepare_threshold", None
+            )
         return conn_params
 
     @async_unsafe
@@ -232,17 +256,19 @@
             except ValueError:
                 raise ImproperlyConfigured(
                     f"Invalid transaction isolation level {isolation_level_value} "
-                    f"specified. Use one of the IsolationLevel values."
+                    f"specified. Use one of the psycopg.IsolationLevel values."
                 )
-        connection = Database.connect(**conn_params)
+        connection = self.Database.connect(**conn_params)
         if set_isolation_level:
             connection.isolation_level = self.isolation_level
-        # Register dummy loads() to avoid a round trip from psycopg2's decode
-        # to json.dumps() to json.loads(), when using a custom decoder in
-        # JSONField.
-        psycopg2.extras.register_default_jsonb(
-            conn_or_curs=connection, loads=lambda x: x
-        )
+        if not is_psycopg3:
+            # Register dummy loads() to avoid a round trip from psycopg2's
+            # decode to json.dumps() to json.loads(), when using a custom
+            # decoder in JSONField.
+            psycopg2.extras.register_default_jsonb(
+                conn_or_curs=connection, loads=lambda x: x
+            )
+        connection.cursor_factory = Cursor
         return connection
 
     def ensure_timezone(self):
@@ -275,7 +301,15 @@
             )
         else:
             cursor = self.connection.cursor()
-        cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None
+
+        if is_psycopg3:
+            # Register the cursor timezone only if the connection disagrees, to
+            # avoid copying the adapter map.
+            tzloader = self.connection.adapters.get_loader(TIMESTAMPTZ_OID, Format.TEXT)
+            if self.timezone != tzloader.timezone:
+                register_tzloader(self.timezone, cursor)
+        else:
+            cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None
         return cursor
 
     def tzinfo_factory(self, offset):
@@ -379,11 +413,43 @@
         return CursorDebugWrapper(cursor, self)
 
 
-class CursorDebugWrapper(BaseCursorDebugWrapper):
-    def copy_expert(self, sql, file, *args):
-        with self.debug_sql(sql):
-            return self.cursor.copy_expert(sql, file, *args)
+if is_psycopg3:
 
-    def copy_to(self, file, table, *args, **kwargs):
-        with self.debug_sql(sql="COPY %s TO STDOUT" % table):
-            return self.cursor.copy_to(file, table, *args, **kwargs)
+    class Cursor(Database.Cursor):
+        """
+        A subclass of psycopg cursor implementing callproc.
+        """
+
+        def callproc(self, name, args=None):
+            if not isinstance(name, sql.Identifier):
+                name = sql.Identifier(name)
+
+            qparts = [sql.SQL("SELECT * FROM "), name, sql.SQL("(")]
+            if args:
+                for item in args:
+                    qparts.append(sql.Literal(item))
+                    qparts.append(sql.SQL(","))
+                del qparts[-1]
+
+            qparts.append(sql.SQL(")"))
+            stmt = sql.Composed(qparts)
+            self.execute(stmt)
+            return args
+
+    class CursorDebugWrapper(BaseCursorDebugWrapper):
+        def copy(self, statement):
+            with self.debug_sql(statement):
+                return self.cursor.copy(statement)
+
+else:
+
+    Cursor = psycopg2.extensions.cursor
+
+    class CursorDebugWrapper(BaseCursorDebugWrapper):
+        def copy_expert(self, sql, file, *args):
+            with self.debug_sql(sql):
+                return self.cursor.copy_expert(sql, file, *args)
+
+        def copy_to(self, file, table, *args, **kwargs):
+            with self.debug_sql(sql="COPY %s TO STDOUT" % table):
+                return self.cursor.copy_to(file, table, *args, **kwargs)
diff --git a/django/db/backends/postgresql/features.py b/django/db/backends/postgresql/features.py
index 0eed8c8d63..fd5b05aad4 100644
--- a/django/db/backends/postgresql/features.py
+++ b/django/db/backends/postgresql/features.py
@@ -1,7 +1,8 @@
 import operator
 
-from django.db import InterfaceError
+from django.db import DataError, InterfaceError
 from django.db.backends.base.features import BaseDatabaseFeatures
+from django.db.backends.postgresql.psycopg_any import is_psycopg3
 from django.utils.functional import cached_property
 
 
@@ -26,6 +27,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     can_introspect_materialized_views = True
     can_distinct_on_fields = True
     can_rollback_ddl = True
+    schema_editor_uses_clientside_param_binding = True
     supports_combined_alters = True
    nulls_order_largest = True
     closed_cursor_error_class = InterfaceError
@@ -81,6 +83,13 @@ class DatabaseFeatures(BaseDatabaseFeatures):
         },
     }
 
+    @cached_property
+    def prohibits_null_characters_in_text_exception(self):
+        if is_psycopg3:
+            return DataError, "PostgreSQL text fields cannot contain NUL (0x00) bytes"
+        else:
+            return ValueError, "A string literal cannot contain NUL (0x00) characters."
+
     @cached_property
     def introspected_field_types(self):
         return {
diff --git a/django/db/backends/postgresql/operations.py b/django/db/backends/postgresql/operations.py
index 824e0c3e4b..18cfcb29cb 100644
--- a/django/db/backends/postgresql/operations.py
+++ b/django/db/backends/postgresql/operations.py
@@ -3,9 +3,16 @@ from functools import lru_cache, partial
 
 from django.conf import settings
 from django.db.backends.base.operations import BaseDatabaseOperations
-from django.db.backends.postgresql.psycopg_any import Inet, Jsonb, mogrify
+from django.db.backends.postgresql.psycopg_any import (
+    Inet,
+    Jsonb,
+    errors,
+    is_psycopg3,
+    mogrify,
+)
 from django.db.backends.utils import split_tzname_delta
 from django.db.models.constants import OnConflict
+from django.utils.regex_helper import _lazy_re_compile
 
 
 @lru_cache
@@ -36,6 +43,18 @@ class DatabaseOperations(BaseDatabaseOperations):
         "SmallAutoField": "smallint",
     }
 
+    if is_psycopg3:
+        from psycopg.types import numeric
+
+        integerfield_type_map = {
+            "SmallIntegerField": numeric.Int2,
+            "IntegerField": numeric.Int4,
+            "BigIntegerField": numeric.Int8,
+            "PositiveSmallIntegerField": numeric.Int2,
+            "PositiveIntegerField": numeric.Int4,
+            "PositiveBigIntegerField": numeric.Int8,
+        }
+
     def unification_cast_sql(self, output_field):
         internal_type = output_field.get_internal_type()
         if internal_type in (
@@ -56,19 +75,23 @@ class DatabaseOperations(BaseDatabaseOperations):
         )
         return "%s"
 
+    # EXTRACT format cannot be passed in parameters.
+    _extract_format_re = _lazy_re_compile(r"[A-Z_]+")
+
     def date_extract_sql(self, lookup_type, sql, params):
         # https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
-        extract_sql = f"EXTRACT(%s FROM {sql})"
-        extract_param = lookup_type
         if lookup_type == "week_day":
             # For consistency across backends, we return Sunday=1, Saturday=7.
-            extract_sql = f"EXTRACT(%s FROM {sql}) + 1"
-            extract_param = "dow"
+            return f"EXTRACT(DOW FROM {sql}) + 1", params
         elif lookup_type == "iso_week_day":
-            extract_param = "isodow"
+            return f"EXTRACT(ISODOW FROM {sql})", params
         elif lookup_type == "iso_year":
-            extract_param = "isoyear"
-        return extract_sql, (extract_param, *params)
+            return f"EXTRACT(ISOYEAR FROM {sql})", params
+
+        lookup_type = lookup_type.upper()
+        if not self._extract_format_re.fullmatch(lookup_type):
+            raise ValueError(f"Invalid lookup type: {lookup_type!r}")
+        return f"EXTRACT({lookup_type} FROM {sql})", params
 
     def date_trunc_sql(self, lookup_type, sql, params, tzname=None):
         sql, params = self._convert_sql_to_tz(sql, params, tzname)
@@ -100,10 +123,7 @@ class DatabaseOperations(BaseDatabaseOperations):
         sql, params = self._convert_sql_to_tz(sql, params, tzname)
         if lookup_type == "second":
             # Truncate fractional seconds.
-            return (
-                f"EXTRACT(%s FROM DATE_TRUNC(%s, {sql}))",
-                ("second", "second", *params),
-            )
+            return f"EXTRACT(SECOND FROM DATE_TRUNC(%s, {sql}))", ("second", *params)
         return self.date_extract_sql(lookup_type, sql, params)
 
     def datetime_trunc_sql(self, lookup_type, sql, params, tzname):
@@ -114,10 +134,7 @@ class DatabaseOperations(BaseDatabaseOperations):
     def time_extract_sql(self, lookup_type, sql, params):
         if lookup_type == "second":
             # Truncate fractional seconds.
- return ( - f"EXTRACT(%s FROM DATE_TRUNC(%s, {sql}))", - ("second", "second", *params), - ) + return f"EXTRACT(SECOND FROM DATE_TRUNC(%s, {sql}))", ("second", *params) return self.date_extract_sql(lookup_type, sql, params) def time_trunc_sql(self, lookup_type, sql, params, tzname=None): @@ -137,6 +154,16 @@ class DatabaseOperations(BaseDatabaseOperations): def lookup_cast(self, lookup_type, internal_type=None): lookup = "%s" + if lookup_type == "isnull" and internal_type in ( + "CharField", + "EmailField", + "TextField", + "CICharField", + "CIEmailField", + "CITextField", + ): + return "%s::text" + # Cast text lookups to text to allow things like filter(x__contains=4) if lookup_type in ( "iexact", @@ -178,7 +205,7 @@ class DatabaseOperations(BaseDatabaseOperations): return mogrify(sql, params, self.connection) def set_time_zone_sql(self): - return "SET TIME ZONE %s" + return "SELECT set_config('TimeZone', %s, false)" def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): if not tables: @@ -278,12 +305,22 @@ class DatabaseOperations(BaseDatabaseOperations): else: return ["DISTINCT"], [] - def last_executed_query(self, cursor, sql, params): - # https://www.psycopg.org/docs/cursor.html#cursor.query - # The query attribute is a Psycopg extension to the DB API 2.0. - if cursor.query is not None: - return cursor.query.decode() - return None + if is_psycopg3: + + def last_executed_query(self, cursor, sql, params): + try: + return self.compose_sql(sql, params) + except errors.DataError: + return None + + else: + + def last_executed_query(self, cursor, sql, params): + # https://www.psycopg.org/docs/cursor.html#cursor.query + # The query attribute is a Psycopg extension to the DB API 2.0. + if cursor.query is not None: + return cursor.query.decode() + return None def return_insert_columns(self, fields): if not fields: @@ -303,6 +340,13 @@ class DatabaseOperations(BaseDatabaseOperations): values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) return "VALUES " + values_sql + if is_psycopg3: + + def adapt_integerfield_value(self, value, internal_type): + if value is None or hasattr(value, "resolve_expression"): + return value + return self.integerfield_type_map[internal_type](value) + def adapt_datefield_value(self, value): return value diff --git a/django/db/backends/postgresql/psycopg_any.py b/django/db/backends/postgresql/psycopg_any.py index e9bb84f313..579104dead 100644 --- a/django/db/backends/postgresql/psycopg_any.py +++ b/django/db/backends/postgresql/psycopg_any.py @@ -1,31 +1,102 @@ -from enum import IntEnum +import ipaddress +from functools import lru_cache -from psycopg2 import errors, extensions, sql # NOQA -from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, Inet # NOQA -from psycopg2.extras import Json as Jsonb # NOQA -from psycopg2.extras import NumericRange, Range # NOQA +try: + from psycopg import ClientCursor, IsolationLevel, adapt, adapters, errors, sql + from psycopg.postgres import types + from psycopg.types.datetime import TimestamptzLoader + from psycopg.types.json import Jsonb + from psycopg.types.range import Range, RangeDumper + from psycopg.types.string import TextLoader -RANGE_TYPES = (DateRange, DateTimeRange, DateTimeTZRange, NumericRange) + Inet = ipaddress.ip_address + DateRange = DateTimeRange = DateTimeTZRange = NumericRange = Range + RANGE_TYPES = (Range,) -class IsolationLevel(IntEnum): - READ_UNCOMMITTED = extensions.ISOLATION_LEVEL_READ_UNCOMMITTED - READ_COMMITTED = 
-    READ_COMMITTED = extensions.ISOLATION_LEVEL_READ_COMMITTED
-    REPEATABLE_READ = extensions.ISOLATION_LEVEL_REPEATABLE_READ
-    SERIALIZABLE = extensions.ISOLATION_LEVEL_SERIALIZABLE
+    TSRANGE_OID = types["tsrange"].oid
+    TSTZRANGE_OID = types["tstzrange"].oid
 
+    def mogrify(sql, params, connection):
+        return ClientCursor(connection.connection).mogrify(sql, params)
 
-def _quote(value, connection=None):
-    adapted = extensions.adapt(value)
-    if hasattr(adapted, "encoding"):
-        adapted.encoding = "utf8"
-    # getquoted() returns a quoted bytestring of the adapted value.
-    return adapted.getquoted().decode()
+    # Adapters.
+    class BaseTzLoader(TimestamptzLoader):
+        """
+        Load a PostgreSQL timestamptz using a specific timezone.
+        The timezone can be None too, in which case it will be chopped.
+        """
 
+        timezone = None
 
-sql.quote = _quote
+        def load(self, data):
+            res = super().load(data)
+            return res.replace(tzinfo=self.timezone)
 
+    def register_tzloader(tz, context):
+        class SpecificTzLoader(BaseTzLoader):
+            timezone = tz
 
-def mogrify(sql, params, connection):
-    with connection.cursor() as cursor:
-        return cursor.mogrify(sql, params).decode()
+        context.adapters.register_loader("timestamptz", SpecificTzLoader)
+
+    class DjangoRangeDumper(RangeDumper):
+        """A Range dumper customized for Django."""
+
+        def upgrade(self, obj, format):
+            # Dump ranges containing naive datetimes as tstzrange, because
+            # Django doesn't use tz-aware ones.
+            dumper = super().upgrade(obj, format)
+            if dumper is not self and dumper.oid == TSRANGE_OID:
+                dumper.oid = TSTZRANGE_OID
+            return dumper
+
+    @lru_cache
+    def get_adapters_template(use_tz, timezone):
+        # Create an adapters map extending the base one.
+        ctx = adapt.AdaptersMap(adapters)
+        # Register a no-op loader to avoid a round trip from psycopg version 3
+        # decode to json.dumps() to json.loads(), when using a custom decoder
+        # in JSONField.
+        ctx.register_loader("jsonb", TextLoader)
+        # Don't convert automatically from PostgreSQL network types to Python
+        # ipaddress.
+        ctx.register_loader("inet", TextLoader)
+        ctx.register_loader("cidr", TextLoader)
+        ctx.register_dumper(Range, DjangoRangeDumper)
+        # Register a timestamptz loader configured on self.timezone.
+        # This, however, can be overridden by create_cursor.
+        register_tzloader(timezone, ctx)
+        return ctx
+
+    is_psycopg3 = True
+
+except ImportError:
+    from enum import IntEnum
+
+    from psycopg2 import errors, extensions, sql  # NOQA
+    from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, Inet  # NOQA
+    from psycopg2.extras import Json as Jsonb  # NOQA
+    from psycopg2.extras import NumericRange, Range  # NOQA
+
+    RANGE_TYPES = (DateRange, DateTimeRange, DateTimeTZRange, NumericRange)
+
+    class IsolationLevel(IntEnum):
+        READ_UNCOMMITTED = extensions.ISOLATION_LEVEL_READ_UNCOMMITTED
+        READ_COMMITTED = extensions.ISOLATION_LEVEL_READ_COMMITTED
+        REPEATABLE_READ = extensions.ISOLATION_LEVEL_REPEATABLE_READ
+        SERIALIZABLE = extensions.ISOLATION_LEVEL_SERIALIZABLE
+
+    def _quote(value, connection=None):
+        adapted = extensions.adapt(value)
+        if hasattr(adapted, "encoding"):
+            adapted.encoding = "utf8"
+        # getquoted() returns a quoted bytestring of the adapted value.
+        return adapted.getquoted().decode()
+
+    sql.quote = _quote
+
+    def mogrify(sql, params, connection):
+        with connection.cursor() as cursor:
+            return cursor.mogrify(sql, params).decode()
+
+    is_psycopg3 = False
diff --git a/django/db/backends/postgresql/schema.py b/django/db/backends/postgresql/schema.py
index cc0da85817..1bd72bc0cb 100644
--- a/django/db/backends/postgresql/schema.py
+++ b/django/db/backends/postgresql/schema.py
@@ -40,6 +40,14 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
     )
     sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)"
 
+    def execute(self, sql, params=()):
+        # Merge the query client-side, as PostgreSQL won't do it server-side.
+        if params is None:
+            return super().execute(sql, params)
+        sql = self.connection.ops.compose_sql(str(sql), params)
+        # Don't let the superclass touch anything.
+        return super().execute(sql, None)
+
     sql_add_identity = (
         "ALTER TABLE %(table)s ALTER COLUMN %(column)s ADD "
         "GENERATED BY DEFAULT AS IDENTITY"
diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py
index cd995de80f..991db8d707 100644
--- a/django/db/models/fields/__init__.py
+++ b/django/db/models/fields/__init__.py
@@ -2019,6 +2019,10 @@ class IntegerField(Field):
                 "Field '%s' expected a number but got %r." % (self.name, value),
             ) from e
 
+    def get_db_prep_value(self, value, connection, prepared=False):
+        value = super().get_db_prep_value(value, connection, prepared)
+        return connection.ops.adapt_integerfield_value(value, self.get_internal_type())
+
     def get_internal_type(self):
         return "IntegerField"
 
diff --git a/django/db/models/functions/comparison.py b/django/db/models/functions/comparison.py
index eb1f20a77c..de7eef4cdc 100644
--- a/django/db/models/functions/comparison.py
+++ b/django/db/models/functions/comparison.py
@@ -1,6 +1,7 @@
 """Database functions that do comparisons or type conversions."""
 from django.db import NotSupportedError
 from django.db.models.expressions import Func, Value
+from django.db.models.fields import TextField
 from django.db.models.fields.json import JSONField
 from django.utils.regex_helper import _lazy_re_compile
 
@@ -158,7 +159,14 @@ class JSONObject(Func):
         return super().as_sql(compiler, connection, **extra_context)
 
     def as_postgresql(self, compiler, connection, **extra_context):
-        return self.as_sql(
+        copy = self.copy()
+        copy.set_source_expressions(
+            [
+                Cast(expression, TextField()) if index % 2 == 0 else expression
+                for index, expression in enumerate(copy.get_source_expressions())
+            ]
+        )
+        return super(JSONObject, copy).as_sql(
             compiler,
             connection,
             function="JSONB_BUILD_OBJECT",
diff --git a/django/db/models/functions/text.py b/django/db/models/functions/text.py
index a54ce8f19b..34a1e81982 100644
--- a/django/db/models/functions/text.py
+++ b/django/db/models/functions/text.py
@@ -1,7 +1,7 @@
 from django.db import NotSupportedError
 from django.db.models.expressions import Func, Value
-from django.db.models.fields import CharField, IntegerField
-from django.db.models.functions import Coalesce
+from django.db.models.fields import CharField, IntegerField, TextField
+from django.db.models.functions import Cast, Coalesce
 from django.db.models.lookups import Transform
 
 
@@ -82,6 +82,20 @@ class ConcatPair(Func):
             **extra_context,
         )
 
+    def as_postgresql(self, compiler, connection, **extra_context):
+        copy = self.copy()
+        copy.set_source_expressions(
+            [
+                Cast(expression, TextField())
+                for expression in copy.get_source_expressions()
+            ]
+        )
+        return super(ConcatPair, copy).as_sql(
+            compiler,
+            connection,
+            **extra_context,
+        )
+
     def as_mysql(self, compiler, connection, **extra_context):
         # Use CONCAT_WS with an empty separator so that NULLs are ignored.
         return super().as_sql(
diff --git a/django/db/models/lookups.py b/django/db/models/lookups.py
index dded81da82..9e2d9373e6 100644
--- a/django/db/models/lookups.py
+++ b/django/db/models/lookups.py
@@ -568,7 +568,7 @@ class IsNull(BuiltinLookup):
             raise ValueError(
                 "The QuerySet value for an isnull lookup must be True or False."
             )
-        sql, params = compiler.compile(self.lhs)
+        sql, params = self.process_lhs(compiler, connection)
         if self.rhs:
             return "%s IS NULL" % sql, params
         else:
diff --git a/docs/conf.py b/docs/conf.py
index 3e37509337..8bb6530b75 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -174,7 +174,7 @@ pygments_style = "trac"
 intersphinx_mapping = {
     "python": ("https://docs.python.org/3/", None),
     "sphinx": ("https://www.sphinx-doc.org/en/master/", None),
-    "psycopg2": ("https://www.psycopg.org/docs/", None),
+    "psycopg": ("https://www.psycopg.org/psycopg3/docs/", None),
 }
 
 # Python's docs don't change every week.
diff --git a/docs/ref/contrib/gis/install/index.txt b/docs/ref/contrib/gis/install/index.txt
index 51b3235279..026c64ccd1 100644
--- a/docs/ref/contrib/gis/install/index.txt
+++ b/docs/ref/contrib/gis/install/index.txt
@@ -429,14 +429,14 @@ Install Django and set up database
 recommended that you create a :doc:`virtual environment
 ` for each project you create.
 
-psycopg2
-~~~~~~~~
+psycopg
+~~~~~~~
 
-The ``psycopg2`` Python module provides the interface between Python and the
-PostgreSQL database. ``psycopg2`` can be installed via pip within your Python
+The ``psycopg`` Python module provides the interface between Python and the
+PostgreSQL database. ``psycopg`` can be installed via pip within your Python
 virtual environment::
 
-    ...\> py -m pip install psycopg2
+    ...\> py -m pip install psycopg
 
 .. rubric:: Footnotes
 .. [#] GeoDjango uses the :func:`~ctypes.util.find_library` routine from
diff --git a/docs/ref/contrib/gis/install/postgis.txt b/docs/ref/contrib/gis/install/postgis.txt
index 0c7d20ae84..e469ede4d0 100644
--- a/docs/ref/contrib/gis/install/postgis.txt
+++ b/docs/ref/contrib/gis/install/postgis.txt
@@ -7,20 +7,25 @@ into a spatial database.
 :ref:`geosbuild`, :ref:`proj4` and :ref:`gdalbuild` should be installed
 prior to building PostGIS. You might also need additional libraries, see
 `PostGIS requirements`_.
 
-The `psycopg2`_ module is required for use as the database adapter when using
-GeoDjango with PostGIS.
+The `psycopg`_ or `psycopg2`_ module is required for use as the database
+adapter when using GeoDjango with PostGIS.
 
 On Debian/Ubuntu, you are advised to install the following packages:
 ``postgresql-x``, ``postgresql-x-postgis-3``, ``postgresql-server-dev-x``,
-and ``python3-psycopg2`` (x matching the PostgreSQL version you want to
+and ``python3-psycopg3`` (x matching the PostgreSQL version you want to
 install).
 
 Alternately, you can `build from source`_. Consult the platform-specific
 instructions if you are on :ref:`macos` or :ref:`windows`.
 
 .. _PostGIS: https://postgis.net/
+.. _psycopg: https://www.psycopg.org/psycopg3/
 .. _psycopg2: https://www.psycopg.org/
 .. _PostGIS requirements: https://postgis.net/docs/postgis_installation.html#install_requirements
 .. _build from source: https://postgis.net/docs/postgis_installation.html#install_short_version
 
+.. versionchanged:: 4.2
+
+    Support for ``psycopg`` 3.1+ was added.
+
 Post-installation
 =================
diff --git a/docs/ref/contrib/postgres/fields.txt b/docs/ref/contrib/postgres/fields.txt
index 2e6ce4253e..34ad06a09a 100644
--- a/docs/ref/contrib/postgres/fields.txt
+++ b/docs/ref/contrib/postgres/fields.txt
@@ -538,8 +538,8 @@ PostgreSQL. These fields are used to store a range of values; for example the
 start and end timestamps of an event, or the range of ages an activity is
 suitable for.
 
-All of the range fields translate to :ref:`psycopg2 Range objects
-` in Python, but also accept tuples as input if no bounds
+All of the range fields translate to :ref:`psycopg Range objects
+` in Python, but also accept tuples as input if no bounds
 information is necessary. The default is lower bound included, upper bound
 excluded, that is ``[)`` (see the PostgreSQL documentation for details about
 `different bounds`_). The default bounds can be changed for non-discrete range
@@ -553,8 +553,8 @@ the ``default_bounds`` argument.
 
     Stores a range of integers. Based on an
     :class:`~django.db.models.IntegerField`. Represented by an ``int4range`` in
-    the database and a :class:`~psycopg2:psycopg2.extras.NumericRange` in
-    Python.
+    the database and a
+    ``django.db.backends.postgresql.psycopg_any.NumericRange`` in Python.
 
     Regardless of the bounds specified when saving the data, PostgreSQL always
     returns a range in a canonical form that includes the lower bound and
@@ -567,8 +567,8 @@ the ``default_bounds`` argument.
 
     Stores a range of large integers. Based on a
     :class:`~django.db.models.BigIntegerField`. Represented by an ``int8range``
-    in the database and a :class:`~psycopg2:psycopg2.extras.NumericRange` in
-    Python.
+    in the database and a
+    ``django.db.backends.postgresql.psycopg_any.NumericRange`` in Python.
 
     Regardless of the bounds specified when saving the data, PostgreSQL always
     returns a range in a canonical form that includes the lower bound and
@@ -581,8 +581,8 @@ the ``default_bounds`` argument.
 
     Stores a range of floating point values. Based on a
     :class:`~django.db.models.DecimalField`. Represented by a ``numrange`` in
-    the database and a :class:`~psycopg2:psycopg2.extras.NumericRange` in
-    Python.
+    the database and a
+    ``django.db.backends.postgresql.psycopg_any.NumericRange`` in Python.
 
     .. attribute:: DecimalRangeField.default_bounds
 
@@ -592,7 +592,7 @@ the ``default_bounds`` argument.
         default is lower bound included, upper bound excluded, that is ``[)``
         (see the PostgreSQL documentation for details about `different
         bounds`_). ``default_bounds`` is not used for
-        :class:`~psycopg2:psycopg2.extras.NumericRange` inputs.
+        ``django.db.backends.postgresql.psycopg_any.NumericRange`` inputs.
 
 ``DateTimeRangeField``
 ----------------------
@@ -601,8 +601,8 @@ the ``default_bounds`` argument.
 
     Stores a range of timestamps. Based on a
     :class:`~django.db.models.DateTimeField`. Represented by a ``tstzrange`` in
-    the database and a :class:`~psycopg2:psycopg2.extras.DateTimeTZRange` in
-    Python.
+    the database and a
+    ``django.db.backends.postgresql.psycopg_any.DateTimeTZRange`` in Python.
 
     .. attribute:: DateTimeRangeField.default_bounds
 
@@ -612,7 +612,7 @@ the ``default_bounds`` argument.
         default is lower bound included, upper bound excluded, that is ``[)``
         (see the PostgreSQL documentation for details about `different
         bounds`_). ``default_bounds`` is not used for
-        :class:`~psycopg2:psycopg2.extras.DateTimeTZRange` inputs.
+        ``django.db.backends.postgresql.psycopg_any.DateTimeTZRange`` inputs.
 
 ``DateRangeField``
 ------------------
@@ -621,7 +621,8 @@ the ``default_bounds`` argument.
 
     Stores a range of dates. Based on a
     :class:`~django.db.models.DateField`. Represented by a ``daterange`` in the
-    database and a :class:`~psycopg2:psycopg2.extras.DateRange` in Python.
+    database and a ``django.db.backends.postgresql.psycopg_any.DateRange`` in
+    Python.
 
     Regardless of the bounds specified when saving the data, PostgreSQL always
     returns a range in a canonical form that includes the lower bound and
@@ -655,7 +656,7 @@ We will also use the following example objects::
 
 and ``NumericRange``:
 
-    >>> from psycopg2.extras import NumericRange
+    >>> from django.db.backends.postgresql.psycopg_any import NumericRange
 
 Containment functions
 ~~~~~~~~~~~~~~~~~~~~~
@@ -690,7 +691,7 @@ The ``contained_by`` lookup is also available on the non-range field types:
 :class:`~django.db.models.DateField`, and
 :class:`~django.db.models.DateTimeField`. For example::
 
-    >>> from psycopg2.extras import DateTimeTZRange
+    >>> from django.db.backends.postgresql.psycopg_any import DateTimeTZRange
     >>> Event.objects.filter(
     ...     start__contained_by=DateTimeTZRange(
     ...         timezone.now() - datetime.timedelta(hours=1),
@@ -864,9 +865,9 @@ Defining your own range types
 -----------------------------
 
 PostgreSQL allows the definition of custom range types. Django's model and form
-field implementations use base classes below, and psycopg2 provides a
-:func:`~psycopg2:psycopg2.extras.register_range` to allow use of custom range
-types.
+field implementations use base classes below, and ``psycopg`` provides a
+:func:`~psycopg:psycopg.types.range.register_range` to allow use of custom
+range types.
 
 .. class:: RangeField(**options)
 
@@ -878,7 +879,7 @@ types.
 
     .. attribute:: range_type
 
-        The psycopg2 range type to use.
+        The range type to use.
 
     .. attribute:: form_field
 
@@ -895,7 +896,7 @@ types.
 
     .. attribute:: range_type
 
-        The psycopg2 range type to use.
+        The range type to use.
 
 Range operators
 ---------------
diff --git a/docs/ref/contrib/postgres/forms.txt b/docs/ref/contrib/postgres/forms.txt
index e5d597655f..8f9dd449d1 100644
--- a/docs/ref/contrib/postgres/forms.txt
+++ b/docs/ref/contrib/postgres/forms.txt
@@ -173,7 +173,7 @@ not greater than the upper bound. All of these fields use
 .. class:: IntegerRangeField
 
     Based on :class:`~django.forms.IntegerField` and translates its input into
-    :class:`~psycopg2:psycopg2.extras.NumericRange`. Default for
+    ``django.db.backends.postgresql.psycopg_any.NumericRange``. Default for
     :class:`~django.contrib.postgres.fields.IntegerRangeField` and
     :class:`~django.contrib.postgres.fields.BigIntegerRangeField`.
 
@@ -183,7 +183,7 @@ not greater than the upper bound. All of these fields use
 .. class:: DecimalRangeField
 
     Based on :class:`~django.forms.DecimalField` and translates its input into
-    :class:`~psycopg2:psycopg2.extras.NumericRange`. Default for
+    ``django.db.backends.postgresql.psycopg_any.NumericRange``. Default for
     :class:`~django.contrib.postgres.fields.DecimalRangeField`.
 
 ``DateTimeRangeField``
@@ -192,7 +192,7 @@ not greater than the upper bound. All of these fields use
 .. class:: DateTimeRangeField
 
     Based on :class:`~django.forms.DateTimeField` and translates its input into
-    :class:`~psycopg2:psycopg2.extras.DateTimeTZRange`. Default for
+    ``django.db.backends.postgresql.psycopg_any.DateTimeTZRange``. Default for
     :class:`~django.contrib.postgres.fields.DateTimeRangeField`.
 
 ``DateRangeField``
@@ -201,7 +201,7 @@ not greater than the upper bound. All of these fields use
 .. class:: DateRangeField
 
     Based on :class:`~django.forms.DateField` and translates its input into
-    :class:`~psycopg2:psycopg2.extras.DateRange`. Default for
+    ``django.db.backends.postgresql.psycopg_any.DateRange``. Default for
     :class:`~django.contrib.postgres.fields.DateRangeField`.
 
 Widgets
diff --git a/docs/ref/databases.txt b/docs/ref/databases.txt
index 79b0386e1a..d62adbe832 100644
--- a/docs/ref/databases.txt
+++ b/docs/ref/databases.txt
@@ -114,11 +114,21 @@ below for information on how to set up your database correctly.
 PostgreSQL notes
 ================
 
-Django supports PostgreSQL 12 and higher. `psycopg2`_ 2.8.4 or higher is
-required, though the latest release is recommended.
+Django supports PostgreSQL 12 and higher. `psycopg`_ 3.1+ or `psycopg2`_ 2.8.4+
+is required, though the latest `psycopg`_ 3.1+ is recommended.
 
+.. _psycopg: https://www.psycopg.org/psycopg3/
 .. _psycopg2: https://www.psycopg.org/
 
+.. note::
+
+    Support for ``psycopg2`` is likely to be deprecated and removed at some
+    point in the future.
+
+.. versionchanged:: 4.2
+
+    Support for ``psycopg`` 3.1+ was added.
+
 .. _postgresql-connection-settings:
 
 PostgreSQL connection settings
@@ -199,12 +209,12 @@ level`_. If you need a higher isolation level such as ``REPEATABLE READ`` or
 ``SERIALIZABLE``, set it in the :setting:`OPTIONS` part of your database
 configuration in :setting:`DATABASES`::
 
-    import psycopg2.extensions
+    from django.db.backends.postgresql.psycopg_any import IsolationLevel
 
     DATABASES = {
         # ...
         'OPTIONS': {
-            'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE,
+            'isolation_level': IsolationLevel.SERIALIZABLE,
         },
     }
 
@@ -216,6 +226,10 @@ configuration in :setting:`DATABASES`::
 
 .. _isolation level: https://www.postgresql.org/docs/current/transaction-iso.html
 
+.. versionchanged:: 4.2
+
+    ``IsolationLevel`` was added.
+
 Indexes for ``varchar`` and ``text`` columns
 --------------------------------------------
 
@@ -244,7 +258,7 @@ Server-side cursors
 When using :meth:`QuerySet.iterator() `,
 Django opens a :ref:`server-side
-cursor `. By default, PostgreSQL assumes that
+cursor `. By default, PostgreSQL assumes that
 only the first 10% of the results of cursor queries will be fetched. The query
 planner spends less time planning the query and starts returning results
 faster, but this could diminish performance if more than 10% of the results are
diff --git a/docs/releases/1.11.txt b/docs/releases/1.11.txt
index 5da81cd739..50b78305d4 100644
--- a/docs/releases/1.11.txt
+++ b/docs/releases/1.11.txt
@@ -256,10 +256,11 @@ Database backends
 * Added the :setting:`TEST['TEMPLATE'] ` setting
   to let PostgreSQL users specify a template for creating the test database.
 
-* :meth:`.QuerySet.iterator()` now uses :ref:`server-side cursors
-  ` on PostgreSQL. This feature transfers some of
-  the worker memory load (used to hold query results) to the database and might
-  increase database memory usage.
+* :meth:`.QuerySet.iterator()` now uses `server-side cursors`_ on PostgreSQL.
+  This feature transfers some of the worker memory load (used to hold query
+  results) to the database and might increase database memory usage.
+
+  .. _server-side cursors: https://www.psycopg.org/docs/usage.html#server-side-cursors
 
 * Added MySQL support for the ``'isolation_level'`` option in
   :setting:`OPTIONS` to allow specifying the :ref:`transaction isolation level
diff --git a/docs/releases/4.2.txt b/docs/releases/4.2.txt
index 3abe03ef85..077762275a 100644
--- a/docs/releases/4.2.txt
+++ b/docs/releases/4.2.txt
@@ -26,6 +26,20 @@ and only officially support the latest release of each series.
 What's new in Django 4.2
 ========================
 
+Psycopg 3 support
+-----------------
+
+Django now supports `psycopg`_ version 3.1 or higher. To update your code,
+install the `psycopg library`_; you don't need to change the
+:setting:`ENGINE ` as ``django.db.backends.postgresql``
+supports both libraries.
+
+Support for ``psycopg2`` is likely to be deprecated and removed at some point
+in the future.
+
+.. _psycopg: https://www.psycopg.org/psycopg3/
+.. _psycopg library: https://pypi.org/project/psycopg/
+
 Minor features
 --------------
 
diff --git a/docs/topics/db/transactions.txt b/docs/topics/db/transactions.txt
index d0b67b86f4..b41c9fa758 100644
--- a/docs/topics/db/transactions.txt
+++ b/docs/topics/db/transactions.txt
@@ -397,8 +397,8 @@ tasks, etc.), this should be fine. If it's not (if your follow-up action is so
 critical that its failure should mean the failure of the transaction itself),
 then you don't want to use the :func:`on_commit` hook. Instead, you may want
 `two-phase commit`_ such as the :ref:`psycopg Two-Phase Commit protocol support
-` and the :pep:`optional Two-Phase Commit Extensions in the
-Python DB-API specification <249#optional-two-phase-commit-extensions>`.
+` and the :pep:`optional Two-Phase Commit Extensions
+in the Python DB-API specification <249#optional-two-phase-commit-extensions>`.
 
 Callbacks are not run until autocommit is restored on the connection following
 the commit (because otherwise any queries done in a callback would open an
diff --git a/docs/topics/install.txt b/docs/topics/install.txt
index 1590da8906..bbc74bd4e3 100644
--- a/docs/topics/install.txt
+++ b/docs/topics/install.txt
@@ -79,8 +79,9 @@ databases with Django.
 In addition to a database backend, you'll need to make sure your Python
 database bindings are installed.
 
-* If you're using PostgreSQL, you'll need the `psycopg2`_ package. Refer to the
-  :ref:`PostgreSQL notes ` for further details.
+* If you're using PostgreSQL, you'll need the `psycopg`_ or `psycopg2`_
+  package. Refer to the :ref:`PostgreSQL notes ` for further
+  details.
 
 * If you're using MySQL or MariaDB, you'll need a :ref:`DB API driver
   ` like ``mysqlclient``. See :ref:`notes for the MySQL
@@ -111,6 +112,7 @@ database queries, Django will need permission to create a test database.
 .. _PostgreSQL: https://www.postgresql.org/
 .. _MariaDB: https://mariadb.org/
 .. _MySQL: https://www.mysql.com/
+.. _psycopg: https://www.psycopg.org/psycopg3/
 .. _psycopg2: https://www.psycopg.org/
 .. _SQLite: https://www.sqlite.org/
 .. _cx_Oracle: https://oracle.github.io/python-cx_Oracle/
diff --git a/tests/backends/postgresql/tests.py b/tests/backends/postgresql/tests.py
index 41d445e6c7..7e1a2d000d 100644
--- a/tests/backends/postgresql/tests.py
+++ b/tests/backends/postgresql/tests.py
@@ -14,6 +14,11 @@ from django.db import (
 from django.db.backends.base.base import BaseDatabaseWrapper
 from django.test import TestCase, override_settings
 
+try:
+    from django.db.backends.postgresql.psycopg_any import is_psycopg3
+except ImportError:
+    is_psycopg3 = False
+
 
 @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL tests")
 class Tests(TestCase):
@@ -228,7 +233,7 @@ class Tests(TestCase):
         # Since this is a django.test.TestCase, a transaction is in progress
         # and the isolation level isn't reported as 0. This test assumes that
         # PostgreSQL is configured with the default isolation level.
-        # Check the level on the psycopg2 connection, not the Django wrapper.
+        # Check the level on the psycopg connection, not the Django wrapper.
         self.assertIsNone(connection.connection.isolation_level)
 
         new_connection = connection.copy()
@@ -238,7 +243,7 @@ class Tests(TestCase):
         try:
             # Start a transaction so the isolation level isn't reported as 0.
             new_connection.set_autocommit(False)
-            # Check the level on the psycopg2 connection, not the Django wrapper.
+            # Check the level on the psycopg connection, not the Django wrapper.
             self.assertEqual(
                 new_connection.connection.isolation_level,
                 IsolationLevel.SERIALIZABLE,
@@ -252,7 +257,7 @@ class Tests(TestCase):
             new_connection.settings_dict["OPTIONS"]["isolation_level"] = -1
             msg = (
                 "Invalid transaction isolation level -1 specified. Use one of the "
-                "IsolationLevel values."
+                "psycopg.IsolationLevel values."
             )
             with self.assertRaisesMessage(ImproperlyConfigured, msg):
                 new_connection.ensure_connection()
@@ -268,7 +273,7 @@ class Tests(TestCase):
 
     def _select(self, val):
         with connection.cursor() as cursor:
-            cursor.execute("SELECT %s", (val,))
+            cursor.execute("SELECT %s::text[]", (val,))
             return cursor.fetchone()[0]
 
     def test_select_ascii_array(self):
@@ -308,17 +313,18 @@ class Tests(TestCase):
     )
     def test_correct_extraction_psycopg_version(self):
-        from django.db.backends.postgresql.base import Database, psycopg2_version
+        from django.db.backends.postgresql.base import Database, psycopg_version
 
         with mock.patch.object(Database, "__version__", "4.2.1 (dt dec pq3 ext lo64)"):
-            self.assertEqual(psycopg2_version(), (4, 2, 1))
+            self.assertEqual(psycopg_version(), (4, 2, 1))
         with mock.patch.object(
             Database, "__version__", "4.2b0.dev1 (dt dec pq3 ext lo64)"
         ):
-            self.assertEqual(psycopg2_version(), (4, 2))
+            self.assertEqual(psycopg_version(), (4, 2))
 
     @override_settings(DEBUG=True)
-    def test_copy_cursors(self):
+    @unittest.skipIf(is_psycopg3, "psycopg2 specific test")
+    def test_copy_to_expert_cursors(self):
         out = StringIO()
         copy_expert_sql = "COPY django_session TO STDOUT (FORMAT CSV, HEADER)"
         with connection.cursor() as cursor:
@@ -329,6 +335,16 @@ class Tests(TestCase):
             [copy_expert_sql, "COPY django_session TO STDOUT"],
         )
 
+    @override_settings(DEBUG=True)
+    @unittest.skipUnless(is_psycopg3, "psycopg3 specific test")
+    def test_copy_cursors(self):
+        copy_sql = "COPY django_session TO STDOUT (FORMAT CSV, HEADER)"
+        with connection.cursor() as cursor:
+            with cursor.copy(copy_sql) as copy:
+                for row in copy:
+                    pass
+        self.assertEqual([q["sql"] for q in connection.queries], [copy_sql])
+
     def test_get_database_version(self):
         new_connection = connection.copy()
         new_connection.pg_version = 110009
diff --git a/tests/backends/tests.py b/tests/backends/tests.py
index c3cfa61fdb..5f11f91958 100644
--- a/tests/backends/tests.py
+++ b/tests/backends/tests.py
@@ -454,7 +454,7 @@ class BackendTestCase(TransactionTestCase):
         with connection.cursor() as cursor:
             self.assertIsInstance(cursor, CursorWrapper)
         # Both InterfaceError and ProgrammingError seem to be used when
-        # accessing closed cursor (psycopg2 has InterfaceError, rest seem
+        # accessing closed cursor (psycopg has InterfaceError, rest seem
         # to use ProgrammingError).
         with self.assertRaises(connection.features.closed_cursor_error_class):
             # cursor should be closed, so no queries should be possible.
@@ -462,12 +462,12 @@
 
     @unittest.skipUnless(
         connection.vendor == "postgresql",
-        "Psycopg2 specific cursor.closed attribute needed",
+        "Psycopg specific cursor.closed attribute needed",
     )
     def test_cursor_contextmanager_closing(self):
         # There isn't a generic way to test that cursors are closed, but
-        # psycopg2 offers us a way to check that by closed attribute.
-        # So, run only on psycopg2 for that reason.
+        # psycopg offers us a way to check that by closed attribute.
+        # So, run only on psycopg for that reason.
         with connection.cursor() as cursor:
             self.assertIsInstance(cursor, CursorWrapper)
         self.assertTrue(cursor.closed)
diff --git a/tests/db_functions/datetime/test_extract_trunc.py b/tests/db_functions/datetime/test_extract_trunc.py
index 2d38234981..b2327931f0 100644
--- a/tests/db_functions/datetime/test_extract_trunc.py
+++ b/tests/db_functions/datetime/test_extract_trunc.py
@@ -245,7 +245,7 @@ class DateFunctionTests(TestCase):
         self.create_model(start_datetime, end_datetime)
         self.create_model(end_datetime, start_datetime)
 
-        with self.assertRaises((DataError, OperationalError, ValueError)):
+        with self.assertRaises((OperationalError, ValueError)):
             DTModel.objects.filter(
                 start_datetime__year=Extract(
                     "start_datetime", "day' FROM start_datetime)) OR 1=1;--"
diff --git a/tests/db_utils/tests.py b/tests/db_utils/tests.py
index 9c0ec905cc..a2d9cc7b5e 100644
--- a/tests/db_utils/tests.py
+++ b/tests/db_utils/tests.py
@@ -62,14 +62,20 @@ class ConnectionHandlerTests(SimpleTestCase):
 class DatabaseErrorWrapperTests(TestCase):
     @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL test")
     def test_reraising_backend_specific_database_exception(self):
+        from django.db.backends.postgresql.psycopg_any import is_psycopg3
+
         with connection.cursor() as cursor:
             msg = 'table "X" does not exist'
             with self.assertRaisesMessage(ProgrammingError, msg) as cm:
                 cursor.execute('DROP TABLE "X"')
         self.assertNotEqual(type(cm.exception), type(cm.exception.__cause__))
         self.assertIsNotNone(cm.exception.__cause__)
-        self.assertIsNotNone(cm.exception.__cause__.pgcode)
-        self.assertIsNotNone(cm.exception.__cause__.pgerror)
+        if is_psycopg3:
+            self.assertIsNotNone(cm.exception.__cause__.diag.sqlstate)
+            self.assertIsNotNone(cm.exception.__cause__.diag.message_primary)
+        else:
+            self.assertIsNotNone(cm.exception.__cause__.pgcode)
+            self.assertIsNotNone(cm.exception.__cause__.pgerror)
 
 
 class LoadBackendTests(SimpleTestCase):
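
Note: the db_utils change above hinges on the two drivers' different error-introspection
APIs: psycopg2 exposes pgcode/pgerror directly on the exception, while psycopg 3 groups the
server diagnostics under a diag object. A minimal driver-only sketch, not part of the patch,
with a placeholder DSN:

    import psycopg

    # Hypothetical example; "dbname=test" is a placeholder DSN.
    try:
        with psycopg.connect("dbname=test") as conn:
            conn.execute('DROP TABLE "X"')
    except psycopg.errors.UndefinedTable as exc:
        # psycopg 3: server diagnostics live on exc.diag.
        print(exc.diag.sqlstate)         # e.g. 42P01
        print(exc.diag.message_primary)  # e.g. table "X" does not exist
        # psycopg2 exposed the same information as exc.pgcode / exc.pgerror.
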
"postgresql", "psycopg2 prohibits null characters in data." - ) + @skipUnlessDBFeature("prohibits_null_characters_in_text_exception") def test_loaddata_null_characters_on_postgresql(self): - msg = ( - "Could not load fixtures.Article(pk=2): " - "A string literal cannot contain NUL (0x00) characters." - ) - with self.assertRaisesMessage(ValueError, msg): + error, msg = connection.features.prohibits_null_characters_in_text_exception + msg = f"Could not load fixtures.Article(pk=2): {msg}" + with self.assertRaisesMessage(error, msg): management.call_command("loaddata", "null_character_in_field_value.json") def test_loaddata_app_option(self): diff --git a/tests/gis_tests/tests.py b/tests/gis_tests/tests.py index d1c93592a8..9da2b4df99 100644 --- a/tests/gis_tests/tests.py +++ b/tests/gis_tests/tests.py @@ -36,7 +36,7 @@ if HAS_POSTGRES: raise NotImplementedError("This function was not expected to be called") -@unittest.skipUnless(HAS_POSTGRES, "The psycopg2 driver is needed for these tests") +@unittest.skipUnless(HAS_POSTGRES, "The psycopg driver is needed for these tests") class TestPostGISVersionCheck(unittest.TestCase): """ The PostGIS version check parses correctly the version numbers diff --git a/tests/model_fields/test_jsonfield.py b/tests/model_fields/test_jsonfield.py index 05816817ef..60357d87b2 100644 --- a/tests/model_fields/test_jsonfield.py +++ b/tests/model_fields/test_jsonfield.py @@ -1007,7 +1007,7 @@ class TestQuerying(TestCase): False, ) self.assertIn( - """."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"' """, + """."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"'""", queries[0]["sql"], ) diff --git a/tests/postgres_tests/test_apps.py b/tests/postgres_tests/test_apps.py index 7c4cc38183..d9fb962251 100644 --- a/tests/postgres_tests/test_apps.py +++ b/tests/postgres_tests/test_apps.py @@ -19,6 +19,7 @@ try: DateTimeRange, DateTimeTZRange, NumericRange, + is_psycopg3, ) except ImportError: pass @@ -59,6 +60,7 @@ class PostgresConfigTests(TestCase): MigrationWriter.serialize(field) assertNotSerializable() + import_name = "psycopg.types.range" if is_psycopg3 else "psycopg2.extras" with self.modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}): for default, test_field in tests: with self.subTest(default=default): @@ -68,16 +70,12 @@ class PostgresConfigTests(TestCase): imports, { "import django.contrib.postgres.fields.ranges", - "import psycopg2.extras", + f"import {import_name}", }, ) self.assertIn( - "%s.%s(default=psycopg2.extras.%r)" - % ( - field.__module__, - field.__class__.__name__, - default, - ), + f"{field.__module__}.{field.__class__.__name__}" + f"(default={import_name}.{default!r})", serialized_field, ) assertNotSerializable() diff --git a/tests/postgres_tests/test_array.py b/tests/postgres_tests/test_array.py index a3c26fddae..86e9d00b41 100644 --- a/tests/postgres_tests/test_array.py +++ b/tests/postgres_tests/test_array.py @@ -317,7 +317,7 @@ class TestQuerying(PostgreSQLTestCase): def test_in_including_F_object(self): # This test asserts that Array objects passed to filters can be # constructed to contain F objects. This currently doesn't work as the - # psycopg2 mogrify method that generates the ARRAY() syntax is + # psycopg mogrify method that generates the ARRAY() syntax is # expecting literals, not column references (#27095). 
diff --git a/tests/postgres_tests/test_array.py b/tests/postgres_tests/test_array.py
index a3c26fddae..86e9d00b41 100644
--- a/tests/postgres_tests/test_array.py
+++ b/tests/postgres_tests/test_array.py
@@ -317,7 +317,7 @@ class TestQuerying(PostgreSQLTestCase):
     def test_in_including_F_object(self):
         # This test asserts that Array objects passed to filters can be
         # constructed to contain F objects. This currently doesn't work as the
-        # psycopg2 mogrify method that generates the ARRAY() syntax is
+        # psycopg mogrify method that generates the ARRAY() syntax is
         # expecting literals, not column references (#27095).
         self.assertSequenceEqual(
             NullableIntegerArrayModel.objects.filter(field__in=[[models.F("id")]]),
diff --git a/tests/requirements/postgres.txt b/tests/requirements/postgres.txt
index f0288c8b74..726a08b3e4 100644
--- a/tests/requirements/postgres.txt
+++ b/tests/requirements/postgres.txt
@@ -1 +1 @@
-psycopg2>=2.8.4
+psycopg[binary]>=3.1
diff --git a/tests/schema/test_logging.py b/tests/schema/test_logging.py
index 2821e5f406..9c7069c874 100644
--- a/tests/schema/test_logging.py
+++ b/tests/schema/test_logging.py
@@ -9,9 +9,9 @@ class SchemaLoggerTests(TestCase):
         params = [42, 1337]
         with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
             editor.execute(sql, params)
+        if connection.features.schema_editor_uses_clientside_param_binding:
+            sql = "SELECT * FROM foo WHERE id in (42, 1337)"
+            params = None
         self.assertEqual(cm.records[0].sql, sql)
         self.assertEqual(cm.records[0].params, params)
-        self.assertEqual(
-            cm.records[0].getMessage(),
-            "SELECT * FROM foo WHERE id in (%s, %s); (params [42, 1337])",
-        )
+        self.assertEqual(cm.records[0].getMessage(), f"{sql}; (params {params})")
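
Note: the schema-logging change above accounts for the schema editor logging the fully
interpolated statement when the schema_editor_uses_clientside_param_binding feature is set,
i.e. the literal values are bound client-side before the DDL is sent. Outside Django,
psycopg 3 offers a ClientCursor with the same client-side binding style; a minimal sketch,
not part of the patch, assuming psycopg >= 3.1 and a placeholder DSN:

    import psycopg

    # Hypothetical example; "dbname=test" is a placeholder DSN.
    with psycopg.connect("dbname=test", cursor_factory=psycopg.ClientCursor) as conn:
        with conn.cursor() as cur:
            # mogrify() returns the fully interpolated query without executing it,
            # e.g. "SELECT * FROM foo WHERE id in (42, 1337)".
            print(cur.mogrify("SELECT * FROM foo WHERE id in (%s, %s)", [42, 1337]))
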