Merge branch 'main' into ticket_25782
.flake8 (new file)
@@ -0,0 +1,9 @@
+[flake8]
+exclude = build,.git,.tox,./tests/.env
+extend-ignore = E203
+max-line-length = 88
+per-file-ignores =
+    django/core/cache/backends/filebased.py:W601
+    django/core/cache/backends/base.py:W601
+    django/core/cache/backends/redis.py:W601
+    tests/cache/tests.py:W601
AUTHORS
@@ -82,10 +82,12 @@ answer newbie questions, and generally made Django that much better:
     Andreas Mock <andreas.mock@web.de>
     Andreas Pelme <andreas@pelme.se>
     Andrés Torres Marroquín <andres.torres.marroquin@gmail.com>
+    Andreu Vallbona Plazas <avallbona@gmail.com>
     Andrew Brehaut <https://brehaut.net/blog>
     Andrew Clark <amclark7@gmail.com>
     Andrew Durdin <adurdin@gmail.com>
     Andrew Godwin <andrew@aeracode.org>
+    Andrew Miller <info+django@akmiller.co.uk>
     Andrew Pinkham <http://AndrewsForge.com>
     Andrews Medina <andrewsmedina@gmail.com>
     Andrew Northall <andrew@northall.me.uk>
@@ -13,5 +13,4 @@ graft extras
 graft js_tests
 graft scripts
 graft tests
-global-exclude __pycache__
 global-exclude *.py[co]
@@ -1814,6 +1814,9 @@ class ModelAdmin(BaseModelAdmin):
 
     @csrf_protect_m
     def changeform_view(self, request, object_id=None, form_url="", extra_context=None):
+        if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
+            return self._changeform_view(request, object_id, form_url, extra_context)
+
         with transaction.atomic(using=router.db_for_write(self.model)):
             return self._changeform_view(request, object_id, form_url, extra_context)
 
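Note: the same guard is applied to delete_view() and to UserAdmin.add_view() in the hunks below — safe (read-only) HTTP methods skip the write-database transaction entirely. A minimal sketch of the pattern outside the admin, assuming a plain view handler and Django's routing/transaction APIs (the helper name is illustrative, not part of the diff):

    from django.db import router, transaction

    SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE")

    def run_with_transaction_on_writes(request, model, handler):
        # Hypothetical helper: only write methods need the atomic block, so
        # read-only requests call the handler directly and never start a
        # transaction against the model's write database.
        if request.method in SAFE_METHODS:
            return handler(request)
        with transaction.atomic(using=router.db_for_write(model)):
            return handler(request)
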
@@ -2175,6 +2178,9 @@ class ModelAdmin(BaseModelAdmin):
 
     @csrf_protect_m
     def delete_view(self, request, object_id, extra_context=None):
+        if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
+            return self._delete_view(request, object_id, extra_context)
+
         with transaction.atomic(using=router.db_for_write(self.model)):
             return self._delete_view(request, object_id, extra_context)
 
@@ -390,6 +390,10 @@ body.popup .submit-row {
     border-right-color: var(--darkened-bg);
 }
 
+.inline-related h3 {
+    color: var(--body-loud-color);
+}
+
 .inline-related h3 span.delete {
     float: right;
 }
@@ -21,7 +21,7 @@
 }
 
 .login #content {
-    padding: 20px 20px 0;
+    padding: 20px;
 }
 
 .login #container {
@@ -117,6 +117,9 @@ class UserAdmin(admin.ModelAdmin):
     @sensitive_post_parameters_m
     @csrf_protect_m
     def add_view(self, request, form_url="", extra_context=None):
+        if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
+            return self._add_view(request, form_url, extra_context)
+
         with transaction.atomic(using=router.db_for_write(self.model)):
             return self._add_view(request, form_url, extra_context)
 
@@ -39,14 +39,20 @@ def verify_password(password, encoded, preferred="default"):
     three part encoded digest, and the second whether to regenerate the
     password.
     """
-    if password is None or not is_password_usable(encoded):
-        return False, False
+    fake_runtime = password is None or not is_password_usable(encoded)
 
     preferred = get_hasher(preferred)
     try:
         hasher = identify_hasher(encoded)
     except ValueError:
         # encoded is gibberish or uses a hasher that's no longer installed.
+        fake_runtime = True
+
+    if fake_runtime:
+        # Run the default password hasher once to reduce the timing difference
+        # between an existing user with an unusable password and a nonexistent
+        # user or missing hasher (similar to #20760).
+        make_password(get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH))
         return False, False
 
     hasher_changed = hasher.algorithm != preferred.algorithm
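Note: the rewrite above equalizes timing between "password is unusable/None" and "hasher not recognized" by always running the default hasher once before bailing out. A hedged sketch of what a caller observes through the public check_password() wrapper (assumes a configured Django settings module):

    from django.contrib.auth.hashers import check_password, make_password

    encoded = make_password("s3cret")
    assert check_password("s3cret", encoded) is True
    assert check_password("wrong", encoded) is False
    # With the change above, a None password (e.g. for a nonexistent user)
    # still hashes one random string, so the failure takes roughly as long
    # as a real mismatch instead of returning almost immediately.
    assert check_password(None, encoded) is False
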
@@ -75,7 +75,7 @@ class ContentTypeManager(models.Manager):
                 ct = self._get_from_cache(opts)
             except KeyError:
                 needed_models[opts.app_label].add(opts.model_name)
-                needed_opts[opts].append(model)
+                needed_opts[(opts.app_label, opts.model_name)].append(model)
             else:
                 results[model] = ct
         if needed_opts:
@@ -89,18 +89,13 @@ class ContentTypeManager(models.Manager):
             )
             cts = self.filter(condition)
             for ct in cts:
-                opts_models = needed_opts.pop(
-                    ct._meta.apps.get_model(ct.app_label, ct.model)._meta, []
-                )
+                opts_models = needed_opts.pop((ct.app_label, ct.model), [])
                 for model in opts_models:
                     results[model] = ct
                 self._add_to_cache(self.db, ct)
         # Create content types that weren't in the cache or DB.
-        for opts, opts_models in needed_opts.items():
-            ct = self.create(
-                app_label=opts.app_label,
-                model=opts.model_name,
-            )
+        for (app_label, model_name), opts_models in needed_opts.items():
+            ct = self.create(app_label=app_label, model=model_name)
             self._add_to_cache(self.db, ct)
             for model in opts_models:
                 results[model] = ct
@@ -1,5 +1,6 @@
 import functools
 import os
+import warnings
 
 from django.apps import apps
 from django.conf import settings
@@ -8,6 +9,7 @@ from django.core.checks import Error, Warning
 from django.core.exceptions import ImproperlyConfigured
 from django.core.files.storage import FileSystemStorage, Storage, default_storage
 from django.utils._os import safe_join
+from django.utils.deprecation import RemovedInDjango61Warning
 from django.utils.functional import LazyObject, empty
 from django.utils.module_loading import import_string
 
@@ -15,6 +17,32 @@ from django.utils.module_loading import import_string
 searched_locations = []
 
 
+# RemovedInDjango61Warning: When the deprecation ends, remove completely.
+def _check_deprecated_find_param(class_name="", find_all=False, **kwargs):
+    method_name = "find" if not class_name else f"{class_name}.find"
+    if "all" in kwargs:
+        legacy_all = kwargs.pop("all")
+        msg = (
+            "Passing the `all` argument to find() is deprecated. Use `find_all` "
+            "instead."
+        )
+        warnings.warn(msg, RemovedInDjango61Warning, stacklevel=2)
+
+        # If both `find_all` and `all` were given, raise TypeError.
+        if find_all is not False:
+            raise TypeError(
+                f"{method_name}() got multiple values for argument 'find_all'"
+            )
+
+        find_all = legacy_all
+
+    if kwargs:  # any remaining kwargs must be a TypeError
+        first = list(kwargs.keys()).pop()
+        raise TypeError(f"{method_name}() got an unexpected keyword argument '{first}'")
+
+    return find_all
+
+
 class BaseFinder:
     """
     A base file finder to be used for custom staticfiles finder classes.
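Note: this helper centralizes the deprecation shim used by every find() below — pop the legacy `all` keyword out of **kwargs, warn, refuse conflicting or unknown keywords, and hand the value back under its new `find_all` name. A stripped-down generic version of the same pattern (illustrative only, not part of the diff):

    import warnings

    def remap_deprecated_kwarg(old, new, new_value, default, kwargs, warning_cls):
        # Hypothetical standalone equivalent of _check_deprecated_find_param().
        if old in kwargs:
            legacy = kwargs.pop(old)
            warnings.warn(
                f"Passing `{old}` is deprecated, use `{new}` instead.",
                warning_cls,
                stacklevel=3,
            )
            if new_value is not default:
                raise TypeError(f"got multiple values for argument '{new}'")
            new_value = legacy
        if kwargs:
            unexpected = next(iter(kwargs))
            raise TypeError(f"got an unexpected keyword argument '{unexpected}'")
        return new_value
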
@@ -26,12 +54,20 @@ class BaseFinder:
         "configured correctly."
     )
 
-    def find(self, path, all=False):
+    # RemovedInDjango61Warning: When the deprecation ends, remove completely.
+    def _check_deprecated_find_param(self, **kwargs):
+        return _check_deprecated_find_param(
+            class_name=self.__class__.__qualname__, **kwargs
+        )
+
+    # RemovedInDjango61Warning: When the deprecation ends, replace with:
+    # def find(self, path, find_all=False):
+    def find(self, path, find_all=False, **kwargs):
         """
         Given a relative file path, find an absolute file path.
 
-        If the ``all`` parameter is False (default) return only the first found
-        file path; if True, return a list of all found files paths.
+        If the ``find_all`` parameter is False (default) return only the first
+        found file path; if True, return a list of all found files paths.
         """
         raise NotImplementedError(
             "subclasses of BaseFinder must provide a find() method"
@@ -113,17 +149,22 @@ class FileSystemFinder(BaseFinder):
             )
         return errors
 
-    def find(self, path, all=False):
+    # RemovedInDjango61Warning: When the deprecation ends, replace with:
+    # def find(self, path, find_all=False):
+    def find(self, path, find_all=False, **kwargs):
         """
         Look for files in the extra locations as defined in STATICFILES_DIRS.
         """
+        # RemovedInDjango61Warning.
+        if kwargs:
+            find_all = self._check_deprecated_find_param(find_all=find_all, **kwargs)
         matches = []
         for prefix, root in self.locations:
             if root not in searched_locations:
                 searched_locations.append(root)
             matched_path = self.find_location(root, path, prefix)
             if matched_path:
-                if not all:
+                if not find_all:
                     return matched_path
                 matches.append(matched_path)
         return matches
@@ -191,10 +232,15 @@ class AppDirectoriesFinder(BaseFinder):
         for path in utils.get_files(storage, ignore_patterns):
             yield path, storage
 
-    def find(self, path, all=False):
+    # RemovedInDjango61Warning: When the deprecation ends, replace with:
+    # def find(self, path, find_all=False):
+    def find(self, path, find_all=False, **kwargs):
         """
         Look for files in the app directories.
         """
+        # RemovedInDjango61Warning.
+        if kwargs:
+            find_all = self._check_deprecated_find_param(find_all=find_all, **kwargs)
         matches = []
         for app in self.apps:
             app_location = self.storages[app].location
@@ -202,7 +248,7 @@ class AppDirectoriesFinder(BaseFinder):
                 searched_locations.append(app_location)
             match = self.find_in_app(app, path)
             if match:
-                if not all:
+                if not find_all:
                     return match
                 matches.append(match)
         return matches
@@ -241,10 +287,15 @@ class BaseStorageFinder(BaseFinder):
             self.storage = self.storage()
         super().__init__(*args, **kwargs)
 
-    def find(self, path, all=False):
+    # RemovedInDjango61Warning: When the deprecation ends, replace with:
+    # def find(self, path, find_all=False):
+    def find(self, path, find_all=False, **kwargs):
         """
         Look for files in the default file storage, if it's local.
         """
+        # RemovedInDjango61Warning.
+        if kwargs:
+            find_all = self._check_deprecated_find_param(find_all=find_all, **kwargs)
         try:
             self.storage.path("")
         except NotImplementedError:
@@ -254,7 +305,7 @@ class BaseStorageFinder(BaseFinder):
                 searched_locations.append(self.storage.location)
             if self.storage.exists(path):
                 match = self.storage.path(path)
-                if all:
+                if find_all:
                     match = [match]
                 return match
         return []
@@ -285,18 +336,23 @@ class DefaultStorageFinder(BaseStorageFinder):
             )
 
 
-def find(path, all=False):
+# RemovedInDjango61Warning: When the deprecation ends, replace with:
+# def find(path, find_all=False):
+def find(path, find_all=False, **kwargs):
     """
     Find a static file with the given path using all enabled finders.
 
-    If ``all`` is ``False`` (default), return the first matching
+    If ``find_all`` is ``False`` (default), return the first matching
     absolute path (or ``None`` if no match). Otherwise return a list.
     """
+    # RemovedInDjango61Warning.
+    if kwargs:
+        find_all = _check_deprecated_find_param(find_all=find_all, **kwargs)
     searched_locations[:] = []
     matches = []
     for finder in get_finders():
-        result = finder.find(path, all=all)
-        if not all and result:
+        result = finder.find(path, find_all=find_all)
+        if not find_all and result:
             return result
         if not isinstance(result, (list, tuple)):
             result = [result]
@@ -304,7 +360,7 @@ def find(path, all=False):
     if matches:
         return matches
     # No match.
-    return [] if all else None
+    return [] if find_all else None
 
 
 def get_finders():
@@ -19,7 +19,7 @@ class Command(LabelCommand):
 
     def handle_label(self, path, **options):
         verbosity = options["verbosity"]
-        result = finders.find(path, all=options["all"])
+        result = finders.find(path, find_all=options["all"])
         if verbosity >= 2:
             searched_locations = (
                 "\nLooking in the following locations:\n  %s"
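Note: at call sites the rename is mechanical, as the findstatic command above shows. A usage sketch for the public API (assumes a project with django.contrib.staticfiles configured):

    from django.contrib.staticfiles import finders

    # First match only (a path, or None if nothing matched).
    path = finders.find("admin/css/base.css")

    # Every match across all configured finders.
    paths = finders.find("admin/css/base.css", find_all=True)

    # The old keyword keeps working until Django 6.1 but emits
    # RemovedInDjango61Warning:
    #   finders.find("admin/css/base.css", all=True)
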
@@ -34,7 +34,18 @@ class Storage:
         if not hasattr(content, "chunks"):
             content = File(content, name)
 
+        # Ensure that the name is valid, before and after having the storage
+        # system potentially modifying the name. This duplicates the check made
+        # inside `get_available_name` but it's necessary for those cases where
+        # `get_available_name` is overriden and validation is lost.
+        validate_file_name(name, allow_relative_path=True)
+
+        # Potentially find a different name depending on storage constraints.
         name = self.get_available_name(name, max_length=max_length)
+        # Validate the (potentially) new name.
+        validate_file_name(name, allow_relative_path=True)
+
+        # The save operation should return the actual name of the file saved.
         name = self._save(name, content)
         # Ensure that the name returned from the storage system is still valid.
         validate_file_name(name, allow_relative_path=True)
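Note: validating both before and after get_available_name() closes the gap where an overridden storage method reintroduces an unsafe name. A hedged sketch of the failure mode the extra check now catches (the subclass is hypothetical; assumes Django settings are configured):

    from django.core.files.base import ContentFile
    from django.core.files.storage import FileSystemStorage

    class SloppyStorage(FileSystemStorage):
        # Hypothetical override that discards the sanitized name entirely.
        def get_available_name(self, name, max_length=None):
            return "../outside-media/" + name

    storage = SloppyStorage()
    # With the hunk above this raises SuspiciousFileOperation instead of
    # handing a traversal path to _save().
    storage.save("report.txt", ContentFile(b"data"))
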
@@ -10,10 +10,9 @@ def validate_file_name(name, allow_relative_path=False):
         raise SuspiciousFileOperation("Could not derive file name from '%s'" % name)
 
     if allow_relative_path:
-        # Use PurePosixPath() because this branch is checked only in
-        # FileField.generate_filename() where all file paths are expected to be
-        # Unix style (with forward slashes).
-        path = pathlib.PurePosixPath(name)
+        # Ensure that name can be treated as a pure posix path, i.e. Unix
+        # style (with forward slashes).
+        path = pathlib.PurePosixPath(str(name).replace("\\", "/"))
         if path.is_absolute() or ".." in path.parts:
             raise SuspiciousFileOperation(
                 "Detected path traversal attempt in '%s'" % name
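Note: normalizing backslashes before building the posix path means Windows-style separators can no longer slip a traversal past the check. A quick illustration:

    from django.core.exceptions import SuspiciousFileOperation
    from django.core.files.utils import validate_file_name

    validate_file_name("photos/cat.jpg", allow_relative_path=True)  # OK

    try:
        validate_file_name("..\\secrets.txt", allow_relative_path=True)
    except SuspiciousFileOperation:
        # "..\" becomes "../" after the replace() above, so ".." is caught.
        pass
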
@@ -286,7 +286,8 @@ class EmailMessage:
             # Use cached DNS_NAME for performance
             msg["Message-ID"] = make_msgid(domain=DNS_NAME)
         for name, value in self.extra_headers.items():
-            if name.lower() != "from":  # From is already handled
+            # Avoid headers handled above.
+            if name.lower() not in {"from", "to", "cc", "reply-to"}:
                 msg[name] = value
         return msg
 
@@ -427,14 +428,13 @@ class EmailMessage:
     def _set_list_header_if_not_empty(self, msg, header, values):
         """
         Set msg's header, either from self.extra_headers, if present, or from
-        the values argument.
+        the values argument if not empty.
         """
-        if values:
-            try:
-                value = self.extra_headers[header]
-            except KeyError:
-                value = ", ".join(str(v) for v in values)
-            msg[header] = value
+        try:
+            msg[header] = self.extra_headers[header]
+        except KeyError:
+            if values:
+                msg[header] = ", ".join(str(v) for v in values)
 
 
 class EmailMultiAlternatives(EmailMessage):
@@ -20,6 +20,7 @@ __all__ = [
     "close_old_connections",
     "connection",
     "connections",
+    "reset_queries",
     "router",
     "DatabaseError",
     "IntegrityError",
@@ -761,8 +761,11 @@ class ModelState:
         return self.name.lower()
 
     def get_field(self, field_name):
-        if field_name == "_order":
-            field_name = self.options.get("order_with_respect_to", field_name)
+        if (
+            field_name == "_order"
+            and self.options.get("order_with_respect_to") is not None
+        ):
+            field_name = self.options["order_with_respect_to"]
         return self.fields[field_name]
 
     @classmethod
@@ -776,6 +776,43 @@ class Model(AltersData, metaclass=ModelBase):
             return getattr(self, field_name)
         return getattr(self, field.attname)
 
+    # RemovedInDjango60Warning: When the deprecation ends, remove completely.
+    def _parse_save_params(self, *args, method_name, **kwargs):
+        defaults = {
+            "force_insert": False,
+            "force_update": False,
+            "using": None,
+            "update_fields": None,
+        }
+
+        warnings.warn(
+            f"Passing positional arguments to {method_name}() is deprecated",
+            RemovedInDjango60Warning,
+            stacklevel=2,
+        )
+        total_len_args = len(args) + 1  # include self
+        max_len_args = len(defaults) + 1
+        if total_len_args > max_len_args:
+            # Recreate the proper TypeError message from Python.
+            raise TypeError(
+                f"Model.{method_name}() takes from 1 to {max_len_args} positional "
+                f"arguments but {total_len_args} were given"
+            )
+
+        def get_param(param_name, param_value, arg_index):
+            if arg_index < len(args):
+                if param_value is not defaults[param_name]:
+                    # Recreate the proper TypeError message from Python.
+                    raise TypeError(
+                        f"Model.{method_name}() got multiple values for argument "
+                        f"'{param_name}'"
+                    )
+                return args[arg_index]
+
+            return param_value
+
+        return [get_param(k, v, i) for i, (k, v) in enumerate(kwargs.items())]
+
     # RemovedInDjango60Warning: When the deprecation ends, replace with:
     # def save(
     #     self, *, force_insert=False, force_update=False, using=None, update_fields=None,
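Note: _parse_save_params() keeps the old positional calling convention for save()/asave() alive while funnelling every value back into keyword form, and it recreates Python's own TypeError wording for the invalid cases. Roughly what callers see during the deprecation period (Book stands in for any concrete model; warning text abbreviated):

    import warnings

    book = Book(title="Django")
    book.save()  # Plain keyword-style call: unchanged.

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Positional arguments still map to (force_insert, force_update,
        # using, update_fields), but now emit RemovedInDjango60Warning.
        book.save(False, False, None, ["title"])
    assert "positional arguments" in str(caught[-1].message)

    # Invalid combinations raise TypeError with Python-style messages:
    #   book.save(True, force_insert=True)   -> multiple values for 'force_insert'
    #   book.save(True, True, None, None, 1) -> takes from 1 to 5 positional arguments
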
@@ -798,23 +835,14 @@ class Model(AltersData, metaclass=ModelBase):
         """
         # RemovedInDjango60Warning.
         if args:
-            warnings.warn(
-                "Passing positional arguments to save() is deprecated",
-                RemovedInDjango60Warning,
-                stacklevel=2,
+            force_insert, force_update, using, update_fields = self._parse_save_params(
+                *args,
+                method_name="save",
+                force_insert=force_insert,
+                force_update=force_update,
+                using=using,
+                update_fields=update_fields,
             )
-            for arg, attr in zip(
-                args, ["force_insert", "force_update", "using", "update_fields"]
-            ):
-                if arg:
-                    if attr == "force_insert":
-                        force_insert = arg
-                    elif attr == "force_update":
-                        force_update = arg
-                    elif attr == "using":
-                        using = arg
-                    else:
-                        update_fields = arg
 
         self._prepare_related_fields_for_save(operation_name="save")
 
@@ -883,24 +911,14 @@ class Model(AltersData, metaclass=ModelBase):
     ):
         # RemovedInDjango60Warning.
         if args:
-            warnings.warn(
-                "Passing positional arguments to asave() is deprecated",
-                RemovedInDjango60Warning,
-                stacklevel=2,
+            force_insert, force_update, using, update_fields = self._parse_save_params(
+                *args,
+                method_name="asave",
+                force_insert=force_insert,
+                force_update=force_update,
+                using=using,
+                update_fields=update_fields,
             )
-            for arg, attr in zip(
-                args, ["force_insert", "force_update", "using", "update_fields"]
-            ):
-                if arg:
-                    if attr == "force_insert":
-                        force_insert = arg
-                    elif attr == "force_update":
-                        force_update = arg
-                    elif attr == "using":
-                        using = arg
-                    else:
-                        update_fields = arg
 
         return await sync_to_async(self.save)(
             force_insert=force_insert,
             force_update=force_update,
@@ -1322,7 +1340,7 @@ class Model(AltersData, metaclass=ModelBase):
         field_map = {
             field.name: Value(getattr(self, field.attname), field)
             for field in meta.local_concrete_fields
-            if field.name not in exclude
+            if field.name not in exclude and not field.generated
         }
         if "pk" not in exclude:
             field_map["pk"] = Value(self.pk, meta.pk)
@@ -1613,7 +1613,6 @@ class Case(SQLiteNumericMixin, Expression):
         template_params = {**self.extra, **extra_context}
         case_parts = []
         sql_params = []
-        default_sql, default_params = compiler.compile(self.default)
         for case in self.cases:
             try:
                 case_sql, case_params = compiler.compile(case)
@@ -1624,6 +1623,8 @@ class Case(SQLiteNumericMixin, Expression):
                 break
             case_parts.append(case_sql)
             sql_params.extend(case_params)
+        else:
+            default_sql, default_params = compiler.compile(self.default)
         if not case_parts:
             return default_sql, default_params
         case_joiner = case_joiner or self.case_joiner
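Note: the `else:` attached to the `for` loop runs only when the loop finishes without `break`, so `self.default` is now compiled only when no case short-circuited the loop. Since `for ... else` is easy to misread, a tiny self-contained reminder of the semantics being relied on:

    def first_even(numbers):
        for n in numbers:
            if n % 2 == 0:
                result = n
                break
        else:
            # Reached only when no `break` fired -- the analogue of compiling
            # self.default in the hunk above.
            result = None
        return result

    assert first_even([1, 3, 4, 5]) == 4
    assert first_even([1, 3, 5]) is None
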
@@ -187,7 +187,9 @@ class RelatedField(FieldCacheMixin, Field):
         return errors
 
     def _check_relation_model_exists(self):
-        rel_is_missing = self.remote_field.model not in self.opts.apps.get_models()
+        rel_is_missing = self.remote_field.model not in self.opts.apps.get_models(
+            include_auto_created=True
+        )
         rel_is_string = isinstance(self.remote_field.model, str)
         model_name = (
             self.remote_field.model
@@ -929,7 +931,9 @@ class ForeignKey(ForeignObject):
 
     empty_strings_allowed = False
     default_error_messages = {
-        "invalid": _("%(model)s instance with %(field)s %(value)r does not exist.")
+        "invalid": _(
+            "%(model)s instance with %(field)s %(value)r is not a valid choice."
+        )
     }
     description = _("Foreign Key (type determined by related field)")
 
@@ -200,12 +200,15 @@ class ValuesIterable(BaseIterable):
         query = queryset.query
         compiler = query.get_compiler(queryset.db)
 
-        # extra(select=...) cols are always at the start of the row.
-        names = [
-            *query.extra_select,
-            *query.values_select,
-            *query.annotation_select,
-        ]
+        if query.selected:
+            names = list(query.selected)
+        else:
+            # extra(select=...) cols are always at the start of the row.
+            names = [
+                *query.extra_select,
+                *query.values_select,
+                *query.annotation_select,
+            ]
         indexes = range(len(names))
         for row in compiler.results_iter(
             chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
@@ -223,28 +226,6 @@ class ValuesListIterable(BaseIterable):
         queryset = self.queryset
         query = queryset.query
         compiler = query.get_compiler(queryset.db)
-
-        if queryset._fields:
-            # extra(select=...) cols are always at the start of the row.
-            names = [
-                *query.extra_select,
-                *query.values_select,
-                *query.annotation_select,
-            ]
-            fields = [
-                *queryset._fields,
-                *(f for f in query.annotation_select if f not in queryset._fields),
-            ]
-            if fields != names:
-                # Reorder according to fields.
-                index_map = {name: idx for idx, name in enumerate(names)}
-                rowfactory = operator.itemgetter(*[index_map[f] for f in fields])
-                return map(
-                    rowfactory,
-                    compiler.results_iter(
-                        chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
-                    ),
-                )
         return compiler.results_iter(
             tuple_expected=True,
             chunked_fetch=self.chunked_fetch,
@@ -247,11 +247,6 @@ class SQLCompiler:
         select = []
         klass_info = None
         annotations = {}
-        select_idx = 0
-        for alias, (sql, params) in self.query.extra_select.items():
-            annotations[alias] = select_idx
-            select.append((RawSQL(sql, params), alias))
-            select_idx += 1
         assert not (self.query.select and self.query.default_cols)
         select_mask = self.query.get_select_mask()
         if self.query.default_cols:
@@ -261,19 +256,39 @@ class SQLCompiler:
             # any model.
             cols = self.query.select
         if cols:
-            select_list = []
-            for col in cols:
-                select_list.append(select_idx)
-                select.append((col, None))
-                select_idx += 1
             klass_info = {
                 "model": self.query.model,
-                "select_fields": select_list,
+                "select_fields": list(
+                    range(
+                        len(self.query.extra_select),
+                        len(self.query.extra_select) + len(cols),
+                    )
+                ),
             }
-        for alias, annotation in self.query.annotation_select.items():
-            annotations[alias] = select_idx
-            select.append((annotation, alias))
-            select_idx += 1
+        selected = []
+        if self.query.selected is None:
+            selected = [
+                *(
+                    (alias, RawSQL(*args))
+                    for alias, args in self.query.extra_select.items()
+                ),
+                *((None, col) for col in cols),
+                *self.query.annotation_select.items(),
+            ]
+        else:
+            for alias, expression in self.query.selected.items():
+                # Reference to an annotation.
+                if isinstance(expression, str):
+                    expression = self.query.annotations[expression]
+                # Reference to a column.
+                elif isinstance(expression, int):
+                    expression = cols[expression]
+                selected.append((alias, expression))
+
+        for select_idx, (alias, expression) in enumerate(selected):
+            if alias:
+                annotations[alias] = select_idx
+            select.append((expression, alias))
 
         if self.query.select_related:
             related_klass_infos = self.get_related_selections(select, select_mask)
@@ -576,20 +591,15 @@ class SQLCompiler:
                 # generate valid SQL.
                 compiler.elide_empty = False
         parts = ()
+        selected = self.query.selected
         for compiler in compilers:
             try:
                 # If the columns list is limited, then all combined queries
                 # must have the same columns list. Set the selects defined on
                 # the query on all combined queries, if not already set.
-                if not compiler.query.values_select and self.query.values_select:
+                if selected is not None and compiler.query.selected is None:
                     compiler.query = compiler.query.clone()
-                    compiler.query.set_values(
-                        (
-                            *self.query.extra_select,
-                            *self.query.values_select,
-                            *self.query.annotation_select,
-                        )
-                    )
+                    compiler.query.set_values(selected)
                 part_sql, part_args = compiler.as_sql(with_col_aliases=True)
                 if compiler.query.combinator:
                     # Wrap in a subquery if wrapping in parentheses isn't
@@ -26,6 +26,7 @@ from django.db.models.expressions import (
     Exists,
     F,
     OuterRef,
+    RawSQL,
     Ref,
     ResolvedOuterRef,
     Value,
@@ -259,12 +260,12 @@ class Query(BaseExpression):
     select_for_update_of = ()
     select_for_no_key_update = False
     select_related = False
-    has_select_fields = False
     # Arbitrary limit for select_related to prevents infinite recursion.
     max_depth = 5
     # Holds the selects defined by a call to values() or values_list()
     # excluding annotation_select and extra_select.
     values_select = ()
+    selected = None
 
     # SQL annotation-related attributes.
     annotation_select_mask = None
@@ -565,8 +566,7 @@ class Query(BaseExpression):
                         col_alias = f"__col{index}"
                         col_ref = Ref(col_alias, col)
                         col_refs[col] = col_ref
-                        inner_query.annotations[col_alias] = col
-                        inner_query.append_annotation_mask([col_alias])
+                        inner_query.add_annotation(col, col_alias)
                     replacements[col] = col_ref
                 outer_query.annotations[alias] = aggregate.replace_expressions(
                     replacements
@@ -585,6 +585,7 @@ class Query(BaseExpression):
         else:
             outer_query = self
             self.select = ()
+            self.selected = None
             self.default_cols = False
             self.extra = {}
         if self.annotations:
@@ -1195,13 +1196,10 @@ class Query(BaseExpression):
         if select:
             self.append_annotation_mask([alias])
         else:
-            annotation_mask = (
-                value
-                for value in dict.fromkeys(self.annotation_select)
-                if value != alias
-            )
-            self.set_annotation_mask(annotation_mask)
+            self.set_annotation_mask(set(self.annotation_select).difference({alias}))
         self.annotations[alias] = annotation
+        if self.selected:
+            self.selected[alias] = alias
 
     def resolve_expression(self, query, *args, **kwargs):
         clone = self.clone()
@@ -1369,7 +1367,7 @@ class Query(BaseExpression):
         # __exact is the default lookup if one isn't given.
         *transforms, lookup_name = lookups or ["exact"]
         for name in transforms:
-            lhs = self.try_transform(lhs, name)
+            lhs = self.try_transform(lhs, name, lookups)
         # First try get_lookup() so that the lookup takes precedence if the lhs
         # supports both transform and lookup for the name.
         lookup_class = lhs.get_lookup(lookup_name)
@@ -1403,7 +1401,7 @@ class Query(BaseExpression):
 
         return lookup
 
-    def try_transform(self, lhs, name):
+    def try_transform(self, lhs, name, lookups=None):
         """
         Helper method for build_lookup(). Try to fetch and initialize
         a transform for name parameter from lhs.
@@ -1420,9 +1418,14 @@ class Query(BaseExpression):
                 suggestion = ", perhaps you meant %s?" % " or ".join(suggested_lookups)
             else:
                 suggestion = "."
+            if lookups is not None:
+                name_index = lookups.index(name)
+                unsupported_lookup = LOOKUP_SEP.join(lookups[name_index:])
+            else:
+                unsupported_lookup = name
             raise FieldError(
                 "Unsupported lookup '%s' for %s or join on the field not "
-                "permitted%s" % (name, output_field.__name__, suggestion)
+                "permitted%s" % (unsupported_lookup, output_field.__name__, suggestion)
             )
 
     def build_filter(
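Note: passing the full lookup list into try_transform() lets the error name the entire unsupported tail rather than just its first segment. Roughly the difference a bad filter now produces (model and messages are illustrative and abridged):

    # Book.title is a plain CharField, so "foo" is neither a transform nor a lookup.
    Book.objects.filter(title__foo__icontains="django")

    # Before: FieldError: Unsupported lookup 'foo' for CharField or join on the
    #         field not permitted...
    # After:  FieldError: Unsupported lookup 'foo__icontains' for CharField or
    #         join on the field not permitted...
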
@@ -2154,6 +2157,7 @@ class Query(BaseExpression):
         self.select_related = False
         self.set_extra_mask(())
         self.set_annotation_mask(())
+        self.selected = None
 
     def clear_select_fields(self):
         """
@@ -2163,10 +2167,12 @@ class Query(BaseExpression):
         """
         self.select = ()
         self.values_select = ()
+        self.selected = None
 
     def add_select_col(self, col, name):
         self.select += (col,)
         self.values_select += (name,)
+        self.selected[name] = len(self.select) - 1
 
     def set_select(self, cols):
         self.default_cols = False
@@ -2417,12 +2423,23 @@ class Query(BaseExpression):
         if names is None:
             self.annotation_select_mask = None
         else:
-            self.annotation_select_mask = list(dict.fromkeys(names))
+            self.annotation_select_mask = set(names)
+            if self.selected:
+                # Prune the masked annotations.
+                self.selected = {
+                    key: value
+                    for key, value in self.selected.items()
+                    if not isinstance(value, str)
+                    or value in self.annotation_select_mask
+                }
+                # Append the unmasked annotations.
+                for name in names:
+                    self.selected[name] = name
         self._annotation_select_cache = None
 
     def append_annotation_mask(self, names):
         if self.annotation_select_mask is not None:
-            self.set_annotation_mask((*self.annotation_select_mask, *names))
+            self.set_annotation_mask(self.annotation_select_mask.union(names))
 
     def set_extra_mask(self, names):
         """
@@ -2435,12 +2452,16 @@ class Query(BaseExpression):
         self.extra_select_mask = set(names)
         self._extra_select_cache = None
 
+    @property
+    def has_select_fields(self):
+        return self.selected is not None
+
     def set_values(self, fields):
         self.select_related = False
         self.clear_deferred_loading()
         self.clear_select_fields()
-        self.has_select_fields = True
 
+        selected = {}
         if fields:
             field_names = []
             extra_names = []
@@ -2449,13 +2470,16 @@ class Query(BaseExpression):
                 # Shortcut - if there are no extra or annotations, then
                 # the values() clause must be just field names.
                 field_names = list(fields)
+                selected = dict(zip(fields, range(len(fields))))
             else:
                 self.default_cols = False
                 for f in fields:
-                    if f in self.extra_select:
+                    if extra := self.extra_select.get(f):
                         extra_names.append(f)
+                        selected[f] = RawSQL(*extra)
                     elif f in self.annotation_select:
                         annotation_names.append(f)
+                        selected[f] = f
                     elif f in self.annotations:
                         raise FieldError(
                             f"Cannot select the '{f}' alias. Use annotate() to "
@@ -2467,13 +2491,13 @@ class Query(BaseExpression):
                         # `f` is not resolvable.
                         if self.annotation_select:
                             self.names_to_path(f.split(LOOKUP_SEP), self.model._meta)
+                        selected[f] = len(field_names)
                         field_names.append(f)
             self.set_extra_mask(extra_names)
             self.set_annotation_mask(annotation_names)
-            selected = frozenset(field_names + extra_names + annotation_names)
         else:
             field_names = [f.attname for f in self.model._meta.concrete_fields]
-            selected = frozenset(field_names)
+            selected = dict.fromkeys(field_names, None)
         # Selected annotations must be known before setting the GROUP BY
         # clause.
         if self.group_by is True:
@@ -2496,6 +2520,7 @@ class Query(BaseExpression):
 
         self.values_select = tuple(field_names)
         self.add_fields(field_names, True)
+        self.selected = selected if fields else None
 
     @property
     def annotation_select(self):
@@ -2509,9 +2534,9 @@ class Query(BaseExpression):
             return {}
         elif self.annotation_select_mask is not None:
             self._annotation_select_cache = {
-                k: self.annotations[k]
-                for k in self.annotation_select_mask
-                if k in self.annotations
+                k: v
+                for k, v in self.annotations.items()
+                if k in self.annotation_select_mask
             }
             return self._annotation_select_cache
         else:
@@ -10,7 +10,7 @@ URL. The canonical way to enable cache middleware is to set
     'django.middleware.cache.FetchFromCacheMiddleware'
 ]
 
-This is counter-intuitive, but correct: ``UpdateCacheMiddleware`` needs to run
+This is counterintuitive, but correct: ``UpdateCacheMiddleware`` needs to run
 last during the response phase, which processes middleware bottom-up;
 ``FetchFromCacheMiddleware`` needs to run last during the request phase, which
 processes middleware top-down.
@@ -83,16 +83,6 @@ class RenameMethodsBase(type):
         return new_class
 
 
-class DeprecationInstanceCheck(type):
-    def __instancecheck__(self, instance):
-        warnings.warn(
-            "`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
-            self.deprecation_warning,
-            2,
-        )
-        return super().__instancecheck__(instance)
-
-
 class MiddlewareMixin:
     sync_capable = True
     async_capable = True
@@ -9,7 +9,7 @@ from urllib.parse import parse_qsl, quote, unquote, urlencode, urlsplit, urlunsp
 
 from django.utils.deprecation import RemovedInDjango60Warning
 from django.utils.encoding import punycode
-from django.utils.functional import Promise, keep_lazy, keep_lazy_text
+from django.utils.functional import Promise, cached_property, keep_lazy, keep_lazy_text
 from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS
 from django.utils.regex_helper import _lazy_re_compile
 from django.utils.safestring import SafeData, SafeString, mark_safe
@@ -257,6 +257,16 @@ def smart_urlquote(url):
     return urlunsplit((scheme, netloc, path, query, fragment))
 
 
+class CountsDict(dict):
+    def __init__(self, *args, word, **kwargs):
+        super().__init__(*args, *kwargs)
+        self.word = word
+
+    def __missing__(self, key):
+        self[key] = self.word.count(key)
+        return self[key]
+
+
 class Urlizer:
     """
     Convert any URLs in text into clickable links.
@@ -362,40 +372,72 @@ class Urlizer:
             return x
         return "%s…" % x[: max(0, limit - 1)]
 
+    @cached_property
+    def wrapping_punctuation_openings(self):
+        return "".join(dict(self.wrapping_punctuation).keys())
+
+    @cached_property
+    def trailing_punctuation_chars_no_semicolon(self):
+        return self.trailing_punctuation_chars.replace(";", "")
+
+    @cached_property
+    def trailing_punctuation_chars_has_semicolon(self):
+        return ";" in self.trailing_punctuation_chars
+
     def trim_punctuation(self, word):
         """
         Trim trailing and wrapping punctuation from `word`. Return the items of
         the new state.
         """
-        lead, middle, trail = "", word, ""
+        # Strip all opening wrapping punctuation.
+        middle = word.lstrip(self.wrapping_punctuation_openings)
+        lead = word[: len(word) - len(middle)]
+        trail = ""
+
         # Continue trimming until middle remains unchanged.
         trimmed_something = True
-        while trimmed_something:
+        counts = CountsDict(word=middle)
+        while trimmed_something and middle:
             trimmed_something = False
             # Trim wrapping punctuation.
             for opening, closing in self.wrapping_punctuation:
-                if middle.startswith(opening):
-                    middle = middle.removeprefix(opening)
-                    lead += opening
-                    trimmed_something = True
-                # Keep parentheses at the end only if they're balanced.
-                if (
-                    middle.endswith(closing)
-                    and middle.count(closing) == middle.count(opening) + 1
-                ):
-                    middle = middle.removesuffix(closing)
-                    trail = closing + trail
-                    trimmed_something = True
-            # Trim trailing punctuation (after trimming wrapping punctuation,
-            # as encoded entities contain ';'). Unescape entities to avoid
-            # breaking them by removing ';'.
-            middle_unescaped = html.unescape(middle)
-            stripped = middle_unescaped.rstrip(self.trailing_punctuation_chars)
-            if middle_unescaped != stripped:
-                punctuation_count = len(middle_unescaped) - len(stripped)
-                trail = middle[-punctuation_count:] + trail
-                middle = middle[:-punctuation_count]
-                trimmed_something = True
+                if counts[opening] < counts[closing]:
+                    rstripped = middle.rstrip(closing)
+                    if rstripped != middle:
+                        strip = counts[closing] - counts[opening]
+                        trail = middle[-strip:]
+                        middle = middle[:-strip]
+                        trimmed_something = True
+                        counts[closing] -= strip
+
+            rstripped = middle.rstrip(self.trailing_punctuation_chars_no_semicolon)
+            if rstripped != middle:
+                trail = middle[len(rstripped) :] + trail
+                middle = rstripped
+                trimmed_something = True
+
+            if self.trailing_punctuation_chars_has_semicolon and middle.endswith(";"):
+                # Only strip if not part of an HTML entity.
+                amp = middle.rfind("&")
+                if amp == -1:
+                    can_strip = True
+                else:
+                    potential_entity = middle[amp:]
+                    escaped = html.unescape(potential_entity)
+                    can_strip = (escaped == potential_entity) or escaped.endswith(";")
+
+                if can_strip:
+                    rstripped = middle.rstrip(";")
+                    amount_stripped = len(middle) - len(rstripped)
+                    if amp > -1 and amount_stripped > 1:
+                        # Leave a trailing semicolon as might be an entity.
+                        trail = middle[len(rstripped) + 1 :] + trail
+                        middle = rstripped + ";"
+                    else:
+                        trail = middle[len(rstripped) :] + trail
+                        middle = rstripped
+                    trimmed_something = True
+
         return lead, middle, trail
 
     @staticmethod
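Note: the rewritten trim_punctuation() trims with str.rstrip() plus a lazily-filled character-count dict instead of unescaping the whole candidate on every pass, which keeps urlize() close to linear on pathological inputs while preserving the old behavior for balanced wrappers and HTML entities. Roughly the observable behavior (outputs described, not quoted verbatim):

    from django.utils.html import urlize

    urlize("See https://example.com/path.")
    # Links https://example.com/path and leaves the trailing "." outside the <a>.

    urlize("(docs: https://en.wikipedia.org/wiki/Django_(web_framework))")
    # Keeps the balanced ")" inside the URL and trims only the unmatched outer one.
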
@@ -32,9 +32,10 @@ _default = None
 CONTEXT_SEPARATOR = "\x04"
 
 # Maximum number of characters that will be parsed from the Accept-Language
-# header to prevent possible denial of service or memory exhaustion attacks.
-# About 10x longer than the longest value shown on MDN’s Accept-Language page.
-ACCEPT_LANGUAGE_HEADER_MAX_LENGTH = 500
+# header or cookie to prevent possible denial of service or memory exhaustion
+# attacks. About 10x longer than the longest value shown on MDN’s
+# Accept-Language page.
+LANGUAGE_CODE_MAX_LENGTH = 500
 
 # Format of Accept-Language header values. From RFC 9110 Sections 12.4.2 and
 # 12.5.4, and RFC 5646 Section 2.1.
@@ -498,11 +499,25 @@ def get_supported_language_variant(lang_code, strict=False):
     If `strict` is False (the default), look for a country-specific variant
     when neither the language code nor its generic variant is found.
 
+    The language code is truncated to a maximum length to avoid potential
+    denial of service attacks.
+
     lru_cache should have a maxsize to prevent from memory exhaustion attacks,
     as the provided language codes are taken from the HTTP request. See also
     <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
     """
     if lang_code:
+        # Truncate the language code to a maximum length to avoid potential
+        # denial of service attacks.
+        if len(lang_code) > LANGUAGE_CODE_MAX_LENGTH:
+            if (
+                not strict
+                and (index := lang_code.rfind("-", 0, LANGUAGE_CODE_MAX_LENGTH)) > 0
+            ):
+                # There is a generic variant under the maximum accepted length.
+                lang_code = lang_code[:index]
+            else:
+                raise ValueError("'lang_code' exceeds the maximum accepted length")
         # If 'zh-hant-tw' is not supported, try special fallback or subsequent
         # language codes i.e. 'zh-hant' and 'zh'.
         possible_lang_codes = [lang_code]
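The truncation guard added to ``get_supported_language_variant()`` can be exercised on its own; a minimal sketch, with ``MAX_LENGTH`` standing in for ``LANGUAGE_CODE_MAX_LENGTH`` and the helper name being illustrative:

.. code-block:: python

    MAX_LENGTH = 500  # stands in for LANGUAGE_CODE_MAX_LENGTH

    def truncate_lang_code(lang_code, strict=False, max_length=MAX_LENGTH):
        """Shorten an over-long code at a '-' before the limit, or refuse it."""
        if len(lang_code) > max_length:
            index = lang_code.rfind("-", 0, max_length)
            if not strict and index > 0:
                # There is a generic variant under the maximum accepted length.
                return lang_code[:index]
            raise ValueError("'lang_code' exceeds the maximum accepted length")
        return lang_code

    print(truncate_lang_code("de-" + "x" * 600))  # -> 'de'
    try:
        truncate_lang_code("x" * 600)  # no '-', so nothing to fall back to
    except ValueError as exc:
        print(exc)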
@@ -626,13 +641,13 @@ def parse_accept_lang_header(lang_string):
     functools.lru_cache() to avoid repetitive parsing of common header values.
     """
     # If the header value doesn't exceed the maximum allowed length, parse it.
-    if len(lang_string) <= ACCEPT_LANGUAGE_HEADER_MAX_LENGTH:
+    if len(lang_string) <= LANGUAGE_CODE_MAX_LENGTH:
         return _parse_accept_lang_header(lang_string)
 
     # If there is at least one comma in the value, parse up to the last comma
     # before the max length, skipping any truncated parts at the end of the
     # header value.
-    if (index := lang_string.rfind(",", 0, ACCEPT_LANGUAGE_HEADER_MAX_LENGTH)) > 0:
+    if (index := lang_string.rfind(",", 0, LANGUAGE_CODE_MAX_LENGTH)) > 0:
         return _parse_accept_lang_header(lang_string[:index])
 
     # Don't attempt to parse if there is only one language-range value which is
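The same cap applied to ``Accept-Language`` values, shown standalone. The helper below only splits on commas; Django's private ``_parse_accept_lang_header()`` also handles quality values, so this illustrates the length handling only:

.. code-block:: python

    MAX_LENGTH = 500  # stands in for LANGUAGE_CODE_MAX_LENGTH

    def capped_language_ranges(lang_string, max_length=MAX_LENGTH):
        """Parse at most `max_length` characters, dropping a truncated last range."""
        if len(lang_string) <= max_length:
            return [part.strip() for part in lang_string.split(",") if part.strip()]
        index = lang_string.rfind(",", 0, max_length)
        if index > 0:
            # Parse up to the last comma before the limit; skip the cut-off tail.
            return [p.strip() for p in lang_string[:index].split(",") if p.strip()]
        # A single, over-long language range: don't attempt to parse it.
        return []

    print(capped_language_ranges("en-us,fr;q=0.8," + "x" * 600))
    # -> ['en-us', 'fr;q=0.8']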
@@ -143,7 +143,7 @@ def github_linkcode_resolve(domain, info, *, version, next_version):
 
     branch = get_branch(version=version, next_version=next_version)
     relative_path = path.relative_to(pathlib.Path(__file__).parents[2])
-    # Use "/" explicitely to join the path parts since str(file), on Windows,
+    # Use "/" explicitly to join the path parts since str(file), on Windows,
     # uses the Windows path separator which is incorrect for URLs.
     url_path = "/".join(relative_path.parts)
     return f"https://github.com/django/django/blob/{branch}/{url_path}#L{lineno}"
@@ -22,11 +22,13 @@ Then, please post it in one of the following channels:
 * The Django Forum section `"Using Django"`_. This is for web-based
   discussions.
 * The |django-users| mailing list. This is for email-based discussions.
+* The `Django Discord server`_ for chat-based discussions.
 * The `#django IRC channel`_ on the Libera.Chat IRC network. This is for
   chat-based discussions. If you're new to IRC, see the `Libera.Chat
   documentation`_ for different ways to connect.
 
 .. _`"Using Django"`: https://forum.djangoproject.com/c/users/6
+.. _`Django Discord server`: https://discord.gg/xcRH6mN4fa
 .. _#django IRC channel: https://web.libera.chat/#django
 .. _Libera.Chat documentation: https://libera.chat/guides/connect
 
|
|||||||
highly trusted Django developers, and its archives are not publicly readable.
|
highly trusted Django developers, and its archives are not publicly readable.
|
||||||
|
|
||||||
Due to the sensitive nature of security issues, we ask that if you think you
|
Due to the sensitive nature of security issues, we ask that if you think you
|
||||||
have found a security problem, *please* don't post a message on the forum, IRC,
|
have found a security problem, *please* don't post a message on the forum, the
|
||||||
or one of the public mailing lists. Django has a
|
Discord server, IRC, or one of the public mailing lists. Django has a
|
||||||
:ref:`policy for handling security issues <reporting-security-issues>`;
|
:ref:`policy for handling security issues <reporting-security-issues>`;
|
||||||
while a defect is outstanding, we would like to minimize any damage that
|
while a defect is outstanding, we would like to minimize any damage that
|
||||||
could be inflicted through public knowledge of that defect.
|
could be inflicted through public knowledge of that defect.
|
||||||
|
@@ -32,6 +32,14 @@ matches the version you installed by executing:
 
     ...\> py --version
 
+.. admonition:: ``py`` is not recognized or found
+
+    Depending on how you've installed Python (such as via the Microsoft Store),
+    ``py`` may not be available in the command prompt.
+
+    You will then need to use ``python`` instead of ``py`` when entering
+    commands.
+
 .. seealso::
 
     For more details, see :doc:`python:using/windows` documentation.
@@ -59,13 +59,14 @@ the date, time and numbers formatting particularities of your locale. See
 :doc:`/topics/i18n/formatting` for details.
 
 The format files aren't managed by the use of Transifex. To change them, you
-must :doc:`create a patch<writing-code/submitting-patches>` against the
-Django source tree, as for any code change:
+must:
 
-* Create a diff against the current Git main branch.
+* :doc:`Create a pull request<writing-code/submitting-patches>` against the
+  Django Git ``main`` branch, as for any code change.
 
 * Open a ticket in Django's ticket system, set its ``Component`` field to
-  ``Translations``, and attach the patch to it.
+  ``Translations``, set the "has patch" flag, and include the link to the pull
+  request.
 
 .. _Transifex: https://www.transifex.com/
 .. _Django project page: https://app.transifex.com/django/django/
@@ -35,8 +35,8 @@ Triage workflow
 
 Unfortunately, not all bug reports and feature requests in the ticket tracker
 provide all the :doc:`required details<bugs-and-features>`. A number of
-tickets have patches, but those patches don't meet all the requirements of a
-:ref:`good patch<patch-style>`.
+tickets have proposed solutions, but those don't necessarily meet all the
+requirements :ref:`adhering to the guidelines for contributing <patch-style>`.
 
 One way to help out is to *triage* tickets that have been created by other
 users.
@@ -56,7 +56,7 @@ Since a picture is worth a thousand words, let's start there:
 We've got two roles in this diagram:
 
 * Mergers: people with commit access who are responsible for making the
-  final decision to merge a patch.
+  final decision to merge a change.
 
 * Ticket triagers: anyone in the Django community who chooses to
   become involved in Django's development process. Our Trac installation
@@ -115,18 +115,18 @@ Beyond that there are several considerations:
 * **Accepted + No Flags**
 
   The ticket is valid, but no one has submitted a patch for it yet. Often this
-  means you could safely start writing a patch for it. This is generally more
+  means you could safely start writing a fix for it. This is generally more
   true for the case of accepted bugs than accepted features. A ticket for a bug
   that has been accepted means that the issue has been verified by at least one
   triager as a legitimate bug - and should probably be fixed if possible. An
   accepted new feature may only mean that one triager thought the feature would
   be good to have, but this alone does not represent a consensus view or imply
   with any certainty that a patch will be accepted for that feature. Seek more
-  feedback before writing an extensive patch if you are in doubt.
+  feedback before writing an extensive contribution if you are in doubt.
 
 * **Accepted + Has Patch**
 
-  The ticket is waiting for people to review the supplied patch. This means
+  The ticket is waiting for people to review the supplied solution. This means
   downloading the patch and trying it out, verifying that it contains tests
   and docs, running the test suite with the included patch, and leaving
   feedback on the ticket.
@@ -143,7 +143,7 @@ Ready For Checkin
 
 The ticket was reviewed by any member of the community other than the person
 who supplied the patch and found to meet all the requirements for a
-commit-ready patch. A :ref:`merger <mergers-team>` now needs to give the patch
+commit-ready contribution. A :ref:`merger <mergers-team>` now needs to give
 a final review prior to being committed.
 
 There are a lot of pull requests. It can take a while for your patch to get
@@ -169,9 +169,9 @@ A number of flags, appearing as checkboxes in Trac, can be set on a ticket:
 Has patch
 ---------
 
-This means the ticket has an associated
-:doc:`patch<writing-code/submitting-patches>`. These will be reviewed
-to see if the patch is "good".
+This means the ticket has an associated solution. These will be reviewed to
+ensure they adhere to the :doc:`documented guidelines
+<writing-code/submitting-patches>`.
 
 The following three fields (Needs documentation, Needs tests,
 Patch needs improvement) apply only if a patch has been supplied.
@@ -187,12 +187,12 @@ Needs tests
 -----------
 
 This flags the patch as needing associated unit tests. Again, this
-is a required part of a valid patch.
+is a required part of a valid contribution.
 
 Patch needs improvement
 -----------------------
 
-This flag means that although the ticket *has* a patch, it's not quite
+This flag means that although the ticket *has* a solution, it's not quite
 ready for checkin. This could mean the patch no longer applies
 cleanly, there is a flaw in the implementation, or that the code
 doesn't meet our standards.
@@ -200,7 +200,7 @@ doesn't meet our standards.
 Easy pickings
 -------------
 
-Tickets that would require small, easy, patches.
+Tickets that would require small, easy, changes.
 
 Type
 ----
@@ -374,7 +374,7 @@ Then, you can help out by:
   you should raise it for discussion (referencing the relevant tickets)
   on the `Django Forum`_ or |django-developers|.
 
-* Verify if patches submitted by other users are correct. If they are correct
+* Verify if solutions submitted by others are correct. If they are correct
   and also contain appropriate documentation and tests then move them to the
   "Ready for Checkin" stage. If they are not correct then leave a comment to
   explain why and set the corresponding flags ("Patch needs improvement",
@@ -383,7 +383,7 @@ Then, you can help out by:
 .. note::
 
     The `Reports page`_ contains links to many useful Trac queries, including
-    several that are useful for triaging tickets and reviewing patches as
+    several that are useful for triaging tickets and reviewing proposals as
     suggested above.
 
     You can also find more :doc:`new-contributors`.
@@ -46,7 +46,7 @@ Python style
 * Unless otherwise specified, follow :pep:`8`.
 
   Use :pypi:`flake8` to check for problems in this area. Note that our
-  ``setup.cfg`` file contains some excluded files (deprecated modules we don't
+  ``.flake8`` file contains some excluded files (deprecated modules we don't
   care about cleaning up and some third-party code that Django vendors) as well
   as some excluded errors that we don't consider as gross violations. Remember
   that :pep:`8` is only a guide, so respect the style of the surrounding code
@@ -1,10 +1,10 @@
-==================
-Submitting patches
-==================
+========================
+Submitting contributions
+========================
 
-We're always grateful for patches to Django's code. Indeed, bug reports
-with associated patches will get fixed *far* more quickly than those
-without patches.
+We're always grateful for contributions to Django's code. Indeed, bug reports
+with associated contributions will get fixed *far* more quickly than those
+without a solution.
 
 Typo fixes and trivial documentation changes
 ============================================
@@ -52,7 +52,7 @@ and time availability), claim it by following these steps:
 
 .. note::
     The Django software foundation requests that anyone contributing more than
-    a trivial patch to Django sign and submit a `Contributor License
+    a trivial change to Django sign and submit a `Contributor License
     Agreement`_, this ensures that the Django Software Foundation has clear
     license to all contributions allowing for a clear license for all users.
 
@@ -86,35 +86,32 @@ Going through the steps of claiming tickets is overkill in some cases.
 
 In the case of small changes, such as typos in the documentation or small bugs
 that will only take a few minutes to fix, you don't need to jump through the
-hoops of claiming tickets. Submit your patch directly and you're done!
+hoops of claiming tickets. Submit your changes directly and you're done!
 
 It is *always* acceptable, regardless whether someone has claimed it or not, to
-submit patches to a ticket if you happen to have a patch ready.
+link proposals to a ticket if you happen to have the changes ready.
 
 .. _patch-style:
 
-Patch style
-===========
+Contribution style
+==================
 
 Make sure that any contribution you do fulfills at least the following
 requirements:
 
 * The code required to fix a problem or add a feature is an essential part
-  of a patch, but it is not the only part. A good patch should also include a
+  of a solution, but it is not the only part. A good fix should also include a
   :doc:`regression test <unit-tests>` to validate the behavior that has been
   fixed and to prevent the problem from arising again. Also, if some tickets
   are relevant to the code that you've written, mention the ticket numbers in
   some comments in the test so that one can easily trace back the relevant
   discussions after your patch gets committed, and the tickets get closed.
 
-* If the code associated with a patch adds a new feature, or modifies
-  behavior of an existing feature, the patch should also contain
-  documentation.
+* If the code adds a new feature, or modifies the behavior of an existing
+  feature, the change should also contain documentation.
 
 When you think your work is ready to be reviewed, send :doc:`a GitHub pull
-request <working-with-git>`. Please review the patch yourself using our
-:ref:`patch review checklist <patch-review-checklist>` first.
+request <working-with-git>`.
 
 If you can't send a pull request for some reason, you can also use patches in
 Trac. When using this style, follow these guidelines.
@@ -129,7 +126,7 @@ Trac. When using this style, follow these guidelines.
 
 Regardless of the way you submit your work, follow these steps.
 
-* Make sure your code fulfills the requirements in our :ref:`patch review
+* Make sure your code fulfills the requirements in our :ref:`contribution
   checklist <patch-review-checklist>`.
 
 * Check the "Has patch" box on the ticket and make sure the "Needs
@@ -140,17 +137,18 @@ Regardless of the way you submit your work, follow these steps.
 .. _ticket tracker: https://code.djangoproject.com/
 .. _Development dashboard: https://dashboard.djangoproject.com/
 
-Non-trivial patches
-===================
+Non-trivial contributions
+=========================
 
-A "non-trivial" patch is one that is more than a small bug fix. It's a patch
-that introduces Django functionality and makes some sort of design decision.
+A "non-trivial" contribution is one that is more than a small bug fix. It's a
+change that introduces new Django functionality and makes some sort of design
+decision.
 
-If you provide a non-trivial patch, include evidence that alternatives have
+If you provide a non-trivial change, include evidence that alternatives have
 been discussed on the `Django Forum`_ or |django-developers| list.
 
-If you're not sure whether your patch should be considered non-trivial, ask on
-the ticket for opinions.
+If you're not sure whether your contribution should be considered non-trivial,
+ask on the ticket for opinions.
 
 .. _Django Forum: https://forum.djangoproject.com/
 
@@ -253,15 +251,15 @@ Once you have completed these steps, you are finished with the deprecation.
 In each :term:`feature release <Feature release>`, all
 ``RemovedInDjangoXXWarning``\s matching the new version are removed.
 
-JavaScript patches
-==================
+JavaScript contributions
+========================
 
-For information on JavaScript patches, see the :ref:`javascript-patches`
+For information on JavaScript contributions, see the :ref:`javascript-patches`
 documentation.
 
 .. _patch-review-checklist:
 
-Patch review checklist
+Contribution checklist
 ======================
 
 Use this checklist to review a pull request. If you are reviewing a pull
@@ -271,14 +269,15 @@ If you've left comments for improvement on the pull request, please tick the
 appropriate flags on the Trac ticket based on the results of your review:
 "Patch needs improvement", "Needs documentation", and/or "Needs tests". As time
 and interest permits, mergers do final reviews of "Ready for checkin" tickets
-and will either commit the patch or bump it back to "Accepted" if further works
-need to be done. If you're looking to become a merger, doing thorough reviews
-of patches is a great way to earn trust.
+and will either commit the changes or bump it back to "Accepted" if further
+work needs to be done. If you're looking to become a merger, doing thorough
+reviews of contributions is a great way to earn trust.
 
 Looking for a patch to review? Check out the "Patches needing review" section
 of the `Django Development Dashboard <https://dashboard.djangoproject.com/>`_.
-Looking to get your patch reviewed? Ensure the Trac flags on the ticket are
-set so that the ticket appears in that queue.
+
+Looking to get your pull request reviewed? Ensure the Trac flags on the ticket
+are set so that the ticket appears in that queue.
 
 Documentation
 -------------
@@ -15,6 +15,9 @@ about each item can often be found in the release notes of two versions prior.
 See the :ref:`Django 5.2 release notes <deprecated-features-5.2>` for more
 details on these changes.
 
+* The ``all`` keyword argument of ``django.contrib.staticfiles.finders.find()``
+  will be removed.
+
 .. _deprecation-removed-in-6.0:
 
 6.0
@@ -83,7 +83,7 @@ permissions.
 
 .. code-block:: shell
 
-    $ python -m pip install wheel twine
+    $ python -m pip install build twine
 
 * Access to `Django's project on PyPI <https://pypi.org/project/Django/>`_ to
   upload binaries, ideally with extra permissions to `yank a release
@@ -345,10 +345,11 @@ issuing **multiple releases**, repeat these steps for each release.
    <2719a7f8c161233f45d34b624a9df9392c86cc1b>`).
 
 #. If this is a pre-release package also update the "Development Status"
-   trove classifier in ``setup.cfg`` to reflect this. An ``rc`` pre-release
-   should not change the trove classifier (:commit:`example commit for alpha
-   release <eeeacc52a967234e920c001b7908c4acdfd7a848>`, :commit:`example
-   commit for beta release <25fec8940b24107e21314ab6616e18ce8dec1c1c>`).
+   trove classifier in ``pyproject.toml`` to reflect this. An ``rc``
+   pre-release should not change the trove classifier (:commit:`example
+   commit for alpha release <eeeacc52a967234e920c001b7908c4acdfd7a848>`,
+   :commit:`example commit for beta release
+   <25fec8940b24107e21314ab6616e18ce8dec1c1c>`).
 
 #. Otherwise, make sure the classifier is set to
    ``Development Status :: 5 - Production/Stable``.
@@ -370,8 +371,8 @@ issuing **multiple releases**, repeat these steps for each release.
 
 #. Make sure you have an absolutely clean tree by running ``git clean -dfx``.
 
-#. Run ``make -f extras/Makefile`` to generate the release packages. This will
-   create the release packages in a ``dist/`` directory.
+#. Run ``python -m build`` to generate the release packages. This will create
+   the release packages in a ``dist/`` directory.
 
 #. Generate the hashes of the release packages:
 
[14 binary image files changed (documentation screenshots replaced at new sizes, e.g. 4.2 KiB → 4.1 KiB, 14 KiB → 36 KiB).]
@@ -222,10 +222,25 @@ and put the following Python code in it:
     def index(request):
         return HttpResponse("Hello, world. You're at the polls index.")
 
-This is the simplest view possible in Django. To call the view, we need to map
-it to a URL - and for this we need a URLconf.
+This is the most basic view possible in Django. To access it in a browser, we
+need to map it to a URL - and for this we need to define a URL configuration,
+or "URLconf" for short. These URL configurations are defined inside each
+Django app, and they are Python files named ``urls.py``.
+
+To define a URLconf for the ``polls`` app, create a file ``polls/urls.py``
+with the following content:
+
+.. code-block:: python
+    :caption: ``polls/urls.py``
+
+    from django.urls import path
+
+    from . import views
+
+    urlpatterns = [
+        path("", views.index, name="index"),
+    ]
 
-To create a URLconf in the polls directory, create a file called ``urls.py``.
 Your app directory should now look like:
 
 .. code-block:: text
@@ -241,21 +256,9 @@ Your app directory should now look like:
     urls.py
     views.py
 
-In the ``polls/urls.py`` file include the following code:
-
-.. code-block:: python
-    :caption: ``polls/urls.py``
-
-    from django.urls import path
-
-    from . import views
-
-    urlpatterns = [
-        path("", views.index, name="index"),
-    ]
-
-The next step is to point the root URLconf at the ``polls.urls`` module. In
-``mysite/urls.py``, add an import for ``django.urls.include`` and insert an
+The next step is to configure the root URLconf in the ``mysite`` project to
+include the URLconf defined in ``polls.urls``. To do this, add an import for
+``django.urls.include`` in ``mysite/urls.py`` and insert an
 :func:`~django.urls.include` in the ``urlpatterns`` list, so you have:
 
 .. code-block:: python
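For reference, the root URLconf that the final context line above leads into ends up looking roughly like this; a sketch based on the tutorial, not part of this hunk:

.. code-block:: python

    # mysite/urls.py -- sketch of the tutorial's root URLconf.
    from django.contrib import admin
    from django.urls import include, path

    urlpatterns = [
        path("polls/", include("polls.urls")),
        path("admin/", admin.site.urls),
    ]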
@@ -17,48 +17,15 @@ Database setup
 Now, open up :file:`mysite/settings.py`. It's a normal Python module with
 module-level variables representing Django settings.
 
-By default, the configuration uses SQLite. If you're new to databases, or
-you're just interested in trying Django, this is the easiest choice. SQLite is
-included in Python, so you won't need to install anything else to support your
-database. When starting your first real project, however, you may want to use a
-more scalable database like PostgreSQL, to avoid database-switching headaches
-down the road.
-
-If you wish to use another database, install the appropriate :ref:`database
-bindings <database-installation>` and change the following keys in the
-:setting:`DATABASES` ``'default'`` item to match your database connection
-settings:
-
-* :setting:`ENGINE <DATABASE-ENGINE>` -- Either
-  ``'django.db.backends.sqlite3'``,
-  ``'django.db.backends.postgresql'``,
-  ``'django.db.backends.mysql'``, or
-  ``'django.db.backends.oracle'``. Other backends are :ref:`also available
-  <third-party-notes>`.
-
-* :setting:`NAME` -- The name of your database. If you're using SQLite, the
-  database will be a file on your computer; in that case, :setting:`NAME`
-  should be the full absolute path, including filename, of that file. The
-  default value, ``BASE_DIR / 'db.sqlite3'``, will store the file in your
-  project directory.
-
-If you are not using SQLite as your database, additional settings such as
-:setting:`USER`, :setting:`PASSWORD`, and :setting:`HOST` must be added.
-For more details, see the reference documentation for :setting:`DATABASES`.
-
-.. admonition:: For databases other than SQLite
-
-    If you're using a database besides SQLite, make sure you've created a
-    database by this point. Do that with "``CREATE DATABASE database_name;``"
-    within your database's interactive prompt.
-
-    Also make sure that the database user provided in :file:`mysite/settings.py`
-    has "create database" privileges. This allows automatic creation of a
-    :ref:`test database <the-test-database>` which will be needed in a later
-    tutorial.
-
-    If you're using SQLite, you don't need to create anything beforehand - the
-    database file will be created automatically when it is needed.
+By default, the :setting:`DATABASES` configuration uses SQLite. If you're new
+to databases, or you're just interested in trying Django, this is the easiest
+choice. SQLite is included in Python, so you won't need to install anything
+else to support your database. When starting your first real project, however,
+you may want to use a more scalable database like PostgreSQL, to avoid
+database-switching headaches down the road.
+
+If you wish to use another database, see :ref:`details to customize and get
+your database running <database-installation>`.
 
 While you're editing :file:`mysite/settings.py`, set :setting:`TIME_ZONE` to
 your time zone.
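As an illustration of the kind of configuration the linked section covers, a PostgreSQL sketch; the name, user, password, and host values are placeholders:

.. code-block:: python

    # mysite/settings.py -- illustrative values only.
    DATABASES = {
        "default": {
            "ENGINE": "django.db.backends.postgresql",
            "NAME": "mydatabase",
            "USER": "mydatabaseuser",
            "PASSWORD": "mypassword",
            "HOST": "127.0.0.1",
            "PORT": "5432",
        }
    }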
@@ -111,7 +111,7 @@ There are many ways to approach writing tests.
 
 Some programmers follow a discipline called "`test-driven development`_"; they
 actually write their tests before they write their code. This might seem
-counter-intuitive, but in fact it's similar to what most people will often do
+counterintuitive, but in fact it's similar to what most people will often do
 anyway: they describe a problem, then create some code to solve it. Test-driven
 development formalizes the problem in a Python test case.
 
@@ -186,6 +186,14 @@ Configurable attributes
 
     It must be unique across a Django project.
 
+    .. warning::
+
+        Changing this attribute after migrations have been applied for an
+        application will result in breaking changes to a project or, in the
+        case of a reusable app, any existing installs of that app. This is
+        because ``AppConfig.label`` is used in database tables and migration
+        files when referencing an app in the dependencies list.
+
 .. attribute:: AppConfig.verbose_name
 
     Human-readable name for the application, e.g. "Administration".
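A minimal app configuration showing where ``label`` lives (the app name is illustrative); per the warning above, treat the value as frozen once migrations that reference the app exist:

.. code-block:: python

    # polls/apps.py -- illustrative app name.
    from django.apps import AppConfig


    class PollsConfig(AppConfig):
        default_auto_field = "django.db.models.BigAutoField"
        name = "polls"
        # Defaults to the last component of ``name``; migrations and database
        # table names refer to this value, so changing it later is a breaking
        # change.
        label = "polls"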
@@ -116,24 +116,7 @@ a decorator overrides the middleware.
 Limitations
 ===========
 
-The ``X-Frame-Options`` header will only protect against clickjacking in a
-modern browser. Older browsers will quietly ignore the header and need `other
-clickjacking prevention techniques`_.
-
-Browsers that support ``X-Frame-Options``
------------------------------------------
-
-* Internet Explorer 8+
-* Edge
-* Firefox 3.6.9+
-* Opera 10.5+
-* Safari 4+
-* Chrome 4.1+
-
-See also
---------
-
-A `complete list`_ of browsers supporting ``X-Frame-Options``.
-
-.. _complete list: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options#browser_compatibility
-.. _other clickjacking prevention techniques: https://en.wikipedia.org/wiki/Clickjacking#Prevention
+The ``X-Frame-Options`` header will only protect against clickjacking in
+`modern browsers`_.
+
+.. _modern browsers: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options#browser_compatibility
[5 binary image files changed (documentation screenshots replaced at new sizes, e.g. 68 KiB → 55 KiB, 40 KiB → 18 KiB).]
@@ -826,7 +826,7 @@ specific to SQLite that you should be aware of.
 Substring matching and case sensitivity
 ---------------------------------------
 
-For all SQLite versions, there is some slightly counter-intuitive behavior when
+For all SQLite versions, there is some slightly counterintuitive behavior when
 attempting to match some types of strings. These are triggered when using the
 :lookup:`iexact` or :lookup:`contains` filters in Querysets. The behavior
 splits into two cases:
@@ -745,6 +745,11 @@ You can also refer to fields on related models with reverse relations through
     ``"true"``, ``"false"``, and ``"null"`` strings for
     :class:`~django.db.models.JSONField` key transforms.
 
+.. versionchanged:: 5.2
+
+    The ``SELECT`` clause generated when using ``values()`` was updated to
+    respect the order of the specified ``*fields`` and ``**expressions``.
+
 ``values_list()``
 ~~~~~~~~~~~~~~~~~
 
@@ -835,6 +840,11 @@ not having any author:
     ``"true"``, ``"false"``, and ``"null"`` strings for
     :class:`~django.db.models.JSONField` key transforms.
 
+.. versionchanged:: 5.2
+
+    The ``SELECT`` clause generated when using ``values_list()`` was updated to
+    respect the order of the specified ``*fields``.
+
 ``dates()``
 ~~~~~~~~~~~
 
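To make the two ``versionchanged`` notes concrete, a sketch against a hypothetical ``Entry`` model with ``headline`` and ``pub_date`` fields (behavior as described for Django 5.2; the model and variable names are illustrative):

.. code-block:: python

    from django.db.models.functions import Lower

    # With the 5.2 behavior, the generated SELECT lists headline, pub_date,
    # and the annotated expression in exactly this order.
    values_qs = Entry.objects.values(
        "headline", "pub_date", lower_headline=Lower("headline")
    )
    values_list_qs = Entry.objects.values_list("pub_date", "headline")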
@@ -1842,9 +1842,7 @@ standard :term:`language ID format <language code>`. For example, U.S. English
 is ``"en-us"``. See also the `list of language identifiers`_ and
 :doc:`/topics/i18n/index`.
 
-:setting:`USE_I18N` must be active for this setting to have any effect.
-
-It serves two purposes:
+It serves three purposes:
 
 * If the locale middleware isn't in use, it decides which translation is served
   to all users.
@@ -1852,6 +1850,11 @@ It serves two purposes:
   user's preferred language can't be determined or is not supported by the
   website. It also provides the fallback translation when a translation for a
   given literal doesn't exist for the user's preferred language.
+* If localization is explicitly disabled via the :tfilter:`unlocalize` filter
+  or the :ttag:`{% localize off %}<localize>` tag, it provides fallback
+  localization formats which will be applied instead. See
+  :ref:`controlling localization in templates <topic-l10n-templates>` for
+  details.
 
 See :ref:`how-django-discovers-language-preference` for more details.
 
@@ -1147,6 +1147,11 @@ For a complete discussion on the usage of the following see the
     ``lang_code`` is ``'es-ar'`` and ``'es'`` is in :setting:`LANGUAGES` but
     ``'es-ar'`` isn't.
 
+    ``lang_code`` has a maximum accepted length of 500 characters. A
+    :exc:`ValueError` is raised if ``lang_code`` exceeds this limit and
+    ``strict`` is ``True``, or if there is no generic variant and ``strict``
+    is ``False``.
+
     If ``strict`` is ``False`` (the default), a country-specific variant may
     be returned when neither the language code nor its generic variant is found.
     For example, if only ``'es-co'`` is in :setting:`LANGUAGES`, that's
@@ -1155,6 +1160,11 @@ For a complete discussion on the usage of the following see the
 
     Raises :exc:`LookupError` if nothing is found.
 
+    .. versionchanged:: 4.2.14
+
+        In older versions, ``lang_code`` values over 500 characters were
+        processed without raising a :exc:`ValueError`.
+
 .. function:: to_locale(language)
 
     Turns a language name (en-us) into a locale name (en_US).
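A quick interactive check of the documented behavior, assuming Django 4.2.14 or later is installed; the configured languages are illustrative:

.. code-block:: python

    from django.conf import settings

    settings.configure(
        USE_I18N=True,
        LANGUAGES=[("en", "English"), ("es", "Spanish")],
        LANGUAGE_CODE="en",
    )

    from django.utils.translation import get_supported_language_variant

    print(get_supported_language_variant("es-ar"))  # -> 'es'
    try:
        get_supported_language_variant("x" * 600)  # over the 500-character limit
    except ValueError as exc:
        print(exc)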
49
docs/releases/4.2.14.txt
Normal file
@@ -0,0 +1,49 @@
+===========================
+Django 4.2.14 release notes
+===========================
+
+*July 9, 2024*
+
+Django 4.2.14 fixes two security issues with severity "moderate" and two
+security issues with severity "low" in 4.2.13.
+
+CVE-2024-38875: Potential denial-of-service vulnerability in ``django.utils.html.urlize()``
+===========================================================================================
+
+:tfilter:`urlize` and :tfilter:`urlizetrunc` were subject to a potential
+denial-of-service attack via certain inputs with a very large number of
+brackets.
+
+CVE-2024-39329: Username enumeration through timing difference for users with unusable passwords
+================================================================================================
+
+The :meth:`~django.contrib.auth.backends.ModelBackend.authenticate()` method
+allowed remote attackers to enumerate users via a timing attack involving login
+requests for users with unusable passwords.
+
+CVE-2024-39330: Potential directory-traversal via ``Storage.save()``
+====================================================================
+
+Derived classes of the :class:`~django.core.files.storage.Storage` base class
+which override :meth:`generate_filename()
+<django.core.files.storage.Storage.generate_filename()>` without replicating
+the file path validations existing in the parent class, allowed for potential
+directory-traversal via certain inputs when calling :meth:`save()
+<django.core.files.storage.Storage.save()>`.
+
+Built-in ``Storage`` sub-classes were not affected by this vulnerability.
+
+CVE-2024-39614: Potential denial-of-service vulnerability in ``get_supported_language_variant()``
+=================================================================================================
+
+:meth:`~django.utils.translation.get_supported_language_variant` was subject to
+a potential denial-of-service attack when used with very long strings
+containing specific characters.
+
+To mitigate this vulnerability, the language code provided to
+:meth:`~django.utils.translation.get_supported_language_variant` is now parsed
+up to a maximum length of 500 characters.
+
+When the language code is over 500 characters, a :exc:`ValueError` will now be
+raised if ``strict`` is ``True``, or if there is no generic variant and
+``strict`` is ``False``.
@@ -2,11 +2,56 @@
 Django 5.0.7 release notes
 ==========================
 
-*Expected July 9, 2024*
+*July 9, 2024*
 
-Django 5.0.7 fixes several bugs in 5.0.6.
+Django 5.0.7 fixes two security issues with severity "moderate", two security
+issues with severity "low", and one bug in 5.0.6.
+
+CVE-2024-38875: Potential denial-of-service vulnerability in ``django.utils.html.urlize()``
+===========================================================================================
+
+:tfilter:`urlize` and :tfilter:`urlizetrunc` were subject to a potential
+denial-of-service attack via certain inputs with a very large number of
+brackets.
+
+CVE-2024-39329: Username enumeration through timing difference for users with unusable passwords
+================================================================================================
+
+The :meth:`~django.contrib.auth.backends.ModelBackend.authenticate()` method
+allowed remote attackers to enumerate users via a timing attack involving login
+requests for users with unusable passwords.
+
+CVE-2024-39330: Potential directory-traversal via ``Storage.save()``
+====================================================================
+
+Derived classes of the :class:`~django.core.files.storage.Storage` base class
+which override :meth:`generate_filename()
+<django.core.files.storage.Storage.generate_filename()>` without replicating
+the file path validations existing in the parent class, allowed for potential
+directory-traversal via certain inputs when calling :meth:`save()
+<django.core.files.storage.Storage.save()>`.
+
+Built-in ``Storage`` sub-classes were not affected by this vulnerability.
+
+CVE-2024-39614: Potential denial-of-service vulnerability in ``get_supported_language_variant()``
+=================================================================================================
+
+:meth:`~django.utils.translation.get_supported_language_variant` was subject to
+a potential denial-of-service attack when used with very long strings
+containing specific characters.
+
+To mitigate this vulnerability, the language code provided to
+:meth:`~django.utils.translation.get_supported_language_variant` is now parsed
+up to a maximum length of 500 characters.
+
+When the language code is over 500 characters, a :exc:`ValueError` will now be
+raised if ``strict`` is ``True``, or if there is no generic variant and
+``strict`` is ``False``.
 
 Bugfixes
 ========
 
-* ...
+* Fixed a bug in Django 5.0 that caused a crash of ``Model.full_clean()`` on
+  unsaved model instances with a ``GeneratedField`` and certain defined
+  :attr:`Meta.constraints <django.db.models.Options.constraints>`
+  (:ticket:`35560`).
12
docs/releases/5.0.8.txt
Normal file
@@ -0,0 +1,12 @@
+==========================
+Django 5.0.8 release notes
+==========================
+
+*Expected August 6, 2024*
+
+Django 5.0.8 fixes several bugs in 5.0.7.
+
+Bugfixes
+========
+
+* ...
@@ -195,7 +195,13 @@ Migrations
 Models
 ~~~~~~
 
-* ...
+* The ``SELECT`` clause generated when using
+  :meth:`QuerySet.values()<django.db.models.query.QuerySet.values>` and
+  :meth:`~django.db.models.query.QuerySet.values_list` now matches the
+  specified order of the referenced expressions. Previously the order was based
+  on a set of counterintuitive rules which made query combination through
+  methods such as
+  :meth:`QuerySet.union()<django.db.models.query.QuerySet.union>` unpredictable.
 
 Requests and Responses
 ~~~~~~~~~~~~~~~~~~~~~~
@@ -285,4 +291,6 @@ Miscellaneous
 ~~~~~~~~~~~~~
 
 * Subclasses of :class:`~django.middleware.cache.UpdateCacheMiddleware`
   will no longer cause duplication when used with cache decorator.
+
+* The ``all`` argument for the ``django.contrib.staticfiles.finders.find()``
+  function is deprecated in favor of the ``find_all`` argument.
@@ -39,6 +39,7 @@ versions of the documentation contain the release notes for any later releases.
 .. toctree::
    :maxdepth: 1
 
+   5.0.8
    5.0.7
    5.0.6
    5.0.5
@@ -54,6 +55,7 @@ versions of the documentation contain the release notes for any later releases.
 .. toctree::
    :maxdepth: 1
 
+   4.2.14
    4.2.13
    4.2.12
    4.2.11
@@ -36,6 +36,47 @@ Issues under Django's security process
 All security issues have been handled under versions of Django's security
 process. These are listed below.
 
+July 9, 2024 - :cve:`2024-39614`
+--------------------------------
+
+Potential denial-of-service in
+``django.utils.translation.get_supported_language_variant()``.
+`Full description
+<https://www.djangoproject.com/weblog/2024/jul/09/security-releases/>`__
+
+* Django 5.0 :commit:`(patch) <8e7a44e4bec0f11474699c3111a5e0a45afe7f49>`
+* Django 4.2 :commit:`(patch) <17358fb35fb7217423d4c4877ccb6d1a3a40b1c3>`
+
+July 9, 2024 - :cve:`2024-39330`
+--------------------------------
+
+Potential directory-traversal in ``django.core.files.storage.Storage.save()``.
+`Full description
+<https://www.djangoproject.com/weblog/2024/jul/09/security-releases/>`__
+
+* Django 5.0 :commit:`(patch) <9f4f63e9ebb7bf6cb9547ee4e2526b9b96703270>`
+* Django 4.2 :commit:`(patch) <2b00edc0151a660d1eb86da4059904a0fc4e095e>`
+
+July 9, 2024 - :cve:`2024-39329`
+--------------------------------
+
+Username enumeration through timing difference for users with unusable
+passwords. `Full description
+<https://www.djangoproject.com/weblog/2024/jul/09/security-releases/>`__
+
+* Django 5.0 :commit:`(patch) <07cefdee4a9d1fcd9a3a631cbd07c78defd1923b>`
+* Django 4.2 :commit:`(patch) <156d3186c96e3ec2ca73b8b25dc2ef366e38df14>`
+
+July 9, 2024 - :cve:`2024-38875`
+--------------------------------
+
+Potential denial-of-service in ``django.utils.html.urlize()``.
+`Full description
+<https://www.djangoproject.com/weblog/2024/jul/09/security-releases/>`__
+
+* Django 5.0 :commit:`(patch) <7285644640f085f41d60ab0c8ae4e9153f0485db>`
+* Django 4.2 :commit:`(patch) <79f368764295df109a37192f6182fb6f361d85b5>`
+
 March 4, 2024 - :cve:`2024-27351`
 ---------------------------------
 
@@ -96,6 +96,7 @@ contenttypes
 contrib
 coroutine
 coroutines
+counterintuitive
 criticals
 cron
 crontab

@@ -97,7 +97,7 @@ To use Argon2id as your default storage algorithm, do the following:
 #. Install the :pypi:`argon2-cffi` package. This can be done by running
    ``python -m pip install django[argon2]``, which is equivalent to
    ``python -m pip install argon2-cffi`` (along with any version requirement
-   from Django's ``setup.cfg``).
+   from Django's ``pyproject.toml``).

 #. Modify :setting:`PASSWORD_HASHERS` to list ``Argon2PasswordHasher`` first.
    That is, in your settings file, you'd put::
@@ -128,7 +128,7 @@ To use Bcrypt as your default storage algorithm, do the following:
 #. Install the :pypi:`bcrypt` package. This can be done by running
    ``python -m pip install django[bcrypt]``, which is equivalent to
    ``python -m pip install bcrypt`` (along with any version requirement from
-   Django's ``setup.cfg``).
+   Django's ``pyproject.toml``).

 #. Modify :setting:`PASSWORD_HASHERS` to list ``BCryptSHA256PasswordHasher``
    first. That is, in your settings file, you'd put::

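Both checklists end with the same settings change; a minimal sketch of the
resulting list, with the preferred hasher first so it is used for new passwords
while the remaining entries keep verifying existing ones (swap in
``BCryptSHA256PasswordHasher`` for the Bcrypt case)::

    # settings.py
    PASSWORD_HASHERS = [
        "django.contrib.auth.hashers.Argon2PasswordHasher",
        "django.contrib.auth.hashers.PBKDF2PasswordHasher",
        "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
        "django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
        "django.contrib.auth.hashers.ScryptPasswordHasher",
    ]
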
@@ -89,6 +89,9 @@ To activate or deactivate localization for a template block, use:
         {{ value }}
     {% endlocalize %}

+When localization is disabled, the :ref:`localization settings <settings-l10n>`
+formats are applied.
+
 See :tfilter:`localize` and :tfilter:`unlocalize` for template filters that will
 do the same job on a per-variable basis.

@@ -133,8 +136,9 @@ To force localization of a single value, use :tfilter:`localize`. To
 control localization over a large section of a template, use the
 :ttag:`localize` template tag.

-Returns a string representation for unlocalized numbers (``int``, ``float``,
-or ``Decimal``).
+Returns a string representation for numbers (``int``, ``float``, or
+``Decimal``) with the :ref:`localization settings <settings-l10n>` formats
+applied.

 .. _custom-format-files:

@@ -515,14 +515,18 @@ pass the translatable string as argument to another function, you can wrap
 this function inside a lazy call yourself. For example::

     from django.utils.functional import lazy
-    from django.utils.safestring import mark_safe
     from django.utils.translation import gettext_lazy as _

-    mark_safe_lazy = lazy(mark_safe, str)
+
+    def to_lower(string):
+        return string.lower()
+
+
+    to_lower_lazy = lazy(to_lower, str)

 And then later::

-    lazy_string = mark_safe_lazy(_("<p>My <strong>string!</strong></p>"))
+    lazy_string = to_lower_lazy(_("My STRING!"))

 Localized names of languages
 ----------------------------

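The point of the wrapper is that nothing runs at call time; a small
self-contained variant of the same pattern (plain strings are used here so no
translation catalog or settings module is needed)::

    from django.utils.functional import lazy


    def to_lower(string):
        return string.lower()


    to_lower_lazy = lazy(to_lower, str)

    value = to_lower_lazy("My STRING!")  # to_lower() has not run yet
    print(str(value))                    # "my string!" -- evaluated only here
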
@@ -76,8 +76,8 @@ In addition to the officially supported databases, there are :ref:`backends
 provided by 3rd parties <third-party-notes>` that allow you to use other
 databases with Django.

-In addition to a database backend, you'll need to make sure your Python
-database bindings are installed.
+To use a database other than SQLite, you'll need to make sure that the
+appropriate Python database bindings are installed:

 * If you're using PostgreSQL, you'll need the `psycopg`_ or `psycopg2`_
   package. Refer to the :ref:`PostgreSQL notes <postgresql-notes>` for further

|
|||||||
* If you're using an unofficial 3rd party backend, please consult the
|
* If you're using an unofficial 3rd party backend, please consult the
|
||||||
documentation provided for any additional requirements.
|
documentation provided for any additional requirements.
|
||||||
|
|
||||||
|
And ensure that the following keys in the ``'default'`` item of the
|
||||||
|
:setting:`DATABASES` dictionary match your database connection settings:
|
||||||
|
|
||||||
|
* :setting:`ENGINE <DATABASE-ENGINE>` -- Either
|
||||||
|
``'django.db.backends.sqlite3'``,
|
||||||
|
``'django.db.backends.postgresql'``,
|
||||||
|
``'django.db.backends.mysql'``, or
|
||||||
|
``'django.db.backends.oracle'``. Other backends are :ref:`also available
|
||||||
|
<third-party-notes>`.
|
||||||
|
|
||||||
|
* :setting:`NAME` -- The name of your database. If you’re using SQLite, the
|
||||||
|
database will be a file on your computer. In that case, ``NAME`` should be
|
||||||
|
the full absolute path, including the filename of that file. You don’t need
|
||||||
|
to create anything beforehand; the database file will be created
|
||||||
|
automatically when needed. The default value, ``BASE_DIR / 'db.sqlite3'``,
|
||||||
|
will store the file in your project directory.
|
||||||
|
|
||||||
|
.. admonition:: For databases other than SQLite
|
||||||
|
|
||||||
|
If you are not using SQLite as your database, additional settings such as
|
||||||
|
:setting:`USER`, :setting:`PASSWORD`, and :setting:`HOST` must be added.
|
||||||
|
For more details, see the reference documentation for :setting:`DATABASES`.
|
||||||
|
|
||||||
|
Also, make sure that you've created the database by this point. Do that
|
||||||
|
with "``CREATE DATABASE database_name;``" within your database's
|
||||||
|
interactive prompt.
|
||||||
|
|
||||||
If you plan to use Django's ``manage.py migrate`` command to automatically
|
If you plan to use Django's ``manage.py migrate`` command to automatically
|
||||||
create database tables for your models (after first installing Django and
|
create database tables for your models (after first installing Django and
|
||||||
creating a project), you'll need to ensure that Django has permission to create
|
creating a project), you'll need to ensure that Django has permission to create
|
||||||
|
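Read together with the :setting:`DATABASES` reference, the two keys called out
above map onto a settings block like the following minimal sketch (PostgreSQL
shown; every name and credential is a placeholder)::

    # settings.py
    DATABASES = {
        "default": {
            "ENGINE": "django.db.backends.postgresql",
            "NAME": "mydatabase",  # for SQLite: BASE_DIR / "db.sqlite3"
            "USER": "mydatabaseuser",
            "PASSWORD": "mypassword",
            "HOST": "127.0.0.1",
            "PORT": "5432",
        }
    }
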
@@ -1,9 +0,0 @@
-all: sdist bdist_wheel
-
-sdist:
-	python setup.py sdist
-
-bdist_wheel:
-	python setup.py bdist_wheel
-
-.PHONY : sdist bdist_wheel

@@ -1,12 +1,68 @@
 [build-system]
-requires = ['setuptools>=40.8.0']
-build-backend = 'setuptools.build_meta'
+requires = ["setuptools>=61.0.0,<69.3.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "Django"
+dynamic = ["version"]
+requires-python = ">= 3.10"
+dependencies = [
+    "asgiref>=3.7.0",
+    "sqlparse>=0.3.1",
+    "tzdata; sys_platform == 'win32'",
+]
+authors = [
+    {name = "Django Software Foundation", email = "foundation@djangoproject.com"},
+]
+description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
+readme = "README.rst"
+license = {text = "BSD-3-Clause"}
+classifiers = [
+    "Development Status :: 2 - Pre-Alpha",
+    "Environment :: Web Environment",
+    "Framework :: Django",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: BSD License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Topic :: Internet :: WWW/HTTP",
+    "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
+    "Topic :: Internet :: WWW/HTTP :: WSGI",
+    "Topic :: Software Development :: Libraries :: Application Frameworks",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+]
+
+[project.optional-dependencies]
+argon2 = ["argon2-cffi>=19.1.0"]
+bcrypt = ["bcrypt"]
+
+[project.scripts]
+django-admin = "django.core.management:execute_from_command_line"
+
+[project.urls]
+Homepage = "https://www.djangoproject.com/"
+Documentation = "https://docs.djangoproject.com/"
+"Release notes" = "https://docs.djangoproject.com/en/stable/releases/"
+Funding = "https://www.djangoproject.com/fundraising/"
+Source = "https://github.com/django/django"
+Tracker = "https://code.djangoproject.com/"

 [tool.black]
-target-version = ['py310']
-force-exclude = 'tests/test_runner_apps/tagged/tests_syntax_error.py'
+target-version = ["py310"]
+force-exclude = "tests/test_runner_apps/tagged/tests_syntax_error.py"

 [tool.isort]
-profile = 'black'
-default_section = 'THIRDPARTY'
-known_first_party = 'django'
+profile = "black"
+default_section = "THIRDPARTY"
+known_first_party = "django"
+
+[tool.setuptools.dynamic]
+version = {attr = "django.__version__"}
+
+[tool.setuptools.packages.find]
+include = ["django*"]

setup.cfg (61 lines deleted)
@@ -1,61 +0,0 @@
-[metadata]
-name = Django
-version = attr: django.__version__
-url = https://www.djangoproject.com/
-author = Django Software Foundation
-author_email = foundation@djangoproject.com
-description = A high-level Python web framework that encourages rapid development and clean, pragmatic design.
-long_description = file: README.rst
-license = BSD-3-Clause
-classifiers =
-    Development Status :: 2 - Pre-Alpha
-    Environment :: Web Environment
-    Framework :: Django
-    Intended Audience :: Developers
-    License :: OSI Approved :: BSD License
-    Operating System :: OS Independent
-    Programming Language :: Python
-    Programming Language :: Python :: 3
-    Programming Language :: Python :: 3 :: Only
-    Programming Language :: Python :: 3.10
-    Programming Language :: Python :: 3.11
-    Programming Language :: Python :: 3.12
-    Topic :: Internet :: WWW/HTTP
-    Topic :: Internet :: WWW/HTTP :: Dynamic Content
-    Topic :: Internet :: WWW/HTTP :: WSGI
-    Topic :: Software Development :: Libraries :: Application Frameworks
-    Topic :: Software Development :: Libraries :: Python Modules
-project_urls =
-    Documentation = https://docs.djangoproject.com/
-    Release notes = https://docs.djangoproject.com/en/stable/releases/
-    Funding = https://www.djangoproject.com/fundraising/
-    Source = https://github.com/django/django
-    Tracker = https://code.djangoproject.com/
-
-[options]
-python_requires = >=3.10
-packages = find:
-include_package_data = true
-zip_safe = false
-install_requires =
-    asgiref >= 3.7.0
-    sqlparse >= 0.3.1
-    tzdata; sys_platform == 'win32'
-
-[options.entry_points]
-console_scripts =
-    django-admin = django.core.management:execute_from_command_line
-
-[options.extras_require]
-argon2 = argon2-cffi >= 19.1.0
-bcrypt = bcrypt
-
-[flake8]
-exclude = build,.git,.tox,./tests/.env
-extend-ignore = E203
-max-line-length = 88
-per-file-ignores =
-    django/core/cache/backends/filebased.py:W601
-    django/core/cache/backends/base.py:W601
-    django/core/cache/backends/redis.py:W601
-    tests/cache/tests.py:W601

setup.py (55 lines deleted)
@@ -1,55 +0,0 @@
-import os
-import site
-import sys
-from distutils.sysconfig import get_python_lib
-
-from setuptools import setup
-
-# Allow editable install into user site directory.
-# See https://github.com/pypa/pip/issues/7953.
-site.ENABLE_USER_SITE = "--user" in sys.argv[1:]
-
-# Warn if we are installing over top of an existing installation. This can
-# cause issues where files that were deleted from a more recent Django are
-# still present in site-packages. See #18115.
-overlay_warning = False
-if "install" in sys.argv:
-    lib_paths = [get_python_lib()]
-    if lib_paths[0].startswith("/usr/lib/"):
-        # We have to try also with an explicit prefix of /usr/local in order to
-        # catch Debian's custom user site-packages directory.
-        lib_paths.append(get_python_lib(prefix="/usr/local"))
-    for lib_path in lib_paths:
-        existing_path = os.path.abspath(os.path.join(lib_path, "django"))
-        if os.path.exists(existing_path):
-            # We note the need for the warning here, but present it after the
-            # command is run, so it's more likely to be seen.
-            overlay_warning = True
-            break
-
-
-setup()
-
-
-if overlay_warning:
-    sys.stderr.write(
-        """
-
-========
-WARNING!
-========
-
-You have just installed Django over top of an existing
-installation, without removing it first. Because of this,
-your install may now include extraneous files from a
-previous version that have since been removed from
-Django. This is known to cause a variety of problems. You
-should manually remove the
-
-%(existing_path)s
-
-directory and re-install Django.
-
-"""
-        % {"existing_path": existing_path}
-    )

@@ -1858,6 +1858,7 @@ class SeleniumTests(AdminSeleniumTestCase):
             username="super", password="secret", email="super@example.com"
         )

+    @screenshot_cases(["desktop_size", "mobile_size", "dark", "high_contrast"])
     def test_add_stackeds(self):
         """
         The "Add another XXX" link correctly adds items to the stacked formset.
@@ -1878,6 +1879,7 @@ class SeleniumTests(AdminSeleniumTestCase):
         )
         add_button.click()
         self.assertCountSeleniumElements(rows_selector, 4)
+        self.take_screenshot("added")

     def test_delete_stackeds(self):
         from selenium.webdriver.common.by import By

@@ -40,6 +40,7 @@ urlpatterns = [
 @override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=["%s.Router" % __name__])
 class MultiDatabaseTests(TestCase):
     databases = {"default", "other"}
+    READ_ONLY_METHODS = {"get", "options", "head", "trace"}

     @classmethod
     def setUpTestData(cls):
@@ -56,48 +57,116 @@ class MultiDatabaseTests(TestCase):
                 b.save(using=db)
                 cls.test_book_ids[db] = b.id

+    def tearDown(self):
+        # Reset the routers' state between each test.
+        Router.target_db = None
+
     @mock.patch("django.contrib.admin.options.transaction")
     def test_add_view(self, mock):
         for db in self.databases:
             with self.subTest(db=db):
+                mock.mock_reset()
                 Router.target_db = db
                 self.client.force_login(self.superusers[db])
-                self.client.post(
+                response = self.client.post(
                     reverse("test_adminsite:admin_views_book_add"),
                     {"name": "Foobar: 5th edition"},
                 )
+                self.assertEqual(response.status_code, 302)
+                self.assertEqual(
+                    response.url, reverse("test_adminsite:admin_views_book_changelist")
+                )
                 mock.atomic.assert_called_with(using=db)

+    @mock.patch("django.contrib.admin.options.transaction")
+    def test_read_only_methods_add_view(self, mock):
+        for db in self.databases:
+            for method in self.READ_ONLY_METHODS:
+                with self.subTest(db=db, method=method):
+                    mock.mock_reset()
+                    Router.target_db = db
+                    self.client.force_login(self.superusers[db])
+                    response = getattr(self.client, method)(
+                        reverse("test_adminsite:admin_views_book_add"),
+                    )
+                    self.assertEqual(response.status_code, 200)
+                    mock.atomic.assert_not_called()
+
     @mock.patch("django.contrib.admin.options.transaction")
     def test_change_view(self, mock):
         for db in self.databases:
             with self.subTest(db=db):
+                mock.mock_reset()
                 Router.target_db = db
                 self.client.force_login(self.superusers[db])
-                self.client.post(
+                response = self.client.post(
                     reverse(
                         "test_adminsite:admin_views_book_change",
                         args=[self.test_book_ids[db]],
                     ),
                     {"name": "Test Book 2: Test more"},
                 )
+                self.assertEqual(response.status_code, 302)
+                self.assertEqual(
+                    response.url, reverse("test_adminsite:admin_views_book_changelist")
+                )
                 mock.atomic.assert_called_with(using=db)

+    @mock.patch("django.contrib.admin.options.transaction")
+    def test_read_only_methods_change_view(self, mock):
+        for db in self.databases:
+            for method in self.READ_ONLY_METHODS:
+                with self.subTest(db=db, method=method):
+                    mock.mock_reset()
+                    Router.target_db = db
+                    self.client.force_login(self.superusers[db])
+                    response = getattr(self.client, method)(
+                        reverse(
+                            "test_adminsite:admin_views_book_change",
+                            args=[self.test_book_ids[db]],
+                        ),
+                        data={"name": "Test Book 2: Test more"},
+                    )
+                    self.assertEqual(response.status_code, 200)
+                    mock.atomic.assert_not_called()
+
     @mock.patch("django.contrib.admin.options.transaction")
     def test_delete_view(self, mock):
         for db in self.databases:
             with self.subTest(db=db):
+                mock.mock_reset()
                 Router.target_db = db
                 self.client.force_login(self.superusers[db])
-                self.client.post(
+                response = self.client.post(
                     reverse(
                         "test_adminsite:admin_views_book_delete",
                         args=[self.test_book_ids[db]],
                     ),
                     {"post": "yes"},
                 )
+                self.assertEqual(response.status_code, 302)
+                self.assertEqual(
+                    response.url, reverse("test_adminsite:admin_views_book_changelist")
+                )
                 mock.atomic.assert_called_with(using=db)

+    @mock.patch("django.contrib.admin.options.transaction")
+    def test_read_only_methods_delete_view(self, mock):
+        for db in self.databases:
+            for method in self.READ_ONLY_METHODS:
+                with self.subTest(db=db, method=method):
+                    mock.mock_reset()
+                    Router.target_db = db
+                    self.client.force_login(self.superusers[db])
+                    response = getattr(self.client, method)(
+                        reverse(
+                            "test_adminsite:admin_views_book_delete",
+                            args=[self.test_book_ids[db]],
+                        )
+                    )
+                    self.assertEqual(response.status_code, 200)
+                    mock.atomic.assert_not_called()
+

 class ViewOnSiteRouter:
     def db_for_read(self, model, instance=None, **hints):

@@ -7385,7 +7385,7 @@ class UserAdminTest(TestCase):
         # Don't depend on a warm cache, see #17377.
         ContentType.objects.clear_cache()

-        expected_num_queries = 10 if connection.features.uses_savepoints else 8
+        expected_num_queries = 8 if connection.features.uses_savepoints else 6
         with self.assertNumQueries(expected_num_queries):
             response = self.client.get(reverse("admin:auth_user_change", args=(u.pk,)))
         self.assertEqual(response.status_code, 200)
@@ -7433,7 +7433,7 @@ class GroupAdminTest(TestCase):
         # Ensure no queries are skipped due to cached content type for Group.
         ContentType.objects.clear_cache()

-        expected_num_queries = 8 if connection.features.uses_savepoints else 6
+        expected_num_queries = 6 if connection.features.uses_savepoints else 4
         with self.assertNumQueries(expected_num_queries):
             response = self.client.get(reverse("admin:auth_group_change", args=(g.pk,)))
         self.assertEqual(response.status_code, 200)

@@ -568,6 +568,16 @@ class NonAggregateAnnotationTestCase(TestCase):
         self.assertEqual(book["other_rating"], 4)
         self.assertEqual(book["other_isbn"], "155860191")

+    def test_values_fields_annotations_order(self):
+        qs = Book.objects.annotate(other_rating=F("rating") - 1).values(
+            "other_rating", "rating"
+        )
+        book = qs.get(pk=self.b1.pk)
+        self.assertEqual(
+            list(book.items()),
+            [("other_rating", self.b1.rating - 1), ("rating", self.b1.rating)],
+        )
+
     def test_values_with_pk_annotation(self):
         # annotate references a field in values() with pk
         publishers = Publisher.objects.values("id", "book__rating").annotate(

@@ -30,6 +30,7 @@ urlpatterns = [
 @override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=["%s.Router" % __name__])
 class MultiDatabaseTests(TestCase):
     databases = {"default", "other"}
+    READ_ONLY_METHODS = {"get", "options", "head", "trace"}

     @classmethod
     def setUpTestData(cls):
@@ -42,13 +43,17 @@ class MultiDatabaseTests(TestCase):
                 email="test@test.org",
             )

+    def tearDown(self):
+        # Reset the routers' state between each test.
+        Router.target_db = None
+
     @mock.patch("django.contrib.auth.admin.transaction")
     def test_add_view(self, mock):
         for db in self.databases:
             with self.subTest(db_connection=db):
                 Router.target_db = db
                 self.client.force_login(self.superusers[db])
-                self.client.post(
+                response = self.client.post(
                     reverse("test_adminsite:auth_user_add"),
                     {
                         "username": "some_user",
@@ -56,4 +61,19 @@ class MultiDatabaseTests(TestCase):
                         "password2": "helloworld",
                     },
                 )
+                self.assertEqual(response.status_code, 302)
                 mock.atomic.assert_called_with(using=db)
+
+    @mock.patch("django.contrib.auth.admin.transaction")
+    def test_read_only_methods_add_view(self, mock):
+        for db in self.databases:
+            for method in self.READ_ONLY_METHODS:
+                with self.subTest(db_connection=db, method=method):
+                    mock.mock_reset()
+                    Router.target_db = db
+                    self.client.force_login(self.superusers[db])
+                    response = getattr(self.client, method)(
+                        reverse("test_adminsite:auth_user_add")
+                    )
+                    self.assertEqual(response.status_code, 200)
+                    mock.atomic.assert_not_called()

@@ -452,6 +452,38 @@ class TestUtilsHashPass(SimpleTestCase):
             check_password("wrong_password", encoded)
             self.assertEqual(hasher.harden_runtime.call_count, 1)

+    def test_check_password_calls_make_password_to_fake_runtime(self):
+        hasher = get_hasher("default")
+        cases = [
+            (None, None, None),  # no plain text password provided
+            ("foo", make_password(password=None), None),  # unusable encoded
+            ("letmein", make_password(password="letmein"), ValueError),  # valid encoded
+        ]
+        for password, encoded, hasher_side_effect in cases:
+            with (
+                self.subTest(encoded=encoded),
+                mock.patch(
+                    "django.contrib.auth.hashers.identify_hasher",
+                    side_effect=hasher_side_effect,
+                ) as mock_identify_hasher,
+                mock.patch(
+                    "django.contrib.auth.hashers.make_password"
+                ) as mock_make_password,
+                mock.patch(
+                    "django.contrib.auth.hashers.get_random_string",
+                    side_effect=lambda size: "x" * size,
+                ),
+                mock.patch.object(hasher, "verify"),
+            ):
+                # Ensure make_password is called to standardize timing.
+                check_password(password, encoded)
+                self.assertEqual(hasher.verify.call_count, 0)
+                self.assertEqual(mock_identify_hasher.mock_calls, [mock.call(encoded)])
+                self.assertEqual(
+                    mock_make_password.mock_calls,
+                    [mock.call("x" * UNUSABLE_PASSWORD_SUFFIX_LENGTH)],
+                )
+
     def test_encode_invalid_salt(self):
         hasher_classes = [
             MD5PasswordHasher,

@@ -523,7 +523,7 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
         self.assertEqual(u.group, group)

         non_existent_email = "mymail2@gmail.com"
-        msg = "email instance with email %r does not exist." % non_existent_email
+        msg = "email instance with email %r is not a valid choice." % non_existent_email
         with self.assertRaisesMessage(CommandError, msg):
             call_command(
                 "createsuperuser",
@@ -594,7 +594,7 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
         email = Email.objects.create(email="mymail@gmail.com")
         Group.objects.all().delete()
         nonexistent_group_id = 1
-        msg = f"group instance with id {nonexistent_group_id} does not exist."
+        msg = f"group instance with id {nonexistent_group_id} is not a valid choice."

         with self.assertRaisesMessage(CommandError, msg):
             call_command(
@@ -611,7 +611,7 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
         email = Email.objects.create(email="mymail@gmail.com")
         Group.objects.all().delete()
         nonexistent_group_id = 1
-        msg = f"group instance with id {nonexistent_group_id} does not exist."
+        msg = f"group instance with id {nonexistent_group_id} is not a valid choice."

         with mock.patch.dict(
             os.environ,
@@ -631,7 +631,7 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
         email = Email.objects.create(email="mymail@gmail.com")
         Group.objects.all().delete()
         nonexistent_group_id = 1
-        msg = f"group instance with id {nonexistent_group_id} does not exist."
+        msg = f"group instance with id {nonexistent_group_id} is not a valid choice."

         @mock_inputs(
             {

@@ -210,6 +210,52 @@ class ModelInstanceCreationTests(TestCase):
             a.save(False, False, None, None)
         self.assertEqual(Article.objects.count(), 1)

+    def test_save_deprecation_positional_arguments_used(self):
+        a = Article()
+        fields = ["headline"]
+        with (
+            self.assertWarns(RemovedInDjango60Warning),
+            mock.patch.object(a, "save_base") as mock_save_base,
+        ):
+            a.save(None, 1, 2, fields)
+        self.assertEqual(
+            mock_save_base.mock_calls,
+            [
+                mock.call(
+                    using=2,
+                    force_insert=None,
+                    force_update=1,
+                    update_fields=frozenset(fields),
+                )
+            ],
+        )
+
+    def test_save_too_many_positional_arguments(self):
+        a = Article()
+        msg = "Model.save() takes from 1 to 5 positional arguments but 6 were given"
+        with (
+            self.assertWarns(RemovedInDjango60Warning),
+            self.assertRaisesMessage(TypeError, msg),
+        ):
+            a.save(False, False, None, None, None)
+
+    def test_save_conflicting_positional_and_named_arguments(self):
+        a = Article()
+        cases = [
+            ("force_insert", True, [42]),
+            ("force_update", None, [42, 41]),
+            ("using", "some-db", [42, 41, 40]),
+            ("update_fields", ["foo"], [42, 41, 40, 39]),
+        ]
+        for param_name, param_value, args in cases:
+            with self.subTest(param_name=param_name):
+                msg = f"Model.save() got multiple values for argument '{param_name}'"
+                with (
+                    self.assertWarns(RemovedInDjango60Warning),
+                    self.assertRaisesMessage(TypeError, msg),
+                ):
+                    a.save(*args, **{param_name: param_value})
+
     async def test_asave_deprecation(self):
         a = Article(headline="original", pub_date=datetime(2014, 5, 16))
         msg = "Passing positional arguments to asave() is deprecated"
@@ -217,6 +263,52 @@ class ModelInstanceCreationTests(TestCase):
             await a.asave(False, False, None, None)
         self.assertEqual(await Article.objects.acount(), 1)

+    async def test_asave_deprecation_positional_arguments_used(self):
+        a = Article()
+        fields = ["headline"]
+        with (
+            self.assertWarns(RemovedInDjango60Warning),
+            mock.patch.object(a, "save_base") as mock_save_base,
+        ):
+            await a.asave(None, 1, 2, fields)
+        self.assertEqual(
+            mock_save_base.mock_calls,
+            [
+                mock.call(
+                    using=2,
+                    force_insert=None,
+                    force_update=1,
+                    update_fields=frozenset(fields),
+                )
+            ],
+        )
+
+    async def test_asave_too_many_positional_arguments(self):
+        a = Article()
+        msg = "Model.asave() takes from 1 to 5 positional arguments but 6 were given"
+        with (
+            self.assertWarns(RemovedInDjango60Warning),
+            self.assertRaisesMessage(TypeError, msg),
+        ):
+            await a.asave(False, False, None, None, None)
+
+    async def test_asave_conflicting_positional_and_named_arguments(self):
+        a = Article()
+        cases = [
+            ("force_insert", True, [42]),
+            ("force_update", None, [42, 41]),
+            ("using", "some-db", [42, 41, 40]),
+            ("update_fields", ["foo"], [42, 41, 40, 39]),
+        ]
+        for param_name, param_value, args in cases:
+            with self.subTest(param_name=param_name):
+                msg = f"Model.asave() got multiple values for argument '{param_name}'"
+                with (
+                    self.assertWarns(RemovedInDjango60Warning),
+                    self.assertRaisesMessage(TypeError, msg),
+                ):
+                    await a.asave(*args, **{param_name: param_value})
+
     @ignore_warnings(category=RemovedInDjango60Warning)
     def test_save_positional_arguments(self):
         a = Article.objects.create(headline="original", pub_date=datetime(2014, 5, 16))

@@ -2,7 +2,7 @@ from django.apps import apps
 from django.contrib.contenttypes.models import ContentType, ContentTypeManager
 from django.contrib.contenttypes.prefetch import GenericPrefetch
 from django.db import models
-from django.db.migrations.state import ProjectState
+from django.db.migrations.state import ModelState, ProjectState
 from django.test import TestCase, override_settings
 from django.test.utils import isolate_apps

@@ -99,6 +99,25 @@ class ContentTypesTests(TestCase):
             cts, {ContentType: ContentType.objects.get_for_model(ContentType)}
         )

+    @isolate_apps("contenttypes_tests")
+    def test_get_for_models_migrations_create_model(self):
+        state = ProjectState.from_apps(apps.get_app_config("contenttypes"))
+
+        class Foo(models.Model):
+            class Meta:
+                app_label = "contenttypes_tests"
+
+        state.add_model(ModelState.from_model(Foo))
+        ContentType = state.apps.get_model("contenttypes", "ContentType")
+        cts = ContentType.objects.get_for_models(FooWithUrl, Foo)
+        self.assertEqual(
+            cts,
+            {
+                Foo: ContentType.objects.get_for_model(Foo),
+                FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
+            },
+        )
+
     def test_get_for_models_full_cache(self):
         # Full cache
         ContentType.objects.get_for_model(ContentType)

@@ -614,6 +614,10 @@ class LookupTransformCallOrderTests(SimpleTestCase):
         )
         TrackCallsYearTransform.call_order = []
         # junk transform - tries transform only, then fails
+        msg = (
+            "Unsupported lookup 'junk__more_junk' for IntegerField or join"
+            " on the field not permitted."
+        )
         with self.assertRaisesMessage(FieldError, msg):
             Author.objects.filter(birthdate__testyear__junk__more_junk=2012)
         self.assertEqual(TrackCallsYearTransform.call_order, ["transform"])

@@ -1,12 +1,7 @@
 import warnings

 from django.test import SimpleTestCase
-from django.utils.deprecation import (
-    DeprecationInstanceCheck,
-    RemovedAfterNextVersionWarning,
-    RemovedInNextVersionWarning,
-    RenameMethodsBase,
-)
+from django.utils.deprecation import RemovedAfterNextVersionWarning, RenameMethodsBase


 class RenameManagerMethods(RenameMethodsBase):
@@ -166,14 +161,3 @@ class RenameMethodsTests(SimpleTestCase):
         self.assertTrue(
             issubclass(RemovedAfterNextVersionWarning, PendingDeprecationWarning)
         )
-
-
-class DeprecationInstanceCheckTest(SimpleTestCase):
-    def test_warning(self):
-        class Manager(metaclass=DeprecationInstanceCheck):
-            alternative = "fake.path.Foo"
-            deprecation_warning = RemovedInNextVersionWarning
-
-        msg = "`Manager` is deprecated, use `fake.path.Foo` instead."
-        with self.assertWarnsMessage(RemovedInNextVersionWarning, msg):
-            isinstance(object, Manager)

tests/file_storage/test_base.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+import os
+from unittest import mock
+
+from django.core.exceptions import SuspiciousFileOperation
+from django.core.files.storage import Storage
+from django.test import SimpleTestCase
+
+
+class CustomStorage(Storage):
+    """Simple Storage subclass implementing the bare minimum for testing."""
+
+    def exists(self, name):
+        return False
+
+    def _save(self, name):
+        return name
+
+
+class StorageValidateFileNameTests(SimpleTestCase):
+
+    invalid_file_names = [
+        os.path.join("path", "to", os.pardir, "test.file"),
+        os.path.join(os.path.sep, "path", "to", "test.file"),
+    ]
+    error_msg = "Detected path traversal attempt in '%s'"
+
+    def test_validate_before_get_available_name(self):
+        s = CustomStorage()
+        # The initial name passed to `save` is not valid nor safe, fail early.
+        for name in self.invalid_file_names:
+            with (
+                self.subTest(name=name),
+                mock.patch.object(s, "get_available_name") as mock_get_available_name,
+                mock.patch.object(s, "_save") as mock_internal_save,
+            ):
+                with self.assertRaisesMessage(
+                    SuspiciousFileOperation, self.error_msg % name
+                ):
+                    s.save(name, content="irrelevant")
+                self.assertEqual(mock_get_available_name.mock_calls, [])
+                self.assertEqual(mock_internal_save.mock_calls, [])
+
+    def test_validate_after_get_available_name(self):
+        s = CustomStorage()
+        # The initial name passed to `save` is valid and safe, but the returned
+        # name from `get_available_name` is not.
+        for name in self.invalid_file_names:
+            with (
+                self.subTest(name=name),
+                mock.patch.object(s, "get_available_name", return_value=name),
+                mock.patch.object(s, "_save") as mock_internal_save,
+            ):
+                with self.assertRaisesMessage(
+                    SuspiciousFileOperation, self.error_msg % name
+                ):
+                    s.save("valid-file-name.txt", content="irrelevant")
+                self.assertEqual(mock_internal_save.mock_calls, [])
+
+    def test_validate_after_internal_save(self):
+        s = CustomStorage()
+        # The initial name passed to `save` is valid and safe, but the result
+        # from `_save` is not (this is achieved by monkeypatching _save).
+        for name in self.invalid_file_names:
+            with (
+                self.subTest(name=name),
+                mock.patch.object(s, "_save", return_value=name),
+            ):
+                with self.assertRaisesMessage(
+                    SuspiciousFileOperation, self.error_msg % name
+                ):
+                    s.save("valid-file-name.txt", content="irrelevant")

@@ -288,22 +288,17 @@ class FileStorageTests(SimpleTestCase):

         self.storage.delete("path/to/test.file")

-    def test_file_save_abs_path(self):
-        test_name = "path/to/test.file"
-        f = ContentFile("file saved with path")
-        f_name = self.storage.save(os.path.join(self.temp_dir, test_name), f)
-        self.assertEqual(f_name, test_name)
-
     @unittest.skipUnless(
         symlinks_supported(), "Must be able to symlink to run this test."
     )
     def test_file_save_broken_symlink(self):
         """A new path is created on save when a broken symlink is supplied."""
         nonexistent_file_path = os.path.join(self.temp_dir, "nonexistent.txt")
-        broken_symlink_path = os.path.join(self.temp_dir, "symlink.txt")
+        broken_symlink_file_name = "symlink.txt"
+        broken_symlink_path = os.path.join(self.temp_dir, broken_symlink_file_name)
         os.symlink(nonexistent_file_path, broken_symlink_path)
         f = ContentFile("some content")
-        f_name = self.storage.save(broken_symlink_path, f)
+        f_name = self.storage.save(broken_symlink_file_name, f)
         self.assertIs(os.path.exists(os.path.join(self.temp_dir, f_name)), True)

     def test_save_doesnt_close(self):
@@ -880,7 +880,7 @@ class DirectoryCreationTests(SimpleTestCase):
         default_storage.delete(UPLOAD_TO)
         # Create a file with the upload directory name
         with SimpleUploadedFile(UPLOAD_TO, b"x") as file:
-            default_storage.save(UPLOAD_TO, file)
+            default_storage.save(UPLOAD_FOLDER, file)
         self.addCleanup(default_storage.delete, UPLOAD_TO)
         msg = "%s exists and is not a directory." % UPLOAD_TO
         with self.assertRaisesMessage(FileExistsError, msg):

@@ -58,6 +58,7 @@ from django.utils.translation.reloader import (
     translation_file_changed,
     watch_for_translation_changes,
 )
+from django.utils.translation.trans_real import LANGUAGE_CODE_MAX_LENGTH

 from .forms import CompanyForm, I18nForm, SelectDateForm
 from .models import Company, TestModel
@@ -1672,6 +1673,16 @@ class MiscTests(SimpleTestCase):
             g("xyz")
         with self.assertRaises(LookupError):
             g("xy-zz")
+        msg = "'lang_code' exceeds the maximum accepted length"
+        with self.assertRaises(LookupError):
+            g("x" * LANGUAGE_CODE_MAX_LENGTH)
+        with self.assertRaisesMessage(ValueError, msg):
+            g("x" * (LANGUAGE_CODE_MAX_LENGTH + 1))
+        # 167 * 3 = 501 which is LANGUAGE_CODE_MAX_LENGTH + 1.
+        self.assertEqual(g("en-" * 167), "en")
+        with self.assertRaisesMessage(ValueError, msg):
+            g("en-" * 167, strict=True)
+        self.assertEqual(g("en-" * 30000), "en")  # catastrophic test

     def test_get_supported_language_variant_null(self):
         g = trans_null.get_supported_language_variant

@@ -89,6 +89,23 @@ class RelativeFieldTests(SimpleTestCase):
         field = Model._meta.get_field("m2m")
         self.assertEqual(field.check(from_model=Model), [])

+    @isolate_apps("invalid_models_tests")
+    def test_auto_created_through_model(self):
+        class OtherModel(models.Model):
+            pass
+
+        class M2MModel(models.Model):
+            many_to_many_rel = models.ManyToManyField(OtherModel)
+
+        class O2OModel(models.Model):
+            one_to_one_rel = models.OneToOneField(
+                "invalid_models_tests.M2MModel_many_to_many_rel",
+                on_delete=models.CASCADE,
+            )
+
+        field = O2OModel._meta.get_field("one_to_one_rel")
+        self.assertEqual(field.check(from_model=O2OModel), [])
+
     def test_many_to_many_with_useless_options(self):
         class Model(models.Model):
             name = models.CharField(max_length=20)

@@ -812,6 +812,34 @@ class LookupTests(TestCase):
         ):
             Article.objects.filter(pub_date__gobbledygook="blahblah")

+        with self.assertRaisesMessage(
+            FieldError,
+            "Unsupported lookup 'gt__foo' for DateTimeField or join on the field "
+            "not permitted, perhaps you meant gt or gte?",
+        ):
+            Article.objects.filter(pub_date__gt__foo="blahblah")
+
+        with self.assertRaisesMessage(
+            FieldError,
+            "Unsupported lookup 'gt__' for DateTimeField or join on the field "
+            "not permitted, perhaps you meant gt or gte?",
+        ):
+            Article.objects.filter(pub_date__gt__="blahblah")
+
+        with self.assertRaisesMessage(
+            FieldError,
+            "Unsupported lookup 'gt__lt' for DateTimeField or join on the field "
+            "not permitted, perhaps you meant gt or gte?",
+        ):
+            Article.objects.filter(pub_date__gt__lt="blahblah")
+
+        with self.assertRaisesMessage(
+            FieldError,
+            "Unsupported lookup 'gt__lt__foo' for DateTimeField or join"
+            " on the field not permitted, perhaps you meant gt or gte?",
+        ):
+            Article.objects.filter(pub_date__gt__lt__foo="blahblah")
+
     def test_unsupported_lookups_custom_lookups(self):
         slug_field = Article._meta.get_field("slug")
         msg = (
@@ -825,7 +853,7 @@ class LookupTests(TestCase):
     def test_relation_nested_lookup_error(self):
         # An invalid nested lookup on a related field raises a useful error.
         msg = (
-            "Unsupported lookup 'editor' for ForeignKey or join on the field not "
+            "Unsupported lookup 'editor__name' for ForeignKey or join on the field not "
            "permitted."
         )
         with self.assertRaisesMessage(FieldError, msg):
@@ -1059,6 +1087,10 @@ class LookupTests(TestCase):
         )
         with self.assertRaisesMessage(FieldError, msg):
             Article.objects.filter(headline__blahblah=99)
+        msg = (
+            "Unsupported lookup 'blahblah__exact' for CharField or join "
+            "on the field not permitted."
+        )
         with self.assertRaisesMessage(FieldError, msg):
             Article.objects.filter(headline__blahblah__exact=99)
         msg = (

@@ -223,7 +223,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             cc=["foo@example.com"],
             headers={"Cc": "override@example.com"},
         ).message()
-        self.assertEqual(message["Cc"], "override@example.com")
+        self.assertEqual(message.get_all("Cc"), ["override@example.com"])

     def test_cc_in_headers_only(self):
         message = EmailMessage(
@@ -233,7 +233,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             ["to@example.com"],
             headers={"Cc": "foo@example.com"},
         ).message()
-        self.assertEqual(message["Cc"], "foo@example.com")
+        self.assertEqual(message.get_all("Cc"), ["foo@example.com"])

     def test_reply_to(self):
         email = EmailMessage(
@@ -379,7 +379,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             headers={"From": "from@example.com"},
         )
         message = email.message()
-        self.assertEqual(message["From"], "from@example.com")
+        self.assertEqual(message.get_all("From"), ["from@example.com"])

     def test_to_header(self):
         """
@@ -393,7 +393,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             headers={"To": "mailing-list@example.com"},
         )
         message = email.message()
-        self.assertEqual(message["To"], "mailing-list@example.com")
+        self.assertEqual(message.get_all("To"), ["mailing-list@example.com"])
         self.assertEqual(
             email.to, ["list-subscriber@example.com", "list-subscriber2@example.com"]
         )
@@ -408,7 +408,8 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
         )
         message = email.message()
         self.assertEqual(
-            message["To"], "list-subscriber@example.com, list-subscriber2@example.com"
+            message.get_all("To"),
+            ["list-subscriber@example.com, list-subscriber2@example.com"],
         )
         self.assertEqual(
             email.to, ["list-subscriber@example.com", "list-subscriber2@example.com"]
@@ -421,7 +422,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             "bounce@example.com",
             headers={"To": "to@example.com"},
         ).message()
-        self.assertEqual(message["To"], "to@example.com")
+        self.assertEqual(message.get_all("To"), ["to@example.com"])

     def test_reply_to_header(self):
         """
@@ -436,7 +437,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             headers={"Reply-To": "override@example.com"},
         )
         message = email.message()
-        self.assertEqual(message["Reply-To"], "override@example.com")
+        self.assertEqual(message.get_all("Reply-To"), ["override@example.com"])

     def test_reply_to_in_headers_only(self):
         message = EmailMessage(
@@ -446,7 +447,7 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             ["to@example.com"],
             headers={"Reply-To": "reply_to@example.com"},
         ).message()
-        self.assertEqual(message["Reply-To"], "reply_to@example.com")
+        self.assertEqual(message.get_all("Reply-To"), ["reply_to@example.com"])

     def test_multiple_message_call(self):
         """
@@ -461,9 +462,9 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):
             headers={"From": "from@example.com"},
         )
         message = email.message()
-        self.assertEqual(message["From"], "from@example.com")
+        self.assertEqual(message.get_all("From"), ["from@example.com"])
         message = email.message()
-        self.assertEqual(message["From"], "from@example.com")
+        self.assertEqual(message.get_all("From"), ["from@example.com"])

     def test_unicode_address_header(self):
         """
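The assertions above move from subscripting the generated message to message.get_all(): subscripting returns only the first matching header, while get_all() returns every occurrence, so a duplicated header now fails the test. A minimal sketch of the difference, assuming configured settings and illustrative addresses that are not taken from the patch:

    from django.conf import settings
    from django.core.mail import EmailMessage

    settings.configure()  # minimal settings so the message headers can be rendered

    msg = EmailMessage(
        "Subject",
        "Body",
        "from@example.com",
        ["to@example.com"],
        headers={"Cc": "override@example.com"},
    ).message()

    # Subscripting stops at the first "Cc" header; get_all() returns them all,
    # so an accidentally duplicated header would be caught by the new assertions.
    print(msg["Cc"])          # override@example.com
    print(msg.get_all("Cc"))  # ['override@example.com']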
@@ -1131,6 +1131,22 @@ class StateTests(SimpleTestCase):
         self.assertIsNone(order_field.related_model)
         self.assertIsInstance(order_field, models.PositiveSmallIntegerField)

+    def test_get_order_field_after_removed_order_with_respect_to_field(self):
+        new_apps = Apps()
+
+        class HistoricalRecord(models.Model):
+            _order = models.PositiveSmallIntegerField()
+
+            class Meta:
+                app_label = "migrations"
+                apps = new_apps
+
+        model_state = ModelState.from_model(HistoricalRecord)
+        model_state.options["order_with_respect_to"] = None
+        order_field = model_state.get_field("_order")
+        self.assertIsNone(order_field.related_model)
+        self.assertIsInstance(order_field, models.PositiveSmallIntegerField)
+
     def test_manager_refer_correct_model_version(self):
         """
         #24147 - Managers refer to the correct version of a
@@ -609,3 +609,79 @@ class GeneratedModelNullVirtual(models.Model):

     class Meta:
         required_db_features = {"supports_virtual_generated_columns"}
+
+
+class GeneratedModelBase(models.Model):
+    a = models.IntegerField()
+    a_squared = models.GeneratedField(
+        expression=F("a") * F("a"),
+        output_field=models.IntegerField(),
+        db_persist=True,
+    )
+
+    class Meta:
+        abstract = True
+
+
+class GeneratedModelVirtualBase(models.Model):
+    a = models.IntegerField()
+    a_squared = models.GeneratedField(
+        expression=F("a") * F("a"),
+        output_field=models.IntegerField(),
+        db_persist=False,
+    )
+
+    class Meta:
+        abstract = True
+
+
+class GeneratedModelCheckConstraint(GeneratedModelBase):
+    class Meta:
+        required_db_features = {
+            "supports_stored_generated_columns",
+            "supports_table_check_constraints",
+        }
+        constraints = [
+            models.CheckConstraint(
+                condition=models.Q(a__gt=0),
+                name="Generated model check constraint a > 0",
+            )
+        ]
+
+
+class GeneratedModelCheckConstraintVirtual(GeneratedModelVirtualBase):
+    class Meta:
+        required_db_features = {
+            "supports_virtual_generated_columns",
+            "supports_table_check_constraints",
+        }
+        constraints = [
+            models.CheckConstraint(
+                condition=models.Q(a__gt=0),
+                name="Generated model check constraint virtual a > 0",
+            )
+        ]
+
+
+class GeneratedModelUniqueConstraint(GeneratedModelBase):
+    class Meta:
+        required_db_features = {
+            "supports_stored_generated_columns",
+            "supports_table_check_constraints",
+        }
+        constraints = [
+            models.UniqueConstraint(F("a"), name="Generated model unique constraint a"),
+        ]
+
+
+class GeneratedModelUniqueConstraintVirtual(GeneratedModelVirtualBase):
+    class Meta:
+        required_db_features = {
+            "supports_virtual_generated_columns",
+            "supports_expression_indexes",
+        }
+        constraints = [
+            models.UniqueConstraint(
+                F("a"), name="Generated model unique constraint virtual a"
+            ),
+        ]
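The constraint models above give the generated-field tests a database-computed column (a_squared) to validate against. A hypothetical usage sketch, assuming these models are migrated into a test app alongside the models module above and using an illustrative test name, showing that the column is filled in by the database rather than by Python:

    from django.test import TestCase, skipUnlessDBFeature

    from .models import GeneratedModelCheckConstraint


    @skipUnlessDBFeature(
        "supports_stored_generated_columns", "supports_table_check_constraints"
    )
    class GeneratedFieldUsageExample(TestCase):
        def test_database_computes_generated_column(self):
            # a_squared is never assigned here; the database derives it from
            # the declared expression F("a") * F("a") when the row is written.
            obj = GeneratedModelCheckConstraint.objects.create(a=3)
            obj.refresh_from_db()
            self.assertEqual(obj.a_squared, 9)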
@@ -2,6 +2,7 @@ import uuid
 from decimal import Decimal

 from django.apps import apps
+from django.core.exceptions import ValidationError
 from django.db import IntegrityError, connection
 from django.db.models import (
     CharField,
@@ -18,6 +19,8 @@ from django.test.utils import isolate_apps
 from .models import (
     Foo,
     GeneratedModel,
+    GeneratedModelCheckConstraint,
+    GeneratedModelCheckConstraintVirtual,
     GeneratedModelFieldWithConverters,
     GeneratedModelNull,
     GeneratedModelNullVirtual,
@@ -25,6 +28,8 @@ from .models import (
     GeneratedModelOutputFieldDbCollationVirtual,
     GeneratedModelParams,
     GeneratedModelParamsVirtual,
+    GeneratedModelUniqueConstraint,
+    GeneratedModelUniqueConstraintVirtual,
     GeneratedModelVirtual,
 )

@@ -186,6 +191,42 @@ class GeneratedFieldTestMixin:
         m = self._refresh_if_needed(m)
         self.assertEqual(m.field, 3)

+    @skipUnlessDBFeature("supports_table_check_constraints")
+    def test_full_clean_with_check_constraint(self):
+        model_name = self.check_constraint_model._meta.verbose_name.capitalize()
+
+        m = self.check_constraint_model(a=2)
+        m.full_clean()
+        m.save()
+        m = self._refresh_if_needed(m)
+        self.assertEqual(m.a_squared, 4)
+
+        m = self.check_constraint_model(a=-1)
+        with self.assertRaises(ValidationError) as cm:
+            m.full_clean()
+        self.assertEqual(
+            cm.exception.message_dict,
+            {"__all__": [f"Constraint “{model_name} a > 0” is violated."]},
+        )
+
+    @skipUnlessDBFeature("supports_expression_indexes")
+    def test_full_clean_with_unique_constraint_expression(self):
+        model_name = self.unique_constraint_model._meta.verbose_name.capitalize()
+
+        m = self.unique_constraint_model(a=2)
+        m.full_clean()
+        m.save()
+        m = self._refresh_if_needed(m)
+        self.assertEqual(m.a_squared, 4)
+
+        m = self.unique_constraint_model(a=2)
+        with self.assertRaises(ValidationError) as cm:
+            m.full_clean()
+        self.assertEqual(
+            cm.exception.message_dict,
+            {"__all__": [f"Constraint “{model_name} a” is violated."]},
+        )
+
     def test_create(self):
         m = self.base_model.objects.create(a=1, b=2)
         m = self._refresh_if_needed(m)
@@ -305,6 +346,8 @@ class GeneratedFieldTestMixin:
 class StoredGeneratedFieldTests(GeneratedFieldTestMixin, TestCase):
     base_model = GeneratedModel
     nullable_model = GeneratedModelNull
+    check_constraint_model = GeneratedModelCheckConstraint
+    unique_constraint_model = GeneratedModelUniqueConstraint
     output_field_db_collation_model = GeneratedModelOutputFieldDbCollation
     params_model = GeneratedModelParams

@@ -318,5 +361,7 @@ class StoredGeneratedFieldTests(GeneratedFieldTestMixin, TestCase):
 class VirtualGeneratedFieldTests(GeneratedFieldTestMixin, TestCase):
     base_model = GeneratedModelVirtual
     nullable_model = GeneratedModelNullVirtual
+    check_constraint_model = GeneratedModelCheckConstraintVirtual
+    unique_constraint_model = GeneratedModelUniqueConstraintVirtual
     output_field_db_collation_model = GeneratedModelOutputFieldDbCollationVirtual
     params_model = GeneratedModelParamsVirtual
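These attribute pairs are the only wiring the new constraint tests need: the shared bodies in GeneratedFieldTestMixin reach the models through self.check_constraint_model and self.unique_constraint_model, and each concrete TestCase points them at either the stored or the virtual variant. A schematic sketch of the pattern with simplified names, not the actual test file:

    class GeneratedFieldTestMixin:
        # Each concrete TestCase plugs in either the stored or the virtual
        # model; the shared test bodies only ever touch these attributes.
        check_constraint_model = None
        unique_constraint_model = None

        def _refresh_if_needed(self, obj):
            # Simplified stand-in for the real helper, which reloads the row
            # when the backend cannot return generated values from the INSERT.
            obj.refresh_from_db()
            return obj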
@@ -466,8 +466,8 @@ class TestQuerying(PostgreSQLTestCase):
             ],
         )
         sql = ctx[0]["sql"]
-        self.assertIn("GROUP BY 2", sql)
-        self.assertIn("ORDER BY 2", sql)
+        self.assertIn("GROUP BY 1", sql)
+        self.assertIn("ORDER BY 1", sql)

     def test_order_by_arrayagg_index(self):
         qs = (