diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 35113a945a..0a77a7f7ad 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -30,8 +30,8 @@ jobs: python-version: '3.11' - run: python -m pip install flake8 - name: flake8 - # Pinned to v1.0.2. - uses: liskin/gh-problem-matcher-wrap@e7d110d699a16b3dead9ef8b1f9470f93765ae95 + # Pinned to v2.0.0. + uses: liskin/gh-problem-matcher-wrap@d8afa2cfb66dd3f982b1950429e652bc14d0d7d2 with: linters: flake8 run: flake8 @@ -47,8 +47,8 @@ jobs: python-version: '3.11' - run: python -m pip install isort - name: isort - # Pinned to v1.0.2. - uses: liskin/gh-problem-matcher-wrap@e7d110d699a16b3dead9ef8b1f9470f93765ae95 + # Pinned to v2.0.0. + uses: liskin/gh-problem-matcher-wrap@d8afa2cfb66dd3f982b1950429e652bc14d0d7d2 with: linters: isort run: isort --check --diff django tests scripts diff --git a/.github/workflows/schedule_tests.yml b/.github/workflows/schedule_tests.yml index be23c8853b..d758642ef7 100644 --- a/.github/workflows/schedule_tests.yml +++ b/.github/workflows/schedule_tests.yml @@ -20,6 +20,7 @@ jobs: - '3.9' - '3.10' - '3.11' + - '3.12-dev' name: Windows, SQLite, Python ${{ matrix.python-version }} continue-on-error: true steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c175fbcc07..9a7c3620eb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,10 +9,10 @@ repos: hooks: - id: isort - repo: https://github.com/PyCQA/flake8 - rev: 5.0.4 + rev: 6.0.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-eslint - rev: v8.24.0 + rev: v8.29.0 hooks: - id: eslint diff --git a/AUTHORS b/AUTHORS index 72e26ebc3e..db1454c5ee 100644 --- a/AUTHORS +++ b/AUTHORS @@ -11,6 +11,7 @@ answer newbie questions, and generally made Django that much better: Abeer Upadhyay Abhijeet Viswa Abhinav Patil + Abhinav Yadav Abhishek Gautam Abhyudai Adam Allred @@ -330,6 +331,7 @@ answer newbie questions, and generally made Django that much better: Florian Demmer Florian Moussous Fran Hrženjak + Francesco Panico Francisco Albarran Cristobal Francisco Couzo François Freitag @@ -409,6 +411,7 @@ answer newbie questions, and generally made Django that much better: Ifedapo Olarewaju Igor Kolar Illia Volochii + Ilya Bass Ilya Semenov Ingo Klöcker I.S. 
van Oostveen @@ -428,6 +431,7 @@ answer newbie questions, and generally made Django that much better: james_027@yahoo.com James Aylett James Bennett + James Gillard James Murty James Tauber James Timmins @@ -495,6 +499,7 @@ answer newbie questions, and generally made Django that much better: John Shaffer Jökull Sólberg Auðunsson Jon Dufresne + Jon Janzen Jonas Haag Jonas Lundberg Jonathan Davis @@ -552,6 +557,7 @@ answer newbie questions, and generally made Django that much better: Kieran Holland kilian Kim Joon Hwan 김준환 + Kim Soung Ryoul 김성렬 Klaas van Schelven knox konrad@gwu.edu @@ -799,6 +805,7 @@ answer newbie questions, and generally made Django that much better: Rachel Tobin Rachel Willmer Radek Švarz + Rafael Giebisch Raffaele Salmaso Rajesh Dhawan Ramez Ashraf diff --git a/django/conf/locale/bg/LC_MESSAGES/django.mo b/django/conf/locale/bg/LC_MESSAGES/django.mo index 5b85f3380f..fb488b018d 100644 Binary files a/django/conf/locale/bg/LC_MESSAGES/django.mo and b/django/conf/locale/bg/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/bg/LC_MESSAGES/django.po b/django/conf/locale/bg/LC_MESSAGES/django.po index 0d4bf7a366..de5ffbf1c5 100644 --- a/django/conf/locale/bg/LC_MESSAGES/django.po +++ b/django/conf/locale/bg/LC_MESSAGES/django.po @@ -14,9 +14,9 @@ msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2022-01-14 11:26+0000\n" -"Last-Translator: arneatec \n" +"POT-Creation-Date: 2022-05-17 05:23-0500\n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: arneatec , 2022\n" "Language-Team: Bulgarian (http://www.transifex.com/django/django/language/" "bg/)\n" "MIME-Version: 1.0\n" @@ -392,6 +392,10 @@ msgid "Ensure this value is greater than or equal to %(limit_value)s." msgstr "" "Уверете се, че тази стойност е по-голяма или равна на %(limit_value)s ." +#, python-format +msgid "Ensure this value is a multiple of step size %(limit_value)s." +msgstr "Уверете се, че стойността е кратна на стъпката %(limit_value)s." + #, python-format msgid "" "Ensure this value has at least %(limit_value)d character (it has " @@ -465,6 +469,10 @@ msgstr "и" msgid "%(model_name)s with this %(field_labels)s already exists." msgstr "%(model_name)s с този %(field_labels)s вече съществува." +#, python-format +msgid "Constraint “%(name)s” is violated." +msgstr "Ограничението “%(name)s” е нарушено." + #, python-format msgid "Value %(value)r is not a valid choice." msgstr "Стойността %(value)r не е валиден избор." @@ -479,8 +487,8 @@ msgstr "Това поле не може да е празно." msgid "%(model_name)s with this %(field_label)s already exists." msgstr "%(model_name)s с този %(field_label)s вече съществува." -#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. -#. Eg: "Title must be unique for pub_date year" +#. Translators: The 'lookup_type' is one of 'date', 'year' or +#. 'month'. Eg: "Title must be unique for pub_date year" #, python-format msgid "" "%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s." @@ -776,16 +784,16 @@ msgstr "" "проблем продължава." #, python-format -msgid "Please submit at most %d form." -msgid_plural "Please submit at most %d forms." -msgstr[0] "Моля изпратете не повече от %d формуляр." -msgstr[1] "Моля изпратете не повече от %d формуляри." +msgid "Please submit at most %(num)d form." +msgid_plural "Please submit at most %(num)d forms." +msgstr[0] "Моля изпратете не повече от %(num)d формуляр." 
+msgstr[1] "Моля изпратете не повече от %(num)d формуляра." #, python-format -msgid "Please submit at least %d form." -msgid_plural "Please submit at least %d forms." -msgstr[0] "Моля изпратете поне %d формуляр." -msgstr[1] "Моля изпратете поне %d формуляра." +msgid "Please submit at least %(num)d form." +msgid_plural "Please submit at least %(num)d forms." +msgstr[0] "Моля изпратете поне %(num)d формуляр." +msgstr[1] "Моля изпратете поне %(num)d формуляра." msgid "Order" msgstr "Ред" @@ -1189,8 +1197,8 @@ msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." msgstr "" "Ако използвате таг или " "включвате “Referrer-Policy: no-referrer” header, моля премахнете ги. CSRF " @@ -1292,14 +1300,14 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Вие виждате тази страница, защото DEBUG=True е във вашия файл с настройки и не сте конфигурирали " -"никакви URL-и." +"en/%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True е във вашия файл с настройки и не сте " +"конфигурирали никакви URL-и." msgid "Django Documentation" msgstr "Django документация" diff --git a/django/conf/locale/eo/LC_MESSAGES/django.mo b/django/conf/locale/eo/LC_MESSAGES/django.mo index e606154811..05260e5b04 100644 Binary files a/django/conf/locale/eo/LC_MESSAGES/django.mo and b/django/conf/locale/eo/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/eo/LC_MESSAGES/django.po b/django/conf/locale/eo/LC_MESSAGES/django.po index 72d36b0291..66a2f38150 100644 --- a/django/conf/locale/eo/LC_MESSAGES/django.po +++ b/django/conf/locale/eo/LC_MESSAGES/django.po @@ -7,16 +7,17 @@ # Dinu Gherman , 2011 # kristjan , 2011 # Matthieu Desplantes , 2021 -# Nikolay Korotkiy , 2017-2018 -# Robin van der Vliet , 2019 +# Meiyer , 2022 +# Nikolay Korotkiy , 2017-2018 +# Robin van der Vliet , 2019 # Adamo Mesha , 2012 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2021-04-13 08:22+0000\n" -"Last-Translator: Matthieu Desplantes \n" +"POT-Creation-Date: 2022-05-17 05:23-0500\n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -194,7 +195,7 @@ msgid "Kyrgyz" msgstr "Kirgiza" msgid "Luxembourgish" -msgstr "Lukszemburga" +msgstr "Luksemburga" msgid "Lithuanian" msgstr "Litova" @@ -214,11 +215,14 @@ msgstr "Mongola" msgid "Marathi" msgstr "Marata" +msgid "Malay" +msgstr "Malaja" + msgid "Burmese" msgstr "Birma" msgid "Norwegian Bokmål" -msgstr "Norvega Bbokmål" +msgstr "Norvega (bokmål)" msgid "Nepali" msgstr "Nepala" @@ -331,44 +335,48 @@ msgid "…" msgstr "…" msgid "That page number is not an integer" -msgstr "Tuo paĝnumero ne estas entjero" +msgstr "Tia paĝnumero ne estas entjero" msgid "That page number is less than 1" -msgstr "Tuo paĝnumero estas malpli ol 1" +msgstr "La paĝnumero estas 
malpli ol 1" msgid "That page contains no results" msgstr "Tiu paĝo ne enhavas rezultojn" msgid "Enter a valid value." -msgstr "Enigu validan valoron." +msgstr "Enigu ĝustan valoron." msgid "Enter a valid URL." -msgstr "Enigu validan adreson." +msgstr "Enigu ĝustan retadreson." msgid "Enter a valid integer." -msgstr "Enigu validan entjero." +msgstr "Enigu ĝustaforman entjeron." msgid "Enter a valid email address." -msgstr "Enigu validan retpoŝtan adreson." +msgstr "Enigu ĝustaforman retpoŝtan adreson." #. Translators: "letters" means latin letters: a-z and A-Z. msgid "" "Enter a valid “slug” consisting of letters, numbers, underscores or hyphens." msgstr "" +"Enigu ĝustan “ĵetonvorton” konsistantan el latinaj literoj, ciferoj, " +"substrekoj, aŭ dividstrekoj." msgid "" "Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or " "hyphens." msgstr "" +"Enigu ĝustan “ĵetonvorton” konsistantan el Unikodaj literoj, ciferoj, " +"substrekoj, aŭ dividstrekoj." msgid "Enter a valid IPv4 address." -msgstr "Enigu validan IPv4-adreson." +msgstr "Enigu ĝustaforman IPv4-adreson." msgid "Enter a valid IPv6 address." -msgstr "Enigu validan IPv6-adreson." +msgstr "Enigu ĝustaforman IPv6-adreson." msgid "Enter a valid IPv4 or IPv6 address." -msgstr "Enigu validan IPv4 aŭ IPv6-adreson." +msgstr "Enigu ĝustaforman IPv4- aŭ IPv6-adreson." msgid "Enter only digits separated by commas." msgstr "Enigu nur ciferojn apartigitajn per komoj." @@ -386,6 +394,10 @@ msgstr "Certigu ke ĉi tiu valoro estas malpli ol aŭ egala al %(limit_value)s." msgid "Ensure this value is greater than or equal to %(limit_value)s." msgstr "Certigu ke ĉi tiu valoro estas pli ol aŭ egala al %(limit_value)s." +#, python-format +msgid "Ensure this value is a multiple of step size %(limit_value)s." +msgstr "Certigu ke ĉi tiu valoro estas oblo de paŝo-grando %(limit_value)s." + #, python-format msgid "" "Ensure this value has at least %(limit_value)d character (it has " @@ -397,7 +409,7 @@ msgstr[0] "" "Certigu, ke tiu valoro havas %(limit_value)d signon (ĝi havas " "%(show_value)d)." msgstr[1] "" -"Certigu, ke tiu valoro havas %(limit_value)d signojn (ĝi havas " +"Certigu ke ĉi tiu valoro enhavas almenaŭ %(limit_value)d signojn (ĝi havas " "%(show_value)d)." #, python-format @@ -411,7 +423,7 @@ msgstr[0] "" "Certigu, ke tio valuto maksimume havas %(limit_value)d karakterojn (ĝi havas " "%(show_value)d)." msgstr[1] "" -"Certigu, ke tiu valoro maksimume havas %(limit_value)d signojn (ĝi havas " +"Certigu ke ĉi tiu valoro maksimume enhavas %(limit_value)d signojn (ĝi havas " "%(show_value)d)." msgid "Enter a number." @@ -442,6 +454,8 @@ msgid "" "File extension “%(extension)s” is not allowed. Allowed extensions are: " "%(allowed_extensions)s." msgstr "" +"Sufikso “%(extension)s” de dosiernomo ne estas permesita. Eblaj sufiksoj " +"estas: %(allowed_extensions)s." msgid "Null characters are not allowed." msgstr "Nulsignoj ne estas permesitaj." @@ -453,9 +467,13 @@ msgstr "kaj" msgid "%(model_name)s with this %(field_labels)s already exists." msgstr "%(model_name)s kun tiuj %(field_labels)s jam ekzistas." +#, python-format +msgid "Constraint “%(name)s” is violated." +msgstr "Limigo “%(name)s” estas malobservita." + #, python-format msgid "Value %(value)r is not a valid choice." -msgstr "Valoro %(value)r ne estas valida elekto." +msgstr "Valoro %(value)r ne estas ebla elekto." msgid "This field cannot be null." msgstr "Tiu ĉi kampo ne povas esti senvalora (null)." @@ -467,8 +485,8 @@ msgstr "Tiu ĉi kampo ne povas esti malplena." 
msgid "%(model_name)s with this %(field_label)s already exists." msgstr "%(model_name)s kun tiu %(field_label)s jam ekzistas." -#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. -#. Eg: "Title must be unique for pub_date year" +#. Translators: The 'lookup_type' is one of 'date', 'year' or +#. 'month'. Eg: "Title must be unique for pub_date year" #, python-format msgid "" "%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s." @@ -481,11 +499,13 @@ msgstr "Kampo de tipo: %(field_type)s" #, python-format msgid "“%(value)s” value must be either True or False." -msgstr "" +msgstr "La valoro “%(value)s” devas esti aŭ Vera (True) aŭ Malvera (False)." #, python-format msgid "“%(value)s” value must be either True, False, or None." msgstr "" +"La valoro “%(value)s” devas esti Vera (True), Malvera (False), aŭ Nenia " +"(None)." msgid "Boolean (Either True or False)" msgstr "Bulea (Vera aŭ Malvera)" @@ -495,7 +515,7 @@ msgid "String (up to %(max_length)s)" msgstr "Ĉeno (ĝis %(max_length)s)" msgid "Comma-separated integers" -msgstr "Kom-apartigitaj entjeroj" +msgstr "Perkome disigitaj entjeroj" #, python-format msgid "" @@ -510,6 +530,8 @@ msgid "" "“%(value)s” value has the correct format (YYYY-MM-DD) but it is an invalid " "date." msgstr "" +"La valoro “%(value)s” havas la ĝustan formaton (JJJJ-MM-TT), sed ĝi estas " +"neekzistanta dato." msgid "Date (without time)" msgstr "Dato (sen horo)" @@ -519,19 +541,23 @@ msgid "" "“%(value)s” value has an invalid format. It must be in YYYY-MM-DD HH:MM[:ss[." "uuuuuu]][TZ] format." msgstr "" +"La valoro “%(value)s” havas malĝustan formaton. Ĝi devas esti en la formato " +"JJJJ-MM-TT HH:MM[:ss[.µµµµµµ]][TZ]." #, python-format msgid "" "“%(value)s” value has the correct format (YYYY-MM-DD HH:MM[:ss[.uuuuuu]]" "[TZ]) but it is an invalid date/time." msgstr "" +"La valoro “%(value)s” havas la ĝustan formaton (JJJJ-MM-TT HH:MM[:ss[." +"µµµµµµ]][TZ]), sed ĝi estas neekzistanta dato/tempo." msgid "Date (with time)" msgstr "Dato (kun horo)" #, python-format msgid "“%(value)s” value must be a decimal number." -msgstr "" +msgstr "La valoro “%(value)s” devas esti dekuma frakcio." msgid "Decimal number" msgstr "Dekuma nombro" @@ -541,6 +567,8 @@ msgid "" "“%(value)s” value has an invalid format. It must be in [DD] [[HH:]MM:]ss[." "uuuuuu] format." msgstr "" +"La valoro “%(value)s” havas malĝustan formaton. Ĝi devas esti en la formato " +"[TT] [[HH:]MM:]ss[.µµµµµµ]." msgid "Duration" msgstr "Daŭro" @@ -549,14 +577,14 @@ msgid "Email address" msgstr "Retpoŝtadreso" msgid "File path" -msgstr "Dosiervojo" +msgstr "Dosierindiko" #, python-format msgid "“%(value)s” value must be a float." -msgstr "" +msgstr "La valoro “%(value)s” devas esti glitpunkta nombro." msgid "Floating point number" -msgstr "Glitkoma nombro" +msgstr "Glitpunkta nombro" #, python-format msgid "“%(value)s” value must be an integer." @@ -566,7 +594,7 @@ msgid "Integer" msgstr "Entjero" msgid "Big (8 byte) integer" -msgstr "Granda (8 bitoka) entjero" +msgstr "Granda (8–bitoka) entjero" msgid "Small integer" msgstr "Malgranda entjero" @@ -580,12 +608,14 @@ msgstr "IP-adreso" #, python-format msgid "“%(value)s” value must be either None, True or False." msgstr "" +"La valoro “%(value)s” devas esti Nenia (None), Vera (True), aŭ Malvera " +"(False)." 
msgid "Boolean (Either True, False or None)" -msgstr "Buleo (Vera, Malvera aŭ Neniu)" +msgstr "Buleo (Vera, Malvera, aŭ Nenia)" msgid "Positive big integer" -msgstr "" +msgstr "Pozitiva granda entjero" msgid "Positive integer" msgstr "Pozitiva entjero" @@ -605,12 +635,16 @@ msgid "" "“%(value)s” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] " "format." msgstr "" +"La valoro “%(value)s” havas malĝustan formaton. Ĝi devas esti en la formato " +"HH:MM[:ss[.µµµµµµ]]." #, python-format msgid "" "“%(value)s” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is an " "invalid time." msgstr "" +"La valoro “%(value)s” havas la (HH:MM[:ss[.µµµµµµ]]), sed tio estas " +"neekzistanta tempo." msgid "Time" msgstr "Horo" @@ -619,14 +653,14 @@ msgid "URL" msgstr "URL" msgid "Raw binary data" -msgstr "Kruda binara datumo" +msgstr "Kruda duuma datumo" #, python-format msgid "“%(value)s” is not a valid UUID." -msgstr "" +msgstr "“%(value)s” ne estas ĝustaforma UUID." msgid "Universally unique identifier" -msgstr "Universe unika identigilo" +msgstr "Universale unika identigilo" msgid "File" msgstr "Dosiero" @@ -638,11 +672,11 @@ msgid "A JSON object" msgstr "JSON-objekto" msgid "Value must be valid JSON." -msgstr "" +msgstr "La valoro devas esti ĝustaforma JSON." #, python-format msgid "%(model)s instance with %(field)s %(value)r does not exist." -msgstr "%(model)s kazo kun %(field)s %(value)r ne ekzistas." +msgstr "Ekzemplero de %(model)s kun %(field)s egala al %(value)r ne ekzistas." msgid "Foreign Key (type determined by related field)" msgstr "Fremda ŝlosilo (tipo determinita per rilata kampo)" @@ -674,20 +708,20 @@ msgid "Enter a whole number." msgstr "Enigu plenan nombron." msgid "Enter a valid date." -msgstr "Enigu validan daton." +msgstr "Enigu ĝustan daton." msgid "Enter a valid time." -msgstr "Enigu validan horon." +msgstr "Enigu ĝustan horon." msgid "Enter a valid date/time." -msgstr "Enigu validan daton/tempon." +msgstr "Enigu ĝustan daton/tempon." msgid "Enter a valid duration." -msgstr "Enigu validan daŭron." +msgstr "Enigu ĝustan daŭron." #, python-brace-format msgid "The number of days must be between {min_days} and {max_days}." -msgstr "La nombro da tagoj devas esti inter {min_days} kaj {max_days}." +msgstr "La nombro de tagoj devas esti inter {min_days} kaj {max_days}." msgid "No file was submitted. Check the encoding type on the form." msgstr "" @@ -708,23 +742,23 @@ msgstr[0] "" "Certigu, ke tio dosiernomo maksimume havas %(max)d karakteron (ĝi havas " "%(length)d)." msgstr[1] "" -"Certigu, ke tiu dosiernomo maksimume havas %(max)d signojn (ĝi havas " +"Certigu ke la dosiernomo maksimume havas %(max)d signojn (ĝi havas " "%(length)d)." msgid "Please either submit a file or check the clear checkbox, not both." msgstr "" -"Bonvolu aŭ alŝuti dosieron, aŭ elekti la malplenan markobutonon, ne ambaŭ." +"Bonvolu aŭ alŝuti dosieron, aŭ elekti la vakigan markobutonon, sed ne ambaŭ." msgid "" "Upload a valid image. The file you uploaded was either not an image or a " "corrupted image." msgstr "" -"Alŝutu validan bildon. La alŝutita dosiero ne estas bildo, aŭ estas " +"Alŝutu ĝustaforman bildon. La alŝutita dosiero ne estas bildo aŭ estas " "difektita bildo." #, python-format msgid "Select a valid choice. %(value)s is not one of the available choices." -msgstr "Elektu validan elekton. %(value)s ne estas el la eblaj elektoj." +msgstr "Elektu ekzistantan opcion. %(value)s ne estas el la eblaj elektoj." msgid "Enter a list of values." msgstr "Enigu liston de valoroj." 
@@ -733,10 +767,10 @@ msgid "Enter a complete value." msgstr "Enigu kompletan valoron." msgid "Enter a valid UUID." -msgstr "Enigu validan UUID-n." +msgstr "Enigu ĝustaforman UUID." msgid "Enter a valid JSON." -msgstr "" +msgstr "Enigu ĝustaforman JSON." #. Translators: This is the default suffix added to form field labels msgid ":" @@ -751,18 +785,21 @@ msgid "" "ManagementForm data is missing or has been tampered with. Missing fields: " "%(field_names)s. You may need to file a bug report if the issue persists." msgstr "" +"La datumoj de la mastruma ManagementForm mankas aŭ estis malice modifitaj. " +"Mankas la kampoj: %(field_names)s. Se la problemo plu okazas, vi poveble " +"devintus raporti cimon." #, python-format -msgid "Please submit at most %d form." -msgid_plural "Please submit at most %d forms." -msgstr[0] "" -msgstr[1] "" +msgid "Please submit at most %(num)d form." +msgid_plural "Please submit at most %(num)d forms." +msgstr[0] "Bonvolu forsendi maksimume %(num)d formularon." +msgstr[1] "Bonvolu forsendi maksimume %(num)d formularojn." #, python-format -msgid "Please submit at least %d form." -msgid_plural "Please submit at least %d forms." -msgstr[0] "" -msgstr[1] "" +msgid "Please submit at least %(num)d form." +msgid_plural "Please submit at least %(num)d forms." +msgstr[0] "Bonvolu forsendi almenaŭ %(num)d formularon." +msgstr[1] "Bonvolu forsendi almenaŭ %(num)d formularojn." msgid "Order" msgstr "Ordo" @@ -791,20 +828,22 @@ msgid "Please correct the duplicate values below." msgstr "Bonvolu ĝustigi la duoblan valoron sube." msgid "The inline value did not match the parent instance." -msgstr "La enteksta valoro ne egalas la patran aperon." +msgstr "La enteksta valoro ne egalas la patran ekzempleron." msgid "Select a valid choice. That choice is not one of the available choices." -msgstr "Elektu validan elekton. Ĉi tiu elekto ne estas el la eblaj elektoj." +msgstr "Elektu ekzistantan opcion. Ĉi tiu opcio ne estas el la eblaj elektoj." #, python-format msgid "“%(pk)s” is not a valid value." -msgstr "" +msgstr "“%(pk)s” estas neakceptebla valoro." #, python-format msgid "" "%(datetime)s couldn’t be interpreted in time zone %(current_timezone)s; it " "may be ambiguous or it may not exist." msgstr "" +"Ne eblis interpreti %(datetime)s en la tempo-zono %(current_timezone)s. Ĝi " +"eble estas ambigua aŭ ne ekzistas en tiu tempo-zono." msgid "Clear" msgstr "Vakigi" @@ -996,7 +1035,7 @@ msgstr "feb." msgctxt "abbrev. month" msgid "March" -msgstr "marto" +msgstr "mar." msgctxt "abbrev. month" msgid "April" @@ -1083,7 +1122,7 @@ msgid "December" msgstr "Decembro" msgid "This is not a valid IPv6 address." -msgstr "Tiu ne estas valida IPv6-adreso." +msgstr "Tio ne estas ĝustaforma IPv6-adreso." 
#, python-format msgctxt "String to return when truncating text" @@ -1098,133 +1137,150 @@ msgid ", " msgstr ", " #, python-format -msgid "%d year" -msgid_plural "%d years" -msgstr[0] "%d jaro" -msgstr[1] "%d jaroj" +msgid "%(num)d year" +msgid_plural "%(num)d years" +msgstr[0] "%(num)d jaro" +msgstr[1] "%(num)d jaroj" #, python-format -msgid "%d month" -msgid_plural "%d months" -msgstr[0] "%d monato" -msgstr[1] "%d monatoj" +msgid "%(num)d month" +msgid_plural "%(num)d months" +msgstr[0] "%(num)d monato" +msgstr[1] "%(num)d monatoj" #, python-format -msgid "%d week" -msgid_plural "%d weeks" -msgstr[0] "%d semajno" -msgstr[1] "%d semajnoj" +msgid "%(num)d week" +msgid_plural "%(num)d weeks" +msgstr[0] "%(num)d semajno" +msgstr[1] "%(num)d semajnoj" #, python-format -msgid "%d day" -msgid_plural "%d days" -msgstr[0] "%d tago" -msgstr[1] "%d tagoj" +msgid "%(num)d day" +msgid_plural "%(num)d days" +msgstr[0] "%(num)d tago" +msgstr[1] "%(num)d tagoj" #, python-format -msgid "%d hour" -msgid_plural "%d hours" -msgstr[0] "%d horo" -msgstr[1] "%d horoj" +msgid "%(num)d hour" +msgid_plural "%(num)d hours" +msgstr[0] "%(num)d horo" +msgstr[1] "%(num)d horoj" #, python-format -msgid "%d minute" -msgid_plural "%d minutes" -msgstr[0] "%d minuto" -msgstr[1] "%d minutoj" +msgid "%(num)d minute" +msgid_plural "%(num)d minutes" +msgstr[0] "%(num)d minuto" +msgstr[1] "%(num)d minutoj" msgid "Forbidden" -msgstr "Malpermesa" +msgstr "Malpermesita" msgid "CSRF verification failed. Request aborted." -msgstr "CSRF konfirmo malsukcesis. Peto ĉesigita." +msgstr "Kontrolo de CSRF malsukcesis. Peto ĉesigita." msgid "" "You are seeing this message because this HTTPS site requires a “Referer " -"header” to be sent by your Web browser, but none was sent. This header is " +"header” to be sent by your web browser, but none was sent. This header is " "required for security reasons, to ensure that your browser is not being " "hijacked by third parties." msgstr "" +"Vi vidas tiun ĉi mesaĝon ĉar ĉi-tiu HTTPS-retejo postulas ricevi la " +"kapinstrukcion “Referer” de via retumilo, sed neniu estis sendita. Tia " +"kapinstrukcio estas bezonata pro sekurecaj kialoj, por certigi ke via " +"retumilo ne agas laŭ nedezirataj instrukcioj de maliculoj." msgid "" "If you have configured your browser to disable “Referer” headers, please re-" "enable them, at least for this site, or for HTTPS connections, or for “same-" "origin” requests." msgstr "" +"Se la agordoj de via retumilo malebligas la kapinstrukciojn “Referer”, " +"bonvolu ebligi ilin por tiu ĉi retejo, aŭ por HTTPS-konektoj, aŭ por petoj " +"el sama fonto (“same-origin”)." msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." msgstr "" +"Se vi uzas la etikedon aŭ " +"sendas la kapinstrukcion “Referrer-Policy: no-referrer”, bonvolu forigi " +"ilin. La protekto kontraŭ CSRF postulas la ĉeeston de la kapinstrukcio " +"“Referer”, kaj strikte kontrolas la referencantan fonton. Se vi zorgas pri " +"privateco, uzu alternativojn kiajn por ligiloj al " +"eksteraj retejoj." msgid "" "You are seeing this message because this site requires a CSRF cookie when " "submitting forms. 
This cookie is required for security reasons, to ensure " "that your browser is not being hijacked by third parties." msgstr "" -"Vi vidas tiun mesaĝon ĉar tiu-ĉi retejo postulas CSRF kuketon sendante " -"formojn. Tiu-ĉi kuketo estas bezonata pro motivoj de sekureco, por certigi " -"ke via retumilo ne esti forrabita de triaj partioj." +"Vi vidas tiun ĉi mesaĝon ĉar ĉi-tiu retejo postulas ke CSRF-kuketo estu " +"sendita kune kun la formularoj. Tia kuketo estas bezonata pro sekurecaj " +"kialoj, por certigi ke via retumilo ne agas laŭ nedezirataj instrukcioj de " +"maliculoj." msgid "" "If you have configured your browser to disable cookies, please re-enable " "them, at least for this site, or for “same-origin” requests." msgstr "" +"Se la agordoj de via retumilo malebligas kuketojn, bonvolu ebligi ilin por " +"tiu ĉi retejo aŭ por petoj el sama fonto (“same-origin”)." msgid "More information is available with DEBUG=True." msgstr "Pliaj informoj estas videblaj kun DEBUG=True." msgid "No year specified" -msgstr "Neniu jaro specifita" +msgstr "Neniu jaro indikita" msgid "Date out of range" msgstr "Dato ne en la intervalo" msgid "No month specified" -msgstr "Neniu monato specifita" +msgstr "Neniu monato indikita" msgid "No day specified" -msgstr "Neniu tago specifita" +msgstr "Neniu tago indikita" msgid "No week specified" -msgstr "Neniu semajno specifita" +msgstr "Neniu semajno indikita" #, python-format msgid "No %(verbose_name_plural)s available" -msgstr "Neniu %(verbose_name_plural)s disponeblaj" +msgstr "Neniuj %(verbose_name_plural)s estas disponeblaj" #, python-format msgid "" "Future %(verbose_name_plural)s not available because %(class_name)s." "allow_future is False." msgstr "" -"Estonta %(verbose_name_plural)s ne disponeblas ĉar %(class_name)s." +"Estontaj %(verbose_name_plural)s ne disponeblas ĉar %(class_name)s." "allow_future estas Malvera." #, python-format msgid "Invalid date string “%(datestr)s” given format “%(format)s”" -msgstr "" +msgstr "Erarforma dato-ĉeno “%(datestr)s” se uzi la formaton “%(format)s”" #, python-format msgid "No %(verbose_name)s found matching the query" -msgstr "Neniu %(verbose_name)s trovita kongruas kun la informpeto" +msgstr "Neniu %(verbose_name)s trovita kongrua kun la informpeto" msgid "Page is not “last”, nor can it be converted to an int." -msgstr "" +msgstr "Paĝo ne estas “lasta”, nek eblas konverti ĝin en entjeron." #, python-format msgid "Invalid page (%(page_number)s): %(message)s" -msgstr "Nevalida paĝo (%(page_number)s): %(message)s" +msgstr "Malĝusta paĝo (%(page_number)s): %(message)s" #, python-format msgid "Empty list and “%(class_name)s.allow_empty” is False." msgstr "" +"La listo estas malplena dum “%(class_name)s.allow_empty” estas Malvera." msgid "Directory indexes are not allowed here." -msgstr "Dosierujaj indeksoj ne estas permesitaj tie." +msgstr "Dosierujaj indeksoj ne estas permesitaj ĉi tie." #, python-format msgid "“%(path)s” does not exist" @@ -1248,19 +1304,19 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Vi vidas ĉi tiun paĝon ĉar DEBUG = " "True estas en via agorda dosiero kaj vi ne agordis ajnan URL." 
msgid "Django Documentation" -msgstr "Djanga dokumentaro" +msgstr "Dĵanga dokumentaro" msgid "Topics, references, & how-to’s" -msgstr "" +msgstr "Temoj, referencoj, kaj instruiloj" msgid "Tutorial: A Polling App" msgstr "Instruilo: apo pri enketoj" @@ -1269,7 +1325,7 @@ msgid "Get started with Django" msgstr "Komencu kun Dĵango" msgid "Django Community" -msgstr "Djanga komunumo" +msgstr "Dĵanga komunumo" msgid "Connect, get help, or contribute" msgstr "Konektiĝu, ricevu helpon aŭ kontribuu" diff --git a/django/conf/locale/es/LC_MESSAGES/django.mo b/django/conf/locale/es/LC_MESSAGES/django.mo index cec4c433fb..6656556d1c 100644 Binary files a/django/conf/locale/es/LC_MESSAGES/django.mo and b/django/conf/locale/es/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/es/LC_MESSAGES/django.po b/django/conf/locale/es/LC_MESSAGES/django.po index ce0a858ff7..5fb94799a1 100644 --- a/django/conf/locale/es/LC_MESSAGES/django.po +++ b/django/conf/locale/es/LC_MESSAGES/django.po @@ -23,9 +23,11 @@ # Igor Támara , 2015 # Jannis Leidel , 2011 # José Luis , 2016 +# José Luis , 2016 # Josue Naaman Nistal Guerra , 2014 # Leonardo J. Caballero G. , 2011,2013 # Luigy, 2019 +# Luigy, 2019 # Marc Garcia , 2011 # Mariusz Felisiak , 2021 # mpachas , 2022 @@ -33,16 +35,19 @@ # ntrrgc , 2013 # ntrrgc , 2013 # Pablo, 2015 +# Pablo, 2015 +# Sebastián Magrí, 2013 # Sebastián Magrí, 2013 # Uriel Medina , 2020-2021 # Veronicabh , 2015 +# Veronicabh , 2015 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-05-17 05:23-0500\n" -"PO-Revision-Date: 2022-07-25 06:49+0000\n" -"Last-Translator: mpachas \n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: mpachas , 2022\n" "Language-Team: Spanish (http://www.transifex.com/django/django/language/" "es/)\n" "MIME-Version: 1.0\n" @@ -437,6 +442,9 @@ msgstr[0] "" msgstr[1] "" "Asegúrese de que este valor tenga al menos %(limit_value)d carácter(es) " "(tiene%(show_value)d)." +msgstr[2] "" +"Asegúrese de que este valor tenga al menos %(limit_value)d carácter(es) " +"(tiene%(show_value)d)." #, python-format msgid "" @@ -451,6 +459,9 @@ msgstr[0] "" msgstr[1] "" "Asegúrese de que este valor tenga menos de %(limit_value)d caracteres (tiene " "%(show_value)d)." +msgstr[2] "" +"Asegúrese de que este valor tenga menos de %(limit_value)d caracteres (tiene " +"%(show_value)d)." msgid "Enter a number." msgstr "Introduzca un número." @@ -460,12 +471,14 @@ msgid "Ensure that there are no more than %(max)s digit in total." msgid_plural "Ensure that there are no more than %(max)s digits in total." msgstr[0] "Asegúrese de que no hay más de %(max)s dígito en total." msgstr[1] "Asegúrese de que no haya más de %(max)s dígitos en total." +msgstr[2] "Asegúrese de que no haya más de %(max)s dígitos en total." #, python-format msgid "Ensure that there are no more than %(max)s decimal place." msgid_plural "Ensure that there are no more than %(max)s decimal places." msgstr[0] "Asegúrese de que no haya más de %(max)s dígito decimal." msgstr[1] "Asegúrese de que no haya más de %(max)s dígitos decimales." +msgstr[2] "Asegúrese de que no haya más de %(max)s dígitos decimales." #, python-format msgid "" @@ -476,6 +489,8 @@ msgstr[0] "" "Asegúrese de que no haya más de %(max)s dígito antes del punto decimal" msgstr[1] "" "Asegúrese de que no haya más de %(max)s dígitos antes del punto decimal." +msgstr[2] "" +"Asegúrese de que no haya más de %(max)s dígitos antes del punto decimal." 
#, python-format msgid "" @@ -768,6 +783,9 @@ msgstr[0] "" msgstr[1] "" "Asegúrese de que este nombre de archivo tenga como máximo %(max)d " "carácter(es) (tiene %(length)d)." +msgstr[2] "" +"Asegúrese de que este nombre de archivo tenga como máximo %(max)d " +"carácter(es) (tiene %(length)d)." msgid "Please either submit a file or check the clear checkbox, not both." msgstr "" @@ -819,12 +837,14 @@ msgid "Please submit at most %(num)d form." msgid_plural "Please submit at most %(num)d forms." msgstr[0] "Por favor, envíe %(num)d formulario como máximo." msgstr[1] "Por favor, envíe %(num)d formularios como máximo." +msgstr[2] "Por favor, envíe %(num)d formularios como máximo." #, python-format msgid "Please submit at least %(num)d form." msgid_plural "Please submit at least %(num)d forms." msgstr[0] "Por favor, envíe %(num)d formulario como mínimo." msgstr[1] "Por favor, envíe %(num)d formularios como mínimo." +msgstr[2] "Por favor, envíe %(num)d formularios como mínimo." msgid "Order" msgstr "Orden" @@ -897,6 +917,7 @@ msgid "%(size)d byte" msgid_plural "%(size)d bytes" msgstr[0] "%(size)d byte" msgstr[1] "%(size)d bytes" +msgstr[2] "%(size)d bytes" #, python-format msgid "%s KB" @@ -1166,36 +1187,42 @@ msgid "%(num)d year" msgid_plural "%(num)d years" msgstr[0] "%(num)d años" msgstr[1] "%(num)d años" +msgstr[2] "%(num)d años" #, python-format msgid "%(num)d month" msgid_plural "%(num)d months" msgstr[0] "%(num)d mes" msgstr[1] "%(num)d meses" +msgstr[2] "%(num)d meses" #, python-format msgid "%(num)d week" msgid_plural "%(num)d weeks" msgstr[0] "%(num)d semana" msgstr[1] "%(num)d semanas" +msgstr[2] "%(num)d semanas" #, python-format msgid "%(num)d day" msgid_plural "%(num)d days" msgstr[0] "%(num)d día" msgstr[1] "%(num)d días" +msgstr[2] "%(num)d días" #, python-format msgid "%(num)d hour" msgid_plural "%(num)d hours" msgstr[0] "%(num)d hora" msgstr[1] "%(num)d horas" +msgstr[2] "%(num)d horas" #, python-format msgid "%(num)d minute" msgid_plural "%(num)d minutes" msgstr[0] "%(num)d minutos" msgstr[1] "%(num)d minutes" +msgstr[2] "%(num)d minutes" msgid "Forbidden" msgstr "Prohibido" @@ -1219,23 +1246,23 @@ msgid "" "enable them, at least for this site, or for HTTPS connections, or for “same-" "origin” requests." msgstr "" -"Si ha configurado su navegador para deshabilitar los encabezados \"Referer" -"\", vuelva a habilitarlos, al menos para este sitio, o para conexiones " -"HTTPS, o para solicitudes del \"mismo origen\"." +"Si ha configurado su navegador para deshabilitar los encabezados " +"\"Referer\", vuelva a habilitarlos, al menos para este sitio, o para " +"conexiones HTTPS, o para solicitudes del \"mismo origen\"." msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." msgstr "" -"Si esta utilizando la etiqueta o incluyendo el encabezado \"Referrer-Policy: no-referrer\", elimínelos. " -"La protección CSRF requiere que el encabezado \"Referer\" realice una " -"comprobación estricta del referente. Si le preocupa la privacidad, utilice " -"alternativas como para los enlaces a sitios de " -"terceros." +"Si esta utilizando la etiqueta o incluyendo el encabezado \"Referrer-Policy: no-referrer\", " +"elimínelos. 
La protección CSRF requiere que el encabezado \"Referer\" " +"realice una comprobación estricta del referente. Si le preocupa la " +"privacidad, utilice alternativas como para los " +"enlaces a sitios de terceros." msgid "" "You are seeing this message because this site requires a CSRF cookie when " @@ -1251,8 +1278,8 @@ msgid "" "them, at least for this site, or for “same-origin” requests." msgstr "" "Si ha configurado su navegador para deshabilitar las cookies, vuelva a " -"habilitarlas, al menos para este sitio o para solicitudes del \"mismo origen" -"\"." +"habilitarlas, al menos para este sitio o para solicitudes del \"mismo " +"origen\"." msgid "More information is available with DEBUG=True." msgstr "Más información disponible si se establece DEBUG=True." @@ -1329,14 +1356,14 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Estás viendo esta página porque DEBUG=True está en su archivo de configuración y no ha configurado " -"ninguna URL." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True está en su archivo de configuración y no ha " +"configurado ninguna URL." msgid "Django Documentation" msgstr "Documentación de Django" diff --git a/django/conf/locale/hu/LC_MESSAGES/django.mo b/django/conf/locale/hu/LC_MESSAGES/django.mo index 858f0c4340..06465ca70b 100644 Binary files a/django/conf/locale/hu/LC_MESSAGES/django.mo and b/django/conf/locale/hu/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/hu/LC_MESSAGES/django.po b/django/conf/locale/hu/LC_MESSAGES/django.po index 3661576722..43a04a3884 100644 --- a/django/conf/locale/hu/LC_MESSAGES/django.po +++ b/django/conf/locale/hu/LC_MESSAGES/django.po @@ -8,15 +8,16 @@ # Istvan Farkas , 2019 # Jannis Leidel , 2011 # János R, 2011-2012,2014 +# János R, 2022 # Máté Őry , 2013 # Szilveszter Farkas , 2011 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2021-04-01 15:20+0000\n" -"Last-Translator: András Veres-Szentkirályi\n" +"POT-Creation-Date: 2022-05-17 05:23-0500\n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: János R, 2022\n" "Language-Team: Hungarian (http://www.transifex.com/django/django/language/" "hu/)\n" "MIME-Version: 1.0\n" @@ -214,6 +215,9 @@ msgstr "Mongol" msgid "Marathi" msgstr "Maráthi" +msgid "Malay" +msgstr "" + msgid "Burmese" msgstr "Burmai" @@ -391,6 +395,10 @@ msgstr "Bizonyosodjon meg arról, hogy az érték %(limit_value)s, vagy kisebb." msgid "Ensure this value is greater than or equal to %(limit_value)s." msgstr "Bizonyosodjon meg arról, hogy az érték %(limit_value)s, vagy nagyobb." +#, python-format +msgid "Ensure this value is a multiple of step size %(limit_value)s." +msgstr "" + #, python-format msgid "" "Ensure this value has at least %(limit_value)d character (it has " @@ -466,6 +474,10 @@ msgstr "és" msgid "%(model_name)s with this %(field_labels)s already exists." msgstr "Már létezik %(model_name)s ilyennel: %(field_labels)s." +#, python-format +msgid "Constraint “%(name)s” is violated." +msgstr "" + #, python-format msgid "Value %(value)r is not a valid choice." msgstr "%(value)r érték érvénytelen." @@ -480,8 +492,8 @@ msgstr "Ez a mező nem lehet üres." 
msgid "%(model_name)s with this %(field_label)s already exists." msgstr "Már létezik %(model_name)s ilyennel: %(field_label)s." -#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. -#. Eg: "Title must be unique for pub_date year" +#. Translators: The 'lookup_type' is one of 'date', 'year' or +#. 'month'. Eg: "Title must be unique for pub_date year" #, python-format msgid "" "%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s." @@ -781,16 +793,16 @@ msgstr "" "%(field_names)s. Ha ez többször is előfordul, érdemes bejelenteni hibaként." #, python-format -msgid "Please submit at most %d form." -msgid_plural "Please submit at most %d forms." -msgstr[0] "Legfeljebb %d űrlapot küldjön be." -msgstr[1] "Legfeljebb %d űrlapot küldjön be." +msgid "Please submit at most %(num)d form." +msgid_plural "Please submit at most %(num)d forms." +msgstr[0] "" +msgstr[1] "" #, python-format -msgid "Please submit at least %d form." -msgid_plural "Please submit at least %d forms." -msgstr[0] "Legalább %d űrlapot küldjön be." -msgstr[1] "Legalább %d űrlapot küldjön be." +msgid "Please submit at least %(num)d form." +msgid_plural "Please submit at least %(num)d forms." +msgstr[0] "" +msgstr[1] "" msgid "Order" msgstr "Sorrend" @@ -1131,57 +1143,53 @@ msgid ", " msgstr ", " #, python-format -msgid "%d year" -msgid_plural "%d years" -msgstr[0] "%d év" -msgstr[1] "%d év" +msgid "%(num)d year" +msgid_plural "%(num)d years" +msgstr[0] "" +msgstr[1] "" #, python-format -msgid "%d month" -msgid_plural "%d months" -msgstr[0] "%d hónap" -msgstr[1] "%d hónap" +msgid "%(num)d month" +msgid_plural "%(num)d months" +msgstr[0] "" +msgstr[1] "" #, python-format -msgid "%d week" -msgid_plural "%d weeks" -msgstr[0] "%d hét" -msgstr[1] "%d hét" +msgid "%(num)d week" +msgid_plural "%(num)d weeks" +msgstr[0] "" +msgstr[1] "" #, python-format -msgid "%d day" -msgid_plural "%d days" -msgstr[0] "%d nap" -msgstr[1] "%d nap" +msgid "%(num)d day" +msgid_plural "%(num)d days" +msgstr[0] "" +msgstr[1] "" #, python-format -msgid "%d hour" -msgid_plural "%d hours" -msgstr[0] "%d óra" -msgstr[1] "%d óra" +msgid "%(num)d hour" +msgid_plural "%(num)d hours" +msgstr[0] "" +msgstr[1] "" #, python-format -msgid "%d minute" -msgid_plural "%d minutes" -msgstr[0] "%d perc" -msgstr[1] "%d perc" +msgid "%(num)d minute" +msgid_plural "%(num)d minutes" +msgstr[0] "" +msgstr[1] "" msgid "Forbidden" msgstr "Hozzáférés megtagadva" msgid "CSRF verification failed. Request aborted." -msgstr "CSRF ellenőrzés sikertelen. Kérést kiszolgálása megszakítva." +msgstr "CSRF ellenőrzés sikertelen. Kérés kiszolgálása megszakítva." msgid "" "You are seeing this message because this HTTPS site requires a “Referer " -"header” to be sent by your Web browser, but none was sent. This header is " +"header” to be sent by your web browser, but none was sent. This header is " "required for security reasons, to ensure that your browser is not being " "hijacked by third parties." msgstr "" -"Ezt az üzenetet azért látja, mert ezen a HTTPS oldalon kötelező a \"Referer " -"header\", amelyet a böngészőnek kellene küldenie, de nem tette. Ez az adat " -"biztonsági okokból kötelező, ez biztosítja, hogy a böngészőt nem irányítja " -"át egy harmadik fél." msgid "" "If you have configured your browser to disable “Referer” headers, please re-" @@ -1196,8 +1204,8 @@ msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. 
The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." msgstr "" "Ha a címkét használja, vagy " "a “Referrer-Policy: no-referrer” fejlécet, kérjük távolítsa el ezeket. A " @@ -1299,14 +1307,14 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Azért látod ezt az oldalt, mert a DEBUG=True szerepel a settings fájlban, és még nem került beállításra " -"egy URL sem." +"en/%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True szerepel a settings fájlban, és még nem " +"került beállításra egy URL sem." msgid "Django Documentation" msgstr "Django Dokumentáció" diff --git a/django/conf/locale/ja/LC_MESSAGES/django.mo b/django/conf/locale/ja/LC_MESSAGES/django.mo index ed2414e37c..7dc15f67f3 100644 Binary files a/django/conf/locale/ja/LC_MESSAGES/django.mo and b/django/conf/locale/ja/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/ja/LC_MESSAGES/django.po b/django/conf/locale/ja/LC_MESSAGES/django.po index ec4bc35e32..b3e0682656 100644 --- a/django/conf/locale/ja/LC_MESSAGES/django.po +++ b/django/conf/locale/ja/LC_MESSAGES/django.po @@ -5,6 +5,7 @@ # tadasu , 2020 # Goto Hayato , 2021 # Goto Hayato , 2019 +# Hiroki Sawano, 2022 # Jannis Leidel , 2011 # Kamiyama Satoshi, 2021 # Kentaro Matsuzaki , 2015 @@ -20,8 +21,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-05-17 05:23-0500\n" -"PO-Revision-Date: 2021-12-25 06:49+0000\n" -"Last-Translator: Kamiyama Satoshi\n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: Hiroki Sawano, 2022\n" "Language-Team: Japanese (http://www.transifex.com/django/django/language/" "ja/)\n" "MIME-Version: 1.0\n" @@ -464,7 +465,7 @@ msgstr "この %(field_labels)s を持った %(model_name)s が既に存在し #, python-format msgid "Constraint “%(name)s” is violated." -msgstr "" +msgstr "制約 “%(name)s” に違反しています。" #, python-format msgid "Value %(value)r is not a valid choice." @@ -1177,8 +1178,8 @@ msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." msgstr "" "もし タグを使用しているか " "“Referrer-Policy: no-referrer” ヘッダを含んでいる場合は削除してください。" @@ -1276,13 +1277,14 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." 
msgstr "" "このページは、設定ファイルでDEBUG=Trueが指定され、何もURLが設定されていない時に表示されます。" +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=Trueが指定され、何もURLが設定されていない時に表示" +"されます。" msgid "Django Documentation" msgstr "Django ドキュメント" diff --git a/django/conf/locale/pt_BR/LC_MESSAGES/django.mo b/django/conf/locale/pt_BR/LC_MESSAGES/django.mo index df18ed4399..43a505d7e5 100644 Binary files a/django/conf/locale/pt_BR/LC_MESSAGES/django.mo and b/django/conf/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/pt_BR/LC_MESSAGES/django.po b/django/conf/locale/pt_BR/LC_MESSAGES/django.po index ba76e54e71..29fdcee63e 100644 --- a/django/conf/locale/pt_BR/LC_MESSAGES/django.po +++ b/django/conf/locale/pt_BR/LC_MESSAGES/django.po @@ -10,20 +10,23 @@ # Camilo B. Moreira , 2017 # Carlos Leite , 2020 # Carlos Leite , 2016,2019 -# Filipe Cifali Stangler , 2016 +# Filipe Cifali , 2016 # Claudio Rogerio Carvalho Filho , 2020 # dudanogueira , 2012 # dudanogueira , 2019 # Elyézer Rezende , 2013 # Fábio C. Barrionuevo da Luz , 2014-2015 # Felipe Rodrigues , 2016 -# Filipe Cifali Stangler , 2019 +# Filipe Cifali , 2019 # Gladson , 2013 # semente, 2011-2014 +# Guilherme, 2022 +# Heron Fonsaca, 2022 # Igor Cavalcante , 2017 # Jannis Leidel , 2011 # Lucas Infante , 2015 # Luiz Boaretto , 2017 +# Marssal Jr. , 2022 # Marcelo Moro Brondani , 2018 # Mariusz Felisiak , 2021 # Rafael Fontenelle , 2021-2022 @@ -38,8 +41,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-05-17 05:23-0500\n" -"PO-Revision-Date: 2022-07-25 06:49+0000\n" -"Last-Translator: Rafael Fontenelle \n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: Heron Fonsaca, 2022\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/django/django/" "language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -417,6 +420,8 @@ msgstr "Certifique-se que este valor seja maior ou igual a %(limit_value)s." #, python-format msgid "Ensure this value is a multiple of step size %(limit_value)s." msgstr "" +"Certifique-se que este valor seja múltiplo do tamanho do passo " +"%(limit_value)s." #, python-format msgid "" @@ -431,6 +436,9 @@ msgstr[0] "" msgstr[1] "" "Certifique-se de que o valor tenha no mínimo %(limit_value)d caracteres (ele " "possui %(show_value)d)." +msgstr[2] "" +"Certifique-se de que o valor tenha no mínimo %(limit_value)d caracteres (ele " +"possui %(show_value)d)." #, python-format msgid "" @@ -445,6 +453,9 @@ msgstr[0] "" msgstr[1] "" "Certifique-se de que o valor tenha no máximo %(limit_value)d caracteres (ele " "possui %(show_value)d)." +msgstr[2] "" +"Certifique-se de que o valor tenha no máximo %(limit_value)d caracteres (ele " +"possui %(show_value)d)." msgid "Enter a number." msgstr "Informe um número." @@ -454,12 +465,14 @@ msgid "Ensure that there are no more than %(max)s digit in total." msgid_plural "Ensure that there are no more than %(max)s digits in total." msgstr[0] "Certifique-se de que não tenha mais de %(max)s dígito no total." msgstr[1] "Certifique-se de que não tenha mais de %(max)s dígitos no total." +msgstr[2] "Certifique-se de que não tenha mais de %(max)s dígitos no total." #, python-format msgid "Ensure that there are no more than %(max)s decimal place." msgid_plural "Ensure that there are no more than %(max)s decimal places." msgstr[0] "Certifique-se de que não tenha mais de %(max)s casa decimal." msgstr[1] "Certifique-se de que não tenha mais de %(max)s casas decimais." 
+msgstr[2] "Certifique-se de que não tenha mais de %(max)s casas decimais." #, python-format msgid "" @@ -471,6 +484,9 @@ msgstr[0] "" msgstr[1] "" "Certifique-se de que não tenha mais de %(max)s dígitos antes do ponto " "decimal." +msgstr[2] "" +"Certifique-se de que não tenha mais de %(max)s dígitos antes do ponto " +"decimal." #, python-format msgid "" @@ -492,7 +508,7 @@ msgstr "%(model_name)s com este %(field_labels)s já existe." #, python-format msgid "Constraint “%(name)s” is violated." -msgstr "" +msgstr "Restrição \"%(name)s\" foi violada." #, python-format msgid "Value %(value)r is not a valid choice." @@ -690,7 +706,7 @@ msgid "A JSON object" msgstr "Um objeto JSON" msgid "Value must be valid JSON." -msgstr "o Valor deve ser um JSON válido" +msgstr "O valor deve ser um JSON válido." #, python-format msgid "%(model)s instance with %(field)s %(value)r does not exist." @@ -760,6 +776,9 @@ msgstr[0] "" msgstr[1] "" "Certifique-se de que o arquivo tenha no máximo %(max)d caracteres (ele " "possui %(length)d)." +msgstr[2] "" +"Certifique-se de que o arquivo tenha no máximo %(max)d caracteres (ele " +"possui %(length)d)." msgid "Please either submit a file or check the clear checkbox, not both." msgstr "Por favor, envie um arquivo ou marque o checkbox, mas não ambos." @@ -807,14 +826,16 @@ msgstr "" #, python-format msgid "Please submit at most %(num)d form." msgid_plural "Please submit at most %(num)d forms." -msgstr[0] "" -msgstr[1] "" +msgstr[0] "Por favor, envie no máximo %(num)d formulário." +msgstr[1] "Por favor, envie no máximo %(num)d formulários." +msgstr[2] "Por favor, envie no máximo %(num)d formulários." #, python-format msgid "Please submit at least %(num)d form." msgid_plural "Please submit at least %(num)d forms." -msgstr[0] "" -msgstr[1] "" +msgstr[0] "Por favor, envie ao menos %(num)d formulário." +msgstr[1] "Por favor, envie ao menos %(num)d formulários." +msgstr[2] "Por favor, envie ao menos %(num)d formulários." msgid "Order" msgstr "Ordem" @@ -887,6 +908,7 @@ msgid "%(size)d byte" msgid_plural "%(size)d bytes" msgstr[0] "%(size)d byte" msgstr[1] "%(size)d bytes" +msgstr[2] "%(size)d bytes" #, python-format msgid "%s KB" @@ -1156,36 +1178,42 @@ msgid "%(num)d year" msgid_plural "%(num)d years" msgstr[0] "%(num)d ano" msgstr[1] "%(num)d anos" +msgstr[2] "%(num)d anos" #, python-format msgid "%(num)d month" msgid_plural "%(num)d months" msgstr[0] "%(num)d mês" msgstr[1] "%(num)d meses" +msgstr[2] "%(num)d meses" #, python-format msgid "%(num)d week" msgid_plural "%(num)d weeks" msgstr[0] "%(num)d semana" msgstr[1] "%(num)d semanas" +msgstr[2] "%(num)d semanas" #, python-format msgid "%(num)d day" msgid_plural "%(num)d days" msgstr[0] "%(num)d dia" msgstr[1] "%(num)d dias" +msgstr[2] "%(num)d dias" #, python-format msgid "%(num)d hour" msgid_plural "%(num)d hours" msgstr[0] "%(num)d hora" msgstr[1] "%(num)d horas" +msgstr[2] "%(num)d horas" #, python-format msgid "%(num)d minute" msgid_plural "%(num)d minutes" msgstr[0] "%(num)d minuto" msgstr[1] "%(num)d minutos" +msgstr[2] "%(num)d minutos" msgid "Forbidden" msgstr "Proibido" @@ -1217,8 +1245,8 @@ msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." 
msgstr "" "Se estiver usando a tag ou " "incluindo o cabeçalho “Referrer-Policy: no-referrer”, por favor remova-os. A " @@ -1318,14 +1346,14 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Você está vendo esta página pois possui DEBUG=True no seu arquivo de configurações e não configurou nenhuma " -"URL." +"com/en/%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True no seu arquivo de configurações e não " +"configurou nenhuma URL." msgid "Django Documentation" msgstr "Documentação do Django" diff --git a/django/conf/locale/ru/LC_MESSAGES/django.mo b/django/conf/locale/ru/LC_MESSAGES/django.mo index 742b2ce9bf..1aa69acc78 100644 Binary files a/django/conf/locale/ru/LC_MESSAGES/django.mo and b/django/conf/locale/ru/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/ru/LC_MESSAGES/django.po b/django/conf/locale/ru/LC_MESSAGES/django.po index 7c202fe6e2..567b39aae7 100644 --- a/django/conf/locale/ru/LC_MESSAGES/django.po +++ b/django/conf/locale/ru/LC_MESSAGES/django.po @@ -19,24 +19,25 @@ # Panasoft, 2021 # Вася Аникин , 2017 # SeryiMysh , 2020 -# Алексей Борискин , 2013-2017,2019-2020 -# Дмитрий Шатера , 2016,2018 +# Алексей Борискин , 2013-2017,2019-2020,2022 +# Bobsans , 2016,2018 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-11-18 21:19+0000\n" -"Last-Translator: Transifex Bot <>\n" +"POT-Creation-Date: 2022-05-17 05:23-0500\n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: Алексей Борискин , " +"2013-2017,2019-2020,2022\n" "Language-Team: Russian (http://www.transifex.com/django/django/language/" "ru/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || " +"(n%100>=11 && n%100<=14)? 2 : 3);\n" msgid "Afrikaans" msgstr "Бурский" @@ -228,7 +229,7 @@ msgid "Marathi" msgstr "Маратхи" msgid "Malay" -msgstr "" +msgstr "Малайский" msgid "Burmese" msgstr "Бирманский" @@ -406,6 +407,10 @@ msgstr "Убедитесь, что это значение меньше либо msgid "Ensure this value is greater than or equal to %(limit_value)s." msgstr "Убедитесь, что это значение больше либо равно %(limit_value)s." +#, python-format +msgid "Ensure this value is a multiple of step size %(limit_value)s." +msgstr "Убедитесь, что это значение кратно числу %(limit_value)s." + #, python-format msgid "" "Ensure this value has at least %(limit_value)d character (it has " @@ -494,6 +499,10 @@ msgid "%(model_name)s with this %(field_labels)s already exists." msgstr "" "%(model_name)s с такими значениями полей %(field_labels)s уже существует." +#, python-format +msgid "Constraint “%(name)s” is violated." +msgstr "Нарушено ограничение \"%(name)s\"." + #, python-format msgid "Value %(value)r is not a valid choice." msgstr "Значения %(value)r нет среди допустимых вариантов." 
@@ -508,8 +517,8 @@ msgstr "Это поле не может быть пустым." msgid "%(model_name)s with this %(field_label)s already exists." msgstr "%(model_name)s с таким %(field_label)s уже существует." -#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. -#. Eg: "Title must be unique for pub_date year" +#. Translators: The 'lookup_type' is one of 'date', 'year' or +#. 'month'. Eg: "Title must be unique for pub_date year" #, python-format msgid "" "%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s." @@ -817,20 +826,20 @@ msgstr "" "отправить отчет об ошибке." #, python-format -msgid "Please submit at most %d form." -msgid_plural "Please submit at most %d forms." -msgstr[0] "Пожалуйста, отправляйте не больше %d формы." -msgstr[1] "Пожалуйста, отправляйте не больше %d форм." -msgstr[2] "Пожалуйста, отправляйте не больше %d форм." -msgstr[3] "Пожалуйста, отправляйте не больше %d форм." +msgid "Please submit at most %(num)d form." +msgid_plural "Please submit at most %(num)d forms." +msgstr[0] "Пожалуйста, отправьте не больше %(num)d формы." +msgstr[1] "Пожалуйста, отправьте не больше %(num)d форм." +msgstr[2] "Пожалуйста, отправьте не больше %(num)d форм." +msgstr[3] "Пожалуйста, отправьте не больше %(num)d форм." #, python-format -msgid "Please submit at least %d form." -msgid_plural "Please submit at least %d forms." -msgstr[0] "Пожалуйста, отправьте хотя бы %d форму." -msgstr[1] "Пожалуйста, отправьте хотя бы %d форм." -msgstr[2] "Пожалуйста, отправьте хотя бы %d форм." -msgstr[3] "Пожалуйста, отправьте хотя бы %d форм." +msgid "Please submit at least %(num)d form." +msgid_plural "Please submit at least %(num)d forms." +msgstr[0] "Пожалуйста, отправьте %(num)d форму." +msgstr[1] "Пожалуйста, отправьте %(num)d формы." +msgstr[2] "Пожалуйста, отправьте %(num)d форм." +msgstr[3] "Пожалуйста, отправьте %(num)d форм." msgid "Order" msgstr "Порядок" @@ -1174,50 +1183,50 @@ msgstr ", " #, python-format msgid "%(num)d year" msgid_plural "%(num)d years" -msgstr[0] "" -msgstr[1] "" -msgstr[2] "" -msgstr[3] "" +msgstr[0] "%(num)d год" +msgstr[1] "%(num)d года" +msgstr[2] "%(num)d лет" +msgstr[3] "%(num)d лет" #, python-format msgid "%(num)d month" msgid_plural "%(num)d months" -msgstr[0] "" -msgstr[1] "" -msgstr[2] "" -msgstr[3] "" +msgstr[0] "%(num)d месяц" +msgstr[1] "%(num)d месяца" +msgstr[2] "%(num)d месяцев" +msgstr[3] "%(num)d месяцев" #, python-format msgid "%(num)d week" msgid_plural "%(num)d weeks" -msgstr[0] "" -msgstr[1] "" -msgstr[2] "" -msgstr[3] "" +msgstr[0] "%(num)d неделя" +msgstr[1] "%(num)d недели" +msgstr[2] "%(num)d недель" +msgstr[3] "%(num)d недель" #, python-format msgid "%(num)d day" msgid_plural "%(num)d days" -msgstr[0] "" -msgstr[1] "" -msgstr[2] "" -msgstr[3] "" +msgstr[0] "%(num)d день" +msgstr[1] "%(num)d дня" +msgstr[2] "%(num)d дней" +msgstr[3] "%(num)d дней" #, python-format msgid "%(num)d hour" msgid_plural "%(num)d hours" -msgstr[0] "" -msgstr[1] "" -msgstr[2] "" -msgstr[3] "" +msgstr[0] "%(num)d час" +msgstr[1] "%(num)d часа" +msgstr[2] "%(num)d часов" +msgstr[3] "%(num)d часов" #, python-format msgid "%(num)d minute" msgid_plural "%(num)d minutes" -msgstr[0] "" -msgstr[1] "" -msgstr[2] "" -msgstr[3] "" +msgstr[0] "%(num)d минута" +msgstr[1] "%(num)d минуты" +msgstr[2] "%(num)d минут" +msgstr[3] "%(num)d минут" msgid "Forbidden" msgstr "Ошибка доступа" @@ -1231,6 +1240,11 @@ msgid "" "required for security reasons, to ensure that your browser is not being " "hijacked by third parties." 
msgstr "" +"Вы видите это сообщение потому что этот сайт работает по защищённому " +"протоколу HTTPS и требует, чтобы при запросе вашим браузером был передан " +"заголовок \"Referer\", но он не был передан. Этот заголовок необходим из " +"соображений безопасности: мы должны убедиться что запрос оправляете именно " +"вы." msgid "" "If you have configured your browser to disable “Referer” headers, please re-" @@ -1247,15 +1261,15 @@ msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." msgstr "" -"Если Вы используете HTML-тэг или добавили HTTP-заголовок “Referrer-Policy: no-referrer”, пожалуйста " -"удалите их. CSRF защите необходим заголовок “Referer” для строгой проверки " -"адреса ссылающейся страницы. Если Вы беспокоитесь о приватности, используйте " -"альтернативы, например , для ссылок на сайты третьих " -"лиц." +"Если Вы используете HTML-тэг или добавили HTTP-заголовок “Referrer-Policy: no-referrer”, " +"пожалуйста удалите их. CSRF защите необходим заголовок “Referer” для строгой " +"проверки адреса ссылающейся страницы. Если Вы беспокоитесь о приватности, " +"используйте альтернативы, например , для ссылок на " +"сайты третьих лиц." msgid "" "You are seeing this message because this site requires a CSRF cookie when " @@ -1357,13 +1371,13 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Вы видите данную страницу, потому что указали DEBUG=True в файле настроек и не настроили ни одного " +"djangoproject.com/en/%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True в файле настроек и не настроили ни одного " "обработчика URL-адресов." msgid "Django Documentation" diff --git a/django/conf/locale/tk/LC_MESSAGES/django.mo b/django/conf/locale/tk/LC_MESSAGES/django.mo index 4647125cc5..ec535df66b 100644 Binary files a/django/conf/locale/tk/LC_MESSAGES/django.mo and b/django/conf/locale/tk/LC_MESSAGES/django.mo differ diff --git a/django/conf/locale/tk/LC_MESSAGES/django.po b/django/conf/locale/tk/LC_MESSAGES/django.po index 5f4cc18746..ad1713daa5 100644 --- a/django/conf/locale/tk/LC_MESSAGES/django.po +++ b/django/conf/locale/tk/LC_MESSAGES/django.po @@ -2,15 +2,16 @@ # # Translators: # Mariusz Felisiak , 2020-2021 -# Resulkary , 2020 +# Resul , 2020 +# Resul , 2022 # Welbeck Garli , 2020 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-11-24 16:30+0000\n" -"Last-Translator: Mariusz Felisiak \n" +"POT-Creation-Date: 2022-05-17 05:23-0500\n" +"PO-Revision-Date: 2022-05-25 06:49+0000\n" +"Last-Translator: Resul , 2022\n" "Language-Team: Turkmen (http://www.transifex.com/django/django/language/" "tk/)\n" "MIME-Version: 1.0\n" @@ -209,7 +210,7 @@ msgid "Marathi" msgstr "Marasi" msgid "Malay" -msgstr "" +msgstr "Malaý" msgid "Burmese" msgstr "Birma" @@ -325,7 +326,7 @@ msgstr "Syndikasiýa" #. Translators: String used to replace omitted page numbers in elided page #. 
range generated by paginators, e.g. [1, 2, '…', 5, 6, 7, '…', 9, 10]. msgid "…" -msgstr "" +msgstr "..." msgid "That page number is not an integer" msgstr "Ol sahypanyň sany bitewi san däl" @@ -389,6 +390,10 @@ msgid "Ensure this value is greater than or equal to %(limit_value)s." msgstr "" "Maglumatyň %(limit_value)s bahasyndan köp ýa-da deň bolmagyny üpjün ediň." +#, python-format +msgid "Ensure this value is a multiple of step size %(limit_value)s." +msgstr "" + #, python-format msgid "" "Ensure this value has at least %(limit_value)d character (it has " @@ -458,6 +463,10 @@ msgstr "we" msgid "%(model_name)s with this %(field_labels)s already exists." msgstr "%(field_labels)s bilen baglanyşykly %(model_name)s eýýäm bar." +#, python-format +msgid "Constraint “%(name)s” is violated." +msgstr "" + #, python-format msgid "Value %(value)r is not a valid choice." msgstr "%(value)r dogry saýlaw däl." @@ -472,8 +481,8 @@ msgstr "Bu meýdan boş bolup bilmez." msgid "%(model_name)s with this %(field_label)s already exists." msgstr "%(field_label)s bilen baglanyşykly %(model_name)s eýýäm bar." -#. Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. -#. Eg: "Title must be unique for pub_date year" +#. Translators: The 'lookup_type' is one of 'date', 'year' or +#. 'month'. Eg: "Title must be unique for pub_date year" #, python-format msgid "" "%(field_label)s must be unique for %(date_field_label)s %(lookup_type)s." @@ -766,16 +775,16 @@ msgid "" msgstr "" #, python-format -msgid "Please submit at most %d form." -msgid_plural "Please submit at most %d forms." -msgstr[0] "" -msgstr[1] "" +msgid "Please submit at most %(num)d form." +msgid_plural "Please submit at most %(num)d forms." +msgstr[0] "Haýyş, iň köp %(num)d form tabşyryň." +msgstr[1] "Haýyş, iň köp %(num)d form tabşyryň." #, python-format -msgid "Please submit at least %d form." -msgid_plural "Please submit at least %d forms." -msgstr[0] "" -msgstr[1] "" +msgid "Please submit at least %(num)d form." +msgid_plural "Please submit at least %(num)d forms." +msgstr[0] "Haýyş, azyndan %(num)d form tabşyryň." +msgstr[1] "Haýyş, azyndan %(num)d form tabşyryň." msgid "Order" msgstr "Tertip" @@ -1114,38 +1123,38 @@ msgstr "\"" #, python-format msgid "%(num)d year" msgid_plural "%(num)d years" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "%(num)d ýyl" +msgstr[1] "%(num)d ýyl" #, python-format msgid "%(num)d month" msgid_plural "%(num)d months" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "%(num)d aý" +msgstr[1] "%(num)d aý" #, python-format msgid "%(num)d week" msgid_plural "%(num)d weeks" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "%(num)d hepde" +msgstr[1] "%(num)d hepde" #, python-format msgid "%(num)d day" msgid_plural "%(num)d days" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "%(num)d gün" +msgstr[1] "%(num)d gün" #, python-format msgid "%(num)d hour" msgid_plural "%(num)d hours" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "%(num)d sagat" +msgstr[1] "%(num)d sagat" #, python-format msgid "%(num)d minute" msgid_plural "%(num)d minutes" -msgstr[0] "" -msgstr[1] "" +msgstr[0] "%(num)d minut" +msgstr[1] "%(num)d minut" msgid "Forbidden" msgstr "Gadagan " @@ -1172,8 +1181,8 @@ msgid "" "If you are using the tag or " "including the “Referrer-Policy: no-referrer” header, please remove them. The " "CSRF protection requires the “Referer” header to do strict referer checking. " -"If you’re concerned about privacy, use alternatives like for links to third-party sites." +"If you’re concerned about privacy, use alternatives like for links to third-party sites." 
msgstr "" "Egerde siz diýen bellik " "ýada \"Referrer-Policy: no-referrer\" header ulanýan bolsaňyz, olary " @@ -1195,8 +1204,8 @@ msgid "" "If you have configured your browser to disable cookies, please re-enable " "them, at least for this site, or for “same-origin” requests." msgstr "" -"Brauzeriňizde kukileri öçüren bolsaňyz, iň bolmanda şu sahypa ýa-da \"meňzeş" -"\" talaplar üçin olary täzeden açyň." +"Brauzeriňizde kukileri öçüren bolsaňyz, iň bolmanda şu sahypa ýa-da " +"\"meňzeş\" talaplar üçin olary täzeden açyň." msgid "More information is available with DEBUG=True." msgstr "Has giňişleýin maglumat DEBUG=True bilen elýeterlidir." @@ -1273,13 +1282,13 @@ msgstr "" #, python-format msgid "" "You are seeing this page because DEBUG=True is in your settings file and you have not configured any " -"URLs." +"%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True is in your settings file and you have not " +"configured any URLs." msgstr "" "Bu sahypany görýärsiňiz, sebäbi sazlamalar faýlyňyzda DEBUG=True we hiç hili URL düzmediňiz." +"djangoproject.com/en/%(version)s/ref/settings/#debug\" target=\"_blank\" " +"rel=\"noopener\">DEBUG=True we hiç hili URL düzmediňiz." msgid "Django Documentation" msgstr "Django resminamalary" diff --git a/django/conf/project_template/project_name/urls.py-tpl b/django/conf/project_template/project_name/urls.py-tpl index e23d6a92ba..622f79ef4a 100644 --- a/django/conf/project_template/project_name/urls.py-tpl +++ b/django/conf/project_template/project_name/urls.py-tpl @@ -1,4 +1,5 @@ -"""{{ project_name }} URL Configuration +""" +URL configuration for {{ project_name }} project. The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/{{ docs_version }}/topics/http/urls/ diff --git a/django/contrib/admin/locale/bg/LC_MESSAGES/django.mo b/django/contrib/admin/locale/bg/LC_MESSAGES/django.mo index 7b5d5da713..8f5b937e8f 100644 Binary files a/django/contrib/admin/locale/bg/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/bg/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/bg/LC_MESSAGES/django.po b/django/contrib/admin/locale/bg/LC_MESSAGES/django.po index e9a216b03a..b782fc43a2 100644 --- a/django/contrib/admin/locale/bg/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/bg/LC_MESSAGES/django.po @@ -14,8 +14,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-05-17 05:10-0500\n" -"PO-Revision-Date: 2022-07-25 07:05+0000\n" -"Last-Translator: arneatec \n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: arneatec , 2022\n" "Language-Team: Bulgarian (http://www.transifex.com/django/django/language/" "bg/)\n" "MIME-Version: 1.0\n" @@ -295,7 +295,7 @@ msgid "Django site admin" msgstr "Django административен сайт" msgid "Django administration" -msgstr "Администрация на Django" +msgstr "Django администрация" msgid "Site administration" msgstr "Администрация на сайта" diff --git a/django/contrib/admin/locale/eo/LC_MESSAGES/django.mo b/django/contrib/admin/locale/eo/LC_MESSAGES/django.mo index b9a3fd95be..b05f1212de 100644 Binary files a/django/contrib/admin/locale/eo/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/eo/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/eo/LC_MESSAGES/django.po b/django/contrib/admin/locale/eo/LC_MESSAGES/django.po index 5a74272df0..ddc5901fdd 100644 --- a/django/contrib/admin/locale/eo/LC_MESSAGES/django.po +++ 
b/django/contrib/admin/locale/eo/LC_MESSAGES/django.po @@ -7,15 +7,16 @@ # Dinu Gherman , 2011 # kristjan , 2012 # Matthieu Desplantes , 2021 -# Nikolay Korotkiy , 2017 +# Meiyer , 2022 +# Nikolay Korotkiy , 2017 # Adamo Mesha , 2012 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2021-04-13 08:23+0000\n" -"Last-Translator: Matthieu Desplantes \n" +"POT-Creation-Date: 2022-05-17 05:10-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -86,8 +87,8 @@ msgid "" "Please enter the correct %(username)s and password for a staff account. Note " "that both fields may be case-sensitive." msgstr "" -"Bonvolu eniri la ĝustan %(username)s-n kaj pasvorton por personara konto. " -"Notu, ke ambaŭ kampoj povas esti usklecodistinga." +"Bonvolu enigi la ĝustajn %(username)sn kaj pasvorton por personara konto. " +"Notu, ke ambaŭ kampoj povas esti uskleco-distingaj." msgid "Action:" msgstr "Ago:" @@ -97,7 +98,7 @@ msgid "Add another %(verbose_name)s" msgstr "Aldoni alian %(verbose_name)sn" msgid "Remove" -msgstr "Forigu" +msgstr "Forigi" msgid "Addition" msgstr "Aldono" @@ -139,22 +140,22 @@ msgstr "protokoleroj" #, python-format msgid "Added “%(object)s”." -msgstr "Aldonis “%(object)s”" +msgstr "Aldono de “%(object)s”" #, python-format msgid "Changed “%(object)s” — %(changes)s" -msgstr "" +msgstr "Ŝanĝo de “%(object)s” — %(changes)s" #, python-format msgid "Deleted “%(object)s.”" -msgstr "" +msgstr "Forigo de “%(object)s”" msgid "LogEntry Object" msgstr "Protokolera objekto" #, python-brace-format msgid "Added {name} “{object}”." -msgstr "" +msgstr "Aldonita(j) {name} “{object}”." msgid "Added." msgstr "Aldonita." @@ -164,15 +165,15 @@ msgstr "kaj" #, python-brace-format msgid "Changed {fields} for {name} “{object}”." -msgstr "" +msgstr "Ŝanĝita(j) {fields} por {name} “{object}”." #, python-brace-format msgid "Changed {fields}." -msgstr "Ŝanĝita {fields}." +msgstr "Ŝanĝita(j) {fields}." #, python-brace-format msgid "Deleted {name} “{object}”." -msgstr "" +msgstr "Forigita(j) {name} “{object}”." msgid "No fields changed." msgstr "Neniu kampo ŝanĝita." @@ -185,7 +186,7 @@ msgstr "" #, python-brace-format msgid "The {name} “{obj}” was added successfully." -msgstr "" +msgstr "La {name} “{obj}” estis sukcese aldonita(j)." msgid "You may edit it again below." msgstr "Eblas redakti ĝin sube." @@ -218,15 +219,14 @@ msgid "" "Items must be selected in order to perform actions on them. No items have " "been changed." msgstr "" -"Elementoj devas esti elektitaj por elfari agojn sur ilin. Neniu elemento " -"estis ŝanĝita." +"Elementoj devas esti elektitaj por agi je ili. Neniu elemento estis ŝanĝita." msgid "No action selected." msgstr "Neniu ago elektita." #, python-format msgid "The %(name)s “%(obj)s” was deleted successfully." -msgstr "" +msgstr "La %(name)s “%(obj)s” estis sukcese forigita(j)." #, python-format msgid "%(name)s with ID “%(key)s” doesn’t exist. Perhaps it was deleted?" @@ -267,8 +267,9 @@ msgstr "0 el %(cnt)s elektita" msgid "Change history: %s" msgstr "Ŝanĝa historio: %s" -#. Translators: Model verbose name and instance representation, -#. suitable to be an item in a list. +#. Translators: Model verbose name and instance +#. representation, suitable to be an item in a +#. list. 
#, python-format msgid "%(class_name)s %(instance)s" msgstr "%(class_name)s %(instance)s" @@ -282,10 +283,10 @@ msgstr "" "protektitajn rilatajn objektojn: %(related_objects)s" msgid "Django site admin" -msgstr "Djanga reteja administrado" +msgstr "Dĵanga reteja administrado" msgid "Django administration" -msgstr "Djanga administrado" +msgstr "Dĵanga administrado" msgid "Site administration" msgstr "Reteja administrado" @@ -295,7 +296,7 @@ msgstr "Ensaluti" #, python-format msgid "%(app)s administration" -msgstr "%(app)s administrado" +msgstr "Administrado de %(app)s" msgid "Page not found" msgstr "Paĝo ne trovita" @@ -321,7 +322,7 @@ msgid "" msgstr "" msgid "Run the selected action" -msgstr "Lanĉi la elektita agon" +msgstr "Lanĉi la elektitan agon" msgid "Go" msgstr "Ek" @@ -338,7 +339,7 @@ msgstr "Viŝi elekton" #, python-format msgid "Models in the %(name)s application" -msgstr "Modeloj en la %(name)s aplikaĵo" +msgstr "Modeloj en la aplikaĵo “%(name)s”" msgid "Add" msgstr "Aldoni" @@ -511,11 +512,17 @@ msgstr "" "paĝon. Ĉu vi ŝatus ensaluti per alia konto?" msgid "Forgotten your password or username?" -msgstr "Ĉu vi forgesis vian pasvorton aŭ salutnomo?" +msgstr "Ĉu vi forgesis vian pasvorton aŭ vian salutnomon?" msgid "Toggle navigation" msgstr "Ŝalti navigadon" +msgid "Start typing to filter…" +msgstr "" + +msgid "Filter navigation items" +msgstr "" + msgid "Date/time" msgstr "Dato/horo" @@ -525,6 +532,12 @@ msgstr "Uzanto" msgid "Action" msgstr "Ago" +msgid "entry" +msgstr "" + +msgid "entries" +msgstr "" + msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " "admin site." @@ -581,8 +594,12 @@ msgstr "Aldoni alian %(model)sn" msgid "Delete selected %(model)s" msgstr "Forigi elektitan %(model)sn" -msgid "Thanks for spending some quality time with the Web site today." -msgstr "Dankon pro pasigo de kvalita tempon kun la retejo hodiaŭ." +#, python-format +msgid "View selected %(model)s" +msgstr "" + +msgid "Thanks for spending some quality time with the web site today." +msgstr "" msgid "Log in again" msgstr "Ensaluti denove" @@ -607,7 +624,7 @@ msgid "Password reset" msgstr "Pasvorta rekomencigo" msgid "Your password has been set. You may go ahead and log in now." -msgstr "Via pasvorto estis ŝanĝita. Vi povas iri antaŭen kaj ensaluti nun." +msgstr "Via pasvorto estis ŝanĝita. Vi povas ensaluti nun." msgid "Password reset confirmation" msgstr "Konfirmo de restarigo de pasvorto" @@ -643,7 +660,7 @@ msgid "" "If you don’t receive an email, please make sure you’ve entered the address " "you registered with, and check your spam folder." msgstr "" -"Se vi ne ricevas retmesaĝon, bonvolu certiĝi, ke vi entajpis la adreson, per " +"Se vi ne ricevas retmesaĝon, bonvole certiĝu ke vi entajpis la adreson per " "kiu vi registriĝis, kaj kontrolu en via spamujo." #, python-format diff --git a/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.mo b/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.mo index 9b6aa8f21e..6e86ac2d4e 100644 Binary files a/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.mo and b/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.mo differ diff --git a/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.po b/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.po index f101319a4c..db9991387c 100644 --- a/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.po +++ b/django/contrib/admin/locale/eo/LC_MESSAGES/djangojs.po @@ -1,17 +1,18 @@ # This file is distributed under the same license as the Django package. 
# # Translators: -# Baptiste Darthenay , 2012 -# Baptiste Darthenay , 2014-2016 -# Jaffa McNeill , 2011 +# Batist D 🐍 , 2012 +# Batist D 🐍 , 2014-2016 +# 977db45bb2d7151f88325d4fbeca189e_848074d <3d1ba07956d05291bf7c987ecea0a7ef_13052>, 2011 +# Meiyer , 2022 # Adamo Mesha , 2012 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2018-05-17 11:50+0200\n" -"PO-Revision-Date: 2017-09-19 16:41+0000\n" -"Last-Translator: Baptiste Darthenay \n" +"POT-Creation-Date: 2022-05-17 05:26-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -22,61 +23,59 @@ msgstr "" #, javascript-format msgid "Available %s" -msgstr "Disponebla %s" +msgstr "Disponeblaj %s" #, javascript-format msgid "" "This is the list of available %s. You may choose some by selecting them in " "the box below and then clicking the \"Choose\" arrow between the two boxes." msgstr "" -"Tio ĉi estas la listo de disponeblaj %s. Vi povas forigi kelkajn elektante " -"ilin en la suba skatolo kaj tiam klakante la \"Elekti\" sagon inter la du " -"skatoloj." +"Tio ĉi estas la listo de disponeblaj %s. Vi povas aktivigi kelkajn markante " +"ilin en la suba kesto kaj klakante la sagon “Elekti” inter la du kestoj." #, javascript-format msgid "Type into this box to filter down the list of available %s." -msgstr "Entipu en ĉi-tiu skatolo por filtri la liston de haveblaj %s." +msgstr "Tajpu en ĉi-tiu skatolo por filtri la liston de haveblaj %s." msgid "Filter" msgstr "Filtru" msgid "Choose all" -msgstr "Elekti ĉiuj" +msgstr "Elekti ĉiujn" #, javascript-format msgid "Click to choose all %s at once." -msgstr "Klaku por tuj elekti ĉiuj %s." +msgstr "Klaku por tuj elekti ĉiujn %sn." msgid "Choose" msgstr "Elekti" msgid "Remove" -msgstr "Forigu" +msgstr "Forigi" #, javascript-format msgid "Chosen %s" -msgstr "Elektita %s" +msgstr "Elektitaj %s" #, javascript-format msgid "" "This is the list of chosen %s. You may remove some by selecting them in the " "box below and then clicking the \"Remove\" arrow between the two boxes." msgstr "" -"Tio ĉi estas la listo de elektitaj %s. Vi povas forigi kelkajn elektante " -"ilin en la suba skatolo kaj tiam klakante la \"Forigi\" sagon inter la du " -"skatoloj." +"Tio ĉi estas la listo de elektitaj %s. Vi povas malaktivigi kelkajn markante " +"ilin en la suba kesto kaj klakante la sagon “Forigi” inter la du kestoj." msgid "Remove all" -msgstr "Forigu ĉiujn" +msgstr "Forigi ĉiujn" #, javascript-format msgid "Click to remove all chosen %s at once." -msgstr "Klaku por tuj forigi ĉiujn %s elektitajn." +msgstr "Klaku por tuj forigi ĉiujn %sn elektitajn." msgid "%(sel)s of %(cnt)s selected" msgid_plural "%(sel)s of %(cnt)s selected" msgstr[0] "%(sel)s de %(cnt)s elektita" -msgstr[1] "%(sel)s de %(cnt)s elektitaj" +msgstr[1] "%(sel)s el %(cnt)s elektitaj" msgid "" "You have unsaved changes on individual editable fields. If you run an " @@ -86,47 +85,43 @@ msgstr "" "agon, viaj neŝirmitaj ŝanĝoj perdiĝos." msgid "" -"You have selected an action, but you haven't saved your changes to " -"individual fields yet. Please click OK to save. You'll need to re-run the " +"You have selected an action, but you haven’t saved your changes to " +"individual fields yet. Please click OK to save. You’ll need to re-run the " "action." msgstr "" -"Vi elektas agon, sed vi ne ŝirmis viajn ŝanĝojn al individuaj kampoj ĝis " -"nun. Bonvolu klaku BONA por ŝirmi. 
Vi devos ripeton la agon" msgid "" -"You have selected an action, and you haven't made any changes on individual " -"fields. You're probably looking for the Go button rather than the Save " +"You have selected an action, and you haven’t made any changes on individual " +"fields. You’re probably looking for the Go button rather than the Save " "button." msgstr "" -"Vi elektas agon, kaj vi ne faris ajnajn ŝanĝojn ĉe unuopaj kampoj. Vi " -"verŝajne serĉas la Iru-butonon prefere ol la Ŝirmu-butono." msgid "Now" msgstr "Nun" msgid "Midnight" -msgstr "Noktomezo" +msgstr "Noktomeze" msgid "6 a.m." msgstr "6 a.t.m." msgid "Noon" -msgstr "Tagmezo" +msgstr "Tagmeze" msgid "6 p.m." -msgstr "6 ptm" +msgstr "6 p.t.m." #, javascript-format msgid "Note: You are %s hour ahead of server time." msgid_plural "Note: You are %s hours ahead of server time." -msgstr[0] "Noto: Vi estas %s horo antaŭ la servila horo." -msgstr[1] "Noto: Vi estas %s horoj antaŭ la servila horo." +msgstr[0] "Noto: Vi estas %s horon post la servila horo." +msgstr[1] "Noto: Vi estas %s horojn post la servila horo." #, javascript-format msgid "Note: You are %s hour behind server time." msgid_plural "Note: You are %s hours behind server time." -msgstr[0] "Noto: Vi estas %s horo post la servila horo." -msgstr[1] "Noto: Vi estas %s horoj post la servila horo." +msgstr[0] "Noto: Vi estas %s horon antaŭ la servila horo." +msgstr[1] "Noto: Vi estas %s horojn antaŭ la servila horo." msgid "Choose a Time" msgstr "Elektu horon" @@ -135,7 +130,7 @@ msgid "Choose a time" msgstr "Elektu tempon" msgid "Cancel" -msgstr "Malmendu" +msgstr "Nuligi" msgid "Today" msgstr "Hodiaŭ" @@ -185,6 +180,54 @@ msgstr "novembro" msgid "December" msgstr "decembro" +msgctxt "abbrev. month January" +msgid "Jan" +msgstr "jan." + +msgctxt "abbrev. month February" +msgid "Feb" +msgstr "feb." + +msgctxt "abbrev. month March" +msgid "Mar" +msgstr "mar." + +msgctxt "abbrev. month April" +msgid "Apr" +msgstr "apr." + +msgctxt "abbrev. month May" +msgid "May" +msgstr "maj." + +msgctxt "abbrev. month June" +msgid "Jun" +msgstr "jun." + +msgctxt "abbrev. month July" +msgid "Jul" +msgstr "jul." + +msgctxt "abbrev. month August" +msgid "Aug" +msgstr "aŭg." + +msgctxt "abbrev. month September" +msgid "Sep" +msgstr "sep." + +msgctxt "abbrev. month October" +msgid "Oct" +msgstr "okt." + +msgctxt "abbrev. month November" +msgid "Nov" +msgstr "nov." + +msgctxt "abbrev. month December" +msgid "Dec" +msgstr "dec." + msgctxt "one letter Sunday" msgid "S" msgstr "d" @@ -213,8 +256,13 @@ msgctxt "one letter Saturday" msgid "S" msgstr "s" +msgid "" +"You have already submitted this form. Are you sure you want to submit it " +"again?" +msgstr "Vi jam forsendis tiun ĉi formularon. Ĉu vi certe volas resendi ĝin?" 
+ msgid "Show" -msgstr "Montru" +msgstr "Montri" msgid "Hide" -msgstr "Kaŝu" +msgstr "Kaŝi" diff --git a/django/contrib/admin/locale/es/LC_MESSAGES/django.mo b/django/contrib/admin/locale/es/LC_MESSAGES/django.mo index 72401f3525..89c15867db 100644 Binary files a/django/contrib/admin/locale/es/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/es/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/es/LC_MESSAGES/django.po b/django/contrib/admin/locale/es/LC_MESSAGES/django.po index 65f3e0c3e8..bc49ce8867 100644 --- a/django/contrib/admin/locale/es/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/es/LC_MESSAGES/django.po @@ -17,15 +17,15 @@ # Marc Garcia , 2011 # Miguel Angel Tribaldos , 2017 # Pablo, 2015 -# Uriel Medina , 2020-2021 +# Uriel Medina , 2020-2022 # Veronicabh , 2015 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-11-10 03:53+0000\n" -"Last-Translator: Uriel Medina \n" +"POT-Creation-Date: 2022-05-17 05:10-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Uriel Medina , 2020-2022\n" "Language-Team: Spanish (http://www.transifex.com/django/django/language/" "es/)\n" "MIME-Version: 1.0\n" @@ -272,12 +272,14 @@ msgid "%(count)s %(name)s was changed successfully." msgid_plural "%(count)s %(name)s were changed successfully." msgstr[0] "%(count)s %(name)s fué modificado con éxito." msgstr[1] "%(count)s %(name)s fueron modificados con éxito." +msgstr[2] "%(count)s %(name)s fueron modificados con éxito." #, python-format msgid "%(total_count)s selected" msgid_plural "All %(total_count)s selected" msgstr[0] "%(total_count)s seleccionado" msgstr[1] "%(total_count)s seleccionados en total" +msgstr[2] "%(total_count)s seleccionados en total" #, python-format msgid "0 of %(cnt)s selected" @@ -287,8 +289,9 @@ msgstr "seleccionados 0 de %(cnt)s" msgid "Change history: %s" msgstr "Histórico de modificaciones: %s" -#. Translators: Model verbose name and instance representation, -#. suitable to be an item in a list. +#. Translators: Model verbose name and instance +#. representation, suitable to be an item in a +#. list. #, python-format msgid "%(class_name)s %(instance)s" msgstr "%(class_name)s %(instance)s" @@ -460,8 +463,8 @@ msgid "" "Are you sure you want to delete the %(object_name)s \"%(escaped_object)s\"? " "All of the following related items will be deleted:" msgstr "" -"¿Está seguro de que quiere borrar los %(object_name)s \"%(escaped_object)s" -"\"? Se borrarán los siguientes objetos relacionados:" +"¿Está seguro de que quiere borrar los %(object_name)s " +"\"%(escaped_object)s\"? Se borrarán los siguientes objetos relacionados:" msgid "Objects" msgstr "Objetos" @@ -561,6 +564,12 @@ msgstr "Usuario" msgid "Action" msgstr "Acción" +msgid "entry" +msgstr "entrada" + +msgid "entries" +msgstr "entradas" + msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " "admin site." @@ -585,6 +594,7 @@ msgid "%(counter)s result" msgid_plural "%(counter)s results" msgstr[0] "%(counter)s resultado" msgstr[1] "%(counter)s resultados" +msgstr[2] "%(counter)s resultados" #, python-format msgid "%(full_result_count)s total" @@ -617,6 +627,10 @@ msgstr "Añadir otro %(model)s" msgid "Delete selected %(model)s" msgstr "Eliminar %(model)s seleccionada/o" +#, python-format +msgid "View selected %(model)s" +msgstr "Ver seleccionado %(model)s" + msgid "Thanks for spending some quality time with the web site today." 
msgstr "Gracias por pasar un buen rato con el sitio web hoy." diff --git a/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.mo b/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.mo index d732ed0c9d..e92aa365ef 100644 Binary files a/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.mo and b/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.mo differ diff --git a/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.po b/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.po index bdfacec6bc..9b2f80529f 100644 --- a/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.po +++ b/django/contrib/admin/locale/es/LC_MESSAGES/djangojs.po @@ -6,15 +6,15 @@ # Jannis Leidel , 2011 # Josue Naaman Nistal Guerra , 2014 # Leonardo J. Caballero G. , 2011 -# Uriel Medina , 2020-2021 +# Uriel Medina , 2020-2022 # Veronicabh , 2015 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2021-02-11 05:37+0000\n" -"Last-Translator: Uriel Medina \n" +"POT-Creation-Date: 2022-05-17 05:26-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Uriel Medina , 2020-2022\n" "Language-Team: Spanish (http://www.transifex.com/django/django/language/" "es/)\n" "MIME-Version: 1.0\n" @@ -80,6 +80,7 @@ msgid "%(sel)s of %(cnt)s selected" msgid_plural "%(sel)s of %(cnt)s selected" msgstr[0] "%(sel)s de %(cnt)s seleccionado" msgstr[1] "%(sel)s de %(cnt)s seleccionados" +msgstr[2] "%(sel)s de %(cnt)s seleccionados" msgid "" "You have unsaved changes on individual editable fields. If you run an " @@ -126,12 +127,14 @@ msgid "Note: You are %s hour ahead of server time." msgid_plural "Note: You are %s hours ahead of server time." msgstr[0] "Nota: Usted esta a %s horas por delante de la hora del servidor." msgstr[1] "Nota: Usted va %s horas por delante de la hora del servidor." +msgstr[2] "Nota: Usted va %s horas por delante de la hora del servidor." #, javascript-format msgid "Note: You are %s hour behind server time." msgid_plural "Note: You are %s hours behind server time." msgstr[0] "Nota: Usted esta a %s hora de retraso de tiempo de servidor." msgstr[1] "Nota: Usted va %s horas por detrás de la hora del servidor." +msgstr[2] "Nota: Usted va %s horas por detrás de la hora del servidor." msgid "Choose a Time" msgstr "Elija una Hora" @@ -266,6 +269,12 @@ msgctxt "one letter Saturday" msgid "S" msgstr "S" +msgid "" +"You have already submitted this form. Are you sure you want to submit it " +"again?" +msgstr "" +"Ya ha enviado este formulario. ¿Está seguro de querer enviarlo de nuevo?" 
+ msgid "Show" msgstr "Mostrar" diff --git a/django/contrib/admin/locale/fa/LC_MESSAGES/django.mo b/django/contrib/admin/locale/fa/LC_MESSAGES/django.mo index 2cc581a5bd..227a0cd422 100644 Binary files a/django/contrib/admin/locale/fa/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/fa/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/fa/LC_MESSAGES/django.po b/django/contrib/admin/locale/fa/LC_MESSAGES/django.po index ae3144bcc0..03a221c552 100644 --- a/django/contrib/admin/locale/fa/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/fa/LC_MESSAGES/django.po @@ -4,6 +4,7 @@ # Ahmad Hosseini , 2020 # Ali Nikneshan , 2015,2020 # Ali Vakilzade , 2015 +# Aly Ahmady , 2022 # Amir Ajorloo , 2020 # Arash Fazeli , 2012 # Farshad Asadpour, 2021 @@ -18,9 +19,9 @@ msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-10-23 18:10+0000\n" -"Last-Translator: Farshad Asadpour\n" +"POT-Creation-Date: 2022-05-17 05:10-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Aly Ahmady , 2022\n" "Language-Team: Persian (http://www.transifex.com/django/django/language/" "fa/)\n" "MIME-Version: 1.0\n" @@ -159,7 +160,7 @@ msgstr "شئ LogEntry" #, python-brace-format msgid "Added {name} “{object}”." -msgstr "اضافه شد {name} \"{object}\"." +msgstr "{name} \"{object}\" اضافه شد." msgid "Added." msgstr "اضافه شد" @@ -187,7 +188,7 @@ msgstr "هیچ" msgid "Hold down “Control”, or “Command” on a Mac, to select more than one." msgstr "" -"برای انتخاب بیش از یکی \"Control\"، یا \"Command\" روی Mac، را پایین نگه " +"برای انتخاب بیش از یکی، کلید \"Control\"، یا \"Command\" روی Mac، را نگه " "دارید." #, python-brace-format @@ -282,8 +283,9 @@ msgstr "0 از %(cnt)s انتخاب شده‌اند" msgid "Change history: %s" msgstr "تاریخچهٔ تغییر: %s" -#. Translators: Model verbose name and instance representation, -#. suitable to be an item in a list. +#. Translators: Model verbose name and instance +#. representation, suitable to be an item in a +#. list. #, python-format msgid "%(class_name)s %(instance)s" msgstr "%(class_name)s %(instance)s" @@ -552,6 +554,12 @@ msgstr "کاربر" msgid "Action" msgstr "عمل" +msgid "entry" +msgstr "ورودی" + +msgid "entries" +msgstr "ورودی ها" + msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " "admin site." @@ -608,6 +616,10 @@ msgstr "افزدون %(model)s دیگر" msgid "Delete selected %(model)s" msgstr "حذف کردن %(model)s انتخاب شده" +#, python-format +msgid "View selected %(model)s" +msgstr "نمایش %(model)sهای انتخاب شده" + msgid "Thanks for spending some quality time with the web site today." 
msgstr "" "از شما ممنون هستیم که زمان با ارزش خود را برای این تارنما امروز صرف کرده اید" diff --git a/django/contrib/admin/locale/ja/LC_MESSAGES/django.mo b/django/contrib/admin/locale/ja/LC_MESSAGES/django.mo index b769e84a73..56707d83a1 100644 Binary files a/django/contrib/admin/locale/ja/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/ja/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/ja/LC_MESSAGES/django.po b/django/contrib/admin/locale/ja/LC_MESSAGES/django.po index 5fbaba94cc..be55bf0aac 100644 --- a/django/contrib/admin/locale/ja/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/ja/LC_MESSAGES/django.po @@ -4,6 +4,7 @@ # akiyoko , 2020 # Claude Paroz , 2016 # Goto Hayato , 2019 +# Hiroki Sawano, 2022 # Jannis Leidel , 2011 # Shinichi Katsumata , 2019 # Shinya Okano , 2012-2018,2021 @@ -15,9 +16,9 @@ msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-10-13 11:41+0000\n" -"Last-Translator: Shinya Okano \n" +"POT-Creation-Date: 2022-05-17 05:10-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Hiroki Sawano, 2022\n" "Language-Team: Japanese (http://www.transifex.com/django/django/language/" "ja/)\n" "MIME-Version: 1.0\n" @@ -269,8 +270,9 @@ msgstr "%(cnt)s個の内ひとつも選択されていません" msgid "Change history: %s" msgstr "変更履歴: %s" -#. Translators: Model verbose name and instance representation, -#. suitable to be an item in a list. +#. Translators: Model verbose name and instance +#. representation, suitable to be an item in a +#. list. #, python-format msgid "%(class_name)s %(instance)s" msgstr "%(class_name)s %(instance)s" @@ -540,6 +542,12 @@ msgstr "ユーザー" msgid "Action" msgstr "操作" +msgid "entry" +msgstr "エントリ" + +msgid "entries" +msgstr "エントリ" + msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " "admin site." @@ -595,6 +603,10 @@ msgstr "%(model)s の追加" msgid "Delete selected %(model)s" msgstr "選択された %(model)s を削除" +#, python-format +msgid "View selected %(model)s" +msgstr "" + msgid "Thanks for spending some quality time with the web site today." 
msgstr "ご利用ありがとうございました。" diff --git a/django/contrib/admin/locale/mk/LC_MESSAGES/django.mo b/django/contrib/admin/locale/mk/LC_MESSAGES/django.mo index 0acf0fec22..9f4d6e436a 100644 Binary files a/django/contrib/admin/locale/mk/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/mk/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/mk/LC_MESSAGES/django.po b/django/contrib/admin/locale/mk/LC_MESSAGES/django.po index 861cde4fb0..676173ca2c 100644 --- a/django/contrib/admin/locale/mk/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/mk/LC_MESSAGES/django.po @@ -4,6 +4,7 @@ # Dimce Grozdanoski , 2021 # dekomote , 2015 # Jannis Leidel , 2011 +# Martino Nikolovski, 2022 # Vasil Vangelovski , 2016-2017,2019,2021 # Vasil Vangelovski , 2013-2015 # Vasil Vangelovski , 2011-2013 @@ -11,9 +12,9 @@ msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-09-22 07:21+0000\n" -"Last-Translator: Transifex Bot <>\n" +"POT-Creation-Date: 2022-05-17 05:10-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Martino Nikolovski, 2022\n" "Language-Team: Macedonian (http://www.transifex.com/django/django/language/" "mk/)\n" "MIME-Version: 1.0\n" @@ -265,8 +266,9 @@ msgstr "0 од %(cnt)s избрани" msgid "Change history: %s" msgstr "Историја на измени: %s" -#. Translators: Model verbose name and instance representation, -#. suitable to be an item in a list. +#. Translators: Model verbose name and instance +#. representation, suitable to be an item in a +#. list. #, python-format msgid "%(class_name)s %(instance)s" msgstr "%(class_name)s %(instance)s" @@ -299,7 +301,7 @@ msgid "Page not found" msgstr "Страницата не е најдена" msgid "We’re sorry, but the requested page could not be found." -msgstr "" +msgstr "Се извинуваме, страница која ја побаравте не е пронајдена" msgid "Home" msgstr "Дома" @@ -317,6 +319,8 @@ msgid "" "There’s been an error. It’s been reported to the site administrators via " "email and should be fixed shortly. Thanks for your patience." msgstr "" +"Наидовте на грешка. Известени се администраторите на страницата преку имејл " +"и би требало наскоро да биде поправена. Ви благодариме на трпението." msgid "Run the selected action" msgstr "Изврши ја избраната акција" @@ -345,12 +349,14 @@ msgid "View" msgstr "Погледни" msgid "You don’t have permission to view or edit anything." -msgstr "" +msgstr "Немате дозвола да прегледате или промените ништо" msgid "" "First, enter a username and password. Then, you’ll be able to edit more user " "options." msgstr "" +"Прво внесете корисничко име и лозинка па потоа ќе можете да уредувате повеќе " +"опции за корисникот" msgid "Enter a username and password." msgstr "Внесете корисничко име и лозинка." @@ -394,7 +400,7 @@ msgid "Filter" msgstr "Филтер" msgid "Clear all filters" -msgstr "" +msgstr "Ресетирај ги сите филтри" msgid "Remove from sorting" msgstr "Отстрани од сортирање" @@ -439,7 +445,7 @@ msgid "Objects" msgstr "Предмети" msgid "Yes, I’m sure" -msgstr "" +msgstr "Да, сигурен сум" msgid "No, take me back" msgstr "Не, врати ме назад" @@ -499,6 +505,9 @@ msgid "" "database tables have been created, and make sure the database is readable by " "the appropriate user." msgstr "" +"Нешто не е во ред со инсталацијата на базата на податоци. Уверете се дека " +"соодветните табели се создадени, и дека базата на податоци е пристапна до " +"соодветниот корисник." 
#, python-format msgid "" @@ -515,7 +524,7 @@ msgid "Toggle navigation" msgstr "" msgid "Start typing to filter…" -msgstr "" +msgstr "Започнете со пишување за да филтрирате..." msgid "Filter navigation items" msgstr "" @@ -529,6 +538,12 @@ msgstr "Корисник" msgid "Action" msgstr "Акција" +msgid "entry" +msgstr "" + +msgid "entries" +msgstr "" + msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " "admin site." @@ -583,6 +598,10 @@ msgstr "Додади уште %(model)s" msgid "Delete selected %(model)s" msgstr "Избриши ги избраните %(model)s" +#, python-format +msgid "View selected %(model)s" +msgstr "" + msgid "Thanks for spending some quality time with the web site today." msgstr "" @@ -654,7 +673,7 @@ msgid "Please go to the following page and choose a new password:" msgstr "Ве молам одете на следната страница и внесете нова лозинка:" msgid "Your username, in case you’ve forgotten:" -msgstr "" +msgstr "Вашето корисничко име, во случај да сте заборавиле:" msgid "Thanks for using our site!" msgstr "Ви благодариме што го користите овој сајт!" @@ -667,6 +686,8 @@ msgid "" "Forgotten your password? Enter your email address below, and we’ll email " "instructions for setting a new one." msgstr "" +"Ја заборавивте вашата лозинка? Внесете го вашиот имејл и ќе ви пратиме " +"инструкции да подесите нова лозинка. " msgid "Email address:" msgstr "Email адреса:" diff --git a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.mo b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.mo index 2934fad0d5..fb044a1b32 100644 Binary files a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.po b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.po index de023c20c1..c86bdbc2cd 100644 --- a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/django.po @@ -6,7 +6,7 @@ # bruno.devpod , 2014 # Carlos Leite , 2019 # Carlos Leite , 2019 -# Filipe Cifali Stangler , 2016 +# Filipe Cifali , 2016 # dudanogueira , 2012 # Elyézer Rezende , 2013 # Fábio C. Barrionuevo da Luz , 2015 @@ -19,6 +19,7 @@ # João Paulo Andrade , 2018 # Lucas Infante , 2015 # Luiz Boaretto , 2017 +# Marssal Jr. , 2022 # Marcelo Moro Brondani , 2018 # Marco Rougeth , 2015 # Otávio Reis , 2018 @@ -34,8 +35,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-05-17 05:10-0500\n" -"PO-Revision-Date: 2022-07-25 07:05+0000\n" -"Last-Translator: Tomaz Marcelino Cunha Neto \n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Marssal Jr. , 2022\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/django/django/" "language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -280,12 +281,14 @@ msgid "%(count)s %(name)s was changed successfully." msgid_plural "%(count)s %(name)s were changed successfully." msgstr[0] "%(count)s %(name)s modificado com sucesso." msgstr[1] "%(count)s %(name)s modificados com sucesso." +msgstr[2] "%(count)s %(name)s modificados com sucesso." 
#, python-format msgid "%(total_count)s selected" msgid_plural "All %(total_count)s selected" msgstr[0] "%(total_count)s selecionado" msgstr[1] "Todos %(total_count)s selecionados" +msgstr[2] "Todos %(total_count)s selecionados" #, python-format msgid "0 of %(cnt)s selected" @@ -569,10 +572,10 @@ msgid "Action" msgstr "Ação" msgid "entry" -msgstr "" +msgstr "registro" msgid "entries" -msgstr "" +msgstr "registros" msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " @@ -598,6 +601,7 @@ msgid "%(counter)s result" msgid_plural "%(counter)s results" msgstr[0] "%(counter)s resultado" msgstr[1] "%(counter)s resultados" +msgstr[2] "%(counter)s resultados" #, python-format msgid "%(full_result_count)s total" @@ -632,7 +636,7 @@ msgstr "Excluir %(model)s selecionado" #, python-format msgid "View selected %(model)s" -msgstr "" +msgstr "Visualizar %(model)s selecionados" msgid "Thanks for spending some quality time with the web site today." msgstr "Obrigado por passar algum tempo de qualidade com o site hoje." diff --git a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.mo b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.mo index 813a169a6d..6b59b6de67 100644 Binary files a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.mo and b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.mo differ diff --git a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.po b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.po index 565b6ac709..7bcf5abde8 100644 --- a/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.po +++ b/django/contrib/admin/locale/pt_BR/LC_MESSAGES/djangojs.po @@ -7,6 +7,7 @@ # semente, 2012 # Jannis Leidel , 2011 # Lucas Infante , 2015 +# Marssal Jr. , 2022 # Rafael Fontenelle , 2021 # Renata Barbosa Almeida , 2016 # Samuel Nogueira Bacelar , 2020 @@ -14,9 +15,9 @@ msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2021-01-17 05:53+0000\n" -"Last-Translator: Rafael Fontenelle \n" +"POT-Creation-Date: 2022-05-17 05:26-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Marssal Jr. , 2022\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/django/django/" "language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -80,6 +81,7 @@ msgid "%(sel)s of %(cnt)s selected" msgid_plural "%(sel)s of %(cnt)s selected" msgstr[0] "%(sel)s de %(cnt)s selecionado" msgstr[1] "%(sel)s de %(cnt)s selecionados" +msgstr[2] "%(sel)s de %(cnt)s selecionados" msgid "" "You have unsaved changes on individual editable fields. If you run an " @@ -125,12 +127,14 @@ msgid "Note: You are %s hour ahead of server time." msgid_plural "Note: You are %s hours ahead of server time." msgstr[0] "Nota: Você está %s hora à frente do horário do servidor." msgstr[1] "Nota: Você está %s horas à frente do horário do servidor." +msgstr[2] "Nota: Você está %s horas à frente do horário do servidor." #, javascript-format msgid "Note: You are %s hour behind server time." msgid_plural "Note: You are %s hours behind server time." msgstr[0] "Nota: Você está %s hora atrás do tempo do servidor." msgstr[1] "Nota: Você está %s horas atrás do horário do servidor." +msgstr[2] "Nota: Você está %s horas atrás do horário do servidor." msgid "Choose a Time" msgstr "Escolha um horário" @@ -265,6 +269,12 @@ msgctxt "one letter Saturday" msgid "S" msgstr "S" +msgid "" +"You have already submitted this form. Are you sure you want to submit it " +"again?" 
+msgstr "" +"Você já enviou este formulário. Tem certeza de que deseja reenviar de novo?" + msgid "Show" msgstr "Mostrar" diff --git a/django/contrib/admin/locale/ru/LC_MESSAGES/django.mo b/django/contrib/admin/locale/ru/LC_MESSAGES/django.mo index 72c8ce616a..3042369d37 100644 Binary files a/django/contrib/admin/locale/ru/LC_MESSAGES/django.mo and b/django/contrib/admin/locale/ru/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admin/locale/ru/LC_MESSAGES/django.po b/django/contrib/admin/locale/ru/LC_MESSAGES/django.po index f9e671dcf8..6c7072246e 100644 --- a/django/contrib/admin/locale/ru/LC_MESSAGES/django.po +++ b/django/contrib/admin/locale/ru/LC_MESSAGES/django.po @@ -1,6 +1,7 @@ # This file is distributed under the same license as the Django package. # # Translators: +# Alex Ibragimov, 2021 # Ivan Ivaschenko , 2013 # Denis Darii , 2011 # Dimmus , 2011 @@ -9,25 +10,29 @@ # Sergey , 2016 # Jannis Leidel , 2011 # SeryiMysh , 2020 -# Алексей Борискин , 2012-2015 +# Алексей Борискин , 2012-2015,2022 # Дмитрий , 2019 -# Дмитрий Шатера , 2018 +# Bobsans , 2018 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2020-07-14 19:53+0200\n" -"PO-Revision-Date: 2020-07-21 09:32+0000\n" -"Last-Translator: crazyzubr \n" +"POT-Creation-Date: 2022-05-17 05:10-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Алексей Борискин , 2012-2015,2022\n" "Language-Team: Russian (http://www.transifex.com/django/django/language/" "ru/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || " +"(n%100>=11 && n%100<=14)? 2 : 3);\n" + +#, python-format +msgid "Delete selected %(verbose_name_plural)s" +msgstr "Удалить выбранные %(verbose_name_plural)s" #, python-format msgid "Successfully deleted %(count)d %(items)s." @@ -40,10 +45,6 @@ msgstr "Не удается удалить %(name)s" msgid "Are you sure?" msgstr "Вы уверены?" -#, python-format -msgid "Delete selected %(verbose_name_plural)s" -msgstr "Удалить выбранные %(verbose_name_plural)s" - msgid "Administration" msgstr "Администрирование" @@ -282,8 +283,9 @@ msgstr "Выбрано 0 объектов из %(cnt)s " msgid "Change history: %s" msgstr "История изменений: %s" -#. Translators: Model verbose name and instance representation, -#. suitable to be an item in a list. +#. Translators: Model verbose name and instance +#. representation, suitable to be an item in a +#. list. #, python-format msgid "%(class_name)s %(instance)s" msgstr "%(class_name)s %(instance)s" @@ -539,6 +541,12 @@ msgstr "Забыли свой пароль или имя пользовател msgid "Toggle navigation" msgstr "Переключить навигацию" +msgid "Start typing to filter…" +msgstr "Начните печатать для фильтрации..." + +msgid "Filter navigation items" +msgstr "Фильтр элементов навигации" + msgid "Date/time" msgstr "Дата и время" @@ -548,6 +556,12 @@ msgstr "Пользователь" msgid "Action" msgstr "Действие" +msgid "entry" +msgstr "запись" + +msgid "entries" +msgstr "записи" + msgid "" "This object doesn’t have a change history. It probably wasn’t added via this " "admin site." 
@@ -606,7 +620,11 @@ msgstr "Добавить ещё один объект типа \"%(model)s\"" msgid "Delete selected %(model)s" msgstr "Удалить выбранный объект типа \"%(model)s\"" -msgid "Thanks for spending some quality time with the Web site today." +#, python-format +msgid "View selected %(model)s" +msgstr "Просмотреть выбранный объект типа \"%(model)s\"" + +msgid "Thanks for spending some quality time with the web site today." msgstr "Благодарим вас за время, проведенное на этом сайте." msgid "Log in again" diff --git a/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.mo b/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.mo index fe63491eb9..9c88ad0384 100644 Binary files a/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.mo and b/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.mo differ diff --git a/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.po b/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.po index 478d770bb5..78b814dd0a 100644 --- a/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.po +++ b/django/contrib/admin/locale/ru/LC_MESSAGES/djangojs.po @@ -9,24 +9,24 @@ # crazyzubr , 2020 # Jannis Leidel , 2011 # Panasoft, 2021 -# Алексей Борискин , 2012,2014-2015 +# Алексей Борискин , 2012,2014-2015,2022 # Андрей Щуров , 2016 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2021-03-16 22:55+0000\n" -"Last-Translator: Panasoft\n" +"POT-Creation-Date: 2022-05-17 05:26-0500\n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: Алексей Борискин , 2012,2014-2015,2022\n" "Language-Team: Russian (http://www.transifex.com/django/django/language/" "ru/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || " +"(n%100>=11 && n%100<=14)? 2 : 3);\n" #, javascript-format msgid "Available %s" @@ -280,6 +280,12 @@ msgctxt "one letter Saturday" msgid "S" msgstr "С" +msgid "" +"You have already submitted this form. Are you sure you want to submit it " +"again?" +msgstr "" +"Вы уже отправили эту форму. Вы уверены, что хотите отправить её ещё раз?" + msgid "Show" msgstr "Показать" diff --git a/django/contrib/admin/options.py b/django/contrib/admin/options.py index e6d4ae12bb..6c6ee8cee7 100644 --- a/django/contrib/admin/options.py +++ b/django/contrib/admin/options.py @@ -561,7 +561,7 @@ class BaseModelAdmin(metaclass=forms.MediaDefiningClass): def has_delete_permission(self, request, obj=None): """ - Return True if the given request has permission to change the given + Return True if the given request has permission to delete the given Django model instance, the default implementation doesn't examine the `obj` parameter. 
diff --git a/django/contrib/admin/static/admin/css/base.css b/django/contrib/admin/static/admin/css/base.css index d5d3b58984..72f4ae169b 100644 --- a/django/contrib/admin/static/admin/css/base.css +++ b/django/contrib/admin/static/admin/css/base.css @@ -24,7 +24,7 @@ html[data-theme="light"], --breadcrumbs-link-fg: var(--body-bg); --breadcrumbs-bg: var(--primary); - --link-fg: #447e9b; + --link-fg: #417893; --link-hover-color: #036; --link-selected-fg: #5b80b2; @@ -194,7 +194,7 @@ li ul { li, dt, dd { font-size: 0.8125rem; - line-height: 20px; + line-height: 1.25rem; } dt { @@ -308,7 +308,7 @@ table { td, th { font-size: 0.8125rem; - line-height: 16px; + line-height: 1rem; border-bottom: 1px solid var(--hairline-color); vertical-align: top; padding: 8px; @@ -501,7 +501,7 @@ textarea:focus, select:focus, .vTextField:focus { } select { - height: 30px; + height: 1.875rem; } select[multiple] { @@ -761,7 +761,7 @@ a.deletelink:focus, a.deletelink:hover { display: block; float: left; margin-left: 5px; - height: 16px; + height: 1rem; } .object-tools a { @@ -1059,8 +1059,8 @@ a.deletelink:focus, a.deletelink:hover { .delete-confirmation form .cancel-link { display: inline-block; vertical-align: middle; - height: 15px; - line-height: 15px; + height: 0.9375rem; + line-height: 0.9375rem; border-radius: 4px; padding: 10px 15px; color: var(--button-fg); diff --git a/django/contrib/admin/static/admin/css/changelists.css b/django/contrib/admin/static/admin/css/changelists.css index 4406dccb93..641dccd7e3 100644 --- a/django/contrib/admin/static/admin/css/changelists.css +++ b/django/contrib/admin/static/admin/css/changelists.css @@ -90,7 +90,7 @@ } #toolbar #searchbar { - height: 19px; + height: 1.1875rem; border: 1px solid var(--border-color); padding: 2px 5px; margin: 0; @@ -270,7 +270,7 @@ background: var(--body-bg); border-top: none; border-bottom: none; - line-height: 24px; + line-height: 1.5rem; color: var(--body-quiet-color); width: 100%; } @@ -289,7 +289,7 @@ #changelist .actions select { vertical-align: top; - height: 24px; + height: 1.5rem; color: var(--body-fg); border: 1px solid var(--border-color); border-radius: 4px; @@ -316,7 +316,7 @@ background: var(--body-bg); box-shadow: 0 -15px 20px -10px rgba(0, 0, 0, 0.15) inset; cursor: pointer; - height: 24px; + height: 1.5rem; line-height: 1; padding: 4px 8px; margin: 0; diff --git a/django/contrib/admin/static/admin/css/forms.css b/django/contrib/admin/static/admin/css/forms.css index a326b3baf7..315f4b753c 100644 --- a/django/contrib/admin/static/admin/css/forms.css +++ b/django/contrib/admin/static/admin/css/forms.css @@ -22,6 +22,11 @@ form .form-row p { padding-left: 0; } +.form-row > div { + display: flex; + flex-wrap: wrap; +} + /* FORM LABELS */ label { @@ -69,7 +74,6 @@ form ul.inline li { .aligned label { display: block; padding: 4px 10px 0 0; - float: left; width: 160px; word-wrap: break-word; line-height: 1; @@ -79,10 +83,10 @@ form ul.inline li { content: ''; display: inline-block; vertical-align: middle; - height: 26px; + height: 1.625rem; } -.aligned label + p, .aligned label + div.help, .aligned label + div.readonly { +.aligned label + p, .aligned .checkbox-row + div.help, .aligned label + div.readonly { padding: 6px 0; margin-top: 0; margin-bottom: 0; @@ -90,6 +94,11 @@ form ul.inline li { overflow-wrap: break-word; } +.aligned label + div.readonly, +.aligned label + .datetime { + margin-left: 0; +} + .aligned ul label { display: inline; float: none; @@ -117,7 +126,6 @@ form .aligned div.radiolist { form .aligned p.help, form 
.aligned div.help { - clear: left; margin-top: 0; margin-left: 160px; padding-left: 10px; @@ -129,8 +137,7 @@ form .aligned p.datetime div.help.timezonewarning { font-weight: normal; } -form .aligned label + p.help, -form .aligned label + div.help { +form .aligned .checkbox-row + .help { margin-left: 0; padding-left: 0; } @@ -270,8 +277,8 @@ body.popup .submit-row { } .submit-row input { - height: 35px; - line-height: 15px; + height: 2.1875rem; + line-height: 0.9375rem; } .submit-row input, .submit-row a { @@ -290,9 +297,9 @@ body.popup .submit-row { display: block; background: var(--delete-button-bg); border-radius: 4px; - padding: 10px 15px; - height: 15px; - line-height: 15px; + padding: 0.625rem 0.9375rem; + height: 0.9375rem; + line-height: 0.9375rem; color: var(--button-fg); } @@ -301,8 +308,8 @@ body.popup .submit-row { background: var(--close-button-bg); border-radius: 4px; padding: 10px 15px; - height: 15px; - line-height: 15px; + height: 0.9375rem; + line-height: 0.9375rem; color: var(--button-fg); } @@ -517,8 +524,8 @@ body.popup .submit-row { } .related-lookup { - width: 16px; - height: 16px; + width: 1rem; + height: 1rem; background-image: url(../img/search.svg); } diff --git a/django/contrib/admin/static/admin/css/responsive.css b/django/contrib/admin/static/admin/css/responsive.css index ad6a5cb085..f250ea40ff 100644 --- a/django/contrib/admin/static/admin/css/responsive.css +++ b/django/contrib/admin/static/admin/css/responsive.css @@ -104,13 +104,13 @@ input[type="submit"], button { } #changelist-search label { - line-height: 22px; + line-height: 1.375rem; } #toolbar form #searchbar { flex: 1 0 auto; width: 0; - height: 22px; + height: 1.375rem; margin: 0 10px 0 6px; } @@ -186,12 +186,12 @@ input[type="submit"], button { box-sizing: border-box; margin: 0; padding: 6px 8px; - min-height: 36px; + min-height: 2.25rem; font-size: 0.875rem; } .form-row select { - height: 36px; + height: 2.25rem; } .form-row select[multiple] { @@ -957,7 +957,7 @@ input[type="submit"], button { .calendar-shortcuts { padding: 10px 0; font-size: 0.75rem; - line-height: 12px; + line-height: 0.75rem; } .calendar-shortcuts a { diff --git a/django/contrib/admin/static/admin/css/rtl.css b/django/contrib/admin/static/admin/css/rtl.css index 014fd1e591..9e9cffe31a 100644 --- a/django/contrib/admin/static/admin/css/rtl.css +++ b/django/contrib/admin/static/admin/css/rtl.css @@ -111,7 +111,6 @@ thead th.sorted .text { .aligned label { padding: 0 0 3px 1em; - float: right; } .submit-row a.deletelink { @@ -127,10 +126,6 @@ thead th.sorted .text { margin-left: 5px; } -form .aligned p.help, form .aligned div.help { - clear: right; -} - form .aligned ul { margin-right: 163px; margin-left: 0; @@ -142,6 +137,17 @@ form ul.inline li { padding-left: 7px; } +form .aligned p.help, +form .aligned div.help { + margin-right: 160px; + padding-right: 10px; +} + +form .aligned .checkbox-row + .help { + margin-right: 0; + padding-right: 0; +} + .submit-row { text-align: right; } diff --git a/django/contrib/admin/static/admin/css/widgets.css b/django/contrib/admin/static/admin/css/widgets.css index ad5d9090d6..572dc0a500 100644 --- a/django/contrib/admin/static/admin/css/widgets.css +++ b/django/contrib/admin/static/admin/css/widgets.css @@ -277,8 +277,8 @@ a.active.selector-clearall:focus, a.active.selector-clearall:hover { .selector .search-label-icon { background: url(../img/search.svg) 0 0 no-repeat; display: inline-block; - height: 18px; - width: 18px; + height: 1.125rem; + width: 1.125rem; } /* DATE AND TIME */ @@ 
-498,7 +498,7 @@ span.clearable-file-input label { background: var(--body-bg); color: var(--body-quiet-color); font-size: 0.6875rem; - line-height: 11px; + line-height: 0.6875rem; border-top: 1px solid var(--hairline-color); padding: 8px 0; } diff --git a/django/contrib/admin/templates/admin/includes/fieldset.html b/django/contrib/admin/templates/admin/includes/fieldset.html index ba260a36ce..7b6e903ec3 100644 --- a/django/contrib/admin/templates/admin/includes/fieldset.html +++ b/django/contrib/admin/templates/admin/includes/fieldset.html @@ -19,12 +19,12 @@ {{ field.field }} {% endif %} {% endif %} - {% if field.field.help_text %} -
- {{ field.field.help_text|safe }} -
- {% endif %} + {% if field.field.help_text %} +
+ {{ field.field.help_text|safe }} +
+ {% endif %} {% endfor %} {% endfor %} diff --git a/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.mo b/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.mo index 0356778574..8f522e1389 100644 Binary files a/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.mo and b/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.mo differ diff --git a/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.po b/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.po index 124d843720..cb702a1bb8 100644 --- a/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.po +++ b/django/contrib/admindocs/locale/bg/LC_MESSAGES/django.po @@ -12,8 +12,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-01-15 09:00+0100\n" -"PO-Revision-Date: 2022-01-14 10:04+0000\n" -"Last-Translator: arneatec \n" +"PO-Revision-Date: 2022-05-25 07:05+0000\n" +"Last-Translator: arneatec , 2022\n" "Language-Team: Bulgarian (http://www.transifex.com/django/django/language/" "bg/)\n" "MIME-Version: 1.0\n" @@ -107,8 +107,8 @@ msgstr "Моля инсталирайте docutils" #, python-format msgid "" -"The admin documentation system requires Python’s docutils library." +"The admin documentation system requires Python’s docutils library." msgstr "" "Системата за администраторска документация изисква библиотеката за Python docutils." @@ -117,8 +117,8 @@ msgstr "" msgid "" "Please ask your administrators to install docutils." msgstr "" -"Моля, помолете вашите администратори да инсталират docutils ." +"Моля, помолете вашите администратори да инсталират docutils ." #, python-format msgid "Model: %(name)s" diff --git a/django/contrib/auth/forms.py b/django/contrib/auth/forms.py index 523830e8ee..0376d17709 100644 --- a/django/contrib/auth/forms.py +++ b/django/contrib/auth/forms.py @@ -81,7 +81,7 @@ class UsernameField(forms.CharField): } -class UserCreationForm(forms.ModelForm): +class BaseUserCreationForm(forms.ModelForm): """ A form that creates a user, with no privileges, from the given username and password. 
@@ -141,9 +141,26 @@ class UserCreationForm(forms.ModelForm): user.set_password(self.cleaned_data["password1"]) if commit: user.save() + if hasattr(self, "save_m2m"): + self.save_m2m() return user +class UserCreationForm(BaseUserCreationForm): + error_messages = { + **BaseUserCreationForm.error_messages, + "unique": _("A user with that username already exists."), + } + + def clean_username(self): + """Reject usernames that differ only in case.""" + username = self.cleaned_data.get("username") + if username and User.objects.filter(username__iexact=username).exists(): + raise forms.ValidationError(self.error_messages["unique"], code="unique") + else: + return username + + class UserChangeForm(forms.ModelForm): password = ReadOnlyPasswordHashField( label=_("Password"), @@ -354,7 +371,7 @@ class PasswordResetForm(forms.Form): class SetPasswordForm(forms.Form): """ - A form that lets a user change set their password without entering the old + A form that lets a user set their password without entering the old password """ diff --git a/django/contrib/auth/locale/bg/LC_MESSAGES/django.mo b/django/contrib/auth/locale/bg/LC_MESSAGES/django.mo index 1d682adb8a..37f90f5731 100644 Binary files a/django/contrib/auth/locale/bg/LC_MESSAGES/django.mo and b/django/contrib/auth/locale/bg/LC_MESSAGES/django.mo differ diff --git a/django/contrib/auth/locale/bg/LC_MESSAGES/django.po b/django/contrib/auth/locale/bg/LC_MESSAGES/django.po index 63b556eed6..878d5157d2 100644 --- a/django/contrib/auth/locale/bg/LC_MESSAGES/django.po +++ b/django/contrib/auth/locale/bg/LC_MESSAGES/django.po @@ -14,8 +14,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2022-01-14 10:14+0000\n" -"Last-Translator: arneatec \n" +"PO-Revision-Date: 2022-04-24 20:19+0000\n" +"Last-Translator: arneatec , 2022\n" "Language-Team: Bulgarian (http://www.transifex.com/django/django/language/" "bg/)\n" "MIME-Version: 1.0\n" @@ -76,8 +76,8 @@ msgid "" "password, but you can change the password using this form." msgstr "" "Паролите не се съхраняват в чист вид, така че е невъзможно да видите " -"паролата на този потребител, но можете да промените паролата чрез този формуляр." +"паролата на този потребител, но можете да промените паролата чрез този формуляр." #, python-format msgid "" @@ -239,9 +239,9 @@ msgid_plural "" "This password is too short. It must contain at least %(min_length)d " "characters." msgstr[0] "" -"Паролата е прекелно къса. Трябва да съдържа поне %(min_length)d символ." +"Паролата е прекалено къса. Трябва да съдържа поне %(min_length)d символ." msgstr[1] "" -"Паролата е прекелно къса. Трябва да съдържа поне %(min_length)d символа." +"Паролата е прекалено къса. Трябва да съдържа поне %(min_length)d символа." #, python-format msgid "Your password must contain at least %(min_length)d character." 
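The auth/forms.py hunks above split the old form in two: BaseUserCreationForm keeps the historical, case-sensitive behaviour (and now also calls save_m2m() when saving with commit=True), while UserCreationForm adds a clean_username() check that rejects usernames differing only in case. A minimal sketch of the difference, assuming a configured project with the default User model and a backend whose usernames are case-sensitive (e.g. SQLite); the usernames and passwords below are purely illustrative:

    from django.contrib.auth import get_user_model
    from django.contrib.auth.forms import BaseUserCreationForm, UserCreationForm

    get_user_model().objects.create_user("alice", password="existing-pass-123")

    data = {
        "username": "ALICE",  # differs from "alice" only in case
        "password1": "s3cret-pass-phrase",
        "password2": "s3cret-pass-phrase",
    }

    # The base form keeps the old behaviour: "ALICE" passes validation.
    assert BaseUserCreationForm(data).is_valid()

    # The new clean_username() rejects it with the "unique" error message.
    form = UserCreationForm(data)
    assert not form.is_valid()
    assert form.errors["username"] == ["A user with that username already exists."]
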
diff --git a/django/contrib/auth/locale/eo/LC_MESSAGES/django.mo b/django/contrib/auth/locale/eo/LC_MESSAGES/django.mo index 5405971b5d..5fe6729ab6 100644 Binary files a/django/contrib/auth/locale/eo/LC_MESSAGES/django.mo and b/django/contrib/auth/locale/eo/LC_MESSAGES/django.mo differ diff --git a/django/contrib/auth/locale/eo/LC_MESSAGES/django.po b/django/contrib/auth/locale/eo/LC_MESSAGES/django.po index a1d711d50a..82a84ad81a 100644 --- a/django/contrib/auth/locale/eo/LC_MESSAGES/django.po +++ b/django/contrib/auth/locale/eo/LC_MESSAGES/django.po @@ -4,14 +4,15 @@ # Batist D 🐍 , 2012-2013 # Batist D 🐍 , 2013-2019 # Matthieu Desplantes , 2021 -# Robin van der Vliet , 2019 +# Meiyer , 2022 +# Robin van der Vliet , 2019 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2019-09-08 17:27+0200\n" -"PO-Revision-Date: 2021-04-13 08:08+0000\n" -"Last-Translator: Matthieu Desplantes \n" +"POT-Creation-Date: 2021-09-21 10:22+0200\n" +"PO-Revision-Date: 2022-04-25 08:09+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -31,7 +32,7 @@ msgstr "Gravaj datoj" #, python-format msgid "%(name)s object with primary key %(key)r does not exist." -msgstr "%(name)s objekto kun ĉefŝlosilo %(key)r ne ekzistas." +msgstr "Objekto %(name)skun ĉefŝlosilo %(key)r ne ekzistas." msgid "Password changed successfully." msgstr "Pasvorto suksese ŝanĝita." @@ -72,8 +73,8 @@ msgid "" "password, but you can change the password using this form." msgstr "" "La pasvortoj ne estas konservitaj en klara formo, do ne eblas vidi la " -"pasvorton de ĉi tiu uzanto, sed vi povas ŝanĝi la pasvorton per ĉi tiu formularo." +"pasvorton de ĉi tiu uzanto, sed vi povas ŝanĝi la pasvorton per ĉi tiu formularo." #, python-format msgid "" @@ -124,10 +125,10 @@ msgid "version" msgstr "versio" msgid "memory cost" -msgstr "memoria kosto" +msgstr "memor-kosto" msgid "time cost" -msgstr "tempa kosto" +msgstr "tempo-kosto" msgid "parallelism" msgstr "paralelismo" @@ -138,6 +139,9 @@ msgstr "laborfaktoro" msgid "checksum" msgstr "kontrolsumo" +msgid "block size" +msgstr "blok-grandeco" + msgid "name" msgstr "nomo" @@ -166,14 +170,14 @@ msgid "" "Designates that this user has all permissions without explicitly assigning " "them." msgstr "" -"Indikas ke tiu uzanto havas ĉiujn permesojn, sen eksplicite asigni ilin." +"Indikas ke tiu ĉi uzanto havas ĉiujn permesojn, sen eksplicite atribui ilin." msgid "" "The groups this user belongs to. A user will get all permissions granted to " "each of their groups." msgstr "" -"La grupoj en kiu tiu uzanto apartenas. Uzanto akiros ĉiuj permesoj donita al " -"ĉiuj de iliaj grupoj." +"La grupoj al kiuj tiu ĉi uzanto apartenas. Uzanto akiros ĉiujn permesojn " +"atribuitajn al ĉiu el tiuj grupoj." msgid "user permissions" msgstr "uzantaj permesoj" @@ -212,11 +216,11 @@ msgid "" "Designates whether this user should be treated as active. Unselect this " "instead of deleting accounts." msgstr "" -"Indikas ĉu tiu uzanto devus esti traktita kiel aktiva. Malselekti tion ĉi " +"Indikas ĉu la uzanto devus esti traktita kiel aktiva. Malmarku tion ĉi " "anstataŭ forigi kontojn." msgid "date joined" -msgstr "dato aliĝita" +msgstr "dato de aliĝo" msgid "user" msgstr "uzanto" @@ -258,7 +262,7 @@ msgid "Your password can’t be a commonly used password." msgstr "Via pasvorto ne povas esti ofte uzata pasvorto." msgid "This password is entirely numeric." 
-msgstr "Tiu pasvorto estas tute cefera." +msgstr "Tiu ĉi pasvorto konsistas nur el ciferoj." msgid "Your password can’t be entirely numeric." msgstr "Via pasvorto ne povas konsisti nur el ciferoj." @@ -271,15 +275,15 @@ msgid "" "Enter a valid username. This value may contain only English letters, " "numbers, and @/./+/-/_ characters." msgstr "" -"Enigu validan uzantnomon. Ĉi tiu valoro povas enhavi nur sensupersignajn " -"literojn, ciferojn kaj la signojn @/./+/-/_." +"Enigu salutnomon en ĝusta formo. Ĉi tiu valoro povas enhavi nur " +"sensupersignajn literojn, ciferojn kaj la signojn @/./+/-/_." msgid "" "Enter a valid username. This value may contain only letters, numbers, and " "@/./+/-/_ characters." msgstr "" -"Enigu validan uzantnomon. Ĉi tiu valoro povas enhavi nur literojn, ciferojn " -"kaj la signojn @/./+/-/_." +"Enigu salutnomon en ĝusta formo. Ĉi tiu valoro povas enhavi nur literojn, " +"ciferojn kaj la signojn @/./+/-/_." msgid "Logged out" msgstr "Adiaŭita" diff --git a/django/contrib/auth/locale/es/LC_MESSAGES/django.mo b/django/contrib/auth/locale/es/LC_MESSAGES/django.mo index 4dc2c96386..c2195be76e 100644 Binary files a/django/contrib/auth/locale/es/LC_MESSAGES/django.mo and b/django/contrib/auth/locale/es/LC_MESSAGES/django.mo differ diff --git a/django/contrib/auth/locale/es/LC_MESSAGES/django.po b/django/contrib/auth/locale/es/LC_MESSAGES/django.po index fbbc61ed60..a1f480cfd9 100644 --- a/django/contrib/auth/locale/es/LC_MESSAGES/django.po +++ b/django/contrib/auth/locale/es/LC_MESSAGES/django.po @@ -5,7 +5,7 @@ # Antoni Aloy , 2012-2013,2015-2017 # e4db27214f7e7544f2022c647b585925_bb0e321, 2015-2016 # e4db27214f7e7544f2022c647b585925_bb0e321, 2020 -# Ernesto Rico Schmidt , 2017 +# Ernesto Rico Schmidt , 2017 # guillem , 2012 # Igor Támara , 2015 # Jannis Leidel , 2011 @@ -18,8 +18,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2021-11-10 03:52+0000\n" -"Last-Translator: Uriel Medina \n" +"PO-Revision-Date: 2022-04-25 08:09+0000\n" +"Last-Translator: Uriel Medina , 2020-2021\n" "Language-Team: Spanish (http://www.transifex.com/django/django/language/" "es/)\n" "MIME-Version: 1.0\n" @@ -80,8 +80,8 @@ msgid "" "password, but you can change the password using this form." msgstr "" "Las contraseñas no se almacenan en bruto, así que no hay manera de ver la " -"contraseña del usuario, pero se puede cambiar la contraseña mediante este formulario." +"contraseña del usuario, pero se puede cambiar la contraseña mediante este formulario." #, python-format msgid "" @@ -251,12 +251,16 @@ msgstr[0] "" msgstr[1] "" "Esta contraseña es demasiado corta. Debe contener al menos %(min_length)d " "caracteres." +msgstr[2] "" +"Esta contraseña es demasiado corta. Debe contener al menos %(min_length)d " +"caracteres." #, python-format msgid "Your password must contain at least %(min_length)d character." msgid_plural "Your password must contain at least %(min_length)d characters." msgstr[0] "Su contraseña debe contener al menos %(min_length)d caracter." msgstr[1] "Su contraseña debe contener al menos %(min_length)d caracteres." +msgstr[2] "Su contraseña debe contener al menos %(min_length)d caracteres." #, python-format msgid "The password is too similar to the %(verbose_name)s." 
diff --git a/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.mo b/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.mo index eba5bb8b01..dd8d149d73 100644 Binary files a/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.mo and b/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.po b/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.po index a46794e1b4..7d8846e6a5 100644 --- a/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.po +++ b/django/contrib/auth/locale/pt_BR/LC_MESSAGES/django.po @@ -6,7 +6,7 @@ # amcorreia , 2018 # Camilo B. Moreira , 2017 # Carlos Leite , 2016 -# Filipe Cifali Stangler , 2016 +# Filipe Cifali , 2016 # Claudemiro Alves Feitosa Neto , 2015 # dudanogueira , 2012 # dudanogueira , 2014 @@ -15,6 +15,7 @@ # Fábio C. Barrionuevo da Luz , 2015 # gilberto dos santos alves , 2013 # semente, 2012 +# Guilherme, 2022 # Jannis Leidel , 2011 # Lucas Infante , 2015 # Rafael Fontenelle , 2022 @@ -24,8 +25,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-09-21 10:22+0200\n" -"PO-Revision-Date: 2022-07-25 08:09+0000\n" -"Last-Translator: Rafael Fontenelle \n" +"PO-Revision-Date: 2022-04-25 08:09+0000\n" +"Last-Translator: Guilherme, 2022\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/django/django/" "language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -86,8 +87,8 @@ msgid "" "password, but you can change the password using this form." msgstr "" "Senhas brutas não são armazenadas, então não há como visualizar a senha " -"desse usuário, porém você pode mudar a senha usandoesse form." +"desse usuário, porém você pode mudar a senha usando esse " +"form." #, python-format msgid "" @@ -256,12 +257,16 @@ msgstr[0] "" msgstr[1] "" "Esta senha é muito curta. Ela precisa conter pelo menos %(min_length)d " "caracteres." +msgstr[2] "" +"Esta senha é muito curta. Ela precisa conter pelo menos %(min_length)d " +"caracteres." #, python-format msgid "Your password must contain at least %(min_length)d character." msgid_plural "Your password must contain at least %(min_length)d characters." msgstr[0] "Sua senha precisa conter pelo menos %(min_length)d caracteres." msgstr[1] "Sua senha precisa conter pelo menos %(min_length)d caracteres." +msgstr[2] "Sua senha precisa conter pelo menos %(min_length)d caracteres." #, python-format msgid "The password is too similar to the %(verbose_name)s." 
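Several catalogs in this patch gain a third plural form (the new msgstr[2] entries for Spanish and Brazilian Portuguese), and others only re-wrap their Plural-Forms: header. For context, a small sketch of how gettext turns such a header into the index that selects msgstr[N]; the rule below is the four-form Russian expression quoted verbatim in the Russian catalogs of this patch, and the sample counts are illustrative:

    from gettext import c2py

    # Russian plural rule, as it appears in the Plural-Forms headers above.
    rule = (
        "(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<12 || n%100>14)"
        " ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n%100>=11 && n%100<=14)? 2 : 3)"
    )
    plural = c2py(rule)  # compiles the C-style expression into a Python function

    assert plural(1) == 0   # "%(value)s миллион"   -> msgstr[0]
    assert plural(3) == 1   # "%(value)s миллиона"  -> msgstr[1]
    assert plural(11) == 2  # "%(value)s миллионов" -> msgstr[2]
    assert plural(21) == 0  # back to msgstr[0]
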
diff --git a/django/contrib/auth/locale/ru/LC_MESSAGES/django.mo b/django/contrib/auth/locale/ru/LC_MESSAGES/django.mo index 6fdaf74506..17a60406cb 100644 Binary files a/django/contrib/auth/locale/ru/LC_MESSAGES/django.mo and b/django/contrib/auth/locale/ru/LC_MESSAGES/django.mo differ diff --git a/django/contrib/auth/locale/ru/LC_MESSAGES/django.po b/django/contrib/auth/locale/ru/LC_MESSAGES/django.po index 84eec34dba..1de18477e9 100644 --- a/django/contrib/auth/locale/ru/LC_MESSAGES/django.po +++ b/django/contrib/auth/locale/ru/LC_MESSAGES/django.po @@ -4,26 +4,26 @@ # crazyzubr , 2020 # Ivan Khomutov , 2017 # Jannis Leidel , 2011 -# Алексей Борискин , 2012-2015 +# Алексей Борискин , 2012-2015,2022 # Андрей Щуров , 2016 # Влад Мещеряков , 2021 -# Дмитрий Шатера , 2016 +# Bobsans , 2016 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2019-09-08 17:27+0200\n" -"PO-Revision-Date: 2021-05-02 03:02+0000\n" -"Last-Translator: Влад Мещеряков \n" +"POT-Creation-Date: 2021-09-21 10:22+0200\n" +"PO-Revision-Date: 2022-04-25 08:09+0000\n" +"Last-Translator: Алексей Борискин , 2012-2015,2022\n" "Language-Team: Russian (http://www.transifex.com/django/django/language/" "ru/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || " +"(n%100>=11 && n%100<=14)? 2 : 3);\n" msgid "Personal info" msgstr "Персональная информация" @@ -77,8 +77,8 @@ msgid "" "password, but you can change the password using this form." msgstr "" "Пароли хранятся в зашифрованном виде, поэтому нет возможности посмотреть " -"пароль этого пользователя, но вы можете изменить его используя эту форму." +"пароль этого пользователя, но вы можете изменить его используя эту форму." 
#, python-format msgid "" @@ -142,6 +142,9 @@ msgstr "рабочий фактор" msgid "checksum" msgstr "контрольная сумма" +msgid "block size" +msgstr "размер блока" + msgid "name" msgstr "имя" diff --git a/django/contrib/auth/management/__init__.py b/django/contrib/auth/management/__init__.py index 0b5a982617..ad31a6e68f 100644 --- a/django/contrib/auth/management/__init__.py +++ b/django/contrib/auth/management/__init__.py @@ -95,11 +95,16 @@ def create_permissions( .values_list("content_type", "codename") ) - perms = [ - Permission(codename=codename, name=name, content_type=ct) - for ct, (codename, name) in searched_perms - if (ct.pk, codename) not in all_perms - ] + perms = [] + for ct, (codename, name) in searched_perms: + if (ct.pk, codename) not in all_perms: + permission = Permission() + permission._state.db = using + permission.codename = codename + permission.name = name + permission.content_type = ct + perms.append(permission) + Permission.objects.using(using).bulk_create(perms) if verbosity >= 2: for perm in perms: diff --git a/django/contrib/contenttypes/fields.py b/django/contrib/contenttypes/fields.py index bb93aa5d1e..35fcd0d908 100644 --- a/django/contrib/contenttypes/fields.py +++ b/django/contrib/contenttypes/fields.py @@ -2,6 +2,8 @@ import functools import itertools from collections import defaultdict +from asgiref.sync import sync_to_async + from django.contrib.contenttypes.models import ContentType from django.core import checks from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist @@ -16,6 +18,7 @@ from django.db.models.fields.related import ( from django.db.models.query_utils import PathInfo from django.db.models.sql import AND from django.db.models.sql.where import WhereNode +from django.db.models.utils import AltersData from django.utils.functional import cached_property @@ -458,7 +461,7 @@ class GenericRelation(ForeignObject): to_opts=opts, target_fields=(opts.pk,), join_field=self, - m2m=not self.unique, + m2m=False, direct=False, filtered_relation=filtered_relation, ) @@ -560,7 +563,7 @@ def create_generic_related_manager(superclass, rel): specific to generic relations. 
""" - class GenericRelatedObjectManager(superclass): + class GenericRelatedObjectManager(superclass, AltersData): def __init__(self, instance=None): super().__init__() @@ -686,6 +689,11 @@ def create_generic_related_manager(superclass, rel): add.alters_data = True + async def aadd(self, *objs, bulk=True): + return await sync_to_async(self.add)(*objs, bulk=bulk) + + aadd.alters_data = True + def remove(self, *objs, bulk=True): if not objs: return @@ -693,11 +701,21 @@ def create_generic_related_manager(superclass, rel): remove.alters_data = True + async def aremove(self, *objs, bulk=True): + return await sync_to_async(self.remove)(*objs, bulk=bulk) + + aremove.alters_data = True + def clear(self, *, bulk=True): self._clear(self, bulk) clear.alters_data = True + async def aclear(self, *, bulk=True): + return await sync_to_async(self.clear)(bulk=bulk) + + aclear.alters_data = True + def _clear(self, queryset, bulk): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) @@ -737,6 +755,11 @@ def create_generic_related_manager(superclass, rel): set.alters_data = True + async def aset(self, objs, *, bulk=True, clear=False): + return await sync_to_async(self.set)(objs, bulk=bulk, clear=clear) + + aset.alters_data = True + def create(self, **kwargs): self._remove_prefetched_objects() kwargs[self.content_type_field_name] = self.content_type @@ -746,6 +769,11 @@ def create_generic_related_manager(superclass, rel): create.alters_data = True + async def acreate(self, **kwargs): + return await sync_to_async(self.create)(**kwargs) + + acreate.alters_data = True + def get_or_create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val @@ -754,6 +782,11 @@ def create_generic_related_manager(superclass, rel): get_or_create.alters_data = True + async def aget_or_create(self, **kwargs): + return await sync_to_async(self.get_or_create)(**kwargs) + + aget_or_create.alters_data = True + def update_or_create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val @@ -762,4 +795,9 @@ def create_generic_related_manager(superclass, rel): update_or_create.alters_data = True + async def aupdate_or_create(self, **kwargs): + return await sync_to_async(self.update_or_create)(**kwargs) + + aupdate_or_create.alters_data = True + return GenericRelatedObjectManager diff --git a/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.mo b/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.mo index a209db2496..152620e64e 100644 Binary files a/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.mo and b/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.mo differ diff --git a/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.po b/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.po index 17c9e158d1..f64e0face6 100644 --- a/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.po +++ b/django/contrib/contenttypes/locale/eo/LC_MESSAGES/django.po @@ -1,14 +1,15 @@ # This file is distributed under the same license as the Django package. 
# # Translators: -# Baptiste Darthenay , 2014 +# Batist D 🐍 , 2014 +# Meiyer , 2022 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-01-17 11:07+0100\n" -"PO-Revision-Date: 2017-09-19 16:40+0000\n" -"Last-Translator: Jannis Leidel \n" +"POT-Creation-Date: 2019-09-08 17:27+0200\n" +"PO-Revision-Date: 2022-04-24 19:22+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -31,12 +32,12 @@ msgstr "enhavaj tipoj" #, python-format msgid "Content type %(ct_id)s object has no associated model" -msgstr "Objekto kun enhava tipo %(ct_id)s ne havas asociitaj modeloj" +msgstr "Objekto kun enhava tipo %(ct_id)s ne havas modelojn asociitajn kun ĝi" #, python-format -msgid "Content type %(ct_id)s object %(obj_id)s doesn't exist" +msgid "Content type %(ct_id)s object %(obj_id)s doesn’t exist" msgstr "Objekto %(obj_id)s kun enhava tipo %(ct_id)s ne ekzistas" #, python-format -msgid "%(ct_name)s objects don't have a get_absolute_url() method" -msgstr " %(ct_name)s objektoj ne havas metodon get_absolute_url()" +msgid "%(ct_name)s objects don’t have a get_absolute_url() method" +msgstr "Objektoj %(ct_name)s ne havas metodon get_absolute_url()" diff --git a/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.mo b/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.mo index 19f2a2d62d..16e74c30a1 100644 Binary files a/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.mo and b/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.mo differ diff --git a/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.po b/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.po index 151f971ba4..7a661721a6 100644 --- a/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.po +++ b/django/contrib/flatpages/locale/eo/LC_MESSAGES/django.po @@ -4,14 +4,15 @@ # Batist D 🐍 , 2011-2012 # Batist D 🐍 , 2014-2015,2017,2019 # Matthieu Desplantes , 2021 -# Robin van der Vliet , 2019 +# Meiyer , 2022 +# Robin van der Vliet , 2019 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2019-09-08 17:27+0200\n" -"PO-Revision-Date: 2021-04-13 08:09+0000\n" -"Last-Translator: Matthieu Desplantes \n" +"PO-Revision-Date: 2022-04-24 19:03+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -39,17 +40,17 @@ msgid "" "This value must contain only letters, numbers, dots, underscores, dashes, " "slashes or tildes." msgstr "" -"Ĉi tiu valoro devus enhavi sole leterojn, nombrojn, punktojn, substrekoj, " -"haltostrekoj, oblikvoj aŭ tildoj." +"Ĉi tiu valoro devus enhavi sole literojn, ciferojn, punktojn, substrekojn, " +"haltostrekojn, oblikvajn strekojn, aŭ tildojn." msgid "Example: “/about/contact”. Make sure to have a leading slash." msgstr "Ekzemple: “/about/contact”. Certigu, ke estas suprenstreko komence." msgid "URL is missing a leading slash." -msgstr "La streka signo \"/\" ne ĉeestas en komenco de ĉeno." +msgstr "La strek-signo ‘/’ ne ĉeestas en la komenco de URL." msgid "URL is missing a trailing slash." -msgstr "La streka signo \"/\" ne ĉeestas en fino de ĉeno." +msgstr "La strek-signo ‘/’ ne ĉeestas en la fino de URL." 
#, python-format msgid "Flatpage with url %(url)s already exists for site %(site)s" @@ -62,22 +63,25 @@ msgid "content" msgstr "enhavo" msgid "enable comments" -msgstr "ebligu rimarkoj" +msgstr "ebligi rimarkojn" msgid "template name" -msgstr "ŝablono nomo" +msgstr "nomo de ŝablono" msgid "" "Example: “flatpages/contact_page.html”. If this isn’t provided, the system " "will use “flatpages/default.html”." msgstr "" +"Ekzemple: “flatpages/contact_page.html”. Se la ŝablono ne estas indikita, " +"estos uzata “flatpages/default.html”." msgid "registration required" -msgstr "registrado postulita" +msgstr "registriĝo postulita" msgid "If this is checked, only logged-in users will be able to view the page." msgstr "" -"Se ĉi tio estus elektita, nur ensalutitaj uzantoj povus rigardi la paĝon." +"Kiam la marko-butono estas elektita, nur ensalutintaj uzantoj povas rigardi " +"la paĝon." msgid "sites" msgstr "retejoj" diff --git a/django/contrib/gis/db/backends/base/operations.py b/django/contrib/gis/db/backends/base/operations.py index e7bffb11b4..f6eaf8f503 100644 --- a/django/contrib/gis/db/backends/base/operations.py +++ b/django/contrib/gis/db/backends/base/operations.py @@ -48,6 +48,7 @@ class BaseSpatialOperations: "GeoHash", "GeometryDistance", "Intersection", + "IsEmpty", "IsValid", "Length", "LineLocatePoint", diff --git a/django/contrib/gis/db/backends/mysql/operations.py b/django/contrib/gis/db/backends/mysql/operations.py index 6d04874537..46d8fc5d6a 100644 --- a/django/contrib/gis/db/backends/mysql/operations.py +++ b/django/contrib/gis/db/backends/mysql/operations.py @@ -72,6 +72,7 @@ class MySQLOperations(BaseSpatialOperations, DatabaseOperations): "BoundingCircle", "ForcePolygonCW", "GeometryDistance", + "IsEmpty", "LineLocatePoint", "MakeValid", "MemSize", diff --git a/django/contrib/gis/db/backends/oracle/operations.py b/django/contrib/gis/db/backends/oracle/operations.py index ba7e3ca4d8..aefb2f74e2 100644 --- a/django/contrib/gis/db/backends/oracle/operations.py +++ b/django/contrib/gis/db/backends/oracle/operations.py @@ -122,6 +122,7 @@ class OracleOperations(BaseSpatialOperations, DatabaseOperations): "ForcePolygonCW", "GeoHash", "GeometryDistance", + "IsEmpty", "LineLocatePoint", "MakeValid", "MemSize", diff --git a/django/contrib/gis/db/backends/postgis/adapter.py b/django/contrib/gis/db/backends/postgis/adapter.py index 20b0327d5c..c95f903253 100644 --- a/django/contrib/gis/db/backends/postgis/adapter.py +++ b/django/contrib/gis/db/backends/postgis/adapter.py @@ -1,11 +1,9 @@ """ This object provides quoting for GEOS geometries into PostgreSQL/PostGIS. """ -from psycopg2 import Binary -from psycopg2.extensions import ISQLQuote - from django.contrib.gis.db.backends.postgis.pgraster import to_pgraster from django.contrib.gis.geos import GEOSGeometry +from django.db.backends.postgresql.psycopg_any import sql class PostGISAdapter: @@ -19,7 +17,6 @@ class PostGISAdapter: # the adaptor) and the SRID from the geometry or raster. 
if self.is_geometry: self.ewkb = bytes(obj.ewkb) - self._adapter = Binary(self.ewkb) else: self.ewkb = to_pgraster(obj) @@ -28,6 +25,8 @@ class PostGISAdapter: def __conform__(self, proto): """Does the given protocol conform to what Psycopg2 expects?""" + from psycopg2.extensions import ISQLQuote + if proto == ISQLQuote: return self else: @@ -48,14 +47,6 @@ class PostGISAdapter: def _fix_polygon(cls, poly): return poly - def prepare(self, conn): - """ - This method allows escaping the binary in the style required by the - server's `standard_conforming_string` setting. - """ - if self.is_geometry: - self._adapter.prepare(conn) - def getquoted(self): """ Return a properly quoted string for use in PostgreSQL/PostGIS. @@ -64,8 +55,8 @@ class PostGISAdapter: # Psycopg will figure out whether to use E'\\000' or '\000'. return b"%s(%s)" % ( b"ST_GeogFromWKB" if self.geography else b"ST_GeomFromEWKB", - self._adapter.getquoted(), + sql.quote(self.ewkb).encode(), ) else: # For rasters, add explicit type cast to WKB string. - return b"'%s'::raster" % self.ewkb.encode() + return b"'%s'::raster" % self.ewkb.hex().encode() diff --git a/django/contrib/gis/db/backends/postgis/base.py b/django/contrib/gis/db/backends/postgis/base.py index 98c2813aa2..23ec0553f8 100644 --- a/django/contrib/gis/db/backends/postgis/base.py +++ b/django/contrib/gis/db/backends/postgis/base.py @@ -1,17 +1,93 @@ -from django.db.backends.base.base import NO_DB_ALIAS -from django.db.backends.postgresql.base import ( - DatabaseWrapper as Psycopg2DatabaseWrapper, -) +from functools import lru_cache +from django.db.backends.base.base import NO_DB_ALIAS +from django.db.backends.postgresql.base import DatabaseWrapper as PsycopgDatabaseWrapper +from django.db.backends.postgresql.psycopg_any import is_psycopg3 + +from .adapter import PostGISAdapter from .features import DatabaseFeatures from .introspection import PostGISIntrospection from .operations import PostGISOperations from .schema import PostGISSchemaEditor +if is_psycopg3: + from psycopg.adapt import Dumper + from psycopg.pq import Format + from psycopg.types import TypeInfo + from psycopg.types.string import TextBinaryLoader, TextLoader -class DatabaseWrapper(Psycopg2DatabaseWrapper): + class GeometryType: + pass + + class GeographyType: + pass + + class RasterType: + pass + + class BaseTextDumper(Dumper): + def dump(self, obj): + # Return bytes as hex for text formatting + return obj.ewkb.hex().encode() + + class BaseBinaryDumper(Dumper): + format = Format.BINARY + + def dump(self, obj): + return obj.ewkb + + @lru_cache + def postgis_adapters(geo_oid, geog_oid, raster_oid): + class BaseDumper(Dumper): + def __init_subclass__(cls, base_dumper): + super().__init_subclass__() + + cls.GeometryDumper = type( + "GeometryDumper", (base_dumper,), {"oid": geo_oid} + ) + cls.GeographyDumper = type( + "GeographyDumper", (base_dumper,), {"oid": geog_oid} + ) + cls.RasterDumper = type( + "RasterDumper", (BaseTextDumper,), {"oid": raster_oid} + ) + + def get_key(self, obj, format): + if obj.is_geometry: + return GeographyType if obj.geography else GeometryType + else: + return RasterType + + def upgrade(self, obj, format): + if obj.is_geometry: + if obj.geography: + return self.GeographyDumper(GeographyType) + else: + return self.GeometryDumper(GeometryType) + else: + return self.RasterDumper(RasterType) + + def dump(self, obj): + raise NotImplementedError + + class PostGISTextDumper(BaseDumper, base_dumper=BaseTextDumper): + pass + + class PostGISBinaryDumper(BaseDumper, 
base_dumper=BaseBinaryDumper): + format = Format.BINARY + + return PostGISTextDumper, PostGISBinaryDumper + + +class DatabaseWrapper(PsycopgDatabaseWrapper): SchemaEditorClass = PostGISSchemaEditor + _type_infos = { + "geometry": {}, + "geography": {}, + "raster": {}, + } + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if kwargs.get("alias", "") != NO_DB_ALIAS: @@ -27,3 +103,45 @@ class DatabaseWrapper(Psycopg2DatabaseWrapper): if bool(cursor.fetchone()): return cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis") + if is_psycopg3: + # Ensure adapters are registers if PostGIS is used within this + # connection. + self.register_geometry_adapters(self.connection, True) + + def get_new_connection(self, conn_params): + connection = super().get_new_connection(conn_params) + if is_psycopg3: + self.register_geometry_adapters(connection) + return connection + + if is_psycopg3: + + def _register_type(self, pg_connection, typename): + registry = self._type_infos[typename] + try: + info = registry[self.alias] + except KeyError: + info = TypeInfo.fetch(pg_connection, typename) + registry[self.alias] = info + + if info: # Can be None if the type does not exist (yet). + info.register(pg_connection) + pg_connection.adapters.register_loader(info.oid, TextLoader) + pg_connection.adapters.register_loader(info.oid, TextBinaryLoader) + + return info.oid if info else None + + def register_geometry_adapters(self, pg_connection, clear_caches=False): + if clear_caches: + for typename in self._type_infos: + self._type_infos[typename].pop(self.alias, None) + + geo_oid = self._register_type(pg_connection, "geometry") + geog_oid = self._register_type(pg_connection, "geography") + raster_oid = self._register_type(pg_connection, "raster") + + PostGISTextDumper, PostGISBinaryDumper = postgis_adapters( + geo_oid, geog_oid, raster_oid + ) + pg_connection.adapters.register_dumper(PostGISAdapter, PostGISTextDumper) + pg_connection.adapters.register_dumper(PostGISAdapter, PostGISBinaryDumper) diff --git a/django/contrib/gis/db/backends/postgis/features.py b/django/contrib/gis/db/backends/postgis/features.py index 29a1079631..d96e939db3 100644 --- a/django/contrib/gis/db/backends/postgis/features.py +++ b/django/contrib/gis/db/backends/postgis/features.py @@ -1,10 +1,10 @@ from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures from django.db.backends.postgresql.features import ( - DatabaseFeatures as Psycopg2DatabaseFeatures, + DatabaseFeatures as PsycopgDatabaseFeatures, ) -class DatabaseFeatures(BaseSpatialFeatures, Psycopg2DatabaseFeatures): +class DatabaseFeatures(BaseSpatialFeatures, PsycopgDatabaseFeatures): supports_geography = True supports_3d_storage = True supports_3d_functions = True diff --git a/django/contrib/gis/db/backends/postgis/operations.py b/django/contrib/gis/db/backends/postgis/operations.py index 31ad31e2e5..070f670a0b 100644 --- a/django/contrib/gis/db/backends/postgis/operations.py +++ b/django/contrib/gis/db/backends/postgis/operations.py @@ -11,6 +11,7 @@ from django.contrib.gis.measure import Distance from django.core.exceptions import ImproperlyConfigured from django.db import NotSupportedError, ProgrammingError from django.db.backends.postgresql.operations import DatabaseOperations +from django.db.backends.postgresql.psycopg_any import is_psycopg3 from django.db.models import Func, Value from django.utils.functional import cached_property from django.utils.version import get_version_tuple @@ -161,7 +162,8 @@ class 
PostGISOperations(BaseSpatialOperations, DatabaseOperations): unsupported_functions = set() - select = "%s::bytea" + select = "%s" if is_psycopg3 else "%s::bytea" + select_extent = None @cached_property @@ -407,6 +409,8 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations): geom_class = expression.output_field.geom_class def converter(value, expression, connection): + if isinstance(value, str): # Coming from hex strings. + value = value.encode("ascii") return None if value is None else GEOSGeometryBase(read(value), geom_class) return converter diff --git a/django/contrib/gis/db/backends/postgis/pgraster.py b/django/contrib/gis/db/backends/postgis/pgraster.py index 22794342ca..c231b4863f 100644 --- a/django/contrib/gis/db/backends/postgis/pgraster.py +++ b/django/contrib/gis/db/backends/postgis/pgraster.py @@ -149,5 +149,4 @@ def to_pgraster(rast): # Add packed header and band data to result result += bandheader + band.data(as_memoryview=True) - # Convert raster to hex string before passing it to the DB. - return result.hex() + return result diff --git a/django/contrib/gis/db/backends/postgis/schema.py b/django/contrib/gis/db/backends/postgis/schema.py index 77a9096ef4..5464c85cf6 100644 --- a/django/contrib/gis/db/backends/postgis/schema.py +++ b/django/contrib/gis/db/backends/postgis/schema.py @@ -50,12 +50,16 @@ class PostGISSchemaEditor(DatabaseSchemaEditor): expressions=expressions, ) - def _alter_column_type_sql(self, table, old_field, new_field, new_type): + def _alter_column_type_sql( + self, table, old_field, new_field, new_type, old_collation, new_collation + ): """ Special case when dimension changed. """ if not hasattr(old_field, "dim") or not hasattr(new_field, "dim"): - return super()._alter_column_type_sql(table, old_field, new_field, new_type) + return super()._alter_column_type_sql( + table, old_field, new_field, new_type, old_collation, new_collation + ) if old_field.dim == 2 and new_field.dim == 3: sql_alter = self.sql_alter_column_to_3d @@ -69,6 +73,7 @@ class PostGISSchemaEditor(DatabaseSchemaEditor): % { "column": self.quote_name(new_field.column), "type": new_type, + "collation": "", }, [], ), diff --git a/django/contrib/gis/db/backends/spatialite/operations.py b/django/contrib/gis/db/backends/spatialite/operations.py index 8003fcb6c6..47f3d4ca75 100644 --- a/django/contrib/gis/db/backends/spatialite/operations.py +++ b/django/contrib/gis/db/backends/spatialite/operations.py @@ -78,7 +78,7 @@ class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations): @cached_property def unsupported_functions(self): - unsupported = {"BoundingCircle", "GeometryDistance", "MemSize"} + unsupported = {"BoundingCircle", "GeometryDistance", "IsEmpty", "MemSize"} if not self.geom_lib_version(): unsupported |= {"Azimuth", "GeoHash", "MakeValid"} return unsupported diff --git a/django/contrib/gis/db/models/functions.py b/django/contrib/gis/db/models/functions.py index 5f6c7b5cfd..f97c540a1a 100644 --- a/django/contrib/gis/db/models/functions.py +++ b/django/contrib/gis/db/models/functions.py @@ -381,6 +381,12 @@ class Intersection(OracleToleranceMixin, GeomOutputGeoFunc): geom_param_pos = (0, 1) +@BaseSpatialField.register_lookup +class IsEmpty(GeoFuncMixin, Transform): + lookup_name = "isempty" + output_field = BooleanField() + + @BaseSpatialField.register_lookup class IsValid(OracleToleranceMixin, GeoFuncMixin, Transform): lookup_name = "isvalid" diff --git a/django/contrib/gis/forms/widgets.py b/django/contrib/gis/forms/widgets.py index 5f169e9cb3..49ca48794b 
100644 --- a/django/contrib/gis/forms/widgets.py +++ b/django/contrib/gis/forms/widgets.py @@ -102,12 +102,12 @@ class OpenLayersWidget(BaseGeometryWidget): class Media: css = { "all": ( - "https://cdnjs.cloudflare.com/ajax/libs/ol3/4.6.5/ol.css", + "https://cdn.jsdelivr.net/npm/ol@v7.2.2/ol.css", "gis/css/ol3.css", ) } js = ( - "https://cdnjs.cloudflare.com/ajax/libs/ol3/4.6.5/ol.js", + "https://cdn.jsdelivr.net/npm/ol@v7.2.2/dist/ol.js", "gis/js/OLMapWidget.js", ) diff --git a/django/contrib/gis/gdal/libgdal.py b/django/contrib/gis/gdal/libgdal.py index 15639d1d83..225b4ff20d 100644 --- a/django/contrib/gis/gdal/libgdal.py +++ b/django/contrib/gis/gdal/libgdal.py @@ -22,6 +22,7 @@ if lib_path: elif os.name == "nt": # Windows NT shared libraries lib_names = [ + "gdal306", "gdal305", "gdal304", "gdal303", @@ -37,6 +38,7 @@ elif os.name == "posix": lib_names = [ "gdal", "GDAL", + "gdal3.6.0", "gdal3.5.0", "gdal3.4.0", "gdal3.3.0", diff --git a/django/contrib/gis/static/gis/js/OLMapWidget.js b/django/contrib/gis/static/gis/js/OLMapWidget.js index fe944a4a87..b750327409 100644 --- a/django/contrib/gis/static/gis/js/OLMapWidget.js +++ b/django/contrib/gis/static/gis/js/OLMapWidget.js @@ -1,39 +1,40 @@ /* global ol */ 'use strict'; -function GeometryTypeControl(opt_options) { +class GeometryTypeControl extends ol.control.Control { // Map control to switch type when geometry type is unknown - const options = opt_options || {}; + constructor(opt_options) { + const options = opt_options || {}; - const element = document.createElement('div'); - element.className = 'switch-type type-' + options.type + ' ol-control ol-unselectable'; - if (options.active) { - element.classList.add("type-active"); - } - - const self = this; - const switchType = function(e) { - e.preventDefault(); - if (options.widget.currentGeometryType !== self) { - options.widget.map.removeInteraction(options.widget.interactions.draw); - options.widget.interactions.draw = new ol.interaction.Draw({ - features: options.widget.featureCollection, - type: options.type - }); - options.widget.map.addInteraction(options.widget.interactions.draw); - options.widget.currentGeometryType.element.classList.remove('type-active'); - options.widget.currentGeometryType = self; + const element = document.createElement('div'); + element.className = 'switch-type type-' + options.type + ' ol-control ol-unselectable'; + if (options.active) { element.classList.add("type-active"); } - }; - element.addEventListener('click', switchType, false); - element.addEventListener('touchstart', switchType, false); + super({ + element: element, + target: options.target + }); + const self = this; + const switchType = function(e) { + e.preventDefault(); + if (options.widget.currentGeometryType !== self) { + options.widget.map.removeInteraction(options.widget.interactions.draw); + options.widget.interactions.draw = new ol.interaction.Draw({ + features: options.widget.featureCollection, + type: options.type + }); + options.widget.map.addInteraction(options.widget.interactions.draw); + options.widget.currentGeometryType.element.classList.remove('type-active'); + options.widget.currentGeometryType = self; + element.classList.add("type-active"); + } + }; - ol.control.Control.call(this, { - element: element - }); -}; -ol.inherits(GeometryTypeControl, ol.control.Control); + element.addEventListener('click', switchType, false); + element.addEventListener('touchstart', switchType, false); + } +} // TODO: allow deleting individual features (#8972) class MapWidget { diff --git 
a/django/contrib/humanize/locale/es/LC_MESSAGES/django.mo b/django/contrib/humanize/locale/es/LC_MESSAGES/django.mo index 36238b3a4e..8946e3a017 100644 Binary files a/django/contrib/humanize/locale/es/LC_MESSAGES/django.mo and b/django/contrib/humanize/locale/es/LC_MESSAGES/django.mo differ diff --git a/django/contrib/humanize/locale/es/LC_MESSAGES/django.po b/django/contrib/humanize/locale/es/LC_MESSAGES/django.po index 8010bf220e..b4faf1842e 100644 --- a/django/contrib/humanize/locale/es/LC_MESSAGES/django.po +++ b/django/contrib/humanize/locale/es/LC_MESSAGES/django.po @@ -15,8 +15,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-04-07 14:40+0200\n" -"PO-Revision-Date: 2021-11-10 04:02+0000\n" -"Last-Translator: Uriel Medina \n" +"PO-Revision-Date: 2022-04-24 18:40+0000\n" +"Last-Translator: Uriel Medina , 2020-2021\n" "Language-Team: Spanish (http://www.transifex.com/django/django/language/" "es/)\n" "MIME-Version: 1.0\n" @@ -88,66 +88,77 @@ msgid "%(value)s million" msgid_plural "%(value)s million" msgstr[0] "%(value)s millon" msgstr[1] "%(value)s millones" +msgstr[2] "%(value)s millones" #, python-format msgid "%(value)s billion" msgid_plural "%(value)s billion" msgstr[0] "%(value)s millardo" msgstr[1] "%(value)s millardos" +msgstr[2] "%(value)s millardos" #, python-format msgid "%(value)s trillion" msgid_plural "%(value)s trillion" msgstr[0] "%(value)s billón" msgstr[1] "%(value)s billones" +msgstr[2] "%(value)s billones" #, python-format msgid "%(value)s quadrillion" msgid_plural "%(value)s quadrillion" msgstr[0] "%(value)s billardos" msgstr[1] "%(value)s billardos" +msgstr[2] "%(value)s billardos" #, python-format msgid "%(value)s quintillion" msgid_plural "%(value)s quintillion" msgstr[0] "%(value)s trillón" msgstr[1] "%(value)s trillones" +msgstr[2] "%(value)s trillones" #, python-format msgid "%(value)s sextillion" msgid_plural "%(value)s sextillion" msgstr[0] "%(value)s trillardo" msgstr[1] "%(value)s trillardos" +msgstr[2] "%(value)s trillardos" #, python-format msgid "%(value)s septillion" msgid_plural "%(value)s septillion" msgstr[0] "%(value)s cuatrillón" msgstr[1] "%(value)s cuatrillones" +msgstr[2] "%(value)s cuatrillones" #, python-format msgid "%(value)s octillion" msgid_plural "%(value)s octillion" msgstr[0] "%(value)s cuatrillardo" msgstr[1] "%(value)s cuatrillardos" +msgstr[2] "%(value)s cuatrillardos" #, python-format msgid "%(value)s nonillion" msgid_plural "%(value)s nonillion" msgstr[0] "%(value)s quintillón" msgstr[1] "%(value)s quintillones" +msgstr[2] "%(value)s quintillones" #, python-format msgid "%(value)s decillion" msgid_plural "%(value)s decillion" msgstr[0] "%(value)s quintillardo" msgstr[1] "%(value)s quintillardos" +msgstr[2] "%(value)s quintillardos" #, python-format msgid "%(value)s googol" msgid_plural "%(value)s googol" msgstr[0] " %(value)s googol" msgstr[1] " %(value)s gúgoles" +msgstr[2] " %(value)s gúgoles" msgid "one" msgstr "uno" @@ -198,6 +209,7 @@ msgid "an hour ago" msgid_plural "%(count)s hours ago" msgstr[0] "hace una hora" msgstr[1] "hace %(count)s horas" +msgstr[2] "hace %(count)s horas" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -206,6 +218,7 @@ msgid "a minute ago" msgid_plural "%(count)s minutes ago" msgstr[0] "hace un minuto" msgstr[1] "hace %(count)s minutos" +msgstr[2] "hace %(count)s minutos" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. 
@@ -214,6 +227,7 @@ msgid "a second ago" msgid_plural "%(count)s seconds ago" msgstr[0] "hace un segundo" msgstr[1] "hace %(count)s segundos" +msgstr[2] "hace %(count)s segundos" msgid "now" msgstr "ahora" @@ -225,6 +239,7 @@ msgid "a second from now" msgid_plural "%(count)s seconds from now" msgstr[0] "un segundo a partir de ahora" msgstr[1] "%(count)s segundos a partir de ahora" +msgstr[2] "%(count)s segundos a partir de ahora" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -233,6 +248,7 @@ msgid "a minute from now" msgid_plural "%(count)s minutes from now" msgstr[0] "un minuto a partir de ahora" msgstr[1] "%(count)s minutos a partir de ahora" +msgstr[2] "%(count)s minutos a partir de ahora" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -241,6 +257,7 @@ msgid "an hour from now" msgid_plural "%(count)s hours from now" msgstr[0] "una hora a partir de ahora" msgstr[1] "%(count)s horas a partir de ahora" +msgstr[2] "%(count)s horas a partir de ahora" #. Translators: delta will contain a string like '2 months' or '1 month, 2 #. weeks' @@ -255,6 +272,7 @@ msgid "%(num)d year" msgid_plural "%(num)d years" msgstr[0] "%(num)d año" msgstr[1] "%(num)d años" +msgstr[2] "%(num)d años" #, python-format msgctxt "naturaltime-past" @@ -262,6 +280,7 @@ msgid "%(num)d month" msgid_plural "%(num)d months" msgstr[0] "%(num)d mes" msgstr[1] "%(num)d meses" +msgstr[2] "%(num)d meses" #, python-format msgctxt "naturaltime-past" @@ -269,6 +288,7 @@ msgid "%(num)d week" msgid_plural "%(num)d weeks" msgstr[0] "%(num)d semana" msgstr[1] "%(num)d semanas" +msgstr[2] "%(num)d semanas" #, python-format msgctxt "naturaltime-past" @@ -276,6 +296,7 @@ msgid "%(num)d day" msgid_plural "%(num)d days" msgstr[0] "%(num)d día" msgstr[1] "%(num)d días" +msgstr[2] "%(num)d días" #, python-format msgctxt "naturaltime-past" @@ -283,6 +304,7 @@ msgid "%(num)d hour" msgid_plural "%(num)d hours" msgstr[0] "%(num)d hora" msgstr[1] "%(num)d horas" +msgstr[2] "%(num)d horas" #, python-format msgctxt "naturaltime-past" @@ -290,6 +312,7 @@ msgid "%(num)d minute" msgid_plural "%(num)d minutes" msgstr[0] "%(num)d minuto" msgstr[1] "%(num)d minutos" +msgstr[2] "%(num)d minutos" #. Translators: 'naturaltime-future' strings will be included in '%(delta)s #. 
from now' @@ -299,6 +322,7 @@ msgid "%(num)d year" msgid_plural "%(num)d years" msgstr[0] "%(num)d año" msgstr[1] "%(num)d años" +msgstr[2] "%(num)d años" #, python-format msgctxt "naturaltime-future" @@ -306,6 +330,7 @@ msgid "%(num)d month" msgid_plural "%(num)d months" msgstr[0] "%(num)d mes" msgstr[1] "%(num)d meses" +msgstr[2] "%(num)d meses" #, python-format msgctxt "naturaltime-future" @@ -313,6 +338,7 @@ msgid "%(num)d week" msgid_plural "%(num)d weeks" msgstr[0] "%(num)d semana" msgstr[1] "%(num)d semanas" +msgstr[2] "%(num)d semanas" #, python-format msgctxt "naturaltime-future" @@ -320,6 +346,7 @@ msgid "%(num)d day" msgid_plural "%(num)d days" msgstr[0] "%(num)d día" msgstr[1] "%(num)d días" +msgstr[2] "%(num)d días" #, python-format msgctxt "naturaltime-future" @@ -327,6 +354,7 @@ msgid "%(num)d hour" msgid_plural "%(num)d hours" msgstr[0] "%(num)d hora" msgstr[1] "%(num)d horas" +msgstr[2] "%(num)d horas" #, python-format msgctxt "naturaltime-future" @@ -334,3 +362,4 @@ msgid "%(num)d minute" msgid_plural "%(num)d minutes" msgstr[0] "%(num)d minuto" msgstr[1] "%(num)d minutos" +msgstr[2] "%(num)d minutos" diff --git a/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.mo b/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.mo index fc6a798e04..032187d81c 100644 Binary files a/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.mo and b/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.po b/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.po index 2d2530e0f4..5c178dd379 100644 --- a/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.po +++ b/django/contrib/humanize/locale/pt_BR/LC_MESSAGES/django.po @@ -14,8 +14,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-04-07 14:40+0200\n" -"PO-Revision-Date: 2022-07-24 18:40+0000\n" -"Last-Translator: Rafael Fontenelle \n" +"PO-Revision-Date: 2022-04-24 18:40+0000\n" +"Last-Translator: Rafael Fontenelle , 2022\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/django/django/" "language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -87,66 +87,77 @@ msgid "%(value)s million" msgid_plural "%(value)s million" msgstr[0] "%(value)s milhão" msgstr[1] "%(value)s milhões" +msgstr[2] "%(value)s milhões" #, python-format msgid "%(value)s billion" msgid_plural "%(value)s billion" msgstr[0] "%(value)s bilhão" msgstr[1] "%(value)s bilhões" +msgstr[2] "%(value)s bilhões" #, python-format msgid "%(value)s trillion" msgid_plural "%(value)s trillion" msgstr[0] "%(value)s trilhão" msgstr[1] "%(value)s trilhões" +msgstr[2] "%(value)s trilhões" #, python-format msgid "%(value)s quadrillion" msgid_plural "%(value)s quadrillion" msgstr[0] "%(value)s quadrilhão" msgstr[1] "%(value)s quadrilhões" +msgstr[2] "%(value)s quadrilhões" #, python-format msgid "%(value)s quintillion" msgid_plural "%(value)s quintillion" msgstr[0] "%(value)s quintilhão" msgstr[1] "%(value)s quintilhões" +msgstr[2] "%(value)s quintilhões" #, python-format msgid "%(value)s sextillion" msgid_plural "%(value)s sextillion" msgstr[0] "%(value)s sextilhão" msgstr[1] "%(value)s sextilhões" +msgstr[2] "%(value)s sextilhões" #, python-format msgid "%(value)s septillion" msgid_plural "%(value)s septillion" msgstr[0] "%(value)s septilhão" msgstr[1] "%(value)s septilhões" +msgstr[2] "%(value)s septilhões" #, python-format msgid "%(value)s octillion" msgid_plural "%(value)s octillion" msgstr[0] "%(value)s octilhão" msgstr[1] "%(value)s 
octilhões" +msgstr[2] "%(value)s octilhões" #, python-format msgid "%(value)s nonillion" msgid_plural "%(value)s nonillion" msgstr[0] "%(value)s nonilhão" msgstr[1] "%(value)s nonilhões" +msgstr[2] "%(value)s nonilhões" #, python-format msgid "%(value)s decillion" msgid_plural "%(value)s decillion" msgstr[0] "%(value)s decilhão" msgstr[1] "%(value)s decilhões" +msgstr[2] "%(value)s decilhões" #, python-format msgid "%(value)s googol" msgid_plural "%(value)s googol" msgstr[0] "%(value)s googol" msgstr[1] "%(value)s googol" +msgstr[2] "%(value)s googol" msgid "one" msgstr "um" @@ -197,6 +208,7 @@ msgid "an hour ago" msgid_plural "%(count)s hours ago" msgstr[0] "uma hora atrás" msgstr[1] "%(count)s horas atrás" +msgstr[2] "%(count)s horas atrás" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -205,6 +217,7 @@ msgid "a minute ago" msgid_plural "%(count)s minutes ago" msgstr[0] "um minuto atrás" msgstr[1] "%(count)s minutos atrás" +msgstr[2] "%(count)s minutos atrás" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -213,6 +226,7 @@ msgid "a second ago" msgid_plural "%(count)s seconds ago" msgstr[0] "um segundo atrás" msgstr[1] "%(count)s segundos atrás" +msgstr[2] "%(count)s segundos atrás" msgid "now" msgstr "agora" @@ -224,6 +238,7 @@ msgid "a second from now" msgid_plural "%(count)s seconds from now" msgstr[0] "um segundo a partir de agora" msgstr[1] "%(count)s segundos a partir de agora" +msgstr[2] "%(count)s segundos a partir de agora" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -232,6 +247,7 @@ msgid "a minute from now" msgid_plural "%(count)s minutes from now" msgstr[0] "um minuto a partir de agora" msgstr[1] "%(count)s minutos a partir de agora" +msgstr[2] "%(count)s minutos a partir de agora" #. Translators: please keep a non-breaking space (U+00A0) between count #. and time unit. @@ -240,6 +256,7 @@ msgid "an hour from now" msgid_plural "%(count)s hours from now" msgstr[0] "uma hora a partir de agora" msgstr[1] "%(count)s horas a partir de agora" +msgstr[2] "%(count)s horas a partir de agora" #. Translators: delta will contain a string like '2 months' or '1 month, 2 #. weeks' @@ -254,6 +271,7 @@ msgid "%(num)d year" msgid_plural "%(num)d years" msgstr[0] "%(num)d ano" msgstr[1] "%(num)d anos" +msgstr[2] "%(num)d anos" #, python-format msgctxt "naturaltime-past" @@ -261,6 +279,7 @@ msgid "%(num)d month" msgid_plural "%(num)d months" msgstr[0] "%(num)d mês" msgstr[1] "%(num)d meses" +msgstr[2] "%(num)d meses" #, python-format msgctxt "naturaltime-past" @@ -268,6 +287,7 @@ msgid "%(num)d week" msgid_plural "%(num)d weeks" msgstr[0] "%(num)d semana" msgstr[1] "%(num)d semanas" +msgstr[2] "%(num)d semanas" #, python-format msgctxt "naturaltime-past" @@ -275,6 +295,7 @@ msgid "%(num)d day" msgid_plural "%(num)d days" msgstr[0] "%(num)d dia" msgstr[1] "%(num)d dias" +msgstr[2] "%(num)d dias" #, python-format msgctxt "naturaltime-past" @@ -282,6 +303,7 @@ msgid "%(num)d hour" msgid_plural "%(num)d hours" msgstr[0] "%(num)d hora" msgstr[1] "%(num)d horas" +msgstr[2] "%(num)d horas" #, python-format msgctxt "naturaltime-past" @@ -289,6 +311,7 @@ msgid "%(num)d minute" msgid_plural "%(num)d minutes" msgstr[0] "%(num)d minuto" msgstr[1] "%(num)d minutos" +msgstr[2] "%(num)d minutos" #. Translators: 'naturaltime-future' strings will be included in '%(delta)s #. 
from now' @@ -298,6 +321,7 @@ msgid "%(num)d year" msgid_plural "%(num)d years" msgstr[0] "%(num)d ano" msgstr[1] "%(num)d anos" +msgstr[2] "%(num)d anos" #, python-format msgctxt "naturaltime-future" @@ -305,6 +329,7 @@ msgid "%(num)d month" msgid_plural "%(num)d months" msgstr[0] "%(num)d mês" msgstr[1] "%(num)d mses" +msgstr[2] "%(num)d mses" #, python-format msgctxt "naturaltime-future" @@ -312,6 +337,7 @@ msgid "%(num)d week" msgid_plural "%(num)d weeks" msgstr[0] "%(num)d semana" msgstr[1] "%(num)d semanas" +msgstr[2] "%(num)d semanas" #, python-format msgctxt "naturaltime-future" @@ -319,6 +345,7 @@ msgid "%(num)d day" msgid_plural "%(num)d days" msgstr[0] "%(num)d dia" msgstr[1] "%(num)d dias" +msgstr[2] "%(num)d dias" #, python-format msgctxt "naturaltime-future" @@ -326,6 +353,7 @@ msgid "%(num)d hour" msgid_plural "%(num)d hours" msgstr[0] "%(num)d hora" msgstr[1] "%(num)d horas" +msgstr[2] "%(num)d horas" #, python-format msgctxt "naturaltime-future" @@ -333,3 +361,4 @@ msgid "%(num)d minute" msgid_plural "%(num)d minutes" msgstr[0] "%(num)d minuto" msgstr[1] "%(num)d minutos" +msgstr[2] "%(num)d minutos" diff --git a/django/contrib/humanize/locale/ru/LC_MESSAGES/django.mo b/django/contrib/humanize/locale/ru/LC_MESSAGES/django.mo index 7b0821932f..edd7505418 100644 Binary files a/django/contrib/humanize/locale/ru/LC_MESSAGES/django.mo and b/django/contrib/humanize/locale/ru/LC_MESSAGES/django.mo differ diff --git a/django/contrib/humanize/locale/ru/LC_MESSAGES/django.po b/django/contrib/humanize/locale/ru/LC_MESSAGES/django.po index a5f53197f9..51839a336f 100644 --- a/django/contrib/humanize/locale/ru/LC_MESSAGES/django.po +++ b/django/contrib/humanize/locale/ru/LC_MESSAGES/django.po @@ -8,24 +8,24 @@ # Jannis Leidel , 2011 # Mingun , 2014 # SeryiMysh , 2018 -# Алексей Борискин , 2012,2014 -# Дмитрий Шатера , 2018 +# Алексей Борискин , 2012,2014,2022 +# Bobsans , 2018 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2019-01-16 20:42+0100\n" -"PO-Revision-Date: 2018-11-05 07:07+0000\n" -"Last-Translator: SeryiMysh \n" +"POT-Creation-Date: 2021-04-07 14:40+0200\n" +"PO-Revision-Date: 2022-04-24 18:40+0000\n" +"Last-Translator: Алексей Борискин , 2012,2014,2022\n" "Language-Team: Russian (http://www.transifex.com/django/django/language/" "ru/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || " +"(n%100>=11 && n%100<=14)? 
2 : 3);\n" msgid "Humanize" msgstr "Приведение значений к виду, понятному человеку" @@ -85,14 +85,6 @@ msgctxt "ordinal 9" msgid "{}th" msgstr "{}-й" -#, python-format -msgid "%(value).1f million" -msgid_plural "%(value).1f million" -msgstr[0] "%(value).1f миллион" -msgstr[1] "%(value).1f миллиона" -msgstr[2] "%(value).1f миллионов" -msgstr[3] "%(value).1f миллионов" - #, python-format msgid "%(value)s million" msgid_plural "%(value)s million" @@ -101,14 +93,6 @@ msgstr[1] "%(value)s миллиона" msgstr[2] "%(value)s миллионов" msgstr[3] "%(value)s миллионов" -#, python-format -msgid "%(value).1f billion" -msgid_plural "%(value).1f billion" -msgstr[0] "%(value).1f миллиард" -msgstr[1] "%(value).1f миллиарда" -msgstr[2] "%(value).1f миллиардов" -msgstr[3] "%(value).1f миллиардов" - #, python-format msgid "%(value)s billion" msgid_plural "%(value)s billion" @@ -117,14 +101,6 @@ msgstr[1] "%(value)s миллиарда" msgstr[2] "%(value)s миллиардов" msgstr[3] "%(value)s миллиардов" -#, python-format -msgid "%(value).1f trillion" -msgid_plural "%(value).1f trillion" -msgstr[0] "%(value).1f триллион" -msgstr[1] "%(value).1f триллиона" -msgstr[2] "%(value).1f триллионов" -msgstr[3] "%(value).1f триллионов" - #, python-format msgid "%(value)s trillion" msgid_plural "%(value)s trillion" @@ -133,14 +109,6 @@ msgstr[1] "%(value)s триллиона" msgstr[2] "%(value)s триллионов" msgstr[3] "%(value)s триллионов" -#, python-format -msgid "%(value).1f quadrillion" -msgid_plural "%(value).1f quadrillion" -msgstr[0] "%(value).1f квадриллион" -msgstr[1] "%(value).1f квадриллиона" -msgstr[2] "%(value).1f квадриллионов" -msgstr[3] "%(value).1f квадриллионов" - #, python-format msgid "%(value)s quadrillion" msgid_plural "%(value)s quadrillion" @@ -149,14 +117,6 @@ msgstr[1] "%(value)s квадриллиона" msgstr[2] "%(value)s квадриллионов" msgstr[3] "%(value)s квадриллионов" -#, python-format -msgid "%(value).1f quintillion" -msgid_plural "%(value).1f quintillion" -msgstr[0] "%(value).1f квинтиллион" -msgstr[1] "%(value).1f квинтиллиона" -msgstr[2] "%(value).1f квинтиллионов" -msgstr[3] "%(value).1f квинтиллионов" - #, python-format msgid "%(value)s quintillion" msgid_plural "%(value)s quintillion" @@ -165,14 +125,6 @@ msgstr[1] "%(value)s квинтиллиона" msgstr[2] "%(value)s квинтиллионов" msgstr[3] "%(value)s квинтиллионов" -#, python-format -msgid "%(value).1f sextillion" -msgid_plural "%(value).1f sextillion" -msgstr[0] "%(value).1f секстиллион" -msgstr[1] "%(value).1f секстиллиона" -msgstr[2] "%(value).1f секстиллионов" -msgstr[3] "%(value).1f секстиллионов" - #, python-format msgid "%(value)s sextillion" msgid_plural "%(value)s sextillion" @@ -181,14 +133,6 @@ msgstr[1] "%(value)s секстиллиона" msgstr[2] "%(value)s секстиллионов" msgstr[3] "%(value)s секстиллионов" -#, python-format -msgid "%(value).1f septillion" -msgid_plural "%(value).1f septillion" -msgstr[0] "%(value).1f септиллион" -msgstr[1] "%(value).1f септиллиона" -msgstr[2] "%(value).1f септиллионов" -msgstr[3] "%(value).1f септиллионов" - #, python-format msgid "%(value)s septillion" msgid_plural "%(value)s septillion" @@ -197,14 +141,6 @@ msgstr[1] "%(value)s септиллиона" msgstr[2] "%(value)s септиллионов" msgstr[3] "%(value)s септиллионов" -#, python-format -msgid "%(value).1f octillion" -msgid_plural "%(value).1f octillion" -msgstr[0] "%(value).1f октиллион" -msgstr[1] "%(value).1f октиллиона" -msgstr[2] "%(value).1f октиллионов" -msgstr[3] "%(value).1f октиллионов" - #, python-format msgid "%(value)s octillion" msgid_plural "%(value)s octillion" @@ -213,14 
+149,6 @@ msgstr[1] "%(value)s октиллиона" msgstr[2] "%(value)s октиллионов" msgstr[3] "%(value)s октиллионов" -#, python-format -msgid "%(value).1f nonillion" -msgid_plural "%(value).1f nonillion" -msgstr[0] "%(value).1f нониллион" -msgstr[1] "%(value).1f нониллиона" -msgstr[2] "%(value).1f нониллионов" -msgstr[3] "%(value).1f нониллионов" - #, python-format msgid "%(value)s nonillion" msgid_plural "%(value)s nonillion" @@ -229,14 +157,6 @@ msgstr[1] "%(value)s нониллиона" msgstr[2] "%(value)s нониллионов" msgstr[3] "%(value)s нониллионов" -#, python-format -msgid "%(value).1f decillion" -msgid_plural "%(value).1f decillion" -msgstr[0] "%(value).1f дециллион" -msgstr[1] "%(value).1f дециллиона" -msgstr[2] "%(value).1f дециллионов" -msgstr[3] "%(value).1f дециллионов" - #, python-format msgid "%(value)s decillion" msgid_plural "%(value)s decillion" @@ -245,14 +165,6 @@ msgstr[1] "%(value)s дециллиона" msgstr[2] "%(value)s дециллионов" msgstr[3] "%(value)s дециллионов" -#, python-format -msgid "%(value).1f googol" -msgid_plural "%(value).1f googol" -msgstr[0] "%(value).1f гугол" -msgstr[1] "%(value).1f гугола" -msgstr[2] "%(value).1f гуголов" -msgstr[3] "%(value).1f гуголов" - #, python-format msgid "%(value)s googol" msgid_plural "%(value)s googol" @@ -375,110 +287,110 @@ msgstr "через %(delta)s" #. Translators: 'naturaltime-past' strings will be included in '%(delta)s ago' #, python-format msgctxt "naturaltime-past" -msgid "%d year" -msgid_plural "%d years" -msgstr[0] "%d год" -msgstr[1] "%d года" -msgstr[2] "%d лет" -msgstr[3] "%d лет" +msgid "%(num)d year" +msgid_plural "%(num)d years" +msgstr[0] "%(num)d год" +msgstr[1] "%(num)d года" +msgstr[2] "%(num)d лет" +msgstr[3] "%(num)d лет" #, python-format msgctxt "naturaltime-past" -msgid "%d month" -msgid_plural "%d months" -msgstr[0] "%d месяц" -msgstr[1] "%d месяца" -msgstr[2] "%d месяцев" -msgstr[3] "%d месяцев" +msgid "%(num)d month" +msgid_plural "%(num)d months" +msgstr[0] "%(num)d месяц" +msgstr[1] "%(num)d месяца" +msgstr[2] "%(num)d месяцев" +msgstr[3] "%(num)d месяцев" #, python-format msgctxt "naturaltime-past" -msgid "%d week" -msgid_plural "%d weeks" -msgstr[0] "%d неделя" -msgstr[1] "%d недели" -msgstr[2] "%d недель" -msgstr[3] "%d недель" +msgid "%(num)d week" +msgid_plural "%(num)d weeks" +msgstr[0] "%(num)d неделю" +msgstr[1] "%(num)d недели" +msgstr[2] "%(num)d недель" +msgstr[3] "%(num)d недель" #, python-format msgctxt "naturaltime-past" -msgid "%d day" -msgid_plural "%d days" -msgstr[0] "%d день" -msgstr[1] "%d дня" -msgstr[2] "%d дней" -msgstr[3] "%d дней" +msgid "%(num)d day" +msgid_plural "%(num)d days" +msgstr[0] "%(num)d день" +msgstr[1] "%(num)d дня" +msgstr[2] "%(num)d дней" +msgstr[3] "%(num)d дней" #, python-format msgctxt "naturaltime-past" -msgid "%d hour" -msgid_plural "%d hours" -msgstr[0] "%d час" -msgstr[1] "%d часа" -msgstr[2] "%d часов" -msgstr[3] "%d часов" +msgid "%(num)d hour" +msgid_plural "%(num)d hours" +msgstr[0] "%(num)d час" +msgstr[1] "%(num)d часа" +msgstr[2] "%(num)d часов" +msgstr[3] "%(num)d часов" #, python-format msgctxt "naturaltime-past" -msgid "%d minute" -msgid_plural "%d minutes" -msgstr[0] "%d минуту" -msgstr[1] "%d минуты" -msgstr[2] "%d минут" -msgstr[3] "%d минут" +msgid "%(num)d minute" +msgid_plural "%(num)d minutes" +msgstr[0] "%(num)d минуту" +msgstr[1] "%(num)d минуты" +msgstr[2] "%(num)d минут" +msgstr[3] "%(num)d минут" #. Translators: 'naturaltime-future' strings will be included in '%(delta)s #. 
from now' #, python-format msgctxt "naturaltime-future" -msgid "%d year" -msgid_plural "%d years" -msgstr[0] "%d год" -msgstr[1] "%d года" -msgstr[2] "%d лет" -msgstr[3] "%d лет" +msgid "%(num)d year" +msgid_plural "%(num)d years" +msgstr[0] "%(num)d год" +msgstr[1] "%(num)d года" +msgstr[2] "%(num)d лет" +msgstr[3] "%(num)d лет" #, python-format msgctxt "naturaltime-future" -msgid "%d month" -msgid_plural "%d months" -msgstr[0] "%d месяц" -msgstr[1] "%d месяца" -msgstr[2] "%d месяцев" -msgstr[3] "%d месяцев" +msgid "%(num)d month" +msgid_plural "%(num)d months" +msgstr[0] "%(num)d месяц" +msgstr[1] "%(num)d месяца" +msgstr[2] "%(num)d месяцев" +msgstr[3] "%(num)d месяцев" #, python-format msgctxt "naturaltime-future" -msgid "%d week" -msgid_plural "%d weeks" -msgstr[0] "%d неделю" -msgstr[1] "%d недели" -msgstr[2] "%d недель" -msgstr[3] "%d недель" +msgid "%(num)d week" +msgid_plural "%(num)d weeks" +msgstr[0] "%(num)d неделю" +msgstr[1] "%(num)d недели" +msgstr[2] "%(num)d недель" +msgstr[3] "%(num)d недель" #, python-format msgctxt "naturaltime-future" -msgid "%d day" -msgid_plural "%d days" -msgstr[0] "%d день" -msgstr[1] "%d дня" -msgstr[2] "%d дней" -msgstr[3] "%d дней" +msgid "%(num)d day" +msgid_plural "%(num)d days" +msgstr[0] "%(num)d день" +msgstr[1] "%(num)d дня" +msgstr[2] "%(num)d дней" +msgstr[3] "%(num)d дней" #, python-format msgctxt "naturaltime-future" -msgid "%d hour" -msgid_plural "%d hours" -msgstr[0] "%d час" -msgstr[1] "%d часа" -msgstr[2] "%d часов" -msgstr[3] "%d часов" +msgid "%(num)d hour" +msgid_plural "%(num)d hours" +msgstr[0] "%(num)d час" +msgstr[1] "%(num)d часа" +msgstr[2] "%(num)d часов" +msgstr[3] "%(num)d часов" #, python-format msgctxt "naturaltime-future" -msgid "%d minute" -msgid_plural "%d minutes" -msgstr[0] "%d минута" -msgstr[1] "%d минуты" -msgstr[2] "%d минут" -msgstr[3] "%d минут" +msgid "%(num)d minute" +msgid_plural "%(num)d minutes" +msgstr[0] "%(num)d минуту" +msgstr[1] "%(num)d минуты" +msgstr[2] "%(num)d минут" +msgstr[3] "%(num)d минут" diff --git a/django/contrib/postgres/aggregates/general.py b/django/contrib/postgres/aggregates/general.py index f8b40fb709..3de59dfcfd 100644 --- a/django/contrib/postgres/aggregates/general.py +++ b/django/contrib/postgres/aggregates/general.py @@ -1,8 +1,9 @@ +import json import warnings from django.contrib.postgres.fields import ArrayField from django.db.models import Aggregate, BooleanField, JSONField, TextField, Value -from django.utils.deprecation import RemovedInDjango50Warning +from django.utils.deprecation import RemovedInDjango50Warning, RemovedInDjango51Warning from .mixins import OrderableAggMixin @@ -31,6 +32,14 @@ class DeprecatedConvertValueMixin: self._default_provided = True super().__init__(*expressions, default=default, **extra) + def resolve_expression(self, *args, **kwargs): + resolved = super().resolve_expression(*args, **kwargs) + if not self._default_provided: + resolved.empty_result_set_value = getattr( + self, "deprecation_empty_result_set_value", self.deprecation_value + ) + return resolved + def convert_value(self, value, expression, connection): if value is None and not self._default_provided: warnings.warn(self.deprecation_msg, category=RemovedInDjango50Warning) @@ -48,8 +57,7 @@ class ArrayAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): deprecation_msg = ( "In Django 5.0, ArrayAgg() will return None instead of an empty list " "if there are no rows. 
Pass default=None to opt into the new behavior " - "and silence this warning or default=Value([]) to keep the previous " - "behavior." + "and silence this warning or default=[] to keep the previous behavior." ) @property @@ -87,13 +95,46 @@ class JSONBAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): # RemovedInDjango50Warning deprecation_value = "[]" + deprecation_empty_result_set_value = property(lambda self: []) deprecation_msg = ( "In Django 5.0, JSONBAgg() will return None instead of an empty list " "if there are no rows. Pass default=None to opt into the new behavior " - "and silence this warning or default=Value('[]') to keep the previous " + "and silence this warning or default=[] to keep the previous " "behavior." ) + # RemovedInDjango51Warning: When the deprecation ends, remove __init__(). + # + # RemovedInDjango50Warning: When the deprecation ends, replace with: + # def __init__(self, *expressions, default=None, **extra): + def __init__(self, *expressions, default=NOT_PROVIDED, **extra): + super().__init__(*expressions, default=default, **extra) + if ( + isinstance(default, Value) + and isinstance(default.value, str) + and not isinstance(default.output_field, JSONField) + ): + value = default.value + try: + decoded = json.loads(value) + except json.JSONDecodeError: + warnings.warn( + "Passing a Value() with an output_field that isn't a JSONField as " + "JSONBAgg(default) is deprecated. Pass default=" + f"Value({value!r}, output_field=JSONField()) instead.", + stacklevel=2, + category=RemovedInDjango51Warning, + ) + self.default.output_field = self.output_field + else: + self.default = Value(decoded, self.output_field) + warnings.warn( + "Passing an encoded JSON string as JSONBAgg(default) is " + f"deprecated. Pass default={decoded!r} instead.", + stacklevel=2, + category=RemovedInDjango51Warning, + ) + class StringAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): function = "STRING_AGG" @@ -106,8 +147,7 @@ class StringAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate): deprecation_msg = ( "In Django 5.0, StringAgg() will return None instead of an empty " "string if there are no rows. Pass default=None to opt into the new " - "behavior and silence this warning or default=Value('') to keep the " - "previous behavior." + 'behavior and silence this warning or default="" to keep the previous behavior.' 
) def __init__(self, expression, delimiter, **extra): diff --git a/django/contrib/postgres/apps.py b/django/contrib/postgres/apps.py index 494cea245a..a8ee3fbf4b 100644 --- a/django/contrib/postgres/apps.py +++ b/django/contrib/postgres/apps.py @@ -1,8 +1,7 @@ -from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, NumericRange - from django.apps import AppConfig from django.core.signals import setting_changed from django.db import connections +from django.db.backends.postgresql.psycopg_any import RANGE_TYPES from django.db.backends.signals import connection_created from django.db.migrations.writer import MigrationWriter from django.db.models import CharField, OrderBy, TextField @@ -21,8 +20,6 @@ from .lookups import ( from .serializers import RangeSerializer from .signals import register_type_handlers -RANGE_TYPES = (DateRange, DateTimeRange, DateTimeTZRange, NumericRange) - def uninstall_if_needed(setting, value, enter, **kwargs): """ diff --git a/django/contrib/postgres/constraints.py b/django/contrib/postgres/constraints.py index 8b76de3c42..1caf432d16 100644 --- a/django/contrib/postgres/constraints.py +++ b/django/contrib/postgres/constraints.py @@ -51,8 +51,6 @@ class ExclusionConstraint(BaseConstraint): raise ValueError("The expressions must be a list of 2-tuples.") if not isinstance(condition, (type(None), Q)): raise ValueError("ExclusionConstraint.condition must be a Q instance.") - if condition and deferrable: - raise ValueError("ExclusionConstraint with conditions cannot be deferred.") if not isinstance(deferrable, (type(None), Deferrable)): raise ValueError( "ExclusionConstraint.deferrable must be a Deferrable instance." diff --git a/django/contrib/postgres/fields/array.py b/django/contrib/postgres/fields/array.py index c247387eb7..8477dd9fff 100644 --- a/django/contrib/postgres/fields/array.py +++ b/django/contrib/postgres/fields/array.py @@ -237,7 +237,7 @@ class ArrayField(CheckFieldDefaultMixin, Field): class ArrayRHSMixin: def __init__(self, lhs, rhs): - # Don't wrap arrays that contains only None values, psycopg2 doesn't + # Don't wrap arrays that contains only None values, psycopg doesn't # allow this. 
if isinstance(rhs, (tuple, list)) and any(self._rhs_not_none_values(rhs)): expressions = [] @@ -297,7 +297,7 @@ class ArrayLenTransform(Transform): return ( "CASE WHEN %(lhs)s IS NULL THEN NULL ELSE " "coalesce(array_length(%(lhs)s, 1), 0) END" - ) % {"lhs": lhs}, params + ) % {"lhs": lhs}, params * 2 @ArrayField.register_lookup diff --git a/django/contrib/postgres/fields/ranges.py b/django/contrib/postgres/fields/ranges.py index 6230b2f887..fbb6012660 100644 --- a/django/contrib/postgres/fields/ranges.py +++ b/django/contrib/postgres/fields/ranges.py @@ -1,10 +1,15 @@ import datetime import json -from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange, Range - from django.contrib.postgres import forms, lookups from django.db import models +from django.db.backends.postgresql.psycopg_any import ( + DateRange, + DateTimeTZRange, + NumericRange, + Range, +) +from django.db.models.functions import Cast from django.db.models.lookups import PostgresOperatorLookup from .utils import AttributeSetter @@ -204,7 +209,14 @@ class DateRangeField(RangeField): return "daterange" -RangeField.register_lookup(lookups.DataContains) +class RangeContains(lookups.DataContains): + def get_prep_lookup(self): + if not isinstance(self.rhs, (list, tuple, Range)): + return Cast(self.rhs, self.lhs.field.base_field) + return super().get_prep_lookup() + + +RangeField.register_lookup(RangeContains) RangeField.register_lookup(lookups.ContainedBy) RangeField.register_lookup(lookups.Overlap) diff --git a/django/contrib/postgres/forms/ranges.py b/django/contrib/postgres/forms/ranges.py index 444991970d..3017f08e88 100644 --- a/django/contrib/postgres/forms/ranges.py +++ b/django/contrib/postgres/forms/ranges.py @@ -1,7 +1,10 @@ -from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange - from django import forms from django.core import exceptions +from django.db.backends.postgresql.psycopg_any import ( + DateRange, + DateTimeTZRange, + NumericRange, +) from django.forms.widgets import HiddenInput, MultiWidget from django.utils.translation import gettext_lazy as _ diff --git a/django/contrib/postgres/locale/es/LC_MESSAGES/django.mo b/django/contrib/postgres/locale/es/LC_MESSAGES/django.mo index 36e110503c..7fef179176 100644 Binary files a/django/contrib/postgres/locale/es/LC_MESSAGES/django.mo and b/django/contrib/postgres/locale/es/LC_MESSAGES/django.mo differ diff --git a/django/contrib/postgres/locale/es/LC_MESSAGES/django.po b/django/contrib/postgres/locale/es/LC_MESSAGES/django.po index 4f76f7dbe4..af33acdd75 100644 --- a/django/contrib/postgres/locale/es/LC_MESSAGES/django.po +++ b/django/contrib/postgres/locale/es/LC_MESSAGES/django.po @@ -2,7 +2,7 @@ # # Translators: # Antoni Aloy , 2015,2017 -# Ernesto Avilés, 2015 +# e4db27214f7e7544f2022c647b585925_bb0e321, 2015 # Ignacio José Lizarán Rus , 2019 # Igor Támara , 2015 # Pablo, 2015 @@ -13,8 +13,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2020-05-11 20:56+0200\n" -"PO-Revision-Date: 2020-09-25 17:40+0000\n" -"Last-Translator: Uriel Medina \n" +"PO-Revision-Date: 2022-01-19 09:22+0000\n" +"Last-Translator: Uriel Medina , 2020\n" "Language-Team: Spanish (http://www.transifex.com/django/django/language/" "es/)\n" "MIME-Version: 1.0\n" @@ -77,6 +77,9 @@ msgstr[0] "" msgstr[1] "" "La lista contiene %(show_value)d elementos, no debería contener más de " "%(limit_value)d." +msgstr[2] "" +"La lista contiene %(show_value)d elementos, no debería contener más de " +"%(limit_value)d." 
#, python-format msgid "" @@ -91,6 +94,9 @@ msgstr[0] "" msgstr[1] "" "La lista contiene %(show_value)d elementos, no debería contener menos de " "%(limit_value)d." +msgstr[2] "" +"La lista contiene %(show_value)d elementos, no debería contener menos de " +"%(limit_value)d." #, python-format msgid "Some keys were missing: %(keys)s" diff --git a/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.mo b/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.mo index defc2893cc..fd325068ba 100644 Binary files a/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.mo and b/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.po b/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.po index 6079e1dd59..1a6504cf24 100644 --- a/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.po +++ b/django/contrib/postgres/locale/pt_BR/LC_MESSAGES/django.po @@ -2,7 +2,7 @@ # # Translators: # Andre Machado , 2016 -# Carlos C. Leite , 2016,2019 +# Carlos Leite , 2016,2019 # Claudemiro Alves Feitosa Neto , 2015 # Fábio C. Barrionuevo da Luz , 2015 # Lucas Infante , 2015 @@ -14,8 +14,8 @@ msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2020-05-11 20:56+0200\n" -"PO-Revision-Date: 2020-05-12 20:01+0000\n" -"Last-Translator: Transifex Bot <>\n" +"PO-Revision-Date: 2022-01-19 09:22+0000\n" +"Last-Translator: Carlos Leite , 2016,2019\n" "Language-Team: Portuguese (Brazil) (http://www.transifex.com/django/django/" "language/pt_BR/)\n" "MIME-Version: 1.0\n" @@ -78,6 +78,9 @@ msgstr[0] "" msgstr[1] "" "A lista contém itens %(show_value)d, não deveria conter mais que " "%(limit_value)d." +msgstr[2] "" +"A lista contém itens %(show_value)d, não deveria conter mais que " +"%(limit_value)d." #, python-format msgid "" @@ -92,6 +95,9 @@ msgstr[0] "" msgstr[1] "" "A lista contém %(show_value)d itens, deveria conter não menos que " "%(limit_value)d." +msgstr[2] "" +"A lista contém %(show_value)d itens, deveria conter não menos que " +"%(limit_value)d." #, python-format msgid "Some keys were missing: %(keys)s" diff --git a/django/contrib/postgres/lookups.py b/django/contrib/postgres/lookups.py index f2f88ebc0a..4e1783f288 100644 --- a/django/contrib/postgres/lookups.py +++ b/django/contrib/postgres/lookups.py @@ -1,5 +1,6 @@ from django.db.models import Transform from django.db.models.lookups import PostgresOperatorLookup +from django.db.models.sql.query import Query from .search import SearchVector, SearchVectorExact, SearchVectorField @@ -18,6 +19,13 @@ class Overlap(PostgresOperatorLookup): lookup_name = "overlap" postgres_operator = "&&" + def get_prep_lookup(self): + from .expressions import ArraySubquery + + if isinstance(self.rhs, Query): + self.rhs = ArraySubquery(self.rhs) + return super().get_prep_lookup() + class HasKey(PostgresOperatorLookup): lookup_name = "has_key" diff --git a/django/contrib/postgres/operations.py b/django/contrib/postgres/operations.py index 9dbd491773..5ac396bedf 100644 --- a/django/contrib/postgres/operations.py +++ b/django/contrib/postgres/operations.py @@ -35,6 +35,10 @@ class CreateExtension(Operation): # installed, otherwise a subsequent data migration would use the same # connection. 
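# Illustrative aside, not part of the patch: the Overlap.get_prep_lookup() hook
# added above wraps a Query right-hand side in ArraySubquery(), so a QuerySet
# can be passed directly to an __overlap filter. A minimal sketch, assuming
# hypothetical Post/Tag models living in an installed app:
from django.contrib.postgres.fields import ArrayField
from django.db import models


class Tag(models.Model):
    name = models.TextField()


class Post(models.Model):
    tag_names = ArrayField(models.TextField())


# The QuerySet is compiled into an ARRAY(SELECT ...) subquery on the database
# side instead of requiring the tag names to be materialized in Python first.
Post.objects.filter(tag_names__overlap=Tag.objects.values_list("name", flat=True))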
register_type_handlers(schema_editor.connection) + if hasattr(schema_editor.connection, "register_geometry_adapters"): + schema_editor.connection.register_geometry_adapters( + schema_editor.connection.connection, True + ) def database_backwards(self, app_label, schema_editor, from_state, to_state): if not router.allow_migrate(schema_editor.connection.alias, app_label): diff --git a/django/contrib/postgres/search.py b/django/contrib/postgres/search.py index 2b57156263..4e370aa167 100644 --- a/django/contrib/postgres/search.py +++ b/django/contrib/postgres/search.py @@ -1,5 +1,3 @@ -import psycopg2 - from django.db.models import ( CharField, Expression, @@ -41,6 +39,11 @@ class SearchQueryField(Field): return "tsquery" +class _Float4Field(Field): + def db_type(self, connection): + return "float4" + + class SearchConfig(Expression): def __init__(self, config): super().__init__() @@ -140,7 +143,11 @@ class SearchVector(SearchVectorCombinable, Func): if clone.weight: weight_sql, extra_params = compiler.compile(clone.weight) sql = "setweight({}, {})".format(sql, weight_sql) - return sql, config_params + params + extra_params + + # These parameters must be bound on the client side because we may + # want to create an index on this expression. + sql = connection.ops.compose_sql(sql, config_params + params + extra_params) + return sql, [] class CombinedSearchVector(SearchVectorCombinable, CombinedExpression): @@ -246,6 +253,8 @@ class SearchRank(Func): normalization=None, cover_density=False, ): + from .fields.array import ArrayField + if not hasattr(vector, "resolve_expression"): vector = SearchVector(vector) if not hasattr(query, "resolve_expression"): @@ -254,6 +263,7 @@ class SearchRank(Func): if weights is not None: if not hasattr(weights, "resolve_expression"): weights = Value(weights) + weights = Cast(weights, ArrayField(_Float4Field())) expressions = (weights,) + expressions if normalization is not None: if not hasattr(normalization, "resolve_expression"): @@ -309,14 +319,9 @@ class SearchHeadline(Func): options_sql = "" options_params = [] if self.options: - # getquoted() returns a quoted bytestring of the adapted value. 
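# Illustrative aside, not part of the patch: the Cast(weights, ArrayField(_Float4Field()))
# added above sits behind the documented SearchRank(weights=...) argument. A hedged
# usage sketch, assuming a hypothetical Entry model with a "body" text field:
from django.contrib.postgres.search import SearchQuery, SearchRank, SearchVector

rank = SearchRank(
    SearchVector("body", weight="A"),
    SearchQuery("cheese"),
    # PostgreSQL reads the array as the weights for labels D, C, B and A.
    weights=[0.2, 0.4, 0.6, 0.8],
)
Entry.objects.annotate(rank=rank).order_by("-rank")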
options_params.append( ", ".join( - "%s=%s" - % ( - option, - psycopg2.extensions.adapt(value).getquoted().decode(), - ) + connection.ops.compose_sql(f"{option}=%s", [value]) for option, value in self.options.items() ) ) diff --git a/django/contrib/postgres/signals.py b/django/contrib/postgres/signals.py index b61673fe1f..a3816d3d30 100644 --- a/django/contrib/postgres/signals.py +++ b/django/contrib/postgres/signals.py @@ -1,22 +1,14 @@ import functools -import psycopg2 -from psycopg2 import ProgrammingError -from psycopg2.extras import register_hstore - from django.db import connections from django.db.backends.base.base import NO_DB_ALIAS +from django.db.backends.postgresql.psycopg_any import is_psycopg3 -@functools.lru_cache -def get_hstore_oids(connection_alias): - """Return hstore and hstore array OIDs.""" +def get_type_oids(connection_alias, type_name): with connections[connection_alias].cursor() as cursor: cursor.execute( - "SELECT t.oid, typarray " - "FROM pg_type t " - "JOIN pg_namespace ns ON typnamespace = ns.oid " - "WHERE typname = 'hstore'" + "SELECT oid, typarray FROM pg_type WHERE typname = %s", (type_name,) ) oids = [] array_oids = [] @@ -26,43 +18,63 @@ def get_hstore_oids(connection_alias): return tuple(oids), tuple(array_oids) +@functools.lru_cache +def get_hstore_oids(connection_alias): + """Return hstore and hstore array OIDs.""" + return get_type_oids(connection_alias, "hstore") + + @functools.lru_cache def get_citext_oids(connection_alias): - """Return citext array OIDs.""" - with connections[connection_alias].cursor() as cursor: - cursor.execute("SELECT typarray FROM pg_type WHERE typname = 'citext'") - return tuple(row[0] for row in cursor) + """Return citext and citext array OIDs.""" + return get_type_oids(connection_alias, "citext") -def register_type_handlers(connection, **kwargs): - if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS: - return +if is_psycopg3: + from psycopg.types import TypeInfo, hstore + + def register_type_handlers(connection, **kwargs): + if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS: + return - try: oids, array_oids = get_hstore_oids(connection.alias) - register_hstore( - connection.connection, globally=True, oid=oids, array_oid=array_oids - ) - except ProgrammingError: - # Hstore is not available on the database. - # - # If someone tries to create an hstore field it will error there. - # This is necessary as someone may be using PSQL without extensions - # installed but be using other features of contrib.postgres. - # - # This is also needed in order to create the connection in order to - # install the hstore extension. - pass + for oid, array_oid in zip(oids, array_oids): + ti = TypeInfo("hstore", oid, array_oid) + hstore.register_hstore(ti, connection.connection) - try: - citext_oids = get_citext_oids(connection.alias) - array_type = psycopg2.extensions.new_array_type( - citext_oids, "citext[]", psycopg2.STRING - ) - psycopg2.extensions.register_type(array_type, None) - except ProgrammingError: - # citext is not available on the database. 
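# Illustrative aside, not part of the patch: a rough sketch of how the new
# get_type_oids() helper above can be exercised, assuming a configured project
# with a PostgreSQL "default" alias; the returned OID values vary per database.
from django.contrib.postgres.signals import get_type_oids

oids, array_oids = get_type_oids("default", "hstore")
if not oids:
    # The hstore extension is not installed, so no handlers are registered.
    pass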
+ _, citext_oids = get_citext_oids(connection.alias) + for array_oid in citext_oids: + ti = TypeInfo("citext", 0, array_oid) + ti.register(connection.connection) + +else: + import psycopg2 + from psycopg2.extras import register_hstore + + def register_type_handlers(connection, **kwargs): + if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS: + return + + oids, array_oids = get_hstore_oids(connection.alias) + # Don't register handlers when hstore is not available on the database. # - # The same comments in the except block of the above call to - # register_hstore() also apply here. - pass + # If someone tries to create an hstore field it will error there. This is + # necessary as someone may be using PSQL without extensions installed but + # be using other features of contrib.postgres. + # + # This is also needed in order to create the connection in order to install + # the hstore extension. + if oids: + register_hstore( + connection.connection, globally=True, oid=oids, array_oid=array_oids + ) + + oids, citext_oids = get_citext_oids(connection.alias) + # Don't register handlers when citext is not available on the database. + # + # The same comments in the above call to register_hstore() also apply here. + if oids: + array_type = psycopg2.extensions.new_array_type( + citext_oids, "citext[]", psycopg2.STRING + ) + psycopg2.extensions.register_type(array_type, None) diff --git a/django/contrib/redirects/locale/eo/LC_MESSAGES/django.mo b/django/contrib/redirects/locale/eo/LC_MESSAGES/django.mo index 82950c4e34..dfe941280a 100644 Binary files a/django/contrib/redirects/locale/eo/LC_MESSAGES/django.mo and b/django/contrib/redirects/locale/eo/LC_MESSAGES/django.mo differ diff --git a/django/contrib/redirects/locale/eo/LC_MESSAGES/django.po b/django/contrib/redirects/locale/eo/LC_MESSAGES/django.po index 8d613f0a0a..72c4b91ea9 100644 --- a/django/contrib/redirects/locale/eo/LC_MESSAGES/django.po +++ b/django/contrib/redirects/locale/eo/LC_MESSAGES/django.po @@ -1,16 +1,17 @@ # This file is distributed under the same license as the Django package. # # Translators: -# Baptiste Darthenay , 2011 -# Baptiste Darthenay , 2014-2015 +# Batist D 🐍 , 2011 +# Batist D 🐍 , 2014-2015 # kristjan , 2011 +# Meiyer , 2022 msgid "" msgstr "" "Project-Id-Version: django\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2015-10-09 17:42+0200\n" -"PO-Revision-Date: 2017-09-19 16:40+0000\n" -"Last-Translator: Baptiste Darthenay \n" +"POT-Creation-Date: 2021-01-15 09:00+0100\n" +"PO-Revision-Date: 2022-04-24 18:32+0000\n" +"Last-Translator: Meiyer , 2022\n" "Language-Team: Esperanto (http://www.transifex.com/django/django/language/" "eo/)\n" "MIME-Version: 1.0\n" @@ -20,33 +21,29 @@ msgstr "" "Plural-Forms: nplurals=2; plural=(n != 1);\n" msgid "Redirects" -msgstr "Alidirektoj" +msgstr "Alidirektigiloj" msgid "site" msgstr "retejo" msgid "redirect from" -msgstr "alidirekto de" +msgstr "alidirektigo de" msgid "" -"This should be an absolute path, excluding the domain name. Example: '/" -"events/search/'." +"This should be an absolute path, excluding the domain name. Example: “/" +"events/search/”." msgstr "" -"Tio ĉi devus esti absoluta vojo, escepte de la retregionan nomon. Ekzemplo: " -"'/eventoj/serĉo/'." msgid "redirect to" -msgstr "alidirekto al" +msgstr "alidirektigo al" msgid "" -"This can be either an absolute path (as above) or a full URL starting with " -"'http://'." +"This can be either an absolute path (as above) or a full URL starting with a " +"scheme such as “https://”." 
msgstr "" -"Tio ĉi povas esti aŭ absoluta vojo (kiel supre), aŭ URL komencante kun " -"'http://'." msgid "redirect" -msgstr "alidirekto" +msgstr "alidirektigilo" msgid "redirects" -msgstr "alidirektoj" +msgstr "alidirektigiloj" diff --git a/django/contrib/sessions/middleware.py b/django/contrib/sessions/middleware.py index 2fcd7d508a..9c934f9ddd 100644 --- a/django/contrib/sessions/middleware.py +++ b/django/contrib/sessions/middleware.py @@ -53,8 +53,8 @@ class SessionMiddleware(MiddlewareMixin): expires_time = time.time() + max_age expires = http_date(expires_time) # Save the session data and refresh the client cookie. - # Skip session save for 500 responses, refs #3881. - if response.status_code != 500: + # Skip session save for 5xx responses. + if response.status_code < 500: try: request.session.save() except UpdateError: diff --git a/django/contrib/sitemaps/__init__.py b/django/contrib/sitemaps/__init__.py index 27756d9d96..df57f1cd5c 100644 --- a/django/contrib/sitemaps/__init__.py +++ b/django/contrib/sitemaps/__init__.py @@ -92,6 +92,10 @@ class Sitemap: return attr(item) return attr + def get_languages_for_item(self, item): + """Languages for which this item is displayed.""" + return self._languages() + def _languages(self): if self.languages is not None: return self.languages @@ -103,8 +107,8 @@ class Sitemap: # This is necessary to paginate with all languages already considered. items = [ (item, lang_code) - for lang_code in self._languages() for item in self.items() + for lang_code in self.get_languages_for_item(item) ] return items return self.items() @@ -167,7 +171,7 @@ class Sitemap: return None if callable(self.lastmod): try: - return max([self.lastmod(item) for item in self.items()]) + return max([self.lastmod(item) for item in self.items()], default=None) except TypeError: return None else: @@ -201,7 +205,8 @@ class Sitemap: } if self.i18n and self.alternates: - for lang_code in self._languages(): + item_languages = self.get_languages_for_item(item[0]) + for lang_code in item_languages: loc = f"{protocol}://{domain}{self._location(item, lang_code)}" url_info["alternates"].append( { @@ -209,7 +214,7 @@ class Sitemap: "lang_code": lang_code, } ) - if self.x_default: + if self.x_default and settings.LANGUAGE_CODE in item_languages: lang_code = settings.LANGUAGE_CODE loc = f"{protocol}://{domain}{self._location(item, lang_code)}" loc = loc.replace(f"/{lang_code}/", "/", 1) diff --git a/django/contrib/staticfiles/storage.py b/django/contrib/staticfiles/storage.py index 5c0159c0b4..c09f01e446 100644 --- a/django/contrib/staticfiles/storage.py +++ b/django/contrib/staticfiles/storage.py @@ -72,6 +72,28 @@ class HashedFilesMixin: r"(?m)(?P)^(//# (?-i:sourceMappingURL)=(?P.*))$", "//# sourceMappingURL=%(url)s", ), + ( + ( + r"""(?Pimport(?s:(?P[\s\{].*?))""" + r"""\s*from\s*['"](?P[\.\/].*?)["']\s*;)""" + ), + """import%(import)s from "%(url)s";""", + ), + ( + ( + r"""(?Pexport(?s:(?P[\s\{].*?))""" + r"""\s*from\s*["'](?P[\.\/].*?)["']\s*;)""" + ), + """export%(exports)s from "%(url)s";""", + ), + ( + r"""(?Pimport\s*['"](?P[\.\/].*?)["']\s*;)""", + """import"%(url)s";""", + ), + ( + r"""(?Pimport\(["'](?P.*?)["']\))""", + """import("%(url)s")""", + ), ), ), ) @@ -417,7 +439,7 @@ class HashedFilesMixin: class ManifestFilesMixin(HashedFilesMixin): - manifest_version = "1.0" # the manifest format standard + manifest_version = "1.1" # the manifest format standard manifest_name = "staticfiles.json" manifest_strict = True keep_intermediate_files = False @@ -427,7 +449,7 @@ class 
ManifestFilesMixin(HashedFilesMixin): if manifest_storage is None: manifest_storage = self self.manifest_storage = manifest_storage - self.hashed_files = self.load_manifest() + self.hashed_files, self.manifest_hash = self.load_manifest() def read_manifest(self): try: @@ -439,15 +461,15 @@ class ManifestFilesMixin(HashedFilesMixin): def load_manifest(self): content = self.read_manifest() if content is None: - return {} + return {}, "" try: stored = json.loads(content) except json.JSONDecodeError: pass else: version = stored.get("version") - if version == "1.0": - return stored.get("paths", {}) + if version in ("1.0", "1.1"): + return stored.get("paths", {}), stored.get("hash", "") raise ValueError( "Couldn't load manifest '%s' (version %s)" % (self.manifest_name, self.manifest_version) @@ -460,7 +482,14 @@ class ManifestFilesMixin(HashedFilesMixin): self.save_manifest() def save_manifest(self): - payload = {"paths": self.hashed_files, "version": self.manifest_version} + self.manifest_hash = self.file_hash( + None, ContentFile(json.dumps(sorted(self.hashed_files.items())).encode()) + ) + payload = { + "paths": self.hashed_files, + "version": self.manifest_version, + "hash": self.manifest_hash, + } if self.manifest_storage.exists(self.manifest_name): self.manifest_storage.delete(self.manifest_name) contents = json.dumps(payload).encode() diff --git a/django/core/cache/backends/filebased.py b/django/core/cache/backends/filebased.py index 631da49444..215fefbcc0 100644 --- a/django/core/cache/backends/filebased.py +++ b/django/core/cache/backends/filebased.py @@ -90,10 +90,11 @@ class FileBasedCache(BaseCache): def has_key(self, key, version=None): fname = self._key_to_file(key, version) - if os.path.exists(fname): + try: with open(fname, "rb") as f: return not self._is_expired(f) - return False + except FileNotFoundError: + return False def _cull(self): """ diff --git a/django/core/cache/backends/redis.py b/django/core/cache/backends/redis.py index c370ff5cb7..eda8ac9457 100644 --- a/django/core/cache/backends/redis.py +++ b/django/core/cache/backends/redis.py @@ -130,7 +130,7 @@ class RedisCacheClient: return bool(client.exists(key)) def incr(self, key, delta): - client = self.get_client(key) + client = self.get_client(key, write=True) if not client.exists(key): raise ValueError("Key '%s' not found." 
% key) return client.incr(key, delta) diff --git a/django/core/exceptions.py b/django/core/exceptions.py index 7be4e16bc5..646644f3e0 100644 --- a/django/core/exceptions.py +++ b/django/core/exceptions.py @@ -233,6 +233,12 @@ class EmptyResultSet(Exception): pass +class FullResultSet(Exception): + """A database query predicate is matches everything.""" + + pass + + class SynchronousOnlyOperation(Exception): """The user tried to call a sync-only function from an async context.""" diff --git a/django/core/files/storage.py b/django/core/files/storage.py deleted file mode 100644 index 2eb8d08236..0000000000 --- a/django/core/files/storage.py +++ /dev/null @@ -1,427 +0,0 @@ -import os -import pathlib -from datetime import datetime, timezone -from urllib.parse import urljoin - -from django.conf import settings -from django.core.exceptions import SuspiciousFileOperation -from django.core.files import File, locks -from django.core.files.move import file_move_safe -from django.core.files.utils import validate_file_name -from django.core.signals import setting_changed -from django.utils._os import safe_join -from django.utils.crypto import get_random_string -from django.utils.deconstruct import deconstructible -from django.utils.encoding import filepath_to_uri -from django.utils.functional import LazyObject, cached_property -from django.utils.module_loading import import_string -from django.utils.text import get_valid_filename - -__all__ = ( - "Storage", - "FileSystemStorage", - "DefaultStorage", - "default_storage", - "get_storage_class", -) - - -class Storage: - """ - A base storage class, providing some default behaviors that all other - storage systems can inherit or override, as necessary. - """ - - # The following methods represent a public interface to private methods. - # These shouldn't be overridden by subclasses unless absolutely necessary. - - def open(self, name, mode="rb"): - """Retrieve the specified file from storage.""" - return self._open(name, mode) - - def save(self, name, content, max_length=None): - """ - Save new content to the file specified by name. The content should be - a proper File object or any Python file-like object, ready to be read - from the beginning. - """ - # Get the proper name for the file, as it will actually be saved. - if name is None: - name = content.name - - if not hasattr(content, "chunks"): - content = File(content, name) - - name = self.get_available_name(name, max_length=max_length) - name = self._save(name, content) - # Ensure that the name returned from the storage system is still valid. - validate_file_name(name, allow_relative_path=True) - return name - - # These methods are part of the public API, with default implementations. - - def get_valid_name(self, name): - """ - Return a filename, based on the provided filename, that's suitable for - use in the target storage system. - """ - return get_valid_filename(name) - - def get_alternative_name(self, file_root, file_ext): - """ - Return an alternative filename, by adding an underscore and a random 7 - character alphanumeric string (before the file extension, if one - exists) to the filename. - """ - return "%s_%s%s" % (file_root, get_random_string(7), file_ext) - - def get_available_name(self, name, max_length=None): - """ - Return a filename that's free on the target storage system and - available for new content to be written to. - """ - name = str(name).replace("\\", "/") - dir_name, file_name = os.path.split(name) - if ".." 
in pathlib.PurePath(dir_name).parts: - raise SuspiciousFileOperation( - "Detected path traversal attempt in '%s'" % dir_name - ) - validate_file_name(file_name) - file_root, file_ext = os.path.splitext(file_name) - # If the filename already exists, generate an alternative filename - # until it doesn't exist. - # Truncate original name if required, so the new filename does not - # exceed the max_length. - while self.exists(name) or (max_length and len(name) > max_length): - # file_ext includes the dot. - name = os.path.join( - dir_name, self.get_alternative_name(file_root, file_ext) - ) - if max_length is None: - continue - # Truncate file_root if max_length exceeded. - truncation = len(name) - max_length - if truncation > 0: - file_root = file_root[:-truncation] - # Entire file_root was truncated in attempt to find an - # available filename. - if not file_root: - raise SuspiciousFileOperation( - 'Storage can not find an available filename for "%s". ' - "Please make sure that the corresponding file field " - 'allows sufficient "max_length".' % name - ) - name = os.path.join( - dir_name, self.get_alternative_name(file_root, file_ext) - ) - return name - - def generate_filename(self, filename): - """ - Validate the filename by calling get_valid_name() and return a filename - to be passed to the save() method. - """ - filename = str(filename).replace("\\", "/") - # `filename` may include a path as returned by FileField.upload_to. - dirname, filename = os.path.split(filename) - if ".." in pathlib.PurePath(dirname).parts: - raise SuspiciousFileOperation( - "Detected path traversal attempt in '%s'" % dirname - ) - return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) - - def path(self, name): - """ - Return a local filesystem path where the file can be retrieved using - Python's built-in open() function. Storage systems that can't be - accessed using open() should *not* implement this method. - """ - raise NotImplementedError("This backend doesn't support absolute paths.") - - # The following methods form the public API for storage systems, but with - # no default implementations. Subclasses must implement *all* of these. - - def delete(self, name): - """ - Delete the specified file from the storage system. - """ - raise NotImplementedError( - "subclasses of Storage must provide a delete() method" - ) - - def exists(self, name): - """ - Return True if a file referenced by the given name already exists in the - storage system, or False if the name is available for a new file. - """ - raise NotImplementedError( - "subclasses of Storage must provide an exists() method" - ) - - def listdir(self, path): - """ - List the contents of the specified path. Return a 2-tuple of lists: - the first item being directories, the second item being files. - """ - raise NotImplementedError( - "subclasses of Storage must provide a listdir() method" - ) - - def size(self, name): - """ - Return the total size, in bytes, of the file specified by name. - """ - raise NotImplementedError("subclasses of Storage must provide a size() method") - - def url(self, name): - """ - Return an absolute URL where the file's contents can be accessed - directly by a web browser. - """ - raise NotImplementedError("subclasses of Storage must provide a url() method") - - def get_accessed_time(self, name): - """ - Return the last accessed time (as a datetime) of the file specified by - name. The datetime will be timezone-aware if USE_TZ=True. 
- """ - raise NotImplementedError( - "subclasses of Storage must provide a get_accessed_time() method" - ) - - def get_created_time(self, name): - """ - Return the creation time (as a datetime) of the file specified by name. - The datetime will be timezone-aware if USE_TZ=True. - """ - raise NotImplementedError( - "subclasses of Storage must provide a get_created_time() method" - ) - - def get_modified_time(self, name): - """ - Return the last modified time (as a datetime) of the file specified by - name. The datetime will be timezone-aware if USE_TZ=True. - """ - raise NotImplementedError( - "subclasses of Storage must provide a get_modified_time() method" - ) - - -@deconstructible -class FileSystemStorage(Storage): - """ - Standard filesystem storage - """ - - # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if - # the file already exists before it's opened. - OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0) - - def __init__( - self, - location=None, - base_url=None, - file_permissions_mode=None, - directory_permissions_mode=None, - ): - self._location = location - self._base_url = base_url - self._file_permissions_mode = file_permissions_mode - self._directory_permissions_mode = directory_permissions_mode - setting_changed.connect(self._clear_cached_properties) - - def _clear_cached_properties(self, setting, **kwargs): - """Reset setting based property values.""" - if setting == "MEDIA_ROOT": - self.__dict__.pop("base_location", None) - self.__dict__.pop("location", None) - elif setting == "MEDIA_URL": - self.__dict__.pop("base_url", None) - elif setting == "FILE_UPLOAD_PERMISSIONS": - self.__dict__.pop("file_permissions_mode", None) - elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS": - self.__dict__.pop("directory_permissions_mode", None) - - def _value_or_setting(self, value, setting): - return setting if value is None else value - - @cached_property - def base_location(self): - return self._value_or_setting(self._location, settings.MEDIA_ROOT) - - @cached_property - def location(self): - return os.path.abspath(self.base_location) - - @cached_property - def base_url(self): - if self._base_url is not None and not self._base_url.endswith("/"): - self._base_url += "/" - return self._value_or_setting(self._base_url, settings.MEDIA_URL) - - @cached_property - def file_permissions_mode(self): - return self._value_or_setting( - self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS - ) - - @cached_property - def directory_permissions_mode(self): - return self._value_or_setting( - self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS - ) - - def _open(self, name, mode="rb"): - return File(open(self.path(name), mode)) - - def _save(self, name, content): - full_path = self.path(name) - - # Create any intermediate directories that do not exist. - directory = os.path.dirname(full_path) - try: - if self.directory_permissions_mode is not None: - # Set the umask because os.makedirs() doesn't apply the "mode" - # argument to intermediate-level directories. - old_umask = os.umask(0o777 & ~self.directory_permissions_mode) - try: - os.makedirs( - directory, self.directory_permissions_mode, exist_ok=True - ) - finally: - os.umask(old_umask) - else: - os.makedirs(directory, exist_ok=True) - except FileExistsError: - raise FileExistsError("%s exists and is not a directory." 
% directory) - - # There's a potential race condition between get_available_name and - # saving the file; it's possible that two threads might return the - # same name, at which point all sorts of fun happens. So we need to - # try to create the file, but if it already exists we have to go back - # to get_available_name() and try again. - - while True: - try: - # This file has a file path that we can move. - if hasattr(content, "temporary_file_path"): - file_move_safe(content.temporary_file_path(), full_path) - - # This is a normal uploadedfile that we can stream. - else: - # The current umask value is masked out by os.open! - fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) - _file = None - try: - locks.lock(fd, locks.LOCK_EX) - for chunk in content.chunks(): - if _file is None: - mode = "wb" if isinstance(chunk, bytes) else "wt" - _file = os.fdopen(fd, mode) - _file.write(chunk) - finally: - locks.unlock(fd) - if _file is not None: - _file.close() - else: - os.close(fd) - except FileExistsError: - # A new name is needed if the file exists. - name = self.get_available_name(name) - full_path = self.path(name) - else: - # OK, the file save worked. Break out of the loop. - break - - if self.file_permissions_mode is not None: - os.chmod(full_path, self.file_permissions_mode) - - # Ensure the saved path is always relative to the storage root. - name = os.path.relpath(full_path, self.location) - # Ensure the moved file has the same gid as the storage root. - self._ensure_location_group_id(full_path) - # Store filenames with forward slashes, even on Windows. - return str(name).replace("\\", "/") - - def _ensure_location_group_id(self, full_path): - if os.name == "posix": - file_gid = os.stat(full_path).st_gid - location_gid = os.stat(self.location).st_gid - if file_gid != location_gid: - try: - os.chown(full_path, uid=-1, gid=location_gid) - except PermissionError: - pass - - def delete(self, name): - if not name: - raise ValueError("The name must be given to delete().") - name = self.path(name) - # If the file or directory exists, delete it from the filesystem. - try: - if os.path.isdir(name): - os.rmdir(name) - else: - os.remove(name) - except FileNotFoundError: - # FileNotFoundError is raised if the file or directory was removed - # concurrently. - pass - - def exists(self, name): - return os.path.lexists(self.path(name)) - - def listdir(self, path): - path = self.path(path) - directories, files = [], [] - with os.scandir(path) as entries: - for entry in entries: - if entry.is_dir(): - directories.append(entry.name) - else: - files.append(entry.name) - return directories, files - - def path(self, name): - return safe_join(self.location, name) - - def size(self, name): - return os.path.getsize(self.path(name)) - - def url(self, name): - if self.base_url is None: - raise ValueError("This file is not accessible via a URL.") - url = filepath_to_uri(name) - if url is not None: - url = url.lstrip("/") - return urljoin(self.base_url, url) - - def _datetime_from_timestamp(self, ts): - """ - If timezone support is enabled, make an aware datetime object in UTC; - otherwise make a naive one in the local timezone. 
- """ - tz = timezone.utc if settings.USE_TZ else None - return datetime.fromtimestamp(ts, tz=tz) - - def get_accessed_time(self, name): - return self._datetime_from_timestamp(os.path.getatime(self.path(name))) - - def get_created_time(self, name): - return self._datetime_from_timestamp(os.path.getctime(self.path(name))) - - def get_modified_time(self, name): - return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) - - -def get_storage_class(import_path=None): - return import_string(import_path or settings.DEFAULT_FILE_STORAGE) - - -class DefaultStorage(LazyObject): - def _setup(self): - self._wrapped = get_storage_class()() - - -default_storage = DefaultStorage() diff --git a/django/core/files/storage/__init__.py b/django/core/files/storage/__init__.py new file mode 100644 index 0000000000..240bbc1795 --- /dev/null +++ b/django/core/files/storage/__init__.py @@ -0,0 +1,26 @@ +from django.conf import settings +from django.utils.functional import LazyObject +from django.utils.module_loading import import_string + +from .base import Storage +from .filesystem import FileSystemStorage + +__all__ = ( + "FileSystemStorage", + "Storage", + "DefaultStorage", + "default_storage", + "get_storage_class", +) + + +def get_storage_class(import_path=None): + return import_string(import_path or settings.DEFAULT_FILE_STORAGE) + + +class DefaultStorage(LazyObject): + def _setup(self): + self._wrapped = get_storage_class()() + + +default_storage = DefaultStorage() diff --git a/django/core/files/storage/base.py b/django/core/files/storage/base.py new file mode 100644 index 0000000000..16ac22f70a --- /dev/null +++ b/django/core/files/storage/base.py @@ -0,0 +1,190 @@ +import os +import pathlib + +from django.core.exceptions import SuspiciousFileOperation +from django.core.files import File +from django.core.files.utils import validate_file_name +from django.utils.crypto import get_random_string +from django.utils.text import get_valid_filename + + +class Storage: + """ + A base storage class, providing some default behaviors that all other + storage systems can inherit or override, as necessary. + """ + + # The following methods represent a public interface to private methods. + # These shouldn't be overridden by subclasses unless absolutely necessary. + + def open(self, name, mode="rb"): + """Retrieve the specified file from storage.""" + return self._open(name, mode) + + def save(self, name, content, max_length=None): + """ + Save new content to the file specified by name. The content should be + a proper File object or any Python file-like object, ready to be read + from the beginning. + """ + # Get the proper name for the file, as it will actually be saved. + if name is None: + name = content.name + + if not hasattr(content, "chunks"): + content = File(content, name) + + name = self.get_available_name(name, max_length=max_length) + name = self._save(name, content) + # Ensure that the name returned from the storage system is still valid. + validate_file_name(name, allow_relative_path=True) + return name + + # These methods are part of the public API, with default implementations. + + def get_valid_name(self, name): + """ + Return a filename, based on the provided filename, that's suitable for + use in the target storage system. 
+ """ + return get_valid_filename(name) + + def get_alternative_name(self, file_root, file_ext): + """ + Return an alternative filename, by adding an underscore and a random 7 + character alphanumeric string (before the file extension, if one + exists) to the filename. + """ + return "%s_%s%s" % (file_root, get_random_string(7), file_ext) + + def get_available_name(self, name, max_length=None): + """ + Return a filename that's free on the target storage system and + available for new content to be written to. + """ + name = str(name).replace("\\", "/") + dir_name, file_name = os.path.split(name) + if ".." in pathlib.PurePath(dir_name).parts: + raise SuspiciousFileOperation( + "Detected path traversal attempt in '%s'" % dir_name + ) + validate_file_name(file_name) + file_root, file_ext = os.path.splitext(file_name) + # If the filename already exists, generate an alternative filename + # until it doesn't exist. + # Truncate original name if required, so the new filename does not + # exceed the max_length. + while self.exists(name) or (max_length and len(name) > max_length): + # file_ext includes the dot. + name = os.path.join( + dir_name, self.get_alternative_name(file_root, file_ext) + ) + if max_length is None: + continue + # Truncate file_root if max_length exceeded. + truncation = len(name) - max_length + if truncation > 0: + file_root = file_root[:-truncation] + # Entire file_root was truncated in attempt to find an + # available filename. + if not file_root: + raise SuspiciousFileOperation( + 'Storage can not find an available filename for "%s". ' + "Please make sure that the corresponding file field " + 'allows sufficient "max_length".' % name + ) + name = os.path.join( + dir_name, self.get_alternative_name(file_root, file_ext) + ) + return name + + def generate_filename(self, filename): + """ + Validate the filename by calling get_valid_name() and return a filename + to be passed to the save() method. + """ + filename = str(filename).replace("\\", "/") + # `filename` may include a path as returned by FileField.upload_to. + dirname, filename = os.path.split(filename) + if ".." in pathlib.PurePath(dirname).parts: + raise SuspiciousFileOperation( + "Detected path traversal attempt in '%s'" % dirname + ) + return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) + + def path(self, name): + """ + Return a local filesystem path where the file can be retrieved using + Python's built-in open() function. Storage systems that can't be + accessed using open() should *not* implement this method. + """ + raise NotImplementedError("This backend doesn't support absolute paths.") + + # The following methods form the public API for storage systems, but with + # no default implementations. Subclasses must implement *all* of these. + + def delete(self, name): + """ + Delete the specified file from the storage system. + """ + raise NotImplementedError( + "subclasses of Storage must provide a delete() method" + ) + + def exists(self, name): + """ + Return True if a file referenced by the given name already exists in the + storage system, or False if the name is available for a new file. + """ + raise NotImplementedError( + "subclasses of Storage must provide an exists() method" + ) + + def listdir(self, path): + """ + List the contents of the specified path. Return a 2-tuple of lists: + the first item being directories, the second item being files. 
+ """ + raise NotImplementedError( + "subclasses of Storage must provide a listdir() method" + ) + + def size(self, name): + """ + Return the total size, in bytes, of the file specified by name. + """ + raise NotImplementedError("subclasses of Storage must provide a size() method") + + def url(self, name): + """ + Return an absolute URL where the file's contents can be accessed + directly by a web browser. + """ + raise NotImplementedError("subclasses of Storage must provide a url() method") + + def get_accessed_time(self, name): + """ + Return the last accessed time (as a datetime) of the file specified by + name. The datetime will be timezone-aware if USE_TZ=True. + """ + raise NotImplementedError( + "subclasses of Storage must provide a get_accessed_time() method" + ) + + def get_created_time(self, name): + """ + Return the creation time (as a datetime) of the file specified by name. + The datetime will be timezone-aware if USE_TZ=True. + """ + raise NotImplementedError( + "subclasses of Storage must provide a get_created_time() method" + ) + + def get_modified_time(self, name): + """ + Return the last modified time (as a datetime) of the file specified by + name. The datetime will be timezone-aware if USE_TZ=True. + """ + raise NotImplementedError( + "subclasses of Storage must provide a get_modified_time() method" + ) diff --git a/django/core/files/storage/filesystem.py b/django/core/files/storage/filesystem.py new file mode 100644 index 0000000000..85fc4eff9f --- /dev/null +++ b/django/core/files/storage/filesystem.py @@ -0,0 +1,207 @@ +import os +from datetime import datetime, timezone +from urllib.parse import urljoin + +from django.conf import settings +from django.core.files import File, locks +from django.core.files.move import file_move_safe +from django.core.signals import setting_changed +from django.utils._os import safe_join +from django.utils.deconstruct import deconstructible +from django.utils.encoding import filepath_to_uri +from django.utils.functional import cached_property + +from .base import Storage +from .mixins import StorageSettingsMixin + + +@deconstructible(path="django.core.files.storage.FileSystemStorage") +class FileSystemStorage(Storage, StorageSettingsMixin): + """ + Standard filesystem storage + """ + + # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if + # the file already exists before it's opened. 
+ OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0) + + def __init__( + self, + location=None, + base_url=None, + file_permissions_mode=None, + directory_permissions_mode=None, + ): + self._location = location + self._base_url = base_url + self._file_permissions_mode = file_permissions_mode + self._directory_permissions_mode = directory_permissions_mode + setting_changed.connect(self._clear_cached_properties) + + @cached_property + def base_location(self): + return self._value_or_setting(self._location, settings.MEDIA_ROOT) + + @cached_property + def location(self): + return os.path.abspath(self.base_location) + + @cached_property + def base_url(self): + if self._base_url is not None and not self._base_url.endswith("/"): + self._base_url += "/" + return self._value_or_setting(self._base_url, settings.MEDIA_URL) + + @cached_property + def file_permissions_mode(self): + return self._value_or_setting( + self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS + ) + + @cached_property + def directory_permissions_mode(self): + return self._value_or_setting( + self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS + ) + + def _open(self, name, mode="rb"): + return File(open(self.path(name), mode)) + + def _save(self, name, content): + full_path = self.path(name) + + # Create any intermediate directories that do not exist. + directory = os.path.dirname(full_path) + try: + if self.directory_permissions_mode is not None: + # Set the umask because os.makedirs() doesn't apply the "mode" + # argument to intermediate-level directories. + old_umask = os.umask(0o777 & ~self.directory_permissions_mode) + try: + os.makedirs( + directory, self.directory_permissions_mode, exist_ok=True + ) + finally: + os.umask(old_umask) + else: + os.makedirs(directory, exist_ok=True) + except FileExistsError: + raise FileExistsError("%s exists and is not a directory." % directory) + + # There's a potential race condition between get_available_name and + # saving the file; it's possible that two threads might return the + # same name, at which point all sorts of fun happens. So we need to + # try to create the file, but if it already exists we have to go back + # to get_available_name() and try again. + + while True: + try: + # This file has a file path that we can move. + if hasattr(content, "temporary_file_path"): + file_move_safe(content.temporary_file_path(), full_path) + + # This is a normal uploadedfile that we can stream. + else: + # The current umask value is masked out by os.open! + fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) + _file = None + try: + locks.lock(fd, locks.LOCK_EX) + for chunk in content.chunks(): + if _file is None: + mode = "wb" if isinstance(chunk, bytes) else "wt" + _file = os.fdopen(fd, mode) + _file.write(chunk) + finally: + locks.unlock(fd) + if _file is not None: + _file.close() + else: + os.close(fd) + except FileExistsError: + # A new name is needed if the file exists. + name = self.get_available_name(name) + full_path = self.path(name) + else: + # OK, the file save worked. Break out of the loop. + break + + if self.file_permissions_mode is not None: + os.chmod(full_path, self.file_permissions_mode) + + # Ensure the saved path is always relative to the storage root. + name = os.path.relpath(full_path, self.location) + # Ensure the moved file has the same gid as the storage root. + self._ensure_location_group_id(full_path) + # Store filenames with forward slashes, even on Windows. 
+ return str(name).replace("\\", "/") + + def _ensure_location_group_id(self, full_path): + if os.name == "posix": + file_gid = os.stat(full_path).st_gid + location_gid = os.stat(self.location).st_gid + if file_gid != location_gid: + try: + os.chown(full_path, uid=-1, gid=location_gid) + except PermissionError: + pass + + def delete(self, name): + if not name: + raise ValueError("The name must be given to delete().") + name = self.path(name) + # If the file or directory exists, delete it from the filesystem. + try: + if os.path.isdir(name): + os.rmdir(name) + else: + os.remove(name) + except FileNotFoundError: + # FileNotFoundError is raised if the file or directory was removed + # concurrently. + pass + + def exists(self, name): + return os.path.lexists(self.path(name)) + + def listdir(self, path): + path = self.path(path) + directories, files = [], [] + with os.scandir(path) as entries: + for entry in entries: + if entry.is_dir(): + directories.append(entry.name) + else: + files.append(entry.name) + return directories, files + + def path(self, name): + return safe_join(self.location, name) + + def size(self, name): + return os.path.getsize(self.path(name)) + + def url(self, name): + if self.base_url is None: + raise ValueError("This file is not accessible via a URL.") + url = filepath_to_uri(name) + if url is not None: + url = url.lstrip("/") + return urljoin(self.base_url, url) + + def _datetime_from_timestamp(self, ts): + """ + If timezone support is enabled, make an aware datetime object in UTC; + otherwise make a naive one in the local timezone. + """ + tz = timezone.utc if settings.USE_TZ else None + return datetime.fromtimestamp(ts, tz=tz) + + def get_accessed_time(self, name): + return self._datetime_from_timestamp(os.path.getatime(self.path(name))) + + def get_created_time(self, name): + return self._datetime_from_timestamp(os.path.getctime(self.path(name))) + + def get_modified_time(self, name): + return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) diff --git a/django/core/files/storage/mixins.py b/django/core/files/storage/mixins.py new file mode 100644 index 0000000000..663a163bea --- /dev/null +++ b/django/core/files/storage/mixins.py @@ -0,0 +1,15 @@ +class StorageSettingsMixin: + def _clear_cached_properties(self, setting, **kwargs): + """Reset setting based property values.""" + if setting == "MEDIA_ROOT": + self.__dict__.pop("base_location", None) + self.__dict__.pop("location", None) + elif setting == "MEDIA_URL": + self.__dict__.pop("base_url", None) + elif setting == "FILE_UPLOAD_PERMISSIONS": + self.__dict__.pop("file_permissions_mode", None) + elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS": + self.__dict__.pop("directory_permissions_mode", None) + + def _value_or_setting(self, value, setting): + return setting if value is None else value diff --git a/django/core/handlers/asgi.py b/django/core/handlers/asgi.py index b5372a1d49..f0125e7321 100644 --- a/django/core/handlers/asgi.py +++ b/django/core/handlers/asgi.py @@ -19,6 +19,7 @@ from django.http import ( parse_cookie, ) from django.urls import set_script_prefix +from django.utils.asyncio import aclosing from django.utils.functional import cached_property logger = logging.getLogger("django.request") @@ -263,19 +264,22 @@ class ASGIHandler(base.BaseHandler): ) # Streaming responses need to be pinned to their iterator. if response.streaming: - # Access `__iter__` and not `streaming_content` directly in case - # it has been overridden in a subclass. 
- for part in response: - for chunk, _ in self.chunk_bytes(part): - await send( - { - "type": "http.response.body", - "body": chunk, - # Ignore "more" as there may be more parts; instead, - # use an empty final closing message with False. - "more_body": True, - } - ) + # - Consume via `__aiter__` and not `streaming_content` directly, to + # allow mapping of a sync iterator. + # - Use aclosing() when consuming aiter. + # See https://github.com/python/cpython/commit/6e8dcda + async with aclosing(response.__aiter__()) as content: + async for part in content: + for chunk, _ in self.chunk_bytes(part): + await send( + { + "type": "http.response.body", + "body": chunk, + # Ignore "more" as there may be more parts; instead, + # use an empty final closing message with False. + "more_body": True, + } + ) # Final closing message. await send({"type": "http.response.body"}) # Other responses just need chunking. diff --git a/django/core/handlers/base.py b/django/core/handlers/base.py index a934659186..8911543d4e 100644 --- a/django/core/handlers/base.py +++ b/django/core/handlers/base.py @@ -2,7 +2,7 @@ import asyncio import logging import types -from asgiref.sync import async_to_sync, sync_to_async +from asgiref.sync import async_to_sync, iscoroutinefunction, sync_to_async from django.conf import settings from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed @@ -119,7 +119,7 @@ class BaseHandler: - Asynchronous methods are left alone """ if method_is_async is None: - method_is_async = asyncio.iscoroutinefunction(method) + method_is_async = iscoroutinefunction(method) if debug and not name: name = name or "method %s()" % method.__qualname__ if is_async: @@ -191,7 +191,7 @@ class BaseHandler: if response is None: wrapped_callback = self.make_view_atomic(callback) # If it is an asynchronous view, run it in a subthread. - if asyncio.iscoroutinefunction(wrapped_callback): + if iscoroutinefunction(wrapped_callback): wrapped_callback = async_to_sync(wrapped_callback) try: response = wrapped_callback(request, *callback_args, **callback_kwargs) @@ -245,7 +245,7 @@ class BaseHandler: if response is None: wrapped_callback = self.make_view_atomic(callback) # If it is a synchronous view, run it in a subthread - if not asyncio.iscoroutinefunction(wrapped_callback): + if not iscoroutinefunction(wrapped_callback): wrapped_callback = sync_to_async( wrapped_callback, thread_sensitive=True ) @@ -278,7 +278,7 @@ class BaseHandler: % (middleware_method.__self__.__class__.__name__,), ) try: - if asyncio.iscoroutinefunction(response.render): + if iscoroutinefunction(response.render): response = await response.render() else: response = await sync_to_async( @@ -346,7 +346,7 @@ class BaseHandler: non_atomic_requests = getattr(view, "_non_atomic_requests", set()) for alias, settings_dict in connections.settings.items(): if settings_dict["ATOMIC_REQUESTS"] and alias not in non_atomic_requests: - if asyncio.iscoroutinefunction(view): + if iscoroutinefunction(view): raise RuntimeError( "You cannot use ATOMIC_REQUESTS with async views." 
) diff --git a/django/core/handlers/exception.py b/django/core/handlers/exception.py index 79577c2d0a..a0b1ba678a 100644 --- a/django/core/handlers/exception.py +++ b/django/core/handlers/exception.py @@ -1,9 +1,8 @@ -import asyncio import logging import sys from functools import wraps -from asgiref.sync import sync_to_async +from asgiref.sync import iscoroutinefunction, sync_to_async from django.conf import settings from django.core import signals @@ -34,7 +33,7 @@ def convert_exception_to_response(get_response): no middleware leaks an exception and that the next middleware in the stack can rely on getting a response instead of an exception. """ - if asyncio.iscoroutinefunction(get_response): + if iscoroutinefunction(get_response): @wraps(get_response) async def inner(request): diff --git a/django/core/handlers/wsgi.py b/django/core/handlers/wsgi.py index 8876f47dea..bca0857622 100644 --- a/django/core/handlers/wsgi.py +++ b/django/core/handlers/wsgi.py @@ -76,7 +76,7 @@ class WSGIRequest(HttpRequest): self.path_info = path_info # be careful to only replace the first slash in the path because of # http://test/something and http://test//something being different as - # stated in https://www.ietf.org/rfc/rfc2396.txt + # stated in RFC 3986. self.path = "%s/%s" % (script_name.rstrip("/"), path_info.replace("/", "", 1)) self.META = environ self.META["PATH_INFO"] = path_info diff --git a/django/core/mail/backends/smtp.py b/django/core/mail/backends/smtp.py index 5df7c20ae0..7ce29d27fd 100644 --- a/django/core/mail/backends/smtp.py +++ b/django/core/mail/backends/smtp.py @@ -7,6 +7,7 @@ from django.conf import settings from django.core.mail.backends.base import BaseEmailBackend from django.core.mail.message import sanitize_address from django.core.mail.utils import DNS_NAME +from django.utils.functional import cached_property class EmailBackend(BaseEmailBackend): @@ -54,6 +55,13 @@ class EmailBackend(BaseEmailBackend): def connection_class(self): return smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP + @cached_property + def ssl_context(self): + ssl_context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) + if self.ssl_certfile or self.ssl_keyfile: + ssl_context.load_cert_chain(self.ssl_certfile, self.ssl_keyfile) + return ssl_context + def open(self): """ Ensure an open connection to the email server. Return whether or not a @@ -70,12 +78,7 @@ class EmailBackend(BaseEmailBackend): if self.timeout is not None: connection_params["timeout"] = self.timeout if self.use_ssl: - connection_params.update( - { - "keyfile": self.ssl_keyfile, - "certfile": self.ssl_certfile, - } - ) + connection_params["context"] = self.ssl_context try: self.connection = self.connection_class( self.host, self.port, **connection_params @@ -84,9 +87,7 @@ class EmailBackend(BaseEmailBackend): # TLS/SSL are mutually exclusive, so only attempt TLS over # non-secure connections. if not self.use_ssl and self.use_tls: - self.connection.starttls( - keyfile=self.ssl_keyfile, certfile=self.ssl_certfile - ) + self.connection.starttls(context=self.ssl_context) if self.username and self.password: self.connection.login(self.username, self.password) return True diff --git a/django/core/mail/message.py b/django/core/mail/message.py index cd5b71ad51..f3fe6186c7 100644 --- a/django/core/mail/message.py +++ b/django/core/mail/message.py @@ -36,7 +36,7 @@ class BadHeaderError(ValueError): pass -# Header names that contain structured address data (RFC #5322) +# Header names that contain structured address data (RFC 5322). 
ADDRESS_HEADERS = { "from", "sender", @@ -382,8 +382,8 @@ class EmailMessage: encoding = self.encoding or settings.DEFAULT_CHARSET attachment = SafeMIMEText(content, subtype, encoding) elif basetype == "message" and subtype == "rfc822": - # Bug #18967: per RFC2046 s5.2.1, message/rfc822 attachments - # must not be base64 encoded. + # Bug #18967: Per RFC 2046 Section 5.2.1, message/rfc822 + # attachments must not be base64 encoded. if isinstance(content, EmailMessage): # convert content into an email.Message first content = content.message() diff --git a/django/core/management/commands/inspectdb.py b/django/core/management/commands/inspectdb.py index d46180cd99..992c523a8e 100644 --- a/django/core/management/commands/inspectdb.py +++ b/django/core/management/commands/inspectdb.py @@ -78,18 +78,16 @@ class Command(BaseCommand): ) yield "from %s import models" % self.db_module known_models = [] - table_info = connection.introspection.get_table_list(cursor) - # Determine types of tables and/or views to be introspected. types = {"t"} if options["include_partitions"]: types.add("p") if options["include_views"]: types.add("v") + table_info = connection.introspection.get_table_list(cursor) + table_info = {info.name: info for info in table_info if info.type in types} - for table_name in options["table"] or sorted( - info.name for info in table_info if info.type in types - ): + for table_name in options["table"] or sorted(name for name in table_info): if table_name_filter is not None and callable(table_name_filter): if not table_name_filter(table_name): continue @@ -232,6 +230,10 @@ class Command(BaseCommand): if field_type.startswith(("ForeignKey(", "OneToOneField(")): field_desc += ", models.DO_NOTHING" + # Add comment. + if connection.features.supports_comments and row.comment: + extra_params["db_comment"] = row.comment + if extra_params: if not field_desc.endswith("("): field_desc += ", " @@ -242,14 +244,22 @@ class Command(BaseCommand): if comment_notes: field_desc += " # " + " ".join(comment_notes) yield " %s" % field_desc - is_view = any( - info.name == table_name and info.type == "v" for info in table_info - ) - is_partition = any( - info.name == table_name and info.type == "p" for info in table_info - ) + comment = None + if info := table_info.get(table_name): + is_view = info.type == "v" + is_partition = info.type == "p" + if connection.features.supports_comments: + comment = info.comment + else: + is_view = False + is_partition = False yield from self.get_meta( - table_name, constraints, column_to_field_name, is_view, is_partition + table_name, + constraints, + column_to_field_name, + is_view, + is_partition, + comment, ) def normalize_col_name(self, col_name, used_column_names, is_relation): @@ -328,8 +338,9 @@ class Command(BaseCommand): field_notes.append("This field type is a guess.") # Add max_length for all CharFields. 
- if field_type == "CharField" and row.internal_size: - field_params["max_length"] = int(row.internal_size) + if field_type == "CharField" and row.display_size: + if (size := int(row.display_size)) and size > 0: + field_params["max_length"] = size if field_type in {"CharField", "TextField"} and row.collation: field_params["db_collation"] = row.collation @@ -353,7 +364,13 @@ class Command(BaseCommand): return field_type, field_params, field_notes def get_meta( - self, table_name, constraints, column_to_field_name, is_view, is_partition + self, + table_name, + constraints, + column_to_field_name, + is_view, + is_partition, + comment, ): """ Return a sequence comprising the lines of code necessary @@ -391,4 +408,6 @@ class Command(BaseCommand): if unique_together: tup = "(" + ", ".join(unique_together) + ",)" meta += [" unique_together = %s" % tup] + if comment: + meta += [f" db_table_comment = {comment!r}"] return meta diff --git a/django/core/management/commands/loaddata.py b/django/core/management/commands/loaddata.py index 6fae255407..618c6c29b6 100644 --- a/django/core/management/commands/loaddata.py +++ b/django/core/management/commands/loaddata.py @@ -207,7 +207,7 @@ class Command(BaseCommand): self.models.add(obj.object.__class__) try: obj.save(using=self.using) - # psycopg2 raises ValueError if data contains NUL chars. + # psycopg raises ValueError if data contains NUL chars. except (DatabaseError, IntegrityError, ValueError) as e: e.args = ( "Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s" diff --git a/django/core/signing.py b/django/core/signing.py index b402d87b87..5521493490 100644 --- a/django/core/signing.py +++ b/django/core/signing.py @@ -37,10 +37,12 @@ import base64 import datetime import json import time +import warnings import zlib from django.conf import settings from django.utils.crypto import constant_time_compare, salted_hmac +from django.utils.deprecation import RemovedInDjango51Warning from django.utils.encoding import force_bytes from django.utils.module_loading import import_string from django.utils.regex_helper import _lazy_re_compile @@ -147,7 +149,7 @@ def dumps( The serializer is expected to return a bytestring. """ - return TimestampSigner(key, salt=salt).sign_object( + return TimestampSigner(key=key, salt=salt).sign_object( obj, serializer=serializer, compress=compress ) @@ -165,7 +167,9 @@ def loads( The serializer is expected to accept a bytestring. """ - return TimestampSigner(key, salt=salt, fallback_keys=fallback_keys).unsign_object( + return TimestampSigner( + key=key, salt=salt, fallback_keys=fallback_keys + ).unsign_object( s, serializer=serializer, max_age=max_age, @@ -173,8 +177,13 @@ def loads( class Signer: + # RemovedInDjango51Warning: When the deprecation ends, replace with: + # def __init__( + # self, *, key=None, sep=":", salt=None, algorithm=None, fallback_keys=None + # ): def __init__( self, + *args, key=None, sep=":", salt=None, @@ -188,16 +197,29 @@ class Signer: else settings.SECRET_KEY_FALLBACKS ) self.sep = sep - if _SEP_UNSAFE.match(self.sep): - raise ValueError( - "Unsafe Signer separator: %r (cannot be empty or consist of " - "only A-z0-9-_=)" % sep, - ) self.salt = salt or "%s.%s" % ( self.__class__.__module__, self.__class__.__name__, ) self.algorithm = algorithm or "sha256" + # RemovedInDjango51Warning. 
+ if args: + warnings.warn( + f"Passing positional arguments to {self.__class__.__name__} is " + f"deprecated.", + RemovedInDjango51Warning, + stacklevel=2, + ) + for arg, attr in zip( + args, ["key", "sep", "salt", "algorithm", "fallback_keys"] + ): + if arg or attr == "sep": + setattr(self, attr, arg) + if _SEP_UNSAFE.match(self.sep): + raise ValueError( + "Unsafe Signer separator: %r (cannot be empty or consist of " + "only A-z0-9-_=)" % sep, + ) def signature(self, value, key=None): key = key or self.key diff --git a/django/db/backends/base/base.py b/django/db/backends/base/base.py index f04bd8882a..3b845ec9b3 100644 --- a/django/db/backends/base/base.py +++ b/django/db/backends/base/base.py @@ -8,6 +8,8 @@ import warnings from collections import deque from contextlib import contextmanager +from django.db.backends.utils import debug_transaction + try: import zoneinfo except ImportError: @@ -307,12 +309,12 @@ class BaseDatabaseWrapper: def _commit(self): if self.connection is not None: - with self.wrap_database_errors: + with debug_transaction(self, "COMMIT"), self.wrap_database_errors: return self.connection.commit() def _rollback(self): if self.connection is not None: - with self.wrap_database_errors: + with debug_transaction(self, "ROLLBACK"), self.wrap_database_errors: return self.connection.rollback() def _close(self): @@ -488,9 +490,11 @@ class BaseDatabaseWrapper: if start_transaction_under_autocommit: self._start_transaction_under_autocommit() - else: + elif autocommit: self._set_autocommit(autocommit) - + else: + with debug_transaction(self, "BEGIN"): + self._set_autocommit(autocommit) self.autocommit = autocommit if autocommit and self.run_commit_hooks_on_set_autocommit_on: diff --git a/django/db/backends/base/features.py b/django/db/backends/base/features.py index 6b93508383..ef1fe88336 100644 --- a/django/db/backends/base/features.py +++ b/django/db/backends/base/features.py @@ -8,7 +8,6 @@ class BaseDatabaseFeatures: gis_enabled = False # Oracle can't group by LOB (large object) data types. allows_group_by_lob = True - allows_group_by_pk = False allows_group_by_selected_pks = False allows_group_by_refs = True empty_fetchmany_value = [] @@ -165,6 +164,8 @@ class BaseDatabaseFeatures: # Can we roll back DDL in a transaction? can_rollback_ddl = False + schema_editor_uses_clientside_param_binding = False + # Does it support operations requiring references rename in a transaction? supports_atomic_references_rename = True @@ -333,9 +334,20 @@ class BaseDatabaseFeatures: # Does the backend support non-deterministic collations? supports_non_deterministic_collations = True + # Does the backend support column and table comments? + supports_comments = False + # Does the backend support column comments in ADD COLUMN statements? + supports_comments_inline = False + # Does the backend support the logical XOR operator? supports_logical_xor = False + # Set to (exception, message) if null characters in text are disallowed. + prohibits_null_characters_in_text_exception = None + + # Does the backend support unlimited character columns? + supports_unlimited_charfield = False + # Collation names for use by the Django test suite. test_collations = { "ci": None, # Case-insensitive. 
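The signing change above deprecates positional arguments to Signer in favour of keyword arguments (the commented-out target signature is keyword-only). A minimal usage sketch, not part of the patch itself; the secret key and salt values below are placeholders introduced only to make the sketch self-contained:

from django.conf import settings

settings.configure(SECRET_KEY="placeholder-secret-key")  # placeholder, only so the sketch runs standalone

from django.core.signing import Signer

signer = Signer(salt="example.salt")  # keyword arguments: the supported form
token = signer.sign("hello")
assert signer.unsign(token) == "hello"

# Signer("extra-key", ":", "example.salt") still works during the deprecation
# period but now emits RemovedInDjango51Warning, per the warning added above.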
diff --git a/django/db/backends/base/operations.py b/django/db/backends/base/operations.py index dd29068495..4ee73c0734 100644 --- a/django/db/backends/base/operations.py +++ b/django/db/backends/base/operations.py @@ -1,5 +1,6 @@ import datetime import decimal +import json from importlib import import_module import sqlparse @@ -524,6 +525,9 @@ class BaseDatabaseOperations: else: return value + def adapt_integerfield_value(self, value, internal_type): + return value + def adapt_datefield_value(self, value): """ Transform a date value to an object compatible with what is expected @@ -575,6 +579,9 @@ class BaseDatabaseOperations: """ return value or None + def adapt_json_value(self, value, encoder): + return json.dumps(value, cls=encoder) + def year_lookup_bounds_for_date_field(self, value, iso_year=False): """ Return a two-elements list with the lower and upper bound to be used diff --git a/django/db/backends/base/schema.py b/django/db/backends/base/schema.py index fe31967ce2..3a83e14be9 100644 --- a/django/db/backends/base/schema.py +++ b/django/db/backends/base/schema.py @@ -87,13 +87,12 @@ class BaseDatabaseSchemaEditor: sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" sql_alter_column = "ALTER TABLE %(table)s %(changes)s" - sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" + sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" sql_alter_column_no_default_null = sql_alter_column_no_default - sql_alter_column_collate = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" sql_rename_column = ( "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" @@ -142,6 +141,9 @@ class BaseDatabaseSchemaEditor: sql_delete_procedure = "DROP PROCEDURE %(procedure)s" + sql_alter_table_comment = "COMMENT ON TABLE %(table)s IS %(comment)s" + sql_alter_column_comment = "COMMENT ON COLUMN %(table)s.%(column)s IS %(comment)s" + def __init__(self, connection, collect_sql=False, atomic=True): self.connection = connection self.collect_sql = collect_sql @@ -290,6 +292,8 @@ class BaseDatabaseSchemaEditor: yield column_db_type if collation := field_db_params.get("collation"): yield self._collate_sql(collation) + if self.connection.features.supports_comments_inline and field.db_comment: + yield self._comment_sql(field.db_comment) # Work out nullability. null = field.null # Include a default value, if requested. @@ -446,6 +450,23 @@ class BaseDatabaseSchemaEditor: # definition. self.execute(sql, params or None) + if self.connection.features.supports_comments: + # Add table comment. + if model._meta.db_table_comment: + self.alter_db_table_comment(model, None, model._meta.db_table_comment) + # Add column comments. + if not self.connection.features.supports_comments_inline: + for field in model._meta.local_fields: + if field.db_comment: + field_db_params = field.db_parameters( + connection=self.connection + ) + field_type = field_db_params["type"] + self.execute( + *self._alter_column_comment_sql( + model, field, field_type, field.db_comment + ) + ) # Add any field index and index_together's (deferred as SQLite # _remake_table needs it). 
self.deferred_sql.extend(self._model_indexes_sql(model)) @@ -615,6 +636,15 @@ class BaseDatabaseSchemaEditor: if isinstance(sql, Statement): sql.rename_table_references(old_db_table, new_db_table) + def alter_db_table_comment(self, model, old_db_table_comment, new_db_table_comment): + self.execute( + self.sql_alter_table_comment + % { + "table": self.quote_name(model._meta.db_table), + "comment": self.quote_value(new_db_table_comment or ""), + } + ) + def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): """Move a model's table between tablespaces.""" self.execute( @@ -694,6 +724,18 @@ class BaseDatabaseSchemaEditor: "changes": changes_sql, } self.execute(sql, params) + # Add field comment, if required. + if ( + field.db_comment + and self.connection.features.supports_comments + and not self.connection.features.supports_comments_inline + ): + field_type = db_params["type"] + self.execute( + *self._alter_column_comment_sql( + model, field, field_type, field.db_comment + ) + ) # Add an index, if required self.deferred_sql.extend(self._field_indexes_sql(model, field)) # Reset connection if required @@ -814,6 +856,11 @@ class BaseDatabaseSchemaEditor: self.connection.features.supports_foreign_keys and old_field.remote_field and old_field.db_constraint + and self._field_should_be_altered( + old_field, + new_field, + ignore={"db_comment"}, + ) ): fk_names = self._constraint_names( model, [old_field.column], foreign_key=True @@ -950,17 +997,18 @@ class BaseDatabaseSchemaEditor: # Type suffix change? (e.g. auto increment). old_type_suffix = old_field.db_type_suffix(connection=self.connection) new_type_suffix = new_field.db_type_suffix(connection=self.connection) - # Collation change? - if old_collation != new_collation: - # Collation change handles also a type change. - fragment = self._alter_column_collation_sql( - model, new_field, new_type, new_collation, old_field + # Type, collation, or comment change? + if ( + old_type != new_type + or old_type_suffix != new_type_suffix + or old_collation != new_collation + or ( + self.connection.features.supports_comments + and old_field.db_comment != new_field.db_comment ) - actions.append(fragment) - # Type change? - elif (old_type, old_type_suffix) != (new_type, new_type_suffix): + ): fragment, other_actions = self._alter_column_type_sql( - model, old_field, new_field, new_type + model, old_field, new_field, new_type, old_collation, new_collation ) actions.append(fragment) post_actions.extend(other_actions) @@ -1076,20 +1124,14 @@ class BaseDatabaseSchemaEditor: rel_collation = rel_db_params.get("collation") old_rel_db_params = old_rel.field.db_parameters(connection=self.connection) old_rel_collation = old_rel_db_params.get("collation") - if old_rel_collation != rel_collation: - # Collation change handles also a type change. 
- fragment = self._alter_column_collation_sql( - new_rel.related_model, - new_rel.field, - rel_type, - rel_collation, - old_rel.field, - ) - other_actions = [] - else: - fragment, other_actions = self._alter_column_type_sql( - new_rel.related_model, old_rel.field, new_rel.field, rel_type - ) + fragment, other_actions = self._alter_column_type_sql( + new_rel.related_model, + old_rel.field, + new_rel.field, + rel_type, + old_rel_collation, + rel_collation, + ) self.execute( self.sql_alter_column % { @@ -1209,7 +1251,9 @@ class BaseDatabaseSchemaEditor: params, ) - def _alter_column_type_sql(self, model, old_field, new_field, new_type): + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): """ Hook to specialize column type alteration for different backends, for cases when a creation type is different to an alteration type @@ -1219,33 +1263,54 @@ class BaseDatabaseSchemaEditor: an ALTER TABLE statement and a list of extra (sql, params) tuples to run once the field is altered. """ + other_actions = [] + if collate_sql := self._collate_sql( + new_collation, old_collation, model._meta.db_table + ): + collate_sql = f" {collate_sql}" + else: + collate_sql = "" + # Comment change? + comment_sql = "" + if self.connection.features.supports_comments and not new_field.many_to_many: + if old_field.db_comment != new_field.db_comment: + # PostgreSQL and Oracle can't execute 'ALTER COLUMN ...' and + # 'COMMENT ON ...' at the same time. + sql, params = self._alter_column_comment_sql( + model, new_field, new_type, new_field.db_comment + ) + if sql: + other_actions.append((sql, params)) + if new_field.db_comment: + comment_sql = self._comment_sql(new_field.db_comment) return ( ( self.sql_alter_column_type % { "column": self.quote_name(new_field.column), "type": new_type, + "collation": collate_sql, + "comment": comment_sql, }, [], ), - [], + other_actions, ) - def _alter_column_collation_sql( - self, model, new_field, new_type, new_collation, old_field - ): + def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment): return ( - self.sql_alter_column_collate + self.sql_alter_column_comment % { + "table": self.quote_name(model._meta.db_table), "column": self.quote_name(new_field.column), - "type": new_type, - "collation": " " + self._collate_sql(new_collation) - if new_collation - else "", + "comment": self._comment_sql(new_db_comment), }, [], ) + def _comment_sql(self, comment): + return self.quote_value(comment or "") + def _alter_many_to_many(self, model, old_field, new_field, strict): """Alter M2Ms to repoint their to= endpoints.""" # Rename the through table @@ -1439,16 +1504,18 @@ class BaseDatabaseSchemaEditor: output.append(self._create_index_sql(model, fields=[field])) return output - def _field_should_be_altered(self, old_field, new_field): + def _field_should_be_altered(self, old_field, new_field, ignore=None): + ignore = ignore or set() _, old_path, old_args, old_kwargs = old_field.deconstruct() _, new_path, new_args, new_kwargs = new_field.deconstruct() # Don't alter when: # - changing only a field name # - changing an attribute that doesn't affect the schema + # - changing an attribute in the provided set of ignored attributes # - adding only a db_column and the column name is not changed - for attr in old_field.non_db_attrs: + for attr in ignore.union(old_field.non_db_attrs): old_kwargs.pop(attr, None) - for attr in new_field.non_db_attrs: + for attr in ignore.union(new_field.non_db_attrs): new_kwargs.pop(attr, None) 
return self.quote_name(old_field.column) != self.quote_name( new_field.column @@ -1667,6 +1734,8 @@ class BaseDatabaseSchemaEditor: ) def _delete_check_sql(self, model, name): + if not self.connection.features.supports_table_check_constraints: + return None return self._delete_constraint_sql(self.sql_delete_check, model, name) def _delete_constraint_sql(self, template, model, name): @@ -1745,8 +1814,8 @@ class BaseDatabaseSchemaEditor: def _delete_primary_key_sql(self, model, name): return self._delete_constraint_sql(self.sql_delete_pk, model, name) - def _collate_sql(self, collation): - return "COLLATE " + self.quote_name(collation) + def _collate_sql(self, collation, old_collation=None, table_name=None): + return "COLLATE " + self.quote_name(collation) if collation else "" def remove_procedure(self, procedure_name, param_types=()): sql = self.sql_delete_procedure % { diff --git a/django/db/backends/mysql/compiler.py b/django/db/backends/mysql/compiler.py index bd2715fb43..2ec6bea2f1 100644 --- a/django/db/backends/mysql/compiler.py +++ b/django/db/backends/mysql/compiler.py @@ -1,4 +1,4 @@ -from django.core.exceptions import FieldError +from django.core.exceptions import FieldError, FullResultSet from django.db.models.expressions import Col from django.db.models.sql import compiler @@ -40,12 +40,16 @@ class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler): "DELETE %s FROM" % self.quote_name_unless_alias(self.query.get_initial_alias()) ] - from_sql, from_params = self.get_from_clause() + from_sql, params = self.get_from_clause() result.extend(from_sql) - where_sql, where_params = self.compile(where) - if where_sql: + try: + where_sql, where_params = self.compile(where) + except FullResultSet: + pass + else: result.append("WHERE %s" % where_sql) - return " ".join(result), tuple(from_params) + tuple(where_params) + params.extend(where_params) + return " ".join(result), tuple(params) class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler): diff --git a/django/db/backends/mysql/features.py b/django/db/backends/mysql/features.py index f2e4e1f1f4..2f5cc91881 100644 --- a/django/db/backends/mysql/features.py +++ b/django/db/backends/mysql/features.py @@ -6,7 +6,7 @@ from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () - allows_group_by_pk = True + allows_group_by_selected_pks = True related_fields_match_type = True # MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME. allow_sliced_subqueries_with_in = False @@ -16,9 +16,10 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_date_lookup_using_string = False supports_timezones = False requires_explicit_null_ordering_when_grouping = True - can_release_savepoints = True atomic_transactions = False can_clone_databases = True + supports_comments = True + supports_comments_inline = True supports_temporal_subtraction = True supports_slicing_ordering_in_compound = True supports_index_on_text_field = False @@ -109,18 +110,6 @@ class DatabaseFeatures(BaseDatabaseFeatures): "update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation", }, } - if "ONLY_FULL_GROUP_BY" in self.connection.sql_mode: - skips.update( - { - "GROUP BY optimization does not work properly when " - "ONLY_FULL_GROUP_BY mode is enabled on MySQL, see #31331.": { - "aggregation.tests.AggregateTestCase." - "test_aggregation_subquery_annotation_multivalued", - "annotations.tests.NonAggregateAnnotationTestCase." 
- "test_annotation_aggregate_with_m2o", - }, - } - ) if self.connection.mysql_is_mariadb and ( 10, 4, diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 9f57cd599a..a5ebf37112 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -5,18 +5,20 @@ from MySQLdb.constants import FIELD_TYPE from django.db.backends.base.introspection import BaseDatabaseIntrospection from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo -from django.db.backends.base.introspection import TableInfo +from django.db.backends.base.introspection import TableInfo as BaseTableInfo from django.db.models import Index from django.utils.datastructures import OrderedSet FieldInfo = namedtuple( - "FieldInfo", BaseFieldInfo._fields + ("extra", "is_unsigned", "has_json_constraint") + "FieldInfo", + BaseFieldInfo._fields + ("extra", "is_unsigned", "has_json_constraint", "comment"), ) InfoLine = namedtuple( "InfoLine", "col_name data_type max_len num_prec num_scale extra column_default " - "collation is_unsigned", + "collation is_unsigned comment", ) +TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) class DatabaseIntrospection(BaseDatabaseIntrospection): @@ -68,9 +70,18 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): def get_table_list(self, cursor): """Return a list of table and view names in the current database.""" - cursor.execute("SHOW FULL TABLES") + cursor.execute( + """ + SELECT + table_name, + table_type, + table_comment + FROM information_schema.tables + WHERE table_schema = DATABASE() + """ + ) return [ - TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1])) + TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1]), row[2]) for row in cursor.fetchall() ] @@ -128,7 +139,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): CASE WHEN column_type LIKE '%% unsigned' THEN 1 ELSE 0 - END AS is_unsigned + END AS is_unsigned, + column_comment FROM information_schema.columns WHERE table_name = %s AND table_schema = DATABASE() """, @@ -148,7 +160,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): info = field_info[line[0]] fields.append( FieldInfo( - *line[:3], + *line[:2], + to_int(info.max_len) or line[2], to_int(info.max_len) or line[3], to_int(info.num_prec) or line[4], to_int(info.num_scale) or line[5], @@ -158,6 +171,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): info.extra, info.is_unsigned, line[0] in json_constraints, + info.comment, ) ) return fields diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py index 821f4ddbce..64771fe6c3 100644 --- a/django/db/backends/mysql/schema.py +++ b/django/db/backends/mysql/schema.py @@ -9,8 +9,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_alter_column_null = "MODIFY %(column)s %(type)s NULL" sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL" - sql_alter_column_type = "MODIFY %(column)s %(type)s" - sql_alter_column_collate = "MODIFY %(column)s %(type)s%(collation)s" + sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s%(comment)s" sql_alter_column_no_default_null = "ALTER COLUMN %(column)s SET DEFAULT NULL" # No 'CASCADE' which works as a no-op in MySQL but is undocumented @@ -33,6 +32,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s" + sql_alter_table_comment = "ALTER TABLE %(table)s COMMENT = %(comment)s" + 
sql_alter_column_comment = None + + @property def sql_delete_check(self): if self.connection.mysql_is_mariadb: @@ -218,10 +220,22 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): new_type += " NOT NULL" return new_type - def _alter_column_type_sql(self, model, old_field, new_field, new_type): + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): new_type = self._set_field_new_type_null_status(old_field, new_type) - return super()._alter_column_type_sql(model, old_field, new_field, new_type) + return super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) def _rename_field_sql(self, table, old_field, new_field, new_type): new_type = self._set_field_new_type_null_status(old_field, new_type) return super()._rename_field_sql(table, old_field, new_field, new_type) + + def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment): + # The comment is altered together with the column type. + return "", [] + + def _comment_sql(self, comment): + comment_sql = super()._comment_sql(comment) + return f" COMMENT {comment_sql}" diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index 29e43d1f62..845ab8ccf5 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -13,6 +13,7 @@ from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import IntegrityError from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.utils import debug_transaction from django.utils.asyncio import async_unsafe from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property @@ -306,7 +307,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): def _commit(self): if self.connection is not None: - with wrap_oracle_errors(): + with debug_transaction(self, "COMMIT"), wrap_oracle_errors(): return self.connection.commit() # Oracle doesn't support releasing savepoints.
But we fake them when query diff --git a/django/db/backends/oracle/features.py b/django/db/backends/oracle/features.py index a85cf45560..df36245af9 100644 --- a/django/db/backends/oracle/features.py +++ b/django/db/backends/oracle/features.py @@ -25,6 +25,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_partially_nullable_unique_constraints = False supports_deferrable_unique_constraints = True truncates_names = True + supports_comments = True supports_tablespaces = True supports_sequence_reset = False can_introspect_materialized_views = True diff --git a/django/db/backends/oracle/introspection.py b/django/db/backends/oracle/introspection.py index 363b83efca..5d1e3e6761 100644 --- a/django/db/backends/oracle/introspection.py +++ b/django/db/backends/oracle/introspection.py @@ -5,10 +5,13 @@ import cx_Oracle from django.db import models from django.db.backends.base.introspection import BaseDatabaseIntrospection from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo -from django.db.backends.base.introspection import TableInfo +from django.db.backends.base.introspection import TableInfo as BaseTableInfo from django.utils.functional import cached_property -FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield", "is_json")) +FieldInfo = namedtuple( + "FieldInfo", BaseFieldInfo._fields + ("is_autofield", "is_json", "comment") +) +TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) class DatabaseIntrospection(BaseDatabaseIntrospection): @@ -77,8 +80,14 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): """Return a list of table and view names in the current database.""" cursor.execute( """ - SELECT table_name, 't' + SELECT + user_tables.table_name, + 't', + user_tab_comments.comments FROM user_tables + LEFT OUTER JOIN + user_tab_comments + ON user_tab_comments.table_name = user_tables.table_name WHERE NOT EXISTS ( SELECT 1 @@ -86,13 +95,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): WHERE user_mviews.mview_name = user_tables.table_name ) UNION ALL - SELECT view_name, 'v' FROM user_views + SELECT view_name, 'v', NULL FROM user_views UNION ALL - SELECT mview_name, 'v' FROM user_mviews + SELECT mview_name, 'v', NULL FROM user_mviews """ ) return [ - TableInfo(self.identifier_converter(row[0]), row[1]) + TableInfo(self.identifier_converter(row[0]), row[1], row[2]) for row in cursor.fetchall() ] @@ -116,7 +125,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): WHEN user_tab_cols.char_used IS NULL THEN user_tab_cols.data_length ELSE user_tab_cols.char_length - END as internal_size, + END as display_size, CASE WHEN user_tab_cols.identity_column = 'YES' THEN 1 ELSE 0 @@ -131,29 +140,36 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): ) THEN 1 ELSE 0 - END as is_json + END as is_json, + user_col_comments.comments as col_comment FROM user_tab_cols LEFT OUTER JOIN user_tables ON user_tables.table_name = user_tab_cols.table_name + LEFT OUTER JOIN + user_col_comments ON + user_col_comments.column_name = user_tab_cols.column_name AND + user_col_comments.table_name = user_tab_cols.table_name WHERE user_tab_cols.table_name = UPPER(%s) """, [table_name], ) field_map = { column: ( - internal_size, + display_size, default if default != "NULL" else None, collation, is_autofield, is_json, + comment, ) for ( column, default, collation, - internal_size, + display_size, is_autofield, is_json, + comment, ) in cursor.fetchall() } self.cache_bust_counter += 1 @@ -165,13 +181,21 @@ class 
DatabaseIntrospection(BaseDatabaseIntrospection): description = [] for desc in cursor.description: name = desc[0] - internal_size, default, collation, is_autofield, is_json = field_map[name] + ( + display_size, + default, + collation, + is_autofield, + is_json, + comment, + ) = field_map[name] name %= {} # cx_Oracle, for some reason, doubles percent signs. description.append( FieldInfo( self.identifier_converter(name), - *desc[1:3], - internal_size, + desc[1], + display_size, + desc[3], desc[4] or 0, desc[5] or 0, *desc[6:], @@ -179,6 +203,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): collation, is_autofield, is_json, + comment, ) ) return description diff --git a/django/db/backends/oracle/operations.py b/django/db/backends/oracle/operations.py index 78f998183e..d34ca23bae 100644 --- a/django/db/backends/oracle/operations.py +++ b/django/db/backends/oracle/operations.py @@ -323,16 +323,16 @@ END; # Unlike Psycopg's `query` and MySQLdb`'s `_executed`, cx_Oracle's # `statement` doesn't contain the query parameters. Substitute # parameters manually. - if isinstance(params, (tuple, list)): - for i, param in enumerate(reversed(params), start=1): - param_num = len(params) - i - statement = statement.replace( - ":arg%d" % param_num, force_str(param, errors="replace") - ) - elif isinstance(params, dict): + if params: + if isinstance(params, (tuple, list)): + params = { + f":arg{i}": param for i, param in enumerate(dict.fromkeys(params)) + } + elif isinstance(params, dict): + params = {f":{key}": val for (key, val) in params.items()} for key in sorted(params, key=len, reverse=True): statement = statement.replace( - ":%s" % key, force_str(params[key], errors="replace") + key, force_str(params[key], errors="replace") ) return statement diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py index 1bf12293e8..ec5c9f4142 100644 --- a/django/db/backends/oracle/schema.py +++ b/django/db/backends/oracle/schema.py @@ -13,13 +13,12 @@ from django.utils.duration import duration_iso_string class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s" - sql_alter_column_type = "MODIFY %(column)s %(type)s" + sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s" sql_alter_column_null = "MODIFY %(column)s NULL" sql_alter_column_not_null = "MODIFY %(column)s NOT NULL" sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s" sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL" sql_alter_column_no_default_null = sql_alter_column_no_default - sql_alter_column_collate = "MODIFY %(column)s %(type)s%(collation)s" sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" sql_create_column_inline_fk = ( @@ -169,7 +168,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): self._create_fk_sql(rel.related_model, rel.field, "_fk") ) - def _alter_column_type_sql(self, model, old_field, new_field, new_type): + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): auto_field_types = {"AutoField", "BigAutoField", "SmallAutoField"} # Drop the identity if migrating away from AutoField. 
if ( @@ -178,7 +179,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): and self._is_identity_column(model._meta.db_table, new_field.column) ): self._drop_identity(model._meta.db_table, new_field.column) - return super()._alter_column_type_sql(model, old_field, new_field, new_type) + return super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) def normalize_name(self, name): """ @@ -242,11 +245,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): ) return cursor.fetchone()[0] - def _alter_column_collation_sql( - self, model, new_field, new_type, new_collation, old_field - ): - if new_collation is None: - new_collation = self._get_default_collation(model._meta.db_table) - return super()._alter_column_collation_sql( - model, new_field, new_type, new_collation, old_field - ) + def _collate_sql(self, collation, old_collation=None, table_name=None): + if collation is None and old_collation is not None: + collation = self._get_default_collation(table_name) + return super()._collate_sql(collation, old_collation, table_name) diff --git a/django/db/backends/postgresql/base.py b/django/db/backends/postgresql/base.py index 2758c402ab..99403f5322 100644 --- a/django/db/backends/postgresql/base.py +++ b/django/db/backends/postgresql/base.py @@ -1,7 +1,7 @@ """ PostgreSQL database backend for Django. -Requires psycopg 2: https://www.psycopg.org/ +Requires psycopg2 >= 2.8.4 or psycopg >= 3.1 """ import asyncio @@ -21,47 +21,69 @@ from django.utils.safestring import SafeString from django.utils.version import get_version_tuple try: - import psycopg2 as Database - import psycopg2.extensions - import psycopg2.extras -except ImportError as e: - raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e) + try: + import psycopg as Database + except ImportError: + import psycopg2 as Database +except ImportError: + raise ImproperlyConfigured("Error loading psycopg2 or psycopg module") -def psycopg2_version(): - version = psycopg2.__version__.split(" ", 1)[0] +def psycopg_version(): + version = Database.__version__.split(" ", 1)[0] return get_version_tuple(version) -PSYCOPG2_VERSION = psycopg2_version() - -if PSYCOPG2_VERSION < (2, 8, 4): +if psycopg_version() < (2, 8, 4): raise ImproperlyConfigured( - "psycopg2 version 2.8.4 or newer is required; you have %s" - % psycopg2.__version__ + f"psycopg2 version 2.8.4 or newer is required; you have {Database.__version__}" + ) +if (3,) <= psycopg_version() < (3, 1): + raise ImproperlyConfigured( + f"psycopg version 3.1 or newer is required; you have {Database.__version__}" ) -# Some of these import psycopg2, so import them after checking if it's installed. -from .client import DatabaseClient # NOQA -from .creation import DatabaseCreation # NOQA -from .features import DatabaseFeatures # NOQA -from .introspection import DatabaseIntrospection # NOQA -from .operations import DatabaseOperations # NOQA -from .schema import DatabaseSchemaEditor # NOQA +from .psycopg_any import IsolationLevel, is_psycopg3 # NOQA isort:skip -psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString) -psycopg2.extras.register_uuid() +if is_psycopg3: + from psycopg import adapters, sql + from psycopg.pq import Format -# Register support for inet[] manually so we don't have to handle the Inet() -# object on load all the time. 
-INETARRAY_OID = 1041 -INETARRAY = psycopg2.extensions.new_array_type( - (INETARRAY_OID,), - "INETARRAY", - psycopg2.extensions.UNICODE, -) -psycopg2.extensions.register_type(INETARRAY) + from .psycopg_any import get_adapters_template, register_tzloader + + TIMESTAMPTZ_OID = adapters.types["timestamptz"].oid + +else: + import psycopg2.extensions + import psycopg2.extras + + psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString) + psycopg2.extras.register_uuid() + + # Register support for inet[] manually so we don't have to handle the Inet() + # object on load all the time. + INETARRAY_OID = 1041 + INETARRAY = psycopg2.extensions.new_array_type( + (INETARRAY_OID,), + "INETARRAY", + psycopg2.extensions.UNICODE, + ) + psycopg2.extensions.register_type(INETARRAY) + +# Some of these import psycopg, so import them after checking if it's installed. +from .client import DatabaseClient # NOQA isort:skip +from .creation import DatabaseCreation # NOQA isort:skip +from .features import DatabaseFeatures # NOQA isort:skip +from .introspection import DatabaseIntrospection # NOQA isort:skip +from .operations import DatabaseOperations # NOQA isort:skip +from .schema import DatabaseSchemaEditor # NOQA isort:skip + + +def _get_varchar_column(data): + if data["max_length"] is None: + return "varchar" + return "varchar(%(max_length)s)" % data class DatabaseWrapper(BaseDatabaseWrapper): @@ -76,7 +98,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): "BigAutoField": "bigint", "BinaryField": "bytea", "BooleanField": "boolean", - "CharField": "varchar(%(max_length)s)", + "CharField": _get_varchar_column, "DateField": "date", "DateTimeField": "timestamp with time zone", "DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)", @@ -186,16 +208,16 @@ class DatabaseWrapper(BaseDatabaseWrapper): self.ops.max_name_length(), ) ) - conn_params = {} + conn_params = {"client_encoding": "UTF8"} if settings_dict["NAME"]: conn_params = { - "database": settings_dict["NAME"], + "dbname": settings_dict["NAME"], **settings_dict["OPTIONS"], } elif settings_dict["NAME"] is None: # Connect to the default 'postgres' db. settings_dict.get("OPTIONS", {}).pop("service", None) - conn_params = {"database": "postgres", **settings_dict["OPTIONS"]} + conn_params = {"dbname": "postgres", **settings_dict["OPTIONS"]} else: conn_params = {**settings_dict["OPTIONS"]} @@ -208,38 +230,57 @@ class DatabaseWrapper(BaseDatabaseWrapper): conn_params["host"] = settings_dict["HOST"] if settings_dict["PORT"]: conn_params["port"] = settings_dict["PORT"] + if is_psycopg3: + conn_params["context"] = get_adapters_template( + settings.USE_TZ, self.timezone + ) + # Disable prepared statements by default to keep connection poolers + # working. Can be reenabled via OPTIONS in the settings dict. + conn_params["prepare_threshold"] = conn_params.pop( + "prepare_threshold", None + ) return conn_params @async_unsafe def get_new_connection(self, conn_params): - connection = Database.connect(**conn_params) - # self.isolation_level must be set: # - after connecting to the database in order to obtain the database's # default when no value is explicitly specified in options. # - before calling _set_autocommit() because if autocommit is on, that # will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT. 
options = self.settings_dict["OPTIONS"] + set_isolation_level = False try: - self.isolation_level = options["isolation_level"] + isolation_level_value = options["isolation_level"] except KeyError: - self.isolation_level = connection.isolation_level + self.isolation_level = IsolationLevel.READ_COMMITTED else: # Set the isolation level to the value from OPTIONS. - if self.isolation_level != connection.isolation_level: - connection.set_session(isolation_level=self.isolation_level) - # Register dummy loads() to avoid a round trip from psycopg2's decode - # to json.dumps() to json.loads(), when using a custom decoder in - # JSONField. - psycopg2.extras.register_default_jsonb( - conn_or_curs=connection, loads=lambda x: x - ) + try: + self.isolation_level = IsolationLevel(isolation_level_value) + set_isolation_level = True + except ValueError: + raise ImproperlyConfigured( + f"Invalid transaction isolation level {isolation_level_value} " + f"specified. Use one of the psycopg.IsolationLevel values." + ) + connection = self.Database.connect(**conn_params) + if set_isolation_level: + connection.isolation_level = self.isolation_level + if not is_psycopg3: + # Register dummy loads() to avoid a round trip from psycopg2's + # decode to json.dumps() to json.loads(), when using a custom + # decoder in JSONField. + psycopg2.extras.register_default_jsonb( + conn_or_curs=connection, loads=lambda x: x + ) + connection.cursor_factory = Cursor return connection def ensure_timezone(self): if self.connection is None: return False - conn_timezone_name = self.connection.get_parameter_status("TimeZone") + conn_timezone_name = self.connection.info.parameter_status("TimeZone") timezone_name = self.timezone_name if timezone_name and conn_timezone_name != timezone_name: with self.connection.cursor() as cursor: @@ -249,7 +290,6 @@ class DatabaseWrapper(BaseDatabaseWrapper): def init_connection_state(self): super().init_connection_state() - self.connection.set_client_encoding("UTF8") timezone_changed = self.ensure_timezone() if timezone_changed: @@ -267,7 +307,15 @@ class DatabaseWrapper(BaseDatabaseWrapper): ) else: cursor = self.connection.cursor() - cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None + + if is_psycopg3: + # Register the cursor timezone only if the connection disagrees, to + # avoid copying the adapter map. + tzloader = self.connection.adapters.get_loader(TIMESTAMPTZ_OID, Format.TEXT) + if self.timezone != tzloader.timezone: + register_tzloader(self.timezone, cursor) + else: + cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None return cursor def tzinfo_factory(self, offset): @@ -365,17 +413,49 @@ class DatabaseWrapper(BaseDatabaseWrapper): @cached_property def pg_version(self): with self.temporary_connection(): - return self.connection.server_version + return self.connection.info.server_version def make_debug_cursor(self, cursor): return CursorDebugWrapper(cursor, self) -class CursorDebugWrapper(BaseCursorDebugWrapper): - def copy_expert(self, sql, file, *args): - with self.debug_sql(sql): - return self.cursor.copy_expert(sql, file, *args) +if is_psycopg3: - def copy_to(self, file, table, *args, **kwargs): - with self.debug_sql(sql="COPY %s TO STDOUT" % table): - return self.cursor.copy_to(file, table, *args, **kwargs) + class Cursor(Database.Cursor): + """ + A subclass of psycopg cursor implementing callproc. 
+ """ + + def callproc(self, name, args=None): + if not isinstance(name, sql.Identifier): + name = sql.Identifier(name) + + qparts = [sql.SQL("SELECT * FROM "), name, sql.SQL("(")] + if args: + for item in args: + qparts.append(sql.Literal(item)) + qparts.append(sql.SQL(",")) + del qparts[-1] + + qparts.append(sql.SQL(")")) + stmt = sql.Composed(qparts) + self.execute(stmt) + return args + + class CursorDebugWrapper(BaseCursorDebugWrapper): + def copy(self, statement): + with self.debug_sql(statement): + return self.cursor.copy(statement) + +else: + + Cursor = psycopg2.extensions.cursor + + class CursorDebugWrapper(BaseCursorDebugWrapper): + def copy_expert(self, sql, file, *args): + with self.debug_sql(sql): + return self.cursor.copy_expert(sql, file, *args) + + def copy_to(self, file, table, *args, **kwargs): + with self.debug_sql(sql="COPY %s TO STDOUT" % table): + return self.cursor.copy_to(file, table, *args, **kwargs) diff --git a/django/db/backends/postgresql/creation.py b/django/db/backends/postgresql/creation.py index 70c3eda566..b445b89646 100644 --- a/django/db/backends/postgresql/creation.py +++ b/django/db/backends/postgresql/creation.py @@ -1,9 +1,8 @@ import sys -from psycopg2 import errorcodes - from django.core.exceptions import ImproperlyConfigured from django.db.backends.base.creation import BaseDatabaseCreation +from django.db.backends.postgresql.psycopg_any import errors from django.db.backends.utils import strip_quotes @@ -46,7 +45,8 @@ class DatabaseCreation(BaseDatabaseCreation): return super()._execute_create_test_db(cursor, parameters, keepdb) except Exception as e: - if getattr(e.__cause__, "pgcode", "") != errorcodes.DUPLICATE_DATABASE: + cause = e.__cause__ + if cause and not isinstance(cause, errors.DuplicateDatabase): # All errors except "database already exists" cancel tests. self.log("Got an error creating the test database: %s" % e) sys.exit(2) diff --git a/django/db/backends/postgresql/features.py b/django/db/backends/postgresql/features.py index 0eed8c8d63..6c20dd87f0 100644 --- a/django/db/backends/postgresql/features.py +++ b/django/db/backends/postgresql/features.py @@ -1,7 +1,8 @@ import operator -from django.db import InterfaceError +from django.db import DataError, InterfaceError from django.db.backends.base.features import BaseDatabaseFeatures +from django.db.backends.postgresql.psycopg_any import is_psycopg3 from django.utils.functional import cached_property @@ -21,11 +22,13 @@ class DatabaseFeatures(BaseDatabaseFeatures): has_select_for_update_skip_locked = True has_select_for_no_key_update = True can_release_savepoints = True + supports_comments = True supports_tablespaces = True supports_transactions = True can_introspect_materialized_views = True can_distinct_on_fields = True can_rollback_ddl = True + schema_editor_uses_clientside_param_binding = True supports_combined_alters = True nulls_order_largest = True closed_cursor_error_class = InterfaceError @@ -81,6 +84,13 @@ class DatabaseFeatures(BaseDatabaseFeatures): }, } + @cached_property + def prohibits_null_characters_in_text_exception(self): + if is_psycopg3: + return DataError, "PostgreSQL text fields cannot contain NUL (0x00) bytes" + else: + return ValueError, "A string literal cannot contain NUL (0x00) characters." 
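The `callproc()` shim earlier in this hunk composes `SELECT * FROM proc(arg, ...)` out of `sql.Identifier`/`sql.Literal` parts, since psycopg 3 cursors no longer provide the DB-API `callproc()` extension. A dependency-free sketch of the statement it ends up building, with simplified stand-ins for psycopg's quoting helpers:

```python
def quote_ident(name: str) -> str:
    # Simplified stand-in for psycopg's sql.Identifier.
    return '"' + name.replace('"', '""') + '"'


def quote_literal(value) -> str:
    # Simplified stand-in for psycopg's sql.Literal.
    if value is None:
        return "NULL"
    if isinstance(value, (int, float)):
        return str(value)
    return "'" + str(value).replace("'", "''") + "'"


def callproc_sql(name: str, args=None) -> str:
    params = ", ".join(quote_literal(arg) for arg in (args or ()))
    return f"SELECT * FROM {quote_ident(name)}({params})"


print(callproc_sql("my_proc", [42, "it's"]))
# SELECT * FROM "my_proc"(42, 'it''s')
```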
+ @cached_property def introspected_field_types(self): return { @@ -100,3 +110,4 @@ class DatabaseFeatures(BaseDatabaseFeatures): has_bit_xor = property(operator.attrgetter("is_postgresql_14")) supports_covering_spgist_indexes = property(operator.attrgetter("is_postgresql_14")) + supports_unlimited_charfield = True diff --git a/django/db/backends/postgresql/introspection.py b/django/db/backends/postgresql/introspection.py index 921501a4a3..d649b6fd4f 100644 --- a/django/db/backends/postgresql/introspection.py +++ b/django/db/backends/postgresql/introspection.py @@ -2,10 +2,11 @@ from collections import namedtuple from django.db.backends.base.introspection import BaseDatabaseIntrospection from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo -from django.db.backends.base.introspection import TableInfo +from django.db.backends.base.introspection import TableInfo as BaseTableInfo from django.db.models import Index -FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield",)) +FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield", "comment")) +TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",)) class DatabaseIntrospection(BaseDatabaseIntrospection): @@ -62,7 +63,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): WHEN c.relispartition THEN 'p' WHEN c.relkind IN ('m', 'v') THEN 'v' ELSE 't' - END + END, + obj_description(c.oid) FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v') @@ -91,7 +93,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): NOT (a.attnotnull OR (t.typtype = 'd' AND t.typnotnull)) AS is_nullable, pg_get_expr(ad.adbin, ad.adrelid) AS column_default, CASE WHEN collname = 'default' THEN NULL ELSE collname END AS collation, - a.attidentity != '' AS is_autofield + a.attidentity != '' AS is_autofield, + col_description(a.attrelid, a.attnum) AS column_comment FROM pg_attribute a LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum LEFT JOIN pg_collation co ON a.attcollation = co.oid @@ -113,7 +116,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): FieldInfo( line.name, line.type_code, - line.display_size, + # display_size is always None on psycopg2. 
+ line.internal_size if line.display_size is None else line.display_size, line.internal_size, line.precision, line.scale, diff --git a/django/db/backends/postgresql/operations.py b/django/db/backends/postgresql/operations.py index 2303703ebc..18cfcb29cb 100644 --- a/django/db/backends/postgresql/operations.py +++ b/django/db/backends/postgresql/operations.py @@ -1,9 +1,25 @@ -from psycopg2.extras import Inet +import json +from functools import lru_cache, partial from django.conf import settings from django.db.backends.base.operations import BaseDatabaseOperations +from django.db.backends.postgresql.psycopg_any import ( + Inet, + Jsonb, + errors, + is_psycopg3, + mogrify, +) from django.db.backends.utils import split_tzname_delta from django.db.models.constants import OnConflict +from django.utils.regex_helper import _lazy_re_compile + + +@lru_cache +def get_json_dumps(encoder): + if encoder is None: + return json.dumps + return partial(json.dumps, cls=encoder) class DatabaseOperations(BaseDatabaseOperations): @@ -27,6 +43,18 @@ class DatabaseOperations(BaseDatabaseOperations): "SmallAutoField": "smallint", } + if is_psycopg3: + from psycopg.types import numeric + + integerfield_type_map = { + "SmallIntegerField": numeric.Int2, + "IntegerField": numeric.Int4, + "BigIntegerField": numeric.Int8, + "PositiveSmallIntegerField": numeric.Int2, + "PositiveIntegerField": numeric.Int4, + "PositiveBigIntegerField": numeric.Int8, + } + def unification_cast_sql(self, output_field): internal_type = output_field.get_internal_type() if internal_type in ( @@ -47,19 +75,23 @@ class DatabaseOperations(BaseDatabaseOperations): ) return "%s" + # EXTRACT format cannot be passed in parameters. + _extract_format_re = _lazy_re_compile(r"[A-Z_]+") + def date_extract_sql(self, lookup_type, sql, params): # https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT - extract_sql = f"EXTRACT(%s FROM {sql})" - extract_param = lookup_type if lookup_type == "week_day": # For consistency across backends, we return Sunday=1, Saturday=7. - extract_sql = f"EXTRACT(%s FROM {sql}) + 1" - extract_param = "dow" + return f"EXTRACT(DOW FROM {sql}) + 1", params elif lookup_type == "iso_week_day": - extract_param = "isodow" + return f"EXTRACT(ISODOW FROM {sql})", params elif lookup_type == "iso_year": - extract_param = "isoyear" - return extract_sql, (extract_param, *params) + return f"EXTRACT(ISOYEAR FROM {sql})", params + + lookup_type = lookup_type.upper() + if not self._extract_format_re.fullmatch(lookup_type): + raise ValueError(f"Invalid lookup type: {lookup_type!r}") + return f"EXTRACT({lookup_type} FROM {sql})", params def date_trunc_sql(self, lookup_type, sql, params, tzname=None): sql, params = self._convert_sql_to_tz(sql, params, tzname) @@ -91,10 +123,7 @@ class DatabaseOperations(BaseDatabaseOperations): sql, params = self._convert_sql_to_tz(sql, params, tzname) if lookup_type == "second": # Truncate fractional seconds. - return ( - f"EXTRACT(%s FROM DATE_TRUNC(%s, {sql}))", - ("second", "second", *params), - ) + return f"EXTRACT(SECOND FROM DATE_TRUNC(%s, {sql}))", ("second", *params) return self.date_extract_sql(lookup_type, sql, params) def datetime_trunc_sql(self, lookup_type, sql, params, tzname): @@ -105,10 +134,7 @@ class DatabaseOperations(BaseDatabaseOperations): def time_extract_sql(self, lookup_type, sql, params): if lookup_type == "second": # Truncate fractional seconds. 
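The rewritten `date_extract_sql()` above stops passing the EXTRACT field as a bound parameter (per the comment in the hunk, the EXTRACT format cannot be parametrized, which matters with psycopg 3's server-side binding); instead it validates the lookup name against `[A-Z_]+` and inlines it. A standalone sketch of that logic:

```python
import re

_extract_format_re = re.compile(r"[A-Z_]+")


def date_extract_sql(lookup_type, sql, params):
    # Mirrors the backend method above: special-case the lookups that map to
    # a different EXTRACT field, validate everything else, and inline the
    # field name rather than binding it as a parameter.
    if lookup_type == "week_day":
        return f"EXTRACT(DOW FROM {sql}) + 1", params  # Sunday=1 .. Saturday=7
    if lookup_type == "iso_week_day":
        return f"EXTRACT(ISODOW FROM {sql})", params
    if lookup_type == "iso_year":
        return f"EXTRACT(ISOYEAR FROM {sql})", params
    lookup_type = lookup_type.upper()
    if not _extract_format_re.fullmatch(lookup_type):
        raise ValueError(f"Invalid lookup type: {lookup_type!r}")
    return f"EXTRACT({lookup_type} FROM {sql})", params


print(date_extract_sql("year", "%s::timestamptz", ["2023-06-01"]))
# ('EXTRACT(YEAR FROM %s::timestamptz)', ['2023-06-01'])
```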
- return ( - f"EXTRACT(%s FROM DATE_TRUNC(%s, {sql}))", - ("second", "second", *params), - ) + return f"EXTRACT(SECOND FROM DATE_TRUNC(%s, {sql}))", ("second", *params) return self.date_extract_sql(lookup_type, sql, params) def time_trunc_sql(self, lookup_type, sql, params, tzname=None): @@ -128,6 +154,16 @@ class DatabaseOperations(BaseDatabaseOperations): def lookup_cast(self, lookup_type, internal_type=None): lookup = "%s" + if lookup_type == "isnull" and internal_type in ( + "CharField", + "EmailField", + "TextField", + "CICharField", + "CIEmailField", + "CITextField", + ): + return "%s::text" + # Cast text lookups to text to allow things like filter(x__contains=4) if lookup_type in ( "iexact", @@ -165,8 +201,11 @@ class DatabaseOperations(BaseDatabaseOperations): return name # Quoting once is enough. return '"%s"' % name + def compose_sql(self, sql, params): + return mogrify(sql, params, self.connection) + def set_time_zone_sql(self): - return "SET TIME ZONE %s" + return "SELECT set_config('TimeZone', %s, false)" def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): if not tables: @@ -266,12 +305,22 @@ class DatabaseOperations(BaseDatabaseOperations): else: return ["DISTINCT"], [] - def last_executed_query(self, cursor, sql, params): - # https://www.psycopg.org/docs/cursor.html#cursor.query - # The query attribute is a Psycopg extension to the DB API 2.0. - if cursor.query is not None: - return cursor.query.decode() - return None + if is_psycopg3: + + def last_executed_query(self, cursor, sql, params): + try: + return self.compose_sql(sql, params) + except errors.DataError: + return None + + else: + + def last_executed_query(self, cursor, sql, params): + # https://www.psycopg.org/docs/cursor.html#cursor.query + # The query attribute is a Psycopg extension to the DB API 2.0. 
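`SET TIME ZONE %s` becomes `SELECT set_config('TimeZone', %s, false)` above because, with psycopg 3's server-side binding, a utility statement such as `SET` cannot take a bound parameter, while `set_config()` is an ordinary function call. A minimal sketch of the timezone check, assuming a live psycopg 3 connection object:

```python
def ensure_timezone(connection, timezone_name):
    """Return True if the session timezone had to be changed (sketch only)."""
    if connection is None:
        return False
    # connection.info.parameter_status() replaces psycopg2's
    # connection.get_parameter_status(), as in the hunk above.
    current = connection.info.parameter_status("TimeZone")
    if timezone_name and current != timezone_name:
        with connection.cursor() as cursor:
            cursor.execute(
                "SELECT set_config('TimeZone', %s, false)", [timezone_name]
            )
        return True
    return False
```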
+ if cursor.query is not None: + return cursor.query.decode() + return None def return_insert_columns(self, fields): if not fields: @@ -291,6 +340,13 @@ class DatabaseOperations(BaseDatabaseOperations): values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) return "VALUES " + values_sql + if is_psycopg3: + + def adapt_integerfield_value(self, value, internal_type): + if value is None or hasattr(value, "resolve_expression"): + return value + return self.integerfield_type_map[internal_type](value) + def adapt_datefield_value(self, value): return value @@ -308,6 +364,9 @@ class DatabaseOperations(BaseDatabaseOperations): return Inet(value) return None + def adapt_json_value(self, value, encoder): + return Jsonb(value, dumps=get_json_dumps(encoder)) + def subtract_temporals(self, internal_type, lhs, rhs): if internal_type == "DateField": lhs_sql, lhs_params = lhs diff --git a/django/db/backends/postgresql/psycopg_any.py b/django/db/backends/postgresql/psycopg_any.py new file mode 100644 index 0000000000..579104dead --- /dev/null +++ b/django/db/backends/postgresql/psycopg_any.py @@ -0,0 +1,102 @@ +import ipaddress +from functools import lru_cache + +try: + from psycopg import ClientCursor, IsolationLevel, adapt, adapters, errors, sql + from psycopg.postgres import types + from psycopg.types.datetime import TimestamptzLoader + from psycopg.types.json import Jsonb + from psycopg.types.range import Range, RangeDumper + from psycopg.types.string import TextLoader + + Inet = ipaddress.ip_address + + DateRange = DateTimeRange = DateTimeTZRange = NumericRange = Range + RANGE_TYPES = (Range,) + + TSRANGE_OID = types["tsrange"].oid + TSTZRANGE_OID = types["tstzrange"].oid + + def mogrify(sql, params, connection): + return ClientCursor(connection.connection).mogrify(sql, params) + + # Adapters. + class BaseTzLoader(TimestamptzLoader): + """ + Load a PostgreSQL timestamptz using the a specific timezone. + The timezone can be None too, in which case it will be chopped. + """ + + timezone = None + + def load(self, data): + res = super().load(data) + return res.replace(tzinfo=self.timezone) + + def register_tzloader(tz, context): + class SpecificTzLoader(BaseTzLoader): + timezone = tz + + context.adapters.register_loader("timestamptz", SpecificTzLoader) + + class DjangoRangeDumper(RangeDumper): + """A Range dumper customized for Django.""" + + def upgrade(self, obj, format): + # Dump ranges containing naive datetimes as tstzrange, because + # Django doesn't use tz-aware ones. + dumper = super().upgrade(obj, format) + if dumper is not self and dumper.oid == TSRANGE_OID: + dumper.oid = TSTZRANGE_OID + return dumper + + @lru_cache + def get_adapters_template(use_tz, timezone): + # Create at adapters map extending the base one. + ctx = adapt.AdaptersMap(adapters) + # Register a no-op dumper to avoid a round trip from psycopg version 3 + # decode to json.dumps() to json.loads(), when using a custom decoder + # in JSONField. + ctx.register_loader("jsonb", TextLoader) + # Don't convert automatically from PostgreSQL network types to Python + # ipaddress. + ctx.register_loader("inet", TextLoader) + ctx.register_loader("cidr", TextLoader) + ctx.register_dumper(Range, DjangoRangeDumper) + # Register a timestamptz loader configured on self.timezone. + # This, however, can be overridden by create_cursor. 
+ register_tzloader(timezone, ctx) + return ctx + + is_psycopg3 = True + +except ImportError: + from enum import IntEnum + + from psycopg2 import errors, extensions, sql # NOQA + from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, Inet # NOQA + from psycopg2.extras import Json as Jsonb # NOQA + from psycopg2.extras import NumericRange, Range # NOQA + + RANGE_TYPES = (DateRange, DateTimeRange, DateTimeTZRange, NumericRange) + + class IsolationLevel(IntEnum): + READ_UNCOMMITTED = extensions.ISOLATION_LEVEL_READ_UNCOMMITTED + READ_COMMITTED = extensions.ISOLATION_LEVEL_READ_COMMITTED + REPEATABLE_READ = extensions.ISOLATION_LEVEL_REPEATABLE_READ + SERIALIZABLE = extensions.ISOLATION_LEVEL_SERIALIZABLE + + def _quote(value, connection=None): + adapted = extensions.adapt(value) + if hasattr(adapted, "encoding"): + adapted.encoding = "utf8" + # getquoted() returns a quoted bytestring of the adapted value. + return adapted.getquoted().decode() + + sql.quote = _quote + + def mogrify(sql, params, connection): + with connection.cursor() as cursor: + return cursor.mogrify(sql, params).decode() + + is_psycopg3 = False diff --git a/django/db/backends/postgresql/schema.py b/django/db/backends/postgresql/schema.py index 68c5bf31af..2887071254 100644 --- a/django/db/backends/postgresql/schema.py +++ b/django/db/backends/postgresql/schema.py @@ -1,7 +1,6 @@ -import psycopg2 - from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.ddl_references import IndexColumns +from django.db.backends.postgresql.psycopg_any import sql from django.db.backends.utils import strip_quotes @@ -41,6 +40,14 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): ) sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)" + def execute(self, sql, params=()): + # Merge the query client-side, as PostgreSQL won't do it server-side. + if params is None: + return super().execute(sql, params) + sql = self.connection.ops.compose_sql(str(sql), params) + # Don't let the superclass touch anything. + return super().execute(sql, None) + sql_add_identity = ( "ALTER TABLE %(table)s ALTER COLUMN %(column)s ADD " "GENERATED BY DEFAULT AS IDENTITY" @@ -52,11 +59,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def quote_value(self, value): if isinstance(value, str): value = value.replace("%", "%%") - adapted = psycopg2.extensions.adapt(value) - if hasattr(adapted, "encoding"): - adapted.encoding = "utf8" - # getquoted() returns a quoted bytestring of the adapted value. - return adapted.getquoted().decode() + return sql.quote(value, self.connection.connection) def _field_indexes_sql(self, model, field): output = super()._field_indexes_sql(model, field) @@ -137,7 +140,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): return sequence["name"] return None - def _alter_column_type_sql(self, model, old_field, new_field, new_type): + def _alter_column_type_sql( + self, model, old_field, new_field, new_type, old_collation, new_collation + ): # Drop indexes on varchar/text/citext columns that are changing to a # different type. old_db_params = old_field.db_parameters(connection=self.connection) @@ -152,7 +157,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): ) self.execute(self._delete_index_sql(model, index_name)) - self.sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" + self.sql_alter_column_type = ( + "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s" + ) # Cast when data type changed. 
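`quote_value()` in the schema editor above now defers to `sql.quote()`, which on the psycopg2 path is the `_quote()` helper defined in `psycopg_any.py`. A small demonstration of what that path does (requires psycopg2 to run):

```python
import psycopg2.extensions


def quote(value):
    # Same steps as psycopg_any._quote(): adapt the value, force utf8 for
    # string adapters, then take the quoted byte string.
    adapted = psycopg2.extensions.adapt(value)
    if hasattr(adapted, "encoding"):
        adapted.encoding = "utf8"
    return adapted.getquoted().decode()


print(quote("O'Reilly"))  # 'O''Reilly'
print(quote(42))          # 42
print(quote(None))        # NULL
```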
if using_sql := self._using_sql(new_field, old_field): self.sql_alter_column_type += using_sql @@ -175,6 +182,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): % { "column": self.quote_name(column), "type": new_type, + "collation": "", }, [], ), @@ -201,7 +209,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): ) column = strip_quotes(new_field.column) fragment, _ = super()._alter_column_type_sql( - model, old_field, new_field, new_type + model, old_field, new_field, new_type, old_collation, new_collation ) # Drop the sequence if exists (Django 4.1+ identity columns don't # have it). @@ -219,7 +227,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): return fragment, other_actions elif new_is_auto and old_is_auto and old_internal_type != new_internal_type: fragment, _ = super()._alter_column_type_sql( - model, old_field, new_field, new_type + model, old_field, new_field, new_type, old_collation, new_collation ) column = strip_quotes(new_field.column) db_types = { @@ -243,7 +251,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): ] return fragment, other_actions else: - return super()._alter_column_type_sql(model, old_field, new_field, new_type) + return super()._alter_column_type_sql( + model, old_field, new_field, new_type, old_collation, new_collation + ) def _alter_column_collation_sql( self, model, new_field, new_type, new_collation, old_field diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index feae360567..a3a382a56b 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -4,7 +4,8 @@ SQLite backend for the sqlite3 module in the standard library. import datetime import decimal import warnings -from itertools import chain +from collections.abc import Mapping +from itertools import chain, tee from sqlite3 import dbapi2 as Database from django.core.exceptions import ImproperlyConfigured @@ -158,7 +159,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): # between multiple threads. The safe-guarding will be handled at a # higher level by the `BaseDatabaseWrapper.allow_thread_sharing` # property. This is necessary as the shareability is disabled by - # default in pysqlite and it cannot be changed once a connection is + # default in sqlite3 and it cannot be changed once a connection is # opened. if "check_same_thread" in kwargs and kwargs["check_same_thread"]: warnings.warn( @@ -238,103 +239,53 @@ class DatabaseWrapper(BaseDatabaseWrapper): determine if rows with invalid references were entered while constraint checks were off. 
""" - if self.features.supports_pragma_foreign_key_check: - with self.cursor() as cursor: - if table_names is None: - violations = cursor.execute("PRAGMA foreign_key_check").fetchall() - else: - violations = chain.from_iterable( - cursor.execute( - "PRAGMA foreign_key_check(%s)" - % self.ops.quote_name(table_name) - ).fetchall() - for table_name in table_names - ) - # See https://www.sqlite.org/pragma.html#pragma_foreign_key_check - for ( - table_name, - rowid, - referenced_table_name, - foreign_key_index, - ) in violations: - foreign_key = cursor.execute( - "PRAGMA foreign_key_list(%s)" % self.ops.quote_name(table_name) - ).fetchall()[foreign_key_index] - column_name, referenced_column_name = foreign_key[3:5] - primary_key_column_name = self.introspection.get_primary_key_column( - cursor, table_name - ) - primary_key_value, bad_value = cursor.execute( - "SELECT %s, %s FROM %s WHERE rowid = %%s" - % ( - self.ops.quote_name(primary_key_column_name), - self.ops.quote_name(column_name), - self.ops.quote_name(table_name), - ), - (rowid,), - ).fetchone() - raise IntegrityError( - "The row in table '%s' with primary key '%s' has an " - "invalid foreign key: %s.%s contains a value '%s' that " - "does not have a corresponding value in %s.%s." - % ( - table_name, - primary_key_value, - table_name, - column_name, - bad_value, - referenced_table_name, - referenced_column_name, - ) - ) - else: - with self.cursor() as cursor: - if table_names is None: - table_names = self.introspection.table_names(cursor) - for table_name in table_names: - primary_key_column_name = self.introspection.get_primary_key_column( - cursor, table_name - ) - if not primary_key_column_name: - continue - relations = self.introspection.get_relations(cursor, table_name) - for column_name, ( - referenced_column_name, + with self.cursor() as cursor: + if table_names is None: + violations = cursor.execute("PRAGMA foreign_key_check").fetchall() + else: + violations = chain.from_iterable( + cursor.execute( + "PRAGMA foreign_key_check(%s)" % self.ops.quote_name(table_name) + ).fetchall() + for table_name in table_names + ) + # See https://www.sqlite.org/pragma.html#pragma_foreign_key_check + for ( + table_name, + rowid, + referenced_table_name, + foreign_key_index, + ) in violations: + foreign_key = cursor.execute( + "PRAGMA foreign_key_list(%s)" % self.ops.quote_name(table_name) + ).fetchall()[foreign_key_index] + column_name, referenced_column_name = foreign_key[3:5] + primary_key_column_name = self.introspection.get_primary_key_column( + cursor, table_name + ) + primary_key_value, bad_value = cursor.execute( + "SELECT %s, %s FROM %s WHERE rowid = %%s" + % ( + self.ops.quote_name(primary_key_column_name), + self.ops.quote_name(column_name), + self.ops.quote_name(table_name), + ), + (rowid,), + ).fetchone() + raise IntegrityError( + "The row in table '%s' with primary key '%s' has an " + "invalid foreign key: %s.%s contains a value '%s' that " + "does not have a corresponding value in %s.%s." 
+ % ( + table_name, + primary_key_value, + table_name, + column_name, + bad_value, referenced_table_name, - ) in relations.items(): - cursor.execute( - """ - SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING - LEFT JOIN `%s` as REFERRED - ON (REFERRING.`%s` = REFERRED.`%s`) - WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL - """ - % ( - primary_key_column_name, - column_name, - table_name, - referenced_table_name, - column_name, - referenced_column_name, - column_name, - referenced_column_name, - ) - ) - for bad_row in cursor.fetchall(): - raise IntegrityError( - "The row in table '%s' with primary key '%s' has an " - "invalid foreign key: %s.%s contains a value '%s' that " - "does not have a corresponding value in %s.%s." - % ( - table_name, - bad_row[0], - table_name, - column_name, - bad_row[1], - referenced_table_name, - referenced_column_name, - ) - ) + referenced_column_name, + ) + ) def is_usable(self): return True @@ -357,20 +308,40 @@ FORMAT_QMARK_REGEX = _lazy_re_compile(r"(?= (3, 35, 5) supports_parentheses_in_compound = False - # Deferred constraint checks can be emulated on SQLite < 3.20 but not in a - # reasonably performant way. - supports_pragma_foreign_key_check = Database.sqlite_version_info >= (3, 20, 0) - can_defer_constraint_checks = supports_pragma_foreign_key_check - supports_functions_in_partial_indexes = Database.sqlite_version_info >= (3, 15, 0) + can_defer_constraint_checks = True supports_over_clause = Database.sqlite_version_info >= (3, 25, 0) supports_frame_range_fixed_distance = Database.sqlite_version_info >= (3, 28, 0) supports_aggregate_filter_clause = Database.sqlite_version_info >= (3, 30, 1) @@ -110,6 +105,26 @@ class DatabaseFeatures(BaseDatabaseFeatures): "servers.tests.LiveServerTestCloseConnectionTest." "test_closes_connections", }, + "For SQLite in-memory tests, closing the connection destroys" + "the database.": { + "test_utils.tests.AssertNumQueriesUponConnectionTests." + "test_ignores_connection_configuration_queries", + }, + } + ) + else: + skips.update( + { + "Only connections to in-memory SQLite databases are passed to the " + "server thread.": { + "servers.tests.LiveServerInMemoryDatabaseLockTest." + "test_in_memory_database_lock", + }, + "multiprocessing's start method is checked only for in-memory " + "SQLite databases": { + "backends.sqlite.test_creation.TestDbSignatureTests." 
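The simplified `check_constraints()` above relies entirely on SQLite's `PRAGMA foreign_key_check`, now that the minimum supported SQLite version always provides it. A self-contained illustration using the stdlib `sqlite3` module:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE author (id INTEGER PRIMARY KEY);
    CREATE TABLE book (
        id INTEGER PRIMARY KEY,
        author_id INTEGER REFERENCES author(id)
    );
    -- Foreign keys are not enforced by default, so a dangling reference
    -- can be inserted and then detected by the pragma.
    INSERT INTO book (id, author_id) VALUES (1, 42);
    """
)
# Each violation row is (table, rowid, referenced table, foreign key index),
# the same shape the backend unpacks above.
print(conn.execute("PRAGMA foreign_key_check").fetchall())
# [('book', 1, 'author', 0)]
```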
+ "test_get_test_db_clone_settings_not_supported", + }, } ) return skips diff --git a/django/db/backends/sqlite3/introspection.py b/django/db/backends/sqlite3/introspection.py index 4805305aa5..d2fe3d8c71 100644 --- a/django/db/backends/sqlite3/introspection.py +++ b/django/db/backends/sqlite3/introspection.py @@ -119,10 +119,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): FieldInfo( name, data_type, - None, get_field_size(data_type), None, None, + None, not notnull, default, collations.get(name), diff --git a/django/db/backends/sqlite3/operations.py b/django/db/backends/sqlite3/operations.py index 0d3a4060ac..bb84d52071 100644 --- a/django/db/backends/sqlite3/operations.py +++ b/django/db/backends/sqlite3/operations.py @@ -168,9 +168,7 @@ class DatabaseOperations(BaseDatabaseOperations): def last_executed_query(self, cursor, sql, params): # Python substitutes parameters in Modules/_sqlite/cursor.c with: - # pysqlite_statement_bind_parameters( - # self->statement, parameters, allow_8bit_chars - # ); + # bind_parameters(state, self->statement, parameters); # Unfortunately there is no way to reach self->statement from Python, # so we quote and substitute parameters manually. if params: diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index 6c106ae868..c9e924b182 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -174,7 +174,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): super().alter_field(model, old_field, new_field, strict=strict) def _remake_table( - self, model, create_field=None, delete_field=None, alter_field=None + self, model, create_field=None, delete_field=None, alter_fields=None ): """ Shortcut to transform a model from old_model into new_model @@ -213,15 +213,16 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): # If any of the new or altered fields is introducing a new PK, # remove the old one restore_pk_field = None - if getattr(create_field, "primary_key", False) or ( - alter_field and getattr(alter_field[1], "primary_key", False) + alter_fields = alter_fields or [] + if getattr(create_field, "primary_key", False) or any( + getattr(new_field, "primary_key", False) for _, new_field in alter_fields ): for name, field in list(body.items()): - if field.primary_key and not ( + if field.primary_key and not any( # Do not remove the old primary key when an altered field # that introduces a primary key is the same field. - alter_field - and name == alter_field[1].name + name == new_field.name + for _, new_field in alter_fields ): field.primary_key = False restore_pk_field = field @@ -237,7 +238,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): self.effective_default(create_field), ) # Add in any altered fields - if alter_field: + for alter_field in alter_fields: old_field, new_field = alter_field body.pop(old_field.name, None) mapping.pop(old_field.column, None) @@ -379,7 +380,10 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def add_field(self, model, field): """Create a field on a model.""" - if ( + # Special-case implicit M2M tables. + if field.many_to_many and field.remote_field.through._meta.auto_created: + self.create_model(field.remote_field.through) + elif ( # Primary keys and unique fields are not supported in ALTER TABLE # ADD COLUMN. 
field.primary_key @@ -454,7 +458,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): ) ) # Alter by remaking table - self._remake_table(model, alter_field=(old_field, new_field)) + self._remake_table(model, alter_fields=[(old_field, new_field)]) # Rebuild tables with FKs pointing to this field. old_collation = old_db_params.get("collation") new_collation = new_db_params.get("collation") @@ -492,18 +496,30 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): # propagate this altering. self._remake_table( old_field.remote_field.through, - alter_field=( - # The field that points to the target model is needed, so - # we can tell alter_field to change it - this is - # m2m_reverse_field_name() (as opposed to m2m_field_name(), - # which points to our model). - old_field.remote_field.through._meta.get_field( - old_field.m2m_reverse_field_name() + alter_fields=[ + ( + # The field that points to the target model is needed, + # so that table can be remade with the new m2m field - + # this is m2m_reverse_field_name(). + old_field.remote_field.through._meta.get_field( + old_field.m2m_reverse_field_name() + ), + new_field.remote_field.through._meta.get_field( + new_field.m2m_reverse_field_name() + ), ), - new_field.remote_field.through._meta.get_field( - new_field.m2m_reverse_field_name() + ( + # The field that points to the model itself is needed, + # so that table can be remade with the new self field - + # this is m2m_field_name(). + old_field.remote_field.through._meta.get_field( + old_field.m2m_field_name() + ), + new_field.remote_field.through._meta.get_field( + new_field.m2m_field_name() + ), ), - ), + ], ) return diff --git a/django/db/backends/utils.py b/django/db/backends/utils.py index d505cd7904..df6532e81f 100644 --- a/django/db/backends/utils.py +++ b/django/db/backends/utils.py @@ -144,6 +144,35 @@ class CursorDebugWrapper(CursorWrapper): ) +@contextmanager +def debug_transaction(connection, sql): + start = time.monotonic() + try: + yield + finally: + if connection.queries_logged: + stop = time.monotonic() + duration = stop - start + connection.queries_log.append( + { + "sql": "%s" % sql, + "time": "%.3f" % duration, + } + ) + logger.debug( + "(%.3f) %s; args=%s; alias=%s", + duration, + sql, + None, + connection.alias, + extra={ + "duration": duration, + "sql": sql, + "alias": connection.alias, + }, + ) + + def split_tzname_delta(tzname): """ Split a time zone name into a 3-tuple of (name, sign, offset). diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index eae09eb65a..bb266cac9c 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -170,6 +170,7 @@ class MigrationAutodetector: self.generate_created_proxies() self.generate_altered_options() self.generate_altered_managers() + self.generate_altered_db_table_comment() # Create the renamed fields and store them in self.renamed_fields. 
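The `debug_transaction()` helper added to `django/db/backends/utils.py` above times a block and, when query logging is enabled, records it like any other query. A stripped-down, dependency-free version of the same pattern:

```python
import time
from contextlib import contextmanager


@contextmanager
def debug_block(queries_log, sql, enabled=True):
    start = time.monotonic()
    try:
        yield
    finally:
        if enabled:
            duration = time.monotonic() - start
            queries_log.append({"sql": sql, "time": "%.3f" % duration})


queries = []
with debug_block(queries, "BEGIN"):
    pass  # the transaction-management statement would run here
print(queries)  # e.g. [{'sql': 'BEGIN', 'time': '0.000'}]
```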
# They are used by create_altered_indexes(), generate_altered_fields(), @@ -1552,6 +1553,28 @@ class MigrationAutodetector: ), ) + def generate_altered_db_table_comment(self): + models_to_check = self.kept_model_keys.union( + self.kept_proxy_keys, self.kept_unmanaged_keys + ) + for app_label, model_name in sorted(models_to_check): + old_model_name = self.renamed_models.get( + (app_label, model_name), model_name + ) + old_model_state = self.from_state.models[app_label, old_model_name] + new_model_state = self.to_state.models[app_label, model_name] + + old_db_table_comment = old_model_state.options.get("db_table_comment") + new_db_table_comment = new_model_state.options.get("db_table_comment") + if old_db_table_comment != new_db_table_comment: + self.add_operation( + app_label, + operations.AlterModelTableComment( + name=model_name, + table_comment=new_db_table_comment, + ), + ) + def generate_altered_options(self): """ Work out if any non-schema-affecting options have changed and make an diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py index 987c7c1fe6..90dbdf8256 100644 --- a/django/db/migrations/operations/__init__.py +++ b/django/db/migrations/operations/__init__.py @@ -6,6 +6,7 @@ from .models import ( AlterModelManagers, AlterModelOptions, AlterModelTable, + AlterModelTableComment, AlterOrderWithRespectTo, AlterUniqueTogether, CreateModel, @@ -21,6 +22,7 @@ __all__ = [ "CreateModel", "DeleteModel", "AlterModelTable", + "AlterModelTableComment", "AlterUniqueTogether", "RenameModel", "AlterIndexTogether", diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index 3a33b4aff7..a243aba0b6 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -529,6 +529,44 @@ class AlterModelTable(ModelOptionOperation): return "alter_%s_table" % self.name_lower +class AlterModelTableComment(ModelOptionOperation): + def __init__(self, name, table_comment): + self.table_comment = table_comment + super().__init__(name) + + def deconstruct(self): + kwargs = { + "name": self.name, + "table_comment": self.table_comment, + } + return (self.__class__.__qualname__, [], kwargs) + + def state_forwards(self, app_label, state): + state.alter_model_options( + app_label, self.name_lower, {"db_table_comment": self.table_comment} + ) + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + new_model = to_state.apps.get_model(app_label, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, new_model): + old_model = from_state.apps.get_model(app_label, self.name) + schema_editor.alter_db_table_comment( + new_model, + old_model._meta.db_table_comment, + new_model._meta.db_table_comment, + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return f"Alter {self.name} table comment" + + @property + def migration_name_fragment(self): + return f"alter_{self.name_lower}_table_comment" + + class AlterTogetherOptionOperation(ModelOptionOperation): option_name = None diff --git a/django/db/models/aggregates.py b/django/db/models/aggregates.py index ec21e5fd11..e672f0aeb0 100644 --- a/django/db/models/aggregates.py +++ b/django/db/models/aggregates.py @@ -1,8 +1,8 @@ """ Classes to represent the definitions of aggregate functions. 
""" -from django.core.exceptions import FieldError -from django.db.models.expressions import Case, Func, Star, When +from django.core.exceptions import FieldError, FullResultSet +from django.db.models.expressions import Case, Func, Star, Value, When from django.db.models.fields import IntegerField from django.db.models.functions.comparison import Coalesce from django.db.models.functions.mixins import ( @@ -85,6 +85,10 @@ class Aggregate(Func): return c if hasattr(default, "resolve_expression"): default = default.resolve_expression(query, allow_joins, reuse, summarize) + if default._output_field_or_none is None: + default.output_field = c._output_field_or_none + else: + default = Value(default, c._output_field_or_none) c.default = None # Reset the default argument before wrapping. coalesce = Coalesce(c, default, output_field=c._output_field_or_none) coalesce.is_summary = c.is_summary @@ -104,18 +108,22 @@ class Aggregate(Func): extra_context["distinct"] = "DISTINCT " if self.distinct else "" if self.filter: if connection.features.supports_aggregate_filter_clause: - filter_sql, filter_params = self.filter.as_sql(compiler, connection) - template = self.filter_template % extra_context.get( - "template", self.template - ) - sql, params = super().as_sql( - compiler, - connection, - template=template, - filter=filter_sql, - **extra_context, - ) - return sql, (*params, *filter_params) + try: + filter_sql, filter_params = self.filter.as_sql(compiler, connection) + except FullResultSet: + pass + else: + template = self.filter_template % extra_context.get( + "template", self.template + ) + sql, params = super().as_sql( + compiler, + connection, + template=template, + filter=filter_sql, + **extra_context, + ) + return sql, (*params, *filter_params) else: copy = self.copy() copy.filter = None diff --git a/django/db/models/base.py b/django/db/models/base.py index 2eb7ba7e9b..8c8a74158d 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -48,7 +48,7 @@ from django.db.models.signals import ( pre_init, pre_save, ) -from django.db.models.utils import make_model_tuple +from django.db.models.utils import AltersData, make_model_tuple from django.utils.encoding import force_str from django.utils.hashable import make_hashable from django.utils.text import capfirst, get_text_list @@ -456,7 +456,7 @@ class ModelState: fields_cache = ModelStateFieldsCacheDescriptor() -class Model(metaclass=ModelBase): +class Model(AltersData, metaclass=ModelBase): def __init__(self, *args, **kwargs): # Alias some things as locals to avoid repeat global lookups cls = self.__class__ @@ -737,6 +737,11 @@ class Model(metaclass=ModelBase): if field.is_cached(self): field.delete_cached_value(self) + # Clear cached private relations. 
+ for field in self._meta.private_fields: + if field.is_relation and field.is_cached(self): + field.delete_cached_value(self) + self._state.db = db_instance._state.db async def arefresh_from_db(self, using=None, fields=None): @@ -1551,6 +1556,7 @@ class Model(metaclass=ModelBase): *cls._check_ordering(), *cls._check_constraints(databases), *cls._check_default_pk(), + *cls._check_db_table_comment(databases), ] return errors @@ -1587,6 +1593,29 @@ class Model(metaclass=ModelBase): ] return [] + @classmethod + def _check_db_table_comment(cls, databases): + if not cls._meta.db_table_comment: + return [] + errors = [] + for db in databases: + if not router.allow_migrate_model(db, cls): + continue + connection = connections[db] + if not ( + connection.features.supports_comments + or "supports_comments" in cls._meta.required_db_features + ): + errors.append( + checks.Warning( + f"{connection.display_name} does not support comments on " + f"tables (db_table_comment).", + obj=cls, + id="models.W046", + ) + ) + return errors + @classmethod def _check_swappable(cls): """Check if the swapped model exists.""" diff --git a/django/db/models/expressions.py b/django/db/models/expressions.py index 8b04e1f11b..c270ef16c7 100644 --- a/django/db/models/expressions.py +++ b/django/db/models/expressions.py @@ -7,7 +7,7 @@ from collections import defaultdict from decimal import Decimal from uuid import UUID -from django.core.exceptions import EmptyResultSet, FieldError +from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet from django.db import DatabaseError, NotSupportedError, connection from django.db.models import fields from django.db.models.constants import LOOKUP_SEP @@ -405,6 +405,12 @@ class BaseExpression: ) return clone + def get_refs(self): + refs = set() + for expr in self.get_source_expressions(): + refs |= expr.get_refs() + return refs + def copy(self): return copy.copy(self) @@ -955,6 +961,8 @@ class Func(SQLiteNumericMixin, Expression): if empty_result_set_value is NotImplemented: raise arg_sql, arg_params = compiler.compile(Value(empty_result_set_value)) + except FullResultSet: + arg_sql, arg_params = compiler.compile(Value(True)) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} @@ -1165,6 +1173,9 @@ class Ref(Expression): # just a reference to the name of `source`. return self + def get_refs(self): + return {self.refs} + def relabeled_clone(self, relabels): return self @@ -1367,14 +1378,6 @@ class When(Expression): template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) - # Filters that match everything are handled as empty strings in the - # WHERE clause, but in a CASE WHEN expression they must use a predicate - # that's always True. 
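The `_check_db_table_comment()` check earlier in this hunk (and the matching `db_comment` field check later in the patch) gates the new comment options on the backend's `supports_comments` feature flag. At the model level the options look like this; the model and comment text are made up for illustration:

```python
from django.db import models


class Ticket(models.Model):
    title = models.CharField(
        max_length=200,
        db_comment="Short, human-readable summary shown in the queue",
    )

    class Meta:
        db_table_comment = "Customer support tickets"
```

Changing `db_table_comment` later is picked up by the autodetector's `generate_altered_db_table_comment()` and serialized as an `AlterModelTableComment` operation; on backends without `supports_comments`, the checks above emit the `models.W046` and `fields.W163` warnings instead.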
- if condition_sql == "": - if connection.features.supports_boolean_expr_in_select_clause: - condition_sql, condition_params = compiler.compile(Value(True)) - else: - condition_sql, condition_params = "1=1", () template_params["condition"] = condition_sql result_sql, result_params = compiler.compile(self.result) template_params["result"] = result_sql @@ -1461,14 +1464,17 @@ class Case(SQLiteNumericMixin, Expression): template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] + default_sql, default_params = compiler.compile(self.default) for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue + except FullResultSet: + default_sql, default_params = compiler.compile(case.result) + break case_parts.append(case_sql) sql_params.extend(case_params) - default_sql, default_params = compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index fd6969cd3d..060e1be605 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -200,6 +200,7 @@ class Field(RegisterLookupMixin): auto_created=False, validators=(), error_messages=None, + db_comment=None, ): self.name = name self.verbose_name = verbose_name # May be set by set_attributes_from_name @@ -221,6 +222,7 @@ class Field(RegisterLookupMixin): self.help_text = help_text self.db_index = db_index self.db_column = db_column + self.db_comment = db_comment self._db_tablespace = db_tablespace self.auto_created = auto_created @@ -259,6 +261,7 @@ class Field(RegisterLookupMixin): *self._check_field_name(), *self._check_choices(), *self._check_db_index(), + *self._check_db_comment(**kwargs), *self._check_null_allowed_for_primary_keys(), *self._check_backend_specific_checks(**kwargs), *self._check_validators(), @@ -385,6 +388,28 @@ class Field(RegisterLookupMixin): else: return [] + def _check_db_comment(self, databases=None, **kwargs): + if not self.db_comment or not databases: + return [] + errors = [] + for db in databases: + if not router.allow_migrate_model(db, self.model): + continue + connection = connections[db] + if not ( + connection.features.supports_comments + or "supports_comments" in self.model._meta.required_db_features + ): + errors.append( + checks.Warning( + f"{connection.display_name} does not support comments on " + f"columns (db_comment).", + obj=self, + id="fields.W163", + ) + ) + return errors + def _check_null_allowed_for_primary_keys(self): if ( self.primary_key @@ -411,12 +436,9 @@ class Field(RegisterLookupMixin): def _check_backend_specific_checks(self, databases=None, **kwargs): if databases is None: return [] - app_label = self.model._meta.app_label errors = [] for alias in databases: - if router.allow_migrate( - alias, app_label, model_name=self.model._meta.model_name - ): + if router.allow_migrate_model(alias, self.model): errors.extend(connections[alias].validation.check_field(self, **kwargs)) return errors @@ -541,6 +563,7 @@ class Field(RegisterLookupMixin): "choices": None, "help_text": "", "db_column": None, + "db_comment": None, "db_tablespace": None, "auto_created": False, "validators": [], @@ -794,9 +817,14 @@ class Field(RegisterLookupMixin): # exactly which wacky database column type you want to use. 
data = self.db_type_parameters(connection) try: - return connection.data_types[self.get_internal_type()] % data + column_type = connection.data_types[self.get_internal_type()] except KeyError: return None + else: + # column_type is either a single-parameter function or a string. + if callable(column_type): + return column_type(data) + return column_type % data def rel_db_type(self, connection): """ @@ -921,6 +949,8 @@ class Field(RegisterLookupMixin): def get_db_prep_save(self, value, connection): """Return field's value prepared for saving into a database.""" + if hasattr(value, "as_sql"): + return value return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): @@ -1103,25 +1133,21 @@ class BooleanField(Field): defaults = {"form_class": form_class, "required": False} return super().formfield(**{**defaults, **kwargs}) - def select_format(self, compiler, sql, params): - sql, params = super().select_format(compiler, sql, params) - # Filters that match everything are handled as empty strings in the - # WHERE clause, but in SELECT or GROUP BY list they must use a - # predicate that's always True. - if sql == "": - sql = "1" - return sql, params - class CharField(Field): - description = _("String (up to %(max_length)s)") - def __init__(self, *args, db_collation=None, **kwargs): super().__init__(*args, **kwargs) self.db_collation = db_collation if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) + @property + def description(self): + if self.max_length is not None: + return _("String (up to %(max_length)s)") + else: + return _("String (unlimited)") + def check(self, **kwargs): databases = kwargs.get("databases") or [] return [ @@ -1132,6 +1158,12 @@ class CharField(Field): def _check_max_length_attribute(self, **kwargs): if self.max_length is None: + if ( + connection.features.supports_unlimited_charfield + or "supports_unlimited_charfield" + in self.model._meta.required_db_features + ): + return [] return [ checks.Error( "CharFields must define a 'max_length' attribute.", @@ -1721,9 +1753,13 @@ class DecimalField(Field): ) return decimal_value - def get_db_prep_save(self, value, connection): + def get_db_prep_value(self, value, connection, prepared=False): + if not prepared: + value = self.get_prep_value(value) + if hasattr(value, "as_sql"): + return value return connection.ops.adapt_decimalfield_value( - self.to_python(value), self.max_digits, self.decimal_places + value, self.max_digits, self.decimal_places ) def get_prep_value(self, value): @@ -2022,6 +2058,10 @@ class IntegerField(Field): "Field '%s' expected a number but got %r." 
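These `CharField` changes, together with `supports_unlimited_charfield` and `_get_varchar_column()` in the PostgreSQL backend earlier in the patch, let `max_length` be omitted on backends that opt in. An illustrative model (PostgreSQL only; the name is made up):

```python
from django.db import models


class Note(models.Model):
    # No max_length: described as "String (unlimited)" and mapped to a bare
    # "varchar" column on PostgreSQL; backends without
    # supports_unlimited_charfield still fail the max_length system check.
    body = models.CharField()
```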
% (self.name, value), ) from e + def get_db_prep_value(self, value, connection, prepared=False): + value = super().get_db_prep_value(value, connection, prepared) + return connection.ops.adapt_integerfield_value(value, self.get_internal_type()) + def get_internal_type(self): return "IntegerField" diff --git a/django/db/models/fields/files.py b/django/db/models/fields/files.py index 3d1291a221..7a99b12691 100644 --- a/django/db/models/fields/files.py +++ b/django/db/models/fields/files.py @@ -10,10 +10,11 @@ from django.core.files.utils import validate_file_name from django.db.models import signals from django.db.models.fields import Field from django.db.models.query_utils import DeferredAttribute +from django.db.models.utils import AltersData from django.utils.translation import gettext_lazy as _ -class FieldFile(File): +class FieldFile(File, AltersData): def __init__(self, instance, field, name): super().__init__(None, name) self.instance = instance diff --git a/django/db/models/fields/json.py b/django/db/models/fields/json.py index 22c7e2ad00..eb2d35f100 100644 --- a/django/db/models/fields/json.py +++ b/django/db/models/fields/json.py @@ -1,12 +1,18 @@ import json +import warnings from django import forms from django.core import checks, exceptions from django.db import NotSupportedError, connections, router -from django.db.models import lookups +from django.db.models import expressions, lookups from django.db.models.constants import LOOKUP_SEP from django.db.models.fields import TextField -from django.db.models.lookups import PostgresOperatorLookup, Transform +from django.db.models.lookups import ( + FieldGetDbPrepValueMixin, + PostgresOperatorLookup, + Transform, +) +from django.utils.deprecation import RemovedInDjango51Warning from django.utils.translation import gettext_lazy as _ from . import Field @@ -92,10 +98,40 @@ class JSONField(CheckFieldDefaultMixin, Field): def get_internal_type(self): return "JSONField" - def get_prep_value(self, value): + def get_db_prep_value(self, value, connection, prepared=False): + # RemovedInDjango51Warning: When the deprecation ends, replace with: + # if ( + # isinstance(value, expressions.Value) + # and isinstance(value.output_field, JSONField) + # ): + # value = value.value + # elif hasattr(value, "as_sql"): ... + if isinstance(value, expressions.Value): + if isinstance(value.value, str) and not isinstance( + value.output_field, JSONField + ): + try: + value = json.loads(value.value, cls=self.decoder) + except json.JSONDecodeError: + value = value.value + else: + warnings.warn( + "Providing an encoded JSON string via Value() is deprecated. 
" + f"Use Value({value!r}, output_field=JSONField()) instead.", + category=RemovedInDjango51Warning, + ) + elif isinstance(value.output_field, JSONField): + value = value.value + else: + return value + elif hasattr(value, "as_sql"): + return value + return connection.ops.adapt_json_value(value, self.encoder) + + def get_db_prep_save(self, value, connection): if value is None: return value - return json.dumps(value, cls=self.encoder) + return self.get_db_prep_value(value, connection) def get_transform(self, name): transform = super().get_transform(name) @@ -141,7 +177,7 @@ def compile_json_path(key_transforms, include_root=True): return "".join(path) -class DataContains(PostgresOperatorLookup): +class DataContains(FieldGetDbPrepValueMixin, PostgresOperatorLookup): lookup_name = "contains" postgres_operator = "@>" @@ -156,7 +192,7 @@ class DataContains(PostgresOperatorLookup): return "JSON_CONTAINS(%s, %s)" % (lhs, rhs), params -class ContainedBy(PostgresOperatorLookup): +class ContainedBy(FieldGetDbPrepValueMixin, PostgresOperatorLookup): lookup_name = "contained_by" postgres_operator = "<@" diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index c763f555e3..e5dd4e2a85 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -1060,22 +1060,6 @@ class ForeignKey(ForeignObject): def target_field(self): return self.foreign_related_fields[0] - def get_reverse_path_info(self, filtered_relation=None): - """Get path from the related model to this field's model.""" - opts = self.model._meta - from_opts = self.remote_field.model._meta - return [ - PathInfo( - from_opts=from_opts, - to_opts=opts, - target_fields=(opts.pk,), - join_field=self.remote_field, - m2m=not self.unique, - direct=False, - filtered_relation=filtered_relation, - ) - ] - def validate(self, value, model_instance): if self.remote_field.parent_link: return @@ -1444,6 +1428,14 @@ class ManyToManyField(RelatedField): id="fields.W345", ) ) + if self.db_comment: + warnings.append( + checks.Warning( + "db_comment has no effect on ManyToManyField.", + obj=self, + id="fields.W346", + ) + ) return warnings diff --git a/django/db/models/fields/related_descriptors.py b/django/db/models/fields/related_descriptors.py index 04c956bd1e..422b08e6ca 100644 --- a/django/db/models/fields/related_descriptors.py +++ b/django/db/models/fields/related_descriptors.py @@ -63,6 +63,8 @@ and two directions (forward and reverse) for a total of six combinations. ``ReverseManyToManyDescriptor``, use ``ManyToManyDescriptor`` instead. """ +from asgiref.sync import sync_to_async + from django.core.exceptions import FieldError from django.db import ( DEFAULT_DB_ALIAS, @@ -76,7 +78,7 @@ from django.db.models.functions import RowNumber from django.db.models.lookups import GreaterThan, LessThanOrEqual from django.db.models.query import QuerySet from django.db.models.query_utils import DeferredAttribute -from django.db.models.utils import resolve_callables +from django.db.models.utils import AltersData, resolve_callables from django.utils.functional import cached_property @@ -635,7 +637,7 @@ def create_reverse_many_to_one_manager(superclass, rel): the related model, and adds behaviors specific to many-to-one relations. 
""" - class RelatedManager(superclass): + class RelatedManager(superclass, AltersData): def __init__(self, instance): super().__init__() @@ -785,6 +787,11 @@ def create_reverse_many_to_one_manager(superclass, rel): add.alters_data = True + async def aadd(self, *objs, bulk=True): + return await sync_to_async(self.add)(*objs, bulk=bulk) + + aadd.alters_data = True + def create(self, **kwargs): self._check_fk_val() kwargs[self.field.name] = self.instance @@ -793,6 +800,11 @@ def create_reverse_many_to_one_manager(superclass, rel): create.alters_data = True + async def acreate(self, **kwargs): + return await sync_to_async(self.create)(**kwargs) + + acreate.alters_data = True + def get_or_create(self, **kwargs): self._check_fk_val() kwargs[self.field.name] = self.instance @@ -801,6 +813,11 @@ def create_reverse_many_to_one_manager(superclass, rel): get_or_create.alters_data = True + async def aget_or_create(self, **kwargs): + return await sync_to_async(self.get_or_create)(**kwargs) + + aget_or_create.alters_data = True + def update_or_create(self, **kwargs): self._check_fk_val() kwargs[self.field.name] = self.instance @@ -809,6 +826,11 @@ def create_reverse_many_to_one_manager(superclass, rel): update_or_create.alters_data = True + async def aupdate_or_create(self, **kwargs): + return await sync_to_async(self.update_or_create)(**kwargs) + + aupdate_or_create.alters_data = True + # remove() and clear() are only provided if the ForeignKey can have a # value of null. if rel.field.null: @@ -839,12 +861,22 @@ def create_reverse_many_to_one_manager(superclass, rel): remove.alters_data = True + async def aremove(self, *objs, bulk=True): + return await sync_to_async(self.remove)(*objs, bulk=bulk) + + aremove.alters_data = True + def clear(self, *, bulk=True): self._check_fk_val() self._clear(self, bulk) clear.alters_data = True + async def aclear(self, *, bulk=True): + return await sync_to_async(self.clear)(bulk=bulk) + + aclear.alters_data = True + def _clear(self, queryset, bulk): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) @@ -888,6 +920,11 @@ def create_reverse_many_to_one_manager(superclass, rel): set.alters_data = True + async def aset(self, objs, *, bulk=True, clear=False): + return await sync_to_async(self.set)(objs=objs, bulk=bulk, clear=clear) + + aset.alters_data = True + return RelatedManager @@ -946,7 +983,7 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): the related model, and adds behaviors specific to many-to-many relations. 
""" - class ManyRelatedManager(superclass): + class ManyRelatedManager(superclass, AltersData): def __init__(self, instance=None): super().__init__() @@ -1115,12 +1152,24 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): add.alters_data = True + async def aadd(self, *objs, through_defaults=None): + return await sync_to_async(self.add)( + *objs, through_defaults=through_defaults + ) + + aadd.alters_data = True + def remove(self, *objs): self._remove_prefetched_objects() self._remove_items(self.source_field_name, self.target_field_name, *objs) remove.alters_data = True + async def aremove(self, *objs): + return await sync_to_async(self.remove)(*objs) + + aremove.alters_data = True + def clear(self): db = router.db_for_write(self.through, instance=self.instance) with transaction.atomic(using=db, savepoint=False): @@ -1149,6 +1198,11 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): clear.alters_data = True + async def aclear(self): + return await sync_to_async(self.clear)() + + aclear.alters_data = True + def set(self, objs, *, clear=False, through_defaults=None): # Force evaluation of `objs` in case it's a queryset whose value # could be affected by `manager.clear()`. Refs #19816. @@ -1183,6 +1237,13 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): set.alters_data = True + async def aset(self, objs, *, clear=False, through_defaults=None): + return await sync_to_async(self.set)( + objs=objs, clear=clear, through_defaults=through_defaults + ) + + aset.alters_data = True + def create(self, *, through_defaults=None, **kwargs): db = router.db_for_write(self.instance.__class__, instance=self.instance) new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs) @@ -1191,6 +1252,13 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): create.alters_data = True + async def acreate(self, *, through_defaults=None, **kwargs): + return await sync_to_async(self.create)( + through_defaults=through_defaults, **kwargs + ) + + acreate.alters_data = True + def get_or_create(self, *, through_defaults=None, **kwargs): db = router.db_for_write(self.instance.__class__, instance=self.instance) obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create( @@ -1204,6 +1272,13 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): get_or_create.alters_data = True + async def aget_or_create(self, *, through_defaults=None, **kwargs): + return await sync_to_async(self.get_or_create)( + through_defaults=through_defaults, **kwargs + ) + + aget_or_create.alters_data = True + def update_or_create(self, *, through_defaults=None, **kwargs): db = router.db_for_write(self.instance.__class__, instance=self.instance) obj, created = super( @@ -1217,6 +1292,13 @@ def create_forward_many_to_many_manager(superclass, rel, reverse): update_or_create.alters_data = True + async def aupdate_or_create(self, *, through_defaults=None, **kwargs): + return await sync_to_async(self.update_or_create)( + through_defaults=through_defaults, **kwargs + ) + + aupdate_or_create.alters_data = True + def _get_target_ids(self, target_field_name, objs): """ Return the set of ids of `objs` that the target field references. 
diff --git a/django/db/models/functions/comparison.py b/django/db/models/functions/comparison.py index eb1f20a77c..de7eef4cdc 100644 --- a/django/db/models/functions/comparison.py +++ b/django/db/models/functions/comparison.py @@ -1,6 +1,7 @@ """Database functions that do comparisons or type conversions.""" from django.db import NotSupportedError from django.db.models.expressions import Func, Value +from django.db.models.fields import TextField from django.db.models.fields.json import JSONField from django.utils.regex_helper import _lazy_re_compile @@ -158,7 +159,14 @@ class JSONObject(Func): return super().as_sql(compiler, connection, **extra_context) def as_postgresql(self, compiler, connection, **extra_context): - return self.as_sql( + copy = self.copy() + copy.set_source_expressions( + [ + Cast(expression, TextField()) if index % 2 == 0 else expression + for index, expression in enumerate(copy.get_source_expressions()) + ] + ) + return super(JSONObject, copy).as_sql( compiler, connection, function="JSONB_BUILD_OBJECT", diff --git a/django/db/models/functions/text.py b/django/db/models/functions/text.py index a54ce8f19b..34a1e81982 100644 --- a/django/db/models/functions/text.py +++ b/django/db/models/functions/text.py @@ -1,7 +1,7 @@ from django.db import NotSupportedError from django.db.models.expressions import Func, Value -from django.db.models.fields import CharField, IntegerField -from django.db.models.functions import Coalesce +from django.db.models.fields import CharField, IntegerField, TextField +from django.db.models.functions import Cast, Coalesce from django.db.models.lookups import Transform @@ -82,6 +82,20 @@ class ConcatPair(Func): **extra_context, ) + def as_postgresql(self, compiler, connection, **extra_context): + copy = self.copy() + copy.set_source_expressions( + [ + Cast(expression, TextField()) + for expression in copy.get_source_expressions() + ] + ) + return super(ConcatPair, copy).as_sql( + compiler, + connection, + **extra_context, + ) + def as_mysql(self, compiler, connection, **extra_context): # Use CONCAT_WS with an empty separator so that NULLs are ignored. return super().as_sql( diff --git a/django/db/models/lookups.py b/django/db/models/lookups.py index 9b4bdb9bd6..9e2d9373e6 100644 --- a/django/db/models/lookups.py +++ b/django/db/models/lookups.py @@ -309,7 +309,7 @@ class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin): return sql, tuple(params) -class PostgresOperatorLookup(FieldGetDbPrepValueMixin, Lookup): +class PostgresOperatorLookup(Lookup): """Lookup defined by operators on PostgreSQL.""" postgres_operator = None @@ -568,7 +568,7 @@ class IsNull(BuiltinLookup): raise ValueError( "The QuerySet value for an isnull lookup must be True or False." 
) - sql, params = compiler.compile(self.lhs) + sql, params = self.process_lhs(compiler, connection) if self.rhs: return "%s IS NULL" % sql, params else: diff --git a/django/db/models/options.py b/django/db/models/options.py index b6b8202802..607b19fb8a 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -30,6 +30,7 @@ DEFAULT_NAMES = ( "verbose_name", "verbose_name_plural", "db_table", + "db_table_comment", "ordering", "unique_together", "permissions", @@ -112,6 +113,7 @@ class Options: self.verbose_name = None self.verbose_name_plural = None self.db_table = "" + self.db_table_comment = "" self.ordering = [] self._ordering_clash = False self.indexes = [] diff --git a/django/db/models/query.py b/django/db/models/query.py index 9db4fabe97..1421bc9ba5 100644 --- a/django/db/models/query.py +++ b/django/db/models/query.py @@ -23,11 +23,15 @@ from django.db import ( from django.db.models import AutoField, DateField, DateTimeField, Field, sql from django.db.models.constants import LOOKUP_SEP, OnConflict from django.db.models.deletion import Collector -from django.db.models.expressions import Case, F, Ref, Value, When +from django.db.models.expressions import Case, F, Value, When from django.db.models.functions import Cast, Trunc from django.db.models.query_utils import FilteredRelation, Q from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE -from django.db.models.utils import create_namedtuple_class, resolve_callables +from django.db.models.utils import ( + AltersData, + create_namedtuple_class, + resolve_callables, +) from django.utils import timezone from django.utils.deprecation import RemovedInDjango50Warning from django.utils.functional import cached_property, partition @@ -284,7 +288,7 @@ class FlatValuesListIterable(BaseIterable): yield row[0] -class QuerySet: +class QuerySet(AltersData): """Represent a lazy database lookup for a set of objects.""" def __init__(self, model=None, query=None, using=None, hints=None): @@ -585,24 +589,7 @@ class QuerySet: raise TypeError("Complex aggregates require an alias") kwargs[arg.default_alias] = arg - query = self.query.chain() - for (alias, aggregate_expr) in kwargs.items(): - query.add_annotation(aggregate_expr, alias, is_summary=True) - annotation = query.annotations[alias] - if not annotation.contains_aggregate: - raise TypeError("%s is not an aggregate expression" % alias) - for expr in annotation.get_source_expressions(): - if ( - expr.contains_aggregate - and isinstance(expr, Ref) - and expr.refs in kwargs - ): - name = expr.refs - raise exceptions.FieldError( - "Cannot compute %s('%s'): '%s' is an aggregate" - % (annotation.name, name, name) - ) - return query.get_aggregation(self.db, kwargs) + return self.query.chain().get_aggregation(self.db, kwargs) async def aaggregate(self, *args, **kwargs): return await sync_to_async(self.aggregate)(*args, **kwargs) @@ -716,7 +703,6 @@ class QuerySet: "Unique fields that can trigger the upsert must be provided." ) # Updating primary keys and non-concrete fields is forbidden. - update_fields = [self.model._meta.get_field(name) for name in update_fields] if any(not f.concrete or f.many_to_many for f in update_fields): raise ValueError( "bulk_create() can only be used with concrete fields in " @@ -728,12 +714,6 @@ class QuerySet: "update_fields." ) if unique_fields: - # Primary key is allowed in unique_fields. 
- unique_fields = [ - self.model._meta.get_field(name) - for name in unique_fields - if name != "pk" - ] if any(not f.concrete or f.many_to_many for f in unique_fields): raise ValueError( "bulk_create() can only be used with concrete fields " @@ -781,6 +761,15 @@ class QuerySet: raise ValueError("Can't bulk create a multi-table inherited model") if not objs: return objs + opts = self.model._meta + if unique_fields: + # Primary key is allowed in unique_fields. + unique_fields = [ + self.model._meta.get_field(opts.pk.name if name == "pk" else name) + for name in unique_fields + ] + if update_fields: + update_fields = [self.model._meta.get_field(name) for name in update_fields] on_conflict = self._check_bulk_create_options( ignore_conflicts, update_conflicts, @@ -788,7 +777,6 @@ class QuerySet: unique_fields, ) self._for_write = True - opts = self.model._meta fields = opts.concrete_fields objs = list(objs) self._prepare_for_bulk_create(objs) @@ -1650,7 +1638,6 @@ class QuerySet: clone.query.add_annotation( annotation, alias, - is_summary=False, select=select, ) for alias, annotation in clone.query.annotations.items(): diff --git a/django/db/models/query_utils.py b/django/db/models/query_utils.py index 4a83fc380d..5c5644cfb3 100644 --- a/django/db/models/query_utils.py +++ b/django/db/models/query_utils.py @@ -90,6 +90,7 @@ class Q(tree.Node): allow_joins=allow_joins, split_subq=False, check_filterable=False, + summarize=summarize, ) query.promote_joins(joins) return clause @@ -358,9 +359,9 @@ def refs_expression(lookup_parts, annotations): """ for n in range(1, len(lookup_parts) + 1): level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n]) - if level_n_lookup in annotations and annotations[level_n_lookup]: - return annotations[level_n_lookup], lookup_parts[n:] - return False, () + if annotations.get(level_n_lookup): + return level_n_lookup, lookup_parts[n:] + return None, () def check_rel_lookup_compatibility(model, target_opts, field): diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py index f3b2b3da41..b9722268ed 100644 --- a/django/db/models/sql/compiler.py +++ b/django/db/models/sql/compiler.py @@ -4,7 +4,7 @@ import re from functools import partial from itertools import chain -from django.core.exceptions import EmptyResultSet, FieldError +from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet from django.db import DatabaseError, NotSupportedError from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value @@ -169,7 +169,7 @@ class SQLCompiler: expr = Ref(alias, expr) try: sql, params = self.compile(expr) - except EmptyResultSet: + except (EmptyResultSet, FullResultSet): continue sql, params = expr.select_format(self, sql, params) params_hash = make_hashable(params) @@ -179,41 +179,10 @@ class SQLCompiler: return result def collapse_group_by(self, expressions, having): - # If the DB can group by primary key, then group by the primary key of - # query's main model. Note that for PostgreSQL the GROUP BY clause must - # include the primary key of every table, but for MySQL it is enough to - # have the main table's primary key. - if self.connection.features.allows_group_by_pk: - # Determine if the main model's primary key is in the query. - pk = None - for expr in expressions: - # Is this a reference to query's base table primary key? If the - # expression isn't a Col-like, then skip the expression. 
- if ( - getattr(expr, "target", None) == self.query.model._meta.pk - and getattr(expr, "alias", None) == self.query.base_table - ): - pk = expr - break - # If the main model's primary key is in the query, group by that - # field, HAVING expressions, and expressions associated with tables - # that don't have a primary key included in the grouped columns. - if pk: - pk_aliases = { - expr.alias - for expr in expressions - if hasattr(expr, "target") and expr.target.primary_key - } - expressions = [pk] + [ - expr - for expr in expressions - if expr in having - or ( - getattr(expr, "alias", None) is not None - and expr.alias not in pk_aliases - ) - ] - elif self.connection.features.allows_group_by_selected_pks: + # If the database supports group by functional dependence reduction, + # then the expressions can be reduced to the set of selected table + # primary keys as all other columns are functionally dependent on them. + if self.connection.features.allows_group_by_selected_pks: # Filter out all expressions associated with a table's primary key # present in the grouped columns. This is done by identifying all # tables that have their primary key included in the grouped @@ -235,7 +204,9 @@ class SQLCompiler: expressions = [ expr for expr in expressions - if expr in pks or getattr(expr, "alias", None) not in aliases + if expr in pks + or expr in having + or getattr(expr, "alias", None) not in aliases ] return expressions @@ -316,6 +287,8 @@ class SQLCompiler: sql, params = "0", () else: sql, params = self.compile(Value(empty_result_set_value)) + except FullResultSet: + sql, params = self.compile(Value(True)) else: sql, params = col.select_format(self, sql, params) if alias is None and with_col_aliases: @@ -451,28 +424,37 @@ class SQLCompiler: src = resolved.expression expr_src = expr.expression for sel_expr, _, col_alias in self.select: - if col_alias and not ( - isinstance(expr_src, F) and col_alias == expr_src.name - ): - continue if src == sel_expr: + # When values() is used the exact alias must be used to + # reference annotations. + if ( + self.query.has_select_fields + and col_alias in self.query.annotation_select + and not ( + isinstance(expr_src, F) and col_alias == expr_src.name + ) + ): + continue resolved.set_source_expressions( [Ref(col_alias if col_alias else src.target.column, src)] ) break else: - if col_alias: - raise DatabaseError( - "ORDER BY term does not match any column in the result set." - ) # Add column used in ORDER BY clause to the selected # columns and to each combined query. order_by_idx = len(self.query.select) + 1 - col_name = f"__orderbycol{order_by_idx}" + col_alias = f"__orderbycol{order_by_idx}" for q in self.query.combined_queries: - q.add_annotation(expr_src, col_name) - self.query.add_select_col(resolved, col_name) - resolved.set_source_expressions([RawSQL(f"{order_by_idx}", ())]) + # If fields were explicitly selected through values() + # combined queries cannot be augmented. + if q.has_select_fields: + raise DatabaseError( + "ORDER BY term does not match any column in " + "the result set." + ) + q.add_annotation(expr_src, col_alias) + self.query.add_select_col(resolved, col_alias) + resolved.set_source_expressions([Ref(col_alias, src)]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. 
When this entire method @@ -567,7 +549,7 @@ class SQLCompiler: *self.query.annotation_select, ) ) - part_sql, part_args = compiler.as_sql() + part_sql, part_args = compiler.as_sql(with_col_aliases=True) if compiler.query.combinator: # Wrap in a subquery if wrapping in parentheses isn't # supported. @@ -715,8 +697,9 @@ class SQLCompiler: """ refcounts_before = self.query.alias_refcount.copy() try: + combinator = self.query.combinator extra_select, order_by, group_by = self.pre_sql_setup( - with_col_aliases=with_col_aliases, + with_col_aliases=with_col_aliases or bool(combinator), ) for_update_part = None # Is a LIMIT/OFFSET clause needed? @@ -750,9 +733,16 @@ class SQLCompiler: raise # Use a predicate that's always False. where, w_params = "0 = 1", [] - having, h_params = ( - self.compile(self.having) if self.having is not None else ("", []) - ) + except FullResultSet: + where, w_params = "", [] + try: + having, h_params = ( + self.compile(self.having) + if self.having is not None + else ("", []) + ) + except FullResultSet: + having, h_params = "", [] result = ["SELECT"] params = [] @@ -1224,14 +1214,18 @@ class SQLCompiler: for o in opts.related_objects if o.field.unique and not o.many_to_many ] - for f, model in related_fields: - related_select_mask = select_mask.get(f) or {} + for related_field, model in related_fields: + related_select_mask = select_mask.get(related_field) or {} if not select_related_descend( - f, restricted, requested, related_select_mask, reverse=True + related_field, + restricted, + requested, + related_select_mask, + reverse=True, ): continue - related_field_name = f.related_query_name() + related_field_name = related_field.related_query_name() fields_found.add(related_field_name) join_info = self.query.setup_joins( @@ -1241,10 +1235,10 @@ class SQLCompiler: from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { "model": model, - "field": f, + "field": related_field, "reverse": True, - "local_setter": f.remote_field.set_cached_value, - "remote_setter": f.set_cached_value, + "local_setter": related_field.remote_field.set_cached_value, + "remote_setter": related_field.set_cached_value, "from_parent": from_parent, } related_klass_infos.append(klass_info) @@ -1259,7 +1253,7 @@ class SQLCompiler: select_fields.append(len(select)) select.append((col, None)) klass_info["select_fields"] = select_fields - next = requested.get(f.related_query_name(), {}) + next = requested.get(related_field.related_query_name(), {}) next_klass_infos = self.get_related_selections( select, related_select_mask, @@ -1271,10 +1265,10 @@ class SQLCompiler: ) get_related_klass_infos(klass_info, next_klass_infos) - def local_setter(obj, from_obj): + def local_setter(final_field, obj, from_obj): # Set a reverse fk object when relation is non-empty. 
if from_obj: - f.remote_field.set_cached_value(from_obj, obj) + final_field.remote_field.set_cached_value(from_obj, obj) def remote_setter(name, obj, from_obj): setattr(from_obj, name, obj) @@ -1285,7 +1279,7 @@ class SQLCompiler: break if name in self.query._filtered_relations: fields_found.add(name) - f, _, join_opts, joins, _, _ = self.query.setup_joins( + final_field, _, join_opts, joins, _, _ = self.query.setup_joins( [name], opts, root_alias ) model = join_opts.model @@ -1295,15 +1289,15 @@ class SQLCompiler: ) klass_info = { "model": model, - "field": f, + "field": final_field, "reverse": True, - "local_setter": local_setter, + "local_setter": partial(local_setter, final_field), "remote_setter": partial(remote_setter, name), "from_parent": from_parent, } related_klass_infos.append(klass_info) select_fields = [] - field_select_mask = select_mask.get((name, f)) or {} + field_select_mask = select_mask.get((name, final_field)) or {} columns = self.get_default_columns( field_select_mask, start_alias=alias, @@ -1647,9 +1641,7 @@ class SQLInsertCompiler(SQLCompiler): "Window expressions are not allowed in this query (%s=%r)." % (field.name, value) ) - else: - value = field.get_db_prep_save(value, connection=self.connection) - return value + return field.get_db_prep_save(value, connection=self.connection) def pre_save_val(self, field, obj): """ @@ -1735,8 +1727,8 @@ class SQLInsertCompiler(SQLCompiler): on_conflict_suffix_sql = self.connection.ops.on_conflict_suffix_sql( fields, self.query.on_conflict, - self.query.update_fields, - self.query.unique_fields, + (f.column for f in self.query.update_fields), + (f.column for f in self.query.unique_fields), ) if ( self.returning_fields @@ -1846,11 +1838,12 @@ class SQLDeleteCompiler(SQLCompiler): ) def _as_sql(self, query): - result = ["DELETE FROM %s" % self.quote_name_unless_alias(query.base_table)] - where, params = self.compile(query.where) - if where: - result.append("WHERE %s" % where) - return " ".join(result), tuple(params) + delete = "DELETE FROM %s" % self.quote_name_unless_alias(query.base_table) + try: + where, params = self.compile(query.where) + except FullResultSet: + return delete, () + return f"{delete} WHERE {where}", tuple(params) def as_sql(self): """ @@ -1902,18 +1895,14 @@ class SQLUpdateCompiler(SQLCompiler): ) elif hasattr(val, "prepare_database_save"): if field.remote_field: - val = field.get_db_prep_save( - val.prepare_database_save(field), - connection=self.connection, - ) + val = val.prepare_database_save(field) else: raise TypeError( "Tried to update field %s with a model instance, %r. " "Use a value compatible with %s." % (field, val, field.__class__.__name__) ) - else: - val = field.get_db_prep_save(val, connection=self.connection) + val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. if hasattr(field, "get_placeholder"): @@ -1935,8 +1924,11 @@ class SQLUpdateCompiler(SQLCompiler): "UPDATE %s SET" % qn(table), ", ".join(values), ] - where, params = self.compile(self.query.where) - if where: + try: + where, params = self.compile(self.query.where) + except FullResultSet: + params = [] + else: result.append("WHERE %s" % where) return " ".join(result), tuple(update_params + params) diff --git a/django/db/models/sql/datastructures.py b/django/db/models/sql/datastructures.py index 1edf040e82..069eb1a301 100644 --- a/django/db/models/sql/datastructures.py +++ b/django/db/models/sql/datastructures.py @@ -2,6 +2,7 @@ Useful auxiliary data structures for query construction. 
Not useful outside the SQL domain. """ +from django.core.exceptions import FullResultSet from django.db.models.sql.constants import INNER, LOUTER @@ -100,8 +101,11 @@ class Join: join_conditions.append("(%s)" % extra_sql) params.extend(extra_params) if self.filtered_relation: - extra_sql, extra_params = compiler.compile(self.filtered_relation) - if extra_sql: + try: + extra_sql, extra_params = compiler.compile(self.filtered_relation) + except FullResultSet: + pass + else: join_conditions.append("(%s)" % extra_sql) params.extend(extra_params) if not join_conditions: diff --git a/django/db/models/sql/query.py b/django/db/models/sql/query.py index dc80bdf5cc..0f37a86e4b 100644 --- a/django/db/models/sql/query.py +++ b/django/db/models/sql/query.py @@ -399,77 +399,36 @@ class Query(BaseExpression): alias = None return target.get_col(alias, field) - def rewrite_cols(self, annotation, col_cnt): - # We must make sure the inner query has the referred columns in it. - # If we are aggregating over an annotation, then Django uses Ref() - # instances to note this. However, if we are annotating over a column - # of a related model, then it might be that column isn't part of the - # SELECT clause of the inner query, and we must manually make sure - # the column is selected. An example case is: - # .aggregate(Sum('author__awards')) - # Resolving this expression results in a join to author, but there - # is no guarantee the awards column of author is in the select clause - # of the query. Thus we must manually add the column to the inner - # query. - orig_exprs = annotation.get_source_expressions() - new_exprs = [] - for expr in orig_exprs: - # FIXME: These conditions are fairly arbitrary. Identify a better - # method of having expressions decide which code path they should - # take. - if isinstance(expr, Ref): - # Its already a Ref to subquery (see resolve_ref() for - # details) - new_exprs.append(expr) - elif isinstance(expr, (WhereNode, Lookup)): - # Decompose the subexpressions further. The code here is - # copied from the else clause, but this condition must appear - # before the contains_aggregate/is_summary condition below. - new_expr, col_cnt = self.rewrite_cols(expr, col_cnt) - new_exprs.append(new_expr) - else: - # Reuse aliases of expressions already selected in subquery. - for col_alias, selected_annotation in self.annotation_select.items(): - if selected_annotation is expr: - new_expr = Ref(col_alias, expr) - break - else: - # An expression that is not selected the subquery. - if isinstance(expr, Col) or ( - expr.contains_aggregate and not expr.is_summary - ): - # Reference column or another aggregate. Select it - # under a non-conflicting alias. - col_cnt += 1 - col_alias = "__col%d" % col_cnt - self.annotations[col_alias] = expr - self.append_annotation_mask([col_alias]) - new_expr = Ref(col_alias, expr) - else: - # Some other expression not referencing database values - # directly. Its subexpression might contain Cols. - new_expr, col_cnt = self.rewrite_cols(expr, col_cnt) - new_exprs.append(new_expr) - annotation.set_source_expressions(new_exprs) - return annotation, col_cnt - - def get_aggregation(self, using, added_aggregate_names): + def get_aggregation(self, using, aggregate_exprs): """ Return the dictionary with the values of the existing aggregations. 
""" - if not self.annotation_select: + if not aggregate_exprs: return {} - existing_annotations = [ - annotation - for alias, annotation in self.annotations.items() - if alias not in added_aggregate_names - ] + aggregates = {} + for alias, aggregate_expr in aggregate_exprs.items(): + self.check_alias(alias) + aggregate = aggregate_expr.resolve_expression( + self, allow_joins=True, reuse=None, summarize=True + ) + if not aggregate.contains_aggregate: + raise TypeError("%s is not an aggregate expression" % alias) + aggregates[alias] = aggregate + # Existing usage of aggregation can be determined by the presence of + # selected aggregates but also by filters against aliased aggregates. + _, having, qualify = self.where.split_having_qualify() + has_existing_aggregation = ( + any( + getattr(annotation, "contains_aggregate", True) + for annotation in self.annotations.values() + ) + or having + ) # Decide if we need to use a subquery. # - # Existing annotations would cause incorrect results as get_aggregation() - # must produce just one result and thus must not use GROUP BY. But we - # aren't smart enough to remove the existing annotations from the - # query, so those would force us to use GROUP BY. + # Existing aggregations would cause incorrect results as + # get_aggregation() must produce just one result and thus must not use + # GROUP BY. # # If the query has limit or distinct, or uses set operations, then # those operations must be done in a subquery so that the query @@ -478,7 +437,8 @@ class Query(BaseExpression): if ( isinstance(self.group_by, tuple) or self.is_sliced - or existing_annotations + or has_existing_aggregation + or qualify or self.distinct or self.combinator ): @@ -500,33 +460,41 @@ class Query(BaseExpression): # query is grouped by the main model's primary key. However, # clearing the select clause can alter results if distinct is # used. - has_existing_aggregate_annotations = any( - annotation - for annotation in existing_annotations - if getattr(annotation, "contains_aggregate", True) - ) - if inner_query.default_cols and has_existing_aggregate_annotations: + if inner_query.default_cols and has_existing_aggregation: inner_query.group_by = ( self.model._meta.pk.get_col(inner_query.get_initial_alias()), ) inner_query.default_cols = False + if not qualify: + # Mask existing annotations that are not referenced by + # aggregates to be pushed to the outer query unless + # filtering against window functions is involved as it + # requires complex realising. + annotation_mask = set() + for aggregate in aggregates.values(): + annotation_mask |= aggregate.get_refs() + inner_query.set_annotation_mask(annotation_mask) - relabels = {t: "subquery" for t in inner_query.alias_map} - relabels[None] = "subquery" - # Remove any aggregates marked for reduction from the subquery - # and move them to the outer AggregateQuery. - col_cnt = 0 - for alias, expression in list(inner_query.annotation_select.items()): - annotation_select_mask = inner_query.annotation_select_mask - if expression.is_summary: - expression, col_cnt = inner_query.rewrite_cols(expression, col_cnt) - outer_query.annotations[alias] = expression.relabeled_clone( - relabels - ) - del inner_query.annotations[alias] - annotation_select_mask.remove(alias) - # Make sure the annotation_select wont use cached results. - inner_query.set_annotation_mask(inner_query.annotation_select_mask) + # Add aggregates to the outer AggregateQuery. 
This requires making + # sure all columns referenced by the aggregates are selected in the + # inner query. It is achieved by retrieving all column references + # by the aggregates, explicitly selecting them in the inner query, + # and making sure the aggregates are repointed to them. + col_refs = {} + for alias, aggregate in aggregates.items(): + replacements = {} + for col in self._gen_cols([aggregate], resolve_refs=False): + if not (col_ref := col_refs.get(col)): + index = len(col_refs) + 1 + col_alias = f"__col{index}" + col_ref = Ref(col_alias, col) + col_refs[col] = col_ref + inner_query.annotations[col_alias] = col + inner_query.append_annotation_mask([col_alias]) + replacements[col] = col_ref + outer_query.annotations[alias] = aggregate.replace_expressions( + replacements + ) if ( inner_query.select == () and not inner_query.default_cols @@ -543,6 +511,21 @@ class Query(BaseExpression): self.select = () self.default_cols = False self.extra = {} + if self.annotations: + # Inline reference to existing annotations and mask them as + # they are unnecessary given only the summarized aggregations + # are requested. + replacements = { + Ref(alias, annotation): annotation + for alias, annotation in self.annotations.items() + } + self.annotations = { + alias: aggregate.replace_expressions(replacements) + for alias, aggregate in aggregates.items() + } + else: + self.annotations = aggregates + self.set_annotation_mask(aggregates) empty_set_result = [ expression.empty_result_set_value @@ -557,9 +540,9 @@ class Query(BaseExpression): result = compiler.execute_sql(SINGLE) if result is None: result = empty_set_result - - converters = compiler.get_converters(outer_query.annotation_select.values()) - result = next(compiler.apply_converters((result,), converters)) + else: + converters = compiler.get_converters(outer_query.annotation_select.values()) + result = next(compiler.apply_converters((result,), converters)) return dict(zip(outer_query.annotation_select, result)) @@ -568,8 +551,7 @@ class Query(BaseExpression): Perform a COUNT() query using the current filter constraints. """ obj = self.clone() - obj.add_annotation(Count("*"), alias="__count", is_summary=True) - return obj.get_aggregation(using, ["__count"])["__count"] + return obj.get_aggregation(using, {"__count": Count("*")})["__count"] def has_filters(self): return self.where @@ -1116,12 +1098,10 @@ class Query(BaseExpression): "semicolons, or SQL comments." ) - def add_annotation(self, annotation, alias, is_summary=False, select=True): + def add_annotation(self, annotation, alias, select=True): """Add a single annotation expression to the Query.""" self.check_alias(alias) - annotation = annotation.resolve_expression( - self, allow_joins=True, reuse=None, summarize=is_summary - ) + annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None) if select: self.append_annotation_mask([alias]) else: @@ -1210,16 +1190,19 @@ class Query(BaseExpression): return type_(values) return value - def solve_lookup_type(self, lookup): + def solve_lookup_type(self, lookup, summarize=False): """ Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains'). 
""" lookup_splitted = lookup.split(LOOKUP_SEP) if self.annotations: - expression, expression_lookups = refs_expression( + annotation, expression_lookups = refs_expression( lookup_splitted, self.annotations ) - if expression: + if annotation: + expression = self.annotations[annotation] + if summarize: + expression = Ref(annotation, expression) return expression_lookups, (), expression _, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta()) field_parts = lookup_splitted[0 : len(lookup_splitted) - len(lookup_parts)] @@ -1356,6 +1339,7 @@ class Query(BaseExpression): split_subq=True, reuse_with_filtered_relation=False, check_filterable=True, + summarize=False, ): """ Build a WhereNode for a single filter clause but don't add it @@ -1396,18 +1380,21 @@ class Query(BaseExpression): allow_joins=allow_joins, split_subq=split_subq, check_filterable=check_filterable, + summarize=summarize, ) if hasattr(filter_expr, "resolve_expression"): if not getattr(filter_expr, "conditional", False): raise TypeError("Cannot filter against a non-conditional expression.") - condition = filter_expr.resolve_expression(self, allow_joins=allow_joins) + condition = filter_expr.resolve_expression( + self, allow_joins=allow_joins, summarize=summarize + ) if not isinstance(condition, Lookup): condition = self.build_lookup(["exact"], condition, True) return WhereNode([condition], connector=AND), [] arg, value = filter_expr if not arg: raise FieldError("Cannot parse keyword query %r" % arg) - lookups, parts, reffed_expression = self.solve_lookup_type(arg) + lookups, parts, reffed_expression = self.solve_lookup_type(arg, summarize) if check_filterable: self.check_filterable(reffed_expression) @@ -1546,6 +1533,7 @@ class Query(BaseExpression): allow_joins=True, split_subq=True, check_filterable=True, + summarize=False, ): """Add a Q-object to the current filter.""" connector = q_object.connector @@ -1564,6 +1552,7 @@ class Query(BaseExpression): allow_joins=allow_joins, split_subq=split_subq, check_filterable=check_filterable, + summarize=summarize, ) joinpromoter.add_votes(needed_inner) if child_clause: @@ -1905,7 +1894,7 @@ class Query(BaseExpression): return targets, joins[-1], joins @classmethod - def _gen_cols(cls, exprs, include_external=False): + def _gen_cols(cls, exprs, include_external=False, resolve_refs=True): for expr in exprs: if isinstance(expr, Col): yield expr @@ -1914,9 +1903,12 @@ class Query(BaseExpression): ): yield from expr.get_external_cols() elif hasattr(expr, "get_source_expressions"): + if not resolve_refs and isinstance(expr, Ref): + continue yield from cls._gen_cols( expr.get_source_expressions(), include_external=include_external, + resolve_refs=resolve_refs, ) @classmethod diff --git a/django/db/models/sql/where.py b/django/db/models/sql/where.py index 63fdf58d9d..aaab1730b7 100644 --- a/django/db/models/sql/where.py +++ b/django/db/models/sql/where.py @@ -4,7 +4,7 @@ Code to manage the creation and SQL rendering of 'where' constraints. 
import operator from functools import reduce -from django.core.exceptions import EmptyResultSet +from django.core.exceptions import EmptyResultSet, FullResultSet from django.db.models.expressions import Case, When from django.db.models.lookups import Exact from django.utils import tree @@ -145,6 +145,8 @@ class WhereNode(tree.Node): sql, params = compiler.compile(child) except EmptyResultSet: empty_needed -= 1 + except FullResultSet: + full_needed -= 1 else: if sql: result.append(sql) @@ -158,24 +160,25 @@ class WhereNode(tree.Node): # counts. if empty_needed == 0: if self.negated: - return "", [] + raise FullResultSet else: raise EmptyResultSet if full_needed == 0: if self.negated: raise EmptyResultSet else: - return "", [] + raise FullResultSet conn = " %s " % self.connector sql_string = conn.join(result) - if sql_string: - if self.negated: - # Some backends (Oracle at least) need parentheses - # around the inner SQL in the negated case, even if the - # inner SQL contains just a single expression. - sql_string = "NOT (%s)" % sql_string - elif len(result) > 1 or self.resolved: - sql_string = "(%s)" % sql_string + if not sql_string: + raise FullResultSet + if self.negated: + # Some backends (Oracle at least) need parentheses around the inner + # SQL in the negated case, even if the inner SQL contains just a + # single expression. + sql_string = "NOT (%s)" % sql_string + elif len(result) > 1 or self.resolved: + sql_string = "(%s)" % sql_string return sql_string, result_params def get_group_by_cols(self): @@ -224,6 +227,12 @@ class WhereNode(tree.Node): clone.children.append(child.replace_expressions(replacements)) return clone + def get_refs(self): + refs = set() + for child in self.children: + refs |= child.get_refs() + return refs + @classmethod def _contains_aggregate(cls, obj): if isinstance(obj, tree.Node): diff --git a/django/db/models/utils.py b/django/db/models/utils.py index 5521f3cca5..c6cb5ef165 100644 --- a/django/db/models/utils.py +++ b/django/db/models/utils.py @@ -50,3 +50,20 @@ def create_namedtuple_class(*names): (namedtuple("Row", names),), {"__reduce__": __reduce__, "__slots__": ()}, ) + + +class AltersData: + """ + Make subclasses preserve the alters_data attribute on overridden methods. + """ + + def __init_subclass__(cls, **kwargs): + for fn_name, fn in vars(cls).items(): + if callable(fn) and not hasattr(fn, "alters_data"): + for base in cls.__bases__: + if base_fn := getattr(base, fn_name, None): + if hasattr(base_fn, "alters_data"): + fn.alters_data = base_fn.alters_data + break + + super().__init_subclass__(**kwargs) diff --git a/django/forms/boundfield.py b/django/forms/boundfield.py index 9f2ae59ab4..6764276148 100644 --- a/django/forms/boundfield.py +++ b/django/forms/boundfield.py @@ -96,9 +96,17 @@ class BoundField: attrs.setdefault( "id", self.html_initial_id if only_initial else self.auto_id ) + if only_initial and self.html_initial_name in self.form.data: + # Propagate the hidden initial value. 
+ value = self.form._widget_data_value( + self.field.hidden_widget(), + self.html_initial_name, + ) + else: + value = self.value() return widget.render( name=self.html_initial_name if only_initial else self.html_name, - value=self.value(), + value=value, attrs=attrs, renderer=self.form.renderer, ) diff --git a/django/forms/forms.py b/django/forms/forms.py index 7c9f1034d2..6884e6e724 100644 --- a/django/forms/forms.py +++ b/django/forms/forms.py @@ -174,10 +174,6 @@ class BaseForm(RenderableFormMixin): def __getitem__(self, name): """Return a BoundField with the given name.""" - try: - return self._bound_fields_cache[name] - except KeyError: - pass try: field = self.fields[name] except KeyError: @@ -189,9 +185,9 @@ class BaseForm(RenderableFormMixin): ", ".join(sorted(self.fields)), ) ) - bound_field = field.get_bound_field(self, name) - self._bound_fields_cache[name] = bound_field - return bound_field + if name not in self._bound_fields_cache: + self._bound_fields_cache[name] = field.get_bound_field(self, name) + return self._bound_fields_cache[name] @property def errors(self): diff --git a/django/forms/models.py b/django/forms/models.py index b79157ce68..35ecdc1c0d 100644 --- a/django/forms/models.py +++ b/django/forms/models.py @@ -10,6 +10,7 @@ from django.core.exceptions import ( ImproperlyConfigured, ValidationError, ) +from django.db.models.utils import AltersData from django.forms.fields import ChoiceField, Field from django.forms.forms import BaseForm, DeclarativeFieldsMetaclass from django.forms.formsets import BaseFormSet, formset_factory @@ -329,7 +330,7 @@ class ModelFormMetaclass(DeclarativeFieldsMetaclass): return new_class -class BaseModelForm(BaseForm): +class BaseModelForm(BaseForm, AltersData): def __init__( self, data=None, @@ -644,7 +645,7 @@ def modelform_factory( # ModelFormSets ############################################################## -class BaseModelFormSet(BaseFormSet): +class BaseModelFormSet(BaseFormSet, AltersData): """ A ``FormSet`` for editing a queryset and/or adding new objects to it. """ diff --git a/django/http/__init__.py b/django/http/__init__.py index 87109059a3..628564ea09 100644 --- a/django/http/__init__.py +++ b/django/http/__init__.py @@ -1,5 +1,6 @@ from django.http.cookie import SimpleCookie, parse_cookie from django.http.request import ( + HttpHeaders, HttpRequest, QueryDict, RawPostDataException, @@ -27,6 +28,7 @@ from django.http.response import ( __all__ = [ "SimpleCookie", "parse_cookie", + "HttpHeaders", "HttpRequest", "QueryDict", "RawPostDataException", diff --git a/django/http/multipartparser.py b/django/http/multipartparser.py index b3e0925a42..8f0d85c3d8 100644 --- a/django/http/multipartparser.py +++ b/django/http/multipartparser.py @@ -43,7 +43,7 @@ FIELD = "field" class MultiPartParser: """ - A rfc2388 multipart/form-data parser. + An RFC 7578 multipart/form-data parser. ``MultiValueDict.parse()`` reads the input stream in ``chunk_size`` chunks and returns a tuple of ``(MultiValueDict(POST), MultiValueDict(FILES))``. 
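The AltersData mixin added in django/db/models/utils.py above copies the alters_data flag from a base-class method onto any override that does not set the flag itself, so data-modifying methods stay off-limits to the template engine even after subclassing; per the hunks above it is now mixed into QuerySet, the related managers, BaseModelForm, and BaseModelFormSet. A small sketch of the effect, using an illustrative CustomQuerySet that is not part of the patch:

    from django.db import models

    class CustomQuerySet(models.QuerySet):
        def delete(self):
            # No explicit ``delete.alters_data = True`` here; AltersData's
            # __init_subclass__ copies the flag from QuerySet.delete.
            return super().delete()

    assert CustomQuerySet.delete.alters_data is True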
diff --git a/django/http/request.py b/django/http/request.py index 815544368b..6b51d23e97 100644 --- a/django/http/request.py +++ b/django/http/request.py @@ -461,6 +461,31 @@ class HttpHeaders(CaseInsensitiveMapping): return None return header.replace("_", "-").title() + @classmethod + def to_wsgi_name(cls, header): + header = header.replace("-", "_").upper() + if header in cls.UNPREFIXED_HEADERS: + return header + return f"{cls.HTTP_PREFIX}{header}" + + @classmethod + def to_asgi_name(cls, header): + return header.replace("-", "_").upper() + + @classmethod + def to_wsgi_names(cls, headers): + return { + cls.to_wsgi_name(header_name): value + for header_name, value in headers.items() + } + + @classmethod + def to_asgi_names(cls, headers): + return { + cls.to_asgi_name(header_name): value + for header_name, value in headers.items() + } + class QueryDict(MultiValueDict): """ diff --git a/django/http/response.py b/django/http/response.py index bb94e81263..465a8553dc 100644 --- a/django/http/response.py +++ b/django/http/response.py @@ -6,9 +6,12 @@ import os import re import sys import time +import warnings from email.header import Header from http.client import responses -from urllib.parse import quote, urlparse +from urllib.parse import urlparse + +from asgiref.sync import async_to_sync, sync_to_async from django.conf import settings from django.core import signals, signing @@ -18,7 +21,7 @@ from django.http.cookie import SimpleCookie from django.utils import timezone from django.utils.datastructures import CaseInsensitiveMapping from django.utils.encoding import iri_to_uri -from django.utils.http import http_date +from django.utils.http import content_disposition_header, http_date from django.utils.regex_helper import _lazy_re_compile _charset_from_content_type_re = _lazy_re_compile( @@ -476,7 +479,18 @@ class StreamingHttpResponse(HttpResponseBase): @property def streaming_content(self): - return map(self.make_bytes, self._iterator) + if self.is_async: + # pull to lexical scope to capture fixed reference in case + # streaming_content is set again later. + _iterator = self._iterator + + async def awrapper(): + async for part in _iterator: + yield self.make_bytes(part) + + return awrapper() + else: + return map(self.make_bytes, self._iterator) @streaming_content.setter def streaming_content(self, value): @@ -484,12 +498,48 @@ class StreamingHttpResponse(HttpResponseBase): def _set_streaming_content(self, value): # Ensure we can never iterate on "value" more than once. - self._iterator = iter(value) + try: + self._iterator = iter(value) + self.is_async = False + except TypeError: + self._iterator = value.__aiter__() + self.is_async = True if hasattr(value, "close"): self._resource_closers.append(value.close) def __iter__(self): - return self.streaming_content + try: + return iter(self.streaming_content) + except TypeError: + warnings.warn( + "StreamingHttpResponse must consume asynchronous iterators in order to " + "serve them synchronously. Use a synchronous iterator instead.", + Warning, + ) + + # async iterator. Consume in async_to_sync and map back. + async def to_list(_iterator): + as_list = [] + async for chunk in _iterator: + as_list.append(chunk) + return as_list + + return map(self.make_bytes, iter(async_to_sync(to_list)(self._iterator))) + + async def __aiter__(self): + try: + async for part in self.streaming_content: + yield part + except TypeError: + warnings.warn( + "StreamingHttpResponse must consume synchronous iterators in order to " + "serve them asynchronously. 
Use an asynchronous iterator instead.", + Warning, + ) + # sync iterator. Consume via sync_to_async and yield via async + # generator. + for part in await sync_to_async(list)(self.streaming_content): + yield part def getvalue(self): return b"".join(self.streaming_content) @@ -569,20 +619,10 @@ class FileResponse(StreamingHttpResponse): else: self.headers["Content-Type"] = "application/octet-stream" - if filename: - disposition = "attachment" if self.as_attachment else "inline" - try: - filename.encode("ascii") - file_expr = 'filename="{}"'.format( - filename.replace("\\", "\\\\").replace('"', r"\"") - ) - except UnicodeEncodeError: - file_expr = "filename*=utf-8''{}".format(quote(filename)) - self.headers["Content-Disposition"] = "{}; {}".format( - disposition, file_expr - ) - elif self.as_attachment: - self.headers["Content-Disposition"] = "attachment" + if content_disposition := content_disposition_header( + self.as_attachment, filename + ): + self.headers["Content-Disposition"] = content_disposition class HttpResponseRedirectBase(HttpResponse): diff --git a/django/middleware/csrf.py b/django/middleware/csrf.py index 94f580fa71..d4b8eb9448 100644 --- a/django/middleware/csrf.py +++ b/django/middleware/csrf.py @@ -11,8 +11,7 @@ from urllib.parse import urlparse from django.conf import settings from django.core.exceptions import DisallowedHost, ImproperlyConfigured -from django.http import UnreadablePostError -from django.http.request import HttpHeaders +from django.http import HttpHeaders, UnreadablePostError from django.urls import get_callable from django.utils.cache import patch_vary_headers from django.utils.crypto import constant_time_compare, get_random_string @@ -426,7 +425,7 @@ class CsrfViewMiddleware(MiddlewareMixin): if getattr(callback, "csrf_exempt", False): return None - # Assume that anything not defined as 'safe' by RFC7231 needs protection + # Assume that anything not defined as 'safe' by RFC 9110 needs protection if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"): return self._accept(request) diff --git a/django/middleware/gzip.py b/django/middleware/gzip.py index 6d27c1e335..45be6ccb43 100644 --- a/django/middleware/gzip.py +++ b/django/middleware/gzip.py @@ -13,6 +13,8 @@ class GZipMiddleware(MiddlewareMixin): on the Accept-Encoding header. """ + max_random_bytes = 100 + def process_response(self, request, response): # It's not worth attempting to compress really short responses. if not response.streaming and len(response.content) < 200: @@ -29,20 +31,40 @@ class GZipMiddleware(MiddlewareMixin): return response if response.streaming: + if response.is_async: + # pull to lexical scope to capture fixed reference in case + # streaming_content is set again later. + orignal_iterator = response.streaming_content + + async def gzip_wrapper(): + async for chunk in orignal_iterator: + yield compress_string( + chunk, + max_random_bytes=self.max_random_bytes, + ) + + response.streaming_content = gzip_wrapper() + else: + response.streaming_content = compress_sequence( + response.streaming_content, + max_random_bytes=self.max_random_bytes, + ) # Delete the `Content-Length` header for streaming content, because # we won't know the compressed size until we stream it. - response.streaming_content = compress_sequence(response.streaming_content) del response.headers["Content-Length"] else: # Return the compressed content only if it's actually shorter. 
- compressed_content = compress_string(response.content) + compressed_content = compress_string( + response.content, + max_random_bytes=self.max_random_bytes, + ) if len(compressed_content) >= len(response.content): return response response.content = compressed_content response.headers["Content-Length"] = str(len(response.content)) # If there is a strong ETag, make it weak to fulfill the requirements - # of RFC 7232 section-2.1 while also allowing conditional request + # of RFC 9110 Section 8.8.1 while also allowing conditional request # matches on ETags. etag = response.get("ETag") if etag and etag.startswith('"'): diff --git a/django/test/client.py b/django/test/client.py index 99e831aebd..8fdce54d4d 100644 --- a/django/test/client.py +++ b/django/test/client.py @@ -14,11 +14,11 @@ from asgiref.sync import sync_to_async from django.conf import settings from django.core.handlers.asgi import ASGIRequest from django.core.handlers.base import BaseHandler -from django.core.handlers.wsgi import WSGIRequest +from django.core.handlers.wsgi import LimitedStream, WSGIRequest from django.core.serializers.json import DjangoJSONEncoder from django.core.signals import got_request_exception, request_finished, request_started from django.db import close_old_connections -from django.http import HttpRequest, QueryDict, SimpleCookie +from django.http import HttpHeaders, HttpRequest, QueryDict, SimpleCookie from django.test import signals from django.test.utils import ContextList from django.urls import resolve @@ -110,7 +110,7 @@ def conditional_content_removal(request, response): """ Simulate the behavior of most web servers by removing the content of responses for HEAD requests, 1xx, 204, and 304 responses. Ensure - compliance with RFC 7230, section 3.3.3. + compliance with RFC 9112 Section 6.3. """ if 100 <= response.status_code < 200 or response.status_code in (204, 304): if response.streaming: @@ -198,7 +198,8 @@ class AsyncClientHandler(BaseHandler): sender=self.__class__, scope=scope ) request_started.connect(close_old_connections) - request = ASGIRequest(scope, body_file) + # Wrap FakePayload body_file to allow large read() in test environment. + request = ASGIRequest(scope, LimitedStream(body_file, len(body_file))) # Sneaky little hack so that we can easily get round # CsrfViewMiddleware. This makes life easier, and is probably required # for backwards compatibility with external tests against admin views. @@ -345,11 +346,13 @@ class RequestFactory: just as if that view had been hooked up using a URLconf. """ - def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults): + def __init__(self, *, json_encoder=DjangoJSONEncoder, headers=None, **defaults): self.json_encoder = json_encoder self.defaults = defaults self.cookies = SimpleCookie() self.errors = BytesIO() + if headers: + self.defaults.update(HttpHeaders.to_wsgi_names(headers)) def _base_environ(self, **request): """ @@ -421,13 +424,14 @@ class RequestFactory: # Refs comment in `get_bytes_from_wsgi()`. 
return path.decode("iso-8859-1") - def get(self, path, data=None, secure=False, **extra): + def get(self, path, data=None, secure=False, *, headers=None, **extra): """Construct a GET request.""" data = {} if data is None else data return self.generic( "GET", path, secure=secure, + headers=headers, **{ "QUERY_STRING": urlencode(data, doseq=True), **extra, @@ -435,32 +439,46 @@ class RequestFactory: ) def post( - self, path, data=None, content_type=MULTIPART_CONTENT, secure=False, **extra + self, + path, + data=None, + content_type=MULTIPART_CONTENT, + secure=False, + *, + headers=None, + **extra, ): """Construct a POST request.""" data = self._encode_json({} if data is None else data, content_type) post_data = self._encode_data(data, content_type) return self.generic( - "POST", path, post_data, content_type, secure=secure, **extra + "POST", + path, + post_data, + content_type, + secure=secure, + headers=headers, + **extra, ) - def head(self, path, data=None, secure=False, **extra): + def head(self, path, data=None, secure=False, *, headers=None, **extra): """Construct a HEAD request.""" data = {} if data is None else data return self.generic( "HEAD", path, secure=secure, + headers=headers, **{ "QUERY_STRING": urlencode(data, doseq=True), **extra, }, ) - def trace(self, path, secure=False, **extra): + def trace(self, path, secure=False, *, headers=None, **extra): """Construct a TRACE request.""" - return self.generic("TRACE", path, secure=secure, **extra) + return self.generic("TRACE", path, secure=secure, headers=headers, **extra) def options( self, @@ -468,10 +486,14 @@ class RequestFactory: data="", content_type="application/octet-stream", secure=False, + *, + headers=None, **extra, ): "Construct an OPTIONS request." - return self.generic("OPTIONS", path, data, content_type, secure=secure, **extra) + return self.generic( + "OPTIONS", path, data, content_type, secure=secure, headers=headers, **extra + ) def put( self, @@ -479,11 +501,15 @@ class RequestFactory: data="", content_type="application/octet-stream", secure=False, + *, + headers=None, **extra, ): """Construct a PUT request.""" data = self._encode_json(data, content_type) - return self.generic("PUT", path, data, content_type, secure=secure, **extra) + return self.generic( + "PUT", path, data, content_type, secure=secure, headers=headers, **extra + ) def patch( self, @@ -491,11 +517,15 @@ class RequestFactory: data="", content_type="application/octet-stream", secure=False, + *, + headers=None, **extra, ): """Construct a PATCH request.""" data = self._encode_json(data, content_type) - return self.generic("PATCH", path, data, content_type, secure=secure, **extra) + return self.generic( + "PATCH", path, data, content_type, secure=secure, headers=headers, **extra + ) def delete( self, @@ -503,11 +533,15 @@ class RequestFactory: data="", content_type="application/octet-stream", secure=False, + *, + headers=None, **extra, ): """Construct a DELETE request.""" data = self._encode_json(data, content_type) - return self.generic("DELETE", path, data, content_type, secure=secure, **extra) + return self.generic( + "DELETE", path, data, content_type, secure=secure, headers=headers, **extra + ) def generic( self, @@ -516,6 +550,8 @@ class RequestFactory: data="", content_type="application/octet-stream", secure=False, + *, + headers=None, **extra, ): """Construct an arbitrary HTTP request.""" @@ -535,6 +571,8 @@ class RequestFactory: "wsgi.input": FakePayload(data), } ) + if headers: + extra.update(HttpHeaders.to_wsgi_names(headers)) 
r.update(extra) # If QUERY_STRING is absent or empty, we want to extract it from the URL. if not r.get("QUERY_STRING"): @@ -598,7 +636,10 @@ class AsyncRequestFactory(RequestFactory): body_file = request.pop("_body_file") else: body_file = FakePayload("") - return ASGIRequest(self._base_scope(**request), body_file) + # Wrap FakePayload body_file to allow large read() in test environment. + return ASGIRequest( + self._base_scope(**request), LimitedStream(body_file, len(body_file)) + ) def generic( self, @@ -607,6 +648,8 @@ class AsyncRequestFactory(RequestFactory): data="", content_type="application/octet-stream", secure=False, + *, + headers=None, **extra, ): """Construct an arbitrary HTTP request.""" @@ -632,6 +675,8 @@ class AsyncRequestFactory(RequestFactory): s["follow"] = follow if query_string := extra.pop("QUERY_STRING", None): s["query_string"] = query_string + if headers: + extra.update(HttpHeaders.to_asgi_names(headers)) s["headers"] += [ (key.lower().encode("ascii"), value.encode("latin1")) for key, value in extra.items() @@ -778,9 +823,14 @@ class Client(ClientMixin, RequestFactory): """ def __init__( - self, enforce_csrf_checks=False, raise_request_exception=True, **defaults + self, + enforce_csrf_checks=False, + raise_request_exception=True, + *, + headers=None, + **defaults, ): - super().__init__(**defaults) + super().__init__(headers=headers, **defaults) self.handler = ClientHandler(enforce_csrf_checks) self.raise_request_exception = raise_request_exception self.exc_info = None @@ -833,12 +883,23 @@ class Client(ClientMixin, RequestFactory): self.cookies.update(response.cookies) return response - def get(self, path, data=None, follow=False, secure=False, **extra): + def get( + self, + path, + data=None, + follow=False, + secure=False, + *, + headers=None, + **extra, + ): """Request a response from the server using GET.""" self.extra = extra - response = super().get(path, data=data, secure=secure, **extra) + response = super().get(path, data=data, secure=secure, headers=headers, **extra) if follow: - response = self._handle_redirects(response, data=data, **extra) + response = self._handle_redirects( + response, data=data, headers=headers, **extra + ) return response def post( @@ -848,25 +909,45 @@ class Client(ClientMixin, RequestFactory): content_type=MULTIPART_CONTENT, follow=False, secure=False, + *, + headers=None, **extra, ): """Request a response from the server using POST.""" self.extra = extra response = super().post( - path, data=data, content_type=content_type, secure=secure, **extra + path, + data=data, + content_type=content_type, + secure=secure, + headers=headers, + **extra, ) if follow: response = self._handle_redirects( - response, data=data, content_type=content_type, **extra + response, data=data, content_type=content_type, headers=headers, **extra ) return response - def head(self, path, data=None, follow=False, secure=False, **extra): + def head( + self, + path, + data=None, + follow=False, + secure=False, + *, + headers=None, + **extra, + ): """Request a response from the server using HEAD.""" self.extra = extra - response = super().head(path, data=data, secure=secure, **extra) + response = super().head( + path, data=data, secure=secure, headers=headers, **extra + ) if follow: - response = self._handle_redirects(response, data=data, **extra) + response = self._handle_redirects( + response, data=data, headers=headers, **extra + ) return response def options( @@ -876,16 +957,23 @@ class Client(ClientMixin, RequestFactory): 
content_type="application/octet-stream", follow=False, secure=False, + *, + headers=None, **extra, ): """Request a response from the server using OPTIONS.""" self.extra = extra response = super().options( - path, data=data, content_type=content_type, secure=secure, **extra + path, + data=data, + content_type=content_type, + secure=secure, + headers=headers, + **extra, ) if follow: response = self._handle_redirects( - response, data=data, content_type=content_type, **extra + response, data=data, content_type=content_type, headers=headers, **extra ) return response @@ -896,16 +984,23 @@ class Client(ClientMixin, RequestFactory): content_type="application/octet-stream", follow=False, secure=False, + *, + headers=None, **extra, ): """Send a resource to the server using PUT.""" self.extra = extra response = super().put( - path, data=data, content_type=content_type, secure=secure, **extra + path, + data=data, + content_type=content_type, + secure=secure, + headers=headers, + **extra, ) if follow: response = self._handle_redirects( - response, data=data, content_type=content_type, **extra + response, data=data, content_type=content_type, headers=headers, **extra ) return response @@ -916,16 +1011,23 @@ class Client(ClientMixin, RequestFactory): content_type="application/octet-stream", follow=False, secure=False, + *, + headers=None, **extra, ): """Send a resource to the server using PATCH.""" self.extra = extra response = super().patch( - path, data=data, content_type=content_type, secure=secure, **extra + path, + data=data, + content_type=content_type, + secure=secure, + headers=headers, + **extra, ) if follow: response = self._handle_redirects( - response, data=data, content_type=content_type, **extra + response, data=data, content_type=content_type, headers=headers, **extra ) return response @@ -936,28 +1038,55 @@ class Client(ClientMixin, RequestFactory): content_type="application/octet-stream", follow=False, secure=False, + *, + headers=None, **extra, ): """Send a DELETE request to the server.""" self.extra = extra response = super().delete( - path, data=data, content_type=content_type, secure=secure, **extra + path, + data=data, + content_type=content_type, + secure=secure, + headers=headers, + **extra, ) if follow: response = self._handle_redirects( - response, data=data, content_type=content_type, **extra + response, data=data, content_type=content_type, headers=headers, **extra ) return response - def trace(self, path, data="", follow=False, secure=False, **extra): + def trace( + self, + path, + data="", + follow=False, + secure=False, + *, + headers=None, + **extra, + ): """Send a TRACE request to the server.""" self.extra = extra - response = super().trace(path, data=data, secure=secure, **extra) + response = super().trace( + path, data=data, secure=secure, headers=headers, **extra + ) if follow: - response = self._handle_redirects(response, data=data, **extra) + response = self._handle_redirects( + response, data=data, headers=headers, **extra + ) return response - def _handle_redirects(self, response, data="", content_type="", **extra): + def _handle_redirects( + self, + response, + data="", + content_type="", + headers=None, + **extra, + ): """ Follow any redirects by requesting responses from the server using GET. """ @@ -983,7 +1112,7 @@ class Client(ClientMixin, RequestFactory): extra["SERVER_PORT"] = str(url.port) path = url.path - # RFC 2616: bare domains without path are treated as the root. + # RFC 3986 Section 6.2.3: Empty path should be normalized to "/". 
if not path and url.netloc: path = "/" # Prepend the request path to handle relative path redirects @@ -1006,7 +1135,12 @@ class Client(ClientMixin, RequestFactory): content_type = None response = request_method( - path, data=data, content_type=content_type, follow=False, **extra + path, + data=data, + content_type=content_type, + follow=False, + headers=headers, + **extra, ) response.redirect_chain = redirect_chain @@ -1034,9 +1168,14 @@ class AsyncClient(ClientMixin, AsyncRequestFactory): """ def __init__( - self, enforce_csrf_checks=False, raise_request_exception=True, **defaults + self, + enforce_csrf_checks=False, + raise_request_exception=True, + *, + headers=None, + **defaults, ): - super().__init__(**defaults) + super().__init__(headers=headers, **defaults) self.handler = AsyncClientHandler(enforce_csrf_checks) self.raise_request_exception = raise_request_exception self.exc_info = None diff --git a/django/test/testcases.py b/django/test/testcases.py index a9349d82f1..090a31e7c4 100644 --- a/django/test/testcases.py +++ b/django/test/testcases.py @@ -1,4 +1,3 @@ -import asyncio import difflib import inspect import json @@ -26,7 +25,7 @@ from urllib.parse import ( ) from urllib.request import url2pathname -from asgiref.sync import async_to_sync +from asgiref.sync import async_to_sync, iscoroutinefunction from django.apps import apps from django.conf import settings @@ -401,7 +400,7 @@ class SimpleTestCase(unittest.TestCase): ) # Convert async test methods. - if asyncio.iscoroutinefunction(testMethod): + if iscoroutinefunction(testMethod): setattr(self, self._testMethodName, async_to_sync(testMethod)) if not skipped: @@ -1778,7 +1777,9 @@ class LiveServerThread(threading.Thread): try: # Create the handler for serving static and media files handler = self.static_handler(_MediaFilesHandler(WSGIHandler())) - self.httpd = self._create_server() + self.httpd = self._create_server( + connections_override=self.connections_override, + ) # If binding to port zero, assign the port allocated by the OS. if self.port == 0: self.port = self.httpd.server_address[1] diff --git a/django/test/utils.py b/django/test/utils.py index 2b2b92c593..5e5649b0ac 100644 --- a/django/test/utils.py +++ b/django/test/utils.py @@ -1,4 +1,3 @@ -import asyncio import collections import logging import os @@ -14,6 +13,8 @@ from types import SimpleNamespace from unittest import TestCase, skipIf, skipUnless from xml.dom.minidom import Node, parseString +from asgiref.sync import iscoroutinefunction + from django.apps import apps from django.apps.registry import Apps from django.conf import UserSettingsHolder, settings @@ -440,7 +441,7 @@ class TestContextDecorator: raise TypeError("Can only decorate subclasses of unittest.TestCase") def decorate_callable(self, func): - if asyncio.iscoroutinefunction(func): + if iscoroutinefunction(func): # If the inner function is an async function, we must execute async # as well so that the `with` statement executes at the right time. @wraps(func) diff --git a/django/utils/asyncio.py b/django/utils/asyncio.py index 1e79f90c2c..eea2df48e2 100644 --- a/django/utils/asyncio.py +++ b/django/utils/asyncio.py @@ -37,3 +37,28 @@ def async_unsafe(message): return decorator(func) else: return decorator + + +try: + from contextlib import aclosing +except ImportError: + # TODO: Remove when dropping support for PY39. + from contextlib import AbstractAsyncContextManager + + # Backport of contextlib.aclosing() from Python 3.10. Copyright (C) Python + # Software Foundation (see LICENSE.python). 
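# Editor's sketch of the intended use (not part of the patch): aclosing()
# guarantees that an async generator is finalized even when iteration stops
# early, e.g.
#
#     async def stream():
#         yield 1
#         yield 2
#
#     async def consume():
#         async with aclosing(stream()) as agen:
#             async for item in agen:
#                 break  # agen.aclose() still runs when the block exits.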
+ class aclosing(AbstractAsyncContextManager): + """ + Async context manager for safely finalizing an asynchronously + cleaned-up resource such as an async generator, calling its + ``aclose()`` method. + """ + + def __init__(self, thing): + self.thing = thing + + async def __aenter__(self): + return self.thing + + async def __aexit__(self, *exc_info): + await self.thing.aclose() diff --git a/django/utils/cache.py b/django/utils/cache.py index 90292ce4da..2dd2c7796c 100644 --- a/django/utils/cache.py +++ b/django/utils/cache.py @@ -4,9 +4,7 @@ managing the "Vary" header of responses. It includes functions to patch the header of response objects directly and decorators that change functions to do that header-patching themselves. -For information on the Vary header, see: - - https://tools.ietf.org/html/rfc7231#section-7.1.4 +For information on the Vary header, see RFC 9110 Section 12.5.5. Essentially, the "Vary" HTTP header defines which headers a cache should take into account when building its cache key. Requests with the same path but @@ -139,7 +137,7 @@ def _precondition_failed(request): def _not_modified(request, response=None): new_response = HttpResponseNotModified() if response: - # Preserve the headers required by Section 4.1 of RFC 7232, as well as + # Preserve the headers required by RFC 9110 Section 15.4.5, as well as # Last-Modified. for header in ( "Cache-Control", @@ -177,7 +175,9 @@ def get_conditional_response(request, etag=None, last_modified=None, response=No if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE") if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since) - # Step 1 of section 6 of RFC 7232: Test the If-Match precondition. + # Evaluation of request preconditions below follows RFC 9110 Section + # 13.2.2. + # Step 1: Test the If-Match precondition. if if_match_etags and not _if_match_passes(etag, if_match_etags): return _precondition_failed(request) @@ -212,7 +212,7 @@ def get_conditional_response(request, etag=None, last_modified=None, response=No def _if_match_passes(target_etag, etags): """ - Test the If-Match comparison as defined in section 3.1 of RFC 7232. + Test the If-Match comparison as defined in RFC 9110 Section 13.1.1. """ if not target_etag: # If there isn't an ETag, then there can't be a match. @@ -233,15 +233,15 @@ def _if_match_passes(target_etag, etags): def _if_unmodified_since_passes(last_modified, if_unmodified_since): """ - Test the If-Unmodified-Since comparison as defined in section 3.4 of - RFC 7232. + Test the If-Unmodified-Since comparison as defined in RFC 9110 Section + 13.1.4. """ return last_modified and last_modified <= if_unmodified_since def _if_none_match_passes(target_etag, etags): """ - Test the If-None-Match comparison as defined in section 3.2 of RFC 7232. + Test the If-None-Match comparison as defined in RFC 9110 Section 13.1.2. """ if not target_etag: # If there isn't an ETag, then there isn't a match. @@ -260,7 +260,8 @@ def _if_none_match_passes(target_etag, etags): def _if_modified_since_passes(last_modified, if_modified_since): """ - Test the If-Modified-Since comparison as defined in section 3.3 of RFC 7232. + Test the If-Modified-Since comparison as defined in RFC 9110 Section + 13.1.3. 
""" return not last_modified or last_modified > if_modified_since diff --git a/django/utils/deprecation.py b/django/utils/deprecation.py index caed5b25d4..19379dfe58 100644 --- a/django/utils/deprecation.py +++ b/django/utils/deprecation.py @@ -1,8 +1,7 @@ -import asyncio import inspect import warnings -from asgiref.sync import sync_to_async +from asgiref.sync import iscoroutinefunction, markcoroutinefunction, sync_to_async class RemovedInDjango50Warning(DeprecationWarning): @@ -120,16 +119,14 @@ class MiddlewareMixin: If get_response is a coroutine function, turns us into async mode so a thread is not consumed during a whole request. """ - if asyncio.iscoroutinefunction(self.get_response): + if iscoroutinefunction(self.get_response): # Mark the class as async-capable, but do the actual switch # inside __call__ to avoid swapping out dunder methods - self._is_coroutine = asyncio.coroutines._is_coroutine - else: - self._is_coroutine = None + markcoroutinefunction(self) def __call__(self, request): # Exit out to async mode, if needed - if self._is_coroutine: + if iscoroutinefunction(self): return self.__acall__(request) response = None if hasattr(self, "process_request"): diff --git a/django/utils/encoding.py b/django/utils/encoding.py index 360eb91ed5..43847b5385 100644 --- a/django/utils/encoding.py +++ b/django/utils/encoding.py @@ -112,16 +112,15 @@ def iri_to_uri(iri): Convert an Internationalized Resource Identifier (IRI) portion to a URI portion that is suitable for inclusion in a URL. - This is the algorithm from section 3.1 of RFC 3987, slightly simplified - since the input is assumed to be a string rather than an arbitrary byte - stream. + This is the algorithm from RFC 3987 Section 3.1, slightly simplified since + the input is assumed to be a string rather than an arbitrary byte stream. Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/'). """ # The list of safe characters here is constructed from the "reserved" and - # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986: + # "unreserved" characters specified in RFC 3986 Sections 2.2 and 2.3: # reserved = gen-delims / sub-delims # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" @@ -130,7 +129,7 @@ def iri_to_uri(iri): # Of the unreserved characters, urllib.parse.quote() already considers all # but the ~ safe. # The % character is also added to the list of safe characters here, as the - # end of section 3.1 of RFC 3987 specifically mentions that % must not be + # end of RFC 3987 Section 3.1 specifically mentions that % must not be # converted. if iri is None: return iri @@ -161,7 +160,7 @@ def uri_to_iri(uri): Convert a Uniform Resource Identifier(URI) into an Internationalized Resource Identifier(IRI). - This is the algorithm from section 3.2 of RFC 3987, excluding step 4. + This is the algorithm from RFC 3987 Section 3.2, excluding step 4. Take an URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return a string containing the encoded result (e.g. '/I%20♥%20Django/'). @@ -197,13 +196,13 @@ def escape_uri_path(path): Escape the unsafe characters from the path portion of a Uniform Resource Identifier (URI). 
""" - # These are the "reserved" and "unreserved" characters specified in - # sections 2.2 and 2.3 of RFC 2396: + # These are the "reserved" and "unreserved" characters specified in RFC + # 3986 Sections 2.2 and 2.3: # reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | "," # unreserved = alphanum | mark # mark = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")" # The list of safe characters here is constructed subtracting ";", "=", - # and "?" according to section 3.3 of RFC 2396. + # and "?" according to RFC 3986 Section 3.3. # The reason for not subtracting and escaping "/" is that we are escaping # the entire path, not a path segment. return quote(path, safe="/:@&+$,-_.!~*'()") @@ -216,7 +215,7 @@ def punycode(domain): def repercent_broken_unicode(path): """ - As per section 3.2 of RFC 3987, step three of converting a URI into an IRI, + As per RFC 3987 Section 3.2, step three of converting a URI into an IRI, repercent-encode any octet produced that is not part of a strictly legal UTF-8 octet sequence. """ diff --git a/django/utils/html.py b/django/utils/html.py index 007602a14a..fdb88d6709 100644 --- a/django/utils/html.py +++ b/django/utils/html.py @@ -193,9 +193,8 @@ def smart_urlquote(url): def unquote_quote(segment): segment = unquote(segment) - # Tilde is part of RFC3986 Unreserved Characters - # https://tools.ietf.org/html/rfc3986#section-2.3 - # See also https://bugs.python.org/issue16285 + # Tilde is part of RFC 3986 Section 2.3 Unreserved Characters, + # see also https://bugs.python.org/issue16285 return quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + "~") # Handle IDN before quoting. diff --git a/django/utils/http.py b/django/utils/http.py index d2ec2638b0..3e7acb5835 100644 --- a/django/utils/http.py +++ b/django/utils/http.py @@ -10,6 +10,7 @@ from urllib.parse import ( _coerce_args, _splitnetloc, _splitparams, + quote, scheme_chars, unquote, ) @@ -19,7 +20,7 @@ from urllib.parse import uses_params from django.utils.datastructures import MultiValueDict from django.utils.regex_helper import _lazy_re_compile -# based on RFC 7232, Appendix C +# Based on RFC 9110 Appendix A. ETAG_MATCH = _lazy_re_compile( r""" \A( # start of string and capture group @@ -94,8 +95,8 @@ def urlencode(query, doseq=False): def http_date(epoch_seconds=None): """ - Format the time to match the RFC1123 date format as specified by HTTP - RFC7231 section 7.1.1.1. + Format the time to match the RFC 5322 date format as specified by RFC 9110 + Section 5.6.7. `epoch_seconds` is a floating point number expressed in seconds since the epoch, in UTC - such as that outputted by time.time(). If set to None, it @@ -108,15 +109,15 @@ def http_date(epoch_seconds=None): def parse_http_date(date): """ - Parse a date format as specified by HTTP RFC7231 section 7.1.1.1. + Parse a date format as specified by HTTP RFC 9110 Section 5.6.7. The three formats allowed by the RFC are accepted, even if only the first one is still in widespread use. Return an integer expressed in seconds since the epoch, in UTC. """ - # email.utils.parsedate() does the job for RFC1123 dates; unfortunately - # RFC7231 makes it mandatory to support RFC850 dates too. So we roll + # email.utils.parsedate() does the job for RFC 1123 dates; unfortunately + # RFC 9110 makes it mandatory to support RFC 850 dates too. So we roll # our own RFC-compliant parsing. 
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE: m = regex.match(date) @@ -210,7 +211,7 @@ def urlsafe_base64_decode(s): def parse_etags(etag_str): """ Parse a string of ETags given in an If-None-Match or If-Match header as - defined by RFC 7232. Return a list of quoted ETags, or ['*'] if all ETags + defined by RFC 9110. Return a list of quoted ETags, or ['*'] if all ETags should be matched. """ if etag_str.strip() == "*": @@ -425,3 +426,24 @@ def parse_header_parameters(line): value = unquote(value, encoding=encoding) pdict[name] = value return key, pdict + + +def content_disposition_header(as_attachment, filename): + """ + Construct a Content-Disposition HTTP header value from the given filename + as specified by RFC 6266. + """ + if filename: + disposition = "attachment" if as_attachment else "inline" + try: + filename.encode("ascii") + file_expr = 'filename="{}"'.format( + filename.replace("\\", "\\\\").replace('"', r"\"") + ) + except UnicodeEncodeError: + file_expr = "filename*=utf-8''{}".format(quote(filename)) + return f"{disposition}; {file_expr}" + elif as_attachment: + return "attachment" + else: + return None diff --git a/django/utils/text.py b/django/utils/text.py index dcfe3fba0e..86d3b52741 100644 --- a/django/utils/text.py +++ b/django/utils/text.py @@ -1,4 +1,6 @@ +import gzip import re +import secrets import unicodedata from gzip import GzipFile from gzip import compress as gzip_compress @@ -314,8 +316,23 @@ def phone2numeric(phone): return "".join(char2number.get(c, c) for c in phone.lower()) -def compress_string(s): - return gzip_compress(s, compresslevel=6, mtime=0) +def _get_random_filename(max_random_bytes): + return b"a" * secrets.randbelow(max_random_bytes) + + +def compress_string(s, *, max_random_bytes=None): + compressed_data = gzip_compress(s, compresslevel=6, mtime=0) + + if not max_random_bytes: + return compressed_data + + compressed_view = memoryview(compressed_data) + header = bytearray(compressed_view[:10]) + header[3] = gzip.FNAME + + filename = _get_random_filename(max_random_bytes) + b"\x00" + + return bytes(header) + filename + compressed_view[10:] class StreamingBuffer(BytesIO): @@ -327,9 +344,12 @@ class StreamingBuffer(BytesIO): # Like compress_string, but for iterators of strings. -def compress_sequence(sequence): +def compress_sequence(sequence, *, max_random_bytes=None): buf = StreamingBuffer() - with GzipFile(mode="wb", compresslevel=6, fileobj=buf, mtime=0) as zfile: + filename = _get_random_filename(max_random_bytes) if max_random_bytes else None + with GzipFile( + filename=filename, mode="wb", compresslevel=6, fileobj=buf, mtime=0 + ) as zfile: # Output headers... yield buf.read() for item in sequence: diff --git a/django/utils/translation/trans_real.py b/django/utils/translation/trans_real.py index 595a9ec2e4..c1e64d4ebd 100644 --- a/django/utils/translation/trans_real.py +++ b/django/utils/translation/trans_real.py @@ -30,8 +30,8 @@ _default = None # magic gettext number to separate context from message CONTEXT_SEPARATOR = "\x04" -# Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9 -# and RFC 3066, section 2.1 +# Format of Accept-Language header values. From RFC 9110 Sections 12.4.2 and +# 12.5.4, and RFC 5646 Section 2.1. 
accept_language_re = _lazy_re_compile( r""" # "en", "en-au", "x-y-z", "es-419", "*" diff --git a/django/views/debug.py b/django/views/debug.py index 30a1dbc6da..53b4125716 100644 --- a/django/views/debug.py +++ b/django/views/debug.py @@ -1,4 +1,5 @@ import functools +import itertools import re import sys import types @@ -15,7 +16,7 @@ from django.utils.datastructures import MultiValueDict from django.utils.encoding import force_str from django.utils.module_loading import import_string from django.utils.regex_helper import _lazy_re_compile -from django.utils.version import get_docs_version +from django.utils.version import PY311, get_docs_version # Minimal Django templates engine to render the error templates # regardless of the project's TEMPLATES setting. Templates are @@ -396,6 +397,8 @@ class ExceptionReporter: c["exception_type"] = self.exc_type.__name__ if self.exc_value: c["exception_value"] = str(self.exc_value) + if exc_notes := getattr(self.exc_value, "__notes__", None): + c["exception_notes"] = "\n" + "\n".join(exc_notes) if frames: c["lastframe"] = frames[-1] return c @@ -544,6 +547,24 @@ class ExceptionReporter: pre_context = [] context_line = "" post_context = [] + + colno = tb_area_colno = "" + if PY311: + _, _, start_column, end_column = next( + itertools.islice( + tb.tb_frame.f_code.co_positions(), tb.tb_lasti // 2, None + ) + ) + if start_column and end_column: + underline = "^" * (end_column - start_column) + spaces = " " * (start_column + len(str(lineno + 1)) + 2) + colno = f"\n{spaces}{underline}" + tb_area_spaces = " " * ( + 4 + + start_column + - (len(context_line) - len(context_line.lstrip())) + ) + tb_area_colno = f"\n{tb_area_spaces}{underline}" yield { "exc_cause": exc_cause, "exc_cause_explicit": exc_cause_explicit, @@ -560,6 +581,8 @@ class ExceptionReporter: "context_line": context_line, "post_context": post_context, "pre_context_lineno": pre_context_lineno + 1, + "colno": colno, + "tb_area_colno": tb_area_colno, } tb = tb.tb_next diff --git a/django/views/defaults.py b/django/views/defaults.py index f10b75d471..ccad802a54 100644 --- a/django/views/defaults.py +++ b/django/views/defaults.py @@ -133,7 +133,7 @@ def permission_denied(request, exception, template_name=ERROR_403_TEMPLATE_NAME) supplied). If the template does not exist, an Http403 response containing the text - "403 Forbidden" (as per RFC 7231) will be returned. + "403 Forbidden" (as per RFC 9110 Section 15.5.4) will be returned. """ try: template = loader.get_template(template_name) diff --git a/django/views/generic/base.py b/django/views/generic/base.py index 3a3afb0c73..8f8f9397e8 100644 --- a/django/views/generic/base.py +++ b/django/views/generic/base.py @@ -1,6 +1,7 @@ -import asyncio import logging +from asgiref.sync import iscoroutinefunction, markcoroutinefunction + from django.core.exceptions import ImproperlyConfigured from django.http import ( HttpResponse, @@ -68,8 +69,8 @@ class View: ] if not handlers: return False - is_async = asyncio.iscoroutinefunction(handlers[0]) - if not all(asyncio.iscoroutinefunction(h) == is_async for h in handlers[1:]): + is_async = iscoroutinefunction(handlers[0]) + if not all(iscoroutinefunction(h) == is_async for h in handlers[1:]): raise ImproperlyConfigured( f"{cls.__qualname__} HTTP handlers must either be all sync or all " "async." @@ -117,7 +118,7 @@ class View: # Mark the callback if the view class is async. 
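# Editor's note: marking the callback (below) makes asgiref's
# iscoroutinefunction() report the returned view callable as a coroutine
# function, so async handlers await it, e.g.
#
#     view = MyAsyncView.as_view()   # MyAsyncView is an illustrative name.
#     assert iscoroutinefunction(view)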
if cls.view_is_async: - view._is_coroutine = asyncio.coroutines._is_coroutine + markcoroutinefunction(view) return view diff --git a/django/views/templates/technical_500.html b/django/views/templates/technical_500.html index 4483145ec3..a5c187147b 100644 --- a/django/views/templates/technical_500.html +++ b/django/views/templates/technical_500.html @@ -100,7 +100,7 @@

{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %} {% if request %} at {{ request.path_info }}{% endif %}

-{% if exception_value %}{{ exception_value|force_escape }}{% else %}No exception message supplied{% endif %}
+{% if exception_value %}{{ exception_value|force_escape }}{% if exception_notes %}{{ exception_notes }}{% endif %}{% else %}No exception message supplied{% endif %}
{% if request %} @@ -242,7 +242,7 @@ {% endif %}
-{{ frame.context_line }}{% if not is_email %} {% endif %}
+{{ frame.context_line }}{{ frame.colno }}{% if not is_email %} {% endif %}
{% if frame.post_context and not is_email %}
    @@ -327,10 +327,10 @@ The above exception ({{ frame.exc_cause|force_escape }}) was the direct cause of {% else %} During handling of the above exception ({{ frame.exc_cause|force_escape }}), another exception occurred: {% endif %}{% endif %}{% endifchanged %} {% if frame.tb %}File "{{ frame.filename }}"{% if frame.context_line %}, line {{ frame.lineno }}{% endif %}, in {{ frame.function }} -{% if frame.context_line %} {% spaceless %}{{ frame.context_line }}{% endspaceless %}{% endif %}{% elif forloop.first %}None{% else %}Traceback: None{% endif %}{% endfor %} +{% if frame.context_line %} {% spaceless %}{{ frame.context_line }}{% endspaceless %}{{ frame.tb_area_colno }}{% endif %}{% elif forloop.first %}None{% else %}Traceback: None{% endif %}{% endfor %} Exception Type: {{ exception_type }}{% if request %} at {{ request.path_info }}{% endif %} -Exception Value: {{ exception_value|force_escape }} +Exception Value: {{ exception_value|force_escape }}{% if exception_notes %}{{ exception_notes }}{% endif %}

    diff --git a/django/views/templates/technical_500.txt b/django/views/templates/technical_500.txt index 87cf6b5bbc..5a75324ebc 100644 --- a/django/views/templates/technical_500.txt +++ b/django/views/templates/technical_500.txt @@ -31,10 +31,10 @@ Traceback (most recent call last): {% for frame in frames %}{% ifchanged frame.exc_cause %}{% if frame.exc_cause %} {% if frame.exc_cause_explicit %}The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception:{% else %}During handling of the above exception ({{ frame.exc_cause }}), another exception occurred:{% endif %} {% endif %}{% endifchanged %} {% if frame.tb %}File "{{ frame.filename }}"{% if frame.context_line %}, line {{ frame.lineno }}{% endif %}, in {{ frame.function }} -{% if frame.context_line %} {% spaceless %}{{ frame.context_line }}{% endspaceless %}{% endif %}{% elif forloop.first %}None{% else %}Traceback: None{% endif %} +{% if frame.context_line %} {% spaceless %}{{ frame.context_line }}{% endspaceless %}{{ frame.tb_area_colno }}{% endif %}{% elif forloop.first %}None{% else %}Traceback: None{% endif %} {% endfor %} {% if exception_type %}Exception Type: {{ exception_type }}{% if request %} at {{ request.path_info }}{% endif %} -{% if exception_value %}Exception Value: {{ exception_value }}{% endif %}{% endif %}{% endif %} +{% if exception_value %}Exception Value: {{ exception_value }}{% endif %}{% if exception_notes %}{{ exception_notes }}{% endif %}{% endif %}{% endif %} {% if raising_view_name %}Raised during: {{ raising_view_name }}{% endif %} {% if request %}Request information: {% if user_str %}USER: {{ user_str }}{% endif %} diff --git a/docs/conf.py b/docs/conf.py index 3e37509337..1805e92e14 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -97,6 +97,10 @@ source_suffix = ".txt" # The root toctree document. root_doc = "contents" +# Disable auto-created table of contents entries for all domain objects (e.g. +# functions, classes, attributes, etc.) in Sphinx 5.2+. +toc_object_entries = False + # General substitutions. project = "Django" copyright = "Django Software Foundation and contributors" @@ -174,7 +178,7 @@ pygments_style = "trac" intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), "sphinx": ("https://www.sphinx-doc.org/en/master/", None), - "psycopg2": ("https://www.psycopg.org/docs/", None), + "psycopg": ("https://www.psycopg.org/psycopg3/docs/", None), } # Python's docs don't change every week. diff --git a/docs/howto/csrf.txt b/docs/howto/csrf.txt index 2e3441dfc3..9e3024029e 100644 --- a/docs/howto/csrf.txt +++ b/docs/howto/csrf.txt @@ -104,7 +104,7 @@ The above code could be simplified by using the `JavaScript Cookie library Django provides a view decorator which forces setting of the cookie: :func:`~django.views.decorators.csrf.ensure_csrf_cookie`. -.. _BREACH: http://breachattack.com/ +.. _BREACH: https://www.breachattack.com/ .. _acquiring-csrf-token-from-html: diff --git a/docs/internals/contributing/bugs-and-features.txt b/docs/internals/contributing/bugs-and-features.txt index fdbc78c523..2e5ca8de34 100644 --- a/docs/internals/contributing/bugs-and-features.txt +++ b/docs/internals/contributing/bugs-and-features.txt @@ -125,9 +125,10 @@ How we make decisions ===================== Whenever possible, we strive for a rough consensus. To that end, we'll often -have informal votes on |django-developers| about a feature. In these votes we -follow the voting style invented by Apache and used on Python itself, where -votes are given as +1, +0, -0, or -1. 
Roughly translated, these votes mean: +have informal votes on |django-developers| or the Django Forum about a feature. +In these votes we follow the voting style invented by Apache and used on Python +itself, where votes are given as +1, +0, -0, or -1. +Roughly translated, these votes mean: * +1: "I love the idea and I'm strongly committed to it." @@ -138,29 +139,28 @@ votes are given as +1, +0, -0, or -1. Roughly translated, these votes mean: * -1: "I strongly disagree and would be very unhappy to see the idea turn into reality." -Although these votes on |django-developers| are informal, they'll be taken very -seriously. After a suitable voting period, if an obvious consensus arises we'll -follow the votes. +Although these votes are informal, they'll be taken very seriously. After a +suitable voting period, if an obvious consensus arises we'll follow the votes. However, consensus is not always possible. If consensus cannot be reached, or if the discussion toward a consensus fizzles out without a concrete decision, -the decision may be deferred to the :ref:`technical board `. +the decision may be deferred to the :ref:`steering council `. -Internally, the technical board will use the same voting mechanism. A +Internally, the steering council will use the same voting mechanism. A proposition will be considered carried if: -* There are at least three "+1" votes from members of the technical board. +* There are at least three "+1" votes from members of the steering council. -* There is no "-1" vote from any member of the technical board. +* There is no "-1" vote from any member of the steering council. Votes should be submitted within a week. -Since this process allows any technical board member to veto a proposal, a +Since this process allows any steering council member to veto a proposal, a "-1" vote should be accompanied by an explanation of what it would take to convert that "-1" into at least a "+0". Votes on technical matters should be announced and held in public on the -|django-developers| mailing list. +|django-developers| mailing list or on the Django Forum. .. _searching: https://code.djangoproject.com/search .. _custom queries: https://code.djangoproject.com/query diff --git a/docs/internals/contributing/committing-code.txt b/docs/internals/contributing/committing-code.txt index aab5cf300c..094c05a6bd 100644 --- a/docs/internals/contributing/committing-code.txt +++ b/docs/internals/contributing/committing-code.txt @@ -138,7 +138,7 @@ Django's Git repository: Credit the contributors in the commit message: "Thanks A for the report and B for review." Use git's `Co-Authored-By`_ as appropriate. - .. _Co-Authored-By: https://docs.github.com/en/github/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors + .. _Co-Authored-By: https://docs.github.com/en/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors * For commits to a branch, prefix the commit message with the branch name. For example: "[1.4.x] Fixed #xxxxx -- Added support for mind reading." 
diff --git a/docs/internals/contributing/writing-code/unit-tests.txt b/docs/internals/contributing/writing-code/unit-tests.txt index 939abb5631..e9102110eb 100644 --- a/docs/internals/contributing/writing-code/unit-tests.txt +++ b/docs/internals/contributing/writing-code/unit-tests.txt @@ -278,7 +278,7 @@ dependencies: * aiosmtpd_ * argon2-cffi_ 19.1.0+ -* asgiref_ 3.5.2+ (required) +* asgiref_ 3.6.0+ (required) * bcrypt_ * colorama_ * docutils_ diff --git a/docs/internals/deprecation.txt b/docs/internals/deprecation.txt index abd5a363bc..1908549e8c 100644 --- a/docs/internals/deprecation.txt +++ b/docs/internals/deprecation.txt @@ -39,6 +39,12 @@ details on these changes. * The ``TransactionTestCase.assertQuerysetEqual()`` method will be removed. +* Support for passing encoded JSON string literals to ``JSONField`` and + associated lookups and expressions will be removed. + +* Support for passing positional arguments to ``Signer`` and + ``TimestampSigner`` will be removed. + .. _deprecation-removed-in-5.0: 5.0 diff --git a/docs/internals/organization.txt b/docs/internals/organization.txt index ad6f2b7368..a80ea6cea2 100644 --- a/docs/internals/organization.txt +++ b/docs/internals/organization.txt @@ -43,15 +43,15 @@ Mergers hold the following prerogatives: approved by: - another Merger, - - a technical board member, + - a steering council member, - a member of the `triage & review team`_, or - a member of the `security team`_. - Initiating discussion of a minor change in the appropriate venue, and request that other Mergers refrain from merging it while discussion proceeds. -- Requesting a vote of the technical board regarding any minor change if, in +- Requesting a vote of the steering council regarding any minor change if, in the Merger's opinion, discussion has failed to reach a consensus. -- Requesting a vote of the technical board when a `major change`_ (significant +- Requesting a vote of the steering council when a `major change`_ (significant enough to require the use of the `DEP process`_) reaches one of its implementation milestones and is intended to merge. @@ -61,7 +61,7 @@ Mergers hold the following prerogatives: Membership ---------- -`The technical board`_ selects Mergers_ as necessary to maintain their number +`The steering council`_ selects Mergers_ as necessary to maintain their number at a minimum of three, in order to spread the workload and avoid over-burdening or burning out any individual Merger. There is no upper limit to the number of Mergers. @@ -72,33 +72,33 @@ to make the role of Merger sustainable. The following restrictions apply to the role of Merger: -- A person must not simultaneously serve as a member of the technical board. If - a Merger is elected to the technical board, they shall cease to be a Merger - immediately upon taking up membership in the technical board. +- A person must not simultaneously serve as a member of the steering council. If + a Merger is elected to the steering council, they shall cease to be a Merger + immediately upon taking up membership in the steering council. - A person may serve in the roles of Releaser and Merger simultaneously. 
-The selection process, when a vacancy occurs or when the technical board deems +The selection process, when a vacancy occurs or when the steering council deems it necessary to select additional persons for such a role, occur as follows: - Any member in good standing of an appropriate discussion venue, or the Django Software Foundation board acting with the input of the DSF's Fellowship committee, may suggest a person for consideration. -- The technical board considers the suggestions put forth, and then any member - of the technical board formally nominates a candidate for the role. -- The technical board votes on nominees. +- The steering council considers the suggestions put forth, and then any member + of the steering council formally nominates a candidate for the role. +- The steering council votes on nominees. Mergers may resign their role at any time, but should endeavor to provide some advance notice in order to allow the selection of a replacement. Termination of the contract of a Django Fellow by the Django Software Foundation temporarily -suspends that person's Merger role until such time as the technical board can +suspends that person's Merger role until such time as the steering council can vote on their nomination. Otherwise, a Merger may be removed by: -- Becoming disqualified due to election to the technical board. +- Becoming disqualified due to election to the steering council. - Becoming disqualified due to actions taken by the Code of Conduct committee of the Django Software Foundation. -- A vote of the technical board. +- A vote of the steering council. .. _releasers-team: @@ -122,7 +122,7 @@ website. Membership ---------- -`The technical board`_ selects Releasers_ as necessary to maintain their number +`The steering council`_ selects Releasers_ as necessary to maintain their number at a minimum of three, in order to spread the workload and avoid over-burdening or burning out any individual Releaser. There is no upper limit to the number of Releasers. @@ -133,40 +133,40 @@ to make the role of Releaser sustainable. A person may serve in the roles of Releaser and Merger simultaneously. -The selection process, when a vacancy occurs or when the technical board deems +The selection process, when a vacancy occurs or when the steering council deems it necessary to select additional persons for such a role, occur as follows: - Any member in good standing of an appropriate discussion venue, or the Django Software Foundation board acting with the input of the DSF's Fellowship committee, may suggest a person for consideration. -- The technical board considers the suggestions put forth, and then any member - of the technical board formally nominates a candidate for the role. -- The technical board votes on nominees. +- The steering council considers the suggestions put forth, and then any member + of the steering council formally nominates a candidate for the role. +- The steering council votes on nominees. Releasers may resign their role at any time, but should endeavor to provide some advance notice in order to allow the selection of a replacement. Termination of the contract of a Django Fellow by the Django Software Foundation temporarily suspends that person's Releaser role until such time as -the technical board can vote on their nomination. +the steering council can vote on their nomination. Otherwise, a Releaser may be removed by: - Becoming disqualified due to actions taken by the Code of Conduct committee of the Django Software Foundation. 
-- A vote of the technical board. +- A vote of the steering council. .. _`Python Package Index`: https://pypi.org/project/Django/ .. _djangoproject.com: https://www.djangoproject.com/download/ -.. _technical-board: +.. _steering-council: -Technical board -=============== +Steering council +================ Role ---- -The technical board is a group of experienced contributors who: +The steering council is a group of experienced contributors who: - provide oversight of Django's development and release process, - assist in setting the direction of feature development and releases, @@ -179,7 +179,7 @@ Framework. Prerogatives ------------ -The technical board holds the following prerogatives: +The steering council holds the following prerogatives: - Making a binding decision regarding any question of a technical change to Django. @@ -189,15 +189,15 @@ The technical board holds the following prerogatives: of Django. - Setting and adjusting the schedule of releases of Django. - Selecting and removing mergers and releasers. -- Participating in the removal of members of the technical board, when deemed +- Participating in the removal of members of the steering council, when deemed appropriate. -- Calling elections of the technical board outside of those which are - automatically triggered, at times when the technical board deems an election +- Calling elections of the steering council outside of those which are + automatically triggered, at times when the steering council deems an election is appropriate. - Participating in modifying Django's governance (see :ref:`organization-change`). -- Declining to vote on a matter the technical board feels is unripe for a - binding decision, or which the technical board feels is outside the scope of +- Declining to vote on a matter the steering council feels is unripe for a + binding decision, or which the steering council feels is outside the scope of its powers. - Taking charge of the governance of other technical teams within the Django open-source project, and governing those teams accordingly. @@ -205,30 +205,28 @@ The technical board holds the following prerogatives: Membership ---------- -`The technical board`_ is an elected group of five experienced contributors +`The steering council`_ is an elected group of five experienced contributors who demonstrate: -- A history of technical contributions to Django or the Django ecosystem. This - history must begin at least 18 months prior to the individual's candidacy for - the technical board. -- A history of participation in Django's development outside of contributions - merged to the `Django Git repository`_. This may include, but is not - restricted to: +- A history of substantive contributions to Django or the Django ecosystem. + This history must begin at least 18 months prior to the individual's + candidacy for the Steering Council, and include substantive contributions in + at least two of these bullet points: + - Code contributions on Django projects or major third-party packages in the Django ecosystem + - Reviewing pull requests and/or triaging Django project tickets + - Documentation, tutorials or blog posts + - Discussions about Django on the django-developers mailing list or the Django Forum + - Running Django-related events or user groups - - Participation in discussions on the |django-developers| mailing list or - the `Django forum`_. - - Reviewing and offering feedback on pull requests in the Django source-code - repository. - - Assisting in triage and management of the Django bug tracker. 
+- A history of engagement with the direction and future of Django. This does + not need to be recent, but candidates who have not engaged in the past three + years must still demonstrate an understanding of Django's changes and + direction within those three years. -- A history of recent engagement with the direction and development of Django. - Such engagement must have occurred within a period of no more than two years - prior to the individual's candidacy for the technical board. - -A new board is elected after each release cycle of Django. The election process +A new council is elected after each release cycle of Django. The election process works as follows: -#. The technical board direct one of its members to notify the Secretary of the +#. The steering council directs one of its members to notify the Secretary of the Django Software Foundation, in writing, of the triggering of the election, and the condition which triggered it. The Secretary post to the appropriate venue -- the |django-developers| mailing list and the `Django forum`_ to @@ -248,7 +246,7 @@ works as follows: roster of candidates are maintained by the DSF Board, and candidates must provide evidence of their qualifications as part of registration. The DSF Board may challenge and reject the registration of candidates it believes do - not meet the qualifications of members of the Technical Board, or who it + not meet the qualifications of members of the Steering Council, or who it believes are registering in bad faith. #. Registration of candidates close one week after it has opened. One week after registration of candidates closes, the Secretary of the DSF publish @@ -264,16 +262,16 @@ works as follows: majority vote of the DSF Board, then posted by the Secretary of the DSF to the |django-developers| mailing list and the Django Forum. The five candidates with the highest vote totals are immediately become the new - technical board. + steering council. -A member of the technical board may be removed by: +A member of the steering council may be removed by: - Becoming disqualified due to actions taken by the Code of Conduct committee of the Django Software Foundation. - Determining that they did not possess the qualifications of a member of the - technical board. This determination must be made jointly by the other members - of the technical board, and the `DSF Board`_. A valid determination of - ineligibility requires that all other members of the technical board and all + steering council. This determination must be made jointly by the other members + of the steering council, and the `DSF Board`_. A valid determination of + ineligibility requires that all other members of the steering council and all members of the DSF Board vote who can vote on the issue (the affected person, if a DSF Board member, must not vote) vote "yes" on a motion that the person in question is ineligible. @@ -285,7 +283,7 @@ A member of the technical board may be removed by: .. _mergers: https://www.djangoproject.com/foundation/teams/#mergers-team .. _releasers: https://www.djangoproject.com/foundation/teams/#releasers-team .. _`security team`: https://www.djangoproject.com/foundation/teams/#security-team -.. _`the technical board`: https://www.djangoproject.com/foundation/teams/#technical-board-team +.. _`the steering council`: https://www.djangoproject.com/foundation/teams/#steering-council-team .. _`triage & review team`: https://www.djangoproject.com/foundation/teams/#triage-review-team .. 
_organization-change: diff --git a/docs/ref/checks.txt b/docs/ref/checks.txt index f85c90059d..fa7c633487 100644 --- a/docs/ref/checks.txt +++ b/docs/ref/checks.txt @@ -196,6 +196,8 @@ Model fields * **fields.W161**: Fixed default value provided. * **fields.W162**: ```` does not support a database index on ```` columns. +* **fields.W163**: ```` does not support comments on columns + (``db_comment``). * **fields.E170**: ``BinaryField``’s ``default`` cannot be a string. Use bytes content instead. * **fields.E180**: ```` does not support ``JSONField``\s. @@ -315,6 +317,7 @@ Related fields the table name of ````/``.``. * **fields.W345**: ``related_name`` has no effect on ``ManyToManyField`` with a symmetrical relationship, e.g. to "self". +* **fields.W346**: ``db_comment`` has no effect on ``ManyToManyField``. Models ------ @@ -400,6 +403,8 @@ Models expressions. * **models.W045**: Check constraint ```` contains ``RawSQL()`` expression and won't be validated during the model ``full_clean()``. +* **models.W046**: ```` does not support comments on tables + (``db_table_comment``). Security -------- diff --git a/docs/ref/contrib/admin/index.txt b/docs/ref/contrib/admin/index.txt index 4da7a9691a..9303f17626 100644 --- a/docs/ref/contrib/admin/index.txt +++ b/docs/ref/contrib/admin/index.txt @@ -558,7 +558,7 @@ subclass:: @admin.display(description='Name') def upper_case_name(obj): - return ("%s %s" % (obj.first_name, obj.last_name)).upper() + return f"{obj.first_name} {obj.last_name}".upper() class PersonAdmin(admin.ModelAdmin): list_display = [upper_case_name] @@ -571,7 +571,7 @@ subclass:: @admin.display(description='Name') def upper_case_name(self, obj): - return ("%s %s" % (obj.first_name, obj.last_name)).upper() + return f"{obj.first_name} {obj.last_name}".upper() * A string representing a model attribute or method (without any required arguments). For example:: @@ -585,7 +585,8 @@ subclass:: @admin.display(description='Birth decade') def decade_born_in(self): - return '%d’s' % (self.birthday.year // 10 * 10) + decade = self.birthday.year // 10 * 10 + return f'{decade}’s' class PersonAdmin(admin.ModelAdmin): list_display = ['name', 'decade_born_in'] @@ -1564,7 +1565,8 @@ templates used by the :class:`ModelAdmin` views: The ``get_urls`` method on a ``ModelAdmin`` returns the URLs to be used for that ModelAdmin in the same way as a URLconf. Therefore you can extend - them as documented in :doc:`/topics/http/urls`:: + them as documented in :doc:`/topics/http/urls`, using the + ``AdminSite.admin_view()`` wrapper on your views:: from django.contrib import admin from django.template.response import TemplateResponse @@ -1574,7 +1576,7 @@ templates used by the :class:`ModelAdmin` views: def get_urls(self): urls = super().get_urls() my_urls = [ - path('my_view/', self.my_view), + path('my_view/', self.admin_site.admin_view(self.my_view)) ] return my_urls + urls @@ -1597,6 +1599,18 @@ templates used by the :class:`ModelAdmin` views: ... {% endblock %} + .. note:: + + Notice how the ``self.my_view`` function is wrapped in + ``self.admin_site.admin_view``. This is important, since it ensures two + things: + + #. Permission checks are run, ensuring only active staff users can + access the view. + #. The :func:`django.views.decorators.cache.never_cache` decorator is + applied to prevent caching, ensuring the returned information is + up-to-date. + .. 
note:: Notice that the custom patterns are included *before* the regular admin @@ -1608,36 +1622,6 @@ templates used by the :class:`ModelAdmin` views: ``/admin/myapp/mymodel/my_view/`` (assuming the admin URLs are included at ``/admin/``.) - However, the ``self.my_view`` function registered above suffers from two - problems: - - * It will *not* perform any permission checks, so it will be accessible - to the general public. - * It will *not* provide any header details to prevent caching. This means - if the page retrieves data from the database, and caching middleware is - active, the page could show outdated information. - - Since this is usually not what you want, Django provides a convenience - wrapper to check permissions and mark the view as non-cacheable. This - wrapper is ``AdminSite.admin_view()`` (i.e. ``self.admin_site.admin_view`` - inside a ``ModelAdmin`` instance); use it like so:: - - class MyModelAdmin(admin.ModelAdmin): - def get_urls(self): - urls = super().get_urls() - my_urls = [ - path('my_view/', self.admin_site.admin_view(self.my_view)) - ] - return my_urls + urls - - Notice the wrapped view in the fifth line above:: - - path('my_view/', self.admin_site.admin_view(self.my_view)) - - This wrapping will protect ``self.my_view`` from unauthorized access and - will apply the :func:`django.views.decorators.cache.never_cache` decorator to - make sure it is not cached if the cache middleware is active. - If the page is cacheable, but you still want the permission check to be performed, you can pass a ``cacheable=True`` argument to ``AdminSite.admin_view()``:: diff --git a/docs/ref/contrib/gis/db-api.txt b/docs/ref/contrib/gis/db-api.txt index bc216c7caf..97f1f6ca6b 100644 --- a/docs/ref/contrib/gis/db-api.txt +++ b/docs/ref/contrib/gis/db-api.txt @@ -316,6 +316,7 @@ Lookup Type PostGIS Oracle MariaDB MySQL [#]_ Sp :lookup:`equals` X X X X X C :lookup:`exact ` X X X X X B :lookup:`intersects` X X X X X B +:lookup:`isempty` X :lookup:`isvalid` X X X X :lookup:`overlaps` X X X X X B :lookup:`relate` X X X X C @@ -361,6 +362,7 @@ Function PostGIS Oracle MariaDB MySQL :class:`ForcePolygonCW` X X :class:`GeoHash` X X X (LWGEOM/RTTOPO) :class:`Intersection` X X X X X +:class:`IsEmpty` X :class:`IsValid` X X X X :class:`Length` X X X X X :class:`LineLocatePoint` X X diff --git a/docs/ref/contrib/gis/forms-api.txt b/docs/ref/contrib/gis/forms-api.txt index 0d7651f20e..9ac224a76e 100644 --- a/docs/ref/contrib/gis/forms-api.txt +++ b/docs/ref/contrib/gis/forms-api.txt @@ -10,7 +10,7 @@ display and edit geolocalized data on a map. By default, they use `OpenLayers`_-powered maps, with a base WMS layer provided by `NASA`_. .. _OpenLayers: https://openlayers.org/ -.. _NASA: https://earthdata.nasa.gov/ +.. 
_NASA: https://www.earthdata.nasa.gov/ Field arguments =============== diff --git a/docs/ref/contrib/gis/functions.txt b/docs/ref/contrib/gis/functions.txt index 00c48e665e..1c062515c6 100644 --- a/docs/ref/contrib/gis/functions.txt +++ b/docs/ref/contrib/gis/functions.txt @@ -23,11 +23,11 @@ Function's summary: ========================= ======================== ====================== ======================= ================== ===================== Measurement Relationships Operations Editors Output format Miscellaneous ========================= ======================== ====================== ======================= ================== ===================== -:class:`Area` :class:`Azimuth` :class:`Difference` :class:`ForcePolygonCW` :class:`AsGeoJSON` :class:`IsValid` -:class:`Distance` :class:`BoundingCircle` :class:`Intersection` :class:`MakeValid` :class:`AsGML` :class:`MemSize` -:class:`GeometryDistance` :class:`Centroid` :class:`SymDifference` :class:`Reverse` :class:`AsKML` :class:`NumGeometries` -:class:`Length` :class:`Envelope` :class:`Union` :class:`Scale` :class:`AsSVG` :class:`NumPoints` -:class:`Perimeter` :class:`LineLocatePoint` :class:`SnapToGrid` :class:`AsWKB` +:class:`Area` :class:`Azimuth` :class:`Difference` :class:`ForcePolygonCW` :class:`AsGeoJSON` :class:`IsEmpty` +:class:`Distance` :class:`BoundingCircle` :class:`Intersection` :class:`MakeValid` :class:`AsGML` :class:`IsValid` +:class:`GeometryDistance` :class:`Centroid` :class:`SymDifference` :class:`Reverse` :class:`AsKML` :class:`MemSize` +:class:`Length` :class:`Envelope` :class:`Union` :class:`Scale` :class:`AsSVG` :class:`NumGeometries` +:class:`Perimeter` :class:`LineLocatePoint` :class:`SnapToGrid` :class:`AsWKB` :class:`NumPoints` .. :class:`PointOnSurface` :class:`Transform` :class:`AsWKT` .. :class:`Translate` :class:`GeoHash` ========================= ======================== ====================== ======================= ================== ===================== @@ -368,6 +368,18 @@ it provides index-assisted nearest-neighbor result sets. Accepts two geographic fields or expressions and returns the geometric intersection between them. +``IsEmpty`` +=========== + +.. versionadded:: 4.2 + +.. class:: IsEmpty(expr) + +*Availability*: `PostGIS `__ + +Accepts a geographic field or expression and tests if the value is an empty +geometry. Returns ``True`` if its value is empty and ``False`` otherwise. + ``IsValid`` =========== diff --git a/docs/ref/contrib/gis/geoquerysets.txt b/docs/ref/contrib/gis/geoquerysets.txt index 760d508b17..53a8b6c3b9 100644 --- a/docs/ref/contrib/gis/geoquerysets.txt +++ b/docs/ref/contrib/gis/geoquerysets.txt @@ -346,6 +346,21 @@ MySQL ``ST_Intersects(poly, geom)`` SpatiaLite ``Intersects(poly, geom)`` ========== ================================================= +.. fieldlookup:: isempty + +``isempty`` +----------- + +.. versionadded:: 4.2 + +*Availability*: `PostGIS `__ + +Tests if the geometry is empty. + +Example:: + + Zipcode.objects.filter(poly__isempty=True) + .. 
fieldlookup:: isvalid ``isvalid`` diff --git a/docs/ref/contrib/gis/install/geolibs.txt b/docs/ref/contrib/gis/install/geolibs.txt index c7a8922b49..2e48bcf74f 100644 --- a/docs/ref/contrib/gis/install/geolibs.txt +++ b/docs/ref/contrib/gis/install/geolibs.txt @@ -5,16 +5,16 @@ Installing Geospatial libraries GeoDjango uses and/or provides interfaces for the following open source geospatial libraries: -======================== ==================================== ================================ =========================================== +======================== ==================================== ================================ ================================================ Program Description Required Supported Versions -======================== ==================================== ================================ =========================================== +======================== ==================================== ================================ ================================================ :doc:`GEOS <../geos>` Geometry Engine Open Source Yes 3.11, 3.10, 3.9, 3.8, 3.7, 3.6 `PROJ`_ Cartographic Projections library Yes (PostgreSQL and SQLite only) 9.x, 8.x, 7.x, 6.x, 5.x, 4.x -:doc:`GDAL <../gdal>` Geospatial Data Abstraction Library Yes 3.5, 3.4, 3.3, 3.2, 3.1, 3.0, 2.4, 2.3, 2.2 +:doc:`GDAL <../gdal>` Geospatial Data Abstraction Library Yes 3.6, 3.5, 3.4, 3.3, 3.2, 3.1, 3.0, 2.4, 2.3, 2.2 :doc:`GeoIP <../geoip2>` IP-based geolocation library No 2 `PostGIS`__ Spatial extensions for PostgreSQL Yes (PostgreSQL only) 3.3, 3.2, 3.1, 3.0, 2.5 `SpatiaLite`__ Spatial extensions for SQLite Yes (SQLite only) 5.0, 4.3 -======================== ==================================== ================================ =========================================== +======================== ==================================== ================================ ================================================ Note that older or more recent versions of these libraries *may* also work totally fine with GeoDjango. Your mileage may vary. @@ -36,6 +36,7 @@ totally fine with GeoDjango. Your mileage may vary. GDAL 3.3.0 2021-05-03 GDAL 3.4.0 2021-11-04 GDAL 3.5.0 2022-05-13 + GDAL 3.6.0 2022-11-03 PostGIS 2.5.0 2018-09-23 PostGIS 3.0.0 2019-10-20 PostGIS 3.1.0 2020-12-18 @@ -219,27 +220,25 @@ First download the latest GDAL release version and untar the archive:: $ wget https://download.osgeo.org/gdal/X.Y.Z/gdal-X.Y.Z.tar.gz $ tar xzf gdal-X.Y.Z.tar.gz + +For GDAL 3.6.x and greater, releases only support builds using ``CMake``. To +build with ``CMake`` create a ``build`` folder in the GDAL directory, and step +into it:: + $ cd gdal-X.Y.Z + $ mkdir build + $ cd build -Configure, make and install:: +Finally, configure, make and install GDAL:: - $ ./configure - $ make # Go get some coffee, this takes a while. - $ sudo make install - $ cd .. - -.. note:: - - Because GeoDjango has its own Python interface, the preceding instructions - do not build GDAL's own Python bindings. The bindings may be built by - adding the ``--with-python`` flag when running ``configure``. See - `GDAL/OGR In Python`__ for more information on GDAL's bindings. + $ cmake .. + $ cmake --build . + $ sudo cmake --build . --target install If you have any problems, please see the troubleshooting section below for suggestions and solutions. __ https://gdal.org/ -__ https://gdal.org/api/python.html .. 
_gdaltrouble: diff --git a/docs/ref/contrib/gis/install/index.txt b/docs/ref/contrib/gis/install/index.txt index 629a070bb8..026c64ccd1 100644 --- a/docs/ref/contrib/gis/install/index.txt +++ b/docs/ref/contrib/gis/install/index.txt @@ -61,7 +61,7 @@ Database Library Requirements Supported Versions Notes PostgreSQL GEOS, GDAL, PROJ, PostGIS 12+ Requires PostGIS. MySQL GEOS, GDAL 8+ :ref:`Limited functionality `. Oracle GEOS, GDAL 19+ XE not supported. -SQLite GEOS, GDAL, PROJ, SpatiaLite 3.9.0+ Requires SpatiaLite 4.3+ +SQLite GEOS, GDAL, PROJ, SpatiaLite 3.21.0+ Requires SpatiaLite 4.3+ ================== ============================== ================== ========================================= See also `this comparison matrix`__ on the OSGeo Wiki for @@ -197,7 +197,6 @@ several different options for installing GeoDjango. These options are: * :ref:`postgresapp` (easiest and recommended) * :ref:`homebrew` -* :ref:`kyngchaos` * :ref:`fink` * :ref:`macports` * :ref:`build_from_source` @@ -266,63 +265,6 @@ Summary:: __ https://brew.sh/ .. _Xcode: https://developer.apple.com/xcode/ -.. _kyngchaos: - -KyngChaos packages -~~~~~~~~~~~~~~~~~~ - -William Kyngesburye provides a number of `geospatial library binary packages`__ -that help to get GeoDjango installed on macOS without compiling them from -source. However, `Xcode`_ is still necessary for compiling the Python database -adapters :ref:`psycopg2_kyngchaos` (for PostGIS). - -.. note:: - - SpatiaLite users should consult the :ref:`spatialite_macos` section - after installing the packages for additional instructions. - -Download the framework packages for: - -* UnixImageIO -* PROJ -* GEOS -* SQLite3 (includes the SpatiaLite library) -* GDAL - -Install the packages in the order they are listed above, as the GDAL and SQLite -packages require the packages listed before them. - -Afterward, you can also install the KyngChaos binary packages for `PostgreSQL -and PostGIS`__. - -After installing the binary packages, you'll want to add the following to -your ``.profile`` to be able to run the package programs from the command-line:: - - export PATH=/Library/Frameworks/UnixImageIO.framework/Programs:$PATH - export PATH=/Library/Frameworks/PROJ.framework/Programs:$PATH - export PATH=/Library/Frameworks/GEOS.framework/Programs:$PATH - export PATH=/Library/Frameworks/SQLite3.framework/Programs:$PATH - export PATH=/Library/Frameworks/GDAL.framework/Programs:$PATH - export PATH=/usr/local/pgsql/bin:$PATH - -__ https://www.kyngchaos.com/software/frameworks -__ https://www.kyngchaos.com/software/postgres - -.. _psycopg2_kyngchaos: - -psycopg2 -^^^^^^^^ - -After you've installed the KyngChaos binaries and modified your ``PATH``, as -described above, ``psycopg2`` may be installed using the following command:: - - $ python -m pip install psycopg2 - -.. note:: - - If you don't have ``pip``, follow the :ref:`installation instructions - ` to install it. - .. _fink: Fink @@ -487,14 +429,14 @@ Install Django and set up database recommended that you create a :doc:`virtual environment ` for each project you create. -psycopg2 -~~~~~~~~ +psycopg +~~~~~~~ -The ``psycopg2`` Python module provides the interface between Python and the -PostgreSQL database. ``psycopg2`` can be installed via pip within your Python +The ``psycopg`` Python module provides the interface between Python and the +PostgreSQL database. ``psycopg`` can be installed via pip within your Python virtual environment:: - ...\> py -m pip install psycopg2 + ...\> py -m pip install psycopg .. 
rubric:: Footnotes .. [#] GeoDjango uses the :func:`~ctypes.util.find_library` routine from diff --git a/docs/ref/contrib/gis/install/postgis.txt b/docs/ref/contrib/gis/install/postgis.txt index 436e0979d2..e469ede4d0 100644 --- a/docs/ref/contrib/gis/install/postgis.txt +++ b/docs/ref/contrib/gis/install/postgis.txt @@ -7,20 +7,25 @@ into a spatial database. :ref:`geosbuild`, :ref:`proj4` and :ref:`gdalbuild` should be installed prior to building PostGIS. You might also need additional libraries, see `PostGIS requirements`_. -The `psycopg2`_ module is required for use as the database adapter when using -GeoDjango with PostGIS. +The `psycopg`_ or `psycopg2`_ module is required for use as the database +adapter when using GeoDjango with PostGIS. On Debian/Ubuntu, you are advised to install the following packages: -``postgresql-x.x``, ``postgresql-x.x-postgis``, ``postgresql-server-dev-x.x``, -and ``python-psycopg2`` (x.x matching the PostgreSQL version you want to +``postgresql-x``, ``postgresql-x-postgis-3``, ``postgresql-server-dev-x``, +and ``python3-psycopg3`` (x matching the PostgreSQL version you want to install). Alternately, you can `build from source`_. Consult the platform-specific instructions if you are on :ref:`macos` or :ref:`windows`. .. _PostGIS: https://postgis.net/ +.. _psycopg: https://www.psycopg.org/psycopg3/ .. _psycopg2: https://www.psycopg.org/ .. _PostGIS requirements: https://postgis.net/docs/postgis_installation.html#install_requirements .. _build from source: https://postgis.net/docs/postgis_installation.html#install_short_version +.. versionchanged:: 4.2 + + Support for ``psycopg`` 3.1+ was added. + Post-installation ================= diff --git a/docs/ref/contrib/gis/install/spatialite.txt b/docs/ref/contrib/gis/install/spatialite.txt index 3d9493352c..c6326d1e0d 100644 --- a/docs/ref/contrib/gis/install/spatialite.txt +++ b/docs/ref/contrib/gis/install/spatialite.txt @@ -87,30 +87,7 @@ __ https://www.gaia-gis.it/gaia-sins/libspatialite-sources/ macOS-specific instructions ============================== -To install the SpatiaLite library and tools, macOS users can choose between -:ref:`kyngchaos` and `Homebrew`_. - -KyngChaos ---------- - -First, follow the instructions in the :ref:`kyngchaos` section. - -When creating a SpatiaLite database, the ``spatialite`` program is required. -However, instead of attempting to compile the SpatiaLite tools from source, -download the `SpatiaLite Tools`__ package for macOS, and install ``spatialite`` -in a location available in your ``PATH``. For example:: - - $ curl -O https://www.kyngchaos.com/files/software/frameworks/Spatialite_Tools-4.3.zip - $ unzip Spatialite_Tools-4.3.zip - $ cd Spatialite\ Tools/tools - $ sudo cp spatialite /Library/Frameworks/SQLite3.framework/Programs - -Finally, for GeoDjango to be able to find the KyngChaos SpatiaLite library, -add the following to your ``settings.py``:: - - SPATIALITE_LIBRARY_PATH='/Library/Frameworks/SQLite3.framework/SQLite3' - -__ https://www.kyngchaos.com/software/frameworks/ +To install the SpatiaLite library and tools, macOS users can use `Homebrew`_. 
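Once the libraries are installed via Homebrew, GeoDjango sometimes still needs
to be told where the SpatiaLite extension lives. A minimal sketch of the
relevant settings follows -- the library path assumes an Apple-silicon Homebrew
prefix and should be adjusted to your installation::

    # settings.py -- illustrative values only.
    SPATIALITE_LIBRARY_PATH = "/opt/homebrew/lib/mod_spatialite.dylib"

    DATABASES = {
        "default": {
            "ENGINE": "django.contrib.gis.db.backends.spatialite",
            # BASE_DIR as defined in the default settings template.
            "NAME": BASE_DIR / "db.sqlite3",
        },
    }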
Homebrew -------- diff --git a/docs/ref/contrib/postgres/aggregates.txt b/docs/ref/contrib/postgres/aggregates.txt index 285e86f5d5..55ec952fa2 100644 --- a/docs/ref/contrib/postgres/aggregates.txt +++ b/docs/ref/contrib/postgres/aggregates.txt @@ -157,7 +157,7 @@ General-purpose aggregation functions class Room(models.Model): number = models.IntegerField(unique=True) - class HotelReservation(model.Model): + class HotelReservation(models.Model): room = models.ForeignKey('Room', on_delete=models.CASCADE) start = models.DateTimeField() end = models.DateTimeField() @@ -208,6 +208,32 @@ General-purpose aggregation functions Examples are the same as for :attr:`ArrayAgg.ordering`. + Usage example:: + + class Publication(models.Model): + title = models.CharField(max_length=30) + + class Article(models.Model): + headline = models.CharField(max_length=100) + publications = models.ManyToManyField(Publication) + + >>> article = Article.objects.create(headline="NASA uses Python") + >>> article.publications.create(title="The Python Journal") + + >>> article.publications.create(title="Science News") + + >>> from django.contrib.postgres.aggregates import StringAgg + >>> Article.objects.annotate( + ... publication_names=StringAgg( + ... "publications__title", + ... delimiter=", ", + ... ordering="publications__title", + ... ) + ... ).values("headline", "publication_names") + + .. deprecated:: 4.0 If there are no rows and ``default`` is not provided, ``StringAgg`` diff --git a/docs/ref/contrib/postgres/fields.txt b/docs/ref/contrib/postgres/fields.txt index 29dbc0db85..34ad06a09a 100644 --- a/docs/ref/contrib/postgres/fields.txt +++ b/docs/ref/contrib/postgres/fields.txt @@ -170,7 +170,7 @@ Returns objects where the data shares any results with the values passed. Uses the SQL operator ``&&``. For example:: >>> Post.objects.create(name='First post', tags=['thoughts', 'django']) - >>> Post.objects.create(name='Second post', tags=['thoughts']) + >>> Post.objects.create(name='Second post', tags=['thoughts', 'tutorial']) >>> Post.objects.create(name='Third post', tags=['tutorial', 'django']) >>> Post.objects.filter(tags__overlap=['thoughts']) @@ -179,6 +179,14 @@ the SQL operator ``&&``. For example:: >>> Post.objects.filter(tags__overlap=['thoughts', 'tutorial']) , , ]> + >>> Post.objects.filter(tags__overlap=Post.objects.values_list('tags')) + , , ]> + +.. versionchanged:: 4.2 + + Support for ``QuerySet.values()`` and ``values_list()`` as a right-hand + side was added. + .. fieldlookup:: arrayfield.len ``len`` @@ -307,7 +315,7 @@ transform do not change. For example:: similar to PostgreSQL's ``text`` type. .. _citext: https://www.postgresql.org/docs/current/citext.html - .. _the performance considerations: https://www.postgresql.org/docs/current/citext.html#id-1.11.7.17.7 + .. _the performance considerations: https://www.postgresql.org/docs/current/citext.html#id-1.11.7.19.9 .. admonition:: Case-insensitive collations @@ -530,8 +538,8 @@ PostgreSQL. These fields are used to store a range of values; for example the start and end timestamps of an event, or the range of ages an activity is suitable for. -All of the range fields translate to :ref:`psycopg2 Range objects -` in Python, but also accept tuples as input if no bounds +All of the range fields translate to :ref:`psycopg Range objects +` in Python, but also accept tuples as input if no bounds information is necessary. 
The default is lower bound included, upper bound excluded, that is ``[)`` (see the PostgreSQL documentation for details about `different bounds`_). The default bounds can be changed for non-discrete range @@ -545,8 +553,8 @@ the ``default_bounds`` argument. Stores a range of integers. Based on an :class:`~django.db.models.IntegerField`. Represented by an ``int4range`` in - the database and a :class:`~psycopg2:psycopg2.extras.NumericRange` in - Python. + the database and a + ``django.db.backends.postgresql.psycopg_any.NumericRange`` in Python. Regardless of the bounds specified when saving the data, PostgreSQL always returns a range in a canonical form that includes the lower bound and @@ -559,8 +567,8 @@ the ``default_bounds`` argument. Stores a range of large integers. Based on a :class:`~django.db.models.BigIntegerField`. Represented by an ``int8range`` - in the database and a :class:`~psycopg2:psycopg2.extras.NumericRange` in - Python. + in the database and a + ``django.db.backends.postgresql.psycopg_any.NumericRange`` in Python. Regardless of the bounds specified when saving the data, PostgreSQL always returns a range in a canonical form that includes the lower bound and @@ -573,8 +581,8 @@ the ``default_bounds`` argument. Stores a range of floating point values. Based on a :class:`~django.db.models.DecimalField`. Represented by a ``numrange`` in - the database and a :class:`~psycopg2:psycopg2.extras.NumericRange` in - Python. + the database and a + ``django.db.backends.postgresql.psycopg_any.NumericRange`` in Python. .. attribute:: DecimalRangeField.default_bounds @@ -584,7 +592,7 @@ the ``default_bounds`` argument. default is lower bound included, upper bound excluded, that is ``[)`` (see the PostgreSQL documentation for details about `different bounds`_). ``default_bounds`` is not used for - :class:`~psycopg2:psycopg2.extras.NumericRange` inputs. + ``django.db.backends.postgresql.psycopg_any.NumericRange`` inputs. ``DateTimeRangeField`` ---------------------- @@ -593,8 +601,8 @@ the ``default_bounds`` argument. Stores a range of timestamps. Based on a :class:`~django.db.models.DateTimeField`. Represented by a ``tstzrange`` in - the database and a :class:`~psycopg2:psycopg2.extras.DateTimeTZRange` in - Python. + the database and a + ``django.db.backends.postgresql.psycopg_any.DateTimeTZRange`` in Python. .. attribute:: DateTimeRangeField.default_bounds @@ -604,7 +612,7 @@ the ``default_bounds`` argument. default is lower bound included, upper bound excluded, that is ``[)`` (see the PostgreSQL documentation for details about `different bounds`_). ``default_bounds`` is not used for - :class:`~psycopg2:psycopg2.extras.DateTimeTZRange` inputs. + ``django.db.backends.postgresql.psycopg_any.DateTimeTZRange`` inputs. ``DateRangeField`` ------------------ @@ -613,7 +621,8 @@ the ``default_bounds`` argument. Stores a range of dates. Based on a :class:`~django.db.models.DateField`. Represented by a ``daterange`` in the - database and a :class:`~psycopg2:psycopg2.extras.DateRange` in Python. + database and a ``django.db.backends.postgresql.psycopg_any.DateRange`` in + Python. 
Regardless of the bounds specified when saving the data, PostgreSQL always returns a range in a canonical form that includes the lower bound and @@ -647,7 +656,7 @@ We will also use the following example objects:: and ``NumericRange``: - >>> from psycopg2.extras import NumericRange + >>> from django.db.backends.postgresql.psycopg_any import NumericRange Containment functions ~~~~~~~~~~~~~~~~~~~~~ @@ -682,7 +691,7 @@ The ``contained_by`` lookup is also available on the non-range field types: :class:`~django.db.models.DateField`, and :class:`~django.db.models.DateTimeField`. For example:: - >>> from psycopg2.extras import DateTimeTZRange + >>> from django.db.backends.postgresql.psycopg_any import DateTimeTZRange >>> Event.objects.filter( ... start__contained_by=DateTimeTZRange( ... timezone.now() - datetime.timedelta(hours=1), @@ -856,9 +865,9 @@ Defining your own range types ----------------------------- PostgreSQL allows the definition of custom range types. Django's model and form -field implementations use base classes below, and psycopg2 provides a -:func:`~psycopg2:psycopg2.extras.register_range` to allow use of custom range -types. +field implementations use base classes below, and ``psycopg`` provides a +:func:`~psycopg:psycopg.types.range.register_range` to allow use of custom +range types. .. class:: RangeField(**options) @@ -870,7 +879,7 @@ types. .. attribute:: range_type - The psycopg2 range type to use. + The range type to use. .. attribute:: form_field @@ -887,7 +896,7 @@ types. .. attribute:: range_type - The psycopg2 range type to use. + The range type to use. Range operators --------------- diff --git a/docs/ref/contrib/postgres/forms.txt b/docs/ref/contrib/postgres/forms.txt index e5d597655f..8f9dd449d1 100644 --- a/docs/ref/contrib/postgres/forms.txt +++ b/docs/ref/contrib/postgres/forms.txt @@ -173,7 +173,7 @@ not greater than the upper bound. All of these fields use .. class:: IntegerRangeField Based on :class:`~django.forms.IntegerField` and translates its input into - :class:`~psycopg2:psycopg2.extras.NumericRange`. Default for + ``django.db.backends.postgresql.psycopg_any.NumericRange``. Default for :class:`~django.contrib.postgres.fields.IntegerRangeField` and :class:`~django.contrib.postgres.fields.BigIntegerRangeField`. @@ -183,7 +183,7 @@ not greater than the upper bound. All of these fields use .. class:: DecimalRangeField Based on :class:`~django.forms.DecimalField` and translates its input into - :class:`~psycopg2:psycopg2.extras.NumericRange`. Default for + ``django.db.backends.postgresql.psycopg_any.NumericRange``. Default for :class:`~django.contrib.postgres.fields.DecimalRangeField`. ``DateTimeRangeField`` @@ -192,7 +192,7 @@ not greater than the upper bound. All of these fields use .. class:: DateTimeRangeField Based on :class:`~django.forms.DateTimeField` and translates its input into - :class:`~psycopg2:psycopg2.extras.DateTimeTZRange`. Default for + ``django.db.backends.postgresql.psycopg_any.DateTimeTZRange``. Default for :class:`~django.contrib.postgres.fields.DateTimeRangeField`. ``DateRangeField`` @@ -201,7 +201,7 @@ not greater than the upper bound. All of these fields use .. class:: DateRangeField Based on :class:`~django.forms.DateField` and translates its input into - :class:`~psycopg2:psycopg2.extras.DateRange`. Default for + ``django.db.backends.postgresql.psycopg_any.DateRange``. Default for :class:`~django.contrib.postgres.fields.DateRangeField`. 
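To make the translation concrete, here is a small sketch of a range form field
in use -- the form and field names are illustrative, not part of the documented
API::

    from django import forms
    from django.contrib.postgres.forms import IntegerRangeField


    class AgeSpanForm(forms.Form):
        ages = IntegerRangeField()


    # The two subwidgets of the range widget post back as <name>_0 and <name>_1.
    form = AgeSpanForm({"ages_0": "21", "ages_1": "40"})
    assert form.is_valid()
    form.cleaned_data["ages"]
    # -> NumericRange(21, 40, '[)'), an instance of
    # django.db.backends.postgresql.psycopg_any.NumericRange regardless of
    # whether psycopg 3 or psycopg2 is installed (the exact repr may vary).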
Widgets diff --git a/docs/ref/contrib/sitemaps.txt b/docs/ref/contrib/sitemaps.txt index d3225405a3..7dc3dced51 100644 --- a/docs/ref/contrib/sitemaps.txt +++ b/docs/ref/contrib/sitemaps.txt @@ -311,6 +311,15 @@ Note: The latest ``lastmod`` returned by calling the method with all items returned by :meth:`Sitemap.items`. + .. method:: Sitemap.get_languages_for_item(item, lang_code) + + .. versionadded:: 4.2 + + **Optional.** A method that returns the sequence of language codes for + which the item is displayed. By default + :meth:`~Sitemap.get_languages_for_item` returns + :attr:`~Sitemap.languages`. + Shortcuts ========= diff --git a/docs/ref/contrib/staticfiles.txt b/docs/ref/contrib/staticfiles.txt index 152838736c..7ca3584c33 100644 --- a/docs/ref/contrib/staticfiles.txt +++ b/docs/ref/contrib/staticfiles.txt @@ -295,6 +295,8 @@ method). The regular expressions used to find those paths * The `@import`_ rule and `url()`_ statement of `Cascading Style Sheets`_. * `Source map`_ comments in CSS and JavaScript files. +* The `modules import`_ in JavaScript. +* The `modules aggregation`_ in JavaScript. For example, the ``'css/styles.css'`` file with this content: @@ -329,6 +331,19 @@ argument. For example:: Support for finding paths in CSS source map comments was added. +.. versionchanged:: 4.2 + + Support for finding paths to JavaScript modules in ``import`` and + ``export`` statements was added. + +.. attribute:: storage.ManifestStaticFilesStorage.manifest_hash + +.. versionadded:: 4.2 + +This attribute provides a single hash that changes whenever a file in the +manifest changes. This can be useful to communicate to SPAs that the assets on +the server have changed (due to a new deployment). + .. attribute:: storage.ManifestStaticFilesStorage.max_post_process_passes Since static files might reference other static files that need to have their @@ -382,6 +397,8 @@ hashing algorithm. .. _`url()`: https://www.w3.org/TR/CSS2/syndata.html#uri .. _`Cascading Style Sheets`: https://www.w3.org/Style/CSS/ .. _`source map`: https://developer.mozilla.org/en-US/docs/Tools/Debugger/How_to/Use_a_source_map +.. _`modules import`: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules#importing_features_into_your_script +.. _`modules aggregation`: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules#aggregating_modules ``ManifestFilesMixin`` ---------------------- diff --git a/docs/ref/csrf.txt b/docs/ref/csrf.txt index a4712c29f0..60522dd2a7 100644 --- a/docs/ref/csrf.txt +++ b/docs/ref/csrf.txt @@ -14,7 +14,7 @@ who visits the malicious site in their browser. A related type of attack, a site with someone else's credentials, is also covered. The first defense against CSRF attacks is to ensure that GET requests (and other -'safe' methods, as defined by :rfc:`7231#section-4.2.1`) are side effect free. +'safe' methods, as defined by :rfc:`9110#section-9.2.1`) are side effect free. Requests via 'unsafe' methods, such as POST, PUT, and DELETE, can then be protected by the steps outlined in :ref:`using-csrf`. @@ -90,9 +90,9 @@ This ensures that only forms that have originated from trusted domains can be used to POST data back. It deliberately ignores GET requests (and other requests that are defined as -'safe' by :rfc:`7231#section-4.2.1`). These requests ought never to have any +'safe' by :rfc:`9110#section-9.2.1`). These requests ought never to have any potentially dangerous side effects, and so a CSRF attack with a GET request -ought to be harmless. 
:rfc:`7231#section-4.2.1` defines POST, PUT, and DELETE +ought to be harmless. :rfc:`9110#section-9.2.1` defines POST, PUT, and DELETE as 'unsafe', and all other methods are also assumed to be unsafe, for maximum protection. @@ -115,7 +115,7 @@ vulnerability allows and much worse). alternatives like ``"`` for links to third-party sites. -.. _BREACH: http://breachattack.com/ +.. _BREACH: https://www.breachattack.com/ .. _Origin header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Origin .. _disable the referer: https://www.w3.org/TR/referrer-policy/#referrer-policy-delivery diff --git a/docs/ref/databases.txt b/docs/ref/databases.txt index 4b86e24795..d62adbe832 100644 --- a/docs/ref/databases.txt +++ b/docs/ref/databases.txt @@ -114,11 +114,21 @@ below for information on how to set up your database correctly. PostgreSQL notes ================ -Django supports PostgreSQL 12 and higher. `psycopg2`_ 2.8.4 or higher is -required, though the latest release is recommended. +Django supports PostgreSQL 12 and higher. `psycopg`_ 3.1+ or `psycopg2`_ 2.8.4+ +is required, though the latest `psycopg`_ 3.1+ is recommended. +.. _psycopg: https://www.psycopg.org/psycopg3/ .. _psycopg2: https://www.psycopg.org/ +.. note:: + + Support for ``psycopg2`` is likely to be deprecated and removed at some + point in the future. + +.. versionchanged:: 4.2 + + Support for ``psycopg`` 3.1+ was added. + .. _postgresql-connection-settings: PostgreSQL connection settings @@ -199,12 +209,12 @@ level`_. If you need a higher isolation level such as ``REPEATABLE READ`` or ``SERIALIZABLE``, set it in the :setting:`OPTIONS` part of your database configuration in :setting:`DATABASES`:: - import psycopg2.extensions + from django.db.backends.postgresql.psycopg_any import IsolationLevel DATABASES = { # ... 'OPTIONS': { - 'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE, + 'isolation_level': IsolationLevel.SERIALIZABLE, }, } @@ -216,6 +226,10 @@ configuration in :setting:`DATABASES`:: .. _isolation level: https://www.postgresql.org/docs/current/transaction-iso.html +.. versionchanged:: 4.2 + + ``IsolationLevel`` was added. + Indexes for ``varchar`` and ``text`` columns -------------------------------------------- @@ -244,7 +258,7 @@ Server-side cursors When using :meth:`QuerySet.iterator() `, Django opens a :ref:`server-side -cursor `. By default, PostgreSQL assumes that +cursor `. By default, PostgreSQL assumes that only the first 10% of the results of cursor queries will be fetched. The query planner spends less time planning the query and starts returning results faster, but this could diminish performance if more than 10% of the results are @@ -730,7 +744,7 @@ appropriate typecasting. SQLite notes ============ -Django supports SQLite 3.9.0 and later. +Django supports SQLite 3.21.0 and later. SQLite_ provides an excellent development alternative for applications that are predominantly read-only or require a smaller installation footprint. As @@ -819,14 +833,6 @@ If you're getting this error, you can solve it by: SQLite does not support the ``SELECT ... FOR UPDATE`` syntax. Calling it will have no effect. -"pyformat" parameter style in raw queries not supported -------------------------------------------------------- - -For most backends, raw queries (``Manager.raw()`` or ``cursor.execute()``) -can use the "pyformat" parameter style, where placeholders in the query -are given as ``'%(name)s'`` and the parameters are passed as a dictionary -rather than a list. SQLite does not support this. - .. 
_sqlite-isolation: Isolation when using ``QuerySet.iterator()`` @@ -853,6 +859,10 @@ raised. To enable the JSON1 extension you can follow the instruction on `the wiki page`_. +.. note:: + + The JSON1 extension is enabled by default on SQLite 3.38+. + .. _JSON1 extension: https://www.sqlite.org/json1.html .. _the wiki page: https://code.djangoproject.com/wiki/JSON1Extension diff --git a/docs/ref/exceptions.txt b/docs/ref/exceptions.txt index 2b567414e6..b588d0ee81 100644 --- a/docs/ref/exceptions.txt +++ b/docs/ref/exceptions.txt @@ -42,6 +42,17 @@ Django core exception classes are defined in ``django.core.exceptions``. return any results. Most Django projects won't encounter this exception, but it might be useful for implementing custom lookups and expressions. +``FullResultSet`` +----------------- + +.. exception:: FullResultSet + +.. versionadded:: 4.2 + + ``FullResultSet`` may be raised during query generation if a query will + match everything. Most Django projects won't encounter this exception, but + it might be useful for implementing custom lookups and expressions. + ``FieldDoesNotExist`` --------------------- diff --git a/docs/ref/forms/api.txt b/docs/ref/forms/api.txt index 0d526d26aa..6d296c537e 100644 --- a/docs/ref/forms/api.txt +++ b/docs/ref/forms/api.txt @@ -342,23 +342,23 @@ attribute:: >>> f.fields['name'] -You can alter the field of :class:`Form` instance to change the way it is -presented in the form:: +You can alter the field and :class:`.BoundField` of :class:`Form` instance to +change the way it is presented in the form:: - >>> f.as_table().split('\n')[0] - '
<tr>…</tr>'
-    >>> f.fields['name'].label = "Username"
-    >>> f.as_table().split('\n')[0]
-    '<tr>…</tr>'
+    >>> f.as_div().split("</div>")[0]
+    '<div>…'
+    >>> f["subject"].label = "Topic"
+    >>> f.as_div().split("</div>")[0]
+    '<div>…'

 Beware not to alter the ``base_fields`` attribute because this modification
 will influence all subsequent ``ContactForm`` instances within the same Python
 process::

-    >>> f.base_fields['name'].label = "Username"
+    >>> f.base_fields["subject"].label_suffix = "?"
     >>> another_f = CommentForm(auto_id=False)
-    >>> another_f.as_table().split('\n')[0]
-    '<tr>…</tr>'
+    >>> f.as_div().split("</div>")[0]
+    '<div>…'

 Accessing "clean" data
 ======================

@@ -782,42 +782,22 @@ If ``auto_id`` is ``False``, then the form output will not include ``<label>``
 tags nor ``id`` attributes::

-    … (four removed lines of rendered output)
-    >>> print(f.as_ul())
-    … (<li> items: Subject, Message, Sender, Cc myself)
-    >>> print(f.as_p())
-    … (<p> paragraphs: Subject, Message, Sender, Cc myself)
+    >>> print(f.as_div())
+    … (<div> blocks: Subject, Message, Sender, Cc myself)

 If ``auto_id`` is set to ``True``, then the form output *will* include
 ``<label>`` tags::

-    … (four removed lines of rendered output)
-    >>> print(f.as_ul())
-    … (four <li> items)
-    >>> print(f.as_p())
-    … (four <p> paragraphs)
+    >>> print(f.as_div())
+    … (four <div> blocks)

 If ``auto_id`` is set to a string containing the format character ``'%s'``,
 then the form output will include ``<label>`` tags, with ``id`` attributes
 generated from the format string::

-    … (four removed lines of rendered output)
-    >>> print(f.as_ul())
-    … (four <li> items)
-    >>> print(f.as_p())
-    … (four <p> paragraphs)
+    >>> print(f.as_div())
+    … (four <div> blocks)

 If ``auto_id`` is set to any other true value -- such as a string that doesn't
 include ``%s`` -- then the library will act as if ``auto_id`` is ``True``.

@@ -856,17 +826,17 @@ It's possible to customize that character, or omit it entirely, using the
 ``label_suffix`` parameter::

     >>> f = ContactForm(auto_id='id_for_%s', label_suffix='')
-    >>> print(f.as_ul())
-    … (four <li> items)
+    >>> print(f.as_div())
+    … (four <div> blocks)
     >>> f = ContactForm(auto_id='id_for_%s', label_suffix=' ->')
-    >>> print(f.as_ul())
-    … (four <li> items)
+    >>> print(f.as_div())
+    … (four <div> blocks)

 Note that the label suffix is added only if the last character of the label
 isn't a punctuation character (in English, those are ``.``, ``!``, ``?``

@@ -953,20 +923,25 @@ method you're using::

     ...     'sender': 'invalid email address',
     ...     'cc_myself': True}
     >>> f = ContactForm(data, auto_id=False)
+    >>> print(f.as_div())
+    … (<div> output for Subject, Message, Sender, and Cc myself, with the
+    errors "This field is required." on Subject and "Enter a valid email
+    address." on Sender)
     >>> print(f.as_table())
     … (<tr> output with the same errors; one row updated in this hunk)
     >>> print(f.as_ul())
     … (<li> output with the same errors; the Message item updated in this hunk)
     >>> print(f.as_p())
     … (<p> output with the same errors; the Message paragraph updated in this
     hunk)

@@ -1466,12 +1441,12 @@ fields are ordered first::

     >>> class ContactFormWithPriority(ContactForm):
     ...     priority = forms.CharField()
     >>> f = ContactFormWithPriority(auto_id=False)
-    >>> print(f.as_ul())
-    … (<li> items: Subject, Message, Sender, Cc myself, Priority)
+    >>> print(f.as_div())
+    … (<div> blocks: Subject, Message, Sender, Cc myself, Priority)

 It's possible to subclass multiple forms, treating forms as mixins. In this
 example, ``BeatleForm`` subclasses both ``PersonForm`` and ``InstrumentForm``

@@ -1487,11 +1462,11 @@ classes::

     >>> class BeatleForm(InstrumentForm, PersonForm):
     ...     haircut_type = forms.CharField()
     >>> b = BeatleForm(auto_id=False)
-    >>> print(b.as_ul())
-    … (<li> items: First name, Last name, Instrument, Haircut type)
+    >>> print(b.as_div())
+    … (<div> blocks: First name, Last name, Instrument, Haircut type)

 It's possible to declaratively remove a ``Field`` inherited from a parent class
 by setting the name of the field to ``None`` on the subclass. For example::

@@ -1520,12 +1495,12 @@ You can put several Django forms inside one ``<form>`` tag. To give each

     >>> mother = PersonForm(prefix="mother")
     >>> father = PersonForm(prefix="father")
-    >>> print(mother.as_ul())
-    … (<li> items)
-    >>> print(father.as_ul())
-    … (<li> items)
+    >>> print(mother.as_div())
+    … (<div> blocks)
+    >>> print(father.as_div())
+    … (<div> blocks)
    The prefix can also be specified on the form class:: diff --git a/docs/ref/logging.txt b/docs/ref/logging.txt index ba443a0a36..edd9f21ecb 100644 --- a/docs/ref/logging.txt +++ b/docs/ref/logging.txt @@ -196,9 +196,13 @@ For performance reasons, SQL logging is only enabled when level or handlers that are installed. This logging does not include framework-level initialization (e.g. -``SET TIMEZONE``) or transaction management queries (e.g. ``BEGIN``, -``COMMIT``, and ``ROLLBACK``). Turn on query logging in your database if you -wish to view all database queries. +``SET TIMEZONE``). Turn on query logging in your database if you wish to view +all database queries. + +.. versionchanged:: 4.2 + + Support for logging transaction management queries (``BEGIN``, ``COMMIT``, + and ``ROLLBACK``) was added. .. _django-security-logger: diff --git a/docs/ref/middleware.txt b/docs/ref/middleware.txt index 2ddcbae7fe..a270b8b774 100644 --- a/docs/ref/middleware.txt +++ b/docs/ref/middleware.txt @@ -93,18 +93,33 @@ GZip middleware .. class:: GZipMiddleware -.. warning:: + .. attribute:: max_random_bytes - Security researchers recently revealed that when compression techniques - (including ``GZipMiddleware``) are used on a website, the site may become - exposed to a number of possible attacks. Before using ``GZipMiddleware`` on - your site, you should consider very carefully whether you are subject to - these attacks. If you're in *any* doubt about whether you're affected, you - should avoid using ``GZipMiddleware``. For more details, see the `the BREACH - paper (PDF)`_ and `breachattack.com`_. + Defaults to 100. Subclass ``GZipMiddleware`` and override the attribute + to change the maximum number of random bytes that is included with + compressed responses. - .. _the BREACH paper (PDF): http://breachattack.com/resources/BREACH%20-%20SSL,%20gone%20in%2030%20seconds.pdf - .. _breachattack.com: http://breachattack.com +.. note:: + + Security researchers revealed that when compression techniques (including + ``GZipMiddleware``) are used on a website, the site may become exposed to a + number of possible attacks. + + To mitigate attacks, Django implements a technique called *Heal The Breach + (HTB)*. It adds up to 100 bytes (see + :attr:`.max_random_bytes`) of random bytes to each response + to make the attacks less effective. + + For more details, see the `BREACH paper (PDF)`_, `breachattack.com`_, and + the `Heal The Breach (HTB) paper`_. + + .. _BREACH paper (PDF): https://www.breachattack.com/resources/BREACH%20-%20SSL,%20gone%20in%2030%20seconds.pdf + .. _breachattack.com: https://www.breachattack.com/ + .. _Heal The Breach (HTB) paper: https://ieeexplore.ieee.org/document/9754554 + +.. versionchanged:: 4.2 + + Mitigation for the BREACH attack was added. The ``django.middleware.gzip.GZipMiddleware`` compresses content for browsers that understand GZip compression (all modern browsers). @@ -122,7 +137,7 @@ It will NOT compress content if any of the following are true: containing ``gzip``. If the response has an ``ETag`` header, the ETag is made weak to comply with -:rfc:`7232#section-2.1`. +:rfc:`9110#section-8.8.1`. You can apply GZip compression to individual views using the :func:`~django.views.decorators.gzip.gzip_page()` decorator. diff --git a/docs/ref/migration-operations.txt b/docs/ref/migration-operations.txt index a223ff6a23..b463bfc4ea 100644 --- a/docs/ref/migration-operations.txt +++ b/docs/ref/migration-operations.txt @@ -88,6 +88,17 @@ lose any data in the old table. 
Changes the model's table name (the :attr:`~django.db.models.Options.db_table` option on the ``Meta`` subclass). +``AlterModelTableComment`` +-------------------------- + +.. versionadded:: 4.2 + +.. class:: AlterModelTableComment(name, table_comment) + +Changes the model's table comment (the +:attr:`~django.db.models.Options.db_table_comment` option on the ``Meta`` +subclass). + ``AlterUniqueTogether`` ----------------------- diff --git a/docs/ref/models/fields.txt b/docs/ref/models/fields.txt index 0afddc14a8..4fb39fde82 100644 --- a/docs/ref/models/fields.txt +++ b/docs/ref/models/fields.txt @@ -325,6 +325,21 @@ characters that aren't allowed in Python variable names -- notably, the hyphen -- that's OK. Django quotes column and table names behind the scenes. +``db_comment`` +-------------- + +.. versionadded:: 4.2 + +.. attribute:: Field.db_comment + +The comment on the database column to use for this field. It is useful for +documenting fields for individuals with direct database access who may not be +looking at your Django code. For example:: + + pub_date = models.DateTimeField( + db_comment="Date and time when the article was published", + ) + ``db_index`` ------------ @@ -602,9 +617,11 @@ The default form widget for this field is a :class:`~django.forms.TextInput`. .. attribute:: CharField.max_length - Required. The maximum length (in characters) of the field. The max_length + The maximum length (in characters) of the field. The ``max_length`` is enforced at the database level and in Django's validation using - :class:`~django.core.validators.MaxLengthValidator`. + :class:`~django.core.validators.MaxLengthValidator`. It's required for all + database backends included with Django except PostgreSQL, which supports + unlimited ``VARCHAR`` columns. .. note:: @@ -613,6 +630,10 @@ The default form widget for this field is a :class:`~django.forms.TextInput`. ``max_length`` for some backends. Refer to the :doc:`database backend notes ` for details. + .. versionchanged:: 4.2 + + Support for unlimited ``VARCHAR`` columns was added on PostgreSQL. + .. attribute:: CharField.db_collation Optional. The database collation name of the field. diff --git a/docs/ref/models/instances.txt b/docs/ref/models/instances.txt index f9f11cac0d..b0f867d902 100644 --- a/docs/ref/models/instances.txt +++ b/docs/ref/models/instances.txt @@ -728,7 +728,7 @@ For example:: last_name = models.CharField(max_length=50) def __str__(self): - return '%s %s' % (self.first_name, self.last_name) + return f'{self.first_name} {self.last_name}' ``__eq__()`` ------------ @@ -848,7 +848,7 @@ track down every place that the URL might be created. Specify it once, in .. note:: The string you return from ``get_absolute_url()`` **must** contain only - ASCII characters (required by the URI specification, :rfc:`2396#section-2`) + ASCII characters (required by the URI specification, :rfc:`3986#section-2`) and be URL-encoded, if necessary. Code and templates calling ``get_absolute_url()`` should be able to use the diff --git a/docs/ref/models/options.txt b/docs/ref/models/options.txt index a1629168af..a882fcb05a 100644 --- a/docs/ref/models/options.txt +++ b/docs/ref/models/options.txt @@ -91,6 +91,24 @@ Django quotes column and table names behind the scenes. backends; except for Oracle, however, the quotes have no effect. See the :ref:`Oracle notes ` for more details. +``db_table_comment`` +-------------------- + +.. versionadded:: 4.2 + +.. attribute:: Options.db_table_comment + +The comment on the database table to use for this model. 
It is useful for +documenting database tables for individuals with direct database access who may +not be looking at your Django code. For example:: + + class Answer(models.Model): + question = models.ForeignKey(Question, on_delete=models.CASCADE) + answer = models.TextField() + + class Meta: + db_table_comment = "Question answers" + ``db_tablespace`` ----------------- diff --git a/docs/ref/models/querysets.txt b/docs/ref/models/querysets.txt index 58d9771cc5..57c64e32cf 100644 --- a/docs/ref/models/querysets.txt +++ b/docs/ref/models/querysets.txt @@ -1707,9 +1707,11 @@ one, doing so will result in an error. .. method:: only(*fields) -The ``only()`` method is more or less the opposite of :meth:`defer()`. You call -it with the fields that should *not* be deferred when retrieving a model. If -you have a model where almost all the fields need to be deferred, using +The ``only()`` method is essentially the opposite of :meth:`defer`. Only the +fields passed into this method and that are *not* already specified as deferred +are loaded immediately when the queryset is evaluated. + +If you have a model where almost all the fields need to be deferred, using ``only()`` to specify the complementary set of fields can result in simpler code. @@ -1734,8 +1736,7 @@ logically:: # Final result is that everything except "headline" is deferred. Entry.objects.only("headline", "body").defer("body") - # Final result loads headline and body immediately (only() replaces any - # existing set of fields). + # Final result loads headline immediately. Entry.objects.defer("body").only("headline", "body") All of the cautions in the note for the :meth:`defer` documentation apply to @@ -1756,6 +1757,11 @@ are in your ``only()`` call. deferred fields, only the loaded fields will be saved. See :meth:`~django.db.models.Model.save()` for more details. +.. note:: + + When using :meth:`defer` after ``only()`` the fields in :meth:`defer` will + override ``only()`` for fields that are listed in both. + ``using()`` ~~~~~~~~~~~ @@ -2174,7 +2180,7 @@ Finally, a word on using ``get_or_create()`` in Django views. Please make sure to use it only in ``POST`` requests unless you have a good reason not to. ``GET`` requests shouldn't have any effect on data. Instead, use ``POST`` whenever a request to a page has a side effect on your data. For more, see -:rfc:`Safe methods <7231#section-4.2.1>` in the HTTP spec. +:rfc:`Safe methods <9110#section-9.2.1>` in the HTTP spec. .. warning:: @@ -2271,6 +2277,11 @@ exists in the database, an :exc:`~django.db.IntegrityError` is raised. ``aupdate_or_create()`` method was added. +.. versionchanged:: 4.2 + + In older versions, ``update_or_create()`` didn't specify ``update_fields`` + when calling :meth:`Model.save() `. + ``bulk_create()`` ~~~~~~~~~~~~~~~~~ diff --git a/docs/ref/models/relations.txt b/docs/ref/models/relations.txt index 24bc38a354..1b1aad7425 100644 --- a/docs/ref/models/relations.txt +++ b/docs/ref/models/relations.txt @@ -37,6 +37,9 @@ Related objects reference ``topping.pizza_set`` and on ``pizza.toppings``. .. method:: add(*objs, bulk=True, through_defaults=None) + .. method:: aadd(*objs, bulk=True, through_defaults=None) + + *Asynchronous version*: ``aadd`` Adds the specified model objects to the related object set. @@ -75,7 +78,14 @@ Related objects reference dictionary and they will be evaluated once before creating any intermediate instance(s). + .. versionchanged:: 4.2 + + ``aadd()`` method was added. + .. method:: create(through_defaults=None, **kwargs) + .. 
method:: acreate(through_defaults=None, **kwargs) + + *Asynchronous version*: ``acreate`` Creates a new object, saves it and puts it in the related object set. Returns the newly created object:: @@ -110,7 +120,14 @@ Related objects reference needed. You can use callables as values in the ``through_defaults`` dictionary. + .. versionchanged:: 4.1 + + ``acreate()`` method was added. + .. method:: remove(*objs, bulk=True) + .. method:: aremove(*objs, bulk=True) + + *Asynchronous version*: ``aremove`` Removes the specified model objects from the related object set:: @@ -150,7 +167,14 @@ Related objects reference For many-to-many relationships, the ``bulk`` keyword argument doesn't exist. + .. versionchanged:: 4.2 + + ``aremove()`` method was added. + .. method:: clear(bulk=True) + .. method:: aclear(bulk=True) + + *Asynchronous version*: ``aclear`` Removes all objects from the related object set:: @@ -167,7 +191,14 @@ Related objects reference For many-to-many relationships, the ``bulk`` keyword argument doesn't exist. + .. versionchanged:: 4.2 + + ``aclear()`` method was added. + .. method:: set(objs, bulk=True, clear=False, through_defaults=None) + .. method:: aset(objs, bulk=True, clear=False, through_defaults=None) + + *Asynchronous version*: ``aset`` Replace the set of related objects:: @@ -200,13 +231,19 @@ Related objects reference dictionary and they will be evaluated once before creating any intermediate instance(s). + .. versionchanged:: 4.2 + + ``aset()`` method was added. + .. note:: - Note that ``add()``, ``create()``, ``remove()``, ``clear()``, and - ``set()`` all apply database changes immediately for all types of - related fields. In other words, there is no need to call ``save()`` - on either end of the relationship. + Note that ``add()``, ``aadd()``, ``create()``, ``acreate()``, + ``remove()``, ``aremove()``, ``clear()``, ``aclear()``, ``set()``, and + ``aset()`` all apply database changes immediately for all types of + related fields. In other words, there is no need to call + ``save()``/``asave()`` on either end of the relationship. If you use :meth:`~django.db.models.query.QuerySet.prefetch_related`, - the ``add()``, ``remove()``, ``clear()``, and ``set()`` methods clear - the prefetched cache. + the ``add()``, ``aadd()``, ``remove()``, ``aremove()``, ``clear()``, + ``aclear()``, ``set()``, and ``aset()`` methods clear the prefetched + cache. diff --git a/docs/ref/request-response.txt b/docs/ref/request-response.txt index 81558c8916..ebcd9ee523 100644 --- a/docs/ref/request-response.txt +++ b/docs/ref/request-response.txt @@ -759,7 +759,7 @@ Attributes .. attribute:: HttpResponse.status_code - The :rfc:`HTTP status code <7231#section-6>` for the response. + The :rfc:`HTTP status code <9110#section-15>` for the response. Unless :attr:`reason_phrase` is explicitly set, modifying the value of ``status_code`` outside the constructor will also modify the value of @@ -768,7 +768,7 @@ Attributes .. attribute:: HttpResponse.reason_phrase The HTTP reason phrase for the response. It uses the :rfc:`HTTP standard's - <7231#section-6.1>` default reason phrases. + <9110#section-15.1>` default reason phrases. Unless explicitly set, ``reason_phrase`` is determined by the value of :attr:`status_code`. @@ -803,9 +803,9 @@ Methods :setting:`DEFAULT_CHARSET` settings, by default: ``"text/html; charset=utf-8"``. - ``status`` is the :rfc:`HTTP status code <7231#section-6>` for the response. 
- You can use Python's :py:class:`http.HTTPStatus` for meaningful aliases, - such as ``HTTPStatus.NO_CONTENT``. + ``status`` is the :rfc:`HTTP status code <9110#section-15>` for the + response. You can use Python's :py:class:`http.HTTPStatus` for meaningful + aliases, such as ``HTTPStatus.NO_CONTENT``. ``reason`` is the HTTP response phrase. If not provided, a default phrase will be used. @@ -1116,43 +1116,76 @@ parameter to the constructor method:: .. class:: StreamingHttpResponse The :class:`StreamingHttpResponse` class is used to stream a response from -Django to the browser. You might want to do this if generating the response -takes too long or uses too much memory. For instance, it's useful for -:ref:`generating large CSV files `. +Django to the browser. -.. admonition:: Performance considerations +.. admonition:: Advanced usage - Django is designed for short-lived requests. Streaming responses will tie - a worker process for the entire duration of the response. This may result - in poor performance. + :class:`StreamingHttpResponse` is somewhat advanced, in that it is + important to know whether you'll be serving your application synchronously + under WSGI or asynchronously under ASGI, and adjust your usage + appropriately. - Generally speaking, you should perform expensive tasks outside of the - request-response cycle, rather than resorting to a streamed response. + Please read these notes with care. +An example usage of :class:`StreamingHttpResponse` under WSGI is streaming +content when generating the response would take too long or use too much +memory. For instance, it's useful for :ref:`generating large CSV files +`. +There are performance considerations when doing this, though. Django, under +WSGI, is designed for short-lived requests. Streaming responses will tie a +worker process for the entire duration of the response. This may result in poor +performance. +Generally speaking, you would perform expensive tasks outside of the +request-response cycle, rather than resorting to a streamed response. +When serving under ASGI, however, a :class:`StreamingHttpResponse` need not +stop other requests from being served whilst waiting for I/O. This opens up +the possibility of long-lived requests for streaming content and implementing +patterns such as long-polling, and server-sent events. +Note that, even under ASGI, :class:`StreamingHttpResponse` should only be used +in situations where it is absolutely required that the whole content isn't +iterated before transferring the data to the client. Because the content can't +be accessed, many middleware can't function normally. For example the ``ETag`` +and ``Content-Length`` headers can't be generated for streaming responses. The :class:`StreamingHttpResponse` is not a subclass of :class:`HttpResponse`, because it features a slightly different API. However, it is almost identical, with the following notable differences: -* It should be given an iterator that yields bytestrings as content. +* It should be given an iterator that yields bytestrings as content. When + serving under WSGI, this should be a sync iterator. When serving under ASGI, + this should be an async iterator. * You cannot access its content, except by iterating the response object - itself. This should only occur when the response is returned to the client. + itself. This should only occur when the response is returned to the client: + you should not iterate the response yourself. + + Under WSGI the response will be iterated synchronously. 
Under ASGI the + response will be iterated asynchronously. (This is why the iterator type must + match the protocol you're using.) + + To avoid a crash, an incorrect iterator type will be mapped to the correct + type during iteration, and a warning will be raised, but in order to do this + the iterator must be fully-consumed, which defeats the purpose of using a + :class:`StreamingHttpResponse` at all. * It has no ``content`` attribute. Instead, it has a - :attr:`~StreamingHttpResponse.streaming_content` attribute. + :attr:`~StreamingHttpResponse.streaming_content` attribute. This can be used + in middleware to wrap the response iterable, but should not be consumed. * You cannot use the file-like object ``tell()`` or ``write()`` methods. Doing so will raise an exception. -:class:`StreamingHttpResponse` should only be used in situations where it is -absolutely required that the whole content isn't iterated before transferring -the data to the client. Because the content can't be accessed, many -middleware can't function normally. For example the ``ETag`` and -``Content-Length`` headers can't be generated for streaming responses. - The :class:`HttpResponseBase` base class is common between :class:`HttpResponse` and :class:`StreamingHttpResponse`. +.. versionchanged:: 4.2 + + Support for asynchronous iteration was added. + Attributes ---------- @@ -1163,7 +1196,7 @@ Attributes .. attribute:: StreamingHttpResponse.status_code - The :rfc:`HTTP status code <7231#section-6>` for the response. + The :rfc:`HTTP status code <9110#section-15>` for the response. Unless :attr:`reason_phrase` is explicitly set, modifying the value of ``status_code`` outside the constructor will also modify the value of @@ -1172,7 +1205,7 @@ Attributes .. attribute:: StreamingHttpResponse.reason_phrase The HTTP reason phrase for the response. It uses the :rfc:`HTTP standard's - <7231#section-6.1>` default reason phrases. + <9110#section-15.1>` default reason phrases. Unless explicitly set, ``reason_phrase`` is determined by the value of :attr:`status_code`. @@ -1181,6 +1214,16 @@ Attributes This is always ``True``. +.. attribute:: StreamingHttpResponse.is_async + + .. versionadded:: 4.2 + + Boolean indicating whether :attr:`StreamingHttpResponse.streaming_content` + is an asynchronous iterator or not. + + This is useful for middleware needing to wrap + :attr:`StreamingHttpResponse.streaming_content`. + ``FileResponse`` objects ======================== @@ -1213,6 +1256,15 @@ a file open in binary mode like so:: The file will be closed automatically, so don't open it with a context manager. +.. admonition:: Use under ASGI + + Python's file API is synchronous. This means that the file must be fully + consumed in order to be served under ASGI. + + In order to stream a file asynchronously you need to use a third-party + package that provides an asynchronous file API, such as `aiofiles + `_. + Methods ------- diff --git a/docs/ref/schema-editor.txt b/docs/ref/schema-editor.txt index 99d93f5ab4..fed3e76309 100644 --- a/docs/ref/schema-editor.txt +++ b/docs/ref/schema-editor.txt @@ -127,6 +127,15 @@ value. Renames the model's table from ``old_db_table`` to ``new_db_table``. +``alter_db_table_comment()`` +---------------------------- + +.. versionadded:: 4.2 + +.. method:: BaseDatabaseSchemaEditor.alter_db_table_comment(model, old_db_table_comment, new_db_table_comment) + +Change the ``model``’s table comment to ``new_db_table_comment``. 
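``alter_db_table_comment()`` is normally driven by the migration framework (the
new ``AlterModelTableComment`` operation), but as an illustrative sketch it can
also be exercised directly through a schema editor; the ``Question`` model is
assumed from the comment examples above::

    from django.db import connection

    from polls.models import Question  # hypothetical app defining the model

    # Update the table comment in place; arguments follow the
    # (model, old_db_table_comment, new_db_table_comment) signature above.
    with connection.schema_editor() as schema_editor:
        schema_editor.alter_db_table_comment(Question, None, "Poll questions")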
+ ``alter_db_tablespace()`` ------------------------- diff --git a/docs/ref/settings.txt b/docs/ref/settings.txt index e1a2ead760..b1a8e2444d 100644 --- a/docs/ref/settings.txt +++ b/docs/ref/settings.txt @@ -2886,8 +2886,8 @@ Default: ``False`` A boolean that specifies whether to display numbers using a thousand separator. When set to ``True`` and :setting:`USE_L10N` is also ``True``, Django will format numbers using the :setting:`NUMBER_GROUPING` and -:setting:`THOUSAND_SEPARATOR` settings. These settings may also be dictated by -the locale, which takes precedence. +:setting:`THOUSAND_SEPARATOR` settings. The latter two settings may also be +dictated by the locale, which takes precedence. See also :setting:`DECIMAL_SEPARATOR`, :setting:`NUMBER_GROUPING` and :setting:`THOUSAND_SEPARATOR`. diff --git a/docs/ref/signals.txt b/docs/ref/signals.txt index 0e2a701fcd..1d21e2debe 100644 --- a/docs/ref/signals.txt +++ b/docs/ref/signals.txt @@ -391,7 +391,7 @@ Argument Value .. data:: django.db.models.signals.class_prepared :module: -Sent whenever a model class has been "prepared" -- that is, once model has +Sent whenever a model class has been "prepared" -- that is, once a model has been defined and registered with Django's model system. Django uses this signal internally; it's not generally used in third-party applications. diff --git a/docs/ref/unicode.txt b/docs/ref/unicode.txt index 57e08f6482..87e8b7d0b7 100644 --- a/docs/ref/unicode.txt +++ b/docs/ref/unicode.txt @@ -146,7 +146,7 @@ URI and IRI handling Web frameworks have to deal with URLs (which are a type of IRI). One requirement of URLs is that they are encoded using only ASCII characters. However, in an international environment, you might need to construct a -URL from an :rfc:`IRI <3987>` -- very loosely speaking, a :rfc:`URI <2396>` +URL from an :rfc:`IRI <3987>` -- very loosely speaking, a :rfc:`URI <3986>` that can contain Unicode characters. Use these functions for quoting and converting an IRI to a URI: diff --git a/docs/ref/utils.txt b/docs/ref/utils.txt index d296d2bda1..b1a08dc0e1 100644 --- a/docs/ref/utils.txt +++ b/docs/ref/utils.txt @@ -21,7 +21,7 @@ by managing the ``Vary`` header of responses. It includes functions to patch the header of response objects directly and decorators that change functions to do that header-patching themselves. -For information on the ``Vary`` header, see :rfc:`7231#section-7.1.4`. +For information on the ``Vary`` header, see :rfc:`9110#section-12.5.5`. Essentially, the ``Vary`` HTTP header defines which headers a cache should take into account when building its cache key. Requests with the same path but @@ -75,7 +75,7 @@ need to distinguish caches by the ``Accept-language`` header. Adds (or updates) the ``Vary`` header in the given ``HttpResponse`` object. ``newheaders`` is a list of header names that should be in ``Vary``. If headers contains an asterisk, then ``Vary`` header will consist of a single - asterisk ``'*'``, according to :rfc:`7231#section-7.1.4`. Otherwise, + asterisk ``'*'``, according to :rfc:`9110#section-12.5.5`. Otherwise, existing headers in ``Vary`` aren't removed. .. function:: get_cache_key(request, key_prefix=None, method='GET', cache=None) @@ -721,7 +721,7 @@ escaping HTML. .. function:: http_date(epoch_seconds=None) Formats the time to match the :rfc:`1123#section-5.2.14` date format as - specified by HTTP :rfc:`7231#section-7.1.1.1`. + specified by HTTP :rfc:`9110#section-5.6.7`. 
Accepts a floating point number expressed in seconds since the epoch in UTC--such as that outputted by ``time.time()``. If set to ``None``, @@ -729,6 +729,15 @@ escaping HTML. Outputs a string in the format ``Wdy, DD Mon YYYY HH:MM:SS GMT``. +.. function:: content_disposition_header(as_attachment, filename) + + .. versionadded:: 4.2 + + Constructs a ``Content-Disposition`` HTTP header value from the given + ``filename`` as specified by :rfc:`6266`. Returns ``None`` if + ``as_attachment`` is ``False`` and ``filename`` is ``None``, otherwise + returns a string suitable for the ``Content-Disposition`` HTTP header. + .. function:: base36_to_int(s) Converts a base 36 string to an integer. diff --git a/docs/ref/views.txt b/docs/ref/views.txt index 7ee6cc806f..250ab47932 100644 --- a/docs/ref/views.txt +++ b/docs/ref/views.txt @@ -121,9 +121,9 @@ default, call the view ``django.views.defaults.permission_denied``. This view loads and renders the template ``403.html`` in your root template directory, or if this file does not exist, instead serves the text -"403 Forbidden", as per :rfc:`7231#section-6.5.3` (the HTTP 1.1 Specification). -The template context contains ``exception``, which is the string -representation of the exception that triggered the view. +"403 Forbidden", as per :rfc:`9110#section-15.5.4` (the HTTP 1.1 +Specification). The template context contains ``exception``, which is the +string representation of the exception that triggered the view. ``django.views.defaults.permission_denied`` is triggered by a :exc:`~django.core.exceptions.PermissionDenied` exception. To deny access in a diff --git a/docs/releases/1.10.txt b/docs/releases/1.10.txt index 8b0303352a..bcb984b474 100644 --- a/docs/releases/1.10.txt +++ b/docs/releases/1.10.txt @@ -242,7 +242,7 @@ CSRF changes the form token value on every request (while keeping an invariant secret which can be used to validate the different tokens). -.. _BREACH: http://breachattack.com/ +.. _BREACH: https://www.breachattack.com/ Database backends ~~~~~~~~~~~~~~~~~ diff --git a/docs/releases/1.11.txt b/docs/releases/1.11.txt index 5da81cd739..50b78305d4 100644 --- a/docs/releases/1.11.txt +++ b/docs/releases/1.11.txt @@ -256,10 +256,11 @@ Database backends * Added the :setting:`TEST['TEMPLATE'] ` setting to let PostgreSQL users specify a template for creating the test database. -* :meth:`.QuerySet.iterator()` now uses :ref:`server-side cursors - ` on PostgreSQL. This feature transfers some of - the worker memory load (used to hold query results) to the database and might - increase database memory usage. +* :meth:`.QuerySet.iterator()` now uses `server-side cursors`_ on PostgreSQL. + This feature transfers some of the worker memory load (used to hold query + results) to the database and might increase database memory usage. + + .. _server-side cursors: https://www.psycopg.org/docs/usage.html#server-side-cursors * Added MySQL support for the ``'isolation_level'`` option in :setting:`OPTIONS` to allow specifying the :ref:`transaction isolation level diff --git a/docs/releases/1.4.txt b/docs/releases/1.4.txt index b3b3acda31..1c0f48eba5 100644 --- a/docs/releases/1.4.txt +++ b/docs/releases/1.4.txt @@ -900,8 +900,8 @@ object, use ``django.db.connections[DEFAULT_DB_ALIAS]`` instead. As part of this change, all underlying SQLite connections are now enabled for potential thread-sharing (by passing the ``check_same_thread=False`` attribute -to pysqlite). 
``DatabaseWrapper`` however preserves the previous behavior by -disabling thread-sharing by default, so this does not affect any existing +to ``pysqlite``). ``DatabaseWrapper`` however preserves the previous behavior +by disabling thread-sharing by default, so this does not affect any existing code that purely relies on the ORM or on ``DatabaseWrapper.cursor()``. Finally, while it's now possible to pass connections between threads, Django diff --git a/docs/releases/4.1.4.txt b/docs/releases/4.1.4.txt index da3810875f..c1db23ba55 100644 --- a/docs/releases/4.1.4.txt +++ b/docs/releases/4.1.4.txt @@ -2,11 +2,27 @@ Django 4.1.4 release notes ========================== -*Expected December 6, 2022* +*December 6, 2022* Django 4.1.4 fixes several bugs in 4.1.3. Bugfixes ======== -* ... +* Fixed a regression in Django 4.1 that caused an unnecessary table rebuild + when adding a ``ManyToManyField`` on SQLite (:ticket:`34138`). + +* Fixed a bug in Django 4.1 that caused a crash of the sitemap index view with + an empty :meth:`Sitemap.items() ` and + a callable :attr:`~django.contrib.sitemaps.Sitemap.lastmod` + (:ticket:`34088`). + +* Fixed a bug in Django 4.1 that caused a crash using ``acreate()``, + ``aget_or_create()``, and ``aupdate_or_create()`` asynchronous methods of + related managers (:ticket:`34139`). + +* Fixed a bug in Django 4.1 that caused a crash of ``QuerySet.bulk_create()`` + with ``"pk"`` in ``unique_fields`` (:ticket:`34177`). + +* Fixed a bug in Django 4.1 that caused a crash of ``QuerySet.bulk_create()`` + on fields with ``db_column`` (:ticket:`34171`). diff --git a/docs/releases/4.1.5.txt b/docs/releases/4.1.5.txt new file mode 100644 index 0000000000..3f2610ea94 --- /dev/null +++ b/docs/releases/4.1.5.txt @@ -0,0 +1,16 @@ +========================== +Django 4.1.5 release notes +========================== + +*January 2, 2023* + +Django 4.1.5 fixes a bug in 4.1.4. Also, the latest string translations from +Transifex are incorporated. + +Bugfixes +======== + +* Fixed a long standing bug in the ``__len`` lookup for ``ArrayField`` that + caused a crash of model validation on + :attr:`Meta.constraints ` + (:ticket:`34205`). diff --git a/docs/releases/4.1.6.txt b/docs/releases/4.1.6.txt new file mode 100644 index 0000000000..46537196b9 --- /dev/null +++ b/docs/releases/4.1.6.txt @@ -0,0 +1,12 @@ +========================== +Django 4.1.6 release notes +========================== + +*Expected February 1, 2023* + +Django 4.1.6 fixes several bugs in 4.1.5. + +Bugfixes +======== + +* ... diff --git a/docs/releases/4.1.txt b/docs/releases/4.1.txt index dd1470fc15..1a59d4ab93 100644 --- a/docs/releases/4.1.txt +++ b/docs/releases/4.1.txt @@ -590,6 +590,12 @@ Miscellaneous * The minimum supported version of ``asgiref`` is increased from 3.4.1 to 3.5.2. +* Combined expressions no longer use the error-prone behavior of guessing + ``output_field`` when argument types match. As a consequence, resolving an + ``output_field`` for database functions and combined expressions may now + crash with mixed types. You will need to explicitly set the ``output_field`` + in such cases. + .. _deprecated-features-4.1: Features deprecated in 4.1 diff --git a/docs/releases/4.2.txt b/docs/releases/4.2.txt index 0dcfc647bc..f23ee72559 100644 --- a/docs/releases/4.2.txt +++ b/docs/releases/4.2.txt @@ -26,6 +26,65 @@ and only officially support the latest release of each series. 
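A brief sketch of the explicit ``output_field`` now required for mixed-type combined expressions, as noted in the Django 4.1 changes above (the model and fields here are hypothetical)::

    from django.db.models import DecimalField, ExpressionWrapper, F

    # price is a DecimalField and discount_rate is a FloatField; mixing the two
    # types means Django can no longer guess the result type, so declare it.
    Order.objects.annotate(
        discounted_price=ExpressionWrapper(
            F("price") * F("discount_rate"),
            output_field=DecimalField(max_digits=10, decimal_places=2),
        )
    )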
What's new in Django 4.2 ======================== +Psycopg 3 support +----------------- + +Django now supports `psycopg`_ version 3.1 or higher. To update your code, +install the `psycopg library`_, you don't need to change the +:setting:`ENGINE ` as ``django.db.backends.postgresql`` +supports both libraries. + +Support for ``psycopg2`` is likely to be deprecated and removed at some point +in the future. + +.. _psycopg: https://www.psycopg.org/psycopg3/ +.. _psycopg library: https://pypi.org/project/psycopg/ + +Comments on columns and tables +------------------------------ + +The new :attr:`Field.db_comment ` and +:attr:`Meta.db_table_comment ` +options allow creating comments on columns and tables, respectively. For +example:: + + from django.db import models + + class Question(models.Model): + text = models.TextField(db_comment="Poll question") + pub_date = models.DateTimeField( + db_comment="Date and time when the question was published", + ) + + class Meta: + db_table_comment = "Poll questions" + + + class Answer(models.Model): + question = models.ForeignKey( + Question, + on_delete=models.CASCADE, + db_comment="Reference to a question" + ) + answer = models.TextField(db_comment="Question answer") + + class Meta: + db_table_comment = "Question answers" + +Also, the new :class:`~django.db.migrations.operations.AlterModelTableComment` +operation allows changing table comments defined in the +:attr:`Meta.db_table_comment `. + +Mitigation for the BREACH attack +-------------------------------- + +:class:`~django.middleware.gzip.GZipMiddleware` now includes a mitigation for +the BREACH attack. It will add up to 100 random bytes to gzip responses to make +BREACH attacks harder. Read more about the mitigation technique in the `Heal +The Breach (HTB) paper`_. + +.. _Heal The Breach (HTB) paper: https://ieeexplore.ieee.org/document/9754554 + Minor features -------------- @@ -65,6 +124,12 @@ Minor features * The default iteration count for the PBKDF2 password hasher is increased from 390,000 to 480,000. +* :class:`~django.contrib.auth.forms.UserCreationForm` now saves many-to-many + form fields for a custom user model. + +* The new :class:`~django.contrib.auth.forms.BaseUserCreationForm` is now the + recommended base class for customizing the user creation form. + :mod:`django.contrib.contenttypes` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -87,6 +152,13 @@ Minor features removes the former ``map_css`` block) to better comply with a strict Content Security Policy. +* :class:`~django.contrib.gis.forms.widgets.OpenLayersWidget` is now based on + OpenLayers 7.2.2 (previously 4.6.5). + +* The new :lookup:`isempty` lookup and + :class:`IsEmpty() ` + expression allow filtering empty geometries on PostGIS. + :mod:`django.contrib.messages` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -102,6 +174,9 @@ Minor features ` expressions allow using trigram strict word similarity. +* The :lookup:`arrayfield.overlap` lookup now supports ``QuerySet.values()`` + and ``values_list()`` as a right-hand side. + :mod:`django.contrib.redirects` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -115,7 +190,8 @@ Minor features :mod:`django.contrib.sitemaps` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -* ... +* The new :meth:`.Sitemap.get_languages_for_item` method allows customizing the + list of languages for which the item is displayed. :mod:`django.contrib.sites` ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -125,7 +201,13 @@ Minor features :mod:`django.contrib.staticfiles` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -* ... 
+* :class:`~django.contrib.staticfiles.storage.ManifestStaticFilesStorage` now + replaces paths to JavaScript modules in ``import`` and ``export`` statements + with their hashed counterparts. + +* The new :attr:`.ManifestStaticFilesStorage.manifest_hash` attribute provides + a hash over all files in the manifest and changes whenever one of the files + changes. :mod:`django.contrib.syndication` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -155,7 +237,8 @@ Email Error Reporting ~~~~~~~~~~~~~~~ -* ... +* The debug page now shows :pep:`exception notes <678>` and + :pep:`fine-grained error locations <657>` on Python 3.11+. File Storage ~~~~~~~~~~~~ @@ -196,7 +279,8 @@ Internationalization Logging ~~~~~~~ -* ... +* The :ref:`django-db-logger` logger now logs transaction management queries + (``BEGIN``, ``COMMIT``, and ``ROLLBACK``) at the ``DEBUG`` level. Management Commands ~~~~~~~~~~~~~~~~~~~ @@ -243,10 +327,20 @@ Models database, using an ``a`` prefix: :meth:`~.Model.adelete`, :meth:`~.Model.arefresh_from_db`, and :meth:`~.Model.asave`. +* Related managers now provide asynchronous versions of methods that change a + set of related objects, using an ``a`` prefix: :meth:`~.RelatedManager.aadd`, + :meth:`~.RelatedManager.aclear`, :meth:`~.RelatedManager.aremove`, and + :meth:`~.RelatedManager.aset`. + +* :attr:`CharField.max_length ` is no + longer required to be set on PostgreSQL, which supports unlimited ``VARCHAR`` + columns. + Requests and Responses ~~~~~~~~~~~~~~~~~~~~~~ -* ... +* :class:`~django.http.StreamingHttpResponse` now supports async iterators + when Django is served via ASGI. Security ~~~~~~~~ @@ -274,6 +368,22 @@ Tests * The :option:`test --debug-sql` option now formats SQL queries with ``sqlparse``. +* The :class:`~django.test.RequestFactory`, + :class:`~django.test.AsyncRequestFactory`, :class:`~django.test.Client`, and + :class:`~django.test.AsyncClient` classes now support the ``headers`` + parameter, which accepts a dictionary of header names and values. This allows + a more natural syntax for declaring headers. + + .. code-block:: python + + # Before: + self.client.get("/home/", HTTP_ACCEPT_LANGUAGE="fr") + await self.async_client.get("/home/", ACCEPT_LANGUAGE="fr") + + # After: + self.client.get("/home/", headers={"accept-language": "fr"}) + await self.async_client.get("/home/", headers={"accept-language": "fr"}) + URLs ~~~~ @@ -292,6 +402,9 @@ Utilities documented functions for handling URL redirects. The Django functions were not affected. +* The new :func:`django.utils.http.content_disposition_header` function returns + a ``Content-Disposition`` HTTP header value as specified by :rfc:`6266`. + Validators ~~~~~~~~~~ @@ -309,7 +422,12 @@ Database backend API This section describes changes that may be needed in third-party database backends. -* ... +* ``DatabaseFeatures.allows_group_by_pk`` is removed as it only remained to + accommodate a MySQL extension that has been supplanted by proper functional + dependency detection in MySQL 5.7.15. Note that + ``DatabaseFeatures.allows_group_by_selected_pks`` is still supported and + should be enabled if your backend supports functional dependency detection in + ``GROUP BY`` clauses as specified by the ``SQL:1999`` standard. Dropped support for MariaDB 10.3 -------------------------------- @@ -329,6 +447,16 @@ Dropped support for PostgreSQL 11 Upstream support for PostgreSQL 11 ends in November 2023. Django 4.2 supports PostgreSQL 12 and higher. 
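A minimal sketch of the asynchronous streaming support noted in the Requests and Responses section above (the view and generator names are hypothetical, and this requires serving Django through ASGI)::

    import asyncio

    from django.http import StreamingHttpResponse

    async def stream_rows():
        for i in range(3):
            await asyncio.sleep(1)  # E.g. wait for the next chunk of data.
            yield f"row {i}\n"

    async def export_view(request):
        return StreamingHttpResponse(stream_rows())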
+Setting ``update_fields`` in ``Model.save()`` may now be required +----------------------------------------------------------------- + +In order to avoid updating unnecessary columns, +:meth:`.QuerySet.update_or_create` now passes ``update_fields`` to the +:meth:`Model.save() <django.db.models.Model.save>` calls. As a consequence, any +fields modified in the custom ``save()`` methods should be added to the +``update_fields`` keyword argument before calling ``super()``. See +:ref:`overriding-model-methods` for more details. + Miscellaneous ------------- @@ -358,6 +486,22 @@ Miscellaneous * Add functionality so :class:`~django.db.models.sql.RawQuery` can be used as a subquery. +* The ``is_summary`` argument of the undocumented ``Query.add_annotation()`` + method is removed. + +* The minimum supported version of SQLite is increased from 3.9.0 to 3.21.0. + +* :djadmin:`inspectdb` now uses ``display_size`` from + ``DatabaseIntrospection.get_table_description()`` rather than + ``internal_size`` for ``CharField``. + +* The minimum supported version of ``asgiref`` is increased from 3.5.2 to + 3.6.0. + +* :class:`~django.contrib.auth.forms.UserCreationForm` now rejects usernames + that differ only in case. If you need the previous behavior, use + :class:`~django.contrib.auth.forms.BaseUserCreationForm` instead. + .. _deprecated-features-4.2: Features deprecated in 4.2 @@ -400,6 +544,42 @@ but it should not be used for new migrations. Use :class:`~django.db.migrations.operations.AddIndex` and :class:`~django.db.migrations.operations.RemoveIndex` operations instead. +Passing encoded JSON string literals to ``JSONField`` is deprecated +------------------------------------------------------------------- + +``JSONField`` and its associated lookups and aggregates used to allow passing +JSON encoded string literals, which caused ambiguity as to whether string +literals were already encoded from the database backend's perspective. + +During the deprecation period, Django attempts to JSON-decode string literals +and, on success, emits a warning that points at passing non-encoded forms +instead. + +Code that used to pass JSON encoded string literals:: + + Document.objects.bulk_create( + Document(data=Value("null")), + Document(data=Value("[]")), + Document(data=Value('"foo-bar"')), + ) + Document.objects.annotate( + JSONBAgg("field", default=Value('[]')), + ) + +Should become:: + + Document.objects.bulk_create( + Document(data=Value(None, JSONField())), + Document(data=[]), + Document(data="foo-bar"), + ) + Document.objects.annotate( + JSONBAgg("field", default=[]), + ) + +From Django 5.1+, string literals will be implicitly interpreted as JSON string +literals. + Miscellaneous ------------- @@ -449,3 +629,6 @@ Miscellaneous * ``TransactionTestCase.assertQuerysetEqual()`` is deprecated in favor of ``assertQuerySetEqual()``. + +* Passing positional arguments to ``Signer`` and ``TimestampSigner`` is + deprecated in favor of keyword-only arguments. diff --git a/docs/releases/index.txt b/docs/releases/index.txt index 63cae35961..3985e7e878 100644 --- a/docs/releases/index.txt +++ b/docs/releases/index.txt @@ -33,6 +33,8 @@ versions of the documentation contain the release notes for any later releases. ..
toctree:: :maxdepth: 1 + 4.1.6 + 4.1.5 4.1.4 4.1.3 4.1.2 diff --git a/docs/spelling_wordlist b/docs/spelling_wordlist index 0b9180fd35..c8884c5be9 100644 --- a/docs/spelling_wordlist +++ b/docs/spelling_wordlist @@ -136,6 +136,7 @@ dimensioned discoverable Disqus distro +django djangoproject dm docstring @@ -246,7 +247,6 @@ kilometre Koziarski kwarg kwargs -Kyngesburye Kyrgyz latin lawrence @@ -387,7 +387,6 @@ psycopg Punycode Puthraya pyformat -pysqlite pythonic qs queryset diff --git a/docs/topics/async.txt b/docs/topics/async.txt index 2b9b1a85d9..39ca864655 100644 --- a/docs/topics/async.txt +++ b/docs/topics/async.txt @@ -28,10 +28,10 @@ class-based view, this means declaring the HTTP method handlers, such as .. note:: - Django uses ``asyncio.iscoroutinefunction`` to test if your view is + Django uses ``asgiref.sync.iscoroutinefunction`` to test if your view is asynchronous or not. If you implement your own method of returning a - coroutine, ensure you set the ``_is_coroutine`` attribute of the view - to ``asyncio.coroutines._is_coroutine`` so this function returns ``True``. + coroutine, ensure you use ``asgiref.sync.markcoroutinefunction`` so this + function returns ``True``. Under a WSGI server, async views will run in their own, one-off event loop. This means you can use async features, like concurrent async HTTP requests, @@ -85,7 +85,7 @@ With some exceptions, Django can run ORM queries asynchronously as well:: Detailed notes can be found in :ref:`async-queries`, but in short: -* All ``QuerySet`` methods that cause a SQL query to occur have an +* All ``QuerySet`` methods that cause an SQL query to occur have an ``a``-prefixed asynchronous variant. * ``async for`` is supported on all QuerySets (including the output of @@ -97,13 +97,17 @@ Django also supports some asynchronous model methods that use the database:: book = Book(...) await book.asave(using="secondary") + async def make_book_with_tags(tags, ...): + book = await Book.objects.acreate(...) + await book.tags.aset(tags) + Transactions do not yet work in async mode. If you have a piece of code that needs transactions behavior, we recommend you write that piece as a single synchronous function and call it using :func:`sync_to_async`. .. versionchanged:: 4.2 - Asynchronous model interface was added. + Asynchronous model and related manager interfaces were added. Performance ----------- diff --git a/docs/topics/auth/customizing.txt b/docs/topics/auth/customizing.txt index c77e6c599d..9c0a256d25 100644 --- a/docs/topics/auth/customizing.txt +++ b/docs/topics/auth/customizing.txt @@ -840,6 +840,11 @@ extend these forms in this manner:: model = CustomUser fields = UserCreationForm.Meta.fields + ('custom_field',) +.. versionchanged:: 4.2 + + In older versions, :class:`~django.contrib.auth.forms.UserCreationForm` + didn't save many-to-many form fields for a custom user model. + Custom users and :mod:`django.contrib.admin` -------------------------------------------- diff --git a/docs/topics/auth/default.txt b/docs/topics/auth/default.txt index 6c8b1fe458..038f2b8eaf 100644 --- a/docs/topics/auth/default.txt +++ b/docs/topics/auth/default.txt @@ -473,7 +473,7 @@ login page:: def my_view(request): if not request.user.is_authenticated: - return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path)) + return redirect(f'{settings.LOGIN_URL}?next={request.path}') # ... ...or display an error message:: @@ -565,8 +565,8 @@ The ``login_required`` decorator .. 
currentmodule:: django.contrib.auth.mixins -The ``LoginRequired`` mixin -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The ``LoginRequiredMixin`` mixin +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When using :doc:`class-based views `, you can achieve the same behavior as with ``login_required`` by using the @@ -1654,9 +1654,12 @@ provides several built-in forms located in :mod:`django.contrib.auth.forms`: A form used in the admin interface to change a user's information and permissions. -.. class:: UserCreationForm +.. class:: BaseUserCreationForm - A :class:`~django.forms.ModelForm` for creating a new user. + .. versionadded:: 4.2 + + A :class:`~django.forms.ModelForm` for creating a new user. This is the + recommended base class if you need to customize the user creation form. It has three fields: ``username`` (from the user model), ``password1``, and ``password2``. It verifies that ``password1`` and ``password2`` match, @@ -1665,6 +1668,19 @@ provides several built-in forms located in :mod:`django.contrib.auth.forms`: sets the user's password using :meth:`~django.contrib.auth.models.User.set_password()`. +.. class:: UserCreationForm + + Inherits from :class:`BaseUserCreationForm`. To help prevent confusion with + similar usernames, the form doesn't allow usernames that differ only in + case. + + .. versionchanged:: 4.2 + + In older versions, :class:`UserCreationForm` didn't save many-to-many + form fields for a custom user model. + + In older versions, usernames that differ only in case are allowed. + .. currentmodule:: django.contrib.auth Authentication data in templates diff --git a/docs/topics/auth/passwords.txt b/docs/topics/auth/passwords.txt index 7f92685775..57053c1a2a 100644 --- a/docs/topics/auth/passwords.txt +++ b/docs/topics/auth/passwords.txt @@ -474,14 +474,18 @@ The :mod:`django.contrib.auth.hashers` module provides a set of functions to create and validate hashed passwords. You can use them independently from the ``User`` model. -.. function:: check_password(password, encoded) +.. function:: check_password(password, encoded, setter=None, preferred="default") If you'd like to manually authenticate a user by comparing a plain-text password to the hashed password in the database, use the convenience - function :func:`check_password`. It takes two arguments: the plain-text - password to check, and the full value of a user's ``password`` field in the - database to check against, and returns ``True`` if they match, ``False`` - otherwise. + function :func:`check_password`. It takes two mandatory arguments: the + plain-text password to check, and the full value of a user's ``password`` + field in the database to check against. It returns ``True`` if they match, + ``False`` otherwise. Optionally, you can pass a callable ``setter`` that + takes the password and will be called when you need to regenerate it. You + can also pass ``preferred`` to change a hashing algorithm if you don't want + to use the default (first entry of ``PASSWORD_HASHERS`` setting). See + :ref:`auth-included-hashers` for the algorithm name of each hasher. .. function:: make_password(password, salt=None, hasher='default') diff --git a/docs/topics/cache.txt b/docs/topics/cache.txt index 34034c5541..6db9950c04 100644 --- a/docs/topics/cache.txt +++ b/docs/topics/cache.txt @@ -1351,7 +1351,7 @@ its first argument and a list/tuple of case-insensitive header names as its second argument. For more on Vary headers, see the :rfc:`official Vary spec -<7231#section-7.1.4>`. +<9110#section-12.5.5>`. 
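For example, a view whose cached responses must be distinguished by cookie could patch the header directly (a sketch; the view and template names are hypothetical)::

    from django.shortcuts import render
    from django.utils.cache import patch_vary_headers

    def article_list(request):
        response = render(request, "articles/list.html")
        # Downstream caches should key on the Cookie header as well.
        patch_vary_headers(response, ["Cookie"])
        return response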
Controlling cache: Using other headers ====================================== @@ -1402,10 +1402,10 @@ cache control header (it is internally called by the return response -You can control downstream caches in other ways as well (see :rfc:`7234` for +You can control downstream caches in other ways as well (see :rfc:`9111` for details on HTTP caching). For example, even if you don't use Django's server-side cache framework, you can still tell clients to cache a view for a -certain amount of time with the :rfc:`max-age <7234#section-5.2.2.8>` +certain amount of time with the :rfc:`max-age <9111#section-5.2.2.1>` directive:: from django.views.decorators.cache import cache_control diff --git a/docs/topics/conditional-view-processing.txt b/docs/topics/conditional-view-processing.txt index ed2c7f8363..ddf8170c77 100644 --- a/docs/topics/conditional-view-processing.txt +++ b/docs/topics/conditional-view-processing.txt @@ -15,16 +15,16 @@ or you can rely on the :class:`~django.middleware.http.ConditionalGetMiddleware` middleware to set the ``ETag`` header. When the client next requests the same resource, it might send along a header -such as either :rfc:`If-modified-since <7232#section-3.3>` or -:rfc:`If-unmodified-since <7232#section-3.4>`, containing the date of the last -modification time it was sent, or either :rfc:`If-match <7232#section-3.1>` or -:rfc:`If-none-match <7232#section-3.2>`, containing the last ``ETag`` it was -sent. If the current version of the page matches the ``ETag`` sent by the -client, or if the resource has not been modified, a 304 status code can be sent -back, instead of a full response, telling the client that nothing has changed. -Depending on the header, if the page has been modified or does not match the -``ETag`` sent by the client, a 412 status code (Precondition Failed) may be -returned. +such as either :rfc:`If-Modified-Since <9110#section-13.1.3>` or +:rfc:`If-Unmodified-Since <9110#section-13.1.4>`, containing the date of the +last modification time it was sent, or either :rfc:`If-Match +<9110#section-13.1.1>` or :rfc:`If-None-Match <9110#section-13.1.2>`, +containing the last ``ETag`` it was sent. If the current version of the page +matches the ``ETag`` sent by the client, or if the resource has not been +modified, a 304 status code can be sent back, instead of a full response, +telling the client that nothing has changed. Depending on the header, if the +page has been modified or does not match the ``ETag`` sent by the client, a 412 +status code (Precondition Failed) may be returned. When you need more fine-grained control you may use per-view conditional processing functions. @@ -35,7 +35,7 @@ The ``condition`` decorator =========================== Sometimes (in fact, quite often) you can create functions to rapidly compute -the :rfc:`ETag <7232#section-2.3>` value or the last-modified time for a +the :rfc:`ETag <9110#section-8.8.3>` value or the last-modified time for a resource, **without** needing to do all the computations needed to construct the full view. Django can then use these functions to provide an "early bailout" option for the view processing. Telling the client that the @@ -58,7 +58,7 @@ order, as the view function they are helping to wrap. The function passed ``last_modified_func`` should return a standard datetime value specifying the last time the resource was modified, or ``None`` if the resource doesn't exist. 
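For instance, a ``last_modified_func`` might look like the following sketch (the ``Entry`` model and the view are hypothetical)::

    from django.views.decorators.http import condition

    def latest_entry_date(request, blog_id):
        # Return the last modification time, or None if nothing exists yet.
        latest = Entry.objects.filter(blog=blog_id).order_by("-published").first()
        return latest.published if latest else None

    @condition(last_modified_func=latest_entry_date)
    def front_page(request, blog_id):
        ...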
The function passed to the ``etag`` decorator should return a string -representing the :rfc:`ETag <7232#section-2.3>` for the resource, or ``None`` +representing the :rfc:`ETag <9110#section-8.8.3>` for the resource, or ``None`` if it doesn't exist. The decorator sets the ``ETag`` and ``Last-Modified`` headers on the response @@ -105,8 +105,8 @@ for your front page view:: :func:`~django.views.decorators.vary.vary_on_cookie`, :func:`~django.views.decorators.vary.vary_on_headers`, and :func:`~django.views.decorators.cache.cache_control` should come first - because :rfc:`RFC 7232 <7232#section-4.1>` requires that the headers they - set be present on 304 responses. + because :rfc:`RFC 9110 <9110#section-15.4.5>` requires that the headers + they set be present on 304 responses. Shortcuts for only computing one value ====================================== @@ -194,7 +194,7 @@ every time. The ``condition`` decorator only sets validator headers (``ETag`` and ``Last-Modified``) for safe HTTP methods, i.e. ``GET`` and ``HEAD``. If you wish to return them in other cases, set them in your view. See - :rfc:`7231#section-4.3.4` to learn about the distinction between setting a + :rfc:`9110#section-9.3.4` to learn about the distinction between setting a validator header in response to requests made with ``PUT`` versus ``POST``. Comparison with middleware conditional processing diff --git a/docs/topics/db/examples/many_to_one.txt b/docs/topics/db/examples/many_to_one.txt index e91f7e8a11..20e489397c 100644 --- a/docs/topics/db/examples/many_to_one.txt +++ b/docs/topics/db/examples/many_to_one.txt @@ -15,7 +15,7 @@ objects, but an ``Article`` can only have one ``Reporter`` object:: email = models.EmailField() def __str__(self): - return "%s %s" % (self.first_name, self.last_name) + return f"{self.first_name} {self.last_name}" class Article(models.Model): headline = models.CharField(max_length=100) diff --git a/docs/topics/db/examples/one_to_one.txt b/docs/topics/db/examples/one_to_one.txt index 95be1cf724..a1a238d0f2 100644 --- a/docs/topics/db/examples/one_to_one.txt +++ b/docs/topics/db/examples/one_to_one.txt @@ -14,7 +14,7 @@ In this example, a ``Place`` optionally can be a ``Restaurant``:: address = models.CharField(max_length=80) def __str__(self): - return "%s the place" % self.name + return f"{self.name} the place" class Restaurant(models.Model): place = models.OneToOneField( diff --git a/docs/topics/db/models.txt b/docs/topics/db/models.txt index 825d817ccf..2f8a5551ff 100644 --- a/docs/topics/db/models.txt +++ b/docs/topics/db/models.txt @@ -762,7 +762,7 @@ For example, this model has a few custom methods:: @property def full_name(self): "Returns the person's full name." - return '%s %s' % (self.first_name, self.last_name) + return f'{self.first_name} {self.last_name}' The last method in this example is a :term:`property`. @@ -844,6 +844,33 @@ built-in model methods, adding new arguments. If you use ``*args, **kwargs`` in your method definitions, you are guaranteed that your code will automatically support those arguments when they are added. +If you wish to update a field value in the :meth:`~Model.save` method, you may +also want to have this field added to the ``update_fields`` keyword argument. +This will ensure the field is saved when ``update_fields`` is specified. 
For +example:: + + from django.db import models + from django.utils.text import slugify + + class Blog(models.Model): + name = models.CharField(max_length=100) + slug = models.TextField() + + def save( + self, force_insert=False, force_update=False, using=None, update_fields=None + ): + self.slug = slugify(self.name) + if update_fields is not None and "name" in update_fields: + update_fields = {"slug"}.union(update_fields) + super().save( + force_insert=force_insert, + force_update=force_update, + using=using, + update_fields=update_fields, + ) + +See :ref:`ref-models-update-fields` for more details. + .. admonition:: Overridden model methods are not called on bulk operations Note that the :meth:`~Model.delete()` method for an object is not diff --git a/docs/topics/db/queries.txt b/docs/topics/db/queries.txt index 5114efb57d..977e287c53 100644 --- a/docs/topics/db/queries.txt +++ b/docs/topics/db/queries.txt @@ -971,7 +971,7 @@ Storing and querying for ``None`` As with other fields, storing ``None`` as the field's value will store it as SQL ``NULL``. While not recommended, it is possible to store JSON scalar -``null`` instead of SQL ``NULL`` by using :class:`Value('null') +``null`` instead of SQL ``NULL`` by using :class:`Value(None, JSONField()) `. Whichever of the values is stored, when retrieved from the database, the Python @@ -987,11 +987,13 @@ query for SQL ``NULL``, use :lookup:`isnull`:: >>> Dog.objects.create(name='Max', data=None) # SQL NULL. - >>> Dog.objects.create(name='Archie', data=Value('null')) # JSON null. + >>> Dog.objects.create( + ... name='Archie', data=Value(None, JSONField()) # JSON null. + ... ) >>> Dog.objects.filter(data=None) ]> - >>> Dog.objects.filter(data=Value('null')) + >>> Dog.objects.filter(data=Value(None, JSONField()) ]> >>> Dog.objects.filter(data__isnull=True) ]> @@ -1007,6 +1009,15 @@ Unless you are sure you wish to work with SQL ``NULL`` values, consider setting Storing JSON scalar ``null`` does not violate :attr:`null=False `. +.. versionchanged:: 4.2 + + Support for expressing JSON ``null`` using ``Value(None, JSONField())`` was + added. + +.. deprecated:: 4.2 + + Passing ``Value("null")`` to express JSON ``null`` is deprecated. + .. fieldlookup:: jsonfield.key Key, index, and path transforms diff --git a/docs/topics/db/transactions.txt b/docs/topics/db/transactions.txt index 004f8351b8..b41c9fa758 100644 --- a/docs/topics/db/transactions.txt +++ b/docs/topics/db/transactions.txt @@ -290,45 +290,47 @@ Performing actions after commit Sometimes you need to perform an action related to the current database transaction, but only if the transaction successfully commits. Examples might -include a `Celery`_ task, an email notification, or a cache invalidation. +include a background task, an email notification, or a cache invalidation. -.. _Celery: https://pypi.org/project/celery/ - -Django provides the :func:`on_commit` function to register callback functions -that should be executed after a transaction is successfully committed: +:func:`on_commit` allows you to register callbacks that will be executed after +the open transaction is successfully committed: .. function:: on_commit(func, using=None, robust=False) -Pass any function (that takes no arguments) to :func:`on_commit`:: +Pass a function, or any callable, to :func:`on_commit`:: from django.db import transaction - def do_something(): - pass # send a mail, invalidate a cache, fire off a Celery task, etc. + def send_welcome_email(): + ... 
- transaction.on_commit(do_something) + transaction.on_commit(send_welcome_email) -You can also bind arguments to your function using :func:`functools.partial`:: +Callbacks will not be passed any arguments, but you can bind them with +:func:`functools.partial`:: from functools import partial - transaction.on_commit(partial(some_celery_task.delay, 'arg1')) + for user in users: + transaction.on_commit( + partial(send_invite_email, user=user) + ) -The function you pass in will be called immediately after a hypothetical -database write made where ``on_commit()`` is called would be successfully -committed. +Callbacks are called after the open transaction is successfully committed. If +the transaction is instead rolled back (typically when an unhandled exception +is raised in an :func:`atomic` block), the callback will be discarded, and +never called. -If you call ``on_commit()`` while there isn't an active transaction, the -callback will be executed immediately. +If you call ``on_commit()`` while there isn't an open transaction, +the callback will be executed immediately. -If that hypothetical database write is instead rolled back (typically when an -unhandled exception is raised in an :func:`atomic` block), your function will -be discarded and never called. +It's sometimes useful to register callbacks that can fail. Passing +``robust=True`` allows the next callbacks to be executed even if the current +one throws an exception. All errors derived from Python's ``Exception`` class +are caught and logged to the ``django.db.backends.base`` logger. -It's sometimes useful to register callback functions that can fail. Passing -``robust=True`` allows the next functions to be executed even if the current -function throws an exception. All errors derived from Python's ``Exception`` -class are caught and logged to the ``django.db.backends.base`` logger. +You can use :meth:`.TestCase.captureOnCommitCallbacks` to test callbacks +registered with :func:`on_commit`. .. versionchanged:: 4.2 @@ -390,13 +392,13 @@ Timing of execution Your callbacks are executed *after* a successful commit, so a failure in a callback will not cause the transaction to roll back. They are executed conditionally upon the success of the transaction, but they are not *part* of -the transaction. For the intended use cases (mail notifications, Celery tasks, -etc.), this should be fine. If it's not (if your follow-up action is so +the transaction. For the intended use cases (mail notifications, background +tasks, etc.), this should be fine. If it's not (if your follow-up action is so critical that its failure should mean the failure of the transaction itself), then you don't want to use the :func:`on_commit` hook. Instead, you may want `two-phase commit`_ such as the :ref:`psycopg Two-Phase Commit protocol support -` and the :pep:`optional Two-Phase Commit Extensions in the -Python DB-API specification <249#optional-two-phase-commit-extensions>`. +` and the :pep:`optional Two-Phase Commit Extensions +in the Python DB-API specification <249#optional-two-phase-commit-extensions>`. Callbacks are not run until autocommit is restored on the connection following the commit (because otherwise any queries done in a callback would open an diff --git a/docs/topics/forms/formsets.txt b/docs/topics/forms/formsets.txt index a7a5c0e794..1624c380a1 100644 --- a/docs/topics/forms/formsets.txt +++ b/docs/topics/forms/formsets.txt @@ -748,6 +748,9 @@ argument - the index of the form in the formset. The index is ``None`` for the ... 
kwargs['custom_kwarg'] = index ... return kwargs + >>> ArticleFormSet = formset_factory(MyArticleForm, formset=BaseArticleFormSet) + >>> formset = ArticleFormSet() + .. _formset-prefix: Customizing a formset's prefix diff --git a/docs/topics/http/middleware.txt b/docs/topics/http/middleware.txt index 29f379889f..f0db49abe5 100644 --- a/docs/topics/http/middleware.txt +++ b/docs/topics/http/middleware.txt @@ -267,6 +267,16 @@ must test for streaming responses and adjust their behavior accordingly:: for chunk in content: yield alter_content(chunk) +:class:`~django.http.StreamingHttpResponse` allows both synchronous and +asynchronous iterators. The wrapping function must match. Check +:attr:`StreamingHttpResponse.is_async +` if your middleware needs to +support both types of iterator. + +.. versionchanged:: 4.2 + + Support for streaming responses with asynchronous iterators was added. + Exception handling ================== @@ -312,7 +322,7 @@ If your middleware has both ``sync_capable = True`` and ``async_capable = True``, then Django will pass it the request without converting it. In this case, you can work out if your middleware will receive async requests by checking if the ``get_response`` object you are passed is a -coroutine function, using ``asyncio.iscoroutinefunction``. +coroutine function, using ``asgiref.sync.iscoroutinefunction``. The ``django.utils.decorators`` module contains :func:`~django.utils.decorators.sync_only_middleware`, @@ -331,13 +341,13 @@ at an additional performance penalty. Here's an example of how to create a middleware function that supports both:: - import asyncio + from asgiref.sync import iscoroutinefunction from django.utils.decorators import sync_and_async_middleware @sync_and_async_middleware def simple_middleware(get_response): # One-time configuration and initialization goes here. - if asyncio.iscoroutinefunction(get_response): + if iscoroutinefunction(get_response): async def middleware(request): # Do something here! response = await get_response(request) diff --git a/docs/topics/http/sessions.txt b/docs/topics/http/sessions.txt index 1c1ad0c34b..4dc6f6af35 100644 --- a/docs/topics/http/sessions.txt +++ b/docs/topics/http/sessions.txt @@ -122,8 +122,8 @@ and the :setting:`SECRET_KEY` setting. .. warning:: - **If the ``SECRET_KEY`` or ``SECRET_KEY_FALLBACKS`` are not kept secret and - you are using the** + **If the** ``SECRET_KEY`` **or** ``SECRET_KEY_FALLBACKS`` **are not kept + secret and you are using the** ``django.contrib.sessions.serializers.PickleSerializer``, **this can lead to arbitrary remote code execution.** diff --git a/docs/topics/i18n/formatting.txt b/docs/topics/i18n/formatting.txt index 671901dcdc..b83c31e936 100644 --- a/docs/topics/i18n/formatting.txt +++ b/docs/topics/i18n/formatting.txt @@ -13,8 +13,8 @@ When it's enabled, two users accessing the same content may see dates, times and numbers formatted in different ways, depending on the formats for their current locale. -The formatting system is disabled by default. To enable it, it's -necessary to set :setting:`USE_L10N = True ` in your settings file. +The formatting system is enabled by default. To disable it, it's +necessary to set :setting:`USE_L10N = False ` in your settings file. .. note:: diff --git a/docs/topics/install.txt b/docs/topics/install.txt index 1590da8906..bbc74bd4e3 100644 --- a/docs/topics/install.txt +++ b/docs/topics/install.txt @@ -79,8 +79,9 @@ databases with Django. 
In addition to a database backend, you'll need to make sure your Python database bindings are installed. -* If you're using PostgreSQL, you'll need the `psycopg2`_ package. Refer to the - :ref:`PostgreSQL notes ` for further details. +* If you're using PostgreSQL, you'll need the `psycopg`_ or `psycopg2`_ + package. Refer to the :ref:`PostgreSQL notes ` for further + details. * If you're using MySQL or MariaDB, you'll need a :ref:`DB API driver ` like ``mysqlclient``. See :ref:`notes for the MySQL @@ -111,6 +112,7 @@ database queries, Django will need permission to create a test database. .. _PostgreSQL: https://www.postgresql.org/ .. _MariaDB: https://mariadb.org/ .. _MySQL: https://www.mysql.com/ +.. _psycopg: https://www.psycopg.org/psycopg3/ .. _psycopg2: https://www.psycopg.org/ .. _SQLite: https://www.sqlite.org/ .. _cx_Oracle: https://oracle.github.io/python-cx_Oracle/ diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt index e67e074886..be3216c21f 100644 --- a/docs/topics/migrations.txt +++ b/docs/topics/migrations.txt @@ -561,7 +561,7 @@ the historical model and iterate over the rows:: # version than this migration expects. We use the historical version. Person = apps.get_model('yourappname', 'Person') for person in Person.objects.all(): - person.name = '%s %s' % (person.first_name, person.last_name) + person.name = f'{person.first_name} {person.last_name}' person.save() class Migration(migrations.Migration): diff --git a/docs/topics/settings.txt b/docs/topics/settings.txt index efc23c6296..5b8265660c 100644 --- a/docs/topics/settings.txt +++ b/docs/topics/settings.txt @@ -231,7 +231,7 @@ Normally, you will not need to override the defaults in this fashion. The Django defaults are sufficiently tame that you can safely use them. Be aware that if you do pass in a new default module, it entirely *replaces* the Django defaults, so you must specify a value for every possible setting that might be -used in that code you are importing. Check in +used in the code you are importing. Check in ``django.conf.settings.global_settings`` for the full list. Either ``configure()`` or :envvar:`DJANGO_SETTINGS_MODULE` is required diff --git a/docs/topics/signing.txt b/docs/topics/signing.txt index 9ae5ca3351..0e88c443cc 100644 --- a/docs/topics/signing.txt +++ b/docs/topics/signing.txt @@ -96,12 +96,12 @@ By default, the ``Signer`` class uses the :setting:`SECRET_KEY` setting to generate signatures. You can use a different secret by passing it to the ``Signer`` constructor:: - >>> signer = Signer('my-other-secret') + >>> signer = Signer(key='my-other-secret') >>> value = signer.sign('My string') >>> value 'My string:EkfQJafvGyiofrdGnuthdxImIJw' -.. class:: Signer(key=None, sep=':', salt=None, algorithm=None, fallback_keys=None) +.. class:: Signer(*, key=None, sep=':', salt=None, algorithm=None, fallback_keys=None) Returns a signer which uses ``key`` to generate signatures and ``sep`` to separate values. ``sep`` cannot be in the :rfc:`URL safe base64 alphabet @@ -115,6 +115,10 @@ generate signatures. You can use a different secret by passing it to the The ``fallback_keys`` argument was added. + .. deprecated:: 4.2 + + Support for passing positional arguments is deprecated. + Using the ``salt`` argument --------------------------- @@ -172,7 +176,7 @@ created within a specified period of time:: >>> signer.unsign(value, max_age=timedelta(seconds=20)) 'hello' -.. class:: TimestampSigner(key=None, sep=':', salt=None, algorithm='sha256') +.. 
class:: TimestampSigner(*, key=None, sep=':', salt=None, algorithm='sha256') .. method:: sign(value) @@ -195,6 +199,10 @@ created within a specified period of time:: otherwise raises ``SignatureExpired``. The ``max_age`` parameter can accept an integer or a :py:class:`datetime.timedelta` object. + .. deprecated:: 4.2 + + Support for passing positional arguments is deprecated. + .. _signing-complex-data: Protecting complex data structures diff --git a/docs/topics/testing/advanced.txt b/docs/topics/testing/advanced.txt index cdcbc44437..3b5b234481 100644 --- a/docs/topics/testing/advanced.txt +++ b/docs/topics/testing/advanced.txt @@ -32,6 +32,10 @@ restricted subset of the test client API: attributes must be supplied by the test itself if required for the view to function properly. +.. versionchanged:: 4.2 + + The ``headers`` parameter was added. + Example ------- @@ -70,6 +74,8 @@ The following is a unit test using the request factory:: AsyncRequestFactory ------------------- +.. class:: AsyncRequestFactory + ``RequestFactory`` creates WSGI-like requests. If you want to create ASGI-like requests, including having a correct ASGI ``scope``, you can instead use ``django.test.AsyncRequestFactory``. @@ -78,6 +84,13 @@ This class is directly API-compatible with ``RequestFactory``, with the only difference being that it returns ``ASGIRequest`` instances rather than ``WSGIRequest`` instances. All of its methods are still synchronous callables. +Arbitrary keyword arguments in ``defaults`` are added directly into the ASGI +scope. + +.. versionchanged:: 4.2 + + The ``headers`` parameter was added. + Testing class-based views ========================= diff --git a/docs/topics/testing/tools.txt b/docs/topics/testing/tools.txt index 27692d313d..ff34e81b8f 100644 --- a/docs/topics/testing/tools.txt +++ b/docs/topics/testing/tools.txt @@ -34,7 +34,7 @@ short: * Use Django's test client to establish that the correct template is being rendered and that the template is passed the correct context data. -* Use :class:`~django.test.RequestFactory` to test view functions directly, +* Use :class:`~django.test.RequestFactory` to test view functions directly, bypassing the routing and middleware layers. * Use in-browser frameworks like Selenium_ to test *rendered* HTML and the @@ -112,15 +112,27 @@ Making requests Use the ``django.test.Client`` class to make requests. -.. class:: Client(enforce_csrf_checks=False, raise_request_exception=True, json_encoder=DjangoJSONEncoder, **defaults) +.. class:: Client(enforce_csrf_checks=False, raise_request_exception=True, json_encoder=DjangoJSONEncoder, *, headers=None, **defaults) - It requires no arguments at time of construction. However, you can use - keyword arguments to specify some default headers. For example, this will - send a ``User-Agent`` HTTP header in each request:: + A testing HTTP client. Takes several arguments that can customize behavior. - >>> c = Client(HTTP_USER_AGENT='Mozilla/5.0') + ``headers`` allows you to specify default headers that will be sent with + every request. For example, to set a ``User-Agent`` header:: - The values from the ``extra`` keyword arguments passed to + client = Client(headers={"user-agent": "curl/7.79.1"}) + + Arbitrary keyword arguments in ``**defaults`` set WSGI + :pep:`environ variables <3333#environ-variables>`. For example, to set the + script name:: + + client = Client(SCRIPT_NAME="/app/") + + .. 
note:: + + Keyword arguments starting with a ``HTTP_`` prefix are set as headers, + but the ``headers`` parameter should be preferred for readability. + + The values from the ``headers`` and ``extra`` keyword arguments passed to :meth:`~django.test.Client.get()`, :meth:`~django.test.Client.post()`, etc. have precedence over the defaults passed to the class constructor. @@ -138,7 +150,11 @@ Use the ``django.test.Client`` class to make requests. Once you have a ``Client`` instance, you can call any of the following methods: - .. method:: Client.get(path, data=None, follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.get(path, data=None, follow=False, secure=False, *, headers=None, **extra) Makes a GET request on the provided ``path`` and returns a ``Response`` object, which is documented below. @@ -153,25 +169,23 @@ Use the ``django.test.Client`` class to make requests. /customers/details/?name=fred&age=7 - The ``extra`` keyword arguments parameter can be used to specify - headers to be sent in the request. For example:: + The ``headers`` parameter can be used to specify headers to be sent in + the request. For example:: >>> c = Client() >>> c.get('/customers/details/', {'name': 'fred', 'age': 7}, - ... HTTP_ACCEPT='application/json') + ... headers={'accept': 'application/json'}) ...will send the HTTP header ``HTTP_ACCEPT`` to the details view, which is a good way to test code paths that use the :meth:`django.http.HttpRequest.accepts()` method. - .. admonition:: CGI specification + Arbitrary keyword arguments set WSGI + :pep:`environ variables <3333#environ-variables>`. For example, headers + to set the script name:: - The headers sent via ``**extra`` should follow CGI_ specification. - For example, emulating a different "Host" header as sent in the - HTTP request from the browser to the server should be passed - as ``HTTP_HOST``. - - .. _CGI: https://www.w3.org/CGI/ + >>> c = Client() + >>> c.get("/", SCRIPT_NAME="/app/") If you already have the GET arguments in URL-encoded form, you can use that encoding instead of using the data argument. For example, @@ -197,7 +211,11 @@ Use the ``django.test.Client`` class to make requests. If you set ``secure`` to ``True`` the client will emulate an HTTPS request. - .. method:: Client.post(path, data=None, content_type=MULTIPART_CONTENT, follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.post(path, data=None, content_type=MULTIPART_CONTENT, follow=False, secure=False, *, headers=None, **extra) Makes a POST request on the provided ``path`` and returns a ``Response`` object, which is documented below. @@ -277,7 +295,8 @@ Use the ``django.test.Client`` class to make requests. such as an image, this means you will need to open the file in ``rb`` (read binary) mode. - The ``extra`` argument acts the same as for :meth:`Client.get`. + The ``headers`` and ``extra`` parameters acts the same as for + :meth:`Client.get`. If the URL you request with a POST contains encoded parameters, these parameters will be made available in the request.GET data. For example, @@ -296,14 +315,22 @@ Use the ``django.test.Client`` class to make requests. If you set ``secure`` to ``True`` the client will emulate an HTTPS request. - .. method:: Client.head(path, data=None, follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. 
method:: Client.head(path, data=None, follow=False, secure=False, *, headers=None, **extra) Makes a HEAD request on the provided ``path`` and returns a ``Response`` object. This method works just like :meth:`Client.get`, - including the ``follow``, ``secure`` and ``extra`` arguments, except - it does not return a message body. + including the ``follow``, ``secure``, ``headers``, and ``extra`` + parameters, except it does not return a message body. - .. method:: Client.options(path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.options(path, data='', content_type='application/octet-stream', follow=False, secure=False, *, headers=None, **extra) Makes an OPTIONS request on the provided ``path`` and returns a ``Response`` object. Useful for testing RESTful interfaces. @@ -311,10 +338,14 @@ Use the ``django.test.Client`` class to make requests. When ``data`` is provided, it is used as the request body, and a ``Content-Type`` header is set to ``content_type``. - The ``follow``, ``secure`` and ``extra`` arguments act the same as for - :meth:`Client.get`. + The ``follow``, ``secure``, ``headers``, and ``extra`` parameters act + the same as for :meth:`Client.get`. - .. method:: Client.put(path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.put(path, data='', content_type='application/octet-stream', follow=False, secure=False, *, headers=None, **extra) Makes a PUT request on the provided ``path`` and returns a ``Response`` object. Useful for testing RESTful interfaces. @@ -322,18 +353,26 @@ Use the ``django.test.Client`` class to make requests. When ``data`` is provided, it is used as the request body, and a ``Content-Type`` header is set to ``content_type``. - The ``follow``, ``secure`` and ``extra`` arguments act the same as for - :meth:`Client.get`. + The ``follow``, ``secure``, ``headers``, and ``extra`` parameters act + the same as for :meth:`Client.get`. - .. method:: Client.patch(path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.patch(path, data='', content_type='application/octet-stream', follow=False, secure=False, *, headers=None, **extra) Makes a PATCH request on the provided ``path`` and returns a ``Response`` object. Useful for testing RESTful interfaces. - The ``follow``, ``secure`` and ``extra`` arguments act the same as for - :meth:`Client.get`. + The ``follow``, ``secure``, ``headers``, and ``extra`` parameters act + the same as for :meth:`Client.get`. - .. method:: Client.delete(path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.delete(path, data='', content_type='application/octet-stream', follow=False, secure=False, *, headers=None, **extra) Makes a DELETE request on the provided ``path`` and returns a ``Response`` object. Useful for testing RESTful interfaces. @@ -341,20 +380,28 @@ Use the ``django.test.Client`` class to make requests. When ``data`` is provided, it is used as the request body, and a ``Content-Type`` header is set to ``content_type``. - The ``follow``, ``secure`` and ``extra`` arguments act the same as for - :meth:`Client.get`. 
+ The ``follow``, ``secure``, ``headers``, and ``extra`` parameters act + the same as for :meth:`Client.get`. - .. method:: Client.trace(path, follow=False, secure=False, **extra) + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. + + .. method:: Client.trace(path, follow=False, secure=False, *, headers=None, **extra) Makes a TRACE request on the provided ``path`` and returns a ``Response`` object. Useful for simulating diagnostic probes. Unlike the other request methods, ``data`` is not provided as a keyword - parameter in order to comply with :rfc:`7231#section-4.3.8`, which + parameter in order to comply with :rfc:`9110#section-9.3.8`, which mandates that TRACE requests must not have a body. - The ``follow``, ``secure``, and ``extra`` arguments act the same as for - :meth:`Client.get`. + The ``follow``, ``secure``, ``headers``, and ``extra`` parameters act + the same as for :meth:`Client.get`. + + .. versionchanged:: 4.2 + + The ``headers`` parameter was added. .. method:: Client.login(**credentials) @@ -969,7 +1016,7 @@ The code for this test may look as follows:: super().tearDownClass() def test_login(self): - self.selenium.get('%s%s' % (self.live_server_url, '/login/')) + self.selenium.get(f'{self.live_server_url}/login/') username_input = self.selenium.find_element(By.NAME, "username") username_input.send_keys('myuser') password_input = self.selenium.find_element(By.NAME, "password") @@ -1905,9 +1952,13 @@ If you are testing from an asynchronous function, you must also use the asynchronous test client. This is available as ``django.test.AsyncClient``, or as ``self.async_client`` on any test. +.. class:: AsyncClient(enforce_csrf_checks=False, raise_request_exception=True, *, headers=None, **defaults) + ``AsyncClient`` has the same methods and signatures as the synchronous (normal) test client, with two exceptions: +* In the initialization, arbitrary keyword arguments in ``defaults`` are added + directly into the ASGI scope. * The ``follow`` parameter is not supported. * Headers passed as ``extra`` keyword arguments should not have the ``HTTP_`` prefix required by the synchronous client (see :meth:`Client.get`). For @@ -1920,6 +1971,10 @@ test client, with two exceptions: ... ACCEPT='application/json' ... ) +.. versionchanged:: 4.2 + + The ``headers`` parameter was added. 
+ Using ``AsyncClient`` any method that makes a request must be awaited:: async def test_my_thing(self): diff --git a/js_tests/gis/mapwidget.test.js b/js_tests/gis/mapwidget.test.js index 8991738c6c..e0cc617a1e 100644 --- a/js_tests/gis/mapwidget.test.js +++ b/js_tests/gis/mapwidget.test.js @@ -33,7 +33,7 @@ QUnit.test('MapWidget.defaultCenter', function(assert) { '6.81,47.08', 'Default center at 6.81, 47.08' ); - assert.equal(widget.map.getView().getZoom(), 17); + assert.equal(Math.round(widget.map.getView().getZoom()), 17); }); QUnit.test('MapWidget.interactions', function(assert) { diff --git a/js_tests/tests.html b/js_tests/tests.html index 61bc4ac102..c3c1055c9a 100644 --- a/js_tests/tests.html +++ b/js_tests/tests.html @@ -157,7 +157,7 @@ - + diff --git a/package.json b/package.json index 2428f298f0..26dce69fe6 100644 --- a/package.json +++ b/package.json @@ -9,11 +9,11 @@ "npm": ">=1.3.0 <3.0.0" }, "devDependencies": { - "eslint": "^8.24.0", - "puppeteer": "^14.1.1", + "eslint": "^8.29.0", + "puppeteer": "^18.1.0", "grunt": "^1.5.3", "grunt-cli": "^1.4.3", - "grunt-contrib-qunit": "^6.1.0", - "qunit": "^2.19.1" + "grunt-contrib-qunit": "^6.2.1", + "qunit": "^2.19.3" } } diff --git a/setup.cfg b/setup.cfg index cc511c96e6..afef79c2ab 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,7 +39,7 @@ packages = find: include_package_data = true zip_safe = false install_requires = - asgiref >= 3.5.2 + asgiref >= 3.6.0 backports.zoneinfo; python_version<"3.9" sqlparse >= 0.2.2 tzdata; sys_platform == 'win32' diff --git a/tests/admin_docs/test_views.py b/tests/admin_docs/test_views.py index d85f401855..29bbe40310 100644 --- a/tests/admin_docs/test_views.py +++ b/tests/admin_docs/test_views.py @@ -447,6 +447,16 @@ class TestFieldType(unittest.TestCase): "Boolean (Either True or False)", ) + def test_char_fields(self): + self.assertEqual( + views.get_readable_field_data_type(fields.CharField(max_length=255)), + "String (up to 255)", + ) + self.assertEqual( + views.get_readable_field_data_type(fields.CharField()), + "String (unlimited)", + ) + def test_custom_fields(self): self.assertEqual( views.get_readable_field_data_type(CustomField()), "A custom field type" diff --git a/tests/admin_views/tests.py b/tests/admin_views/tests.py index 9796703ad1..4d1506d257 100644 --- a/tests/admin_views/tests.py +++ b/tests/admin_views/tests.py @@ -3371,8 +3371,8 @@ class AdminViewDeletedObjectsTest(TestCase): cls.ssh1 = SuperSecretHideout.objects.create( location="super floating castle!", supervillain=cls.sv1 ) - cls.cy1 = CyclicOne.objects.create(name="I am recursive", two_id=1) - cls.cy2 = CyclicTwo.objects.create(name="I am recursive too", one_id=1) + cls.cy1 = CyclicOne.objects.create(pk=1, name="I am recursive", two_id=1) + cls.cy2 = CyclicTwo.objects.create(pk=1, name="I am recursive too", one_id=1) def setUp(self): self.client.force_login(self.superuser) diff --git a/tests/aggregation/test_filter_argument.py b/tests/aggregation/test_filter_argument.py index f55e06dfa0..75835edb0b 100644 --- a/tests/aggregation/test_filter_argument.py +++ b/tests/aggregation/test_filter_argument.py @@ -139,7 +139,7 @@ class FilteredAggregateTests(TestCase): self.assertEqual(qs.get(pk__in=qs.values("pk")), self.a1) def test_filtered_aggregate_ref_annotation(self): - aggs = Author.objects.annotate(double_age=F("age") * 2,).aggregate( + aggs = Author.objects.annotate(double_age=F("age") * 2).aggregate( cnt=Count("pk", filter=Q(double_age__gt=100)), ) self.assertEqual(aggs["cnt"], 2) @@ -192,3 +192,29 @@ class 
FilteredAggregateTests(TestCase): ), ) self.assertEqual(aggregate, {"max_rating": 4.5}) + + def test_filtered_aggregate_empty_condition(self): + book = Book.objects.annotate( + authors_count=Count( + "authors", + filter=Q(authors__in=[]), + ), + ).get(pk=self.b1.pk) + self.assertEqual(book.authors_count, 0) + aggregate = Book.objects.aggregate( + max_rating=Max("rating", filter=Q(rating__in=[])) + ) + self.assertEqual(aggregate, {"max_rating": None}) + + def test_filtered_aggregate_full_condition(self): + book = Book.objects.annotate( + authors_count=Count( + "authors", + filter=~Q(authors__in=[]), + ), + ).get(pk=self.b1.pk) + self.assertEqual(book.authors_count, 2) + aggregate = Book.objects.aggregate( + max_rating=Max("rating", filter=~Q(rating__in=[])) + ) + self.assertEqual(aggregate, {"max_rating": 4.5}) diff --git a/tests/aggregation/tests.py b/tests/aggregation/tests.py index 39c8a45707..67a57c162c 100644 --- a/tests/aggregation/tests.py +++ b/tests/aggregation/tests.py @@ -34,6 +34,7 @@ from django.db.models.functions import ( Cast, Coalesce, Greatest, + Lower, Now, Pi, TruncDate, @@ -1257,11 +1258,11 @@ class AggregateTestCase(TestCase): self.assertEqual(author.sum_age, other_author.sum_age) def test_aggregate_over_aggregate(self): - msg = "Cannot compute Avg('age'): 'age' is an aggregate" + msg = "Cannot resolve keyword 'age_agg' into field." with self.assertRaisesMessage(FieldError, msg): - Author.objects.annotate(age_alias=F("age"),).aggregate( - age=Sum(F("age")), - avg_age=Avg(F("age")), + Author.objects.aggregate( + age_agg=Sum(F("age")), + avg_age=Avg(F("age_agg")), ) def test_annotated_aggregate_over_annotated_aggregate(self): @@ -2084,3 +2085,78 @@ class AggregateTestCase(TestCase): exists=Exists(Author.objects.extra(where=["1=0"])), ) self.assertEqual(len(qs), 6) + + def test_aggregation_over_annotation_shared_alias(self): + self.assertEqual( + Publisher.objects.annotate(agg=Count("book__authors")).aggregate( + agg=Count("agg"), + ), + {"agg": 5}, + ) + + +class AggregateAnnotationPruningTests(TestCase): + @classmethod + def setUpTestData(cls): + cls.a1 = Author.objects.create(age=1) + cls.a2 = Author.objects.create(age=2) + cls.p1 = Publisher.objects.create(num_awards=1) + cls.p2 = Publisher.objects.create(num_awards=0) + cls.b1 = Book.objects.create( + name="b1", + publisher=cls.p1, + pages=100, + rating=4.5, + price=10, + contact=cls.a1, + pubdate=datetime.date.today(), + ) + cls.b1.authors.add(cls.a1) + cls.b2 = Book.objects.create( + name="b2", + publisher=cls.p2, + pages=1000, + rating=3.2, + price=50, + contact=cls.a2, + pubdate=datetime.date.today(), + ) + cls.b2.authors.add(cls.a1, cls.a2) + + def test_unused_aliased_aggregate_pruned(self): + with CaptureQueriesContext(connection) as ctx: + cnt = Book.objects.alias( + authors_count=Count("authors"), + ).count() + self.assertEqual(cnt, 2) + sql = ctx.captured_queries[0]["sql"].lower() + self.assertEqual(sql.count("select"), 2, "Subquery wrapping required") + self.assertNotIn("authors_count", sql) + + def test_non_aggregate_annotation_pruned(self): + with CaptureQueriesContext(connection) as ctx: + Book.objects.annotate( + name_lower=Lower("name"), + ).count() + sql = ctx.captured_queries[0]["sql"].lower() + self.assertEqual(sql.count("select"), 1, "No subquery wrapping required") + self.assertNotIn("name_lower", sql) + + def test_unreferenced_aggregate_annotation_pruned(self): + with CaptureQueriesContext(connection) as ctx: + cnt = Book.objects.annotate( + authors_count=Count("authors"), + ).count() + 
self.assertEqual(cnt, 2) + sql = ctx.captured_queries[0]["sql"].lower() + self.assertEqual(sql.count("select"), 2, "Subquery wrapping required") + self.assertNotIn("authors_count", sql) + + def test_referenced_aggregate_annotation_kept(self): + with CaptureQueriesContext(connection) as ctx: + Book.objects.annotate( + authors_count=Count("authors"), + ).aggregate(Avg("authors_count")) + sql = ctx.captured_queries[0]["sql"].lower() + self.assertEqual(sql.count("select"), 2, "Subquery wrapping required") + self.assertEqual(sql.count("authors_count"), 2) diff --git a/tests/aggregation_regress/tests.py b/tests/aggregation_regress/tests.py index d3d06fe076..444a55276d 100644 --- a/tests/aggregation_regress/tests.py +++ b/tests/aggregation_regress/tests.py @@ -23,7 +23,7 @@ from django.db.models import ( Variance, When, ) -from django.test import TestCase, skipUnlessAnyDBFeature, skipUnlessDBFeature +from django.test import TestCase, skipUnlessDBFeature from django.test.utils import Approximate from .models import ( @@ -1420,7 +1420,7 @@ class AggregationTests(TestCase): # The query executes without problems. self.assertEqual(len(qs.exclude(publisher=-1)), 6) - @skipUnlessAnyDBFeature("allows_group_by_pk", "allows_group_by_selected_pks") + @skipUnlessDBFeature("allows_group_by_selected_pks") def test_aggregate_duplicate_columns(self): # Regression test for #17144 @@ -1448,7 +1448,7 @@ class AggregationTests(TestCase): ], ) - @skipUnlessAnyDBFeature("allows_group_by_pk", "allows_group_by_selected_pks") + @skipUnlessDBFeature("allows_group_by_selected_pks") def test_aggregate_duplicate_columns_only(self): # Works with only() too. results = Author.objects.only("id", "name").annotate( @@ -1474,18 +1474,14 @@ class AggregationTests(TestCase): ], ) - @skipUnlessAnyDBFeature("allows_group_by_pk", "allows_group_by_selected_pks") + @skipUnlessDBFeature("allows_group_by_selected_pks") def test_aggregate_duplicate_columns_select_related(self): # And select_related() results = Book.objects.select_related("contact").annotate( num_authors=Count("authors") ) _, _, grouping = results.query.get_compiler(using="default").pre_sql_setup() - # In the case of `group_by_selected_pks` we also group by contact.id - # because of the select_related. 
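# A minimal sketch (not part of the patch) of the pruning behaviour that the
# AggregateAnnotationPruningTests hunk above asserts. ``Book`` is the
# aggregation test app's model; the import path is an assumption.
from django.db.models import Avg, Count

from tests.aggregation.models import Book

# The Count("authors") annotation is never referenced, so it is dropped from
# the subquery that .count() wraps around the queryset.
Book.objects.annotate(authors_count=Count("authors")).count()

# Referencing the annotation in a later aggregate keeps it in the subquery.
Book.objects.annotate(authors_count=Count("authors")).aggregate(Avg("authors_count"))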
- self.assertEqual( - len(grouping), 1 if connection.features.allows_group_by_pk else 2 - ) + self.assertEqual(len(grouping), 2) self.assertIn("id", grouping[0][0]) self.assertNotIn("name", grouping[0][0]) self.assertNotIn("contact", grouping[0][0]) diff --git a/tests/annotations/tests.py b/tests/annotations/tests.py index 472669288c..d05af552b4 100644 --- a/tests/annotations/tests.py +++ b/tests/annotations/tests.py @@ -24,7 +24,15 @@ from django.db.models import ( When, ) from django.db.models.expressions import RawSQL -from django.db.models.functions import Coalesce, ExtractYear, Floor, Length, Lower, Trim +from django.db.models.functions import ( + Cast, + Coalesce, + ExtractYear, + Floor, + Length, + Lower, + Trim, +) from django.test import TestCase, skipUnlessDBFeature from django.test.utils import register_lookup @@ -282,6 +290,13 @@ class NonAggregateAnnotationTestCase(TestCase): self.assertEqual(len(books), Book.objects.count()) self.assertTrue(all(book.selected for book in books)) + def test_full_expression_wrapped_annotation(self): + books = Book.objects.annotate( + selected=Coalesce(~Q(pk__in=[]), True), + ) + self.assertEqual(len(books), Book.objects.count()) + self.assertTrue(all(book.selected for book in books)) + def test_full_expression_annotation_with_aggregation(self): qs = Book.objects.filter(isbn="159059725").annotate( selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()), @@ -292,7 +307,7 @@ class NonAggregateAnnotationTestCase(TestCase): def test_aggregate_over_full_expression_annotation(self): qs = Book.objects.annotate( selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()), - ).aggregate(Sum("selected")) + ).aggregate(selected__sum=Sum(Cast("selected", IntegerField()))) self.assertEqual(qs["selected__sum"], Book.objects.count()) def test_empty_queryset_annotation(self): @@ -550,21 +565,6 @@ class NonAggregateAnnotationTestCase(TestCase): for publisher in publishers.filter(pk=self.p1.pk): self.assertEqual(publisher["book__rating"], publisher["total"]) - @skipUnlessDBFeature("allows_group_by_pk") - def test_rawsql_group_by_collapse(self): - raw = RawSQL("SELECT MIN(id) FROM annotations_book", []) - qs = ( - Author.objects.values("id") - .annotate( - min_book_id=raw, - count_friends=Count("friends"), - ) - .order_by() - ) - _, _, group_by = qs.query.get_compiler(using="default").pre_sql_setup() - self.assertEqual(len(group_by), 1) - self.assertNotEqual(raw, group_by[0]) - def test_defer_annotation(self): """ Deferred attributes can be referenced by an annotation, diff --git a/tests/asgi/tests.py b/tests/asgi/tests.py index 4e51c2d9fe..61d040b45b 100644 --- a/tests/asgi/tests.py +++ b/tests/asgi/tests.py @@ -12,6 +12,7 @@ from django.db import close_old_connections from django.test import ( AsyncRequestFactory, SimpleTestCase, + ignore_warnings, modify_settings, override_settings, ) @@ -58,6 +59,13 @@ class ASGITest(SimpleTestCase): # Allow response.close() to finish. await communicator.wait() + # Python's file API is not async compatible. A third-party library such + # as https://github.com/Tinche/aiofiles allows passing the file to + # FileResponse as an async interator. With a sync iterator + # StreamingHTTPResponse triggers a warning when iterating the file. + # assertWarnsMessage is not async compatible, so ignore_warnings for the + # test. + @ignore_warnings(module="django.http.response") async def test_file_response(self): """ Makes sure that FileResponse works over ASGI. 
@@ -91,6 +99,8 @@ class ASGITest(SimpleTestCase): self.assertEqual(value, b"text/plain") else: raise + + # Warning ignored here. response_body = await communicator.receive_output() self.assertEqual(response_body["type"], "http.response.body") self.assertEqual(response_body["body"], test_file_contents) @@ -106,6 +116,7 @@ class ASGITest(SimpleTestCase): "django.contrib.staticfiles.finders.FileSystemFinder", ], ) + @ignore_warnings(module="django.http.response") async def test_static_file_response(self): application = ASGIStaticFilesHandler(get_asgi_application()) # Construct HTTP request. diff --git a/tests/async/models.py b/tests/async/models.py index 8cb051258c..a09ff79914 100644 --- a/tests/async/models.py +++ b/tests/async/models.py @@ -9,3 +9,7 @@ class RelatedModel(models.Model): class SimpleModel(models.Model): field = models.IntegerField() created = models.DateTimeField(default=timezone.now) + + +class ManyToManyModel(models.Model): + simples = models.ManyToManyField("SimpleModel") diff --git a/tests/async/test_async_related_managers.py b/tests/async/test_async_related_managers.py new file mode 100644 index 0000000000..c475b54899 --- /dev/null +++ b/tests/async/test_async_related_managers.py @@ -0,0 +1,106 @@ +from django.test import TestCase + +from .models import ManyToManyModel, RelatedModel, SimpleModel + + +class AsyncRelatedManagersOperationTest(TestCase): + @classmethod + def setUpTestData(cls): + cls.mtm1 = ManyToManyModel.objects.create() + cls.s1 = SimpleModel.objects.create(field=0) + cls.mtm2 = ManyToManyModel.objects.create() + cls.mtm2.simples.set([cls.s1]) + + async def test_acreate(self): + await self.mtm1.simples.acreate(field=2) + new_simple = await self.mtm1.simples.aget() + self.assertEqual(new_simple.field, 2) + + async def test_acreate_reverse(self): + await self.s1.relatedmodel_set.acreate() + new_relatedmodel = await self.s1.relatedmodel_set.aget() + self.assertEqual(new_relatedmodel.simple, self.s1) + + async def test_aget_or_create(self): + new_simple, created = await self.mtm1.simples.aget_or_create(field=2) + self.assertIs(created, True) + self.assertEqual(await self.mtm1.simples.acount(), 1) + self.assertEqual(new_simple.field, 2) + new_simple, created = await self.mtm1.simples.aget_or_create( + id=new_simple.id, through_defaults={"field": 3} + ) + self.assertIs(created, False) + self.assertEqual(await self.mtm1.simples.acount(), 1) + self.assertEqual(new_simple.field, 2) + + async def test_aget_or_create_reverse(self): + new_relatedmodel, created = await self.s1.relatedmodel_set.aget_or_create() + self.assertIs(created, True) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 1) + self.assertEqual(new_relatedmodel.simple, self.s1) + + async def test_aupdate_or_create(self): + new_simple, created = await self.mtm1.simples.aupdate_or_create(field=2) + self.assertIs(created, True) + self.assertEqual(await self.mtm1.simples.acount(), 1) + self.assertEqual(new_simple.field, 2) + new_simple, created = await self.mtm1.simples.aupdate_or_create( + id=new_simple.id, defaults={"field": 3} + ) + self.assertIs(created, False) + self.assertEqual(await self.mtm1.simples.acount(), 1) + self.assertEqual(new_simple.field, 3) + + async def test_aupdate_or_create_reverse(self): + new_relatedmodel, created = await self.s1.relatedmodel_set.aupdate_or_create() + self.assertIs(created, True) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 1) + self.assertEqual(new_relatedmodel.simple, self.s1) + + async def test_aadd(self): + await 
self.mtm1.simples.aadd(self.s1) + self.assertEqual(await self.mtm1.simples.aget(), self.s1) + + async def test_aadd_reverse(self): + r1 = await RelatedModel.objects.acreate() + await self.s1.relatedmodel_set.aadd(r1, bulk=False) + self.assertEqual(await self.s1.relatedmodel_set.aget(), r1) + + async def test_aremove(self): + self.assertEqual(await self.mtm2.simples.acount(), 1) + await self.mtm2.simples.aremove(self.s1) + self.assertEqual(await self.mtm2.simples.acount(), 0) + + async def test_aremove_reverse(self): + r1 = await RelatedModel.objects.acreate(simple=self.s1) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 1) + await self.s1.relatedmodel_set.aremove(r1) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 0) + + async def test_aset(self): + await self.mtm1.simples.aset([self.s1]) + self.assertEqual(await self.mtm1.simples.aget(), self.s1) + await self.mtm1.simples.aset([]) + self.assertEqual(await self.mtm1.simples.acount(), 0) + await self.mtm1.simples.aset([self.s1], clear=True) + self.assertEqual(await self.mtm1.simples.aget(), self.s1) + + async def test_aset_reverse(self): + r1 = await RelatedModel.objects.acreate() + await self.s1.relatedmodel_set.aset([r1]) + self.assertEqual(await self.s1.relatedmodel_set.aget(), r1) + await self.s1.relatedmodel_set.aset([]) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 0) + await self.s1.relatedmodel_set.aset([r1], bulk=False, clear=True) + self.assertEqual(await self.s1.relatedmodel_set.aget(), r1) + + async def test_aclear(self): + self.assertEqual(await self.mtm2.simples.acount(), 1) + await self.mtm2.simples.aclear() + self.assertEqual(await self.mtm2.simples.acount(), 0) + + async def test_aclear_reverse(self): + await RelatedModel.objects.acreate(simple=self.s1) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 1) + await self.s1.relatedmodel_set.aclear(bulk=False) + self.assertEqual(await self.s1.relatedmodel_set.acount(), 0) diff --git a/tests/async/tests.py b/tests/async/tests.py index 559f21b8b1..6ca5c989b0 100644 --- a/tests/async/tests.py +++ b/tests/async/tests.py @@ -2,7 +2,7 @@ import asyncio import os from unittest import mock -from asgiref.sync import async_to_sync +from asgiref.sync import async_to_sync, iscoroutinefunction from django.core.cache import DEFAULT_CACHE_ALIAS, caches from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation @@ -84,7 +84,7 @@ class ViewTests(SimpleTestCase): with self.subTest(view_cls=view_cls, is_async=is_async): self.assertIs(view_cls.view_is_async, is_async) callback = view_cls.as_view() - self.assertIs(asyncio.iscoroutinefunction(callback), is_async) + self.assertIs(iscoroutinefunction(callback), is_async) def test_mixed_views_raise_error(self): class MixedView(View): diff --git a/tests/auth_tests/test_forms.py b/tests/auth_tests/test_forms.py index 78078316e8..c3ce1f570f 100644 --- a/tests/auth_tests/test_forms.py +++ b/tests/auth_tests/test_forms.py @@ -6,6 +6,7 @@ from unittest import mock from django.contrib.auth.forms import ( AdminPasswordChangeForm, AuthenticationForm, + BaseUserCreationForm, PasswordChangeForm, PasswordResetForm, ReadOnlyPasswordHashField, @@ -35,6 +36,7 @@ from .models.custom_user import ( ) from .models.with_custom_email_field import CustomEmailField from .models.with_integer_username import IntegerUsernameUser +from .models.with_many_to_many import CustomUserWithM2M, Organization from .settings import AUTH_TEMPLATES @@ -53,14 +55,14 @@ class TestDataMixin: cls.u6 = 
User.objects.create(username="unknown_password", password="foo$bar") -class UserCreationFormTest(TestDataMixin, TestCase): +class BaseUserCreationFormTest(TestDataMixin, TestCase): def test_user_already_exists(self): data = { "username": "testclient", "password1": "test123", "password2": "test123", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual( form["username"].errors, @@ -73,7 +75,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "test123", "password2": "test123", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertFalse(form.is_valid()) validator = next( v @@ -89,7 +91,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "test123", "password2": "test", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual( form["password2"].errors, [str(form.error_messages["password_mismatch"])] @@ -98,7 +100,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): def test_both_passwords(self): # One (or both) passwords weren't given data = {"username": "jsmith"} - form = UserCreationForm(data) + form = BaseUserCreationForm(data) required_error = [str(Field.default_error_messages["required"])] self.assertFalse(form.is_valid()) self.assertEqual(form["password1"].errors, required_error) @@ -118,7 +120,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "test123", "password2": "test123", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) @@ -132,7 +134,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "test123", "password2": "test123", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertTrue(form.is_valid()) u = form.save() self.assertEqual(u.username, "宝") @@ -146,7 +148,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "pwd2", "password2": "pwd2", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertTrue(form.is_valid()) user = form.save() self.assertNotEqual(user.username, ohm_username) @@ -167,7 +169,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "pwd2", "password2": "pwd2", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual( form.errors["username"], ["A user with that username already exists."] @@ -197,7 +199,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): "password1": "testclient", "password2": "testclient", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(len(form["password2"].errors), 2) self.assertIn( @@ -209,8 +211,8 @@ class UserCreationFormTest(TestDataMixin, TestCase): ) def test_custom_form(self): - class CustomUserCreationForm(UserCreationForm): - class Meta(UserCreationForm.Meta): + class CustomUserCreationForm(BaseUserCreationForm): + class Meta(BaseUserCreationForm.Meta): model = ExtensionUser fields = UserCreationForm.Meta.fields + ("date_of_birth",) @@ -224,8 +226,8 @@ class UserCreationFormTest(TestDataMixin, TestCase): self.assertTrue(form.is_valid()) def test_custom_form_with_different_username_field(self): - class CustomUserCreationForm(UserCreationForm): - class Meta(UserCreationForm.Meta): + class CustomUserCreationForm(BaseUserCreationForm): + class 
Meta(BaseUserCreationForm.Meta): model = CustomUser fields = ("email", "date_of_birth") @@ -239,8 +241,8 @@ class UserCreationFormTest(TestDataMixin, TestCase): self.assertTrue(form.is_valid()) def test_custom_form_hidden_username_field(self): - class CustomUserCreationForm(UserCreationForm): - class Meta(UserCreationForm.Meta): + class CustomUserCreationForm(BaseUserCreationForm): + class Meta(BaseUserCreationForm.Meta): model = CustomUserWithoutIsActiveField fields = ("email",) # without USERNAME_FIELD @@ -252,13 +254,32 @@ class UserCreationFormTest(TestDataMixin, TestCase): form = CustomUserCreationForm(data) self.assertTrue(form.is_valid()) + def test_custom_form_saves_many_to_many_field(self): + class CustomUserCreationForm(BaseUserCreationForm): + class Meta(BaseUserCreationForm.Meta): + model = CustomUserWithM2M + fields = UserCreationForm.Meta.fields + ("orgs",) + + organization = Organization.objects.create(name="organization 1") + + data = { + "username": "testclient@example.com", + "password1": "testclient", + "password2": "testclient", + "orgs": [str(organization.pk)], + } + form = CustomUserCreationForm(data) + self.assertIs(form.is_valid(), True) + user = form.save(commit=True) + self.assertSequenceEqual(user.orgs.all(), [organization]) + def test_password_whitespace_not_stripped(self): data = { "username": "testuser", "password1": " testpassword ", "password2": " testpassword ", } - form = UserCreationForm(data) + form = BaseUserCreationForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data["password1"], data["password1"]) self.assertEqual(form.cleaned_data["password2"], data["password2"]) @@ -274,7 +295,7 @@ class UserCreationFormTest(TestDataMixin, TestCase): ] ) def test_password_help_text(self): - form = UserCreationForm() + form = BaseUserCreationForm() self.assertEqual( form.fields["password1"].help_text, "
    • " @@ -293,10 +314,12 @@ class UserCreationFormTest(TestDataMixin, TestCase): ] ) def test_user_create_form_validates_password_with_all_data(self): - """UserCreationForm password validation uses all of the form's data.""" + """ + BaseUserCreationForm password validation uses all of the form's data. + """ - class CustomUserCreationForm(UserCreationForm): - class Meta(UserCreationForm.Meta): + class CustomUserCreationForm(BaseUserCreationForm): + class Meta(BaseUserCreationForm.Meta): model = User fields = ("username", "email", "first_name", "last_name") @@ -316,13 +339,13 @@ class UserCreationFormTest(TestDataMixin, TestCase): ) def test_username_field_autocapitalize_none(self): - form = UserCreationForm() + form = BaseUserCreationForm() self.assertEqual( form.fields["username"].widget.attrs.get("autocapitalize"), "none" ) def test_html_autocomplete_attributes(self): - form = UserCreationForm() + form = BaseUserCreationForm() tests = ( ("username", "username"), ("password1", "new-password"), @@ -335,6 +358,21 @@ class UserCreationFormTest(TestDataMixin, TestCase): ) +class UserCreationFormTest(TestDataMixin, TestCase): + def test_case_insensitive_username(self): + data = { + "username": "TeStClIeNt", + "password1": "test123", + "password2": "test123", + } + form = UserCreationForm(data) + self.assertFalse(form.is_valid()) + self.assertEqual( + form["username"].errors, + ["A user with that username already exists."], + ) + + # To verify that the login form rejects inactive users, use an authentication # backend that allows them. @override_settings( diff --git a/tests/auth_tests/test_management.py b/tests/auth_tests/test_management.py index f567fd0dc1..7e0a301238 100644 --- a/tests/auth_tests/test_management.py +++ b/tests/auth_tests/test_management.py @@ -1485,3 +1485,22 @@ class CreatePermissionsTests(TestCase): codename=codename, ).exists() ) + + +class DefaultDBRouter: + """Route all writes to default.""" + + def db_for_write(self, model, **hints): + return "default" + + +@override_settings(DATABASE_ROUTERS=[DefaultDBRouter()]) +class CreatePermissionsMultipleDatabasesTests(TestCase): + databases = {"default", "other"} + + def test_set_permissions_fk_to_using_parameter(self): + Permission.objects.using("other").delete() + with self.assertNumQueries(6, using="other") as captured_queries: + create_permissions(apps.get_app_config("auth"), verbosity=0, using="other") + self.assertIn("INSERT INTO", captured_queries[-1]["sql"].upper()) + self.assertGreater(Permission.objects.using("other").count(), 0) diff --git a/tests/auth_tests/test_models.py b/tests/auth_tests/test_models.py index 01bb7981a4..fe1afcbdc3 100644 --- a/tests/auth_tests/test_models.py +++ b/tests/auth_tests/test_models.py @@ -122,8 +122,8 @@ class UserManagerTestCase(TransactionTestCase): self.assertFalse(user.has_usable_password()) def test_create_user_email_domain_normalize_rfc3696(self): - # According to https://tools.ietf.org/html/rfc3696#section-3 - # the "@" symbol can be part of the local part of an email address + # According to RFC 3696 Section 3 the "@" symbol can be part of the + # local part of an email address. 
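# A minimal sketch (not part of the patch) of the form split exercised in the
# auth form tests above: BaseUserCreationForm is the new extension point for
# custom user models, while UserCreationForm layers the case-insensitive
# username check on top. ProjectUserCreationForm and its field list are
# assumptions, not names from the patch.
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import BaseUserCreationForm


class ProjectUserCreationForm(BaseUserCreationForm):
    class Meta(BaseUserCreationForm.Meta):
        model = get_user_model()
        fields = ("username", "email")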
returned = UserManager.normalize_email(r"Abc\@DEF@EXAMPLE.com") self.assertEqual(returned, r"Abc\@DEF@example.com") diff --git a/tests/backends/base/test_base.py b/tests/backends/base/test_base.py index 57d22ce269..ada2cc33c9 100644 --- a/tests/backends/base/test_base.py +++ b/tests/backends/base/test_base.py @@ -1,10 +1,16 @@ from unittest.mock import MagicMock, patch -from django.db import DEFAULT_DB_ALIAS, connection, connections +from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction from django.db.backends.base.base import BaseDatabaseWrapper -from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature +from django.test import ( + SimpleTestCase, + TestCase, + TransactionTestCase, + skipUnlessDBFeature, +) +from django.test.utils import CaptureQueriesContext, override_settings -from ..models import Square +from ..models import Person, Square class DatabaseWrapperTests(SimpleTestCase): @@ -55,6 +61,57 @@ class DatabaseWrapperTests(SimpleTestCase): connection.check_database_version_supported() +class DatabaseWrapperLoggingTests(TransactionTestCase): + available_apps = [] + + @override_settings(DEBUG=True) + def test_commit_debug_log(self): + conn = connections[DEFAULT_DB_ALIAS] + with CaptureQueriesContext(conn): + with self.assertLogs("django.db.backends", "DEBUG") as cm: + with transaction.atomic(): + Person.objects.create(first_name="first", last_name="last") + + self.assertGreaterEqual(len(conn.queries_log), 3) + self.assertEqual(conn.queries_log[-3]["sql"], "BEGIN") + self.assertRegex( + cm.output[0], + r"DEBUG:django.db.backends:\(\d+.\d{3}\) " + rf"BEGIN; args=None; alias={DEFAULT_DB_ALIAS}", + ) + self.assertEqual(conn.queries_log[-1]["sql"], "COMMIT") + self.assertRegex( + cm.output[-1], + r"DEBUG:django.db.backends:\(\d+.\d{3}\) " + rf"COMMIT; args=None; alias={DEFAULT_DB_ALIAS}", + ) + + @override_settings(DEBUG=True) + def test_rollback_debug_log(self): + conn = connections[DEFAULT_DB_ALIAS] + with CaptureQueriesContext(conn): + with self.assertLogs("django.db.backends", "DEBUG") as cm: + with self.assertRaises(Exception), transaction.atomic(): + Person.objects.create(first_name="first", last_name="last") + raise Exception("Force rollback") + + self.assertEqual(conn.queries_log[-1]["sql"], "ROLLBACK") + self.assertRegex( + cm.output[-1], + r"DEBUG:django.db.backends:\(\d+.\d{3}\) " + rf"ROLLBACK; args=None; alias={DEFAULT_DB_ALIAS}", + ) + + def test_no_logs_without_debug(self): + with self.assertNoLogs("django.db.backends", "DEBUG"): + with self.assertRaises(Exception), transaction.atomic(): + Person.objects.create(first_name="first", last_name="last") + raise Exception("Force rollback") + + conn = connections[DEFAULT_DB_ALIAS] + self.assertEqual(len(conn.queries_log), 0) + + class ExecuteWrapperTests(TestCase): @staticmethod def call_execute(connection, params=None): diff --git a/tests/backends/postgresql/test_creation.py b/tests/backends/postgresql/test_creation.py index 319029334d..bc1e7eb1d8 100644 --- a/tests/backends/postgresql/test_creation.py +++ b/tests/backends/postgresql/test_creation.py @@ -9,12 +9,10 @@ from django.db.backends.base.creation import BaseDatabaseCreation from django.test import SimpleTestCase try: - import psycopg2 # NOQA + from django.db.backends.postgresql.psycopg_any import errors except ImportError: pass else: - from psycopg2 import errorcodes - from django.db.backends.postgresql.creation import DatabaseCreation @@ -73,13 +71,13 @@ class DatabaseCreationTests(SimpleTestCase): 
self.check_sql_table_creation_suffix(settings, None) def _execute_raise_database_already_exists(self, cursor, parameters, keepdb=False): - error = DatabaseError("database %s already exists" % parameters["dbname"]) - error.pgcode = errorcodes.DUPLICATE_DATABASE + error = errors.DuplicateDatabase( + "database %s already exists" % parameters["dbname"] + ) raise DatabaseError() from error def _execute_raise_permission_denied(self, cursor, parameters, keepdb=False): - error = DatabaseError("permission denied to create database") - error.pgcode = errorcodes.INSUFFICIENT_PRIVILEGE + error = errors.InsufficientPrivilege("permission denied to create database") raise DatabaseError() from error def patch_test_db_creation(self, execute_create_test_db): diff --git a/tests/backends/postgresql/tests.py b/tests/backends/postgresql/tests.py index e28acef2c0..7e1a2d000d 100644 --- a/tests/backends/postgresql/tests.py +++ b/tests/backends/postgresql/tests.py @@ -14,6 +14,11 @@ from django.db import ( from django.db.backends.base.base import BaseDatabaseWrapper from django.test import TestCase, override_settings +try: + from django.db.backends.postgresql.psycopg_any import is_psycopg3 +except ImportError: + is_psycopg3 = False + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL tests") class Tests(TestCase): @@ -164,7 +169,7 @@ class Tests(TestCase): settings["NAME"] = None settings["OPTIONS"] = {"service": "django_test"} params = DatabaseWrapper(settings).get_connection_params() - self.assertEqual(params["database"], "postgres") + self.assertEqual(params["dbname"], "postgres") self.assertNotIn("service", params) def test_connect_and_rollback(self): @@ -223,24 +228,40 @@ class Tests(TestCase): The transaction level can be configured with DATABASES ['OPTIONS']['isolation_level']. """ - from psycopg2.extensions import ISOLATION_LEVEL_SERIALIZABLE as serializable + from django.db.backends.postgresql.psycopg_any import IsolationLevel # Since this is a django.test.TestCase, a transaction is in progress # and the isolation level isn't reported as 0. This test assumes that # PostgreSQL is configured with the default isolation level. - # Check the level on the psycopg2 connection, not the Django wrapper. + # Check the level on the psycopg connection, not the Django wrapper. self.assertIsNone(connection.connection.isolation_level) new_connection = connection.copy() - new_connection.settings_dict["OPTIONS"]["isolation_level"] = serializable + new_connection.settings_dict["OPTIONS"][ + "isolation_level" + ] = IsolationLevel.SERIALIZABLE try: # Start a transaction so the isolation level isn't reported as 0. new_connection.set_autocommit(False) - # Check the level on the psycopg2 connection, not the Django wrapper. - self.assertEqual(new_connection.connection.isolation_level, serializable) + # Check the level on the psycopg connection, not the Django wrapper. + self.assertEqual( + new_connection.connection.isolation_level, + IsolationLevel.SERIALIZABLE, + ) finally: new_connection.close() + def test_connect_invalid_isolation_level(self): + self.assertIsNone(connection.connection.isolation_level) + new_connection = connection.copy() + new_connection.settings_dict["OPTIONS"]["isolation_level"] = -1 + msg = ( + "Invalid transaction isolation level -1 specified. Use one of the " + "psycopg.IsolationLevel values." 
+ ) + with self.assertRaisesMessage(ImproperlyConfigured, msg): + new_connection.ensure_connection() + def test_connect_no_is_usable_checks(self): new_connection = connection.copy() try: @@ -252,7 +273,7 @@ class Tests(TestCase): def _select(self, val): with connection.cursor() as cursor: - cursor.execute("SELECT %s", (val,)) + cursor.execute("SELECT %s::text[]", (val,)) return cursor.fetchone()[0] def test_select_ascii_array(self): @@ -291,16 +312,19 @@ class Tests(TestCase): "::citext", do.lookup_cast(lookup, internal_type=field_type) ) - def test_correct_extraction_psycopg2_version(self): - from django.db.backends.postgresql.base import psycopg2_version + def test_correct_extraction_psycopg_version(self): + from django.db.backends.postgresql.base import Database, psycopg_version - with mock.patch("psycopg2.__version__", "4.2.1 (dt dec pq3 ext lo64)"): - self.assertEqual(psycopg2_version(), (4, 2, 1)) - with mock.patch("psycopg2.__version__", "4.2b0.dev1 (dt dec pq3 ext lo64)"): - self.assertEqual(psycopg2_version(), (4, 2)) + with mock.patch.object(Database, "__version__", "4.2.1 (dt dec pq3 ext lo64)"): + self.assertEqual(psycopg_version(), (4, 2, 1)) + with mock.patch.object( + Database, "__version__", "4.2b0.dev1 (dt dec pq3 ext lo64)" + ): + self.assertEqual(psycopg_version(), (4, 2)) @override_settings(DEBUG=True) - def test_copy_cursors(self): + @unittest.skipIf(is_psycopg3, "psycopg2 specific test") + def test_copy_to_expert_cursors(self): out = StringIO() copy_expert_sql = "COPY django_session TO STDOUT (FORMAT CSV, HEADER)" with connection.cursor() as cursor: @@ -311,6 +335,16 @@ class Tests(TestCase): [copy_expert_sql, "COPY django_session TO STDOUT"], ) + @override_settings(DEBUG=True) + @unittest.skipUnless(is_psycopg3, "psycopg3 specific test") + def test_copy_cursors(self): + copy_sql = "COPY django_session TO STDOUT (FORMAT CSV, HEADER)" + with connection.cursor() as cursor: + with cursor.copy(copy_sql) as copy: + for row in copy: + pass + self.assertEqual([q["sql"] for q in connection.queries], [copy_sql]) + def test_get_database_version(self): new_connection = connection.copy() new_connection.pg_version = 110009 diff --git a/tests/backends/sqlite/tests.py b/tests/backends/sqlite/tests.py index 0669d26191..c83cde60dd 100644 --- a/tests/backends/sqlite/tests.py +++ b/tests/backends/sqlite/tests.py @@ -106,9 +106,9 @@ class Tests(TestCase): connections["default"].close() self.assertTrue(os.path.isfile(os.path.join(tmp, "test.db"))) - @mock.patch.object(connection, "get_database_version", return_value=(3, 8)) + @mock.patch.object(connection, "get_database_version", return_value=(3, 20)) def test_check_database_version_supported(self, mocked_get_database_version): - msg = "SQLite 3.9 or later is required (found 3.8)." + msg = "SQLite 3.21 or later is required (found 3.20)." 
with self.assertRaisesMessage(NotSupportedError, msg): connection.check_database_version_supported() self.assertTrue(mocked_get_database_version.called) diff --git a/tests/backends/tests.py b/tests/backends/tests.py index c6980058ca..5f11f91958 100644 --- a/tests/backends/tests.py +++ b/tests/backends/tests.py @@ -142,6 +142,23 @@ class LastExecutedQueryTest(TestCase): sql % params, ) + def test_last_executed_query_with_duplicate_params(self): + square_opts = Square._meta + table = connection.introspection.identifier_converter(square_opts.db_table) + id_column = connection.ops.quote_name(square_opts.get_field("id").column) + root_column = connection.ops.quote_name(square_opts.get_field("root").column) + sql = f"UPDATE {table} SET {root_column} = %s + %s WHERE {id_column} = %s" + with connection.cursor() as cursor: + params = [42, 42, 1] + cursor.execute(sql, params) + last_executed_query = connection.ops.last_executed_query( + cursor, sql, params + ) + self.assertEqual( + last_executed_query, + f"UPDATE {table} SET {root_column} = 42 + 42 WHERE {id_column} = 1", + ) + class ParameterHandlingTest(TestCase): def test_bad_parameter_count(self): @@ -437,7 +454,7 @@ class BackendTestCase(TransactionTestCase): with connection.cursor() as cursor: self.assertIsInstance(cursor, CursorWrapper) # Both InterfaceError and ProgrammingError seem to be used when - # accessing closed cursor (psycopg2 has InterfaceError, rest seem + # accessing closed cursor (psycopg has InterfaceError, rest seem # to use ProgrammingError). with self.assertRaises(connection.features.closed_cursor_error_class): # cursor should be closed, so no queries should be possible. @@ -445,12 +462,12 @@ class BackendTestCase(TransactionTestCase): @unittest.skipUnless( connection.vendor == "postgresql", - "Psycopg2 specific cursor.closed attribute needed", + "Psycopg specific cursor.closed attribute needed", ) def test_cursor_contextmanager_closing(self): # There isn't a generic way to test that cursors are closed, but - # psycopg2 offers us a way to check that by closed attribute. - # So, run only on psycopg2 for that reason. + # psycopg offers us a way to check that by closed attribute. + # So, run only on psycopg for that reason. 
with connection.cursor() as cursor: self.assertIsInstance(cursor, CursorWrapper) self.assertTrue(cursor.closed) diff --git a/tests/bulk_create/models.py b/tests/bulk_create/models.py index 27abc416bd..8a21c7dfa1 100644 --- a/tests/bulk_create/models.py +++ b/tests/bulk_create/models.py @@ -69,6 +69,11 @@ class TwoFields(models.Model): name = models.CharField(max_length=15, null=True) +class FieldsWithDbColumns(models.Model): + rank = models.IntegerField(unique=True, db_column="rAnK") + name = models.CharField(max_length=15, null=True, db_column="oTheRNaMe") + + class UpsertConflict(models.Model): number = models.IntegerField(unique=True) rank = models.IntegerField() diff --git a/tests/bulk_create/tests.py b/tests/bulk_create/tests.py index bc2900110d..a5050c9b0b 100644 --- a/tests/bulk_create/tests.py +++ b/tests/bulk_create/tests.py @@ -21,6 +21,7 @@ from django.test import ( from .models import ( BigAutoFieldModel, Country, + FieldsWithDbColumns, NoFields, NullableFields, Pizzeria, @@ -595,6 +596,39 @@ class BulkCreateTests(TestCase): def test_update_conflicts_two_fields_unique_fields_second(self): self._test_update_conflicts_two_fields(["f2"]) + @skipUnlessDBFeature( + "supports_update_conflicts", "supports_update_conflicts_with_target" + ) + def test_update_conflicts_unique_fields_pk(self): + TwoFields.objects.bulk_create( + [ + TwoFields(f1=1, f2=1, name="a"), + TwoFields(f1=2, f2=2, name="b"), + ] + ) + self.assertEqual(TwoFields.objects.count(), 2) + + obj1 = TwoFields.objects.get(f1=1) + obj2 = TwoFields.objects.get(f1=2) + conflicting_objects = [ + TwoFields(pk=obj1.pk, f1=3, f2=3, name="c"), + TwoFields(pk=obj2.pk, f1=4, f2=4, name="d"), + ] + TwoFields.objects.bulk_create( + conflicting_objects, + update_conflicts=True, + unique_fields=["pk"], + update_fields=["name"], + ) + self.assertEqual(TwoFields.objects.count(), 2) + self.assertCountEqual( + TwoFields.objects.values("f1", "f2", "name"), + [ + {"f1": 1, "f2": 1, "name": "c"}, + {"f1": 2, "f2": 2, "name": "d"}, + ], + ) + @skipUnlessDBFeature( "supports_update_conflicts", "supports_update_conflicts_with_target" ) @@ -739,3 +773,34 @@ class BulkCreateTests(TestCase): @skipIfDBFeature("supports_update_conflicts_with_target") def test_update_conflicts_no_unique_fields(self): self._test_update_conflicts([]) + + @skipUnlessDBFeature( + "supports_update_conflicts", "supports_update_conflicts_with_target" + ) + def test_update_conflicts_unique_fields_update_fields_db_column(self): + FieldsWithDbColumns.objects.bulk_create( + [ + FieldsWithDbColumns(rank=1, name="a"), + FieldsWithDbColumns(rank=2, name="b"), + ] + ) + self.assertEqual(FieldsWithDbColumns.objects.count(), 2) + + conflicting_objects = [ + FieldsWithDbColumns(rank=1, name="c"), + FieldsWithDbColumns(rank=2, name="d"), + ] + FieldsWithDbColumns.objects.bulk_create( + conflicting_objects, + update_conflicts=True, + unique_fields=["rank"], + update_fields=["name"], + ) + self.assertEqual(FieldsWithDbColumns.objects.count(), 2) + self.assertCountEqual( + FieldsWithDbColumns.objects.values("rank", "name"), + [ + {"rank": 1, "name": "c"}, + {"rank": 2, "name": "d"}, + ], + ) diff --git a/tests/cache/tests.py b/tests/cache/tests.py index 937a55acc5..e280d3780b 100644 --- a/tests/cache/tests.py +++ b/tests/cache/tests.py @@ -1762,6 +1762,12 @@ class FileBasedCacheTests(BaseCacheTests, TestCase): with open(cache_file, "rb") as fh: self.assertIs(cache._is_expired(fh), True) + def test_has_key_race_handling(self): + self.assertIs(cache.add("key", "value"), True) + with 
mock.patch("builtins.open", side_effect=FileNotFoundError) as mocked_open: + self.assertIs(cache.has_key("key"), False) + mocked_open.assert_called_once() + @unittest.skipUnless(RedisCache_params, "Redis backend not configured") @override_settings( @@ -1781,6 +1787,14 @@ class RedisCacheTests(BaseCacheTests, TestCase): def incr_decr_type_error(self): return self.lib.ResponseError + def test_incr_write_connection(self): + cache.set("number", 42) + with mock.patch( + "django.core.cache.backends.redis.RedisCacheClient.get_client" + ) as mocked_get_client: + cache.incr("number") + self.assertEqual(mocked_get_client.call_args.kwargs, {"write": True}) + def test_cache_client_class(self): self.assertIs(cache._class, RedisCacheClient) self.assertIsInstance(cache._cache, RedisCacheClient) diff --git a/tests/conditional_processing/tests.py b/tests/conditional_processing/tests.py index 93f96546aa..67007b5d66 100644 --- a/tests/conditional_processing/tests.py +++ b/tests/conditional_processing/tests.py @@ -143,7 +143,7 @@ class ConditionalGet(SimpleTestCase): self.assertEqual(response.status_code, 412) def test_both_headers(self): - # see https://tools.ietf.org/html/rfc7232#section-6 + # See RFC 9110 Section 13.2.2. self.client.defaults["HTTP_IF_MODIFIED_SINCE"] = LAST_MODIFIED_STR self.client.defaults["HTTP_IF_NONE_MATCH"] = ETAG response = self.client.get("/condition/") diff --git a/tests/contenttypes_tests/test_fields.py b/tests/contenttypes_tests/test_fields.py index 170b38d018..418669140b 100644 --- a/tests/contenttypes_tests/test_fields.py +++ b/tests/contenttypes_tests/test_fields.py @@ -43,6 +43,14 @@ class GenericForeignKeyTests(TestCase): self.assertIsNone(post.parent) self.assertIsNone(post.parent) + def test_clear_cached_generic_relation(self): + question = Question.objects.create(text="What is your name?") + answer = Answer.objects.create(text="Answer", question=question) + old_entity = answer.question + answer.refresh_from_db() + new_entity = answer.question + self.assertIsNot(old_entity, new_entity) + class GenericRelationTests(TestCase): def test_value_to_string(self): diff --git a/tests/db_functions/datetime/test_extract_trunc.py b/tests/db_functions/datetime/test_extract_trunc.py index 2d38234981..b2327931f0 100644 --- a/tests/db_functions/datetime/test_extract_trunc.py +++ b/tests/db_functions/datetime/test_extract_trunc.py @@ -245,7 +245,7 @@ class DateFunctionTests(TestCase): self.create_model(start_datetime, end_datetime) self.create_model(end_datetime, start_datetime) - with self.assertRaises((DataError, OperationalError, ValueError)): + with self.assertRaises((OperationalError, ValueError)): DTModel.objects.filter( start_datetime__year=Extract( "start_datetime", "day' FROM start_datetime)) OR 1=1;--" diff --git a/tests/db_utils/tests.py b/tests/db_utils/tests.py index 9c0ec905cc..a2d9cc7b5e 100644 --- a/tests/db_utils/tests.py +++ b/tests/db_utils/tests.py @@ -62,14 +62,20 @@ class ConnectionHandlerTests(SimpleTestCase): class DatabaseErrorWrapperTests(TestCase): @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL test") def test_reraising_backend_specific_database_exception(self): + from django.db.backends.postgresql.psycopg_any import is_psycopg3 + with connection.cursor() as cursor: msg = 'table "X" does not exist' with self.assertRaisesMessage(ProgrammingError, msg) as cm: cursor.execute('DROP TABLE "X"') self.assertNotEqual(type(cm.exception), type(cm.exception.__cause__)) self.assertIsNotNone(cm.exception.__cause__) - 
self.assertIsNotNone(cm.exception.__cause__.pgcode) - self.assertIsNotNone(cm.exception.__cause__.pgerror) + if is_psycopg3: + self.assertIsNotNone(cm.exception.__cause__.diag.sqlstate) + self.assertIsNotNone(cm.exception.__cause__.diag.message_primary) + else: + self.assertIsNotNone(cm.exception.__cause__.pgcode) + self.assertIsNotNone(cm.exception.__cause__.pgerror) class LoadBackendTests(SimpleTestCase): diff --git a/tests/delete/tests.py b/tests/delete/tests.py index d03492bb6f..01228631f4 100644 --- a/tests/delete/tests.py +++ b/tests/delete/tests.py @@ -1,7 +1,7 @@ from math import ceil from django.db import connection, models -from django.db.models import ProtectedError, RestrictedError +from django.db.models import ProtectedError, Q, RestrictedError from django.db.models.deletion import Collector from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature @@ -397,18 +397,35 @@ class DeletionTests(TestCase): models.signals.post_delete.connect(log_post_delete) models.signals.pre_delete.connect(log_pre_delete) - r = R.objects.create(pk=1) - s1 = S.objects.create(pk=1, r=r) - s2 = S.objects.create(pk=2, r=r) - T.objects.create(pk=1, s=s1) - T.objects.create(pk=2, s=s2) - RChild.objects.create(r_ptr=r) + r = R.objects.create() + s1 = S.objects.create(r=r) + s2 = S.objects.create(r=r) + t1 = T.objects.create(s=s1) + t2 = T.objects.create(s=s2) + rchild = RChild.objects.create(r_ptr=r) + r_pk = r.pk r.delete() self.assertEqual( - pre_delete_order, [(T, 2), (T, 1), (RChild, 1), (S, 2), (S, 1), (R, 1)] + pre_delete_order, + [ + (T, t2.pk), + (T, t1.pk), + (RChild, rchild.pk), + (S, s2.pk), + (S, s1.pk), + (R, r_pk), + ], ) self.assertEqual( - post_delete_order, [(T, 1), (T, 2), (RChild, 1), (S, 1), (S, 2), (R, 1)] + post_delete_order, + [ + (T, t1.pk), + (T, t2.pk), + (RChild, rchild.pk), + (S, s1.pk), + (S, s2.pk), + (R, r_pk), + ], ) models.signals.post_delete.disconnect(log_post_delete) @@ -776,3 +793,10 @@ class FastDeleteTests(TestCase): (1, {"delete.Base": 1}), ) self.assertIs(Base.objects.exists(), False) + + def test_fast_delete_full_match(self): + avatar = Avatar.objects.create(desc="bar") + User.objects.create(avatar=avatar) + with self.assertNumQueries(1): + User.objects.filter(~Q(pk__in=[]) | Q(avatar__desc="foo")).delete() + self.assertFalse(User.objects.exists()) diff --git a/tests/deprecation/test_middleware_mixin.py b/tests/deprecation/test_middleware_mixin.py index 060c2f5f35..3b6ad6d8ee 100644 --- a/tests/deprecation/test_middleware_mixin.py +++ b/tests/deprecation/test_middleware_mixin.py @@ -1,7 +1,6 @@ -import asyncio import threading -from asgiref.sync import async_to_sync +from asgiref.sync import async_to_sync, iscoroutinefunction from django.contrib.admindocs.middleware import XViewMiddleware from django.contrib.auth.middleware import ( @@ -101,11 +100,11 @@ class MiddlewareMixinTests(SimpleTestCase): # Middleware appears as coroutine if get_function is # a coroutine. middleware_instance = middleware(async_get_response) - self.assertIs(asyncio.iscoroutinefunction(middleware_instance), True) + self.assertIs(iscoroutinefunction(middleware_instance), True) # Middleware doesn't appear as coroutine if get_function is not # a coroutine. 
middleware_instance = middleware(sync_get_response) - self.assertIs(asyncio.iscoroutinefunction(middleware_instance), False) + self.assertIs(iscoroutinefunction(middleware_instance), False) def test_sync_to_async_uses_base_thread_and_connection(self): """ diff --git a/tests/expressions_window/tests.py b/tests/expressions_window/tests.py index cac6114904..027fc9c25c 100644 --- a/tests/expressions_window/tests.py +++ b/tests/expressions_window/tests.py @@ -42,6 +42,7 @@ from django.db.models.functions import ( ) from django.db.models.lookups import Exact from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature +from django.test.utils import CaptureQueriesContext from .models import Classification, Detail, Employee, PastEmployeeDepartment @@ -1157,16 +1158,21 @@ class WindowFunctionTests(TestCase): ) def test_filter_count(self): - self.assertEqual( - Employee.objects.annotate( - department_salary_rank=Window( - Rank(), partition_by="department", order_by="-salary" + with CaptureQueriesContext(connection) as ctx: + self.assertEqual( + Employee.objects.annotate( + department_salary_rank=Window( + Rank(), partition_by="department", order_by="-salary" + ) ) + .filter(department_salary_rank=1) + .count(), + 5, ) - .filter(department_salary_rank=1) - .count(), - 5, - ) + self.assertEqual(len(ctx.captured_queries), 1) + sql = ctx.captured_queries[0]["sql"].lower() + self.assertEqual(sql.count("select"), 3) + self.assertNotIn("group by", sql) @skipUnlessDBFeature("supports_frame_range_fixed_distance") def test_range_n_preceding_and_following(self): diff --git a/tests/file_storage/tests.py b/tests/file_storage/tests.py index 87a5e70c33..5c7190d698 100644 --- a/tests/file_storage/tests.py +++ b/tests/file_storage/tests.py @@ -956,7 +956,7 @@ class FieldCallableFileStorageTests(SimpleTestCase): msg = ( "FileField.storage must be a subclass/instance of " - "django.core.files.storage.Storage" + "django.core.files.storage.base.Storage" ) for invalid_type in (NotStorage, str, list, set, tuple): with self.subTest(invalid_type=invalid_type): diff --git a/tests/file_uploads/tests.py b/tests/file_uploads/tests.py index c6d76aa4c9..ecbee48160 100644 --- a/tests/file_uploads/tests.py +++ b/tests/file_uploads/tests.py @@ -25,7 +25,8 @@ from .models import FileModel UNICODE_FILENAME = "test-0123456789_中文_Orléans.jpg" MEDIA_ROOT = sys_tempfile.mkdtemp() -UPLOAD_TO = os.path.join(MEDIA_ROOT, "test_upload") +UPLOAD_FOLDER = "test_upload" +UPLOAD_TO = os.path.join(MEDIA_ROOT, UPLOAD_FOLDER) CANDIDATE_TRAVERSAL_FILE_NAMES = [ "/tmp/hax0rd.txt", # Absolute path, *nix-style. @@ -189,8 +190,7 @@ class FileUploadTests(TestCase): def test_unicode_file_name_rfc2231(self): """ - Test receiving file upload when filename is encoded with RFC2231 - (#22971). + Receiving file upload when filename is encoded with RFC 2231. """ payload = client.FakePayload() payload.write( @@ -219,8 +219,7 @@ class FileUploadTests(TestCase): def test_unicode_name_rfc2231(self): """ - Test receiving file upload when filename is encoded with RFC2231 - (#22971). + Receiving file upload when filename is encoded with RFC 2231. 
""" payload = client.FakePayload() payload.write( diff --git a/tests/file_uploads/uploadhandler.py b/tests/file_uploads/uploadhandler.py index eecbc6dc9b..a1e1a5af05 100644 --- a/tests/file_uploads/uploadhandler.py +++ b/tests/file_uploads/uploadhandler.py @@ -55,7 +55,7 @@ class TraversalUploadHandler(FileUploadHandler): """A handler with potential directory-traversal vulnerability.""" def __init__(self, request=None): - from .views import UPLOAD_TO + from .tests import UPLOAD_TO super().__init__(request) self.upload_dir = UPLOAD_TO diff --git a/tests/file_uploads/views.py b/tests/file_uploads/views.py index d5efbba3ce..c1d4ca5358 100644 --- a/tests/file_uploads/views.py +++ b/tests/file_uploads/views.py @@ -6,7 +6,7 @@ from django.core.files.uploadhandler import TemporaryFileUploadHandler from django.http import HttpResponse, HttpResponseServerError, JsonResponse from .models import FileModel -from .tests import UNICODE_FILENAME, UPLOAD_TO +from .tests import UNICODE_FILENAME, UPLOAD_FOLDER from .uploadhandler import ( ErroringUploadHandler, QuotaUploadHandler, @@ -68,9 +68,13 @@ def file_upload_unicode_name(request): # Check to make sure the exotic characters are preserved even # through file save. uni_named_file = request.FILES["file_unicode"] - FileModel.objects.create(testfile=uni_named_file) - full_name = "%s/%s" % (UPLOAD_TO, uni_named_file.name) - return HttpResponse() if os.path.exists(full_name) else HttpResponseServerError() + file_model = FileModel.objects.create(testfile=uni_named_file) + full_name = f"{UPLOAD_FOLDER}/{uni_named_file.name}" + return ( + HttpResponse() + if file_model.testfile.storage.exists(full_name) + else HttpResponseServerError() + ) def file_upload_echo(request): diff --git a/tests/fixtures/tests.py b/tests/fixtures/tests.py index 9eb2740c90..deac1c2d77 100644 --- a/tests/fixtures/tests.py +++ b/tests/fixtures/tests.py @@ -916,15 +916,11 @@ class FixtureLoadingTests(DumpDataAssertMixin, TestCase): with self.assertRaisesMessage(IntegrityError, msg): management.call_command("loaddata", "invalid.json", verbosity=0) - @unittest.skipUnless( - connection.vendor == "postgresql", "psycopg2 prohibits null characters in data." - ) + @skipUnlessDBFeature("prohibits_null_characters_in_text_exception") def test_loaddata_null_characters_on_postgresql(self): - msg = ( - "Could not load fixtures.Article(pk=2): " - "A string literal cannot contain NUL (0x00) characters." - ) - with self.assertRaisesMessage(ValueError, msg): + error, msg = connection.features.prohibits_null_characters_in_text_exception + msg = f"Could not load fixtures.Article(pk=2): {msg}" + with self.assertRaisesMessage(error, msg): management.call_command("loaddata", "null_character_in_field_value.json") def test_loaddata_app_option(self): diff --git a/tests/forms_tests/tests/test_forms.py b/tests/forms_tests/tests/test_forms.py index 6005472194..930a200b14 100644 --- a/tests/forms_tests/tests/test_forms.py +++ b/tests/forms_tests/tests/test_forms.py @@ -4579,6 +4579,22 @@ Options: + +

      + """, + ) + class FormsModelTestCase(TestCase): def test_unicode_filename(self): diff --git a/tests/generic_relations/tests.py b/tests/generic_relations/tests.py index 29307237a2..18e3578f60 100644 --- a/tests/generic_relations/tests.py +++ b/tests/generic_relations/tests.py @@ -45,6 +45,10 @@ class GenericRelationsTests(TestCase): # Original list of tags: return obj.tag, obj.content_type.model_class(), obj.object_id + async def test_generic_async_acreate(self): + await self.bacon.tags.acreate(tag="orange") + self.assertEqual(await self.bacon.tags.acount(), 3) + def test_generic_update_or_create_when_created(self): """ Should be able to use update_or_create from the generic related manager @@ -70,6 +74,18 @@ class GenericRelationsTests(TestCase): self.assertEqual(count + 1, self.bacon.tags.count()) self.assertEqual(tag.tag, "juicy") + async def test_generic_async_aupdate_or_create(self): + tag, created = await self.bacon.tags.aupdate_or_create( + id=self.fatty.id, defaults={"tag": "orange"} + ) + self.assertIs(created, False) + self.assertEqual(tag.tag, "orange") + self.assertEqual(await self.bacon.tags.acount(), 2) + tag, created = await self.bacon.tags.aupdate_or_create(tag="pink") + self.assertIs(created, True) + self.assertEqual(await self.bacon.tags.acount(), 3) + self.assertEqual(tag.tag, "pink") + def test_generic_get_or_create_when_created(self): """ Should be able to use get_or_create from the generic related manager @@ -96,6 +112,18 @@ class GenericRelationsTests(TestCase): # shouldn't had changed the tag self.assertEqual(tag.tag, "stinky") + async def test_generic_async_aget_or_create(self): + tag, created = await self.bacon.tags.aget_or_create( + id=self.fatty.id, defaults={"tag": "orange"} + ) + self.assertIs(created, False) + self.assertEqual(tag.tag, "fatty") + self.assertEqual(await self.bacon.tags.acount(), 2) + tag, created = await self.bacon.tags.aget_or_create(tag="orange") + self.assertIs(created, True) + self.assertEqual(await self.bacon.tags.acount(), 3) + self.assertEqual(tag.tag, "orange") + def test_generic_relations_m2m_mimic(self): """ Objects with declared GenericRelations can be tagged directly -- the @@ -296,6 +324,13 @@ class GenericRelationsTests(TestCase): with self.assertRaisesMessage(TypeError, msg): self.bacon.tags.add(self.lion) + async def test_aadd(self): + bacon = await Vegetable.objects.acreate(name="Bacon", is_yucky=False) + t1 = await TaggedItem.objects.acreate(content_object=self.quartz, tag="shiny") + t2 = await TaggedItem.objects.acreate(content_object=self.quartz, tag="fatty") + await bacon.tags.aadd(t1, t2, bulk=False) + self.assertEqual(await bacon.tags.acount(), 2) + def test_set(self): bacon = Vegetable.objects.create(name="Bacon", is_yucky=False) fatty = bacon.tags.create(tag="fatty") @@ -319,6 +354,16 @@ class GenericRelationsTests(TestCase): bacon.tags.set([], clear=True) self.assertSequenceEqual(bacon.tags.all(), []) + async def test_aset(self): + bacon = await Vegetable.objects.acreate(name="Bacon", is_yucky=False) + fatty = await bacon.tags.acreate(tag="fatty") + await bacon.tags.aset([fatty]) + self.assertEqual(await bacon.tags.acount(), 1) + await bacon.tags.aset([]) + self.assertEqual(await bacon.tags.acount(), 0) + await bacon.tags.aset([fatty], bulk=False, clear=True) + self.assertEqual(await bacon.tags.acount(), 1) + def test_assign(self): bacon = Vegetable.objects.create(name="Bacon", is_yucky=False) fatty = bacon.tags.create(tag="fatty") @@ -360,6 +405,10 @@ class GenericRelationsTests(TestCase): [self.hairy, 
self.yellow], ) + async def test_aclear(self): + await self.bacon.tags.aclear() + self.assertEqual(await self.bacon.tags.acount(), 0) + def test_remove(self): self.assertSequenceEqual( TaggedItem.objects.order_by("tag"), @@ -372,6 +421,12 @@ class GenericRelationsTests(TestCase): [self.hairy, self.salty, self.yellow], ) + async def test_aremove(self): + await self.bacon.tags.aremove(self.fatty) + self.assertEqual(await self.bacon.tags.acount(), 1) + await self.bacon.tags.aremove(self.salty) + self.assertEqual(await self.bacon.tags.acount(), 0) + def test_generic_relation_related_name_default(self): # GenericRelation isn't usable from the reverse side by default. msg = ( diff --git a/tests/generic_relations_regress/tests.py b/tests/generic_relations_regress/tests.py index 6c708fefbb..9b2f21b88b 100644 --- a/tests/generic_relations_regress/tests.py +++ b/tests/generic_relations_regress/tests.py @@ -308,3 +308,13 @@ class GenericRelationTests(TestCase): thing = HasLinkThing.objects.create() link = Link.objects.create(content_object=thing) self.assertCountEqual(link.targets.all(), [thing]) + + def test_generic_reverse_relation_exclude_filter(self): + place1 = Place.objects.create(name="Test Place 1") + place2 = Place.objects.create(name="Test Place 2") + Link.objects.create(content_object=place1) + link2 = Link.objects.create(content_object=place2) + qs = Link.objects.filter(~Q(places__name="Test Place 1")) + self.assertSequenceEqual(qs, [link2]) + qs = Link.objects.exclude(places__name="Test Place 1") + self.assertSequenceEqual(qs, [link2]) diff --git a/tests/gis_tests/geoapp/test_functions.py b/tests/gis_tests/geoapp/test_functions.py index e1a66d573e..535e552aa1 100644 --- a/tests/gis_tests/geoapp/test_functions.py +++ b/tests/gis_tests/geoapp/test_functions.py @@ -371,6 +371,18 @@ class GISFunctionsTests(FuncTestMixin, TestCase): else: self.assertIs(c.inter.empty, True) + @skipUnlessDBFeature("supports_empty_geometries", "has_IsEmpty_function") + def test_isempty(self): + empty = City.objects.create(name="Nowhere", point=Point(srid=4326)) + City.objects.create(name="Somewhere", point=Point(6.825, 47.1, srid=4326)) + self.assertSequenceEqual( + City.objects.annotate(isempty=functions.IsEmpty("point")).filter( + isempty=True + ), + [empty], + ) + self.assertSequenceEqual(City.objects.filter(point__isempty=True), [empty]) + @skipUnlessDBFeature("has_IsValid_function") def test_isvalid(self): valid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))") diff --git a/tests/gis_tests/geoapp/tests.py b/tests/gis_tests/geoapp/tests.py index 2691597225..a4dc5eec75 100644 --- a/tests/gis_tests/geoapp/tests.py +++ b/tests/gis_tests/geoapp/tests.py @@ -645,18 +645,16 @@ class GeoQuerySetTest(TestCase): self.assertIsNone(State.objects.aggregate(MakeLine("poly"))["poly__makeline"]) # Reference query: # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city; - ref_line = GEOSGeometry( - "LINESTRING(-95.363151 29.763374,-96.801611 32.782057," - "-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001," - "-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)", - srid=4326, - ) - # We check for equality with a tolerance of 10e-5 which is a lower bound - # of the precisions of ref_line coordinates line = City.objects.aggregate(MakeLine("point"))["point__makeline"] - self.assertTrue( - ref_line.equals_exact(line, tolerance=10e-5), "%s != %s" % (ref_line, line) - ) + ref_points = City.objects.values_list("point", flat=True) + self.assertIsInstance(line, LineString) + 
        self.assertEqual(len(line), ref_points.count())
+        # Compare pairs of manually sorted points, as the default ordering is
+        # flaky.
+        for (point, ref_city) in zip(sorted(line), sorted(ref_points)):
+            point_x, point_y = point
+            self.assertAlmostEqual(point_x, ref_city.x, 5)
+            self.assertAlmostEqual(point_y, ref_city.y, 5)
 
     @skipUnlessDBFeature("supports_union_aggr")
     def test_unionagg(self):
diff --git a/tests/gis_tests/tests.py b/tests/gis_tests/tests.py
index d1c93592a8..9da2b4df99 100644
--- a/tests/gis_tests/tests.py
+++ b/tests/gis_tests/tests.py
@@ -36,7 +36,7 @@ if HAS_POSTGRES:
                 raise NotImplementedError("This function was not expected to be called")
 
 
-@unittest.skipUnless(HAS_POSTGRES, "The psycopg2 driver is needed for these tests")
+@unittest.skipUnless(HAS_POSTGRES, "The psycopg driver is needed for these tests")
 class TestPostGISVersionCheck(unittest.TestCase):
     """
     The PostGIS version check parses correctly the version numbers
diff --git a/tests/handlers/tests.py b/tests/handlers/tests.py
index 1e2da4672b..980887edfd 100644
--- a/tests/handlers/tests.py
+++ b/tests/handlers/tests.py
@@ -129,6 +129,19 @@ class TransactionsPerRequestTests(TransactionTestCase):
         finally:
             connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
         self.assertContains(response, "False")
+        try:
+            connection.settings_dict["ATOMIC_REQUESTS"] = True
+            response = self.client.get("/not_in_transaction_using_none/")
+        finally:
+            connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
+        self.assertContains(response, "False")
+        try:
+            connection.settings_dict["ATOMIC_REQUESTS"] = True
+            response = self.client.get("/not_in_transaction_using_text/")
+        finally:
+            connection.settings_dict["ATOMIC_REQUESTS"] = old_atomic_requests
+        # The non_atomic_requests decorator is used for an incorrect table.
+ self.assertContains(response, "True") @override_settings(ROOT_URLCONF="handlers.urls") diff --git a/tests/handlers/urls.py b/tests/handlers/urls.py index 59cc919ba5..73d99c7edf 100644 --- a/tests/handlers/urls.py +++ b/tests/handlers/urls.py @@ -10,6 +10,8 @@ urlpatterns = [ path("streaming/", views.streaming), path("in_transaction/", views.in_transaction), path("not_in_transaction/", views.not_in_transaction), + path("not_in_transaction_using_none/", views.not_in_transaction_using_none), + path("not_in_transaction_using_text/", views.not_in_transaction_using_text), path("bad_request/", views.bad_request), path("suspicious/", views.suspicious), path("malformed_post/", views.malformed_post), diff --git a/tests/handlers/views.py b/tests/handlers/views.py index b7d0716afc..351eb65264 100644 --- a/tests/handlers/views.py +++ b/tests/handlers/views.py @@ -33,6 +33,16 @@ def not_in_transaction(request): return HttpResponse(str(connection.in_atomic_block)) +@transaction.non_atomic_requests(using=None) +def not_in_transaction_using_none(request): + return HttpResponse(str(connection.in_atomic_block)) + + +@transaction.non_atomic_requests(using="incorrect") +def not_in_transaction_using_text(request): + return HttpResponse(str(connection.in_atomic_block)) + + def bad_request(request): raise BadRequest() diff --git a/tests/httpwrappers/tests.py b/tests/httpwrappers/tests.py index 6ab0cc52cb..fa2c8fd5d2 100644 --- a/tests/httpwrappers/tests.py +++ b/tests/httpwrappers/tests.py @@ -720,6 +720,42 @@ class StreamingHttpResponseTests(SimpleTestCase): '', ) + async def test_async_streaming_response(self): + async def async_iter(): + yield b"hello" + yield b"world" + + r = StreamingHttpResponse(async_iter()) + + chunks = [] + async for chunk in r: + chunks.append(chunk) + self.assertEqual(chunks, [b"hello", b"world"]) + + def test_async_streaming_response_warning(self): + async def async_iter(): + yield b"hello" + yield b"world" + + r = StreamingHttpResponse(async_iter()) + + msg = ( + "StreamingHttpResponse must consume asynchronous iterators in order to " + "serve them synchronously. Use a synchronous iterator instead." + ) + with self.assertWarnsMessage(Warning, msg): + self.assertEqual(list(r), [b"hello", b"world"]) + + async def test_sync_streaming_response_warning(self): + r = StreamingHttpResponse(iter(["hello", "world"])) + + msg = ( + "StreamingHttpResponse must consume synchronous iterators in order to " + "serve them asynchronously. Use an asynchronous iterator instead." + ) + with self.assertWarnsMessage(Warning, msg): + self.assertEqual(b"hello", await r.__aiter__().__anext__()) + class FileCloseTests(SimpleTestCase): def setUp(self): @@ -827,7 +863,7 @@ class CookieTests(unittest.TestCase): ) def test_cookie_edgecases(self): - # Cookies that RFC6265 allows. + # Cookies that RFC 6265 allows. self.assertEqual( parse_cookie("a=b; Domain=example.com"), {"a": "b", "Domain": "example.com"} ) @@ -837,7 +873,7 @@ class CookieTests(unittest.TestCase): def test_invalid_cookies(self): """ - Cookie strings that go against RFC6265 but browsers will send if set + Cookie strings that go against RFC 6265 but browsers will send if set via document.cookie. 
""" # Chunks without an equals sign appear as unnamed values per diff --git a/tests/i18n/tests.py b/tests/i18n/tests.py index 1fec6009a5..6fab454c1d 100644 --- a/tests/i18n/tests.py +++ b/tests/i18n/tests.py @@ -2139,7 +2139,7 @@ class UnprefixedDefaultLanguageTests(SimpleTestCase): def test_unprefixed_language_with_accept_language(self): """'Accept-Language' is respected.""" - response = self.client.get("/simple/", HTTP_ACCEPT_LANGUAGE="fr") + response = self.client.get("/simple/", headers={"accept-language": "fr"}) self.assertRedirects(response, "/fr/simple/") def test_unprefixed_language_with_cookie_language(self): @@ -2149,7 +2149,7 @@ class UnprefixedDefaultLanguageTests(SimpleTestCase): self.assertRedirects(response, "/fr/simple/") def test_unprefixed_language_with_non_valid_language(self): - response = self.client.get("/simple/", HTTP_ACCEPT_LANGUAGE="fi") + response = self.client.get("/simple/", headers={"accept-language": "fi"}) self.assertEqual(response.content, b"Yes") self.client.cookies.load({settings.LANGUAGE_COOKIE_NAME: "fi"}) response = self.client.get("/simple/") diff --git a/tests/inspectdb/models.py b/tests/inspectdb/models.py index c07cd4def1..9e6871ce46 100644 --- a/tests/inspectdb/models.py +++ b/tests/inspectdb/models.py @@ -106,6 +106,13 @@ class TextFieldDbCollation(models.Model): required_db_features = {"supports_collation_on_textfield"} +class CharFieldUnlimited(models.Model): + char_field = models.CharField(max_length=None) + + class Meta: + required_db_features = {"supports_unlimited_charfield"} + + class UniqueTogether(models.Model): field1 = models.IntegerField() field2 = models.CharField(max_length=10) @@ -132,3 +139,11 @@ class FuncUniqueConstraint(models.Model): ) ] required_db_features = {"supports_expression_indexes"} + + +class DbComment(models.Model): + rank = models.IntegerField(db_comment="'Rank' column comment") + + class Meta: + db_table_comment = "Custom table comment" + required_db_features = {"supports_comments"} diff --git a/tests/inspectdb/tests.py b/tests/inspectdb/tests.py index 66b2eb8260..ad929fd9bc 100644 --- a/tests/inspectdb/tests.py +++ b/tests/inspectdb/tests.py @@ -129,6 +129,24 @@ class InspectDBTestCase(TestCase): "null_json_field = models.JSONField(blank=True, null=True)", output ) + @skipUnlessDBFeature("supports_comments") + def test_db_comments(self): + out = StringIO() + call_command("inspectdb", "inspectdb_dbcomment", stdout=out) + output = out.getvalue() + integer_field_type = connection.features.introspected_field_types[ + "IntegerField" + ] + self.assertIn( + f"rank = models.{integer_field_type}(" + f"db_comment=\"'Rank' column comment\")", + output, + ) + self.assertIn( + " db_table_comment = 'Custom table comment'", + output, + ) + @skipUnlessDBFeature("supports_collation_on_charfield") @skipUnless(test_collation, "Language collations are not supported.") def test_char_field_db_collation(self): @@ -166,6 +184,13 @@ class InspectDBTestCase(TestCase): output, ) + @skipUnlessDBFeature("supports_unlimited_charfield") + def test_char_field_unlimited(self): + out = StringIO() + call_command("inspectdb", "inspectdb_charfieldunlimited", stdout=out) + output = out.getvalue() + self.assertIn("char_field = models.CharField()", output) + def test_number_field_types(self): """Test introspection of various Django field types""" assertFieldType = self.make_field_type_asserter() @@ -563,17 +588,19 @@ class InspectDBTransactionalTests(TransactionTestCase): "CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw" ) cursor.execute( 
- """\ - CREATE FOREIGN TABLE inspectdb_iris_foreign_table ( - petal_length real, - petal_width real, - sepal_length real, - sepal_width real - ) SERVER inspectdb_server OPTIONS ( - filename %s + connection.ops.compose_sql( + """ + CREATE FOREIGN TABLE inspectdb_iris_foreign_table ( + petal_length real, + petal_width real, + sepal_length real, + sepal_width real + ) SERVER inspectdb_server OPTIONS ( + filename %s + ) + """, + [os.devnull], ) - """, - [os.devnull], ) out = StringIO() foreign_table_model = "class InspectdbIrisForeignTable(models.Model):" diff --git a/tests/introspection/models.py b/tests/introspection/models.py index 31c1b0de80..d31eb0cbfa 100644 --- a/tests/introspection/models.py +++ b/tests/introspection/models.py @@ -102,3 +102,11 @@ class UniqueConstraintConditionModel(models.Model): condition=models.Q(color__isnull=True), ), ] + + +class DbCommentModel(models.Model): + name = models.CharField(max_length=15, db_comment="'Name' column comment") + + class Meta: + db_table_comment = "Custom table comment" + required_db_features = {"supports_comments"} diff --git a/tests/introspection/tests.py b/tests/introspection/tests.py index a877f25e56..a283aa0769 100644 --- a/tests/introspection/tests.py +++ b/tests/introspection/tests.py @@ -9,6 +9,7 @@ from .models import ( City, Comment, Country, + DbCommentModel, District, Reporter, UniqueConstraintConditionModel, @@ -132,7 +133,7 @@ class IntrospectionTests(TransactionTestCase): ) self.assertEqual( [ - r[3] + r[2] for r in desc if connection.introspection.get_field_type(r[1], r) == "CharField" ], @@ -179,6 +180,26 @@ class IntrospectionTests(TransactionTestCase): [connection.introspection.get_field_type(r[1], r) for r in desc], ) + @skipUnlessDBFeature("supports_comments") + def test_db_comments(self): + with connection.cursor() as cursor: + desc = connection.introspection.get_table_description( + cursor, DbCommentModel._meta.db_table + ) + table_list = connection.introspection.get_table_list(cursor) + self.assertEqual( + ["'Name' column comment"], + [field.comment for field in desc if field.name == "name"], + ) + self.assertEqual( + ["Custom table comment"], + [ + table.comment + for table in table_list + if table.name == "introspection_dbcommentmodel" + ], + ) + # Regression test for #9991 - 'real' types in postgres @skipUnlessDBFeature("has_real_datatype") def test_postgresql_real_type(self): diff --git a/tests/invalid_models_tests/test_models.py b/tests/invalid_models_tests/test_models.py index 5a5aeccdf5..c07c83d79d 100644 --- a/tests/invalid_models_tests/test_models.py +++ b/tests/invalid_models_tests/test_models.py @@ -1872,6 +1872,37 @@ class OtherModelTests(SimpleTestCase): ) +@isolate_apps("invalid_models_tests") +class DbTableCommentTests(TestCase): + def test_db_table_comment(self): + class Model(models.Model): + class Meta: + db_table_comment = "Table comment" + + errors = Model.check(databases=self.databases) + expected = ( + [] + if connection.features.supports_comments + else [ + Warning( + f"{connection.display_name} does not support comments on tables " + f"(db_table_comment).", + obj=Model, + id="models.W046", + ), + ] + ) + self.assertEqual(errors, expected) + + def test_db_table_comment_required_db_features(self): + class Model(models.Model): + class Meta: + db_table_comment = "Table comment" + required_db_features = {"supports_comments"} + + self.assertEqual(Model.check(databases=self.databases), []) + + class MultipleAutoFieldsTests(TestCase): def test_multiple_autofields(self): msg = ( diff --git 
a/tests/invalid_models_tests/test_ordinary_fields.py b/tests/invalid_models_tests/test_ordinary_fields.py index ef7f845a33..4e37c48286 100644 --- a/tests/invalid_models_tests/test_ordinary_fields.py +++ b/tests/invalid_models_tests/test_ordinary_fields.py @@ -112,16 +112,18 @@ class CharFieldTests(TestCase): field = models.CharField() field = Model._meta.get_field("field") - self.assertEqual( - field.check(), - [ + expected = ( + [] + if connection.features.supports_unlimited_charfield + else [ Error( "CharFields must define a 'max_length' attribute.", obj=field, id="fields.E120", ), - ], + ] ) + self.assertEqual(field.check(), expected) def test_negative_max_length(self): class Model(models.Model): @@ -1023,3 +1025,35 @@ class JSONFieldTests(TestCase): field = models.JSONField(default=callable_default) self.assertEqual(Model._meta.get_field("field").check(), []) + + +@isolate_apps("invalid_models_tests") +class DbCommentTests(TestCase): + def test_db_comment(self): + class Model(models.Model): + field = models.IntegerField(db_comment="Column comment") + + errors = Model._meta.get_field("field").check(databases=self.databases) + expected = ( + [] + if connection.features.supports_comments + else [ + DjangoWarning( + f"{connection.display_name} does not support comments on columns " + f"(db_comment).", + obj=Model._meta.get_field("field"), + id="fields.W163", + ), + ] + ) + self.assertEqual(errors, expected) + + def test_db_comment_required_db_features(self): + class Model(models.Model): + field = models.IntegerField(db_comment="Column comment") + + class Meta: + required_db_features = {"supports_comments"} + + errors = Model._meta.get_field("field").check(databases=self.databases) + self.assertEqual(errors, []) diff --git a/tests/invalid_models_tests/test_relative_fields.py b/tests/invalid_models_tests/test_relative_fields.py index 5b4bb45ff8..075bbaefbc 100644 --- a/tests/invalid_models_tests/test_relative_fields.py +++ b/tests/invalid_models_tests/test_relative_fields.py @@ -94,7 +94,9 @@ class RelativeFieldTests(SimpleTestCase): name = models.CharField(max_length=20) class ModelM2M(models.Model): - m2m = models.ManyToManyField(Model, null=True, validators=[lambda x: x]) + m2m = models.ManyToManyField( + Model, null=True, validators=[lambda x: x], db_comment="Column comment" + ) field = ModelM2M._meta.get_field("m2m") self.assertEqual( @@ -110,6 +112,11 @@ class RelativeFieldTests(SimpleTestCase): obj=field, id="fields.W341", ), + DjangoWarning( + "db_comment has no effect on ManyToManyField.", + obj=field, + id="fields.W346", + ), ], ) diff --git a/tests/known_related_objects/models.py b/tests/known_related_objects/models.py index bd8fd1d502..027d162828 100644 --- a/tests/known_related_objects/models.py +++ b/tests/known_related_objects/models.py @@ -24,3 +24,6 @@ class Pool(models.Model): class PoolStyle(models.Model): name = models.CharField(max_length=30) pool = models.OneToOneField(Pool, models.CASCADE) + another_pool = models.OneToOneField( + Pool, models.CASCADE, null=True, related_name="another_style" + ) diff --git a/tests/known_related_objects/tests.py b/tests/known_related_objects/tests.py index 0270220061..6080da3838 100644 --- a/tests/known_related_objects/tests.py +++ b/tests/known_related_objects/tests.py @@ -1,3 +1,4 @@ +from django.db.models import FilteredRelation from django.test import TestCase from .models import Organiser, Pool, PoolStyle, Tournament @@ -23,6 +24,9 @@ class ExistingRelatedInstancesTests(TestCase): ) cls.ps1 = PoolStyle.objects.create(name="T1 Pool 2 
Style", pool=cls.p2) cls.ps2 = PoolStyle.objects.create(name="T2 Pool 1 Style", pool=cls.p3) + cls.ps3 = PoolStyle.objects.create( + name="T1 Pool 1/3 Style", pool=cls.p1, another_pool=cls.p3 + ) def test_foreign_key(self): with self.assertNumQueries(2): @@ -147,3 +151,16 @@ class ExistingRelatedInstancesTests(TestCase): pools = list(Pool.objects.prefetch_related("poolstyle").order_by("pk")) self.assertIs(pools[1], pools[1].poolstyle.pool) self.assertIs(pools[2], pools[2].poolstyle.pool) + + def test_reverse_fk_select_related_multiple(self): + with self.assertNumQueries(1): + ps = list( + PoolStyle.objects.annotate( + pool_1=FilteredRelation("pool"), + pool_2=FilteredRelation("another_pool"), + ) + .select_related("pool_1", "pool_2") + .order_by("-pk") + ) + self.assertIs(ps[0], ps[0].pool_1.poolstyle) + self.assertIs(ps[0], ps[0].pool_2.another_style) diff --git a/tests/lookup/models.py b/tests/lookup/models.py index 86f774ea11..9bb3412c33 100644 --- a/tests/lookup/models.py +++ b/tests/lookup/models.py @@ -50,11 +50,22 @@ class NulledTextField(models.TextField): return None if value == "" else value +class NullField(models.Field): + pass + + +NullField.register_lookup(IsNull) + + @NulledTextField.register_lookup class NulledTransform(models.Transform): lookup_name = "nulled" template = "NULL" + @property + def output_field(self): + return NullField() + @NulledTextField.register_lookup class IsNullWithNoneAsRHS(IsNull): diff --git a/tests/lookup/tests.py b/tests/lookup/tests.py index 10e4c3d1fe..53eb76d174 100644 --- a/tests/lookup/tests.py +++ b/tests/lookup/tests.py @@ -1218,7 +1218,7 @@ class LookupTests(TestCase): def test_exact_exists(self): qs = Article.objects.filter(pk=OuterRef("pk")) - seasons = Season.objects.annotate(pk_exists=Exists(qs),).filter( + seasons = Season.objects.annotate(pk_exists=Exists(qs)).filter( pk_exists=Exists(qs), ) self.assertCountEqual(seasons, Season.objects.all()) diff --git a/tests/mail/tests.py b/tests/mail/tests.py index 183a0c0ab1..54a136c1a9 100644 --- a/tests/mail/tests.py +++ b/tests/mail/tests.py @@ -1234,8 +1234,8 @@ class BaseEmailBackendTests(HeadersCheckMixin): def test_send_long_lines(self): """ - Email line length is limited to 998 chars by the RFC: - https://tools.ietf.org/html/rfc5322#section-2.1.1 + Email line length is limited to 998 chars by the RFC 5322 Section + 2.1.1. Message body containing longer lines are converted to Quoted-Printable to avoid having to insert newlines, which could be hairy to do properly. """ diff --git a/tests/middleware/tests.py b/tests/middleware/tests.py index 6009350365..e29d32ad74 100644 --- a/tests/middleware/tests.py +++ b/tests/middleware/tests.py @@ -3,6 +3,7 @@ import random import re import struct from io import BytesIO +from unittest import mock from urllib.parse import quote from django.conf import settings @@ -640,7 +641,7 @@ class ConditionalGetMiddlewareTest(SimpleTestCase): def test_not_modified_headers(self): """ The 304 Not Modified response should include only the headers required - by section 4.1 of RFC 7232, Last-Modified, and the cookies. + by RFC 9110 Section 15.4.5, Last-Modified, and the cookies. """ def get_response(req): @@ -898,6 +899,28 @@ class GZipMiddlewareTest(SimpleTestCase): self.assertEqual(r.get("Content-Encoding"), "gzip") self.assertFalse(r.has_header("Content-Length")) + async def test_compress_async_streaming_response(self): + """ + Compression is performed on responses with async streaming content. 
+ """ + + async def get_stream_response(request): + async def iterator(): + for chunk in self.sequence: + yield chunk + + resp = StreamingHttpResponse(iterator()) + resp["Content-Type"] = "text/html; charset=UTF-8" + return resp + + r = await GZipMiddleware(get_stream_response)(self.req) + self.assertEqual( + self.decompress(b"".join([chunk async for chunk in r])), + b"".join(self.sequence), + ) + self.assertEqual(r.get("Content-Encoding"), "gzip") + self.assertFalse(r.has_header("Content-Length")) + def test_compress_streaming_response_unicode(self): """ Compression is performed on responses with streaming Unicode content. @@ -978,12 +1001,47 @@ class GZipMiddlewareTest(SimpleTestCase): ConditionalGetMiddleware from recognizing conditional matches on gzipped content). """ - r1 = GZipMiddleware(self.get_response)(self.req) - r2 = GZipMiddleware(self.get_response)(self.req) + + class DeterministicGZipMiddleware(GZipMiddleware): + max_random_bytes = 0 + + r1 = DeterministicGZipMiddleware(self.get_response)(self.req) + r2 = DeterministicGZipMiddleware(self.get_response)(self.req) self.assertEqual(r1.content, r2.content) self.assertEqual(self.get_mtime(r1.content), 0) self.assertEqual(self.get_mtime(r2.content), 0) + def test_random_bytes(self): + """A random number of bytes is added to mitigate the BREACH attack.""" + with mock.patch( + "django.utils.text.secrets.randbelow", autospec=True, return_value=3 + ): + r = GZipMiddleware(self.get_response)(self.req) + # The fourth byte of a gzip stream contains flags. + self.assertEqual(r.content[3], gzip.FNAME) + # A 3 byte filename "aaa" and a null byte are added. + self.assertEqual(r.content[10:14], b"aaa\x00") + self.assertEqual(self.decompress(r.content), self.compressible_string) + + def test_random_bytes_streaming_response(self): + """A random number of bytes is added to mitigate the BREACH attack.""" + + def get_stream_response(request): + resp = StreamingHttpResponse(self.sequence) + resp["Content-Type"] = "text/html; charset=UTF-8" + return resp + + with mock.patch( + "django.utils.text.secrets.randbelow", autospec=True, return_value=3 + ): + r = GZipMiddleware(get_stream_response)(self.req) + content = b"".join(r) + # The fourth byte of a gzip stream contains flags. + self.assertEqual(content[3], gzip.FNAME) + # A 3 byte filename "aaa" and a null byte are added. + self.assertEqual(content[10:14], b"aaa\x00") + self.assertEqual(self.decompress(content), b"".join(self.sequence)) + class ETagGZipMiddlewareTest(SimpleTestCase): """ diff --git a/tests/middleware_exceptions/middleware.py b/tests/middleware_exceptions/middleware.py index 5a0c82afb1..f50aa61327 100644 --- a/tests/middleware_exceptions/middleware.py +++ b/tests/middleware_exceptions/middleware.py @@ -1,4 +1,4 @@ -import asyncio +from asgiref.sync import iscoroutinefunction, markcoroutinefunction from django.http import Http404, HttpResponse from django.template import engines @@ -15,9 +15,8 @@ log = [] class BaseMiddleware: def __init__(self, get_response): self.get_response = get_response - if asyncio.iscoroutinefunction(self.get_response): - # Mark the class as async-capable. 
- self._is_coroutine = asyncio.coroutines._is_coroutine + if iscoroutinefunction(self.get_response): + markcoroutinefunction(self) def __call__(self, request): return self.get_response(request) diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index d2f9be75f7..82ddb17543 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -773,6 +773,14 @@ class AutodetectorTests(BaseAutodetectorTests): "verbose_name": "Authi", }, ) + author_with_db_table_comment = ModelState( + "testapp", + "Author", + [ + ("id", models.AutoField(primary_key=True)), + ], + {"db_table_comment": "Table comment"}, + ) author_with_db_table_options = ModelState( "testapp", "Author", @@ -2349,6 +2357,58 @@ class AutodetectorTests(BaseAutodetectorTests): changes, "testapp", 0, 1, name="newauthor", table="author_three" ) + def test_alter_db_table_comment_add(self): + changes = self.get_changes( + [self.author_empty], [self.author_with_db_table_comment] + ) + self.assertNumberMigrations(changes, "testapp", 1) + self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"]) + self.assertOperationAttributes( + changes, "testapp", 0, 0, name="author", table_comment="Table comment" + ) + + def test_alter_db_table_comment_change(self): + author_with_new_db_table_comment = ModelState( + "testapp", + "Author", + [ + ("id", models.AutoField(primary_key=True)), + ], + {"db_table_comment": "New table comment"}, + ) + changes = self.get_changes( + [self.author_with_db_table_comment], + [author_with_new_db_table_comment], + ) + self.assertNumberMigrations(changes, "testapp", 1) + self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"]) + self.assertOperationAttributes( + changes, + "testapp", + 0, + 0, + name="author", + table_comment="New table comment", + ) + + def test_alter_db_table_comment_remove(self): + changes = self.get_changes( + [self.author_with_db_table_comment], + [self.author_empty], + ) + self.assertNumberMigrations(changes, "testapp", 1) + self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"]) + self.assertOperationAttributes( + changes, "testapp", 0, 0, name="author", db_table_comment=None + ) + + def test_alter_db_table_comment_no_changes(self): + changes = self.get_changes( + [self.author_with_db_table_comment], + [self.author_with_db_table_comment], + ) + self.assertNumberMigrations(changes, "testapp", 0) + def test_identical_regex_doesnt_alter(self): from_state = ModelState( "testapp", diff --git a/tests/migrations/test_base.py b/tests/migrations/test_base.py index 3f1559b8d6..f038cd7605 100644 --- a/tests/migrations/test_base.py +++ b/tests/migrations/test_base.py @@ -75,6 +75,20 @@ class MigrationTestBase(TransactionTestCase): def assertColumnCollation(self, table, column, collation, using="default"): self.assertEqual(self._get_column_collation(table, column, using), collation) + def _get_table_comment(self, table, using): + with connections[using].cursor() as cursor: + return next( + t.comment + for t in connections[using].introspection.get_table_list(cursor) + if t.name == table + ) + + def assertTableComment(self, table, comment, using="default"): + self.assertEqual(self._get_table_comment(table, using), comment) + + def assertTableCommentNotExists(self, table, using="default"): + self.assertIn(self._get_table_comment(table, using), [None, ""]) + def assertIndexExists( self, table, columns, value=True, using="default", index_type=None ): diff --git 
a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index e02dc9ef3f..129360629d 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1796,6 +1796,43 @@ class OperationTests(OperationTestBase): self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) + def test_alter_model_table_m2m_field(self): + app_label = "test_talm2mfl" + project_state = self.set_up_test_model(app_label, second_model=True) + # Add the M2M field. + project_state = self.apply_operations( + app_label, + project_state, + operations=[ + migrations.AddField( + "Pony", + "stables", + models.ManyToManyField("Stable"), + ) + ], + ) + m2m_table = f"{app_label}_pony_stables" + self.assertColumnExists(m2m_table, "pony_id") + self.assertColumnExists(m2m_table, "stable_id") + # Point the M2M field to self. + with_field_state = project_state.clone() + operations = [ + migrations.AlterField( + model_name="Pony", + name="stables", + field=models.ManyToManyField("self"), + ) + ] + project_state = self.apply_operations( + app_label, project_state, operations=operations + ) + self.assertColumnExists(m2m_table, "from_pony_id") + self.assertColumnExists(m2m_table, "to_pony_id") + # Reversal. + self.unapply_operations(app_label, with_field_state, operations=operations) + self.assertColumnExists(m2m_table, "pony_id") + self.assertColumnExists(m2m_table, "stable_id") + def test_alter_field(self): """ Tests the AlterField operation. @@ -1885,6 +1922,37 @@ class OperationTests(OperationTestBase): operation.database_forwards(app_label, editor, new_state, project_state) self.assertColumnExists(rider_table, "pony_id") + @skipUnlessDBFeature("supports_comments") + def test_alter_model_table_comment(self): + app_label = "test_almotaco" + project_state = self.set_up_test_model(app_label) + pony_table = f"{app_label}_pony" + # Add table comment. + operation = migrations.AlterModelTableComment("Pony", "Custom pony comment") + self.assertEqual(operation.describe(), "Alter Pony table comment") + self.assertEqual(operation.migration_name_fragment, "alter_pony_table_comment") + new_state = project_state.clone() + operation.state_forwards(app_label, new_state) + self.assertEqual( + new_state.models[app_label, "pony"].options["db_table_comment"], + "Custom pony comment", + ) + self.assertTableCommentNotExists(pony_table) + with connection.schema_editor() as editor: + operation.database_forwards(app_label, editor, project_state, new_state) + self.assertTableComment(pony_table, "Custom pony comment") + # Reversal. + with connection.schema_editor() as editor: + operation.database_backwards(app_label, editor, new_state, project_state) + self.assertTableCommentNotExists(pony_table) + # Deconstruction. 
+ definition = operation.deconstruct() + self.assertEqual(definition[0], "AlterModelTableComment") + self.assertEqual(definition[1], []) + self.assertEqual( + definition[2], {"name": "Pony", "table_comment": "Custom pony comment"} + ) + def test_alter_field_pk(self): """ The AlterField operation on primary keys (things like PostgreSQL's @@ -3593,7 +3661,6 @@ class OperationTests(OperationTestBase): Book.objects.create(read=70, unread=10) Book.objects.create(read=70, unread=30) - @skipUnlessDBFeature("supports_table_check_constraints") def test_remove_constraint(self): project_state = self.set_up_test_model( "test_removeconstraint", @@ -3636,7 +3703,10 @@ class OperationTests(OperationTestBase): "test_removeconstraint", editor, project_state, new_state ) Pony.objects.create(pink=1, weight=1.0).delete() - with self.assertRaises(IntegrityError), transaction.atomic(): + if connection.features.supports_table_check_constraints: + with self.assertRaises(IntegrityError), transaction.atomic(): + Pony.objects.create(pink=100, weight=1.0) + else: Pony.objects.create(pink=100, weight=1.0) # Remove the other one. lt_operation = migrations.RemoveConstraint( @@ -3661,7 +3731,10 @@ class OperationTests(OperationTestBase): gt_operation.database_backwards( "test_removeconstraint", editor, new_state, project_state ) - with self.assertRaises(IntegrityError), transaction.atomic(): + if connection.features.supports_table_check_constraints: + with self.assertRaises(IntegrityError), transaction.atomic(): + Pony.objects.create(pink=1, weight=1.0) + else: Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() diff --git a/tests/model_fields/test_filefield.py b/tests/model_fields/test_filefield.py index e7c906112d..2259c1e480 100644 --- a/tests/model_fields/test_filefield.py +++ b/tests/model_fields/test_filefield.py @@ -120,9 +120,12 @@ class FileFieldTests(TestCase): with TemporaryUploadedFile( "foo.txt", "text/plain", 1, "utf-8" ) as tmp_file: - Document.objects.create(myfile=tmp_file) - self.assertTrue( - os.path.exists(os.path.join(tmp_dir, "unused", "foo.txt")) + document = Document.objects.create(myfile=tmp_file) + self.assertIs( + document.myfile.storage.exists( + os.path.join("unused", "foo.txt") + ), + True, ) def test_pickle(self): diff --git a/tests/model_fields/test_jsonfield.py b/tests/model_fields/test_jsonfield.py index 277e8aaa3c..60357d87b2 100644 --- a/tests/model_fields/test_jsonfield.py +++ b/tests/model_fields/test_jsonfield.py @@ -19,6 +19,7 @@ from django.db.models import ( ExpressionWrapper, F, IntegerField, + JSONField, OuterRef, Q, Subquery, @@ -36,6 +37,7 @@ from django.db.models.fields.json import ( from django.db.models.functions import Cast from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext +from django.utils.deprecation import RemovedInDjango51Warning from .models import CustomJSONDecoder, JSONModel, NullableJSONModel, RelatedJSONModel @@ -191,15 +193,40 @@ class TestSaveLoad(TestCase): obj.refresh_from_db() self.assertIsNone(obj.value) + def test_ambiguous_str_value_deprecation(self): + msg = ( + "Providing an encoded JSON string via Value() is deprecated. Use Value([], " + "output_field=JSONField()) instead." 
+ ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + obj = NullableJSONModel.objects.create(value=Value("[]")) + obj.refresh_from_db() + self.assertEqual(obj.value, []) + + @skipUnlessDBFeature("supports_primitives_in_json_field") + def test_value_str_primitives_deprecation(self): + msg = ( + "Providing an encoded JSON string via Value() is deprecated. Use " + "Value(None, output_field=JSONField()) instead." + ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + obj = NullableJSONModel.objects.create(value=Value("null")) + obj.refresh_from_db() + self.assertIsNone(obj.value) + obj = NullableJSONModel.objects.create(value=Value("invalid-json")) + obj.refresh_from_db() + self.assertEqual(obj.value, "invalid-json") + @skipUnlessDBFeature("supports_primitives_in_json_field") def test_json_null_different_from_sql_null(self): - json_null = NullableJSONModel.objects.create(value=Value("null")) + json_null = NullableJSONModel.objects.create(value=Value(None, JSONField())) + NullableJSONModel.objects.update(value=Value(None, JSONField())) json_null.refresh_from_db() sql_null = NullableJSONModel.objects.create(value=None) sql_null.refresh_from_db() # 'null' is not equal to NULL in the database. self.assertSequenceEqual( - NullableJSONModel.objects.filter(value=Value("null")), + NullableJSONModel.objects.filter(value=Value(None, JSONField())), [json_null], ) self.assertSequenceEqual( @@ -980,7 +1007,7 @@ class TestQuerying(TestCase): False, ) self.assertIn( - """."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"' """, + """."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"'""", queries[0]["sql"], ) diff --git a/tests/postgres_tests/__init__.py b/tests/postgres_tests/__init__.py index 6f02531ed0..02f39e3935 100644 --- a/tests/postgres_tests/__init__.py +++ b/tests/postgres_tests/__init__.py @@ -4,16 +4,30 @@ from forms_tests.widget_tests.base import WidgetTest from django.db import connection from django.test import SimpleTestCase, TestCase, modify_settings +from django.utils.functional import cached_property @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests") +# To register type handlers and locate the widget's template. +@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class PostgreSQLSimpleTestCase(SimpleTestCase): pass @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests") +# To register type handlers and locate the widget's template. 
+@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class PostgreSQLTestCase(TestCase): - pass + @cached_property + def default_text_search_config(self): + with connection.cursor() as cursor: + cursor.execute("SHOW default_text_search_config") + row = cursor.fetchone() + return row[0] if row else None + + def check_default_text_search_config(self): + if self.default_text_search_config != "pg_catalog.english": + self.skipTest("The default text search config is not 'english'.") @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests") diff --git a/tests/postgres_tests/test_aggregates.py b/tests/postgres_tests/test_aggregates.py index 9cca121802..b5474d361e 100644 --- a/tests/postgres_tests/test_aggregates.py +++ b/tests/postgres_tests/test_aggregates.py @@ -1,9 +1,10 @@ -from django.db import connection +from django.db import connection, transaction from django.db.models import ( CharField, F, Func, IntegerField, + JSONField, OuterRef, Q, Subquery, @@ -15,7 +16,7 @@ from django.db.models.functions import Cast, Concat, Substr from django.test import skipUnlessDBFeature from django.test.utils import Approximate, ignore_warnings from django.utils import timezone -from django.utils.deprecation import RemovedInDjango50Warning +from django.utils.deprecation import RemovedInDjango50Warning, RemovedInDjango51Warning from . import PostgreSQLTestCase from .models import AggregateTestModel, HotelReservation, Room, StatTestModel @@ -124,7 +125,12 @@ class TestGeneralAggregate(PostgreSQLTestCase): (BitOr("integer_field", default=0), 0), (BoolAnd("boolean_field", default=False), False), (BoolOr("boolean_field", default=False), False), - (JSONBAgg("integer_field", default=Value('[""]')), [""]), + (JSONBAgg("integer_field", default=[""]), [""]), + ( + JSONBAgg("integer_field", default=Value([""], JSONField())), + [""], + ), + (StringAgg("char_field", delimiter=";", default=""), ""), ( StringAgg("char_field", delimiter=";", default=Value("")), "", @@ -141,7 +147,7 @@ class TestGeneralAggregate(PostgreSQLTestCase): ) self.assertEqual(values, {"aggregation": expected_result}) # Empty result when query must be executed. - with self.assertNumQueries(1): + with transaction.atomic(), self.assertNumQueries(1): values = AggregateTestModel.objects.aggregate( aggregation=aggregation, ) @@ -188,9 +194,7 @@ class TestGeneralAggregate(PostgreSQLTestCase): {"aggregation": []}, ) self.assertEqual( - queryset.aggregate( - aggregation=JSONBAgg("integer_field", default=Value("[]")) - ), + queryset.aggregate(aggregation=JSONBAgg("integer_field", default=[])), {"aggregation": []}, ) self.assertEqual( @@ -200,6 +204,59 @@ class TestGeneralAggregate(PostgreSQLTestCase): {"aggregation": ""}, ) + @ignore_warnings(category=RemovedInDjango51Warning) + def test_jsonb_agg_default_str_value(self): + AggregateTestModel.objects.all().delete() + queryset = AggregateTestModel.objects.all() + self.assertEqual( + queryset.aggregate( + aggregation=JSONBAgg("integer_field", default=Value("")) + ), + {"aggregation": ""}, + ) + + def test_jsonb_agg_default_str_value_deprecation(self): + queryset = AggregateTestModel.objects.all() + msg = ( + "Passing a Value() with an output_field that isn't a JSONField as " + "JSONBAgg(default) is deprecated. Pass default=Value('', " + "output_field=JSONField()) instead." 
+ ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + queryset.aggregate( + aggregation=JSONBAgg("integer_field", default=Value("")) + ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + queryset.none().aggregate( + aggregation=JSONBAgg("integer_field", default=Value("")) + ), + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_jsonb_agg_default_encoded_json_string(self): + AggregateTestModel.objects.all().delete() + queryset = AggregateTestModel.objects.all() + self.assertEqual( + queryset.aggregate( + aggregation=JSONBAgg("integer_field", default=Value("[]")) + ), + {"aggregation": []}, + ) + + def test_jsonb_agg_default_encoded_json_string_deprecation(self): + queryset = AggregateTestModel.objects.all() + msg = ( + "Passing an encoded JSON string as JSONBAgg(default) is deprecated. Pass " + "default=[] instead." + ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + queryset.aggregate( + aggregation=JSONBAgg("integer_field", default=Value("[]")) + ) + with self.assertWarnsMessage(RemovedInDjango51Warning, msg): + queryset.none().aggregate( + aggregation=JSONBAgg("integer_field", default=Value("[]")) + ) + def test_array_agg_charfield(self): values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg("char_field")) self.assertEqual(values, {"arrayagg": ["Foo1", "Foo2", "Foo4", "Foo3"]}) diff --git a/tests/postgres_tests/test_apps.py b/tests/postgres_tests/test_apps.py index 340e555609..d9fb962251 100644 --- a/tests/postgres_tests/test_apps.py +++ b/tests/postgres_tests/test_apps.py @@ -1,25 +1,32 @@ +import unittest from decimal import Decimal +from django.db import connection from django.db.backends.signals import connection_created from django.db.migrations.writer import MigrationWriter +from django.test import TestCase from django.test.utils import modify_settings -from . 
import PostgreSQLTestCase - try: - from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, NumericRange - from django.contrib.postgres.fields import ( DateRangeField, DateTimeRangeField, DecimalRangeField, IntegerRangeField, ) + from django.db.backends.postgresql.psycopg_any import ( + DateRange, + DateTimeRange, + DateTimeTZRange, + NumericRange, + is_psycopg3, + ) except ImportError: pass -class PostgresConfigTests(PostgreSQLTestCase): +@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests") +class PostgresConfigTests(TestCase): def test_register_type_handlers_connection(self): from django.contrib.postgres.signals import register_type_handlers @@ -53,6 +60,7 @@ class PostgresConfigTests(PostgreSQLTestCase): MigrationWriter.serialize(field) assertNotSerializable() + import_name = "psycopg.types.range" if is_psycopg3 else "psycopg2.extras" with self.modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}): for default, test_field in tests: with self.subTest(default=default): @@ -62,16 +70,12 @@ class PostgresConfigTests(PostgreSQLTestCase): imports, { "import django.contrib.postgres.fields.ranges", - "import psycopg2.extras", + f"import {import_name}", }, ) self.assertIn( - "%s.%s(default=psycopg2.extras.%r)" - % ( - field.__module__, - field.__class__.__name__, - default, - ), + f"{field.__module__}.{field.__class__.__name__}" + f"(default={import_name}.{default!r})", serialized_field, ) assertNotSerializable() diff --git a/tests/postgres_tests/test_array.py b/tests/postgres_tests/test_array.py index 89603e24a0..4808f88689 100644 --- a/tests/postgres_tests/test_array.py +++ b/tests/postgres_tests/test_array.py @@ -12,12 +12,7 @@ from django.core.management import call_command from django.db import IntegrityError, connection, models from django.db.models.expressions import Exists, OuterRef, RawSQL, Value from django.db.models.functions import Cast, JSONObject, Upper -from django.test import ( - TransactionTestCase, - modify_settings, - override_settings, - skipUnlessDBFeature, -) +from django.test import TransactionTestCase, override_settings, skipUnlessDBFeature from django.test.utils import isolate_apps from django.utils import timezone @@ -36,8 +31,6 @@ from .models import ( ) try: - from psycopg2.extras import NumericRange - from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.expressions import ArraySubquery from django.contrib.postgres.fields import ArrayField @@ -47,6 +40,7 @@ try: SplitArrayField, SplitArrayWidget, ) + from django.db.backends.postgresql.psycopg_any import NumericRange except ImportError: pass @@ -323,7 +317,7 @@ class TestQuerying(PostgreSQLTestCase): def test_in_including_F_object(self): # This test asserts that Array objects passed to filters can be # constructed to contain F objects. This currently doesn't work as the - # psycopg2 mogrify method that generates the ARRAY() syntax is + # psycopg mogrify method that generates the ARRAY() syntax is # expecting literals, not column references (#27095). 
self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[[models.F("id")]]), @@ -414,6 +408,21 @@ class TestQuerying(PostgreSQLTestCase): [obj_1, obj_2], ) + def test_overlap_values(self): + qs = NullableIntegerArrayModel.objects.filter(order__lt=3) + self.assertCountEqual( + NullableIntegerArrayModel.objects.filter( + field__overlap=qs.values_list("field"), + ), + self.objs[:3], + ) + self.assertCountEqual( + NullableIntegerArrayModel.objects.filter( + field__overlap=qs.values("field"), + ), + self.objs[:3], + ) + def test_lookups_autofield_array(self): qs = ( NullableIntegerArrayModel.objects.filter( @@ -767,12 +776,12 @@ class TestOtherTypesExactQuerying(PostgreSQLTestCase): class TestChecks(PostgreSQLSimpleTestCase): def test_field_checks(self): class MyModel(PostgreSQLModel): - field = ArrayField(models.CharField()) + field = ArrayField(models.CharField(max_length=-1)) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) - # The inner CharField is missing a max_length. + # The inner CharField has a non-positive max_length. self.assertEqual(errors[0].id, "postgres.E001") self.assertIn("max_length", errors[0].msg) @@ -828,12 +837,12 @@ class TestChecks(PostgreSQLSimpleTestCase): """ class MyModel(PostgreSQLModel): - field = ArrayField(ArrayField(models.CharField())) + field = ArrayField(ArrayField(models.CharField(max_length=-1))) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) - # The inner CharField is missing a max_length. + # The inner CharField has a non-positive max_length. self.assertEqual(errors[0].id, "postgres.E001") self.assertIn("max_length", errors[0].msg) @@ -1244,8 +1253,6 @@ class TestSplitFormField(PostgreSQLSimpleTestCase): with self.assertRaisesMessage(exceptions.ValidationError, msg): SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101]) - # To locate the widget's template. - @modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) def test_rendering(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) diff --git a/tests/postgres_tests/test_bulk_update.py b/tests/postgres_tests/test_bulk_update.py index 5f91f77791..85dfcedd09 100644 --- a/tests/postgres_tests/test_bulk_update.py +++ b/tests/postgres_tests/test_bulk_update.py @@ -1,7 +1,5 @@ from datetime import date -from django.test import modify_settings - from . import PostgreSQLTestCase from .models import ( HStoreModel, @@ -13,12 +11,11 @@ from .models import ( ) try: - from psycopg2.extras import DateRange, NumericRange + from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange except ImportError: - pass # psycopg2 isn't installed. + pass # psycopg isn't installed. -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class BulkSaveTests(PostgreSQLTestCase): def test_bulk_update(self): test_data = [ diff --git a/tests/postgres_tests/test_citext.py b/tests/postgres_tests/test_citext.py index 314f2e40d3..2abb56b39f 100644 --- a/tests/postgres_tests/test_citext.py +++ b/tests/postgres_tests/test_citext.py @@ -5,14 +5,12 @@ strings and thus eliminates the need for operations such as iexact and other modifiers to enforce use of an index. """ from django.db import IntegrityError -from django.test.utils import modify_settings from django.utils.deprecation import RemovedInDjango51Warning from . 
import PostgreSQLTestCase from .models import CITestModel -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class CITextTestCase(PostgreSQLTestCase): case_sensitive_lookups = ("contains", "startswith", "endswith", "regex") diff --git a/tests/postgres_tests/test_constraints.py b/tests/postgres_tests/test_constraints.py index 844c04cd6d..ad21ffa2b5 100644 --- a/tests/postgres_tests/test_constraints.py +++ b/tests/postgres_tests/test_constraints.py @@ -16,7 +16,7 @@ from django.db.models import ( ) from django.db.models.fields.json import KeyTextTransform from django.db.models.functions import Cast, Left, Lower -from django.test import ignore_warnings, modify_settings, skipUnlessDBFeature +from django.test import ignore_warnings, skipUnlessDBFeature from django.test.utils import isolate_apps from django.utils import timezone from django.utils.deprecation import RemovedInDjango50Warning @@ -25,19 +25,17 @@ from . import PostgreSQLTestCase from .models import HotelReservation, IntegerArrayModel, RangesModel, Room, Scene try: - from psycopg2.extras import DateRange, NumericRange - from django.contrib.postgres.constraints import ExclusionConstraint from django.contrib.postgres.fields import ( DateTimeRangeField, RangeBoundary, RangeOperators, ) + from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange except ImportError: pass -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class SchemaTests(PostgreSQLTestCase): get_opclass_query = """ SELECT opcname, c.relname FROM pg_opclass AS oc @@ -77,6 +75,16 @@ class SchemaTests(PostgreSQLTestCase): constraint.validate(IntegerArrayModel, IntegerArrayModel()) constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1])) + def test_check_constraint_array_length(self): + constraint = CheckConstraint( + check=Q(field__len=1), + name="array_length", + ) + msg = f"Constraint “{constraint.name}” is violated." + with self.assertRaisesMessage(ValidationError, msg): + constraint.validate(IntegerArrayModel, IntegerArrayModel()) + constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1])) + def test_check_constraint_daterange_contains(self): constraint_name = "dates_contains" self.assertNotIn( @@ -255,7 +263,6 @@ class SchemaTests(PostgreSQLTestCase): Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle") -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class ExclusionConstraintTests(PostgreSQLTestCase): def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" @@ -312,16 +319,6 @@ class ExclusionConstraintTests(PostgreSQLTestCase): deferrable="invalid", ) - def test_deferrable_with_condition(self): - msg = "ExclusionConstraint with conditions cannot be deferred." - with self.assertRaisesMessage(ValueError, msg): - ExclusionConstraint( - name="exclude_invalid_condition", - expressions=[(F("datespan"), RangeOperators.OVERLAPS)], - condition=Q(cancelled=False), - deferrable=Deferrable.DEFERRED, - ) - def test_invalid_include_type(self): msg = "ExclusionConstraint.include must be a list or tuple." 
with self.assertRaisesMessage(ValueError, msg): @@ -912,6 +909,39 @@ class ExclusionConstraintTests(PostgreSQLTestCase): RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) + def test_range_adjacent_initially_deferred_with_condition(self): + constraint_name = "ints_adjacent_deferred_with_condition" + self.assertNotIn( + constraint_name, self.get_constraints(RangesModel._meta.db_table) + ) + constraint = ExclusionConstraint( + name=constraint_name, + expressions=[("ints", RangeOperators.ADJACENT_TO)], + condition=Q(ints__lt=(100, 200)), + deferrable=Deferrable.DEFERRED, + ) + with connection.schema_editor() as editor: + editor.add_constraint(RangesModel, constraint) + self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) + RangesModel.objects.create(ints=(20, 50)) + adjacent_range = RangesModel.objects.create(ints=(10, 20)) + # Constraint behavior can be changed with SET CONSTRAINTS. + with self.assertRaises(IntegrityError): + with transaction.atomic(), connection.cursor() as cursor: + quoted_name = connection.ops.quote_name(constraint_name) + cursor.execute(f"SET CONSTRAINTS {quoted_name} IMMEDIATE") + # Remove adjacent range before the end of transaction. + adjacent_range.delete() + RangesModel.objects.create(ints=(10, 19)) + RangesModel.objects.create(ints=(51, 60)) + # Add adjacent range that doesn't match the condition. + RangesModel.objects.create(ints=(200, 500)) + adjacent_range = RangesModel.objects.create(ints=(100, 200)) + # Constraint behavior can be changed with SET CONSTRAINTS. + with transaction.atomic(), connection.cursor() as cursor: + quoted_name = connection.ops.quote_name(constraint_name) + cursor.execute(f"SET CONSTRAINTS {quoted_name} IMMEDIATE") + def test_range_adjacent_gist_include(self): constraint_name = "ints_adjacent_gist_include" self.assertNotIn( @@ -1175,7 +1205,6 @@ class ExclusionConstraintTests(PostgreSQLTestCase): ) -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class ExclusionConstraintOpclassesDepracationTests(PostgreSQLTestCase): def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" diff --git a/tests/postgres_tests/test_indexes.py b/tests/postgres_tests/test_indexes.py index dc1064b35d..52bfcbeb06 100644 --- a/tests/postgres_tests/test_indexes.py +++ b/tests/postgres_tests/test_indexes.py @@ -15,7 +15,7 @@ from django.db import NotSupportedError, connection from django.db.models import CharField, F, Index, Q from django.db.models.functions import Cast, Collate, Length, Lower from django.test import skipUnlessDBFeature -from django.test.utils import modify_settings, register_lookup +from django.test.utils import register_lookup from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .fields import SearchVector, SearchVectorField @@ -235,7 +235,6 @@ class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): ) -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class SchemaTests(PostgreSQLTestCase): get_opclass_query = """ SELECT opcname, c.relname FROM pg_opclass AS oc diff --git a/tests/postgres_tests/test_introspection.py b/tests/postgres_tests/test_introspection.py index 3179b47cc9..73c426d1ba 100644 --- a/tests/postgres_tests/test_introspection.py +++ b/tests/postgres_tests/test_introspection.py @@ -1,12 +1,10 @@ from io import StringIO from django.core.management import call_command -from django.test.utils import modify_settings from . 
import PostgreSQLTestCase -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class InspectDBTests(PostgreSQLTestCase): def assertFieldsInModel(self, model, field_outputs): out = StringIO() diff --git a/tests/postgres_tests/test_ranges.py b/tests/postgres_tests/test_ranges.py index 870039a6ad..038e0233c4 100644 --- a/tests/postgres_tests/test_ranges.py +++ b/tests/postgres_tests/test_ranges.py @@ -20,14 +20,17 @@ from .models import ( ) try: - from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange - from django.contrib.postgres import fields as pg_fields from django.contrib.postgres import forms as pg_forms from django.contrib.postgres.validators import ( RangeMaxValueValidator, RangeMinValueValidator, ) + from django.db.backends.postgresql.psycopg_any import ( + DateRange, + DateTimeTZRange, + NumericRange, + ) except ImportError: pass diff --git a/tests/postgres_tests/test_search.py b/tests/postgres_tests/test_search.py index d085ac852c..6ec20c0654 100644 --- a/tests/postgres_tests/test_search.py +++ b/tests/postgres_tests/test_search.py @@ -7,7 +7,6 @@ transcript. """ from django.db import connection from django.db.models import F, Value -from django.test import modify_settings from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .models import Character, Line, LineSavedSearch, Scene @@ -103,17 +102,18 @@ class GrailTestData: ) -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class SimpleSearchTest(GrailTestData, PostgreSQLTestCase): def test_simple(self): searched = Line.objects.filter(dialogue__search="elbows") self.assertSequenceEqual(searched, [self.verse1]) def test_non_exact_match(self): + self.check_default_text_search_config() searched = Line.objects.filter(dialogue__search="hearts") self.assertSequenceEqual(searched, [self.verse2]) def test_search_two_terms(self): + self.check_default_text_search_config() searched = Line.objects.filter(dialogue__search="heart bowel") self.assertSequenceEqual(searched, [self.verse2]) @@ -140,7 +140,6 @@ class SimpleSearchTest(GrailTestData, PostgreSQLTestCase): self.assertSequenceEqual(searched, [match]) -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase): def test_existing_vector(self): Line.objects.update(dialogue_search_vector=SearchVector("dialogue")) @@ -339,7 +338,6 @@ class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase): self.assertSequenceEqual(searched, [self.french]) -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class TestCombinations(GrailTestData, PostgreSQLTestCase): def test_vector_add(self): searched = Line.objects.annotate( @@ -370,6 +368,7 @@ class TestCombinations(GrailTestData, PostgreSQLTestCase): Line.objects.filter(dialogue__search=None + SearchVector("character__name")) def test_combine_different_vector_configs(self): + self.check_default_text_search_config() searched = Line.objects.annotate( search=( SearchVector("dialogue", config="english") @@ -442,6 +441,7 @@ class TestCombinations(GrailTestData, PostgreSQLTestCase): self.assertSequenceEqual(searched, [self.verse2]) def test_combine_raw_phrase(self): + self.check_default_text_search_config() searched = Line.objects.filter( dialogue__search=( SearchQuery("burn:*", search_type="raw", config="simple") @@ -462,7 +462,6 @@ class TestCombinations(GrailTestData, PostgreSQLTestCase): Line.objects.filter(dialogue__search=None & SearchQuery("kneecaps")) -@modify_settings(INSTALLED_APPS={"append": 
"django.contrib.postgres"}) class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase): def test_ranking(self): searched = ( @@ -515,10 +514,11 @@ class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase): vector = SearchVector("dialogue", weight="D") + SearchVector( "character__name", weight="A" ) + weights = [1.0, 0.0, 0.0, 0.5] searched = ( Line.objects.filter(scene=self.witch_scene) .annotate( - rank=SearchRank(vector, SearchQuery("witch"), weights=[1, 0, 0, 0.5]), + rank=SearchRank(vector, SearchQuery("witch"), weights=weights), ) .order_by("-rank")[:2] ) @@ -661,9 +661,9 @@ class SearchQueryTests(PostgreSQLSimpleTestCase): self.assertEqual(str(query), expected_str) -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase): def test_headline(self): + self.check_default_text_search_config() searched = Line.objects.annotate( headline=SearchHeadline( F("dialogue"), @@ -679,6 +679,7 @@ class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase): ) def test_headline_untyped_args(self): + self.check_default_text_search_config() searched = Line.objects.annotate( headline=SearchHeadline("dialogue", "killed", config="english"), ).get(pk=self.verse0.pk) @@ -731,6 +732,7 @@ class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase): ) def test_headline_highlight_all_option(self): + self.check_default_text_search_config() searched = Line.objects.annotate( headline=SearchHeadline( "dialogue", @@ -745,6 +747,7 @@ class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase): ) def test_headline_short_word_option(self): + self.check_default_text_search_config() searched = Line.objects.annotate( headline=SearchHeadline( "dialogue", @@ -762,6 +765,7 @@ class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase): ) def test_headline_fragments_words_options(self): + self.check_default_text_search_config() searched = Line.objects.annotate( headline=SearchHeadline( "dialogue", diff --git a/tests/postgres_tests/test_signals.py b/tests/postgres_tests/test_signals.py index 764524d8e6..b88a7e0497 100644 --- a/tests/postgres_tests/test_signals.py +++ b/tests/postgres_tests/test_signals.py @@ -9,7 +9,7 @@ try: register_type_handlers, ) except ImportError: - pass # pyscogp2 isn't installed. + pass # psycopg isn't installed. class OIDTests(PostgreSQLTestCase): @@ -34,8 +34,9 @@ class OIDTests(PostgreSQLTestCase): self.assertOIDs(array_oids) def test_citext_values(self): - oids = get_citext_oids(connection.alias) + oids, citext_oids = get_citext_oids(connection.alias) self.assertOIDs(oids) + self.assertOIDs(citext_oids) def test_register_type_handlers_no_db(self): """Registering type handlers for the nodb connection does nothing.""" diff --git a/tests/postgres_tests/test_trigram.py b/tests/postgres_tests/test_trigram.py index 3fa0550441..812403a324 100644 --- a/tests/postgres_tests/test_trigram.py +++ b/tests/postgres_tests/test_trigram.py @@ -1,5 +1,3 @@ -from django.test import modify_settings - from . 
import PostgreSQLTestCase from .models import CharFieldModel, TextFieldModel @@ -16,7 +14,6 @@ except ImportError: pass -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class TrigramTest(PostgreSQLTestCase): Model = CharFieldModel diff --git a/tests/postgres_tests/test_unaccent.py b/tests/postgres_tests/test_unaccent.py index a512184589..6d115773d4 100644 --- a/tests/postgres_tests/test_unaccent.py +++ b/tests/postgres_tests/test_unaccent.py @@ -1,11 +1,9 @@ from django.db import connection -from django.test import modify_settings from . import PostgreSQLTestCase from .models import CharFieldModel, TextFieldModel -@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}) class UnaccentTest(PostgreSQLTestCase): Model = CharFieldModel diff --git a/tests/prefetch_related/tests.py b/tests/prefetch_related/tests.py index 4faf13bcaf..c153c0d9ec 100644 --- a/tests/prefetch_related/tests.py +++ b/tests/prefetch_related/tests.py @@ -1731,7 +1731,7 @@ class DirectPrefetchedObjectCacheReuseTests(TestCase): lookup. """ with self.assertNumQueries(3): - books = Book.objects.filter(title__in=["book1", "book2"],).prefetch_related( + books = Book.objects.filter(title__in=["book1", "book2"]).prefetch_related( Prefetch( "first_time_authors", Author.objects.prefetch_related( @@ -1785,7 +1785,7 @@ class DirectPrefetchedObjectCacheReuseTests(TestCase): def test_detect_is_fetched_with_to_attr(self): with self.assertNumQueries(3): - books = Book.objects.filter(title__in=["book1", "book2"],).prefetch_related( + books = Book.objects.filter(title__in=["book1", "book2"]).prefetch_related( Prefetch( "first_time_authors", Author.objects.prefetch_related( diff --git a/tests/queries/test_qs_combinators.py b/tests/queries/test_qs_combinators.py index 865e172816..6358e537ae 100644 --- a/tests/queries/test_qs_combinators.py +++ b/tests/queries/test_qs_combinators.py @@ -304,6 +304,34 @@ class QuerySetSetOperationTests(TestCase): operator.itemgetter("num"), ) + def test_union_with_select_related_and_order(self): + e1 = ExtraInfo.objects.create(value=7, info="e1") + a1 = Author.objects.create(name="a1", num=1, extra=e1) + a2 = Author.objects.create(name="a2", num=3, extra=e1) + Author.objects.create(name="a3", num=2, extra=e1) + base_qs = Author.objects.select_related("extra").order_by() + qs1 = base_qs.filter(name="a1") + qs2 = base_qs.filter(name="a2") + self.assertSequenceEqual(qs1.union(qs2).order_by("pk"), [a1, a2]) + + @skipUnlessDBFeature("supports_slicing_ordering_in_compound") + def test_union_with_select_related_and_first(self): + e1 = ExtraInfo.objects.create(value=7, info="e1") + a1 = Author.objects.create(name="a1", num=1, extra=e1) + Author.objects.create(name="a2", num=3, extra=e1) + base_qs = Author.objects.select_related("extra") + qs1 = base_qs.filter(name="a1") + qs2 = base_qs.filter(name="a2") + self.assertEqual(qs1.union(qs2).first(), a1) + + def test_union_with_first(self): + e1 = ExtraInfo.objects.create(value=7, info="e1") + a1 = Author.objects.create(name="a1", num=1, extra=e1) + base_qs = Author.objects.order_by() + qs1 = base_qs.filter(name="a1") + qs2 = base_qs.filter(name="a2") + self.assertEqual(qs1.union(qs2).first(), a1) + def test_union_multiple_models_with_values_list_and_order(self): reserved_name = ReservedName.objects.create(name="rn1", order=0) qs1 = Celebrity.objects.all() @@ -396,6 +424,12 @@ class QuerySetSetOperationTests(TestCase): qs = Number.objects.filter(pk__in=[]) self.assertEqual(qs.union(qs).count(), 0) + def 
test_count_union_with_select_related(self): + e1 = ExtraInfo.objects.create(value=1, info="e1") + Author.objects.create(name="a1", num=1, extra=e1) + qs = Author.objects.select_related("extra").order_by() + self.assertEqual(qs.union(qs).count(), 1) + @skipUnlessDBFeature("supports_select_difference") def test_count_difference(self): qs1 = Number.objects.filter(num__lt=10) diff --git a/tests/queries/test_query.py b/tests/queries/test_query.py index b0a5058f6c..99d0e32427 100644 --- a/tests/queries/test_query.py +++ b/tests/queries/test_query.py @@ -164,12 +164,12 @@ class TestQuery(SimpleTestCase): class TestQueryNoModel(TestCase): def test_rawsql_annotation(self): query = Query(None) - sql = "%s IS NULL" + sql = "%s = 1" # Wrap with a CASE WHEN expression if a database backend (e.g. Oracle) # doesn't support boolean expression in SELECT list. if not connection.features.supports_boolean_expr_in_select_clause: sql = f"CASE WHEN {sql} THEN 1 ELSE 0 END" - query.add_annotation(RawSQL(sql, (None,), BooleanField()), "_check") + query.add_annotation(RawSQL(sql, (1,), BooleanField()), "_check") result = query.get_compiler(using=DEFAULT_DB_ALIAS).execute_sql(SINGLE) self.assertEqual(result[0], 1) @@ -183,8 +183,7 @@ class TestQueryNoModel(TestCase): def test_q_annotation(self): query = Query(None) check = ExpressionWrapper( - Q(RawSQL("%s IS NULL", (None,), BooleanField())) - | Q(Exists(Item.objects.all())), + Q(RawSQL("%s = 1", (1,), BooleanField())) | Q(Exists(Item.objects.all())), BooleanField(), ) query.add_annotation(check, "_check") diff --git a/tests/queries/tests.py b/tests/queries/tests.py index 63e9ea6687..a6a2b252eb 100644 --- a/tests/queries/tests.py +++ b/tests/queries/tests.py @@ -5,7 +5,7 @@ import unittest from operator import attrgetter from threading import Lock -from django.core.exceptions import EmptyResultSet, FieldError +from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet from django.db import DEFAULT_DB_ALIAS, connection from django.db.models import CharField, Count, Exists, F, Max, OuterRef, Q from django.db.models.expressions import RawSQL @@ -3322,9 +3322,7 @@ class ExcludeTests(TestCase): ) self.assertCountEqual( Job.objects.annotate( - responsibility=subquery.filter(job=OuterRef("name"),).values( - "id" - )[:1] + responsibility=subquery.filter(job=OuterRef("name")).values("id")[:1] ), [self.j1, self.j2], ) @@ -3588,7 +3586,8 @@ class WhereNodeTest(SimpleTestCase): with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() - self.assertEqual(w.as_sql(compiler, connection), ("", [])) + with self.assertRaises(FullResultSet): + w.as_sql(compiler, connection) w = WhereNode(children=[self.DummyNode(), self.DummyNode()]) self.assertEqual(w.as_sql(compiler, connection), ("(dummy AND dummy)", [])) w.negate() @@ -3597,7 +3596,8 @@ class WhereNodeTest(SimpleTestCase): with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() - self.assertEqual(w.as_sql(compiler, connection), ("", [])) + with self.assertRaises(FullResultSet): + w.as_sql(compiler, connection) def test_empty_full_handling_disjunction(self): compiler = WhereNodeTest.MockCompiler() @@ -3605,7 +3605,8 @@ class WhereNodeTest(SimpleTestCase): with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() - self.assertEqual(w.as_sql(compiler, connection), ("", [])) + with self.assertRaises(FullResultSet): + w.as_sql(compiler, connection) w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector=OR) 
self.assertEqual(w.as_sql(compiler, connection), ("(dummy OR dummy)", [])) w.negate() @@ -3619,7 +3620,8 @@ class WhereNodeTest(SimpleTestCase): compiler = WhereNodeTest.MockCompiler() empty_w = WhereNode() w = WhereNode(children=[empty_w, empty_w]) - self.assertEqual(w.as_sql(compiler, connection), ("", [])) + with self.assertRaises(FullResultSet): + w.as_sql(compiler, connection) w.negate() with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) @@ -3627,9 +3629,11 @@ class WhereNodeTest(SimpleTestCase): with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() - self.assertEqual(w.as_sql(compiler, connection), ("", [])) + with self.assertRaises(FullResultSet): + w.as_sql(compiler, connection) w = WhereNode(children=[empty_w, NothingNode()], connector=OR) - self.assertEqual(w.as_sql(compiler, connection), ("", [])) + with self.assertRaises(FullResultSet): + w.as_sql(compiler, connection) w = WhereNode(children=[empty_w, NothingNode()], connector=AND) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) diff --git a/tests/requests/tests.py b/tests/requests/tests.py index d8068583a7..3cbcefbda7 100644 --- a/tests/requests/tests.py +++ b/tests/requests/tests.py @@ -5,9 +5,14 @@ from urllib.parse import urlencode from django.core.exceptions import DisallowedHost from django.core.handlers.wsgi import LimitedStream, WSGIRequest -from django.http import HttpRequest, RawPostDataException, UnreadablePostError +from django.http import ( + HttpHeaders, + HttpRequest, + RawPostDataException, + UnreadablePostError, +) from django.http.multipartparser import MultiPartParserError -from django.http.request import HttpHeaders, split_domain_port +from django.http.request import split_domain_port from django.test import RequestFactory, SimpleTestCase, override_settings from django.test.client import FakePayload @@ -425,10 +430,8 @@ class RequestsTests(SimpleTestCase): Multipart POST requests with Content-Length >= 0 are valid and need to be handled. """ - # According to: - # https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13 - # Every request.POST with Content-Length >= 0 is a valid request, - # this test ensures that we handle Content-Length == 0. + # According to RFC 9110 Section 8.6 every POST with Content-Length >= 0 + # is a valid request, so ensure that we handle Content-Length == 0. payload = FakePayload( "\r\n".join( [ diff --git a/tests/requirements/postgres.txt b/tests/requirements/postgres.txt index f0288c8b74..726a08b3e4 100644 --- a/tests/requirements/postgres.txt +++ b/tests/requirements/postgres.txt @@ -1 +1 @@ -psycopg2>=2.8.4 +psycopg[binary]>=3.1 diff --git a/tests/requirements/py3.txt b/tests/requirements/py3.txt index 29f8d1a1b2..89209ca9e6 100644 --- a/tests/requirements/py3.txt +++ b/tests/requirements/py3.txt @@ -1,14 +1,14 @@ aiosmtpd -asgiref >= 3.5.2 +asgiref >= 3.6.0 argon2-cffi >= 16.1.0 backports.zoneinfo; python_version < '3.9' bcrypt black docutils -geoip2 +geoip2; python_version < '3.12' jinja2 >= 2.9.2 -numpy -Pillow >= 6.2.0 +numpy; python_version < '3.12' +Pillow >= 6.2.0; sys.platform != 'win32' or python_version < '3.12' # pylibmc/libmemcached can't be built on Windows. 
pylibmc; sys.platform != 'win32' pymemcache >= 3.4.0 diff --git a/tests/schema/test_logging.py b/tests/schema/test_logging.py index 2821e5f406..9c7069c874 100644 --- a/tests/schema/test_logging.py +++ b/tests/schema/test_logging.py @@ -9,9 +9,9 @@ class SchemaLoggerTests(TestCase): params = [42, 1337] with self.assertLogs("django.db.backends.schema", "DEBUG") as cm: editor.execute(sql, params) + if connection.features.schema_editor_uses_clientside_param_binding: + sql = "SELECT * FROM foo WHERE id in (42, 1337)" + params = None self.assertEqual(cm.records[0].sql, sql) self.assertEqual(cm.records[0].params, params) - self.assertEqual( - cm.records[0].getMessage(), - "SELECT * FROM foo WHERE id in (%s, %s); (params [42, 1337])", - ) + self.assertEqual(cm.records[0].getMessage(), f"{sql}; (params {params})") diff --git a/tests/schema/tests.py b/tests/schema/tests.py index a2b72cd42a..e325fa0c88 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -273,6 +273,27 @@ class SchemaTests(TransactionTestCase): if f.name == column ) + def get_column_comment(self, table, column): + with connection.cursor() as cursor: + return next( + f.comment + for f in connection.introspection.get_table_description(cursor, table) + if f.name == column + ) + + def get_table_comment(self, table): + with connection.cursor() as cursor: + return next( + t.comment + for t in connection.introspection.get_table_list(cursor) + if t.name == table + ) + + def assert_column_comment_not_exists(self, table, column): + with connection.cursor() as cursor: + columns = connection.introspection.get_table_description(cursor, table) + self.assertFalse(any([c.name == column and c.comment for c in columns])) + def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) @@ -2200,8 +2221,25 @@ class SchemaTests(TransactionTestCase): with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Add the field - with connection.schema_editor() as editor: + with CaptureQueriesContext( + connection + ) as ctx, connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) + # Table is not rebuilt. 
+ self.assertEqual( + len( + [ + query["sql"] + for query in ctx.captured_queries + if "CREATE TABLE" in query["sql"] + ] + ), + 1, + ) + self.assertIs( + any("DROP TABLE" in query["sql"] for query in ctx.captured_queries), + False, + ) # Ensure there is now an m2m table there columns = self.column_classes(new_field.remote_field.through) self.assertEqual( @@ -4373,6 +4411,186 @@ class SchemaTests(TransactionTestCase): ], ) + @skipUnlessDBFeature("supports_comments") + def test_add_db_comment_charfield(self): + comment = "Custom comment" + field = CharField(max_length=255, db_comment=comment) + field.set_attributes_from_name("name_with_comment") + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.add_field(Author, field) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name_with_comment"), + comment, + ) + + @skipUnlessDBFeature("supports_comments") + def test_add_db_comment_and_default_charfield(self): + comment = "Custom comment with default" + field = CharField(max_length=255, default="Joe Doe", db_comment=comment) + field.set_attributes_from_name("name_with_comment_default") + with connection.schema_editor() as editor: + editor.create_model(Author) + Author.objects.create(name="Before adding a new field") + editor.add_field(Author, field) + + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name_with_comment_default"), + comment, + ) + with connection.cursor() as cursor: + cursor.execute( + f"SELECT name_with_comment_default FROM {Author._meta.db_table};" + ) + for row in cursor.fetchall(): + self.assertEqual(row[0], "Joe Doe") + + @skipUnlessDBFeature("supports_comments") + def test_alter_db_comment(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + # Add comment. + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_comment="Custom comment") + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + "Custom comment", + ) + # Alter comment. + old_field = new_field + new_field = CharField(max_length=255, db_comment="New custom comment") + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + "New custom comment", + ) + # Remove comment. 
+ old_field = new_field + new_field = CharField(max_length=255) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertIn( + self.get_column_comment(Author._meta.db_table, "name"), + [None, ""], + ) + + @skipUnlessDBFeature("supports_comments", "supports_foreign_keys") + def test_alter_db_comment_foreign_key(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + + comment = "FK custom comment" + old_field = Book._meta.get_field("author") + new_field = ForeignKey(Author, CASCADE, db_comment=comment) + new_field.set_attributes_from_name("author") + with connection.schema_editor() as editor: + editor.alter_field(Book, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Book._meta.db_table, "author_id"), + comment, + ) + + @skipUnlessDBFeature("supports_comments") + def test_alter_field_type_preserve_comment(self): + with connection.schema_editor() as editor: + editor.create_model(Author) + + comment = "This is the name." + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_comment=comment) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + comment, + ) + # Changing a field type should preserve the comment. + old_field = new_field + new_field = CharField(max_length=511, db_comment=comment) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, old_field, strict=True) + # Comment is preserved. + self.assertEqual( + self.get_column_comment(Author._meta.db_table, "name"), + comment, + ) + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_comments") + def test_db_comment_table(self): + class ModelWithDbTableComment(Model): + class Meta: + app_label = "schema" + db_table_comment = "Custom table comment" + + with connection.schema_editor() as editor: + editor.create_model(ModelWithDbTableComment) + self.isolated_local_models = [ModelWithDbTableComment] + self.assertEqual( + self.get_table_comment(ModelWithDbTableComment._meta.db_table), + "Custom table comment", + ) + # Alter table comment. + old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment + with connection.schema_editor() as editor: + editor.alter_db_table_comment( + ModelWithDbTableComment, old_db_table_comment, "New table comment" + ) + self.assertEqual( + self.get_table_comment(ModelWithDbTableComment._meta.db_table), + "New table comment", + ) + # Remove table comment. 
+ old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment + with connection.schema_editor() as editor: + editor.alter_db_table_comment( + ModelWithDbTableComment, old_db_table_comment, None + ) + self.assertIn( + self.get_table_comment(ModelWithDbTableComment._meta.db_table), + [None, ""], + ) + + @isolate_apps("schema") + @skipUnlessDBFeature("supports_comments", "supports_foreign_keys") + def test_db_comments_from_abstract_model(self): + class AbstractModelWithDbComments(Model): + name = CharField( + max_length=255, db_comment="Custom comment", null=True, blank=True + ) + + class Meta: + app_label = "schema" + abstract = True + db_table_comment = "Custom table comment" + + class ModelWithDbComments(AbstractModelWithDbComments): + pass + + with connection.schema_editor() as editor: + editor.create_model(ModelWithDbComments) + self.isolated_local_models = [ModelWithDbComments] + + self.assertEqual( + self.get_column_comment(ModelWithDbComments._meta.db_table, "name"), + "Custom comment", + ) + self.assertEqual( + self.get_table_comment(ModelWithDbComments._meta.db_table), + "Custom table comment", + ) + @unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific") def test_alter_field_add_index_to_charfield(self): # Create the table and verify no initial indexes. @@ -4933,6 +5151,38 @@ class SchemaTests(TransactionTestCase): editor.alter_field(Author, new_field, old_field, strict=True) self.assertIsNone(self.get_column_collation(Author._meta.db_table, "name")) + @skipUnlessDBFeature("supports_collation_on_charfield") + def test_alter_field_type_preserve_db_collation(self): + collation = connection.features.test_collations.get("non_default") + if not collation: + self.skipTest("Language collations are not supported.") + + with connection.schema_editor() as editor: + editor.create_model(Author) + + old_field = Author._meta.get_field("name") + new_field = CharField(max_length=255, db_collation=collation) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, old_field, new_field, strict=True) + self.assertEqual( + self.get_column_collation(Author._meta.db_table, "name"), + collation, + ) + # Changing a field type should preserve the collation. + old_field = new_field + new_field = CharField(max_length=511, db_collation=collation) + new_field.set_attributes_from_name("name") + new_field.model = Author + with connection.schema_editor() as editor: + editor.alter_field(Author, new_field, old_field, strict=True) + # Collation is preserved. 
+ self.assertEqual( + self.get_column_collation(Author._meta.db_table, "name"), + collation, + ) + @skipUnlessDBFeature("supports_collation_on_charfield") def test_alter_primary_key_db_collation(self): collation = connection.features.test_collations.get("non_default") diff --git a/tests/select_for_update/tests.py b/tests/select_for_update/tests.py index 5f5ada8939..97c067d76e 100644 --- a/tests/select_for_update/tests.py +++ b/tests/select_for_update/tests.py @@ -243,7 +243,7 @@ class SelectForUpdateTests(TransactionTestCase): def test_for_update_sql_model_proxy_generated_of(self): with transaction.atomic(), CaptureQueriesContext(connection) as ctx: list( - CityCountryProxy.objects.select_related("country",).select_for_update( + CityCountryProxy.objects.select_related("country").select_for_update( of=("country",), ) ) @@ -422,7 +422,7 @@ class SelectForUpdateTests(TransactionTestCase): with self.subTest(name=name): with self.assertRaisesMessage(FieldError, msg % name): with transaction.atomic(): - Person.objects.select_related("born", "profile",).exclude( + Person.objects.select_related("born", "profile").exclude( profile=None ).select_for_update(of=(name,)).get() diff --git a/tests/servers/tests.py b/tests/servers/tests.py index 91f766926b..66f0af1604 100644 --- a/tests/servers/tests.py +++ b/tests/servers/tests.py @@ -5,6 +5,7 @@ import errno import os import socket import threading +import unittest from http.client import HTTPConnection from urllib.error import HTTPError from urllib.parse import urlencode @@ -12,7 +13,7 @@ from urllib.request import urlopen from django.conf import settings from django.core.servers.basehttp import ThreadedWSGIServer, WSGIServer -from django.db import DEFAULT_DB_ALIAS, connections +from django.db import DEFAULT_DB_ALIAS, connection, connections from django.test import LiveServerTestCase, override_settings from django.test.testcases import LiveServerThread, QuietWSGIRequestHandler @@ -107,8 +108,33 @@ class LiveServerTestCloseConnectionTest(LiveServerBase): self.assertIsNone(conn.connection) +@unittest.skipUnless(connection.vendor == "sqlite", "SQLite specific test.") +class LiveServerInMemoryDatabaseLockTest(LiveServerBase): + def test_in_memory_database_lock(self): + """ + With a threaded LiveServer and an in-memory database, an error can + occur when 2 requests reach the server and try to lock the database + at the same time, if the requests do not share the same database + connection. + """ + conn = self.server_thread.connections_override[DEFAULT_DB_ALIAS] + # Open a connection to the database. + conn.connect() + # Create a transaction to lock the database. + cursor = conn.cursor() + cursor.execute("BEGIN IMMEDIATE TRANSACTION") + try: + with self.urlopen("/create_model_instance/") as f: + self.assertEqual(f.status, 200) + except HTTPError: + self.fail("Unexpected error due to a database lock.") + finally: + # Release the transaction. 
+ cursor.execute("ROLLBACK") + + class FailingLiveServerThread(LiveServerThread): - def _create_server(self): + def _create_server(self, connections_override=None): raise RuntimeError("Error creating server.") @@ -150,7 +176,7 @@ class LiveServerAddress(LiveServerBase): class LiveServerSingleThread(LiveServerThread): - def _create_server(self): + def _create_server(self, connections_override=None): return WSGIServer( (self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False ) diff --git a/tests/sessions_tests/tests.py b/tests/sessions_tests/tests.py index 96f8dbcd5b..d13c485342 100644 --- a/tests/sessions_tests/tests.py +++ b/tests/sessions_tests/tests.py @@ -715,7 +715,7 @@ class SessionMiddlewareTests(TestCase): ) def test_session_save_on_500(self): - def response_500(requset): + def response_500(request): response = HttpResponse("Horrible error") response.status_code = 500 request.session["hello"] = "world" @@ -727,6 +727,19 @@ class SessionMiddlewareTests(TestCase): # The value wasn't saved above. self.assertNotIn("hello", request.session.load()) + def test_session_save_on_5xx(self): + def response_503(request): + response = HttpResponse("Service Unavailable") + response.status_code = 503 + request.session["hello"] = "world" + return response + + request = self.request_factory.get("/") + SessionMiddleware(response_503)(request) + + # The value wasn't saved above. + self.assertNotIn("hello", request.session.load()) + def test_session_update_error_redirect(self): def response_delete_session(request): request.session = DatabaseSession() diff --git a/tests/signing/tests.py b/tests/signing/tests.py index 15d66f29d8..c145ce1e7d 100644 --- a/tests/signing/tests.py +++ b/tests/signing/tests.py @@ -2,15 +2,16 @@ import datetime from django.core import signing from django.test import SimpleTestCase, override_settings -from django.test.utils import freeze_time +from django.test.utils import freeze_time, ignore_warnings from django.utils.crypto import InvalidAlgorithm +from django.utils.deprecation import RemovedInDjango51Warning class TestSigner(SimpleTestCase): def test_signature(self): "signature() method should generate a signature" - signer = signing.Signer("predictable-secret") - signer2 = signing.Signer("predictable-secret2") + signer = signing.Signer(key="predictable-secret") + signer2 = signing.Signer(key="predictable-secret2") for s in ( b"hello", b"3098247:529:087:", @@ -28,8 +29,7 @@ class TestSigner(SimpleTestCase): self.assertNotEqual(signer.signature(s), signer2.signature(s)) def test_signature_with_salt(self): - "signature(value, salt=...) 
should work" - signer = signing.Signer("predictable-secret", salt="extra-salt") + signer = signing.Signer(key="predictable-secret", salt="extra-salt") self.assertEqual( signer.signature("hello"), signing.base64_hmac( @@ -40,12 +40,12 @@ class TestSigner(SimpleTestCase): ), ) self.assertNotEqual( - signing.Signer("predictable-secret", salt="one").signature("hello"), - signing.Signer("predictable-secret", salt="two").signature("hello"), + signing.Signer(key="predictable-secret", salt="one").signature("hello"), + signing.Signer(key="predictable-secret", salt="two").signature("hello"), ) def test_custom_algorithm(self): - signer = signing.Signer("predictable-secret", algorithm="sha512") + signer = signing.Signer(key="predictable-secret", algorithm="sha512") self.assertEqual( signer.signature("hello"), "Usf3uVQOZ9m6uPfVonKR-EBXjPe7bjMbp3_Fq8MfsptgkkM1ojidN0BxYaT5HAEN1" @@ -53,14 +53,14 @@ class TestSigner(SimpleTestCase): ) def test_invalid_algorithm(self): - signer = signing.Signer("predictable-secret", algorithm="whatever") + signer = signing.Signer(key="predictable-secret", algorithm="whatever") msg = "'whatever' is not an algorithm accepted by the hashlib module." with self.assertRaisesMessage(InvalidAlgorithm, msg): signer.sign("hello") def test_sign_unsign(self): "sign/unsign should be reversible" - signer = signing.Signer("predictable-secret") + signer = signing.Signer(key="predictable-secret") examples = [ "q;wjmbk;wkmb", "3098247529087", @@ -75,7 +75,7 @@ class TestSigner(SimpleTestCase): self.assertEqual(example, signer.unsign(signed)) def test_sign_unsign_non_string(self): - signer = signing.Signer("predictable-secret") + signer = signing.Signer(key="predictable-secret") values = [ 123, 1.23, @@ -91,7 +91,7 @@ class TestSigner(SimpleTestCase): def test_unsign_detects_tampering(self): "unsign should raise an exception if the value has been tampered with" - signer = signing.Signer("predictable-secret") + signer = signing.Signer(key="predictable-secret") value = "Another string" signed_value = signer.sign(value) transforms = ( @@ -106,7 +106,7 @@ class TestSigner(SimpleTestCase): signer.unsign(transform(signed_value)) def test_sign_unsign_object(self): - signer = signing.Signer("predictable-secret") + signer = signing.Signer(key="predictable-secret") tests = [ ["a", "list"], "a string \u2019", @@ -155,7 +155,7 @@ class TestSigner(SimpleTestCase): def test_works_with_non_ascii_keys(self): binary_key = b"\xe7" # Set some binary (non-ASCII key) - s = signing.Signer(binary_key) + s = signing.Signer(key=binary_key) self.assertEqual( "foo:EE4qGC5MEKyQG5msxYA0sBohAxLC0BJf8uRhemh0BGU", s.sign("foo"), @@ -164,7 +164,7 @@ class TestSigner(SimpleTestCase): def test_valid_sep(self): separators = ["/", "*sep*", ","] for sep in separators: - signer = signing.Signer("predictable-secret", sep=sep) + signer = signing.Signer(key="predictable-secret", sep=sep) self.assertEqual( "foo%sjZQoX_FtSO70jX9HLRGg2A_2s4kdDBxz1QoO_OpEQb0" % sep, signer.sign("foo"), @@ -181,16 +181,16 @@ class TestSigner(SimpleTestCase): signing.Signer(sep=sep) def test_verify_with_non_default_key(self): - old_signer = signing.Signer("secret") + old_signer = signing.Signer(key="secret") new_signer = signing.Signer( - "newsecret", fallback_keys=["othersecret", "secret"] + key="newsecret", fallback_keys=["othersecret", "secret"] ) signed = old_signer.sign("abc") self.assertEqual(new_signer.unsign(signed), "abc") def test_sign_unsign_multiple_keys(self): """The default key is a valid verification key.""" - signer = 
signing.Signer("secret", fallback_keys=["oldsecret"]) + signer = signing.Signer(key="secret", fallback_keys=["oldsecret"]) signed = signer.sign("abc") self.assertEqual(signer.unsign(signed), "abc") @@ -199,7 +199,7 @@ class TestSigner(SimpleTestCase): SECRET_KEY_FALLBACKS=["oldsecret"], ) def test_sign_unsign_ignore_secret_key_fallbacks(self): - old_signer = signing.Signer("oldsecret") + old_signer = signing.Signer(key="oldsecret") signed = old_signer.sign("abc") signer = signing.Signer(fallback_keys=[]) with self.assertRaises(signing.BadSignature): @@ -210,7 +210,7 @@ class TestSigner(SimpleTestCase): SECRET_KEY_FALLBACKS=["oldsecret"], ) def test_default_keys_verification(self): - old_signer = signing.Signer("oldsecret") + old_signer = signing.Signer(key="oldsecret") signed = old_signer.sign("abc") signer = signing.Signer() self.assertEqual(signer.unsign(signed), "abc") @@ -220,9 +220,9 @@ class TestTimestampSigner(SimpleTestCase): def test_timestamp_signer(self): value = "hello" with freeze_time(123456789): - signer = signing.TimestampSigner("predictable-key") + signer = signing.TimestampSigner(key="predictable-key") ts = signer.sign(value) - self.assertNotEqual(ts, signing.Signer("predictable-key").sign(value)) + self.assertNotEqual(ts, signing.Signer(key="predictable-key").sign(value)) self.assertEqual(signer.unsign(ts), value) with freeze_time(123456800): @@ -240,3 +240,23 @@ class TestBase62(SimpleTestCase): tests = [-(10**10), 10**10, 1620378259, *range(-100, 100)] for i in tests: self.assertEqual(i, signing.b62_decode(signing.b62_encode(i))) + + +class SignerPositionalArgumentsDeprecationTests(SimpleTestCase): + def test_deprecation(self): + msg = "Passing positional arguments to Signer is deprecated." + with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + signing.Signer("predictable-secret") + msg = "Passing positional arguments to TimestampSigner is deprecated." + with self.assertRaisesMessage(RemovedInDjango51Warning, msg): + signing.TimestampSigner("predictable-secret") + + @ignore_warnings(category=RemovedInDjango51Warning) + def test_positional_arguments(self): + signer = signing.Signer("secret", "/", "somesalt", "sha1", ["oldsecret"]) + signed = signer.sign("xyz") + self.assertEqual(signed, "xyz/zzdO_8rk-NGnm8jNasXRTF2P5kY") + self.assertEqual(signer.unsign(signed), "xyz") + old_signer = signing.Signer("oldsecret", "/", "somesalt", "sha1") + signed = old_signer.sign("xyz") + self.assertEqual(signer.unsign(signed), "xyz") diff --git a/tests/sitemaps_tests/test_http.py b/tests/sitemaps_tests/test_http.py index 4a269b8737..12e387757b 100644 --- a/tests/sitemaps_tests/test_http.py +++ b/tests/sitemaps_tests/test_http.py @@ -10,7 +10,7 @@ from django.utils.deprecation import RemovedInDjango50Warning from django.utils.formats import localize from .base import SitemapTestsBase -from .models import TestModel +from .models import I18nTestModel, TestModel class HTTPSitemapTests(SitemapTestsBase): @@ -440,6 +440,72 @@ class HTTPSitemapTests(SitemapTestsBase): ) self.assertXMLEqual(response.content.decode(), expected_content) + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_language_for_item_i18n_sitemap(self): + """ + A i18n sitemap index in which item can be chosen to be displayed for a + lang or not. 
+ """ + only_pt = I18nTestModel.objects.create(name="Only for PT") + response = self.client.get("/item-by-lang/i18n.xml") + url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk + expected_urls = ( + f"{url}/en/i18n/testmodel/{pk}/" + f"never0.5" + f"{url}/pt/i18n/testmodel/{pk}/" + f"never0.5" + f"{url}/pt/i18n/testmodel/{only_pt_pk}/" + f"never0.5" + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + + @override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese"))) + def test_alternate_language_for_item_i18n_sitemap(self): + """ + A i18n sitemap index in which item can be chosen to be displayed for a + lang or not. + """ + only_pt = I18nTestModel.objects.create(name="Only for PT") + response = self.client.get("/item-by-lang-alternates/i18n.xml") + url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk + expected_urls = ( + f"{url}/en/i18n/testmodel/{pk}/" + f"never0.5" + f'' + f'' + f'' + f"{url}/pt/i18n/testmodel/{pk}/" + f"never0.5" + f'' + f'' + f'' + f"{url}/pt/i18n/testmodel/{only_pt_pk}/" + f"never0.5" + f'' + ) + expected_content = ( + f'\n' + f'\n' + f"{expected_urls}\n" + f"" + ) + self.assertXMLEqual(response.content.decode(), expected_content) + def test_sitemap_without_entries(self): response = self.client.get("/sitemap-without-entries/sitemap.xml") expected_content = ( @@ -507,6 +573,16 @@ class HTTPSitemapTests(SitemapTestsBase): self.assertXMLEqual(index_response.content.decode(), expected_content_index) self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap) + def test_callable_sitemod_no_items(self): + index_response = self.client.get("/callable-lastmod-no-items/index.xml") + self.assertNotIn("Last-Modified", index_response) + expected_content_index = """ + + http://example.com/simple/sitemap-callable-lastmod.xml + + """ + self.assertXMLEqual(index_response.content.decode(), expected_content_index) + # RemovedInDjango50Warning class DeprecatedTests(SitemapTestsBase): diff --git a/tests/sitemaps_tests/urls/http.py b/tests/sitemaps_tests/urls/http.py index 9a468de476..2b512cfd69 100644 --- a/tests/sitemaps_tests/urls/http.py +++ b/tests/sitemaps_tests/urls/http.py @@ -48,6 +48,22 @@ class XDefaultI18nSitemap(AlternatesI18nSitemap): x_default = True +class ItemByLangSitemap(SimpleI18nSitemap): + def get_languages_for_item(self, item): + if item.name == "Only for PT": + return ["pt"] + return super().get_languages_for_item(item) + + +class ItemByLangAlternatesSitemap(AlternatesI18nSitemap): + x_default = True + + def get_languages_for_item(self, item): + if item.name == "Only for PT": + return ["pt"] + return super().get_languages_for_item(item) + + class EmptySitemap(Sitemap): changefreq = "never" priority = 0.5 @@ -114,6 +130,16 @@ class CallableLastmodFullSitemap(Sitemap): return obj.lastmod +class CallableLastmodNoItemsSitemap(Sitemap): + location = "/location/" + + def items(self): + return [] + + def lastmod(self, obj): + return obj.lastmod + + class GetLatestLastmodNoneSiteMap(Sitemap): changefreq = "never" priority = 0.5 @@ -158,6 +184,14 @@ xdefault_i18n_sitemaps = { "i18n-xdefault": XDefaultI18nSitemap, } +item_by_lang_i18n_sitemaps = { + "i18n-item-by-lang": ItemByLangSitemap, +} + +item_by_lang_alternates_i18n_sitemaps = { + "i18n-item-by-lang-alternates": ItemByLangAlternatesSitemap, +} + simple_sitemaps_not_callable = { "simple": SimpleSitemap(), } @@ -233,6 +267,10 @@ callable_lastmod_full_sitemap = { 
"callable-lastmod": CallableLastmodFullSitemap, } +callable_lastmod_no_items_sitemap = { + "callable-lastmod": CallableLastmodNoItemsSitemap, +} + urlpatterns = [ path("simple/index.xml", views.index, {"sitemaps": simple_sitemaps}), path("simple-paged/index.xml", views.index, {"sitemaps": simple_sitemaps_paged}), @@ -344,6 +382,18 @@ urlpatterns = [ {"sitemaps": sitemaps_lastmod_ascending}, name="django.contrib.sitemaps.views.sitemap", ), + path( + "item-by-lang/i18n.xml", + views.sitemap, + {"sitemaps": item_by_lang_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), + path( + "item-by-lang-alternates/i18n.xml", + views.sitemap, + {"sitemaps": item_by_lang_alternates_i18n_sitemaps}, + name="django.contrib.sitemaps.views.sitemap", + ), path( "lastmod-sitemaps/descending.xml", views.sitemap, @@ -417,6 +467,11 @@ urlpatterns = [ views.sitemap, {"sitemaps": callable_lastmod_full_sitemap}, ), + path( + "callable-lastmod-no-items/index.xml", + views.index, + {"sitemaps": callable_lastmod_no_items_sitemap}, + ), path( "generic-lastmod/index.xml", views.index, diff --git a/tests/staticfiles_tests/project/documents/absolute_root.js b/tests/staticfiles_tests/project/documents/absolute_root.js new file mode 100644 index 0000000000..4561b0389b --- /dev/null +++ b/tests/staticfiles_tests/project/documents/absolute_root.js @@ -0,0 +1,2 @@ +const rootConst = "root"; +export default rootConst; diff --git a/tests/staticfiles_tests/project/documents/cached/module.js b/tests/staticfiles_tests/project/documents/cached/module.js new file mode 100644 index 0000000000..7764e740d6 --- /dev/null +++ b/tests/staticfiles_tests/project/documents/cached/module.js @@ -0,0 +1,22 @@ +// Static imports. +import rootConst from "/static/absolute_root.js"; +import testConst from "./module_test.js"; +import * as NewModule from "./module_test.js"; +import { testConst as alias } from "./module_test.js"; +import { firstConst, secondConst } from "./module_test.js"; +import { + firstVar1 as firstVarAlias, + $second_var_2 as secondVarAlias +} from "./module_test.js"; +import relativeModule from "../nested/js/nested.js"; + +// Dynamic imports. +const dynamicModule = import("./module_test.js"); + +// Modules exports to aggregate modules. 
+export * from "./module_test.js"; +export { testConst } from "./module_test.js"; +export { + firstVar as firstVarAlias, + secondVar as secondVarAlias +} from "./module_test.js"; diff --git a/tests/staticfiles_tests/project/documents/cached/module_test.js b/tests/staticfiles_tests/project/documents/cached/module_test.js new file mode 100644 index 0000000000..219372f891 --- /dev/null +++ b/tests/staticfiles_tests/project/documents/cached/module_test.js @@ -0,0 +1,5 @@ +export const testConst = "test"; +export const firstConst = "first"; +export const secondConst = "second"; +export var firstVar1 = "test_1"; +export var SecondVar2 = "test_2"; diff --git a/tests/staticfiles_tests/project/documents/nested/js/nested.js b/tests/staticfiles_tests/project/documents/nested/js/nested.js new file mode 100644 index 0000000000..7646bbd17d --- /dev/null +++ b/tests/staticfiles_tests/project/documents/nested/js/nested.js @@ -0,0 +1 @@ +export default null; diff --git a/tests/staticfiles_tests/project/documents/staticfiles_v1.json b/tests/staticfiles_tests/project/documents/staticfiles_v1.json new file mode 100644 index 0000000000..4f85945e3f --- /dev/null +++ b/tests/staticfiles_tests/project/documents/staticfiles_v1.json @@ -0,0 +1,6 @@ +{ + "version": "1.0", + "paths": { + "dummy.txt": "dummy.txt" + } +} diff --git a/tests/staticfiles_tests/test_storage.py b/tests/staticfiles_tests/test_storage.py index 16bb556d4f..f2f1899aac 100644 --- a/tests/staticfiles_tests/test_storage.py +++ b/tests/staticfiles_tests/test_storage.py @@ -177,6 +177,52 @@ class TestHashedFiles: self.assertIn(b"https://", relfile.read()) self.assertPostCondition() + def test_module_import(self): + relpath = self.hashed_file_path("cached/module.js") + self.assertEqual(relpath, "cached/module.55fd6938fbc5.js") + tests = [ + # Relative imports. + b'import testConst from "./module_test.477bbebe77f0.js";', + b'import relativeModule from "../nested/js/nested.866475c46bb4.js";', + b'import { firstConst, secondConst } from "./module_test.477bbebe77f0.js";', + # Absolute import. + b'import rootConst from "/static/absolute_root.5586327fe78c.js";', + # Dynamic import. + b'const dynamicModule = import("./module_test.477bbebe77f0.js");', + # Creating a module object. + b'import * as NewModule from "./module_test.477bbebe77f0.js";', + # Aliases. 
+ b'import { testConst as alias } from "./module_test.477bbebe77f0.js";', + b"import {\n" + b" firstVar1 as firstVarAlias,\n" + b" $second_var_2 as secondVarAlias\n" + b'} from "./module_test.477bbebe77f0.js";', + ] + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + for module_import in tests: + with self.subTest(module_import=module_import): + self.assertIn(module_import, content) + self.assertPostCondition() + + def test_aggregating_modules(self): + relpath = self.hashed_file_path("cached/module.js") + self.assertEqual(relpath, "cached/module.55fd6938fbc5.js") + tests = [ + b'export * from "./module_test.477bbebe77f0.js";', + b'export { testConst } from "./module_test.477bbebe77f0.js";', + b"export {\n" + b" firstVar as firstVarAlias,\n" + b" secondVar as secondVarAlias\n" + b'} from "./module_test.477bbebe77f0.js";', + ] + with storage.staticfiles_storage.open(relpath) as relfile: + content = relfile.read() + for module_import in tests: + with self.subTest(module_import=module_import): + self.assertIn(module_import, content) + self.assertPostCondition() + @override_settings( STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "loop")], STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"], @@ -390,7 +436,7 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): # The in-memory version of the manifest matches the one on disk # since a properly created manifest should cover all filenames. if hashed_files: - manifest = storage.staticfiles_storage.load_manifest() + manifest, _ = storage.staticfiles_storage.load_manifest() self.assertEqual(hashed_files, manifest) def test_manifest_exists(self): @@ -417,7 +463,7 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): def test_parse_cache(self): hashed_files = storage.staticfiles_storage.hashed_files - manifest = storage.staticfiles_storage.load_manifest() + manifest, _ = storage.staticfiles_storage.load_manifest() self.assertEqual(hashed_files, manifest) def test_clear_empties_manifest(self): @@ -430,7 +476,7 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): hashed_files = storage.staticfiles_storage.hashed_files self.assertIn(cleared_file_name, hashed_files) - manifest_content = storage.staticfiles_storage.load_manifest() + manifest_content, _ = storage.staticfiles_storage.load_manifest() self.assertIn(cleared_file_name, manifest_content) original_path = storage.staticfiles_storage.path(cleared_file_name) @@ -445,7 +491,7 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): hashed_files = storage.staticfiles_storage.hashed_files self.assertNotIn(cleared_file_name, hashed_files) - manifest_content = storage.staticfiles_storage.load_manifest() + manifest_content, _ = storage.staticfiles_storage.load_manifest() self.assertNotIn(cleared_file_name, manifest_content) def test_missing_entry(self): @@ -489,6 +535,29 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): 2, ) + def test_manifest_hash(self): + # Collect the additional file. + self.run_collectstatic() + + _, manifest_hash_orig = storage.staticfiles_storage.load_manifest() + self.assertNotEqual(manifest_hash_orig, "") + self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig) + # Saving doesn't change the hash. 
+ storage.staticfiles_storage.save_manifest() + self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig) + # Delete the original file from the app, collect with clear. + os.unlink(self._clear_filename) + self.run_collectstatic(clear=True) + # Hash is changed. + _, manifest_hash = storage.staticfiles_storage.load_manifest() + self.assertNotEqual(manifest_hash, manifest_hash_orig) + + def test_manifest_hash_v1(self): + storage.staticfiles_storage.manifest_name = "staticfiles_v1.json" + manifest_content, manifest_hash = storage.staticfiles_storage.load_manifest() + self.assertEqual(manifest_hash, "") + self.assertEqual(manifest_content, {"dummy.txt": "dummy.txt"}) + @override_settings(STATICFILES_STORAGE="staticfiles_tests.storage.NoneHashStorage") class TestCollectionNoneHashStorage(CollectionTestCase): diff --git a/tests/template_tests/test_callables.py b/tests/template_tests/test_callables.py index acd8fb9d2c..bd53de5ca5 100644 --- a/tests/template_tests/test_callables.py +++ b/tests/template_tests/test_callables.py @@ -1,5 +1,6 @@ from unittest import TestCase +from django.db.models.utils import AltersData from django.template import Context, Engine @@ -63,6 +64,68 @@ class CallableVariablesTests(TestCase): # template rendering. self.assertEqual(my_doodad.num_calls, 0) + def test_alters_data_propagation(self): + class GrandParentLeft(AltersData): + def my_method(self): + return 42 + + my_method.alters_data = True + + class ParentLeft(GrandParentLeft): + def change_alters_data_method(self): + return 63 + + change_alters_data_method.alters_data = True + + def sub_non_callable_method(self): + return 64 + + sub_non_callable_method.alters_data = True + + class ParentRight(AltersData): + def other_method(self): + return 52 + + other_method.alters_data = True + + class Child(ParentLeft, ParentRight): + def my_method(self): + return 101 + + def other_method(self): + return 102 + + def change_alters_data_method(self): + return 103 + + change_alters_data_method.alters_data = False + + sub_non_callable_method = 104 + + class GrandChild(Child): + pass + + child = Child() + self.assertIs(child.my_method.alters_data, True) + self.assertIs(child.other_method.alters_data, True) + self.assertIs(child.change_alters_data_method.alters_data, False) + + grand_child = GrandChild() + self.assertIs(grand_child.my_method.alters_data, True) + self.assertIs(grand_child.other_method.alters_data, True) + self.assertIs(grand_child.change_alters_data_method.alters_data, False) + + c = Context({"element": grand_child}) + + t = self.engine.from_string("{{ element.my_method }}") + self.assertEqual(t.render(c), "") + t = self.engine.from_string("{{ element.other_method }}") + self.assertEqual(t.render(c), "") + t = self.engine.from_string("{{ element.change_alters_data_method }}") + self.assertEqual(t.render(c), "103") + t = self.engine.from_string("{{ element.sub_non_callable_method }}") + self.assertEqual(t.render(c), "104") + def test_do_not_call(self): class Doodad: do_not_call_in_templates = True diff --git a/tests/test_client/tests.py b/tests/test_client/tests.py index 57dc22ea0c..a0473c7310 100644 --- a/tests/test_client/tests.py +++ b/tests/test_client/tests.py @@ -1066,6 +1066,52 @@ class RequestFactoryTest(SimpleTestCase): echoed_request_line = "TRACE {} {}".format(url_path, protocol) self.assertContains(response, echoed_request_line) + def test_request_factory_default_headers(self): + request = RequestFactory( + HTTP_AUTHORIZATION="Bearer faketoken", + HTTP_X_ANOTHER_HEADER="some other 
value", + ).get("/somewhere/") + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + request = RequestFactory( + headers={ + "Authorization": "Bearer faketoken", + "X-Another-Header": "some other value", + } + ).get("/somewhere/") + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + def test_request_factory_sets_headers(self): + for method_name, view in self.http_methods_and_views: + method = getattr(self.request_factory, method_name) + request = method( + "/somewhere/", + HTTP_AUTHORIZATION="Bearer faketoken", + HTTP_X_ANOTHER_HEADER="some other value", + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + + request = method( + "/somewhere/", + headers={ + "Authorization": "Bearer faketoken", + "X-Another-Header": "some other value", + }, + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + @override_settings(ROOT_URLCONF="test_client.urls") class AsyncClientTest(TestCase): @@ -1103,6 +1149,14 @@ class AsyncClientTest(TestCase): response = await self.async_client.get("/get_view/", {"var": "val"}) self.assertContains(response, "This is a test. 
val is the value.") + async def test_post_data(self): + response = await self.async_client.post("/post_view/", {"value": 37}) + self.assertContains(response, "Data received: 37 is the value.") + + async def test_body_read_on_get_data(self): + response = await self.async_client.get("/post_view/") + self.assertContains(response, "Viewing GET page.") + @override_settings(ROOT_URLCONF="test_client.urls") class AsyncRequestFactoryTest(SimpleTestCase): @@ -1147,6 +1201,16 @@ class AsyncRequestFactoryTest(SimpleTestCase): self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'{"example": "data"}') + async def test_request_limited_read(self): + tests = ["GET", "POST"] + for method in tests: + with self.subTest(method=method): + request = self.request_factory.generic( + method, + "/somewhere", + ) + self.assertEqual(request.read(200), b"") + def test_request_factory_sets_headers(self): request = self.request_factory.get( "/somewhere/", @@ -1158,6 +1222,18 @@ class AsyncRequestFactoryTest(SimpleTestCase): self.assertEqual(request.headers["x-another-header"], "some other value") self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + request = self.request_factory.get( + "/somewhere/", + headers={ + "Authorization": "Bearer faketoken", + "X-Another-Header": "some other value", + }, + ) + self.assertEqual(request.headers["authorization"], "Bearer faketoken") + self.assertIn("HTTP_AUTHORIZATION", request.META) + self.assertEqual(request.headers["x-another-header"], "some other value") + self.assertIn("HTTP_X_ANOTHER_HEADER", request.META) + def test_request_factory_query_string(self): request = self.request_factory.get("/somewhere/", {"example": "data"}) self.assertNotIn("Query-String", request.headers) diff --git a/tests/test_client/views.py b/tests/test_client/views.py index 773e9e4e98..494844009d 100644 --- a/tests/test_client/views.py +++ b/tests/test_client/views.py @@ -90,6 +90,8 @@ def post_view(request): c = Context() else: t = Template("Viewing GET page.", name="Empty GET Template") + # Used by test_body_read_on_get_data. + request.read(200) c = Context() return HttpResponse(t.render(c)) @@ -393,8 +395,7 @@ def django_project_redirect(request): def no_trailing_slash_external_redirect(request): """ - RFC 2616 3.2.2: A bare domain without any abs_path element should be - treated as having the trailing `/`. + RFC 3986 Section 6.2.3: Empty path should be normalized to "/". Use https://testserver, rather than an external domain, in order to allow use of follow=True, triggering Client._handle_redirects(). 
diff --git a/tests/test_utils/tests.py b/tests/test_utils/tests.py index 9304f22a2e..79757045dd 100644 --- a/tests/test_utils/tests.py +++ b/tests/test_utils/tests.py @@ -235,10 +235,6 @@ class AssertNumQueriesTests(TestCase): self.assertNumQueries(2, test_func) -@unittest.skipUnless( - connection.vendor != "sqlite" or not connection.is_in_memory_db(), - "For SQLite in-memory tests, closing the connection destroys the database.", -) class AssertNumQueriesUponConnectionTests(TransactionTestCase): available_apps = [] diff --git a/tests/transactions/tests.py b/tests/transactions/tests.py index bdf912653c..1f2634224b 100644 --- a/tests/transactions/tests.py +++ b/tests/transactions/tests.py @@ -226,6 +226,22 @@ class AtomicTests(TransactionTestCase): transaction.savepoint_rollback(sid) self.assertSequenceEqual(Reporter.objects.all(), [reporter]) + @skipUnlessDBFeature("can_release_savepoints") + def test_failure_on_exit_transaction(self): + with transaction.atomic(): + with self.assertRaises(DatabaseError): + with transaction.atomic(): + Reporter.objects.create(last_name="Tintin") + self.assertEqual(len(Reporter.objects.all()), 1) + # Incorrect savepoint id to provoke a database error. + connection.savepoint_ids.append("12") + with self.assertRaises(transaction.TransactionManagementError): + len(Reporter.objects.all()) + self.assertIs(connection.needs_rollback, True) + if connection.savepoint_ids: + connection.savepoint_ids.pop() + self.assertSequenceEqual(Reporter.objects.all(), []) + class AtomicInsideTransactionTests(AtomicTests): """All basic tests for atomic should also pass within an existing transaction.""" diff --git a/tests/utils_tests/test_http.py b/tests/utils_tests/test_http.py index 9978c7bb52..2290fe85fb 100644 --- a/tests/utils_tests/test_http.py +++ b/tests/utils_tests/test_http.py @@ -7,6 +7,7 @@ from django.test import SimpleTestCase from django.utils.datastructures import MultiValueDict from django.utils.http import ( base36_to_int, + content_disposition_header, escape_leading_slashes, http_date, int_to_base36, @@ -329,7 +330,7 @@ class ETagProcessingTests(unittest.TestCase): ) self.assertEqual(parse_etags("*"), ["*"]) - # Ignore RFC 2616 ETags that are invalid according to RFC 7232. + # Ignore RFC 2616 ETags that are invalid according to RFC 9110. 
self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"']) def test_quoting(self): @@ -511,3 +512,28 @@ class ParseHeaderParameterTests(unittest.TestCase): for raw_line, expected_title in test_data: parsed = parse_header_parameters(raw_line) self.assertEqual(parsed[1]["title"], expected_title) + + +class ContentDispositionHeaderTests(unittest.TestCase): + def test_basic(self): + tests = ( + ((False, None), None), + ((False, "example"), 'inline; filename="example"'), + ((True, None), "attachment"), + ((True, "example"), 'attachment; filename="example"'), + ( + (True, '"example" file\\name'), + 'attachment; filename="\\"example\\" file\\\\name"', + ), + ((True, "espécimen"), "attachment; filename*=utf-8''esp%C3%A9cimen"), + ( + (True, '"espécimen" filename'), + "attachment; filename*=utf-8''%22esp%C3%A9cimen%22%20filename", + ), + ) + + for (is_attachment, filename), expected in tests: + with self.subTest(is_attachment=is_attachment, filename=filename): + self.assertEqual( + content_disposition_header(is_attachment, filename), expected + ) diff --git a/tests/utils_tests/test_ipv6.py b/tests/utils_tests/test_ipv6.py index b41778693d..bf78ed91c0 100644 --- a/tests/utils_tests/test_ipv6.py +++ b/tests/utils_tests/test_ipv6.py @@ -32,7 +32,7 @@ class TestUtilsIPv6(unittest.TestCase): self.assertFalse(is_valid_ipv6_address("::ffff:999.42.16.14")) self.assertFalse(is_valid_ipv6_address("::ffff:zzzz:0a0a")) # The ::1.2.3.4 format used to be valid but was deprecated - # in rfc4291 section 2.5.5.1 + # in RFC 4291 section 2.5.5.1. self.assertTrue(is_valid_ipv6_address("::254.42.16.14")) self.assertTrue(is_valid_ipv6_address("::0a0a:0a0a")) self.assertFalse(is_valid_ipv6_address("::999.42.16.14")) diff --git a/tests/view_tests/tests/test_debug.py b/tests/view_tests/tests/test_debug.py index 7224a2b6b6..020ac7193e 100644 --- a/tests/view_tests/tests/test_debug.py +++ b/tests/view_tests/tests/test_debug.py @@ -7,7 +7,7 @@ import tempfile import threading from io import StringIO from pathlib import Path -from unittest import mock, skipIf +from unittest import mock, skipIf, skipUnless from django.core import mail from django.core.files.uploadedfile import SimpleUploadedFile @@ -22,6 +22,7 @@ from django.urls.converters import IntConverter from django.utils.functional import SimpleLazyObject from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import mark_safe +from django.utils.version import PY311 from django.views.debug import ( CallableSettingWrapper, ExceptionCycleWarning, @@ -659,6 +660,40 @@ class ExceptionReporterTests(SimpleTestCase): text, ) + @skipUnless(PY311, "Exception notes were added in Python 3.11.") + def test_exception_with_notes(self): + request = self.rf.get("/test_view/") + try: + try: + raise RuntimeError("Oops") + except Exception as err: + err.add_note("First Note") + err.add_note("Second Note") + err.add_note(mark_safe("")) + raise err + except Exception: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn( + '
<pre class="exception_value">Oops\nFirst Note\nSecond Note\n' + "&lt;script&gt;alert(1);&lt;/script&gt;</pre>", + html, + ) + self.assertIn( + "Exception Value: Oops\nFirst Note\nSecond Note\n" + "&lt;script&gt;alert(1);&lt;/script&gt;", + html, + ) + + text = reporter.get_traceback_text() + self.assertIn( + "Exception Value: Oops\nFirst Note\nSecond Note\n" + "<script>alert(1);</script>", + text, + ) + def test_mid_stack_exception_without_traceback(self): try: try: raise @@ -731,6 +766,79 @@ class ExceptionReporterTests(SimpleTestCase): self.assertIn(implicit_exc.format("<p>Second exception</p>"), text) self.assertEqual(3, text.count("<p>Final exception</p>
      ")) + @skipIf( + sys._xoptions.get("no_debug_ranges", False) + or os.environ.get("PYTHONNODEBUGRANGES", False), + "Fine-grained error locations are disabled.", + ) + @skipUnless(PY311, "Fine-grained error locations were added in Python 3.11.") + def test_highlight_error_position(self): + request = self.rf.get("/test_view/") + try: + try: + raise AttributeError("Top level") + except AttributeError as explicit: + try: + raise ValueError(mark_safe("
<p>2nd exception</p>
      ")) from explicit + except ValueError: + raise IndexError("Final exception") + except Exception: + exc_type, exc_value, tb = sys.exc_info() + + reporter = ExceptionReporter(request, exc_type, exc_value, tb) + html = reporter.get_traceback_html() + self.assertIn( + "
<pre>                raise AttributeError(&quot;Top level&quot;)\n" + "                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>", + html, + ) + self.assertIn( + "<pre>                    raise ValueError(mark_safe(" + "&quot;&lt;p&gt;2nd exception&lt;/p&gt;&quot;)) from explicit\n" + "                         " + "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>", + html, + ) + self.assertIn( + "<pre>                    raise IndexError(&quot;Final exception&quot;)\n" + "                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>
      ", + html, + ) + # Pastebin. + self.assertIn( + " raise AttributeError("Top level")\n" + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + html, + ) + self.assertIn( + " raise ValueError(mark_safe(" + ""<p>2nd exception</p>")) from explicit\n" + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + html, + ) + self.assertIn( + " raise IndexError("Final exception")\n" + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + html, + ) + # Text traceback. + text = reporter.get_traceback_text() + self.assertIn( + ' raise AttributeError("Top level")\n' + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + text, + ) + self.assertIn( + ' raise ValueError(mark_safe("
<p>2nd exception</p>
      ")) from explicit\n' + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + text, + ) + self.assertIn( + ' raise IndexError("Final exception")\n' + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + text, + ) + def test_reporting_frames_without_source(self): try: source = "def funcName():\n raise Error('Whoops')\nfuncName()" diff --git a/tests/view_tests/tests/test_static.py b/tests/view_tests/tests/test_static.py index 003e229859..9c873b447f 100644 --- a/tests/view_tests/tests/test_static.py +++ b/tests/view_tests/tests/test_static.py @@ -83,7 +83,7 @@ class StaticTests(SimpleTestCase): """Handle bogus If-Modified-Since values gracefully Assume that a file is modified since an invalid timestamp as per RFC - 2616, section 14.25. + 9110 Section 13.1.3. """ file_name = "file.txt" invalid_date = "Mon, 28 May 999999999999 28:25:26 GMT" @@ -99,7 +99,7 @@ class StaticTests(SimpleTestCase): """Handle even more bogus If-Modified-Since values gracefully Assume that a file is modified since an invalid timestamp as per RFC - 2616, section 14.25. + 9110 Section 13.1.3. """ file_name = "file.txt" invalid_date = ": 1291108438, Wed, 20 Oct 2010 14:05:00 GMT"
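
The new test_exception_with_notes case above relies on the PEP 678 notes API available from Python 3.11; a minimal standalone sketch of that behaviour (plain Python, no Django required):

# Python 3.11+ only: add_note() appends to __notes__, and the notes are
# included in the rendered traceback, which is why ExceptionReporter must
# escape them before they reach the HTML debug page.
try:
    raise RuntimeError("Oops")
except RuntimeError as err:
    err.add_note("First Note")
    err.add_note("Second Note")
    assert err.__notes__ == ["First Note", "Second Note"]
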
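
The ContentDispositionHeaderTests added above exercise the new django.utils.http.content_disposition_header() helper; a hedged sketch of how it could be used in a download view (the download_report view, payload, and filename are illustrative assumptions):

from django.http import HttpResponse
from django.utils.http import content_disposition_header


def download_report(request):
    # Sketch only: view name, payload, and filename are assumptions.
    response = HttpResponse(b"%PDF-1.5 ...", content_type="application/pdf")
    # Non-ASCII filenames are emitted in the RFC 5987 form, e.g.
    # attachment; filename*=utf-8''esp%C3%A9cimen.pdf
    response.headers["Content-Disposition"] = content_disposition_header(
        True, "espécimen.pdf"
    )
    return response
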