
Removed unneeded escapes in regexes.

Special characters lose their special meaning inside sets of characters.

"-" lose its special meaning if it's placed as the first or last
character.

Follow up to 7c6b66383d.
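
For reference, a minimal Python sketch (not part of the commit; the sample strings are illustrative) demonstrating both rules:

import re

# Inside a character class, "." is literal, so [^\.] and [^.] are equivalent.
assert re.match(r"^[^\.]+$", "admindocs")
assert re.match(r"^[^.]+$", "admindocs")
assert re.match(r"^[^.]+$", "admin.docs") is None

# "-" is literal when it is the first or last character of a class,
# so [\w\-]+ and [\w-]+ accept the same strings.
assert re.fullmatch(r"[\w\-]+", "en-us")
assert re.fullmatch(r"[\w-]+", "en-us")
assert re.fullmatch(r"[\w-]+", "en us") is None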
Mariusz Felisiak 2023-08-02 19:53:16 +02:00 committed by GitHub
parent 7cd187a5ba
commit 9b9c805ced
11 changed files with 30 additions and 30 deletions

View File

@@ -38,7 +38,7 @@ urlpatterns = [
name="django-admindocs-models-index",
),
re_path(
r"^models/(?P<app_label>[^\.]+)\.(?P<model_name>[^/]+)/$",
r"^models/(?P<app_label>[^.]+)\.(?P<model_name>[^/]+)/$",
views.ModelDetailView.as_view(),
name="django-admindocs-models-detail",
),

View File

@@ -10,7 +10,7 @@ class FlatpageForm(forms.ModelForm):
url = forms.RegexField(
label=_("URL"),
max_length=100,
regex=r"^[-\w/\.~]+$",
regex=r"^[-\w/.~]+$",
help_text=_(
"Example: “/about/contact/”. Make sure to have leading and trailing "
"slashes."

View File

@@ -273,7 +273,7 @@ class OverlapsLookup(GISLookup):
class RelateLookup(GISLookup):
lookup_name = "relate"
sql_template = "%(func)s(%(lhs)s, %(rhs)s, %%s)"
-pattern_regex = _lazy_re_compile(r"^[012TF\*]{9}$")
+pattern_regex = _lazy_re_compile(r"^[012TF*]{9}$")
def process_rhs(self, compiler, connection):
# Check the pattern argument

View File

@@ -11,7 +11,7 @@ wkt_regex = _lazy_re_compile(
r"(?P<wkt>"
r"(?P<type>POINT|LINESTRING|LINEARRING|POLYGON|MULTIPOINT|"
r"MULTILINESTRING|MULTIPOLYGON|GEOMETRYCOLLECTION)"
r"[ACEGIMLONPSRUTYZ0-9,\.\-\+\(\) ]+)$",
r"[ACEGIMLONPSRUTYZ0-9,.+() -]+)$",
re.I,
)
json_regex = _lazy_re_compile(r"^(\s+)?\{.*}(\s+)?$", re.DOTALL)
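
A rough standalone check (not part of the commit; the inputs are made up, and only the character class is taken from the hunk above) that the reordered class matches the same characters as the escaped original, since "-" stays literal at the end of the class:

import re

# Anchored versions of just the character class; the surrounding named
# groups from wkt_regex are dropped for this comparison.
old = re.compile(r"^[ACEGIMLONPSRUTYZ0-9,\.\-\+\(\) ]+$", re.I)
new = re.compile(r"^[ACEGIMLONPSRUTYZ0-9,.+() -]+$", re.I)

for text in ("POINT (30 10)", "1.5 -2.5", "a+b", "x;y"):
    assert bool(old.match(text)) == bool(new.match(text))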

View File

@@ -54,19 +54,19 @@ class HashedFilesMixin:
(
(
r"""(?P<matched>import(?s:(?P<import>[\s\{].*?))"""
r"""\s*from\s*['"](?P<url>[\.\/].*?)["']\s*;)"""
r"""\s*from\s*['"](?P<url>[./].*?)["']\s*;)"""
),
"""import%(import)s from "%(url)s";""",
),
(
(
r"""(?P<matched>export(?s:(?P<exports>[\s\{].*?))"""
r"""\s*from\s*["'](?P<url>[\.\/].*?)["']\s*;)"""
r"""\s*from\s*["'](?P<url>[./].*?)["']\s*;)"""
),
"""export%(exports)s from "%(url)s";""",
),
(
r"""(?P<matched>import\s*['"](?P<url>[\.\/].*?)["']\s*;)""",
r"""(?P<matched>import\s*['"](?P<url>[./].*?)["']\s*;)""",
"""import"%(url)s";""",
),
(

View File

@@ -108,7 +108,7 @@ class Collate(Func):
allowed_default = False
# Inspired from
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
-collation_re = _lazy_re_compile(r"^[\w\-]+$")
+collation_re = _lazy_re_compile(r"^[\w-]+$")
def __init__(self, expression, collation):
if not (collation and self.collation_re.match(collation)):

View File

@@ -52,7 +52,7 @@ FORBIDDEN_ALIAS_PATTERN = _lazy_re_compile(r"['`\"\]\[;\s]|--|/\*|\*/")
# Inspired from
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w\-]+")
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w-]+")
def get_field_names_from_opts(opts):

View File

@@ -632,7 +632,7 @@ filter_raw_string = r"""
)?
)""" % {
"constant": constant_string,
"num": r"[-+\.]?\d[\d\.e]*",
"num": r"[-+.]?\d[\d.e]*",
"var_chars": r"\w\.",
"filter_sep": re.escape(FILTER_SEPARATOR),
"arg_sep": re.escape(FILTER_ARGUMENT_SEPARATOR),

View File

@@ -14,13 +14,13 @@ date_re = _lazy_re_compile(r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})
time_re = _lazy_re_compile(
r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
r"(?::(?P<second>\d{1,2})(?:[\.,](?P<microsecond>\d{1,6})\d{0,6})?)?$"
r"(?::(?P<second>\d{1,2})(?:[.,](?P<microsecond>\d{1,6})\d{0,6})?)?$"
)
datetime_re = _lazy_re_compile(
r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
r"(?::(?P<second>\d{1,2})(?:[\.,](?P<microsecond>\d{1,6})\d{0,6})?)?"
r"(?::(?P<second>\d{1,2})(?:[.,](?P<microsecond>\d{1,6})\d{0,6})?)?"
r"\s*(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
)
@@ -31,7 +31,7 @@ standard_duration_re = _lazy_re_compile(
r"((?:(?P<hours>\d+):)(?=\d+:\d+))?"
r"(?:(?P<minutes>\d+):)?"
r"(?P<seconds>\d+)"
r"(?:[\.,](?P<microseconds>\d{1,6})\d{0,6})?"
r"(?:[.,](?P<microseconds>\d{1,6})\d{0,6})?"
r"$"
)
@@ -40,11 +40,11 @@ standard_duration_re = _lazy_re_compile(
iso8601_duration_re = _lazy_re_compile(
r"^(?P<sign>[-+]?)"
r"P"
r"(?:(?P<days>\d+([\.,]\d+)?)D)?"
r"(?:(?P<days>\d+([.,]\d+)?)D)?"
r"(?:T"
r"(?:(?P<hours>\d+([\.,]\d+)?)H)?"
r"(?:(?P<minutes>\d+([\.,]\d+)?)M)?"
r"(?:(?P<seconds>\d+([\.,]\d+)?)S)?"
r"(?:(?P<hours>\d+([.,]\d+)?)H)?"
r"(?:(?P<minutes>\d+([.,]\d+)?)M)?"
r"(?:(?P<seconds>\d+([.,]\d+)?)S)?"
r")?"
r"$"
)

View File

@@ -54,8 +54,8 @@ autosectionlabel_maxdepth = 2
linkcheck_ignore = [
# Special-use addresses and domain names. (RFC 6761/6890)
r"^https?://(?:127\.0\.0\.1|\[::1\])(?::\d+)?/",
r"^https?://(?:[^/\.]+\.)*example\.(?:com|net|org)(?::\d+)?/",
r"^https?://(?:[^/\.]+\.)*(?:example|invalid|localhost|test)(?::\d+)?/",
r"^https?://(?:[^/.]+\.)*example\.(?:com|net|org)(?::\d+)?/",
r"^https?://(?:[^/.]+\.)*(?:example|invalid|localhost|test)(?::\d+)?/",
# Pages that are inaccessible because they require authentication.
r"^https://github\.com/[^/]+/[^/]+/fork",
r"^https://code\.djangoproject\.com/github/login",

View File

@@ -690,24 +690,24 @@ class TestValidatorEquality(TestCase):
def test_regex_equality(self):
self.assertEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
RegexValidator(r"^(?:[a-z0-9.-]*)://"),
RegexValidator(r"^(?:[a-z0-9.-]*)://"),
)
self.assertNotEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
RegexValidator(r"^(?:[0-9\.\-]*)://"),
RegexValidator(r"^(?:[a-z0-9.-]*)://"),
RegexValidator(r"^(?:[0-9.-]*)://"),
)
self.assertEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9.-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9.-]*)://", "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh", "invalid"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9.-]*)://", "oh", "invalid"),
RegexValidator(r"^(?:[a-z0-9.-]*)://", "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
RegexValidator(r"^(?:[a-z0-9.-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9.-]*)://"),
)
self.assertNotEqual(
@@ -721,7 +721,7 @@ class TestValidatorEquality(TestCase):
)
def test_regex_equality_nocache(self):
pattern = r"^(?:[a-z0-9\.\-]*)://"
pattern = r"^(?:[a-z0-9.-]*)://"
left = RegexValidator(pattern)
re.purge()
right = RegexValidator(pattern)