Mirror of https://github.com/django/django.git (synced 2024-12-23 01:25:58 +00:00)
Fixed #29949 -- Refactored db introspection identifier converters.
Removed DatabaseIntrospection.table_name_converter()/column_name_converter() in favor of DatabaseIntrospection.identifier_converter(). Removed DatabaseFeatures.uppercases_column_names. Thanks Tim Graham for the initial patch and review, and Simon Charette for the review.
parent 2e4776196d
commit d5f4ce9849
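
For orientation, a minimal sketch of the consolidated API this commit introduces. The identifier values below are illustrative only; the single hook replaces both of the removed converters:

from django.db import connection

# One hook now normalizes every kind of identifier for comparison:
# table names, column names, and constraint names alike.
convert = connection.introspection.identifier_converter

# The base implementation returns the name unchanged (case sensitive
# comparison); the Oracle backend overrides it to lower-case names,
# since its comparisons are case insensitive (see the hunks below).
table_name = convert('backends_square')        # illustrative table name
column_name = convert('root')                  # illustrative column name
constraint_name = convert('CamelCaseIndex')    # illustrative constraint name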
@@ -308,7 +308,7 @@ class Command(BaseCommand):
 
         def model_installed(model):
             opts = model._meta
-            converter = connection.introspection.table_name_converter
+            converter = connection.introspection.identifier_converter
             return not (
                 (converter(opts.db_table) in tables) or
                 (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)
@@ -200,8 +200,6 @@ class BaseDatabaseFeatures:
     # If NULL is implied on columns without needing to be explicitly specified
     implied_column_null = False
 
-    uppercases_column_names = False
-
     # Does the backend support "select for update" queries with limit (and offset)?
     supports_select_for_update_with_limit = True
 
@@ -24,22 +24,14 @@ class BaseDatabaseIntrospection:
         """
         return self.data_types_reverse[data_type]
 
-    def table_name_converter(self, name):
+    def identifier_converter(self, name):
         """
-        Apply a conversion to the name for the purposes of comparison.
+        Apply a conversion to the identifier for the purposes of comparison.
 
-        The default table name converter is for case sensitive comparison.
+        The default identifier converter is for case sensitive comparison.
         """
         return name
 
-    def column_name_converter(self, name):
-        """
-        Apply a conversion to the column name for the purposes of comparison.
-
-        Use table_name_converter() by default.
-        """
-        return self.table_name_converter(name)
-
     def table_names(self, cursor=None, include_views=False):
         """
         Return a list of names of all tables that exist in the database.
@@ -83,11 +75,11 @@ class BaseDatabaseIntrospection:
         )
         tables = list(tables)
         if only_existing:
-            existing_tables = self.table_names(include_views=include_views)
+            existing_tables = set(self.table_names(include_views=include_views))
             tables = [
                 t
                 for t in tables
-                if self.table_name_converter(t) in existing_tables
+                if self.identifier_converter(t) in existing_tables
             ]
         return tables
 
@@ -101,10 +93,10 @@ class BaseDatabaseIntrospection:
         all_models = []
         for app_config in apps.get_app_configs():
             all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
-        tables = list(map(self.table_name_converter, tables))
+        tables = set(map(self.identifier_converter, tables))
         return {
             m for m in all_models
-            if self.table_name_converter(m._meta.db_table) in tables
+            if self.identifier_converter(m._meta.db_table) in tables
         }
 
     def sequence_list(self):
@@ -1048,7 +1048,7 @@ class BaseDatabaseSchemaEditor:
         """Return all constraint names matching the columns and conditions."""
         if column_names is not None:
             column_names = [
-                self.connection.introspection.column_name_converter(name)
+                self.connection.introspection.identifier_converter(name)
                 for name in column_names
             ]
         with self.connection.cursor() as cursor:
@@ -28,7 +28,6 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     requires_literal_defaults = True
     closed_cursor_error_class = InterfaceError
     bare_select_suffix = " FROM DUAL"
-    uppercases_column_names = True
     # select for update with limit can be achieved on Oracle, but not with the current backend.
     supports_select_for_update_with_limit = False
     supports_temporal_subtraction = True
@@ -50,7 +50,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         """Return a list of table and view names in the current database."""
         cursor.execute("SELECT TABLE_NAME, 't' FROM USER_TABLES UNION ALL "
                        "SELECT VIEW_NAME, 'v' FROM USER_VIEWS")
-        return [TableInfo(row[0].lower(), row[1]) for row in cursor.fetchall()]
+        return [TableInfo(self.identifier_converter(row[0]), row[1]) for row in cursor.fetchall()]
 
     def get_table_description(self, cursor, table_name):
         """
@@ -86,13 +86,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             internal_size, default, is_autofield = field_map[name]
             name = name % {}  # cx_Oracle, for some reason, doubles percent signs.
             description.append(FieldInfo(
-                name.lower(), *desc[1:3], internal_size, desc[4] or 0,
+                self.identifier_converter(name), *desc[1:3], internal_size, desc[4] or 0,
                 desc[5] or 0, *desc[6:], default, is_autofield,
             ))
         return description
 
-    def table_name_converter(self, name):
-        """Table name comparison is case insensitive under Oracle."""
+    def identifier_converter(self, name):
+        """Identifier comparison is case insensitive under Oracle."""
         return name.lower()
 
     def get_sequences(self, cursor, table_name, table_fields=()):
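
A quick illustration of the Oracle override above. The input is an assumption about what Oracle's data dictionary would report, not output captured from this patch:

from django.db import connection  # assuming the Oracle backend is in use

# Oracle stores unquoted identifiers in upper case, so the backend folds
# names to lower case before comparing them.
connection.introspection.identifier_converter('BACKENDS_SQUARE')  # -> 'backends_square'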
@@ -114,7 +114,11 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         # Oracle allows only one identity column per table.
         row = cursor.fetchone()
         if row:
-            return [{'name': row[0].lower(), 'table': table_name, 'column': row[1].lower()}]
+            return [{
+                'name': self.identifier_converter(row[0]),
+                'table': self.identifier_converter(table_name),
+                'column': self.identifier_converter(row[1]),
+            }]
         # To keep backward compatibility for AutoFields that aren't Oracle
         # identity columns.
         for f in table_fields:
@@ -136,10 +140,12 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 user_constraints.r_constraint_name = cb.constraint_name AND
                 ca.position = cb.position""", [table_name])
 
-        relations = {}
-        for row in cursor.fetchall():
-            relations[row[0].lower()] = (row[2].lower(), row[1].lower())
-        return relations
+        return {
+            self.identifier_converter(field_name): (
+                self.identifier_converter(rel_field_name),
+                self.identifier_converter(rel_table_name),
+            ) for field_name, rel_table_name, rel_field_name in cursor.fetchall()
+        }
 
     def get_key_columns(self, cursor, table_name):
         cursor.execute("""
@@ -150,8 +156,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             JOIN user_cons_columns rcol
               ON rcol.constraint_name = c.r_constraint_name
             WHERE c.table_name = %s AND c.constraint_type = 'R'""", [table_name.upper()])
-        return [tuple(cell.lower() for cell in row)
-                for row in cursor.fetchall()]
+        return [
+            tuple(self.identifier_converter(cell) for cell in row)
+            for row in cursor.fetchall()
+        ]
 
     def get_constraints(self, cursor, table_name):
         """
@@ -186,6 +194,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             GROUP BY user_constraints.constraint_name, user_constraints.constraint_type
         """, [table_name])
         for constraint, columns, pk, unique, check in cursor.fetchall():
+            constraint = self.identifier_converter(constraint)
             constraints[constraint] = {
                 'columns': columns.split(','),
                 'primary_key': pk,
@@ -213,6 +222,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name
         """, [table_name])
         for constraint, columns, other_table, other_column in cursor.fetchall():
+            constraint = self.identifier_converter(constraint)
             constraints[constraint] = {
                 'primary_key': False,
                 'unique': False,
@@ -240,6 +250,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             GROUP BY ind.index_name, ind.index_type
         """, [table_name])
         for constraint, type_, columns, orders in cursor.fetchall():
+            constraint = self.identifier_converter(constraint)
             constraints[constraint] = {
                 'primary_key': False,
                 'unique': False,
@@ -1347,7 +1347,7 @@ class RawQuerySet:
 
     def resolve_model_init_order(self):
         """Resolve the init field names and value positions."""
-        converter = connections[self.db].introspection.column_name_converter
+        converter = connections[self.db].introspection.identifier_converter
         model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns]
         annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
                              if column not in self.model_fields]
@@ -1469,7 +1469,7 @@ class RawQuerySet:
     @cached_property
     def model_fields(self):
         """A dict mapping column names to model field names."""
-        converter = connections[self.db].introspection.table_name_converter
+        converter = connections[self.db].introspection.identifier_converter
         model_fields = {}
         for field in self.model._meta.fields:
             name, column = field.get_attname_column()
@@ -92,7 +92,7 @@ class RawQuery:
     def get_columns(self):
         if self.cursor is None:
             self._execute_query()
-        converter = connections[self.using].introspection.column_name_converter
+        converter = connections[self.using].introspection.identifier_converter
         return [converter(column_meta[0])
                 for column_meta in self.cursor.description]
 
@@ -312,6 +312,13 @@ backends.
 * ``sql_create_fk`` is replaced with ``sql_foreign_key_constraint``,
   ``sql_constraint``, and ``sql_create_constraint``.
 
+* ``DatabaseIntrospection.table_name_converter()`` and
+  ``column_name_converter()`` are removed. Third party database backends may
+  need to instead implement ``DatabaseIntrospection.identifier_converter()``.
+  In that case, the constraint names that
+  ``DatabaseIntrospection.get_constraints()`` returns must be normalized by
+  ``identifier_converter()``.
+
 Admin actions are no longer collected from base ``ModelAdmin`` classes
 ----------------------------------------------------------------------
 
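For third-party backend authors, a minimal sketch of what the release note above asks for. The catalog query and column handling are hypothetical; only identifier_converter() and the requirement to normalize constraint names come from this commit:

from django.db.backends.base.introspection import BaseDatabaseIntrospection


class DatabaseIntrospection(BaseDatabaseIntrospection):
    def identifier_converter(self, name):
        # Replaces the removed table_name_converter()/column_name_converter().
        # Hypothetical backend that compares identifiers case-insensitively.
        return name.lower()

    def get_constraints(self, cursor, table_name):
        constraints = {}
        cursor.execute("...")  # backend-specific catalog query (elided)
        for name, columns in cursor.fetchall():
            # Constraint names must be keyed by their normalized form.
            constraints[self.identifier_converter(name)] = {
                'columns': columns.split(','),
                'primary_key': False,
                'unique': False,
                'index': False,
                'check': False,
                'foreign_key': None,
            }
        return constraints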
@@ -80,7 +80,7 @@ class ParameterHandlingTest(TestCase):
         "An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
         with connection.cursor() as cursor:
             query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
-                connection.introspection.table_name_converter('backends_square'),
+                connection.introspection.identifier_converter('backends_square'),
                 connection.ops.quote_name('root'),
                 connection.ops.quote_name('square')
             ))
@@ -217,7 +217,7 @@ class BackendTestCase(TransactionTestCase):
 
     def create_squares(self, args, paramstyle, multiple):
         opts = Square._meta
-        tbl = connection.introspection.table_name_converter(opts.db_table)
+        tbl = connection.introspection.identifier_converter(opts.db_table)
         f1 = connection.ops.quote_name(opts.get_field('root').column)
         f2 = connection.ops.quote_name(opts.get_field('square').column)
         if paramstyle == 'format':
@@ -303,7 +303,7 @@ class BackendTestCase(TransactionTestCase):
             'SELECT %s, %s FROM %s ORDER BY %s' % (
                 qn(f3.column),
                 qn(f4.column),
-                connection.introspection.table_name_converter(opts2.db_table),
+                connection.introspection.identifier_converter(opts2.db_table),
                 qn(f3.column),
             )
         )
@@ -48,8 +48,6 @@ class CheckConstraintTests(TestCase):
     def test_name(self):
         constraints = get_constraints(Product._meta.db_table)
         expected_name = 'price_gt_discounted_price'
-        if connection.features.uppercases_column_names:
-            expected_name = expected_name.upper()
         self.assertIn(expected_name, constraints)
 
 
@@ -87,6 +85,4 @@ class UniqueConstraintTests(TestCase):
     def test_name(self):
         constraints = get_constraints(Product._meta.db_table)
         expected_name = 'unique_name'
-        if connection.features.uppercases_column_names:
-            expected_name = expected_name.upper()
         self.assertIn(expected_name, constraints)
@@ -63,12 +63,9 @@ class OperationTestCase(TransactionTestCase):
         self.current_state = self.apply_operations('gis', ProjectState(), operations)
 
     def assertGeometryColumnsCount(self, expected_count):
-        table_name = 'gis_neighborhood'
-        if connection.features.uppercases_column_names:
-            table_name = table_name.upper()
         self.assertEqual(
             GeometryColumns.objects.filter(**{
-                GeometryColumns.table_name_col(): table_name,
+                '%s__iexact' % GeometryColumns.table_name_col(): 'gis_neighborhood',
             }).count(),
             expected_count
         )
@@ -184,7 +184,7 @@ class InspectDBTestCase(TestCase):
         out = StringIO()
         call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
         output = out.getvalue()
-        base_name = 'field' if connection.features.uppercases_column_names else 'Field'
+        base_name = connection.introspection.identifier_converter('Field')
         self.assertIn("field = models.IntegerField()", output)
         self.assertIn("field_field = models.IntegerField(db_column='%s_')" % base_name, output)
         self.assertIn("field_field_0 = models.IntegerField(db_column='%s__')" % base_name, output)
@@ -85,7 +85,7 @@ class SchemaTests(TransactionTestCase):
 
     def delete_tables(self):
         "Deletes all model tables for our models for a clean test environment"
-        converter = connection.introspection.table_name_converter
+        converter = connection.introspection.identifier_converter
         with connection.schema_editor() as editor:
             connection.disable_constraint_checking()
             table_names = connection.introspection.table_names()
@@ -1868,9 +1868,6 @@ class SchemaTests(TransactionTestCase):
             table_name=AuthorWithIndexedName._meta.db_table,
             column_names=('name',),
         )
-        if connection.features.uppercases_column_names:
-            author_index_name = author_index_name.upper()
-            db_index_name = db_index_name.upper()
         try:
             AuthorWithIndexedName._meta.indexes = [index]
             with connection.schema_editor() as editor:
@@ -1908,8 +1905,6 @@ class SchemaTests(TransactionTestCase):
         with connection.schema_editor() as editor:
             editor.add_index(Author, index)
         if connection.features.supports_index_column_ordering:
-            if connection.features.uppercases_column_names:
-                index_name = index_name.upper()
             self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC'])
         # Drop the index
         with connection.schema_editor() as editor:
@@ -2122,12 +2117,14 @@ class SchemaTests(TransactionTestCase):
         field = get_field()
         table = model._meta.db_table
         column = field.column
+        identifier_converter = connection.introspection.identifier_converter
 
         with connection.schema_editor() as editor:
             editor.create_model(model)
             editor.add_field(model, field)
 
-            constraint_name = "CamelCaseIndex"
+            constraint_name = 'CamelCaseIndex'
+            expected_constraint_name = identifier_converter(constraint_name)
             editor.execute(
                 editor.sql_create_index % {
                     "table": editor.quote_name(table),
@@ -2138,22 +2135,20 @@ class SchemaTests(TransactionTestCase):
                     "condition": "",
                 }
             )
-            if connection.features.uppercases_column_names:
-                constraint_name = constraint_name.upper()
-            self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
+            self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
             editor.alter_field(model, get_field(db_index=True), field, strict=True)
-            self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
+            self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
 
-            constraint_name = "CamelCaseUniqConstraint"
+            constraint_name = 'CamelCaseUniqConstraint'
+            expected_constraint_name = identifier_converter(constraint_name)
             editor.execute(editor._create_unique_sql(model, [field.column], constraint_name))
-            if connection.features.uppercases_column_names:
-                constraint_name = constraint_name.upper()
-            self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
+            self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
             editor.alter_field(model, get_field(unique=True), field, strict=True)
-            self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
+            self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
 
             if editor.sql_foreign_key_constraint:
-                constraint_name = "CamelCaseFKConstraint"
+                constraint_name = 'CamelCaseFKConstraint'
+                expected_constraint_name = identifier_converter(constraint_name)
                 fk_sql = editor.sql_foreign_key_constraint % {
                     "column": editor.quote_name(column),
                     "to_table": editor.quote_name(table),
@@ -2170,11 +2165,9 @@ class SchemaTests(TransactionTestCase):
                         "constraint": constraint_sql,
                     }
                 )
-                if connection.features.uppercases_column_names:
-                    constraint_name = constraint_name.upper()
-                self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
+                self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
                 editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True)
-                self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
+                self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
 
     def test_add_field_use_effective_default(self):
         """
|
|||||||
new_field.set_attributes_from_name('weight')
|
new_field.set_attributes_from_name('weight')
|
||||||
with connection.schema_editor() as editor:
|
with connection.schema_editor() as editor:
|
||||||
editor.alter_field(Author, old_field, new_field, strict=True)
|
editor.alter_field(Author, old_field, new_field, strict=True)
|
||||||
|
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9'])
|
||||||
expected = 'schema_author_weight_587740f9'
|
|
||||||
if connection.features.uppercases_column_names:
|
|
||||||
expected = expected.upper()
|
|
||||||
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [expected])
|
|
||||||
|
|
||||||
# Remove db_index=True to drop index.
|
# Remove db_index=True to drop index.
|
||||||
with connection.schema_editor() as editor:
|
with connection.schema_editor() as editor:
|
||||||
|
@@ -113,11 +113,10 @@ class SelectForUpdateTests(TransactionTestCase):
         ))
         features = connections['default'].features
         if features.select_for_update_of_column:
-            expected = ['"select_for_update_person"."id"', '"select_for_update_country"."id"']
+            expected = ['select_for_update_person"."id', 'select_for_update_country"."id']
         else:
-            expected = ['"select_for_update_person"', '"select_for_update_country"']
-        if features.uppercases_column_names:
-            expected = [value.upper() for value in expected]
+            expected = ['select_for_update_person', 'select_for_update_country']
+        expected = [connection.ops.quote_name(value) for value in expected]
         self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
 
     @skipUnlessDBFeature('has_select_for_update_of')