1
0
mirror of https://github.com/django/django.git synced 2024-12-22 17:16:24 +00:00

Fixed #29949 -- Refactored db introspection identifier converters.

Removed DatabaseIntrospection.table_name_converter()/column_name_converter()
and use instead DatabaseIntrospection.identifier_converter().

Removed DatabaseFeatures.uppercases_column_names.

Thanks Tim Graham for the initial patch and review and Simon Charette
for the review.
This commit is contained in:
Mariusz Felisiak 2018-11-21 09:06:50 +01:00 committed by GitHub
parent 2e4776196d
commit d5f4ce9849
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 64 additions and 76 deletions

View File

@@ -308,7 +308,7 @@ class Command(BaseCommand):
def model_installed(model):
opts = model._meta
converter = connection.introspection.table_name_converter
converter = connection.introspection.identifier_converter
return not (
(converter(opts.db_table) in tables) or
(opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)

View File

@@ -200,8 +200,6 @@ class BaseDatabaseFeatures:
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = False
uppercases_column_names = False
# Does the backend support "select for update" queries with limit (and offset)?
supports_select_for_update_with_limit = True

View File

@@ -24,22 +24,14 @@ class BaseDatabaseIntrospection:
"""
return self.data_types_reverse[data_type]
def table_name_converter(self, name):
def identifier_converter(self, name):
"""
Apply a conversion to the name for the purposes of comparison.
Apply a conversion to the identifier for the purposes of comparison.
The default table name converter is for case sensitive comparison.
The default identifier converter is for case sensitive comparison.
"""
return name
def column_name_converter(self, name):
"""
Apply a conversion to the column name for the purposes of comparison.
Use table_name_converter() by default.
"""
return self.table_name_converter(name)
def table_names(self, cursor=None, include_views=False):
"""
Return a list of names of all tables that exist in the database.
@@ -83,11 +75,11 @@ class BaseDatabaseIntrospection:
)
tables = list(tables)
if only_existing:
existing_tables = self.table_names(include_views=include_views)
existing_tables = set(self.table_names(include_views=include_views))
tables = [
t
for t in tables
if self.table_name_converter(t) in existing_tables
if self.identifier_converter(t) in existing_tables
]
return tables
@@ -101,10 +93,10 @@ class BaseDatabaseIntrospection:
all_models = []
for app_config in apps.get_app_configs():
all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
tables = list(map(self.table_name_converter, tables))
tables = set(map(self.identifier_converter, tables))
return {
m for m in all_models
if self.table_name_converter(m._meta.db_table) in tables
if self.identifier_converter(m._meta.db_table) in tables
}
def sequence_list(self):

View File

@@ -1048,7 +1048,7 @@ class BaseDatabaseSchemaEditor:
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.column_name_converter(name)
self.connection.introspection.identifier_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:

View File

@@ -28,7 +28,6 @@ class DatabaseFeatures(BaseDatabaseFeatures):
requires_literal_defaults = True
closed_cursor_error_class = InterfaceError
bare_select_suffix = " FROM DUAL"
uppercases_column_names = True
# select for update with limit can be achieved on Oracle, but not with the current backend.
supports_select_for_update_with_limit = False
supports_temporal_subtraction = True

View File

@@ -50,7 +50,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
"""Return a list of table and view names in the current database."""
cursor.execute("SELECT TABLE_NAME, 't' FROM USER_TABLES UNION ALL "
"SELECT VIEW_NAME, 'v' FROM USER_VIEWS")
return [TableInfo(row[0].lower(), row[1]) for row in cursor.fetchall()]
return [TableInfo(self.identifier_converter(row[0]), row[1]) for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
@@ -86,13 +86,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
internal_size, default, is_autofield = field_map[name]
name = name % {} # cx_Oracle, for some reason, doubles percent signs.
description.append(FieldInfo(
name.lower(), *desc[1:3], internal_size, desc[4] or 0,
self.identifier_converter(name), *desc[1:3], internal_size, desc[4] or 0,
desc[5] or 0, *desc[6:], default, is_autofield,
))
return description
def table_name_converter(self, name):
"""Table name comparison is case insensitive under Oracle."""
def identifier_converter(self, name):
"""Identifier comparison is case insensitive under Oracle."""
return name.lower()
def get_sequences(self, cursor, table_name, table_fields=()):
@@ -114,7 +114,11 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
# Oracle allows only one identity column per table.
row = cursor.fetchone()
if row:
return [{'name': row[0].lower(), 'table': table_name, 'column': row[1].lower()}]
return [{
'name': self.identifier_converter(row[0]),
'table': self.identifier_converter(table_name),
'column': self.identifier_converter(row[1]),
}]
# To keep backward compatibility for AutoFields that aren't Oracle
# identity columns.
for f in table_fields:
@@ -136,10 +140,12 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
user_constraints.r_constraint_name = cb.constraint_name AND
ca.position = cb.position""", [table_name])
relations = {}
for row in cursor.fetchall():
relations[row[0].lower()] = (row[2].lower(), row[1].lower())
return relations
return {
self.identifier_converter(field_name): (
self.identifier_converter(rel_field_name),
self.identifier_converter(rel_table_name),
) for field_name, rel_table_name, rel_field_name in cursor.fetchall()
}
def get_key_columns(self, cursor, table_name):
cursor.execute("""
@@ -150,8 +156,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
JOIN user_cons_columns rcol
ON rcol.constraint_name = c.r_constraint_name
WHERE c.table_name = %s AND c.constraint_type = 'R'""", [table_name.upper()])
return [tuple(cell.lower() for cell in row)
for row in cursor.fetchall()]
return [
tuple(self.identifier_converter(cell) for cell in row)
for row in cursor.fetchall()
]
def get_constraints(self, cursor, table_name):
"""
@@ -186,6 +194,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
GROUP BY user_constraints.constraint_name, user_constraints.constraint_type
""", [table_name])
for constraint, columns, pk, unique, check in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
'columns': columns.split(','),
'primary_key': pk,
@@ -213,6 +222,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name
""", [table_name])
for constraint, columns, other_table, other_column in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
'primary_key': False,
'unique': False,
@@ -240,6 +250,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
GROUP BY ind.index_name, ind.index_type
""", [table_name])
for constraint, type_, columns, orders in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
'primary_key': False,
'unique': False,

View File

@@ -1347,7 +1347,7 @@ class RawQuerySet:
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
converter = connections[self.db].introspection.column_name_converter
converter = connections[self.db].introspection.identifier_converter
model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns]
annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
if column not in self.model_fields]
@@ -1469,7 +1469,7 @@ class RawQuerySet:
@cached_property
def model_fields(self):
"""A dict mapping column names to model field names."""
converter = connections[self.db].introspection.table_name_converter
converter = connections[self.db].introspection.identifier_converter
model_fields = {}
for field in self.model._meta.fields:
name, column = field.get_attname_column()

View File

@@ -92,7 +92,7 @@ class RawQuery:
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.column_name_converter
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0])
for column_meta in self.cursor.description]

View File

@@ -312,6 +312,13 @@ backends.
* ``sql_create_fk`` is replaced with ``sql_foreign_key_constraint``,
``sql_constraint``, and ``sql_create_constraint``.
* ``DatabaseIntrospection.table_name_converter()`` and
``column_name_converter()`` are removed. Third party database backends may
need to instead implement ``DatabaseIntrospection.identifier_converter()``.
In that case, the constraint names that
``DatabaseIntrospection.get_constraints()`` returns must be normalized by
``identifier_converter()``.
Admin actions are no longer collected from base ``ModelAdmin`` classes
----------------------------------------------------------------------

View File

@@ -80,7 +80,7 @@ class ParameterHandlingTest(TestCase):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
with connection.cursor() as cursor:
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.table_name_converter('backends_square'),
connection.introspection.identifier_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
@@ -217,7 +217,7 @@ class BackendTestCase(TransactionTestCase):
def create_squares(self, args, paramstyle, multiple):
opts = Square._meta
tbl = connection.introspection.table_name_converter(opts.db_table)
tbl = connection.introspection.identifier_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
if paramstyle == 'format':
@@ -303,7 +303,7 @@ class BackendTestCase(TransactionTestCase):
'SELECT %s, %s FROM %s ORDER BY %s' % (
qn(f3.column),
qn(f4.column),
connection.introspection.table_name_converter(opts2.db_table),
connection.introspection.identifier_converter(opts2.db_table),
qn(f3.column),
)
)

View File

@@ -48,8 +48,6 @@ class CheckConstraintTests(TestCase):
def test_name(self):
constraints = get_constraints(Product._meta.db_table)
expected_name = 'price_gt_discounted_price'
if connection.features.uppercases_column_names:
expected_name = expected_name.upper()
self.assertIn(expected_name, constraints)
@@ -87,6 +85,4 @@ class UniqueConstraintTests(TestCase):
def test_name(self):
constraints = get_constraints(Product._meta.db_table)
expected_name = 'unique_name'
if connection.features.uppercases_column_names:
expected_name = expected_name.upper()
self.assertIn(expected_name, constraints)

View File

@@ -63,12 +63,9 @@ class OperationTestCase(TransactionTestCase):
self.current_state = self.apply_operations('gis', ProjectState(), operations)
def assertGeometryColumnsCount(self, expected_count):
table_name = 'gis_neighborhood'
if connection.features.uppercases_column_names:
table_name = table_name.upper()
self.assertEqual(
GeometryColumns.objects.filter(**{
GeometryColumns.table_name_col(): table_name,
'%s__iexact' % GeometryColumns.table_name_col(): 'gis_neighborhood',
}).count(),
expected_count
)

View File

@@ -184,7 +184,7 @@ class InspectDBTestCase(TestCase):
out = StringIO()
call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = 'field' if connection.features.uppercases_column_names else 'Field'
base_name = connection.introspection.identifier_converter('Field')
self.assertIn("field = models.IntegerField()", output)
self.assertIn("field_field = models.IntegerField(db_column='%s_')" % base_name, output)
self.assertIn("field_field_0 = models.IntegerField(db_column='%s__')" % base_name, output)

View File

@@ -85,7 +85,7 @@ class SchemaTests(TransactionTestCase):
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.table_name_converter
converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
@@ -1868,9 +1868,6 @@ class SchemaTests(TransactionTestCase):
table_name=AuthorWithIndexedName._meta.db_table,
column_names=('name',),
)
if connection.features.uppercases_column_names:
author_index_name = author_index_name.upper()
db_index_name = db_index_name.upper()
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
@@ -1908,8 +1905,6 @@ class SchemaTests(TransactionTestCase):
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
if connection.features.uppercases_column_names:
index_name = index_name.upper()
self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC'])
# Drop the index
with connection.schema_editor() as editor:
@@ -2122,12 +2117,14 @@ class SchemaTests(TransactionTestCase):
field = get_field()
table = model._meta.db_table
column = field.column
identifier_converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = "CamelCaseIndex"
constraint_name = 'CamelCaseIndex'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_index % {
"table": editor.quote_name(table),
@@ -2138,22 +2135,20 @@ class SchemaTests(TransactionTestCase):
"condition": "",
}
)
if connection.features.uppercases_column_names:
constraint_name = constraint_name.upper()
self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
constraint_name = "CamelCaseUniqConstraint"
constraint_name = 'CamelCaseUniqConstraint'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(editor._create_unique_sql(model, [field.column], constraint_name))
if connection.features.uppercases_column_names:
constraint_name = constraint_name.upper()
self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
if editor.sql_foreign_key_constraint:
constraint_name = "CamelCaseFKConstraint"
constraint_name = 'CamelCaseFKConstraint'
expected_constraint_name = identifier_converter(constraint_name)
fk_sql = editor.sql_foreign_key_constraint % {
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
@@ -2170,11 +2165,9 @@ class SchemaTests(TransactionTestCase):
"constraint": constraint_sql,
}
)
if connection.features.uppercases_column_names:
constraint_name = constraint_name.upper()
self.assertIn(constraint_name, self.get_constraints(model._meta.db_table))
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True)
self.assertNotIn(constraint_name, self.get_constraints(model._meta.db_table))
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
def test_add_field_use_effective_default(self):
"""
@@ -2491,11 +2484,7 @@ class SchemaTests(TransactionTestCase):
new_field.set_attributes_from_name('weight')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
expected = 'schema_author_weight_587740f9'
if connection.features.uppercases_column_names:
expected = expected.upper()
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [expected])
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9'])
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:

View File

@@ -113,11 +113,10 @@ class SelectForUpdateTests(TransactionTestCase):
))
features = connections['default'].features
if features.select_for_update_of_column:
expected = ['"select_for_update_person"."id"', '"select_for_update_country"."id"']
expected = ['select_for_update_person"."id', 'select_for_update_country"."id']
else:
expected = ['"select_for_update_person"', '"select_for_update_country"']
if features.uppercases_column_names:
expected = [value.upper() for value in expected]
expected = ['select_for_update_person', 'select_for_update_country']
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature('has_select_for_update_of')