From 8ba5bf31986fa746ecc81683c64999dcea4f8e0a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 18 Jun 2012 17:32:03 +0100 Subject: [PATCH 001/161] Very start of schema alteration port. Create/delete model and some tests. --- django/db/backends/__init__.py | 8 + .../db/backends/postgresql_psycopg2/base.py | 6 + .../db/backends/postgresql_psycopg2/schema.py | 5 + django/db/backends/schema.py | 178 ++++++++++++++++++ tests/modeltests/schema/__init__.py | 0 tests/modeltests/schema/models.py | 21 +++ tests/modeltests/schema/tests.py | 102 ++++++++++ 7 files changed, 320 insertions(+) create mode 100644 django/db/backends/postgresql_psycopg2/schema.py create mode 100644 django/db/backends/schema.py create mode 100644 tests/modeltests/schema/__init__.py create mode 100644 tests/modeltests/schema/models.py create mode 100644 tests/modeltests/schema/tests.py diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index d70fe54bdb..ed2a54277f 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -312,6 +312,11 @@ class BaseDatabaseWrapper(object): def make_debug_cursor(self, cursor): return util.CursorDebugWrapper(cursor, self) + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + raise NotImplementedError() + + class BaseDatabaseFeatures(object): allows_group_by_pk = False # True if django.db.backend.utils.typecast_timestamp is used on values @@ -411,6 +416,9 @@ class BaseDatabaseFeatures(object): # Support for the DISTINCT ON clause can_distinct_on_fields = False + # Can we roll back DDL in a transaction? + can_rollback_ddl = False + def __init__(self, connection): self.connection = connection diff --git a/django/db/backends/postgresql_psycopg2/base.py b/django/db/backends/postgresql_psycopg2/base.py index 61be680d83..6c56bb9c91 100644 --- a/django/db/backends/postgresql_psycopg2/base.py +++ b/django/db/backends/postgresql_psycopg2/base.py @@ -13,6 +13,7 @@ from django.db.backends.postgresql_psycopg2.client import DatabaseClient from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation from django.db.backends.postgresql_psycopg2.version import get_version from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection +from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.utils.log import getLogger from django.utils.safestring import SafeUnicode, SafeString from django.utils.timezone import utc @@ -83,6 +84,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): has_bulk_insert = True supports_tablespaces = True can_distinct_on_fields = True + can_rollback_ddl = True class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'postgresql' @@ -235,3 +237,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): return self.connection.commit() except Database.IntegrityError as e: raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2] + + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) diff --git a/django/db/backends/postgresql_psycopg2/schema.py b/django/db/backends/postgresql_psycopg2/schema.py new file mode 100644 index 0000000000..b86e0857bb --- /dev/null +++ b/django/db/backends/postgresql_psycopg2/schema.py @@ -0,0 +1,5 @@ +from django.db.backends.schema import BaseDatabaseSchemaEditor + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + pass diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py new file mode 100644 index 
0000000000..73a9b99b50 --- /dev/null +++ b/django/db/backends/schema.py @@ -0,0 +1,178 @@ +import sys +import time + +from django.conf import settings +from django.db import transaction +from django.db.utils import load_backend +from django.utils.log import getLogger + +logger = getLogger('django.db.backends.schema') + + +class BaseDatabaseSchemaEditor(object): + """ + This class (and its subclasses) are responsible for emitting schema-changing + statements to the databases - model creation/removal/alteration, field + renaming, index fiddling, and so on. + + It is intended to eventually completely replace DatabaseCreation. + + This class should be used by creating an instance for each set of schema + changes (e.g. a syncdb run, a migration file), and by first calling start(), + then the relevant actions, and then commit(). This is necessary to allow + things like circular foreign key references - FKs will only be created once + commit() is called. + """ + + # Overrideable SQL templates + sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" + sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" + sql_delete_table = "DROP TABLE %(table)s CASCADE" + + sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(definition)s" + sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" + sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" + sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE;" + + sql_create_check = "ADD CONSTRAINT %(name)s CHECK (%(check)s)" + sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)" + sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED" + sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + sql_create_index = "CREATE %(unique)s INDEX %(name)s ON %(table)s (%(columns)s)%s;" + sql_delete_index = "DROP INDEX %(name)s" + + sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)" + sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s" + + def __init__(self, connection): + self.connection = connection + + # State-managing methods + + def start(self): + "Marks the start of a schema-altering run" + self.deferred_sql = [] + self.connection.commit_unless_managed() + self.connection.enter_transaction_management() + self.connection.managed(True) + + def commit(self): + "Finishes a schema-altering run" + for sql in self.deferred_sql: + self.execute(sql) + self.connection.commit() + self.connection.leave_transaction_management() + + def rollback(self): + "Tries to roll back a schema-altering run. Call instead of commit()" + if not self.connection.features.can_rollback_ddl: + raise RuntimeError("Cannot rollback schema changes on this backend") + self.connection.rollback() + self.connection.leave_transaction_management() + + # Core utility functions + + def execute(self, sql, params=[], fetch_results=False): + """ + Executes the given SQL statement, with optional parameters. 
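The class docstring above spells out the intended lifecycle (start(), a series of actions, then commit()); a minimal sketch of that calling pattern, assuming the default connection and the Author model used by the tests later in this series:

    from django.db import connection

    editor = connection.schema_editor()
    editor.start()                   # clears deferred_sql, enters transaction management
    editor.create_model(Author)      # CREATE TABLE runs now; FK constraints are deferred
    editor.commit()                  # deferred SQL runs here, then the transaction commits
    # editor.rollback() is the alternative ending, but only on backends where
    # features.can_rollback_ddl is True (set for PostgreSQL above).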
+ """ + # Get the cursor + cursor = self.connection.cursor() + # Log the command we're running, then run it + logger.info("%s; (params %r)" % (sql, params)) + cursor.execute(sql, params) + + def quote_name(self, name): + return self.connection.ops.quote_name(name) + + # Actions + + def create_model(self, model): + """ + Takes a model and creates a table for it in the database. + Will also create any accompanying indexes or unique constraints. + """ + # Do nothing if this is an unmanaged or proxy model + if not model._meta.managed or model._meta.proxy: + return [], {} + # Create column SQL, add FK deferreds if needed + column_sqls = [] + for field in model._meta.local_fields: + # SQL + definition = self.column_sql(model, field) + if definition is None: + continue + column_sqls.append("%s %s" % ( + self.quote_name(field.column), + definition, + )) + # FK + if field.rel: + to_table = field.rel.to._meta.db_table + to_column = field.rel.to._meta.get_field(field.rel.field_name).column + self.deferred_sql.append( + self.sql_create_fk % { + "name": '%s_refs_%s_%x' % ( + field.column, + to_column, + abs(hash((model._meta.db_table, to_table))) + ), + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + } + ) + # Make the table + sql = self.sql_create_table % { + "table": model._meta.db_table, + "definition": ", ".join(column_sqls) + } + self.execute(sql) + + def column_sql(self, model, field, include_default=False): + """ + Takes a field and returns its column definition. + The field must already have had set_attributes_from_name called. + """ + # Get the column's type and use that as the basis of the SQL + sql = field.db_type(connection=self.connection) + # Check for fields that aren't actually columns (e.g. M2M) + if sql is None: + return None + # Optionally add the tablespace if it's an implicitly indexed column + tablespace = field.db_tablespace or model._meta.db_tablespace + if tablespace and self.connection.features.supports_tablespaces and field.unique: + sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True) + # Work out nullability + null = field.null + # Oracle treats the empty string ('') as null, so coerce the null + # option whenever '' is a possible value. + if (field.empty_strings_allowed and not field.primary_key and + self.connection.features.interprets_empty_strings_as_nulls): + null = True + if null: + sql += " NULL" + else: + sql += " NOT NULL" + # Primary key/unique outputs + if field.primary_key: + sql += " PRIMARY KEY" + elif field.unique: + sql += " UNIQUE" + # If we were told to include a default value, do so + if include_default: + raise NotImplementedError() + # Return the sql + return sql + + def delete_model(self, model): + self.execute(self.sql_delete_table % { + "table": self.quote_name(model._meta.db_table), + }) diff --git a/tests/modeltests/schema/__init__.py b/tests/modeltests/schema/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py new file mode 100644 index 0000000000..2c5dc829c6 --- /dev/null +++ b/tests/modeltests/schema/models.py @@ -0,0 +1,21 @@ +from django.db import models + +# Because we want to test creation and deletion of these as separate things, +# these models are all marked as unmanaged and only marked as managed while +# a schema test is running. 
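For reference, a sketch of the pieces create_model() (from schema.py above) assembles for the Author model defined just below, assuming PostgreSQL db_type() output for its two columns (the implicit AutoField plus the CharField):

    column_sqls = [
        '"id" serial NOT NULL PRIMARY KEY',   # implicit AutoField
        '"name" varchar(255) NOT NULL',       # CharField(max_length=255)
    ]
    sql = "CREATE TABLE %(table)s (%(definition)s)" % {
        "table": "schema_author",
        "definition": ", ".join(column_sqls),
    }
    # -> CREATE TABLE schema_author ("id" serial NOT NULL PRIMARY KEY, "name" varchar(255) NOT NULL)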
+ + +class Author(models.Model): + name = models.CharField(max_length=255) + + class Meta: + managed = False + + +class Book(models.Model): + author = models.ForeignKey(Author) + title = models.CharField(max_length=100) + pub_date = models.DateTimeField() + + class Meta: + managed = False diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py new file mode 100644 index 0000000000..6d5d27cdf1 --- /dev/null +++ b/tests/modeltests/schema/tests.py @@ -0,0 +1,102 @@ +from __future__ import absolute_import +import copy +import datetime +from django.test import TestCase +from django.db.models.loading import cache +from django.db import connection, DatabaseError, IntegrityError +from .models import Author, Book + + +class SchemaTests(TestCase): + """ + Tests that the schema-alteration code works correctly. + + Be aware that these tests are more liable than most to false results, + as sometimes the code to check if a test has worked is almost as complex + as the code it is testing. + """ + + models = [Author, Book] + + def setUp(self): + # Make sure we're in manual transaction mode + connection.commit_unless_managed() + connection.enter_transaction_management() + connection.managed(True) + # The unmanaged models need to be removed after the test in order to + # prevent bad interactions with the flush operation in other tests. + self.old_app_models = copy.deepcopy(cache.app_models) + self.old_app_store = copy.deepcopy(cache.app_store) + for model in self.models: + model._meta.managed = True + + def tearDown(self): + # Rollback anything that may have happened + connection.rollback() + # Delete any tables made for our models + cursor = connection.cursor() + for model in self.models: + try: + cursor.execute("DROP TABLE %s CASCADE" % ( + connection.ops.quote_name(model._meta.db_table), + )) + except DatabaseError: + connection.rollback() + else: + connection.commit() + # Unhook our models + for model in self.models: + model._meta.managed = False + cache.app_models = self.old_app_models + cache.app_store = self.old_app_store + cache._get_models_cache = {} + + def test_creation_deletion(self): + """ + Tries creating a model's table, and then deleting it. 
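Because Book above carries a ForeignKey to Author, the commit() step is what makes out-of-order creation safe: create_model() only queues the constraint. A sketch of what ends up in deferred_sql (the hash suffix in the constraint name is made up here), which is exactly what the test_creation_fk test further down exercises:

    editor = connection.schema_editor()
    editor.start()
    editor.create_model(Book)        # table exists now, FK statement only queued
    editor.create_model(Author)
    # editor.deferred_sql now holds roughly:
    #   ALTER TABLE "schema_book" ADD CONSTRAINT "author_id_refs_id_6e0f1a2b"
    #   FOREIGN KEY ("author_id") REFERENCES "schema_author" ("id")
    #   DEFERRABLE INITIALLY DEFERRED
    editor.commit()                  # the deferred FK runs here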
+ """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.commit() + # Check that it's there + try: + list(Author.objects.all()) + except DatabaseError, e: + self.fail("Table not created: %s" % e) + # Clean up that table + editor.start() + editor.delete_model(Author) + editor.commit() + # Check that it's gone + self.assertRaises( + DatabaseError, + lambda: list(Author.objects.all()), + ) + + def test_creation_fk(self): + "Tests that creating tables out of FK order works" + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Book) + editor.create_model(Author) + editor.commit() + # Check that both tables are there + try: + list(Author.objects.all()) + except DatabaseError, e: + self.fail("Author table not created: %s" % e) + try: + list(Book.objects.all()) + except DatabaseError, e: + self.fail("Book table not created: %s" % e) + # Make sure the FK constraint is present + with self.assertRaises(IntegrityError): + Book.objects.create( + author_id = 1, + title = "Much Ado About Foreign Keys", + pub_date = datetime.datetime.now(), + ) + connection.commit() From 959a3f9791d780062c4efe8765404a8ef95e87f0 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 19 Jun 2012 13:25:22 +0100 Subject: [PATCH 002/161] Add some field schema alteration methods and tests. --- django/db/backends/__init__.py | 3 + .../db/backends/postgresql_psycopg2/base.py | 1 + django/db/backends/schema.py | 280 +++++++++++++++--- tests/modeltests/schema/tests.py | 74 ++++- 4 files changed, 309 insertions(+), 49 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index ed2a54277f..0c1905c6b8 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -419,6 +419,9 @@ class BaseDatabaseFeatures(object): # Can we roll back DDL in a transaction? can_rollback_ddl = False + # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE? 
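What the flag declared just below buys, sketched after the combining step that alter_field() performs later in this patch (clause text is illustrative, built from the default templates):

    actions = [
        ('ALTER COLUMN "name" TYPE text', []),
        ('ALTER COLUMN "name" DROP NOT NULL', []),
    ]
    if connection.features.supports_combined_alters:   # True for PostgreSQL
        sql, params = tuple(zip(*actions))
        actions = [(", ".join(sql), params)]            # one ALTER TABLE, two clauses
    # otherwise each clause is wrapped and executed as its own ALTER TABLE statement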
+ supports_combined_alters = False + def __init__(self, connection): self.connection = connection diff --git a/django/db/backends/postgresql_psycopg2/base.py b/django/db/backends/postgresql_psycopg2/base.py index 6c56bb9c91..ebb4109f79 100644 --- a/django/db/backends/postgresql_psycopg2/base.py +++ b/django/db/backends/postgresql_psycopg2/base.py @@ -85,6 +85,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_tablespaces = True can_distinct_on_fields = True can_rollback_ddl = True + supports_combined_alters = True class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'postgresql' diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 73a9b99b50..bf838d2094 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -5,6 +5,7 @@ from django.conf import settings from django.db import transaction from django.db.utils import load_backend from django.utils.log import getLogger +from django.db.models.fields.related import ManyToManyField logger = getLogger('django.db.backends.schema') @@ -29,11 +30,15 @@ class BaseDatabaseSchemaEditor(object): sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" sql_delete_table = "DROP TABLE %(table)s CASCADE" - sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(definition)s" + sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" + sql_alter_column = "ALTER TABLE %(table)s %(changes)s" sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" - sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE;" + sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" + sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" + sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" sql_create_check = "ADD CONSTRAINT %(name)s CHECK (%(check)s)" sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" @@ -91,50 +96,7 @@ class BaseDatabaseSchemaEditor(object): def quote_name(self, name): return self.connection.ops.quote_name(name) - # Actions - - def create_model(self, model): - """ - Takes a model and creates a table for it in the database. - Will also create any accompanying indexes or unique constraints. 
- """ - # Do nothing if this is an unmanaged or proxy model - if not model._meta.managed or model._meta.proxy: - return [], {} - # Create column SQL, add FK deferreds if needed - column_sqls = [] - for field in model._meta.local_fields: - # SQL - definition = self.column_sql(model, field) - if definition is None: - continue - column_sqls.append("%s %s" % ( - self.quote_name(field.column), - definition, - )) - # FK - if field.rel: - to_table = field.rel.to._meta.db_table - to_column = field.rel.to._meta.get_field(field.rel.field_name).column - self.deferred_sql.append( - self.sql_create_fk % { - "name": '%s_refs_%s_%x' % ( - field.column, - to_column, - abs(hash((model._meta.db_table, to_table))) - ), - "table": self.quote_name(model._meta.db_table), - "column": self.quote_name(field.column), - "to_table": self.quote_name(to_table), - "to_column": self.quote_name(to_column), - } - ) - # Make the table - sql = self.sql_create_table % { - "table": model._meta.db_table, - "definition": ", ".join(column_sqls) - } - self.execute(sql) + # Field <-> database mapping functions def column_sql(self, model, field, include_default=False): """ @@ -143,6 +105,7 @@ class BaseDatabaseSchemaEditor(object): """ # Get the column's type and use that as the basis of the SQL sql = field.db_type(connection=self.connection) + params = [] # Check for fields that aren't actually columns (e.g. M2M) if sql is None: return None @@ -168,11 +131,232 @@ class BaseDatabaseSchemaEditor(object): sql += " UNIQUE" # If we were told to include a default value, do so if include_default: - raise NotImplementedError() + sql += " DEFAULT %s" + params += [self.effective_default(field)] # Return the sql - return sql + return sql, params + + def effective_default(self, field): + "Returns a field's effective database default value" + if field.has_default(): + default = field.get_default() + elif not field.null and field.blank and field.empty_strings_allowed: + default = "" + else: + default = None + # If it's a callable, call it + if callable(default): + default = default() + return default + + # Actions + + def create_model(self, model): + """ + Takes a model and creates a table for it in the database. + Will also create any accompanying indexes or unique constraints. + """ + # Do nothing if this is an unmanaged or proxy model + if not model._meta.managed or model._meta.proxy: + return + # Create column SQL, add FK deferreds if needed + column_sqls = [] + params = [] + for field in model._meta.local_fields: + # SQL + definition, extra_params = self.column_sql(model, field) + if definition is None: + continue + column_sqls.append("%s %s" % ( + self.quote_name(field.column), + definition, + )) + params.extend(extra_params) + # FK + if field.rel: + to_table = field.rel.to._meta.db_table + to_column = field.rel.to._meta.get_field(field.rel.field_name).column + self.deferred_sql.append( + self.sql_create_fk % { + "name": '%s_refs_%s_%x' % ( + field.column, + to_column, + abs(hash((model._meta.db_table, to_table))) + ), + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + } + ) + # Make the table + sql = self.sql_create_table % { + "table": model._meta.db_table, + "definition": ", ".join(column_sqls) + } + self.execute(sql, params) def delete_model(self, model): + """ + Deletes a model from the database. 
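A few concrete values effective_default() above produces, assuming standard model fields and an editor instance as in the earlier sketches (set_attributes_from_name() must already have been called, as the column_sql() docstring requires):

    from django.db import models

    f = models.CharField(max_length=10, default="x")
    f.set_attributes_from_name("nick")
    editor.effective_default(f)      # -> "x"

    f = models.CharField(max_length=10, blank=True)
    f.set_attributes_from_name("nick")
    editor.effective_default(f)      # -> "" (non-null, blank, empty strings allowed)

    f = models.IntegerField(null=True)
    f.set_attributes_from_name("age")
    editor.effective_default(f)      # -> None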
+ """ + # Do nothing if this is an unmanaged or proxy model + if not model._meta.managed or model._meta.proxy: + return + # Delete the table self.execute(self.sql_delete_table % { "table": self.quote_name(model._meta.db_table), }) + + def create_field(self, model, field, keep_default=False): + """ + Creates a field on a model. + Usually involves adding a column, but may involve adding a + table instead (for M2M fields) + """ + # Special-case implicit M2M tables + if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: + return self.create_model(field.rel.through) + # Get the column's definition + definition, params = self.column_sql(model, field, include_default=True) + # It might not actually have a column behind it + if definition is None: + return + # Build the SQL and run it + sql = self.sql_create_column % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "definition": definition, + } + self.execute(sql, params) + # Drop the default if we need to + # (Django usually does not use in-database defaults) + if not keep_default and field.default is not None: + sql = self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": self.sql_alter_column_no_default % { + "column": self.quote_name(field.column), + } + } + # Add any FK constraints later + if field.rel: + to_table = field.rel.to._meta.db_table + to_column = field.rel.to._meta.get_field(field.rel.field_name).column + self.deferred_sql.append( + self.sql_create_fk % { + "name": '%s_refs_%s_%x' % ( + field.column, + to_column, + abs(hash((model._meta.db_table, to_table))) + ), + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + } + ) + + def delete_field(self, model, field): + """ + Removes a field from a model. Usually involves deleting a column, + but for M2Ms may involve deleting a table. + """ + # Special-case implicit M2M tables + if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: + return self.delete_model(field.rel.through) + # Get the column's definition + definition, params = self.column_sql(model, field) + # It might not actually have a column behind it + if definition is None: + return + # Delete the column + sql = self.sql_delete_column % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + } + self.execute(sql) + + def alter_field(self, model, old_field, new_field): + """ + Allows a field's type, uniqueness, nullability, default, column, + constraints etc. to be modified. + Requires a copy of the old field as well so we can only perform + changes that are required. + """ + # Ensure this field is even column-based + old_type = old_field.db_type(connection=self.connection) + new_type = new_field.db_type(connection=self.connection) + if old_type is None and new_type is None: + # TODO: Handle M2M fields being repointed + return + elif old_type is None or new_type is None: + raise ValueError("Cannot alter field %s into %s - they are not compatible types" % ( + old_field, + new_field, + )) + # First, have they renamed the column? 
+ if old_field.column != new_field.column: + self.execute(self.sql_rename_column % { + "table": self.quote_name(model._meta.db_table), + "old_column": self.quote_name(old_field.column), + "new_column": self.quote_name(new_field.column), + }) + # Next, start accumulating actions to do + actions = [] + # Type change? + if old_type != new_type: + actions.append(( + self.sql_alter_column_type % { + "column": self.quote_name(new_field.column), + "type": new_type, + }, + [], + )) + # Default change? + old_default = self.effective_default(old_field) + new_default = self.effective_default(new_field) + if old_default != new_default: + if new_default is None: + actions.append(( + self.sql_alter_column_no_default % { + "column": self.quote_name(new_field.column), + }, + [], + )) + else: + actions.append(( + self.sql_alter_column_default % { + "column": self.quote_name(new_field.column), + "default": "%s", + }, + [new_default], + )) + # Nullability change? + if old_field.null != new_field.null: + if new_field.null: + actions.append(( + self.sql_alter_column_null % { + "column": self.quote_name(new_field.column), + }, + [], + )) + else: + actions.append(( + self.sql_alter_column_null % { + "column": self.quote_name(new_field.column), + }, + [], + )) + # Combine actions together if we can (e.g. postgres) + if self.connection.features.supports_combined_alters: + sql, params = tuple(zip(*actions)) + actions = [(", ".join(sql), params)] + # Apply those actions + for sql, params in actions: + self.execute( + self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": sql, + }, + params, + ) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 6d5d27cdf1..83b2dabd45 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -2,8 +2,9 @@ from __future__ import absolute_import import copy import datetime from django.test import TestCase -from django.db.models.loading import cache from django.db import connection, DatabaseError, IntegrityError +from django.db.models.fields import IntegerField, TextField +from django.db.models.loading import cache from .models import Author, Book @@ -18,6 +19,8 @@ class SchemaTests(TestCase): models = [Author, Book] + # Utility functions + def setUp(self): # Make sure we're in manual transaction mode connection.commit_unless_managed() @@ -51,6 +54,18 @@ class SchemaTests(TestCase): cache.app_store = self.old_app_store cache._get_models_cache = {} + def column_classes(self, model): + cursor = connection.cursor() + return dict( + (d[0], (connection.introspection.get_field_type(d[1], d), d)) + for d in connection.introspection.get_table_description( + cursor, + model._meta.db_table, + ) + ) + + # Tests + def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. 
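Putting alter_field() together with the pieces above, a sketch of widening Author.name from a CharField to a nullable TextField (the same shape the test_alter test below uses); the resulting SQL shown is the PostgreSQL form:

    from django.db.models import TextField

    new_field = TextField(null=True)
    new_field.set_attributes_from_name("name")
    editor = connection.schema_editor()
    editor.start()
    editor.alter_field(Author, Author._meta.get_field_by_name("name")[0], new_field)
    editor.commit()
    # roughly: ALTER TABLE "schema_author"
    #              ALTER COLUMN "name" TYPE text, ALTER COLUMN "name" DROP NOT NULL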
@@ -100,3 +115,60 @@ class SchemaTests(TestCase): pub_date = datetime.datetime.now(), ) connection.commit() + + def test_create_field(self): + """ + Tests adding fields to models + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.commit() + # Ensure there's no age field + columns = self.column_classes(Author) + self.assertNotIn("age", columns) + # Alter the name field to a TextField + new_field = IntegerField(null=True) + new_field.set_attributes_from_name("age") + editor = connection.schema_editor() + editor.start() + editor.create_field( + Author, + new_field, + ) + editor.commit() + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['age'][0], "IntegerField") + self.assertEqual(columns['age'][1][6], True) + + def test_alter(self): + """ + Tests simple altering of fields + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.commit() + # Ensure the field is right to begin with + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + self.assertEqual(columns['name'][1][3], 255) + self.assertEqual(columns['name'][1][6], False) + # Alter the name field to a TextField + new_field = TextField(null=True) + new_field.set_attributes_from_name("name") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + Author._meta.get_field_by_name("name")[0], + new_field, + ) + editor.commit() + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "TextField") + self.assertEqual(columns['name'][1][6], True) From b139315f1c5e3eb05c76237c2824bdf03bd689b6 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 2 Aug 2012 15:08:39 +0100 Subject: [PATCH 003/161] Add M2M tests and some unique support --- django/db/backends/__init__.py | 12 ++ django/db/backends/creation.py | 3 +- .../postgresql_psycopg2/introspection.py | 32 +++++ django/db/backends/schema.py | 82 +++++++++-- tests/modeltests/schema/models.py | 13 ++ tests/modeltests/schema/tests.py | 132 +++++++++++++++++- 6 files changed, 259 insertions(+), 15 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 816cbcda63..b9c642d093 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -427,6 +427,9 @@ class BaseDatabaseFeatures(object): # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE? supports_combined_alters = False + # What's the maximum length for index names? + max_index_name_length = 63 + def __init__(self, connection): self.connection = connection @@ -1056,6 +1059,15 @@ class BaseDatabaseIntrospection(object): """ raise NotImplementedError + def get_constraints(self, cursor, table_name): + """ + Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} + + Both single- and multi-column constraints are introspected. 
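The documented return shape, sketched for the Tag table used in the tests; the constraint names are whatever the backend generated and are only illustrative here:

    {
        "schema_tag_pkey":     {"columns": set(["id"]),   "primary_key": True,  "unique": True},
        "schema_tag_slug_key": {"columns": set(["slug"]), "primary_key": False, "unique": True},
    }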
+ """ + raise NotImplementedError + + class BaseDatabaseClient(object): """ This class encapsulates all backend-specific methods for opening a diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py index fcc6ab7584..4dffd78f44 100644 --- a/django/db/backends/creation.py +++ b/django/db/backends/creation.py @@ -21,7 +21,8 @@ class BaseDatabaseCreation(object): def __init__(self, connection): self.connection = connection - def _digest(self, *args): + @classmethod + def _digest(cls, *args): """ Generates a 32-bit digest of a set of arguments that can be used to shorten identifying names. diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index 99573b9019..c8b8ec833b 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -88,3 +88,35 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): continue indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]} return indexes + + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints (unique, pk, check) across one or more columns. + Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} + """ + constraints = {} + # Loop over the constraint tables, collecting things as constraints + ifsc_tables = ["constraint_column_usage", "key_column_usage"] + for ifsc_table in ifsc_tables: + cursor.execute(""" + SELECT kc.constraint_name, kc.column_name, c.constraint_type + FROM information_schema.%s AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %%s AND + kc.table_name = %%s + """ % ifsc_table, ["public", table_name]) + for constraint, column, kind in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": set(), + "primary_key": kind.lower() == "primary key", + "unique": kind.lower() in ["primary key", "unique"], + } + # Record the details + constraints[constraint]['columns'].add(column) + return constraints diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index bf838d2094..5f4e0146b4 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -4,6 +4,8 @@ import time from django.conf import settings from django.db import transaction from django.db.utils import load_backend +from django.db.backends.creation import BaseDatabaseCreation +from django.db.backends.util import truncate_name from django.utils.log import getLogger from django.db.models.fields.related import ManyToManyField @@ -294,7 +296,23 @@ class BaseDatabaseSchemaEditor(object): old_field, new_field, )) - # First, have they renamed the column? + # Has unique been removed? + if old_field.unique and not new_field.unique: + # Find the unique constraint for this field + constraint_names = self._constraint_names(model, [old_field.column], unique=True) + if len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of constraints for %s.%s" % ( + len(constraint_names), + model._meta.db_table, + old_field.column, + )) + self.execute( + self.sql_delete_unique % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_names[0], + }, + ) + # Have they renamed the column? 
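The unique-removal branch above leans on _constraint_names(), defined at the end of this patch; a sketch of that lookup for Tag.slug (constraint name illustrative):

    editor._constraint_names(Tag, ["slug"], unique=True)
    # -> ["schema_tag_slug_key"]; anything other than exactly one match raises ValueError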
if old_field.column != new_field.column: self.execute(self.sql_rename_column % { "table": self.quote_name(model._meta.db_table), @@ -347,16 +365,58 @@ class BaseDatabaseSchemaEditor(object): }, [], )) - # Combine actions together if we can (e.g. postgres) - if self.connection.features.supports_combined_alters: - sql, params = tuple(zip(*actions)) - actions = [(", ".join(sql), params)] - # Apply those actions - for sql, params in actions: + if actions: + # Combine actions together if we can (e.g. postgres) + if self.connection.features.supports_combined_alters: + sql, params = tuple(zip(*actions)) + actions = [(", ".join(sql), params)] + # Apply those actions + for sql, params in actions: + self.execute( + self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": sql, + }, + params, + ) + # Added a unique? + if not old_field.unique and new_field.unique: self.execute( - self.sql_alter_column % { + self.sql_create_unique % { "table": self.quote_name(model._meta.db_table), - "changes": sql, - }, - params, + "name": self._create_index_name(model, [new_field.column], suffix="_uniq"), + "columns": self.quote_name(new_field.column), + } ) + + def _create_index_name(self, model, column_names, suffix=""): + "Generates a unique name for an index/unique constraint." + # If there is just one column in the index, use a default algorithm from Django + if len(column_names) == 1 and not suffix: + return truncate_name( + '%s_%s' % (model._meta.db_table, BaseDatabaseCreation._digest(column_names[0])), + self.connection.ops.max_name_length() + ) + # Else generate the name for the index by South + table_name = model._meta.db_table.replace('"', '').replace('.', '_') + index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names)))) + # If the index name is too long, truncate it + index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_') + if len(index_name) > self.connection.features.max_index_name_length: + part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) + index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) + return index_name + + def _constraint_names(self, model, column_names, unique=None, primary_key=None): + "Returns all constraint names matching the columns and conditions" + column_names = set(column_names) + constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) + result = [] + for name, infodict in constraints.items(): + if column_names == infodict['columns']: + if unique is not None and infodict['unique'] != unique: + continue + if primary_key is not None and infodict['primary_key'] != unique: + continue + result.append(name) + return result diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index 2c5dc829c6..2362718bf3 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -12,10 +12,23 @@ class Author(models.Model): managed = False +class AuthorWithM2M(models.Model): + name = models.CharField(max_length=255) + + class Meta: + managed = False + + class Book(models.Model): author = models.ForeignKey(Author) title = models.CharField(max_length=100) pub_date = models.DateTimeField() + #tags = models.ManyToManyField("Tag", related_name="books") class Meta: managed = False + + +class Tag(models.Model): + title = models.CharField(max_length=255) + slug = models.SlugField(unique=True) diff --git 
a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 83b2dabd45..8708fd7c8d 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -3,9 +3,10 @@ import copy import datetime from django.test import TestCase from django.db import connection, DatabaseError, IntegrityError -from django.db.models.fields import IntegerField, TextField +from django.db.models.fields import IntegerField, TextField, CharField, SlugField +from django.db.models.fields.related import ManyToManyField from django.db.models.loading import cache -from .models import Author, Book +from .models import Author, Book, AuthorWithM2M, Tag class SchemaTests(TestCase): @@ -17,7 +18,7 @@ class SchemaTests(TestCase): as the code it is testing. """ - models = [Author, Book] + models = [Author, Book, AuthorWithM2M, Tag] # Utility functions @@ -39,6 +40,17 @@ class SchemaTests(TestCase): # Delete any tables made for our models cursor = connection.cursor() for model in self.models: + # Remove any M2M tables first + for field in model._meta.local_many_to_many: + try: + cursor.execute("DROP TABLE %s CASCADE" % ( + connection.ops.quote_name(field.rel.through._meta.db_table), + )) + except DatabaseError: + connection.rollback() + else: + connection.commit() + # Then remove the main tables try: cursor.execute("DROP TABLE %s CASCADE" % ( connection.ops.quote_name(model._meta.db_table), @@ -172,3 +184,117 @@ class SchemaTests(TestCase): columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(columns['name'][1][6], True) + + def test_rename(self): + """ + Tests simple altering of fields + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.commit() + # Ensure the field is right to begin with + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + self.assertEqual(columns['name'][1][3], 255) + self.assertNotIn("display_name", columns) + # Alter the name field's name + new_field = CharField(max_length=254) + new_field.set_attributes_from_name("display_name") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + Author._meta.get_field_by_name("name")[0], + new_field, + ) + editor.commit() + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['display_name'][0], "CharField") + self.assertEqual(columns['display_name'][1][3], 254) + self.assertNotIn("name", columns) + + def test_m2m(self): + """ + Tests adding/removing M2M fields on models + """ + # Create the tables + editor = connection.schema_editor() + editor.start() + editor.create_model(AuthorWithM2M) + editor.create_model(Tag) + editor.commit() + # Create an M2M field + new_field = ManyToManyField("schema.Tag", related_name="authors") + new_field.contribute_to_class(AuthorWithM2M, "tags") + # Ensure there's no m2m table there + self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) + connection.rollback() + # Add the field + editor = connection.schema_editor() + editor.start() + editor.create_field( + Author, + new_field, + ) + editor.commit() + # Ensure there is now an m2m table there + columns = self.column_classes(new_field.rel.through) + self.assertEqual(columns['tag_id'][0], "IntegerField") + # Remove the M2M table again + editor = connection.schema_editor() + editor.start() + editor.delete_field( + Author, + new_field, + ) + editor.commit() + # Ensure there's no m2m table 
there + self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) + connection.rollback() + + def test_unique(self): + """ + Tests removing and adding unique constraints to a single column. + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Tag) + editor.commit() + # Ensure the field is unique to begin with + Tag.objects.create(title="foo", slug="foo") + self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") + connection.rollback() + # Alter the slug field to be non-unique + new_field = SlugField(unique=False) + new_field.set_attributes_from_name("slug") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + new_field, + ) + editor.commit() + # Ensure the field is no longer unique + Tag.objects.create(title="foo", slug="foo") + Tag.objects.create(title="bar", slug="foo") + connection.rollback() + # Alter the slug field to be non-unique + new_new_field = SlugField(unique=True) + new_new_field.set_attributes_from_name("slug") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Tag, + new_field, + new_new_field, + ) + editor.commit() + # Ensure the field is unique again + Tag.objects.create(title="foo", slug="foo") + self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") + connection.rollback() From c4b2a3262cc79383d6562cfc7e9af20135c8e0bf Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 Aug 2012 12:38:18 +0100 Subject: [PATCH 004/161] Add support for unique_together --- django/db/backends/schema.py | 53 ++++++++++++++++++++++++++++++- tests/modeltests/schema/models.py | 12 +++++++ tests/modeltests/schema/tests.py | 48 ++++++++++++++++++++++++++-- 3 files changed, 110 insertions(+), 3 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 5f4e0146b4..c5e297197b 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -29,6 +29,7 @@ class BaseDatabaseSchemaEditor(object): # Overrideable SQL templates sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" + sql_create_table_unique = "UNIQUE (%(columns)s)" sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" sql_delete_table = "DROP TABLE %(table)s CASCADE" @@ -51,7 +52,7 @@ class BaseDatabaseSchemaEditor(object): sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED" sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" - sql_create_index = "CREATE %(unique)s INDEX %(name)s ON %(table)s (%(columns)s)%s;" + sql_create_index = "CREATE %(unique)s INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;" sql_delete_index = "DROP INDEX %(name)s" sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)" @@ -174,6 +175,17 @@ class BaseDatabaseSchemaEditor(object): definition, )) params.extend(extra_params) + # Indexes + if field.db_index: + self.deferred_sql.append( + self.sql_create_index % { + "unique": "", + "name": self._create_index_name(model, [field.column], suffix=""), + "table": self.quote_name(model._meta.db_table), + "columns": self.quote_name(field.column), + "extra": "", + } + ) # FK if field.rel: to_table = field.rel.to._meta.db_table @@ -191,6 +203,12 @@ class BaseDatabaseSchemaEditor(object): "to_column": self.quote_name(to_column), } ) + # Add any unique_togethers + for fields in 
model._meta.unique_together: + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + column_sqls.append(self.sql_create_table_unique % { + "columns": ", ".join(self.quote_name(column) for column in columns), + }) # Make the table sql = self.sql_create_table % { "table": model._meta.db_table, @@ -210,6 +228,39 @@ class BaseDatabaseSchemaEditor(object): "table": self.quote_name(model._meta.db_table), }) + def alter_unique_together(self, model, old_unique_together, new_unique_together): + """ + Deals with a model changing its unique_together. + Note: The input unique_togethers must be doubly-nested, not the single- + nested ["foo", "bar"] format. + """ + olds = set(frozenset(fields) for fields in old_unique_together) + news = set(frozenset(fields) for fields in new_unique_together) + # Deleted uniques + for fields in olds.difference(news): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + constraint_names = self._constraint_names(model, list(columns), unique=True) + if len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( + len(constraint_names), + model._meta.db_table, + ", ".join(columns), + )) + self.execute( + self.sql_delete_unique % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_names[0], + }, + ) + # Created uniques + for fields in news.difference(olds): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + self.execute(self.sql_create_unique % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, columns, suffix="_uniq"), + "columns": ", ".join(self.quote_name(column) for column in columns), + }) + def create_field(self, model, field, keep_default=False): """ Creates a field on a model. diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index 2362718bf3..e9eba1fc6f 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -32,3 +32,15 @@ class Book(models.Model): class Tag(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(unique=True) + + class Meta: + managed = False + + +class UniqueTest(models.Model): + year = models.IntegerField() + slug = models.SlugField(unique=False) + + class Meta: + managed = False + unique_together = ["year", "slug"] diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 8708fd7c8d..52d99225f0 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -6,7 +6,7 @@ from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField from django.db.models.loading import cache -from .models import Author, Book, AuthorWithM2M, Tag +from .models import Author, Book, AuthorWithM2M, Tag, UniqueTest class SchemaTests(TestCase): @@ -18,7 +18,7 @@ class SchemaTests(TestCase): as the code it is testing. """ - models = [Author, Book, AuthorWithM2M, Tag] + models = [Author, Book, AuthorWithM2M, Tag, UniqueTest] # Utility functions @@ -298,3 +298,47 @@ class SchemaTests(TestCase): Tag.objects.create(title="foo", slug="foo") self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") connection.rollback() + + def test_unique_together(self): + """ + Tests removing and adding unique_together constraints on a model. 
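The docstring's doubly-nested requirement, concretely (UniqueTest as defined above, editor as in the earlier sketches):

    editor.alter_unique_together(UniqueTest, [("year", "slug")], [])   # drops the constraint
    # Passing the single-nested form ("year", "slug") would be wrong: each string is
    # itself iterated, so the code would try to look up fields named "y", "e", "a", ...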
+ """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(UniqueTest) + editor.commit() + # Ensure the fields are unique to begin with + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.create(year=2011, slug="foo") + UniqueTest.objects.create(year=2011, slug="bar") + self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") + connection.rollback() + # Alter the model to it's non-unique-together companion + editor = connection.schema_editor() + editor.start() + editor.alter_unique_together( + UniqueTest, + UniqueTest._meta.unique_together, + [], + ) + editor.commit() + # Ensure the fields are no longer unique + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.create(year=2012, slug="foo") + connection.rollback() + # Alter it back + new_new_field = SlugField(unique=True) + new_new_field.set_attributes_from_name("slug") + editor = connection.schema_editor() + editor.start() + editor.alter_unique_together( + UniqueTest, + [], + UniqueTest._meta.unique_together, + ) + editor.commit() + # Ensure the fields are unique again + UniqueTest.objects.create(year=2012, slug="foo") + self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") + connection.rollback() From 60873ea2ade8bed909b9f2dabb0f8a499226e10d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 Aug 2012 15:03:18 +0100 Subject: [PATCH 005/161] Add db_table and db_tablespace handling --- django/db/backends/schema.py | 32 +++++++++++++++++++++----- tests/modeltests/schema/tests.py | 39 ++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+), 6 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index c5e297197b..bae0b1d71d 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -1,9 +1,3 @@ -import sys -import time - -from django.conf import settings -from django.db import transaction -from django.db.utils import load_backend from django.db.backends.creation import BaseDatabaseCreation from django.db.backends.util import truncate_name from django.utils.log import getLogger @@ -25,12 +19,19 @@ class BaseDatabaseSchemaEditor(object): then the relevant actions, and then commit(). This is necessary to allow things like circular foreign key references - FKs will only be created once commit() is called. + + TODO: + - Repointing of FKs + - Repointing of M2Ms + - Check constraints (PosIntField) + - PK changing """ # Overrideable SQL templates sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" sql_create_table_unique = "UNIQUE (%(columns)s)" sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" + sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" sql_delete_table = "DROP TABLE %(table)s CASCADE" sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" @@ -261,6 +262,25 @@ class BaseDatabaseSchemaEditor(object): "columns": ", ".join(self.quote_name(column) for column in columns), }) + def alter_db_table(self, model, old_db_table, new_db_table): + """ + Renames the table a model points to. 
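A sketch of the call (the test_db_table test below drives exactly this), with the SQL each dialect renders it to:

    editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
    # default template:           ALTER TABLE "schema_author" RENAME TO "schema_otherauthor"
    # MySQL override, next patch: RENAME TABLE `schema_author` TO `schema_otherauthor`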
+ """ + self.execute(self.sql_rename_table % { + "old_table": self.quote_name(old_db_table), + "new_table": self.quote_name(new_db_table), + }) + + def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): + """ + Moves a model's table between tablespaces + """ + self.execute(self.sql_rename_table % { + "table": self.quote_name(model._meta.db_table), + "old_tablespace": self.quote_name(old_db_tablespace), + "new_tablespace": self.quote_name(new_db_tablespace), + }) + def create_field(self, model, field, keep_default=False): """ Creates a field on a model. diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 52d99225f0..8813d3ca23 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -342,3 +342,42 @@ class SchemaTests(TestCase): UniqueTest.objects.create(year=2012, slug="foo") self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") connection.rollback() + + def test_db_table(self): + """ + Tests renaming of the table + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.commit() + # Ensure the table is there to begin with + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + # Alter the table + editor = connection.schema_editor() + editor.start() + editor.alter_db_table( + Author, + "schema_author", + "schema_otherauthor", + ) + editor.commit() + # Ensure the table is there afterwards + Author._meta.db_table = "schema_otherauthor" + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + # Alter the table again + editor = connection.schema_editor() + editor.start() + editor.alter_db_table( + Author, + "schema_otherauthor", + "schema_author", + ) + editor.commit() + # Ensure the table is still there + Author._meta.db_table = "schema_author" + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") From cab044c66ca3d1f3cfc704f64364aced2b9645af Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 Aug 2012 12:29:31 +0100 Subject: [PATCH 006/161] First stab at MySQL support --- django/db/backends/mysql/base.py | 5 +++ django/db/backends/mysql/introspection.py | 32 +++++++++++++++++++ django/db/backends/mysql/schema.py | 24 ++++++++++++++ .../postgresql_psycopg2/introspection.py | 2 +- tests/modeltests/schema/tests.py | 3 -- 5 files changed, 62 insertions(+), 4 deletions(-) create mode 100644 django/db/backends/mysql/schema.py diff --git a/django/db/backends/mysql/base.py b/django/db/backends/mysql/base.py index 2222f89cf0..b3e09edc57 100644 --- a/django/db/backends/mysql/base.py +++ b/django/db/backends/mysql/base.py @@ -37,6 +37,7 @@ from django.db.backends.mysql.client import DatabaseClient from django.db.backends.mysql.creation import DatabaseCreation from django.db.backends.mysql.introspection import DatabaseIntrospection from django.db.backends.mysql.validation import DatabaseValidation +from django.db.backends.mysql.schema import DatabaseSchemaEditor from django.utils.functional import cached_property from django.utils.safestring import SafeString, SafeUnicode from django.utils import six @@ -488,3 +489,7 @@ class DatabaseWrapper(BaseDatabaseWrapper): % (table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name)) + + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) diff --git 
a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 6aab0b99ab..61ab3038c4 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -102,3 +102,35 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])} return indexes + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints (unique, pk, fk, check) across one or more columns. + Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} + """ + constraints = {} + # Loop over the constraint tables, collecting things as constraints + ifsc_tables = ["constraint_column_usage", "key_column_usage"] + for ifsc_table in ifsc_tables: + cursor.execute(""" + SELECT kc.constraint_name, kc.column_name, c.constraint_type + FROM information_schema.%s AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %%s AND + kc.table_name = %%s + """ % ifsc_table, [self.connection.settings_dict['NAME'], table_name]) + for constraint, column, kind in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": set(), + "primary_key": kind.lower() == "primary key", + "foreign_key": kind.lower() == "foreign key", + "unique": kind.lower() in ["primary key", "unique"], + } + # Record the details + constraints[constraint]['columns'].add(column) + return constraints diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py new file mode 100644 index 0000000000..2c0ad5b8af --- /dev/null +++ b/django/db/backends/mysql/schema.py @@ -0,0 +1,24 @@ +from django.db.backends.schema import BaseDatabaseSchemaEditor + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + + sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s" + + sql_alter_column_null = "MODIFY %(column)s %(type)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s %(type)s NULL" + + sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s" + + sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)" + sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s" + + sql_delete_index = "DROP INDEX %(name)s ON %(table_name)s" + + sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" + + + + + alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;' + alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;' diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index c8b8ec833b..d85bdc9d6b 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -91,7 +91,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): def get_constraints(self, cursor, table_name): """ - Retrieves any constraints (unique, pk, check) across one or more columns. + Retrieves any constraints (unique, pk, fk, check) across one or more columns. 
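Both backends wired up so far follow the same two-step pattern; a sketch for a hypothetical further backend (the class bodies here are placeholders, not a real Django backend):

    from django.db.backends import BaseDatabaseWrapper
    from django.db.backends.schema import BaseDatabaseSchemaEditor

    class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
        # Override only the templates whose dialect differs from the defaults.
        sql_delete_table = "DROP TABLE %(table)s"

    class DatabaseWrapper(BaseDatabaseWrapper):
        def schema_editor(self):
            "Returns a new instance of this backend's SchemaEditor"
            return DatabaseSchemaEditor(self)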
Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} """ constraints = {} diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 8813d3ca23..7b1de268a4 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -167,7 +167,6 @@ class SchemaTests(TestCase): # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") - self.assertEqual(columns['name'][1][3], 255) self.assertEqual(columns['name'][1][6], False) # Alter the name field to a TextField new_field = TextField(null=True) @@ -197,7 +196,6 @@ class SchemaTests(TestCase): # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") - self.assertEqual(columns['name'][1][3], 255) self.assertNotIn("display_name", columns) # Alter the name field's name new_field = CharField(max_length=254) @@ -213,7 +211,6 @@ class SchemaTests(TestCase): # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['display_name'][0], "CharField") - self.assertEqual(columns['display_name'][1][3], 254) self.assertNotIn("name", columns) def test_m2m(self): From f7955c703de85059f06963ae948d64c28ceeef27 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 Aug 2012 13:48:54 +0100 Subject: [PATCH 007/161] All tests passing on MySQL --- django/db/backends/mysql/introspection.py | 62 +++++++++++++---------- django/db/backends/mysql/schema.py | 5 +- django/db/backends/schema.py | 14 ++++- tests/modeltests/schema/tests.py | 2 + 4 files changed, 53 insertions(+), 30 deletions(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 61ab3038c4..48d6905092 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -105,32 +105,42 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): def get_constraints(self, cursor, table_name): """ Retrieves any constraints (unique, pk, fk, check) across one or more columns. 
- Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} + Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool, 'foreign_key': None|(tbl, col)}} """ constraints = {} - # Loop over the constraint tables, collecting things as constraints - ifsc_tables = ["constraint_column_usage", "key_column_usage"] - for ifsc_table in ifsc_tables: - cursor.execute(""" - SELECT kc.constraint_name, kc.column_name, c.constraint_type - FROM information_schema.%s AS kc - JOIN information_schema.table_constraints AS c ON - kc.table_schema = c.table_schema AND - kc.table_name = c.table_name AND - kc.constraint_name = c.constraint_name - WHERE - kc.table_schema = %%s AND - kc.table_name = %%s - """ % ifsc_table, [self.connection.settings_dict['NAME'], table_name]) - for constraint, column, kind in cursor.fetchall(): - # If we're the first column, make the record - if constraint not in constraints: - constraints[constraint] = { - "columns": set(), - "primary_key": kind.lower() == "primary key", - "foreign_key": kind.lower() == "foreign key", - "unique": kind.lower() in ["primary key", "unique"], - } - # Record the details - constraints[constraint]['columns'].add(column) + # Get the actual constraint names and columns + name_query = """ + SELECT kc.`constraint_name`, kc.`column_name`, + kc.`referenced_table_name`, kc.`referenced_column_name` + FROM information_schema.key_column_usage AS kc + WHERE + kc.table_schema = %s AND + kc.table_name = %s + """ + cursor.execute(name_query, [self.connection.settings_dict['NAME'], table_name]) + for constraint, column, ref_table, ref_column in cursor.fetchall(): + if constraint not in constraints: + constraints[constraint] = { + 'columns': set(), + 'primary_key': False, + 'unique': False, + 'foreign_key': (ref_table, ref_column) if ref_column else None, + } + constraints[constraint]['columns'].add(column) + # Now get the constraint types + type_query = """ + SELECT c.constraint_name, c.constraint_type + FROM information_schema.table_constraints AS c + WHERE + c.table_schema = %s AND + c.table_name = %s + """ + cursor.execute(type_query, [self.connection.settings_dict['NAME'], table_name]) + for constraint, kind in cursor.fetchall(): + if kind.lower() == "primary key": + constraints[constraint]['primary_key'] = True + constraints[constraint]['unique'] = True + elif kind.lower() == "unique": + constraints[constraint]['unique'] = True + # Return return constraints diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py index 2c0ad5b8af..efc469d9fb 100644 --- a/django/db/backends/mysql/schema.py +++ b/django/db/backends/mysql/schema.py @@ -7,6 +7,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_alter_column_null = "MODIFY %(column)s %(type)s NULL" sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL" + sql_alter_column_type = "MODIFY %(column)s %(type)s" + sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s" sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s" @@ -17,8 +19,5 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" - - - alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;' alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;' diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index bae0b1d71d..788be3be35 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -25,6 +25,7 @@ class
BaseDatabaseSchemaEditor(object): - Repointing of M2Ms - Check constraints (PosIntField) - PK changing + - db_index on alter field """ # Overrideable SQL templates @@ -358,7 +359,7 @@ class BaseDatabaseSchemaEditor(object): """ # Ensure this field is even column-based old_type = old_field.db_type(connection=self.connection) - new_type = new_field.db_type(connection=self.connection) + new_type = self._type_for_alter(new_field) if old_type is None and new_type is None: # TODO: Handle M2M fields being repointed return @@ -389,6 +390,7 @@ class BaseDatabaseSchemaEditor(object): "table": self.quote_name(model._meta.db_table), "old_column": self.quote_name(old_field.column), "new_column": self.quote_name(new_field.column), + "type": new_type, }) # Next, start accumulating actions to do actions = [] @@ -426,6 +428,7 @@ class BaseDatabaseSchemaEditor(object): actions.append(( self.sql_alter_column_null % { "column": self.quote_name(new_field.column), + "type": new_type, }, [], )) @@ -433,6 +436,7 @@ class BaseDatabaseSchemaEditor(object): actions.append(( self.sql_alter_column_null % { "column": self.quote_name(new_field.column), + "type": new_type, }, [], )) @@ -460,6 +464,14 @@ class BaseDatabaseSchemaEditor(object): } ) + def _type_for_alter(self, field): + """ + Returns a field's type suitable for ALTER COLUMN. + By default it just returns field.db_type(). + To be overridden by backend-specific subclasses + """ + return field.db_type(connection=self.connection) + def _create_index_name(self, model, column_names, suffix=""): "Generates a unique name for an index/unique constraint." # If there is just one column in the index, use a default algorithm from Django diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 7b1de268a4..7031138280 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -39,6 +39,7 @@ class SchemaTests(TestCase): connection.rollback() # Delete any tables made for our models cursor = connection.cursor() + connection.disable_constraint_checking() for model in self.models: # Remove any M2M tables first for field in model._meta.local_many_to_many: @@ -59,6 +60,7 @@ class SchemaTests(TestCase): connection.rollback() else: connection.commit() + connection.enable_constraint_checking() # Unhook our models for model in self.models: model._meta.managed = False From 0b013951086814ebee048f0fa9a620e03f891494 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 Aug 2012 14:00:42 +0100 Subject: [PATCH 008/161] Test that unique constraints get ported with column rename --- tests/modeltests/schema/models.py | 9 +++++++++ tests/modeltests/schema/tests.py | 17 ++++++++++++++++- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index e9eba1fc6f..053aa026f7 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -37,6 +37,15 @@ class Tag(models.Model): managed = False +class TagUniqueRename(models.Model): + title = models.CharField(max_length=255) + slug2 = models.SlugField(unique=True) + + class Meta: + managed = False + db_table = "schema_tag" + + class UniqueTest(models.Model): year = models.IntegerField() slug = models.SlugField(unique=False) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 7031138280..18f1abad63 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -6,7 +6,7 @@ from django.db import connection, DatabaseError,
IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField from django.db.models.loading import cache -from .models import Author, Book, AuthorWithM2M, Tag, UniqueTest +from .models import Author, Book, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest class SchemaTests(TestCase): @@ -297,6 +297,21 @@ class SchemaTests(TestCase): Tag.objects.create(title="foo", slug="foo") self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") connection.rollback() + # Rename the field + new_field = SlugField(unique=False) + new_field.set_attributes_from_name("slug2") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + TagUniqueRename._meta.get_field_by_name("slug2")[0], + ) + editor.commit() + # Ensure the field is still unique + TagUniqueRename.objects.create(title="foo", slug2="foo") + self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo") + connection.rollback() def test_unique_together(self): """ From d3d1e599211c31e05d095b878de517dbb6fc998c Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 Aug 2012 14:16:52 +0100 Subject: [PATCH 009/161] Add a SQlite backend. One test passes! --- django/db/backends/sqlite3/base.py | 5 +++++ django/db/backends/sqlite3/schema.py | 6 ++++++ tests/modeltests/schema/tests.py | 12 ++++++------ 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 django/db/backends/sqlite3/schema.py diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index 0880079189..7918d5d3ef 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -18,6 +18,7 @@ from django.db.backends.signals import connection_created from django.db.backends.sqlite3.client import DatabaseClient from django.db.backends.sqlite3.creation import DatabaseCreation from django.db.backends.sqlite3.introspection import DatabaseIntrospection +from django.db.backends.sqlite3.schema import DatabaseSchemaEditor from django.utils.dateparse import parse_date, parse_datetime, parse_time from django.utils.functional import cached_property from django.utils.safestring import SafeString @@ -336,6 +337,10 @@ class DatabaseWrapper(BaseDatabaseWrapper): if self.settings_dict['NAME'] != ":memory:": BaseDatabaseWrapper.close(self) + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) + FORMAT_QMARK_REGEX = re.compile(r'(? Date: Thu, 30 Aug 2012 23:11:56 +0100 Subject: [PATCH 010/161] db_index alteration mostly working --- django/db/backends/mysql/introspection.py | 19 +++- django/db/backends/mysql/schema.py | 2 +- .../postgresql_psycopg2/introspection.py | 107 +++++++++++++----- django/db/backends/schema.py | 52 +++++++-- tests/modeltests/schema/models.py | 2 +- tests/modeltests/schema/tests.py | 85 +++++++++++++- 6 files changed, 225 insertions(+), 42 deletions(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 48d6905092..022e3b74cb 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -104,8 +104,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): def get_constraints(self, cursor, table_name): """ - Retrieves any constraints (unique, pk, fk, check) across one or more columns. 
- Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool, 'foreign_key': None|(tbl, col)}} + Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. """ constraints = {} # Get the actual constraint names and columns @@ -124,6 +123,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): 'columns': set(), 'primary_key': False, 'unique': False, + 'index': False, + 'check': False, 'foreign_key': (ref_table, ref_column) if ref_column else None, } constraints[constraint]['columns'].add(column) @@ -142,5 +143,19 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): constraints[constraint]['unique'] = True elif kind.lower() == "unique": constraints[constraint]['unique'] = True + # Now add in the indexes + cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name)) + for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]: + if index not in constraints: + constraints[index] = { + 'columns': set(), + 'primary_key': False, + 'unique': False, + 'index': True, + 'check': False, + 'foreign_key': None, + } + constraints[index]['index'] = True + constraints[index]['columns'].add(column) # Return return constraints diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py index efc469d9fb..c5c2e5cf1f 100644 --- a/django/db/backends/mysql/schema.py +++ b/django/db/backends/mysql/schema.py @@ -15,7 +15,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)" sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s" - sql_delete_index = "DROP INDEX %(name)s ON %(table_name)s" + sql_delete_index = "DROP INDEX %(name)s ON %(table)s" sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index d85bdc9d6b..1a08984bd2 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -91,32 +91,87 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): def get_constraints(self, cursor, table_name): """ - Retrieves any constraints (unique, pk, fk, check) across one or more columns. - Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} + Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. 
""" constraints = {} - # Loop over the constraint tables, collecting things as constraints - ifsc_tables = ["constraint_column_usage", "key_column_usage"] - for ifsc_table in ifsc_tables: - cursor.execute(""" - SELECT kc.constraint_name, kc.column_name, c.constraint_type - FROM information_schema.%s AS kc - JOIN information_schema.table_constraints AS c ON - kc.table_schema = c.table_schema AND - kc.table_name = c.table_name AND - kc.constraint_name = c.constraint_name - WHERE - kc.table_schema = %%s AND - kc.table_name = %%s - """ % ifsc_table, ["public", table_name]) - for constraint, column, kind in cursor.fetchall(): - # If we're the first column, make the record - if constraint not in constraints: - constraints[constraint] = { - "columns": set(), - "primary_key": kind.lower() == "primary key", - "unique": kind.lower() in ["primary key", "unique"], - } - # Record the details - constraints[constraint]['columns'].add(column) + # Loop over the key table, collecting things as constraints + # This will get PKs, FKs, and uniques, but not CHECK + cursor.execute(""" + SELECT + kc.constraint_name, + kc.column_name, + c.constraint_type, + array(SELECT table_name::text || '.' || column_name::text FROM information_schema.constraint_column_usage WHERE constraint_name = kc.constraint_name) + FROM information_schema.key_column_usage AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %s AND + kc.table_name = %s + """, ["public", table_name]) + for constraint, column, kind, used_cols in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": set(), + "primary_key": kind.lower() == "primary key", + "unique": kind.lower() in ["primary key", "unique"], + "foreign_key": set([tuple(x.split(".", 1)) for x in used_cols]) if kind.lower() == "foreign key" else None, + "check": False, + "index": False, + } + # Record the details + constraints[constraint]['columns'].add(column) + # Now get CHECK constraint columns + cursor.execute(""" + SELECT kc.constraint_name, kc.column_name + FROM information_schema.constraint_column_usage AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + c.constraint_type = 'CHECK' AND + kc.table_schema = %s AND + kc.table_name = %s + """, ["public", table_name]) + for constraint, column, kind in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": set(), + "primary_key": False, + "unique": False, + "foreign_key": False, + "check": True, + "index": False, + } + # Record the details + constraints[constraint]['columns'].add(column) + # Now get indexes + cursor.execute(""" + SELECT c2.relname, attr.attname, idx.indkey, idx.indisunique, idx.indisprimary + FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, + pg_catalog.pg_index idx, pg_catalog.pg_attribute attr + WHERE c.oid = idx.indrelid + AND idx.indexrelid = c2.oid + AND attr.attrelid = c.oid + AND attr.attnum = idx.indkey[0] + AND c.relname = %s + """, [table_name]) + for index, column, coli, unique, primary in cursor.fetchall(): + # If we're the first column, make the record + if index not in constraints: + constraints[index] = { + "columns": set(), + "primary_key": False, + "unique": 
False, + "foreign_key": False, + "check": False, + "index": True, + } + # Record the details + constraints[index]['columns'].add(column) return constraints diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 788be3be35..88fc4b7e62 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -54,7 +54,7 @@ class BaseDatabaseSchemaEditor(object): sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED" sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" - sql_create_index = "CREATE %(unique)s INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;" + sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;" sql_delete_index = "DROP INDEX %(name)s" sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)" @@ -181,7 +181,6 @@ class BaseDatabaseSchemaEditor(object): if field.db_index: self.deferred_sql.append( self.sql_create_index % { - "unique": "", "name": self._create_index_name(model, [field.column], suffix=""), "table": self.quote_name(model._meta.db_table), "columns": self.quote_name(field.column), @@ -350,12 +349,13 @@ class BaseDatabaseSchemaEditor(object): } self.execute(sql) - def alter_field(self, model, old_field, new_field): + def alter_field(self, model, old_field, new_field, strict=False): """ Allows a field's type, uniqueness, nullability, default, column, constraints etc. to be modified. Requires a copy of the old field as well so we can only perform changes that are required. + If strict is true, raises errors if the old column does not match old_field precisely. """ # Ensure this field is even column-based old_type = old_field.db_type(connection=self.connection) @@ -372,18 +372,36 @@ class BaseDatabaseSchemaEditor(object): if old_field.unique and not new_field.unique: # Find the unique constraint for this field constraint_names = self._constraint_names(model, [old_field.column], unique=True) - if len(constraint_names) != 1: + if strict and len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, )) - self.execute( - self.sql_delete_unique % { - "table": self.quote_name(model._meta.db_table), - "name": constraint_names[0], - }, - ) + for constraint_name in constraint_names: + self.execute( + self.sql_delete_unique % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_name, + }, + ) + # Removed an index? + if old_field.db_index and not new_field.db_index and not old_field.unique and not new_field.unique: + # Find the index for this field + index_names = self._constraint_names(model, [old_field.column], index=True) + if strict and len(index_names) != 1: + raise ValueError("Found wrong number (%s) of indexes for %s.%s" % ( + len(index_names), + model._meta.db_table, + old_field.column, + )) + for index_name in index_names: + self.execute( + self.sql_delete_index % { + "table": self.quote_name(model._meta.db_table), + "name": index_name, + } + ) # Have they renamed the column? if old_field.column != new_field.column: self.execute(self.sql_rename_column % { @@ -463,6 +481,16 @@ class BaseDatabaseSchemaEditor(object): "columns": self.quote_name(new_field.column), } ) + # Added an index? 
+ if not old_field.db_index and new_field.db_index and not old_field.unique and not new_field.unique: + self.execute( + self.sql_create_index % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_uniq"), + "columns": self.quote_name(new_field.column), + "extra": "", + } + ) def _type_for_alter(self, field): """ @@ -490,7 +518,7 @@ class BaseDatabaseSchemaEditor(object): index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) return index_name - def _constraint_names(self, model, column_names, unique=None, primary_key=None): + def _constraint_names(self, model, column_names, unique=None, primary_key=None, index=None): "Returns all constraint names matching the columns and conditions" column_names = set(column_names) constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) @@ -501,5 +529,7 @@ class BaseDatabaseSchemaEditor(object): continue if primary_key is not None and infodict['primary_key'] != unique: continue + if index is not None and infodict['index'] != index: + continue result.append(name) return result diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index 053aa026f7..9d0a8a2074 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -21,7 +21,7 @@ class AuthorWithM2M(models.Model): class Book(models.Model): author = models.ForeignKey(Author) - title = models.CharField(max_length=100) + title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() #tags = models.ManyToManyField("Tag", related_name="books") diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 3d0b106405..6ef24ca11e 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -179,7 +179,8 @@ class SchemaTests(TestCase): Author, Author._meta.get_field_by_name("name")[0], new_field, - ) + strict=True, + ) editor.commit() # Ensure the field is right afterwards columns = self.column_classes(Author) @@ -208,6 +209,7 @@ class SchemaTests(TestCase): Author, Author._meta.get_field_by_name("name")[0], new_field, + strict = True, ) editor.commit() # Ensure the field is right afterwards @@ -276,6 +278,7 @@ class SchemaTests(TestCase): Tag, Tag._meta.get_field_by_name("slug")[0], new_field, + strict = True, ) editor.commit() # Ensure the field is no longer unique @@ -291,6 +294,7 @@ class SchemaTests(TestCase): Tag, new_field, new_new_field, + strict = True, ) editor.commit() # Ensure the field is unique again @@ -306,6 +310,7 @@ class SchemaTests(TestCase): Tag, Tag._meta.get_field_by_name("slug")[0], TagUniqueRename._meta.get_field_by_name("slug2")[0], + strict = True, ) editor.commit() # Ensure the field is still unique @@ -395,3 +400,81 @@ class SchemaTests(TestCase): Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") + + def test_indexes(self): + """ + Tests creation/altering of indexes + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.create_model(Book) + editor.commit() + # Ensure the table is there and has the right index + self.assertIn( + "title", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Alter to remove the index + new_field = CharField(max_length=100, db_index=False) + 
new_field.set_attributes_from_name("title") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Book, + Book._meta.get_field_by_name("title")[0], + new_field, + strict = True, + ) + editor.commit() + # Ensure the table is there and has no index + self.assertNotIn( + "title", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Alter to re-add the index + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Book, + new_field, + Book._meta.get_field_by_name("title")[0], + strict = True, + ) + editor.commit() + # Ensure the table is there and has the index again + self.assertIn( + "title", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Add a unique column, verify that creates an implicit index + new_field = CharField(max_length=20, unique=True) + new_field.set_attributes_from_name("slug") + editor = connection.schema_editor() + editor.start() + editor.create_field( + Book, + new_field, + ) + editor.commit() + self.assertIn( + "slug", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Remove the unique, check the index goes with it + new_field2 = CharField(max_length=20, unique=False) + new_field2.set_attributes_from_name("slug") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Book, + new_field, + new_field2, + strict = True, + ) + editor.commit() + self.assertNotIn( + "slug", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) From cd583d6dbd222ae61331a6965b0e1fc86c974c50 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 4 Sep 2012 12:53:31 -0400 Subject: [PATCH 011/161] Implement primary key changing --- django/db/backends/mysql/introspection.py | 7 +++- django/db/backends/mysql/schema.py | 3 ++ django/db/backends/schema.py | 45 ++++++++++++++++++----- tests/modeltests/schema/tests.py | 34 +++++++++++++++++ 4 files changed, 78 insertions(+), 11 deletions(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 022e3b74cb..310f270a0b 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -99,7 +99,12 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for row in rows: if row[2] in multicol_indexes: continue - indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])} + if row[4] not in indexes: + indexes[row[4]] = {'primary_key': False, 'unique': False} + if row[2] == 'PRIMARY': + indexes[row[4]]['primary_key'] = True + if not bool(row[1]): + indexes[row[4]]['unique'] = True return indexes def get_constraints(self, cursor, table_name): diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py index c5c2e5cf1f..08e883d80c 100644 --- a/django/db/backends/mysql/schema.py +++ b/django/db/backends/mysql/schema.py @@ -21,3 +21,6 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;' alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;' + + sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" + sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 88fc4b7e62..ae80f60c30 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -24,8 +24,6 @@ class BaseDatabaseSchemaEditor(object): - Repointing of 
FKs - Repointing of M2Ms - Check constraints (PosIntField) - - PK changing - - db_index on alter field """ # Overrideable SQL templates @@ -57,8 +55,8 @@ class BaseDatabaseSchemaEditor(object): sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;" sql_delete_index = "DROP INDEX %(name)s" - sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)" - sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s" + sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" + sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" def __init__(self, connection): self.connection = connection @@ -135,9 +133,10 @@ class BaseDatabaseSchemaEditor(object): elif field.unique: sql += " UNIQUE" # If we were told to include a default value, do so - if include_default: + default_value = self.effective_default(field) + if include_default and default_value is not None: sql += " DEFAULT %s" - params += [self.effective_default(field)] + params += [default_value] # Return the sql return sql, params @@ -491,6 +490,32 @@ class BaseDatabaseSchemaEditor(object): "extra": "", } ) + # Changed to become primary key? + # Note that we don't detect unsetting of a PK, as we assume another field + # will always come along and replace it. + if not old_field.primary_key and new_field.primary_key: + # First, drop the old PK + constraint_names = self._constraint_names(model, primary_key=True) + if strict and len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of PK constraints for %s" % ( + len(constraint_names), + model._meta.db_table, + )) + for constraint_name in constraint_names: + self.execute( + self.sql_delete_pk % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_name, + }, + ) + # Make the new one + self.execute( + self.sql_create_pk % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_pk"), + "columns": self.quote_name(new_field.column), + } + ) def _type_for_alter(self, field): """ @@ -518,16 +543,16 @@ class BaseDatabaseSchemaEditor(object): index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) return index_name - def _constraint_names(self, model, column_names, unique=None, primary_key=None, index=None): + def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None): "Returns all constraint names matching the columns and conditions" - column_names = set(column_names) + column_names = set(column_names) if column_names else None constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) result = [] for name, infodict in constraints.items(): - if column_names == infodict['columns']: + if column_names is None or column_names == infodict['columns']: if unique is not None and infodict['unique'] != unique: continue - if primary_key is not None and infodict['primary_key'] != unique: + if primary_key is not None and infodict['primary_key'] != primary_key: continue if index is not None and infodict['index'] != index: continue diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 6ef24ca11e..db374dc7ad 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -478,3 +478,37 @@ class SchemaTests(TestCase): "slug", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), ) + + def 
test_primary_key(self): + """ + Tests altering of the primary key + """ + # Create the table + editor = connection.schema_editor() + editor.start() + editor.create_model(Tag) + editor.commit() + # Ensure the table is there and has the right PK + self.assertTrue( + connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['id']['primary_key'], + ) + # Alter to change the PK + new_field = SlugField(primary_key=True) + new_field.set_attributes_from_name("slug") + editor = connection.schema_editor() + editor.start() + editor.delete_field(Tag, Tag._meta.get_field_by_name("id")[0]) + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + new_field, + ) + editor.commit() + # Ensure the PK changed + self.assertNotIn( + 'id', + connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table), + ) + self.assertTrue( + connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['slug']['primary_key'], + ) From 7e81213b5a4570926afbd67eff7f2675f636d720 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 5 Sep 2012 15:12:39 -0400 Subject: [PATCH 012/161] Add some state management methods to AppCache. --- django/db/models/loading.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 7a9cb2cb41..0ed6caffa4 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -244,6 +244,37 @@ class AppCache(object): model_dict[model_name] = model self._get_models_cache.clear() + def save_state(self): + """ + Returns an object that contains the current AppCache state. + Can be provided to restore_state to undo actions. + """ + return { + "app_store": SortedDict(self.app_store.items()), + "app_labels": dict(self.app_labels.items()), + "app_models": SortedDict(self.app_models.items()), + "app_errors": dict(self.app_errors.items()), + } + + def restore_state(self, state): + """ + Restores the AppCache to a previous state from save_state. + """ + self.app_store = state['app_store'] + self.app_labels = state['app_labels'] + self.app_models = state['app_models'] + self.app_errors = state['app_errors'] + + def unregister_all(self): + """ + Wipes the AppCache clean of all registered models. + Used for things like migration libraries' fake ORMs. + """ + self.app_store = SortedDict() + self.app_labels = {} + self.app_models = SortedDict() + self.app_errors = {} + cache = AppCache() # These methods were always module level, so are kept that way for backwards From d683263f97aedd67f17f4b78ac65d915f4e70d36 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Sep 2012 12:51:11 -0400 Subject: [PATCH 013/161] Added SQLite backend which passes all current tests --- django/db/backends/__init__.py | 3 + django/db/backends/schema.py | 4 +- django/db/backends/sqlite3/base.py | 1 + django/db/backends/sqlite3/introspection.py | 36 ++++++- django/db/backends/sqlite3/schema.py | 110 ++++++++++++++++++++ django/db/models/loading.py | 21 ++++ tests/modeltests/schema/models.py | 11 ++ tests/modeltests/schema/tests.py | 24 +++++- 8 files changed, 199 insertions(+), 11 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index efea9f802e..39883de35c 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -432,6 +432,9 @@ class BaseDatabaseFeatures(object): # What's the maximum length for index names? max_index_name_length = 63 + # Does it support foreign keys?
+ supports_foreign_keys = True + def __init__(self, connection): self.connection = connection diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index ae80f60c30..a9601221bb 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -187,7 +187,7 @@ class BaseDatabaseSchemaEditor(object): } ) # FK - if field.rel: + if field.rel and self.connection.features.supports_foreign_keys: to_table = field.rel.to._meta.db_table to_column = field.rel.to._meta.get_field(field.rel.field_name).column self.deferred_sql.append( @@ -311,7 +311,7 @@ class BaseDatabaseSchemaEditor(object): } } # Add any FK constraints later - if field.rel: + if field.rel and self.connection.features.supports_foreign_keys: to_table = field.rel.to._meta.db_table to_column = field.rel.to._meta.get_field(field.rel.field_name).column self.deferred_sql.append( diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index d0a6fda78e..45e7264e5c 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -96,6 +96,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_mixed_date_datetime_comparisons = False has_bulk_insert = True can_combine_inserts_with_and_without_auto_increment_pk = False + supports_foreign_keys = False @cached_property def supports_stddev(self): diff --git a/django/db/backends/sqlite3/introspection.py b/django/db/backends/sqlite3/introspection.py index 1df4c18c1c..62c53e075a 100644 --- a/django/db/backends/sqlite3/introspection.py +++ b/django/db/backends/sqlite3/introspection.py @@ -154,7 +154,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): if len(info) != 1: continue name = info[0][2] # seqno, cid, name - indexes[name] = {'primary_key': False, + indexes[name] = {'primary_key': indexes.get(name, {}).get("primary_key", False), 'unique': unique} return indexes @@ -182,3 +182,37 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): 'null_ok': not field[3], 'pk': field[5] # undocumented } for field in cursor.fetchall()] + + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. 
+ """ + constraints = {} + # Get the index info + cursor.execute("PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name)) + for number, index, unique in cursor.fetchall(): + # Get the index info for that index + cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index)) + for index_rank, column_rank, column in cursor.fetchall(): + if index not in constraints: + constraints[index] = { + "columns": set(), + "primary_key": False, + "unique": bool(unique), + "foreign_key": False, + "check": False, + "index": True, + } + constraints[index]['columns'].add(column) + # Get the PK + pk_column = self.get_primary_key_column(cursor, table_name) + if pk_column: + constraints["__primary__"] = { + "columns": set([pk_column]), + "primary_key": True, + "unique": False, # It's not actually a unique constraint + "foreign_key": False, + "check": False, + "index": False, + } + return constraints diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index bfd943c6fb..7938ad79cf 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -1,6 +1,116 @@ from django.db.backends.schema import BaseDatabaseSchemaEditor +from django.db.models.loading import cache +from django.db.models.fields.related import ManyToManyField class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_delete_table = "DROP TABLE %(table)s" + + def _remake_table(self, model, create_fields=[], delete_fields=[], alter_fields=[], rename_fields=[], override_uniques=None): + "Shortcut to transform a model from old_model into new_model" + # Work out the new fields dict / mapping + body = dict((f.name, f) for f in model._meta.local_fields) + mapping = dict((f.column, f.column) for f in model._meta.local_fields) + # If any of the new or altered fields is introducing a new PK, + # remove the old one + restore_pk_field = None + if any(f.primary_key for f in create_fields) or any(n.primary_key for o, n in alter_fields): + for name, field in list(body.items()): + if field.primary_key: + field.primary_key = False + restore_pk_field = field + if field.auto_created: + del body[name] + del mapping[field.column] + # Add in any created fields + for field in create_fields: + body[field.name] = field + # Add in any altered fields + for (old_field, new_field) in alter_fields: + del body[old_field.name] + del mapping[old_field.column] + body[new_field.name] = new_field + mapping[new_field.column] = old_field.column + # Remove any deleted fields + for field in delete_fields: + del body[field.name] + del mapping[field.column] + # Construct a new model for the new state + meta_contents = { + 'app_label': model._meta.app_label, + 'db_table': model._meta.db_table + "__new", + 'unique_together': model._meta.unique_together if override_uniques is None else override_uniques, + } + meta = type("Meta", tuple(), meta_contents) + body['Meta'] = meta + body['__module__'] = "__fake__" + with cache.temporary_state(): + del cache.app_models[model._meta.app_label][model._meta.object_name.lower()] + temp_model = type(model._meta.object_name, model.__bases__, body) + # Create a new table with that format + self.create_model(temp_model) + # Copy data from the old table + field_maps = list(mapping.items()) + self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % ( + self.quote_name(temp_model._meta.db_table), + ', '.join([x for x, y in field_maps]), + ', '.join([y for x, y in field_maps]), + self.quote_name(model._meta.db_table), + )) + # Delete the old table + 
self.delete_model(model) + # Rename the new to the old + self.alter_db_table(model, temp_model._meta.db_table, model._meta.db_table) + # Run deferred SQL on correct table + for sql in self.deferred_sql: + self.execute(sql.replace(temp_model._meta.db_table, model._meta.db_table)) + self.deferred_sql = [] + # Fix any PK-removed field + if restore_pk_field: + restore_pk_field.primary_key = True + + def create_field(self, model, field): + """ + Creates a field on a model. + Usually involves adding a column, but may involve adding a + table instead (for M2M fields) + """ + # Special-case implicit M2M tables + if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: + return self.create_model(field.rel.through) + # Detect bad field combinations + if (not field.null and + (not field.has_default() or field.get_default() is None) and + not field.empty_strings_allowed): + raise ValueError("You cannot add a null=False column without a default value on SQLite.") + self._remake_table(model, create_fields=[field]) + + def delete_field(self, model, field): + """ + Removes a field from a model. Usually involves deleting a column, + but for M2Ms may involve deleting a table. + """ + # Special-case implicit M2M tables + if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: + return self.delete_model(field.rel.through) + # For everything else, remake. + self._remake_table(model, delete_fields=[field]) + + def alter_field(self, model, old_field, new_field, strict=False): + # Ensure this field is even column-based + old_type = old_field.db_type(connection=self.connection) + new_type = self._type_for_alter(new_field) + if old_type is None and new_type is None: + # TODO: Handle M2M fields being repointed + return + elif old_type is None or new_type is None: + raise ValueError("Cannot alter field %s into %s - they are not compatible types" % ( + old_field, + new_field, + )) + # Alter by remaking table + self._remake_table(model, alter_fields=[(old_field, new_field)]) + + def alter_unique_together(self, model, old_unique_together, new_unique_together): + self._remake_table(model, override_uniques=new_unique_together) diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 0ed6caffa4..e0d943853b 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -265,6 +265,10 @@ class AppCache(object): self.app_models = state['app_models'] self.app_errors = state['app_errors'] + def temporary_state(self): + "Returns a context manager that restores the state on exit" + return StateContextManager(self) + def unregister_all(self): """ Wipes the AppCache clean of all registered models. @@ -275,6 +279,23 @@ class AppCache(object): self.app_models = SortedDict() self.app_errors = {} + +class StateContextManager(object): + """ + Context manager for locking cache state. + Useful for making temporary models you don't want to stay in the cache. 
+ """ + + def __init__(self, cache): + self.cache = cache + + def __enter__(self): + self.state = self.cache.save_state() + + def __exit__(self, type, value, traceback): + self.cache.restore_state(self.state) + + cache = AppCache() # These methods were always module level, so are kept that way for backwards diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index 9d0a8a2074..b18d2a9c16 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -29,6 +29,17 @@ class Book(models.Model): managed = False +class BookWithSlug(models.Model): + author = models.ForeignKey(Author) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + slug = models.CharField(max_length=20, unique=True) + + class Meta: + managed = False + db_table = "schema_book" + + class Tag(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(unique=True) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index db374dc7ad..c76ca8ca16 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -2,11 +2,12 @@ from __future__ import absolute_import import copy import datetime from django.test import TestCase +from django.utils.unittest import skipUnless from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField from django.db.models.loading import cache -from .models import Author, Book, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest +from .models import Author, Book, BookWithSlug, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest class SchemaTests(TestCase): @@ -18,7 +19,7 @@ class SchemaTests(TestCase): as the code it is testing. 
""" - models = [Author, Book, AuthorWithM2M, Tag, UniqueTest] + models = [Author, Book, BookWithSlug, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest] # Utility functions @@ -70,13 +71,21 @@ class SchemaTests(TestCase): def column_classes(self, model): cursor = connection.cursor() - return dict( + columns = dict( (d[0], (connection.introspection.get_field_type(d[1], d), d)) for d in connection.introspection.get_table_description( cursor, model._meta.db_table, ) ) + # SQLite has a different format for field_type + for name, (type, desc) in columns.items(): + if isinstance(type, tuple): + columns[name] = (type[0], desc) + # SQLite also doesn't error properly + if not columns: + raise DatabaseError("Table does not exist (empty pragma)") + return columns # Tests @@ -104,6 +113,7 @@ class SchemaTests(TestCase): lambda: list(Author.objects.all()), ) + @skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_creation_fk(self): "Tests that creating tables out of FK order works" # Create the table @@ -449,13 +459,11 @@ class SchemaTests(TestCase): connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index - new_field = CharField(max_length=20, unique=True) - new_field.set_attributes_from_name("slug") editor = connection.schema_editor() editor.start() editor.create_field( Book, - new_field, + BookWithSlug._meta.get_field_by_name("slug")[0], ) editor.commit() self.assertIn( @@ -468,8 +476,8 @@ class SchemaTests(TestCase): editor = connection.schema_editor() editor.start() editor.alter_field( - Book, - new_field, + BookWithSlug, + BookWithSlug._meta.get_field_by_name("slug")[0], new_field2, strict = True, ) From a92bae0f0622fb45afb94bf5448b49bc32ebb643 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Sep 2012 13:31:05 -0400 Subject: [PATCH 014/161] Repoint ForeignKeys when their to= changes. --- .../postgresql_psycopg2/introspection.py | 2 +- django/db/backends/schema.py | 32 +++++++++++++++++-- tests/modeltests/schema/tests.py | 29 ++++++++++++++--- 3 files changed, 56 insertions(+), 7 deletions(-) diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index 916073c09f..580d16d1fb 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -118,7 +118,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): "columns": set(), "primary_key": kind.lower() == "primary key", "unique": kind.lower() in ["primary key", "unique"], - "foreign_key": set([tuple(x.split(".", 1)) for x in used_cols]) if kind.lower() == "foreign key" else None, + "foreign_key": tuple(used_cols[0].split(".", 1)) if kind.lower() == "foreign key" else None, "check": False, "index": False, } diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index a9601221bb..55d687a7f6 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -21,7 +21,6 @@ class BaseDatabaseSchemaEditor(object): commit() is called. 
TODO: - - Repointing of FKs - Repointing of M2Ms - Check constraints (PosIntField) """ @@ -401,6 +400,22 @@ class BaseDatabaseSchemaEditor(object): "name": index_name, } ) + # Drop any FK constraints, we'll remake them later + if getattr(old_field, "rel"): + fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) + if strict and len(fk_names) != 1: + raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( + len(fk_names), + model._meta.db_table, + old_field.column, + )) + for fk_name in fk_names: + self.execute( + self.sql_delete_fk % { + "table": self.quote_name(model._meta.db_table), + "name": fk_name, + } + ) # Have they renamed the column? if old_field.column != new_field.column: self.execute(self.sql_rename_column % { @@ -516,6 +531,17 @@ class BaseDatabaseSchemaEditor(object): "columns": self.quote_name(new_field.column), } ) + # Does it have a foreign key? + if getattr(new_field, "rel"): + self.execute( + self.sql_create_fk % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_fk"), + "column": self.quote_name(new_field.column), + "to_table": self.quote_name(new_field.rel.to._meta.db_table), + "to_column": self.quote_name(new_field.rel.get_related_field().column), + } + ) def _type_for_alter(self, field): """ @@ -543,7 +569,7 @@ class BaseDatabaseSchemaEditor(object): index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) return index_name - def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None): + def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None): "Returns all constraint names matching the columns and conditions" column_names = set(column_names) if column_names else None constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) @@ -556,5 +582,7 @@ class BaseDatabaseSchemaEditor(object): continue if index is not None and infodict['index'] != index: continue + if foreign_key is not None and not infodict['foreign_key']: + continue result.append(name) return result diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index c76ca8ca16..310f74a4e1 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -5,7 +5,7 @@ from django.test import TestCase from django.utils.unittest import skipUnless from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField -from django.db.models.fields.related import ManyToManyField +from django.db.models.fields.related import ManyToManyField, ForeignKey from django.db.models.loading import cache from .models import Author, Book, BookWithSlug, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest @@ -114,15 +114,16 @@ class SchemaTests(TestCase): ) @skipUnless(connection.features.supports_foreign_keys, "No FK support") - def test_creation_fk(self): - "Tests that creating tables out of FK order works" + def test_fk(self): + "Tests that creating tables out of FK order, then repointing, works" # Create the table editor = connection.schema_editor() editor.start() editor.create_model(Book) editor.create_model(Author) + editor.create_model(Tag) editor.commit() - # Check that both tables are there + # Check that initial tables are there try: list(Author.objects.all()) except DatabaseError, e: @@ -139,6 
+140,26 @@ class SchemaTests(TestCase): pub_date = datetime.datetime.now(), ) connection.commit() + # Repoint the FK constraint + new_field = ForeignKey(Tag) + new_field.set_attributes_from_name("author") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Book, + Book._meta.get_field_by_name("author")[0], + new_field, + strict=True, + ) + editor.commit() + # Make sure the new FK constraint is present + constraints = connection.introspection.get_constraints(connection.cursor(), Book._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == set(["author_id"]) and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) + break + else: + self.fail("No FK constraint for author_id found") def test_create_field(self): """ From 375178fc19c1170fe046ad26befeba02fc19548c Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Sep 2012 14:39:22 -0400 Subject: [PATCH 015/161] Add M2M repointing --- django/db/backends/schema.py | 32 +++++++++----- django/db/backends/sqlite3/schema.py | 29 +++++++++++-- tests/modeltests/schema/models.py | 10 +++++ tests/modeltests/schema/tests.py | 64 +++++++++++++++++++++++++++- 4 files changed, 119 insertions(+), 16 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 55d687a7f6..bd86d52e88 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -21,7 +21,6 @@ class BaseDatabaseSchemaEditor(object): commit() is called. TODO: - - Repointing of M2Ms - Check constraints (PosIntField) """ @@ -154,13 +153,13 @@ class BaseDatabaseSchemaEditor(object): # Actions - def create_model(self, model): + def create_model(self, model, force=False): """ Takes a model and creates a table for it in the database. Will also create any accompanying indexes or unique constraints. """ # Do nothing if this is an unmanaged or proxy model - if not model._meta.managed or model._meta.proxy: + if not force and (not model._meta.managed or model._meta.proxy): return # Create column SQL, add FK deferreds if needed column_sqls = [] @@ -214,13 +213,16 @@ class BaseDatabaseSchemaEditor(object): "definition": ", ".join(column_sqls) } self.execute(sql, params) + # Make M2M tables + for field in model._meta.local_many_to_many: + self.create_model(field.rel.through, force=True) - def delete_model(self, model): + def delete_model(self, model, force=False): """ Deletes a model from the database. 
""" # Do nothing if this is an unmanaged or proxy model - if not model._meta.managed or model._meta.proxy: + if not force and (not model._meta.managed or model._meta.proxy): return # Delete the table self.execute(self.sql_delete_table % { @@ -287,7 +289,7 @@ class BaseDatabaseSchemaEditor(object): """ # Special-case implicit M2M tables if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: - return self.create_model(field.rel.through) + return self.create_model(field.rel.through, force=True) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it @@ -358,11 +360,10 @@ class BaseDatabaseSchemaEditor(object): # Ensure this field is even column-based old_type = old_field.db_type(connection=self.connection) new_type = self._type_for_alter(new_field) - if old_type is None and new_type is None: - # TODO: Handle M2M fields being repointed - return + if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created): + return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None or new_type is None: - raise ValueError("Cannot alter field %s into %s - they are not compatible types" % ( + raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % ( old_field, new_field, )) @@ -543,6 +544,17 @@ class BaseDatabaseSchemaEditor(object): } ) + def _alter_many_to_many(self, model, old_field, new_field, strict): + "Alters M2Ms to repoint their to= endpoints." + # Rename the through table + self.alter_db_table(old_field.rel.through, old_field.rel.through._meta.db_table, new_field.rel.through._meta.db_table) + # Repoint the FK to the other side + self.alter_field( + new_field.rel.through, + old_field.rel.through._meta.get_field_by_name(old_field.m2m_reverse_field_name())[0], + new_field.rel.through._meta.get_field_by_name(new_field.m2m_reverse_field_name())[0], + ) + def _type_for_alter(self, field): """ Returns a field's type suitable for ALTER COLUMN. 
diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index 7938ad79cf..e660f26c87 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -101,11 +101,10 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): # Ensure this field is even column-based old_type = old_field.db_type(connection=self.connection) new_type = self._type_for_alter(new_field) - if old_type is None and new_type is None: - # TODO: Handle M2M fields being repointed - return + if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created): + return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None or new_type is None: - raise ValueError("Cannot alter field %s into %s - they are not compatible types" % ( + raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % ( old_field, new_field, )) @@ -114,3 +113,25 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def alter_unique_together(self, model, old_unique_together, new_unique_together): self._remake_table(model, override_uniques=new_unique_together) + + def _alter_many_to_many(self, model, old_field, new_field, strict): + "Alters M2Ms to repoint their to= endpoints." + # Make a new through table + self.create_model(new_field.rel.through) + # Copy the data across + self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % ( + self.quote_name(new_field.rel.through._meta.db_table), + ', '.join([ + "id", + new_field.m2m_column_name(), + new_field.m2m_reverse_name(), + ]), + ', '.join([ + "id", + old_field.m2m_column_name(), + old_field.m2m_reverse_name(), + ]), + self.quote_name(old_field.rel.through._meta.db_table), + )) + # Delete the old through table + self.delete_model(old_field.rel.through, force=True) diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index b18d2a9c16..76a8cf3687 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -29,6 +29,16 @@ class Book(models.Model): managed = False +class BookWithM2M(models.Model): + author = models.ForeignKey(Author) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + tags = models.ManyToManyField("Tag", related_name="books") + + class Meta: + managed = False + + class BookWithSlug(models.Model): author = models.ForeignKey(Author) title = models.CharField(max_length=100, db_index=True) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 310f74a4e1..b3fc5d1c80 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -7,7 +7,7 @@ from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey from django.db.models.loading import cache -from .models import Author, Book, BookWithSlug, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest +from .models import Author, Book, BookWithSlug, BookWithM2M, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest class SchemaTests(TestCase): @@ -19,7 +19,7 @@ class SchemaTests(TestCase): as the code it is testing. 
""" - models = [Author, Book, BookWithSlug, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest] + models = [Author, Book, BookWithSlug, BookWithM2M, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest] # Utility functions @@ -248,6 +248,21 @@ class SchemaTests(TestCase): self.assertEqual(columns['display_name'][0], "CharField") self.assertNotIn("name", columns) + def test_m2m_create(self): + """ + Tests M2M fields on models during creation + """ + # Create the tables + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.create_model(Tag) + editor.create_model(BookWithM2M) + editor.commit() + # Ensure there is now an m2m table there + columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) + self.assertEqual(columns['tag_id'][0], "IntegerField") + def test_m2m(self): """ Tests adding/removing M2M fields on models @@ -287,6 +302,51 @@ class SchemaTests(TestCase): self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) connection.rollback() + def test_m2m_repoint(self): + """ + Tests repointing M2M fields + """ + # Create the tables + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.create_model(BookWithM2M) + editor.create_model(Tag) + editor.create_model(UniqueTest) + editor.commit() + # Ensure the M2M exists and points to Tag + constraints = connection.introspection.get_constraints(connection.cursor(), BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table) + if connection.features.supports_foreign_keys: + for name, details in constraints.items(): + if details['columns'] == set(["tag_id"]) and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) + break + else: + self.fail("No FK constraint for tag_id found") + # Repoint the M2M + new_field = ManyToManyField(UniqueTest) + new_field.contribute_to_class(BookWithM2M, "uniques") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + BookWithM2M._meta.get_field_by_name("tags")[0], + new_field, + ) + editor.commit() + # Ensure old M2M is gone + self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) + connection.rollback() + # Ensure the new M2M exists and points to UniqueTest + constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table) + if connection.features.supports_foreign_keys: + for name, details in constraints.items(): + if details['columns'] == set(["uniquetest_id"]) and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) + break + else: + self.fail("No FK constraint for tag_id found") + def test_unique(self): """ Tests removing and adding unique constraints to a single column. 
From ca9c3cd39fade827cced1b5198dd37bb80c208b0 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Sep 2012 15:40:59 -0400 Subject: [PATCH 016/161] Add check constraint support - needed a few Field changes --- django/db/backends/__init__.py | 3 + django/db/backends/creation.py | 1 + django/db/backends/mysql/base.py | 1 + .../backends/postgresql_psycopg2/creation.py | 9 ++- .../postgresql_psycopg2/introspection.py | 2 +- django/db/backends/schema.py | 61 ++++++++++++++----- django/db/backends/sqlite3/base.py | 1 + django/db/backends/sqlite3/schema.py | 6 +- django/db/models/fields/__init__.py | 26 +++++++- django/db/models/fields/related.py | 6 ++ tests/modeltests/schema/models.py | 1 + tests/modeltests/schema/tests.py | 50 +++++++++++++++ 12 files changed, 143 insertions(+), 24 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 39883de35c..021d9bd450 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -435,6 +435,9 @@ class BaseDatabaseFeatures(object): # Does it support foreign keys? supports_foreign_keys = True + # Does it support CHECK constraints? + supports_check_constraints = True + def __init__(self, connection): self.connection = connection diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py index 0cc01cc876..52d5edf57d 100644 --- a/django/db/backends/creation.py +++ b/django/db/backends/creation.py @@ -18,6 +18,7 @@ class BaseDatabaseCreation(object): destruction of test databases. """ data_types = {} + data_type_check_constraints = {} def __init__(self, connection): self.connection = connection diff --git a/django/db/backends/mysql/base.py b/django/db/backends/mysql/base.py index 10649b64b9..4694dcd46f 100644 --- a/django/db/backends/mysql/base.py +++ b/django/db/backends/mysql/base.py @@ -170,6 +170,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): requires_explicit_null_ordering_when_grouping = True allows_primary_key_0 = False uses_savepoints = True + supports_check_constraints = False def __init__(self, connection): super(DatabaseFeatures, self).__init__(connection) diff --git a/django/db/backends/postgresql_psycopg2/creation.py b/django/db/backends/postgresql_psycopg2/creation.py index ca389b9046..f131d14abe 100644 --- a/django/db/backends/postgresql_psycopg2/creation.py +++ b/django/db/backends/postgresql_psycopg2/creation.py @@ -26,14 +26,19 @@ class DatabaseCreation(BaseDatabaseCreation): 'GenericIPAddressField': 'inet', 'NullBooleanField': 'boolean', 'OneToOneField': 'integer', - 'PositiveIntegerField': 'integer CHECK ("%(column)s" >= 0)', - 'PositiveSmallIntegerField': 'smallint CHECK ("%(column)s" >= 0)', + 'PositiveIntegerField': 'integer', + 'PositiveSmallIntegerField': 'smallint', 'SlugField': 'varchar(%(max_length)s)', 'SmallIntegerField': 'smallint', 'TextField': 'text', 'TimeField': 'time', } + data_type_check_constraints = { + 'PositiveIntegerField': '"%(column)s" >= 0', + 'PositiveSmallIntegerField': '"%(column)s" >= 0', + } + def sql_table_creation_suffix(self): assert self.connection.settings_dict['TEST_COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time." 
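With the CHECK expressions split out of data_types, a backend now declares the bare column type and the constraint expression separately, and the schema editor (further down in this patch) recombines them into the column definition. Below is a minimal standalone sketch of that recombination using the PostgreSQL entries above; "height" is just an example column name and the helper function is not Django code.

# Standalone sketch: recombine a backend's type and check templates into a
# column definition. The column name "height" is only an example.
data_types = {"PositiveIntegerField": "integer"}
data_type_check_constraints = {"PositiveIntegerField": '"%(column)s" >= 0'}

def column_definition(internal_type, column):
    data = {"column": column}
    type_string = data_types[internal_type] % data
    check_template = data_type_check_constraints.get(internal_type)
    if check_template:
        return "%s CHECK (%s)" % (type_string, check_template % data)
    return type_string

print(column_definition("PositiveIntegerField", "height"))
# -> integer CHECK ("height" >= 0)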
if self.connection.settings_dict['TEST_CHARSET']: diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index 580d16d1fb..5a29932859 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -137,7 +137,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): kc.table_schema = %s AND kc.table_name = %s """, ["public", table_name]) - for constraint, column, kind in cursor.fetchall(): + for constraint, column in cursor.fetchall(): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index bd86d52e88..974a18cc34 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -19,9 +19,6 @@ class BaseDatabaseSchemaEditor(object): then the relevant actions, and then commit(). This is necessary to allow things like circular foreign key references - FKs will only be created once commit() is called. - - TODO: - - Check constraints (PosIntField) """ # Overrideable SQL templates @@ -41,7 +38,7 @@ class BaseDatabaseSchemaEditor(object): sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" - sql_create_check = "ADD CONSTRAINT %(name)s CHECK (%(check)s)" + sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)" @@ -105,7 +102,8 @@ class BaseDatabaseSchemaEditor(object): The field must already have had set_attributes_from_name called. """ # Get the column's type and use that as the basis of the SQL - sql = field.db_type(connection=self.connection) + db_params = field.db_parameters(connection=self.connection) + sql = db_params['type'] params = [] # Check for fields that aren't actually columns (e.g. M2M) if sql is None: @@ -169,6 +167,11 @@ class BaseDatabaseSchemaEditor(object): definition, extra_params = self.column_sql(model, field) if definition is None: continue + # Check constraints can go on the column SQL here + db_params = field.db_parameters(connection=self.connection) + if db_params['check']: + definition += " CHECK (%s)" % db_params['check'] + # Add the SQL to our big list column_sqls.append("%s %s" % ( self.quote_name(field.column), definition, @@ -295,6 +298,10 @@ class BaseDatabaseSchemaEditor(object): # It might not actually have a column behind it if definition is None: return + # Check constraints can go on the column SQL here + db_params = field.db_parameters(connection=self.connection) + if db_params['check']: + definition += " CHECK (%s)" % db_params['check'] # Build the SQL and run it sql = self.sql_create_column % { "table": self.quote_name(model._meta.db_table), @@ -358,8 +365,10 @@ class BaseDatabaseSchemaEditor(object): If strict is true, raises errors if the old column does not match old_field precisely. 
""" # Ensure this field is even column-based - old_type = old_field.db_type(connection=self.connection) - new_type = self._type_for_alter(new_field) + old_db_params = old_field.db_parameters(connection=self.connection) + old_type = old_db_params['type'] + new_db_params = new_field.db_parameters(connection=self.connection) + new_type = new_db_params['type'] if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None or new_type is None: @@ -417,6 +426,22 @@ class BaseDatabaseSchemaEditor(object): "name": fk_name, } ) + # Change check constraints? + if old_db_params['check'] != new_db_params['check'] and old_db_params['check']: + constraint_names = self._constraint_names(model, [old_field.column], check=True) + if strict and len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % ( + len(constraint_names), + model._meta.db_table, + old_field.column, + )) + for constraint_name in constraint_names: + self.execute( + self.sql_delete_check % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_name, + } + ) # Have they renamed the column? if old_field.column != new_field.column: self.execute(self.sql_rename_column % { @@ -543,6 +568,16 @@ class BaseDatabaseSchemaEditor(object): "to_column": self.quote_name(new_field.rel.get_related_field().column), } ) + # Does it have check constraints we need to add? + if old_db_params['check'] != new_db_params['check'] and new_db_params['check']: + self.execute( + self.sql_create_check % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_check"), + "column": self.quote_name(new_field.column), + "check": new_db_params['check'], + } + ) def _alter_many_to_many(self, model, old_field, new_field, strict): "Alters M2Ms to repoint their to= endpoints." @@ -555,14 +590,6 @@ class BaseDatabaseSchemaEditor(object): new_field.rel.through._meta.get_field_by_name(new_field.m2m_reverse_field_name())[0], ) - def _type_for_alter(self, field): - """ - Returns a field's type suitable for ALTER COLUMN. - By default it just returns field.db_type(). - To be overriden by backend specific subclasses - """ - return field.db_type(connection=self.connection) - def _create_index_name(self, model, column_names, suffix=""): "Generates a unique name for an index/unique constraint." 
# If there is just one column in the index, use a default algorithm from Django @@ -581,7 +608,7 @@ class BaseDatabaseSchemaEditor(object): index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) return index_name - def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None): + def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None): "Returns all constraint names matching the columns and conditions" column_names = set(column_names) if column_names else None constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) @@ -594,6 +621,8 @@ class BaseDatabaseSchemaEditor(object): continue if index is not None and infodict['index'] != index: continue + if check is not None and infodict['check'] != check: + continue if foreign_key is not None and not infodict['foreign_key']: continue result.append(name) diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index 45e7264e5c..8e30c7f22d 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -97,6 +97,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): has_bulk_insert = True can_combine_inserts_with_and_without_auto_increment_pk = False supports_foreign_keys = False + supports_check_constraints = False @cached_property def supports_stddev(self): diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index e660f26c87..6149a4e772 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -99,8 +99,10 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): def alter_field(self, model, old_field, new_field, strict=False): # Ensure this field is even column-based - old_type = old_field.db_type(connection=self.connection) - new_type = self._type_for_alter(new_field) + old_db_params = old_field.db_parameters(connection=self.connection) + old_type = old_db_params['type'] + new_db_params = new_field.db_parameters(connection=self.connection) + new_type = new_db_params['type'] if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None or new_type is None: diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index 58ae3413f3..a0b09c9fec 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -232,12 +232,32 @@ class Field(object): # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. 
+ params = self.db_parameters(connection) + if params['type']: + if params['check']: + return "%s CHECK (%s)" % (params['type'], params['check']) + else: + return params['type'] + return None + + def db_parameters(self, connection): + """ + Replacement for db_type, providing a range of different return + values (type, checks) + """ data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_") try: - return (connection.creation.data_types[self.get_internal_type()] - % data) + type_string = connection.creation.data_types[self.get_internal_type()] % data except KeyError: - return None + type_string = None + try: + check_string = connection.creation.data_type_check_constraints[self.get_internal_type()] % data + except KeyError: + check_string = None + return { + "type": type_string, + "check": check_string, + } @property def unique(self): diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 08cc0a747f..37bf4e8072 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -1050,6 +1050,9 @@ class ForeignKey(RelatedField, Field): return IntegerField().db_type(connection=connection) return rel_field.db_type(connection=connection) + def db_parameters(self, connection): + return {"type": self.db_type(connection), "check": []} + class OneToOneField(ForeignKey): """ A OneToOneField is essentially the same as a ForeignKey, with the exception @@ -1292,3 +1295,6 @@ class ManyToManyField(RelatedField, Field): # A ManyToManyField is not represented by a single column, # so return None. return None + + def db_parameters(self, connection): + return {"type": None, "check": None} diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index 76a8cf3687..f3d6d09e2e 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -7,6 +7,7 @@ from django.db import models class Author(models.Model): name = models.CharField(max_length=255) + height = models.PositiveIntegerField(null=True, blank=True) class Meta: managed = False diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index b3fc5d1c80..7d8602eff7 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -347,6 +347,56 @@ class SchemaTests(TestCase): else: self.fail("No FK constraint for tag_id found") + @skipUnless(connection.features.supports_check_constraints, "No check constraints") + def test_check_constraints(self): + """ + Tests creating/deleting CHECK constraints + """ + # Create the tables + editor = connection.schema_editor() + editor.start() + editor.create_model(Author) + editor.commit() + # Ensure the constraint exists + constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == set(["height"]) and details['check']: + break + else: + self.fail("No check constraint for height found") + # Alter the column to remove it + new_field = IntegerField(null=True, blank=True) + new_field.set_attributes_from_name("height") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + Author._meta.get_field_by_name("height")[0], + new_field, + strict = True, + ) + editor.commit() + constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == set(["height"]) and details['check']: + self.fail("Check constraint for height found") 
+ # Alter the column to re-add it + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + new_field, + Author._meta.get_field_by_name("height")[0], + strict = True, + ) + editor.commit() + constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == set(["height"]) and details['check']: + break + else: + self.fail("No check constraint for height found") + def test_unique(self): """ Tests removing and adding unique constraints to a single column. From 8413c85f3d9a5d7f0485e621ec5836484c974f30 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Sep 2012 15:48:22 -0400 Subject: [PATCH 017/161] Very initial Oracle support --- django/db/backends/oracle/base.py | 5 +++++ django/db/backends/oracle/creation.py | 15 +++++++++++---- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index 6bf2e815a7..f4b11ec327 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -51,6 +51,7 @@ from django.db.backends.signals import connection_created from django.db.backends.oracle.client import DatabaseClient from django.db.backends.oracle.creation import DatabaseCreation from django.db.backends.oracle.introspection import DatabaseIntrospection +from django.db.backends.oracle.schema import DatabaseSchemaEditor from django.utils.encoding import force_bytes, force_text from django.utils import six from django.utils import timezone @@ -571,6 +572,10 @@ class DatabaseWrapper(BaseDatabaseWrapper): and x.code == 2091 and 'ORA-02291' in x.message: six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) six.reraise(utils.DatabaseError, utils.DatabaseError(*tuple(e.args)), sys.exc_info()[2]) + + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) class OracleParam(object): diff --git a/django/db/backends/oracle/creation.py b/django/db/backends/oracle/creation.py index d9bf3dfea2..c4dc80503c 100644 --- a/django/db/backends/oracle/creation.py +++ b/django/db/backends/oracle/creation.py @@ -17,7 +17,7 @@ class DatabaseCreation(BaseDatabaseCreation): data_types = { 'AutoField': 'NUMBER(11)', - 'BooleanField': 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))', + 'BooleanField': 'NUMBER(1)', 'CharField': 'NVARCHAR2(%(max_length)s)', 'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)', 'DateField': 'DATE', @@ -30,10 +30,10 @@ class DatabaseCreation(BaseDatabaseCreation): 'BigIntegerField': 'NUMBER(19)', 'IPAddressField': 'VARCHAR2(15)', 'GenericIPAddressField': 'VARCHAR2(39)', - 'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))', + 'NullBooleanField': 'NUMBER(1)', 'OneToOneField': 'NUMBER(11)', - 'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)', - 'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)', + 'PositiveIntegerField': 'NUMBER(11)', + 'PositiveSmallIntegerField': 'NUMBER(11)', 'SlugField': 'NVARCHAR2(%(max_length)s)', 'SmallIntegerField': 'NUMBER(11)', 'TextField': 'NCLOB', @@ -41,6 +41,13 @@ class DatabaseCreation(BaseDatabaseCreation): 'URLField': 'VARCHAR2(%(max_length)s)', } + data_type_check_constraints = { + 'BooleanField': '%(qn_column)s IN (0,1)', + 'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)', + 'PositiveIntegerField': '"%(qn_column)s" >= 0', + 
'PositiveSmallIntegerField': '"%(qn_column)s" >= 0', + } + def __init__(self, connection): super(DatabaseCreation, self).__init__(connection) From 3ffbfe4abcc9380fbaeeb7a370109775166b23d8 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Sep 2012 15:56:07 -0400 Subject: [PATCH 018/161] Stubbed-out oracle schema file --- django/db/backends/oracle/schema.py | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 django/db/backends/oracle/schema.py diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py new file mode 100644 index 0000000000..b86e0857bb --- /dev/null +++ b/django/db/backends/oracle/schema.py @@ -0,0 +1,5 @@ +from django.db.backends.schema import BaseDatabaseSchemaEditor + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + pass From dbf8b93c527733fb5e3ea101a67bd94db745888e Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 17 Sep 2012 19:57:23 +0100 Subject: [PATCH 019/161] Fix app loading/test interaction --- django/db/models/loading.py | 3 ++- tests/modeltests/schema/tests.py | 24 ++++++++++++++---------- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/django/db/models/loading.py b/django/db/models/loading.py index e0d943853b..6f7a50dcf8 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -251,7 +251,7 @@ class AppCache(object): """ return { "app_store": SortedDict(self.app_store.items()), - "app_labels": dict(self.app_errors.items()), + "app_labels": dict(self.app_labels.items()), "app_models": SortedDict(self.app_models.items()), "app_errors": dict(self.app_errors.items()), } @@ -264,6 +264,7 @@ class AppCache(object): self.app_labels = state['app_labels'] self.app_models = state['app_models'] self.app_errors = state['app_errors'] + self._get_models_cache.clear() def temporary_state(self): "Returns a context manager that restores the state on exit" diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 7d8602eff7..fdc3cf38ce 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -7,7 +7,7 @@ from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey from django.db.models.loading import cache -from .models import Author, Book, BookWithSlug, BookWithM2M, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest +from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest class SchemaTests(TestCase): @@ -19,7 +19,7 @@ class SchemaTests(TestCase): as the code it is testing. """ - models = [Author, Book, BookWithSlug, BookWithM2M, AuthorWithM2M, Tag, TagUniqueRename, UniqueTest] + models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest] # Utility functions @@ -30,8 +30,8 @@ class SchemaTests(TestCase): connection.managed(True) # The unmanaged models need to be removed after the test in order to # prevent bad interactions with the flush operation in other tests. 
- self.old_app_models = copy.deepcopy(cache.app_models) - self.old_app_store = copy.deepcopy(cache.app_store) + self.cache_state = cache.save_state() + cache.load_app("modeltests.schema") for model in self.models: model._meta.managed = True @@ -39,6 +39,16 @@ class SchemaTests(TestCase): # Rollback anything that may have happened connection.rollback() # Delete any tables made for our models + self.delete_tables() + # Unhook our models + for model in self.models: + model._meta.managed = False + if "schema" in self.cache_state['app_labels']: + del self.cache_state['app_labels']['schema'] + cache.restore_state(self.cache_state) + + def delete_tables(self): + "Deletes all model tables for our models for a clean test environment" cursor = connection.cursor() connection.disable_constraint_checking() for model in self.models: @@ -62,12 +72,6 @@ class SchemaTests(TestCase): else: connection.commit() connection.enable_constraint_checking() - # Unhook our models - for model in self.models: - model._meta.managed = False - cache.app_models = self.old_app_models - cache.app_store = self.old_app_store - cache._get_models_cache = {} def column_classes(self, model): cursor = connection.cursor() From d0b353696478a05937377dd5c8289e69a95f059a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 17 Sep 2012 21:16:36 +0100 Subject: [PATCH 020/161] More schema test fixing --- django/core/management/commands/syncdb.py | 4 +- django/db/models/loading.py | 3 +- tests/modeltests/schema/tests.py | 108 ++++++++++++---------- 3 files changed, 63 insertions(+), 52 deletions(-) diff --git a/django/core/management/commands/syncdb.py b/django/core/management/commands/syncdb.py index cceec07be8..bb3f916968 100644 --- a/django/core/management/commands/syncdb.py +++ b/django/core/management/commands/syncdb.py @@ -71,9 +71,11 @@ class Command(NoArgsCommand): def model_installed(model): opts = model._meta converter = connection.introspection.table_name_converter - return not ((converter(opts.db_table) in tables) or + # Note that if a model is unmanaged we short-circuit and never try to install it + return opts.managed and not ((converter(opts.db_table) in tables) or (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) + manifest = SortedDict( (app_name, list(filter(model_installed, model_list))) for app_name, model_list in all_models diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 6f7a50dcf8..773050d624 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -252,13 +252,14 @@ class AppCache(object): return { "app_store": SortedDict(self.app_store.items()), "app_labels": dict(self.app_labels.items()), - "app_models": SortedDict(self.app_models.items()), + "app_models": SortedDict((k, SortedDict(v.items())) for k, v in self.app_models.items()), "app_errors": dict(self.app_errors.items()), } def restore_state(self, state): """ Restores the AppCache to a previous state from save_state. + Note that the state is used by reference, not copied in. """ self.app_store = state['app_store'] self.app_labels = state['app_labels'] diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index fdc3cf38ce..df482e8181 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -31,20 +31,18 @@ class SchemaTests(TestCase): # The unmanaged models need to be removed after the test in order to # prevent bad interactions with the flush operation in other tests. 
self.cache_state = cache.save_state() - cache.load_app("modeltests.schema") for model in self.models: model._meta.managed = True def tearDown(self): - # Rollback anything that may have happened - connection.rollback() # Delete any tables made for our models self.delete_tables() + # Rollback anything that may have happened + connection.rollback() + connection.leave_transaction_management() # Unhook our models for model in self.models: model._meta.managed = False - if "schema" in self.cache_state['app_labels']: - del self.cache_state['app_labels']['schema'] cache.restore_state(self.cache_state) def delete_tables(self): @@ -280,31 +278,36 @@ class SchemaTests(TestCase): # Create an M2M field new_field = ManyToManyField("schema.Tag", related_name="authors") new_field.contribute_to_class(AuthorWithM2M, "tags") - # Ensure there's no m2m table there - self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) - connection.rollback() - # Add the field - editor = connection.schema_editor() - editor.start() - editor.create_field( - Author, - new_field, - ) - editor.commit() - # Ensure there is now an m2m table there - columns = self.column_classes(new_field.rel.through) - self.assertEqual(columns['tag_id'][0], "IntegerField") - # Remove the M2M table again - editor = connection.schema_editor() - editor.start() - editor.delete_field( - Author, - new_field, - ) - editor.commit() - # Ensure there's no m2m table there - self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) - connection.rollback() + try: + # Ensure there's no m2m table there + self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) + connection.rollback() + # Add the field + editor = connection.schema_editor() + editor.start() + editor.create_field( + Author, + new_field, + ) + editor.commit() + # Ensure there is now an m2m table there + columns = self.column_classes(new_field.rel.through) + self.assertEqual(columns['tag_id'][0], "IntegerField") + # Remove the M2M table again + editor = connection.schema_editor() + editor.start() + editor.delete_field( + Author, + new_field, + ) + editor.commit() + # Ensure there's no m2m table there + self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) + connection.rollback() + finally: + # Cleanup model states + AuthorWithM2M._meta.local_many_to_many.remove(new_field) + del AuthorWithM2M._meta._m2m_cache def test_m2m_repoint(self): """ @@ -330,26 +333,31 @@ class SchemaTests(TestCase): # Repoint the M2M new_field = ManyToManyField(UniqueTest) new_field.contribute_to_class(BookWithM2M, "uniques") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - BookWithM2M._meta.get_field_by_name("tags")[0], - new_field, - ) - editor.commit() - # Ensure old M2M is gone - self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) - connection.rollback() - # Ensure the new M2M exists and points to UniqueTest - constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table) - if connection.features.supports_foreign_keys: - for name, details in constraints.items(): - if details['columns'] == set(["uniquetest_id"]) and details['foreign_key']: - self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) - break - else: - self.fail("No FK constraint for tag_id found") + try: + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + 
BookWithM2M._meta.get_field_by_name("tags")[0], + new_field, + ) + editor.commit() + # Ensure old M2M is gone + self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) + connection.rollback() + # Ensure the new M2M exists and points to UniqueTest + constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table) + if connection.features.supports_foreign_keys: + for name, details in constraints.items(): + if details['columns'] == set(["uniquetest_id"]) and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) + break + else: + self.fail("No FK constraint for tag_id found") + finally: + # Cleanup model states + BookWithM2M._meta.local_many_to_many.remove(new_field) + del BookWithM2M._meta._m2m_cache @skipUnless(connection.features.supports_check_constraints, "No check constraints") def test_check_constraints(self): From c5e2ecae6949d4e89530610d768bbbd563ddc19b Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 17 Sep 2012 23:45:00 +0100 Subject: [PATCH 021/161] Fix bug in get_indexes affecting the tests --- django/db/backends/mysql/introspection.py | 1 + django/db/backends/postgresql_psycopg2/introspection.py | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 93e89c1263..477478ca9d 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -103,6 +103,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): continue if row[4] not in indexes: indexes[row[4]] = {'primary_key': False, 'unique': False} + # It's possible to have the unique and PK constraints in separate indexes. if row[2] == 'PRIMARY': indexes[row[4]]['primary_key'] = True if not bool(row[1]): diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index 5a29932859..d43b95406c 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -86,7 +86,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # Here, we skip any indexes across multiple fields. if ' ' in row[1]: continue - indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]} + if row[0] not in indexes: + indexes[row[0]] = {'primary_key': False, 'unique': False} + # It's possible to have the unique and PK constraints in separate indexes. + if row[3]: + indexes[row[0]]['primary_key'] = True + if row[2]: + indexes[row[0]]['unique'] = True return indexes def get_constraints(self, cursor, table_name): From 06227fbb835aabab1f4bfb9ef26c9d07c90ff8ea Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 17 Sep 2012 23:51:48 +0100 Subject: [PATCH 022/161] Python 3 compatability. 2.6 was a while back, I should learn ' as '. 
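The get_indexes fix in the patch above addresses the case where one column appears in several introspection rows (for example, once for its PRIMARY KEY index and once for a UNIQUE index): the flags now get OR-ed into a single record instead of the last row overwriting the earlier ones. A standalone sketch of that merge, with made-up rows:

# Plain-Python sketch of the per-column merge; the rows are illustrative.
rows = [
    ("id", True, True),      # (column, unique, primary_key)
    ("slug", True, False),
    ("slug", False, False),  # a second, non-unique index on the same column
]
indexes = {}
for column, unique, primary_key in rows:
    info = indexes.setdefault(column, {"primary_key": False, "unique": False})
    info["primary_key"] = info["primary_key"] or primary_key
    info["unique"] = info["unique"] or unique
print(indexes)
# -> {'id': {'primary_key': True, 'unique': True},
#     'slug': {'primary_key': False, 'unique': True}}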
--- tests/modeltests/schema/tests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index df482e8181..3a9f85643a 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -103,7 +103,7 @@ class SchemaTests(TestCase): # Check that it's there try: list(Author.objects.all()) - except DatabaseError, e: + except DatabaseError as e: self.fail("Table not created: %s" % e) # Clean up that table editor.start() @@ -128,11 +128,11 @@ class SchemaTests(TestCase): # Check that initial tables are there try: list(Author.objects.all()) - except DatabaseError, e: + except DatabaseError as e: self.fail("Author table not created: %s" % e) try: list(Book.objects.all()) - except DatabaseError, e: + except DatabaseError as e: self.fail("Book table not created: %s" % e) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): From ae6ffd2e7ef69180560eaeae9a00f76cd3721691 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 18 Sep 2012 10:37:30 +0100 Subject: [PATCH 023/161] Stylistic fixes: triple-quoted docstrings, more comments --- django/db/backends/schema.py | 28 +++++++++++++++------ django/db/backends/sqlite3/introspection.py | 5 +++- django/db/backends/sqlite3/schema.py | 21 +++++++++++++--- 3 files changed, 43 insertions(+), 11 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 974a18cc34..baa8fd3ddb 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -59,21 +59,27 @@ class BaseDatabaseSchemaEditor(object): # State-managing methods def start(self): - "Marks the start of a schema-altering run" + """ + Marks the start of a schema-altering run. + """ self.deferred_sql = [] self.connection.commit_unless_managed() self.connection.enter_transaction_management() self.connection.managed(True) def commit(self): - "Finishes a schema-altering run" + """ + Finishes a schema-altering run. + """ for sql in self.deferred_sql: self.execute(sql) self.connection.commit() self.connection.leave_transaction_management() def rollback(self): - "Tries to roll back a schema-altering run. Call instead of commit()" + """ + Tries to roll back a schema-altering run. Call instead of commit(). + """ if not self.connection.features.can_rollback_ddl: raise RuntimeError("Cannot rollback schema changes on this backend") self.connection.rollback() @@ -137,7 +143,9 @@ class BaseDatabaseSchemaEditor(object): return sql, params def effective_default(self, field): - "Returns a field's effective database default value" + """ + Returns a field's effective database default value + """ if field.has_default(): default = field.get_default() elif not field.null and field.blank and field.empty_strings_allowed: @@ -580,7 +588,9 @@ class BaseDatabaseSchemaEditor(object): ) def _alter_many_to_many(self, model, old_field, new_field, strict): - "Alters M2Ms to repoint their to= endpoints." + """ + Alters M2Ms to repoint their to= endpoints. + """ # Rename the through table self.alter_db_table(old_field.rel.through, old_field.rel.through._meta.db_table, new_field.rel.through._meta.db_table) # Repoint the FK to the other side @@ -591,7 +601,9 @@ class BaseDatabaseSchemaEditor(object): ) def _create_index_name(self, model, column_names, suffix=""): - "Generates a unique name for an index/unique constraint." + """ + Generates a unique name for an index/unique constraint. 
+ """ # If there is just one column in the index, use a default algorithm from Django if len(column_names) == 1 and not suffix: return truncate_name( @@ -609,7 +621,9 @@ class BaseDatabaseSchemaEditor(object): return index_name def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None): - "Returns all constraint names matching the columns and conditions" + """ + Returns all constraint names matching the columns and conditions + """ column_names = set(column_names) if column_names else None constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) result = [] diff --git a/django/db/backends/sqlite3/introspection.py b/django/db/backends/sqlite3/introspection.py index 62c53e075a..711ee6fac5 100644 --- a/django/db/backends/sqlite3/introspection.py +++ b/django/db/backends/sqlite3/introspection.py @@ -207,10 +207,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # Get the PK pk_column = self.get_primary_key_column(cursor, table_name) if pk_column: + # SQLite doesn't actually give a name to the PK constraint, + # so we invent one. This is fine, as the SQLite backend never + # deletes PK constraints by name. constraints["__primary__"] = { "columns": set([pk_column]), "primary_key": True, - "unique": False, # It's not actually a unique constraint + "unique": False, # It's not actually a unique constraint. "foreign_key": False, "check": False, "index": False, diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index 6149a4e772..e42f2c62d9 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -8,7 +8,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_delete_table = "DROP TABLE %(table)s" def _remake_table(self, model, create_fields=[], delete_fields=[], alter_fields=[], rename_fields=[], override_uniques=None): - "Shortcut to transform a model from old_model into new_model" + """ + Shortcut to transform a model from old_model into new_model + """ # Work out the new fields dict / mapping body = dict((f.name, f) for f in model._meta.local_fields) mapping = dict((f.column, f.column) for f in model._meta.local_fields) @@ -98,7 +100,13 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): self._remake_table(model, delete_fields=[field]) def alter_field(self, model, old_field, new_field, strict=False): - # Ensure this field is even column-based + """ + Allows a field's type, uniqueness, nullability, default, column, + constraints etc. to be modified. + Requires a copy of the old field as well so we can only perform + changes that are required. + If strict is true, raises errors if the old column does not match old_field precisely. + """ old_db_params = old_field.db_parameters(connection=self.connection) old_type = old_db_params['type'] new_db_params = new_field.db_parameters(connection=self.connection) @@ -114,10 +122,17 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): self._remake_table(model, alter_fields=[(old_field, new_field)]) def alter_unique_together(self, model, old_unique_together, new_unique_together): + """ + Deals with a model changing its unique_together. + Note: The input unique_togethers must be doubly-nested, not the single- + nested ["foo", "bar"] format. + """ self._remake_table(model, override_uniques=new_unique_together) def _alter_many_to_many(self, model, old_field, new_field, strict): - "Alters M2Ms to repoint their to= endpoints." 
+ """ + Alters M2Ms to repoint their to= endpoints. + """ # Make a new through table self.create_model(new_field.rel.through) # Copy the data across From 7e8c64d8e68398937028400f801dbe02a830f9d4 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 18 Sep 2012 10:59:03 +0100 Subject: [PATCH 024/161] Dropping pointless self.fail calls --- tests/modeltests/schema/tests.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 3a9f85643a..7c3e6ffc03 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -101,10 +101,7 @@ class SchemaTests(TestCase): editor.create_model(Author) editor.commit() # Check that it's there - try: - list(Author.objects.all()) - except DatabaseError as e: - self.fail("Table not created: %s" % e) + list(Author.objects.all()) # Clean up that table editor.start() editor.delete_model(Author) @@ -126,14 +123,8 @@ class SchemaTests(TestCase): editor.create_model(Tag) editor.commit() # Check that initial tables are there - try: - list(Author.objects.all()) - except DatabaseError as e: - self.fail("Author table not created: %s" % e) - try: - list(Book.objects.all()) - except DatabaseError as e: - self.fail("Book table not created: %s" % e) + list(Author.objects.all()) + list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( From dbc17d035b255a4da977251fe399f5c80cffeecd Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 21 Sep 2012 23:36:26 +0100 Subject: [PATCH 025/161] Un-borg-ify AppCache --- django/db/models/loading.py | 40 +++++++++++++++---------------------- 1 file changed, 16 insertions(+), 24 deletions(-) diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 773050d624..d57780c578 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -14,37 +14,29 @@ import os __all__ = ('get_apps', 'get_app', 'get_models', 'get_model', 'register_models', 'load_app', 'app_cache_ready') + class AppCache(object): """ A cache that stores installed applications and their models. Used to provide reverse-relations and for app introspection (e.g. admin). """ - # Use the Borg pattern to share state between all instances. Details at - # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66531. - __shared_state = dict( - # Keys of app_store are the model modules for each application. - app_store = SortedDict(), - - # Mapping of installed app_labels to model modules for that app. - app_labels = {}, - - # Mapping of app_labels to a dictionary of model names to model code. - # May contain apps that are not installed. - app_models = SortedDict(), - - # Mapping of app_labels to errors raised when trying to import the app. - app_errors = {}, - - # -- Everything below here is only used when populating the cache -- - loaded = False, - handled = {}, - postponed = [], - nesting_level = 0, - _get_models_cache = {}, - ) def __init__(self): - self.__dict__ = self.__shared_state + # Keys of app_store are the model modules for each application. + self.app_store = SortedDict() + # Mapping of installed app_labels to model modules for that app. + self.app_labels = {} + # Mapping of app_labels to a dictionary of model names to model code. + # May contain apps that are not installed. + self.app_models = SortedDict() + # Mapping of app_labels to errors raised when trying to import the app. 
+ self.app_errors = {} + # -- Everything below here is only used when populating the cache -- + self.loaded = False + self.handled = {} + self.postponed = [] + self.nesting_level = 0 + self._get_models_cache = {} def _populate(self): """ From 49d1e6b0e20a363cbf9b105e8e6d3fc5fc1cad2f Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 22 Sep 2012 00:47:04 +0100 Subject: [PATCH 026/161] Remove AppCache state handling, replace with swappable caches --- django/db/backends/sqlite3/schema.py | 12 ++-- django/db/models/base.py | 19 +++--- django/db/models/loading.py | 88 +++++++++++----------------- django/db/models/options.py | 5 +- tests/modeltests/schema/models.py | 16 ++--- tests/modeltests/schema/tests.py | 15 ++--- 6 files changed, 72 insertions(+), 83 deletions(-) diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index e42f2c62d9..a1fd95fe8b 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -1,5 +1,5 @@ from django.db.backends.schema import BaseDatabaseSchemaEditor -from django.db.models.loading import cache +from django.db.models.loading import cache, default_cache, AppCache from django.db.models.fields.related import ManyToManyField @@ -46,10 +46,12 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): } meta = type("Meta", tuple(), meta_contents) body['Meta'] = meta - body['__module__'] = "__fake__" - with cache.temporary_state(): - del cache.app_models[model._meta.app_label][model._meta.object_name.lower()] - temp_model = type(model._meta.object_name, model.__bases__, body) + body['__module__'] = model.__module__ + self.app_cache = AppCache() + cache.set_cache(self.app_cache) + cache.copy_from(default_cache) + temp_model = type(model._meta.object_name, model.__bases__, body) + cache.set_cache(default_cache) # Create a new table with that format self.create_model(temp_model) # Copy data from the old table diff --git a/django/db/models/base.py b/django/db/models/base.py index 62024c8ee4..de94911601 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -228,14 +228,17 @@ class ModelBase(type): return new_class new_class._prepare() - register_models(new_class._meta.app_label, new_class) - - # Because of the way imports happen (recursively), we may or may not be - # the first time this model tries to register with the framework. There - # should only be one class for each model, so we always return the - # registered version. - return get_model(new_class._meta.app_label, name, - seed_cache=False, only_installed=False) + + if new_class._meta.auto_register: + register_models(new_class._meta.app_label, new_class) + # Because of the way imports happen (recursively), we may or may not be + # the first time this model tries to register with the framework. There + # should only be one class for each model, so we always return the + # registered version. + return get_model(new_class._meta.app_label, name, + seed_cache=False, only_installed=False) + else: + return new_class def copy_managers(cls, base_managers): # This is in-place sorting of an Options attribute, but that's fine. diff --git a/django/db/models/loading.py b/django/db/models/loading.py index d57780c578..6ae37505a5 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -236,69 +236,49 @@ class AppCache(object): model_dict[model_name] = model self._get_models_cache.clear() - def save_state(self): - """ - Returns an object that contains the current AppCache state. 
- Can be provided to restore_state to undo actions. - """ - return { - "app_store": SortedDict(self.app_store.items()), - "app_labels": dict(self.app_labels.items()), - "app_models": SortedDict((k, SortedDict(v.items())) for k, v in self.app_models.items()), - "app_errors": dict(self.app_errors.items()), - } - - def restore_state(self, state): - """ - Restores the AppCache to a previous state from save_state. - Note that the state is used by reference, not copied in. - """ - self.app_store = state['app_store'] - self.app_labels = state['app_labels'] - self.app_models = state['app_models'] - self.app_errors = state['app_errors'] - self._get_models_cache.clear() - - def temporary_state(self): - "Returns a context manager that restores the state on exit" - return StateContextManager(self) - - def unregister_all(self): - """ - Wipes the AppCache clean of all registered models. - Used for things like migration libraries' fake ORMs. - """ - self.app_store = SortedDict() - self.app_labels = {} - self.app_models = SortedDict() - self.app_errors = {} + def copy_from(self, other): + "Registers all models from the other cache into this one" + cache._populate() + for app_label, models in other.app_models.items(): + self.register_models(app_label, *models.values()) -class StateContextManager(object): +class AppCacheWrapper(object): """ - Context manager for locking cache state. - Useful for making temporary models you don't want to stay in the cache. + As AppCache can be changed at runtime, this class wraps it so any + imported references to 'cache' are changed along with it. """ def __init__(self, cache): - self.cache = cache + self._cache = cache - def __enter__(self): - self.state = self.cache.save_state() + def set_cache(self, cache): + self._cache = cache - def __exit__(self, type, value, traceback): - self.cache.restore_state(self.state) + def __getattr__(self, attr): + if attr in ("_cache", "set_cache"): + return self.__dict__[attr] + return getattr(self._cache, attr) + + def __setattr__(self, attr, value): + if attr in ("_cache", "set_cache"): + self.__dict__[attr] = value + return + return setattr(self._cache, attr, value) -cache = AppCache() +default_cache = AppCache() +cache = AppCacheWrapper(default_cache) + # These methods were always module level, so are kept that way for backwards -# compatibility. -get_apps = cache.get_apps -get_app = cache.get_app -get_app_errors = cache.get_app_errors -get_models = cache.get_models -get_model = cache.get_model -register_models = cache.register_models -load_app = cache.load_app -app_cache_ready = cache.app_cache_ready +# compatibility. These are wrapped with lambdas to stop the attribute +# access resolving directly to a method on a single cache instance. 
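The AppCacheWrapper above exists so that code which imported `cache` at module load time keeps seeing whatever cache set_cache() has swapped in. A toy, standalone demonstration of that delegation pattern follows; the classes are simplified stand-ins, not the Django implementation.

# Toy demo: attribute access is forwarded to the currently wrapped object,
# so references captured at import time follow later swaps.
class Wrapper(object):
    def __init__(self, target):
        self.__dict__["_target"] = target
    def set_target(self, target):
        self.__dict__["_target"] = target
    def __getattr__(self, attr):
        return getattr(self.__dict__["_target"], attr)
    def __setattr__(self, attr, value):
        setattr(self.__dict__["_target"], attr, value)

class Cache(object):
    def __init__(self, label):
        self.label = label

cache = Wrapper(Cache("default"))
captured = cache                      # what an importing module would hold
cache.set_target(Cache("temporary"))
print(captured.label)                 # "temporary": the old reference follows the swap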
+get_apps = lambda *x, **y: cache.get_apps(*x, **y) +get_app = lambda *x, **y: cache.get_app(*x, **y) +get_app_errors = lambda *x, **y: cache.get_app_errors(*x, **y) +get_models = lambda *x, **y: cache.get_models(*x, **y) +get_model = lambda *x, **y: cache.get_model(*x, **y) +register_models = lambda *x, **y: cache.register_models(*x, **y) +load_app = lambda *x, **y: cache.load_app(*x, **y) +app_cache_ready = lambda *x, **y: cache.app_cache_ready(*x, **y) diff --git a/django/db/models/options.py b/django/db/models/options.py index 6814ce27ff..820540559f 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -21,7 +21,7 @@ get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]| DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering', 'unique_together', 'permissions', 'get_latest_by', 'order_with_respect_to', 'app_label', 'db_tablespace', - 'abstract', 'managed', 'proxy', 'auto_created') + 'abstract', 'managed', 'proxy', 'auto_created', 'auto_register') @python_2_unicode_compatible class Options(object): @@ -68,6 +68,9 @@ class Options(object): # from *other* models. Needed for some admin checks. Internal use only. self.related_fkey_lookups = [] + # If we should auto-register with the AppCache + self.auto_register = True + def contribute_to_class(self, cls, name): from django.db import connection from django.db.backends.util import truncate_name diff --git a/tests/modeltests/schema/models.py b/tests/modeltests/schema/models.py index f3d6d09e2e..fdf950860c 100644 --- a/tests/modeltests/schema/models.py +++ b/tests/modeltests/schema/models.py @@ -10,14 +10,14 @@ class Author(models.Model): height = models.PositiveIntegerField(null=True, blank=True) class Meta: - managed = False + auto_register = False class AuthorWithM2M(models.Model): name = models.CharField(max_length=255) class Meta: - managed = False + auto_register = False class Book(models.Model): @@ -27,7 +27,7 @@ class Book(models.Model): #tags = models.ManyToManyField("Tag", related_name="books") class Meta: - managed = False + auto_register = False class BookWithM2M(models.Model): @@ -37,7 +37,7 @@ class BookWithM2M(models.Model): tags = models.ManyToManyField("Tag", related_name="books") class Meta: - managed = False + auto_register = False class BookWithSlug(models.Model): @@ -47,7 +47,7 @@ class BookWithSlug(models.Model): slug = models.CharField(max_length=20, unique=True) class Meta: - managed = False + auto_register = False db_table = "schema_book" @@ -56,7 +56,7 @@ class Tag(models.Model): slug = models.SlugField(unique=True) class Meta: - managed = False + auto_register = False class TagUniqueRename(models.Model): @@ -64,7 +64,7 @@ class TagUniqueRename(models.Model): slug2 = models.SlugField(unique=True) class Meta: - managed = False + auto_register = False db_table = "schema_tag" @@ -73,5 +73,5 @@ class UniqueTest(models.Model): slug = models.SlugField(unique=False) class Meta: - managed = False + auto_register = False unique_together = ["year", "slug"] diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 7c3e6ffc03..5fabe6e91d 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -6,7 +6,7 @@ from django.utils.unittest import skipUnless from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey -from django.db.models.loading 
import cache +from django.db.models.loading import cache, default_cache, AppCache from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest @@ -30,9 +30,12 @@ class SchemaTests(TestCase): connection.managed(True) # The unmanaged models need to be removed after the test in order to # prevent bad interactions with the flush operation in other tests. - self.cache_state = cache.save_state() + self.app_cache = AppCache() + cache.set_cache(self.app_cache) + cache.copy_from(default_cache) for model in self.models: - model._meta.managed = True + cache.register_models("schema", model) + model._prepare() def tearDown(self): # Delete any tables made for our models @@ -40,10 +43,8 @@ class SchemaTests(TestCase): # Rollback anything that may have happened connection.rollback() connection.leave_transaction_management() - # Unhook our models - for model in self.models: - model._meta.managed = False - cache.restore_state(self.cache_state) + cache.set_cache(default_cache) + cache.app_models['schema'] = {} # One M2M gets left in the old cache def delete_tables(self): "Deletes all model tables for our models for a clean test environment" From 45e5eedea99ed5aaa1df8ab505527566097e2328 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 22 Sep 2012 01:17:08 +0100 Subject: [PATCH 027/161] Remove special-casing for proxy/unmanaged models --- django/db/backends/schema.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index baa8fd3ddb..5f45ee93e9 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -159,14 +159,11 @@ class BaseDatabaseSchemaEditor(object): # Actions - def create_model(self, model, force=False): + def create_model(self, model): """ Takes a model and creates a table for it in the database. Will also create any accompanying indexes or unique constraints. """ - # Do nothing if this is an unmanaged or proxy model - if not force and (not model._meta.managed or model._meta.proxy): - return # Create column SQL, add FK deferreds if needed column_sqls = [] params = [] @@ -226,15 +223,12 @@ class BaseDatabaseSchemaEditor(object): self.execute(sql, params) # Make M2M tables for field in model._meta.local_many_to_many: - self.create_model(field.rel.through, force=True) + self.create_model(field.rel.through) - def delete_model(self, model, force=False): + def delete_model(self, model): """ Deletes a model from the database. """ - # Do nothing if this is an unmanaged or proxy model - if not force and (not model._meta.managed or model._meta.proxy): - return # Delete the table self.execute(self.sql_delete_table % { "table": self.quote_name(model._meta.db_table), @@ -300,7 +294,7 @@ class BaseDatabaseSchemaEditor(object): """ # Special-case implicit M2M tables if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: - return self.create_model(field.rel.through, force=True) + return self.create_model(field.rel.through) # Get the column's definition definition, params = self.column_sql(model, field, include_default=True) # It might not actually have a column behind it From 9234131c05ec0c63e035883d9586cb1630f3881a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 22 Sep 2012 01:22:43 +0100 Subject: [PATCH 028/161] Remove weird syncdb managed thing - no longer needed. 
--- django/core/management/commands/syncdb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/core/management/commands/syncdb.py b/django/core/management/commands/syncdb.py index bb3f916968..56e94d9e80 100644 --- a/django/core/management/commands/syncdb.py +++ b/django/core/management/commands/syncdb.py @@ -72,7 +72,7 @@ class Command(NoArgsCommand): opts = model._meta converter = connection.introspection.table_name_converter # Note that if a model is unmanaged we short-circuit and never try to install it - return opts.managed and not ((converter(opts.db_table) in tables) or + return not ((converter(opts.db_table) in tables) or (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) From 3074243a5fdd2116fdd34d4cbcd58c45fe35e12a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 22 Sep 2012 01:22:58 +0100 Subject: [PATCH 029/161] Fix introspection PK comment more. --- django/db/backends/sqlite3/introspection.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/django/db/backends/sqlite3/introspection.py b/django/db/backends/sqlite3/introspection.py index 711ee6fac5..c364468516 100644 --- a/django/db/backends/sqlite3/introspection.py +++ b/django/db/backends/sqlite3/introspection.py @@ -209,7 +209,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): if pk_column: # SQLite doesn't actually give a name to the PK constraint, # so we invent one. This is fine, as the SQLite backend never - # deletes PK constraints by name. + # deletes PK constraints by name, as you can't delete constraints + # in SQLite; we remake the table with a new PK instead. constraints["__primary__"] = { "columns": set([pk_column]), "primary_key": True, From f0e09e27f6461cf65cbaa68c097e8368e0f14ec0 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 12:16:16 +0100 Subject: [PATCH 030/161] Fix non-executed SQL --- django/db/backends/schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 5f45ee93e9..99a84204af 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -320,6 +320,7 @@ class BaseDatabaseSchemaEditor(object): "column": self.quote_name(field.column), } } + self.execute(sql) # Add any FK constraints later if field.rel and self.connection.features.supports_foreign_keys: to_table = field.rel.to._meta.db_table From 1a6d07783d980664312ce729e1fc8867ad1a3b5c Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 12:19:32 +0100 Subject: [PATCH 031/161] Do cheaper check for column having a type. 
--- django/db/backends/schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 99a84204af..2e064909cb 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -347,11 +347,11 @@ class BaseDatabaseSchemaEditor(object): # Special-case implicit M2M tables if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: return self.delete_model(field.rel.through) + # It might not actually have a column behind it + if field.db_parameters(connection=self.connection)['type'] is None: + return # Get the column's definition definition, params = self.column_sql(model, field) - # It might not actually have a column behind it - if definition is None: - return # Delete the column sql = self.sql_delete_column % { "table": self.quote_name(model._meta.db_table), From e3c9742cd462706d4da76fcdd6ede7dd1354ad6c Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 12:51:50 +0100 Subject: [PATCH 032/161] Fix error message for unique constraints --- django/db/backends/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 2e064909cb..82fa160745 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -384,7 +384,7 @@ class BaseDatabaseSchemaEditor(object): # Find the unique constraint for this field constraint_names = self._constraint_names(model, [old_field.column], unique=True) if strict and len(constraint_names) != 1: - raise ValueError("Found wrong number (%s) of constraints for %s.%s" % ( + raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( len(constraint_names), model._meta.db_table, old_field.column, From 588b839b26d922f2ddc2bbdd6377367f31e4ab4d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 12:52:43 +0100 Subject: [PATCH 033/161] Fix NOT NULL sql for MySQL --- django/db/backends/mysql/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py index 08e883d80c..dc74b2db2a 100644 --- a/django/db/backends/mysql/schema.py +++ b/django/db/backends/mysql/schema.py @@ -6,7 +6,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s" sql_alter_column_null = "MODIFY %(column)s %(type)s NULL" - sql_alter_column_not_null = "MODIFY %(column)s %(type)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL" sql_alter_column_type = "MODIFY %(column)s %(type)s" sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s" From 0354cecbfd0cbd4e7440d56332dbb4d20f6a2fb2 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 12:53:37 +0100 Subject: [PATCH 034/161] Fix nullability changing code --- django/db/backends/schema.py | 2 +- django/db/backends/sqlite3/schema.py | 2 +- tests/modeltests/schema/tests.py | 18 +++++++++++++++++- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 82fa160745..09d1202aad 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -495,7 +495,7 @@ class BaseDatabaseSchemaEditor(object): )) else: actions.append(( - self.sql_alter_column_null % { + self.sql_alter_column_not_null % { "column": self.quote_name(new_field.column), "type": new_type, }, diff --git 
a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index a1fd95fe8b..c1df0c7981 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -153,4 +153,4 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): self.quote_name(old_field.rel.through._meta.db_table), )) # Delete the old through table - self.delete_model(old_field.rel.through, force=True) + self.delete_model(old_field.rel.through) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 5fabe6e91d..9818b0451c 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -205,12 +205,28 @@ class SchemaTests(TestCase): Author._meta.get_field_by_name("name")[0], new_field, strict=True, - ) + ) editor.commit() # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(columns['name'][1][6], True) + # Change nullability again + new_field2 = TextField(null=False) + new_field2.set_attributes_from_name("name") + editor = connection.schema_editor() + editor.start() + editor.alter_field( + Author, + new_field, + new_field2, + strict=True, + ) + editor.commit() + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "TextField") + self.assertEqual(columns['name'][1][6], False) def test_rename(self): """ From 49dc1e7d28477534daa61a34df2f0308742287e4 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:15:08 +0100 Subject: [PATCH 035/161] Fix altering of indexes alongside uniques --- django/db/backends/schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 09d1202aad..960fd7035d 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -397,7 +397,7 @@ class BaseDatabaseSchemaEditor(object): }, ) # Removed an index? - if old_field.db_index and not new_field.db_index and not old_field.unique and not new_field.unique: + if old_field.db_index and not new_field.db_index and not old_field.unique and not (not new_field.unique and old_field.unique): # Find the index for this field index_names = self._constraint_names(model, [old_field.column], index=True) if strict and len(index_names) != 1: @@ -525,7 +525,7 @@ class BaseDatabaseSchemaEditor(object): } ) # Added an index? 
- if not old_field.db_index and new_field.db_index and not old_field.unique and not new_field.unique: + if not old_field.db_index and new_field.db_index and not new_field.unique and not (not old_field.unique and new_field.unique): self.execute( self.sql_create_index % { "table": self.quote_name(model._meta.db_table), From 3a338d00ec0d32d54faa0bf88409d02638eae60d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:17:38 +0100 Subject: [PATCH 036/161] Add comment to usage of m2m_reverse_field_name --- django/db/backends/schema.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 960fd7035d..5a5932abeb 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -591,6 +591,8 @@ class BaseDatabaseSchemaEditor(object): # Repoint the FK to the other side self.alter_field( new_field.rel.through, + # We need the field that points to the target model, so we can tell alter_field to change it - + # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) old_field.rel.through._meta.get_field_by_name(old_field.m2m_reverse_field_name())[0], new_field.rel.through._meta.get_field_by_name(new_field.m2m_reverse_field_name())[0], ) From d146b250aecd7adb13ad7b998cc60cc3a10fd0b6 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:18:14 +0100 Subject: [PATCH 037/161] Remove one of the last traces of South --- django/db/backends/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 5a5932abeb..08b53e7cb2 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -607,7 +607,7 @@ class BaseDatabaseSchemaEditor(object): '%s_%s' % (model._meta.db_table, BaseDatabaseCreation._digest(column_names[0])), self.connection.ops.max_name_length() ) - # Else generate the name for the index by South + # Else generate the name for the index using a different algorithm table_name = model._meta.db_table.replace('"', '').replace('.', '_') index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names)))) # If the index name is too long, truncate it From 0bcfc068b011c41aeed135649276106147d90d70 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:23:25 +0100 Subject: [PATCH 038/161] Add second shortener to create_index_name for very long columns --- django/db/backends/schema.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 08b53e7cb2..a9e069aa7b 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -1,3 +1,4 @@ +import hashlib from django.db.backends.creation import BaseDatabaseCreation from django.db.backends.util import truncate_name from django.utils.log import getLogger @@ -615,6 +616,9 @@ class BaseDatabaseSchemaEditor(object): if len(index_name) > self.connection.features.max_index_name_length: part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) + # If it's STILL too long, just hash it down + if len(index_name) > self.connection.features.max_index_name_length: + index_name = hashlib.md5(index_name).hexdigest()[:self.connection.features.max_index_name_length] return index_name def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None): From 
15c1920964d2ff9a820986232cac11ae2dea048d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:30:17 +0100 Subject: [PATCH 039/161] Only swallow table-does-not-exist errors in tests --- tests/modeltests/schema/tests.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index 9818b0451c..f679624538 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -20,6 +20,7 @@ class SchemaTests(TestCase): """ models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest] + no_table_strings = ["no such table", "unknown table", "does not exist"] # Utility functions @@ -57,8 +58,11 @@ class SchemaTests(TestCase): cursor.execute(connection.schema_editor().sql_delete_table % { "table": connection.ops.quote_name(field.rel.through._meta.db_table), }) - except DatabaseError: - connection.rollback() + except DatabaseError as e: + if any([s in str(e).lower() for s in self.no_table_strings]): + connection.rollback() + else: + raise else: connection.commit() # Then remove the main tables @@ -66,8 +70,11 @@ class SchemaTests(TestCase): cursor.execute(connection.schema_editor().sql_delete_table % { "table": connection.ops.quote_name(model._meta.db_table), }) - except DatabaseError: - connection.rollback() + except DatabaseError as e: + if any([s in str(e).lower() for s in self.no_table_strings]): + connection.rollback() + else: + raise else: connection.commit() connection.enable_constraint_checking() From b6784bee66d2c310318b6553fbb13f5707475efb Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:40:56 +0100 Subject: [PATCH 040/161] Use create_index_name for FK names --- django/db/backends/schema.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index a9e069aa7b..bbd4f8b23f 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -199,11 +199,7 @@ class BaseDatabaseSchemaEditor(object): to_column = field.rel.to._meta.get_field(field.rel.field_name).column self.deferred_sql.append( self.sql_create_fk % { - "name": '%s_refs_%s_%x' % ( - field.column, - to_column, - abs(hash((model._meta.db_table, to_table))) - ), + "name": self._create_index_name(model, [field.column], suffix="_fk_%s_%s" % (to_table, to_column)), "table": self.quote_name(model._meta.db_table), "column": self.quote_name(field.column), "to_table": self.quote_name(to_table), From a589fdff81ab36c57ff0a1003e60ee0dd55f3a88 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 24 Sep 2012 13:55:37 +0100 Subject: [PATCH 041/161] Fix PostgreSQL failing on a test --- tests/modeltests/schema/tests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/modeltests/schema/tests.py b/tests/modeltests/schema/tests.py index f679624538..6ac72b4c02 100644 --- a/tests/modeltests/schema/tests.py +++ b/tests/modeltests/schema/tests.py @@ -119,6 +119,7 @@ class SchemaTests(TestCase): DatabaseError, lambda: list(Author.objects.all()), ) + connection.rollback() @skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_fk(self): From 6e21a59402e1a90c16113e1fac8e4ff39b6914d3 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 19 Apr 2013 09:01:45 +0100 Subject: [PATCH 042/161] Fix schema editor interaction with new transactions --- django/db/backends/schema.py | 8 +++++--- tests/schema/tests.py | 2 +- 2 files changed, 6 insertions(+), 4 
deletions(-) diff --git a/django/db/backends/schema.py index 5e4eb6bf17..e4a923f2be 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -64,7 +64,9 @@ class BaseDatabaseSchemaEditor(object): Marks the start of a schema-altering run. """ self.deferred_sql = [] - self.connection.set_autocommit(False) + self.old_autocommit = self.connection.autocommit + if self.connection.autocommit: + self.connection.set_autocommit(False) def commit(self): """ @@ -73,7 +75,7 @@ class BaseDatabaseSchemaEditor(object): for sql in self.deferred_sql: self.execute(sql) self.connection.commit() - self.connection.set_autocommit(True) + self.connection.set_autocommit(self.old_autocommit) def rollback(self): """ @@ -82,7 +84,7 @@ class BaseDatabaseSchemaEditor(object): if not self.connection.features.can_rollback_ddl: raise RuntimeError("Cannot rollback schema changes on this backend") self.connection.rollback() - self.connection.set_autocommit(True) + self.connection.set_autocommit(self.old_autocommit) # Core utility functions diff --git a/tests/schema/tests.py b/tests/schema/tests.py index 72c0c07af5..bd4ae0db34 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -453,7 +453,7 @@ class SchemaTests(TransactionTestCase): Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") connection.rollback() - # Alter the slug field to be non-unique + # Alter the slug field to be unique new_new_field = SlugField(unique=True) new_new_field.set_attributes_from_name("slug") editor = connection.schema_editor() From ade34c44dae4f5cf9d51bf7f900bf06efa98ff12 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 9 May 2013 14:04:07 +0100 Subject: [PATCH 043/161] Improve error message for bad FK resolution --- django/db/models/fields/related.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index c92bdb6468..728d78ba52 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -958,6 +958,8 @@ class ForeignObject(RelatedField): def resolve_related_fields(self): if len(self.from_fields) < 1 or len(self.from_fields) != len(self.to_fields): raise ValueError('Foreign Object from and to fields must be the same non-zero length') + if isinstance(self.rel.to, basestring): + raise ValueError('Related model %r cannot be resolved' % self.rel.to) related_fields = [] for index in range(len(self.from_fields)): from_field_name = self.from_fields[index] From 941d23e54890bcc9e73734c5c1f3c82193fd97d6 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 9 May 2013 14:13:15 +0100 Subject: [PATCH 044/161] Whoops. Need to be good and use six.
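basestring only exists on Python 2, so the isinstance() check added in the previous patch would raise NameError under Python 3; six.string_types covers both interpreters. A minimal sketch of the compatibility check, assuming Django's bundled six (django.utils.six) is importable:

    from django.utils import six

    isinstance("auth.User", six.string_types)   # True on Python 2 and Python 3
    # On Python 2 this also matches unicode strings; basestring is gone on Python 3.

A value that is still a string at this point means the lazy reference was never resolved into a model class, which is what the new error message reports.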
--- django/db/models/fields/related.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 728d78ba52..7fc7a092fd 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -958,7 +958,7 @@ class ForeignObject(RelatedField): def resolve_related_fields(self): if len(self.from_fields) < 1 or len(self.from_fields) != len(self.to_fields): raise ValueError('Foreign Object from and to fields must be the same non-zero length') - if isinstance(self.rel.to, basestring): + if isinstance(self.rel.to, six.string_types): raise ValueError('Related model %r cannot be resolved' % self.rel.to) related_fields = [] for index in range(len(self.from_fields)): from_field_name = self.from_fields[index] From 104ad0504b4b123277b3f0e7c0be7fb9e84c2d72 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 9 May 2013 15:16:43 +0100 Subject: [PATCH 045/161] Split out a BaseAppCache, make AppCache borg again, add _meta.app_cache --- django/db/backends/sqlite3/schema.py | 15 ++- django/db/models/base.py | 20 ++-- django/db/models/loading.py | 151 ++++++++++++++------------- django/db/models/options.py | 8 +- tests/schema/models.py | 25 +++-- tests/schema/tests.py | 12 --- 6 files changed, 114 insertions(+), 117 deletions(-) diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index c1df0c7981..de32dfd893 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -1,6 +1,6 @@ from django.db.backends.schema import BaseDatabaseSchemaEditor -from django.db.models.loading import cache, default_cache, AppCache from django.db.models.fields.related import ManyToManyField +from django.db.models.loading import BaseAppCache class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): @@ -38,20 +38,19 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): for field in delete_fields: del body[field.name] del mapping[field.column] + # Work inside a new AppCache + app_cache = BaseAppCache() # Construct a new model for the new state meta_contents = { 'app_label': model._meta.app_label, 'db_table': model._meta.db_table + "__new", 'unique_together': model._meta.unique_together if override_uniques is None else override_uniques, + 'app_cache': app_cache, } meta = type("Meta", tuple(), meta_contents) body['Meta'] = meta body['__module__'] = model.__module__ - self.app_cache = AppCache() - cache.set_cache(self.app_cache) - cache.copy_from(default_cache) temp_model = type(model._meta.object_name, model.__bases__, body) - cache.set_cache(default_cache) # Create a new table with that format self.create_model(temp_model) # Copy data from the old table @@ -117,9 +116,9 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None or new_type is None: raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % ( - old_field, - new_field, - )) + old_field, + new_field, + )) # Alter by remaking table self._remake_table(model, alter_fields=[(old_field, new_field)]) diff --git a/django/db/models/base.py b/django/db/models/base.py index 8a87a63ffe..d6870b561a 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -19,7 +19,6 @@ from django.db.models.query_utils import DeferredAttribute, deferred_class_facto from django.db.models.deletion import Collector from django.db.models.options import Options from
django.db.models import signals -from django.db.models.loading import register_models, get_model from django.utils.translation import ugettext_lazy as _ from django.utils.functional import curry from django.utils.encoding import force_str, force_text @@ -134,7 +133,7 @@ class ModelBase(type): new_class._base_manager = new_class._base_manager._copy_to_model(new_class) # Bail out early if we have already created this class. - m = get_model(new_class._meta.app_label, name, + m = new_class._meta.app_cache.get_model(new_class._meta.app_label, name, seed_cache=False, only_installed=False) if m is not None: return m @@ -242,16 +241,13 @@ class ModelBase(type): new_class._prepare() - if new_class._meta.auto_register: - register_models(new_class._meta.app_label, new_class) - # Because of the way imports happen (recursively), we may or may not be - # the first time this model tries to register with the framework. There - # should only be one class for each model, so we always return the - # registered version. - return get_model(new_class._meta.app_label, name, - seed_cache=False, only_installed=False) - else: - return new_class + new_class._meta.app_cache.register_models(new_class._meta.app_label, new_class) + # Because of the way imports happen (recursively), we may or may not be + # the first time this model tries to register with the framework. There + # should only be one class for each model, so we always return the + # registered version. + return new_class._meta.app_cache.get_model(new_class._meta.app_label, name, + seed_cache=False, only_installed=False) def copy_managers(cls, base_managers): # This is in-place sorting of an Options attribute, but that's fine. diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 412bd76e0d..61273e512a 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -16,57 +16,52 @@ __all__ = ('get_apps', 'get_app', 'get_models', 'get_model', 'register_models', 'load_app', 'app_cache_ready') -class AppCache(object): +def _initialize(): + """ + Returns a dictionary to be used as the initial value of the + [shared] state of the app cache. + """ + return dict( + # Keys of app_store are the model modules for each application. + app_store = SortedDict(), + + # Mapping of installed app_labels to model modules for that app. + app_labels = {}, + + # Mapping of app_labels to a dictionary of model names to model code. + # May contain apps that are not installed. + app_models = SortedDict(), + + # Mapping of app_labels to errors raised when trying to import the app. + app_errors = {}, + + # -- Everything below here is only used when populating the cache -- + loaded = False, + handled = {}, + postponed = [], + nesting_level = 0, + _get_models_cache = {}, + ) + + +class BaseAppCache(object): """ A cache that stores installed applications and their models. Used to provide reverse-relations and for app introspection (e.g. admin). + + This provides the base (non-Borg) AppCache class - the AppCache + subclass adds borg-like behaviour for the few cases where it's needed, + and adds the code that auto-loads from INSTALLED_APPS. """ def __init__(self): - # Keys of app_store are the model modules for each application. - self.app_store = SortedDict() - # Mapping of installed app_labels to model modules for that app. - self.app_labels = {} - # Mapping of app_labels to a dictionary of model names to model code. - # May contain apps that are not installed. 
- self.app_models = SortedDict() - # Mapping of app_labels to errors raised when trying to import the app. - self.app_errors = {} - # -- Everything below here is only used when populating the cache -- - self.loaded = False - self.handled = {} - self.postponed = [] - self.nesting_level = 0 - self._get_models_cache = {} + self.__dict__ = _initialize() def _populate(self): """ - Fill in all the cache information. This method is threadsafe, in the - sense that every caller will see the same state upon return, and if the - cache is already initialised, it does no work. + Stub method - this base class does no auto-loading. """ - if self.loaded: - return - # Note that we want to use the import lock here - the app loading is - # in many cases initiated implicitly by importing, and thus it is - # possible to end up in deadlock when one thread initiates loading - # without holding the importer lock and another thread then tries to - # import something which also launches the app loading. For details of - # this situation see #18251. - imp.acquire_lock() - try: - if self.loaded: - return - for app_name in settings.INSTALLED_APPS: - if app_name in self.handled: - continue - self.load_app(app_name, True) - if not self.nesting_level: - for app_name in self.postponed: - self.load_app(app_name) - self.loaded = True - finally: - imp.release_lock() + self.loaded = True def _label_for(self, app_mod): """ @@ -253,42 +248,58 @@ class AppCache(object): self.register_models(app_label, *models.values()) -class AppCacheWrapper(object): +class AppCache(BaseAppCache): """ - As AppCache can be changed at runtime, this class wraps it so any - imported references to 'cache' are changed along with it. + A cache that stores installed applications and their models. Used to + provide reverse-relations and for app introspection (e.g. admin). + + Borg version of the BaseAppCache class. """ - def __init__(self, cache): - self._cache = cache + __shared_state = _initialize() - def set_cache(self, cache): - self._cache = cache + def __init__(self): + self.__dict__ = self.__shared_state - def __getattr__(self, attr): - if attr in ("_cache", "set_cache"): - return self.__dict__[attr] - return getattr(self._cache, attr) - - def __setattr__(self, attr, value): - if attr in ("_cache", "set_cache"): - self.__dict__[attr] = value + def _populate(self): + """ + Fill in all the cache information. This method is threadsafe, in the + sense that every caller will see the same state upon return, and if the + cache is already initialised, it does no work. + """ + if self.loaded: return - return setattr(self._cache, attr, value) + # Note that we want to use the import lock here - the app loading is + # in many cases initiated implicitly by importing, and thus it is + # possible to end up in deadlock when one thread initiates loading + # without holding the importer lock and another thread then tries to + # import something which also launches the app loading. For details of + # this situation see #18251. + imp.acquire_lock() + try: + if self.loaded: + return + for app_name in settings.INSTALLED_APPS: + if app_name in self.handled: + continue + self.load_app(app_name, True) + if not self.nesting_level: + for app_name in self.postponed: + self.load_app(app_name) + self.loaded = True + finally: + imp.release_lock() - -default_cache = AppCache() -cache = AppCacheWrapper(default_cache) +cache = AppCache() # These methods were always module level, so are kept that way for backwards -# compatibility. 
These are wrapped with lambdas to stop the attribute -# access resolving directly to a method on a single cache instance. -get_apps = lambda *x, **y: cache.get_apps(*x, **y) -get_app = lambda *x, **y: cache.get_app(*x, **y) -get_app_errors = lambda *x, **y: cache.get_app_errors(*x, **y) -get_models = lambda *x, **y: cache.get_models(*x, **y) -get_model = lambda *x, **y: cache.get_model(*x, **y) -register_models = lambda *x, **y: cache.register_models(*x, **y) -load_app = lambda *x, **y: cache.load_app(*x, **y) -app_cache_ready = lambda *x, **y: cache.app_cache_ready(*x, **y) +# compatibility. +get_apps = cache.get_apps +get_app = cache.get_app +get_app_errors = cache.get_app_errors +get_models = cache.get_models +get_model = cache.get_model +register_models = cache.register_models +load_app = cache.load_app +app_cache_ready = cache.app_cache_ready diff --git a/django/db/models/options.py b/django/db/models/options.py index a878fe28c4..7ca2f1c321 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -8,7 +8,7 @@ from django.conf import settings from django.db.models.fields.related import ManyToManyRel from django.db.models.fields import AutoField, FieldDoesNotExist from django.db.models.fields.proxy import OrderWrt -from django.db.models.loading import get_models, app_cache_ready +from django.db.models.loading import get_models, app_cache_ready, cache from django.utils import six from django.utils.functional import cached_property from django.utils.datastructures import SortedDict @@ -21,7 +21,7 @@ get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]| DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering', 'unique_together', 'permissions', 'get_latest_by', 'order_with_respect_to', 'app_label', 'db_tablespace', - 'abstract', 'managed', 'proxy', 'swappable', 'auto_created', 'index_together', 'auto_register') + 'abstract', 'managed', 'proxy', 'swappable', 'auto_created', 'index_together', 'app_cache') @python_2_unicode_compatible @@ -70,8 +70,8 @@ class Options(object): # from *other* models. Needed for some admin checks. Internal use only. self.related_fkey_lookups = [] - # If we should auto-register with the AppCache - self.auto_register = True + # A custom AppCache to use, if you're making a separate model set. + self.app_cache = cache def contribute_to_class(self, cls, name): from django.db import connection diff --git a/tests/schema/models.py b/tests/schema/models.py index fdf950860c..a160b9aaa8 100644 --- a/tests/schema/models.py +++ b/tests/schema/models.py @@ -1,8 +1,11 @@ from django.db import models +from django.db.models.loading import BaseAppCache # Because we want to test creation and deletion of these as separate things, -# these models are all marked as unmanaged and only marked as managed while -# a schema test is running. +# these models are all inserted into a separate AppCache so the main test +# runner doesn't syncdb them. 
+ +new_app_cache = BaseAppCache() class Author(models.Model): @@ -10,24 +13,24 @@ class Author(models.Model): height = models.PositiveIntegerField(null=True, blank=True) class Meta: - auto_register = False + app_cache = new_app_cache class AuthorWithM2M(models.Model): name = models.CharField(max_length=255) class Meta: - auto_register = False + app_cache = new_app_cache class Book(models.Model): author = models.ForeignKey(Author) title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() - #tags = models.ManyToManyField("Tag", related_name="books") + # tags = models.ManyToManyField("Tag", related_name="books") class Meta: - auto_register = False + app_cache = new_app_cache class BookWithM2M(models.Model): @@ -37,7 +40,7 @@ class BookWithM2M(models.Model): tags = models.ManyToManyField("Tag", related_name="books") class Meta: - auto_register = False + app_cache = new_app_cache class BookWithSlug(models.Model): @@ -47,7 +50,7 @@ class BookWithSlug(models.Model): slug = models.CharField(max_length=20, unique=True) class Meta: - auto_register = False + app_cache = new_app_cache db_table = "schema_book" @@ -56,7 +59,7 @@ class Tag(models.Model): slug = models.SlugField(unique=True) class Meta: - auto_register = False + app_cache = new_app_cache class TagUniqueRename(models.Model): @@ -64,7 +67,7 @@ class TagUniqueRename(models.Model): slug2 = models.SlugField(unique=True) class Meta: - auto_register = False + app_cache = new_app_cache db_table = "schema_tag" @@ -73,5 +76,5 @@ class UniqueTest(models.Model): slug = models.SlugField(unique=False) class Meta: - auto_register = False + app_cache = new_app_cache unique_together = ["year", "slug"] diff --git a/tests/schema/tests.py b/tests/schema/tests.py index bd4ae0db34..85e1dfc9ea 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -1,12 +1,10 @@ from __future__ import absolute_import -import copy import datetime from django.test import TransactionTestCase from django.utils.unittest import skipUnless from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey -from django.db.models.loading import cache, default_cache, AppCache from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest @@ -27,14 +25,6 @@ class SchemaTests(TransactionTestCase): def setUp(self): # Make sure we're in manual transaction mode connection.set_autocommit(False) - # The unmanaged models need to be removed after the test in order to - # prevent bad interactions with the flush operation in other tests. - self.app_cache = AppCache() - cache.set_cache(self.app_cache) - cache.copy_from(default_cache) - for model in self.models: - cache.register_models("schema", model) - model._prepare() def tearDown(self): # Delete any tables made for our models @@ -43,8 +33,6 @@ class SchemaTests(TransactionTestCase): # Rollback anything that may have happened connection.rollback() connection.set_autocommit(True) - cache.set_cache(default_cache) - cache.app_models['schema'] = {} # One M2M gets left in the old cache def delete_tables(self): "Deletes all model tables for our models for a clean test environment" From 75bf394d86b802de670502400e9ab7eca2808935 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 9 May 2013 15:59:26 +0100 Subject: [PATCH 046/161] Rest of the _meta.app_cache stuff. Schema tests work now. 
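The new app_cache tests further down exercise this Meta option directly; as a minimal sketch of the pattern (the app label and model name here are invented for illustration), a model is kept out of the main cache by pointing Meta.app_cache at its own BaseAppCache:

    from django.db import models
    from django.db.models.loading import BaseAppCache, cache

    my_cache = BaseAppCache()

    class Standalone(models.Model):
        name = models.CharField(max_length=255)

        class Meta:
            app_label = "example"
            app_cache = my_cache

    my_cache.get_model("example", "Standalone")   # the class defined above
    cache.get_model("example", "Standalone")      # None; not registered in the main cache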
--- django/db/models/fields/related.py | 11 ++-- django/db/models/loading.py | 80 ++++++++++++++++-------------- tests/app_cache/__init__.py | 0 tests/app_cache/models.py | 17 +++++++ tests/app_cache/tests.py | 50 +++++++++++++++++++ 5 files changed, 115 insertions(+), 43 deletions(-) create mode 100644 tests/app_cache/__init__.py create mode 100644 tests/app_cache/models.py create mode 100644 tests/app_cache/tests.py diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 7fc7a092fd..c30ba03489 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -2,7 +2,7 @@ from operator import attrgetter from django.db import connection, connections, router from django.db.backends import util -from django.db.models import signals, get_model +from django.db.models import signals from django.db.models.fields import (AutoField, Field, IntegerField, PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist) from django.db.models.related import RelatedObject, PathInfo @@ -18,8 +18,6 @@ from django import forms RECURSIVE_RELATIONSHIP_CONSTANT = 'self' -pending_lookups = {} - def add_lazy_relation(cls, field, relation, operation): """ @@ -70,14 +68,14 @@ def add_lazy_relation(cls, field, relation, operation): # string right away. If get_model returns None, it means that the related # model isn't loaded yet, so we need to pend the relation until the class # is prepared. - model = get_model(app_label, model_name, + model = cls._meta.app_cache.get_model(app_label, model_name, seed_cache=False, only_installed=False) if model: operation(field, model, cls) else: key = (app_label, model_name) value = (cls, field, operation) - pending_lookups.setdefault(key, []).append(value) + cls._meta.app_cache.pending_lookups.setdefault(key, []).append(value) def do_pending_lookups(sender, **kwargs): @@ -85,7 +83,7 @@ def do_pending_lookups(sender, **kwargs): Handle any pending relations to the sending model. Sent from class_prepared. """ key = (sender._meta.app_label, sender.__name__) - for cls, field, operation in pending_lookups.pop(key, []): + for cls, field, operation in sender._meta.app_cache.pending_lookups.pop(key, []): operation(field, sender, cls) signals.class_prepared.connect(do_pending_lookups) @@ -1330,6 +1328,7 @@ def create_many_to_many_intermediary_model(field, klass): 'unique_together': (from_, to), 'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to}, 'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to}, + 'app_cache': field.model._meta.app_cache, }) # Construct and return the new class. return type(str(name), (models.Model,), { diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 61273e512a..075cae4c61 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -35,7 +35,11 @@ def _initialize(): # Mapping of app_labels to errors raised when trying to import the app. app_errors = {}, + # Pending lookups for lazy relations + pending_lookups = {}, + # -- Everything below here is only used when populating the cache -- + loads_installed = True, loaded = False, handled = {}, postponed = [], @@ -56,12 +60,44 @@ class BaseAppCache(object): def __init__(self): self.__dict__ = _initialize() + # This stops _populate loading from INSTALLED_APPS and ignores the + # only_installed arguments to get_model[s] + self.loads_installed = False def _populate(self): """ Stub method - this base class does no auto-loading. 
""" - self.loaded = True + """ + Fill in all the cache information. This method is threadsafe, in the + sense that every caller will see the same state upon return, and if the + cache is already initialised, it does no work. + """ + if self.loaded: + return + if not self.loads_installed: + self.loaded = True + return + # Note that we want to use the import lock here - the app loading is + # in many cases initiated implicitly by importing, and thus it is + # possible to end up in deadlock when one thread initiates loading + # without holding the importer lock and another thread then tries to + # import something which also launches the app loading. For details of + # this situation see #18251. + imp.acquire_lock() + try: + if self.loaded: + return + for app_name in settings.INSTALLED_APPS: + if app_name in self.handled: + continue + self.load_app(app_name, True) + if not self.nesting_level: + for app_name in self.postponed: + self.load_app(app_name) + self.loaded = True + finally: + imp.release_lock() def _label_for(self, app_mod): """ @@ -169,12 +205,15 @@ class BaseAppCache(object): By default, models that aren't part of installed apps will *not* be included in the list of models. However, if you specify - only_installed=False, they will be. + only_installed=False, they will be. If you're using a non-default + AppCache, this argument does nothing - all models will be included. By default, models that have been swapped out will *not* be included in the list of models. However, if you specify include_swapped, they will be. """ + if not self.loads_installed: + only_installed = False cache_key = (app_mod, include_auto_created, include_deferred, only_installed, include_swapped) try: return self._get_models_cache[cache_key] @@ -212,6 +251,8 @@ class BaseAppCache(object): Returns None if no model is found. """ + if not self.loads_installed: + only_installed = False if seed_cache: self._populate() if only_installed and app_label not in self.app_labels: @@ -241,12 +282,6 @@ class BaseAppCache(object): model_dict[model_name] = model self._get_models_cache.clear() - def copy_from(self, other): - "Registers all models from the other cache into this one" - cache._populate() - for app_label, models in other.app_models.items(): - self.register_models(app_label, *models.values()) - class AppCache(BaseAppCache): """ @@ -261,35 +296,6 @@ class AppCache(BaseAppCache): def __init__(self): self.__dict__ = self.__shared_state - def _populate(self): - """ - Fill in all the cache information. This method is threadsafe, in the - sense that every caller will see the same state upon return, and if the - cache is already initialised, it does no work. - """ - if self.loaded: - return - # Note that we want to use the import lock here - the app loading is - # in many cases initiated implicitly by importing, and thus it is - # possible to end up in deadlock when one thread initiates loading - # without holding the importer lock and another thread then tries to - # import something which also launches the app loading. For details of - # this situation see #18251. 
- imp.acquire_lock() - try: - if self.loaded: - return - for app_name in settings.INSTALLED_APPS: - if app_name in self.handled: - continue - self.load_app(app_name, True) - if not self.nesting_level: - for app_name in self.postponed: - self.load_app(app_name) - self.loaded = True - finally: - imp.release_lock() - cache = AppCache() diff --git a/tests/app_cache/__init__.py b/tests/app_cache/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/app_cache/models.py b/tests/app_cache/models.py new file mode 100644 index 0000000000..1b4d33c2f9 --- /dev/null +++ b/tests/app_cache/models.py @@ -0,0 +1,17 @@ +from django.db import models +from django.db.models.loading import BaseAppCache + +# We're testing app cache presence on load, so this is handy. + +new_app_cache = BaseAppCache() + + +class TotallyNormal(models.Model): + name = models.CharField(max_length=255) + + +class SoAlternative(models.Model): + name = models.CharField(max_length=255) + + class Meta: + app_cache = new_app_cache diff --git a/tests/app_cache/tests.py b/tests/app_cache/tests.py new file mode 100644 index 0000000000..42598d90c7 --- /dev/null +++ b/tests/app_cache/tests.py @@ -0,0 +1,50 @@ +from __future__ import absolute_import +import datetime +from django.test import TransactionTestCase +from django.utils.unittest import skipUnless +from django.db import connection, DatabaseError, IntegrityError +from django.db.models.fields import IntegerField, TextField, CharField, SlugField +from django.db.models.fields.related import ManyToManyField, ForeignKey +from django.db.models.loading import cache, BaseAppCache +from django.db import models +from .models import TotallyNormal, SoAlternative, new_app_cache + + +class AppCacheTests(TransactionTestCase): + """ + Tests the AppCache borg and non-borg versions + """ + + def test_models_py(self): + """ + Tests that the models in the models.py file were loaded correctly. + """ + + self.assertEqual(cache.get_model("app_cache", "TotallyNormal"), TotallyNormal) + self.assertEqual(cache.get_model("app_cache", "SoAlternative"), None) + + self.assertEqual(new_app_cache.get_model("app_cache", "TotallyNormal"), None) + self.assertEqual(new_app_cache.get_model("app_cache", "SoAlternative"), SoAlternative) + + def test_dynamic_load(self): + """ + Makes a new model at runtime and ensures it goes into the right place. + """ + old_models = cache.get_models(cache.get_app("app_cache")) + # Construct a new model in a new app cache + body = {} + new_app_cache = BaseAppCache() + meta_contents = { + 'app_label': "app_cache", + 'app_cache': new_app_cache, + } + meta = type("Meta", tuple(), meta_contents) + body['Meta'] = meta + body['__module__'] = TotallyNormal.__module__ + temp_model = type("SouthPonies", (models.Model,), body) + # Make sure it appeared in the right place! 
+ self.assertEqual( + old_models, + cache.get_models(cache.get_app("app_cache")), + ) + self.assertEqual(new_app_cache.get_model("app_cache", "SouthPonies"), temp_model) From f6801a234fb9460eac80d146534ac340e178c466 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 May 2013 12:52:04 +0100 Subject: [PATCH 047/161] Adding a dependency graph class and tests --- django/db/migrations/__init__.py | 0 django/db/migrations/graph.py | 96 +++++++++++++++++++++++++++++++ django/utils/datastructures.py | 30 ++++++++++ tests/migrations/__init__.py | 0 tests/migrations/models.py | 0 tests/migrations/tests.py | 98 ++++++++++++++++++++++++++++++++ 6 files changed, 224 insertions(+) create mode 100644 django/db/migrations/__init__.py create mode 100644 django/db/migrations/graph.py create mode 100644 tests/migrations/__init__.py create mode 100644 tests/migrations/models.py create mode 100644 tests/migrations/tests.py diff --git a/django/db/migrations/__init__.py b/django/db/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py new file mode 100644 index 0000000000..bab3d2a49e --- /dev/null +++ b/django/db/migrations/graph.py @@ -0,0 +1,96 @@ +from django.utils.datastructures import SortedSet + + +class MigrationsGraph(object): + """ + Represents the digraph of all migrations in a project. + + Each migration is a node, and each dependency is an edge. There are + no implicit dependencies between numbered migrations - the numbering is + merely a convention to aid file listing. Every new numbered migration + has a declared dependency to the previous number, meaning that VCS + branch merges can be detected and resolved. + + Migration files can be marked as replacing another set of migrations - + this is to support the "squash" feature. The graph handler isn't responsible + for these; instead, the code to load them in here should examine the + migration files and if the replaced migrations are all either unapplied + or not present, it should ignore the replaced ones, load in just the + replacing migration, and repoint any dependencies that pointed to the + replaced migrations to point to the replacing one. + + A node should be a tuple: (applabel, migration_name) - but the code + here doesn't really care. + """ + + def __init__(self): + self.nodes = {} + self.dependencies = {} + self.dependents = {} + + def add_node(self, node, implementation): + self.nodes[node] = implementation + + def add_dependency(self, child, parent): + self.nodes[child] = None + self.nodes[parent] = None + self.dependencies.setdefault(child, set()).add(parent) + self.dependents.setdefault(parent, set()).add(child) + + def forwards_plan(self, node): + """ + Given a node, returns a list of which previous nodes (dependencies) + must be applied, ending with the node itself. + This is the list you would follow if applying the migrations to + a database. + """ + if node not in self.nodes: + raise ValueError("Node %r not a valid node" % node) + return self.dfs(node, lambda x: self.dependencies.get(x, set())) + + def backwards_plan(self, node): + """ + Given a node, returns a list of which dependent nodes (dependents) + must be unapplied, ending with the node itself. + This is the list you would follow if removing the migrations from + a database.
+ """ + if node not in self.nodes: + raise ValueError("Node %r not a valid node" % node) + return self.dfs(node, lambda x: self.dependents.get(x, set())) + + def dfs(self, start, get_children): + """ + Dynamic programming based depth first search, for finding dependencies. + """ + cache = {} + def _dfs(start, get_children, path): + # If we already computed this, use that (dynamic programming) + if (start, get_children) in cache: + return cache[(start, get_children)] + # If we've traversed here before, that's a circular dep + if start in path: + raise CircularDependencyException(path[path.index(start):] + [start]) + # Build our own results list, starting with us + results = [] + results.append(start) + # We need to add to results all the migrations this one depends on + children = sorted(get_children(start)) + path.append(start) + for n in children: + results = _dfs(n, get_children, path) + results + path.pop() + # Use SortedSet to ensure only one instance of each result + results = list(SortedSet(results)) + # Populate DP cache + cache[(start, get_children)] = results + # Done! + return results + return _dfs(start, get_children, []) + + +class CircularDependencyException(Exception): + """ + Raised when there's an impossible-to-resolve circular dependency. + """ + pass diff --git a/django/utils/datastructures.py b/django/utils/datastructures.py index 64c218fe43..ec68892870 100644 --- a/django/utils/datastructures.py +++ b/django/utils/datastructures.py @@ -252,6 +252,36 @@ class SortedDict(dict): super(SortedDict, self).clear() self.keyOrder = [] +class SortedSet(object): + """ + A set which keeps the ordering of the inserted items. + Currently backs onto SortedDict. + """ + + def __init__(self, iterable=None): + self.dict = SortedDict(((x, None) for x in iterable) if iterable else []) + + def add(self, item): + self.dict[item] = None + + def remove(self, item): + del self.dict[item] + + def discard(self, item): + try: + self.remove(item) + except KeyError: + pass + + def __iter__(self): + return iter(self.dict.keys()) + + def __contains__(self, item): + return item in self.dict + + def __nonzero__(self): + return bool(self.dict) + class MultiValueDictKeyError(KeyError): pass diff --git a/tests/migrations/__init__.py b/tests/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/migrations/models.py b/tests/migrations/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/migrations/tests.py b/tests/migrations/tests.py new file mode 100644 index 0000000000..a330330c17 --- /dev/null +++ b/tests/migrations/tests.py @@ -0,0 +1,98 @@ +from django.test import TransactionTestCase +from django.db.migrations.graph import MigrationsGraph, CircularDependencyException + + +class GraphTests(TransactionTestCase): + """ + Tests the digraph structure. 
+ """ + + def test_simple_graph(self): + """ + Tests a basic dependency graph: + + app_a: 0001 <-- 0002 <--- 0003 <-- 0004 + / + app_b: 0001 <-- 0002 <-/ + """ + # Build graph + graph = MigrationsGraph() + graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) + graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) + graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) + graph.add_dependency(("app_a", "0003"), ("app_b", "0002")) + graph.add_dependency(("app_b", "0002"), ("app_b", "0001")) + # Test root migration case + self.assertEqual( + graph.forwards_plan(("app_a", "0001")), + [('app_a', '0001')], + ) + # Test branch B only + self.assertEqual( + graph.forwards_plan(("app_b", "0002")), + [("app_b", "0001"), ("app_b", "0002")], + ) + # Test whole graph + self.assertEqual( + graph.forwards_plan(("app_a", "0004")), + [('app_b', '0001'), ('app_b', '0002'), ('app_a', '0001'), ('app_a', '0002'), ('app_a', '0003'), ('app_a', '0004')], + ) + # Test reverse to b:0002 + self.assertEqual( + graph.backwards_plan(("app_b", "0002")), + [('app_a', '0004'), ('app_a', '0003'), ('app_b', '0002')], + ) + + def test_complex_graph(self): + """ + Tests a complex dependency graph: + + app_a: 0001 <-- 0002 <--- 0003 <-- 0004 + \ \ / / + app_b: 0001 <-\ 0002 <-X / + \ \ / + app_c: \ 0001 <-- 0002 <- + """ + # Build graph + graph = MigrationsGraph() + graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) + graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) + graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) + graph.add_dependency(("app_a", "0003"), ("app_b", "0002")) + graph.add_dependency(("app_b", "0002"), ("app_b", "0001")) + graph.add_dependency(("app_a", "0004"), ("app_c", "0002")) + graph.add_dependency(("app_c", "0002"), ("app_c", "0001")) + graph.add_dependency(("app_c", "0001"), ("app_b", "0001")) + graph.add_dependency(("app_c", "0002"), ("app_a", "0002")) + # Test branch C only + self.assertEqual( + graph.forwards_plan(("app_c", "0002")), + [('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002')], + ) + # Test whole graph + self.assertEqual( + graph.forwards_plan(("app_a", "0004")), + [('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002'), ('app_b', '0002'), ('app_a', '0003'), ('app_a', '0004')], + ) + # Test reverse to b:0001 + self.assertEqual( + graph.backwards_plan(("app_b", "0001")), + [('app_a', '0004'), ('app_c', '0002'), ('app_c', '0001'), ('app_a', '0003'), ('app_b', '0002'), ('app_b', '0001')], + ) + + def test_circular_graph(self): + """ + Tests a circular dependency graph. 
+ """ + # Build graph + graph = MigrationsGraph() + graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) + graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) + graph.add_dependency(("app_a", "0001"), ("app_b", "0002")) + graph.add_dependency(("app_b", "0002"), ("app_b", "0001")) + graph.add_dependency(("app_b", "0001"), ("app_a", "0003")) + # Test whole graph + self.assertRaises( + CircularDependencyException, + graph.forwards_plan, ("app_a", "0003"), + ) From 9ce83546720b9536c02817e802c9376eb74f811d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 May 2013 16:00:55 +0100 Subject: [PATCH 048/161] First phase of loading migrations from disk --- django/db/migrations/__init__.py | 1 + django/db/migrations/graph.py | 11 +- django/db/migrations/loader.py | 128 ++++++++++++++++++++ django/db/migrations/migration.py | 30 +++++ django/db/migrations/recorder.py | 64 ++++++++++ tests/migrations/migrations/0001_initial.py | 5 + tests/migrations/migrations/0002_second.py | 6 + tests/migrations/migrations/__init__.py | 0 tests/migrations/tests.py | 29 ++++- 9 files changed, 265 insertions(+), 9 deletions(-) create mode 100644 django/db/migrations/loader.py create mode 100644 django/db/migrations/migration.py create mode 100644 django/db/migrations/recorder.py create mode 100644 tests/migrations/migrations/0001_initial.py create mode 100644 tests/migrations/migrations/0002_second.py create mode 100644 tests/migrations/migrations/__init__.py diff --git a/django/db/migrations/__init__.py b/django/db/migrations/__init__.py index e69de29bb2..154e728341 100644 --- a/django/db/migrations/__init__.py +++ b/django/db/migrations/__init__.py @@ -0,0 +1 @@ +from .migration import Migration diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index bab3d2a49e..08481869f4 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -1,7 +1,7 @@ from django.utils.datastructures import SortedSet -class MigrationsGraph(object): +class MigrationGraph(object): """ Represents the digraph of all migrations in a project. @@ -19,7 +19,7 @@ class MigrationsGraph(object): replacing migration, and repoint any dependencies that pointed to the replaced migrations to point to the replacing one. - A node should be a tuple: (applabel, migration_name) - but the code + A node should be a tuple: (app_path, migration_name) - but the code here doesn't really care. """ @@ -70,7 +70,7 @@ class MigrationsGraph(object): return cache[(start, get_children)] # If we've traversed here before, that's a circular dep if start in path: - raise CircularDependencyException(path[path.index(start):] + [start]) + raise CircularDependencyError(path[path.index(start):] + [start]) # Build our own results list, starting with us results = [] results.append(start) @@ -88,8 +88,11 @@ class MigrationsGraph(object): return results return _dfs(start, get_children, []) + def __str__(self): + return "Graph: %s nodes, %s edges" % (len(self.nodes), sum(len(x) for x in self.dependencies.values())) -class CircularDependencyException(Exception): + +class CircularDependencyError(Exception): """ Raised when there's an impossible-to-resolve circular dependency. 
""" diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py new file mode 100644 index 0000000000..4d191714cb --- /dev/null +++ b/django/db/migrations/loader.py @@ -0,0 +1,128 @@ +import os +from django.utils.importlib import import_module +from django.db.models.loading import cache +from django.db.migrations.recorder import MigrationRecorder +from django.db.migrations.graph import MigrationGraph + + +class MigrationLoader(object): + """ + Loads migration files from disk, and their status from the database. + + Migration files are expected to live in the "migrations" directory of + an app. Their names are entirely unimportant from a code perspective, + but will probably follow the 1234_name.py convention. + + On initialisation, this class will scan those directories, and open and + read the python files, looking for a class called Migration, which should + inherit from django.db.migrations.Migration. See + django.db.migrations.migration for what that looks like. + + Some migrations will be marked as "replacing" another set of migrations. + These are loaded into a separate set of migrations away from the main ones. + If all the migrations they replace are either unapplied or missing from + disk, then they are injected into the main set, replacing the named migrations. + Any dependency pointers to the replaced migrations are re-pointed to the + new migration. + + This does mean that this class MUST also talk to the database as well as + to disk, but this is probably fine. We're already not just operating + in memory. + """ + + def __init__(self, connection): + self.connection = connection + self.disk_migrations = None + self.applied_migrations = None + + def load_disk(self): + """ + Loads the migrations from all INSTALLED_APPS from disk. + """ + self.disk_migrations = {} + for app in cache.get_apps(): + # Get the migrations module directory + module_name = ".".join(app.__name__.split(".")[:-1] + ["migrations"]) + app_label = module_name.split(".")[-2] + try: + module = import_module(module_name) + except ImportError as e: + # I hate doing this, but I don't want to squash other import errors. + # Might be better to try a directory check directly. + if "No module named migrations" in str(e): + continue + directory = os.path.dirname(module.__file__) + # Scan for .py[c|o] files + migration_names = set() + for name in os.listdir(directory): + if name.endswith(".py") or name.endswith(".pyc") or name.endswith(".pyo"): + import_name = name.rsplit(".", 1)[0] + if import_name[0] not in "_.~": + migration_names.add(import_name) + # Load them + for migration_name in migration_names: + migration_module = import_module("%s.%s" % (module_name, migration_name)) + if not hasattr(migration_module, "Migration"): + raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_label)) + self.disk_migrations[app_label, migration_name] = migration_module.Migration + + def build_graph(self): + """ + Builds a migration dependency graph using both the disk and database. 
+ """ + # Make sure we have the disk data + if self.disk_migrations is None: + self.load_disk() + # And the database data + if self.applied_migrations is None: + recorder = MigrationRecorder(self.connection) + self.applied_migrations = recorder.applied_migrations() + # Do a first pass to separate out replacing and non-replacing migrations + normal = {} + replacing = {} + for key, migration in self.disk_migrations.items(): + if migration.replaces: + replacing[key] = migration + else: + normal[key] = migration + # Calculate reverse dependencies - i.e., for each migration, what depends on it? + # This is just for dependency re-pointing when applying replacements, + # so we ignore run_before here. + reverse_dependencies = {} + for key, migration in normal.items(): + for parent in migration.dependencies: + reverse_dependencies.setdefault(parent, set()).add(key) + # Carry out replacements if we can - that is, if all replaced migrations + # are either unapplied or missing. + for key, migration in replacing.items(): + # Do the check + can_replace = True + for target in migration.replaces: + if target in self.applied_migrations: + can_replace = False + break + if not can_replace: + continue + # Alright, time to replace. Step through the replaced migrations + # and remove, repointing dependencies if needs be. + for replaced in migration.replaces: + if replaced in normal: + del normal[replaced] + for child_key in reverse_dependencies.get(replaced, set()): + normal[child_key].dependencies.remove(replaced) + normal[child_key].dependencies.append(key) + normal[key] = migration + # Finally, make a graph and load everything into it + graph = MigrationGraph() + for key, migration in normal.items(): + graph.add_node(key, migration) + for parent in migration.dependencies: + graph.add_dependency(key, parent) + return graph + + +class BadMigrationError(Exception): + """ + Raised when there's a bad migration (unreadable/bad format/etc.) + """ + pass diff --git a/django/db/migrations/migration.py b/django/db/migrations/migration.py new file mode 100644 index 0000000000..afbcf65297 --- /dev/null +++ b/django/db/migrations/migration.py @@ -0,0 +1,30 @@ +class Migration(object): + """ + The base class for all migrations. + + Migration files will import this from django.db.migrations.Migration + and subclass it as a class called Migration. It will have one or more + of the following attributes: + + - operations: A list of Operation instances, probably from django.db.migrations.operations + - dependencies: A list of tuples of (app_path, migration_name) + - run_before: A list of tuples of (app_path, migration_name) + - replaces: A list of migration_names + """ + + # Operations to apply during this migration, in order. + operations = [] + + # Other migrations that should be run before this migration. + # Should be a list of (app, migration_name). + dependencies = [] + + # Other migrations that should be run after this one (i.e. have + # this migration added to their dependencies). Useful to make third-party + # apps' migrations run after your AUTH_USER replacement, for example. + run_before = [] + + # Migration names in this app that this migration replaces. If this is + # non-empty, this migration will only be applied if all these migrations + # are not applied. 
+ replaces = [] diff --git a/django/db/migrations/recorder.py b/django/db/migrations/recorder.py new file mode 100644 index 0000000000..6fb927adef --- /dev/null +++ b/django/db/migrations/recorder.py @@ -0,0 +1,64 @@ +import datetime +from django.db import models +from django.db.models.loading import BaseAppCache + + +class MigrationRecorder(object): + """ + Deals with storing migration records in the database. + + Because this table is actually itself used for dealing with model + creation, it's the one thing we can't do normally via syncdb or migrations. + We manually handle table creation/schema updating (using schema backend) + and then have a floating model to do queries with. + + If a migration is unapplied its row is removed from the table. Having + a row in the table always means a migration is applied. + """ + + class Migration(models.Model): + app = models.CharField(max_length=255) + name = models.CharField(max_length=255) + applied = models.DateTimeField(default=datetime.datetime.utcnow) + class Meta: + app_cache = BaseAppCache() + app_label = "migrations" + db_table = "django_migrations" + + def __init__(self, connection): + self.connection = connection + + def ensure_schema(self): + """ + Ensures the table exists and has the correct schema. + """ + # If the table's there, that's fine - we've never changed its schema + # in the codebase. + if self.Migration._meta.db_table in self.connection.introspection.get_table_list(self.connection.cursor()): + return + # Make the table + editor = self.connection.schema_editor() + editor.start() + editor.create_model(self.Migration) + editor.commit() + + def applied_migrations(self): + """ + Returns a set of (app, name) of applied migrations. + """ + self.ensure_schema() + return set(tuple(x) for x in self.Migration.objects.values_list("app", "name")) + + def record_applied(self, app, name): + """ + Records that a migration was applied. + """ + self.ensure_schema() + self.Migration.objects.create(app=app, name=name) + + def record_unapplied(self, app, name): + """ + Records that a migration was unapplied. 
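Taken together, the recorder's public surface is small. A usage sketch mirroring the methods above, assuming a configured default connection and an otherwise empty django_migrations table ("myapp" is a stand-in app label):

    from django.db import connection
    from django.db.migrations.recorder import MigrationRecorder

    recorder = MigrationRecorder(connection)
    recorder.record_applied("myapp", "0001_initial")
    recorder.applied_migrations()                  # set([("myapp", "0001_initial")])
    recorder.record_unapplied("myapp", "0001_initial")
    recorder.applied_migrations()                  # set()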
+ """ + self.ensure_schema() + self.Migration.objects.filter(app=app, name=name).delete() diff --git a/tests/migrations/migrations/0001_initial.py b/tests/migrations/migrations/0001_initial.py new file mode 100644 index 0000000000..bd613aa95e --- /dev/null +++ b/tests/migrations/migrations/0001_initial.py @@ -0,0 +1,5 @@ +from django.db import migrations + + +class Migration(migrations.Migration): + pass diff --git a/tests/migrations/migrations/0002_second.py b/tests/migrations/migrations/0002_second.py new file mode 100644 index 0000000000..f4d3ba9902 --- /dev/null +++ b/tests/migrations/migrations/0002_second.py @@ -0,0 +1,6 @@ +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [("migrations", "0001_initial")] diff --git a/tests/migrations/migrations/__init__.py b/tests/migrations/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/migrations/tests.py b/tests/migrations/tests.py index a330330c17..56f4c9fdb9 100644 --- a/tests/migrations/tests.py +++ b/tests/migrations/tests.py @@ -1,5 +1,7 @@ from django.test import TransactionTestCase -from django.db.migrations.graph import MigrationsGraph, CircularDependencyException +from django.db import connection +from django.db.migrations.graph import MigrationGraph, CircularDependencyError +from django.db.migrations.loader import MigrationLoader class GraphTests(TransactionTestCase): @@ -16,7 +18,7 @@ class GraphTests(TransactionTestCase): app_b: 0001 <-- 0002 <-/ """ # Build graph - graph = MigrationsGraph() + graph = MigrationGraph() graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) @@ -54,7 +56,7 @@ class GraphTests(TransactionTestCase): app_c: \ 0001 <-- 0002 <- """ # Build graph - graph = MigrationsGraph() + graph = MigrationGraph() graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) @@ -85,7 +87,7 @@ class GraphTests(TransactionTestCase): Tests a circular dependency graph. """ # Build graph - graph = MigrationsGraph() + graph = MigrationGraph() graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) graph.add_dependency(("app_a", "0001"), ("app_b", "0002")) @@ -93,6 +95,23 @@ class GraphTests(TransactionTestCase): graph.add_dependency(("app_b", "0001"), ("app_a", "0003")) # Test whole graph self.assertRaises( - CircularDependencyException, + CircularDependencyError, graph.forwards_plan, ("app_a", "0003"), ) + + +class LoaderTests(TransactionTestCase): + """ + Tests the disk and database loader. + """ + + def test_load(self): + """ + Makes sure the loader can load the migrations for the test apps. 
+ """ + migration_loader = MigrationLoader(connection) + graph = migration_loader.build_graph() + self.assertEqual( + graph.forwards_plan(("migrations", "0002_second")), + [("migrations", "0001_initial"), ("migrations", "0002_second")], + ) From 8a1f0177778275d8ee4707ef0dca79553b03a035 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 May 2013 16:09:57 +0100 Subject: [PATCH 049/161] Add root_node and leaf_node functions to MigrationGraph --- django/db/migrations/graph.py | 30 ++++++++++++++++++++++++++++-- tests/migrations/tests.py | 18 ++++++++++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index 08481869f4..8d23b36cb7 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -19,8 +19,9 @@ class MigrationGraph(object): replacing migration, and repoint any dependencies that pointed to the replaced migrations to point to the replacing one. - A node should be a tuple: (app_path, migration_name) - but the code - here doesn't really care. + A node should be a tuple: (app_path, migration_name). The tree special-cases + things within an app - namely, root nodes and leaf nodes ignore dependencies + to other apps. """ def __init__(self): @@ -59,6 +60,31 @@ class MigrationGraph(object): raise ValueError("Node %r not a valid node" % node) return self.dfs(node, lambda x: self.dependents.get(x, set())) + def root_nodes(self): + """ + Returns all root nodes - that is, nodes with no dependencies inside + their app. These are the starting point for an app. + """ + roots = set() + for node in self.nodes: + if not filter(lambda key: key[0] == node[0], self.dependencies.get(node, set())): + roots.add(node) + return roots + + def leaf_nodes(self): + """ + Returns all leaf nodes - that is, nodes with no dependents in their app. + These are the "most current" version of an app's schema. + Having more than one per app is technically an error, but one that + gets handled further up, in the interactive command - it's usually the + result of a VCS merge and needs some user input. + """ + leaves = set() + for node in self.nodes: + if not filter(lambda key: key[0] == node[0], self.dependents.get(node, set())): + leaves.add(node) + return leaves + def dfs(self, start, get_children): """ Dynamic programming based depth first search, for finding dependencies. 
diff --git a/tests/migrations/tests.py b/tests/migrations/tests.py index 56f4c9fdb9..338e28aa56 100644 --- a/tests/migrations/tests.py +++ b/tests/migrations/tests.py @@ -44,6 +44,15 @@ class GraphTests(TransactionTestCase): graph.backwards_plan(("app_b", "0002")), [('app_a', '0004'), ('app_a', '0003'), ('app_b', '0002')], ) + # Test roots and leaves + self.assertEqual( + graph.root_nodes(), + set([('app_a', '0001'), ('app_b', '0001')]), + ) + self.assertEqual( + graph.leaf_nodes(), + set([('app_a', '0004'), ('app_b', '0002')]), + ) def test_complex_graph(self): """ @@ -81,6 +90,15 @@ class GraphTests(TransactionTestCase): graph.backwards_plan(("app_b", "0001")), [('app_a', '0004'), ('app_c', '0002'), ('app_c', '0001'), ('app_a', '0003'), ('app_b', '0002'), ('app_b', '0001')], ) + # Test roots and leaves + self.assertEqual( + graph.root_nodes(), + set([('app_a', '0001'), ('app_b', '0001'), ('app_c', '0001')]), + ) + self.assertEqual( + graph.leaf_nodes(), + set([('app_a', '0004'), ('app_b', '0002'), ('app_c', '0002')]), + ) def test_circular_graph(self): """ From eb5e50215a630ed028eca2d85d4131e9e186377d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 May 2013 16:18:19 +0100 Subject: [PATCH 050/161] Do some basic testing of the recorder --- tests/migrations/tests.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/tests/migrations/tests.py b/tests/migrations/tests.py index 338e28aa56..9ef5e37b8f 100644 --- a/tests/migrations/tests.py +++ b/tests/migrations/tests.py @@ -1,7 +1,8 @@ -from django.test import TransactionTestCase +from django.test import TransactionTestCase, TestCase from django.db import connection from django.db.migrations.graph import MigrationGraph, CircularDependencyError from django.db.migrations.loader import MigrationLoader +from django.db.migrations.recorder import MigrationRecorder class GraphTests(TransactionTestCase): @@ -133,3 +134,29 @@ class LoaderTests(TransactionTestCase): graph.forwards_plan(("migrations", "0002_second")), [("migrations", "0001_initial"), ("migrations", "0002_second")], ) + + +class RecorderTests(TestCase): + """ + Tests the disk and database loader. + """ + + def test_apply(self): + """ + Tests marking migrations as applied/unapplied. 
+ """ + recorder = MigrationRecorder(connection) + self.assertEqual( + recorder.applied_migrations(), + set(), + ) + recorder.record_applied("myapp", "0432_ponies") + self.assertEqual( + recorder.applied_migrations(), + set([("myapp", "0432_ponies")]), + ) + recorder.record_unapplied("myapp", "0432_ponies") + self.assertEqual( + recorder.applied_migrations(), + set(), + ) From 76d93a52cd56be23104f824e6755ecc8d3a34d94 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 10 May 2013 17:07:13 +0100 Subject: [PATCH 051/161] Make a start on operations and state (not sure if final layout) --- django/db/migrations/operations/__init__.py | 1 + django/db/migrations/operations/base.py | 38 ++++++++++ django/db/migrations/operations/models.py | 26 +++++++ django/db/migrations/state.py | 81 +++++++++++++++++++++ 4 files changed, 146 insertions(+) create mode 100644 django/db/migrations/operations/__init__.py create mode 100644 django/db/migrations/operations/base.py create mode 100644 django/db/migrations/operations/models.py create mode 100644 django/db/migrations/state.py diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py new file mode 100644 index 0000000000..4fb70b0418 --- /dev/null +++ b/django/db/migrations/operations/__init__.py @@ -0,0 +1 @@ +from .models import CreateModel, DeleteModel diff --git a/django/db/migrations/operations/base.py b/django/db/migrations/operations/base.py new file mode 100644 index 0000000000..b24b45a09a --- /dev/null +++ b/django/db/migrations/operations/base.py @@ -0,0 +1,38 @@ +class Operation(object): + """ + Base class for migration operations. + + It's responsible for both mutating the in-memory model state + (see db/migrations/state.py) to represent what it performs, as well + as actually performing it against a live database. + + Note that some operations won't modify memory state at all (e.g. data + copying operations), and some will need their modifications to be + optionally specified by the user (e.g. custom Python code snippets) + """ + + # If this migration can be run in reverse. + # Some operations are impossible to reverse, like deleting data. + reversible = True + + def state_forwards(self, app, state): + """ + Takes the state from the previous migration, and mutates it + so that it matches what this migration would perform. + """ + raise NotImplementedError() + + def database_forwards(self, app, schema_editor, from_state, to_state): + """ + Performs the mutation on the database schema in the normal + (forwards) direction. + """ + raise NotImplementedError() + + def database_backwards(self, app, schema_editor, from_state, to_state): + """ + Performs the mutation on the database schema in the reverse + direction - e.g. if this were CreateModel, it would in fact + drop the model's table. + """ + raise NotImplementedError() diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py new file mode 100644 index 0000000000..fd709e26fa --- /dev/null +++ b/django/db/migrations/operations/models.py @@ -0,0 +1,26 @@ +from .base import Operation +from django.db.migrations.state import ModelState + + +class CreateModel(Operation): + """ + Create a model's table. 
+ """ + + def __init__(self, name): + self.name = name + + def state_forwards(self, app, state): + state.models[app, self.name.lower()] = ModelState(state, app, self.name) + + def database_forwards(self, app, schema_editor, from_state, to_state): + app_cache = to_state.render() + schema_editor.create_model(app_cache.get_model(app, self.name)) + + def database_backwards(self, app, schema_editor, from_state, to_state): + """ + Performs the mutation on the database schema in the reverse + direction - e.g. if this were CreateModel, it would in fact + drop the model's table. + """ + raise NotImplementedError() diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py new file mode 100644 index 0000000000..3dbbbe27f8 --- /dev/null +++ b/django/db/migrations/state.py @@ -0,0 +1,81 @@ +from django.db import models +from django.db.models.loading import BaseAppCache + + +class ProjectState(object): + """ + Represents the entire project's overall state. + This is the item that is passed around - we do it here rather than at the + app level so that cross-app FKs/etc. resolve properly. + """ + + def __init__(self, models=None): + self.models = models or {} + self.app_cache = None + + def clone(self): + "Returns an exact copy of this ProjectState" + ps = ProjectState( + models = dict((k, v.copy()) for k, v in self.models.items()) + ) + for model in ps.models.values(): + model.project_state = ps + return ps + + def render(self): + "Turns the project state into actual models in a new AppCache" + if self.app_cache is None: + self.app_cache = BaseAppCache() + for model in self.model.values: + model.render(self.app_cache) + return self.app_cache + + +class ModelState(object): + """ + Represents a Django Model. We don't use the actual Model class + as it's not designed to have its options changed - instead, we + mutate this one and then render it into a Model as required. 
+ """ + + def __init__(self, project_state, app_label, name, fields=None, options=None, bases=None): + self.project_state = project_state + self.app_label = app_label + self.name = name + self.fields = fields or [] + self.options = options or {} + self.bases = bases or None + + def clone(self): + "Returns an exact copy of this ModelState" + return self.__class__( + project_state = self.project_state, + app_label = self.app_label, + name = self.name, + fields = self.fields, + options = self.options, + bases = self.bases, + ) + + def render(self, app_cache): + "Creates a Model object from our current state into the given app_cache" + # First, make a Meta object + meta_contents = {'app_label': self.app_label, "app_cache": app_cache} + meta_contents.update(self.options) + meta = type("Meta", tuple(), meta_contents) + # Then, work out our bases + # TODO: Use the actual bases + if self.bases: + raise NotImplementedError("Custom bases not quite done yet!") + else: + bases = [models.Model] + # Turn fields into a dict for the body, add other bits + body = dict(self.fields) + body['Meta'] = meta + body['__module__'] = "__fake__" + # Then, make a Model object + return type( + self.name, + tuple(bases), + body, + ) From ce5bd42259bc95d372ab0d65dbae793e6251ea80 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 May 2013 11:06:30 +0200 Subject: [PATCH 052/161] Turn SchemaEditor into a context manager --- django/db/backends/schema.py | 22 +++++++++++++++------- django/db/migrations/recorder.py | 6 ++---- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index e4a923f2be..78ea80022f 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -1,8 +1,10 @@ +import sys import hashlib from django.db.backends.creation import BaseDatabaseCreation from django.db.backends.util import truncate_name from django.utils.log import getLogger from django.db.models.fields.related import ManyToManyField +from django.db.transaction import atomic logger = getLogger('django.db.backends.schema') @@ -64,9 +66,7 @@ class BaseDatabaseSchemaEditor(object): Marks the start of a schema-altering run. 
""" self.deferred_sql = [] - self.old_autocommit = self.connection.autocommit - if self.connection.autocommit: - self.connection.set_autocommit(False) + atomic(self.connection.alias).__enter__() def commit(self): """ @@ -74,8 +74,7 @@ class BaseDatabaseSchemaEditor(object): """ for sql in self.deferred_sql: self.execute(sql) - self.connection.commit() - self.connection.set_autocommit(self.old_autocommit) + atomic(self.connection.alias).__exit__(None, None, None) def rollback(self): """ @@ -83,8 +82,17 @@ class BaseDatabaseSchemaEditor(object): """ if not self.connection.features.can_rollback_ddl: raise RuntimeError("Cannot rollback schema changes on this backend") - self.connection.rollback() - self.connection.set_autocommit(self.old_autocommit) + atomic(self.connection.alias).__exit__(*sys.exc_info()) + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + self.commit() + else: + self.rollback() # Core utility functions diff --git a/django/db/migrations/recorder.py b/django/db/migrations/recorder.py index 6fb927adef..a1f111f2bc 100644 --- a/django/db/migrations/recorder.py +++ b/django/db/migrations/recorder.py @@ -37,10 +37,8 @@ class MigrationRecorder(object): if self.Migration._meta.db_table in self.connection.introspection.get_table_list(self.connection.cursor()): return # Make the table - editor = self.connection.schema_editor() - editor.start() - editor.create_model(self.Migration) - editor.commit() + with self.connection.schema_editor() as editor: + editor.create_model(self.Migration) def applied_migrations(self): """ From 331546f6ee7f50a92c01f919e1bb4bea6ed32625 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 May 2013 11:48:46 +0200 Subject: [PATCH 053/161] More conversion to a ContextManager schema_editor --- django/db/backends/schema.py | 33 +-- django/db/migrations/state.py | 6 + tests/schema/tests.py | 488 ++++++++++++++-------------------- 3 files changed, 215 insertions(+), 312 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 78ea80022f..d282e0898b 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -61,38 +61,19 @@ class BaseDatabaseSchemaEditor(object): # State-managing methods - def start(self): - """ - Marks the start of a schema-altering run. - """ - self.deferred_sql = [] - atomic(self.connection.alias).__enter__() - - def commit(self): - """ - Finishes a schema-altering run. - """ - for sql in self.deferred_sql: - self.execute(sql) - atomic(self.connection.alias).__exit__(None, None, None) - - def rollback(self): - """ - Tries to roll back a schema-altering run. Call instead of commit(). 
- """ - if not self.connection.features.can_rollback_ddl: - raise RuntimeError("Cannot rollback schema changes on this backend") - atomic(self.connection.alias).__exit__(*sys.exc_info()) - def __enter__(self): - self.start() + self.deferred_sql = [] + atomic(self.connection.alias, self.connection.features.can_rollback_ddl).__enter__() return self def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: - self.commit() + for sql in self.deferred_sql: + self.execute(sql) + atomic(self.connection.alias, self.connection.features.can_rollback_ddl).__exit__(None, None, None) else: - self.rollback() + # Continue propagating exception + return None # Core utility functions diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index 3dbbbe27f8..9678026c79 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -30,6 +30,12 @@ class ProjectState(object): model.render(self.app_cache) return self.app_cache + @classmethod + def from_app_cache(cls, app_cache): + "Takes in an AppCache and returns a ProjectState matching it" + for model in app_cache.get_models(): + print model + class ModelState(object): """ diff --git a/tests/schema/tests.py b/tests/schema/tests.py index 85e1dfc9ea..752f9a5d0b 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -5,6 +5,7 @@ from django.utils.unittest import skipUnless from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey +from django.db.transaction import atomic from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest @@ -22,17 +23,9 @@ class SchemaTests(TransactionTestCase): # Utility functions - def setUp(self): - # Make sure we're in manual transaction mode - connection.set_autocommit(False) - def tearDown(self): # Delete any tables made for our models - connection.rollback() self.delete_tables() - # Rollback anything that may have happened - connection.rollback() - connection.set_autocommit(True) def delete_tables(self): "Deletes all model tables for our models for a clean test environment" @@ -41,29 +34,27 @@ class SchemaTests(TransactionTestCase): for model in self.models: # Remove any M2M tables first for field in model._meta.local_many_to_many: + with atomic(): + try: + cursor.execute(connection.schema_editor().sql_delete_table % { + "table": connection.ops.quote_name(field.rel.through._meta.db_table), + }) + except DatabaseError as e: + if any([s in str(e).lower() for s in self.no_table_strings]): + pass + else: + raise + # Then remove the main tables + with atomic(): try: cursor.execute(connection.schema_editor().sql_delete_table % { - "table": connection.ops.quote_name(field.rel.through._meta.db_table), + "table": connection.ops.quote_name(model._meta.db_table), }) except DatabaseError as e: if any([s in str(e).lower() for s in self.no_table_strings]): - connection.rollback() + pass else: raise - else: - connection.commit() - # Then remove the main tables - try: - cursor.execute(connection.schema_editor().sql_delete_table % { - "table": connection.ops.quote_name(model._meta.db_table), - }) - except DatabaseError as e: - if any([s in str(e).lower() for s in self.no_table_strings]): - connection.rollback() - else: - raise - else: - connection.commit() connection.enable_constraint_checking() def column_classes(self, model): @@ -91,33 +82,27 @@ class 
SchemaTests(TransactionTestCase): Tries creating a model's table, and then deleting it. """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) # Check that it's there list(Author.objects.all()) # Clean up that table - editor.start() - editor.delete_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.delete_model(Author) # Check that it's gone self.assertRaises( DatabaseError, lambda: list(Author.objects.all()), ) - connection.rollback() @skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_fk(self): "Tests that creating tables out of FK order, then repointing, works" # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Book) - editor.create_model(Author) - editor.create_model(Tag) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Book) + editor.create_model(Author) + editor.create_model(Tag) # Check that initial tables are there list(Author.objects.all()) list(Book.objects.all()) @@ -128,19 +113,16 @@ class SchemaTests(TransactionTestCase): title = "Much Ado About Foreign Keys", pub_date = datetime.datetime.now(), ) - connection.commit() # Repoint the FK constraint new_field = ForeignKey(Tag) new_field.set_attributes_from_name("author") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Book, - Book._meta.get_field_by_name("author")[0], - new_field, - strict=True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Book, + Book._meta.get_field_by_name("author")[0], + new_field, + strict=True, + ) # Make sure the new FK constraint is present constraints = connection.introspection.get_constraints(connection.cursor(), Book._meta.db_table) for name, details in constraints.items(): @@ -155,23 +137,19 @@ class SchemaTests(TransactionTestCase): Tests adding fields to models """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Alter the name field to a TextField new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") - editor = connection.schema_editor() - editor.start() - editor.create_field( - Author, - new_field, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.create_field( + Author, + new_field, + ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['age'][0], "IntegerField") @@ -182,10 +160,8 @@ class SchemaTests(TransactionTestCase): Tests simple altering of fields """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") @@ -193,15 +169,13 @@ class SchemaTests(TransactionTestCase): # Alter the name field to a TextField new_field = TextField(null=True) new_field.set_attributes_from_name("name") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - Author._meta.get_field_by_name("name")[0], - 
new_field, - strict=True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Author, + Author._meta.get_field_by_name("name")[0], + new_field, + strict=True, + ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") @@ -209,15 +183,13 @@ class SchemaTests(TransactionTestCase): # Change nullability again new_field2 = TextField(null=False) new_field2.set_attributes_from_name("name") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - new_field, - new_field2, - strict=True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Author, + new_field, + new_field2, + strict=True, + ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") @@ -228,10 +200,8 @@ class SchemaTests(TransactionTestCase): Tests simple altering of fields """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") @@ -239,15 +209,13 @@ class SchemaTests(TransactionTestCase): # Alter the name field's name new_field = CharField(max_length=254) new_field.set_attributes_from_name("display_name") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - Author._meta.get_field_by_name("name")[0], - new_field, - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Author, + Author._meta.get_field_by_name("name")[0], + new_field, + strict = True, + ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['display_name'][0], "CharField") @@ -258,12 +226,10 @@ class SchemaTests(TransactionTestCase): Tests M2M fields on models during creation """ # Create the tables - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.create_model(Tag) - editor.create_model(BookWithM2M) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Tag) + editor.create_model(BookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) self.assertEqual(columns['tag_id'][0], "IntegerField") @@ -273,11 +239,9 @@ class SchemaTests(TransactionTestCase): Tests adding/removing M2M fields on models """ # Create the tables - editor = connection.schema_editor() - editor.start() - editor.create_model(AuthorWithM2M) - editor.create_model(Tag) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(AuthorWithM2M) + editor.create_model(Tag) # Create an M2M field new_field = ManyToManyField("schema.Tag", related_name="authors") new_field.contribute_to_class(AuthorWithM2M, "tags") @@ -286,24 +250,20 @@ class SchemaTests(TransactionTestCase): self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) connection.rollback() # Add the field - editor = connection.schema_editor() - editor.start() - editor.create_field( - Author, - new_field, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.create_field( + Author, + new_field, + ) # Ensure there is now an m2m table there columns 
= self.column_classes(new_field.rel.through) self.assertEqual(columns['tag_id'][0], "IntegerField") # Remove the M2M table again - editor = connection.schema_editor() - editor.start() - editor.delete_field( - Author, - new_field, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.delete_field( + Author, + new_field, + ) # Ensure there's no m2m table there self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) connection.rollback() @@ -317,13 +277,11 @@ class SchemaTests(TransactionTestCase): Tests repointing M2M fields """ # Create the tables - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.create_model(BookWithM2M) - editor.create_model(Tag) - editor.create_model(UniqueTest) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(BookWithM2M) + editor.create_model(Tag) + editor.create_model(UniqueTest) # Ensure the M2M exists and points to Tag constraints = connection.introspection.get_constraints(connection.cursor(), BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table) if connection.features.supports_foreign_keys: @@ -337,14 +295,12 @@ class SchemaTests(TransactionTestCase): new_field = ManyToManyField(UniqueTest) new_field.contribute_to_class(BookWithM2M, "uniques") try: - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - BookWithM2M._meta.get_field_by_name("tags")[0], - new_field, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Author, + BookWithM2M._meta.get_field_by_name("tags")[0], + new_field, + ) # Ensure old M2M is gone self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) connection.rollback() @@ -368,10 +324,8 @@ class SchemaTests(TransactionTestCase): Tests creating/deleting CHECK constraints """ # Create the tables - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) # Ensure the constraint exists constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): @@ -382,29 +336,25 @@ class SchemaTests(TransactionTestCase): # Alter the column to remove it new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - Author._meta.get_field_by_name("height")[0], - new_field, - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Author, + Author._meta.get_field_by_name("height")[0], + new_field, + strict = True, + ) constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == set(["height"]) and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Author, - new_field, - Author._meta.get_field_by_name("height")[0], - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Author, + new_field, + Author._meta.get_field_by_name("height")[0], + strict = True, + ) constraints = connection.introspection.get_constraints(connection.cursor(), 
Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == set(["height"]) and details['check']: @@ -417,141 +367,121 @@ class SchemaTests(TransactionTestCase): Tests removing and adding unique constraints to a single column. """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Tag) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Tag) # Ensure the field is unique to begin with Tag.objects.create(title="foo", slug="foo") self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") - connection.rollback() + Tag.objects.all().delete() # Alter the slug field to be non-unique new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Tag, - Tag._meta.get_field_by_name("slug")[0], - new_field, - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + new_field, + strict = True, + ) # Ensure the field is no longer unique Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") - connection.rollback() + Tag.objects.all().delete() # Alter the slug field to be unique new_new_field = SlugField(unique=True) new_new_field.set_attributes_from_name("slug") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Tag, - new_field, - new_new_field, - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Tag, + new_field, + new_new_field, + strict = True, + ) # Ensure the field is unique again Tag.objects.create(title="foo", slug="foo") self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") - connection.rollback() + Tag.objects.all().delete() # Rename the field new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug2") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Tag, - Tag._meta.get_field_by_name("slug")[0], - TagUniqueRename._meta.get_field_by_name("slug2")[0], - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + TagUniqueRename._meta.get_field_by_name("slug2")[0], + strict = True, + ) # Ensure the field is still unique TagUniqueRename.objects.create(title="foo", slug2="foo") self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo") - connection.rollback() + Tag.objects.all().delete() def test_unique_together(self): """ Tests removing and adding unique_together constraints on a model. 
""" # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(UniqueTest) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(UniqueTest) # Ensure the fields are unique to begin with UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2011, slug="foo") UniqueTest.objects.create(year=2011, slug="bar") self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") - connection.rollback() + UniqueTest.objects.all().delete() # Alter the model to it's non-unique-together companion - editor = connection.schema_editor() - editor.start() - editor.alter_unique_together( - UniqueTest, - UniqueTest._meta.unique_together, - [], - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_unique_together( + UniqueTest, + UniqueTest._meta.unique_together, + [], + ) # Ensure the fields are no longer unique UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2012, slug="foo") - connection.rollback() + UniqueTest.objects.all().delete() # Alter it back new_new_field = SlugField(unique=True) new_new_field.set_attributes_from_name("slug") - editor = connection.schema_editor() - editor.start() - editor.alter_unique_together( - UniqueTest, - [], - UniqueTest._meta.unique_together, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_unique_together( + UniqueTest, + [], + UniqueTest._meta.unique_together, + ) # Ensure the fields are unique again UniqueTest.objects.create(year=2012, slug="foo") self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") - connection.rollback() + UniqueTest.objects.all().delete() def test_db_table(self): """ Tests renaming of the table """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table - editor = connection.schema_editor() - editor.start() - editor.alter_db_table( - Author, - "schema_author", - "schema_otherauthor", - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_db_table( + Author, + "schema_author", + "schema_otherauthor", + ) # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table again - editor = connection.schema_editor() - editor.start() - editor.alter_db_table( - Author, - "schema_otherauthor", - "schema_author", - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_db_table( + Author, + "schema_otherauthor", + "schema_author", + ) # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) @@ -562,11 +492,9 @@ class SchemaTests(TransactionTestCase): Tests creation/altering of indexes """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Author) - editor.create_model(Book) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", @@ -575,43 +503,37 @@ class SchemaTests(TransactionTestCase): # Alter to remove the index new_field = 
CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Book, - Book._meta.get_field_by_name("title")[0], - new_field, - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Book, + Book._meta.get_field_by_name("title")[0], + new_field, + strict = True, + ) # Ensure the table is there and has no index self.assertNotIn( "title", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), ) # Alter to re-add the index - editor = connection.schema_editor() - editor.start() - editor.alter_field( - Book, - new_field, - Book._meta.get_field_by_name("title")[0], - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + Book, + new_field, + Book._meta.get_field_by_name("title")[0], + strict = True, + ) # Ensure the table is there and has the index again self.assertIn( "title", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index - editor = connection.schema_editor() - editor.start() - editor.create_field( - Book, - BookWithSlug._meta.get_field_by_name("slug")[0], - ) - editor.commit() + with connection.schema_editor() as editor: + editor.create_field( + Book, + BookWithSlug._meta.get_field_by_name("slug")[0], + ) self.assertIn( "slug", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), @@ -619,15 +541,13 @@ class SchemaTests(TransactionTestCase): # Remove the unique, check the index goes with it new_field2 = CharField(max_length=20, unique=False) new_field2.set_attributes_from_name("slug") - editor = connection.schema_editor() - editor.start() - editor.alter_field( - BookWithSlug, - BookWithSlug._meta.get_field_by_name("slug")[0], - new_field2, - strict = True, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.alter_field( + BookWithSlug, + BookWithSlug._meta.get_field_by_name("slug")[0], + new_field2, + strict = True, + ) self.assertNotIn( "slug", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), @@ -638,10 +558,8 @@ class SchemaTests(TransactionTestCase): Tests altering of the primary key """ # Create the table - editor = connection.schema_editor() - editor.start() - editor.create_model(Tag) - editor.commit() + with connection.schema_editor() as editor: + editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertTrue( connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['id']['primary_key'], @@ -649,15 +567,13 @@ class SchemaTests(TransactionTestCase): # Alter to change the PK new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") - editor = connection.schema_editor() - editor.start() - editor.delete_field(Tag, Tag._meta.get_field_by_name("id")[0]) - editor.alter_field( - Tag, - Tag._meta.get_field_by_name("slug")[0], - new_field, - ) - editor.commit() + with connection.schema_editor() as editor: + editor.delete_field(Tag, Tag._meta.get_field_by_name("id")[0]) + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + new_field, + ) # Ensure the PK changed self.assertNotIn( 'id', From 028bbd15cad0e48d2b679b986e90c6af0f7e28a0 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 May 2013 13:48:57 +0200 Subject: [PATCH 054/161] Add a deconstruct() method to Fields. 
This allows the field's initial argument to be obtained so it can be serialised to, and re-created from, a textual format. --- django/db/models/fields/__init__.py | 188 +++++++++++++++++- django/db/models/fields/files.py | 19 ++ django/db/models/fields/related.py | 41 ++++ tests/field_deconstruction/__init__.py | 0 tests/field_deconstruction/models.py | 0 tests/field_deconstruction/tests.py | 253 +++++++++++++++++++++++++ 6 files changed, 500 insertions(+), 1 deletion(-) create mode 100644 tests/field_deconstruction/__init__.py create mode 100644 tests/field_deconstruction/models.py create mode 100644 tests/field_deconstruction/tests.py diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index b1c1601300..86a0711d7c 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -99,7 +99,8 @@ class Field(object): db_tablespace=None, auto_created=False, validators=[], error_messages=None): self.name = name - self.verbose_name = verbose_name + self.verbose_name = verbose_name # May be set by set_attributes_from_name + self._verbose_name = verbose_name # Store original for deconstruction self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null @@ -128,14 +129,99 @@ class Field(object): self.creation_counter = Field.creation_counter Field.creation_counter += 1 + self._validators = validators # Store for deconstruction later self.validators = self.default_validators + validators messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) + self._error_messages = error_messages # Store for deconstruction later self.error_messages = messages + def deconstruct(self): + """ + Returns enough information to recreate the field as a 4-tuple: + + * The name of the field on the model, if contribute_to_class has been run + * The import path of the field, including the class: django.db.models.IntegerField + This should be the most portable version, so less specific may be better. + * A list of positional arguments + * A dict of keyword arguments + + Note that the positional or keyword arguments must contain values of the + following types (including inner values of collection types): + + * None, bool, str, unicode, int, long, float, complex, set, frozenset, list, tuple, dict + * UUID + * datetime.datetime (naive), datetime.date + * top-level classes, top-level functions - will be referenced by their full import path + * Storage instances - these have their own deconstruct() method + + This is because the values here must be serialised into a text format + (possibly new Python code, possibly JSON) and these are the only types + with encoding handlers defined. + + There's no need to return the exact way the field was instantiated this time, + just ensure that the resulting field is the same - prefer keyword arguments + over positional ones, and omit parameters with their default values. 
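For a concrete sense of the contract, this is the kind of 4-tuple the base implementation is expected to produce for a plain field once it has been named - illustrative values, assuming a configured settings module:

    from django.db import models

    field = models.CharField(max_length=100, null=True)
    field.set_attributes_from_name("title")

    field.deconstruct()
    # ("title", "django.db.models.CharField", [],
    #  {"max_length": 100, "null": True})
    # Arguments left at their defaults (blank, db_index, ...) are omitted, and
    # the path is shortened from django.db.models.fields to django.db.models.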
+ """ + # Short-form way of fetching all the default parameters + keywords = {} + possibles = { + "verbose_name": None, + "primary_key": False, + "max_length": None, + "unique": False, + "blank": False, + "null": False, + "db_index": False, + "default": NOT_PROVIDED, + "editable": True, + "serialize": True, + "unique_for_date": None, + "unique_for_month": None, + "unique_for_year": None, + "choices": [], + "help_text": '', + "db_column": None, + "db_tablespace": settings.DEFAULT_INDEX_TABLESPACE, + "auto_created": False, + "validators": [], + "error_messages": None, + } + attr_overrides = { + "unique": "_unique", + "choices": "_choices", + "error_messages": "_error_messages", + "validators": "_validators", + "verbose_name": "_verbose_name", + } + equals_comparison = set(["choices", "validators", "db_tablespace"]) + for name, default in possibles.items(): + value = getattr(self, attr_overrides.get(name, name)) + if name in equals_comparison: + if value != default: + keywords[name] = value + else: + if value is not default: + keywords[name] = value + # Work out path - we shorten it for known Django core fields + path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__) + if path.startswith("django.db.models.fields.related"): + path = path.replace("django.db.models.fields.related", "django.db.models") + if path.startswith("django.db.models.fields.files"): + path = path.replace("django.db.models.fields.files", "django.db.models") + if path.startswith("django.db.models.fields"): + path = path.replace("django.db.models.fields", "django.db.models") + # Return basic info - other fields should override this. + return ( + self.name, + path, + [], + keywords, + ) + def __eq__(self, other): # Needed for @total_ordering if isinstance(other, Field): @@ -584,6 +670,7 @@ class Field(object): return '<%s: %s>' % (path, name) return '<%s>' % path + class AutoField(Field): description = _("Integer") @@ -598,6 +685,12 @@ class AutoField(Field): kwargs['blank'] = True Field.__init__(self, *args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(AutoField, self).deconstruct() + del kwargs['blank'] + kwargs['primary_key'] = True + return name, path, args, kwargs + def get_internal_type(self): return "AutoField" @@ -645,6 +738,11 @@ class BooleanField(Field): kwargs['blank'] = True Field.__init__(self, *args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(BooleanField, self).deconstruct() + del kwargs['blank'] + return name, path, args, kwargs + def get_internal_type(self): return "BooleanField" @@ -745,6 +843,18 @@ class DateField(Field): kwargs['blank'] = True Field.__init__(self, verbose_name, name, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(DateField, self).deconstruct() + if self.auto_now: + kwargs['auto_now'] = True + del kwargs['editable'] + del kwargs['blank'] + if self.auto_now_add: + kwargs['auto_now_add'] = True + del kwargs['editable'] + del kwargs['blank'] + return name, path, args, kwargs + def get_internal_type(self): return "DateField" @@ -924,6 +1034,14 @@ class DecimalField(Field): self.max_digits, self.decimal_places = max_digits, decimal_places Field.__init__(self, verbose_name, name, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(DecimalField, self).deconstruct() + if self.max_digits: + kwargs['max_digits'] = self.max_digits + if self.decimal_places: + kwargs['decimal_places'] = self.decimal_places + return name, path, args, kwargs + def get_internal_type(self): return "DecimalField" @@ 
-983,6 +1101,12 @@ class EmailField(CharField): kwargs['max_length'] = kwargs.get('max_length', 75) CharField.__init__(self, *args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(EmailField, self).deconstruct() + if kwargs.get("max_length", None) == 75: + del kwargs['max_length'] + return name, path, args, kwargs + def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. @@ -1002,6 +1126,22 @@ class FilePathField(Field): kwargs['max_length'] = kwargs.get('max_length', 100) Field.__init__(self, verbose_name, name, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(FilePathField, self).deconstruct() + if self.path != '': + kwargs['path'] = self.path + if self.match is not None: + kwargs['match'] = self.match + if self.recursive is not False: + kwargs['recursive'] = self.recursive + if self.allow_files is not True: + kwargs['allow_files'] = self.allow_files + if self.allow_folders is not False: + kwargs['allow_folders'] = self.allow_folders + if kwargs.get("max_length", None) == 100: + del kwargs["max_length"] + return name, path, args, kwargs + def formfield(self, **kwargs): defaults = { 'path': self.path, @@ -1103,6 +1243,11 @@ class IPAddressField(Field): kwargs['max_length'] = 15 Field.__init__(self, *args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(IPAddressField, self).deconstruct() + del kwargs['max_length'] + return name, path, args, kwargs + def get_internal_type(self): return "IPAddressField" @@ -1119,12 +1264,23 @@ class GenericIPAddressField(Field): def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 + self.protocol = protocol self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 Field.__init__(self, verbose_name, name, *args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(GenericIPAddressField, self).deconstruct() + if self.unpack_ipv4 is not False: + kwargs['unpack_ipv4'] = self.unpack_ipv4 + if self.protocol != "both": + kwargs['protocol'] = self.protocol + if kwargs.get("max_length", None) == 39: + del kwargs['max_length'] + return name, path, args, kwargs + def get_internal_type(self): return "GenericIPAddressField" @@ -1165,6 +1321,12 @@ class NullBooleanField(Field): kwargs['blank'] = True Field.__init__(self, *args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(NullBooleanField, self).deconstruct() + del kwargs['null'] + del kwargs['blank'] + return name, path, args, kwargs + def get_internal_type(self): return "NullBooleanField" @@ -1238,6 +1400,16 @@ class SlugField(CharField): kwargs['db_index'] = True super(SlugField, self).__init__(*args, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(SlugField, self).deconstruct() + if kwargs.get("max_length", None) == 50: + del kwargs['max_length'] + if self.db_index is False: + kwargs['db_index'] = False + else: + del kwargs['db_index'] + return name, path, args, kwargs + def get_internal_type(self): return "SlugField" @@ -1286,6 +1458,14 @@ class TimeField(Field): kwargs['blank'] = True Field.__init__(self, verbose_name, name, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(TimeField, self).deconstruct() + if self.auto_now is not False: + kwargs["auto_now"] = self.auto_now + if self.auto_now_add is 
not False: + kwargs["auto_now_add"] = self.auto_now_add + return name, path, args, kwargs + def get_internal_type(self): return "TimeField" @@ -1345,6 +1525,12 @@ class URLField(CharField): CharField.__init__(self, verbose_name, name, **kwargs) self.validators.append(validators.URLValidator()) + def deconstruct(self): + name, path, args, kwargs = super(URLField, self).deconstruct() + if kwargs.get("max_length", None) == 200: + del kwargs['max_length'] + return name, path, args, kwargs + def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. diff --git a/django/db/models/fields/files.py b/django/db/models/fields/files.py index e631f177e9..0a913e908b 100644 --- a/django/db/models/fields/files.py +++ b/django/db/models/fields/files.py @@ -227,6 +227,17 @@ class FileField(Field): kwargs['max_length'] = kwargs.get('max_length', 100) super(FileField, self).__init__(verbose_name, name, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(FileField, self).deconstruct() + if kwargs.get("max_length", None) != 100: + kwargs["max_length"] = 100 + else: + del kwargs["max_length"] + kwargs['upload_to'] = self.upload_to + if self.storage is not default_storage: + kwargs['storage'] = self.storage + return name, path, args, kwargs + def get_internal_type(self): return "FileField" @@ -326,6 +337,14 @@ class ImageField(FileField): self.width_field, self.height_field = width_field, height_field super(ImageField, self).__init__(verbose_name, name, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(ImageField, self).deconstruct() + if self.width_field: + kwargs['width_field'] = self.width_field + if self.height_field: + kwargs['height_field'] = self.height_field + return name, path, args, kwargs + def contribute_to_class(self, cls, name): super(ImageField, self).contribute_to_class(cls, name) # Attach update_dimension_fields so that dimension fields declared diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 37fa8b1027..5ef713e5e6 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -1146,6 +1146,27 @@ class ForeignKey(ForeignObject): ) super(ForeignKey, self).__init__(to, ['self'], [to_field], **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(ForeignKey, self).deconstruct() + # Handle the simpler arguments + if self.db_index: + del kwargs['db_index'] + else: + kwargs['db_index'] = False + if self.db_constraint is not True: + kwargs['db_constraint'] = self.db_constraint + if self.rel.on_delete is not CASCADE: + kwargs['on_delete'] = self.rel.on_delete + # Rel needs more work. 
+ rel = self.rel + if self.rel.field_name: + kwargs['to_field'] = self.rel.field_name + if isinstance(self.rel.to, basestring): + kwargs['to'] = self.rel.to + else: + kwargs['to'] = "%s.%s" % (self.rel.to._meta.app_label, self.rel.to._meta.object_name) + return name, path, args, kwargs + @property def related_field(self): return self.foreign_related_fields[0] @@ -1263,6 +1284,12 @@ class OneToOneField(ForeignKey): kwargs['unique'] = True super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs) + def deconstruct(self): + name, path, args, kwargs = super(OneToOneField, self).deconstruct() + if "unique" in kwargs: + del kwargs['unique'] + return name, path, args, kwargs + def contribute_to_related_class(self, cls, related): setattr(cls, related.get_accessor_name(), SingleRelatedObjectDescriptor(related)) @@ -1355,6 +1382,20 @@ class ManyToManyField(RelatedField): msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.') self.help_text = string_concat(self.help_text, ' ', msg) + def deconstruct(self): + name, path, args, kwargs = super(ManyToManyField, self).deconstruct() + # Handle the simpler arguments + if self.rel.db_constraint is not True: + kwargs['db_constraint'] = self.db_constraint + del kwargs['help_text'] + # Rel needs more work. + rel = self.rel + if isinstance(self.rel.to, basestring): + kwargs['to'] = self.rel.to + else: + kwargs['to'] = "%s.%s" % (self.rel.to._meta.app_label, self.rel.to._meta.object_name) + return name, path, args, kwargs + def _get_path_info(self, direct=False): """ Called by both direct an indirect m2m traversal. diff --git a/tests/field_deconstruction/__init__.py b/tests/field_deconstruction/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/field_deconstruction/models.py b/tests/field_deconstruction/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/field_deconstruction/tests.py b/tests/field_deconstruction/tests.py new file mode 100644 index 0000000000..683854940b --- /dev/null +++ b/tests/field_deconstruction/tests.py @@ -0,0 +1,253 @@ +from django.test import TestCase +from django.db import models + + +class FieldDeconstructionTests(TestCase): + """ + Tests the deconstruct() method on all core fields. + """ + + def test_name(self): + """ + Tests the outputting of the correct name if assigned one. 
+ """ + # First try using a "normal" field + field = models.CharField(max_length=65) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(name, None) + field.set_attributes_from_name("is_awesome_test") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(name, "is_awesome_test") + # Now try with a ForeignKey + field = models.ForeignKey("some_fake.ModelName") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(name, None) + field.set_attributes_from_name("author") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(name, "author") + + def test_auto_field(self): + field = models.AutoField(primary_key=True) + field.set_attributes_from_name("id") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.AutoField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"primary_key": True}) + + def test_big_integer_field(self): + field = models.BigIntegerField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.BigIntegerField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_boolean_field(self): + field = models.BooleanField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.BooleanField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + field = models.BooleanField(default=True) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.BooleanField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"default": True}) + + def test_char_field(self): + field = models.CharField(max_length=65) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.CharField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"max_length": 65}) + field = models.CharField(max_length=65, null=True, blank=True) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.CharField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True}) + + def test_csi_field(self): + field = models.CommaSeparatedIntegerField(max_length=100) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"max_length": 100}) + + def test_date_field(self): + field = models.DateField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.DateField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + field = models.DateField(auto_now=True) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.DateField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"auto_now": True}) + + def test_datetime_field(self): + field = models.DateTimeField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.DateTimeField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + field = models.DateTimeField(auto_now_add=True) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.DateTimeField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"auto_now_add": True}) + + def test_decimal_field(self): + field = models.DecimalField(max_digits=5, decimal_places=2) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.DecimalField") + 
self.assertEqual(args, []) + self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2}) + + def test_email_field(self): + field = models.EmailField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.EmailField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + field = models.EmailField(max_length=255) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.EmailField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"max_length": 255}) + + def test_file_field(self): + field = models.FileField(upload_to="foo/bar") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.FileField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"upload_to": "foo/bar"}) + + def test_file_path_field(self): + field = models.FilePathField(match=".*\.txt$") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.FilePathField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"match": ".*\.txt$"}) + field = models.FilePathField(recursive=True, allow_folders=True) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.FilePathField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"recursive": True, "allow_folders": True}) + + def test_float_field(self): + field = models.FloatField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.FloatField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_foreign_key(self): + field = models.ForeignKey("auth.User") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.ForeignKey") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"to": "auth.User"}) + field = models.ForeignKey("something.Else") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.ForeignKey") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"to": "something.Else"}) + field = models.ForeignKey("auth.User", on_delete=models.SET_NULL) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.ForeignKey") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.SET_NULL}) + + def test_image_field(self): + field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.ImageField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"}) + + def test_integer_field(self): + field = models.IntegerField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.IntegerField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_ip_address_field(self): + field = models.IPAddressField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.IPAddressField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_generic_ip_address_field(self): + field = models.GenericIPAddressField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.GenericIPAddressField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + field = models.GenericIPAddressField(protocol="IPv6") + name, path, args, kwargs = 
field.deconstruct() + self.assertEqual(path, "django.db.models.GenericIPAddressField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"protocol": "IPv6"}) + + def test_many_to_many_field(self): + field = models.ManyToManyField("auth.User") + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.ManyToManyField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"to": "auth.User"}) + + def test_null_boolean_field(self): + field = models.NullBooleanField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.NullBooleanField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_positive_integer_field(self): + field = models.PositiveIntegerField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.PositiveIntegerField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_positive_small_integer_field(self): + field = models.PositiveSmallIntegerField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.PositiveSmallIntegerField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_slug_field(self): + field = models.SlugField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.SlugField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + field = models.SlugField(db_index=False) + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.SlugField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {"db_index": False}) + + def test_small_integer_field(self): + field = models.SmallIntegerField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.SmallIntegerField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_text_field(self): + field = models.TextField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.TextField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) + + def test_url_field(self): + field = models.URLField() + name, path, args, kwargs = field.deconstruct() + self.assertEqual(path, "django.db.models.URLField") + self.assertEqual(args, []) + self.assertEqual(kwargs, {}) From 7d041b93944174c88bf8c9342d91203b28041a04 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 May 2013 13:49:56 +0200 Subject: [PATCH 055/161] Split up test and make the State classes a bit better. 
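
ModelState.from_model() round-trips every local field through the new
deconstruct() API. A minimal sketch of that round trip (the field and its
arguments here are arbitrary examples, and it assumes a configured Django
settings module):

    from django.db import models
    from django.utils.module_loading import import_by_path

    field = models.CharField(max_length=255, null=True)
    name, path, args, kwargs = field.deconstruct()
    # path == "django.db.models.CharField"
    # kwargs == {"max_length": 255, "null": True}
    rebuilt = import_by_path(path)(*args, **kwargs)
    # rebuilt is an equivalent, unbound CharField instance
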
--- django/db/migrations/state.py | 37 +++++++++++++---- tests/migrations/{tests.py => test_graph.py} | 26 ------------ tests/migrations/test_loader.py | 29 +++++++++++++ tests/migrations/test_state.py | 43 ++++++++++++++++++++ 4 files changed, 100 insertions(+), 35 deletions(-) rename tests/migrations/{tests.py => test_graph.py} (88%) create mode 100644 tests/migrations/test_loader.py create mode 100644 tests/migrations/test_state.py diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index 9678026c79..a38ab0ed1d 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -1,5 +1,6 @@ from django.db import models from django.db.models.loading import BaseAppCache +from django.utils.module_loading import import_by_path class ProjectState(object): @@ -15,12 +16,9 @@ class ProjectState(object): def clone(self): "Returns an exact copy of this ProjectState" - ps = ProjectState( + return ProjectState( models = dict((k, v.copy()) for k, v in self.models.items()) ) - for model in ps.models.values(): - model.project_state = ps - return ps def render(self): "Turns the project state into actual models in a new AppCache" @@ -33,8 +31,11 @@ class ProjectState(object): @classmethod def from_app_cache(cls, app_cache): "Takes in an AppCache and returns a ProjectState matching it" + models = {} for model in app_cache.get_models(): - print model + model_state = ModelState.from_model(model) + models[(model_state.app_label, model_state.name.lower())] = model_state + return cls(models) class ModelState(object): @@ -44,18 +45,36 @@ class ModelState(object): mutate this one and then render it into a Model as required. """ - def __init__(self, project_state, app_label, name, fields=None, options=None, bases=None): - self.project_state = project_state + def __init__(self, app_label, name, fields=None, options=None, bases=None): self.app_label = app_label self.name = name self.fields = fields or [] self.options = options or {} - self.bases = bases or None + self.bases = bases or (models.Model, ) + + @classmethod + def from_model(cls, model): + """ + Feed me a model, get a ModelState representing it out. + """ + # Deconstruct the fields + fields = [] + for field in model._meta.local_fields: + name, path, args, kwargs = field.deconstruct() + field_class = import_by_path(path) + fields.append((name, field_class(*args, **kwargs))) + # Make our record + return cls( + model._meta.app_label, + model._meta.object_name, + fields, + {}, + None, + ) def clone(self): "Returns an exact copy of this ModelState" return self.__class__( - project_state = self.project_state, app_label = self.app_label, name = self.name, fields = self.fields, diff --git a/tests/migrations/tests.py b/tests/migrations/test_graph.py similarity index 88% rename from tests/migrations/tests.py rename to tests/migrations/test_graph.py index 9ef5e37b8f..b35d04fb8a 100644 --- a/tests/migrations/tests.py +++ b/tests/migrations/test_graph.py @@ -134,29 +134,3 @@ class LoaderTests(TransactionTestCase): graph.forwards_plan(("migrations", "0002_second")), [("migrations", "0001_initial"), ("migrations", "0002_second")], ) - - -class RecorderTests(TestCase): - """ - Tests the disk and database loader. - """ - - def test_apply(self): - """ - Tests marking migrations as applied/unapplied. 
- """ - recorder = MigrationRecorder(connection) - self.assertEqual( - recorder.applied_migrations(), - set(), - ) - recorder.record_applied("myapp", "0432_ponies") - self.assertEqual( - recorder.applied_migrations(), - set([("myapp", "0432_ponies")]), - ) - recorder.record_unapplied("myapp", "0432_ponies") - self.assertEqual( - recorder.applied_migrations(), - set(), - ) diff --git a/tests/migrations/test_loader.py b/tests/migrations/test_loader.py new file mode 100644 index 0000000000..f8f31734f1 --- /dev/null +++ b/tests/migrations/test_loader.py @@ -0,0 +1,29 @@ +from django.test import TestCase +from django.db import connection +from django.db.migrations.recorder import MigrationRecorder + + +class RecorderTests(TestCase): + """ + Tests the disk and database loader. + """ + + def test_apply(self): + """ + Tests marking migrations as applied/unapplied. + """ + recorder = MigrationRecorder(connection) + self.assertEqual( + recorder.applied_migrations(), + set(), + ) + recorder.record_applied("myapp", "0432_ponies") + self.assertEqual( + recorder.applied_migrations(), + set([("myapp", "0432_ponies")]), + ) + recorder.record_unapplied("myapp", "0432_ponies") + self.assertEqual( + recorder.applied_migrations(), + set(), + ) diff --git a/tests/migrations/test_state.py b/tests/migrations/test_state.py new file mode 100644 index 0000000000..72a259bb24 --- /dev/null +++ b/tests/migrations/test_state.py @@ -0,0 +1,43 @@ +from django.test import TestCase +from django.db import models +from django.db.models.loading import BaseAppCache +from django.db.migrations.state import ProjectState + + +class StateTests(TestCase): + """ + Tests state construction, rendering and modification by operations. + """ + + def test_create(self): + """ + Tests making a ProjectState from an AppCache + """ + new_app_cache = BaseAppCache() + + class Author(models.Model): + name = models.CharField(max_length=255) + bio = models.TextField() + age = models.IntegerField(blank=True, null=True) + class Meta: + app_label = "migrations" + app_cache = new_app_cache + + class Book(models.Model): + title = models.CharField(max_length=1000) + author = models.ForeignKey(Author) + class Meta: + app_label = "migrations" + app_cache = new_app_cache + + project_state = ProjectState.from_app_cache(new_app_cache) + author_state = project_state.models['migrations', 'author'] + book_state = project_state.models['migrations', 'book'] + + self.assertEqual(author_state.app_label, "migrations") + self.assertEqual(author_state.name, "Author") + self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"]) + self.assertEqual(author_state.fields[1][1].max_length, 255) + self.assertEqual(author_state.fields[2][1].null, False) + self.assertEqual(author_state.fields[3][1].null, True) + self.assertEqual(author_state.bases, (models.Model, )) From 38a8cf1cdccb937297c7735aa73f6c02d468e7df Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 18 May 2013 18:30:34 +0200 Subject: [PATCH 056/161] Fix state tests a little --- django/db/migrations/state.py | 11 +++++------ tests/migrations/test_state.py | 31 ++++++++++++++++++++++++++++++- 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index a38ab0ed1d..d695b1a0c4 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -14,6 +14,9 @@ class ProjectState(object): self.models = models or {} self.app_cache = None + def add_model_state(self, model_state): + 
self.models[(model_state.app_label, model_state.name.lower())] = model_state + def clone(self): "Returns an exact copy of this ProjectState" return ProjectState( @@ -24,7 +27,7 @@ class ProjectState(object): "Turns the project state into actual models in a new AppCache" if self.app_cache is None: self.app_cache = BaseAppCache() - for model in self.model.values: + for model in self.models.values(): model.render(self.app_cache) return self.app_cache @@ -90,10 +93,6 @@ class ModelState(object): meta = type("Meta", tuple(), meta_contents) # Then, work out our bases # TODO: Use the actual bases - if self.bases: - raise NotImplementedError("Custom bases not quite done yet!") - else: - bases = [models.Model] # Turn fields into a dict for the body, add other bits body = dict(self.fields) body['Meta'] = meta @@ -101,6 +100,6 @@ class ModelState(object): # Then, make a Model object return type( self.name, - tuple(bases), + tuple(self.bases), body, ) diff --git a/tests/migrations/test_state.py b/tests/migrations/test_state.py index 72a259bb24..5d6a457f72 100644 --- a/tests/migrations/test_state.py +++ b/tests/migrations/test_state.py @@ -1,7 +1,7 @@ from django.test import TestCase from django.db import models from django.db.models.loading import BaseAppCache -from django.db.migrations.state import ProjectState +from django.db.migrations.state import ProjectState, ModelState class StateTests(TestCase): @@ -13,6 +13,7 @@ class StateTests(TestCase): """ Tests making a ProjectState from an AppCache """ + new_app_cache = BaseAppCache() class Author(models.Model): @@ -41,3 +42,31 @@ class StateTests(TestCase): self.assertEqual(author_state.fields[2][1].null, False) self.assertEqual(author_state.fields[3][1].null, True) self.assertEqual(author_state.bases, (models.Model, )) + + self.assertEqual(book_state.app_label, "migrations") + self.assertEqual(book_state.name, "Book") + self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author"]) + self.assertEqual(book_state.fields[1][1].max_length, 1000) + self.assertEqual(book_state.fields[2][1].null, False) + self.assertEqual(book_state.bases, (models.Model, )) + + def test_render(self): + """ + Tests rendering a ProjectState into an AppCache. 
+ """ + project_state = ProjectState() + project_state.add_model_state(ModelState( + "migrations", + "Tag", + [ + ("id", models.AutoField(primary_key=True)), + ("name", models.CharField(max_length=100)), + ("hidden", models.BooleanField()), + ], + {}, + None, + )) + + new_app_cache = project_state.render() + self.assertEqual(new_app_cache.get_model("migrations", "Tag")._meta.get_field_by_name("name")[0].max_length, 100) + self.assertEqual(new_app_cache.get_model("migrations", "Tag")._meta.get_field_by_name("hidden")[0].null, False) From d58c98d73c41b97e9543f88d3a81ffa6f23e4c7d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 19 May 2013 12:27:17 +0200 Subject: [PATCH 057/161] Fix proxy objects to respect != --- django/utils/functional.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/django/utils/functional.py b/django/utils/functional.py index 1592828c7f..cab74886d3 100644 --- a/django/utils/functional.py +++ b/django/utils/functional.py @@ -147,6 +147,11 @@ def lazy(func, *resultclasses): else: return func(*self.__args, **self.__kw) + def __ne__(self, other): + if isinstance(other, Promise): + other = other.__cast() + return self.__cast() != other + def __eq__(self, other): if isinstance(other, Promise): other = other.__cast() From 264f8650e375112e874298ec1c5ba65b193fda2a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 19 May 2013 12:35:17 +0200 Subject: [PATCH 058/161] ModelState now freezes options and bases --- django/db/migrations/state.py | 13 +++++++++++-- django/db/models/options.py | 6 ++++++ tests/migrations/test_state.py | 5 +++++ 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index d695b1a0c4..44ee166121 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -1,5 +1,6 @@ from django.db import models from django.db.models.loading import BaseAppCache +from django.db.models.options import DEFAULT_NAMES from django.utils.module_loading import import_by_path @@ -66,13 +67,21 @@ class ModelState(object): name, path, args, kwargs = field.deconstruct() field_class = import_by_path(path) fields.append((name, field_class(*args, **kwargs))) + # Extract the options + options = {} + for name in DEFAULT_NAMES: + # Ignore some special options + if name in ["app_cache", "app_label"]: + continue + if name in model._meta.original_attrs: + options[name] = model._meta.original_attrs[name] # Make our record return cls( model._meta.app_label, model._meta.object_name, fields, - {}, - None, + options, + model.__bases__, ) def clone(self): diff --git a/django/db/models/options.py b/django/db/models/options.py index 46c18a64c6..1a9421c0fa 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -85,6 +85,10 @@ class Options(object): self.model_name = self.object_name.lower() self.verbose_name = get_verbose_name(self.object_name) + # Store the original user-defined values for each option, + # for use when serializing the model definition + self.original_attrs = {} + # Next, apply any overridden values from 'class Meta'. 
if self.meta: meta_attrs = self.meta.__dict__.copy() @@ -97,8 +101,10 @@ class Options(object): for attr_name in DEFAULT_NAMES: if attr_name in meta_attrs: setattr(self, attr_name, meta_attrs.pop(attr_name)) + self.original_attrs[attr_name] = getattr(self, attr_name) elif hasattr(self.meta, attr_name): setattr(self, attr_name, getattr(self.meta, attr_name)) + self.original_attrs[attr_name] = getattr(self, attr_name) # unique_together can be either a tuple of tuples, or a single # tuple of two strings. Normalize it to a tuple of tuples, so that diff --git a/tests/migrations/test_state.py b/tests/migrations/test_state.py index 5d6a457f72..c6930873ef 100644 --- a/tests/migrations/test_state.py +++ b/tests/migrations/test_state.py @@ -23,6 +23,7 @@ class StateTests(TestCase): class Meta: app_label = "migrations" app_cache = new_app_cache + unique_together = ["name", "bio"] class Book(models.Model): title = models.CharField(max_length=1000) @@ -30,6 +31,8 @@ class StateTests(TestCase): class Meta: app_label = "migrations" app_cache = new_app_cache + verbose_name = "tome" + db_table = "test_tome" project_state = ProjectState.from_app_cache(new_app_cache) author_state = project_state.models['migrations', 'author'] @@ -41,6 +44,7 @@ class StateTests(TestCase): self.assertEqual(author_state.fields[1][1].max_length, 255) self.assertEqual(author_state.fields[2][1].null, False) self.assertEqual(author_state.fields[3][1].null, True) + self.assertEqual(author_state.options, {"unique_together": ["name", "bio"]}) self.assertEqual(author_state.bases, (models.Model, )) self.assertEqual(book_state.app_label, "migrations") @@ -48,6 +52,7 @@ class StateTests(TestCase): self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author"]) self.assertEqual(book_state.fields[1][1].max_length, 1000) self.assertEqual(book_state.fields[2][1].null, False) + self.assertEqual(book_state.options, {"verbose_name": "tome", "db_table": "test_tome"}) self.assertEqual(book_state.bases, (models.Model, )) def test_render(self): From d0ecefc2c9114b21e5f83d534990ffc3a44b8cba Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 29 May 2013 17:47:10 +0100 Subject: [PATCH 059/161] Start adding operations that work and tests for them --- django/db/backends/schema.py | 4 +- django/db/backends/sqlite3/schema.py | 4 +- django/db/migrations/__init__.py | 1 + django/db/migrations/graph.py | 17 ++++++- django/db/migrations/loader.py | 7 ++- django/db/migrations/migration.py | 17 +++++++ django/db/migrations/operations/__init__.py | 1 + django/db/migrations/operations/base.py | 6 +-- django/db/migrations/operations/fields.py | 52 +++++++++++++++++++++ django/db/migrations/operations/models.py | 38 +++++++++++---- django/db/migrations/state.py | 9 ++-- tests/migrations/migrations/0001_initial.py | 26 ++++++++++- tests/migrations/migrations/0002_second.py | 20 +++++++- tests/migrations/test_graph.py | 24 +--------- tests/migrations/test_loader.py | 39 +++++++++++++++- tests/schema/tests.py | 12 ++--- 16 files changed, 221 insertions(+), 56 deletions(-) create mode 100644 django/db/migrations/operations/fields.py diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index d282e0898b..21eeefab82 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -272,7 +272,7 @@ class BaseDatabaseSchemaEditor(object): "new_tablespace": self.quote_name(new_db_tablespace), }) - def create_field(self, model, field, keep_default=False): + def add_field(self, model, field, keep_default=False): """ Creates 
a field on a model. Usually involves adding a column, but may involve adding a @@ -325,7 +325,7 @@ class BaseDatabaseSchemaEditor(object): } ) - def delete_field(self, model, field): + def remove_field(self, model, field): """ Removes a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. diff --git a/django/db/backends/sqlite3/schema.py b/django/db/backends/sqlite3/schema.py index de32dfd893..19bffc7520 100644 --- a/django/db/backends/sqlite3/schema.py +++ b/django/db/backends/sqlite3/schema.py @@ -73,7 +73,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): if restore_pk_field: restore_pk_field.primary_key = True - def create_field(self, model, field): + def add_field(self, model, field): """ Creates a field on a model. Usually involves adding a column, but may involve adding a @@ -89,7 +89,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): raise ValueError("You cannot add a null=False column without a default value on SQLite.") self._remake_table(model, create_fields=[field]) - def delete_field(self, model, field): + def remove_field(self, model, field): """ Removes a field from a model. Usually involves deleting a column, but for M2Ms may involve deleting a table. diff --git a/django/db/migrations/__init__.py b/django/db/migrations/__init__.py index 154e728341..e072786473 100644 --- a/django/db/migrations/__init__.py +++ b/django/db/migrations/__init__.py @@ -1 +1,2 @@ from .migration import Migration +from .operations import * diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index 8d23b36cb7..8e2446ca99 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -1,4 +1,5 @@ from django.utils.datastructures import SortedSet +from django.db.migrations.state import ProjectState class MigrationGraph(object): @@ -33,8 +34,10 @@ class MigrationGraph(object): self.nodes[node] = implementation def add_dependency(self, child, parent): - self.nodes[child] = None - self.nodes[parent] = None + if child not in self.nodes: + raise KeyError("Dependency references nonexistent child node %r" % (child,)) + if parent not in self.nodes: + raise KeyError("Dependency references nonexistent parent node %r" % (parent,)) self.dependencies.setdefault(child, set()).add(parent) self.dependents.setdefault(parent, set()).add(child) @@ -117,6 +120,16 @@ class MigrationGraph(object): def __str__(self): return "Graph: %s nodes, %s edges" % (len(self.nodes), sum(len(x) for x in self.dependencies.values())) + def project_state(self, node): + """ + Given a migration node, returns a complete ProjectState for it. 
+ """ + plan = self.forwards_plan(node) + project_state = ProjectState() + for node in plan: + project_state = self.nodes[node].mutate_state(project_state) + return project_state + class CircularDependencyError(Exception): """ diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index 4d191714cb..ce9fb7c8de 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -1,5 +1,6 @@ import os from django.utils.importlib import import_module +from django.utils.functional import cached_property from django.db.models.loading import cache from django.db.migrations.recorder import MigrationRecorder from django.db.migrations.graph import MigrationGraph @@ -64,9 +65,10 @@ class MigrationLoader(object): migration_module = import_module("%s.%s" % (module_name, migration_name)) if not hasattr(migration_module, "Migration"): raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_label)) - self.disk_migrations[app_label, migration_name] = migration_module.Migration + self.disk_migrations[app_label, migration_name] = migration_module.Migration(migration_name, app_label) - def build_graph(self): + @cached_property + def graph(self): """ Builds a migration dependency graph using both the disk and database. """ @@ -116,6 +118,7 @@ class MigrationLoader(object): graph = MigrationGraph() for key, migration in normal.items(): graph.add_node(key, migration) + for key, migration in normal.items(): for parent in migration.dependencies: graph.add_dependency(key, parent) return graph diff --git a/django/db/migrations/migration.py b/django/db/migrations/migration.py index afbcf65297..a8b744a9b4 100644 --- a/django/db/migrations/migration.py +++ b/django/db/migrations/migration.py @@ -10,6 +10,9 @@ class Migration(object): - dependencies: A list of tuples of (app_path, migration_name) - run_before: A list of tuples of (app_path, migration_name) - replaces: A list of migration_names + + Note that all migrations come out of migrations and into the Loader or + Graph as instances, having been initialised with their app label and name. """ # Operations to apply during this migration, in order. @@ -28,3 +31,17 @@ class Migration(object): # non-empty, this migration will only be applied if all these migrations # are not applied. replaces = [] + + def __init__(self, name, app_label): + self.name = name + self.app_label = app_label + + def mutate_state(self, project_state): + """ + Takes a ProjectState and returns a new one with the migration's + operations applied to it. + """ + new_state = project_state.clone() + for operation in self.operations: + operation.state_forwards(self.app_label, new_state) + return new_state diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py index 4fb70b0418..0aa7e2d119 100644 --- a/django/db/migrations/operations/__init__.py +++ b/django/db/migrations/operations/__init__.py @@ -1 +1,2 @@ from .models import CreateModel, DeleteModel +from .fields import AddField, RemoveField diff --git a/django/db/migrations/operations/base.py b/django/db/migrations/operations/base.py index b24b45a09a..f1b30d79f5 100644 --- a/django/db/migrations/operations/base.py +++ b/django/db/migrations/operations/base.py @@ -15,21 +15,21 @@ class Operation(object): # Some operations are impossible to reverse, like deleting data. 
reversible = True - def state_forwards(self, app, state): + def state_forwards(self, app_label, state): """ Takes the state from the previous migration, and mutates it so that it matches what this migration would perform. """ raise NotImplementedError() - def database_forwards(self, app, schema_editor, from_state, to_state): + def database_forwards(self, app_label, schema_editor, from_state, to_state): """ Performs the mutation on the database schema in the normal (forwards) direction. """ raise NotImplementedError() - def database_backwards(self, app, schema_editor, from_state, to_state): + def database_backwards(self, app_label, schema_editor, from_state, to_state): """ Performs the mutation on the database schema in the reverse direction - e.g. if this were CreateModel, it would in fact diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py new file mode 100644 index 0000000000..2ecf77f7ef --- /dev/null +++ b/django/db/migrations/operations/fields.py @@ -0,0 +1,52 @@ +from .base import Operation + + +class AddField(Operation): + """ + Adds a field to a model. + """ + + def __init__(self, model_name, name, instance): + self.model_name = model_name + self.name = name + self.instance = instance + + def state_forwards(self, app_label, state): + state.models[app_label, self.model_name.lower()].fields.append((self.name, self.instance)) + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + app_cache = to_state.render() + model = app_cache.get_model(app_label, self.name) + schema_editor.add_field(model, model._meta.get_field_by_name(self.name)) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + app_cache = from_state.render() + model = app_cache.get_model(app_label, self.name) + schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)) + + +class RemoveField(Operation): + """ + Removes a field from a model. + """ + + def __init__(self, model_name, name): + self.model_name = model_name + self.name = name + + def state_forwards(self, app_label, state): + new_fields = [] + for name, instance in state.models[app_label, self.model_name.lower()].fields: + if name != self.name: + new_fields.append((name, instance)) + state.models[app_label, self.model_name.lower()].fields = new_fields + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + app_cache = from_state.render() + model = app_cache.get_model(app_label, self.name) + schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + app_cache = to_state.render() + model = app_cache.get_model(app_label, self.name) + schema_editor.add_field(model, model._meta.get_field_by_name(self.name)) diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index fd709e26fa..22d24f1eed 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -1,4 +1,5 @@ from .base import Operation +from django.db import models from django.db.migrations.state import ModelState @@ -7,20 +8,39 @@ class CreateModel(Operation): Create a model's table. 
""" - def __init__(self, name): + def __init__(self, name, fields, options=None, bases=None): self.name = name + self.fields = fields + self.options = options or {} + self.bases = bases or (models.Model,) - def state_forwards(self, app, state): - state.models[app, self.name.lower()] = ModelState(state, app, self.name) + def state_forwards(self, app_label, state): + state.models[app_label, self.name.lower()] = ModelState(app_label, self.name, self.fields, self.options, self.bases) def database_forwards(self, app, schema_editor, from_state, to_state): app_cache = to_state.render() schema_editor.create_model(app_cache.get_model(app, self.name)) def database_backwards(self, app, schema_editor, from_state, to_state): - """ - Performs the mutation on the database schema in the reverse - direction - e.g. if this were CreateModel, it would in fact - drop the model's table. - """ - raise NotImplementedError() + app_cache = from_state.render() + schema_editor.delete_model(app_cache.get_model(app, self.name)) + + +class DeleteModel(Operation): + """ + Drops a model's table. + """ + + def __init__(self, name): + self.name = name + + def state_forwards(self, app_label, state): + del state.models[app_label, self.name.lower()] + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + app_cache = from_state.render() + schema_editor.delete_model(app_cache.get_model(app_label, self.name)) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + app_cache = to_state.render() + schema_editor.create_model(app_cache.get_model(app_label, self.name)) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index 44ee166121..d189e8709e 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -21,7 +21,7 @@ class ProjectState(object): def clone(self): "Returns an exact copy of this ProjectState" return ProjectState( - models = dict((k, v.copy()) for k, v in self.models.items()) + models = dict((k, v.clone()) for k, v in self.models.items()) ) def render(self): @@ -49,12 +49,15 @@ class ModelState(object): mutate this one and then render it into a Model as required. """ - def __init__(self, app_label, name, fields=None, options=None, bases=None): + def __init__(self, app_label, name, fields, options=None, bases=None): self.app_label = app_label self.name = name - self.fields = fields or [] + self.fields = fields self.options = options or {} self.bases = bases or (models.Model, ) + # Sanity-check that fields is NOT a dict. It must be ordered. 
+ if isinstance(self.fields, dict): + raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.") @classmethod def from_model(cls, model): diff --git a/tests/migrations/migrations/0001_initial.py b/tests/migrations/migrations/0001_initial.py index bd613aa95e..e2ed8559a6 100644 --- a/tests/migrations/migrations/0001_initial.py +++ b/tests/migrations/migrations/0001_initial.py @@ -1,5 +1,27 @@ -from django.db import migrations +from django.db import migrations, models class Migration(migrations.Migration): - pass + + operations = [ + + migrations.CreateModel( + "Author", + [ + ("id", models.AutoField(primary_key=True)), + ("name", models.CharField(max_length=255)), + ("slug", models.SlugField(null=True)), + ("age", models.IntegerField(default=0)), + ("silly_field", models.BooleanField()), + ], + ), + + migrations.CreateModel( + "Tribble", + [ + ("id", models.AutoField(primary_key=True)), + ("fluffy", models.BooleanField(default=True)), + ], + ) + + ] diff --git a/tests/migrations/migrations/0002_second.py b/tests/migrations/migrations/0002_second.py index f4d3ba9902..fbaef11f71 100644 --- a/tests/migrations/migrations/0002_second.py +++ b/tests/migrations/migrations/0002_second.py @@ -1,6 +1,24 @@ -from django.db import migrations +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("migrations", "0001_initial")] + + operations = [ + + migrations.DeleteModel("Tribble"), + + migrations.RemoveField("Author", "silly_field"), + + migrations.AddField("Author", "important", models.BooleanField()), + + migrations.CreateModel( + "Book", + [ + ("id", models.AutoField(primary_key=True)), + ("author", models.ForeignKey("migrations.Author", null=True)), + ], + ) + + ] diff --git a/tests/migrations/test_graph.py b/tests/migrations/test_graph.py index b35d04fb8a..207cc45741 100644 --- a/tests/migrations/test_graph.py +++ b/tests/migrations/test_graph.py @@ -1,11 +1,8 @@ -from django.test import TransactionTestCase, TestCase -from django.db import connection +from django.test import TestCase from django.db.migrations.graph import MigrationGraph, CircularDependencyError -from django.db.migrations.loader import MigrationLoader -from django.db.migrations.recorder import MigrationRecorder -class GraphTests(TransactionTestCase): +class GraphTests(TestCase): """ Tests the digraph structure. """ @@ -117,20 +114,3 @@ class GraphTests(TransactionTestCase): CircularDependencyError, graph.forwards_plan, ("app_a", "0003"), ) - - -class LoaderTests(TransactionTestCase): - """ - Tests the disk and database loader. - """ - - def test_load(self): - """ - Makes sure the loader can load the migrations for the test apps. - """ - migration_loader = MigrationLoader(connection) - graph = migration_loader.build_graph() - self.assertEqual( - graph.forwards_plan(("migrations", "0002_second")), - [("migrations", "0001_initial"), ("migrations", "0002_second")], - ) diff --git a/tests/migrations/test_loader.py b/tests/migrations/test_loader.py index f8f31734f1..badace57cc 100644 --- a/tests/migrations/test_loader.py +++ b/tests/migrations/test_loader.py @@ -1,11 +1,12 @@ -from django.test import TestCase +from django.test import TestCase, TransactionTestCase from django.db import connection +from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder class RecorderTests(TestCase): """ - Tests the disk and database loader. + Tests recording migrations as applied or not. 
""" def test_apply(self): @@ -27,3 +28,37 @@ class RecorderTests(TestCase): recorder.applied_migrations(), set(), ) + + +class LoaderTests(TransactionTestCase): + """ + Tests the disk and database loader, and running through migrations + in memory. + """ + + def test_load(self): + """ + Makes sure the loader can load the migrations for the test apps, + and then render them out to a new AppCache. + """ + # Load and test the plan + migration_loader = MigrationLoader(connection) + self.assertEqual( + migration_loader.graph.forwards_plan(("migrations", "0002_second")), + [("migrations", "0001_initial"), ("migrations", "0002_second")], + ) + # Now render it out! + project_state = migration_loader.graph.project_state(("migrations", "0002_second")) + self.assertEqual(len(project_state.models), 2) + + author_state = project_state.models["migrations", "author"] + self.assertEqual( + [x for x, y in author_state.fields], + ["id", "name", "slug", "age", "important"] + ) + + book_state = project_state.models["migrations", "book"] + self.assertEqual( + [x for x, y in book_state.fields], + ["id", "author"] + ) diff --git a/tests/schema/tests.py b/tests/schema/tests.py index 752f9a5d0b..f643f3ed68 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -132,7 +132,7 @@ class SchemaTests(TransactionTestCase): else: self.fail("No FK constraint for author_id found") - def test_create_field(self): + def test_add_field(self): """ Tests adding fields to models """ @@ -146,7 +146,7 @@ class SchemaTests(TransactionTestCase): new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with connection.schema_editor() as editor: - editor.create_field( + editor.add_field( Author, new_field, ) @@ -251,7 +251,7 @@ class SchemaTests(TransactionTestCase): connection.rollback() # Add the field with connection.schema_editor() as editor: - editor.create_field( + editor.add_field( Author, new_field, ) @@ -260,7 +260,7 @@ class SchemaTests(TransactionTestCase): self.assertEqual(columns['tag_id'][0], "IntegerField") # Remove the M2M table again with connection.schema_editor() as editor: - editor.delete_field( + editor.remove_field( Author, new_field, ) @@ -530,7 +530,7 @@ class SchemaTests(TransactionTestCase): ) # Add a unique column, verify that creates an implicit index with connection.schema_editor() as editor: - editor.create_field( + editor.add_field( Book, BookWithSlug._meta.get_field_by_name("slug")[0], ) @@ -568,7 +568,7 @@ class SchemaTests(TransactionTestCase): new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: - editor.delete_field(Tag, Tag._meta.get_field_by_name("id")[0]) + editor.remove_field(Tag, Tag._meta.get_field_by_name("id")[0]) editor.alter_field( Tag, Tag._meta.get_field_by_name("slug")[0], From 52d2a8b3119479246225f7f888c370320cf8622f Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 29 May 2013 17:52:17 +0100 Subject: [PATCH 060/161] Add test for new __ne__ method on Promise. 
--- tests/utils_tests/test_functional.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/utils_tests/test_functional.py b/tests/utils_tests/test_functional.py index 3bb50007c6..fc2256a1f2 100644 --- a/tests/utils_tests/test_functional.py +++ b/tests/utils_tests/test_functional.py @@ -64,3 +64,15 @@ class FunctionalTestCase(unittest.TestCase): # check that it behaves like a property when there's no instance self.assertIsInstance(A.value, cached_property) + + def test_lazy_equality(self): + """ + Tests that == and != work correctly for Promises. + """ + + lazy_a = lazy(lambda: 4, int) + lazy_b = lazy(lambda: 4, int) + lazy_c = lazy(lambda: 5, int) + + self.assertEqual(lazy_a(), lazy_b()) + self.assertNotEqual(lazy_b(), lazy_c()) From 05929ee89bc51de772343d18d5a7bf2518d41333 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 17:55:59 +0100 Subject: [PATCH 061/161] Add the start of some operation tests --- tests/migrations/test_operations.py | 84 +++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 tests/migrations/test_operations.py diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py new file mode 100644 index 0000000000..ea6dea0302 --- /dev/null +++ b/tests/migrations/test_operations.py @@ -0,0 +1,84 @@ +from django.test import TransactionTestCase +from django.db import connection, models, migrations +from django.db.migrations.state import ProjectState, ModelState + + +class OperationTests(TransactionTestCase): + """ + Tests running the operations and making sure they do what they say they do. + Each test looks at their state changing, and then their database operation - + both forwards and backwards. + """ + + def assertTableExists(self, table): + self.assertIn(table, connection.introspection.get_table_list(connection.cursor())) + + def assertTableNotExists(self, table): + self.assertNotIn(table, connection.introspection.get_table_list(connection.cursor())) + + def set_up_test_model(self, app_label): + """ + Creates a test model state and database table. + """ + # Make the "current" state + creation = migrations.CreateModel( + "Pony", + [ + ("id", models.AutoField(primary_key=True)), + ("pink", models.BooleanField(default=True)), + ], + ) + project_state = ProjectState() + creation.state_forwards(app_label, project_state) + # Set up the database + with connection.schema_editor() as editor: + creation.database_forwards(app_label, editor, ProjectState(), project_state) + return project_state + + def test_create_model(self): + """ + Tests the CreateModel operation. + Most other tests use this as part of setup, so check failures here first. 
+ """ + operation = migrations.CreateModel( + "Pony", + [ + ("id", models.AutoField(primary_key=True)), + ("pink", models.BooleanField(default=True)), + ], + ) + # Test the state alteration + project_state = ProjectState() + new_state = project_state.clone() + operation.state_forwards("test_crmo", new_state) + self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") + self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) + # Test the database alteration + self.assertTableNotExists("test_crmo_pony") + with connection.schema_editor() as editor: + operation.database_forwards("test_crmo", editor, project_state, new_state) + self.assertTableExists("test_crmo_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_crmo", editor, new_state, project_state) + self.assertTableNotExists("test_crmo_pony") + + def test_delete_model(self): + """ + Tests the DeleteModel operation. + """ + project_state = self.set_up_test_model("test_dlmo") + # Test the state alteration + operation = migrations.DeleteModel("Pony") + new_state = project_state.clone() + operation.state_forwards("test_dlmo", new_state) + self.assertNotIn(("test_dlmo", "pony"), new_state.models) + # Test the database alteration + self.assertTableExists("test_dlmo_pony") + with connection.schema_editor() as editor: + operation.database_forwards("test_dlmo", editor, project_state, new_state) + self.assertTableNotExists("test_dlmo_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_dlmo", editor, new_state, project_state) + self.assertTableExists("test_dlmo_pony") From dfa7c5cade318ce113a95e12cae47c446604e993 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 17:56:26 +0100 Subject: [PATCH 062/161] Allow retrieval of project state at start of migrations --- django/db/migrations/graph.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index 8e2446ca99..620534bc22 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -120,11 +120,14 @@ class MigrationGraph(object): def __str__(self): return "Graph: %s nodes, %s edges" % (len(self.nodes), sum(len(x) for x in self.dependencies.values())) - def project_state(self, node): + def project_state(self, node, at_end=True): """ Given a migration node, returns a complete ProjectState for it. + If at_end is False, returns the state before the migration has run. 
""" plan = self.forwards_plan(node) + if not at_end: + plan = plan[:-1] project_state = ProjectState() for node in plan: project_state = self.nodes[node].mutate_state(project_state) From 7f9a0b7061e78a43b97abd6a3716d1e2017d72e3 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 17:56:53 +0100 Subject: [PATCH 063/161] Fix graph tests --- tests/migrations/test_graph.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/migrations/test_graph.py b/tests/migrations/test_graph.py index 207cc45741..e3d5a28283 100644 --- a/tests/migrations/test_graph.py +++ b/tests/migrations/test_graph.py @@ -17,6 +17,12 @@ class GraphTests(TestCase): """ # Build graph graph = MigrationGraph() + graph.add_node(("app_a", "0001"), None) + graph.add_node(("app_a", "0002"), None) + graph.add_node(("app_a", "0003"), None) + graph.add_node(("app_a", "0004"), None) + graph.add_node(("app_b", "0001"), None) + graph.add_node(("app_b", "0002"), None) graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) @@ -64,6 +70,14 @@ class GraphTests(TestCase): """ # Build graph graph = MigrationGraph() + graph.add_node(("app_a", "0001"), None) + graph.add_node(("app_a", "0002"), None) + graph.add_node(("app_a", "0003"), None) + graph.add_node(("app_a", "0004"), None) + graph.add_node(("app_b", "0001"), None) + graph.add_node(("app_b", "0002"), None) + graph.add_node(("app_c", "0001"), None) + graph.add_node(("app_c", "0002"), None) graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) @@ -104,6 +118,11 @@ class GraphTests(TestCase): """ # Build graph graph = MigrationGraph() + graph.add_node(("app_a", "0001"), None) + graph.add_node(("app_a", "0002"), None) + graph.add_node(("app_a", "0003"), None) + graph.add_node(("app_b", "0001"), None) + graph.add_node(("app_b", "0002"), None) graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) graph.add_dependency(("app_a", "0001"), ("app_b", "0002")) From e6f7f4533c183800c2a9ac526d8ee8887e96ac5d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 18:08:58 +0100 Subject: [PATCH 064/161] Add an Executor for end-to-end running --- django/db/migrations/executor.py | 68 ++++++++++++++++++++++ django/db/migrations/migration.py | 48 +++++++++++++++ django/db/migrations/operations/fields.py | 16 ++--- tests/migrations/migrations/0002_second.py | 2 +- tests/migrations/test_executor.py | 35 +++++++++++ tests/migrations/test_loader.py | 2 +- tests/migrations/test_operations.py | 28 ++++++++- 7 files changed, 188 insertions(+), 11 deletions(-) create mode 100644 django/db/migrations/executor.py create mode 100644 tests/migrations/test_executor.py diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py new file mode 100644 index 0000000000..e9e98d41fd --- /dev/null +++ b/django/db/migrations/executor.py @@ -0,0 +1,68 @@ +from .loader import MigrationLoader +from .recorder import MigrationRecorder + + +class MigrationExecutor(object): + """ + End-to-end migration execution - loads migrations, and runs them + up or down to a specified set of targets. 
+ """ + + def __init__(self, connection): + self.connection = connection + self.loader = MigrationLoader(self.connection) + self.recorder = MigrationRecorder(self.connection) + + def migration_plan(self, targets): + """ + Given a set of targets, returns a list of (Migration instance, backwards?). + """ + plan = [] + applied = self.recorder.applied_migrations() + for target in targets: + # If the migration is already applied, do backwards mode, + # otherwise do forwards mode. + if target in applied: + for migration in self.loader.graph.backwards_plan(target)[:-1]: + if migration in applied: + plan.append((self.loader.graph.nodes[migration], True)) + applied.remove(migration) + else: + for migration in self.loader.graph.forwards_plan(target): + if migration not in applied: + plan.append((self.loader.graph.nodes[migration], False)) + applied.add(migration) + return plan + + def migrate(self, targets): + """ + Migrates the database up to the given targets. + """ + plan = self.migration_plan(targets) + for migration, backwards in plan: + if not backwards: + self.apply_migration(migration) + else: + self.unapply_migration(migration) + + def apply_migration(self, migration): + """ + Runs a migration forwards. + """ + print "Applying %s" % migration + with self.connection.schema_editor() as schema_editor: + project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) + migration.apply(project_state, schema_editor) + self.recorder.record_applied(migration.app_label, migration.name) + print "Finished %s" % migration + + def unapply_migration(self, migration): + """ + Runs a migration backwards. + """ + print "Unapplying %s" % migration + with self.connection.schema_editor() as schema_editor: + project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) + migration.unapply(project_state, schema_editor) + self.recorder.record_unapplied(migration.app_label, migration.name) + print "Finished %s" % migration diff --git a/django/db/migrations/migration.py b/django/db/migrations/migration.py index a8b744a9b4..672e7440ad 100644 --- a/django/db/migrations/migration.py +++ b/django/db/migrations/migration.py @@ -36,6 +36,17 @@ class Migration(object): self.name = name self.app_label = app_label + def __eq__(self, other): + if not isinstance(other, Migration): + return False + return (self.name == other.name) and (self.app_label == other.app_label) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "" % (self.app_label, self.name) + def mutate_state(self, project_state): """ Takes a ProjectState and returns a new one with the migration's @@ -45,3 +56,40 @@ class Migration(object): for operation in self.operations: operation.state_forwards(self.app_label, new_state) return new_state + + def apply(self, project_state, schema_editor): + """ + Takes a project_state representing all migrations prior to this one + and a schema_editor for a live database and applies the migration + in a forwards order. + + Returns the resulting project state for efficient re-use by following + Migrations. 
+ """ + for operation in self.operations: + # Get the state after the operation has run + new_state = project_state.clone() + operation.state_forwards(self.app_label, new_state) + # Run the operation + operation.database_forwards(self.app_label, schema_editor, project_state, new_state) + # Switch states + project_state = new_state + return project_state + + def unapply(self, project_state, schema_editor): + """ + Takes a project_state representing all migrations prior to this one + and a schema_editor for a live database and applies the migration + in a reverse order. + """ + # We need to pre-calculate the stack of project states + to_run = [] + for operation in self.operations: + new_state = project_state.clone() + operation.state_forwards(self.app_label, new_state) + to_run.append((operation, project_state, new_state)) + project_state = new_state + # Now run them in reverse + to_run.reverse() + for operation, to_state, from_state in to_run: + operation.database_backwards(self.app_label, schema_editor, from_state, to_state) diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index 2ecf77f7ef..efb12b22c3 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -16,13 +16,13 @@ class AddField(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): app_cache = to_state.render() - model = app_cache.get_model(app_label, self.name) - schema_editor.add_field(model, model._meta.get_field_by_name(self.name)) + model = app_cache.get_model(app_label, self.model_name) + schema_editor.add_field(model, model._meta.get_field_by_name(self.name)[0]) def database_backwards(self, app_label, schema_editor, from_state, to_state): app_cache = from_state.render() - model = app_cache.get_model(app_label, self.name) - schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)) + model = app_cache.get_model(app_label, self.model_name) + schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)[0]) class RemoveField(Operation): @@ -43,10 +43,10 @@ class RemoveField(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): app_cache = from_state.render() - model = app_cache.get_model(app_label, self.name) - schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)) + model = app_cache.get_model(app_label, self.model_name) + schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)[0]) def database_backwards(self, app_label, schema_editor, from_state, to_state): app_cache = to_state.render() - model = app_cache.get_model(app_label, self.name) - schema_editor.add_field(model, model._meta.get_field_by_name(self.name)) + model = app_cache.get_model(app_label, self.model_name) + schema_editor.add_field(model, model._meta.get_field_by_name(self.name)[0]) diff --git a/tests/migrations/migrations/0002_second.py b/tests/migrations/migrations/0002_second.py index fbaef11f71..ace9a83347 100644 --- a/tests/migrations/migrations/0002_second.py +++ b/tests/migrations/migrations/0002_second.py @@ -11,7 +11,7 @@ class Migration(migrations.Migration): migrations.RemoveField("Author", "silly_field"), - migrations.AddField("Author", "important", models.BooleanField()), + migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", diff --git a/tests/migrations/test_executor.py b/tests/migrations/test_executor.py new file mode 100644 index 0000000000..629c47de56 --- 
/dev/null +++ b/tests/migrations/test_executor.py @@ -0,0 +1,35 @@ +from django.test import TransactionTestCase +from django.db import connection +from django.db.migrations.executor import MigrationExecutor + + +class ExecutorTests(TransactionTestCase): + """ + Tests the migration executor (full end-to-end running). + + Bear in mind that if these are failing you should fix the other + test failures first, as they may be propagating into here. + """ + + def test_run(self): + """ + Tests running a simple set of migrations. + """ + executor = MigrationExecutor(connection) + # Let's look at the plan first and make sure it's up to scratch + plan = executor.migration_plan([("migrations", "0002_second")]) + self.assertEqual( + plan, + [ + (executor.loader.graph.nodes["migrations", "0001_initial"], False), + (executor.loader.graph.nodes["migrations", "0002_second"], False), + ], + ) + # Were the tables there before? + self.assertNotIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) + self.assertNotIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) + # Alright, let's try running it + executor.migrate([("migrations", "0002_second")]) + # Are the tables there now? + self.assertIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) + self.assertIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) diff --git a/tests/migrations/test_loader.py b/tests/migrations/test_loader.py index badace57cc..9318f77004 100644 --- a/tests/migrations/test_loader.py +++ b/tests/migrations/test_loader.py @@ -54,7 +54,7 @@ class LoaderTests(TransactionTestCase): author_state = project_state.models["migrations", "author"] self.assertEqual( [x for x, y in author_state.fields], - ["id", "name", "slug", "age", "important"] + ["id", "name", "slug", "age", "rating"] ) book_state = project_state.models["migrations", "book"] diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index ea6dea0302..bf8549e092 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1,6 +1,6 @@ from django.test import TransactionTestCase from django.db import connection, models, migrations -from django.db.migrations.state import ProjectState, ModelState +from django.db.migrations.state import ProjectState class OperationTests(TransactionTestCase): @@ -16,6 +16,12 @@ class OperationTests(TransactionTestCase): def assertTableNotExists(self, table): self.assertNotIn(table, connection.introspection.get_table_list(connection.cursor())) + def assertColumnExists(self, table, column): + self.assertIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + + def assertColumnNotExists(self, table, column): + self.assertNotIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + def set_up_test_model(self, app_label): """ Creates a test model state and database table. @@ -82,3 +88,23 @@ class OperationTests(TransactionTestCase): with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") + + def test_add_field(self): + """ + Tests the AddField operation. 
+ """ + project_state = self.set_up_test_model("test_adfl") + # Test the state alteration + operation = migrations.AddField("Pony", "height", models.FloatField(null=True)) + new_state = project_state.clone() + operation.state_forwards("test_adfl", new_state) + self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 3) + # Test the database alteration + self.assertColumnNotExists("test_adfl_pony", "height") + with connection.schema_editor() as editor: + operation.database_forwards("test_adfl", editor, project_state, new_state) + self.assertColumnExists("test_adfl_pony", "height") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_adfl", editor, new_state, project_state) + self.assertColumnNotExists("test_adfl_pony", "height") From e6ba63def327441a167a901108b10e8dccfe9ab1 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 18:21:32 +0100 Subject: [PATCH 065/161] Fix error in ModelState.clone() not copying deep enough --- django/db/migrations/state.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index d189e8709e..b6618041f2 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -47,6 +47,10 @@ class ModelState(object): Represents a Django Model. We don't use the actual Model class as it's not designed to have its options changed - instead, we mutate this one and then render it into a Model as required. + + Note that while you are allowed to mutate .fields, you are not allowed + to mutate the Field instances inside there themselves - you must instead + assign new ones, as these are not detached during a clone. """ def __init__(self, app_label, name, fields, options=None, bases=None): @@ -92,8 +96,8 @@ class ModelState(object): return self.__class__( app_label = self.app_label, name = self.name, - fields = self.fields, - options = self.options, + fields = list(self.fields), + options = dict(self.options), bases = self.bases, ) From 8d81c6bc821b614f8c6fec337a7bc6c992816c31 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 18:24:20 +0100 Subject: [PATCH 066/161] Use correct model versions for field operations --- django/db/migrations/operations/fields.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index efb12b22c3..862716a347 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -15,14 +15,13 @@ class AddField(Operation): state.models[app_label, self.model_name.lower()].fields.append((self.name, self.instance)) def database_forwards(self, app_label, schema_editor, from_state, to_state): - app_cache = to_state.render() - model = app_cache.get_model(app_label, self.model_name) - schema_editor.add_field(model, model._meta.get_field_by_name(self.name)[0]) + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) def database_backwards(self, app_label, schema_editor, from_state, to_state): - app_cache = from_state.render() - model = app_cache.get_model(app_label, self.model_name) - schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)[0]) + from_model = from_state.render().get_model(app_label, self.model_name) + 
schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) class RemoveField(Operation): @@ -42,11 +41,10 @@ class RemoveField(Operation): state.models[app_label, self.model_name.lower()].fields = new_fields def database_forwards(self, app_label, schema_editor, from_state, to_state): - app_cache = from_state.render() - model = app_cache.get_model(app_label, self.model_name) - schema_editor.remove_field(model, model._meta.get_field_by_name(self.name)[0]) + from_model = from_state.render().get_model(app_label, self.model_name) + schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) def database_backwards(self, app_label, schema_editor, from_state, to_state): - app_cache = to_state.render() - model = app_cache.get_model(app_label, self.model_name) - schema_editor.add_field(model, model._meta.get_field_by_name(self.name)[0]) + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) From 7609e0b42e0014a6ad0adf9dafc7018cb268070e Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 30 May 2013 18:25:57 +0100 Subject: [PATCH 067/161] Remove debug prints --- django/db/migrations/executor.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py index e9e98d41fd..0f16001500 100644 --- a/django/db/migrations/executor.py +++ b/django/db/migrations/executor.py @@ -49,20 +49,16 @@ class MigrationExecutor(object): """ Runs a migration forwards. """ - print "Applying %s" % migration with self.connection.schema_editor() as schema_editor: project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) migration.apply(project_state, schema_editor) self.recorder.record_applied(migration.app_label, migration.name) - print "Finished %s" % migration def unapply_migration(self, migration): """ Runs a migration backwards. 
""" - print "Unapplying %s" % migration with self.connection.schema_editor() as schema_editor: project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) migration.unapply(project_state, schema_editor) self.recorder.record_unapplied(migration.app_label, migration.name) - print "Finished %s" % migration From 4492f06408828355a827824c805fd631a3b59d1b Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Jun 2013 15:28:38 +0100 Subject: [PATCH 068/161] A bit of an autodetector and a bit of a writer --- django/db/migrations/autodetector.py | 69 +++++++++++++ django/db/migrations/operations/base.py | 18 ++++ django/db/migrations/writer.py | 123 ++++++++++++++++++++++++ tests/migrations/test_operations.py | 6 ++ tests/migrations/test_writer.py | 64 ++++++++++++ 5 files changed, 280 insertions(+) create mode 100644 django/db/migrations/autodetector.py create mode 100644 django/db/migrations/writer.py create mode 100644 tests/migrations/test_writer.py diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py new file mode 100644 index 0000000000..3eea1d7291 --- /dev/null +++ b/django/db/migrations/autodetector.py @@ -0,0 +1,69 @@ +from django.db.migrations import operations +from django.db.migrations.migration import Migration + + +class AutoDetector(object): + """ + Takes a pair of ProjectStates, and compares them to see what the + first would need doing to make it match the second (the second + usually being the project's current state). + + Note that this naturally operates on entire projects at a time, + as it's likely that changes interact (for example, you can't + add a ForeignKey without having a migration to add the table it + depends on first). A user interface may offer single-app detection + if it wishes, with the caveat that it may not always be possible. + """ + + def __init__(self, from_state, to_state): + self.from_state = from_state + self.to_state = to_state + + def changes(self): + """ + Returns a set of migration plans which will achieve the + change from from_state to to_state. + """ + # We'll store migrations as lists by app names for now + self.migrations = {} + # Stage one: Adding models. 
+ added_models = set(self.to_state.keys()) - set(self.from_state.keys()) + for app_label, model_name in added_models: + model_state = self.to_state[app_label, model_name] + self.add_to_migration( + app_label, + operations.CreateModel( + model_state.name, + model_state.fields, + model_state.options, + model_state.bases, + ) + ) + # Removing models + removed_models = set(self.from_state.keys()) - set(self.to_state.keys()) + for app_label, model_name in removed_models: + model_state = self.from_state[app_label, model_name] + self.add_to_migration( + app_label, + operations.DeleteModel( + model_state.name, + ) + ) + # Alright, now sort out and return the migrations + for app_label, migrations in self.migrations.items(): + for m1, m2 in zip(migrations, migrations[1:]): + m2.dependencies.append((app_label, m1.name)) + # Flatten and return + result = set() + for app_label, migrations in self.migrations.items(): + for migration in migrations: + subclass = type("Migration", (Migration,), migration) + instance = subclass(migration['name'], app_label) + result.append(instance) + return result + + def add_to_migration(self, app_label, operation): + migrations = self.migrations.setdefault(app_label, []) + if not migrations: + migrations.append({"name": "temp-%i" % len(migrations) + 1, "operations": [], "dependencies": []}) + migrations[-1].operations.append(operation) diff --git a/django/db/migrations/operations/base.py b/django/db/migrations/operations/base.py index f1b30d79f5..084ce14959 100644 --- a/django/db/migrations/operations/base.py +++ b/django/db/migrations/operations/base.py @@ -15,6 +15,24 @@ class Operation(object): # Some operations are impossible to reverse, like deleting data. reversible = True + def __new__(cls, *args, **kwargs): + # We capture the arguments to make returning them trivial + self = object.__new__(cls) + self._constructor_args = (args, kwargs) + return self + + def deconstruct(self): + """ + Returns a 3-tuple of class import path (or just name if it lives + under django.db.migrations), positional arguments, and keyword + arguments. + """ + return ( + self.__class__.__name__, + self._constructor_args[0], + self._constructor_args[1], + ) + def state_forwards(self, app_label, state): """ Takes the state from the previous migration, and mutates it diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py new file mode 100644 index 0000000000..b32bb987d9 --- /dev/null +++ b/django/db/migrations/writer.py @@ -0,0 +1,123 @@ +import datetime +import types + + +class MigrationWriter(object): + """ + Takes a Migration instance and is able to produce the contents + of the migration file from it. + """ + + def __init__(self, migration): + self.migration = migration + + def as_string(self): + """ + Returns a string of the file contents. 
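# Illustrative usage (not part of the patch) of the writer added here, based
# on the test included later in this same patch; the migration below is a
# made-up example and a configured Django settings module is assumed.
from django.db import migrations
from django.db.migrations.writer import MigrationWriter

migration = type("Migration", (migrations.Migration,), {
    "operations": [migrations.DeleteModel("MyModel")],
    "dependencies": [],
})("0002_example", "testapp")

writer = MigrationWriter(migration)
print(writer.filename)        # 0002_example.py
output = writer.as_string()   # full migration file text, built from MIGRATION_TEMPLATE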
+ """ + items = { + "dependencies": repr(self.migration.dependencies), + } + imports = set() + # Deconstruct operations + operation_strings = [] + for operation in self.migration.operations: + name, args, kwargs = operation.deconstruct() + arg_strings = [] + for arg in args: + arg_string, arg_imports = self.serialize(arg) + arg_strings.append(arg_string) + imports.update(arg_imports) + for kw, arg in kwargs.items(): + arg_string, arg_imports = self.serialize(arg) + imports.update(arg_imports) + arg_strings.append("%s = %s" % (kw, arg_string)) + operation_strings.append("migrations.%s(%s\n )" % (name, "".join("\n %s," % arg for arg in arg_strings))) + items["operations"] = "[%s\n ]" % "".join("\n %s," % s for s in operation_strings) + # Format imports nicely + if not imports: + items["imports"] = "" + else: + items["imports"] = "\n".join(imports) + "\n" + return MIGRATION_TEMPLATE % items + + @property + def filename(self): + return "%s.py" % self.migration.name + + @classmethod + def serialize(cls, value): + """ + Serializes the value to a string that's parsable by Python, along + with any needed imports to make that string work. + More advanced than repr() as it can encode things + like datetime.datetime.now. + """ + # Sequences + if isinstance(value, (list, set, tuple)): + imports = set() + strings = [] + for item in value: + item_string, item_imports = cls.serialize(item) + imports.update(item_imports) + strings.append(item_string) + if isinstance(value, set): + format = "set([%s])" + elif isinstance(value, tuple): + format = "(%s,)" + else: + format = "[%s]" + return format % (", ".join(strings)), imports + # Dictionaries + elif isinstance(value, dict): + imports = set() + strings = [] + for k, v in value.items(): + k_string, k_imports = cls.serialize(k) + v_string, v_imports = cls.serialize(v) + imports.update(k_imports) + imports.update(v_imports) + strings.append((k_string, v_string)) + return "{%s}" % (", ".join(["%s: %s" % (k, v) for k, v in strings])), imports + # Datetimes + elif isinstance(value, (datetime.datetime, datetime.date)): + return repr(value), set(["import datetime"]) + # Simple types + elif isinstance(value, (int, long, float, str, unicode, bool, types.NoneType)): + return repr(value), set() + # Functions + elif isinstance(value, (types.FunctionType, types.BuiltinFunctionType)): + # Special-cases, as these don't have im_class + special_cases = [ + (datetime.datetime.now, "datetime.datetime.now", ["import datetime"]), + (datetime.datetime.utcnow, "datetime.datetime.utcnow", ["import datetime"]), + (datetime.date.today, "datetime.date.today", ["import datetime"]), + ] + for func, string, imports in special_cases: + if func == value: # For some reason "utcnow is not utcnow" + return string, set(imports) + # Method? + if hasattr(value, "im_class"): + klass = value.im_class + module = klass.__module__ + return "%s.%s.%s" % (module, klass.__name__, value.__name__), set(["import %s" % module]) + else: + module = value.__module__ + if module is None: + raise ValueError("Cannot serialize function %r: No module" % value) + return "%s.%s" % (module, value.__name__), set(["import %s" % module]) + # Uh oh. 
+ else: + raise ValueError("Cannot serialize: %r" % value) + + +MIGRATION_TEMPLATE = """# encoding: utf8 +from django.db import models, migrations +%(imports)s + +class Migration(migrations.Migration): + + dependencies = %(dependencies)s + + operations = %(operations)s +""" diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index bf8549e092..9c25e43990 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -68,6 +68,12 @@ class OperationTests(TransactionTestCase): with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") + # And deconstruction + definition = operation.deconstruct() + self.assertEqual(definition[0], "CreateModel") + self.assertEqual(len(definition[1]), 2) + self.assertEqual(len(definition[2]), 0) + self.assertEqual(definition[1][0], "Pony") def test_delete_model(self): """ diff --git a/tests/migrations/test_writer.py b/tests/migrations/test_writer.py new file mode 100644 index 0000000000..8c1753e794 --- /dev/null +++ b/tests/migrations/test_writer.py @@ -0,0 +1,64 @@ +# encoding: utf8 +import datetime +from django.test import TransactionTestCase +from django.db.migrations.writer import MigrationWriter +from django.db import migrations + + +class WriterTests(TransactionTestCase): + """ + Tests the migration writer (makes migration files from Migration instances) + """ + + def safe_exec(self, value, string): + l = {} + try: + exec(string, {}, l) + except: + self.fail("Could not serialize %r: failed to exec %r" % (value, string.strip())) + return l + + def assertSerializedEqual(self, value): + string, imports = MigrationWriter.serialize(value) + new_value = self.safe_exec(value, "%s\ntest_value_result = %s" % ("\n".join(imports), string))['test_value_result'] + self.assertEqual(new_value, value) + + def assertSerializedIs(self, value): + string, imports = MigrationWriter.serialize(value) + new_value = self.safe_exec(value, "%s\ntest_value_result = %s" % ("\n".join(imports), string))['test_value_result'] + self.assertIs(new_value, value) + + def test_serialize(self): + """ + Tests various different forms of the serializer. + This does not care about formatting, just that the parsed result is + correct, so we always exec() the result and check that. + """ + # Basic values + self.assertSerializedEqual(1) + self.assertSerializedEqual(None) + self.assertSerializedEqual("foobar") + self.assertSerializedEqual(u"föobár") + self.assertSerializedEqual({1: 2}) + self.assertSerializedEqual(["a", 2, True, None]) + self.assertSerializedEqual(set([2, 3, "eighty"])) + self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) + # Datetime stuff + self.assertSerializedEqual(datetime.datetime.utcnow()) + self.assertSerializedEqual(datetime.datetime.utcnow) + self.assertSerializedEqual(datetime.date.today()) + self.assertSerializedEqual(datetime.date.today) + + def test_simple_migration(self): + """ + Tests serializing a simple migration. 
+ """ + migration = type("Migration", (migrations.Migration,), { + "operations": [ + migrations.DeleteModel("MyModel"), + ], + "dependencies": [("testapp", "some_other_one")], + }) + writer = MigrationWriter(migration) + output = writer.as_string() + print output From c7aa4b5338e6bcf62f66eb53b309aa3a69b9a55d Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Jun 2013 15:36:31 +0100 Subject: [PATCH 069/161] Field encoding --- django/db/migrations/writer.py | 20 ++++++++++++++++++++ tests/migrations/test_writer.py | 3 ++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py index b32bb987d9..b21c6c9648 100644 --- a/django/db/migrations/writer.py +++ b/django/db/migrations/writer.py @@ -1,5 +1,6 @@ import datetime import types +from django.db import models class MigrationWriter(object): @@ -85,6 +86,25 @@ class MigrationWriter(object): # Simple types elif isinstance(value, (int, long, float, str, unicode, bool, types.NoneType)): return repr(value), set() + # Django fields + elif isinstance(value, models.Field): + attr_name, path, args, kwargs = value.deconstruct() + module, name = path.rsplit(".", 1) + if module == "django.db.models": + imports = set() + else: + imports = set("import %s" % module) + name = path + arg_strings = [] + for arg in args: + arg_string, arg_imports = cls.serialize(arg) + arg_strings.append(arg_string) + imports.update(arg_imports) + for kw, arg in kwargs.items(): + arg_string, arg_imports = cls.serialize(arg) + imports.update(arg_imports) + arg_strings.append("%s=%s" % (kw, arg_string)) + return "%s(%s)" % (name, ", ".join(arg_strings)), imports # Functions elif isinstance(value, (types.FunctionType, types.BuiltinFunctionType)): # Special-cases, as these don't have im_class diff --git a/tests/migrations/test_writer.py b/tests/migrations/test_writer.py index 8c1753e794..0581d6a4bd 100644 --- a/tests/migrations/test_writer.py +++ b/tests/migrations/test_writer.py @@ -2,7 +2,7 @@ import datetime from django.test import TransactionTestCase from django.db.migrations.writer import MigrationWriter -from django.db import migrations +from django.db import models, migrations class WriterTests(TransactionTestCase): @@ -56,6 +56,7 @@ class WriterTests(TransactionTestCase): migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.DeleteModel("MyModel"), + migrations.AddField("OtherModel", "field_name", models.DateTimeField(default=datetime.datetime.utcnow)) ], "dependencies": [("testapp", "some_other_one")], }) From cd809619a270ad48f1a77ee8c32bb0c7d8293f63 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Jun 2013 15:49:48 +0100 Subject: [PATCH 070/161] Autodetector tests --- django/db/migrations/autodetector.py | 16 ++++---- django/db/migrations/migration.py | 3 ++ tests/migrations/test_autodetector.py | 54 +++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 8 deletions(-) create mode 100644 tests/migrations/test_autodetector.py diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 3eea1d7291..ddb14520d3 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -2,7 +2,7 @@ from django.db.migrations import operations from django.db.migrations.migration import Migration -class AutoDetector(object): +class MigrationAutodetector(object): """ Takes a pair of ProjectStates, and compares them to see what the first would need doing to make it match the second (the second @@ -27,9 
+27,9 @@ class AutoDetector(object): # We'll store migrations as lists by app names for now self.migrations = {} # Stage one: Adding models. - added_models = set(self.to_state.keys()) - set(self.from_state.keys()) + added_models = set(self.to_state.models.keys()) - set(self.from_state.models.keys()) for app_label, model_name in added_models: - model_state = self.to_state[app_label, model_name] + model_state = self.to_state.models[app_label, model_name] self.add_to_migration( app_label, operations.CreateModel( @@ -40,9 +40,9 @@ class AutoDetector(object): ) ) # Removing models - removed_models = set(self.from_state.keys()) - set(self.to_state.keys()) + removed_models = set(self.from_state.models.keys()) - set(self.to_state.models.keys()) for app_label, model_name in removed_models: - model_state = self.from_state[app_label, model_name] + model_state = self.from_state.models[app_label, model_name] self.add_to_migration( app_label, operations.DeleteModel( @@ -59,11 +59,11 @@ class AutoDetector(object): for migration in migrations: subclass = type("Migration", (Migration,), migration) instance = subclass(migration['name'], app_label) - result.append(instance) + result.add(instance) return result def add_to_migration(self, app_label, operation): migrations = self.migrations.setdefault(app_label, []) if not migrations: - migrations.append({"name": "temp-%i" % len(migrations) + 1, "operations": [], "dependencies": []}) - migrations[-1].operations.append(operation) + migrations.append({"name": "auto_%i" % (len(migrations) + 1), "operations": [], "dependencies": []}) + migrations[-1]['operations'].append(operation) diff --git a/django/db/migrations/migration.py b/django/db/migrations/migration.py index 672e7440ad..ed7fad747b 100644 --- a/django/db/migrations/migration.py +++ b/django/db/migrations/migration.py @@ -47,6 +47,9 @@ class Migration(object): def __repr__(self): return "" % (self.app_label, self.name) + def __hash__(self): + return hash("%s.%s" % (self.app_label, self.name)) + def mutate_state(self, project_state): """ Takes a ProjectState and returns a new one with the migration's diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py new file mode 100644 index 0000000000..8e6a1e4160 --- /dev/null +++ b/tests/migrations/test_autodetector.py @@ -0,0 +1,54 @@ +# encoding: utf8 +from django.test import TransactionTestCase +from django.db.migrations.autodetector import MigrationAutodetector +from django.db.migrations.state import ProjectState, ModelState +from django.db import models + + +class AutodetectorTests(TransactionTestCase): + """ + Tests the migration autodetector. + """ + + author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) + + def make_project_state(self, model_states): + "Shortcut to make ProjectStates from lists of predefined models" + project_state = ProjectState() + for model_state in model_states: + project_state.add_model_state(model_state) + return project_state + + def test_new_model(self): + "Tests autodetection of new models" + # Make state + before = self.make_project_state([]) + after = self.make_project_state([self.author_empty]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes), 1) + # Right number of actions? + migration = changes.pop() + self.assertEqual(len(migration.operations), 1) + # Right action? 
+ action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + self.assertEqual(action.name, "Author") + + def test_old_model(self): + "Tests deletion of old models" + # Make state + before = self.make_project_state([self.author_empty]) + after = self.make_project_state([]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes), 1) + # Right number of actions? + migration = changes.pop() + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "DeleteModel") + self.assertEqual(action.name, "Author") From 91c470def50c4de420b0c6ee7debddc5bbd53ec8 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Jun 2013 17:56:43 +0100 Subject: [PATCH 071/161] Auto-naming for migrations and some writer fixes --- django/db/migrations/autodetector.py | 91 +++++++++++++++++++++++---- django/db/migrations/graph.py | 16 +++-- django/db/migrations/writer.py | 12 ++-- tests/migrations/test_autodetector.py | 33 ++++++++-- tests/migrations/test_writer.py | 41 ++++++++---- 5 files changed, 156 insertions(+), 37 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index ddb14520d3..be3e1c561f 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -1,3 +1,4 @@ +import re from django.db.migrations import operations from django.db.migrations.migration import Migration @@ -11,7 +12,7 @@ class MigrationAutodetector(object): Note that this naturally operates on entire projects at a time, as it's likely that changes interact (for example, you can't add a ForeignKey without having a migration to add the table it - depends on first). A user interface may offer single-app detection + depends on first). A user interface may offer single-app usage if it wishes, with the caveat that it may not always be possible. """ @@ -21,8 +22,12 @@ class MigrationAutodetector(object): def changes(self): """ - Returns a set of migration plans which will achieve the - change from from_state to to_state. + Returns a dict of migration plans which will achieve the + change from from_state to to_state. The dict has app labels + as kays and a list of migrations as values. + + The resulting migrations aren't specially named, but the names + do matter for dependencies inside the set. 
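# Illustrative usage (not part of the patch) of the autodetector, following
# the pattern of the tests above; assumes a configured Django settings module.
from django.db import models
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.state import ProjectState, ModelState

before = ProjectState()
after = ProjectState()
after.add_model_state(ModelState("testapp", "Author",
                                 [("id", models.AutoField(primary_key=True))]))
changes = MigrationAutodetector(before, after).changes()
# As of this patch, changes() returns a dict keyed by app label, here
# {"testapp": [<Migration>]} holding a single CreateModel("Author", ...).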
""" # We'll store migrations as lists by app names for now self.migrations = {} @@ -53,17 +58,77 @@ class MigrationAutodetector(object): for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): m2.dependencies.append((app_label, m1.name)) - # Flatten and return - result = set() - for app_label, migrations in self.migrations.items(): - for migration in migrations: - subclass = type("Migration", (Migration,), migration) - instance = subclass(migration['name'], app_label) - result.add(instance) - return result + return self.migrations def add_to_migration(self, app_label, operation): migrations = self.migrations.setdefault(app_label, []) if not migrations: - migrations.append({"name": "auto_%i" % (len(migrations) + 1), "operations": [], "dependencies": []}) - migrations[-1]['operations'].append(operation) + subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []}) + instance = subclass("auto_%i" % (len(migrations) + 1), app_label) + migrations.append(instance) + migrations[-1].operations.append(operation) + + @classmethod + def suggest_name(cls, ops): + """ + Given a set of operations, suggests a name for the migration + they might represent. Names not guaranteed to be unique; they + must be prefixed by a number or date. + """ + if len(ops) == 1: + if isinstance(ops[0], operations.CreateModel): + return ops[0].name.lower() + elif isinstance(ops[0], operations.DeleteModel): + return "delete_%s" % ops[0].name.lower() + elif all(isinstance(o, operations.CreateModel) for o in ops): + return "_".join(sorted(o.name.lower() for o in ops)) + return "auto" + + @classmethod + def parse_number(cls, name): + """ + Given a migration name, tries to extract a number from the + beginning of it. If no number found, returns None. + """ + if re.match(r"^\d+_", name): + return int(name.split("_")[0]) + return None + + @classmethod + def arrange_for_graph(cls, changes, graph): + """ + Takes in a result from changes() and a MigrationGraph, + and fixes the names and dependencies of the changes so they + extend the graph from the leaf nodes for each app. 
+ """ + leaves = graph.leaf_nodes() + name_map = {} + for app_label, migrations in changes.items(): + if not migrations: + continue + # Find the app label's current leaf node + app_leaf = None + for leaf in leaves: + if leaf[0] == app_label: + app_leaf = leaf + break + # Work out the next number in the sequence + if app_leaf is None: + next_number = 1 + else: + next_number = (cls.parse_number(app_leaf[1]) or 0) + 1 + # Name each migration + for i, migration in enumerate(migrations): + if i == 0 and app_leaf: + migration.dependencies.append(app_leaf) + if i == 0 and not app_leaf: + new_name = "0001_initial" + else: + new_name = "%04i_%s" % (next_number, cls.suggest_name(migration.operations)) + name_map[(app_label, migration.name)] = (app_label, new_name) + migration.name = new_name + # Now fix dependencies + for app_label, migrations in changes.items(): + for migration in migrations: + migration.dependencies = [name_map.get(d, d) for d in migration.dependencies] + return changes diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index 620534bc22..c1c3ba75bb 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -120,14 +120,20 @@ class MigrationGraph(object): def __str__(self): return "Graph: %s nodes, %s edges" % (len(self.nodes), sum(len(x) for x in self.dependencies.values())) - def project_state(self, node, at_end=True): + def project_state(self, nodes, at_end=True): """ - Given a migration node, returns a complete ProjectState for it. + Given a migration node or nodes, returns a complete ProjectState for it. If at_end is False, returns the state before the migration has run. """ - plan = self.forwards_plan(node) - if not at_end: - plan = plan[:-1] + if not isinstance(nodes[0], tuple): + nodes = [nodes] + plan = [] + for node in nodes: + for migration in self.forwards_plan(node): + if migration not in plan: + if not at_end and migration in nodes: + continue + plan.append(migration) project_state = ProjectState() for node in plan: project_state = self.nodes[node].mutate_state(project_state) diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py index b21c6c9648..f386cd847c 100644 --- a/django/db/migrations/writer.py +++ b/django/db/migrations/writer.py @@ -1,5 +1,7 @@ +from __future__ import unicode_literals import datetime import types +from django.utils import six from django.db import models @@ -36,11 +38,12 @@ class MigrationWriter(object): operation_strings.append("migrations.%s(%s\n )" % (name, "".join("\n %s," % arg for arg in arg_strings))) items["operations"] = "[%s\n ]" % "".join("\n %s," % s for s in operation_strings) # Format imports nicely + imports.discard("from django.db import models") if not imports: items["imports"] = "" else: items["imports"] = "\n".join(imports) + "\n" - return MIGRATION_TEMPLATE % items + return (MIGRATION_TEMPLATE % items).encode("utf8") @property def filename(self): @@ -84,16 +87,17 @@ class MigrationWriter(object): elif isinstance(value, (datetime.datetime, datetime.date)): return repr(value), set(["import datetime"]) # Simple types - elif isinstance(value, (int, long, float, str, unicode, bool, types.NoneType)): + elif isinstance(value, (int, long, float, six.binary_type, six.text_type, bool, types.NoneType)): return repr(value), set() # Django fields elif isinstance(value, models.Field): attr_name, path, args, kwargs = value.deconstruct() module, name = path.rsplit(".", 1) if module == "django.db.models": - imports = set() + imports = set(["from django.db import models"]) 
+ name = "models.%s" % name else: - imports = set("import %s" % module) + imports = set(["import %s" % module]) name = path arg_strings = [] for arg in args: diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 8e6a1e4160..1fc8f7aefb 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -2,6 +2,7 @@ from django.test import TransactionTestCase from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.state import ProjectState, ModelState +from django.db.migrations.graph import MigrationGraph from django.db import models @@ -11,6 +12,8 @@ class AutodetectorTests(TransactionTestCase): """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) + other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) + other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) def make_project_state(self, model_states): "Shortcut to make ProjectStates from lists of predefined models" @@ -19,6 +22,28 @@ class AutodetectorTests(TransactionTestCase): project_state.add_model_state(model_state) return project_state + def test_arrange_for_graph(self): + "Tests auto-naming of migrations for graph matching." + # Make a fake graph + graph = MigrationGraph() + graph.add_node(("testapp", "0001_initial"), None) + graph.add_node(("testapp", "0002_foobar"), None) + graph.add_node(("otherapp", "0001_initial"), None) + graph.add_dependency(("testapp", "0002_foobar"), ("testapp", "0001_initial")) + graph.add_dependency(("testapp", "0002_foobar"), ("otherapp", "0001_initial")) + # Use project state to make a new migration change set + before = self.make_project_state([]) + after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Run through arrange_for_graph + changes = autodetector.arrange_for_graph(changes, graph) + # Make sure there's a new name, deps match, etc. + self.assertEqual(changes["testapp"][0].name, "0003_author") + self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) + self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable") + self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) + def test_new_model(self): "Tests autodetection of new models" # Make state @@ -27,9 +52,9 @@ class AutodetectorTests(TransactionTestCase): autodetector = MigrationAutodetector(before, after) changes = autodetector.changes() # Right number of migrations? - self.assertEqual(len(changes), 1) + self.assertEqual(len(changes['testapp']), 1) # Right number of actions? - migration = changes.pop() + migration = changes['testapp'][0] self.assertEqual(len(migration.operations), 1) # Right action? action = migration.operations[0] @@ -44,9 +69,9 @@ class AutodetectorTests(TransactionTestCase): autodetector = MigrationAutodetector(before, after) changes = autodetector.changes() # Right number of migrations? - self.assertEqual(len(changes), 1) + self.assertEqual(len(changes['testapp']), 1) # Right number of actions? - migration = changes.pop() + migration = changes['testapp'][0] self.assertEqual(len(migration.operations), 1) # Right action? 
action = migration.operations[0] diff --git a/tests/migrations/test_writer.py b/tests/migrations/test_writer.py index 0581d6a4bd..c6ca100c1a 100644 --- a/tests/migrations/test_writer.py +++ b/tests/migrations/test_writer.py @@ -1,5 +1,6 @@ # encoding: utf8 import datetime +from django.utils import six from django.test import TransactionTestCase from django.db.migrations.writer import MigrationWriter from django.db import models, migrations @@ -10,23 +11,33 @@ class WriterTests(TransactionTestCase): Tests the migration writer (makes migration files from Migration instances) """ - def safe_exec(self, value, string): + def safe_exec(self, string, value=None): l = {} try: - exec(string, {}, l) - except: - self.fail("Could not serialize %r: failed to exec %r" % (value, string.strip())) + exec(string, globals(), l) + except Exception as e: + if value: + self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) + else: + self.fail("Could not exec %r: %s" % (string.strip(), e)) return l - def assertSerializedEqual(self, value): + def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) - new_value = self.safe_exec(value, "%s\ntest_value_result = %s" % ("\n".join(imports), string))['test_value_result'] - self.assertEqual(new_value, value) + return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] + + def assertSerializedEqual(self, value): + self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedIs(self, value): - string, imports = MigrationWriter.serialize(value) - new_value = self.safe_exec(value, "%s\ntest_value_result = %s" % ("\n".join(imports), string))['test_value_result'] - self.assertIs(new_value, value) + self.assertIs(self.serialize_round_trip(value), value) + + def assertSerializedFieldEqual(self, value): + new_value = self.serialize_round_trip(value) + self.assertEqual(value.__class__, new_value.__class__) + self.assertEqual(value.max_length, new_value.max_length) + self.assertEqual(value.null, new_value.null) + self.assertEqual(value.unique, new_value.unique) def test_serialize(self): """ @@ -48,6 +59,9 @@ class WriterTests(TransactionTestCase): self.assertSerializedEqual(datetime.datetime.utcnow) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) + # Django fields + self.assertSerializedFieldEqual(models.CharField(max_length=255)) + self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) def test_simple_migration(self): """ @@ -62,4 +76,9 @@ class WriterTests(TransactionTestCase): }) writer = MigrationWriter(migration) output = writer.as_string() - print output + # It should NOT be unicode. + self.assertIsInstance(output, six.binary_type, "Migration as_string returned unicode") + # We don't test the output formatting - that's too fragile. + # Just make sure it runs for now, and that things look alright. + result = self.safe_exec(output) + self.assertIn("Migration", result) From 315ab41e416c777d4f42932d42df07872e8f8895 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 7 Jun 2013 18:47:17 +0100 Subject: [PATCH 072/161] Initial stab at a migrate command, it's probably quite a way off. 
--- django/core/management/commands/migrate.py | 210 +++++++++++++++++++++ django/core/management/commands/syncdb.py | 152 +-------------- django/db/migrations/executor.py | 17 +- django/db/migrations/loader.py | 4 +- django/db/migrations/migration.py | 3 + django/utils/termcolors.py | 6 + 6 files changed, 241 insertions(+), 151 deletions(-) create mode 100644 django/core/management/commands/migrate.py diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py new file mode 100644 index 0000000000..68897fe19c --- /dev/null +++ b/django/core/management/commands/migrate.py @@ -0,0 +1,210 @@ +from optparse import make_option +import itertools +import traceback + +from django.conf import settings +from django.core.management import call_command +from django.core.management.base import NoArgsCommand +from django.core.management.color import color_style +from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal +from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS +from django.db.migrations.executor import MigrationExecutor +from django.utils.datastructures import SortedDict +from django.utils.importlib import import_module + + +class Command(NoArgsCommand): + option_list = NoArgsCommand.option_list + ( + make_option('--noinput', action='store_false', dest='interactive', default=True, + help='Tells Django to NOT prompt the user for input of any kind.'), + make_option('--no-initial-data', action='store_false', dest='load_initial_data', default=True, + help='Tells Django not to load any initial data after database synchronization.'), + make_option('--database', action='store', dest='database', + default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. ' + 'Defaults to the "default" database.'), + ) + + help = "Updates database schema. Manages both apps with migrations and those without." + + def handle_noargs(self, **options): + + self.verbosity = int(options.get('verbosity')) + self.interactive = options.get('interactive') + self.show_traceback = options.get('traceback') + self.load_initial_data = options.get('load_initial_data') + + self.style = color_style() + + # Import the 'management' module within each installed app, to register + # dispatcher events. + for app_name in settings.INSTALLED_APPS: + try: + import_module('.management', app_name) + except ImportError as exc: + # This is slightly hackish. We want to ignore ImportErrors + # if the "management" module itself is missing -- but we don't + # want to ignore the exception if the management module exists + # but raises an ImportError for some reason. The only way we + # can do this is to check the text of the exception. Note that + # we're a bit broad in how we check the text, because different + # Python implementations may not use the same text. 
+ # CPython uses the text "No module named management" + # PyPy uses "No module named myproject.myapp.management" + msg = exc.args[0] + if not msg.startswith('No module named') or 'management' not in msg: + raise + + # Get the database we're operating from + db = options.get('database') + connection = connections[db] + + # Work out which apps have migrations and which do not + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Calculating migration plan:")) + executor = MigrationExecutor(connection, self.migration_progress_callback) + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_LABEL(" Apps without migrations: ") + (", ".join(executor.loader.unmigrated_apps) or "(none)")) + + # Work out what targets they want, and then make a migration plan + # TODO: Let users select targets + targets = executor.loader.graph.leaf_nodes() + plan = executor.migration_plan(targets) + + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_LABEL(" Apps with migrations: ") + (", ".join(executor.loader.disk_migrations) or "(none)")) + + # Run the syncdb phase. + # If you ever manage to get rid of this, I owe you many, many drinks. + self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) + self.sync_apps(connection, executor.loader.unmigrated_apps) + + # Migrate! + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:")) + if not plan: + if self.verbosity >= 1: + self.stdout.write(" No migrations needed.") + else: + executor.migrate(targets, plan) + + def migration_progress_callback(self, action, migration): + if self.verbosity >= 1: + if action == "apply_start": + self.stdout.write(" Applying %s... " % migration) + self.stdout.flush() + elif action == "apply_success": + self.stdout.write(" OK\n") + elif action == "unapply_start": + self.stdout.write(" Unapplying %s... " % migration) + self.stdout.flush() + elif action == "unapply_success": + self.stdout.write(" OK\n") + + def sync_apps(self, connection, apps): + "Runs the old syncdb-style operation on a list of apps." + cursor = connection.cursor() + + # Get a list of already installed *models* so that references work right. 
+ tables = connection.introspection.table_names() + seen_models = connection.introspection.installed_models(tables) + created_models = set() + pending_references = {} + + # Build the manifest of apps and models that are to be synchronized + all_models = [ + (app.__name__.split('.')[-2], + [ + m for m in models.get_models(app, include_auto_created=True) + if router.allow_syncdb(connection.alias, m) + ]) + for app in models.get_apps() if app.__name__.split('.')[-2] in apps + ] + + def model_installed(model): + opts = model._meta + converter = connection.introspection.table_name_converter + # Note that if a model is unmanaged we short-circuit and never try to install it + return not ((converter(opts.db_table) in tables) or + (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) + + manifest = SortedDict( + (app_name, list(filter(model_installed, model_list))) + for app_name, model_list in all_models + ) + + create_models = set([x for x in itertools.chain(*manifest.values())]) + emit_pre_sync_signal(create_models, self.verbosity, self.interactive, connection.alias) + + # Create the tables for each model + if self.verbosity >= 1: + self.stdout.write(" Creating tables...\n") + with transaction.commit_on_success_unless_managed(using=connection.alias): + for app_name, model_list in manifest.items(): + for model in model_list: + # Create the model's database table, if it doesn't already exist. + if self.verbosity >= 3: + self.stdout.write(" Processing %s.%s model\n" % (app_name, model._meta.object_name)) + sql, references = connection.creation.sql_create_model(model, self.style, seen_models) + seen_models.add(model) + created_models.add(model) + for refto, refs in references.items(): + pending_references.setdefault(refto, []).extend(refs) + if refto in seen_models: + sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references)) + sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references)) + if self.verbosity >= 1 and sql: + self.stdout.write(" Creating table %s\n" % model._meta.db_table) + for statement in sql: + cursor.execute(statement) + tables.append(connection.introspection.table_name_converter(model._meta.db_table)) + + # Send the post_syncdb signal, so individual apps can do whatever they need + # to do at this point. + emit_post_sync_signal(created_models, self.verbosity, self.interactive, connection.alias) + + # The connection may have been closed by a syncdb handler. 
+ cursor = connection.cursor() + + # Install custom SQL for the app (but only if this + # is a model we've just created) + if self.verbosity >= 1: + self.stdout.write(" Installing custom SQL...\n") + for app_name, model_list in manifest.items(): + for model in model_list: + if model in created_models: + custom_sql = custom_sql_for_model(model, self.style, connection) + if custom_sql: + if self.verbosity >= 2: + self.stdout.write(" Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) + try: + with transaction.commit_on_success_unless_managed(using=connection.alias): + for sql in custom_sql: + cursor.execute(sql) + except Exception as e: + self.stderr.write(" Failed to install custom SQL for %s.%s model: %s\n" % (app_name, model._meta.object_name, e)) + if self.show_traceback: + traceback.print_exc() + else: + if self.verbosity >= 3: + self.stdout.write(" No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) + + if self.verbosity >= 1: + self.stdout.write(" Installing indexes...\n") + # Install SQL indices for all newly created models + for app_name, model_list in manifest.items(): + for model in model_list: + if model in created_models: + index_sql = connection.creation.sql_indexes_for_model(model, self.style) + if index_sql: + if self.verbosity >= 2: + self.stdout.write(" Installing index for %s.%s model\n" % (app_name, model._meta.object_name)) + try: + with transaction.commit_on_success_unless_managed(using=connection.alias): + for sql in index_sql: + cursor.execute(sql) + except Exception as e: + self.stderr.write(" Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e)) + + # Load initial_data fixtures (unless that has been disabled) + if self.load_initial_data: + call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=connection.alias, skip_validation=True) diff --git a/django/core/management/commands/syncdb.py b/django/core/management/commands/syncdb.py index 51470d7bda..17ea51f4d5 100644 --- a/django/core/management/commands/syncdb.py +++ b/django/core/management/commands/syncdb.py @@ -1,15 +1,8 @@ +import warnings from optparse import make_option -import itertools -import traceback - -from django.conf import settings +from django.db import DEFAULT_DB_ALIAS from django.core.management import call_command from django.core.management.base import NoArgsCommand -from django.core.management.color import no_style -from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal -from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS -from django.utils.datastructures import SortedDict -from django.utils.importlib import import_module class Command(NoArgsCommand): @@ -22,143 +15,8 @@ class Command(NoArgsCommand): default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. ' 'Defaults to the "default" database.'), ) - help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created." + help = "Deprecated - use 'migrate' instead." def handle_noargs(self, **options): - - verbosity = int(options.get('verbosity')) - interactive = options.get('interactive') - show_traceback = options.get('traceback') - load_initial_data = options.get('load_initial_data') - - self.style = no_style() - - # Import the 'management' module within each installed app, to register - # dispatcher events. 
- for app_name in settings.INSTALLED_APPS: - try: - import_module('.management', app_name) - except ImportError as exc: - # This is slightly hackish. We want to ignore ImportErrors - # if the "management" module itself is missing -- but we don't - # want to ignore the exception if the management module exists - # but raises an ImportError for some reason. The only way we - # can do this is to check the text of the exception. Note that - # we're a bit broad in how we check the text, because different - # Python implementations may not use the same text. - # CPython uses the text "No module named management" - # PyPy uses "No module named myproject.myapp.management" - msg = exc.args[0] - if not msg.startswith('No module named') or 'management' not in msg: - raise - - db = options.get('database') - connection = connections[db] - cursor = connection.cursor() - - # Get a list of already installed *models* so that references work right. - tables = connection.introspection.table_names() - seen_models = connection.introspection.installed_models(tables) - created_models = set() - pending_references = {} - - # Build the manifest of apps and models that are to be synchronized - all_models = [ - (app.__name__.split('.')[-2], - [m for m in models.get_models(app, include_auto_created=True) - if router.allow_syncdb(db, m)]) - for app in models.get_apps() - ] - - def model_installed(model): - opts = model._meta - converter = connection.introspection.table_name_converter - # Note that if a model is unmanaged we short-circuit and never try to install it - return not ((converter(opts.db_table) in tables) or - (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) - - - manifest = SortedDict( - (app_name, list(filter(model_installed, model_list))) - for app_name, model_list in all_models - ) - - create_models = set([x for x in itertools.chain(*manifest.values())]) - emit_pre_sync_signal(create_models, verbosity, interactive, db) - - # Create the tables for each model - if verbosity >= 1: - self.stdout.write("Creating tables ...\n") - with transaction.commit_on_success_unless_managed(using=db): - for app_name, model_list in manifest.items(): - for model in model_list: - # Create the model's database table, if it doesn't already exist. - if verbosity >= 3: - self.stdout.write("Processing %s.%s model\n" % (app_name, model._meta.object_name)) - sql, references = connection.creation.sql_create_model(model, self.style, seen_models) - seen_models.add(model) - created_models.add(model) - for refto, refs in references.items(): - pending_references.setdefault(refto, []).extend(refs) - if refto in seen_models: - sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references)) - sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references)) - if verbosity >= 1 and sql: - self.stdout.write("Creating table %s\n" % model._meta.db_table) - for statement in sql: - cursor.execute(statement) - tables.append(connection.introspection.table_name_converter(model._meta.db_table)) - - # Send the post_syncdb signal, so individual apps can do whatever they need - # to do at this point. - emit_post_sync_signal(created_models, verbosity, interactive, db) - - # The connection may have been closed by a syncdb handler. 
- cursor = connection.cursor() - - # Install custom SQL for the app (but only if this - # is a model we've just created) - if verbosity >= 1: - self.stdout.write("Installing custom SQL ...\n") - for app_name, model_list in manifest.items(): - for model in model_list: - if model in created_models: - custom_sql = custom_sql_for_model(model, self.style, connection) - if custom_sql: - if verbosity >= 2: - self.stdout.write("Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) - try: - with transaction.commit_on_success_unless_managed(using=db): - for sql in custom_sql: - cursor.execute(sql) - except Exception as e: - self.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % \ - (app_name, model._meta.object_name, e)) - if show_traceback: - traceback.print_exc() - else: - if verbosity >= 3: - self.stdout.write("No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) - - if verbosity >= 1: - self.stdout.write("Installing indexes ...\n") - # Install SQL indices for all newly created models - for app_name, model_list in manifest.items(): - for model in model_list: - if model in created_models: - index_sql = connection.creation.sql_indexes_for_model(model, self.style) - if index_sql: - if verbosity >= 2: - self.stdout.write("Installing index for %s.%s model\n" % (app_name, model._meta.object_name)) - try: - with transaction.commit_on_success_unless_managed(using=db): - for sql in index_sql: - cursor.execute(sql) - except Exception as e: - self.stderr.write("Failed to install index for %s.%s model: %s\n" % \ - (app_name, model._meta.object_name, e)) - - # Load initial_data fixtures (unless that has been disabled) - if load_initial_data: - call_command('loaddata', 'initial_data', verbosity=verbosity, - database=db, skip_validation=True) + warnings.warn("The syncdb command will be removed in Django 1.9", PendingDeprecationWarning) + call_command("migrate", **options) diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py index 0f16001500..82601be7c0 100644 --- a/django/db/migrations/executor.py +++ b/django/db/migrations/executor.py @@ -8,10 +8,12 @@ class MigrationExecutor(object): up or down to a specified set of targets. """ - def __init__(self, connection): + def __init__(self, connection, progress_callback=None): self.connection = connection self.loader = MigrationLoader(self.connection) + self.loader.load_disk() self.recorder = MigrationRecorder(self.connection) + self.progress_callback = progress_callback def migration_plan(self, targets): """ @@ -34,11 +36,12 @@ class MigrationExecutor(object): applied.add(migration) return plan - def migrate(self, targets): + def migrate(self, targets, plan=None): """ Migrates the database up to the given targets. """ - plan = self.migration_plan(targets) + if plan is None: + plan = self.migration_plan(targets) for migration, backwards in plan: if not backwards: self.apply_migration(migration) @@ -49,16 +52,24 @@ class MigrationExecutor(object): """ Runs a migration forwards. """ + if self.progress_callback: + self.progress_callback("apply_start", migration) with self.connection.schema_editor() as schema_editor: project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) migration.apply(project_state, schema_editor) self.recorder.record_applied(migration.app_label, migration.name) + if self.progress_callback: + self.progress_callback("apply_success", migration) def unapply_migration(self, migration): """ Runs a migration backwards. 
""" + if self.progress_callback: + self.progress_callback("unapply_start", migration) with self.connection.schema_editor() as schema_editor: project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) migration.unapply(project_state, schema_editor) self.recorder.record_unapplied(migration.app_label, migration.name) + if self.progress_callback: + self.progress_callback("unapply_success", migration) diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index ce9fb7c8de..894d8c91f2 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -41,6 +41,7 @@ class MigrationLoader(object): Loads the migrations from all INSTALLED_APPS from disk. """ self.disk_migrations = {} + self.unmigrated_apps = set() for app in cache.get_apps(): # Get the migrations module directory module_name = ".".join(app.__name__.split(".")[:-1] + ["migrations"]) @@ -50,7 +51,8 @@ class MigrationLoader(object): except ImportError as e: # I hate doing this, but I don't want to squash other import errors. # Might be better to try a directory check directly. - if "No module named migrations" in str(e): + if "No module named" in str(e) and "migrations" in str(e): + self.unmigrated_apps.add(app_label) continue directory = os.path.dirname(module.__file__) # Scan for .py[c|o] files diff --git a/django/db/migrations/migration.py b/django/db/migrations/migration.py index ed7fad747b..277c5faa3f 100644 --- a/django/db/migrations/migration.py +++ b/django/db/migrations/migration.py @@ -47,6 +47,9 @@ class Migration(object): def __repr__(self): return "" % (self.app_label, self.name) + def __str__(self): + return "%s.%s" % (self.app_label, self.name) + def __hash__(self): return hash("%s.%s" % (self.app_label, self.name)) diff --git a/django/utils/termcolors.py b/django/utils/termcolors.py index bb14837716..3562fa4fb5 100644 --- a/django/utils/termcolors.py +++ b/django/utils/termcolors.py @@ -86,6 +86,8 @@ PALETTES = { 'HTTP_BAD_REQUEST': {}, 'HTTP_NOT_FOUND': {}, 'HTTP_SERVER_ERROR': {}, + 'MIGRATE_HEADING': {}, + 'MIGRATE_LABEL': {}, }, DARK_PALETTE: { 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, @@ -101,6 +103,8 @@ PALETTES = { 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, 'HTTP_NOT_FOUND': { 'fg': 'yellow' }, 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, + 'MIGRATE_HEADING': { 'fg': 'cyan', 'opts': ('bold',) }, + 'MIGRATE_LABEL': { 'opts': ('bold',) }, }, LIGHT_PALETTE: { 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, @@ -116,6 +120,8 @@ PALETTES = { 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, 'HTTP_NOT_FOUND': { 'fg': 'red' }, 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, + 'MIGRATE_HEADING': { 'fg': 'cyan', 'opts': ('bold',) }, + 'MIGRATE_LABEL': { 'opts': ('bold',) }, } } DEFAULT_PALETTE = DARK_PALETTE From 2ae8a8a77d6968a155db9b17ba13e21d91bd351b Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 19 Jun 2013 15:36:02 +0100 Subject: [PATCH 073/161] Fix test running with new apps stuff/migrate actually running migrations --- django/conf/global_settings.py | 7 +++++++ django/db/backends/creation.py | 7 ++++--- django/db/migrations/loader.py | 11 +++++++++-- django/test/runner.py | 2 +- tests/migrations/test_executor.py | 4 ++++ tests/migrations/test_loader.py | 6 ++++-- tests/migrations/test_operations.py | 4 ++-- tests/migrations/test_writer.py | 4 ++-- 8 files changed, 33 insertions(+), 12 deletions(-) diff --git a/django/conf/global_settings.py b/django/conf/global_settings.py index 
596f4ae78a..310b5c163f 100644 --- a/django/conf/global_settings.py +++ b/django/conf/global_settings.py @@ -602,3 +602,10 @@ STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) + +############## +# MIGRATIONS # +############## + +# Migration module overrides for apps, by app label. +MIGRATION_MODULES = {} diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py index 98830407fb..ef45cbeeff 100644 --- a/django/db/backends/creation.py +++ b/django/db/backends/creation.py @@ -331,14 +331,15 @@ class BaseDatabaseCreation(object): settings.DATABASES[self.connection.alias]["NAME"] = test_database_name self.connection.settings_dict["NAME"] = test_database_name - # Report syncdb messages at one level lower than that requested. + # Report migrate messages at one level lower than that requested. # This ensures we don't get flooded with messages during testing # (unless you really ask to be flooded) - call_command('syncdb', + call_command('migrate', verbosity=max(verbosity - 1, 0), interactive=False, database=self.connection.alias, - load_initial_data=False) + load_initial_data=False, + test_database=True) # We need to then do a flush to ensure that any data installed by # custom SQL has been removed. The only test data should come from diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index 894d8c91f2..76d3fe8329 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -4,6 +4,7 @@ from django.utils.functional import cached_property from django.db.models.loading import cache from django.db.migrations.recorder import MigrationRecorder from django.db.migrations.graph import MigrationGraph +from django.conf import settings class MigrationLoader(object): @@ -36,6 +37,12 @@ class MigrationLoader(object): self.disk_migrations = None self.applied_migrations = None + def migration_module(self, app_label): + if app_label in settings.MIGRATION_MODULES: + return settings.MIGRATION_MODULES[app_label] + app = cache.get_app(app_label) + return ".".join(app.__name__.split(".")[:-1] + ["migrations"]) + def load_disk(self): """ Loads the migrations from all INSTALLED_APPS from disk. @@ -44,8 +51,8 @@ class MigrationLoader(object): self.unmigrated_apps = set() for app in cache.get_apps(): # Get the migrations module directory - module_name = ".".join(app.__name__.split(".")[:-1] + ["migrations"]) - app_label = module_name.split(".")[-2] + app_label = app.__name__.split(".")[-2] + module_name = self.migration_module(app_label) try: module = import_module(module_name) except ImportError as e: diff --git a/django/test/runner.py b/django/test/runner.py index e753e365fa..4d0517a4b4 100644 --- a/django/test/runner.py +++ b/django/test/runner.py @@ -266,7 +266,7 @@ def setup_databases(verbosity, interactive, **kwargs): # Second pass -- actually create the databases. 
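
As a sketch outside the patch itself: the new MIGRATION_MODULES setting maps an app label to the dotted path of the module holding that app's migrations, and the loader falls back to "<app package>.migrations" when there is no override. The app label and module path below are hypothetical:

    # settings.py -- relocate the "blog" app's migrations (names are hypothetical)
    MIGRATION_MODULES = {
        "blog": "blog.db_migrations",   # default would be "blog.migrations"
    }

    # The loader then resolves the module per app label:
    from django.db import connections
    from django.db.migrations.loader import MigrationLoader

    loader = MigrationLoader(connections["default"])
    loader.migration_module("blog")     # -> "blog.db_migrations"

The tests in this commit use override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) to point the test app at a fixture package in exactly this way.
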
old_names = [] mirrors = [] - + for signature, (db_name, aliases) in dependency_ordered( test_databases.items(), dependencies): test_db_name = None diff --git a/tests/migrations/test_executor.py b/tests/migrations/test_executor.py index 629c47de56..c426defe4a 100644 --- a/tests/migrations/test_executor.py +++ b/tests/migrations/test_executor.py @@ -1,4 +1,5 @@ from django.test import TransactionTestCase +from django.test.utils import override_settings from django.db import connection from django.db.migrations.executor import MigrationExecutor @@ -11,6 +12,9 @@ class ExecutorTests(TransactionTestCase): test failures first, as they may be propagating into here. """ + available_apps = ["migrations"] + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_run(self): """ Tests running a simple set of migrations. diff --git a/tests/migrations/test_loader.py b/tests/migrations/test_loader.py index 9318f77004..255efe9cfb 100644 --- a/tests/migrations/test_loader.py +++ b/tests/migrations/test_loader.py @@ -1,4 +1,5 @@ -from django.test import TestCase, TransactionTestCase +from django.test import TestCase +from django.test.utils import override_settings from django.db import connection from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder @@ -30,12 +31,13 @@ class RecorderTests(TestCase): ) -class LoaderTests(TransactionTestCase): +class LoaderTests(TestCase): """ Tests the disk and database loader, and running through migrations in memory. """ + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_load(self): """ Makes sure the loader can load the migrations for the test apps, diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 9c25e43990..fc5aa47faf 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1,9 +1,9 @@ -from django.test import TransactionTestCase +from django.test import TestCase from django.db import connection, models, migrations from django.db.migrations.state import ProjectState -class OperationTests(TransactionTestCase): +class OperationTests(TestCase): """ Tests running the operations and making sure they do what they say they do. 
Each test looks at their state changing, and then their database operation - diff --git a/tests/migrations/test_writer.py b/tests/migrations/test_writer.py index c6ca100c1a..22925fee9b 100644 --- a/tests/migrations/test_writer.py +++ b/tests/migrations/test_writer.py @@ -1,12 +1,12 @@ # encoding: utf8 import datetime from django.utils import six -from django.test import TransactionTestCase +from django.test import TestCase from django.db.migrations.writer import MigrationWriter from django.db import models, migrations -class WriterTests(TransactionTestCase): +class WriterTests(TestCase): """ Tests the migration writer (makes migration files from Migration instances) """ From ab5cbae9b7f2639ae33165e36c30e6563c1364c4 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 19 Jun 2013 15:36:22 +0100 Subject: [PATCH 074/161] First stab at some migration creation commands --- .../core/management/commands/makemigration.py | 52 ++++++ django/core/management/commands/migrate.py | 13 +- django/db/migrations/autodetector.py | 153 +++++++++++++----- django/db/migrations/graph.py | 11 +- django/db/migrations/recorder.py | 7 + tests/migrations/test_autodetector.py | 24 ++- .../0001_initial.py | 0 .../0002_second.py | 0 .../__init__.py | 0 tests/schema/tests.py | 2 + 10 files changed, 210 insertions(+), 52 deletions(-) create mode 100644 django/core/management/commands/makemigration.py rename tests/migrations/{migrations => test_migrations}/0001_initial.py (100%) rename tests/migrations/{migrations => test_migrations}/0002_second.py (100%) rename tests/migrations/{migrations => test_migrations}/__init__.py (100%) diff --git a/django/core/management/commands/makemigration.py b/django/core/management/commands/makemigration.py new file mode 100644 index 0000000000..fd12652036 --- /dev/null +++ b/django/core/management/commands/makemigration.py @@ -0,0 +1,52 @@ +import sys +from optparse import make_option + +from django.core.management.base import BaseCommand +from django.core.management.color import color_style +from django.core.exceptions import ImproperlyConfigured +from django.db import connections +from django.db.migrations.loader import MigrationLoader +from django.db.migrations.autodetector import MigrationAutodetector, InteractiveMigrationQuestioner +from django.db.migrations.state import ProjectState +from django.db.models.loading import cache + + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--empty', action='store_true', dest='empty', default=False, + help='Make a blank migration.'), + ) + + help = "Creates new migration(s) for apps." + usage_str = "Usage: ./manage.py createmigration [--empty] [app [app ...]]" + + def handle(self, *app_labels, **options): + + self.verbosity = int(options.get('verbosity')) + self.interactive = options.get('interactive') + self.style = color_style() + + # Make sure the app they asked for exists + app_labels = set(app_labels) + for app_label in app_labels: + try: + cache.get_app(app_label) + except ImproperlyConfigured: + self.stderr.write("The app you specified - '%s' - could not be found. Is it in INSTALLED_APPS?" 
% app_label) + sys.exit(2) + + # Load the current graph state + loader = MigrationLoader(connections["default"]) + + # Detect changes + autodetector = MigrationAutodetector( + loader.graph.project_state(), + ProjectState.from_app_cache(cache), + InteractiveMigrationQuestioner(specified_apps=app_labels), + ) + changes = autodetector.changes() + changes = autodetector.arrange_for_graph(changes, loader.graph) + if app_labels: + changes = autodetector.trim_to_apps(changes, app_labels) + + print changes diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 68897fe19c..0f12508559 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -5,7 +5,7 @@ import traceback from django.conf import settings from django.core.management import call_command from django.core.management.base import NoArgsCommand -from django.core.management.color import color_style +from django.core.management.color import color_style, no_style from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.db.migrations.executor import MigrationExecutor @@ -32,6 +32,7 @@ class Command(NoArgsCommand): self.interactive = options.get('interactive') self.show_traceback = options.get('traceback') self.load_initial_data = options.get('load_initial_data') + self.test_database = options.get('test_database', False) self.style = color_style() @@ -144,14 +145,14 @@ class Command(NoArgsCommand): # Create the model's database table, if it doesn't already exist. if self.verbosity >= 3: self.stdout.write(" Processing %s.%s model\n" % (app_name, model._meta.object_name)) - sql, references = connection.creation.sql_create_model(model, self.style, seen_models) + sql, references = connection.creation.sql_create_model(model, no_style(), seen_models) seen_models.add(model) created_models.add(model) for refto, refs in references.items(): pending_references.setdefault(refto, []).extend(refs) if refto in seen_models: - sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references)) - sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references)) + sql.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references)) + sql.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references)) if self.verbosity >= 1 and sql: self.stdout.write(" Creating table %s\n" % model._meta.db_table) for statement in sql: @@ -172,7 +173,7 @@ class Command(NoArgsCommand): for app_name, model_list in manifest.items(): for model in model_list: if model in created_models: - custom_sql = custom_sql_for_model(model, self.style, connection) + custom_sql = custom_sql_for_model(model, no_style(), connection) if custom_sql: if self.verbosity >= 2: self.stdout.write(" Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) @@ -194,7 +195,7 @@ class Command(NoArgsCommand): for app_name, model_list in manifest.items(): for model in model_list: if model in created_models: - index_sql = connection.creation.sql_indexes_for_model(model, self.style) + index_sql = connection.creation.sql_indexes_for_model(model, no_style()) if index_sql: if self.verbosity >= 2: self.stdout.write(" Installing index for %s.%s model\n" % (app_name, model._meta.object_name)) diff --git a/django/db/migrations/autodetector.py 
b/django/db/migrations/autodetector.py index be3e1c561f..f89634f57b 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -1,6 +1,8 @@ import re +from django.utils.six.moves import input from django.db.migrations import operations from django.db.migrations.migration import Migration +from django.db.models.loading import cache class MigrationAutodetector(object): @@ -16,9 +18,10 @@ class MigrationAutodetector(object): if it wishes, with the caveat that it may not always be possible. """ - def __init__(self, from_state, to_state): + def __init__(self, from_state, to_state, questioner=None): self.from_state = from_state self.to_state = to_state + self.questioner = questioner or MigrationQuestioner() def changes(self): """ @@ -54,7 +57,7 @@ class MigrationAutodetector(object): model_state.name, ) ) - # Alright, now sort out and return the migrations + # Alright, now add internal dependencies for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): m2.dependencies.append((app_label, m1.name)) @@ -68,6 +71,77 @@ class MigrationAutodetector(object): migrations.append(instance) migrations[-1].operations.append(operation) + def arrange_for_graph(self, changes, graph): + """ + Takes in a result from changes() and a MigrationGraph, + and fixes the names and dependencies of the changes so they + extend the graph from the leaf nodes for each app. + """ + leaves = graph.leaf_nodes() + name_map = {} + for app_label, migrations in list(changes.items()): + if not migrations: + continue + # Find the app label's current leaf node + app_leaf = None + for leaf in leaves: + if leaf[0] == app_label: + app_leaf = leaf + break + # Do they want an initial migration for this app? + if app_leaf is None and not self.questioner.ask_initial(app_label): + # They don't. + for migration in migrations: + name_map[(app_label, migration.name)] = (app_label, "__first__") + del changes[app_label] + # Work out the next number in the sequence + if app_leaf is None: + next_number = 1 + else: + next_number = (self.parse_number(app_leaf[1]) or 0) + 1 + # Name each migration + for i, migration in enumerate(migrations): + if i == 0 and app_leaf: + migration.dependencies.append(app_leaf) + if i == 0 and not app_leaf: + new_name = "0001_initial" + else: + new_name = "%04i_%s" % (next_number, self.suggest_name(migration.operations)) + name_map[(app_label, migration.name)] = (app_label, new_name) + migration.name = new_name + # Now fix dependencies + for app_label, migrations in changes.items(): + for migration in migrations: + migration.dependencies = [name_map.get(d, d) for d in migration.dependencies] + return changes + + def trim_to_apps(self, changes, app_labels): + """ + Takes changes from arrange_for_graph and set of app labels and + returns a modified set of changes which trims out as many migrations + that are not in app_labels as possible. + Note that some other migrations may still be present, as they may be + required dependencies. 
+ """ + # Gather other app dependencies in a first pass + app_dependencies = {} + for app_label, migrations in changes.items(): + for migration in migrations: + for dep_app_label, name in migration.dependencies: + app_dependencies.setdefault(app_label, set()).add(dep_app_label) + required_apps = set(app_labels) + # Keep resolving till there's no change + old_required_apps = None + while old_required_apps != required_apps: + old_required_apps = set(required_apps) + for app_label in list(required_apps): + required_apps.update(app_dependencies.get(app_label, set())) + # Remove all migrations that aren't needed + for app_label in list(changes.keys()): + if app_label not in required_apps: + del changes[app_label] + return changes + @classmethod def suggest_name(cls, ops): """ @@ -94,41 +168,40 @@ class MigrationAutodetector(object): return int(name.split("_")[0]) return None - @classmethod - def arrange_for_graph(cls, changes, graph): - """ - Takes in a result from changes() and a MigrationGraph, - and fixes the names and dependencies of the changes so they - extend the graph from the leaf nodes for each app. - """ - leaves = graph.leaf_nodes() - name_map = {} - for app_label, migrations in changes.items(): - if not migrations: - continue - # Find the app label's current leaf node - app_leaf = None - for leaf in leaves: - if leaf[0] == app_label: - app_leaf = leaf - break - # Work out the next number in the sequence - if app_leaf is None: - next_number = 1 - else: - next_number = (cls.parse_number(app_leaf[1]) or 0) + 1 - # Name each migration - for i, migration in enumerate(migrations): - if i == 0 and app_leaf: - migration.dependencies.append(app_leaf) - if i == 0 and not app_leaf: - new_name = "0001_initial" - else: - new_name = "%04i_%s" % (next_number, cls.suggest_name(migration.operations)) - name_map[(app_label, migration.name)] = (app_label, new_name) - migration.name = new_name - # Now fix dependencies - for app_label, migrations in changes.items(): - for migration in migrations: - migration.dependencies = [name_map.get(d, d) for d in migration.dependencies] - return changes + +class MigrationQuestioner(object): + """ + Gives the autodetector responses to questions it might have. + This base class has a built-in noninteractive mode, but the + interactive subclass is what the command-line arguments will use. + """ + + def __init__(self, defaults=None): + self.defaults = defaults or {} + + def ask_initial(self, app_label): + "Should we create an initial migration for the app?" + return self.defaults.get("ask_initial", False) + + +class InteractiveMigrationQuestioner(MigrationQuestioner): + + def __init__(self, specified_apps=set()): + self.specified_apps = specified_apps + + def _boolean_input(self, question): + result = input("%s " % question) + while len(result) < 1 or result[0].lower() not in "yn": + result = input("Please answer yes or no: ") + return result[0].lower() == "y" + + def ask_initial(self, app_label): + # Don't ask for django.contrib apps + app = cache.get_app(app_label) + if app.__name__.startswith("django.contrib"): + return False + # If it was specified on the command line, definitely true + if app_label in self.specified_apps: + return True + # Now ask + return self._boolean_input("Do you want to enable migrations for app '%s'?" 
% app_label) diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index c1c3ba75bb..1bbe0092ae 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -49,7 +49,7 @@ class MigrationGraph(object): a database. """ if node not in self.nodes: - raise ValueError("Node %r not a valid node" % node) + raise ValueError("Node %r not a valid node" % (node, )) return self.dfs(node, lambda x: self.dependencies.get(x, set())) def backwards_plan(self, node): @@ -60,7 +60,7 @@ class MigrationGraph(object): a database. """ if node not in self.nodes: - raise ValueError("Node %r not a valid node" % node) + raise ValueError("Node %r not a valid node" % (node, )) return self.dfs(node, lambda x: self.dependents.get(x, set())) def root_nodes(self): @@ -120,11 +120,16 @@ class MigrationGraph(object): def __str__(self): return "Graph: %s nodes, %s edges" % (len(self.nodes), sum(len(x) for x in self.dependencies.values())) - def project_state(self, nodes, at_end=True): + def project_state(self, nodes=None, at_end=True): """ Given a migration node or nodes, returns a complete ProjectState for it. If at_end is False, returns the state before the migration has run. + If nodes is not provided, returns the overall most current project state. """ + if nodes is None: + nodes = list(self.leaf_nodes()) + if len(nodes) == 0: + return ProjectState() if not isinstance(nodes[0], tuple): nodes = [nodes] plan = [] diff --git a/django/db/migrations/recorder.py b/django/db/migrations/recorder.py index a1f111f2bc..65e4fbbda7 100644 --- a/django/db/migrations/recorder.py +++ b/django/db/migrations/recorder.py @@ -60,3 +60,10 @@ class MigrationRecorder(object): """ self.ensure_schema() self.Migration.objects.filter(app=app, name=name).delete() + + @classmethod + def flush(cls): + """ + Deletes all migration records. Useful if you're testing migrations. + """ + cls.Migration.objects.all().delete() diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 1fc8f7aefb..2b9ce21a1d 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -1,12 +1,12 @@ # encoding: utf8 -from django.test import TransactionTestCase -from django.db.migrations.autodetector import MigrationAutodetector +from django.test import TestCase +from django.db.migrations.autodetector import MigrationAutodetector, MigrationQuestioner from django.db.migrations.state import ProjectState, ModelState from django.db.migrations.graph import MigrationGraph from django.db import models -class AutodetectorTests(TransactionTestCase): +class AutodetectorTests(TestCase): """ Tests the migration autodetector. 
""" @@ -14,6 +14,7 @@ class AutodetectorTests(TransactionTestCase): author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) + third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) def make_project_state(self, model_states): "Shortcut to make ProjectStates from lists of predefined models" @@ -44,6 +45,23 @@ class AutodetectorTests(TransactionTestCase): self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable") self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) + def test_trim_apps(self): + "Tests that trim does not remove dependencies but does remove unwanted apps" + # Use project state to make a new migration change set + before = self.make_project_state([]) + after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) + autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) + changes = autodetector.changes() + # Run through arrange_for_graph + graph = MigrationGraph() + changes = autodetector.arrange_for_graph(changes, graph) + changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) + changes = autodetector.trim_to_apps(changes, set(["testapp"])) + # Make sure there's the right set of migrations + self.assertEqual(changes["testapp"][0].name, "0001_initial") + self.assertEqual(changes["otherapp"][0].name, "0001_initial") + self.assertNotIn("thirdapp", changes) + def test_new_model(self): "Tests autodetection of new models" # Make state diff --git a/tests/migrations/migrations/0001_initial.py b/tests/migrations/test_migrations/0001_initial.py similarity index 100% rename from tests/migrations/migrations/0001_initial.py rename to tests/migrations/test_migrations/0001_initial.py diff --git a/tests/migrations/migrations/0002_second.py b/tests/migrations/test_migrations/0002_second.py similarity index 100% rename from tests/migrations/migrations/0002_second.py rename to tests/migrations/test_migrations/0002_second.py diff --git a/tests/migrations/migrations/__init__.py b/tests/migrations/test_migrations/__init__.py similarity index 100% rename from tests/migrations/migrations/__init__.py rename to tests/migrations/test_migrations/__init__.py diff --git a/tests/schema/tests.py b/tests/schema/tests.py index f643f3ed68..89853088ac 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -17,6 +17,8 @@ class SchemaTests(TransactionTestCase): as sometimes the code to check if a test has worked is almost as complex as the code it is testing. 
""" + + available_apps = [] models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest] no_table_strings = ["no such table", "unknown table", "does not exist"] From f25a385a5eb81ee660a82e6dd41e80938672f69c Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 19 Jun 2013 16:23:52 +0100 Subject: [PATCH 075/161] Makemigration command now works --- .../core/management/commands/makemigration.py | 30 ++++++++++++++++- django/db/migrations/autodetector.py | 8 ++--- django/db/migrations/loader.py | 5 +-- django/db/migrations/operations/base.py | 6 ++++ django/db/migrations/operations/fields.py | 6 ++++ django/db/migrations/operations/models.py | 6 ++++ django/db/migrations/writer.py | 33 +++++++++++++++++++ 7 files changed, 87 insertions(+), 7 deletions(-) diff --git a/django/core/management/commands/makemigration.py b/django/core/management/commands/makemigration.py index fd12652036..baf3f075ca 100644 --- a/django/core/management/commands/makemigration.py +++ b/django/core/management/commands/makemigration.py @@ -1,4 +1,5 @@ import sys +import os from optparse import make_option from django.core.management.base import BaseCommand @@ -8,6 +9,7 @@ from django.db import connections from django.db.migrations.loader import MigrationLoader from django.db.migrations.autodetector import MigrationAutodetector, InteractiveMigrationQuestioner from django.db.migrations.state import ProjectState +from django.db.migrations.writer import MigrationWriter from django.db.models.loading import cache @@ -49,4 +51,30 @@ class Command(BaseCommand): if app_labels: changes = autodetector.trim_to_apps(changes, app_labels) - print changes + # No changes? Tell them. + if not changes: + if len(app_labels) == 1: + self.stdout.write("No changes detected in app '%s'" % app_labels.pop()) + elif len(app_labels) > 1: + self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels))) + else: + self.stdout.write("No changes detected") + return + + for app_label, migrations in changes.items(): + self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label) + "\n") + for migration in migrations: + # Describe the migration + writer = MigrationWriter(migration) + self.stdout.write(" %s:\n" % (self.style.MIGRATE_LABEL(writer.filename),)) + for operation in migration.operations: + self.stdout.write(" - %s\n" % operation.describe()) + # Write it + migrations_directory = os.path.dirname(writer.path) + if not os.path.isdir(migrations_directory): + os.mkdir(migrations_directory) + init_path = os.path.join(migrations_directory, "__init__.py") + if not os.path.isfile(init_path): + open(init_path, "w").close() + with open(writer.path, "w") as fh: + fh.write(writer.as_string()) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index f89634f57b..79f710482d 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -41,10 +41,10 @@ class MigrationAutodetector(object): self.add_to_migration( app_label, operations.CreateModel( - model_state.name, - model_state.fields, - model_state.options, - model_state.bases, + name = model_state.name, + fields = model_state.fields, + options = model_state.options, + bases = model_state.bases, ) ) # Removing models diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index 76d3fe8329..9658793094 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -37,7 +37,8 @@ class MigrationLoader(object): 
self.disk_migrations = None self.applied_migrations = None - def migration_module(self, app_label): + @classmethod + def migrations_module(cls, app_label): if app_label in settings.MIGRATION_MODULES: return settings.MIGRATION_MODULES[app_label] app = cache.get_app(app_label) @@ -52,7 +53,7 @@ class MigrationLoader(object): for app in cache.get_apps(): # Get the migrations module directory app_label = app.__name__.split(".")[-2] - module_name = self.migration_module(app_label) + module_name = self.migrations_module(app_label) try: module = import_module(module_name) except ImportError as e: diff --git a/django/db/migrations/operations/base.py b/django/db/migrations/operations/base.py index 084ce14959..dcdb1ad30b 100644 --- a/django/db/migrations/operations/base.py +++ b/django/db/migrations/operations/base.py @@ -54,3 +54,9 @@ class Operation(object): drop the model's table. """ raise NotImplementedError() + + def describe(self): + """ + Outputs a brief summary of what the action does. + """ + return "%s: %s" % (self.__class__.__name__, self._constructor_args) diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index 862716a347..8fd8c9151d 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -23,6 +23,9 @@ class AddField(Operation): from_model = from_state.render().get_model(app_label, self.model_name) schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) + def describe(self): + return "Add field %s to %s" % (self.name, self.model_name) + class RemoveField(Operation): """ @@ -48,3 +51,6 @@ class RemoveField(Operation): from_model = from_state.render().get_model(app_label, self.model_name) to_model = to_state.render().get_model(app_label, self.model_name) schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) + + def describe(self): + return "Remove field %s from %s" % (self.name, self.model_name) diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index 22d24f1eed..ef7dafab90 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -25,6 +25,9 @@ class CreateModel(Operation): app_cache = from_state.render() schema_editor.delete_model(app_cache.get_model(app, self.name)) + def describe(self): + return "Create model %s" % (self.name, ) + class DeleteModel(Operation): """ @@ -44,3 +47,6 @@ class DeleteModel(Operation): def database_backwards(self, app_label, schema_editor, from_state, to_state): app_cache = to_state.render() schema_editor.create_model(app_cache.get_model(app_label, self.name)) + + def describe(self): + return "Delete model %s" % (self.name, ) diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py index f386cd847c..00e83681cd 100644 --- a/django/db/migrations/writer.py +++ b/django/db/migrations/writer.py @@ -1,8 +1,12 @@ from __future__ import unicode_literals import datetime import types +import os from django.utils import six +from django.utils.importlib import import_module from django.db import models +from django.db.models.loading import cache +from django.db.migrations.loader import MigrationLoader class MigrationWriter(object): @@ -49,6 +53,24 @@ class MigrationWriter(object): def filename(self): return "%s.py" % self.migration.name + @property + def path(self): + migrations_module_name = MigrationLoader.migrations_module(self.migration.app_label) + app_module = 
cache.get_app(self.migration.app_label) + # See if we can import the migrations module directly + try: + migrations_module = import_module(migrations_module_name) + basedir = os.path.dirname(migrations_module.__file__) + except ImportError: + # Alright, see if it's a direct submodule of the app + oneup = ".".join(migrations_module_name.split(".")[:-1]) + app_oneup = ".".join(app_module.__name__.split(".")[:-1]) + if oneup == app_oneup: + basedir = os.path.join(os.path.dirname(app_module.__file__), migrations_module_name.split(".")[-1]) + else: + raise ImportError("Cannot open migrations module %s for app %s" % (migrations_module_name, self.migration.app_label)) + return os.path.join(basedir, self.filename) + @classmethod def serialize(cls, value): """ @@ -130,6 +152,17 @@ class MigrationWriter(object): if module is None: raise ValueError("Cannot serialize function %r: No module" % value) return "%s.%s" % (module, value.__name__), set(["import %s" % module]) + # Classes + elif isinstance(value, type): + special_cases = [ + (models.Model, "models.Model", []), + ] + for case, string, imports in special_cases: + if case is value: + return string, set(imports) + if hasattr(value, "__module__"): + module = value.__module__ + return "%s.%s" % (module, value.__name__), set(["import %s" % module]) # Uh oh. else: raise ValueError("Cannot serialize: %r" % value) From 41214eaf18c083012c57836befa4b833cf8a3698 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 19 Jun 2013 16:41:04 +0100 Subject: [PATCH 076/161] Autodetect fields, have migrate actually work --- django/core/management/commands/migrate.py | 2 +- django/db/migrations/autodetector.py | 28 +++++++++++++++++++++- django/db/migrations/loader.py | 2 ++ django/db/migrations/operations/fields.py | 6 ++--- 4 files changed, 33 insertions(+), 5 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 0f12508559..a07e3e0d68 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -72,7 +72,7 @@ class Command(NoArgsCommand): plan = executor.migration_plan(targets) if self.verbosity >= 1: - self.stdout.write(self.style.MIGRATE_LABEL(" Apps with migrations: ") + (", ".join(executor.loader.disk_migrations) or "(none)")) + self.stdout.write(self.style.MIGRATE_LABEL(" Apps with migrations: ") + (", ".join(executor.loader.migrated_apps) or "(none)")) # Run the syncdb phase. # If you ever manage to get rid of this, I owe you many, many drinks. diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 79f710482d..213516f908 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -34,7 +34,7 @@ class MigrationAutodetector(object): """ # We'll store migrations as lists by app names for now self.migrations = {} - # Stage one: Adding models. + # Adding models. 
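
For orientation, a minimal sketch of how the makemigration command drives the MigrationWriter shown above; migration stands in for one of the objects produced by the autodetector, and this is not a verbatim excerpt from the command:

    import os
    from django.db.migrations.writer import MigrationWriter

    writer = MigrationWriter(migration)
    writer.filename            # e.g. "0001_initial.py"
    writer.path                # absolute path inside the app's migrations package

    migrations_dir = os.path.dirname(writer.path)
    if not os.path.isdir(migrations_dir):
        os.mkdir(migrations_dir)          # the command also creates __init__.py here
    with open(writer.path, "w") as fh:
        fh.write(writer.as_string())      # serialized migration source
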
added_models = set(self.to_state.models.keys()) - set(self.from_state.models.keys()) for app_label, model_name in added_models: model_state = self.to_state.models[app_label, model_name] @@ -57,6 +57,32 @@ class MigrationAutodetector(object): model_state.name, ) ) + # Changes within models + kept_models = set(self.from_state.models.keys()).intersection(self.to_state.models.keys()) + for app_label, model_name in kept_models: + old_model_state = self.from_state.models[app_label, model_name] + new_model_state = self.to_state.models[app_label, model_name] + # New fields + old_field_names = set([x for x, y in old_model_state.fields]) + new_field_names = set([x for x, y in new_model_state.fields]) + for field_name in new_field_names - old_field_names: + self.add_to_migration( + app_label, + operations.AddField( + model_name = model_name, + name = field_name, + field = [y for x, y in new_model_state.fields if x == field_name][0], + ) + ) + # Old fields + for field_name in old_field_names - new_field_names: + self.add_to_migration( + app_label, + operations.RemoveField( + model_name = model_name, + name = field_name, + ) + ) # Alright, now add internal dependencies for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index 9658793094..36e1540299 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -50,6 +50,7 @@ class MigrationLoader(object): """ self.disk_migrations = {} self.unmigrated_apps = set() + self.migrated_apps = set() for app in cache.get_apps(): # Get the migrations module directory app_label = app.__name__.split(".")[-2] @@ -62,6 +63,7 @@ class MigrationLoader(object): if "No module named" in str(e) and "migrations" in str(e): self.unmigrated_apps.add(app_label) continue + self.migrated_apps.add(app_label) directory = os.path.dirname(module.__file__) # Scan for .py[c|o] files migration_names = set() diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index 8fd8c9151d..660cba6b72 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -6,13 +6,13 @@ class AddField(Operation): Adds a field to a model. 
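
The field autodetection above boils down to a set difference over field names: additions become AddField operations and removals become RemoveField operations. A self-contained illustration of the idea, with field objects replaced by placeholder strings:

    old_fields = [("id", "AutoField"), ("name", "CharField")]
    new_fields = [("id", "AutoField"), ("title", "CharField")]

    old_names = set(name for name, _ in old_fields)
    new_names = set(name for name, _ in new_fields)

    added = new_names - old_names      # {"title"} -> AddField
    removed = old_names - new_names    # {"name"}  -> RemoveField
    print(sorted(added), sorted(removed))
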
""" - def __init__(self, model_name, name, instance): + def __init__(self, model_name, name, field): self.model_name = model_name self.name = name - self.instance = instance + self.field = field def state_forwards(self, app_label, state): - state.models[app_label, self.model_name.lower()].fields.append((self.name, self.instance)) + state.models[app_label, self.model_name.lower()].fields.append((self.name, self.field)) def database_forwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) From 73e30e9d3b1890502a7feef80c8856690fcd8c07 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 19 Jun 2013 17:01:48 +0100 Subject: [PATCH 077/161] Better naming, and prompt for NOT NULL field addition --- django/db/migrations/autodetector.py | 61 +++++++++++++++++++++++++++- 1 file changed, 60 insertions(+), 1 deletion(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 213516f908..a5aec671f8 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -1,4 +1,6 @@ import re +import sys +from django.utils import datetime_safe from django.utils.six.moves import input from django.db.migrations import operations from django.db.migrations.migration import Migration @@ -66,12 +68,16 @@ class MigrationAutodetector(object): old_field_names = set([x for x, y in old_model_state.fields]) new_field_names = set([x for x, y in new_model_state.fields]) for field_name in new_field_names - old_field_names: + field = [y for x, y in new_model_state.fields if x == field_name][0] + # You can't just add NOT NULL fields with no default + if not field.null and not field.has_default(): + field.default = self.questioner.ask_not_null_addition(field_name, model_name) self.add_to_migration( app_label, operations.AddField( model_name = model_name, name = field_name, - field = [y for x, y in new_model_state.fields if x == field_name][0], + field = field, ) ) # Old fields @@ -180,6 +186,10 @@ class MigrationAutodetector(object): return ops[0].name.lower() elif isinstance(ops[0], operations.DeleteModel): return "delete_%s" % ops[0].name.lower() + elif isinstance(ops[0], operations.AddField): + return "%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower()) + elif isinstance(ops[0], operations.RemoveField): + return "remove_%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower()) elif all(isinstance(o, operations.CreateModel) for o in ops): return "_".join(sorted(o.name.lower() for o in ops)) return "auto" @@ -209,6 +219,11 @@ class MigrationQuestioner(object): "Should we create an initial migration for the app?" return self.defaults.get("ask_initial", False) + def ask_not_null_addition(self, field_name, model_name): + "Adding a NOT NULL field to a model" + # None means quit + return None + class InteractiveMigrationQuestioner(MigrationQuestioner): @@ -221,7 +236,22 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): result = input("Please answer yes or no: ") return result[0].lower() == "y" + def _choice_input(self, question, choices): + print question + for i, choice in enumerate(choices): + print " %s) %s" % (i + 1, choice) + result = input("Select an option: ") + while True: + try: + value = int(result) + if 0 < value <= len(choices): + return value + except ValueError: + pass + result = input("Please select a valid option: ") + def ask_initial(self, app_label): + "Should we create an initial migration for the app?" 
# Don't ask for django.contrib apps app = cache.get_app(app_label) if app.__name__.startswith("django.contrib"): @@ -231,3 +261,32 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): return True # Now ask return self._boolean_input("Do you want to enable migrations for app '%s'?" % app_label) + + def ask_not_null_addition(self, field_name, model_name): + "Adding a NOT NULL field to a model" + choice = self._choice_input( + "You are trying to add a non-nullable field '%s' to %s without a default;\n" % (field_name, model_name) + + "this is not possible. Please select a fix:", + [ + "Provide a one-off default now (will be set on all existing rows)", + "Quit, and let me add a default in models.py", + ] + ) + if choice == 2: + sys.exit(3) + else: + print("Please enter the default value now, as valid Python") + print("The datetime module is available, so you can do e.g. datetime.date.today()") + while True: + code = input(">>> ") + if not code: + print("Please enter some code, or 'exit' (with no quotes) to exit.") + elif code == "exit": + sys.exit(1) + else: + try: + return eval(code, {}, {"datetime": datetime_safe}) + except (SyntaxError, NameError) as e: + print("Invalid input: %s" % e) + else: + break From 6f667999e1186b8eaa9c86e4cbd80d5c0ba20576 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 20 Jun 2013 14:54:11 +0100 Subject: [PATCH 078/161] Add operation that renames tables --- django/db/backends/schema.py | 6 +-- django/db/migrations/operations/__init__.py | 2 +- django/db/migrations/operations/models.py | 28 ++++++++++++++ tests/migrations/test_autodetector.py | 35 +++++++++++++++++ tests/migrations/test_operations.py | 43 +++++++++++++++++++++ 5 files changed, 110 insertions(+), 4 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 21eeefab82..c2503e5f53 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -362,9 +362,9 @@ class BaseDatabaseSchemaEditor(object): return self._alter_many_to_many(model, old_field, new_field, strict) elif old_type is None or new_type is None: raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % ( - old_field, - new_field, - )) + old_field, + new_field, + )) # Has unique been removed? 
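
Because a NOT NULL column needs a value for the rows that already exist, the autodetector now routes that decision through the questioner: the interactive subclass prompts for a one-off default or exits, and other callers can answer programmatically. As a hedged sketch (the subclass name is invented for the example), a non-interactive questioner might look like:

    from django.db.migrations.autodetector import MigrationQuestioner

    class OneOffDefaultQuestioner(MigrationQuestioner):
        # Hypothetical questioner that never touches the console.

        def ask_not_null_addition(self, field_name, model_name):
            # Value written to every existing row when the column is added,
            # e.g. an empty string for a CharField.
            return ""

Passed as the third argument to MigrationAutodetector(...), this replaces the "provide a default or quit" prompt shown above.
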
if old_field.unique and not new_field.unique: # Find the unique constraint for this field diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py index 0aa7e2d119..6c2c784635 100644 --- a/django/db/migrations/operations/__init__.py +++ b/django/db/migrations/operations/__init__.py @@ -1,2 +1,2 @@ -from .models import CreateModel, DeleteModel +from .models import CreateModel, DeleteModel, AlterModelTable from .fields import AddField, RemoveField diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index ef7dafab90..c73ff179d4 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -50,3 +50,31 @@ class DeleteModel(Operation): def describe(self): return "Delete model %s" % (self.name, ) + + +class AlterModelTable(Operation): + """ + Renames a model's table + """ + + def __init__(self, name, table): + self.name = name + self.table = table + + def state_forwards(self, app_label, state): + state.models[app_label, self.name.lower()].options["db_table"] = self.table + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + old_app_cache = from_state.render() + new_app_cache = to_state.render() + schema_editor.alter_db_table( + new_app_cache.get_model(app_label, self.name), + old_app_cache.get_model(app_label, self.name)._meta.db_table, + new_app_cache.get_model(app_label, self.name)._meta.db_table, + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Rename table for %s to %s" % (self.name, self.table) diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 2b9ce21a1d..7a90110127 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -12,6 +12,7 @@ class AutodetectorTests(TestCase): """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) + author_name = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200))]) other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) @@ -95,3 +96,37 @@ class AutodetectorTests(TestCase): action = migration.operations[0] self.assertEqual(action.__class__.__name__, "DeleteModel") self.assertEqual(action.name, "Author") + + def test_add_field(self): + "Tests autodetection of new fields" + # Make state + before = self.make_project_state([self.author_empty]) + after = self.make_project_state([self.author_name]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? 
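
The operations introduced around this point all follow the same two-step contract: state_forwards() mutates the in-memory ProjectState, while database_forwards() renders app caches from the old and new states and delegates the actual DDL to a schema editor. A minimal sketch of driving two operations by hand, mirroring the pattern used throughout tests/migrations/test_operations.py (the "somelabel" app label is illustrative and a configured database connection is assumed):

    from django.db import connection, migrations, models
    from django.db.migrations.state import ProjectState

    create = migrations.CreateModel("Pony", [("id", models.AutoField(primary_key=True))])
    rename = migrations.AlterModelTable("Pony", "somelabel_pony_2")

    state0 = ProjectState()
    state1 = state0.clone()
    create.state_forwards("somelabel", state1)      # in-memory bookkeeping only
    state2 = state1.clone()
    rename.state_forwards("somelabel", state2)

    with connection.schema_editor() as editor:      # now touch the database
        create.database_forwards("somelabel", editor, state0, state1)
        rename.database_forwards("somelabel", editor, state1, state2)
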
+ action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AddField") + self.assertEqual(action.name, "name") + + def test_remove_field(self): + "Tests autodetection of removed fields" + # Make state + before = self.make_project_state([self.author_name]) + after = self.make_project_state([self.author_empty]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "RemoveField") + self.assertEqual(action.name, "name") diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index fc5aa47faf..50674e84ba 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -114,3 +114,46 @@ class OperationTests(TestCase): with connection.schema_editor() as editor: operation.database_backwards("test_adfl", editor, new_state, project_state) self.assertColumnNotExists("test_adfl_pony", "height") + + def test_remove_field(self): + """ + Tests the RemoveField operation. + """ + project_state = self.set_up_test_model("test_rmfl") + # Test the state alteration + operation = migrations.RemoveField("Pony", "pink") + new_state = project_state.clone() + operation.state_forwards("test_rmfl", new_state) + self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 1) + # Test the database alteration + self.assertColumnExists("test_rmfl_pony", "pink") + with connection.schema_editor() as editor: + operation.database_forwards("test_rmfl", editor, project_state, new_state) + self.assertColumnNotExists("test_rmfl_pony", "pink") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_rmfl", editor, new_state, project_state) + self.assertColumnExists("test_rmfl_pony", "pink") + + def test_alter_model_table(self): + """ + Tests the AlterModelTable operation. 
+ """ + project_state = self.set_up_test_model("test_almota") + # Test the state alteration + operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") + new_state = project_state.clone() + operation.state_forwards("test_almota", new_state) + self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") + # Test the database alteration + self.assertTableExists("test_almota_pony") + self.assertTableNotExists("test_almota_pony_2") + with connection.schema_editor() as editor: + operation.database_forwards("test_almota", editor, project_state, new_state) + self.assertTableNotExists("test_almota_pony") + self.assertTableExists("test_almota_pony_2") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_almota", editor, new_state, project_state) + self.assertTableExists("test_almota_pony") + self.assertTableNotExists("test_almota_pony_2") From 80bdf68d6b5fd44056479ccc74cd24281b787a64 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 20 Jun 2013 15:12:59 +0100 Subject: [PATCH 079/161] Add AlterField and RenameField operations --- django/db/migrations/operations/__init__.py | 2 +- django/db/migrations/operations/fields.py | 68 +++++++++++++++++++++ django/db/migrations/state.py | 9 ++- tests/migrations/test_operations.py | 53 +++++++++++++++- 4 files changed, 129 insertions(+), 3 deletions(-) diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py index 6c2c784635..925b05fff3 100644 --- a/django/db/migrations/operations/__init__.py +++ b/django/db/migrations/operations/__init__.py @@ -1,2 +1,2 @@ from .models import CreateModel, DeleteModel, AlterModelTable -from .fields import AddField, RemoveField +from .fields import AddField, RemoveField, AlterField, RenameField diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index 660cba6b72..cc4f4a43df 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -54,3 +54,71 @@ class RemoveField(Operation): def describe(self): return "Remove field %s from %s" % (self.name, self.model_name) + + +class AlterField(Operation): + """ + Alters a field's database column (e.g. null, max_length) to the provided new field + """ + + def __init__(self, model_name, name, field): + self.model_name = model_name + self.name = name + self.field = field + + def state_forwards(self, app_label, state): + state.models[app_label, self.model_name.lower()].fields = [ + (n, self.field if n == self.name else f) for n, f in state.models[app_label, self.model_name.lower()].fields + ] + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.name)[0], + to_model._meta.get_field_by_name(self.name)[0], + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Alter field %s on %s" % (self.name, self.model_name) + + +class RenameField(Operation): + """ + Renames a field on the model. Might affect db_column too. 
+ """ + + def __init__(self, model_name, old_name, new_name): + self.model_name = model_name + self.old_name = old_name + self.new_name = new_name + + def state_forwards(self, app_label, state): + state.models[app_label, self.model_name.lower()].fields = [ + (self.new_name if n == self.old_name else n, f) for n, f in state.models[app_label, self.model_name.lower()].fields + ] + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.old_name)[0], + to_model._meta.get_field_by_name(self.new_name)[0], + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.new_name)[0], + to_model._meta.get_field_by_name(self.old_name)[0], + ) + + def describe(self): + return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index b6618041f2..65b749c80c 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -93,10 +93,17 @@ class ModelState(object): def clone(self): "Returns an exact copy of this ModelState" + # We deep-clone the fields using deconstruction + fields = [] + for name, field in self.fields: + _, path, args, kwargs = field.deconstruct() + field_class = import_by_path(path) + fields.append((name, field_class(*args, **kwargs))) + # Now make a copy return self.__class__( app_label = self.app_label, name = self.name, - fields = list(self.fields), + fields = fields, options = dict(self.options), bases = self.bases, ) diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 50674e84ba..6ee60afa5b 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -22,6 +22,12 @@ class OperationTests(TestCase): def assertColumnNotExists(self, table, column): self.assertNotIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + def assertColumnNull(self, table, column): + self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], True) + + def assertColumnNotNull(self, table, column): + self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], False) + def set_up_test_model(self, app_label): """ Creates a test model state and database table. @@ -50,7 +56,7 @@ class OperationTests(TestCase): "Pony", [ ("id", models.AutoField(primary_key=True)), - ("pink", models.BooleanField(default=True)), + ("pink", models.IntegerField(default=1)), ], ) # Test the state alteration @@ -157,3 +163,48 @@ class OperationTests(TestCase): operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") + + def test_alter_field(self): + """ + Tests the AlterField operation. 
+ """ + project_state = self.set_up_test_model("test_alfl") + # Test the state alteration + operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) + new_state = project_state.clone() + operation.state_forwards("test_alfl", new_state) + self.assertEqual([f for n, f in project_state.models["test_alfl", "pony"].fields if n == "pink"][0].null, False) + self.assertEqual([f for n, f in new_state.models["test_alfl", "pony"].fields if n == "pink"][0].null, True) + # Test the database alteration + self.assertColumnNotNull("test_alfl_pony", "pink") + with connection.schema_editor() as editor: + operation.database_forwards("test_alfl", editor, project_state, new_state) + self.assertColumnNull("test_alfl_pony", "pink") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_alfl", editor, new_state, project_state) + self.assertColumnNotNull("test_alfl_pony", "pink") + + def test_rename_field(self): + """ + Tests the RenameField operation. + """ + project_state = self.set_up_test_model("test_rnfl") + # Test the state alteration + operation = migrations.RenameField("Pony", "pink", "blue") + new_state = project_state.clone() + operation.state_forwards("test_rnfl", new_state) + self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) + self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) + # Test the database alteration + self.assertColumnExists("test_rnfl_pony", "pink") + self.assertColumnNotExists("test_rnfl_pony", "blue") + with connection.schema_editor() as editor: + operation.database_forwards("test_rnfl", editor, project_state, new_state) + self.assertColumnExists("test_rnfl_pony", "blue") + self.assertColumnNotExists("test_rnfl_pony", "pink") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_rnfl", editor, new_state, project_state) + self.assertColumnExists("test_rnfl_pony", "pink") + self.assertColumnNotExists("test_rnfl_pony", "blue") From 47e4b86ddf67c3ab0725f41d4802bc05ed0a6423 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 20 Jun 2013 15:19:30 +0100 Subject: [PATCH 080/161] Autodetect field alters --- django/db/migrations/autodetector.py | 14 ++++++++++++++ django/db/migrations/state.py | 6 ++++++ tests/migrations/test_autodetector.py | 18 ++++++++++++++++++ tests/migrations/test_operations.py | 4 ++-- 4 files changed, 40 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index a5aec671f8..508d4526e4 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -89,6 +89,20 @@ class MigrationAutodetector(object): name = field_name, ) ) + # The same fields + for field_name in old_field_names.intersection(new_field_names): + # Did the field change? 
+ old_field_dec = old_model_state.get_field_by_name(field_name).deconstruct() + new_field_dec = new_model_state.get_field_by_name(field_name).deconstruct() + if old_field_dec != new_field_dec: + self.add_to_migration( + app_label, + operations.AlterField( + model_name = model_name, + name = field_name, + field = new_model_state.get_field_by_name(field_name), + ) + ) # Alright, now add internal dependencies for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index 65b749c80c..d1ed22bc29 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -126,3 +126,9 @@ class ModelState(object): tuple(self.bases), body, ) + + def get_field_by_name(self, name): + for fname, field in self.fields: + if fname == name: + return field + raise ValueError("No field called %s on model %s" % (name, self.name)) diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 7a90110127..8f32174e7f 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -13,6 +13,7 @@ class AutodetectorTests(TestCase): author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200))]) + author_name_longer = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400))]) other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) @@ -130,3 +131,20 @@ class AutodetectorTests(TestCase): action = migration.operations[0] self.assertEqual(action.__class__.__name__, "RemoveField") self.assertEqual(action.name, "name") + + def test_alter_field(self): + "Tests autodetection of new fields" + # Make state + before = self.make_project_state([self.author_name]) + after = self.make_project_state([self.author_name_longer]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? 
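
The change detection above leans entirely on Field.deconstruct(): a field that deconstructs to the same (path, args, kwargs) is considered unchanged, and any difference in the keyword arguments is enough to emit an AlterField. A standalone illustration (not part of the patch):

    from django.db import models

    old = models.CharField(max_length=200)
    new = models.CharField(max_length=400)

    # deconstruct() returns (name, path, args, kwargs); only max_length differs
    # here, so the autodetector would schedule an AlterField for this column.
    assert old.deconstruct()[1:] != new.deconstruct()[1:]
    assert old.deconstruct()[1:] == models.CharField(max_length=200).deconstruct()[1:]
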
+ action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AlterField") + self.assertEqual(action.name, "name") diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 6ee60afa5b..2e72e11954 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -173,8 +173,8 @@ class OperationTests(TestCase): operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) - self.assertEqual([f for n, f in project_state.models["test_alfl", "pony"].fields if n == "pink"][0].null, False) - self.assertEqual([f for n, f in new_state.models["test_alfl", "pony"].fields if n == "pink"][0].null, True) + self.assertEqual(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False) + self.assertEqual(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: From 0e8ee50e868e7ce501fffb48096fcf8d0b1e05a6 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 20 Jun 2013 15:27:33 +0100 Subject: [PATCH 081/161] Rename makemigration to makemigrations --- .../management/commands/{makemigration.py => makemigrations.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename django/core/management/commands/{makemigration.py => makemigrations.py} (97%) diff --git a/django/core/management/commands/makemigration.py b/django/core/management/commands/makemigrations.py similarity index 97% rename from django/core/management/commands/makemigration.py rename to django/core/management/commands/makemigrations.py index baf3f075ca..0f04b2fc1f 100644 --- a/django/core/management/commands/makemigration.py +++ b/django/core/management/commands/makemigrations.py @@ -20,7 +20,7 @@ class Command(BaseCommand): ) help = "Creates new migration(s) for apps." - usage_str = "Usage: ./manage.py createmigration [--empty] [app [app ...]]" + usage_str = "Usage: ./manage.py makemigrations [--empty] [app [app ...]]" def handle(self, *app_labels, **options): From 92a10f5552315b22e7d374123e3d09249b6b9883 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 20 Jun 2013 16:02:43 +0100 Subject: [PATCH 082/161] Autodetect field renames. HAHAHA. AHAHAHAHA. YES. --- django/db/migrations/autodetector.py | 21 ++++++++++++++++++++- tests/migrations/test_autodetector.py | 21 ++++++++++++++++++++- 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 508d4526e4..b915a69293 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -68,7 +68,26 @@ class MigrationAutodetector(object): old_field_names = set([x for x, y in old_model_state.fields]) new_field_names = set([x for x, y in new_model_state.fields]) for field_name in new_field_names - old_field_names: - field = [y for x, y in new_model_state.fields if x == field_name][0] + field = new_model_state.get_field_by_name(field_name) + # Scan to see if this is actually a rename! 
+ field_dec = field.deconstruct()[1:] + found_rename = False + for removed_field_name in (old_field_names - new_field_names): + if old_model_state.get_field_by_name(removed_field_name).deconstruct()[1:] == field_dec: + self.add_to_migration( + app_label, + operations.RenameField( + model_name = model_name, + old_name = removed_field_name, + new_name = field_name, + ) + ) + old_field_names.remove(removed_field_name) + new_field_names.remove(field_name) + found_rename = True + break + if found_rename: + continue # You can't just add NOT NULL fields with no default if not field.null and not field.has_default(): field.default = self.questioner.ask_not_null_addition(field_name, model_name) diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 8f32174e7f..d9031faca7 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -14,6 +14,7 @@ class AutodetectorTests(TestCase): author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200))]) author_name_longer = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400))]) + author_name_renamed = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200))]) other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) @@ -22,7 +23,7 @@ class AutodetectorTests(TestCase): "Shortcut to make ProjectStates from lists of predefined models" project_state = ProjectState() for model_state in model_states: - project_state.add_model_state(model_state) + project_state.add_model_state(model_state.clone()) return project_state def test_arrange_for_graph(self): @@ -148,3 +149,21 @@ class AutodetectorTests(TestCase): action = migration.operations[0] self.assertEqual(action.__class__.__name__, "AlterField") self.assertEqual(action.name, "name") + + def test_rename_field(self): + "Tests autodetection of renamed fields" + # Make state + before = self.make_project_state([self.author_name]) + after = self.make_project_state([self.author_name_renamed]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? 
+ action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "RenameField") + self.assertEqual(action.old_name, "name") + self.assertEqual(action.new_name, "names") From cca40703dfdc6171bebe9be50a82ef61a0749cb0 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 21 Jun 2013 15:32:15 +0100 Subject: [PATCH 083/161] Prompt about renames rather than doing them automatically --- django/db/migrations/autodetector.py | 31 +++++++++++++++++---------- tests/migrations/test_autodetector.py | 2 +- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index b915a69293..107141d1cf 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -74,18 +74,19 @@ class MigrationAutodetector(object): found_rename = False for removed_field_name in (old_field_names - new_field_names): if old_model_state.get_field_by_name(removed_field_name).deconstruct()[1:] == field_dec: - self.add_to_migration( - app_label, - operations.RenameField( - model_name = model_name, - old_name = removed_field_name, - new_name = field_name, + if self.questioner.ask_rename(model_name, removed_field_name, field_name, field): + self.add_to_migration( + app_label, + operations.RenameField( + model_name = model_name, + old_name = removed_field_name, + new_name = field_name, + ) ) - ) - old_field_names.remove(removed_field_name) - new_field_names.remove(field_name) - found_rename = True - break + old_field_names.remove(removed_field_name) + new_field_names.remove(field_name) + found_rename = True + break if found_rename: continue # You can't just add NOT NULL fields with no default @@ -257,6 +258,10 @@ class MigrationQuestioner(object): # None means quit return None + def ask_rename(self, model_name, old_name, new_name, field_instance): + "Was this field really renamed?" + return self.defaults.get("ask_rename", False) + class InteractiveMigrationQuestioner(MigrationQuestioner): @@ -323,3 +328,7 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): print("Invalid input: %s" % e) else: break + + def ask_rename(self, model_name, old_name, new_name, field_instance): + "Was this field really renamed?" + return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)?" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__)) diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index d9031faca7..7a2f4715fa 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -155,7 +155,7 @@ class AutodetectorTests(TestCase): # Make state before = self.make_project_state([self.author_name]) after = self.make_project_state([self.author_name_renamed]) - autodetector = MigrationAutodetector(before, after) + autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_rename": True})) changes = autodetector.changes() # Right number of migrations? 
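
Passing a questioner also makes the autodetector scriptable: the plain MigrationQuestioner answers from its defaults dict instead of prompting, which is exactly how the test below confirms renames without going through input(). A sketch built from illustrative model states:

    from django.db import models
    from django.db.migrations.autodetector import MigrationAutodetector, MigrationQuestioner
    from django.db.migrations.state import ModelState, ProjectState

    def make_state(fields):
        state = ProjectState()
        state.add_model_state(ModelState("testapp", "Author", fields))
        return state

    before = make_state([("id", models.AutoField(primary_key=True)),
                         ("name", models.CharField(max_length=200))])
    after = make_state([("id", models.AutoField(primary_key=True)),
                        ("names", models.CharField(max_length=200))])

    # {"ask_rename": True} pre-answers the prompt, so the name -> names change
    # is treated as a confirmed rename and a RenameField operation is produced.
    autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_rename": True}))
    changes = autodetector.changes()    # {"testapp": [<Migration with one RenameField>]}
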
self.assertEqual(len(changes['testapp']), 1) From 48493cff73fd01870306965f1c48193602754a78 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 22 Jun 2013 12:56:37 +0100 Subject: [PATCH 084/161] Remove EmailField max_length default removal in deconstruct() --- django/db/models/fields/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index 7f3242b213..9263f81184 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -1129,8 +1129,8 @@ class EmailField(CharField): def deconstruct(self): name, path, args, kwargs = super(EmailField, self).deconstruct() - if kwargs.get("max_length", None) == 75: - del kwargs['max_length'] + # We do not exclude max_length if it matches default as we want to change + # the default in future. return name, path, args, kwargs def formfield(self, **kwargs): From e2d7e83256234251a81ad3388428f6579795a672 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 22 Jun 2013 17:15:51 +0100 Subject: [PATCH 085/161] Autodetect ForeignKeys and add dependencies/split on circulars --- django/db/migrations/autodetector.py | 101 +++++++++++++++++++++++--- django/db/migrations/state.py | 7 +- tests/migrations/test_autodetector.py | 67 +++++++++++++++++ 3 files changed, 164 insertions(+), 11 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 107141d1cf..bb065e99bf 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -36,19 +36,87 @@ class MigrationAutodetector(object): """ # We'll store migrations as lists by app names for now self.migrations = {} - # Adding models. + old_app_cache = self.from_state.render() + new_app_cache = self.to_state.render() + # Adding models. Phase 1 is adding models with no outward relationships. added_models = set(self.to_state.models.keys()) - set(self.from_state.models.keys()) + pending_add = {} for app_label, model_name in added_models: + model_state = self.to_state.models[app_label, model_name] + # Are there any relationships out from this model? if so, punt it to the next phase. + related_fields = [] + for field in new_app_cache.get_model(app_label, model_name)._meta.fields: + if hasattr(field, "rel"): + if hasattr(field.rel, "to"): + related_fields.append((field.name, field.rel.to._meta.app_label.lower(), field.rel.to._meta.object_name.lower())) + if hasattr(field.rel, "through") and not field.rel.though._meta.auto_created: + related_fields.append((field.name, field.rel.through._meta.app_label.lower(), field.rel.through._meta.object_name.lower())) + if related_fields: + pending_add[app_label, model_name] = related_fields + else: + self.add_to_migration( + app_label, + operations.CreateModel( + name = model_state.name, + fields = model_state.fields, + options = model_state.options, + bases = model_state.bases, + ) + ) + # Phase 2 is progressively adding pending models, splitting up into two + # migrations if required. + pending_new_fks = [] + while pending_add: + # Is there one we can add that has all dependencies satisfied? 
+ satisfied = [(m, rf) for m, rf in pending_add.items() if all((al, mn) not in pending_add for f, al, mn in rf)] + if satisfied: + (app_label, model_name), related_fields = sorted(satisfied)[0] + model_state = self.to_state.models[app_label, model_name] + self.add_to_migration( + app_label, + operations.CreateModel( + name = model_state.name, + fields = model_state.fields, + options = model_state.options, + bases = model_state.bases, + ) + ) + for field_name, other_app_label, other_model_name in related_fields: + self.add_dependency(app_label, other_app_label) + del pending_add[app_label, model_name] + # Ah well, we'll need to split one. Pick deterministically. + else: + (app_label, model_name), related_fields = sorted(pending_add.items())[0] + model_state = self.to_state.models[app_label, model_name] + # Work out the fields that need splitting out + bad_fields = dict((f, (al, mn)) for f, al, mn in related_fields if (al, mn) in pending_add) + # Create the model, without those + self.add_to_migration( + app_label, + operations.CreateModel( + name = model_state.name, + fields = [(n, f) for n, f in model_state.fields if n not in bad_fields], + options = model_state.options, + bases = model_state.bases, + ) + ) + # Add the bad fields to be made in a phase 3 + for field_name, (other_app_label, other_model_name) in bad_fields.items(): + pending_new_fks.append((app_label, model_name, field_name, other_app_label)) + del pending_add[app_label, model_name] + # Phase 3 is adding the final set of FKs as separate new migrations + for app_label, model_name, field_name, other_app_label in pending_new_fks: model_state = self.to_state.models[app_label, model_name] self.add_to_migration( app_label, - operations.CreateModel( - name = model_state.name, - fields = model_state.fields, - options = model_state.options, - bases = model_state.bases, - ) + operations.AddField( + model_name = model_name, + name = field_name, + field = model_state.get_field_by_name(field_name), + ), + new = True, ) + self.add_dependency(app_label, other_app_label) # Removing models removed_models = set(self.from_state.models.keys()) - set(self.to_state.models.keys()) for app_label, model_name in removed_models: @@ -127,16 +195,31 @@ class MigrationAutodetector(object): for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): m2.dependencies.append((app_label, m1.name)) + # Clean up dependencies + for app_label, migrations in self.migrations.items(): + for migration in migrations: + migration.dependencies = list(set(migration.dependencies)) return self.migrations - def add_to_migration(self, app_label, operation): + def add_to_migration(self, app_label, operation, new=False): migrations = self.migrations.setdefault(app_label, []) - if not migrations: + if not migrations or new: subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []}) instance = subclass("auto_%i" % (len(migrations) + 1), app_label) migrations.append(instance) migrations[-1].operations.append(operation) + def add_dependency(self, app_label, other_app_label): + """ + Adds a dependency to app_label's newest migration on + other_app_label's latest migration. 
+ """ + if self.migrations.get(other_app_label, []): + dependency = (other_app_label, self.migrations[other_app_label][-1].name) + else: + dependency = (other_app_label, "__first__") + self.migrations[app_label][-1].dependencies.append(dependency) + def arrange_for_graph(self, changes, graph): """ Takes in a result from changes() and a MigrationGraph, diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index d1ed22bc29..4ecdb18896 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -70,7 +70,7 @@ class ModelState(object): """ # Deconstruct the fields fields = [] - for field in model._meta.local_fields: + for field in model._meta.fields: name, path, args, kwargs = field.deconstruct() field_class = import_by_path(path) fields.append((name, field_class(*args, **kwargs))) @@ -83,12 +83,15 @@ class ModelState(object): if name in model._meta.original_attrs: options[name] = model._meta.original_attrs[name] # Make our record + bases = tuple(model for model in model.__bases__ if (not hasattr(model, "_meta") or not model._meta.abstract)) + if not bases: + bases = (models.Model, ) return cls( model._meta.app_label, model._meta.object_name, fields, options, - model.__bases__, + bases, ) def clone(self): diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 7a2f4715fa..540e84e8df 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -15,9 +15,12 @@ class AutodetectorTests(TestCase): author_name = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200))]) author_name_longer = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400))]) author_name_renamed = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200))]) + author_with_book = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book"))]) other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) + book = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))]) + edition = ModelState("thirdapp", "Edition", [("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book"))]) def make_project_state(self, model_states): "Shortcut to make ProjectStates from lists of predefined models" @@ -167,3 +170,67 @@ class AutodetectorTests(TestCase): self.assertEqual(action.__class__.__name__, "RenameField") self.assertEqual(action.old_name, "name") self.assertEqual(action.new_name, "names") + + def test_fk_dependency(self): + "Tests that having a ForeignKey automatically adds a dependency" + # Make state + before = self.make_project_state([]) + after = self.make_project_state([self.author_name, self.book, self.edition]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? 
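
Dependencies between apps are recorded as plain (app_label, migration_name) tuples on each generated Migration; add_dependency points either at the other app's newest generated migration or at the "__first__" placeholder. Roughly a hand-written equivalent of what the autodetector emits for the Book model in the test below (the class name and migration names are illustrative):

    from django.db import models
    from django.db.migrations.migration import Migration
    from django.db.migrations import operations

    class BookMigration(Migration):
        # Book carries a ForeignKey to testapp.Author, so this migration must
        # run after testapp's first generated migration.
        dependencies = [("testapp", "auto_1")]
        operations = [
            operations.CreateModel(
                name="Book",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                    ("author", models.ForeignKey("testapp.Author")),
                    ("title", models.CharField(max_length=200)),
                ],
            ),
        ]
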
+ self.assertEqual(len(changes['testapp']), 1) + self.assertEqual(len(changes['otherapp']), 1) + self.assertEqual(len(changes['thirdapp']), 1) + # Right number of actions? + migration1 = changes['testapp'][0] + self.assertEqual(len(migration1.operations), 1) + migration2 = changes['otherapp'][0] + self.assertEqual(len(migration2.operations), 1) + migration3 = changes['thirdapp'][0] + self.assertEqual(len(migration3.operations), 1) + # Right actions? + action = migration1.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + action = migration2.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + action = migration3.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + # Right dependencies? + self.assertEqual(migration1.dependencies, []) + self.assertEqual(migration2.dependencies, [("testapp", "auto_1")]) + self.assertEqual(migration3.dependencies, [("otherapp", "auto_1")]) + + def test_circular_fk_dependency(self): + """ + Tests that having a circular ForeignKey dependency automatically + resolves the situation into 2 migrations on one side and 1 on the other. + """ + # Make state + before = self.make_project_state([]) + after = self.make_project_state([self.author_with_book, self.book]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + self.assertEqual(len(changes['otherapp']), 2) + # Right number of actions? + migration1 = changes['testapp'][0] + self.assertEqual(len(migration1.operations), 1) + migration2 = changes['otherapp'][0] + self.assertEqual(len(migration2.operations), 1) + migration3 = changes['otherapp'][1] + self.assertEqual(len(migration2.operations), 1) + # Right actions? + action = migration1.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + action = migration2.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + self.assertEqual(len(action.fields), 2) + action = migration3.operations[0] + self.assertEqual(action.__class__.__name__, "AddField") + self.assertEqual(action.name, "author") + # Right dependencies? 
+ self.assertEqual(migration1.dependencies, [("otherapp", "auto_1")]) + self.assertEqual(migration2.dependencies, []) + self.assertEqual(set(migration3.dependencies), set([("otherapp", "auto_1"), ("testapp", "auto_1")])) From 310cdf492d2642e7cf00bcc169895f5954f10369 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 11:08:16 +0100 Subject: [PATCH 086/161] Fix M2M interaction with transactions --- tests/schema/tests.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/schema/tests.py b/tests/schema/tests.py index 89853088ac..4f3c9b7f10 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -250,7 +250,6 @@ class SchemaTests(TransactionTestCase): try: # Ensure there's no m2m table there self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) - connection.rollback() # Add the field with connection.schema_editor() as editor: editor.add_field( @@ -268,11 +267,9 @@ class SchemaTests(TransactionTestCase): ) # Ensure there's no m2m table there self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) - connection.rollback() finally: # Cleanup model states AuthorWithM2M._meta.local_many_to_many.remove(new_field) - del AuthorWithM2M._meta._m2m_cache def test_m2m_repoint(self): """ @@ -305,7 +302,6 @@ class SchemaTests(TransactionTestCase): ) # Ensure old M2M is gone self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) - connection.rollback() # Ensure the new M2M exists and points to UniqueTest constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table) if connection.features.supports_foreign_keys: From 67dcea711e92025d0e8676b869b7ef15dbc6db73 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 11:19:02 +0100 Subject: [PATCH 087/161] Add unique_together altering operation --- django/db/migrations/operations/__init__.py | 2 +- django/db/migrations/operations/fields.py | 8 ++--- django/db/migrations/operations/models.py | 36 +++++++++++++++++-- django/db/migrations/state.py | 9 +++-- tests/migrations/test_autodetector.py | 4 +-- tests/migrations/test_operations.py | 40 ++++++++++++++++++--- tests/migrations/test_state.py | 2 +- 7 files changed, 84 insertions(+), 17 deletions(-) diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py index 925b05fff3..afa5c85cdc 100644 --- a/django/db/migrations/operations/__init__.py +++ b/django/db/migrations/operations/__init__.py @@ -1,2 +1,2 @@ -from .models import CreateModel, DeleteModel, AlterModelTable +from .models import CreateModel, DeleteModel, AlterModelTable, AlterUniqueTogether from .fields import AddField, RemoveField, AlterField, RenameField diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index cc4f4a43df..37e0c063e1 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -7,7 +7,7 @@ class AddField(Operation): """ def __init__(self, model_name, name, field): - self.model_name = model_name + self.model_name = model_name.lower() self.name = name self.field = field @@ -33,7 +33,7 @@ class RemoveField(Operation): """ def __init__(self, model_name, name): - self.model_name = model_name + self.model_name = model_name.lower() self.name = name def state_forwards(self, app_label, state): @@ -62,7 +62,7 @@ class AlterField(Operation): """ def __init__(self, model_name, name, field): - self.model_name = model_name 
+ self.model_name = model_name.lower() self.name = name self.field = field @@ -93,7 +93,7 @@ class RenameField(Operation): """ def __init__(self, model_name, old_name, new_name): - self.model_name = model_name + self.model_name = model_name.lower() self.old_name = old_name self.new_name = new_name diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index c73ff179d4..7279a163f0 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -9,7 +9,7 @@ class CreateModel(Operation): """ def __init__(self, name, fields, options=None, bases=None): - self.name = name + self.name = name.lower() self.fields = fields self.options = options or {} self.bases = bases or (models.Model,) @@ -35,7 +35,7 @@ class DeleteModel(Operation): """ def __init__(self, name): - self.name = name + self.name = name.lower() def state_forwards(self, app_label, state): del state.models[app_label, self.name.lower()] @@ -58,7 +58,7 @@ class AlterModelTable(Operation): """ def __init__(self, name, table): - self.name = name + self.name = name.lower() self.table = table def state_forwards(self, app_label, state): @@ -78,3 +78,33 @@ class AlterModelTable(Operation): def describe(self): return "Rename table for %s to %s" % (self.name, self.table) + + +class AlterUniqueTogether(Operation): + """ + Changes the value of unique_together to the target one. + Input value of unique_together must be a set of tuples. + """ + + def __init__(self, name, unique_together): + self.name = name.lower() + self.unique_together = set(tuple(cons) for cons in unique_together) + + def state_forwards(self, app_label, state): + model_state = state.models[app_label, self.name.lower()] + model_state.options["unique_together"] = self.unique_together + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + old_app_cache = from_state.render() + new_app_cache = to_state.render() + schema_editor.alter_unique_together( + new_app_cache.get_model(app_label, self.name), + getattr(old_app_cache.get_model(app_label, self.name)._meta, "unique_together", set()), + getattr(new_app_cache.get_model(app_label, self.name)._meta, "unique_together", set()), + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Alter unique_together for %s (%s constraints)" % (self.name, len(self.unique_together)) diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py index 4ecdb18896..8f0078d731 100644 --- a/django/db/migrations/state.py +++ b/django/db/migrations/state.py @@ -80,8 +80,11 @@ class ModelState(object): # Ignore some special options if name in ["app_cache", "app_label"]: continue - if name in model._meta.original_attrs: - options[name] = model._meta.original_attrs[name] + elif name in model._meta.original_attrs: + if name == "unique_together": + options[name] = set(model._meta.original_attrs["unique_together"]) + else: + options[name] = model._meta.original_attrs[name] # Make our record bases = tuple(model for model in model.__bases__ if (not hasattr(model, "_meta") or not model._meta.abstract)) if not bases: @@ -116,6 +119,8 @@ class ModelState(object): # First, make a Meta object meta_contents = {'app_label': self.app_label, "app_cache": app_cache} meta_contents.update(self.options) + if "unique_together" in meta_contents: + meta_contents["unique_together"] = 
list(meta_contents["unique_together"]) meta = type("Meta", tuple(), meta_contents) # Then, work out our bases # TODO: Use the actual bases diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 540e84e8df..659b45dbd2 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -83,7 +83,7 @@ class AutodetectorTests(TestCase): # Right action? action = migration.operations[0] self.assertEqual(action.__class__.__name__, "CreateModel") - self.assertEqual(action.name, "Author") + self.assertEqual(action.name, "author") def test_old_model(self): "Tests deletion of old models" @@ -100,7 +100,7 @@ class AutodetectorTests(TestCase): # Right action? action = migration.operations[0] self.assertEqual(action.__class__.__name__, "DeleteModel") - self.assertEqual(action.name, "Author") + self.assertEqual(action.name, "author") def test_add_field(self): "Tests autodetection of new fields" diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 2e72e11954..b2912de53c 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1,5 +1,6 @@ from django.test import TestCase from django.db import connection, models, migrations +from django.db.utils import IntegrityError from django.db.migrations.state import ProjectState @@ -38,6 +39,7 @@ class OperationTests(TestCase): [ ("id", models.AutoField(primary_key=True)), ("pink", models.BooleanField(default=True)), + ("weight", models.FloatField()), ], ) project_state = ProjectState() @@ -50,7 +52,7 @@ class OperationTests(TestCase): def test_create_model(self): """ Tests the CreateModel operation. - Most other tests use this as part of setup, so check failures here first. + Most other tests use this operation as part of setup, so check failures here first. 
""" operation = migrations.CreateModel( "Pony", @@ -63,7 +65,7 @@ class OperationTests(TestCase): project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) - self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") + self.assertEqual(new_state.models["test_crmo", "pony"].name, "pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") @@ -110,7 +112,7 @@ class OperationTests(TestCase): operation = migrations.AddField("Pony", "height", models.FloatField(null=True)) new_state = project_state.clone() operation.state_forwards("test_adfl", new_state) - self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 3) + self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4) # Test the database alteration self.assertColumnNotExists("test_adfl_pony", "height") with connection.schema_editor() as editor: @@ -130,7 +132,7 @@ class OperationTests(TestCase): operation = migrations.RemoveField("Pony", "pink") new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) - self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 1) + self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: @@ -208,3 +210,33 @@ class OperationTests(TestCase): operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") + + def test_alter_unique_together(self): + """ + Tests the AlterUniqueTogether operation. + """ + project_state = self.set_up_test_model("test_alunto") + # Test the state alteration + operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) + new_state = project_state.clone() + operation.state_forwards("test_alunto", new_state) + self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) + self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) + # Make sure we can insert duplicate rows + cursor = connection.cursor() + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + cursor.execute("DELETE FROM test_alunto_pony") + # Test the database alteration + with connection.schema_editor() as editor: + operation.database_forwards("test_alunto", editor, project_state, new_state) + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") + with self.assertRaises(IntegrityError): + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + cursor.execute("DELETE FROM test_alunto_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_alunto", editor, new_state, project_state) + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + cursor.execute("DELETE FROM test_alunto_pony") diff --git a/tests/migrations/test_state.py b/tests/migrations/test_state.py index c6930873ef..e5b3fbfa08 100644 --- a/tests/migrations/test_state.py +++ b/tests/migrations/test_state.py @@ -44,7 +44,7 @@ class StateTests(TestCase): 
self.assertEqual(author_state.fields[1][1].max_length, 255) self.assertEqual(author_state.fields[2][1].null, False) self.assertEqual(author_state.fields[3][1].null, True) - self.assertEqual(author_state.options, {"unique_together": ["name", "bio"]}) + self.assertEqual(author_state.options, {"unique_together": set(("name", "bio"))}) self.assertEqual(author_state.bases, (models.Model, )) self.assertEqual(book_state.app_label, "migrations") From 3b20af3e96b45fd3cfd9beb74bd09f65b4e38aa8 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 11:25:18 +0100 Subject: [PATCH 088/161] Autodetection of unique_together changes --- django/db/migrations/autodetector.py | 9 +++++++ tests/migrations/test_autodetector.py | 38 +++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index bb065e99bf..d524f96a0b 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -191,6 +191,15 @@ class MigrationAutodetector(object): field = new_model_state.get_field_by_name(field_name), ) ) + # unique_together changes + if old_model_state.options.get("unique_together", set()) != new_model_state.options.get("unique_together", set()): + self.add_to_migration( + app_label, + operations.AlterUniqueTogether( + name = model_name, + unique_together = new_model_state.options.get("unique_together", set()), + ) + ) # Alright, now add internal dependencies for app_label, migrations in self.migrations.items(): for m1, m2 in zip(migrations, migrations[1:]): diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 659b45dbd2..7bed4eb57e 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -20,6 +20,8 @@ class AutodetectorTests(TestCase): other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) book = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))]) + book_unique = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))], {"unique_together": [("author", "title")]}) + book_unique_2 = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))], {"unique_together": [("title", "author")]}) edition = ModelState("thirdapp", "Edition", [("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book"))]) def make_project_state(self, model_states): @@ -234,3 +236,39 @@ class AutodetectorTests(TestCase): self.assertEqual(migration1.dependencies, [("otherapp", "auto_1")]) self.assertEqual(migration2.dependencies, []) self.assertEqual(set(migration3.dependencies), set([("otherapp", "auto_1"), ("testapp", "auto_1")])) + + def test_unique_together(self): + "Tests unique_together detection" + # Make state + before = self.make_project_state([self.author_empty, self.book]) + after = self.make_project_state([self.author_empty, self.book_unique]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? 
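
Because AlterUniqueTogether normalises its input to a set of tuples, applying it ignores the order in which the constraints are listed but still distinguishes a change of column order inside a single constraint, which is what the ordering test below relies on. A standalone illustration of that normalisation:

    # Same normalisation as AlterUniqueTogether.__init__ applies to its input.
    old = set(tuple(cons) for cons in [("author", "title")])
    new = set(tuple(cons) for cons in [("title", "author")])
    assert old != new      # column order inside a constraint changed -> operation emitted

    a = set(tuple(cons) for cons in [("author", "title"), ("title", "author")])
    b = set(tuple(cons) for cons in [("title", "author"), ("author", "title")])
    assert a == b          # reordering the constraints themselves changes nothing
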
+ self.assertEqual(len(changes['otherapp']), 1) + # Right number of actions? + migration = changes['otherapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AlterUniqueTogether") + self.assertEqual(action.name, "book") + self.assertEqual(action.unique_together, set([("author", "title")])) + + def test_unique_together_ordering(self): + "Tests that unique_together also triggers on ordering changes" + # Make state + before = self.make_project_state([self.author_empty, self.book_unique]) + after = self.make_project_state([self.author_empty, self.book_unique_2]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector.changes() + # Right number of migrations? + self.assertEqual(len(changes['otherapp']), 1) + # Right number of actions? + migration = changes['otherapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AlterUniqueTogether") + self.assertEqual(action.name, "book") + self.assertEqual(action.unique_together, set([("title", "author")])) From 6a8cfbf07b2ba7e18db4d86aed0111be4457981e Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 11:43:44 +0100 Subject: [PATCH 089/161] Support for index_together in schema backends --- django/db/backends/schema.py | 36 ++++++++++++++++++++++++- tests/schema/tests.py | 51 ++++++++++++++++++++++++++++++++++++ 2 files changed, 86 insertions(+), 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index c2503e5f53..c8b09f0d99 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -84,7 +84,7 @@ class BaseDatabaseSchemaEditor(object): # Get the cursor cursor = self.connection.cursor() # Log the command we're running, then run it - logger.info("%s; (params %r)" % (sql, params)) + logger.debug("%s; (params %r)" % (sql, params)) cursor.execute(sql, params) def quote_name(self, name): @@ -253,6 +253,40 @@ class BaseDatabaseSchemaEditor(object): "columns": ", ".join(self.quote_name(column) for column in columns), }) + def alter_index_together(self, model, old_index_together, new_index_together): + """ + Deals with a model changing its index_together. + Note: The input index_togethers must be doubly-nested, not the single- + nested ["foo", "bar"] format. 
+ """ + olds = set(frozenset(fields) for fields in old_index_together) + news = set(frozenset(fields) for fields in new_index_together) + # Deleted indexes + for fields in olds.difference(news): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + constraint_names = self._constraint_names(model, list(columns), index=True) + if len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( + len(constraint_names), + model._meta.db_table, + ", ".join(columns), + )) + self.execute( + self.sql_delete_index % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_names[0], + }, + ) + # Created indexes + for fields in news.difference(olds): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + self.execute(self.sql_create_index % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, columns, suffix="_idx"), + "columns": ", ".join(self.quote_name(column) for column in columns), + "extra": "", + }) + def alter_db_table(self, model, old_db_table, new_db_table): """ Renames the table a model points to. diff --git a/tests/schema/tests.py b/tests/schema/tests.py index 4f3c9b7f10..a92c3f7910 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -452,6 +452,57 @@ class SchemaTests(TransactionTestCase): self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") UniqueTest.objects.all().delete() + def test_index_together(self): + """ + Tests removing and adding index_together constraints on a model. + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure there's no index on the year/slug columns first + self.assertEqual( + False, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() + if c['columns'] == set(["slug", "title"]) + ), + ) + # Alter the model to add an index + with connection.schema_editor() as editor: + editor.alter_index_together( + Tag, + [], + [("slug", "title")], + ) + # Ensure there is now an index + self.assertEqual( + True, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() + if c['columns'] == set(["slug", "title"]) + ), + ) + # Alter it back + new_new_field = SlugField(unique=True) + new_new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_unique_together( + Tag, + [("slug", "title")], + [], + ) + # Ensure there's no index + self.assertEqual( + False, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() + if c['columns'] == set(["slug", "title"]) + ), + ) + def test_db_table(self): """ Tests renaming of the table From f343cbf06cba0e2ace0157224f85b89488093fa1 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 11:51:18 +0100 Subject: [PATCH 090/161] Fix combined alters on PostgreSQL --- django/db/backends/schema.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index c8b09f0d99..31ad876be2 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -1,10 +1,12 @@ import sys import hashlib +import operator from django.db.backends.creation import BaseDatabaseCreation from django.db.backends.util import truncate_name from django.utils.log import getLogger from django.db.models.fields.related 
import ManyToManyField from django.db.transaction import atomic +from django.utils.six.moves import reduce logger = getLogger('django.db.backends.schema') @@ -525,7 +527,7 @@ class BaseDatabaseSchemaEditor(object): # Combine actions together if we can (e.g. postgres) if self.connection.features.supports_combined_alters: sql, params = tuple(zip(*actions)) - actions = [(", ".join(sql), params)] + actions = [(", ".join(sql), reduce(operator.add, params))] # Apply those actions for sql, params in actions: self.execute( From 9ef715d256938bd5d2c95a88ce0479aa5e493981 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 11:51:38 +0100 Subject: [PATCH 091/161] Fix some bad test running under PostgreSQL --- tests/migrations/test_operations.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index b2912de53c..a6d57ceb7a 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1,5 +1,6 @@ from django.test import TestCase from django.db import connection, models, migrations +from django.db.transaction import atomic from django.db.utils import IntegrityError from django.db.migrations.state import ProjectState @@ -38,7 +39,7 @@ class OperationTests(TestCase): "Pony", [ ("id", models.AutoField(primary_key=True)), - ("pink", models.BooleanField(default=True)), + ("pink", models.IntegerField(default=3)), ("weight", models.FloatField()), ], ) @@ -232,7 +233,8 @@ class OperationTests(TestCase): operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") with self.assertRaises(IntegrityError): - cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + with atomic(): + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: From dbd3e775c1d2209d2b35cb1fa4b82e20b18c99d7 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 12:06:00 +0100 Subject: [PATCH 092/161] Fix get_constraints to do multi-column indexes properly on pg --- .../postgresql_psycopg2/introspection.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index c4974b89ca..ec953285ad 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -177,26 +177,31 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): constraints[constraint]['columns'].add(column) # Now get indexes cursor.execute(""" - SELECT c2.relname, attr.attname, idx.indkey, idx.indisunique, idx.indisprimary + SELECT + c2.relname, + ARRAY( + SELECT attr.attname + FROM unnest(idx.indkey) i, pg_catalog.pg_attribute attr + WHERE + attr.attnum = i AND + attr.attrelid = c.oid + ), + idx.indisunique, + idx.indisprimary FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, - pg_catalog.pg_index idx, pg_catalog.pg_attribute attr + pg_catalog.pg_index idx WHERE c.oid = idx.indrelid AND idx.indexrelid = c2.oid - AND attr.attrelid = c.oid - AND attr.attnum = idx.indkey[0] AND c.relname = %s """, [table_name]) - for index, column, coli, unique, primary in cursor.fetchall(): - # If we're the first column, make the record + for index, columns, unique, 
primary in cursor.fetchall(): if index not in constraints: constraints[index] = { - "columns": set(), - "primary_key": False, - "unique": False, + "columns": set(columns), + "primary_key": primary, + "unique": unique, "foreign_key": False, "check": False, "index": True, } - # Record the details - constraints[index]['columns'].add(column) return constraints From 2202e3f7d3b3855d59d87c3865982f43b9370ba1 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 12:06:26 +0100 Subject: [PATCH 093/161] Fix index_together test --- tests/schema/tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/schema/tests.py b/tests/schema/tests.py index a92c3f7910..ba035f1287 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -488,7 +488,7 @@ class SchemaTests(TransactionTestCase): new_new_field = SlugField(unique=True) new_new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: - editor.alter_unique_together( + editor.alter_index_together( Tag, [("slug", "title")], [], From 61ff46cf8b483d857768832818693f6225b534ca Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 18:02:01 +0100 Subject: [PATCH 094/161] Add AlterIndexTogether operation --- django/db/migrations/operations/__init__.py | 2 +- django/db/migrations/operations/models.py | 32 ++++++++++++++++++- tests/migrations/test_operations.py | 35 +++++++++++++++++++++ 3 files changed, 67 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py index afa5c85cdc..1240a5d1f5 100644 --- a/django/db/migrations/operations/__init__.py +++ b/django/db/migrations/operations/__init__.py @@ -1,2 +1,2 @@ -from .models import CreateModel, DeleteModel, AlterModelTable, AlterUniqueTogether +from .models import CreateModel, DeleteModel, AlterModelTable, AlterUniqueTogether, AlterIndexTogether from .fields import AddField, RemoveField, AlterField, RenameField diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index 7279a163f0..0c9e69e127 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -82,7 +82,7 @@ class AlterModelTable(Operation): class AlterUniqueTogether(Operation): """ - Changes the value of unique_together to the target one. + Changes the value of index_together to the target one. Input value of unique_together must be a set of tuples. """ @@ -108,3 +108,33 @@ class AlterUniqueTogether(Operation): def describe(self): return "Alter unique_together for %s (%s constraints)" % (self.name, len(self.unique_together)) + + +class AlterIndexTogether(Operation): + """ + Changes the value of index_together to the target one. + Input value of index_together must be a set of tuples. 
+ """ + + def __init__(self, name, index_together): + self.name = name.lower() + self.index_together = set(tuple(cons) for cons in index_together) + + def state_forwards(self, app_label, state): + model_state = state.models[app_label, self.name.lower()] + model_state.options["index_together"] = self.index_together + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + old_app_cache = from_state.render() + new_app_cache = to_state.render() + schema_editor.alter_index_together( + new_app_cache.get_model(app_label, self.name), + getattr(old_app_cache.get_model(app_label, self.name)._meta, "index_together", set()), + getattr(new_app_cache.get_model(app_label, self.name)._meta, "index_together", set()), + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Alter index_together for %s (%s constraints)" % (self.name, len(self.index_together)) diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index a6d57ceb7a..810ed0b929 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -30,6 +30,19 @@ class OperationTests(TestCase): def assertColumnNotNull(self, table, column): self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], False) + def assertIndexExists(self, table, columns, value=True): + self.assertEqual( + value, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), table).values() + if c['columns'] == list(columns) + ), + ) + + def assertIndexNotExists(self, table, columns): + return self.assertIndexExists(table, columns, False) + def set_up_test_model(self, app_label): """ Creates a test model state and database table. @@ -242,3 +255,25 @@ class OperationTests(TestCase): cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") cursor.execute("DELETE FROM test_alunto_pony") + + def test_alter_index_together(self): + """ + Tests the AlterIndexTogether operation. 
+ """ + project_state = self.set_up_test_model("test_alinto") + # Test the state alteration + operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) + new_state = project_state.clone() + operation.state_forwards("test_alinto", new_state) + self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) + self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) + # Make sure there's no matching index + self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) + # Test the database alteration + with connection.schema_editor() as editor: + operation.database_forwards("test_alinto", editor, project_state, new_state) + self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_alinto", editor, new_state, project_state) + self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) From 3a6580e485c11fc5502ffd1bb53347459db43421 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 2 Jul 2013 18:02:20 +0100 Subject: [PATCH 095/161] Make get_constraints return columns in order --- django/db/backends/mysql/introspection.py | 9 ++++++--- .../postgresql_psycopg2/introspection.py | 17 +++++++---------- django/db/backends/schema.py | 13 +++++++------ django/db/backends/sqlite3/introspection.py | 6 +++--- tests/schema/tests.py | 18 +++++++++--------- 5 files changed, 32 insertions(+), 31 deletions(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 3cb55a239d..00c567a90b 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -1,6 +1,6 @@ import re from .base import FIELD_TYPE - +from django.utils.datastructures import SortedSet from django.db.backends import BaseDatabaseIntrospection, FieldInfo from django.utils.encoding import force_text @@ -141,7 +141,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for constraint, column, ref_table, ref_column in cursor.fetchall(): if constraint not in constraints: constraints[constraint] = { - 'columns': set(), + 'columns': SortedSet(), 'primary_key': False, 'unique': False, 'index': False, @@ -169,7 +169,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]: if index not in constraints: constraints[index] = { - 'columns': set(), + 'columns': SortedSet(), 'primary_key': False, 'unique': False, 'index': True, @@ -178,5 +178,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): } constraints[index]['index'] = True constraints[index]['columns'].add(column) + # Convert the sorted sets to lists + for constraint in constraints.values(): + constraint['columns'] = list(constraint['columns']) # Return return constraints diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index ec953285ad..4f77059376 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -140,7 +140,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { - "columns": set(), + "columns": [], "primary_key": kind.lower() == "primary key", "unique": kind.lower() in ["primary key", "unique"], "foreign_key": 
tuple(used_cols[0].split(".", 1)) if kind.lower() == "foreign key" else None, @@ -148,7 +148,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): "index": False, } # Record the details - constraints[constraint]['columns'].add(column) + constraints[constraint]['columns'].append(column) # Now get CHECK constraint columns cursor.execute(""" SELECT kc.constraint_name, kc.column_name @@ -166,7 +166,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # If we're the first column, make the record if constraint not in constraints: constraints[constraint] = { - "columns": set(), + "columns": [], "primary_key": False, "unique": False, "foreign_key": False, @@ -174,17 +174,14 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): "index": False, } # Record the details - constraints[constraint]['columns'].add(column) + constraints[constraint]['columns'].append(column) # Now get indexes cursor.execute(""" SELECT c2.relname, ARRAY( - SELECT attr.attname - FROM unnest(idx.indkey) i, pg_catalog.pg_attribute attr - WHERE - attr.attnum = i AND - attr.attrelid = c.oid + SELECT (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = i AND attrelid = c.oid) + FROM unnest(idx.indkey) i ), idx.indisunique, idx.indisprimary @@ -197,7 +194,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for index, columns, unique, primary in cursor.fetchall(): if index not in constraints: constraints[index] = { - "columns": set(columns), + "columns": list(columns), "primary_key": primary, "unique": unique, "foreign_key": False, diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 31ad876be2..b96e00445c 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -87,6 +87,7 @@ class BaseDatabaseSchemaEditor(object): cursor = self.connection.cursor() # Log the command we're running, then run it logger.debug("%s; (params %r)" % (sql, params)) + #print("%s; (params %r)" % (sql, params)) cursor.execute(sql, params) def quote_name(self, name): @@ -228,12 +229,12 @@ class BaseDatabaseSchemaEditor(object): Note: The input unique_togethers must be doubly-nested, not the single- nested ["foo", "bar"] format. """ - olds = set(frozenset(fields) for fields in old_unique_together) - news = set(frozenset(fields) for fields in new_unique_together) + olds = set(tuple(fields) for fields in old_unique_together) + news = set(tuple(fields) for fields in new_unique_together) # Deleted uniques for fields in olds.difference(news): columns = [model._meta.get_field_by_name(field)[0].column for field in fields] - constraint_names = self._constraint_names(model, list(columns), unique=True) + constraint_names = self._constraint_names(model, columns, unique=True) if len(constraint_names) != 1: raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( len(constraint_names), @@ -261,8 +262,8 @@ class BaseDatabaseSchemaEditor(object): Note: The input index_togethers must be doubly-nested, not the single- nested ["foo", "bar"] format. 
""" - olds = set(frozenset(fields) for fields in old_index_together) - news = set(frozenset(fields) for fields in new_index_together) + olds = set(tuple(fields) for fields in old_index_together) + news = set(tuple(fields) for fields in new_index_together) # Deleted indexes for fields in olds.difference(news): columns = [model._meta.get_field_by_name(field)[0].column for field in fields] @@ -646,7 +647,7 @@ class BaseDatabaseSchemaEditor(object): """ Returns all constraint names matching the columns and conditions """ - column_names = set(column_names) if column_names else None + column_names = list(column_names) if column_names else None constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table) result = [] for name, infodict in constraints.items(): diff --git a/django/db/backends/sqlite3/introspection.py b/django/db/backends/sqlite3/introspection.py index bb47c88182..799d6ec8fd 100644 --- a/django/db/backends/sqlite3/introspection.py +++ b/django/db/backends/sqlite3/introspection.py @@ -197,14 +197,14 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for index_rank, column_rank, column in cursor.fetchall(): if index not in constraints: constraints[index] = { - "columns": set(), + "columns": [], "primary_key": False, "unique": bool(unique), "foreign_key": False, "check": False, "index": True, } - constraints[index]['columns'].add(column) + constraints[index]['columns'].append(column) # Get the PK pk_column = self.get_primary_key_column(cursor, table_name) if pk_column: @@ -213,7 +213,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # deletes PK constraints by name, as you can't delete constraints # in SQLite; we remake the table with a new PK instead. constraints["__primary__"] = { - "columns": set([pk_column]), + "columns": [pk_column], "primary_key": True, "unique": False, # It's not actually a unique constraint. 
"foreign_key": False, diff --git a/tests/schema/tests.py b/tests/schema/tests.py index ba035f1287..3a82cd15ff 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -128,7 +128,7 @@ class SchemaTests(TransactionTestCase): # Make sure the new FK constraint is present constraints = connection.introspection.get_constraints(connection.cursor(), Book._meta.db_table) for name, details in constraints.items(): - if details['columns'] == set(["author_id"]) and details['foreign_key']: + if details['columns'] == ["author_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) break else: @@ -285,7 +285,7 @@ class SchemaTests(TransactionTestCase): constraints = connection.introspection.get_constraints(connection.cursor(), BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table) if connection.features.supports_foreign_keys: for name, details in constraints.items(): - if details['columns'] == set(["tag_id"]) and details['foreign_key']: + if details['columns'] == ["tag_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) break else: @@ -306,7 +306,7 @@ class SchemaTests(TransactionTestCase): constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table) if connection.features.supports_foreign_keys: for name, details in constraints.items(): - if details['columns'] == set(["uniquetest_id"]) and details['foreign_key']: + if details['columns'] == ["uniquetest_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) break else: @@ -327,7 +327,7 @@ class SchemaTests(TransactionTestCase): # Ensure the constraint exists constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): - if details['columns'] == set(["height"]) and details['check']: + if details['columns'] == ["height"] and details['check']: break else: self.fail("No check constraint for height found") @@ -343,7 +343,7 @@ class SchemaTests(TransactionTestCase): ) constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): - if details['columns'] == set(["height"]) and details['check']: + if details['columns'] == ["height"] and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it with connection.schema_editor() as editor: @@ -355,7 +355,7 @@ class SchemaTests(TransactionTestCase): ) constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): - if details['columns'] == set(["height"]) and details['check']: + if details['columns'] == ["height"] and details['check']: break else: self.fail("No check constraint for height found") @@ -465,7 +465,7 @@ class SchemaTests(TransactionTestCase): any( c["index"] for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() - if c['columns'] == set(["slug", "title"]) + if c['columns'] == ["slug", "title"] ), ) # Alter the model to add an index @@ -481,7 +481,7 @@ class SchemaTests(TransactionTestCase): any( c["index"] for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() - if c['columns'] == set(["slug", "title"]) + if c['columns'] == ["slug", "title"] ), ) # Alter it back @@ -499,7 +499,7 @@ class SchemaTests(TransactionTestCase): any( c["index"] for c in 
connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() - if c['columns'] == set(["slug", "title"]) + if c['columns'] == ["slug", "title"] ), ) From 52eb19b545118e8c2044d106f4527ce2200c967a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 22 Jul 2013 19:36:03 +0100 Subject: [PATCH 096/161] Make multi-app-cache tests work again --- tests/app_cache/tests.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/app_cache/tests.py b/tests/app_cache/tests.py index 42598d90c7..b72b862de3 100644 --- a/tests/app_cache/tests.py +++ b/tests/app_cache/tests.py @@ -1,16 +1,11 @@ from __future__ import absolute_import -import datetime -from django.test import TransactionTestCase -from django.utils.unittest import skipUnless -from django.db import connection, DatabaseError, IntegrityError -from django.db.models.fields import IntegerField, TextField, CharField, SlugField -from django.db.models.fields.related import ManyToManyField, ForeignKey +from django.test import TestCase from django.db.models.loading import cache, BaseAppCache from django.db import models from .models import TotallyNormal, SoAlternative, new_app_cache -class AppCacheTests(TransactionTestCase): +class AppCacheTests(TestCase): """ Tests the AppCache borg and non-borg versions """ @@ -19,7 +14,6 @@ class AppCacheTests(TransactionTestCase): """ Tests that the models in the models.py file were loaded correctly. """ - self.assertEqual(cache.get_model("app_cache", "TotallyNormal"), TotallyNormal) self.assertEqual(cache.get_model("app_cache", "SoAlternative"), None) From 162f7b938f6cff91b1e4d6086ddcc88cbb1a1cd6 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 22 Jul 2013 19:43:58 +0100 Subject: [PATCH 097/161] Make migrate command recognise prefixes and 'zero'. --- django/core/management/commands/migrate.py | 62 +++++++++++++++++----- django/db/migrations/executor.py | 10 +++- django/db/migrations/loader.py | 24 +++++++++ tests/migrations/test_loader.py | 15 +++++- 4 files changed, 95 insertions(+), 16 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index a07e3e0d68..29b8b2c9c0 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -4,17 +4,18 @@ import traceback from django.conf import settings from django.core.management import call_command -from django.core.management.base import NoArgsCommand +from django.core.management.base import BaseCommand, CommandError from django.core.management.color import color_style, no_style from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.db.migrations.executor import MigrationExecutor +from django.db.migrations.loader import AmbiguityError from django.utils.datastructures import SortedDict from django.utils.importlib import import_module -class Command(NoArgsCommand): - option_list = NoArgsCommand.option_list + ( +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( make_option('--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user for input of any kind.'), make_option('--no-initial-data', action='store_false', dest='load_initial_data', default=True, @@ -26,7 +27,7 @@ class Command(NoArgsCommand): help = "Updates database schema. Manages both apps with migrations and those without." 
- def handle_noargs(self, **options): + def handle(self, *args, **options): self.verbosity = int(options.get('verbosity')) self.interactive = options.get('interactive') @@ -60,24 +61,57 @@ class Command(NoArgsCommand): connection = connections[db] # Work out which apps have migrations and which do not - if self.verbosity >= 1: - self.stdout.write(self.style.MIGRATE_HEADING("Calculating migration plan:")) executor = MigrationExecutor(connection, self.migration_progress_callback) - if self.verbosity >= 1: - self.stdout.write(self.style.MIGRATE_LABEL(" Apps without migrations: ") + (", ".join(executor.loader.unmigrated_apps) or "(none)")) - # Work out what targets they want, and then make a migration plan - # TODO: Let users select targets - targets = executor.loader.graph.leaf_nodes() + # If they supplied command line arguments, work out what they mean. + run_syncdb = False + target_app_labels_only = True + if len(args) > 2: + raise CommandError("Too many command-line arguments (expecting 'appname' or 'appname migrationname')") + elif len(args) == 2: + app_label, migration_name = args + if app_label not in executor.loader.migrated_apps: + raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label) + if migration_name == "zero": + migration_name = None + else: + try: + migration = executor.loader.get_migration_by_prefix(app_label, migration_name) + except AmbiguityError: + raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (app_label, migration_name)) + except KeyError: + raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % (app_label, migration_name)) + targets = [(app_label, migration.name)] + target_app_labels_only = False + elif len(args) == 1: + app_label = args[0] + if app_label not in executor.loader.migrated_apps: + raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label) + targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label] + else: + targets = executor.loader.graph.leaf_nodes() + run_syncdb = True + plan = executor.migration_plan(targets) + # Print some useful info if self.verbosity >= 1: - self.stdout.write(self.style.MIGRATE_LABEL(" Apps with migrations: ") + (", ".join(executor.loader.migrated_apps) or "(none)")) + self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:")) + if run_syncdb: + self.stdout.write(self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") + (", ".join(executor.loader.unmigrated_apps) or "(none)")) + if target_app_labels_only: + self.stdout.write(self.style.MIGRATE_LABEL(" Apply all migrations: ") + (", ".join(set(a for a, n in targets)) or "(none)")) + else: + if targets[0][1] is None: + self.stdout.write(self.style.MIGRATE_LABEL(" Unapply all migrations: ") + "%s" % (targets[0][0], )) + else: + self.stdout.write(self.style.MIGRATE_LABEL(" Target specific migration: ") + "%s, from %s" % (targets[0][1], targets[0][0])) # Run the syncdb phase. # If you ever manage to get rid of this, I owe you many, many drinks. - self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) - self.sync_apps(connection, executor.loader.unmigrated_apps) + if run_syncdb: + self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) + self.sync_apps(connection, executor.loader.unmigrated_apps) # Migrate! 
if self.verbosity >= 1: diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py index 82601be7c0..46bbfc0ef2 100644 --- a/django/db/migrations/executor.py +++ b/django/db/migrations/executor.py @@ -22,9 +22,17 @@ class MigrationExecutor(object): plan = [] applied = self.recorder.applied_migrations() for target in targets: + # If the target is (appname, None), that means unmigrate everything + if target[1] is None: + for root in self.loader.graph.root_nodes(): + if root[0] == target[0]: + for migration in self.loader.graph.backwards_plan(root): + if migration in applied: + plan.append((self.loader.graph.nodes[migration], True)) + applied.remove(migration) # If the migration is already applied, do backwards mode, # otherwise do forwards mode. - if target in applied: + elif target in applied: for migration in self.loader.graph.backwards_plan(target)[:-1]: if migration in applied: plan.append((self.loader.graph.nodes[migration], True)) diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index 36e1540299..441480b194 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -79,6 +79,23 @@ class MigrationLoader(object): raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_label)) self.disk_migrations[app_label, migration_name] = migration_module.Migration(migration_name, app_label) + def get_migration_by_prefix(self, app_label, name_prefix): + "Returns the migration(s) which match the given app label and name _prefix_" + # Make sure we have the disk data + if self.disk_migrations is None: + self.load_disk() + # Do the search + results = [] + for l, n in self.disk_migrations: + if l == app_label and n.startswith(name_prefix): + results.append((l, n)) + if len(results) > 1: + raise AmbiguityError("There is more than one migration for '%s' with the prefix '%s'" % (app_label, name_prefix)) + elif len(results) == 0: + raise KeyError("There no migrations for '%s' with the prefix '%s'" % (app_label, name_prefix)) + else: + return self.disk_migrations[results[0]] + @cached_property def graph(self): """ @@ -141,3 +158,10 @@ class BadMigrationError(Exception): Raised when there's a bad migration (unreadable/bad format/etc.) 
""" pass + + +class AmbiguityError(Exception): + """ + Raised when more than one migration matches a name prefix + """ + pass diff --git a/tests/migrations/test_loader.py b/tests/migrations/test_loader.py index 255efe9cfb..b9ad9726ae 100644 --- a/tests/migrations/test_loader.py +++ b/tests/migrations/test_loader.py @@ -1,7 +1,7 @@ from django.test import TestCase from django.test.utils import override_settings from django.db import connection -from django.db.migrations.loader import MigrationLoader +from django.db.migrations.loader import MigrationLoader, AmbiguityError from django.db.migrations.recorder import MigrationRecorder @@ -64,3 +64,16 @@ class LoaderTests(TestCase): [x for x, y in book_state.fields], ["id", "author"] ) + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) + def test_name_match(self): + "Tests prefix name matching" + migration_loader = MigrationLoader(connection) + self.assertEqual( + migration_loader.get_migration_by_prefix("migrations", "0001").name, + "0001_initial", + ) + with self.assertRaises(AmbiguityError): + migration_loader.get_migration_by_prefix("migrations", "0") + with self.assertRaises(KeyError): + migration_loader.get_migration_by_prefix("migrations", "blarg") From 00276e0414ce796a71a28d4c675a22b041aa3450 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 25 Jul 2013 13:52:35 +0100 Subject: [PATCH 098/161] Add tests for the migrate command and fix a bug they exposed --- django/core/management/commands/migrate.py | 7 ++-- tests/migrations/test_base.py | 39 +++++++++++++++++++ tests/migrations/test_commands.py | 37 ++++++++++++++++++ tests/migrations/test_executor.py | 1 + .../test_migrations/0001_initial.py | 2 +- tests/migrations/test_operations.py | 35 +---------------- 6 files changed, 84 insertions(+), 37 deletions(-) create mode 100644 tests/migrations/test_base.py create mode 100644 tests/migrations/test_commands.py diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 29b8b2c9c0..8e3c79a431 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -73,7 +73,7 @@ class Command(BaseCommand): if app_label not in executor.loader.migrated_apps: raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label) if migration_name == "zero": - migration_name = None + targets = [(app_label, None)] else: try: migration = executor.loader.get_migration_by_prefix(app_label, migration_name) @@ -81,7 +81,7 @@ class Command(BaseCommand): raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (app_label, migration_name)) except KeyError: raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % (app_label, migration_name)) - targets = [(app_label, migration.name)] + targets = [(app_label, migration.name)] target_app_labels_only = False elif len(args) == 1: app_label = args[0] @@ -110,7 +110,8 @@ class Command(BaseCommand): # Run the syncdb phase. # If you ever manage to get rid of this, I owe you many, many drinks. if run_syncdb: - self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) self.sync_apps(connection, executor.loader.unmigrated_apps) # Migrate! 
diff --git a/tests/migrations/test_base.py b/tests/migrations/test_base.py new file mode 100644 index 0000000000..01062667aa --- /dev/null +++ b/tests/migrations/test_base.py @@ -0,0 +1,39 @@ +from django.test import TestCase +from django.db import connection + + +class MigrationTestBase(TestCase): + """ + Contains an extended set of asserts for testing migrations and schema operations. + """ + + def assertTableExists(self, table): + self.assertIn(table, connection.introspection.get_table_list(connection.cursor())) + + def assertTableNotExists(self, table): + self.assertNotIn(table, connection.introspection.get_table_list(connection.cursor())) + + def assertColumnExists(self, table, column): + self.assertIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + + def assertColumnNotExists(self, table, column): + self.assertNotIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + + def assertColumnNull(self, table, column): + self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], True) + + def assertColumnNotNull(self, table, column): + self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], False) + + def assertIndexExists(self, table, columns, value=True): + self.assertEqual( + value, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), table).values() + if c['columns'] == list(columns) + ), + ) + + def assertIndexNotExists(self, table, columns): + return self.assertIndexExists(table, columns, False) diff --git a/tests/migrations/test_commands.py b/tests/migrations/test_commands.py new file mode 100644 index 0000000000..d775d1eba7 --- /dev/null +++ b/tests/migrations/test_commands.py @@ -0,0 +1,37 @@ +from django.core.management import call_command +from django.test.utils import override_settings +from .test_base import MigrationTestBase + + +class CommandTests(MigrationTestBase): + """ + Tests running the commands (migrate, makemigrations). + """ + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) + def test_migrate(self): + """ + Tests basic usage of the migrate command. 
+ """ + # Make sure no tables are created + self.assertTableNotExists("migrations_author") + self.assertTableNotExists("migrations_tribble") + self.assertTableNotExists("migrations_book") + # Run the migrations to 0001 only + call_command("migrate", "migrations", "0001", verbosity=0) + # Make sure the right tables exist + self.assertTableExists("migrations_author") + self.assertTableExists("migrations_tribble") + self.assertTableNotExists("migrations_book") + # Run migrations all the way + call_command("migrate", verbosity=0) + # Make sure the right tables exist + self.assertTableExists("migrations_author") + self.assertTableNotExists("migrations_tribble") + self.assertTableExists("migrations_book") + # Unmigrate everything + call_command("migrate", "migrations", "zero", verbosity=0) + # Make sure it's all gone + self.assertTableNotExists("migrations_author") + self.assertTableNotExists("migrations_tribble") + self.assertTableNotExists("migrations_book") diff --git a/tests/migrations/test_executor.py b/tests/migrations/test_executor.py index c426defe4a..ddbfa78db7 100644 --- a/tests/migrations/test_executor.py +++ b/tests/migrations/test_executor.py @@ -20,6 +20,7 @@ class ExecutorTests(TransactionTestCase): Tests running a simple set of migrations. """ executor = MigrationExecutor(connection) + executor.recorder.flush() # Let's look at the plan first and make sure it's up to scratch plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( diff --git a/tests/migrations/test_migrations/0001_initial.py b/tests/migrations/test_migrations/0001_initial.py index e2ed8559a6..f20bac8aec 100644 --- a/tests/migrations/test_migrations/0001_initial.py +++ b/tests/migrations/test_migrations/0001_initial.py @@ -12,7 +12,7 @@ class Migration(migrations.Migration): ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), - ("silly_field", models.BooleanField()), + ("silly_field", models.BooleanField(default=False)), ], ), diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 810ed0b929..c74d40e4f2 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1,48 +1,17 @@ -from django.test import TestCase from django.db import connection, models, migrations from django.db.transaction import atomic from django.db.utils import IntegrityError from django.db.migrations.state import ProjectState +from .test_base import MigrationTestBase -class OperationTests(TestCase): +class OperationTests(MigrationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. 
""" - def assertTableExists(self, table): - self.assertIn(table, connection.introspection.get_table_list(connection.cursor())) - - def assertTableNotExists(self, table): - self.assertNotIn(table, connection.introspection.get_table_list(connection.cursor())) - - def assertColumnExists(self, table, column): - self.assertIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) - - def assertColumnNotExists(self, table, column): - self.assertNotIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) - - def assertColumnNull(self, table, column): - self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], True) - - def assertColumnNotNull(self, table, column): - self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], False) - - def assertIndexExists(self, table, columns, value=True): - self.assertEqual( - value, - any( - c["index"] - for c in connection.introspection.get_constraints(connection.cursor(), table).values() - if c['columns'] == list(columns) - ), - ) - - def assertIndexNotExists(self, table, columns): - return self.assertIndexExists(table, columns, False) - def set_up_test_model(self, app_label): """ Creates a test model state and database table. From 06103c8ef53b7ac71def7ed34c337bb4b7dd89d9 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 25 Jul 2013 14:45:38 +0100 Subject: [PATCH 099/161] Small start to migrations documentation --- docs/index.txt | 3 + docs/ref/django-admin.txt | 86 +++++++++++++++------------- docs/topics/index.txt | 1 + docs/topics/migrations.txt | 113 +++++++++++++++++++++++++++++++++++++ 4 files changed, 164 insertions(+), 39 deletions(-) create mode 100644 docs/topics/migrations.txt diff --git a/docs/index.txt b/docs/index.txt index 7f9d1bd032..19d574cb26 100644 --- a/docs/index.txt +++ b/docs/index.txt @@ -71,6 +71,9 @@ manipulating the data of your Web application. Learn more about it below: :doc:`Instance methods ` | :doc:`Accessing related objects ` +* **Migrations:** + :doc:`Introduction to Migrations` + * **Advanced:** :doc:`Managers ` | :doc:`Raw SQL ` | diff --git a/docs/ref/django-admin.txt b/docs/ref/django-admin.txt index d16766618a..73992623aa 100644 --- a/docs/ref/django-admin.txt +++ b/docs/ref/django-admin.txt @@ -572,6 +572,48 @@ Use the ``--keep-pot`` option to prevent django from deleting the temporary .pot file it generates before creating the .po file. This is useful for debugging errors which may prevent the final language files from being created. +makemigrations [] +-------------------------- + +.. django-admin:: makemigrations + +Creates new migrations based on the changes detected to your models. +Migrations, their relationship with apps and more are covered in depth in +:doc:`the migrations documentation`. + +Providing one or more app names as arguments will limit the migrations created +to the app specified and any dependencies needed (the table at the other end +of a ForeignKey, for example) + +.. django-admin-option:: --empty + +The ``--empty`` option will cause ``makemigrations`` to output an empty +migration for the specified apps, for manual editing. This option is only +for advanced users and should not be used unless you are familiar with +the migration format, migration operations and the dependencies between +your migrations. 
+ +migrate [<appname> [<migrationname>]] +------------------------------------- + +.. django-admin:: migrate + +Synchronises the database state with the current set of models and migrations. +Migrations, their relationship with apps and more are covered in depth in +:doc:`the migrations documentation`. + +The behaviour of this command changes depending on the arguments provided: + +* No arguments: All migrated apps have all of their migrations run, + and all unmigrated apps are synchronized with the database, +* ``<appname>``: The specified app has its migrations run, up to the most + recent migration. This may involve running other apps' migrations too, due + to dependencies. +* ``<appname> <migrationname>``: Brings the database schema to a state where it + would have just run the given migration, but no further - this may involve + unapplying migrations if you have previously migrated past the named + migration. Use the name `zero` to unapply all migrations for an app. + runfcgi [options] ----------------- @@ -1107,47 +1149,13 @@ syncdb .. django-admin:: syncdb -Creates the database tables for all apps in :setting:`INSTALLED_APPS` whose -tables have not already been created. +.. deprecated:: 1.7 -Use this command when you've added new applications to your project and want to -install them in the database. This includes any apps shipped with Django that -might be in :setting:`INSTALLED_APPS` by default. When you start a new project, -run this command to install the default apps. + This command has been deprecated in favour of the :djadmin:`migrate` + command, which performs both the old behaviour as well as executing + migrations. It is now just an alias to that command. -.. admonition:: Syncdb will not alter existing tables - - ``syncdb`` will only create tables for models which have not yet been - installed. It will *never* issue ``ALTER TABLE`` statements to match - changes made to a model class after installation. Changes to model classes - and database schemas often involve some form of ambiguity and, in those - cases, Django would have to guess at the correct changes to make. There is - a risk that critical data would be lost in the process. - - If you have made changes to a model and wish to alter the database tables - to match, use the ``sql`` command to display the new SQL structure and - compare that to your existing table schema to work out the changes. - -If you're installing the ``django.contrib.auth`` application, ``syncdb`` will -give you the option of creating a superuser immediately. - -``syncdb`` will also search for and install any fixture named ``initial_data`` -with an appropriate extension (e.g. ``json`` or ``xml``). See the -documentation for ``loaddata`` for details on the specification of fixture -data files. - -The :djadminopt:`--noinput` option may be provided to suppress all user -prompts. - -The :djadminopt:`--database` option can be used to specify the database to -synchronize. - ---no-initial-data -~~~~~~~~~~~~~~~~~ - -.. versionadded:: 1.5 - -Use ``--no-initial-data`` to avoid loading the initial_data fixture. +Alias for :djadmin:`migrate`.
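The argument forms above can also be exercised through ``call_command``; this sketch simply mirrors the ``migrate`` tests added earlier in this series (the ``migrations`` app label and the ``0001``/``zero`` targets come from those tests and stand in for your own names)::

    from django.core.management import call_command

    # No arguments: apply all migrations and synchronize unmigrated apps.
    call_command("migrate", verbosity=0)

    # App plus migration name: bring one app to a specific migration
    # (prefix matching applies, so "0001" resolves to "0001_initial").
    call_command("migrate", "migrations", "0001", verbosity=0)

    # The special name "zero": unapply every migration for that app.
    call_command("migrate", "migrations", "zero", verbosity=0)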
test ----------------------------- diff --git a/docs/topics/index.txt b/docs/topics/index.txt index f8f60b2953..b248e10268 100644 --- a/docs/topics/index.txt +++ b/docs/topics/index.txt @@ -12,6 +12,7 @@ Introductions to all the key parts of Django you'll need to know: forms/index templates class-based-views/index + migrations files testing/index auth/index diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt new file mode 100644 index 0000000000..e24a7ce085 --- /dev/null +++ b/docs/topics/migrations.txt @@ -0,0 +1,113 @@ +========== +Migrations +========== + +.. module:: django.db.migrations + :synopsis: Schema migration support for Django models + +.. versionadded:: 1.7 + +Migrations are Django's way of propagating changes you make to your models +(adding a field, deleting a model, etc.) into your database schema. They're +designed to be mostly automatic, but you'll need to know when to make +migrations, when to run them, and the common problems you might run into. + +A Brief History +--------------- + +Prior to version 1.7, Django only supported adding new models to the +database; it was not possible to alter or remove existing models via the +``syncdb`` command (the predecessor to ``migrate``). + +Third-party tools, most notably `South `_, +provided support for these additional types of change, but it was considered +important enough that support was brought into core Django. + +Two Commands +------------ + +There are two commands which you will use to interact with migrations +and Django's handling of database schema: + +* :djadmin:`migrate`, which is responsible for applying migrations, as well as + unapplying and listing their status. + +* :djadmin:`makemigrations`, which is responsible for creating new migrations + based on the changes you have made to your models. + +It's worth noting that migrations are created and run on a per-app basis. +In particular, it's possible to have apps that *do not use migrations* (these +are referred to as "unmigrated" apps) - these apps will instead mimic the +legacy behaviour of just adding new models. + +You should think of migrations as a version control system for your database +schema. ``makemigrations`` is responsible for packaging up your model changes +into individual migration files - analagous to commits - and ``migrate`` is +responsible for applying those to your database. + +The migration files for each app live in a "migrations" directory inside +of that app, and are designed to be committed to, and distributed as part +of, its codebase. You should be making them once on your development machine +and then running the same migrations on your colleagues' machines, your +staging machines and eventually your production machines. + +Migrations will run the same way every time and produce consistent results, +meaning that what you see in development and staging is exactly what will +happen in production - no unexpected surprises. + +Backend Support +--------------- + +Migrations are supported on all backends that Django ships with, as well +as any third-party backends if they have programmed in support for schema +alteration (done via the SchemaEditor class). + +However, some databases are more capable than others when it comes to +schema migrations; some of the caveats are covered below. 
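A minimal sketch of the SchemaEditor interface mentioned above, modelled on the schema tests earlier in this series (the ``Tag`` model here is only an approximation of the one those tests define)::

    from django.db import connection, models

    class Tag(models.Model):
        title = models.CharField(max_length=255)
        slug = models.SlugField(unique=True)

        class Meta:
            app_label = "schema"

    # Mirrors tests/schema/tests.py: create the table, then add a
    # composite index via alter_index_together.
    with connection.schema_editor() as editor:
        editor.create_model(Tag)
        editor.alter_index_together(Tag, [], [("slug", "title")])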
+ +PostgreSQL +~~~~~~~~~~ + +PostgreSQL is the most capable of all the databases here in terms of schema +support; the only caveat is that adding columns with default values will +lock a table for a time proportional to the number of rows in it. + +For this reason, it's recommended you always create new columns with +``null=True``, as this way they will be added immediately. + +MySQL +~~~~~ + +MySQL lacks support for transactions around schema alteration operations, +meaning that if a migration fails to apply you will have to manually unpick +the changes in order to try again (it's impossible to roll back to an +earlier point). + +In addition, MySQL will lock tables for almost every schema operation and +generally takes a time proportional to the number of rows in the table to +add or remove columns. On slower hardware this can be worse than a minute +per million rows - adding a few columns to a table with just a few million +rows could lock your site up for over ten minutes. + +Finally, MySQL has reasonably small limits on name lengths for columns, tables +and indexes, as well as a limit on the combined size of all columns an index +covers. This means that indexes that are possible on other backends will +fail to be created under MySQL. + +SQLite +~~~~~~ + +SQLite has very little built-in schema alteration support, and so Django +attempts to emulate it by: + +* Creating a new table with the new schema +* Copying the data across +* Dropping the old table +* Renaming the new table to match the original name + +This process generally works well, but it can be slow and occasionally +buggy. It is not recommended that you run and migrate SQLite in a +production environment unless you are very aware of the risks and +its limitations; the support Django ships with is designed to allow +developers to use SQLite on their local machines to develop less complex +Django projects without the need for a full database. From f8297f63233ccf78f923a597ed7d8327f90230c2 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 25 Jul 2013 16:19:36 +0100 Subject: [PATCH 100/161] More migration docs, and conversion of all easy syncdb references --- docs/howto/legacy-databases.txt | 4 +- .../contributing/writing-documentation.txt | 4 +- docs/intro/overview.txt | 7 +- docs/intro/reusable-apps.txt | 2 +- docs/man/django-admin.1 | 9 +- docs/ref/contrib/comments/index.txt | 2 +- docs/ref/contrib/contenttypes.txt | 2 +- docs/ref/contrib/flatpages.txt | 4 +- docs/ref/contrib/index.txt | 2 +- docs/ref/contrib/redirects.txt | 4 +- docs/ref/contrib/sites.txt | 2 +- docs/ref/databases.txt | 6 +- docs/ref/django-admin.txt | 16 ++- docs/ref/models/options.txt | 12 +- docs/topics/auth/customizing.txt | 14 +-- docs/topics/auth/default.txt | 10 +- docs/topics/auth/index.txt | 2 +- docs/topics/db/models.txt | 7 +- docs/topics/http/sessions.txt | 2 +- docs/topics/install.txt | 6 +- docs/topics/migrations.txt | 112 +++++++++++++++++- docs/topics/serialization.txt | 2 +- docs/topics/testing/advanced.txt | 4 +- docs/topics/testing/overview.txt | 4 +- 24 files changed, 172 insertions(+), 67 deletions(-) diff --git a/docs/howto/legacy-databases.txt b/docs/howto/legacy-databases.txt index 0bea8b41c4..1cf8329e79 100644 --- a/docs/howto/legacy-databases.txt +++ b/docs/howto/legacy-databases.txt @@ -81,10 +81,10 @@ access to your precious data on a model by model basis. 
Install the core Django tables ============================== -Next, run the :djadmin:`syncdb` command to install any extra needed database +Next, run the :djadmin:`migrate` command to install any extra needed database records such as admin permissions and content types:: - python manage.py syncdb + python manage.py migrate Test and tweak ============== diff --git a/docs/internals/contributing/writing-documentation.txt b/docs/internals/contributing/writing-documentation.txt index 2944dea504..d2cfaddc89 100644 --- a/docs/internals/contributing/writing-documentation.txt +++ b/docs/internals/contributing/writing-documentation.txt @@ -165,9 +165,9 @@ __ http://sphinx.pocoo.org/markup/desc.html * ``django-admin`` commands:: - .. django-admin:: syncdb + .. django-admin:: migrate - To link, use ``:djadmin:`syncdb```. + To link, use ``:djadmin:`migrate```. * ``django-admin`` command-line options:: diff --git a/docs/intro/overview.txt b/docs/intro/overview.txt index 55366fb2c6..415e831faf 100644 --- a/docs/intro/overview.txt +++ b/docs/intro/overview.txt @@ -53,10 +53,11 @@ automatically: .. code-block:: bash - manage.py syncdb + manage.py migrate -The :djadmin:`syncdb` command looks at all your available models and creates -tables in your database for whichever tables don't already exist. +The :djadmin:`migrate` command looks at all your available models and creates +tables in your database for whichever tables don't already exist, as well as +optionally providing :doc:`much richer schema control `. Enjoy the free API ================== diff --git a/docs/intro/reusable-apps.txt b/docs/intro/reusable-apps.txt index 7fa1ffc8d9..51c1228cc1 100644 --- a/docs/intro/reusable-apps.txt +++ b/docs/intro/reusable-apps.txt @@ -155,7 +155,7 @@ this. For a small app like polls, this process isn't too difficult. url(r'^polls/', include('polls.urls')), - 3. Run `python manage.py syncdb` to create the polls models. + 3. Run `python manage.py migrate` to create the polls models. 4. Start the development server and visit http://127.0.0.1:8000/admin/ to create a poll (you'll need the Admin app enabled). diff --git a/docs/man/django-admin.1 b/docs/man/django-admin.1 index 4d937b488b..f1b568daf5 100644 --- a/docs/man/django-admin.1 +++ b/docs/man/django-admin.1 @@ -45,8 +45,7 @@ Outputs to standard output all data in the database associated with the named application(s). .TP .BI flush -Returns the database to the state it was in immediately after syncdb was -executed. +Removes all data from the database and then re-installs any initial data. .TP .B inspectdb Introspects the database tables in the database specified in settings.py and outputs a Django @@ -114,9 +113,9 @@ the current directory or the optional destination. Creates a Django project directory structure for the given project name in the current directory or the optional destination. .TP -.BI syncdb -Creates the database tables for all apps in INSTALLED_APPS whose tables -haven't already been created. +.BI migrate +Runs migrations for apps containing migrations, and just creates missing tables +for apps without migrations. .TP .BI "test [" "\-\-verbosity" "] [" "\-\-failfast" "] [" "appname ..." 
"]" Runs the test suite for the specified applications, or the entire project if diff --git a/docs/ref/contrib/comments/index.txt b/docs/ref/contrib/comments/index.txt index 6db69d8168..c08ac21d4e 100644 --- a/docs/ref/contrib/comments/index.txt +++ b/docs/ref/contrib/comments/index.txt @@ -31,7 +31,7 @@ To get started using the ``comments`` app, follow these steps: #. Install the comments framework by adding ``'django.contrib.comments'`` to :setting:`INSTALLED_APPS`. -#. Run ``manage.py syncdb`` so that Django will create the comment tables. +#. Run ``manage.py migrate`` so that Django will create the comment tables. #. Add the comment app's URLs to your project's ``urls.py``: diff --git a/docs/ref/contrib/contenttypes.txt b/docs/ref/contrib/contenttypes.txt index fcd66a5b03..89c4a88e00 100644 --- a/docs/ref/contrib/contenttypes.txt +++ b/docs/ref/contrib/contenttypes.txt @@ -86,7 +86,7 @@ The ``ContentType`` model Let's look at an example to see how this works. If you already have the :mod:`~django.contrib.contenttypes` application installed, and then add :mod:`the sites application ` to your -:setting:`INSTALLED_APPS` setting and run ``manage.py syncdb`` to install it, +:setting:`INSTALLED_APPS` setting and run ``manage.py migrate`` to install it, the model :class:`django.contrib.sites.models.Site` will be installed into your database. Along with it a new instance of :class:`~django.contrib.contenttypes.models.ContentType` will be diff --git a/docs/ref/contrib/flatpages.txt b/docs/ref/contrib/flatpages.txt index 11d74d75c3..be9fe0c636 100644 --- a/docs/ref/contrib/flatpages.txt +++ b/docs/ref/contrib/flatpages.txt @@ -55,14 +55,14 @@ or: 3. Add ``'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'`` to your :setting:`MIDDLEWARE_CLASSES` setting. -4. Run the command :djadmin:`manage.py syncdb `. +4. Run the command :djadmin:`manage.py migrate `. .. currentmodule:: django.contrib.flatpages.middleware How it works ============ -``manage.py syncdb`` creates two tables in your database: ``django_flatpage`` +``manage.py migrate`` creates two tables in your database: ``django_flatpage`` and ``django_flatpage_sites``. ``django_flatpage`` is a simple lookup table that simply maps a URL to a title and bunch of text content. ``django_flatpage_sites`` associates a flatpage with a site. diff --git a/docs/ref/contrib/index.txt b/docs/ref/contrib/index.txt index e5cea01ead..727fab01dc 100644 --- a/docs/ref/contrib/index.txt +++ b/docs/ref/contrib/index.txt @@ -15,7 +15,7 @@ those packages have. For most of these add-ons -- specifically, the add-ons that include either models or template tags -- you'll need to add the package name (e.g., ``'django.contrib.admin'``) to your :setting:`INSTALLED_APPS` setting and - re-run ``manage.py syncdb``. + re-run ``manage.py migrate``. .. _"batteries included" philosophy: http://docs.python.org/tutorial/stdlib.html#batteries-included diff --git a/docs/ref/contrib/redirects.txt b/docs/ref/contrib/redirects.txt index 0c0cb2a3c2..eefbb96721 100644 --- a/docs/ref/contrib/redirects.txt +++ b/docs/ref/contrib/redirects.txt @@ -18,12 +18,12 @@ To install the redirects app, follow these steps: 2. Add ``'django.contrib.redirects'`` to your :setting:`INSTALLED_APPS` setting. 3. Add ``'django.contrib.redirects.middleware.RedirectFallbackMiddleware'`` to your :setting:`MIDDLEWARE_CLASSES` setting. -4. Run the command :djadmin:`manage.py syncdb `. +4. Run the command :djadmin:`manage.py migrate `. 
How it works ============ -``manage.py syncdb`` creates a ``django_redirect`` table in your database. This +``manage.py migrate`` creates a ``django_redirect`` table in your database. This is a simple lookup table with ``site_id``, ``old_path`` and ``new_path`` fields. The ``RedirectFallbackMiddleware`` does all of the work. Each time any Django diff --git a/docs/ref/contrib/sites.txt b/docs/ref/contrib/sites.txt index 65838dfa3e..48f781310c 100644 --- a/docs/ref/contrib/sites.txt +++ b/docs/ref/contrib/sites.txt @@ -264,7 +264,7 @@ To enable the sites framework, follow these steps: SITE_ID = 1 -3. Run :djadmin:`syncdb`. +3. Run :djadmin:`migrate`. ``django.contrib.sites`` registers a :data:`~django.db.models.signals.post_syncdb` signal handler which creates a diff --git a/docs/ref/databases.txt b/docs/ref/databases.txt index 29f2f3972d..60153eb735 100644 --- a/docs/ref/databases.txt +++ b/docs/ref/databases.txt @@ -224,7 +224,7 @@ If you upgrade an existing project to MySQL 5.5.5 and subsequently add some tables, ensure that your tables are using the same storage engine (i.e. MyISAM vs. InnoDB). Specifically, if tables that have a ``ForeignKey`` between them use different storage engines, you may see an error like the following when -running ``syncdb``:: +running ``migrate``:: _mysql_exceptions.OperationalError: ( 1005, "Can't create table '\\db_name\\.#sql-4a8_ab' (errno: 150)" @@ -663,7 +663,7 @@ required. .. _`Oracle Database Server`: http://www.oracle.com/ .. _`cx_Oracle`: http://cx-oracle.sourceforge.net/ -In order for the ``python manage.py syncdb`` command to work, your Oracle +In order for the ``python manage.py migrate`` command to work, your Oracle database user must have privileges to run the following commands: * CREATE TABLE @@ -752,7 +752,7 @@ Oracle imposes a name length limit of 30 characters. To accommodate this, the backend truncates database identifiers to fit, replacing the final four characters of the truncated name with a repeatable MD5 hash value. -When running syncdb, an ``ORA-06552`` error may be encountered if +When running ``migrate``, an ``ORA-06552`` error may be encountered if certain Oracle keywords are used as the name of a model field or the value of a ``db_column`` option. Django quotes all identifiers used in queries to prevent most such problems, but this error can still diff --git a/docs/ref/django-admin.txt b/docs/ref/django-admin.txt index 73992623aa..05fc8a1191 100644 --- a/docs/ref/django-admin.txt +++ b/docs/ref/django-admin.txt @@ -242,10 +242,8 @@ flush .. django-admin:: flush -Returns the database to the state it was in immediately after :djadmin:`syncdb` -was executed. This means that all data will be removed from the database, any -post-synchronization handlers will be re-executed, and the ``initial_data`` -fixture will be re-installed. +Removes all data from the database, re-executes any post-synchronization +handlers, and reinstalls any initial data fixtures. The :djadminopt:`--noinput` option may be provided to suppress all user prompts. @@ -1293,7 +1291,7 @@ This command is only available if Django's :doc:`authentication system Creates a superuser account (a user who has all permissions). This is useful if you need to create an initial superuser account but did not -do so during ``syncdb``, or if you need to programmatically generate +do so during the first ``migrate``, or if you need to programmatically generate superuser accounts for your site(s). 
When run interactively, this command will prompt for a password for @@ -1379,7 +1377,7 @@ allows for the following options: Example usage:: - django-admin.py syncdb --pythonpath='/home/djangoprojects/myproject' + django-admin.py migrate --pythonpath='/home/djangoprojects/myproject' Adds the given filesystem path to the Python `import search path`_. If this isn't provided, ``django-admin.py`` will use the ``PYTHONPATH`` environment @@ -1394,7 +1392,7 @@ setting the Python path for you. Example usage:: - django-admin.py syncdb --settings=mysite.settings + django-admin.py migrate --settings=mysite.settings Explicitly specifies the settings module to use. The settings module should be in Python package syntax, e.g. ``mysite.settings``. If this isn't provided, @@ -1408,7 +1406,7 @@ Note that this option is unnecessary in ``manage.py``, because it uses Example usage:: - django-admin.py syncdb --traceback + django-admin.py migrate --traceback By default, ``django-admin.py`` will show a simple error message whenever an :class:`~django.core.management.CommandError` occurs, but a full stack trace @@ -1424,7 +1422,7 @@ will also output a full stack trace when a ``CommandError`` is raised. Example usage:: - django-admin.py syncdb --verbosity 2 + django-admin.py migrate --verbosity 2 Use ``--verbosity`` to specify the amount of notification and debug information that ``django-admin.py`` should print to the console. diff --git a/docs/ref/models/options.txt b/docs/ref/models/options.txt index d54af37e86..6eeed51b6f 100644 --- a/docs/ref/models/options.txt +++ b/docs/ref/models/options.txt @@ -106,9 +106,9 @@ Django quotes column and table names behind the scenes. .. attribute:: Options.managed Defaults to ``True``, meaning Django will create the appropriate database - tables in :djadmin:`syncdb` and remove them as part of a :djadmin:`flush` - management command. That is, Django *manages* the database tables' - lifecycles. + tables in :djadmin:`migrate` or as part of migrations and remove them as + part of a :djadmin:`flush` management command. That is, Django + *manages* the database tables' lifecycles. If ``False``, no database table creation or deletion operations will be performed for this model. This is useful if the model represents an existing @@ -192,9 +192,9 @@ Django quotes column and table names behind the scenes. .. admonition:: Changing order_with_respect_to ``order_with_respect_to`` adds an additional field/database column - named ``_order``, so be sure to handle that as you would any other - change to your models if you add or change ``order_with_respect_to`` - after your initial :djadmin:`syncdb`. + named ``_order``, so be sure to make and apply the appropriate + migrations if you add or change ``order_with_respect_to`` + after your initial :djadmin:`migrate`. ``ordering`` ------------ diff --git a/docs/topics/auth/customizing.txt b/docs/topics/auth/customizing.txt index e27f257f80..e92e775bda 100644 --- a/docs/topics/auth/customizing.txt +++ b/docs/topics/auth/customizing.txt @@ -275,7 +275,7 @@ can or cannot do with Task instances, specific to your application:: ) The only thing this does is create those extra permissions when you run -:djadmin:`manage.py syncdb `. Your code is in charge of checking the +:djadmin:`manage.py migrate `. Your code is in charge of checking the value of these permissions when an user is trying to access the functionality provided by the application (viewing tasks, changing the status of tasks, closing tasks.) 
Continuing the above example, the following checks if a user may @@ -380,14 +380,12 @@ use as your User model. Changing :setting:`AUTH_USER_MODEL` has a big effect on your database structure. It changes the tables that are available, and it will affect the construction of foreign keys and many-to-many relationships. If you intend - to set :setting:`AUTH_USER_MODEL`, you should set it before running - ``manage.py syncdb`` for the first time. + to set :setting:`AUTH_USER_MODEL`, you should set it before creating + any migrations or running ``manage.py migrate`` for the first time. - If you have an existing project and you want to migrate to using a custom - User model, you may need to look into using a migration tool like South_ - to ease the transition. - -.. _South: http://south.aeracode.org + Changing this setting after you have tables created is not supported + by :djadmin:`makemigrations` and will result in you having to manually + write a set of migrations to fix your schema. Referencing the User model -------------------------- diff --git a/docs/topics/auth/default.txt b/docs/topics/auth/default.txt index b7f679bf28..cfa3ec3cad 100644 --- a/docs/topics/auth/default.txt +++ b/docs/topics/auth/default.txt @@ -65,7 +65,7 @@ interactively `. Creating superusers ------------------- -:djadmin:`manage.py syncdb ` prompts you to create a superuser the +:djadmin:`manage.py migrate ` prompts you to create a superuser the first time you run it with ``'django.contrib.auth'`` in your :setting:`INSTALLED_APPS`. If you need to create a superuser at a later date, you can use a command line utility:: @@ -190,13 +190,13 @@ setting, it will ensure that three default permissions -- add, change and delete -- are created for each Django model defined in one of your installed applications. -These permissions will be created when you run :djadmin:`manage.py syncdb -`; the first time you run ``syncdb`` after adding +These permissions will be created when you run :djadmin:`manage.py migrate +`; the first time you run ``migrate`` after adding ``django.contrib.auth`` to :setting:`INSTALLED_APPS`, the default permissions will be created for all previously-installed models, as well as for any new models being installed at that time. Afterward, it will create default -permissions for new models each time you run :djadmin:`manage.py syncdb -`. +permissions for new models each time you run :djadmin:`manage.py migrate +`. Assuming you have an application with an :attr:`~django.db.models.Options.app_label` ``foo`` and a model named ``Bar``, diff --git a/docs/topics/auth/index.txt b/docs/topics/auth/index.txt index 8447d449ce..81b6996d00 100644 --- a/docs/topics/auth/index.txt +++ b/docs/topics/auth/index.txt @@ -67,7 +67,7 @@ and two items in your :setting:`MIDDLEWARE_CLASSES` setting: 2. :class:`~django.contrib.auth.middleware.AuthenticationMiddleware` associates users with requests using sessions. -With these settings in place, running the command ``manage.py syncdb`` creates +With these settings in place, running the command ``manage.py migrate`` creates the necessary database tables for auth related models, creates permissions for any models defined in your installed apps, and prompts you to create a superuser account the first time you run it. 
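As a hedged illustration of those default permissions, reusing the ``foo`` app and ``Bar``
model from the example above (the username is invented, and any existing ``User`` instance
would do), the three per-model permissions created by ``migrate`` can be checked like this::

    from django.contrib.auth.models import User

    user = User.objects.get(username='alice')   # assumed existing user
    user.has_perm('foo.add_bar')      # may the user add Bar objects?
    user.has_perm('foo.change_bar')   # may the user change them?
    user.has_perm('foo.delete_bar')   # may the user delete them?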
diff --git a/docs/topics/db/models.txt b/docs/topics/db/models.txt index 2b565758e7..b0011e1098 100644 --- a/docs/topics/db/models.txt +++ b/docs/topics/db/models.txt @@ -77,7 +77,8 @@ application by the :djadmin:`manage.py startapp ` script), ) When you add new apps to :setting:`INSTALLED_APPS`, be sure to run -:djadmin:`manage.py syncdb `. +:djadmin:`manage.py migrate `, optionally making migrations +for them first with :djadmin:`manage.py makemigrations `. Fields ====== @@ -956,7 +957,7 @@ The reverse name of the ``common.ChildA.m2m`` field will be reverse name of the ``rare.ChildB.m2m`` field will be ``rare_childb_related``. It is up to you how you use the ``'%(class)s'`` and ``'%(app_label)s`` portion to construct your related name, but if you forget to use it, Django will raise -errors when you validate your models (or run :djadmin:`syncdb`). +errors when you validate your models (or run :djadmin:`migrate`). If you don't specify a :attr:`~django.db.models.ForeignKey.related_name` attribute for a field in an abstract base class, the default reverse name will @@ -1049,7 +1050,7 @@ are putting those types of relations on a subclass of another model, you **must** specify the :attr:`~django.db.models.ForeignKey.related_name` attribute on each such field. If you forget, Django will raise an error when you run -:djadmin:`validate` or :djadmin:`syncdb`. +:djadmin:`validate` or :djadmin:`migrate`. For example, using the above ``Place`` class again, let's create another subclass with a :class:`~django.db.models.ManyToManyField`:: diff --git a/docs/topics/http/sessions.txt b/docs/topics/http/sessions.txt index 772ee122d5..0b818c4f5d 100644 --- a/docs/topics/http/sessions.txt +++ b/docs/topics/http/sessions.txt @@ -44,7 +44,7 @@ Using database-backed sessions If you want to use a database-backed session, you need to add ``'django.contrib.sessions'`` to your :setting:`INSTALLED_APPS` setting. -Once you have configured your installation, run ``manage.py syncdb`` +Once you have configured your installation, run ``manage.py migrate`` to install the single database table that stores session data. .. _cached-sessions-backend: diff --git a/docs/topics/install.txt b/docs/topics/install.txt index 9cf02d96de..5d88ed4f82 100644 --- a/docs/topics/install.txt +++ b/docs/topics/install.txt @@ -122,14 +122,12 @@ database bindings are installed. * If you're using an unofficial 3rd party backend, please consult the documentation provided for any additional requirements. -If you plan to use Django's ``manage.py syncdb`` command to automatically +If you plan to use Django's ``manage.py migrate`` command to automatically create database tables for your models (after first installing Django and creating a project), you'll need to ensure that Django has permission to create and alter tables in the database you're using; if you plan to manually create the tables, you can simply grant Django ``SELECT``, ``INSERT``, ``UPDATE`` and -``DELETE`` permissions. On some databases, Django will need ``ALTER TABLE`` -privileges during ``syncdb`` but won't issue ``ALTER TABLE`` statements on a -table once ``syncdb`` has created it. After creating a database user with these +``DELETE`` permissions. After creating a database user with these permissions, you'll specify the details in your project's settings file, see :setting:`DATABASES` for details. 
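A hedged example of what those connection details look like in the settings file (all
values here are placeholders, and any of the other bundled database backends could be
used in place of PostgreSQL)::

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'mydatabase',
            'USER': 'mydatabaseuser',
            'PASSWORD': 'mypassword',
            'HOST': '127.0.0.1',
            'PORT': '5432',
        }
    }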
diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt index e24a7ce085..60f83e497c 100644 --- a/docs/topics/migrations.txt +++ b/docs/topics/migrations.txt @@ -31,7 +31,7 @@ and Django's handling of database schema: * :djadmin:`migrate`, which is responsible for applying migrations, as well as unapplying and listing their status. - + * :djadmin:`makemigrations`, which is responsible for creating new migrations based on the changes you have made to your models. @@ -111,3 +111,113 @@ production environment unless you are very aware of the risks and its limitations; the support Django ships with is designed to allow developers to use SQLite on their local machines to develop less complex Django projects without the need for a full database. + +Workflow +-------- + +Working with migrations is simple. Make changes to your models - say, add +a field and remove a model - and then run :djadmin:`makemigrations`:: + + $ python manage.py makemigrations + Migrations for 'books': + 0003_auto.py: + - Alter field author on book + +Your models will be scanned and compared to the versions currently +contained in your migration files, and then a new set of migrations +will be written out. Make sure to read the output to see what +``makemigrations`` thinks you have changed - it's not perfect, and for +complex changes it might not be detecting what you expect. + +Once you have your new migration files, you should apply them to your +database to make sure they work as expected:: + + $ python manage.py migrate + Operations to perform: + Synchronize unmigrated apps: sessions, admin, messages, auth, staticfiles, contenttypes + Apply all migrations: books + Synchronizing apps without migrations: + Creating tables... + Installing custom SQL... + Installing indexes... + Installed 0 object(s) from 0 fixture(s) + Running migrations: + Applying books.0003_auto... OK + +The command runs in two stages; first, it synchronizes unmigrated apps +(performing the same functionality that ``syncdb`` used to provide), and +then it runs any migrations that have not yet been applied. + +Once the migration is applied, commit the migration and the models change +to your version control system as a single commit - that way, when other +developers (or your production servers) check out the code, they'll +get both the changes to your models and the accompanying migration at the +same time. + +Dependencies +------------ + +While migrations are per-app, the tables and relationships implied by +your models are too complex to be created for just one app at a time. When +you make a migration that requires something else to run - for example, +you add a ForeignKey in your ``books`` app to your ``authors`` app - the +resulting migration will contain a dependency on a migration in ``authors``. + +This means that when you run the migrations, the ``authors`` migration runs +first and creates the table the ForeignKey references, and then the migration +that makes the ForeignKey column runs afterwards and creates the constraint. +If this didn't happen, the migration would try to create the ForeignKey column +without the table it's referencing existing and your database would +throw an error. + +This dependency behaviour affects most migration operations where you +restrict to a single app. Restricting to a single app (either in +``makemigrations`` or ``migrate``) is a best-efforts promise, and not +a guarantee; any other apps that need to be used to get dependencies correct +will be. 
+ +Migration files +--------------- + +Migrations are stored as an on-disk format, referred to here as +"migration files". These files are actually just normal Python files with +an agreed-upon object layout, written in a declarative style. + +A basic migration file looks like this:: + + from django.db import migrations, models + + class Migration(migrations.Migration): + + dependencies = [("migrations", "0001_initial")] + + operations = [ + migrations.DeleteModel("Tribble"), + migrations.AddField("Author", "rating", models.IntegerField(default=0)), + ] + +What Django looks for when it loads a migration file (as a Python module) is +a subclass of ``django.db.migrations.Migration`` called ``Migration``. It then +inspects this object for four attributes, only two of which are used +most of the time: + +* ``dependencies``, a list of migrations this one depends on. +* ``operations``, a list of Operation classes that define what this migration + does. + +The operations are the key; they are a set of declarative instructions which +tell Django what schema changes need to be made. Django scans them and +builds an in-memory representation of all of the schema changes to all apps, +and uses this to generate the SQL which makes the schema changes. + +That in-memory structure is also used to work out what the differences are +between your models and the current state of your migrations; Django runs +through all the changes, in order, on an in-memory set of models to come +up with the state of your models last time you ran ``makemigrations``. It +then uses these models to compare against the ones in your ``models.py`` files +to work out what you have changed. + +You should rarely, if ever, need to edit migration files by hand, but +it's entirely possible to write them manually if you need to. Some of the +more complex operations are not autodetectable and are only available via +a hand-written migration, so don't be scared about editing them if you have to. diff --git a/docs/topics/serialization.txt b/docs/topics/serialization.txt index e88e16029e..d3fda16479 100644 --- a/docs/topics/serialization.txt +++ b/docs/topics/serialization.txt @@ -296,7 +296,7 @@ serialize an object that refers to a content type, then you need to have a way to refer to that content type to begin with. Since ``ContentType`` objects are automatically created by Django during the database synchronization process, the primary key of a given content type isn't easy to predict; it will -depend on how and when :djadmin:`syncdb` was executed. This is true for all +depend on how and when :djadmin:`migrate` was executed. This is true for all models which automatically generate objects, notably including :class:`~django.contrib.auth.models.Permission`, :class:`~django.contrib.auth.models.Group`, and diff --git a/docs/topics/testing/advanced.txt b/docs/topics/testing/advanced.txt index 2417274ab5..4d7f22aaa2 100644 --- a/docs/topics/testing/advanced.txt +++ b/docs/topics/testing/advanced.txt @@ -278,7 +278,7 @@ testing behavior. This behavior involves: #. Creating the test databases. -#. Running ``syncdb`` to install models and initial data into the test +#. Running ``migrate`` to install models and initial data into the test databases. #. Running the tests that were found. @@ -469,7 +469,7 @@ can be useful during testing. .. function:: create_test_db([verbosity=1, autoclobber=False]) - Creates a new test database and runs ``syncdb`` against it. + Creates a new test database and runs ``migrate`` against it. 
``verbosity`` has the same behavior as in ``run_tests()``. diff --git a/docs/topics/testing/overview.txt b/docs/topics/testing/overview.txt index 0380d6931b..dbeaf0fb00 100644 --- a/docs/topics/testing/overview.txt +++ b/docs/topics/testing/overview.txt @@ -1215,9 +1215,9 @@ documentation` for more details. .. note:: - If you've ever run :djadmin:`manage.py syncdb`, you've + If you've ever run :djadmin:`manage.py migrate`, you've already used a fixture without even knowing it! When you call - :djadmin:`syncdb` in the database for the first time, Django + :djadmin:`migrate` in the database for the first time, Django installs a fixture called ``initial_data``. This gives you a way of populating a new database with any initial data, such as a default set of categories. From a758c9c1866559ab5dbf4a7705c07e8532dcd253 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Thu, 25 Jul 2013 16:31:34 +0100 Subject: [PATCH 101/161] Add test for creating M2Ms --- django/db/migrations/operations/models.py | 10 +++--- tests/migrations/test_autodetector.py | 4 +-- tests/migrations/test_operations.py | 37 +++++++++++++++++++---- 3 files changed, 38 insertions(+), 13 deletions(-) diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index 0c9e69e127..bf15201194 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -9,7 +9,7 @@ class CreateModel(Operation): """ def __init__(self, name, fields, options=None, bases=None): - self.name = name.lower() + self.name = name self.fields = fields self.options = options or {} self.bases = bases or (models.Model,) @@ -35,7 +35,7 @@ class DeleteModel(Operation): """ def __init__(self, name): - self.name = name.lower() + self.name = name def state_forwards(self, app_label, state): del state.models[app_label, self.name.lower()] @@ -58,7 +58,7 @@ class AlterModelTable(Operation): """ def __init__(self, name, table): - self.name = name.lower() + self.name = name self.table = table def state_forwards(self, app_label, state): @@ -87,7 +87,7 @@ class AlterUniqueTogether(Operation): """ def __init__(self, name, unique_together): - self.name = name.lower() + self.name = name self.unique_together = set(tuple(cons) for cons in unique_together) def state_forwards(self, app_label, state): @@ -117,7 +117,7 @@ class AlterIndexTogether(Operation): """ def __init__(self, name, index_together): - self.name = name.lower() + self.name = name self.index_together = set(tuple(cons) for cons in index_together) def state_forwards(self, app_label, state): diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 7bed4eb57e..9b7fbd5e8a 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -85,7 +85,7 @@ class AutodetectorTests(TestCase): # Right action? action = migration.operations[0] self.assertEqual(action.__class__.__name__, "CreateModel") - self.assertEqual(action.name, "author") + self.assertEqual(action.name, "Author") def test_old_model(self): "Tests deletion of old models" @@ -102,7 +102,7 @@ class AutodetectorTests(TestCase): # Right action? 
action = migration.operations[0] self.assertEqual(action.__class__.__name__, "DeleteModel") - self.assertEqual(action.name, "author") + self.assertEqual(action.name, "Author") def test_add_field(self): "Tests autodetection of new fields" diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index c74d40e4f2..ad909f7fdd 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -12,24 +12,28 @@ class OperationTests(MigrationTestBase): both forwards and backwards. """ - def set_up_test_model(self, app_label): + def set_up_test_model(self, app_label, second_model=False): """ Creates a test model state and database table. """ # Make the "current" state - creation = migrations.CreateModel( + operations = [migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ("weight", models.FloatField()), ], - ) + )] + if second_model: + operations.append(migrations.CreateModel("Stable", [("id", models.AutoField(primary_key=True))])) project_state = ProjectState() - creation.state_forwards(app_label, project_state) + for operation in operations: + operation.state_forwards(app_label, project_state) # Set up the database with connection.schema_editor() as editor: - creation.database_forwards(app_label, editor, ProjectState(), project_state) + for operation in operations: + operation.database_forwards(app_label, editor, ProjectState(), project_state) return project_state def test_create_model(self): @@ -48,7 +52,7 @@ class OperationTests(MigrationTestBase): project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) - self.assertEqual(new_state.models["test_crmo", "pony"].name, "pony") + self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") @@ -106,6 +110,27 @@ class OperationTests(MigrationTestBase): operation.database_backwards("test_adfl", editor, new_state, project_state) self.assertColumnNotExists("test_adfl_pony", "height") + def test_add_field_m2m(self): + """ + Tests the AddField operation with a ManyToManyField. + """ + project_state = self.set_up_test_model("test_adflmm", second_model=True) + # Test the state alteration + operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) + new_state = project_state.clone() + operation.state_forwards("test_adflmm", new_state) + self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) + # Test the database alteration + self.assertTableNotExists("test_adflmm_pony_stables") + with connection.schema_editor() as editor: + operation.database_forwards("test_adflmm", editor, project_state, new_state) + self.assertTableExists("test_adflmm_pony_stables") + self.assertColumnNotExists("test_adflmm_pony", "stables") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_adflmm", editor, new_state, project_state) + self.assertTableNotExists("test_adflmm_pony_stables") + def test_remove_field(self): """ Tests the RemoveField operation. 
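For orientation, the operation exercised by ``test_add_field_m2m`` above corresponds to a
migration file along these lines (a sketch only; the app label and the ``0001_initial``
dependency mirror the test fixtures rather than any real project)::

    from django.db import migrations, models

    class Migration(migrations.Migration):

        dependencies = [("test_adflmm", "0001_initial")]

        operations = [
            # Creates the M2M through table test_adflmm_pony_stables, but no
            # column on the Pony table itself.
            migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")),
        ]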
From 88e1e6f9f3fc6eb9f148805b6df6ec8af4cf6977 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:28:09 +0100 Subject: [PATCH 102/161] A bit more documentation --- docs/topics/migrations.txt | 51 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt index 60f83e497c..f80cbf81fd 100644 --- a/docs/topics/migrations.txt +++ b/docs/topics/migrations.txt @@ -154,6 +154,26 @@ developers (or your production servers) check out the code, they'll get both the changes to your models and the accompanying migration at the same time. +Version control +~~~~~~~~~~~~~~~ + +Because migrations are stored in version control, you'll occasionally +come across situations where you and another developer have both committed +a migration to the same app at the same time, resulting in two migrations +with the same number. + +Don't worry - the numbers are just there for developers' reference, Django +just cares that each migration has a different name. Migrations specify which +other migrations they depend on - including earlier migrations in the same +app - in the file, so it's possible to detect when there's two new migrations +for the same app that aren't ordered. + +When this happens, Django will prompt you and give you some options. If it +thinks it's safe enough, it will offer to automatically linearise the two +migrations for you. If not, you'll have to go in and modify the migrations +yourself - don't worry, this isn't difficult, and is explained more in +:ref:`migration-files` below. + Dependencies ------------ @@ -176,6 +196,8 @@ restrict to a single app. Restricting to a single app (either in a guarantee; any other apps that need to be used to get dependencies correct will be. +.. migration-files: + Migration files --------------- @@ -221,3 +243,32 @@ You should rarely, if ever, need to edit migration files by hand, but it's entirely possible to write them manually if you need to. Some of the more complex operations are not autodetectable and are only available via a hand-written migration, so don't be scared about editing them if you have to. + +Adding migrations to apps +------------------------- + +Adding migrations to new apps is straightforward - they come preconfigured to +accept migrations, and so just run :djadmin:`makemigrations` once you've made +some changes. + +If your app already has models and database tables, and doesn't have migrations +yet (for example, you created it against a previous Django version), you'll +need to convert it to use migrations; this is a simple process:: + + python manage.py makemigrations --force yourappname + +This will make a new initial migration for your app (the ``--force`` argument +is to override Django's default behaviour, as it thinks your app does not want +migrations). Now, when you run :djadmin:`migrate`, Django will detect that +you have an initial migration *and* that the tables it wants to create already +exist, and will mark the migration as already applied. + +Note that this only works given two things: + +* You have not changed your models since you made their tables. For migrations + to work, you must make the initial migration *first* and then make changes, + as Django compares changes against migration files, not the database. + +* You have not manually edited your database - Django won't be able to detect + that your database doesn't match your models, you'll just get errors when + migrations try and modify those tables. 
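To make the "modify the migrations yourself" case from the version control section
concrete: linearising two clashing migrations by hand usually just means making one
depend on the other. A hedged sketch, with invented app and migration names::

    from django.db import migrations, models

    class Migration(migrations.Migration):

        dependencies = [
            ("books", "0001_initial"),
            # Added by hand so this migration is ordered after the one a
            # colleague committed with the same number.
            ("books", "0002_add_publisher"),
        ]

        operations = [
            migrations.AddField("Book", "subtitle", models.CharField(max_length=255)),
        ]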
From 6b39010d5793689af2d3e5d9a2f9e92d99d0f105 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:33:32 +0100 Subject: [PATCH 103/161] Remove nasty error message checking hack --- django/core/management/commands/migrate.py | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 8e3c79a431..d9fbb846ac 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -12,6 +12,7 @@ from django.db.migrations.executor import MigrationExecutor from django.db.migrations.loader import AmbiguityError from django.utils.datastructures import SortedDict from django.utils.importlib import import_module +from django.utils.module_loading import module_has_submodule, import_by_path class Command(BaseCommand): @@ -40,21 +41,8 @@ class Command(BaseCommand): # Import the 'management' module within each installed app, to register # dispatcher events. for app_name in settings.INSTALLED_APPS: - try: + if module_has_submodule(import_module(app_name), "management"): import_module('.management', app_name) - except ImportError as exc: - # This is slightly hackish. We want to ignore ImportErrors - # if the "management" module itself is missing -- but we don't - # want to ignore the exception if the management module exists - # but raises an ImportError for some reason. The only way we - # can do this is to check the text of the exception. Note that - # we're a bit broad in how we check the text, because different - # Python implementations may not use the same text. - # CPython uses the text "No module named management" - # PyPy uses "No module named myproject.myapp.management" - msg = exc.args[0] - if not msg.startswith('No module named') or 'management' not in msg: - raise # Get the database we're operating from db = options.get('database') From c1ed21fa9ee5f64bcf6f70fa30580989bd579a91 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:39:44 +0100 Subject: [PATCH 104/161] Use new transaction API in syncdb section of migrate --- django/core/management/commands/migrate.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index d9fbb846ac..882ebde073 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -12,7 +12,7 @@ from django.db.migrations.executor import MigrationExecutor from django.db.migrations.loader import AmbiguityError from django.utils.datastructures import SortedDict from django.utils.importlib import import_module -from django.utils.module_loading import module_has_submodule, import_by_path +from django.utils.module_loading import module_has_submodule class Command(BaseCommand): @@ -162,7 +162,7 @@ class Command(BaseCommand): # Create the tables for each model if self.verbosity >= 1: self.stdout.write(" Creating tables...\n") - with transaction.commit_on_success_unless_managed(using=connection.alias): + with transaction.atomic(using=connection.alias, savepoint=False): for app_name, model_list in manifest.items(): for model in model_list: # Create the model's database table, if it doesn't already exist. 
@@ -181,6 +181,10 @@ class Command(BaseCommand): for statement in sql: cursor.execute(statement) tables.append(connection.introspection.table_name_converter(model._meta.db_table)) + + # We force a commit here, as that was the previous behaviour. + # If you can prove we don't need this, remove it. + transaction.set_dirty(using=connection.alias) # Send the post_syncdb signal, so individual apps can do whatever they need # to do at this point. From 9c6d57ef5a242e4058b5743471db8f2b6273c86e Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:40:27 +0100 Subject: [PATCH 105/161] Prettier imports --- django/db/backends/schema.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index b96e00445c..b2e6c29e25 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -1,11 +1,12 @@ -import sys import hashlib import operator +import sys + from django.db.backends.creation import BaseDatabaseCreation from django.db.backends.util import truncate_name -from django.utils.log import getLogger from django.db.models.fields.related import ManyToManyField from django.db.transaction import atomic +from django.utils.log import getLogger from django.utils.six.moves import reduce logger = getLogger('django.db.backends.schema') From 3f7113f1d976a9bd668afd8114269e8c594dd842 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:46:48 +0100 Subject: [PATCH 106/161] Fix timezone warnings if USE_TZ=True --- django/db/migrations/recorder.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/django/db/migrations/recorder.py b/django/db/migrations/recorder.py index 65e4fbbda7..c66d122068 100644 --- a/django/db/migrations/recorder.py +++ b/django/db/migrations/recorder.py @@ -1,6 +1,6 @@ -import datetime from django.db import models from django.db.models.loading import BaseAppCache +from django.utils.timezone import now class MigrationRecorder(object): @@ -19,7 +19,7 @@ class MigrationRecorder(object): class Migration(models.Model): app = models.CharField(max_length=255) name = models.CharField(max_length=255) - applied = models.DateTimeField(default=datetime.datetime.utcnow) + applied = models.DateTimeField(default=now) class Meta: app_cache = BaseAppCache() app_label = "migrations" From 52643a69e3fac42c14143f3ca71ec9f5b7e64296 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:47:00 +0100 Subject: [PATCH 107/161] Add --fake option to migrate --- django/core/management/commands/migrate.py | 6 ++++-- django/db/migrations/executor.py | 24 ++++++++++++---------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 882ebde073..73d453cf99 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -24,6 +24,8 @@ class Command(BaseCommand): make_option('--database', action='store', dest='database', default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. ' 'Defaults to the "default" database.'), + make_option('--fake', action='store_true', dest='fake', default=False, + help='Mark migrations as run without actually running them'), ) help = "Updates database schema. Manages both apps with migrations and those without." 
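As a hedged aside on the ``recorder.py`` change above (swapping ``datetime.datetime.utcnow``
for ``django.utils.timezone.now``): with ``USE_TZ = True`` Django expects aware datetimes,
and saving a naive one into a ``DateTimeField`` triggers a ``RuntimeWarning``. The
difference is easy to see in a shell session::

    import datetime
    from django.utils import timezone

    timezone.now()              # aware datetime (UTC) when USE_TZ = True
    datetime.datetime.utcnow()  # naive datetime; storing it in a
                                # DateTimeField provokes the warning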
@@ -109,7 +111,7 @@ class Command(BaseCommand): if self.verbosity >= 1: self.stdout.write(" No migrations needed.") else: - executor.migrate(targets, plan) + executor.migrate(targets, plan, fake=options.get("fake", False)) def migration_progress_callback(self, action, migration): if self.verbosity >= 1: @@ -181,7 +183,7 @@ class Command(BaseCommand): for statement in sql: cursor.execute(statement) tables.append(connection.introspection.table_name_converter(model._meta.db_table)) - + # We force a commit here, as that was the previous behaviour. # If you can prove we don't need this, remove it. transaction.set_dirty(using=connection.alias) diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py index 46bbfc0ef2..9d99c90c11 100644 --- a/django/db/migrations/executor.py +++ b/django/db/migrations/executor.py @@ -44,7 +44,7 @@ class MigrationExecutor(object): applied.add(migration) return plan - def migrate(self, targets, plan=None): + def migrate(self, targets, plan=None, fake=False): """ Migrates the database up to the given targets. """ @@ -52,32 +52,34 @@ class MigrationExecutor(object): plan = self.migration_plan(targets) for migration, backwards in plan: if not backwards: - self.apply_migration(migration) + self.apply_migration(migration, fake=fake) else: - self.unapply_migration(migration) + self.unapply_migration(migration, fake=fake) - def apply_migration(self, migration): + def apply_migration(self, migration, fake=False): """ Runs a migration forwards. """ if self.progress_callback: self.progress_callback("apply_start", migration) - with self.connection.schema_editor() as schema_editor: - project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) - migration.apply(project_state, schema_editor) + if not fake: + with self.connection.schema_editor() as schema_editor: + project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) + migration.apply(project_state, schema_editor) self.recorder.record_applied(migration.app_label, migration.name) if self.progress_callback: self.progress_callback("apply_success", migration) - def unapply_migration(self, migration): + def unapply_migration(self, migration, fake=False): """ Runs a migration backwards. 
""" if self.progress_callback: self.progress_callback("unapply_start", migration) - with self.connection.schema_editor() as schema_editor: - project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) - migration.unapply(project_state, schema_editor) + if not fake: + with self.connection.schema_editor() as schema_editor: + project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) + migration.unapply(project_state, schema_editor) self.recorder.record_unapplied(migration.app_label, migration.name) if self.progress_callback: self.progress_callback("unapply_success", migration) From d5ca1693341daccce9b0dd8504967a56b289d92f Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 16:52:17 +0100 Subject: [PATCH 108/161] Fix "OK" alignment in migrate output --- django/core/management/commands/migrate.py | 8 ++++---- django/utils/termcolors.py | 6 ++++++ 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 73d453cf99..d9a677e567 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -116,15 +116,15 @@ class Command(BaseCommand): def migration_progress_callback(self, action, migration): if self.verbosity >= 1: if action == "apply_start": - self.stdout.write(" Applying %s... " % migration) + self.stdout.write(" Applying %s..." % migration, ending="") self.stdout.flush() elif action == "apply_success": - self.stdout.write(" OK\n") + self.stdout.write(self.style.MIGRATE_SUCCESS(" OK")) elif action == "unapply_start": - self.stdout.write(" Unapplying %s... " % migration) + self.stdout.write(" Unapplying %s..." % migration, ending="") self.stdout.flush() elif action == "unapply_success": - self.stdout.write(" OK\n") + self.stdout.write(self.style.MIGRATE_SUCCESS(" OK")) def sync_apps(self, connection, apps): "Runs the old syncdb-style operation on a list of apps." 
diff --git a/django/utils/termcolors.py b/django/utils/termcolors.py index 3562fa4fb5..95d0d17f0f 100644 --- a/django/utils/termcolors.py +++ b/django/utils/termcolors.py @@ -88,6 +88,8 @@ PALETTES = { 'HTTP_SERVER_ERROR': {}, 'MIGRATE_HEADING': {}, 'MIGRATE_LABEL': {}, + 'MIGRATE_SUCCESS': {}, + 'MIGRATE_FAILURE': {}, }, DARK_PALETTE: { 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, @@ -105,6 +107,8 @@ PALETTES = { 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, 'MIGRATE_HEADING': { 'fg': 'cyan', 'opts': ('bold',) }, 'MIGRATE_LABEL': { 'opts': ('bold',) }, + 'MIGRATE_SUCCESS': { 'fg': 'green', 'opts': ('bold',) }, + 'MIGRATE_FAILURE': { 'fg': 'red', 'opts': ('bold',) }, }, LIGHT_PALETTE: { 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, @@ -122,6 +126,8 @@ PALETTES = { 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, 'MIGRATE_HEADING': { 'fg': 'cyan', 'opts': ('bold',) }, 'MIGRATE_LABEL': { 'opts': ('bold',) }, + 'MIGRATE_SUCCESS': { 'fg': 'green', 'opts': ('bold',) }, + 'MIGRATE_FAILURE': { 'fg': 'red', 'opts': ('bold',) }, } } DEFAULT_PALETTE = DARK_PALETTE From c8cbdabfab3a150904a2214930e82112d0231ff2 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 26 Jul 2013 17:08:12 +0100 Subject: [PATCH 109/161] Fix Python 3 support --- django/db/migrations/autodetector.py | 4 ++-- django/db/migrations/graph.py | 4 ++-- django/db/migrations/writer.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index d524f96a0b..e737cb8af9 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -367,9 +367,9 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): return result[0].lower() == "y" def _choice_input(self, question, choices): - print question + print(question) for i, choice in enumerate(choices): - print " %s) %s" % (i + 1, choice) + print(" %s) %s" % (i + 1, choice)) result = input("Select an option: ") while True: try: diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index 1bbe0092ae..3b6495689a 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -70,7 +70,7 @@ class MigrationGraph(object): """ roots = set() for node in self.nodes: - if not filter(lambda key: key[0] == node[0], self.dependencies.get(node, set())): + if not any(key[0] == node[0] for key in self.dependencies.get(node, set())): roots.add(node) return roots @@ -84,7 +84,7 @@ class MigrationGraph(object): """ leaves = set() for node in self.nodes: - if not filter(lambda key: key[0] == node[0], self.dependents.get(node, set())): + if not any(key[0] == node[0] for key in self.dependents.get(node, set())): leaves.add(node) return leaves diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py index 00e83681cd..4cc84dcca7 100644 --- a/django/db/migrations/writer.py +++ b/django/db/migrations/writer.py @@ -109,7 +109,7 @@ class MigrationWriter(object): elif isinstance(value, (datetime.datetime, datetime.date)): return repr(value), set(["import datetime"]) # Simple types - elif isinstance(value, (int, long, float, six.binary_type, six.text_type, bool, types.NoneType)): + elif isinstance(value, six.integer_types + (float, six.binary_type, six.text_type, bool, type(None))): return repr(value), set() # Django fields elif isinstance(value, models.Field): From 086389f5fc64bc47661f53332bf6ab3e2e882392 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 30 Jul 2013 11:52:36 +0100 Subject: [PATCH 
110/161] Start adding schema migration into the release notes --- docs/internals/deprecation.txt | 6 ++++++ docs/releases/1.7.txt | 24 ++++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/docs/internals/deprecation.txt b/docs/internals/deprecation.txt index b0f5566cb3..7fb7ec7cad 100644 --- a/docs/internals/deprecation.txt +++ b/docs/internals/deprecation.txt @@ -414,6 +414,12 @@ these changes. * ``django.utils.unittest`` will be removed. +* ``django.db.models.signals.pre_syncdb`` and + ``django.db.models.signals.post_syncdb`` will be removed, and + ``django.db.models.signals.pre_migrate`` and + ``django.db.models.signals.post_migrate`` will lose their + ``create_models`` and ``created_models`` arguments. + 2.0 --- diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index 8c5a0fb585..5bd462b4a8 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -30,6 +30,30 @@ security support until the release of Django 1.8. What's new in Django 1.7 ======================== +Schema migrations +~~~~~~~~~~~~~~~~~ + +Django now has built-in support for schema migrations, which allows models +to be updated, changed and deleted and the changes stored into migration files +and then run on any deployed database. + +Migrations are covered in :doc:`their own documentation`, +but a few of the key features are: + +* ``syncdb`` has been deprecated and replaced by ``migrate``. Don't worry - + calls to ``syncdb`` will still work as before. + +* A new ``makemigrations`` command provides an easy way to autodetect changes + to your models and make migrations for them. + +* :data:`~django.db.models.signals.post_syncdb` and + :data:`~django.db.models.signals.post_syncdb` have been renamed to + :data:`~django.db.models.signals.pre_migrate` and + :data:`~django.db.models.signals.post_migrate` respectively. The + ``create_models``/``created_models`` argument has also been deprecated. + +* Routers something something. 
+ Admin shortcuts support time zones ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 68e0a169c4f9fa7f8071e014b274fd59e970f9a3 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 30 Jul 2013 11:52:52 +0100 Subject: [PATCH 111/161] Rename pre_ and post_syncdb to *_migrate, with aliases from old names --- django/contrib/auth/management/__init__.py | 4 +- django/contrib/contenttypes/management.py | 2 +- .../gis/db/backends/spatialite/creation.py | 2 +- django/contrib/sites/management.py | 2 +- django/core/management/commands/flush.py | 22 ++--- django/core/management/commands/migrate.py | 21 +++-- django/core/management/sql.py | 16 ++-- django/db/backends/creation.py | 2 +- django/db/models/signals.py | 6 +- django/test/testcases.py | 11 ++- docs/ref/contrib/sites.txt | 2 +- docs/ref/signals.txt | 87 +++++++++++++------ docs/topics/testing/advanced.txt | 10 +-- 13 files changed, 113 insertions(+), 74 deletions(-) diff --git a/django/contrib/auth/management/__init__.py b/django/contrib/auth/management/__init__.py index 1f338469f8..e1b9be2e9b 100644 --- a/django/contrib/auth/management/__init__.py +++ b/django/contrib/auth/management/__init__.py @@ -187,7 +187,7 @@ def get_default_username(check_db=True): return '' return default_username -signals.post_syncdb.connect(create_permissions, +signals.post_migrate.connect(create_permissions, dispatch_uid="django.contrib.auth.management.create_permissions") -signals.post_syncdb.connect(create_superuser, +signals.post_migrate.connect(create_superuser, sender=auth_app, dispatch_uid="django.contrib.auth.management.create_superuser") diff --git a/django/contrib/contenttypes/management.py b/django/contrib/contenttypes/management.py index 64d1c418ef..21a34b2bfa 100644 --- a/django/contrib/contenttypes/management.py +++ b/django/contrib/contenttypes/management.py @@ -88,7 +88,7 @@ def update_all_contenttypes(verbosity=2, **kwargs): for app in get_apps(): update_contenttypes(app, None, verbosity, **kwargs) -signals.post_syncdb.connect(update_contenttypes) +signals.post_migrate.connect(update_contenttypes) if __name__ == "__main__": update_all_contenttypes() diff --git a/django/contrib/gis/db/backends/spatialite/creation.py b/django/contrib/gis/db/backends/spatialite/creation.py index d0a5f82033..2f0720ed84 100644 --- a/django/contrib/gis/db/backends/spatialite/creation.py +++ b/django/contrib/gis/db/backends/spatialite/creation.py @@ -45,7 +45,7 @@ class SpatiaLiteCreation(DatabaseCreation): # We need to then do a flush to ensure that any data installed by # custom SQL has been removed. The only test data should come from - # test fixtures, or autogenerated from post_syncdb triggers. + # test fixtures, or autogenerated from post_migrate triggers. # This has the side effect of loading initial data (which was # intentionally skipped in the syncdb). 
call_command('flush', diff --git a/django/contrib/sites/management.py b/django/contrib/sites/management.py index 7a29e82d4c..3ab49e5482 100644 --- a/django/contrib/sites/management.py +++ b/django/contrib/sites/management.py @@ -33,4 +33,4 @@ def create_default_site(app, created_models, verbosity, db, **kwargs): Site.objects.clear_cache() -signals.post_syncdb.connect(create_default_site, sender=site_app) +signals.post_migrate.connect(create_default_site, sender=site_app) diff --git a/django/core/management/commands/flush.py b/django/core/management/commands/flush.py index 95dd634d08..2ced3a2d60 100644 --- a/django/core/management/commands/flush.py +++ b/django/core/management/commands/flush.py @@ -6,7 +6,7 @@ from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.core.management import call_command from django.core.management.base import NoArgsCommand, CommandError from django.core.management.color import no_style -from django.core.management.sql import sql_flush, emit_post_sync_signal +from django.core.management.sql import sql_flush, emit_post_migrate_signal from django.utils.importlib import import_module from django.utils.six.moves import input from django.utils import six @@ -23,8 +23,8 @@ class Command(NoArgsCommand): help='Tells Django not to load any initial data after database synchronization.'), ) help = ('Returns the database to the state it was in immediately after ' - 'syncdb was executed. This means that all data will be removed ' - 'from the database, any post-synchronization handlers will be ' + 'migrate was first executed. This means that all data will be removed ' + 'from the database, any post-migration handlers will be ' 're-executed, and the initial_data fixture will be re-installed.') def handle_noargs(self, **options): @@ -35,7 +35,7 @@ class Command(NoArgsCommand): # The following are stealth options used by Django's internals. reset_sequences = options.get('reset_sequences', True) allow_cascade = options.get('allow_cascade', False) - inhibit_post_syncdb = options.get('inhibit_post_syncdb', False) + inhibit_post_migrate = options.get('inhibit_post_migrate', False) self.style = no_style() @@ -54,7 +54,7 @@ class Command(NoArgsCommand): if interactive: confirm = input("""You have requested a flush of the database. This will IRREVERSIBLY DESTROY all data currently in the %r database, -and return each table to the state it was in after syncdb. +and return each table to a fresh state. Are you sure you want to do this? Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME']) @@ -77,8 +77,8 @@ Are you sure you want to do this? "The full error: %s") % (connection.settings_dict['NAME'], e) six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2]) - if not inhibit_post_syncdb: - self.emit_post_syncdb(verbosity, interactive, db) + if not inhibit_post_migrate: + self.emit_post_migrate(verbosity, interactive, db) # Reinstall the initial_data fixture. if options.get('load_initial_data'): @@ -89,13 +89,13 @@ Are you sure you want to do this? self.stdout.write("Flush cancelled.\n") @staticmethod - def emit_post_syncdb(verbosity, interactive, database): - # Emit the post sync signal. This allows individual applications to - # respond as if the database had been sync'd from scratch. + def emit_post_migrate(verbosity, interactive, database): + # Emit the post migrate signal. This allows individual applications to + # respond as if the database had been migrated from scratch. 
all_models = [] for app in models.get_apps(): all_models.extend([ m for m in models.get_models(app, include_auto_created=True) if router.allow_syncdb(database, m) ]) - emit_post_sync_signal(set(all_models), verbosity, interactive, database) + emit_post_migrate_signal(set(all_models), verbosity, interactive, database) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index d9a677e567..cf0e40e6c7 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -6,7 +6,7 @@ from django.conf import settings from django.core.management import call_command from django.core.management.base import BaseCommand, CommandError from django.core.management.color import color_style, no_style -from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal +from django.core.management.sql import custom_sql_for_model, emit_post_migrate_signal, emit_pre_migrate_signal from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.db.migrations.executor import MigrationExecutor from django.db.migrations.loader import AmbiguityError @@ -99,10 +99,14 @@ class Command(BaseCommand): # Run the syncdb phase. # If you ever manage to get rid of this, I owe you many, many drinks. + # Note that pre_migrate is called from inside here, as it needs + # the list of models about to be installed. if run_syncdb: if self.verbosity >= 1: self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) - self.sync_apps(connection, executor.loader.unmigrated_apps) + created_models = self.sync_apps(connection, executor.loader.unmigrated_apps) + else: + created_models = [] # Migrate! if self.verbosity >= 1: @@ -113,6 +117,10 @@ class Command(BaseCommand): else: executor.migrate(targets, plan, fake=options.get("fake", False)) + # Send the post_migrate signal, so individual apps can do whatever they need + # to do at this point. + emit_post_migrate_signal(created_models, self.verbosity, self.interactive, connection.alias) + def migration_progress_callback(self, action, migration): if self.verbosity >= 1: if action == "apply_start": @@ -159,7 +167,7 @@ class Command(BaseCommand): ) create_models = set([x for x in itertools.chain(*manifest.values())]) - emit_pre_sync_signal(create_models, self.verbosity, self.interactive, connection.alias) + emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias) # Create the tables for each model if self.verbosity >= 1: @@ -188,10 +196,6 @@ class Command(BaseCommand): # If you can prove we don't need this, remove it. transaction.set_dirty(using=connection.alias) - # Send the post_syncdb signal, so individual apps can do whatever they need - # to do at this point. - emit_post_sync_signal(created_models, self.verbosity, self.interactive, connection.alias) - # The connection may have been closed by a syncdb handler. 
cursor = connection.cursor() @@ -220,6 +224,7 @@ class Command(BaseCommand): if self.verbosity >= 1: self.stdout.write(" Installing indexes...\n") + # Install SQL indices for all newly created models for app_name, model_list in manifest.items(): for model in model_list: @@ -238,3 +243,5 @@ class Command(BaseCommand): # Load initial_data fixtures (unless that has been disabled) if self.load_initial_data: call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=connection.alias, skip_validation=True) + + return created_models diff --git a/django/core/management/sql.py b/django/core/management/sql.py index b58d89f60a..4a61fcddb9 100644 --- a/django/core/management/sql.py +++ b/django/core/management/sql.py @@ -192,25 +192,25 @@ def custom_sql_for_model(model, style, connection): return output -def emit_pre_sync_signal(create_models, verbosity, interactive, db): - # Emit the pre_sync signal for every application. +def emit_pre_migrate_signal(create_models, verbosity, interactive, db): + # Emit the pre_migrate signal for every application. for app in models.get_apps(): app_name = app.__name__.split('.')[-2] if verbosity >= 2: - print("Running pre-sync handlers for application %s" % app_name) - models.signals.pre_syncdb.send(sender=app, app=app, + print("Running pre-migrate handlers for application %s" % app_name) + models.signals.pre_migrate.send(sender=app, app=app, create_models=create_models, verbosity=verbosity, interactive=interactive, db=db) -def emit_post_sync_signal(created_models, verbosity, interactive, db): - # Emit the post_sync signal for every application. +def emit_post_migrate_signal(created_models, verbosity, interactive, db): + # Emit the post_migrate signal for every application. for app in models.get_apps(): app_name = app.__name__.split('.')[-2] if verbosity >= 2: - print("Running post-sync handlers for application %s" % app_name) - models.signals.post_syncdb.send(sender=app, app=app, + print("Running post-migrate handlers for application %s" % app_name) + models.signals.post_migrate.send(sender=app, app=app, created_models=created_models, verbosity=verbosity, interactive=interactive, db=db) diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py index 2ebbbbf2d5..51716a88bd 100644 --- a/django/db/backends/creation.py +++ b/django/db/backends/creation.py @@ -344,7 +344,7 @@ class BaseDatabaseCreation(object): # We need to then do a flush to ensure that any data installed by # custom SQL has been removed. The only test data should come from - # test fixtures, or autogenerated from post_syncdb triggers. + # test fixtures, or autogenerated from post_migrate triggers. # This has the side effect of loading initial data (which was # intentionally skipped in the syncdb). 
call_command('flush', diff --git a/django/db/models/signals.py b/django/db/models/signals.py index 3e321893c1..e53ffc3d1f 100644 --- a/django/db/models/signals.py +++ b/django/db/models/signals.py @@ -12,7 +12,9 @@ post_save = Signal(providing_args=["instance", "raw", "created", "using", "updat pre_delete = Signal(providing_args=["instance", "using"], use_caching=True) post_delete = Signal(providing_args=["instance", "using"], use_caching=True) -pre_syncdb = Signal(providing_args=["app", "create_models", "verbosity", "interactive", "db"]) -post_syncdb = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive", "db"], use_caching=True) +pre_migrate = Signal(providing_args=["app", "create_models", "verbosity", "interactive", "db"]) +pre_syncdb = pre_migrate +post_migrate = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive", "db"], use_caching=True) +post_syncdb = post_migrate m2m_changed = Signal(providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True) diff --git a/django/test/testcases.py b/django/test/testcases.py index 6f3f1c00e4..5b72d4e8b8 100644 --- a/django/test/testcases.py +++ b/django/test/testcases.py @@ -718,7 +718,7 @@ class TransactionTestCase(SimpleTestCase): """Performs any pre-test setup. This includes: * If the class has an 'available_apps' attribute, restricting the app - cache to these applications, then firing post_syncdb -- it must run + cache to these applications, then firing post_migrate -- it must run with the correct set of applications for the test case. * If the class has a 'fixtures' attribute, installing these fixtures. """ @@ -726,8 +726,7 @@ class TransactionTestCase(SimpleTestCase): if self.available_apps is not None: cache.set_available_apps(self.available_apps) for db_name in self._databases_names(include_mirrors=False): - flush.Command.emit_post_syncdb( - verbosity=0, interactive=False, database=db_name) + flush.Command.emit_post_migrate(verbosity=0, interactive=False, database=db_name) try: self._fixture_setup() except Exception: @@ -772,7 +771,7 @@ class TransactionTestCase(SimpleTestCase): """Performs any post-test things. This includes: * Flushing the contents of the database, to leave a clean slate. If - the class has an 'available_apps' attribute, post_syncdb isn't fired. + the class has an 'available_apps' attribute, post_migrate isn't fired. * Force-closing the connection, so the next test gets a clean cursor. """ try: @@ -790,14 +789,14 @@ class TransactionTestCase(SimpleTestCase): cache.unset_available_apps() def _fixture_teardown(self): - # Allow TRUNCATE ... CASCADE and don't emit the post_syncdb signal + # Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal # when flushing only a subset of the apps for db_name in self._databases_names(include_mirrors=False): call_command('flush', verbosity=0, interactive=False, database=db_name, skip_validation=True, reset_sequences=False, allow_cascade=self.available_apps is not None, - inhibit_post_syncdb=self.available_apps is not None) + inhibit_post_migrate=self.available_apps is not None) def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True): items = six.moves.map(transform, qs) diff --git a/docs/ref/contrib/sites.txt b/docs/ref/contrib/sites.txt index 48f781310c..f6480cae3a 100644 --- a/docs/ref/contrib/sites.txt +++ b/docs/ref/contrib/sites.txt @@ -267,7 +267,7 @@ To enable the sites framework, follow these steps: 3. Run :djadmin:`migrate`. 
``django.contrib.sites`` registers a -:data:`~django.db.models.signals.post_syncdb` signal handler which creates a +:data:`~django.db.models.signals.post_migrate` signal handler which creates a default site named ``example.com`` with the domain ``example.com``. This site will also be created after Django creates the test database. To set the correct name and domain for your project, you can use an :doc:`initial data diff --git a/docs/ref/signals.txt b/docs/ref/signals.txt index d4f261cadb..b988371aa7 100644 --- a/docs/ref/signals.txt +++ b/docs/ref/signals.txt @@ -360,40 +360,36 @@ Management signals Signals sent by :doc:`django-admin `. -pre_syncdb ----------- +pre_migrate +----------- -.. data:: django.db.models.signals.pre_syncdb +.. data:: django.db.models.signals.pre_migrate :module: -Sent by the :djadmin:`syncdb` command before it starts to install an +Sent by the :djadmin:`migrate` command before it starts to install an application. Any handlers that listen to this signal need to be written in a particular place: a ``management`` module in one of your :setting:`INSTALLED_APPS`. If handlers are registered anywhere else they may not be loaded by -:djadmin:`syncdb`. +:djadmin:`migrate`. Arguments sent with this signal: ``sender`` - The ``models`` module that was just installed. That is, if - :djadmin:`syncdb` just installed an app called ``"foo.bar.myapp"``, - ``sender`` will be the ``foo.bar.myapp.models`` module. + The ``models`` module of the app about to be migrated/synced. + For example, if :djadmin:`migrate` is about to install + an app called ``"foo.bar.myapp"``, ``sender`` will be the + ``foo.bar.myapp.models`` module. ``app`` Same as ``sender``. -``create_models`` - A list of the model classes from any app which :djadmin:`syncdb` plans to - create. - - ``verbosity`` Indicates how much information manage.py is printing on screen. See the :djadminopt:`--verbosity` flag for details. - Functions which listen for :data:`pre_syncdb` should adjust what they + Functions which listen for :data:`pre_migrate` should adjust what they output to the screen based on the value of this argument. ``interactive`` @@ -407,42 +403,57 @@ Arguments sent with this signal: ``db`` The alias of database on which a command will operate. -post_syncdb ------------ -.. data:: django.db.models.signals.post_syncdb +pre_syncdb +---------- + +.. data:: django.db.models.signals.pre_syncdb :module: -Sent by the :djadmin:`syncdb` command after it installs an application, and the +.. deprecated:: 1.7 + + This signal has been renamed to :data:`~django.db.models.signals.pre_migrate`. + +Alias of :data:`django.db.models.signals.pre_migrate`. As long as this alias +is present, for backwards-compatability this signal has an extra argument it sends: + +``create_models`` + A list of the model classes from any app which :djadmin:`migrate` is + going to create, **only if the app has no migrations**. + + +post_migrate +------------ + +.. data:: django.db.models.signals.post_migrate + :module: + +Sent by the :djadmin:`migrate` command after it installs an application, and the :djadmin:`flush` command. Any handlers that listen to this signal need to be written in a particular place: a ``management`` module in one of your :setting:`INSTALLED_APPS`. If handlers are registered anywhere else they may not be loaded by -:djadmin:`syncdb`. It is important that handlers of this signal perform +:djadmin:`migrate`. It is important that handlers of this signal perform idempotent changes (e.g. 
no database alterations) as this may cause the :djadmin:`flush` management command to fail if it also ran during the -:djadmin:`syncdb` command. +:djadmin:`migrate` command. Arguments sent with this signal: ``sender`` The ``models`` module that was just installed. That is, if - :djadmin:`syncdb` just installed an app called ``"foo.bar.myapp"``, + :djadmin:`migrate` just installed an app called ``"foo.bar.myapp"``, ``sender`` will be the ``foo.bar.myapp.models`` module. ``app`` Same as ``sender``. -``created_models`` - A list of the model classes from any app which :djadmin:`syncdb` has - created so far. - ``verbosity`` Indicates how much information manage.py is printing on screen. See the :djadminopt:`--verbosity` flag for details. - Functions which listen for :data:`post_syncdb` should adjust what they + Functions which listen for :data:`post_migrate` should adjust what they output to the screen based on the value of this argument. ``interactive`` @@ -459,14 +470,34 @@ Arguments sent with this signal: For example, ``yourapp/management/__init__.py`` could be written like:: - from django.db.models.signals import post_syncdb + from django.db.models.signals import post_migrate import yourapp.models def my_callback(sender, **kwargs): # Your specific logic here pass - post_syncdb.connect(my_callback, sender=yourapp.models) + post_migrate.connect(my_callback, sender=yourapp.models) + + +post_syncdb +----------- + +.. data:: django.db.models.signals.post_syncdb + :module: + +.. deprecated:: 1.7 + + This signal has been renamed to :data:`~django.db.models.signals.post_migrate`. + +Alias of :data:`django.db.models.signals.post_migrate`. As long as this alias +is present, for backwards-compatability this signal has an extra argument it sends: + +``created_models`` + A list of the model classes from any app which :djadmin:`migrate` has + created, **only if the app has no migrations**. + + Request/response signals ======================== diff --git a/docs/topics/testing/advanced.txt b/docs/topics/testing/advanced.txt index 4d7f22aaa2..6d9ea8d5c1 100644 --- a/docs/topics/testing/advanced.txt +++ b/docs/topics/testing/advanced.txt @@ -182,7 +182,7 @@ Advanced features of ``TransactionTestCase`` By default, ``available_apps`` is set to ``None``. After each test, Django calls :djadmin:`flush` to reset the database state. This empties all tables - and emits the :data:`~django.db.models.signals.post_syncdb` signal, which + and emits the :data:`~django.db.models.signals.post_migrate` signal, which re-creates one content type and three permissions for each model. This operation gets expensive proportionally to the number of models. @@ -190,13 +190,13 @@ Advanced features of ``TransactionTestCase`` behave as if only the models from these applications were available. The behavior of ``TransactionTestCase`` changes as follows: - - :data:`~django.db.models.signals.post_syncdb` is fired before each + - :data:`~django.db.models.signals.post_migrate` is fired before each test to create the content types and permissions for each model in available apps, in case they're missing. - After each test, Django empties only tables corresponding to models in available apps. However, at the database level, truncation may cascade to related models in unavailable apps. Furthermore - :data:`~django.db.models.signals.post_syncdb` isn't fired; it will be + :data:`~django.db.models.signals.post_migrate` isn't fired; it will be fired by the next ``TransactionTestCase``, after the correct set of applications is selected. 
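Because the ``django/db/models/signals.py`` hunk in this patch defines ``pre_syncdb = pre_migrate`` and ``post_syncdb = post_migrate`` as plain module-level aliases rather than separate ``Signal`` instances, receivers connected under either name end up on the same signal object. A minimal sketch of what that means for existing code (the callback is illustrative, and a configured Django project is assumed)::

    from django.db.models import signals

    def my_callback(sender, **kwargs):
        # Fires after migrate installs an app, and again when flush
        # re-emits the signal.
        print("post_migrate fired for %r" % (sender,))

    # Connecting through the deprecated name still works, because both
    # names are bound to the same Signal instance.
    signals.post_syncdb.connect(my_callback)
    assert signals.post_syncdb is signals.post_migrate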
@@ -205,10 +205,10 @@ Advanced features of ``TransactionTestCase`` cause unrelated tests to fail. Be careful with tests that use sessions; the default session engine stores them in the database. - Since :data:`~django.db.models.signals.post_syncdb` isn't emitted after + Since :data:`~django.db.models.signals.post_migrate` isn't emitted after flushing the database, its state after a ``TransactionTestCase`` isn't the same as after a ``TestCase``: it's missing the rows created by listeners - to :data:`~django.db.models.signals.post_syncdb`. Considering the + to :data:`~django.db.models.signals.post_migrate`. Considering the :ref:`order in which tests are executed `, this isn't an issue, provided either all ``TransactionTestCase`` in a given test suite declare ``available_apps``, or none of them. From 12e9804d163777af17cc2a3dfdfff49e5f750ebd Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 30 Jul 2013 12:08:59 +0100 Subject: [PATCH 112/161] Rename allow_syncdb to allow_migrate --- django/contrib/auth/management/__init__.py | 2 +- django/contrib/contenttypes/management.py | 2 +- django/contrib/gis/tests/layermap/tests.py | 2 +- django/contrib/sites/management.py | 2 +- .../management/commands/createcachetable.py | 2 +- django/core/management/commands/dumpdata.py | 2 +- django/core/management/commands/flush.py | 2 +- django/core/management/commands/loaddata.py | 2 +- django/core/management/commands/migrate.py | 2 +- django/db/backends/__init__.py | 6 +-- django/db/utils.py | 7 ++- docs/internals/deprecation.txt | 5 ++ docs/releases/1.7.txt | 5 +- docs/topics/cache.txt | 4 +- docs/topics/db/multi-db.txt | 15 +++--- tests/cache/tests.py | 2 +- tests/multiple_database/tests.py | 46 +++++++++---------- 17 files changed, 61 insertions(+), 47 deletions(-) diff --git a/django/contrib/auth/management/__init__.py b/django/contrib/auth/management/__init__.py index e1b9be2e9b..343828ec17 100644 --- a/django/contrib/auth/management/__init__.py +++ b/django/contrib/auth/management/__init__.py @@ -64,7 +64,7 @@ def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kw except UnavailableApp: return - if not router.allow_syncdb(db, auth_app.Permission): + if not router.allow_migrate(db, auth_app.Permission): return from django.contrib.contenttypes.models import ContentType diff --git a/django/contrib/contenttypes/management.py b/django/contrib/contenttypes/management.py index 21a34b2bfa..4278bbd1e7 100644 --- a/django/contrib/contenttypes/management.py +++ b/django/contrib/contenttypes/management.py @@ -16,7 +16,7 @@ def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, * except UnavailableApp: return - if not router.allow_syncdb(db, ContentType): + if not router.allow_migrate(db, ContentType): return ContentType.objects.clear_cache() diff --git a/django/contrib/gis/tests/layermap/tests.py b/django/contrib/gis/tests/layermap/tests.py index c4c27b353e..3b040624f3 100644 --- a/django/contrib/gis/tests/layermap/tests.py +++ b/django/contrib/gis/tests/layermap/tests.py @@ -311,7 +311,7 @@ class OtherRouter(object): def allow_relation(self, obj1, obj2, **hints): return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): return True diff --git a/django/contrib/sites/management.py b/django/contrib/sites/management.py index 3ab49e5482..d9e3a2126c 100644 --- a/django/contrib/sites/management.py +++ b/django/contrib/sites/management.py @@ -11,7 +11,7 @@ from django.core.management.color import no_style def create_default_site(app, 
created_models, verbosity, db, **kwargs): # Only create the default sites in databases where Django created the table - if Site in created_models and router.allow_syncdb(db, Site) : + if Site in created_models and router.allow_migrate(db, Site) : # The default settings set SITE_ID = 1, and some tests in Django's test # suite rely on this value. However, if database sequences are reused # (e.g. in the test suite after flush/syncdb), it isn't guaranteed that diff --git a/django/core/management/commands/createcachetable.py b/django/core/management/commands/createcachetable.py index d7ce3e93fd..27668f272d 100644 --- a/django/core/management/commands/createcachetable.py +++ b/django/core/management/commands/createcachetable.py @@ -24,7 +24,7 @@ class Command(LabelCommand): def handle_label(self, tablename, **options): db = options.get('database') cache = BaseDatabaseCache(tablename, {}) - if not router.allow_syncdb(db, cache.cache_model_class): + if not router.allow_migrate(db, cache.cache_model_class): return connection = connections[db] fields = ( diff --git a/django/core/management/commands/dumpdata.py b/django/core/management/commands/dumpdata.py index c5eb1b9a9e..5e440196fc 100644 --- a/django/core/management/commands/dumpdata.py +++ b/django/core/management/commands/dumpdata.py @@ -117,7 +117,7 @@ class Command(BaseCommand): for model in sort_dependencies(app_list.items()): if model in excluded_models: continue - if not model._meta.proxy and router.allow_syncdb(using, model): + if not model._meta.proxy and router.allow_migrate(using, model): if use_base_manager: objects = model._base_manager else: diff --git a/django/core/management/commands/flush.py b/django/core/management/commands/flush.py index 2ced3a2d60..a6ea45ce95 100644 --- a/django/core/management/commands/flush.py +++ b/django/core/management/commands/flush.py @@ -96,6 +96,6 @@ Are you sure you want to do this? 
for app in models.get_apps(): all_models.extend([ m for m in models.get_models(app, include_auto_created=True) - if router.allow_syncdb(database, m) + if router.allow_migrate(database, m) ]) emit_post_migrate_signal(set(all_models), verbosity, interactive, database) diff --git a/django/core/management/commands/loaddata.py b/django/core/management/commands/loaddata.py index 6856e85e45..802226f9d1 100644 --- a/django/core/management/commands/loaddata.py +++ b/django/core/management/commands/loaddata.py @@ -133,7 +133,7 @@ class Command(BaseCommand): for obj in objects: objects_in_fixture += 1 - if router.allow_syncdb(self.using, obj.object.__class__): + if router.allow_migrate(self.using, obj.object.__class__): loaded_objects_in_fixture += 1 self.models.add(obj.object.__class__) try: diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index cf0e40e6c7..17b5a7dfe9 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -149,7 +149,7 @@ class Command(BaseCommand): (app.__name__.split('.')[-2], [ m for m in models.get_models(app, include_auto_created=True) - if router.allow_syncdb(connection.alias, m) + if router.allow_migrate(connection.alias, m) ]) for app in models.get_apps() if app.__name__.split('.')[-2] in apps ] diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 80e66b3ad4..7185644cc3 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -1243,7 +1243,7 @@ class BaseDatabaseIntrospection(object): for model in models.get_models(app): if not model._meta.managed: continue - if not router.allow_syncdb(self.connection.alias, model): + if not router.allow_migrate(self.connection.alias, model): continue tables.add(model._meta.db_table) tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many]) @@ -1263,7 +1263,7 @@ class BaseDatabaseIntrospection(object): all_models = [] for app in models.get_apps(): for model in models.get_models(app): - if router.allow_syncdb(self.connection.alias, model): + if router.allow_migrate(self.connection.alias, model): all_models.append(model) tables = list(map(self.table_name_converter, tables)) return set([ @@ -1284,7 +1284,7 @@ class BaseDatabaseIntrospection(object): continue if model._meta.swapped: continue - if not router.allow_syncdb(self.connection.alias, model): + if not router.allow_migrate(self.connection.alias, model): continue for f in model._meta.local_fields: if isinstance(f, models.AutoField): diff --git a/django/db/utils.py b/django/db/utils.py index 36b89d9acf..c1bce7326b 100644 --- a/django/db/utils.py +++ b/django/db/utils.py @@ -262,10 +262,13 @@ class ConnectionRouter(object): return allow return obj1._state.db == obj2._state.db - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): for router in self.routers: try: - method = router.allow_syncdb + try: + method = router.allow_migrate + except AttributeError: + method = router.allow_syncdb except AttributeError: # If the router doesn't have a method, skip to the next one. pass diff --git a/docs/internals/deprecation.txt b/docs/internals/deprecation.txt index 7fb7ec7cad..25d54d5269 100644 --- a/docs/internals/deprecation.txt +++ b/docs/internals/deprecation.txt @@ -414,12 +414,17 @@ these changes. * ``django.utils.unittest`` will be removed. +* The ``syncdb`` command will be removed. 
+ * ``django.db.models.signals.pre_syncdb`` and ``django.db.models.signals.post_syncdb`` will be removed, and ``django.db.models.signals.pre_migrate`` and ``django.db.models.signals.post_migrate`` will lose their ``create_models`` and ``created_models`` arguments. +* ``allow_syncdb`` on database routers will no longer automatically become + ``allow_migrate``. + 2.0 --- diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index 5bd462b4a8..df2b10d18c 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -52,7 +52,10 @@ but a few of the key features are: :data:`~django.db.models.signals.post_migrate` respectively. The ``create_models``/``created_models`` argument has also been deprecated. -* Routers something something. +* The ``allow_syncdb`` method on database routers is now called ``allow_migrate``, + but still performs the same function. Routers with ``allow_syncdb`` methods + will still work, but that method name is deprecated and you should change + it as soon as possible (nothing more than renaming is required). Admin shortcuts support time zones ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/topics/cache.txt b/docs/topics/cache.txt index 2352770bad..ea23943c81 100644 --- a/docs/topics/cache.txt +++ b/docs/topics/cache.txt @@ -215,8 +215,8 @@ operations to ``cache_slave``, and all write operations to return 'cache_master' return None - def allow_syncdb(self, db, model): - "Only synchronize the cache model on master" + def allow_migrate(self, db, model): + "Only install the cache model on master" if model._meta.app_label in ('django_cache',): return db == 'cache_master' return None diff --git a/docs/topics/db/multi-db.txt b/docs/topics/db/multi-db.txt index ac329cc4fc..6c74fb944d 100644 --- a/docs/topics/db/multi-db.txt +++ b/docs/topics/db/multi-db.txt @@ -155,14 +155,17 @@ A database Router is a class that provides up to four methods: used by foreign key and many to many operations to determine if a relation should be allowed between two objects. -.. method:: allow_syncdb(db, model) +.. method:: allow_migrate(db, model) - Determine if the ``model`` should be synchronized onto the + Determine if the ``model`` should have tables/indexes created in the database with alias ``db``. Return True if the model should be - synchronized, False if it should not be synchronized, or None if + migrated, False if it should not be migrated, or None if the router has no opinion. This method can be used to determine the availability of a model on a given database. + Note that if this returns ``True`` for an app with migrations but + ``False`` for an app those migrations depend on, Django will error. + A router doesn't have to provide *all* these methods -- it may omit one or more of them. If one of the methods is omitted, Django will skip that router when performing the relevant check. @@ -288,7 +291,7 @@ send queries for the ``auth`` app to ``auth_db``:: return True return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): """ Make sure the auth app only appears in the 'auth_db' database. @@ -328,7 +331,7 @@ from:: return True return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): """ All non-auth models end up in this pool. """ @@ -347,7 +350,7 @@ be queried in the order the are listed in the result, decisions concerning the models in ``auth`` are processed before any other decision is made. 
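Spelled out in settings, that ordering is simply the list order (the ``path.to`` module path is a placeholder for wherever the router classes are defined)::

    # AuthRouter is consulted first, so it gets the first say on models
    # in the auth app; MasterSlaveRouter only decides what is left over.
    DATABASE_ROUTERS = ['path.to.AuthRouter', 'path.to.MasterSlaveRouter']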
If the :setting:`DATABASE_ROUTERS` setting listed the two routers in the other order, -``MasterSlaveRouter.allow_syncdb()`` would be processed first. The +``MasterSlaveRouter.allow_migrate()`` would be processed first. The catch-all nature of the MasterSlaveRouter implementation would mean that all models would be available on all databases. diff --git a/tests/cache/tests.py b/tests/cache/tests.py index bccac6b5a8..5cf199794c 100644 --- a/tests/cache/tests.py +++ b/tests/cache/tests.py @@ -895,7 +895,7 @@ class DBCacheRouter(object): if model._meta.app_label == 'django_cache': return 'other' - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): if model._meta.app_label == 'django_cache': return db == 'other' diff --git a/tests/multiple_database/tests.py b/tests/multiple_database/tests.py index 12a6379ca0..e65fa4e19d 100644 --- a/tests/multiple_database/tests.py +++ b/tests/multiple_database/tests.py @@ -933,7 +933,7 @@ class TestRouter(object): def allow_relation(self, obj1, obj2, **hints): return obj1._state.db in ('default', 'other') and obj2._state.db in ('default', 'other') - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): return True class AuthRouter(object): @@ -960,7 +960,7 @@ class AuthRouter(object): return True return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): "Make sure the auth app only appears on the 'other' db" if db == 'other': return model._meta.app_label == 'auth' @@ -1022,30 +1022,30 @@ class RouterTestCase(TestCase): def test_syncdb_selection(self): "Synchronization behavior is predictable" - self.assertTrue(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertTrue(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) - self.assertTrue(router.allow_syncdb('other', User)) - self.assertTrue(router.allow_syncdb('other', Book)) + self.assertTrue(router.allow_migrate('other', User)) + self.assertTrue(router.allow_migrate('other', Book)) # Add the auth router to the chain. # TestRouter is a universal synchronizer, so it should have no effect. 
router.routers = [TestRouter(), AuthRouter()] - self.assertTrue(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertTrue(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) - self.assertTrue(router.allow_syncdb('other', User)) - self.assertTrue(router.allow_syncdb('other', Book)) + self.assertTrue(router.allow_migrate('other', User)) + self.assertTrue(router.allow_migrate('other', Book)) # Now check what happens if the router order is the other way around router.routers = [AuthRouter(), TestRouter()] - self.assertFalse(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertFalse(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) - self.assertTrue(router.allow_syncdb('other', User)) - self.assertFalse(router.allow_syncdb('other', Book)) + self.assertTrue(router.allow_migrate('other', User)) + self.assertFalse(router.allow_migrate('other', Book)) def test_partial_router(self): "A router can choose to implement a subset of methods" @@ -1062,8 +1062,8 @@ class RouterTestCase(TestCase): self.assertTrue(router.allow_relation(dive, dive)) - self.assertTrue(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertTrue(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) router.routers = [WriteRouter(), AuthRouter(), TestRouter()] @@ -1075,8 +1075,8 @@ class RouterTestCase(TestCase): self.assertTrue(router.allow_relation(dive, dive)) - self.assertFalse(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertFalse(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) def test_database_routing(self): @@ -1607,12 +1607,12 @@ class AuthTestCase(TestCase): self.assertEqual(User.objects.using('other').count(), 1) def test_dumpdata(self): - "Check that dumpdata honors allow_syncdb restrictions on the router" + "Check that dumpdata honors allow_migrate restrictions on the router" User.objects.create_user('alice', 'alice@example.com') User.objects.db_manager('default').create_user('bob', 'bob@example.com') # Check that dumping the default database doesn't try to include auth - # because allow_syncdb prohibits auth on default + # because allow_migrate prohibits auth on default new_io = StringIO() management.call_command('dumpdata', 'auth', format='json', database='default', stdout=new_io) command_output = new_io.getvalue().strip() @@ -1625,10 +1625,10 @@ class AuthTestCase(TestCase): self.assertTrue('"email": "alice@example.com"' in command_output) class AntiPetRouter(object): - # A router that only expresses an opinion on syncdb, + # A router that only expresses an opinion on migrate, # passing pets to the 'other' database - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): "Make sure the auth app only appears on the 'other' db" if db == 'other': return model._meta.object_name == 'Pet' @@ -1917,7 +1917,7 @@ class RouterModelArgumentTestCase(TestCase): class SyncOnlyDefaultDatabaseRouter(object): - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): return db == DEFAULT_DB_ALIAS From fddc5957c53bd654312c4a238a8cdcfe5f4ef4cc Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 30 Jul 2013 12:34:31 +0100 Subject: [PATCH 113/161] Implement allow_migrate for migration operations --- 
django/db/migrations/operations/fields.py | 46 ++++++++++------- django/db/migrations/operations/models.py | 61 +++++++++++++++-------- docs/releases/1.7.txt | 12 +++++ docs/topics/db/multi-db.txt | 10 +++- docs/topics/migrations.txt | 23 +++++++++ tests/migrations/test_operations.py | 48 +++++++++++++++++- 6 files changed, 156 insertions(+), 44 deletions(-) diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py index 37e0c063e1..7c619d49ce 100644 --- a/django/db/migrations/operations/fields.py +++ b/django/db/migrations/operations/fields.py @@ -1,3 +1,4 @@ +from django.db import router from .base import Operation @@ -17,11 +18,13 @@ class AddField(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) to_model = to_state.render().get_model(app_label, self.model_name) - schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) def database_backwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) - schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) + if router.allow_migrate(schema_editor.connection.alias, from_model): + schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) def describe(self): return "Add field %s to %s" % (self.name, self.model_name) @@ -45,12 +48,14 @@ class RemoveField(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) - schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) + if router.allow_migrate(schema_editor.connection.alias, from_model): + schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) def database_backwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) to_model = to_state.render().get_model(app_label, self.model_name) - schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) def describe(self): return "Remove field %s from %s" % (self.name, self.model_name) @@ -74,11 +79,12 @@ class AlterField(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) to_model = to_state.render().get_model(app_label, self.model_name) - schema_editor.alter_field( - from_model, - from_model._meta.get_field_by_name(self.name)[0], - to_model._meta.get_field_by_name(self.name)[0], - ) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.name)[0], + to_model._meta.get_field_by_name(self.name)[0], + ) def database_backwards(self, app_label, schema_editor, from_state, to_state): self.database_forwards(app_label, schema_editor, from_state, to_state) @@ -105,20 +111,22 @@ class RenameField(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): 
from_model = from_state.render().get_model(app_label, self.model_name) to_model = to_state.render().get_model(app_label, self.model_name) - schema_editor.alter_field( - from_model, - from_model._meta.get_field_by_name(self.old_name)[0], - to_model._meta.get_field_by_name(self.new_name)[0], - ) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.old_name)[0], + to_model._meta.get_field_by_name(self.new_name)[0], + ) def database_backwards(self, app_label, schema_editor, from_state, to_state): from_model = from_state.render().get_model(app_label, self.model_name) to_model = to_state.render().get_model(app_label, self.model_name) - schema_editor.alter_field( - from_model, - from_model._meta.get_field_by_name(self.new_name)[0], - to_model._meta.get_field_by_name(self.old_name)[0], - ) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.new_name)[0], + to_model._meta.get_field_by_name(self.old_name)[0], + ) def describe(self): return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name) diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py index bf15201194..406efa6ef1 100644 --- a/django/db/migrations/operations/models.py +++ b/django/db/migrations/operations/models.py @@ -1,5 +1,5 @@ from .base import Operation -from django.db import models +from django.db import models, router from django.db.migrations.state import ModelState @@ -17,13 +17,17 @@ class CreateModel(Operation): def state_forwards(self, app_label, state): state.models[app_label, self.name.lower()] = ModelState(app_label, self.name, self.fields, self.options, self.bases) - def database_forwards(self, app, schema_editor, from_state, to_state): + def database_forwards(self, app_label, schema_editor, from_state, to_state): app_cache = to_state.render() - schema_editor.create_model(app_cache.get_model(app, self.name)) + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.create_model(model) - def database_backwards(self, app, schema_editor, from_state, to_state): + def database_backwards(self, app_label, schema_editor, from_state, to_state): app_cache = from_state.render() - schema_editor.delete_model(app_cache.get_model(app, self.name)) + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.delete_model(model) def describe(self): return "Create model %s" % (self.name, ) @@ -42,11 +46,15 @@ class DeleteModel(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): app_cache = from_state.render() - schema_editor.delete_model(app_cache.get_model(app_label, self.name)) + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.delete_model(model) def database_backwards(self, app_label, schema_editor, from_state, to_state): app_cache = to_state.render() - schema_editor.create_model(app_cache.get_model(app_label, self.name)) + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.create_model(model) def describe(self): return "Delete model %s" % (self.name, ) @@ -67,11 +75,14 @@ class AlterModelTable(Operation): def 
database_forwards(self, app_label, schema_editor, from_state, to_state): old_app_cache = from_state.render() new_app_cache = to_state.render() - schema_editor.alter_db_table( - new_app_cache.get_model(app_label, self.name), - old_app_cache.get_model(app_label, self.name)._meta.db_table, - new_app_cache.get_model(app_label, self.name)._meta.db_table, - ) + old_model = old_app_cache.get_model(app_label, self.name) + new_model = new_app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, new_model): + schema_editor.alter_db_table( + new_model, + old_model._meta.db_table, + new_model._meta.db_table, + ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) @@ -97,11 +108,14 @@ class AlterUniqueTogether(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): old_app_cache = from_state.render() new_app_cache = to_state.render() - schema_editor.alter_unique_together( - new_app_cache.get_model(app_label, self.name), - getattr(old_app_cache.get_model(app_label, self.name)._meta, "unique_together", set()), - getattr(new_app_cache.get_model(app_label, self.name)._meta, "unique_together", set()), - ) + old_model = old_app_cache.get_model(app_label, self.name) + new_model = new_app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, new_model): + schema_editor.alter_unique_together( + new_model, + getattr(old_model._meta, "unique_together", set()), + getattr(new_model._meta, "unique_together", set()), + ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) @@ -127,11 +141,14 @@ class AlterIndexTogether(Operation): def database_forwards(self, app_label, schema_editor, from_state, to_state): old_app_cache = from_state.render() new_app_cache = to_state.render() - schema_editor.alter_index_together( - new_app_cache.get_model(app_label, self.name), - getattr(old_app_cache.get_model(app_label, self.name)._meta, "index_together", set()), - getattr(new_app_cache.get_model(app_label, self.name)._meta, "index_together", set()), - ) + old_model = old_app_cache.get_model(app_label, self.name) + new_model = new_app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, new_model): + schema_editor.alter_index_together( + new_model, + getattr(old_model._meta, "index_together", set()), + getattr(new_model._meta, "index_together", set()), + ) def database_backwards(self, app_label, schema_editor, from_state, to_state): return self.database_forwards(app_label, schema_editor, from_state, to_state) diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index df2b10d18c..a617c90b34 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -110,6 +110,18 @@ Backwards incompatible changes in 1.7 deprecation timeline for a given feature, its removal may appear as a backwards incompatible change. +allow_syncdb/allow_migrate +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +While Django will still look at ``allow_syncdb`` methods even though they +should be renamed to ``allow_migrate``, there is a subtle difference in which +models get passed to these methods. + +For apps with migrations, ``allow_migrate`` will now get passed +:ref:`historical models `, which are special versioned models +without custom attributes, methods or managers. 
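For example, an ``allow_migrate`` method written along these lines keeps working when it receives a historical model, because it only inspects ``_meta`` (the router class below is illustrative, not part of the patch)::

    class AppLabelRouter(object):
        """Route everything in 'myapp' to the 'myapp_db' alias."""

        def allow_migrate(self, db, model):
            # Only _meta is consulted here, so a historical model is
            # harmless; calling a custom manager or model method would
            # not be, since historical models don't carry them.
            if model._meta.app_label == 'myapp':
                return db == 'myapp_db'
            return None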
Make sure your ``allow_migrate`` +methods are only referring to fields or other items in ``model._meta``. + Miscellaneous ~~~~~~~~~~~~~ diff --git a/docs/topics/db/multi-db.txt b/docs/topics/db/multi-db.txt index 6c74fb944d..6e19844b5c 100644 --- a/docs/topics/db/multi-db.txt +++ b/docs/topics/db/multi-db.txt @@ -163,8 +163,14 @@ A database Router is a class that provides up to four methods: the router has no opinion. This method can be used to determine the availability of a model on a given database. - Note that if this returns ``True`` for an app with migrations but - ``False`` for an app those migrations depend on, Django will error. + Note that migrations will just silently not perform any operations + on a model for which this returns ``False``. This may result in broken + ForeignKeys, extra tables or missing tables if you change it once you + have applied some migrations. + + The value passed for ``model`` may be a + :ref:`historical model `, and thus not have any + custom attributes, methods or managers. You should only rely on ``_meta``. A router doesn't have to provide *all* these methods -- it may omit one or more of them. If one of the methods is omitted, Django will skip diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt index f80cbf81fd..5f7def7107 100644 --- a/docs/topics/migrations.txt +++ b/docs/topics/migrations.txt @@ -272,3 +272,26 @@ Note that this only works given two things: * You have not manually edited your database - Django won't be able to detect that your database doesn't match your models, you'll just get errors when migrations try and modify those tables. + + +.. historical-models: + +Historical models +----------------- + +When you run migrations, Django is working from historical versions of +your models stored in the migration files. If you write Python code +using the ``django.db.migrations.RunPython`` operation, or if you have +``allow_migrate`` methods on your database routers, you will be exposed +to these versions of your models. + +Because it's impossible to serialize arbitrary Python code, these historical +models will not have any custom methods or managers that you have defined. +They will, however, have the same fields, relationships and ``Meta`` options +(also versioned, so they may be different from your current ones). + +In addition, the base classes of the model are just stored as pointers, +so you must always keep base classes around for as long as there is a migration +that contains a reference to them. On the plus side, methods and managers +from these base classes inherit normally, so if you absolutely need access +to these you can opt to move them into a superclass. diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index ad909f7fdd..33b870a335 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -1,4 +1,4 @@ -from django.db import connection, models, migrations +from django.db import connection, models, migrations, router from django.db.transaction import atomic from django.db.utils import IntegrityError from django.db.migrations.state import ProjectState @@ -271,3 +271,49 @@ class OperationTests(MigrationTestBase): with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) + + +class MigrateNothingRouter(object): + """ + A router that sends all writes to the other database. 
+ """ + def allow_migrate(self, db, model): + return False + + +class MultiDBOperationTests(MigrationTestBase): + multi_db = True + + def setUp(self): + # Make the 'other' database appear to be a slave of the 'default' + self.old_routers = router.routers + router.routers = [MigrateNothingRouter()] + + def tearDown(self): + # Restore the 'other' database as an independent database + router.routers = self.old_routers + + def test_create_model(self): + """ + Tests that CreateModel honours multi-db settings. + """ + operation = migrations.CreateModel( + "Pony", + [ + ("id", models.AutoField(primary_key=True)), + ("pink", models.IntegerField(default=1)), + ], + ) + # Test the state alteration + project_state = ProjectState() + new_state = project_state.clone() + operation.state_forwards("test_crmo", new_state) + # Test the database alteration + self.assertTableNotExists("test_crmo_pony") + with connection.schema_editor() as editor: + operation.database_forwards("test_crmo", editor, project_state, new_state) + self.assertTableNotExists("test_crmo_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_crmo", editor, new_state, project_state) + self.assertTableNotExists("test_crmo_pony") From 1d1cfd0bd8016358719a1e73117c811f02ca8c02 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 14:31:24 +0100 Subject: [PATCH 114/161] Document new field API in release notes --- docs/releases/1.7.txt | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index 23e4e3e64a..5d187967da 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -57,6 +57,33 @@ but a few of the key features are: will still work, but that method name is deprecated and you should change it as soon as possible (nothing more than renaming is required). +New method on Field subclasses +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To help power both schema migrations and composite keys, the Field API now +has a new required method: ``deconstruct()``. + +This method takes no arguments, and returns a tuple of four items: + +* ``name``: The field's attribute name on its parent model, or None if it is not part of a model +* ``path``: A dotted, Python path to the class of this field, including the class name. +* ``args``: Positional arguments, as a list +* ``kwargs``: Keyword arguments, as a dict + +These four values allow any field to be serialized into a file, as well as +allowing the field to be copied safely, both essential parts of these new features. + +This change should not affect you unless you write custom Field subclasses; +if you do, you may need to reimplement the ``deconstruct()`` method if your +subclass changes the method signature of ``__init__`` in any way. If your +field just inherits from a built-in Django field and doesn't override ``__init__``, +no changes are necessary. + +If you do need to override ``deconstruct()``, a good place to start is the +built-in Django fields (``django/db/models/fields/__init__.py``) as several +fields, including ``DecimalField`` and ``DateField``, override it and show how +to call the method on the superclass and simply add or remove extra arguments. 
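A sketch of that pattern for a hypothetical custom field (the ``UpperCharField`` below is illustrative, not something added by this patch) might look like::

    from django.db import models

    class UpperCharField(models.CharField):
        "A CharField that accepts one extra keyword argument."

        def __init__(self, *args, **kwargs):
            self.force_upper = kwargs.pop('force_upper', False)
            super(UpperCharField, self).__init__(*args, **kwargs)

        def deconstruct(self):
            # Start from the superclass's (name, path, args, kwargs)
            # tuple, then add the extra keyword argument so the field
            # can be recreated from its serialized form.
            name, path, args, kwargs = super(UpperCharField, self).deconstruct()
            if self.force_upper:
                kwargs['force_upper'] = True
            return name, path, args, kwargs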
+ Calling custom ``QuerySet`` methods from the ``Manager`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 679627660fef3c7a9f7be743a168930e4a0e58ae Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:34:35 +0100 Subject: [PATCH 115/161] Remove useless override of self.style --- django/core/management/commands/migrate.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 699b22edaa..085b6b2cc7 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -6,7 +6,7 @@ import traceback from django.conf import settings from django.core.management import call_command from django.core.management.base import BaseCommand, CommandError -from django.core.management.color import color_style, no_style +from django.core.management.color import no_style from django.core.management.sql import custom_sql_for_model, emit_post_migrate_signal, emit_pre_migrate_signal from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.db.migrations.executor import MigrationExecutor @@ -38,8 +38,6 @@ class Command(BaseCommand): self.load_initial_data = options.get('load_initial_data') self.test_database = options.get('test_database', False) - self.style = color_style() - # Import the 'management' module within each installed app, to register # dispatcher events. for app_name in settings.INSTALLED_APPS: From 9f736294205486443f047e93f8ca53b5b00ac1d3 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:36:16 +0100 Subject: [PATCH 116/161] utils.importlib is deprecated --- django/core/management/commands/flush.py | 1 - django/core/management/commands/migrate.py | 2 +- django/db/migrations/loader.py | 2 +- django/db/migrations/writer.py | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/django/core/management/commands/flush.py b/django/core/management/commands/flush.py index 5e951f97b4..ea0952cb53 100644 --- a/django/core/management/commands/flush.py +++ b/django/core/management/commands/flush.py @@ -8,7 +8,6 @@ from django.core.management import call_command from django.core.management.base import NoArgsCommand, CommandError from django.core.management.color import no_style from django.core.management.sql import sql_flush, emit_post_migrate_signal -from django.utils.importlib import import_module from django.utils.six.moves import input from django.utils import six diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py index 085b6b2cc7..dbec389bed 100644 --- a/django/core/management/commands/migrate.py +++ b/django/core/management/commands/migrate.py @@ -1,5 +1,6 @@ from optparse import make_option from collections import OrderedDict +from importlib import import_module import itertools import traceback @@ -11,7 +12,6 @@ from django.core.management.sql import custom_sql_for_model, emit_post_migrate_s from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.db.migrations.executor import MigrationExecutor from django.db.migrations.loader import AmbiguityError -from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py index 441480b194..6ad6959787 100644 --- a/django/db/migrations/loader.py +++ b/django/db/migrations/loader.py @@ -1,5 +1,5 @@ import os -from 
django.utils.importlib import import_module +from importlib import import_module from django.utils.functional import cached_property from django.db.models.loading import cache from django.db.migrations.recorder import MigrationRecorder diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py index 4cc84dcca7..753aeacd04 100644 --- a/django/db/migrations/writer.py +++ b/django/db/migrations/writer.py @@ -2,8 +2,8 @@ from __future__ import unicode_literals import datetime import types import os +from importlib import import_module from django.utils import six -from django.utils.importlib import import_module from django.db import models from django.db.models.loading import cache from django.db.migrations.loader import MigrationLoader From b3cec920a2a7d547944823c539a7ebd99b3af23a Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:39:07 +0100 Subject: [PATCH 117/161] Remove other color_style override --- django/core/management/commands/makemigrations.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py index 0f04b2fc1f..e1a6d5e319 100644 --- a/django/core/management/commands/makemigrations.py +++ b/django/core/management/commands/makemigrations.py @@ -3,7 +3,6 @@ import os from optparse import make_option from django.core.management.base import BaseCommand -from django.core.management.color import color_style from django.core.exceptions import ImproperlyConfigured from django.db import connections from django.db.migrations.loader import MigrationLoader @@ -26,7 +25,6 @@ class Command(BaseCommand): self.verbosity = int(options.get('verbosity')) self.interactive = options.get('interactive') - self.style = color_style() # Make sure the app they asked for exists app_labels = set(app_labels) From fb16ee5a31256c1e230a697044c3ded2dbdec3e8 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:41:39 +0100 Subject: [PATCH 118/161] Remove commented-out print --- django/db/backends/schema.py | 1 - 1 file changed, 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index b2e6c29e25..9bb6bd2428 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -88,7 +88,6 @@ class BaseDatabaseSchemaEditor(object): cursor = self.connection.cursor() # Log the command we're running, then run it logger.debug("%s; (params %r)" % (sql, params)) - #print("%s; (params %r)" % (sql, params)) cursor.execute(sql, params) def quote_name(self, name): From ae96ad872f0bd16c0997991c0a7109e0139f9373 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:42:56 +0100 Subject: [PATCH 119/161] Remove pointless fetch_results param --- django/db/backends/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 9bb6bd2428..191e72c53a 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -80,7 +80,7 @@ class BaseDatabaseSchemaEditor(object): # Core utility functions - def execute(self, sql, params=[], fetch_results=False): + def execute(self, sql, params=[]): """ Executes the given SQL statement, with optional parameters. 
""" From 9b4a789eefc4b0c081056f3cb97e07895b5ffc00 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:45:19 +0100 Subject: [PATCH 120/161] Add [y/n] to autodetector questions and allow for default value --- django/db/migrations/autodetector.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index e737cb8af9..259ee0bd5b 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -360,8 +360,10 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): def __init__(self, specified_apps=set()): self.specified_apps = specified_apps - def _boolean_input(self, question): + def _boolean_input(self, question, default=None): result = input("%s " % question) + if not result and default is not None: + return default while len(result) < 1 or result[0].lower() not in "yn": result = input("Please answer yes or no: ") return result[0].lower() == "y" @@ -390,7 +392,7 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): if app_label in self.specified_apps: return True # Now ask - return self._boolean_input("Do you want to enable migrations for app '%s'?" % app_label) + return self._boolean_input("Do you want to enable migrations for app '%s'? [y/N]" % app_label, False) def ask_not_null_addition(self, field_name, model_name): "Adding a NOT NULL field to a model" @@ -423,4 +425,4 @@ class InteractiveMigrationQuestioner(MigrationQuestioner): def ask_rename(self, model_name, old_name, new_name, field_instance): "Was this field really renamed?" - return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)?" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__)) + return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)? [y/N]" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False) From f093646bfc2b459b4e37bae8fe3f2b52e4e58ff3 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 9 Aug 2013 17:47:13 +0100 Subject: [PATCH 121/161] Remove pointless comment. --- django/db/backends/mysql/introspection.py | 1 - 1 file changed, 1 deletion(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index e2d382b895..0cad8d9b9d 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -182,5 +182,4 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # Convert the sorted sets to lists for constraint in constraints.values(): constraint['columns'] = list(constraint['columns']) - # Return return constraints From 3c3d308ea3e017868b6530df144dd1824471b6f2 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 10 Aug 2013 19:48:46 +0100 Subject: [PATCH 122/161] Back SortedSet onto OrderedDict, rename it, and a few typo fixes --- django/utils/datastructures.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/django/utils/datastructures.py b/django/utils/datastructures.py index 706c09593b..d61b569112 100644 --- a/django/utils/datastructures.py +++ b/django/utils/datastructures.py @@ -1,5 +1,6 @@ import copy import warnings +from collections import OrderedDict from django.utils import six class MergeDict(object): @@ -239,11 +240,11 @@ class SortedDict(dict): class SortedSet(object): """ A set which keeps the ordering of the inserted items. - Currently backs onto SortedDict. + Currently backs onto OrderedDict. 
""" def __init__(self, iterable=None): - self.dict = SortedDict(((x, None) for x in iterable) if iterable else []) + self.dict = OrderedDict(((x, None) for x in iterable) if iterable else []) def add(self, item): self.dict[item] = None From 7970d97a708f0d2f4fbd654eaf785338ab04cc1e Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 10 Aug 2013 20:00:12 +0100 Subject: [PATCH 123/161] Docs tweaks (thanks timgraham) --- django/db/backends/mysql/introspection.py | 6 +++--- django/db/migrations/autodetector.py | 2 +- django/db/migrations/graph.py | 8 ++++---- django/test/runner.py | 2 +- django/utils/datastructures.py | 2 +- docs/ref/django-admin.txt | 14 +++++++++----- docs/ref/signals.txt | 5 ----- docs/releases/1.7.txt | 18 +++++++++++++----- docs/topics/migrations.txt | 12 ++++++------ 9 files changed, 38 insertions(+), 31 deletions(-) diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py index 0cad8d9b9d..d7a29057de 100644 --- a/django/db/backends/mysql/introspection.py +++ b/django/db/backends/mysql/introspection.py @@ -1,6 +1,6 @@ import re from .base import FIELD_TYPE -from django.utils.datastructures import SortedSet +from django.utils.datastructures import OrderedSet from django.db.backends import BaseDatabaseIntrospection, FieldInfo from django.utils.encoding import force_text @@ -142,7 +142,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for constraint, column, ref_table, ref_column in cursor.fetchall(): if constraint not in constraints: constraints[constraint] = { - 'columns': SortedSet(), + 'columns': OrderedSet(), 'primary_key': False, 'unique': False, 'index': False, @@ -170,7 +170,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]: if index not in constraints: constraints[index] = { - 'columns': SortedSet(), + 'columns': OrderedSet(), 'primary_key': False, 'unique': False, 'index': True, diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 259ee0bd5b..ff5957fe7e 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -29,7 +29,7 @@ class MigrationAutodetector(object): """ Returns a dict of migration plans which will achieve the change from from_state to to_state. The dict has app labels - as kays and a list of migrations as values. + as keys and a list of migrations as values. The resulting migrations aren't specially named, but the names do matter for dependencies inside the set. diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py index 3b6495689a..fcd83913c8 100644 --- a/django/db/migrations/graph.py +++ b/django/db/migrations/graph.py @@ -1,4 +1,4 @@ -from django.utils.datastructures import SortedSet +from django.utils.datastructures import OrderedSet from django.db.migrations.state import ProjectState @@ -13,7 +13,7 @@ class MigrationGraph(object): branch merges can be detected and resolved. Migrations files can be marked as replacing another set of migrations - - this is to support the "squash" feature. The graph handler isn't resposible + this is to support the "squash" feature. 
The graph handler isn't responsible for these; instead, the code to load them in here should examine the migration files and if the replaced migrations are all either unapplied or not present, it should ignore the replaced ones, load in just the @@ -109,8 +109,8 @@ class MigrationGraph(object): for n in children: results = _dfs(n, get_children, path) + results path.pop() - # Use SortedSet to ensure only one instance of each result - results = list(SortedSet(results)) + # Use OrderedSet to ensure only one instance of each result + results = list(OrderedSet(results)) # Populate DP cache cache[(start, get_children)] = results # Done! diff --git a/django/test/runner.py b/django/test/runner.py index c7881ada63..84fe2499f1 100644 --- a/django/test/runner.py +++ b/django/test/runner.py @@ -267,7 +267,7 @@ def setup_databases(verbosity, interactive, **kwargs): # Second pass -- actually create the databases. old_names = [] mirrors = [] - + for signature, (db_name, aliases) in dependency_ordered( test_databases.items(), dependencies): test_db_name = None diff --git a/django/utils/datastructures.py b/django/utils/datastructures.py index d61b569112..a0ee3e06ef 100644 --- a/django/utils/datastructures.py +++ b/django/utils/datastructures.py @@ -237,7 +237,7 @@ class SortedDict(dict): super(SortedDict, self).clear() self.keyOrder = [] -class SortedSet(object): +class OrderedSet(object): """ A set which keeps the ordering of the inserted items. Currently backs onto OrderedDict. diff --git a/docs/ref/django-admin.txt b/docs/ref/django-admin.txt index ab83e2cf33..27b68e4f1c 100644 --- a/docs/ref/django-admin.txt +++ b/docs/ref/django-admin.txt @@ -575,20 +575,22 @@ makemigrations [] .. django-admin:: makemigrations +.. versionadded:: 1.7 + Creates new migrations based on the changes detected to your models. Migrations, their relationship with apps and more are covered in depth in :doc:`the migrations documentation`. Providing one or more app names as arguments will limit the migrations created -to the app specified and any dependencies needed (the table at the other end -of a ForeignKey, for example) +to the app(s) specified and any dependencies needed (the table at the other end +of a ``ForeignKey``, for example). .. django-admin-option:: --empty The ``--empty`` option will cause ``makemigrations`` to output an empty migration for the specified apps, for manual editing. This option is only for advanced users and should not be used unless you are familiar with -the migration format, migration operations and the dependencies between +the migration format, migration operations, and the dependencies between your migrations. migrate [ []] @@ -596,11 +598,13 @@ migrate [ []] .. django-admin:: migrate -Synchronises the database state with the current set of models and migrations. +.. versionadded:: 1.7 + +Synchronizes the database state with the current set of models and migrations. Migrations, their relationship with apps and more are covered in depth in :doc:`the migrations documentation`. 
-The behaviour of this command changes depending on the arguments provided: +The behavior of this command changes depending on the arguments provided: * No arguments: All migrated apps have all of their migrations run, and all unmigrated apps are synchronized with the database, diff --git a/docs/ref/signals.txt b/docs/ref/signals.txt index b988371aa7..ca1668000c 100644 --- a/docs/ref/signals.txt +++ b/docs/ref/signals.txt @@ -403,7 +403,6 @@ Arguments sent with this signal: ``db`` The alias of database on which a command will operate. - pre_syncdb ---------- @@ -421,7 +420,6 @@ is present, for backwards-compatability this signal has an extra argument it sen A list of the model classes from any app which :djadmin:`migrate` is going to create, **only if the app has no migrations**. - post_migrate ------------ @@ -479,7 +477,6 @@ For example, ``yourapp/management/__init__.py`` could be written like:: post_migrate.connect(my_callback, sender=yourapp.models) - post_syncdb ----------- @@ -497,8 +494,6 @@ is present, for backwards-compatability this signal has an extra argument it sen A list of the model classes from any app which :djadmin:`migrate` has created, **only if the app has no migrations**. - - Request/response signals ======================== diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index c20a278496..3b1fe0c40e 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -33,15 +33,16 @@ What's new in Django 1.7 Schema migrations ~~~~~~~~~~~~~~~~~ -Django now has built-in support for schema migrations, which allows models -to be updated, changed and deleted and the changes stored into migration files -and then run on any deployed database. +Django now has built-in support for schema migrations. It allows models +to be updated, changed, and deleted by creating migration files that represent +the model changes and which can be run on any development, staging or production +database. Migrations are covered in :doc:`their own documentation`, but a few of the key features are: * ``syncdb`` has been deprecated and replaced by ``migrate``. Don't worry - - calls to ``syncdb`` will still work as before. + calls to ``syncdb`` will still work as before. * A new ``makemigrations`` command provides an easy way to autodetect changes to your models and make migrations for them. @@ -60,7 +61,7 @@ but a few of the key features are: New method on Field subclasses ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To help power both schema migrations and composite keys, the Field API now +To help power both schema migrations and composite keys, the :class:`~django.db.models.Field` API now has a new required method: ``deconstruct()``. This method takes no arguments, and returns a tuple of four items: @@ -278,3 +279,10 @@ work until Django 1.9. it will go through a regular deprecation path. This attribute was mostly used by methods that bypassed ``ModelAdmin.get_fieldsets()`` but this was considered a bug and has been addressed. + +``syncdb`` +~~~~~~~~~~ + +The ``syncdb`` command has been deprecated in favour of the new ``migrate`` +command. ``migrate`` takes the same arguments as ``syncdb`` used to plus a few +more, so it's safe to just change the name you're calling and nothing else. 
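The ``deconstruct()`` method described in the 1.7 release notes above returns a tuple of four
items; a minimal sketch of what that contract might look like in practice (the item names and
the example values are an assumption for illustration, not taken from these patches)::

    from django.db import models

    field = models.CharField(max_length=100, null=True)
    field.set_attributes_from_name("title")

    # Assumed shape: (field name, import path, positional args, keyword args)
    name, path, args, kwargs = field.deconstruct()
    # Roughly: ("title", "django.db.models.CharField", [], {"max_length": 100, "null": True})
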
diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt index 5f7def7107..5862c5defe 100644 --- a/docs/topics/migrations.txt +++ b/docs/topics/migrations.txt @@ -49,7 +49,7 @@ The migration files for each app live in a "migrations" directory inside of that app, and are designed to be committed to, and distributed as part of, its codebase. You should be making them once on your development machine and then running the same migrations on your colleagues' machines, your -staging machines and eventually your production machines. +staging machines, and eventually your production machines. Migrations will run the same way every time and produce consistent results, meaning that what you see in development and staging is exactly what will @@ -60,7 +60,7 @@ Backend Support Migrations are supported on all backends that Django ships with, as well as any third-party backends if they have programmed in support for schema -alteration (done via the SchemaEditor class). +alteration (done via the ``SchemaEditor`` class). However, some databases are more capable than others when it comes to schema migrations; some of the caveats are covered below. @@ -169,7 +169,7 @@ app - in the file, so it's possible to detect when there's two new migrations for the same app that aren't ordered. When this happens, Django will prompt you and give you some options. If it -thinks it's safe enough, it will offer to automatically linearise the two +thinks it's safe enough, it will offer to automatically linearize the two migrations for you. If not, you'll have to go in and modify the migrations yourself - don't worry, this isn't difficult, and is explained more in :ref:`migration-files` below. @@ -184,8 +184,8 @@ you add a ForeignKey in your ``books`` app to your ``authors`` app - the resulting migration will contain a dependency on a migration in ``authors``. This means that when you run the migrations, the ``authors`` migration runs -first and creates the table the ForeignKey references, and then the migration -that makes the ForeignKey column runs afterwards and creates the constraint. +first and creates the table the ``ForeignKey`` references, and then the migration +that makes the ``ForeignKey`` column runs afterwards and creates the constraint. If this didn't happen, the migration would try to create the ForeignKey column without the table it's referencing existing and your database would throw an error. @@ -271,7 +271,7 @@ Note that this only works given two things: * You have not manually edited your database - Django won't be able to detect that your database doesn't match your models, you'll just get errors when - migrations try and modify those tables. + migrations try to modify those tables. .. historical-models: From 3f1f91f155c89d0be9f920bb88a2c32f652fa79b Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 10 Aug 2013 20:02:55 +0100 Subject: [PATCH 124/161] Print all bad apps passed to makemigrations, not just the first one. 
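The commit below replaces an early exit on the first unknown app label with a collect-then-report
loop, so every bad label is printed before the command fails. A self-contained sketch of that
pattern (the app names and labels here are hypothetical)::

    import sys

    INSTALLED = {"polls", "blog"}  # hypothetical stand-in for INSTALLED_APPS

    def check_app_labels(app_labels):
        # Collect every unknown label first, then report them all and fail once.
        bad_app_labels = set(app_labels) - INSTALLED
        if bad_app_labels:
            for app_label in sorted(bad_app_labels):
                sys.stderr.write("App '%s' could not be found. Is it in INSTALLED_APPS?\n" % app_label)
            sys.exit(2)

    check_app_labels(["polls", "shop"])  # reports 'shop', then exits with status 2
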
--- django/core/management/commands/makemigrations.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py index e1a6d5e319..1296a14cdd 100644 --- a/django/core/management/commands/makemigrations.py +++ b/django/core/management/commands/makemigrations.py @@ -28,12 +28,16 @@ class Command(BaseCommand): # Make sure the app they asked for exists app_labels = set(app_labels) + bad_app_labels = set() for app_label in app_labels: try: cache.get_app(app_label) except ImproperlyConfigured: - self.stderr.write("The app you specified - '%s' - could not be found. Is it in INSTALLED_APPS?" % app_label) - sys.exit(2) + bad_app_labels.add(app_label) + if bad_app_labels: + for app_label in bad_app_labels: + self.stderr.write("App '%s' could not be found. Is it in INSTALLED_APPS?" % app_label) + sys.exit(2) # Load the current graph state loader = MigrationLoader(connections["default"]) From d5a7a3d6a80684caa8f5c3c7d5b454e3da1799a1 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 10 Aug 2013 21:04:59 +0100 Subject: [PATCH 125/161] Add clarifying comment --- django/core/management/commands/makemigrations.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py index 1296a14cdd..cd30311820 100644 --- a/django/core/management/commands/makemigrations.py +++ b/django/core/management/commands/makemigrations.py @@ -39,7 +39,8 @@ class Command(BaseCommand): self.stderr.write("App '%s' could not be found. Is it in INSTALLED_APPS?" % app_label) sys.exit(2) - # Load the current graph state + # Load the current graph state. Takes a connection, but it's not used + # (makemigrations doesn't look at the database state). loader = MigrationLoader(connections["default"]) # Detect changes From 77028194415cb03b1ff2a85a86d806a0366bccff Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sat, 10 Aug 2013 23:58:12 +0100 Subject: [PATCH 126/161] Update get_constraints with better comments --- django/db/backends/__init__.py | 17 ++++++++++++++--- .../postgresql_psycopg2/introspection.py | 4 ++-- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index ec6081678b..771b9af59c 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -1328,9 +1328,20 @@ class BaseDatabaseIntrospection(object): def get_constraints(self, cursor, table_name): """ - Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}} - - Both single- and multi-column constraints are introspected. + Retrieves any constraints or keys (unique, pk, fk, check, index) + across one or more columns. + + Returns a dict mapping constraint names to their attributes, + where attributes is a dict with keys: + * columns: List of columns this covers + * primary_key: True if primary key, False otherwise + * unique: True if this is a unique constraint, False otherwise + * foreign_key: (table, column) of target, or None + * check: True if check constraint, False otherwise + * index: True if index, False otherwise. + + Some backends may return special constraint names that don't exist + if they don't name constraints of a certain type (e.g. 
SQLite) """ raise NotImplementedError diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index a816bb34d8..3e2574b0c1 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -169,7 +169,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): "columns": [], "primary_key": False, "unique": False, - "foreign_key": False, + "foreign_key": None, "check": True, "index": False, } @@ -197,7 +197,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): "columns": list(columns), "primary_key": primary, "unique": unique, - "foreign_key": False, + "foreign_key": None, "check": False, "index": True, } From 21be9fef7b14edd75c6ee402ec2bb28bf9b6ce59 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 11 Aug 2013 00:01:30 +0100 Subject: [PATCH 127/161] Stop being overcautious about Field.rel --- django/db/backends/schema.py | 4 ++-- django/db/migrations/autodetector.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 191e72c53a..1e1376a4e2 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -437,7 +437,7 @@ class BaseDatabaseSchemaEditor(object): } ) # Drop any FK constraints, we'll remake them later - if getattr(old_field, "rel"): + if old_field.rel: fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) if strict and len(fk_names) != 1: raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( @@ -584,7 +584,7 @@ class BaseDatabaseSchemaEditor(object): } ) # Does it have a foreign key? - if getattr(new_field, "rel"): + if new_field.rel: self.execute( self.sql_create_fk % { "table": self.quote_name(model._meta.db_table), diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index ff5957fe7e..2771bcbc68 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -46,8 +46,8 @@ class MigrationAutodetector(object): # Are there any relationships out from this model? if so, punt it to the next phase. 
related_fields = [] for field in new_app_cache.get_model(app_label, model_name)._meta.fields: - if hasattr(field, "rel"): - if hasattr(field.rel, "to"): + if field.rel: + if field.rel.to: related_fields.append((field.name, field.rel.to._meta.app_label.lower(), field.rel.to._meta.object_name.lower())) if hasattr(field.rel, "through") and not field.rel.though._meta.auto_created: related_fields.append((field.name, field.rel.through._meta.app_label.lower(), field.rel.through._meta.object_name.lower())) From ae19315b4d8a19eda07ea8f313c485ca0a7875d0 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 11 Aug 2013 14:23:31 +0100 Subject: [PATCH 128/161] Support index_together during model creation --- django/db/backends/schema.py | 9 +++++++++ django/db/backends/sqlite3/introspection.py | 5 ++++- tests/schema/models.py | 9 +++++++++ tests/schema/tests.py | 19 ++++++++++++++++++- 4 files changed, 40 insertions(+), 2 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 1e1376a4e2..d868bf79b5 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -210,6 +210,15 @@ class BaseDatabaseSchemaEditor(object): "definition": ", ".join(column_sqls) } self.execute(sql, params) + # Add any index_togethers + for fields in model._meta.index_together: + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + self.execute(self.sql_create_index % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, columns, suffix="_idx"), + "columns": ", ".join(self.quote_name(column) for column in columns), + "extra": "", + }) # Make M2M tables for field in model._meta.local_many_to_many: self.create_model(field.rel.through) diff --git a/django/db/backends/sqlite3/introspection.py b/django/db/backends/sqlite3/introspection.py index 2e674bc05b..92777dd910 100644 --- a/django/db/backends/sqlite3/introspection.py +++ b/django/db/backends/sqlite3/introspection.py @@ -168,7 +168,10 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): """ # Don't use PRAGMA because that causes issues with some transactions cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"]) - results = cursor.fetchone()[0].strip() + row = cursor.fetchone() + if row is None: + raise ValueError("Table %s does not exist" % table_name) + results = row[0].strip() results = results[results.index('(') + 1:results.rindex(')')] for field_desc in results.split(','): field_desc = field_desc.strip() diff --git a/tests/schema/models.py b/tests/schema/models.py index a160b9aaa8..69cf06f3c4 100644 --- a/tests/schema/models.py +++ b/tests/schema/models.py @@ -62,6 +62,15 @@ class Tag(models.Model): app_cache = new_app_cache +class TagIndexed(models.Model): + title = models.CharField(max_length=255) + slug = models.SlugField(unique=True) + + class Meta: + app_cache = new_app_cache + index_together = [["slug", "title"]] + + class TagUniqueRename(models.Model): title = models.CharField(max_length=255) slug2 = models.SlugField(unique=True) diff --git a/tests/schema/tests.py b/tests/schema/tests.py index 3a82cd15ff..f6e45599b8 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -6,7 +6,7 @@ from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey from django.db.transaction import atomic -from .models import Author, AuthorWithM2M, 
Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest +from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagUniqueRename, UniqueTest class SchemaTests(TransactionTestCase): @@ -503,6 +503,23 @@ class SchemaTests(TransactionTestCase): ), ) + def test_create_index_together(self): + """ + Tests creating models with index_together already defined + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(TagIndexed) + # Ensure there is an index + self.assertEqual( + True, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tagindexed").values() + if c['columns'] == ["slug", "title"] + ), + ) + def test_db_table(self): """ Tests renaming of the table From d985fd7a189cdbfa2093c1225bdeb7eefaeb6978 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 11 Aug 2013 14:27:42 +0100 Subject: [PATCH 129/161] Fix tablespace command --- django/db/backends/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index d868bf79b5..0a283b7fd8 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -312,7 +312,7 @@ class BaseDatabaseSchemaEditor(object): """ Moves a model's table between tablespaces """ - self.execute(self.sql_rename_table % { + self.execute(self.sql_retablespace_table % { "table": self.quote_name(model._meta.db_table), "old_tablespace": self.quote_name(old_db_tablespace), "new_tablespace": self.quote_name(new_db_tablespace), From b4c493ecd3b8d0d55347949984561762ad2ef1dc Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 11 Aug 2013 14:28:55 +0100 Subject: [PATCH 130/161] Remove keep_default from add_field --- django/db/backends/schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 0a283b7fd8..19a737883f 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -318,7 +318,7 @@ class BaseDatabaseSchemaEditor(object): "new_tablespace": self.quote_name(new_db_tablespace), }) - def add_field(self, model, field, keep_default=False): + def add_field(self, model, field): """ Creates a field on a model. Usually involves adding a column, but may involve adding a @@ -345,7 +345,7 @@ class BaseDatabaseSchemaEditor(object): self.execute(sql, params) # Drop the default if we need to # (Django usually does not use in-database defaults) - if not keep_default and field.default is not None: + if field.default is not None: sql = self.sql_alter_column % { "table": self.quote_name(model._meta.db_table), "changes": self.sql_alter_column_no_default % { From b61b6346284fb32614aab965bd2cb09b383fc9f5 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Sun, 11 Aug 2013 15:28:51 +0100 Subject: [PATCH 131/161] Fix weird planning issues when already fully migrated. 
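The fix below only unapplies an already-applied target when another node for the same app appears
in its backwards plan; otherwise the plan stays empty. A stripped-down sketch of that guard, using
made-up node names::

    # Nodes are (app_label, migration_name) pairs, as in the migration graph.
    target = ("migrations", "0002_second")
    backwards_plan = [("sessions", "0001_initial")]  # dependents that would be unapplied

    if any(node[0] == target[0] for node in backwards_plan):
        plan = [(node, True) for node in backwards_plan]  # True marks a backwards step
    else:
        plan = []  # already fully migrated; nothing to undo

    print(plan)  # [] -- re-planning a fully-migrated set is a no-op
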
--- django/db/migrations/executor.py | 13 ++++--- tests/migrations/test_executor.py | 34 ++++++++++++++++++- .../test_migrations_2/0001_initial.py | 21 ++++++++++++ .../migrations/test_migrations_2/__init__.py | 0 4 files changed, 63 insertions(+), 5 deletions(-) create mode 100644 tests/migrations/test_migrations_2/0001_initial.py create mode 100644 tests/migrations/test_migrations_2/__init__.py diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py index 9d99c90c11..fe0ac6b061 100644 --- a/django/db/migrations/executor.py +++ b/django/db/migrations/executor.py @@ -33,10 +33,15 @@ class MigrationExecutor(object): # If the migration is already applied, do backwards mode, # otherwise do forwards mode. elif target in applied: - for migration in self.loader.graph.backwards_plan(target)[:-1]: - if migration in applied: - plan.append((self.loader.graph.nodes[migration], True)) - applied.remove(migration) + backwards_plan = self.loader.graph.backwards_plan(target)[:-1] + # We only do this if the migration is not the most recent one + # in its app - that is, another migration with the same app + # label is in the backwards plan + if any(node[0] == target[0] for node in backwards_plan): + for migration in backwards_plan: + if migration in applied: + plan.append((self.loader.graph.nodes[migration], True)) + applied.remove(migration) else: for migration in self.loader.graph.forwards_plan(target): if migration not in applied: diff --git a/tests/migrations/test_executor.py b/tests/migrations/test_executor.py index ddbfa78db7..5167f428d1 100644 --- a/tests/migrations/test_executor.py +++ b/tests/migrations/test_executor.py @@ -12,7 +12,7 @@ class ExecutorTests(TransactionTestCase): test failures first, as they may be propagating into here. """ - available_apps = ["migrations"] + available_apps = ["migrations", "django.contrib.sessions"] @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_run(self): @@ -38,3 +38,35 @@ class ExecutorTests(TransactionTestCase): # Are the tables there now? self.assertIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) self.assertIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations", "sessions": "migrations.test_migrations_2"}) + def test_empty_plan(self): + """ + Tests that re-planning a full migration of a fully-migrated set doesn't + perform spurious unmigrations and remigrations. + + There was previously a bug where the executor just always performed the + backwards plan for applied migrations - which even for the most recent + migration in an app, might include other, dependent apps, and these + were being unmigrated. + """ + # Make the initial plan, check it + # We use 'sessions' here as the second app as it's always present + # in INSTALLED_APPS, so we can happily assign it test migrations. 
+ executor = MigrationExecutor(connection) + plan = executor.migration_plan([("migrations", "0002_second"), ("sessions", "0001_initial")]) + self.assertEqual( + plan, + [ + (executor.loader.graph.nodes["migrations", "0001_initial"], False), + (executor.loader.graph.nodes["migrations", "0002_second"], False), + (executor.loader.graph.nodes["sessions", "0001_initial"], False), + ], + ) + # Fake-apply all migrations + executor.migrate([("migrations", "0002_second"), ("sessions", "0001_initial")], fake=True) + # Now plan a second time and make sure it's empty + plan = executor.migration_plan([("migrations", "0002_second"), ("sessions", "0001_initial")]) + self.assertEqual(plan, []) + # Erase all the fake records + executor.recorder.flush() diff --git a/tests/migrations/test_migrations_2/0001_initial.py b/tests/migrations/test_migrations_2/0001_initial.py new file mode 100644 index 0000000000..94c4bc0746 --- /dev/null +++ b/tests/migrations/test_migrations_2/0001_initial.py @@ -0,0 +1,21 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("migrations", "0002_second")] + + operations = [ + + migrations.CreateModel( + "OtherAuthor", + [ + ("id", models.AutoField(primary_key=True)), + ("name", models.CharField(max_length=255)), + ("slug", models.SlugField(null=True)), + ("age", models.IntegerField(default=0)), + ("silly_field", models.BooleanField(default=False)), + ], + ), + + ] diff --git a/tests/migrations/test_migrations_2/__init__.py b/tests/migrations/test_migrations_2/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From 841b7af8120e854f3aa37b8918f461efcbbef3d6 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 12 Aug 2013 16:40:41 +0100 Subject: [PATCH 132/161] Use DEFAULT_DB_ALIAS --- django/core/management/commands/makemigrations.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py index cd30311820..b05f37a8bb 100644 --- a/django/core/management/commands/makemigrations.py +++ b/django/core/management/commands/makemigrations.py @@ -4,7 +4,7 @@ from optparse import make_option from django.core.management.base import BaseCommand from django.core.exceptions import ImproperlyConfigured -from django.db import connections +from django.db import connections, DEFAULT_DB_ALIAS from django.db.migrations.loader import MigrationLoader from django.db.migrations.autodetector import MigrationAutodetector, InteractiveMigrationQuestioner from django.db.migrations.state import ProjectState @@ -41,7 +41,7 @@ class Command(BaseCommand): # Load the current graph state. Takes a connection, but it's not used # (makemigrations doesn't look at the database state). 
- loader = MigrationLoader(connections["default"]) + loader = MigrationLoader(connections[DEFAULT_DB_ALIAS]) # Detect changes autodetector = MigrationAutodetector( From 44f907dd980defaab2c06b4ead2255ec3566bcd5 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 12 Aug 2013 20:05:20 +0100 Subject: [PATCH 133/161] Start of getting Oracle to do schema stuff --- django/db/backends/oracle/base.py | 1 + django/db/backends/oracle/creation.py | 4 ++-- django/db/backends/oracle/schema.py | 11 ++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index 46022a97b1..9b08bc097a 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -91,6 +91,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): has_bulk_insert = True supports_tablespaces = True supports_sequence_reset = False + supports_combined_alters = False class DatabaseOperations(BaseDatabaseOperations): diff --git a/django/db/backends/oracle/creation.py b/django/db/backends/oracle/creation.py index f7d43ae7b6..b1a8782aa9 100644 --- a/django/db/backends/oracle/creation.py +++ b/django/db/backends/oracle/creation.py @@ -49,8 +49,8 @@ class DatabaseCreation(BaseDatabaseCreation): data_type_check_constraints = { 'BooleanField': '%(qn_column)s IN (0,1)', 'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)', - 'PositiveIntegerField': '"%(qn_column)s" >= 0', - 'PositiveSmallIntegerField': '"%(qn_column)s" >= 0', + 'PositiveIntegerField': '%(qn_column)s >= 0', + 'PositiveSmallIntegerField': '%(qn_column)s >= 0', } def __init__(self, connection): diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py index b86e0857bb..4a679e79eb 100644 --- a/django/db/backends/oracle/schema.py +++ b/django/db/backends/oracle/schema.py @@ -2,4 +2,13 @@ from django.db.backends.schema import BaseDatabaseSchemaEditor class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): - pass + + sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s" + sql_alter_column_type = "MODIFY %(column)s %(type)s" + sql_alter_column_null = "MODIFY %(column)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s NOT NULL" + sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s" + sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL" + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" + sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS" + From 157604a87fa7e1331c25fcbed558f0799aa5b8df Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Tue, 13 Aug 2013 20:54:57 +0100 Subject: [PATCH 134/161] Oracle schema backend, passes most tests and is pretty complete. 
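The Oracle introspection added below builds the same structure that ``get_constraints()`` is
documented to return in patch 126. A sketch of that return value for a hypothetical pair of
tables (the table and constraint names are invented for illustration)::

    constraints = {
        "schema_author_pkey": {
            "columns": ["id"],
            "primary_key": True,
            "unique": True,
            "foreign_key": None,
            "check": False,
            "index": True,
        },
        "schema_book_author_id_fk": {
            "columns": ["author_id"],
            "primary_key": False,
            "unique": False,
            "foreign_key": ("schema_author", "id"),  # (table, column) of the target
            "check": False,
            "index": False,
        },
    }
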
--- django/db/backends/oracle/base.py | 1 + django/db/backends/oracle/introspection.py | 139 +++++++++++++++++++++ django/db/backends/oracle/schema.py | 77 ++++++++++++ django/db/backends/schema.py | 35 ++++-- tests/schema/tests.py | 4 +- 5 files changed, 247 insertions(+), 9 deletions(-) diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index 9b08bc097a..b6812a6d3e 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -92,6 +92,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_tablespaces = True supports_sequence_reset = False supports_combined_alters = False + max_index_name_length = 30 class DatabaseOperations(BaseDatabaseOperations): diff --git a/django/db/backends/oracle/introspection.py b/django/db/backends/oracle/introspection.py index a2fad92509..e4ef1ae81b 100644 --- a/django/db/backends/oracle/introspection.py +++ b/django/db/backends/oracle/introspection.py @@ -134,3 +134,142 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): indexes[row[0]] = {'primary_key': bool(row[1]), 'unique': bool(row[2])} return indexes + + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. + """ + constraints = {} + # Loop over the constraints, getting PKs and uniques + cursor.execute(""" + SELECT + user_constraints.constraint_name, + LOWER(cols.column_name) AS column_name, + CASE user_constraints.constraint_type + WHEN 'P' THEN 1 + ELSE 0 + END AS is_primary_key, + CASE user_indexes.uniqueness + WHEN 'UNIQUE' THEN 1 + ELSE 0 + END AS is_unique, + CASE user_constraints.constraint_type + WHEN 'C' THEN 1 + ELSE 0 + END AS is_check_constraint + FROM + user_constraints + INNER JOIN + user_indexes ON user_indexes.index_name = user_constraints.index_name + LEFT OUTER JOIN + user_cons_columns cols ON user_constraints.constraint_name = cols.constraint_name + WHERE + ( + user_constraints.constraint_type = 'P' OR + user_constraints.constraint_type = 'U' + ) + AND user_constraints.table_name = UPPER(%s) + ORDER BY cols.position + """, [table_name]) + for constraint, column, pk, unique, check in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": pk, + "unique": unique, + "foreign_key": None, + "check": check, + "index": True, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Check constraints + cursor.execute(""" + SELECT + cons.constraint_name, + LOWER(cols.column_name) AS column_name + FROM + user_constraints cons + LEFT OUTER JOIN + user_cons_columns cols ON cons.constraint_name = cols.constraint_name + WHERE + cons.constraint_type = 'C' AND + cons.table_name = UPPER(%s) + ORDER BY cols.position + """, [table_name]) + for constraint, column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": None, + "check": True, + "index": False, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Foreign key constraints + cursor.execute(""" + SELECT + cons.constraint_name, + LOWER(cols.column_name) AS column_name, + LOWER(rcons.table_name), + LOWER(rcols.column_name) + FROM + user_constraints cons + INNER JOIN + user_constraints rcons ON cons.r_constraint_name = rcons.constraint_name + INNER JOIN + 
user_cons_columns rcols ON rcols.constraint_name = rcons.constraint_name + LEFT OUTER JOIN + user_cons_columns cols ON cons.constraint_name = cols.constraint_name + WHERE + cons.constraint_type = 'R' AND + cons.table_name = UPPER(%s) + ORDER BY cols.position + """, [table_name]) + for constraint, column, other_table, other_column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": (other_table, other_column), + "check": False, + "index": False, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Now get indexes + cursor.execute(""" + SELECT + index_name, + LOWER(column_name) + FROM + user_ind_columns cols + WHERE + table_name = UPPER(%s) AND + NOT EXISTS ( + SELECT 1 + FROM user_constraints cons + WHERE cols.index_name = cons.index_name + ) + """, [table_name]) + for constraint, column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": None, + "check": False, + "index": True, + } + # Record the details + constraints[constraint]['columns'].append(column) + return constraints diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py index 4a679e79eb..c78294cad5 100644 --- a/django/db/backends/oracle/schema.py +++ b/django/db/backends/oracle/schema.py @@ -1,4 +1,6 @@ +import copy from django.db.backends.schema import BaseDatabaseSchemaEditor +from django.db.utils import DatabaseError class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): @@ -12,3 +14,78 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS" + def delete_model(self, model): + # Run superclass action + super(DatabaseSchemaEditor, self).delete_model(model) + # Clean up any autoincrement trigger + self.execute(""" + DECLARE + i INTEGER; + BEGIN + SELECT COUNT(*) INTO i FROM USER_CATALOG + WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; + IF i = 1 THEN + EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"'; + END IF; + END; + /""" % {'sq_name': self.connection.ops._get_sequence_name(model._meta.db_table)}) + + def alter_field(self, model, old_field, new_field, strict=False): + try: + # Run superclass action + super(DatabaseSchemaEditor, self).alter_field(model, old_field, new_field, strict) + except DatabaseError as e: + description = str(e) + # If we're changing to/from LOB fields, we need to do a + # SQLite-ish workaround + if 'ORA-22858' in description or 'ORA-22859' in description: + self._alter_field_lob_workaround(model, old_field, new_field) + else: + raise + + def _alter_field_lob_workaround(self, model, old_field, new_field): + """ + Oracle refuses to change a column type from/to LOB to/from a regular + column. In Django, this shows up when the field is changed from/to + a TextField. + What we need to do instead is: + - Add the desired field with a temporary name + - Update the table to transfer values from old to new + - Drop old column + - Rename the new column + """ + # Make a new field that's like the new one but with a temporary + # column name. 
+ new_temp_field = copy.deepcopy(new_field) + new_temp_field.column = self._generate_temp_name(new_field.column) + # Add it + self.add_field(model, new_temp_field) + # Transfer values across + self.execute("UPDATE %s set %s=%s" % ( + self.quote_name(model._meta.db_table), + self.quote_name(new_temp_field.column), + self.quote_name(old_field.column), + )) + # Drop the old field + self.remove_field(model, old_field) + # Rename the new field + self.alter_field(model, new_temp_field, new_field) + # Close the connection to force cx_Oracle to get column types right + # on a new cursor + self.connection.close() + + def normalize_name(self, name): + """ + Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes. + """ + nn = self.quote_name(name) + if nn[0] == '"' and nn[-1] == '"': + nn = nn[1:-1] + return nn + + def _generate_temp_name(self, for_name): + """ + Generates temporary names for workarounds that need temp columns + """ + suffix = hex(hash(for_name)).upper()[1:] + return self.normalize_name(for_name + "_" + suffix) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 19a737883f..7beae7417a 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -113,6 +113,11 @@ class BaseDatabaseSchemaEditor(object): sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True) # Work out nullability null = field.null + # If we were told to include a default value, do so + default_value = self.effective_default(field) + if include_default and default_value is not None: + sql += " DEFAULT %s" + params += [default_value] # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if (field.empty_strings_allowed and not field.primary_key and @@ -127,11 +132,6 @@ class BaseDatabaseSchemaEditor(object): sql += " PRIMARY KEY" elif field.unique: sql += " UNIQUE" - # If we were told to include a default value, do so - default_value = self.effective_default(field) - if include_default and default_value is not None: - sql += " DEFAULT %s" - params += [default_value] # Return the sql return sql, params @@ -176,7 +176,7 @@ class BaseDatabaseSchemaEditor(object): )) params.extend(extra_params) # Indexes - if field.db_index: + if field.db_index and not field.unique: self.deferred_sql.append( self.sql_create_index % { "name": self._create_index_name(model, [field.column], suffix=""), @@ -198,6 +198,11 @@ class BaseDatabaseSchemaEditor(object): "to_column": self.quote_name(to_column), } ) + # Autoincrement SQL + if field.get_internal_type() == "AutoField": + autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) + if autoinc_sql: + self.deferred_sql.extend(autoinc_sql) # Add any unique_togethers for fields in model._meta.unique_together: columns = [model._meta.get_field_by_name(field)[0].column for field in fields] @@ -353,6 +358,16 @@ class BaseDatabaseSchemaEditor(object): } } self.execute(sql) + # Add an index, if required + if field.db_index and not field.unique: + self.deferred_sql.append( + self.sql_create_index % { + "name": self._create_index_name(model, [field.column], suffix=""), + "table": self.quote_name(model._meta.db_table), + "columns": self.quote_name(field.column), + "extra": "", + } + ) # Add any FK constraints later if field.rel and self.connection.features.supports_foreign_keys: to_table = field.rel.to._meta.db_table @@ -412,7 +427,7 @@ class BaseDatabaseSchemaEditor(object): new_field, )) # Has unique been 
removed? - if old_field.unique and not new_field.unique: + if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)): # Find the unique constraint for this field constraint_names = self._constraint_names(model, [old_field.column], unique=True) if strict and len(constraint_names) != 1: @@ -647,9 +662,15 @@ class BaseDatabaseSchemaEditor(object): if len(index_name) > self.connection.features.max_index_name_length: part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) + # It shouldn't start with an underscore (Oracle hates this) + if index_name[0] == "_": + index_name = index_name[1:] # If it's STILL too long, just hash it down if len(index_name) > self.connection.features.max_index_name_length: index_name = hashlib.md5(index_name).hexdigest()[:self.connection.features.max_index_name_length] + # It can't start with a number on Oracle, so prepend D if we need to + if index_name[0].isdigit(): + index_name = "D%s" % index_name[:-1] return index_name def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None): diff --git a/tests/schema/tests.py b/tests/schema/tests.py index f6e45599b8..d4e76e8567 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -167,7 +167,7 @@ class SchemaTests(TransactionTestCase): # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") - self.assertEqual(columns['name'][1][6], False) + self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) # Alter the name field to a TextField new_field = TextField(null=True) new_field.set_attributes_from_name("name") @@ -195,7 +195,7 @@ class SchemaTests(TransactionTestCase): # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") - self.assertEqual(columns['name'][1][6], False) + self.assertEqual(columns['name'][1][6], bool(connection.features.interprets_empty_strings_as_nulls)) def test_rename(self): """ From 5b522cd85a63cf5e352c500447aa91002e83845e Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 19 Aug 2013 13:12:48 +0100 Subject: [PATCH 135/161] Minor oracle fixes --- django/db/backends/oracle/introspection.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/django/db/backends/oracle/introspection.py b/django/db/backends/oracle/introspection.py index e4ef1ae81b..70c38c8de8 100644 --- a/django/db/backends/oracle/introspection.py +++ b/django/db/backends/oracle/introspection.py @@ -180,7 +180,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): "unique": unique, "foreign_key": None, "check": check, - "index": True, + "index": True, # All P and U come with index, see inner join above } # Record the details constraints[constraint]['columns'].append(column) @@ -258,6 +258,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): FROM user_constraints cons WHERE cols.index_name = cons.index_name ) + ORDER BY cols.column_position """, [table_name]) for constraint, column in cursor.fetchall(): # If we're the first column, make the record From 52edc16086e3c28a78c31975bb4da2f9450590b4 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Mon, 19 Aug 2013 13:50:26 +0100 Subject: [PATCH 136/161] Add more stringent M2M tests and fix the bug they exposed --- 
django/db/models/options.py | 6 +++--- tests/migrations/test_operations.py | 9 ++++++++- tests/schema/models.py | 10 +++++++++- tests/schema/tests.py | 24 ++++++++++++------------ 4 files changed, 32 insertions(+), 17 deletions(-) diff --git a/django/db/models/options.py b/django/db/models/options.py index d39873fd70..14f73c301f 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -9,7 +9,7 @@ from django.conf import settings from django.db.models.fields.related import ManyToManyRel from django.db.models.fields import AutoField, FieldDoesNotExist from django.db.models.fields.proxy import OrderWrt -from django.db.models.loading import get_models, app_cache_ready, cache +from django.db.models.loading import app_cache_ready, cache from django.utils import six from django.utils.functional import cached_property from django.utils.encoding import force_text, smart_text, python_2_unicode_compatible @@ -495,7 +495,7 @@ class Options(object): cache[obj] = model # Collect also objects which are in relation to some proxy child/parent of self. proxy_cache = cache.copy() - for klass in get_models(include_auto_created=True, only_installed=False): + for klass in self.app_cache.get_models(include_auto_created=True, only_installed=False): if not klass._meta.swapped: for f in klass._meta.local_fields: if f.rel and not isinstance(f.rel.to, six.string_types) and f.generate_reverse_relation: @@ -538,7 +538,7 @@ class Options(object): cache[obj] = parent else: cache[obj] = model - for klass in get_models(only_installed=False): + for klass in self.app_cache.get_models(only_installed=False): if not klass._meta.swapped: for f in klass._meta.local_many_to_many: if (f.rel diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 33b870a335..2ff3f73b8a 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -116,7 +116,7 @@ class OperationTests(MigrationTestBase): """ project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration - operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) + operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) @@ -126,6 +126,13 @@ class OperationTests(MigrationTestBase): operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") + # Make sure the M2M field actually works + app_cache = new_state.render() + Pony = app_cache.get_model("test_adflmm", "Pony") + p = Pony.objects.create(pink=False, weight=4.55) + p.stables.create() + self.assertEqual(p.stables.count(), 1) + p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) diff --git a/tests/schema/models.py b/tests/schema/models.py index 69cf06f3c4..dc717ec105 100644 --- a/tests/schema/models.py +++ b/tests/schema/models.py @@ -37,7 +37,7 @@ class BookWithM2M(models.Model): author = models.ForeignKey(Author) title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() - tags = models.ManyToManyField("Tag", related_name="books") + tags = models.ManyToManyField("TagM2MTest", related_name="books") class 
Meta: app_cache = new_app_cache @@ -62,6 +62,14 @@ class Tag(models.Model): app_cache = new_app_cache +class TagM2MTest(models.Model): + title = models.CharField(max_length=255) + slug = models.SlugField(unique=True) + + class Meta: + app_cache = new_app_cache + + class TagIndexed(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(unique=True) diff --git a/tests/schema/tests.py b/tests/schema/tests.py index d4e76e8567..bf9fa6bbcc 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -6,7 +6,7 @@ from django.db import connection, DatabaseError, IntegrityError from django.db.models.fields import IntegerField, TextField, CharField, SlugField from django.db.models.fields.related import ManyToManyField, ForeignKey from django.db.transaction import atomic -from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagUniqueRename, UniqueTest +from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest class SchemaTests(TransactionTestCase): @@ -20,7 +20,7 @@ class SchemaTests(TransactionTestCase): available_apps = [] - models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagUniqueRename, UniqueTest] + models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest] no_table_strings = ["no such table", "unknown table", "does not exist"] # Utility functions @@ -234,7 +234,7 @@ class SchemaTests(TransactionTestCase): editor.create_model(BookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) - self.assertEqual(columns['tag_id'][0], "IntegerField") + self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") def test_m2m(self): """ @@ -243,9 +243,9 @@ class SchemaTests(TransactionTestCase): # Create the tables with connection.schema_editor() as editor: editor.create_model(AuthorWithM2M) - editor.create_model(Tag) + editor.create_model(TagM2MTest) # Create an M2M field - new_field = ManyToManyField("schema.Tag", related_name="authors") + new_field = ManyToManyField("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(AuthorWithM2M, "tags") try: # Ensure there's no m2m table there @@ -258,7 +258,7 @@ class SchemaTests(TransactionTestCase): ) # Ensure there is now an m2m table there columns = self.column_classes(new_field.rel.through) - self.assertEqual(columns['tag_id'][0], "IntegerField") + self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") # Remove the M2M table again with connection.schema_editor() as editor: editor.remove_field( @@ -279,17 +279,17 @@ class SchemaTests(TransactionTestCase): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithM2M) - editor.create_model(Tag) + editor.create_model(TagM2MTest) editor.create_model(UniqueTest) - # Ensure the M2M exists and points to Tag + # Ensure the M2M exists and points to TagM2MTest constraints = connection.introspection.get_constraints(connection.cursor(), BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table) if connection.features.supports_foreign_keys: for name, details in constraints.items(): - if details['columns'] == ["tag_id"] and details['foreign_key']: - self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) + if details['columns'] == ["tagm2mtest_id"] and details['foreign_key']: + self.assertEqual(details['foreign_key'], 
('schema_tagm2mtest', 'id')) break else: - self.fail("No FK constraint for tag_id found") + self.fail("No FK constraint for tagm2mtest_id found") # Repoint the M2M new_field = ManyToManyField(UniqueTest) new_field.contribute_to_class(BookWithM2M, "uniques") @@ -310,7 +310,7 @@ class SchemaTests(TransactionTestCase): self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) break else: - self.fail("No FK constraint for tag_id found") + self.fail("No FK constraint for uniquetest_id found") finally: # Cleanup model states BookWithM2M._meta.local_many_to_many.remove(new_field) From 2e7f45a3721a8afbc61a4320ae9ab3908d1e81b4 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 21 Aug 2013 22:25:15 +0100 Subject: [PATCH 137/161] Change autodetector changes API to be just one method --- .../management/commands/makemigrations.py | 5 +--- django/db/migrations/autodetector.py | 18 +++++++++-- tests/migrations/test_autodetector.py | 30 +++++++++---------- 3 files changed, 31 insertions(+), 22 deletions(-) diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py index b05f37a8bb..9f0690b483 100644 --- a/django/core/management/commands/makemigrations.py +++ b/django/core/management/commands/makemigrations.py @@ -49,10 +49,7 @@ class Command(BaseCommand): ProjectState.from_app_cache(cache), InteractiveMigrationQuestioner(specified_apps=app_labels), ) - changes = autodetector.changes() - changes = autodetector.arrange_for_graph(changes, loader.graph) - if app_labels: - changes = autodetector.trim_to_apps(changes, app_labels) + changes = autodetector.changes(graph=loader.graph, trim_to_apps=app_labels or None) # No changes? Tell them. if not changes: diff --git a/django/db/migrations/autodetector.py b/django/db/migrations/autodetector.py index 2771bcbc68..334c26d973 100644 --- a/django/db/migrations/autodetector.py +++ b/django/db/migrations/autodetector.py @@ -25,7 +25,19 @@ class MigrationAutodetector(object): self.to_state = to_state self.questioner = questioner or MigrationQuestioner() - def changes(self): + def changes(self, graph, trim_to_apps=None): + """ + Main entry point to produce a list of appliable changes. + Takes a graph to base names on and an optional set of apps + to try and restrict to (restriction is not guaranteed) + """ + changes = self._detect_changes() + changes = self._arrange_for_graph(changes, graph) + if trim_to_apps: + changes = self._trim_to_apps(changes, trim_to_apps) + return changes + + def _detect_changes(self): """ Returns a dict of migration plans which will achieve the change from from_state to to_state. 
The dict has app labels @@ -229,7 +241,7 @@ class MigrationAutodetector(object): dependency = (other_app_label, "__first__") self.migrations[app_label][-1].dependencies.append(dependency) - def arrange_for_graph(self, changes, graph): + def _arrange_for_graph(self, changes, graph): """ Takes in a result from changes() and a MigrationGraph, and fixes the names and dependencies of the changes so they @@ -273,7 +285,7 @@ class MigrationAutodetector(object): migration.dependencies = [name_map.get(d, d) for d in migration.dependencies] return changes - def trim_to_apps(self, changes, app_labels): + def _trim_to_apps(self, changes, app_labels): """ Takes changes from arrange_for_graph and set of app labels and returns a modified set of changes which trims out as many migrations diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py index 9b7fbd5e8a..de3b156ba6 100644 --- a/tests/migrations/test_autodetector.py +++ b/tests/migrations/test_autodetector.py @@ -44,9 +44,9 @@ class AutodetectorTests(TestCase): before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Run through arrange_for_graph - changes = autodetector.arrange_for_graph(changes, graph) + changes = autodetector._arrange_for_graph(changes, graph) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_author") self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) @@ -59,12 +59,12 @@ class AutodetectorTests(TestCase): before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() - changes = autodetector.arrange_for_graph(changes, graph) + changes = autodetector._arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) - changes = autodetector.trim_to_apps(changes, set(["testapp"])) + changes = autodetector._trim_to_apps(changes, set(["testapp"])) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") @@ -76,7 +76,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([]) after = self.make_project_state([self.author_empty]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) # Right number of actions? @@ -93,7 +93,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_empty]) after = self.make_project_state([]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) # Right number of actions? 
@@ -110,7 +110,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_empty]) after = self.make_project_state([self.author_name]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) # Right number of actions? @@ -127,7 +127,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_name]) after = self.make_project_state([self.author_empty]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) # Right number of actions? @@ -144,7 +144,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_name]) after = self.make_project_state([self.author_name_longer]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) # Right number of actions? @@ -161,7 +161,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_name]) after = self.make_project_state([self.author_name_renamed]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_rename": True})) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) # Right number of actions? @@ -179,7 +179,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([]) after = self.make_project_state([self.author_name, self.book, self.edition]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) self.assertEqual(len(changes['otherapp']), 1) @@ -212,7 +212,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([]) after = self.make_project_state([self.author_with_book, self.book]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['testapp']), 1) self.assertEqual(len(changes['otherapp']), 2) @@ -243,7 +243,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_empty, self.book]) after = self.make_project_state([self.author_empty, self.book_unique]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? @@ -261,7 +261,7 @@ class AutodetectorTests(TestCase): before = self.make_project_state([self.author_empty, self.book_unique]) after = self.make_project_state([self.author_empty, self.book_unique_2]) autodetector = MigrationAutodetector(before, after) - changes = autodetector.changes() + changes = autodetector._detect_changes() # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? From e9b703f5a5e5de68a28abd2e6651e6100b0c6b49 Mon Sep 17 00:00:00 2001 From: Simon Charette Date: Mon, 19 Aug 2013 18:42:48 -0400 Subject: [PATCH 138/161] Correctly format missing Pillow/PIL exceptions messages. 
refs #19934 --- django/utils/image.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/django/utils/image.py b/django/utils/image.py index dd2fab6197..8df5850338 100644 --- a/django/utils/image.py +++ b/django/utils/image.py @@ -102,7 +102,7 @@ def _detect_image_library(): except ImportError as err: # Neither worked, so it's likely not installed. raise ImproperlyConfigured( - _("Neither Pillow nor PIL could be imported: %s" % err) + _("Neither Pillow nor PIL could be imported: %s") % err ) # ``Image.alpha_composite`` was added to Pillow in SHA: e414c6 & is not @@ -125,7 +125,7 @@ def _detect_image_library(): except ImportError as err: raise ImproperlyConfigured( _("The '_imaging' module for the PIL could not be " - "imported: %s" % err) + "imported: %s") % err ) # Try to import ImageFile as well. From eadecf0cdbc30d2351b91ae77d7a441f592bc5d0 Mon Sep 17 00:00:00 2001 From: Simon Charette Date: Mon, 19 Aug 2013 19:42:53 -0400 Subject: [PATCH 139/161] Avoid importing the deprecated `django.utils.importlib` package. --- django/utils/formats.py | 2 ++ django/utils/module_loading.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/django/utils/formats.py b/django/utils/formats.py index 6b89b40ecd..11e2bef8ae 100644 --- a/django/utils/formats.py +++ b/django/utils/formats.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import # Avoid importing `importlib` from this package. + import decimal import datetime from importlib import import_module diff --git a/django/utils/module_loading.py b/django/utils/module_loading.py index 359982e6ba..9c8ea98d50 100644 --- a/django/utils/module_loading.py +++ b/django/utils/module_loading.py @@ -1,3 +1,5 @@ +from __future__ import absolute_import # Avoid importing `importlib` from this package. + import imp from importlib import import_module import os From 77478d84ade2e7b4720231e52d0c517741b18768 Mon Sep 17 00:00:00 2001 From: Simon Charette Date: Mon, 19 Aug 2013 20:39:30 -0400 Subject: [PATCH 140/161] Fixed an aggregation test failure on MySQL. --- tests/aggregation/tests.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/aggregation/tests.py b/tests/aggregation/tests.py index 7d2490a77c..ce7f4e9b9d 100644 --- a/tests/aggregation/tests.py +++ b/tests/aggregation/tests.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals import datetime from decimal import Decimal +import re from django.db import connection from django.db.models import Avg, Sum, Count, Max, Min @@ -640,5 +641,14 @@ class BaseAggregateTestCase(TestCase): self.assertEqual(len(captured_queries), 1) qstr = captured_queries[0]['sql'].lower() self.assertNotIn('for update', qstr) - self.assertNotIn('order by', qstr) + forced_ordering = connection.ops.force_no_ordering() + if forced_ordering: + # If the backend needs to force an ordering we make sure it's + # the only "ORDER BY" clause present in the query. 
+ self.assertEqual( + re.findall(r'order by (\w+)', qstr), + [', '.join(forced_ordering).lower()] + ) + else: + self.assertNotIn('order by', qstr) self.assertEqual(qstr.count(' join '), 0) From 63378163f9c5f3f9f9b42a6f260f798aa7e4b1f6 Mon Sep 17 00:00:00 2001 From: Simon Charette Date: Mon, 19 Aug 2013 23:14:21 -0400 Subject: [PATCH 141/161] Fixed #20943 -- Weakly reference senders when caching their associated receivers --- django/db/models/signals.py | 2 +- django/dispatch/dispatcher.py | 12 ++++++++---- tests/dispatch/tests/test_dispatcher.py | 21 +++++++++++++++++++++ 3 files changed, 30 insertions(+), 5 deletions(-) diff --git a/django/db/models/signals.py b/django/db/models/signals.py index e53ffc3d1f..6b7605839c 100644 --- a/django/db/models/signals.py +++ b/django/db/models/signals.py @@ -14,7 +14,7 @@ post_delete = Signal(providing_args=["instance", "using"], use_caching=True) pre_migrate = Signal(providing_args=["app", "create_models", "verbosity", "interactive", "db"]) pre_syncdb = pre_migrate -post_migrate = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive", "db"], use_caching=True) +post_migrate = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive", "db"]) post_syncdb = post_migrate m2m_changed = Signal(providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True) diff --git a/django/dispatch/dispatcher.py b/django/dispatch/dispatcher.py index 65c5c408ff..a8cdc93b21 100644 --- a/django/dispatch/dispatcher.py +++ b/django/dispatch/dispatcher.py @@ -4,8 +4,10 @@ import threading from django.dispatch import saferef from django.utils.six.moves import xrange + WEAKREF_TYPES = (weakref.ReferenceType, saferef.BoundMethodWeakref) + def _make_id(target): if hasattr(target, '__func__'): return (id(target.__self__), id(target.__func__)) @@ -15,6 +17,7 @@ NONE_ID = _make_id(None) # A marker for caching NO_RECEIVERS = object() + class Signal(object): """ Base class for all signals @@ -42,7 +45,7 @@ class Signal(object): # distinct sender we cache the receivers that sender has in # 'sender_receivers_cache'. The cache is cleaned when .connect() or # .disconnect() is called and populated on send(). 
- self.sender_receivers_cache = {} + self.sender_receivers_cache = weakref.WeakKeyDictionary() if use_caching else {} def connect(self, receiver, sender=None, weak=True, dispatch_uid=None): """ @@ -116,7 +119,7 @@ class Signal(object): break else: self.receivers.append((lookup_key, receiver)) - self.sender_receivers_cache = {} + self.sender_receivers_cache.clear() def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None): """ @@ -151,7 +154,7 @@ class Signal(object): if r_key == lookup_key: del self.receivers[index] break - self.sender_receivers_cache = {} + self.sender_receivers_cache.clear() def has_listeners(self, sender=None): return bool(self._live_receivers(sender)) @@ -276,7 +279,8 @@ class Signal(object): for idx, (r_key, _) in enumerate(reversed(self.receivers)): if r_key == key: del self.receivers[last_idx - idx] - self.sender_receivers_cache = {} + self.sender_receivers_cache.clear() + def receiver(signal, **kwargs): """ diff --git a/tests/dispatch/tests/test_dispatcher.py b/tests/dispatch/tests/test_dispatcher.py index 5f7dca87cc..e25f60b0c7 100644 --- a/tests/dispatch/tests/test_dispatcher.py +++ b/tests/dispatch/tests/test_dispatcher.py @@ -2,6 +2,7 @@ import gc import sys import time import unittest +import weakref from django.dispatch import Signal, receiver @@ -35,6 +36,8 @@ class Callable(object): a_signal = Signal(providing_args=["val"]) b_signal = Signal(providing_args=["val"]) c_signal = Signal(providing_args=["val"]) +d_signal = Signal(providing_args=["val"], use_caching=True) + class DispatcherTests(unittest.TestCase): """Test suite for dispatcher (barely started)""" @@ -72,6 +75,24 @@ class DispatcherTests(unittest.TestCase): self.assertEqual(result, expected) self._testIsClean(a_signal) + def testCachedGarbagedCollected(self): + """ + Make sure signal caching sender receivers don't prevent garbage + collection of senders. + """ + class sender: + pass + wref = weakref.ref(sender) + d_signal.connect(receiver_1_arg) + d_signal.send(sender, val='garbage') + del sender + garbage_collect() + try: + self.assertIsNone(wref()) + finally: + # Disconnect after reference check since it flushes the tested cache. + d_signal.disconnect(receiver_1_arg) + def testMultipleRegistration(self): a = Callable() a_signal.connect(a) From b773ef8fa0ef09641abf376ca8d4083d8966ec52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anssi=20K=C3=A4=C3=A4ri=C3=A4inen?= Date: Tue, 20 Aug 2013 09:47:43 +0300 Subject: [PATCH 142/161] Fixed #14043 -- Made sure nullable o2o delete works as expected There was an old complaint about nullable one-to-one field cascading even when the o2o field was saved to None value before the deletion. Added an test to verify this doesn't happen. Also some PEP 8 cleanup. --- tests/one_to_one_regress/tests.py | 34 +++++++++++++++++++------------ 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/tests/one_to_one_regress/tests.py b/tests/one_to_one_regress/tests.py index 0e20f19acb..55836d47e2 100644 --- a/tests/one_to_one_regress/tests.py +++ b/tests/one_to_one_regress/tests.py @@ -25,7 +25,7 @@ class OneToOneRegressionTests(TestCase): # The bug in #9023: if you access the one-to-one relation *before* # setting to None and deleting, the cascade happens anyway. self.p1.undergroundbar - bar.place.name='foo' + bar.place.name = 'foo' bar.place = None bar.save() self.p1.delete() @@ -40,12 +40,12 @@ class OneToOneRegressionTests(TestCase): Check that we create models via the m2m relation if the remote model has a OneToOneField. 
""" - f = Favorites(name = 'Fred') + f = Favorites(name='Fred') f.save() f.restaurants = [self.r1] self.assertQuerysetEqual( - f.restaurants.all(), - [''] + f.restaurants.all(), + [''] ) def test_reverse_object_cache(self): @@ -114,23 +114,23 @@ class OneToOneRegressionTests(TestCase): misbehaving. We test both (primary_key=True & False) cases here to prevent any reappearance of the problem. """ - t = Target.objects.create() + Target.objects.create() self.assertQuerysetEqual( - Target.objects.filter(pointer=None), - [''] + Target.objects.filter(pointer=None), + [''] ) self.assertQuerysetEqual( - Target.objects.exclude(pointer=None), - [] + Target.objects.exclude(pointer=None), + [] ) self.assertQuerysetEqual( - Target.objects.filter(pointer2=None), - [''] + Target.objects.filter(pointer2=None), + [''] ) self.assertQuerysetEqual( - Target.objects.exclude(pointer2=None), - [] + Target.objects.exclude(pointer2=None), + [] ) def test_reverse_object_does_not_exist_cache(self): @@ -235,3 +235,11 @@ class OneToOneRegressionTests(TestCase): b = UndergroundBar.objects.create() with self.assertNumQueries(0), self.assertRaises(ValueError): p.undergroundbar = b + + def test_nullable_o2o_delete(self): + u = UndergroundBar.objects.create(place=self.p1) + u.place_id = None + u.save() + self.p1.delete() + self.assertTrue(UndergroundBar.objects.filter(pk=u.pk).exists()) + self.assertIsNone(UndergroundBar.objects.get(pk=u.pk).place) From cea720450485e0871daa7f9477fdf2bff5a5b821 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anssi=20K=C3=A4=C3=A4ri=C3=A4inen?= Date: Tue, 20 Aug 2013 10:32:18 +0300 Subject: [PATCH 143/161] Fixed #14056 -- Made sure LEFT JOIN aren't trimmed in ORDER BY MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit If LEFT JOINs are required for correct results, then trimming the join can lead to incorrect results. Consider case: TBL A: ID | TBL B: ID A_ID 1 1 1 2 Now A.order_by('b__a') did use a join to B, and B's a_id column. This was seen to contain the same value as A's id, and so the join was trimmed. But this wasn't correct as the join is LEFT JOIN, and for row A.id = 2 the B.a_id column is NULL. --- django/db/models/sql/compiler.py | 46 +++++++++----------------------- tests/queries/tests.py | 13 ++++++++- 2 files changed, 24 insertions(+), 35 deletions(-) diff --git a/django/db/models/sql/compiler.py b/django/db/models/sql/compiler.py index 54b4e86245..caaeaefa6e 100644 --- a/django/db/models/sql/compiler.py +++ b/django/db/models/sql/compiler.py @@ -319,10 +319,10 @@ class SQLCompiler(object): for name in self.query.distinct_fields: parts = name.split(LOOKUP_SEP) - field, cols, alias, _, _ = self._setup_joins(parts, opts, None) - cols, alias = self._final_join_removal(cols, alias) - for col in cols: - result.append("%s.%s" % (qn(alias), qn2(col))) + _, targets, alias, joins, path, _ = self._setup_joins(parts, opts, None) + targets, alias, _ = self.query.trim_joins(targets, joins, path) + for target in targets: + result.append("%s.%s" % (qn(alias), qn2(target.column))) return result def get_ordering(self): @@ -421,7 +421,7 @@ class SQLCompiler(object): return result, params, group_by def find_ordering_name(self, name, opts, alias=None, default_order='ASC', - already_seen=None): + already_seen=None): """ Returns the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. 
@@ -429,11 +429,11 @@ class SQLCompiler(object): """ name, order = get_order_dir(name, default_order) pieces = name.split(LOOKUP_SEP) - field, cols, alias, joins, opts = self._setup_joins(pieces, opts, alias) + field, targets, alias, joins, path, opts = self._setup_joins(pieces, opts, alias) # If we get to this point and the field is a relation to another model, # append the default ordering for that model. - if field.rel and len(joins) > 1 and opts.ordering: + if field.rel and path and opts.ordering: # Firstly, avoid infinite loops. if not already_seen: already_seen = set() @@ -445,10 +445,10 @@ class SQLCompiler(object): results = [] for item in opts.ordering: results.extend(self.find_ordering_name(item, opts, alias, - order, already_seen)) + order, already_seen)) return results - cols, alias = self._final_join_removal(cols, alias) - return [(alias, cols, order)] + targets, alias, _ = self.query.trim_joins(targets, joins, path) + return [(alias, [t.column for t in targets], order)] def _setup_joins(self, pieces, opts, alias): """ @@ -461,13 +461,12 @@ class SQLCompiler(object): """ if not alias: alias = self.query.get_initial_alias() - field, targets, opts, joins, _ = self.query.setup_joins( + field, targets, opts, joins, path = self.query.setup_joins( pieces, opts, alias) # We will later on need to promote those joins that were added to the # query afresh above. joins_to_promote = [j for j in joins if self.query.alias_refcount[j] < 2] alias = joins[-1] - cols = [target.column for target in targets] if not field.rel: # To avoid inadvertent trimming of a necessary alias, use the # refcount to show that we are referencing a non-relation field on @@ -478,28 +477,7 @@ class SQLCompiler(object): # Ordering or distinct must not affect the returned set, and INNER # JOINS for nullable fields could do this. self.query.promote_joins(joins_to_promote) - return field, cols, alias, joins, opts - - def _final_join_removal(self, cols, alias): - """ - A helper method for get_distinct and get_ordering. This method will - trim extra not-needed joins from the tail of the join chain. - - This is very similar to what is done in trim_joins, but we will - trim LEFT JOINS here. It would be a good idea to consolidate this - method and query.trim_joins(). 
- """ - if alias: - while 1: - join = self.query.alias_map[alias] - lhs_cols, rhs_cols = zip(*[(lhs_col, rhs_col) for lhs_col, rhs_col in join.join_cols]) - if set(cols) != set(rhs_cols): - break - - cols = [lhs_cols[rhs_cols.index(col)] for col in cols] - self.query.unref_alias(alias) - alias = join.lhs_alias - return cols, alias + return field, targets, alias, joins, path, opts def get_from_clause(self): """ diff --git a/tests/queries/tests.py b/tests/queries/tests.py index 4d9ffb353f..bb4e9eee8f 100644 --- a/tests/queries/tests.py +++ b/tests/queries/tests.py @@ -25,7 +25,7 @@ from .models import ( OneToOneCategory, NullableName, ProxyCategory, SingleObject, RelatedObject, ModelA, ModelB, ModelC, ModelD, Responsibility, Job, JobResponsibilities, BaseA, FK1, Identifier, Program, Channel, Page, Paragraph, Chapter, Book, - MyObject, Order, OrderItem) + MyObject, Order, OrderItem, SharedConnection) class BaseQuerysetTest(TestCase): def assertValueQuerysetEqual(self, qs, values): @@ -2977,3 +2977,14 @@ class RelatedLookupTypeTests(TestCase): self.assertQuerysetEqual( ObjectB.objects.filter(objecta__in=[wrong_type]), [ob], lambda x: x) + +class Ticket14056Tests(TestCase): + def test_ticket_14056(self): + s1 = SharedConnection.objects.create(data='s1') + s2 = SharedConnection.objects.create(data='s2') + s3 = SharedConnection.objects.create(data='s3') + PointerA.objects.create(connection=s2) + self.assertQuerysetEqual( + SharedConnection.objects.order_by('pointera__connection', 'pk'), + [s1, s3, s2], lambda x: x + ) From ced3e6b17d18174216cdbe1ec7c24cc1819db787 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anssi=20K=C3=A4=C3=A4ri=C3=A4inen?= Date: Tue, 20 Aug 2013 11:33:44 +0300 Subject: [PATCH 144/161] Fixed test failure caused by different NULL ordering between backends --- django/db/backends/__init__.py | 3 +++ django/db/backends/postgresql_psycopg2/base.py | 1 + tests/queries/models.py | 4 ++++ tests/queries/tests.py | 10 +++++++--- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 771b9af59c..8d3c09ab1a 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -599,6 +599,9 @@ class BaseDatabaseFeatures(object): # to remove any ordering? requires_explicit_null_ordering_when_grouping = False + # Does the backend order NULL values as largest or smallest? + nulls_order_largest = False + # Is there a 1000 item limit on query parameters? supports_1000_query_parameters = True diff --git a/django/db/backends/postgresql_psycopg2/base.py b/django/db/backends/postgresql_psycopg2/base.py index f283476429..76b2935a1f 100644 --- a/django/db/backends/postgresql_psycopg2/base.py +++ b/django/db/backends/postgresql_psycopg2/base.py @@ -58,6 +58,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): can_distinct_on_fields = True can_rollback_ddl = True supports_combined_alters = True + nulls_order_largest = True class DatabaseWrapper(BaseDatabaseWrapper): diff --git a/tests/queries/models.py b/tests/queries/models.py index 71346d8be9..3a638b2867 100644 --- a/tests/queries/models.py +++ b/tests/queries/models.py @@ -262,9 +262,13 @@ class ReservedName(models.Model): return self.name # A simpler shared-foreign-key setup that can expose some problems. 
+@python_2_unicode_compatible class SharedConnection(models.Model): data = models.CharField(max_length=10) + def __str__(self): + return self.data + class PointerA(models.Model): connection = models.ForeignKey(SharedConnection) diff --git a/tests/queries/tests.py b/tests/queries/tests.py index bb4e9eee8f..91d4b17d0b 100644 --- a/tests/queries/tests.py +++ b/tests/queries/tests.py @@ -2984,7 +2984,11 @@ class Ticket14056Tests(TestCase): s2 = SharedConnection.objects.create(data='s2') s3 = SharedConnection.objects.create(data='s3') PointerA.objects.create(connection=s2) - self.assertQuerysetEqual( - SharedConnection.objects.order_by('pointera__connection', 'pk'), - [s1, s3, s2], lambda x: x + expected_ordering = ( + [s1, s3, s2] if connection.features.nulls_order_largest + else [s2, s1, s3] + ) + self.assertQuerysetEqual( + SharedConnection.objects.order_by('-pointera__connection', 'pk'), + expected_ordering, lambda x: x ) From f5552571dc7a0d9da9df1d108bc4bbef6857c157 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anssi=20K=C3=A4=C3=A4ri=C3=A4inen?= Date: Tue, 20 Aug 2013 16:23:25 +0300 Subject: [PATCH 145/161] Fixed #20820 -- Model inheritance + m2m fixture loading regression Tests by Tim Graham, report from jeroen.pulles@redslider.net. --- django/db/models/fields/related.py | 11 ++++++++++- tests/fixtures_regress/fixtures/special-article.json | 10 ++++++++++ tests/fixtures_regress/models.py | 5 +++++ tests/fixtures_regress/tests.py | 11 +++++++++++ 4 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 tests/fixtures_regress/fixtures/special-article.json diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 00da186279..6c11df4cbd 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -989,7 +989,16 @@ class ForeignObject(RelatedField): @staticmethod def get_instance_value_for_fields(instance, fields): - return tuple([getattr(instance, field.attname) for field in fields]) + ret = [] + for field in fields: + # Gotcha: in some cases (like fixture loading) a model can have + # different values in parent_ptr_id and parent's id. So, use + # instance.pk (that is, parent_ptr_id) when asked for instance.id. 
+ if field.primary_key: + ret.append(instance.pk) + else: + ret.append(getattr(instance, field.attname)) + return tuple(ret) def get_attname_column(self): attname, column = super(ForeignObject, self).get_attname_column() diff --git a/tests/fixtures_regress/fixtures/special-article.json b/tests/fixtures_regress/fixtures/special-article.json new file mode 100644 index 0000000000..435ceb7ca4 --- /dev/null +++ b/tests/fixtures_regress/fixtures/special-article.json @@ -0,0 +1,10 @@ +[ + { + "pk": 1, + "model": "fixtures_regress.specialarticle", + "fields": { + "title": "Article Title 1", + "channels": [] + } + } +] diff --git a/tests/fixtures_regress/models.py b/tests/fixtures_regress/models.py index 99096728a7..ab4fb8750c 100644 --- a/tests/fixtures_regress/models.py +++ b/tests/fixtures_regress/models.py @@ -70,6 +70,11 @@ class Article(models.Model): ordering = ('id',) +# Subclass of a model with a ManyToManyField for test_ticket_20820 +class SpecialArticle(Article): + pass + + # Models to regression test #11428 @python_2_unicode_compatible class Widget(models.Model): diff --git a/tests/fixtures_regress/tests.py b/tests/fixtures_regress/tests.py index f917b21642..e2985d3350 100644 --- a/tests/fixtures_regress/tests.py +++ b/tests/fixtures_regress/tests.py @@ -444,6 +444,17 @@ class TestFixtures(TestCase): self.assertTrue("No fixture 'this_fixture_doesnt_exist' in" in force_text(stdout_output.getvalue())) + def test_ticket_20820(self): + """ + Regression for ticket #20820 -- loaddata on a model that inherits + from a model with a M2M shouldn't blow up. + """ + management.call_command( + 'loaddata', + 'special-article.json', + verbosity=0, + ) + class NaturalKeyFixtureTests(TestCase): From 3a8ae71ab5efb387b31386bcd7d8ce72d25d1abd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anssi=20K=C3=A4=C3=A4ri=C3=A4inen?= Date: Tue, 20 Aug 2013 17:48:02 +0300 Subject: [PATCH 146/161] Fixed invalid testing fixture --- tests/fixtures_regress/fixtures/special-article.json | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/fixtures_regress/fixtures/special-article.json b/tests/fixtures_regress/fixtures/special-article.json index 435ceb7ca4..a36244acc1 100644 --- a/tests/fixtures_regress/fixtures/special-article.json +++ b/tests/fixtures_regress/fixtures/special-article.json @@ -1,4 +1,10 @@ [ + { + "pk": 1, + "model": "fixtures_regress.article", + "fields": {"title": "foof" + } + }, { "pk": 1, "model": "fixtures_regress.specialarticle", From 1db5fce1eee27cb260b47d3c631ee8f8d02de8c2 Mon Sep 17 00:00:00 2001 From: Simon Charette Date: Tue, 20 Aug 2013 12:28:59 -0400 Subject: [PATCH 147/161] Oracle also treats NULLs as largests values when ordering. --- django/db/backends/oracle/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index b6812a6d3e..e6435a9e96 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -93,6 +93,7 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_sequence_reset = False supports_combined_alters = False max_index_name_length = 30 + nulls_order_largest = True class DatabaseOperations(BaseDatabaseOperations): From 839940f27f25c2dafaa5b2ec934c2dd6b80903e8 Mon Sep 17 00:00:00 2001 From: Florian Apolloner Date: Tue, 20 Aug 2013 19:03:33 +0200 Subject: [PATCH 148/161] Fixed #20933 -- Allowed loaddata to load fixtures from relative paths. 
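In practice this means a command such as ``manage.py loaddata fixtures/absolute.json`` now works: any fixture name containing a path separator, not only an absolute path, is resolved against its own directory. A minimal sketch of the new lookup decision (the helper and its ``app_fixture_dirs`` argument are illustrative, not Django API):

    import os

    def fixture_search(fixture_name, app_fixture_dirs):
        # Names with a path separator -- relative or absolute -- are looked
        # up in their own directory, mirroring the loaddata change below.
        if os.path.sep in fixture_name:
            return [os.path.dirname(fixture_name)], os.path.basename(fixture_name)
        # Bare names such as 'absolute' still search the app fixtures
        # directories and FIXTURE_DIRS, as before.
        return app_fixture_dirs, fixture_name
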
--- django/core/management/commands/loaddata.py | 2 +- docs/howto/initial-data.txt | 4 ++-- tests/fixtures_regress/models.py | 6 ------ tests/fixtures_regress/tests.py | 17 ++++++++++++++++- 4 files changed, 19 insertions(+), 10 deletions(-) diff --git a/django/core/management/commands/loaddata.py b/django/core/management/commands/loaddata.py index 0da36a3c52..a6e22d9173 100644 --- a/django/core/management/commands/loaddata.py +++ b/django/core/management/commands/loaddata.py @@ -183,7 +183,7 @@ class Command(BaseCommand): if self.verbosity >= 2: self.stdout.write("Loading '%s' fixtures..." % fixture_name) - if os.path.isabs(fixture_name): + if os.path.sep in fixture_name: fixture_dirs = [os.path.dirname(fixture_name)] fixture_name = os.path.basename(fixture_name) else: diff --git a/docs/howto/initial-data.txt b/docs/howto/initial-data.txt index b86aaa834e..70d1ae18a6 100644 --- a/docs/howto/initial-data.txt +++ b/docs/howto/initial-data.txt @@ -90,8 +90,8 @@ fixtures. You can set the :setting:`FIXTURE_DIRS` setting to a list of additional directories where Django should look. When running :djadmin:`manage.py loaddata `, you can also -specify an absolute path to a fixture file, which overrides searching -the usual directories. +specify a path to a fixture file, which overrides searching the usual +directories. .. seealso:: diff --git a/tests/fixtures_regress/models.py b/tests/fixtures_regress/models.py index ab4fb8750c..4b33cef09b 100644 --- a/tests/fixtures_regress/models.py +++ b/tests/fixtures_regress/models.py @@ -39,12 +39,6 @@ class Stuff(models.Model): class Absolute(models.Model): name = models.CharField(max_length=40) - load_count = 0 - - def __init__(self, *args, **kwargs): - super(Absolute, self).__init__(*args, **kwargs) - Absolute.load_count += 1 - class Parent(models.Model): name = models.CharField(max_length=10) diff --git a/tests/fixtures_regress/tests.py b/tests/fixtures_regress/tests.py index e2985d3350..334aa6cadc 100644 --- a/tests/fixtures_regress/tests.py +++ b/tests/fixtures_regress/tests.py @@ -148,7 +148,22 @@ class TestFixtures(TestCase): load_absolute_path, verbosity=0, ) - self.assertEqual(Absolute.load_count, 1) + self.assertEqual(Absolute.objects.count(), 1) + + def test_relative_path(self): + directory = os.path.dirname(upath(__file__)) + relative_path = os.path.join('fixtures', 'absolute.json') + cwd = os.getcwd() + try: + os.chdir(directory) + management.call_command( + 'loaddata', + relative_path, + verbosity=0, + ) + finally: + os.chdir(cwd) + self.assertEqual(Absolute.objects.count(), 1) def test_unknown_format(self): """ From 01223840f34ff2eacf1425bc133c347564fe2614 Mon Sep 17 00:00:00 2001 From: Ramiro Morales Date: Tue, 20 Aug 2013 22:17:26 -0300 Subject: [PATCH 149/161] Fixed #18967 -- Don't base64-encode message/rfc822 attachments. Thanks Michael Farrell for the report and his work on the fix. 
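Roughly, the behaviour exercised by the new tests looks like this (example addresses only): an ``EmailMessage``, an ``email.message.Message`` or a string can be attached with the ``message/rfc822`` mimetype, and the nested message is embedded as-is instead of being base64-encoded:

    from django.core.mail import EmailMessage

    child = EmailMessage('Child Subject', 'Child body',
                         'bounce@example.com', ['to@example.com'])
    parent = EmailMessage('Parent Subject', 'Parent body',
                          'bounce@example.com', ['to@example.com'])
    # Attached as a nested message, so the child's headers stay readable
    # in the flattened output rather than disappearing into base64.
    parent.attach(content=child, mimetype='message/rfc822')
    assert 'Child Subject' in parent.message().as_string()
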
--- django/core/mail/message.py | 44 ++++++++++++++++++++++++++++++++++--- docs/topics/email.txt | 14 ++++++++++-- tests/mail/tests.py | 33 ++++++++++++++++++++++++++++ 3 files changed, 86 insertions(+), 5 deletions(-) diff --git a/django/core/mail/message.py b/django/core/mail/message.py index db9023a0bb..9796e59260 100644 --- a/django/core/mail/message.py +++ b/django/core/mail/message.py @@ -4,11 +4,13 @@ import mimetypes import os import random import time -from email import charset as Charset, encoders as Encoders +from email import charset as Charset, encoders as Encoders, message_from_string from email.generator import Generator +from email.message import Message from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart from email.mime.base import MIMEBase +from email.mime.message import MIMEMessage from email.header import Header from email.utils import formatdate, getaddresses, formataddr, parseaddr @@ -118,6 +120,27 @@ def sanitize_address(addr, encoding): return formataddr((nm, addr)) +class SafeMIMEMessage(MIMEMessage): + + def __setitem__(self, name, val): + # message/rfc822 attachments must be ASCII + name, val = forbid_multi_line_headers(name, val, 'ascii') + MIMEMessage.__setitem__(self, name, val) + + def as_string(self, unixfrom=False): + """Return the entire formatted message as a string. + Optional `unixfrom' when True, means include the Unix From_ envelope + header. + + This overrides the default as_string() implementation to not mangle + lines that begin with 'From '. See bug #13433 for details. + """ + fp = six.StringIO() + g = Generator(fp, mangle_from_=False) + g.flatten(self, unixfrom=unixfrom) + return fp.getvalue() + + class SafeMIMEText(MIMEText): def __init__(self, text, subtype, charset): @@ -137,7 +160,7 @@ class SafeMIMEText(MIMEText): lines that begin with 'From '. See bug #13433 for details. """ fp = six.StringIO() - g = Generator(fp, mangle_from_ = False) + g = Generator(fp, mangle_from_=False) g.flatten(self, unixfrom=unixfrom) return fp.getvalue() @@ -161,7 +184,7 @@ class SafeMIMEMultipart(MIMEMultipart): lines that begin with 'From '. See bug #13433 for details. """ fp = six.StringIO() - g = Generator(fp, mangle_from_ = False) + g = Generator(fp, mangle_from_=False) g.flatten(self, unixfrom=unixfrom) return fp.getvalue() @@ -292,11 +315,26 @@ class EmailMessage(object): def _create_mime_attachment(self, content, mimetype): """ Converts the content, mimetype pair into a MIME attachment object. + + If the mimetype is message/rfc822, content may be an + email.Message or EmailMessage object, as well as a str. """ basetype, subtype = mimetype.split('/', 1) if basetype == 'text': encoding = self.encoding or settings.DEFAULT_CHARSET attachment = SafeMIMEText(content, subtype, encoding) + elif basetype == 'message' and subtype == 'rfc822': + # Bug #18967: per RFC2046 s5.2.1, message/rfc822 attachments + # must not be base64 encoded. + if isinstance(content, EmailMessage): + # convert content into an email.Message first + content = content.message() + elif not isinstance(content, Message): + # For compatibility with existing code, parse the message + # into a email.Message object if it is not one already. + content = message_from_string(content) + + attachment = SafeMIMEMessage(content, subtype) else: # Encode non-text attachments with base64. 
attachment = MIMEBase(basetype, subtype) diff --git a/docs/topics/email.txt b/docs/topics/email.txt index c007c2b856..ebbb0963f4 100644 --- a/docs/topics/email.txt +++ b/docs/topics/email.txt @@ -319,6 +319,18 @@ The class has the following methods: message.attach('design.png', img_data, 'image/png') + .. versionchanged:: 1.7 + + If you specify a ``mimetype`` of ``message/rfc822``, it will also accept + :class:`django.core.mail.EmailMessage` and :py:class:`email.message.Message`. + + In addition, ``message/rfc822`` attachments will no longer be + base64-encoded in violation of :rfc:`2046#section-5.2.1`, which can cause + issues with displaying the attachments in `Evolution`__ and `Thunderbird`__. + + __ https://bugzilla.gnome.org/show_bug.cgi?id=651197 + __ https://bugzilla.mozilla.org/show_bug.cgi?id=333880 + * ``attach_file()`` creates a new attachment using a file from your filesystem. Call it with the path of the file to attach and, optionally, the MIME type to use for the attachment. If the MIME type is omitted, it @@ -326,8 +338,6 @@ The class has the following methods: message.attach_file('/images/weather_map.png') -.. _DEFAULT_FROM_EMAIL: ../settings/#default-from-email - Sending alternative content types ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests/mail/tests.py b/tests/mail/tests.py index 0f85cc0c76..2ba428e359 100644 --- a/tests/mail/tests.py +++ b/tests/mail/tests.py @@ -331,6 +331,39 @@ class MailTests(TestCase): self.assertFalse(str('Content-Transfer-Encoding: quoted-printable') in s) self.assertTrue(str('Content-Transfer-Encoding: 8bit') in s) + def test_dont_base64_encode_message_rfc822(self): + # Ticket #18967 + # Shouldn't use base64 encoding for a child EmailMessage attachment. + # Create a child message first + child_msg = EmailMessage('Child Subject', 'Some body of child message', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'}) + child_s = child_msg.message().as_string() + + # Now create a parent + parent_msg = EmailMessage('Parent Subject', 'Some parent body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'}) + + # Attach to parent as a string + parent_msg.attach(content=child_s, mimetype='message/rfc822') + parent_s = parent_msg.message().as_string() + + # Verify that the child message header is not base64 encoded + self.assertTrue(str('Child Subject') in parent_s) + + # Feature test: try attaching email.Message object directly to the mail. + parent_msg = EmailMessage('Parent Subject', 'Some parent body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'}) + parent_msg.attach(content=child_msg.message(), mimetype='message/rfc822') + parent_s = parent_msg.message().as_string() + + # Verify that the child message header is not base64 encoded + self.assertTrue(str('Child Subject') in parent_s) + + # Feature test: try attaching Django's EmailMessage object directly to the mail. 
+ parent_msg = EmailMessage('Parent Subject', 'Some parent body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'}) + parent_msg.attach(content=child_msg, mimetype='message/rfc822') + parent_s = parent_msg.message().as_string() + + # Verify that the child message header is not base64 encoded + self.assertTrue(str('Child Subject') in parent_s) + class BaseEmailBackendTests(object): email_backend = None From c5fbd1636203e697c214dcad54e4f3db1d7c9685 Mon Sep 17 00:00:00 2001 From: Ramiro Morales Date: Tue, 20 Aug 2013 14:59:23 -0300 Subject: [PATCH 150/161] Import test case classes from their public API module. --- django/contrib/messages/tests/test_mixins.py | 2 +- tests/deprecation/tests.py | 2 +- tests/runtests.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/django/contrib/messages/tests/test_mixins.py b/django/contrib/messages/tests/test_mixins.py index 8eef4cb3dc..a24d580bd8 100644 --- a/django/contrib/messages/tests/test_mixins.py +++ b/django/contrib/messages/tests/test_mixins.py @@ -1,4 +1,4 @@ -from django.test.testcases import TestCase +from django.test import TestCase from django.contrib.messages.tests.urls import ContactFormViewWithMsg from django.core.urlresolvers import reverse diff --git a/tests/deprecation/tests.py b/tests/deprecation/tests.py index fda780c7e6..719bd635db 100644 --- a/tests/deprecation/tests.py +++ b/tests/deprecation/tests.py @@ -1,7 +1,7 @@ from __future__ import unicode_literals import warnings -from django.test.testcases import SimpleTestCase +from django.test import SimpleTestCase from django.utils import six from django.utils.deprecation import RenameMethodsBase diff --git a/tests/runtests.py b/tests/runtests.py index adfc77b13b..5053b4f54c 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -97,7 +97,7 @@ def get_installed(): def setup(verbosity, test_labels): from django.conf import settings from django.db.models.loading import get_apps, load_app - from django.test.testcases import TransactionTestCase, TestCase + from django.test import TransactionTestCase, TestCase # Force declaring available_apps in TransactionTestCase for faster tests. def no_available_apps(self): From 7775ced938da18066da73adba322be1a49be3e6d Mon Sep 17 00:00:00 2001 From: Kevin Christopher Henry Date: Tue, 20 Aug 2013 23:22:25 -0400 Subject: [PATCH 151/161] Documentation - Noted that OneToOneField doesn't respect unique. Added OneToOneField to the list of model fields for which the unique argument isn't valid. (OneToOneFields are inherently unique, and if the user supplies a value for unique it is ignored / overwritten.) --- docs/ref/models/fields.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/ref/models/fields.txt b/docs/ref/models/fields.txt index a9673ce3d2..6ef487e90f 100644 --- a/docs/ref/models/fields.txt +++ b/docs/ref/models/fields.txt @@ -281,8 +281,8 @@ you try to save a model with a duplicate value in a :attr:`~Field.unique` field, a :exc:`django.db.IntegrityError` will be raised by the model's :meth:`~django.db.models.Model.save` method. -This option is valid on all field types except :class:`ManyToManyField` and -:class:`FileField`. +This option is valid on all field types except :class:`ManyToManyField`, +:class:`OneToOneField`, and :class:`FileField`. Note that when ``unique`` is ``True``, you don't need to specify :attr:`~Field.db_index`, because ``unique`` implies the creation of an index. 
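A short illustration of why ``OneToOneField`` belongs in that list (hypothetical models): the field is implemented as a unique foreign key, so an explicit ``unique`` argument is redundant at best and is effectively ignored:

    from django.db import models

    class Place(models.Model):
        name = models.CharField(max_length=50)

    class Restaurant(models.Model):
        # Already unique at the database level; unique=True adds nothing.
        place = models.OneToOneField(Place)
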
From 244e2b71f512605f3d0a8e1ba4c9d6b538acf69d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anssi=20K=C3=A4=C3=A4ri=C3=A4inen?= Date: Tue, 20 Aug 2013 17:13:41 +0300 Subject: [PATCH 152/161] Fixed #20946 -- model inheritance + m2m failure Cleaned up the internal implementation of m2m fields by removing related.py _get_fk_val(). The _get_fk_val() was doing the wrong thing if asked for the foreign key value on foreign key to parent model's primary key when child model had different primary key field. --- django/db/models/fields/related.py | 38 ++++++++++++------------------ tests/model_inheritance/models.py | 6 +++++ tests/model_inheritance/tests.py | 16 ++++++++++++- 3 files changed, 36 insertions(+), 24 deletions(-) diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 6c11df4cbd..4ff93e701f 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -501,8 +501,6 @@ def create_many_related_manager(superclass, rel): self.through = through self.prefetch_cache_name = prefetch_cache_name self.related_val = source_field.get_foreign_related_value(instance) - # Used for single column related auto created models - self._fk_val = self.related_val[0] if None in self.related_val: raise ValueError('"%r" needs to have a value for field "%s" before ' 'this many-to-many relationship can be used.' % @@ -515,18 +513,6 @@ def create_many_related_manager(superclass, rel): "a many-to-many relationship can be used." % instance.__class__.__name__) - def _get_fk_val(self, obj, field_name): - """ - Returns the correct value for this relationship's foreign key. This - might be something else than pk value when to_field is used. - """ - fk = self.through._meta.get_field(field_name) - if fk.rel.field_name and fk.rel.field_name != fk.rel.to._meta.pk.attname: - attname = fk.rel.get_related_field().get_attname() - return fk.get_prep_lookup('exact', getattr(obj, attname)) - else: - return obj.pk - def get_queryset(self): try: return self.instance._prefetched_objects_cache[self.prefetch_cache_name] @@ -624,11 +610,12 @@ def create_many_related_manager(superclass, rel): if not router.allow_relation(obj, self.instance): raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' % (obj, self.instance._state.db, obj._state.db)) - fk_val = self._get_fk_val(obj, target_field_name) + fk_val = self.through._meta.get_field( + target_field_name).get_foreign_related_value(obj)[0] if fk_val is None: raise ValueError('Cannot add "%r": the value for field "%s" is None' % (obj, target_field_name)) - new_ids.add(self._get_fk_val(obj, target_field_name)) + new_ids.add(fk_val) elif isinstance(obj, Model): raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj)) else: @@ -636,7 +623,7 @@ def create_many_related_manager(superclass, rel): db = router.db_for_write(self.through, instance=self.instance) vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True) vals = vals.filter(**{ - source_field_name: self._fk_val, + source_field_name: self.related_val[0], '%s__in' % target_field_name: new_ids, }) new_ids = new_ids - set(vals) @@ -650,7 +637,7 @@ def create_many_related_manager(superclass, rel): # Add the ones that aren't there already self.through._default_manager.using(db).bulk_create([ self.through(**{ - '%s_id' % source_field_name: self._fk_val, + '%s_id' % source_field_name: self.related_val[0], '%s_id' % target_field_name: obj_id, }) for obj_id in new_ids @@ -674,7 +661,9 @@ def 
create_many_related_manager(superclass, rel): old_ids = set() for obj in objs: if isinstance(obj, self.model): - old_ids.add(self._get_fk_val(obj, target_field_name)) + fk_val = self.through._meta.get_field( + target_field_name).get_foreign_related_value(obj)[0] + old_ids.add(fk_val) else: old_ids.add(obj) # Work out what DB we're operating on @@ -688,7 +677,7 @@ def create_many_related_manager(superclass, rel): model=self.model, pk_set=old_ids, using=db) # Remove the specified objects from the join table self.through._default_manager.using(db).filter(**{ - source_field_name: self._fk_val, + source_field_name: self.related_val[0], '%s__in' % target_field_name: old_ids }).delete() if self.reverse or source_field_name == self.source_field_name: @@ -994,10 +983,13 @@ class ForeignObject(RelatedField): # Gotcha: in some cases (like fixture loading) a model can have # different values in parent_ptr_id and parent's id. So, use # instance.pk (that is, parent_ptr_id) when asked for instance.id. + opts = instance._meta if field.primary_key: - ret.append(instance.pk) - else: - ret.append(getattr(instance, field.attname)) + possible_parent_link = opts.get_ancestor_link(field.model) + if not possible_parent_link or possible_parent_link.primary_key: + ret.append(instance.pk) + continue + ret.append(getattr(instance, field.attname)) return tuple(ret) def get_attname_column(self): diff --git a/tests/model_inheritance/models.py b/tests/model_inheritance/models.py index 106645d23c..020bb35bc7 100644 --- a/tests/model_inheritance/models.py +++ b/tests/model_inheritance/models.py @@ -162,3 +162,9 @@ class Mixin(object): class MixinModel(models.Model, Mixin): pass + +class Base(models.Model): + titles = models.ManyToManyField(Title) + +class SubBase(Base): + sub_id = models.IntegerField(primary_key=True) diff --git a/tests/model_inheritance/tests.py b/tests/model_inheritance/tests.py index b8ab0c8581..dab3088a41 100644 --- a/tests/model_inheritance/tests.py +++ b/tests/model_inheritance/tests.py @@ -10,7 +10,8 @@ from django.utils import six from .models import ( Chef, CommonInfo, ItalianRestaurant, ParkingLot, Place, Post, - Restaurant, Student, StudentWorker, Supplier, Worker, MixinModel) + Restaurant, Student, StudentWorker, Supplier, Worker, MixinModel, + Title, Base, SubBase) class ModelInheritanceTests(TestCase): @@ -357,3 +358,16 @@ class ModelInheritanceTests(TestCase): [Place.objects.get(pk=s.pk)], lambda x: x ) + + def test_custompk_m2m(self): + b = Base.objects.create() + b.titles.add(Title.objects.create(title="foof")) + s = SubBase.objects.create(sub_id=b.id) + b = Base.objects.get(pk=s.id) + self.assertNotEqual(b.pk, s.pk) + # Low-level test for related_val + self.assertEqual(s.titles.related_val, (s.id,)) + # Higher level test for correct query values (title foof not + # accidentally found). + self.assertQuerysetEqual( + s.titles.all(), []) From 8e571e5f8f21d87ab5a5462730289c755b8022d3 Mon Sep 17 00:00:00 2001 From: Ramiro Morales Date: Mon, 19 Aug 2013 20:04:50 -0300 Subject: [PATCH 153/161] Fixed #12422 -- Don't override global email charset behavior for utf-8. Thanks simonb for the report, Claude Paroz and Susan Tan for their work on a fix. 
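The approach, sketched below as a standalone snippet rather than the exact Django code, is to stop calling ``Charset.add_charset('utf-8', ...)`` (which changes behaviour for every user of the ``email`` package in the process) and instead keep a private ``Charset`` instance with body encoding disabled, applied only to Django's own text parts:

    from email import charset as Charset
    from email.mime.text import MIMEText

    # Private charset: Django's bodies pass through unencoded.
    utf8_charset = Charset.Charset('utf-8')
    utf8_charset.body_encoding = None  # registry default would be BASE64

    msg = MIMEText('Django-generated body', 'plain', None)
    del msg['Content-Transfer-Encoding']
    msg.set_payload('Django-generated body', utf8_charset)
    msg.replace_header('Content-Type', 'text/plain; charset="utf-8"')

    # Code elsewhere still gets the stock behaviour: a plain
    # MIMEText(..., 'utf-8') is base64-encoded, as the new tests assert.
    other = MIMEText('Some other body', 'plain', 'utf-8')
    assert other['Content-Transfer-Encoding'] == 'base64'
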
--- django/core/mail/message.py | 14 +++++- tests/mail/tests.py | 97 +++++++++++++++++++++++++++++++++---- 2 files changed, 99 insertions(+), 12 deletions(-) diff --git a/django/core/mail/message.py b/django/core/mail/message.py index 9796e59260..a24817ac90 100644 --- a/django/core/mail/message.py +++ b/django/core/mail/message.py @@ -22,7 +22,8 @@ from django.utils import six # Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from # some spam filters. -Charset.add_charset('utf-8', Charset.SHORTEST, None, 'utf-8') +utf8_charset = Charset.Charset('utf-8') +utf8_charset.body_encoding = None # Python defaults to BASE64 # Default MIME type to use on attachments (if it is not explicitly given # and cannot be guessed). @@ -145,7 +146,16 @@ class SafeMIMEText(MIMEText): def __init__(self, text, subtype, charset): self.encoding = charset - MIMEText.__init__(self, text, subtype, charset) + if charset == 'utf-8': + # Unfortunately, Python doesn't support setting a Charset instance + # as MIMEText init parameter (http://bugs.python.org/issue16324). + # We do it manually and trigger re-encoding of the payload. + MIMEText.__init__(self, text, subtype, None) + del self['Content-Transfer-Encoding'] + self.set_payload(text, utf8_charset) + self.replace_header('Content-Type', 'text/%s; charset="%s"' % (subtype, charset)) + else: + MIMEText.__init__(self, text, subtype, charset) def __setitem__(self, name, val): name, val = forbid_multi_line_headers(name, val, self.encoding) diff --git a/tests/mail/tests.py b/tests/mail/tests.py index 2ba428e359..71733d69ae 100644 --- a/tests/mail/tests.py +++ b/tests/mail/tests.py @@ -3,6 +3,7 @@ from __future__ import unicode_literals import asyncore import email +from email.mime.text import MIMEText import os import shutil import smtpd @@ -20,11 +21,32 @@ from django.core.mail.message import BadHeaderError from django.test import TestCase from django.test.utils import override_settings from django.utils.encoding import force_str, force_text -from django.utils.six import PY3, StringIO +from django.utils.six import PY3, StringIO, string_types from django.utils.translation import ugettext_lazy -class MailTests(TestCase): +class HeadersCheckMixin(object): + + def assertMessageHasHeaders(self, message, headers): + """ + Check that :param message: has all :param headers: headers. + + :param message: can be an instance of an email.Message subclass or a + string with the contens of an email message. + :param headers: should be a set of (header-name, header-value) tuples. + """ + if isinstance(message, string_types): + just_headers = message.split('\n\n', 1)[0] + hlist = just_headers.split('\n') + pairs = [hl.split(':', 1) for hl in hlist] + msg_headers = {(n, v.lstrip()) for (n, v) in pairs} + else: + msg_headers = set(message.items()) + self.assertTrue(headers.issubset(msg_headers), msg='Message is missing ' + 'the following headers: %s' % (headers - msg_headers),) + + +class MailTests(HeadersCheckMixin, TestCase): """ Non-backend specific tests. 
""" @@ -93,7 +115,7 @@ class MailTests(TestCase): headers = {"date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"} email = EmailMessage('subject', 'content', 'from@example.com', ['to@example.com'], headers=headers) - self.assertEqual(sorted(email.message().items()), [ + self.assertMessageHasHeaders(email.message(), { ('Content-Transfer-Encoding', '7bit'), ('Content-Type', 'text/plain; charset="utf-8"'), ('From', 'from@example.com'), @@ -102,7 +124,7 @@ class MailTests(TestCase): ('Subject', 'subject'), ('To', 'to@example.com'), ('date', 'Fri, 09 Nov 2001 01:08:47 -0000'), - ]) + }) def test_from_header(self): """ @@ -184,7 +206,13 @@ class MailTests(TestCase): email = EmailMessage('Subject', 'Firstname Sürname is a great guy.', 'from@example.com', ['other@example.com']) email.encoding = 'iso-8859-1' message = email.message() - self.assertTrue(message.as_string().startswith('Content-Type: text/plain; charset="iso-8859-1"\nMIME-Version: 1.0\nContent-Transfer-Encoding: quoted-printable\nSubject: Subject\nFrom: from@example.com\nTo: other@example.com')) + self.assertMessageHasHeaders(message, { + ('MIME-Version', '1.0'), + ('Content-Type', 'text/plain; charset="iso-8859-1"'), + ('Content-Transfer-Encoding', 'quoted-printable'), + ('Subject', 'Subject'), + ('From', 'from@example.com'), + ('To', 'other@example.com')}) self.assertEqual(message.get_payload(), 'Firstname S=FCrname is a great guy.') # Make sure MIME attachments also works correctly with other encodings than utf-8 @@ -193,8 +221,18 @@ class MailTests(TestCase): msg = EmailMultiAlternatives('Subject', text_content, 'from@example.com', ['to@example.com']) msg.encoding = 'iso-8859-1' msg.attach_alternative(html_content, "text/html") - self.assertEqual(msg.message().get_payload(0).as_string(), 'Content-Type: text/plain; charset="iso-8859-1"\nMIME-Version: 1.0\nContent-Transfer-Encoding: quoted-printable\n\nFirstname S=FCrname is a great guy.') - self.assertEqual(msg.message().get_payload(1).as_string(), 'Content-Type: text/html; charset="iso-8859-1"\nMIME-Version: 1.0\nContent-Transfer-Encoding: quoted-printable\n\n
<p>Firstname S=FCrname is a <strong>great</strong> guy.</p>
') + payload0 = msg.message().get_payload(0) + self.assertMessageHasHeaders(payload0, { + ('MIME-Version', '1.0'), + ('Content-Type', 'text/plain; charset="iso-8859-1"'), + ('Content-Transfer-Encoding', 'quoted-printable')}) + self.assertTrue(payload0.as_string().endswith('\n\nFirstname S=FCrname is a great guy.')) + payload1 = msg.message().get_payload(1) + self.assertMessageHasHeaders(payload1, { + ('MIME-Version', '1.0'), + ('Content-Type', 'text/html; charset="iso-8859-1"'), + ('Content-Transfer-Encoding', 'quoted-printable')}) + self.assertTrue(payload1.as_string().endswith('\n\n
<p>Firstname S=FCrname is a <strong>great</strong> guy.</p>
')) def test_attachments(self): """Regression test for #9367""" @@ -365,7 +403,31 @@ class MailTests(TestCase): self.assertTrue(str('Child Subject') in parent_s) -class BaseEmailBackendTests(object): +class PythonGlobalState(TestCase): + """ + Tests for #12422 -- Django smarts (#2472/#11212) with charset of utf-8 text + parts shouldn't pollute global email Python package charset registry when + django.mail.message is imported. + """ + + def test_utf8(self): + txt = MIMEText('UTF-8 encoded body', 'plain', 'utf-8') + self.assertTrue('Content-Transfer-Encoding: base64' in txt.as_string()) + + def test_7bit(self): + txt = MIMEText('Body with only ASCII characters.', 'plain', 'utf-8') + self.assertTrue('Content-Transfer-Encoding: base64' in txt.as_string()) + + def test_8bit_latin(self): + txt = MIMEText('Body with latin characters: àáä.', 'plain', 'utf-8') + self.assertTrue(str('Content-Transfer-Encoding: base64') in txt.as_string()) + + def test_8bit_non_latin(self): + txt = MIMEText('Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', 'plain', 'utf-8') + self.assertTrue(str('Content-Transfer-Encoding: base64') in txt.as_string()) + + +class BaseEmailBackendTests(HeadersCheckMixin, object): email_backend = None def setUp(self): @@ -523,7 +585,15 @@ class BaseEmailBackendTests(object): email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com'], cc=['cc@example.com']) mail.get_connection().send_messages([email]) message = self.get_the_message() - self.assertStartsWith(message.as_string(), 'Content-Type: text/plain; charset="utf-8"\nMIME-Version: 1.0\nContent-Transfer-Encoding: 7bit\nSubject: Subject\nFrom: from@example.com\nTo: to@example.com\nCc: cc@example.com\nDate: ') + self.assertMessageHasHeaders(message, { + ('MIME-Version', '1.0'), + ('Content-Type', 'text/plain; charset="utf-8"'), + ('Content-Transfer-Encoding', '7bit'), + ('Subject', 'Subject'), + ('From', 'from@example.com'), + ('To', 'to@example.com'), + ('Cc', 'cc@example.com')}) + self.assertIn('\nDate: ', message.as_string()) def test_idn_send(self): """ @@ -681,7 +751,14 @@ class ConsoleBackendTests(BaseEmailBackendTests, TestCase): s = StringIO() connection = mail.get_connection('django.core.mail.backends.console.EmailBackend', stream=s) send_mail('Subject', 'Content', 'from@example.com', ['to@example.com'], connection=connection) - self.assertTrue(s.getvalue().startswith('Content-Type: text/plain; charset="utf-8"\nMIME-Version: 1.0\nContent-Transfer-Encoding: 7bit\nSubject: Subject\nFrom: from@example.com\nTo: to@example.com\nDate: ')) + self.assertMessageHasHeaders(s.getvalue(), { + ('MIME-Version', '1.0'), + ('Content-Type', 'text/plain; charset="utf-8"'), + ('Content-Transfer-Encoding', '7bit'), + ('Subject', 'Subject'), + ('From', 'from@example.com'), + ('To', 'to@example.com')}) + self.assertIn('\nDate: ', s.getvalue()) class FakeSMTPChannel(smtpd.SMTPChannel): From c7364a11f6df8d1c2d340a52e6e58e9a76ceb44f Mon Sep 17 00:00:00 2001 From: Ramiro Morales Date: Wed, 21 Aug 2013 07:48:16 -0300 Subject: [PATCH 154/161] Switched mail tests to SimpleTestCase. 
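
These mail tests build messages and exercise the locmem, file, console and SMTP
backends, none of which touch the database, so the per-test transaction
machinery of TestCase is unnecessary overhead. A minimal sketch of the pattern
(the class and test names below are illustrative only and are not part of this
patch), assuming the locmem backend so that sent mail is collected in
mail.outbox:

    # Illustrative sketch, not part of this patch.
    from django.core import mail
    from django.test import SimpleTestCase
    from django.test.utils import override_settings

    @override_settings(EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend')
    class OutboxSketchTests(SimpleTestCase):
        def test_sent_mail_is_collected_in_memory(self):
            # The locmem backend appends each sent message to mail.outbox
            # instead of delivering it, so no database or network is needed.
            mail.send_mail('Subject', 'Content', 'from@example.com', ['to@example.com'])
            self.assertEqual(len(mail.outbox), 1)
            self.assertEqual(mail.outbox[0].subject, 'Subject')

SimpleTestCase still provides the settings-override and assertion helpers these
tests rely on, without wrapping every test in a transaction.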
--- tests/mail/tests.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/mail/tests.py b/tests/mail/tests.py index 71733d69ae..bb57ca37ff 100644 --- a/tests/mail/tests.py +++ b/tests/mail/tests.py @@ -18,7 +18,7 @@ from django.core.mail import (EmailMessage, mail_admins, mail_managers, EmailMultiAlternatives, send_mail, send_mass_mail) from django.core.mail.backends import console, dummy, locmem, filebased, smtp from django.core.mail.message import BadHeaderError -from django.test import TestCase +from django.test import SimpleTestCase from django.test.utils import override_settings from django.utils.encoding import force_str, force_text from django.utils.six import PY3, StringIO, string_types @@ -46,7 +46,7 @@ class HeadersCheckMixin(object): 'the following headers: %s' % (headers - msg_headers),) -class MailTests(HeadersCheckMixin, TestCase): +class MailTests(HeadersCheckMixin, SimpleTestCase): """ Non-backend specific tests. """ @@ -403,7 +403,7 @@ class MailTests(HeadersCheckMixin, TestCase): self.assertTrue(str('Child Subject') in parent_s) -class PythonGlobalState(TestCase): +class PythonGlobalState(SimpleTestCase): """ Tests for #12422 -- Django smarts (#2472/#11212) with charset of utf-8 text parts shouldn't pollute global email Python package charset registry when @@ -636,7 +636,7 @@ class BaseEmailBackendTests(HeadersCheckMixin, object): self.fail("close() unexpectedly raised an exception: %s" % e) -class LocmemBackendTests(BaseEmailBackendTests, TestCase): +class LocmemBackendTests(BaseEmailBackendTests, SimpleTestCase): email_backend = 'django.core.mail.backends.locmem.EmailBackend' def get_mailbox_content(self): @@ -666,7 +666,7 @@ class LocmemBackendTests(BaseEmailBackendTests, TestCase): send_mail('Subject\nMultiline', 'Content', 'from@example.com', ['to@example.com']) -class FileBackendTests(BaseEmailBackendTests, TestCase): +class FileBackendTests(BaseEmailBackendTests, SimpleTestCase): email_backend = 'django.core.mail.backends.filebased.EmailBackend' def setUp(self): @@ -723,7 +723,7 @@ class FileBackendTests(BaseEmailBackendTests, TestCase): connection.close() -class ConsoleBackendTests(BaseEmailBackendTests, TestCase): +class ConsoleBackendTests(BaseEmailBackendTests, SimpleTestCase): email_backend = 'django.core.mail.backends.console.EmailBackend' def setUp(self): @@ -826,7 +826,7 @@ class FakeSMTPServer(smtpd.SMTPServer, threading.Thread): self.join() -class SMTPBackendTests(BaseEmailBackendTests, TestCase): +class SMTPBackendTests(BaseEmailBackendTests, SimpleTestCase): email_backend = 'django.core.mail.backends.smtp.EmailBackend' @classmethod From 3f416f637918cc162877be95a59d50825b203089 Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Tue, 20 Aug 2013 14:13:43 -0400 Subject: [PATCH 155/161] Fixed a regression with get_or_create and virtual fields. refs #20429 Thanks Simon Charette for the report and review. --- django/db/models/query.py | 20 ++++++++------------ tests/generic_relations/tests.py | 23 +++++++++++++++++++++++ 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/django/db/models/query.py b/django/db/models/query.py index 836d394e9b..67780a4991 100644 --- a/django/db/models/query.py +++ b/django/db/models/query.py @@ -411,7 +411,7 @@ class QuerySet(object): Returns a tuple of (object, created), where created is a boolean specifying whether an object was created. 
""" - lookup, params, _ = self._extract_model_params(defaults, **kwargs) + lookup, params = self._extract_model_params(defaults, **kwargs) self._for_write = True try: return self.get(**lookup), False @@ -425,7 +425,8 @@ class QuerySet(object): Returns a tuple (object, created), where created is a boolean specifying whether an object was created. """ - lookup, params, filtered_defaults = self._extract_model_params(defaults, **kwargs) + defaults = defaults or {} + lookup, params = self._extract_model_params(defaults, **kwargs) self._for_write = True try: obj = self.get(**lookup) @@ -433,12 +434,12 @@ class QuerySet(object): obj, created = self._create_object_from_params(lookup, params) if created: return obj, created - for k, v in six.iteritems(filtered_defaults): + for k, v in six.iteritems(defaults): setattr(obj, k, v) sid = transaction.savepoint(using=self.db) try: - obj.save(update_fields=filtered_defaults.keys(), using=self.db) + obj.save(using=self.db) transaction.savepoint_commit(sid, using=self.db) return obj, False except DatabaseError: @@ -469,22 +470,17 @@ class QuerySet(object): def _extract_model_params(self, defaults, **kwargs): """ Prepares `lookup` (kwargs that are valid model attributes), `params` - (for creating a model instance) and `filtered_defaults` (defaults - that are valid model attributes) based on given kwargs; for use by + (for creating a model instance) based on given kwargs; for use by get_or_create and update_or_create. """ defaults = defaults or {} - filtered_defaults = {} lookup = kwargs.copy() for f in self.model._meta.fields: - # Filter out fields that don't belongs to the model. if f.attname in lookup: lookup[f.name] = lookup.pop(f.attname) - if f.attname in defaults: - filtered_defaults[f.name] = defaults.pop(f.attname) params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k) - params.update(filtered_defaults) - return lookup, params, filtered_defaults + params.update(defaults) + return lookup, params def _earliest_or_latest(self, field_name=None, direction="-"): """ diff --git a/tests/generic_relations/tests.py b/tests/generic_relations/tests.py index 2b52ebac56..253eb76e32 100644 --- a/tests/generic_relations/tests.py +++ b/tests/generic_relations/tests.py @@ -263,6 +263,29 @@ class GenericRelationsTests(TestCase): formset = GenericFormSet(initial=initial_data) self.assertEqual(formset.forms[0].initial, initial_data[0]) + def test_get_or_create(self): + # get_or_create should work with virtual fields (content_object) + quartz = Mineral.objects.create(name="Quartz", hardness=7) + tag, created = TaggedItem.objects.get_or_create(tag="shiny", + defaults={'content_object': quartz}) + self.assertTrue(created) + self.assertEqual(tag.tag, "shiny") + self.assertEqual(tag.content_object.id, quartz.id) + + def test_update_or_create_defaults(self): + # update_or_create should work with virtual fields (content_object) + quartz = Mineral.objects.create(name="Quartz", hardness=7) + diamond = Mineral.objects.create(name="Diamond", hardness=7) + tag, created = TaggedItem.objects.update_or_create(tag="shiny", + defaults={'content_object': quartz}) + self.assertTrue(created) + self.assertEqual(tag.content_object.id, quartz.id) + + tag, created = TaggedItem.objects.update_or_create(tag="shiny", + defaults={'content_object': diamond}) + self.assertFalse(created) + self.assertEqual(tag.content_object.id, diamond.id) + class CustomWidget(forms.TextInput): pass From cb5c0bec14fd9be19daa84a7b21cf29f7a19ff3b Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Wed, 21 
Aug 2013 09:01:52 -0400 Subject: [PATCH 156/161] Fixed docstring typo, thanks minddust. --- django/template/loader_tags.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/template/loader_tags.py b/django/template/loader_tags.py index 406775da9d..63ddbd4a6a 100644 --- a/django/template/loader_tags.py +++ b/django/template/loader_tags.py @@ -204,7 +204,7 @@ def do_extends(parser, token): uses the literal value "base" as the name of the parent template to extend, or ``{% extends variable %}`` uses the value of ``variable`` as either the name of the parent template to extend (if it evaluates to a string) or as - the parent tempate itelf (if it evaluates to a Template object). + the parent tempate itself (if it evaluates to a Template object). """ bits = token.split_contents() if len(bits) != 2: From 3e20a8856bf72e2f921b3a39440bed8336cb9713 Mon Sep 17 00:00:00 2001 From: evildmp Date: Tue, 20 Aug 2013 22:56:39 +0200 Subject: [PATCH 157/161] Added myself to the committers list. --- AUTHORS | 2 +- docs/internals/committers.txt | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/AUTHORS b/AUTHORS index 059310d5c6..c343eeb67b 100644 --- a/AUTHORS +++ b/AUTHORS @@ -46,6 +46,7 @@ The PRIMARY AUTHORS are (and/or have been): * Daniel Lindsley * Marc Tamlyn * Baptiste Mispelon + * Daniele Procida More information on the main contributors to Django can be found in docs/internals/committers.txt. @@ -478,7 +479,6 @@ answer newbie questions, and generally made Django that much better: polpak@yahoo.com Ross Poulton Mihai Preda - Daniele Procida Matthias Pronk Jyrki Pulliainen Thejaswi Puthraya diff --git a/docs/internals/committers.txt b/docs/internals/committers.txt index 6732f1561f..cc0a59e44a 100644 --- a/docs/internals/committers.txt +++ b/docs/internals/committers.txt @@ -537,6 +537,20 @@ Baptiste Mispelon .. _M2BPO: http://www.m2bpo.fr +`Daniele Procida`_ + Daniele works at Cardiff University `School of Medicine`_. He unexpectedly + became a Django developer on 29th April 2009. Since then he has relied + daily on Django's documentation, which has been a constant companion to + him. More recently he has been able to contribute back to the project by + helping improve the documentation itself. + + He is the author of `Arkestra`_ and `Don't be afraid to commit`_. + +.. _Daniele Procida: http://medicine.cf.ac.uk/person/mr-daniele-marco-procida/ +.. _School of Medicine: http://medicine.cf.ac.uk/ +.. _Arkestra: http://arkestra-project.org/ +.. _Don\'t be afraid to commit: https://dont-be-afraid-to-commit.readthedocs.org + Developers Emeritus =================== From 2d903929a70078cf1c50bf7b6e7718b1a4a691df Mon Sep 17 00:00:00 2001 From: Tim Graham Date: Wed, 21 Aug 2013 10:49:50 -0400 Subject: [PATCH 158/161] Fixed #20949 -- Typo #2 in docstring --- django/template/loader_tags.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/django/template/loader_tags.py b/django/template/loader_tags.py index 63ddbd4a6a..d48f85eb35 100644 --- a/django/template/loader_tags.py +++ b/django/template/loader_tags.py @@ -204,7 +204,7 @@ def do_extends(parser, token): uses the literal value "base" as the name of the parent template to extend, or ``{% extends variable %}`` uses the value of ``variable`` as either the name of the parent template to extend (if it evaluates to a string) or as - the parent tempate itself (if it evaluates to a Template object). + the parent template itself (if it evaluates to a Template object). 
""" bits = token.split_contents() if len(bits) != 2: From beefac8aaeed0bd8c66e8c7fcbfae1c0f8e01f85 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Wed, 21 Aug 2013 22:27:46 +0100 Subject: [PATCH 159/161] Only create the migration directory once per app --- django/core/management/commands/makemigrations.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py index 9f0690b483..d802e2924a 100644 --- a/django/core/management/commands/makemigrations.py +++ b/django/core/management/commands/makemigrations.py @@ -61,6 +61,7 @@ class Command(BaseCommand): self.stdout.write("No changes detected") return + directory_created = {} for app_label, migrations in changes.items(): self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label) + "\n") for migration in migrations: @@ -71,10 +72,13 @@ class Command(BaseCommand): self.stdout.write(" - %s\n" % operation.describe()) # Write it migrations_directory = os.path.dirname(writer.path) - if not os.path.isdir(migrations_directory): - os.mkdir(migrations_directory) - init_path = os.path.join(migrations_directory, "__init__.py") - if not os.path.isfile(init_path): - open(init_path, "w").close() + if not directory_created.get(app_label, False): + if not os.path.isdir(migrations_directory): + os.mkdir(migrations_directory) + init_path = os.path.join(migrations_directory, "__init__.py") + if not os.path.isfile(init_path): + open(init_path, "w").close() + # We just do this once per app + directory_created[app_label] = True with open(writer.path, "w") as fh: fh.write(writer.as_string()) From ac45f9c9c5b5f64556c95d458368185298d21042 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 23 Aug 2013 12:07:43 +0100 Subject: [PATCH 160/161] Fix some small errors in the tests --- tests/migrations/test_base.py | 6 ++++-- tests/migrations/test_executor.py | 5 +++++ tests/migrations/test_operations.py | 13 +++++++------ tests/schema/tests.py | 4 ++-- 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/tests/migrations/test_base.py b/tests/migrations/test_base.py index 01062667aa..7ab09b04a5 100644 --- a/tests/migrations/test_base.py +++ b/tests/migrations/test_base.py @@ -1,12 +1,14 @@ -from django.test import TestCase +from django.test import TransactionTestCase from django.db import connection -class MigrationTestBase(TestCase): +class MigrationTestBase(TransactionTestCase): """ Contains an extended set of asserts for testing migrations and schema operations. """ + available_apps = ["migrations"] + def assertTableExists(self, table): self.assertIn(table, connection.introspection.get_table_list(connection.cursor())) diff --git a/tests/migrations/test_executor.py b/tests/migrations/test_executor.py index 5167f428d1..dbdea900a5 100644 --- a/tests/migrations/test_executor.py +++ b/tests/migrations/test_executor.py @@ -38,6 +38,11 @@ class ExecutorTests(TransactionTestCase): # Are the tables there now? self.assertIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) self.assertIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) + # Alright, let's undo what we did + executor.migrate([("migrations", None)]) + # Are the tables gone? 
+ self.assertNotIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) + self.assertNotIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations", "sessions": "migrations.test_migrations_2"}) def test_empty_plan(self): diff --git a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py index 2ff3f73b8a..1bc4a42d7e 100644 --- a/tests/migrations/test_operations.py +++ b/tests/migrations/test_operations.py @@ -127,12 +127,13 @@ class OperationTests(MigrationTestBase): self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works - app_cache = new_state.render() - Pony = app_cache.get_model("test_adflmm", "Pony") - p = Pony.objects.create(pink=False, weight=4.55) - p.stables.create() - self.assertEqual(p.stables.count(), 1) - p.stables.all().delete() + with atomic(): + app_cache = new_state.render() + Pony = app_cache.get_model("test_adflmm", "Pony") + p = Pony.objects.create(pink=False, weight=4.55) + p.stables.create() + self.assertEqual(p.stables.count(), 1) + p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) diff --git a/tests/schema/tests.py b/tests/schema/tests.py index bf9fa6bbcc..c3764979d6 100644 --- a/tests/schema/tests.py +++ b/tests/schema/tests.py @@ -195,7 +195,7 @@ class SchemaTests(TransactionTestCase): # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") - self.assertEqual(columns['name'][1][6], bool(connection.features.interprets_empty_strings_as_nulls)) + self.assertEqual(bool(columns['name'][1][6]), False) def test_rename(self): """ @@ -230,7 +230,7 @@ class SchemaTests(TransactionTestCase): # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) - editor.create_model(Tag) + editor.create_model(TagM2MTest) editor.create_model(BookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) From 9cc6cfc4057e07b73a1d72a1177d568362b0c517 Mon Sep 17 00:00:00 2001 From: Andrew Godwin Date: Fri, 23 Aug 2013 12:07:55 +0100 Subject: [PATCH 161/161] Fix Oracle's default handling and schema-prepared-statement issue --- django/db/backends/__init__.py | 6 ++++ django/db/backends/oracle/base.py | 2 ++ django/db/backends/oracle/schema.py | 12 +++++++ django/db/backends/schema.py | 51 ++++++++++++++++++++++++----- 4 files changed, 62 insertions(+), 9 deletions(-) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 8d3c09ab1a..6274d5bc55 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -654,6 +654,12 @@ class BaseDatabaseFeatures(object): # supported by the Python driver supports_paramstyle_pyformat = True + # Does the backend require literal defaults, rather than parameterised ones? + requires_literal_defaults = False + + # Does the backend require a connection reset after each material schema change? 
+ connection_persists_old_columns = False + def __init__(self, connection): self.connection = connection diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index e6435a9e96..a363c09001 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -94,6 +94,8 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_combined_alters = False max_index_name_length = 30 nulls_order_largest = True + requires_literal_defaults = True + connection_persists_old_columns = True class DatabaseOperations(BaseDatabaseOperations): diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py index c78294cad5..18d67b254f 100644 --- a/django/db/backends/oracle/schema.py +++ b/django/db/backends/oracle/schema.py @@ -1,4 +1,6 @@ import copy +import datetime +from django.utils import six from django.db.backends.schema import BaseDatabaseSchemaEditor from django.db.utils import DatabaseError @@ -89,3 +91,13 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): """ suffix = hex(hash(for_name)).upper()[1:] return self.normalize_name(for_name + "_" + suffix) + + def prepare_default(self, value): + if isinstance(value, (datetime.date, datetime.time, datetime.datetime)): + return "'%s'" % value + elif isinstance(value, six.string_types): + return repr(value) + elif isinstance(value, bool): + return "1" if value else "0" + else: + return str(value) diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py index 7beae7417a..64098499f6 100644 --- a/django/db/backends/schema.py +++ b/django/db/backends/schema.py @@ -116,8 +116,14 @@ class BaseDatabaseSchemaEditor(object): # If we were told to include a default value, do so default_value = self.effective_default(field) if include_default and default_value is not None: - sql += " DEFAULT %s" - params += [default_value] + if self.connection.features.requires_literal_defaults: + # Some databases can't take defaults as a parameter (oracle) + # If this is the case, the individual schema backend should + # implement prepare_default + sql += " DEFAULT %s" % self.prepare_default(default_value) + else: + sql += " DEFAULT %s" + params += [default_value] # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. 
if (field.empty_strings_allowed and not field.primary_key and @@ -135,6 +141,12 @@ class BaseDatabaseSchemaEditor(object): # Return the sql return sql, params + def prepare_default(self, value): + """ + Only used for backends which have requires_literal_defaults feature + """ + raise NotImplementedError() + def effective_default(self, field): """ Returns a field's effective database default value @@ -385,6 +397,9 @@ class BaseDatabaseSchemaEditor(object): "to_column": self.quote_name(to_column), } ) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() def remove_field(self, model, field): """ @@ -405,6 +420,9 @@ class BaseDatabaseSchemaEditor(object): "column": self.quote_name(field.column), } self.execute(sql) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() def alter_field(self, model, old_field, new_field, strict=False): """ @@ -523,13 +541,25 @@ class BaseDatabaseSchemaEditor(object): [], )) else: - actions.append(( - self.sql_alter_column_default % { - "column": self.quote_name(new_field.column), - "default": "%s", - }, - [new_default], - )) + if self.connection.features.requires_literal_defaults: + # Some databases can't take defaults as a parameter (oracle) + # If this is the case, the individual schema backend should + # implement prepare_default + actions.append(( + self.sql_alter_column_default % { + "column": self.quote_name(new_field.column), + "default": self.prepare_default(new_default), + }, + [], + )) + else: + actions.append(( + self.sql_alter_column_default % { + "column": self.quote_name(new_field.column), + "default": "%s", + }, + [new_default], + )) # Nullability change? if old_field.null != new_field.null: if new_field.null: @@ -628,6 +658,9 @@ class BaseDatabaseSchemaEditor(object): "check": new_db_params['check'], } ) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() def _alter_many_to_many(self, model, old_field, new_field, strict): """