mirror of
https://github.com/django/django.git
synced 2025-07-05 18:29:11 +00:00
schema-evolution: re-applied schema evolution changes from:
http://kered.org/blog/wp-content/uploads/2007/07/django_schema_evolution-svn20070719patch.txt git-svn-id: http://code.djangoproject.com/svn/django/branches/schema-evolution@5735 bcc190cf-cafb-0310-a4f2-bffc1f526a37
This commit is contained in:
parent
365f4b8698
commit
dac5af33de
@ -481,6 +481,228 @@ def get_sql_indexes_for_model(model):
|
||||
)
|
||||
return output
|
||||
|
||||
def get_sql_evolution(app):
    "Returns SQL to update an existing schema to match the existing models."
    from django.db import get_creation_module, models, backend, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES

    if not data_types:
        # This must be the "dummy" database backend, which means the user
        # hasn't set DATABASE_ENGINE.
        sys.stderr.write(style.ERROR("Error: Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the DATABASE_ENGINE setting.\n" +
            "Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.\n"))
        sys.exit(1)

    # First, try validating the models.
    _check_for_validation_errors()

    final_output = []

    # stolen and trimmed from syncdb so that we know which models are about
    # to be created (so we don't check them for updates)
    table_list = _get_table_list()
    seen_models = _get_installed_models(table_list)
    created_models = set()
    # NOTE(review): pending_references is never used below; kept from the
    # syncdb code this was copied from.
    pending_references = {}

    model_list = models.get_models(app)
    for model in model_list:
        # Skip models whose table already exists under either its current
        # name or any former name (Meta 'aka').
        if model._meta.db_table in table_list or model._meta.aka in table_list or len(set(model._meta.aka) & set(table_list))>0:
            continue
        sql, references = _get_sql_model_create(model, seen_models)
        seen_models.add(model)
        created_models.add(model)
        table_list.append(model._meta.db_table)

    introspection = get_introspection_module()
    # This should work even if a connection isn't available
    try:
        cursor = connection.cursor()
    except:
        cursor = None

    # get the existing models, minus the models we've just created
    app_models = models.get_models(app)
    for model in created_models:
        if model in app_models:
            app_models.remove(model)

    # For each pre-existing model, collect SQL for every kind of schema
    # drift: table rename, flag changes, column renames, added columns,
    # and finally columns no model field accounts for.
    for klass in app_models:

        output, new_table_name = get_sql_evolution_check_for_changed_model_name(klass)
        final_output.extend(output)

        output = get_sql_evolution_check_for_changed_field_flags(klass, new_table_name)
        final_output.extend(output)

        output = get_sql_evolution_check_for_changed_field_name(klass, new_table_name)
        final_output.extend(output)

        output = get_sql_evolution_check_for_new_fields(klass, new_table_name)
        final_output.extend(output)

        output = get_sql_evolution_check_for_dead_fields(klass, new_table_name)
        final_output.extend(output)

    return final_output
get_sql_evolution.help_doc = "Returns SQL to update an existing schema to match the existing models."
get_sql_evolution.args = APP_ARGS
|
||||
|
||||
def get_sql_evolution_check_for_new_fields(klass, new_table_name):
    "checks for model fields that are not in the existing data structure"
    from django.db import backend, get_creation_module, models, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = klass._meta.db_table
    if new_table_name:
        db_table = new_table_name
    # The set of existing columns does not change while we only *collect*
    # SQL, so fetch it once instead of once per field.
    existing_fields = introspection.get_columns(cursor, db_table)
    for f in opts.fields:
        # f.aka may be None (the default - no former name), a single string,
        # or a tuple of former names; normalize to a list so the membership
        # test below never crashes on None and never iterates a string's
        # characters.
        if not f.aka:
            former_names = []
        elif isinstance(f.aka, str):
            former_names = [f.aka]
        else:
            former_names = list(f.aka)
        # A field is "new" only if neither its current column nor any of its
        # former names exists in the table.
        if f.column not in existing_fields and len(set(former_names) & set(existing_fields)) == 0:
            data_type = f.get_internal_type()
            col_type = data_types[data_type]
            # col_type is None for field types that have no column (e.g.
            # many-to-many); nothing to add in that case.
            if col_type is not None:
                output.append( backend.get_add_column_sql( db_table, f.column, style.SQL_COLTYPE(col_type % f.__dict__), f.null, f.unique, f.primary_key ) )
    return output
|
||||
|
||||
def get_sql_evolution_check_for_changed_model_name(klass):
    """Returns ([sql], old_table_name) renaming the model's table from a
    former name (Meta 'aka') to the current db_table, or ([], None) when no
    rename is needed or possible."""
    from django.db import backend, get_creation_module, models, get_introspection_module, connection
    cursor = connection.cursor()
    introspection = get_introspection_module()
    table_list = introspection.get_table_list(cursor)
    if klass._meta.db_table in table_list:
        # table already exists under its current name -- nothing to do
        return [], None
    if klass._meta.aka in table_list:
        # 'aka' given as a single string that matches an existing table
        return [ 'ALTER TABLE '+ backend.quote_name(klass._meta.aka) +' RENAME TO '+ backend.quote_name(klass._meta.db_table) + ';' ], klass._meta.aka
    elif len(set(klass._meta.aka) & set(table_list))==1:
        # 'aka' given as a tuple; exactly one former name is present.
        # NOTE(review): this renames aka[0], which is not necessarily the
        # member that was found in table_list -- confirm intended behavior.
        return [ 'ALTER TABLE '+ backend.quote_name(klass._meta.aka[0]) +' RENAME TO '+ backend.quote_name(klass._meta.db_table) + ';' ], klass._meta.aka[0]
    else:
        return [], None
|
||||
|
||||
def get_sql_evolution_check_for_changed_field_name(klass, new_table_name):
    """Returns SQL renaming columns whose model field carries a former column
    name (the 'aka' field option) that is still present in the table."""
    from django.db import backend, get_creation_module, models, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = klass._meta.db_table
    if new_table_name:
        db_table = new_table_name
    # Column set is loop-invariant while we only collect SQL; fetch it once.
    existing_fields = introspection.get_columns(cursor, db_table)
    for f in opts.fields:
        # Only fields whose current column is missing can be renames, and
        # only when they declare at least one former name.
        if f.column in existing_fields or not f.aka:
            continue
        # Determine which former name the column currently has. The original
        # relied on the accidental precedence of
        # "(A or len(...)) == 1" and always renamed aka[0]; rename the
        # former name that is actually present instead, and only when it is
        # unambiguous (exactly one match).
        old_col = None
        if isinstance(f.aka, str):
            if f.aka in existing_fields:
                old_col = f.aka
        else:
            matches = set(f.aka) & set(existing_fields)
            if len(matches) == 1:
                old_col = matches.pop()
        if old_col is None:
            continue
        data_type = f.get_internal_type()
        col_type = data_types[data_type]
        # col_type is None for field types without a column of their own.
        if col_type is not None:
            # Some backends (mysql) need the full column definition to
            # perform a rename, so rebuild it here.
            col_def = style.SQL_COLTYPE(col_type % f.__dict__) +' '+ style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or ''))
            if f.unique:
                col_def += style.SQL_KEYWORD(' UNIQUE')
            if f.primary_key:
                col_def += style.SQL_KEYWORD(' PRIMARY KEY')
            output.append( backend.get_change_column_name_sql( klass._meta.db_table, introspection.get_indexes(cursor,db_table), backend.quote_name(old_col), backend.quote_name(f.column), col_def ) )
    return output
|
||||
|
||||
def get_sql_evolution_check_for_changed_field_flags(klass, new_table_name):
    """Returns ALTER statements for fields whose column definition (null,
    maxlength, unique, primary key) no longer matches what the database
    reports via introspection."""
    from django.db import backend, get_creation_module, models, get_introspection_module, connection
    from django.db.models.fields import CharField, SlugField
    from django.db.models.fields.related import RelatedField, ForeignKey
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = klass._meta.db_table
    if new_table_name:
        db_table = new_table_name
    for f in opts.fields:
        existing_fields = introspection.get_columns(cursor,db_table)
        cf = None # current field, ie what it is before any renames
        if f.column in existing_fields:
            cf = f.column
        elif f.aka in existing_fields:
            # 'aka' given as a single string
            cf = f.aka
        # NOTE(review): set(f.aka) raises TypeError when f.aka is None (the
        # field default) -- confirm every reachable field defines aka or a
        # present column.
        elif len(set(f.aka) & set(existing_fields))==1:
            # 'aka' given as a tuple of former names
            cf = f.aka[0]
        else:
            continue # no idea what column you're talking about - should be handled by get_sql_evolution_check_for_new_fields()
        data_type = f.get_internal_type()
        if data_types.has_key(data_type):
            column_flags = introspection.get_known_column_flags(cursor, db_table, cf)
            # Compare introspected flags against the model's declaration;
            # maxlength only matters for non-pk Char/Slug fields.
            if column_flags['allow_null']!=f.null or \
               ( not f.primary_key and isinstance(f, CharField) and column_flags['maxlength']!=str(f.maxlength) ) or \
               ( not f.primary_key and isinstance(f, SlugField) and column_flags['maxlength']!=str(f.maxlength) ) or \
               column_flags['unique']!=f.unique or \
               column_flags['primary_key']!=f.primary_key:
                col_type = data_types[data_type]
                col_type_def = style.SQL_COLTYPE(col_type % f.__dict__)
                output.append( backend.get_change_column_def_sql( db_table, cf, col_type_def, f.null, f.unique, f.primary_key ) )
    return output
|
||||
|
||||
def get_sql_evolution_check_for_dead_fields(klass, new_table_name):
    """Returns (commented-out) SQL dropping columns that no model field --
    under its current name or any former 'aka' name -- accounts for."""
    from django.db import backend, get_creation_module, models, get_introspection_module, connection
    data_types = get_creation_module().DATA_TYPES
    cursor = connection.cursor()
    introspection = get_introspection_module()
    opts = klass._meta
    output = []
    db_table = klass._meta.db_table
    if new_table_name:
        db_table = new_table_name
    # Start from every column in the table and cross off those a field claims.
    suspect_fields = set(introspection.get_columns(cursor,db_table))
    for f in opts.fields:
        suspect_fields.discard(f.column)
        # f.aka may be None (the default), a string, or a tuple. The original
        # called difference_update(f.aka) unconditionally, which raises
        # TypeError for None and strips individual *characters* for a string.
        if f.aka:
            if isinstance(f.aka, str):
                suspect_fields.discard(f.aka)
            else:
                suspect_fields.difference_update(f.aka)
    if len(suspect_fields) > 0:
        # Dropping columns destroys data, so the statements are emitted as
        # comments for the user to run by hand.
        output.append( '-- warning: as the following may cause data loss, it/they must be run manually' )
        for suspect_field in suspect_fields:
            output.append( backend.get_drop_column_sql( db_table, suspect_field ) )
        output.append( '-- end warning' )
    return output
|
||||
|
||||
def get_sql_all(app):
    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
    # Simple concatenation: each helper already returns a list of statements.
    return get_sql_create(app) + get_custom_sql(app) + get_sql_indexes(app)
|
||||
@ -540,7 +762,7 @@ def syncdb(verbosity=1, interactive=True):
|
||||
# Create the model's database table, if it doesn't already exist.
|
||||
if verbosity >= 2:
|
||||
print "Processing %s.%s model" % (app_name, model._meta.object_name)
|
||||
if table_name_converter(model._meta.db_table) in table_list:
|
||||
if table_name_converter(model._meta.db_table) in table_list or table_name_converter(model._meta.aka) in table_list or len(set(model._meta.aka) & set(table_list))>0:
|
||||
continue
|
||||
sql, references = _get_sql_model_create(model, seen_models)
|
||||
seen_models.add(model)
|
||||
@ -568,6 +790,10 @@ def syncdb(verbosity=1, interactive=True):
|
||||
for statement in sql:
|
||||
cursor.execute(statement)
|
||||
|
||||
for sql in get_sql_evolution(app):
|
||||
print sql
|
||||
# cursor.execute(sql)
|
||||
|
||||
transaction.commit_unless_managed()
|
||||
|
||||
# Send the post_syncdb signal, so individual apps can do whatever they need
|
||||
@ -1521,6 +1747,7 @@ DEFAULT_ACTION_MAPPING = {
|
||||
'sqlinitialdata': get_sql_initial_data,
|
||||
'sqlreset': get_sql_reset,
|
||||
'sqlsequencereset': get_sql_sequence_reset,
|
||||
'sqlevolve': get_sql_evolution,
|
||||
'startapp': startapp,
|
||||
'startproject': startproject,
|
||||
'syncdb': syncdb,
|
||||
|
@ -242,6 +242,48 @@ def get_sql_sequence_reset(style, model_list):
|
||||
# No sequence reset required
|
||||
return []
|
||||
|
||||
def get_change_column_name_sql( table_name, indexes, old_col_name, new_col_name, col_def ):
    """Returns MySQL SQL renaming a column via CHANGE COLUMN, which requires
    restating the full column definition (col_def).

    'indexes' is accepted only for interface compatibility with the other
    backends; the original computed a pk_name from it that was never used,
    and that dead code has been removed."""
    return 'ALTER TABLE '+ quote_name(table_name) +' CHANGE COLUMN '+ quote_name(old_col_name) +' '+ quote_name(new_col_name) +' '+ col_def + ';'
|
||||
|
||||
def get_change_column_def_sql( table_name, col_name, col_type, null, unique, primary_key ):
    """Returns MySQL SQL redefining a column in place via MODIFY COLUMN."""
    # Assemble the column definition: type, NULL-ness, then optional flags.
    parts = [col_type, '%sNULL' % (not null and 'NOT ' or '')]
    if unique:
        parts.append('UNIQUE')
    if primary_key:
        parts.append('PRIMARY KEY')
    col_def = ' '.join(parts)
    return 'ALTER TABLE '+ quote_name(table_name) +' MODIFY COLUMN '+ quote_name(col_name) +' '+ col_def + ';'
|
||||
|
||||
def get_add_column_sql( table_name, col_name, col_type, null, unique, primary_key ):
    """Returns MySQL SQL adding a column with its full definition inline."""
    words = [
        'ALTER TABLE',
        quote_name(table_name),
        'ADD COLUMN',
        quote_name(col_name),
        col_type,
        '%sNULL' % (not null and 'NOT ' or ''),
    ]
    if unique:
        words.append('UNIQUE')
    if primary_key:
        words.append('PRIMARY KEY')
    return ' '.join(words) + ';'
|
||||
|
||||
def get_drop_column_sql( table_name, col_name ):
    """Returns the DROP COLUMN statement as a SQL *comment*: dropping a
    column destroys data, so it is left for the user to run by hand."""
    return '-- ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(col_name) + ';'
|
||||
|
||||
|
||||
OPERATOR_MAPPING = {
|
||||
'exact': '= %s',
|
||||
'iexact': 'LIKE %s',
|
||||
|
@ -73,6 +73,43 @@ def get_indexes(cursor, table_name):
|
||||
indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])}
|
||||
return indexes
|
||||
|
||||
def get_columns(cursor, table_name):
    "Returns the list of column names for the given MySQL table, or [] on failure."
    try:
        cursor.execute("describe %s" % quote_name(table_name))
        # The first item of each DESCRIBE row is the column (Field) name.
        return [row[0] for row in cursor.fetchall()]
    except:
        # NOTE(review): bare except deliberately maps a missing table (or any
        # other error) to "no columns"; consider narrowing to the DB error class.
        return []
|
||||
|
||||
def get_known_column_flags( cursor, table_name, column_name ):
    """Introspects one column of a MySQL table via DESCRIBE and returns a
    dict of flags: maxlength (varchar only), allow_null, primary_key,
    foreign_key, unique and default."""
    cursor.execute("describe %s" % quote_name(table_name))
    flags = {}
    for row in cursor.fetchall():
        if row[0] != column_name:
            continue

        # maxlength: 'varchar(NN)' -> 'NN'
        if row[1][:7] == 'varchar':
            flags['maxlength'] = row[1][8:-1]

        # NULL-ness from the 'Null' column
        flags['allow_null'] = (row[2] == 'YES')

        # key flags from the 'Key' column
        flags['primary_key'] = (row[3] == 'PRI')
        flags['foreign_key'] = (row[3] == 'FOR')
        flags['unique'] = (row[3] == 'UNI')

        # default value, taken verbatim from the 'Default' column
        flags['default'] = row[4]

    # print table_name, column_name, flags
    return flags
|
||||
|
||||
DATA_TYPES_REVERSE = {
|
||||
FIELD_TYPE.BLOB: 'TextField',
|
||||
FIELD_TYPE.CHAR: 'CharField',
|
||||
|
@ -282,6 +282,42 @@ def typecast_string(s):
|
||||
return s
|
||||
return smart_unicode(s)
|
||||
|
||||
def get_change_column_name_sql( table_name, indexes, old_col_name, new_col_name, col_def ):
    """Returns PostgreSQL SQL renaming a column via RENAME COLUMN.

    'indexes' and 'col_def' are accepted only for interface compatibility
    with the other backends; the original computed a pk_name from 'indexes'
    that was never used, and that dead code has been removed."""
    return 'ALTER TABLE '+ quote_name(table_name) +' RENAME COLUMN '+ quote_name(old_col_name) +' TO '+ quote_name(new_col_name) +';'
|
||||
|
||||
def get_change_column_def_sql( table_name, col_name, col_type, null, unique, primary_key ):
    """Returns PostgreSQL SQL changing a column's type by copying its data
    through a temporary column (older PostgreSQL lacked ALTER COLUMN TYPE)."""
    output = []
    # add <col>_tmp with the new type, copy the data over, drop the old
    # column, then rename the temporary back to the original name
    output.append( 'ALTER TABLE '+ quote_name(table_name) +' ADD COLUMN '+ quote_name(col_name+'_tmp') +' '+ col_type + ';' )
    output.append( 'UPDATE '+ quote_name(table_name) +' SET '+ quote_name(col_name+'_tmp') +' = '+ quote_name(col_name) + ';' )
    output.append( 'ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(col_name) +';' )
    output.append( 'ALTER TABLE '+ quote_name(table_name) +' RENAME COLUMN '+ quote_name(col_name+'_tmp') +' TO '+ quote_name(col_name) + ';' )
    # NOT NULL / UNIQUE are not carried over by the copy; re-apply them
    if not null:
        output.append( 'ALTER TABLE '+ quote_name(table_name) +' ALTER COLUMN '+ quote_name(col_name) +' SET NOT NULL;' )
    if unique:
        output.append( 'ALTER TABLE '+ quote_name(table_name) +' ADD CONSTRAINT '+ table_name +'_'+ col_name +'_unique_constraint UNIQUE('+ col_name +');' )
    # NOTE(review): primary_key is accepted but never re-applied here.
    return '\n'.join(output)
|
||||
|
||||
def get_add_column_sql( table_name, col_name, col_type, null, unique, primary_key ):
    """Returns PostgreSQL SQL adding a column, then re-stating NOT NULL and
    UNIQUE as separate statements."""
    stmts = ['ALTER TABLE '+ quote_name(table_name) +' ADD COLUMN '+ quote_name(col_name) +' '+ col_type + ';']
    if not null:
        stmts.append('ALTER TABLE '+ quote_name(table_name) +' ALTER COLUMN '+ quote_name(col_name) +' SET NOT NULL;')
    if unique:
        stmts.append('ALTER TABLE '+ quote_name(table_name) +' ADD CONSTRAINT '+ table_name +'_'+ col_name +'_unique_constraint UNIQUE('+ col_name +');')
    return '\n'.join(stmts)
|
||||
|
||||
def get_drop_column_sql( table_name, col_name ):
    """Returns the DROP COLUMN statement as a SQL *comment*: dropping a
    column destroys data, so it is left for the user to run by hand."""
    return '-- ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(col_name) + ';'
|
||||
|
||||
# Register these custom typecasts, because Django expects dates/times to be
|
||||
# in Python's native (standard-library) datetime/time format, whereas psycopg
|
||||
# use mx.DateTime by default.
|
||||
|
@ -66,6 +66,58 @@ def get_indexes(cursor, table_name):
|
||||
indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]}
|
||||
return indexes
|
||||
|
||||
def get_columns(cursor, table_name):
    "Returns the list of column names for the given PostgreSQL table, or [] on failure."
    try:
        # pg_catalog query listing every live (non-dropped) attribute of the
        # table. NOTE(review): table_name is interpolated directly into the
        # SQL -- acceptable for trusted model names, but not injection-safe.
        cursor.execute("SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), (SELECT substring(d.adsrc for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef), a.attnotnull, a.attnum, pg_catalog.col_description(a.attrelid, a.attnum) FROM pg_catalog.pg_attribute a WHERE a.attrelid = (SELECT c.oid from pg_catalog.pg_class c where c.relname ~ '^%s$') AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum" % table_name)
        # First item of each row is the attribute (column) name.
        return [row[0] for row in cursor.fetchall()]
    except:
        # a missing table (or any other error) is reported as "no columns"
        return []
|
||||
|
||||
def get_known_column_flags( cursor, table_name, column_name ):
    """Introspects one column of a PostgreSQL table and returns a dict of
    flags: maxlength (varchar only), allow_null, primary_key, foreign_key,
    unique and default."""
    cursor.execute("SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), (SELECT substring(d.adsrc for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef), a.attnotnull, a.attnum, pg_catalog.col_description(a.attrelid, a.attnum) FROM pg_catalog.pg_attribute a WHERE a.attrelid = (SELECT c.oid from pg_catalog.pg_class c where c.relname ~ '^%s$') AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum" % table_name)
    # start from pessimistic defaults; only matched rows fill in real values
    dict = {}
    dict['primary_key'] = False
    dict['foreign_key'] = False
    dict['unique'] = False
    dict['default'] = ''
    for row in cursor.fetchall():
        if row[0] == column_name:

            # maxlength: 'character varying(NN)' -> 'NN'
            if row[1][0:17]=='character varying':
                dict['maxlength'] = row[1][18:len(row[1])-1]

            # NULL-ness comes from attnotnull, inverted
            dict['allow_null'] = not row[3]

            # pk, fk and unique flags come from pg_constraint; a unique
            # constraint only counts if it covers just this one column
            unique_conname = None
            shared_unique_connames = set()
            cursor.execute("select pg_constraint.conname, pg_constraint.contype, pg_attribute.attname from pg_constraint, pg_attribute where pg_constraint.conrelid=pg_attribute.attrelid and pg_attribute.attnum=any(pg_constraint.conkey) and pg_constraint.conname~'^%s'" % table_name )
            for row in cursor.fetchall():
                if row[2] == column_name:
                    if row[1]=='p': dict['primary_key'] = True
                    if row[1]=='f': dict['foreign_key'] = True
                    if row[1]=='u': unique_conname = row[0]
                else:
                    if row[1]=='u': shared_unique_connames.add( row[0] )
            if unique_conname and unique_conname not in shared_unique_connames:
                dict['unique'] = True

            # default value comes from pg_attrdef; serial defaults
            # (nextval(...)) are not user defaults, so they are skipped,
            # and quotes are stripped from e.g. 'abc'::character varying
            cursor.execute("select pg_attribute.attname, adsrc from pg_attrdef, pg_attribute WHERE pg_attrdef.adrelid=pg_attribute.attrelid and pg_attribute.attnum=pg_attrdef.adnum and pg_attrdef.adrelid = (SELECT c.oid from pg_catalog.pg_class c where c.relname ~ '^%s$')" % table_name )
            for row in cursor.fetchall():
                if row[0] == column_name:
                    if row[1][0:7] == 'nextval': continue
                    dict['default'] = row[1][1:row[1].index("'",1)]

    # print table_name, column_name, dict
    return dict
|
||||
|
||||
# Maps type codes to Django Field types.
|
||||
DATA_TYPES_REVERSE = {
|
||||
16: 'BooleanField',
|
||||
|
@ -214,6 +214,51 @@ def _sqlite_regexp(re_pattern, re_string):
|
||||
except:
|
||||
return False
|
||||
|
||||
def get_change_column_name_sql( table_name, indexes, old_col_name, new_col_name, col_def ):
    # sqlite doesn't support column renames, so we fake it: add the new
    # column and copy the data across, keyed on the table's primary key.
    # TODO: only supports a single primary key so far
    pk_name = None
    for key in indexes.keys():
        if indexes[key]['primary_key']: pk_name = key
    output = []
    output.append( 'ALTER TABLE '+ quote_name(table_name) +' ADD COLUMN '+ quote_name(new_col_name) +' '+ col_def + ';' )
    # NOTE(review): if the table has no primary key, pk_name stays None and
    # the string concatenation below raises TypeError -- confirm every
    # evolved table has a pk.
    output.append( 'UPDATE '+ quote_name(table_name) +' SET '+ new_col_name +' = '+ old_col_name +' WHERE '+ pk_name +'=(select '+ pk_name +' from '+ table_name +');' )
    output.append( '-- FYI: sqlite does not support deleting columns, so '+ quote_name(old_col_name) +' remains as cruft' )
    # use the following when sqlite gets drop support
    #output.append( 'ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(old_col_name) )
    return '\n'.join(output)
|
||||
|
||||
def get_change_column_def_sql( table_name, col_name, col_def ):
    """sqlite cannot modify a column in place, so this emits an explanatory
    SQL comment instead of a real statement."""
    # TODO: fake via renaming the table, building a new one and deleting the old
    notice = '-- sqlite does not support column modifications '+ quote_name(table_name) +'.'+ quote_name(col_name) +' to '+ col_def
    return notice
|
||||
|
||||
def get_add_column_sql( table_name, col_name, col_type, null, unique, primary_key ):
    """Returns sqlite SQL adding a column with its full definition inline."""
    words = [
        'ALTER TABLE',
        quote_name(table_name),
        'ADD COLUMN',
        quote_name(col_name),
        col_type,
        '%sNULL' % (not null and 'NOT ' or ''),
    ]
    if unique:
        words.append('UNIQUE')
    if primary_key:
        words.append('PRIMARY KEY')
    return ' '.join(words) + ';'
|
||||
|
||||
def get_drop_column_sql( table_name, col_name ):
    """Returns a SQL comment noting that sqlite cannot drop columns.

    BUG FIX: the original referenced the undefined name 'old_col_name'
    (copy-pasted from get_change_column_name_sql), raising NameError on
    every call; the parameter is 'col_name'."""
    output = []
    output.append( '-- FYI: sqlite does not support deleting columns, so '+ quote_name(col_name) +' remains as cruft' )
    # use the following when sqlite gets drop support
    # output.append( '-- ALTER TABLE '+ quote_name(table_name) +' DROP COLUMN '+ quote_name(col_name) )
    return '\n'.join(output)
|
||||
|
||||
|
||||
# SQLite requires LIKE statements to include an ESCAPE clause if the value
|
||||
# being escaped has a percent or underscore in it.
|
||||
# See http://www.sqlite.org/lang_expr.html for an explanation.
|
||||
|
@ -43,6 +43,43 @@ def get_indexes(cursor, table_name):
|
||||
indexes[name]['unique'] = True
|
||||
return indexes
|
||||
|
||||
def get_columns(cursor, table_name):
    "Returns the list of column names for the given sqlite table, or [] on failure."
    try:
        cursor.execute("PRAGMA table_info(%s)" % quote_name(table_name))
        # each PRAGMA table_info row is (cid, name, type, notnull, dflt_value, pk)
        return [row[1] for row in cursor.fetchall()]
    except:
        # a missing table (or any other error) is reported as "no columns"
        return []
||||
|
||||
def get_known_column_flags( cursor, table_name, column_name ):
    """Introspects one column of a sqlite table via PRAGMA table_info and
    returns whatever flags this backend implements so far (currently only
    'maxlength' for varchar columns; null/key/default detection is TODO,
    matching the commented-out stubs in the original)."""
    cursor.execute("PRAGMA table_info(%s)" % quote_name(table_name))
    flags = {}
    for row in cursor.fetchall():
        # row is (cid, name, type, notnull, dflt_value, pk)
        if row[1] == column_name:
            # maxlength: 'varchar(NN)' -> 'NN'
            if row[2][0:7]=='varchar':
                flags['maxlength'] = row[2][8:len(row[2])-1]
    # BUG FIX: removed a leftover debug "print table_name, column_name, ..."
    # statement that wrote to stdout on every call; the mysql and postgresql
    # counterparts keep the equivalent line commented out.
    return flags
||||
|
||||
def _table_info(cursor, name):
|
||||
cursor.execute('PRAGMA table_info(%s)' % quote_name(name))
|
||||
# cid, name, type, notnull, dflt_value, pk
|
||||
|
@ -76,7 +76,7 @@ class Field(object):
|
||||
core=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True,
|
||||
prepopulate_from=None, unique_for_date=None, unique_for_month=None,
|
||||
unique_for_year=None, validator_list=None, choices=None, radio_admin=None,
|
||||
help_text='', db_column=None, db_tablespace=None):
|
||||
help_text='', db_column=None, aka=None, db_tablespace=None):
|
||||
self.name = name
|
||||
self.verbose_name = verbose_name
|
||||
self.primary_key = primary_key
|
||||
@ -97,6 +97,7 @@ class Field(object):
|
||||
self.radio_admin = radio_admin
|
||||
self.help_text = help_text
|
||||
self.db_column = db_column
|
||||
self.aka = aka
|
||||
self.db_tablespace = db_tablespace
|
||||
|
||||
# Set db_index to True if the field has a relationship and doesn't explicitly set db_index.
|
||||
|
@ -15,7 +15,7 @@ get_verbose_name = lambda class_name: re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|
|
||||
|
||||
DEFAULT_NAMES = ('verbose_name', 'db_table', 'ordering',
|
||||
'unique_together', 'permissions', 'get_latest_by',
|
||||
'order_with_respect_to', 'app_label', 'db_tablespace')
|
||||
'order_with_respect_to', 'app_label', 'aka', 'db_tablespace')
|
||||
|
||||
class Options(object):
|
||||
def __init__(self, meta):
|
||||
@ -23,6 +23,7 @@ class Options(object):
|
||||
self.module_name, self.verbose_name = None, None
|
||||
self.verbose_name_plural = None
|
||||
self.db_table = ''
|
||||
self.aka = ''
|
||||
self.ordering = []
|
||||
self.unique_together = []
|
||||
self.permissions = []
|
||||
@ -76,6 +77,14 @@ class Options(object):
|
||||
auto.creation_counter = -1
|
||||
model.add_to_class('id', auto)
|
||||
|
||||
if isinstance(self.aka, str):
|
||||
self.aka = "%s_%s" % (self.app_label, self.aka.lower())
|
||||
if isinstance(self.aka, tuple):
|
||||
real_aka = []
|
||||
for some_aka in self.aka:
|
||||
real_aka.append( "%s_%s" % (self.app_label, some_aka.lower()) )
|
||||
self.aka = tuple(real_aka)
|
||||
|
||||
# If the db_table wasn't provided, use the app_label + module_name.
|
||||
if not self.db_table:
|
||||
self.db_table = "%s_%s" % (self.app_label, self.module_name)
|
||||
|
Loading…
x
Reference in New Issue
Block a user