Replace South with South 0.8.4

- Legacy-Id: 6872
Ole Laursen 2013-12-11 14:07:19 +00:00
parent 1654988abc
commit 73f3ce0905
69 changed files with 4035 additions and 1012 deletions

south/.gitignore

@@ -1 +0,0 @@
/*.pyc

south/__init__.py

@@ -2,7 +2,7 @@
South - Useable migrations for Django apps
"""
__version__ = "0.7.3"
__version__ = "0.8.4"
__authors__ = [
"Andrew Godwin <andrew@aeracode.org>",
"Andy McCurdy <andy@andymccurdy.com>"

south/creator/actions.py

@@ -4,14 +4,18 @@ Each one has a class, which can take the action description and insert code
blocks into the forwards() and backwards() methods, in the right place.
"""
from __future__ import print_function
import sys
import datetime
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
from django.db.models.fields import FieldDoesNotExist, NOT_PROVIDED, CharField, TextField
from south import modelsinspector
from south.modelsinspector import value_clean
from south.creator.freezer import remove_useless_attributes, model_key
from south.utils import datetime_utils
from south.utils.py3 import raw_input
class Action(object):
"""
@@ -70,11 +74,11 @@ class AddModel(Action):
db.create_table(%(table_name)r, (
%(field_defs)s
))
db.send_create_signal(%(app_label)r, [%(model_name)r])'''
db.send_create_signal(%(app_label)r, [%(model_name)r])'''[1:] + "\n"
BACKWARDS_TEMPLATE = '''
# Deleting model '%(model_name)s'
db.delete_table(%(table_name)r)'''
db.delete_table(%(table_name)r)'''[1:] + "\n"
def __init__(self, model, model_def):
self.model = model
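
The recurring `'''...'''[1:] + "\n"` change in this file deserves a note: the triple-quoted templates open with a newline purely for source readability, so the `[1:]` slice strips that leading newline and the appended `"\n"` guarantees the rendered snippet ends cleanly. A standalone sketch of the idiom:

# Standalone demo of the template idiom used throughout this file.
TEMPLATE = '''
# Deleting model '%(model_name)s'
db.delete_table(%(table_name)r)'''[1:] + "\n"

rendered = TEMPLATE % {"model_name": "Author", "table_name": "app_author"}
assert not rendered.startswith("\n")  # leading newline stripped by [1:]
assert rendered.endswith(")\n")       # exactly one trailing newline added
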
@@ -133,12 +137,14 @@ class _NullIssuesField(object):
A field that might need to ask a question about rogue NULL values.
"""
allow_third_null_option = False
issue_with_backward_migration = False
irreversible = False
IRREVERSIBLE_TEMPLATE = '''
# User chose to not deal with backwards NULL issues for '%(model_name)s.%(field_name)s'
raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")'''
raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration'''
def deal_with_not_null_no_default(self, field, field_def):
# If it's a CharField or TextField that's blank, skip this step.
@@ -146,26 +152,26 @@ class _NullIssuesField(object):
field_def[2]['default'] = repr("")
return
# Oh dear. Ask them what to do.
print " ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % (
print(" ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % (
self.model._meta.object_name,
field.name,
)
print " ? Since you are %s, you MUST specify a default" % self.null_reason
print " ? value to use for existing rows. Would you like to:"
print " ? 1. Quit now, and add a default to the field in models.py"
print " ? 2. Specify a one-off value to use for existing columns now"
if self.allow_third_null_option:
print " ? 3. Disable the backwards migration by raising an exception."
))
print(" ? Since you are %s, you MUST specify a default" % self.null_reason)
print(" ? value to use for existing rows. Would you like to:")
print(" ? 1. Quit now"+("." if self.issue_with_backward_migration else ", and add a default to the field in models.py" ))
print(" ? 2. Specify a one-off value to use for existing columns now")
if self.issue_with_backward_migration:
print(" ? 3. Disable the backwards migration by raising an exception; you can edit the migration to fix it later")
while True:
choice = raw_input(" ? Please select a choice: ")
if choice == "1":
sys.exit(1)
elif choice == "2":
break
elif choice == "3" and self.allow_third_null_option:
elif choice == "3" and self.issue_with_backward_migration:
break
else:
print " ! Invalid choice."
print(" ! Invalid choice.")
if choice == "2":
self.add_one_time_default(field, field_def)
elif choice == "3":
@@ -173,23 +179,23 @@ class _NullIssuesField(object):
def add_one_time_default(self, field, field_def):
# OK, they want to pick their own one-time default. Who are we to refuse?
print " ? Please enter Python code for your one-off default value."
print " ? The datetime module is available, so you can do e.g. datetime.date.today()"
print(" ? Please enter Python code for your one-off default value.")
print(" ? The datetime module is available, so you can do e.g. datetime.date.today()")
while True:
code = raw_input(" >>> ")
if not code:
print " ! Please enter some code, or 'exit' (with no quotes) to exit."
print(" ! Please enter some code, or 'exit' (with no quotes) to exit.")
elif code == "exit":
sys.exit(1)
else:
try:
result = eval(code, {}, {"datetime": datetime})
except (SyntaxError, NameError), e:
print " ! Invalid input: %s" % e
result = eval(code, {}, {"datetime": datetime_utils})
except (SyntaxError, NameError) as e:
print(" ! Invalid input: %s" % e)
else:
break
# Right, add the default in.
field_def[2]['default'] = repr(result)
field_def[2]['default'] = value_clean(result)
def irreversable_code(self, field):
return self.IRREVERSIBLE_TEMPLATE % {
@@ -209,11 +215,13 @@ class AddField(Action, _NullIssuesField):
FORWARDS_TEMPLATE = '''
# Adding field '%(model_name)s.%(field_name)s'
db.add_column(%(table_name)r, %(field_name)r, %(field_def)s, keep_default=False)'''
db.add_column(%(table_name)r, %(field_name)r,
%(field_def)s,
keep_default=False)'''[1:] + "\n"
BACKWARDS_TEMPLATE = '''
# Deleting field '%(model_name)s.%(field_name)s'
db.delete_column(%(table_name)r, %(field_column)r)'''
db.delete_column(%(table_name)r, %(field_column)r)'''[1:] + "\n"
def __init__(self, model, field, field_def):
self.model = model
@@ -260,7 +268,7 @@ class DeleteField(AddField):
"""
null_reason = "removing this field"
allow_third_null_option = True
issue_with_backward_migration = True
def console_line(self):
"Returns the string to print on the console, e.g. ' + Added field foo'"
@@ -277,7 +285,7 @@ class DeleteField(AddField):
if not self.irreversible:
return AddField.forwards_code(self)
else:
return self.irreversable_code(self.field)
return self.irreversable_code(self.field) + AddField.forwards_code(self)
class ChangeField(Action, _NullIssuesField):
@@ -309,7 +317,7 @@ class ChangeField(Action, _NullIssuesField):
self.deal_with_not_null_no_default(self.new_field, self.new_def)
if not self.old_field.null and self.new_field.null and not old_default:
self.null_reason = "making this field nullable"
self.allow_third_null_option = True
self.issue_with_backward_migration = True
self.deal_with_not_null_no_default(self.old_field, self.old_def)
def console_line(self):
@@ -347,10 +355,11 @@ class ChangeField(Action, _NullIssuesField):
return self._code(self.old_field, self.new_field, self.new_def)
def backwards_code(self):
change_code = self._code(self.new_field, self.old_field, self.old_def)
if not self.irreversible:
return self._code(self.new_field, self.old_field, self.old_def)
return change_code
else:
return self.irreversable_code(self.old_field)
return self.irreversable_code(self.old_field) + change_code
class AddUnique(Action):
@@ -360,11 +369,11 @@ class AddUnique(Action):
FORWARDS_TEMPLATE = '''
# Adding unique constraint on '%(model_name)s', fields %(field_names)s
db.create_unique(%(table_name)r, %(fields)r)'''
db.create_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"
BACKWARDS_TEMPLATE = '''
# Removing unique constraint on '%(model_name)s', fields %(field_names)s
db.delete_unique(%(table_name)r, %(fields)r)'''
db.delete_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"
prepend_backwards = True
@@ -428,11 +437,11 @@ class AddIndex(AddUnique):
FORWARDS_TEMPLATE = '''
# Adding index on '%(model_name)s', fields %(field_names)s
db.create_index(%(table_name)r, %(fields)r)'''
db.create_index(%(table_name)r, %(fields)r)'''[1:] + "\n"
BACKWARDS_TEMPLATE = '''
# Removing index on '%(model_name)s', fields %(field_names)s
db.delete_index(%(table_name)r, %(fields)r)'''
db.delete_index(%(table_name)r, %(fields)r)'''[1:] + "\n"
def console_line(self):
"Returns the string to print on the console, e.g. ' + Added field foo'"
@@ -470,16 +479,17 @@ class AddM2M(Action):
FORWARDS_TEMPLATE = '''
# Adding M2M table for field %(field_name)s on '%(model_name)s'
db.create_table(%(table_name)r, (
m2m_table_name = %(table_name)s
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
(%(left_field)r, models.ForeignKey(orm[%(left_model_key)r], null=False)),
(%(right_field)r, models.ForeignKey(orm[%(right_model_key)r], null=False))
))
db.create_unique(%(table_name)r, [%(left_column)r, %(right_column)r])'''
db.create_unique(m2m_table_name, [%(left_column)r, %(right_column)r])'''[1:] + "\n"
BACKWARDS_TEMPLATE = '''
# Removing M2M table for field %(field_name)s on '%(model_name)s'
db.delete_table('%(table_name)s')'''
db.delete_table(%(table_name)s)'''[1:] + "\n"
def __init__(self, model, field):
self.model = model
@@ -492,13 +502,25 @@ class AddM2M(Action):
self.model._meta.app_label,
self.model._meta.object_name,
)
def table_name(self):
# This is part of a workaround for the fact that Django uses
# different shortening for automatically generated m2m table names
# (as opposed to any explicitly specified table name)
f = self.field
explicit = f.db_table
if explicit:
return "%r" % explicit
else:
auto = "%s_%s" % (self.model._meta.db_table, f.name)
return 'db.shorten_name(%r)' % auto
def forwards_code(self):
return self.FORWARDS_TEMPLATE % {
"model_name": self.model._meta.object_name,
"field_name": self.field.name,
"table_name": self.field.m2m_db_table(),
"table_name": self.table_name(),
"left_field": self.field.m2m_column_name()[:-3], # Remove the _id part
"left_column": self.field.m2m_column_name(),
"left_model_key": model_key(self.model),
@@ -512,7 +534,7 @@ class AddM2M(Action):
return self.BACKWARDS_TEMPLATE % {
"model_name": self.model._meta.object_name,
"field_name": self.field.name,
"table_name": self.field.m2m_db_table(),
"table_name": self.table_name(),
}
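
The new `table_name()` helper is a workaround: Django shortens automatically generated m2m table names itself, so the generated migration now emits a `db.shorten_name(...)` call instead of a pre-shortened literal, while explicit `db_table` values are emitted as plain reprs. A hedged sketch of what the two branches produce (the fake field below stands in for a real Django field):

# Illustrative stand-in for a Django m2m field; only the attributes
# that table_name() reads are modelled here.
class FakeM2MField(object):
    def __init__(self, name, db_table=None):
        self.name = name
        self.db_table = db_table

def table_name(model_db_table, f):
    explicit = f.db_table
    if explicit:
        return "%r" % explicit                # literal table name
    auto = "%s_%s" % (model_db_table, f.name)
    return 'db.shorten_name(%r)' % auto       # backend shortens at run time

print(table_name("app_author", FakeM2MField("books")))
# -> db.shorten_name('app_author_books')
print(table_name("app_author", FakeM2MField("books", db_table="author_books")))
# -> 'author_books'
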

south/creator/changes.py

@@ -3,12 +3,15 @@ Contains things to detect changes - either using options passed in on the
commandline, or by using autodetection, etc.
"""
from __future__ import print_function
from django.db import models
from django.contrib.contenttypes.generic import GenericRelation
from django.utils.datastructures import SortedDict
from south.creator.freezer import remove_useless_attributes, freeze_apps, model_key
from south.utils import auto_through
from south.utils.py3 import string_types
class BaseChanges(object):
"""
@@ -100,6 +103,16 @@ class AutoChanges(BaseChanges):
params['model']._meta.object_name.lower(),
"_".join([x.name for x in params['fields']]),
))
elif change_name == "AddIndex":
parts.append("add_index_%s_%s" % (
params['model']._meta.object_name.lower(),
"_".join([x.name for x in params['fields']]),
))
elif change_name == "DeleteIndex":
parts.append("del_index_%s_%s" % (
params['model']._meta.object_name.lower(),
"_".join([x.name for x in params['fields']]),
))
return ("__".join(parts))[:70]
def get_changes(self):
@@ -127,18 +140,19 @@ class AutoChanges(BaseChanges):
field = self.old_orm[key + ":" + fieldname]
if auto_through(field):
yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
# And any unique constraints it had
unique_together = eval(old_meta.get("unique_together", "[]"))
if unique_together:
# If it's only a single tuple, make it into the longer one
if isinstance(unique_together[0], basestring):
unique_together = [unique_together]
# For each combination, make an action for it
for fields in unique_together:
yield ("DeleteUnique", {
"model": self.old_orm[key],
"fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields],
})
# And any index/uniqueness constraints it had
for attr, operation in (("unique_together", "DeleteUnique"), ("index_together", "DeleteIndex")):
together = eval(old_meta.get(attr, "[]"))
if together:
# If it's only a single tuple, make it into the longer one
if isinstance(together[0], string_types):
together = [together]
# For each combination, make an action for it
for fields in together:
yield (operation, {
"model": self.old_orm[key],
"fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields],
})
# We always add it in here so we ignore it later
deleted_models.add(key)
@@ -158,18 +172,19 @@ class AutoChanges(BaseChanges):
field = self.current_field_from_key(key, fieldname)
if auto_through(field):
yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
# And any unique constraints it has
unique_together = eval(new_meta.get("unique_together", "[]"))
if unique_together:
# If it's only a single tuple, make it into the longer one
if isinstance(unique_together[0], basestring):
unique_together = [unique_together]
# For each combination, make an action for it
for fields in unique_together:
yield ("AddUnique", {
"model": self.current_model_from_key(key),
"fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields],
})
# And any index/uniqueness constraints it has
for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")):
together = eval(new_meta.get(attr, "[]"))
if together:
# If it's only a single tuple, make it into the longer one
if isinstance(together[0], string_types):
together = [together]
# For each combination, make an action for it
for fields in together:
yield (operation, {
"model": self.current_model_from_key(key),
"fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields],
})
# Now, for every model that's stayed the same, check its fields.
for key in self.old_defs:
@@ -178,6 +193,10 @@ class AutoChanges(BaseChanges):
old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
# Do nothing for models which are now not managed.
if new_meta.get("managed", "True") == "False":
continue
# Find fields that have vanished.
for fieldname in old_fields:
if fieldname not in new_fields:
@@ -281,26 +300,27 @@ class AutoChanges(BaseChanges):
if not auto_through(old_field) and auto_through(new_field):
yield ("AddM2M", {"model": self.current_model_from_key(key), "field": new_field})
## See if the unique_togethers have changed
# First, normalise them into lists of sets.
old_unique_together = eval(old_meta.get("unique_together", "[]"))
new_unique_together = eval(new_meta.get("unique_together", "[]"))
if old_unique_together and isinstance(old_unique_together[0], basestring):
old_unique_together = [old_unique_together]
if new_unique_together and isinstance(new_unique_together[0], basestring):
new_unique_together = [new_unique_together]
old_unique_together = map(set, old_unique_together)
new_unique_together = map(set, new_unique_together)
# See if any appeared or disappeared
for item in old_unique_together:
if item not in new_unique_together:
yield ("DeleteUnique", {
## See if the {index,unique}_togethers have changed
for attr, add_operation, del_operation in (("unique_together", "AddUnique", "DeleteUnique"), ("index_together", "AddIndex", "DeleteIndex")):
# First, normalise them into lists of sets.
old_together = eval(old_meta.get(attr, "[]"))
new_together = eval(new_meta.get(attr, "[]"))
if old_together and isinstance(old_together[0], string_types):
old_together = [old_together]
if new_together and isinstance(new_together[0], string_types):
new_together = [new_together]
old_together = frozenset(tuple(o) for o in old_together)
new_together = frozenset(tuple(n) for n in new_together)
# See if any appeared or disappeared
disappeared = old_together.difference(new_together)
appeared = new_together.difference(old_together)
for item in disappeared:
yield (del_operation, {
"model": self.old_orm[key],
"fields": [self.old_orm[key + ":" + x] for x in item],
})
for item in new_unique_together:
if item not in old_unique_together:
yield ("AddUnique", {
for item in appeared:
yield (add_operation, {
"model": self.current_model_from_key(key),
"fields": [self.current_field_from_key(key, x) for x in item],
})
@@ -309,7 +329,7 @@ class AutoChanges(BaseChanges):
def is_triple(cls, triple):
"Returns whether the argument is a triple."
return isinstance(triple, (list, tuple)) and len(triple) == 3 and \
isinstance(triple[0], (str, unicode)) and \
isinstance(triple[0], string_types) and \
isinstance(triple[1], (list, tuple)) and \
isinstance(triple[2], dict)
@@ -404,7 +424,7 @@ class ManualChanges(BaseChanges):
try:
model_name, field_name = field_desc.split(".")
except (TypeError, ValueError):
print "%r is not a valid field description." % field_desc
raise ValueError("%r is not a valid field description." % field_desc)
model = models.get_model(self.migrations.app_label(), model_name)
real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
yield ("AddField", {
@@ -417,7 +437,7 @@ class ManualChanges(BaseChanges):
try:
model_name, field_name = field_desc.split(".")
except (TypeError, ValueError):
print "%r is not a valid field description." % field_desc
print("%r is not a valid field description." % field_desc)
model = models.get_model(self.migrations.app_label(), model_name)
yield ("AddIndex", {
"model": model,
@@ -453,19 +473,20 @@ class InitialChanges(BaseChanges):
"model_def": real_fields,
})
# Then, add any uniqueness that's around
# Then, add any indexing/uniqueness that's around
if meta:
unique_together = eval(meta.get("unique_together", "[]"))
if unique_together:
# If it's only a single tuple, make it into the longer one
if isinstance(unique_together[0], basestring):
unique_together = [unique_together]
# For each combination, make an action for it
for fields in unique_together:
yield ("AddUnique", {
"model": model,
"fields": [model._meta.get_field_by_name(x)[0] for x in fields],
})
for attr, operation in (("unique_together", "AddUnique"), ("index_together", "AddIndex")):
together = eval(meta.get(attr, "[]"))
if together:
# If it's only a single tuple, make it into the longer one
if isinstance(together[0], string_types):
together = [together]
# For each combination, make an action for it
for fields in together:
yield (operation, {
"model": model,
"fields": [model._meta.get_field_by_name(x)[0] for x in fields],
})
# Finally, see if there's some M2M action
for name, triple in m2m_fields.items():
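
Across this file, `unique_together` and the new `index_together` are handled by one shared loop, and both options are normalised the same way before being diffed. A self-contained sketch of that normalisation and the frozenset diffing (the `string_types` shim below substitutes for `south.utils.py3`):

string_types = (str,)  # stands in for south.utils.py3.string_types

def normalise(together):
    # A bare ("a", "b") shorthand becomes [("a", "b")], then each
    # combination is frozen so set difference works on the result.
    if together and isinstance(together[0], string_types):
        together = [together]
    return frozenset(tuple(fields) for fields in together)

old = normalise(("author", "title"))                       # shorthand form
new = normalise([("author", "title"), ("author", "year")])
assert new - old == frozenset([("author", "year")])        # AddUnique/AddIndex
assert old - new == frozenset()                            # nothing deleted
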

south/creator/freezer.py

@@ -2,20 +2,23 @@
Handles freezing of models into FakeORMs.
"""
from __future__ import print_function
import sys
from django.db import models
from django.db.models.base import ModelBase, Model
from django.contrib.contenttypes.generic import GenericRelation
from south.orm import FakeORM
from south.utils import auto_model
from south.utils import get_attribute, auto_through
from south import modelsinspector
from south.utils.py3 import string_types
def freeze_apps(apps):
"""
Takes a list of app labels, and returns a string of their frozen form.
"""
if isinstance(apps, basestring):
if isinstance(apps, string_types):
apps = [apps]
frozen_models = set()
# For each app, add in all its models
@@ -41,14 +44,14 @@ def freeze_apps(apps):
missing_fields = True
model_class = model_classes[key]
field_class = model_class._meta.get_field_by_name(field_name)[0]
print " ! Cannot freeze field '%s.%s'" % (key, field_name)
print " ! (this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__)
print(" ! Cannot freeze field '%s.%s'" % (key, field_name))
print(" ! (this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__))
if missing_fields:
print ""
print " ! South cannot introspect some fields; this is probably because they are custom"
print " ! fields. If they worked in 0.6 or below, this is because we have removed the"
print " ! models parser (it often broke things)."
print " ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork"
print("")
print(" ! South cannot introspect some fields; this is probably because they are custom")
print(" ! fields. If they worked in 0.6 or below, this is because we have removed the")
print(" ! models parser (it often broke things).")
print(" ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork")
sys.exit(1)
return model_defs
@@ -90,10 +93,10 @@ def model_dependencies(model, checked_models=None):
checked_models = checked_models or set()
# Get deps for each field
for field in model._meta.fields + model._meta.many_to_many:
depends.update(field_dependencies(field))
depends.update(field_dependencies(field, checked_models))
# Add in any non-abstract bases
for base in model.__bases__:
if issubclass(base, models.Model) and (base is not models.Model) and not base._meta.abstract:
if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract:
depends.add(base)
# Now recurse
new_to_check = depends - checked_models
@@ -114,21 +117,35 @@ def model_dependencies(model, checked_models=None):
def field_dependencies(field, checked_models=None):
checked_models = checked_models or set()
depends = set()
if isinstance(field, (models.OneToOneField, models.ForeignKey, models.ManyToManyField, GenericRelation)):
if field.rel.to in checked_models:
return depends
checked_models.add(field.rel.to)
depends.add(field.rel.to)
depends.update(field_dependencies(field.rel.to._meta.pk, checked_models))
# Also include M2M throughs
if isinstance(field, models.ManyToManyField):
if field.rel.through:
if hasattr(field.rel, "through_model"): # 1.1 and below
depends.add(field.rel.through_model)
else:
# Make sure it's not an automatic one
if not auto_model(field.rel.through):
depends.add(field.rel.through) # 1.2 and up
arg_defs, kwarg_defs = modelsinspector.matching_details(field)
for attrname, options in arg_defs + list(kwarg_defs.values()):
if options.get("ignore_if_auto_through", False) and auto_through(field):
continue
if options.get("is_value", False):
value = attrname
elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'):
# Hack for django 1.1 and below, where the through model is stored
# in rel.through_model while rel.through stores only the model name.
value = field.rel.through_model
else:
try:
value = get_attribute(field, attrname)
except AttributeError:
if options.get("ignore_missing", False):
continue
raise
if isinstance(value, Model):
value = value.__class__
if not isinstance(value, ModelBase):
continue
if getattr(value._meta, "proxy", False):
value = value._meta.proxy_for_model
if value in checked_models:
continue
checked_models.add(value)
depends.add(value)
depends.update(model_dependencies(value, checked_models))
return depends
### Prettyprinters
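
The rewritten `field_dependencies()` walks attribute paths such as 'rel.to' via `south.utils.get_attribute`. As a point of reference, that helper is essentially a dotted `getattr` chain; a minimal re-implementation (not the actual South source):

def get_attribute(obj, name):
    # Resolve a dotted path like "rel.to" one attribute at a time.
    for segment in name.split("."):
        obj = getattr(obj, segment)
    return obj

class _Rel(object):
    to = "auth.User"

class _Field(object):
    rel = _Rel()

assert get_attribute(_Field(), "rel.to") == "auth.User"
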
@@ -172,4 +189,4 @@ def remove_useless_meta(meta):
for name in USELESS_META:
if name in meta:
del meta[name]
return meta
return meta

south/db/.gitignore

@@ -1 +0,0 @@
/*.pyc

south/db/__init__.py

@@ -10,13 +10,19 @@ engine_modules = {
'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2',
'django.db.backends.sqlite3': 'sqlite3',
'django.db.backends.mysql': 'mysql',
'mysql_oursql.standard': 'mysql',
'django.db.backends.oracle': 'oracle',
'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc
'sql_server.pyodbc': 'sql_server.pyodbc', #django-pyodbc-azure
'django_pyodbc': 'sql_server.pyodbc', #django-pyodbc
'sqlserver_ado': 'sql_server.pyodbc', #django-mssql
'firebird': 'firebird', #django-firebird
'django.contrib.gis.db.backends.postgis': 'postgresql_psycopg2',
'django.contrib.gis.db.backends.spatialite': 'sqlite3',
'django.contrib.gis.db.backends.mysql': 'mysql',
'django.contrib.gis.db.backends.oracle': 'oracle',
'doj.backends.zxjdbc.postgresql': 'postgresql_psycopg2', #django-jython
'doj.backends.zxjdbc.mysql': 'mysql', #django-jython
'doj.backends.zxjdbc.oracle': 'oracle', #django-jython
}
# First, work out if we're multi-db or not, and which databases we have
@@ -35,8 +41,9 @@ else:
# Loop over the defined databases, gathering up their engines
db_engines = dict([
# Note we check to see if contrib.gis has overridden us.
(alias, "south.db.%s" % engine_modules.get(db_settings['ENGINE'], None))
(alias, "south.db.%s" % engine_modules[db_settings['ENGINE']])
for alias, db_settings in settings.DATABASES.items()
if db_settings['ENGINE'] in engine_modules
])
# Update with any overrides
db_engines.update(getattr(settings, "SOUTH_DATABASE_ADAPTERS", {}))
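
The lookup change here is subtle: the old `engine_modules.get(..., None)` mapped unknown engines to the bogus module path "south.db.None", while the new comprehension simply skips aliases whose engine South does not know. A sketch with illustrative settings:

engine_modules = {
    'django.db.backends.sqlite3': 'sqlite3',
    'django.db.backends.postgresql_psycopg2': 'postgresql_psycopg2',
}
# Illustrative DATABASES setting, including one unsupported backend.
DATABASES = {
    'default': {'ENGINE': 'django.db.backends.sqlite3'},
    'legacy':  {'ENGINE': 'some.unknown.backend'},
}
db_engines = dict([
    (alias, "south.db.%s" % engine_modules[db_settings['ENGINE']])
    for alias, db_settings in DATABASES.items()
    if db_settings['ENGINE'] in engine_modules
])
assert db_engines == {'default': 'south.db.sqlite3'}  # 'legacy' is skipped
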

south/db/firebird.py

@@ -0,0 +1,362 @@
# firebird
from __future__ import print_function
import datetime
from django.db import connection, models
from django.core.management.color import no_style
from django.db.utils import DatabaseError
from south.db import generic
from south.utils.py3 import string_types
class DatabaseOperations(generic.DatabaseOperations):
backend_name = 'firebird'
alter_string_set_type = 'ALTER %(column)s TYPE %(type)s'
alter_string_set_default = 'ALTER %(column)s SET DEFAULT %(default)s;'
alter_string_drop_null = ''
add_column_string = 'ALTER TABLE %s ADD %s;'
delete_column_string = 'ALTER TABLE %s DROP %s;'
rename_table_sql = ''
# Features
allows_combined_alters = False
has_booleans = False
def _fill_constraint_cache(self, db_name, table_name):
self._constraint_cache.setdefault(db_name, {})
self._constraint_cache[db_name][table_name] = {}
rows = self.execute("""
SELECT
rc.RDB$CONSTRAINT_NAME,
rc.RDB$CONSTRAINT_TYPE,
cc.RDB$TRIGGER_NAME
FROM rdb$relation_constraints rc
JOIN rdb$check_constraints cc
ON rc.rdb$constraint_name = cc.rdb$constraint_name
WHERE rc.rdb$constraint_type = 'NOT NULL'
AND rc.rdb$relation_name = '%s'
""" % table_name)
for constraint, kind, column in rows:
self._constraint_cache[db_name][table_name].setdefault(column, set())
self._constraint_cache[db_name][table_name][column].add((kind, constraint))
return
def _alter_column_set_null(self, table_name, column_name, is_null):
sql = """
UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s
WHERE RDB$FIELD_NAME = '%(column)s'
AND RDB$RELATION_NAME = '%(table_name)s'
"""
null_flag = 'NULL' if is_null else '1'
return sql % {
'null_flag': null_flag,
'column': column_name.upper(),
'table_name': table_name.upper()
}
def _column_has_default(self, params):
sql = """
SELECT a.RDB$DEFAULT_VALUE
FROM RDB$RELATION_FIELDS a
WHERE a.RDB$FIELD_NAME = '%(column)s'
AND a.RDB$RELATION_NAME = '%(table_name)s'
"""
value = self.execute(sql % params)
return True if value else False
def _alter_set_defaults(self, field, name, params, sqls):
"Subcommand of alter_column that sets default values (overrideable)"
# Historically, we used to set defaults here.
# But since South 0.8, we don't ever set defaults on alter-column -- we only
# use database-level defaults as scaffolding when adding columns.
# However, we still sometimes need to remove defaults in alter-column.
if self._column_has_default(params):
sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
@generic.invalidate_table_constraints
def create_table(self, table_name, fields):
columns = []
autoinc_sql = ''
for field_name, field in fields:
# avoid default values in CREATE TABLE statements (#925)
field._suppress_default = True
col = self.column_sql(table_name, field_name, field)
if not col:
continue
columns.append(col)
if isinstance(field, models.AutoField):
field_name = field.db_column or field.column
autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)
self.execute(self.create_table_sql % {
"table": self.quote_name(table_name),
"columns": ', '.join([col for col in columns if col]),
})
if autoinc_sql:
self.execute(autoinc_sql[0])
self.execute(autoinc_sql[1])
def rename_table(self, old_table_name, table_name):
"""
Renaming a table is not supported by Firebird.
It would involve recreating all related objects (stored procedures, views, triggers, etc.).
"""
pass
@generic.invalidate_table_constraints
def delete_table(self, table_name, cascade=False):
"""
Deletes the table 'table_name'.
Firebird will also delete any triggers associated with the table.
"""
super(DatabaseOperations, self).delete_table(table_name, cascade=False)
# Also, drop sequence if exists
sql = connection.ops.drop_sequence_sql(table_name)
if sql:
try:
self.execute(sql)
except:
pass
def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
"""
Creates the SQL snippet for a column. Used by add_column and add_table.
"""
# If the field hasn't already been told its attribute name, do so.
if not field_prepared:
field.set_attributes_from_name(field_name)
# hook for the field to do any resolution prior to its attributes being queried
if hasattr(field, 'south_init'):
field.south_init()
# Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
field = self._field_sanity(field)
try:
sql = field.db_type(connection=self._get_connection())
except TypeError:
sql = field.db_type()
if sql:
# Some callers, like the sqlite stuff, just want the extended type.
if with_name:
field_output = [self.quote_name(field.column), sql]
else:
field_output = [sql]
if field.primary_key:
field_output.append('NOT NULL PRIMARY KEY')
elif field.unique:
# Just use UNIQUE (no indexes any more, we have delete_unique)
field_output.append('UNIQUE')
sql = ' '.join(field_output)
sqlparams = ()
# if the field is "NOT NULL" and a default value is provided, create the column with it
# this allows the addition of a NOT NULL field to a table with existing rows
if not getattr(field, '_suppress_default', False):
if field.has_default():
default = field.get_default()
# If the default is actually None, don't add a default term
if default is not None:
# If the default is a callable, then call it!
if callable(default):
default = default()
# Now do some very cheap quoting. TODO: Redesign return values to avoid this.
if isinstance(default, string_types):
default = "'%s'" % default.replace("'", "''")
elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
default = "'%s'" % default
elif isinstance(default, bool):
default = int(default)
# Escape any % signs in the output (bug #317)
if isinstance(default, string_types):
default = default.replace("%", "%%")
# Add it in
sql += " DEFAULT %s"
sqlparams = (default)
elif (not field.null and field.blank) or (field.get_default() == ''):
if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
sql += " DEFAULT ''"
# Error here would be nice, but doesn't seem to play fair.
#else:
# raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
# Firebird needs NOT NULL to come after the DEFAULT value keyword
if not field.primary_key and not field.null:
sql += ' NOT NULL'
if field.rel and self.supports_foreign_keys:
self.add_deferred_sql(
self.foreign_key_sql(
table_name,
field.column,
field.rel.to._meta.db_table,
field.rel.to._meta.get_field(field.rel.field_name).column
)
)
# Things like the contrib.gis module fields have this in 1.1 and below
if hasattr(field, 'post_create_sql'):
for stmt in field.post_create_sql(no_style(), table_name):
self.add_deferred_sql(stmt)
# Avoid double index creation (#1317)
# Firebird creates an index implicitly for each foreign key field
# sql_indexes_for_field tries to create an index for that field too
if not field.rel:
# In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
# This also creates normal indexes in 1.1.
if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
# Make a fake model to pass in, with only db_table
model = self.mock_model("FakeModelForGISCreation", table_name)
for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
self.add_deferred_sql(stmt)
if sql:
return sql % sqlparams
else:
return None
def _drop_constraints(self, table_name, name, field):
if self.has_check_constraints:
check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
for constraint in check_constraints:
self.execute(self.delete_check_sql % {
'table': self.quote_name(table_name),
'constraint': self.quote_name(constraint),
})
# Drop or add UNIQUE constraint
unique_constraint = list(self._constraints_affecting_columns(table_name, [name], "UNIQUE"))
if field.unique and not unique_constraint:
self.create_unique(table_name, [name])
elif not field.unique and unique_constraint:
self.delete_unique(table_name, [name])
# Drop all foreign key constraints
try:
self.delete_foreign_key(table_name, name)
except ValueError:
# There weren't any
pass
@generic.invalidate_table_constraints
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
"""
Alters the given column name so it will match the given field.
Note that conversion between the two by the database must be possible.
Will not automatically add _id by default; to have this behaviour, pass
explicit_name=False.
@param table_name: The name of the table to add the column to
@param name: The name of the column to alter
@param field: The new field definition to use
"""
if self.dry_run:
if self.debug:
print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
return
# hook for the field to do any resolution prior to its attributes being queried
if hasattr(field, 'south_init'):
field.south_init()
# Add _id or whatever if we need to
field.set_attributes_from_name(name)
if not explicit_name:
name = field.column
else:
field.column = name
if not ignore_constraints:
# Drop all check constraints. Note that constraints will be added back
# with self.alter_string_set_type and self.alter_string_drop_null.
self._drop_constraints(table_name, name, field)
# First, change the type
params = {
"column": self.quote_name(name),
"type": self._db_type_for_alter_column(field),
"table_name": table_name
}
# SQLs is a list of (SQL, values) pairs.
sqls = []
sqls_extra = []
# Only alter the column if it has a type (Geometry ones sometimes don't)
if params["type"] is not None:
sqls.append((self.alter_string_set_type % params, []))
# Add any field- and backend- specific modifications
self._alter_add_column_mods(field, name, params, sqls)
# Next, nullity: handled separately because Firebird doesn't support DROP NOT NULL
sqls_extra.append(self._alter_column_set_null(table_name, name, field.null))
# Next, set any default
self._alter_set_defaults(field, name, params, sqls)
# Finally, actually change the column
if self.allows_combined_alters:
sqls, values = list(zip(*sqls))
self.execute(
"ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
generic.flatten(values),
)
else:
# Databases like e.g. MySQL don't like more than one alter at once.
for sql, values in sqls:
try:
self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
except DatabaseError as e:
print(e)
# Execute the extra SQL, which doesn't need an ALTER TABLE statement
for sql in sqls_extra:
self.execute(sql)
if not ignore_constraints:
# Add back FK constraints if needed
if field.rel and self.supports_foreign_keys:
self.execute(
self.foreign_key_sql(
table_name,
field.column,
field.rel.to._meta.db_table,
field.rel.to._meta.get_field(field.rel.field_name).column
)
)
@generic.copy_column_constraints
@generic.delete_column_constraints
def rename_column(self, table_name, old, new):
if old == new:
# Short-circuit out
return []
self.execute('ALTER TABLE %s ALTER %s TO %s;' % (
self.quote_name(table_name),
self.quote_name(old),
self.quote_name(new),
))
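
Because Firebird has no DROP NOT NULL, `_alter_column_set_null()` above rewrites the system table directly. The SQL it renders looks like this (table and column names below are made up):

# Rendering of the _alter_column_set_null() template, for reference.
sql = """
UPDATE RDB$RELATION_FIELDS SET RDB$NULL_FLAG = %(null_flag)s
WHERE RDB$FIELD_NAME = '%(column)s'
AND RDB$RELATION_NAME = '%(table_name)s'
"""
print(sql % {
    'null_flag': 'NULL',      # allow NULLs; '1' means NOT NULL
    'column': 'TITLE',        # Firebird stores identifiers upper-cased
    'table_name': 'APP_BOOK',
})
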

south/db/generic.py

@@ -1,19 +1,35 @@
from __future__ import print_function
import datetime
import string
import random
import re
import sys
from django.core.management.color import no_style
from django.db import transaction, models
from django.db.utils import DatabaseError
from django.db.backends.util import truncate_name
from django.db.backends.creation import BaseDatabaseCreation
from django.db.models.fields import NOT_PROVIDED
from django.dispatch import dispatcher
from django.conf import settings
from django.utils.datastructures import SortedDict
try:
from django.utils.functional import cached_property
except ImportError:
class cached_property(object):
"""
Decorator that converts a method with a single
self argument into a property cached on the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
from south.logger import get_logger
from south.utils.py3 import string_types, text_type
def alias(attrname):
"""
@@ -25,33 +41,102 @@ def alias(attrname):
return func
class DatabaseOperations(object):
def invalidate_table_constraints(func):
def _cache_clear(self, table, *args, **opts):
self._set_cache(table, value=INVALID)
return func(self, table, *args, **opts)
return _cache_clear
def delete_column_constraints(func):
def _column_rm(self, table, column, *args, **opts):
self._set_cache(table, column, value=[])
return func(self, table, column, *args, **opts)
return _column_rm
def copy_column_constraints(func):
def _column_cp(self, table, column_old, column_new, *args, **opts):
db_name = self._get_setting('NAME')
self._set_cache(table, column_new, value=self.lookup_constraint(db_name, table, column_old))
return func(self, table, column_old, column_new, *args, **opts)
return _column_cp
class INVALID(Exception):
def __repr__(self):
return 'INVALID'
class DryRunError(ValueError):
pass
class DatabaseOperations(object):
"""
Generic SQL implementation of the DatabaseOperations.
Some of this code comes from Django Evolution.
"""
# We assume the generic DB can handle DDL transactions. MySQL will change this.
has_ddl_transactions = True
alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
has_check_constraints = True
delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
allows_combined_alters = True
add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
supports_foreign_keys = True
create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
max_index_name_length = 63
drop_index_string = 'DROP INDEX %(index_name)s'
delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
backend_name = None
default_schema_name = "public"
# Features
allows_combined_alters = True
supports_foreign_keys = True
has_check_constraints = True
has_booleans = True
raises_default_errors = True
@cached_property
def has_ddl_transactions(self):
"""
Tests the database using feature detection to see if it has
transactional DDL support.
"""
self._possibly_initialise()
connection = self._get_connection()
if hasattr(connection.features, "confirm") and not connection.features._confirmed:
connection.features.confirm()
# Django 1.3's MySQLdb backend doesn't raise DatabaseError
exceptions = (DatabaseError, )
try:
from MySQLdb import OperationalError
exceptions += (OperationalError, )
except ImportError:
pass
# Now do the test
if getattr(connection.features, 'supports_transactions', True):
cursor = connection.cursor()
self.start_transaction()
cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
self.rollback_transaction()
try:
try:
cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
except exceptions:
return False
else:
return True
finally:
cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
else:
return False
def __init__(self, db_alias):
self.debug = False
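
For Django versions without `django.utils.functional.cached_property`, the fallback defined above caches the computed value in the instance `__dict__`, so the descriptor only fires once. A quick check of that behaviour:

class cached_property(object):
    # Same shape as the fallback defined above.
    def __init__(self, func):
        self.func = func
    def __get__(self, instance, type):
        res = instance.__dict__[self.func.__name__] = self.func(instance)
        return res

class Ops(object):
    calls = 0
    @cached_property
    def has_ddl_transactions(self):
        Ops.calls += 1
        return True

ops = Ops()
assert ops.has_ddl_transactions and ops.has_ddl_transactions
assert Ops.calls == 1  # second access hit the cached instance attribute
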
@@ -60,36 +145,79 @@ class DatabaseOperations(object):
self.pending_transactions = 0
self.pending_create_signals = []
self.db_alias = db_alias
self._constraint_cache = {}
self._initialised = False
def lookup_constraint(self, db_name, table_name, column_name=None):
""" return a set() of constraints for db_name.table_name.column_name """
def _lookup():
table = self._constraint_cache[db_name][table_name]
if table is INVALID:
raise INVALID
elif column_name is None:
return list(table.items())
else:
return table[column_name]
try:
ret = _lookup()
return ret
except INVALID:
del self._constraint_cache[db_name][table_name]
self._fill_constraint_cache(db_name, table_name)
except KeyError:
if self._is_valid_cache(db_name, table_name):
return []
self._fill_constraint_cache(db_name, table_name)
return self.lookup_constraint(db_name, table_name, column_name)
def _set_cache(self, table_name, column_name=None, value=INVALID):
db_name = self._get_setting('NAME')
try:
if column_name is not None:
self._constraint_cache[db_name][table_name][column_name] = value
else:
self._constraint_cache[db_name][table_name] = value
except (LookupError, TypeError):
pass
def _is_valid_cache(self, db_name, table_name):
# we cache per-table so if the table is there it is valid
try:
return self._constraint_cache[db_name][table_name] is not INVALID
except KeyError:
return False
def _is_multidb(self):
try:
try:
from django.db import connections
connections # Prevents "unused import" warning
except ImportError:
return False
else:
return True
def _get_connection(self):
"""
Returns a django connection for a given DB Alias
def _get_connection(self):
"""
Returns a django connection for a given DB Alias
"""
if self._is_multidb():
from django.db import connections
return connections[self.db_alias]
from django.db import connections
return connections[self.db_alias]
else:
from django.db import connection
return connection
from django.db import connection
return connection
def _get_setting(self, setting_name):
"""
Allows code to get a setting (like, for example, STORAGE_ENGINE)
"""
setting_name = setting_name.upper()
connection = self._get_connection()
connection = self._get_connection()
if self._is_multidb():
# Django 1.2 and above
return connection.settings_dict[setting_name]
return connection.settings_dict[setting_name]
else:
# Django 1.1 and below
return getattr(settings, "DATABASE_%s" % setting_name)
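
The constraint cache keyed by database and table uses `INVALID` as a poison value: an invalidated table entry forces a refill from introspection on the next lookup. A compressed sketch of that lifecycle (the refill below is a stub, not the real `_fill_constraint_cache`):

class INVALID(Exception):
    pass

cache = {"mydb": {}}

def fill(db, table):
    cache[db][table] = {"title": set([("UNIQUE", "app_book_title_uniq")])}

def lookup(db, table, column):
    entry = cache[db].get(table)
    if entry is None or entry is INVALID:
        fill(db, table)          # stand-in for _fill_constraint_cache()
        entry = cache[db][table]
    return entry[column]

cache["mydb"]["app_book"] = INVALID          # e.g. after an ALTER TABLE
assert lookup("mydb", "app_book", "title")   # refilled transparently
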
@@ -110,7 +238,6 @@ class DatabaseOperations(object):
return self._get_setting('schema')
except (KeyError, AttributeError):
return self.default_schema_name
def _possibly_initialise(self):
if not self._initialised:
@@ -130,7 +257,11 @@ class DatabaseOperations(object):
"""
return self._get_connection().ops.quote_name(name)
def execute(self, sql, params=[]):
def _print_sql_error(self, e, sql, params=[]):
print('FATAL ERROR - The following SQL query failed: %s' % sql, file=sys.stderr)
print('The error was: %s' % e, file=sys.stderr)
def execute(self, sql, params=[], print_all_errors=True):
"""
Executes the given SQL statement, with optional parameters.
If the instance's debug attribute is True, prints out what it executes.
@@ -140,20 +271,25 @@ class DatabaseOperations(object):
cursor = self._get_connection().cursor()
if self.debug:
print " = %s" % sql, params
get_logger().debug('south execute "%s" with params "%s"' % (sql, params))
print(" = %s" % sql, params)
if self.dry_run:
return []
cursor.execute(sql, params)
get_logger().debug(text_type('execute "%s" with params "%s"' % (sql, params)))
try:
cursor.execute(sql, params)
except DatabaseError as e:
if print_all_errors:
self._print_sql_error(e, sql, params)
raise
try:
return cursor.fetchall()
except:
return []
def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
"""
Takes a SQL file and executes it as many separate statements.
@@ -167,7 +303,6 @@ class DatabaseOperations(object):
for st in re.split(regex, sql)[1:][::2]:
self.execute(st)
def add_deferred_sql(self, sql):
"""
Add a SQL statement to the deferred list, that won't be executed until
@@ -175,7 +310,6 @@ class DatabaseOperations(object):
"""
self.deferred_sql.append(sql)
def execute_deferred_sql(self):
"""
Executes all deferred SQL, resetting the deferred_sql list
@@ -185,14 +319,12 @@ class DatabaseOperations(object):
self.deferred_sql = []
def clear_deferred_sql(self):
"""
Resets the deferred_sql list to empty.
"""
self.deferred_sql = []
def clear_run_data(self, pending_creates = None):
"""
Resets variables to how they should be before a run. Used for dry runs.
@@ -201,11 +333,10 @@ class DatabaseOperations(object):
self.clear_deferred_sql()
self.pending_create_signals = pending_creates or []
def get_pending_creates(self):
return self.pending_create_signals
@invalidate_table_constraints
def create_table(self, table_name, fields):
"""
Creates the table 'table_name'. 'fields' is a tuple of fields,
@@ -214,21 +345,25 @@ class DatabaseOperations(object):
"""
if len(table_name) > 63:
print " ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL."
print(" ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL.")
# avoid default values in CREATE TABLE statements (#925)
for field_name, field in fields:
field._suppress_default = True
columns = [
self.column_sql(table_name, field_name, field)
for field_name, field in fields
]
self.execute('CREATE TABLE %s (%s);' % (
self.quote_name(table_name),
', '.join([col for col in columns if col]),
))
add_table = alias('create_table') # Alias for consistency's sake
self.execute(self.create_table_sql % {
"table": self.quote_name(table_name),
"columns": ', '.join([col for col in columns if col]),
})
add_table = alias('create_table') # Alias for consistency's sake
@invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
"""
Renames the table 'old_table_name' to 'table_name'.
@@ -237,9 +372,11 @@ class DatabaseOperations(object):
# Short-circuit out.
return
params = (self.quote_name(old_table_name), self.quote_name(table_name))
self.execute('ALTER TABLE %s RENAME TO %s;' % params)
self.execute(self.rename_table_sql % params)
# Invalidate the not-yet-indexed table
self._set_cache(table_name, value=INVALID)
@invalidate_table_constraints
def delete_table(self, table_name, cascade=True):
"""
Deletes the table 'table_name'.
@@ -252,7 +389,7 @@ class DatabaseOperations(object):
drop_table = alias('delete_table')
@invalidate_table_constraints
def clear_table(self, table_name):
"""
Deletes all rows from 'table_name'.
@@ -260,8 +397,7 @@ class DatabaseOperations(object):
params = (self.quote_name(table_name), )
self.execute('DELETE FROM %s;' % params)
@invalidate_table_constraints
def add_column(self, table_name, name, field, keep_default=True):
"""
Adds the column 'name' to the table 'table_name'.
@@ -282,11 +418,10 @@ class DatabaseOperations(object):
self.execute(sql)
# Now, drop the default if we need to
if not keep_default and field.default is not None:
if field.default is not None:
field.default = NOT_PROVIDED
self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
def _db_type_for_alter_column(self, field):
"""
Returns a field's type suitable for ALTER COLUMN.
@@ -299,15 +434,28 @@ class DatabaseOperations(object):
except TypeError:
return field.db_type()
def _alter_set_defaults(self, field, name, params, sqls):
"Subcommand of alter_column that sets default values (overrideable)"
# Next, set any default
if not field.null and field.has_default():
default = field.get_default()
sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default]))
else:
sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
def _alter_add_column_mods(self, field, name, params, sqls):
"""
Subcommand of alter_column that modifies column definitions beyond
the type string -- e.g. adding constraints where they cannot be specified
as part of the type (overrideable)
"""
pass
def _alter_set_defaults(self, field, name, params, sqls):
"Subcommand of alter_column that sets default values (overrideable)"
# Historically, we used to set defaults here.
# But since South 0.8, we don't ever set defaults on alter-column -- we only
# use database-level defaults as scaffolding when adding columns.
# However, we still sometimes need to remove defaults in alter-column.
sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
def _update_nulls_to_default(self, params, field):
"Subcommand of alter_column that updates nulls to default value (overrideable)"
default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
self.execute('UPDATE %(table_name)s SET %(column)s=%%s WHERE %(column)s IS NULL' % params, [default])
@invalidate_table_constraints
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
"""
Alters the given column name so it will match the given field.
@@ -321,6 +469,8 @@ class DatabaseOperations(object):
"""
if self.dry_run:
if self.debug:
print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
return
# hook for the field to do any resolution prior to its attributes being queried
@@ -335,7 +485,8 @@ class DatabaseOperations(object):
field.column = name
if not ignore_constraints:
# Drop all check constraints. TODO: Add the right ones back.
# Drop all check constraints. Note that constraints will be added back
# with self.alter_string_set_type and self.alter_string_drop_null.
if self.has_check_constraints:
check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
for constraint in check_constraints:
@@ -354,8 +505,8 @@ class DatabaseOperations(object):
# First, change the type
params = {
"column": self.quote_name(name),
"type": self._db_type_for_alter_column(field),
"table_name": table_name
"type": self._db_type_for_alter_column(field),
"table_name": self.quote_name(table_name)
}
# SQLs is a list of (SQL, values) pairs.
@@ -365,16 +516,18 @@ class DatabaseOperations(object):
if params["type"] is not None:
sqls.append((self.alter_string_set_type % params, []))
# Add any field- and backend- specific modifications
self._alter_add_column_mods(field, name, params, sqls)
# Next, nullity
if field.null:
if field.null or field.has_default():
sqls.append((self.alter_string_set_null % params, []))
else:
sqls.append((self.alter_string_drop_null % params, []))
# Next, set any default
# Do defaults
self._alter_set_defaults(field, name, params, sqls)
# Finally, actually change the column
# Actually change the column (step 1 -- Nullity may need to be fixed)
if self.allows_combined_alters:
sqls, values = zip(*sqls)
self.execute(
@@ -385,7 +538,12 @@ class DatabaseOperations(object):
# Databases like e.g. MySQL don't like more than one alter at once.
for sql, values in sqls:
self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
if not field.null and field.has_default():
# Final fixes
self._update_nulls_to_default(params, field)
self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), [])
if not ignore_constraints:
# Add back FK constraints if needed
if field.rel and self.supports_foreign_keys:
@@ -398,52 +556,56 @@ class DatabaseOperations(object):
)
)
def _fill_constraint_cache(self, db_name, table_name):
schema = self._get_schema_name()
ifsc_tables = ["constraint_column_usage", "key_column_usage"]
self._constraint_cache.setdefault(db_name, {})
self._constraint_cache[db_name][table_name] = {}
for ifsc_table in ifsc_tables:
rows = self.execute("""
SELECT kc.constraint_name, kc.column_name, c.constraint_type
FROM information_schema.%s AS kc
JOIN information_schema.table_constraints AS c ON
kc.table_schema = c.table_schema AND
kc.table_name = c.table_name AND
kc.constraint_name = c.constraint_name
WHERE
kc.table_schema = %%s AND
kc.table_name = %%s
""" % ifsc_table, [schema, table_name])
for constraint, column, kind in rows:
self._constraint_cache[db_name][table_name].setdefault(column, set())
self._constraint_cache[db_name][table_name][column].add((kind, constraint))
return
def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
"""
Gets the names of the constraints affecting the given columns.
If columns is None, returns all constraints of the type on the table.
"""
if self.dry_run:
raise ValueError("Cannot get constraints for columns during a dry run.")
raise DryRunError("Cannot get constraints for columns.")
if columns is not None:
columns = set(columns)
columns = set(map(lambda s: s.lower(), columns))
if type == "CHECK":
ifsc_table = "constraint_column_usage"
else:
ifsc_table = "key_column_usage"
db_name = self._get_setting('NAME')
schema = self._get_schema_name()
cnames = {}
for col, constraints in self.lookup_constraint(db_name, table_name):
for kind, cname in constraints:
if kind == type:
cnames.setdefault(cname, set())
cnames[cname].add(col.lower())
# First, load all constraint->col mappings for this table.
rows = self.execute("""
SELECT kc.constraint_name, kc.column_name
FROM information_schema.%s AS kc
JOIN information_schema.table_constraints AS c ON
kc.table_schema = c.table_schema AND
kc.table_name = c.table_name AND
kc.constraint_name = c.constraint_name
WHERE
kc.table_schema = %%s AND
kc.table_name = %%s AND
c.constraint_type = %%s
""" % ifsc_table, [schema, table_name, type])
# Load into a dict
mapping = {}
for constraint, column in rows:
mapping.setdefault(constraint, set())
mapping[constraint].add(column)
# Find ones affecting these columns
for constraint, itscols in mapping.items():
# If columns is None we definitely want this field! (see docstring)
if itscols == columns or columns is None:
yield constraint
for cname, cols in cnames.items():
if cols == columns or columns is None:
yield cname
@invalidate_table_constraints
def create_unique(self, table_name, columns):
"""
Creates a UNIQUE constraint on the columns on the given table.
@@ -456,12 +618,13 @@ class DatabaseOperations(object):
cols = ", ".join(map(self.quote_name, columns))
self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (
self.quote_name(table_name),
self.quote_name(name),
self.quote_name(table_name),
self.quote_name(name),
cols,
))
return name
@invalidate_table_constraints
def delete_unique(self, table_name, columns):
"""
Deletes a UNIQUE constraint on precisely the columns on the given table.
@@ -472,6 +635,8 @@ class DatabaseOperations(object):
# Dry runs mean we can't do anything.
if self.dry_run:
if self.debug:
print(' - no dry run output for delete_unique_column() due to dynamic DDL, sorry')
return
constraints = list(self._constraints_affecting_columns(table_name, columns))
@@ -479,11 +644,10 @@ class DatabaseOperations(object):
raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns))
for constraint in constraints:
self.execute(self.delete_unique_sql % (
self.quote_name(table_name),
self.quote_name(table_name),
self.quote_name(constraint),
))
def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
"""
Creates the SQL snippet for a column. Used by add_column and add_table.
@@ -521,7 +685,7 @@ class DatabaseOperations(object):
field_output.append('UNIQUE')
tablespace = field.db_tablespace or tablespace
if tablespace and self._get_connection().features.supports_tablespaces and field.unique:
if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
# We must specify the index tablespace inline, because we
# won't be generating a CREATE INDEX statement for this field.
field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
@@ -538,13 +702,14 @@ class DatabaseOperations(object):
# If the default is a callable, then call it!
if callable(default):
default = default()
default = field.get_db_prep_save(default, connection=self._get_connection())
default = self._default_value_workaround(default)
# Now do some very cheap quoting. TODO: Redesign return values to avoid this.
if isinstance(default, basestring):
if isinstance(default, string_types):
default = "'%s'" % default.replace("'", "''")
elif isinstance(default, (datetime.date, datetime.time, datetime.datetime)):
default = "'%s'" % default
# Escape any % signs in the output (bug #317)
if isinstance(default, basestring):
if isinstance(default, string_types):
default = default.replace("%", "%%")
# Add it in
sql += " DEFAULT %s"
@@ -584,7 +749,6 @@ class DatabaseOperations(object):
else:
return None
def _field_sanity(self, field):
"""
Placeholder for DBMS-specific field alterations (some combos aren't valid,
@@ -592,27 +756,40 @@
"""
return field
def _default_value_workaround(self, value):
"""
DBMS-specific value alterations (this really works around
missing functionality in Django backends)
"""
if isinstance(value, bool) and not self.has_booleans:
return int(value)
else:
return value
def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
"""
Generates a full SQL statement to add a foreign key constraint
"""
constraint_name = '%s_refs_%s_%x' % (from_column_name, to_column_name, abs(hash((from_table_name, to_table_name))))
constraint_name = '%s_refs_%s_%s' % (from_column_name, to_column_name, self._digest(from_table_name, to_table_name))
return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % (
self.quote_name(from_table_name),
self.quote_name(truncate_name(constraint_name, self._get_connection().ops.max_name_length())),
self.quote_name(self.shorten_name(constraint_name)),
self.quote_name(from_column_name),
self.quote_name(to_table_name),
self.quote_name(to_column_name),
self._get_connection().ops.deferrable_sql() # Django knows this
self._get_connection().ops.deferrable_sql() # Django knows this
)
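# Editor's sketch (hypothetical names): for a books_book.author_id column
# referencing auth_user.id,
#   db.foreign_key_sql('books_book', 'author_id', 'auth_user', 'id')
# returns roughly
#   ALTER TABLE "books_book" ADD CONSTRAINT "author_id_refs_id_<digest>"
#   FOREIGN KEY ("author_id") REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED;
# with the constraint name shortened to the backend's maximum identifier length
# and the deferrable clause supplied by the backend's deferrable_sql().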
@invalidate_table_constraints
def delete_foreign_key(self, table_name, column):
"Drop a foreign key constraint"
"""
Drop a foreign key constraint
"""
if self.dry_run:
return # We can't look at the DB to get the constraints
constraints = list(self._constraints_affecting_columns(table_name, [column], "FOREIGN KEY"))
if self.debug:
print(' - no dry run output for delete_foreign_key() due to dynamic DDL, sorry')
return # We can't look at the DB to get the constraints
constraints = self._find_foreign_constraints(table_name, column)
if not constraints:
raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column))
for constraint_name in constraints:
@ -623,33 +800,67 @@ class DatabaseOperations(object):
drop_foreign_key = alias('delete_foreign_key')
def _find_foreign_constraints(self, table_name, column_name=None):
constraints = self._constraints_affecting_columns(
table_name, [column_name], "FOREIGN KEY")
primary_key_columns = self._find_primary_key_columns(table_name)
if len(primary_key_columns) > 1:
# Composite primary keys cannot be referenced by a foreign key
return list(constraints)
else:
primary_key_columns.add(column_name)
recursive_constraints = set(self._constraints_affecting_columns(
table_name, primary_key_columns, "FOREIGN KEY"))
return list(recursive_constraints.union(constraints))
def _digest(self, *args):
"""
Use django.db.backends.creation.BaseDatabaseCreation._digest
to create index name in Django style. An evil hack :(
"""
if not hasattr(self, '_django_db_creation'):
self._django_db_creation = BaseDatabaseCreation(self._get_connection())
return self._django_db_creation._digest(*args)
def shorten_name(self, name):
return truncate_name(name, self._get_connection().ops.max_name_length())
def create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for the index
"""
table_name = table_name.replace('"', '').replace('.', '_')
index_unique_name = ''
# If there is just one column in the index, use a default algorithm from Django
if len(column_names) == 1 and not suffix:
try:
_hash = self._digest([column_names[0]])
except TypeError:
# Django < 1.5 backward compatibility.
_hash = self._digest(column_names[0])
return self.shorten_name(
'%s_%s' % (table_name, _hash),
)
if len(column_names) > 1:
index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
# Else generate the name for the index by South
table_name = table_name.replace('"', '').replace('.', '_')
index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
# If the index name is too long, truncate it
index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_')
if len(index_name) > self.max_index_name_length:
part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
index_name = '%s%s' % (table_name[:(self.max_index_name_length-len(part))], part)
index_name = '%s%s' % (table_name[:(self.max_index_name_length - len(part))], part)
return index_name
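# Editor's sketch of the naming rules above (inputs and hashes hypothetical):
#   db.create_index_name('books_book', ['title'])
#   -> 'books_book_<digest>'       (single column: Django-style name)
#   db.create_index_name('books_book', ['title', 'year'])
#   -> 'books_book_title_<hex>'    (multi-column: South-style, truncated)
# The name is deterministic in (table, columns), which is what lets
# delete_index() reconstruct it from the column names alone.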
def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
"""
Generates a create index statement on 'table_name' for a list of 'column_names'
"""
if not column_names:
print "No column names supplied on which to create an index"
print("No column names supplied on which to create an index")
return ''
connection = self._get_connection()
@ -667,19 +878,20 @@ class DatabaseOperations(object):
tablespace_sql
)
@invalidate_table_constraints
def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
""" Executes a create index statement """
sql = self.create_index_sql(table_name, column_names, unique, db_tablespace)
self.execute(sql)
@invalidate_table_constraints
def delete_index(self, table_name, column_names, db_tablespace=''):
"""
Deletes an index created with create_index.
This is possible using only columns due to the deterministic
index naming function which relies on column names.
"""
if isinstance(column_names, (str, unicode)):
if isinstance(column_names, string_types):
column_names = [column_names]
name = self.create_index_name(table_name, column_names)
sql = self.drop_index_string % {
@ -690,7 +902,7 @@ class DatabaseOperations(object):
drop_index = alias('delete_index')
@delete_column_constraints
def delete_column(self, table_name, name):
"""
Deletes the column 'column_name' from the table 'table_name'.
@ -700,20 +912,21 @@ class DatabaseOperations(object):
drop_column = alias('delete_column')
def rename_column(self, table_name, old, new):
"""
Renames the column 'old' from the table 'table_name' to 'new'.
"""
raise NotImplementedError("rename_column has no generic SQL syntax")
@invalidate_table_constraints
def delete_primary_key(self, table_name):
"""
Drops the old primary key.
"""
# Dry runs mean we can't do anything.
if self.dry_run:
if self.debug:
print(' - no dry run output for delete_primary_key() due to dynamic DDL, sorry')
return
constraints = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY"))
@ -728,7 +941,7 @@ class DatabaseOperations(object):
drop_primary_key = alias('delete_primary_key')
@invalidate_table_constraints
def create_primary_key(self, table_name, columns):
"""
Creates a new primary key on the specified columns.
@ -737,10 +950,23 @@ class DatabaseOperations(object):
columns = [columns]
self.execute(self.create_primary_key_string % {
"table": self.quote_name(table_name),
"constraint": self.quote_name(table_name+"_pkey"),
"constraint": self.quote_name(table_name + "_pkey"),
"columns": ", ".join(map(self.quote_name, columns)),
})
def _find_primary_key_columns(self, table_name):
"""
Find all columns of the primary key of the specified table
"""
db_name = self._get_setting('NAME')
primary_key_columns = set()
for col, constraints in self.lookup_constraint(db_name, table_name):
for kind, cname in constraints:
if kind == 'PRIMARY KEY':
primary_key_columns.add(col.lower())
return primary_key_columns
def start_transaction(self):
"""
@ -749,10 +975,9 @@ class DatabaseOperations(object):
"""
if self.dry_run:
self.pending_transactions += 1
transaction.commit_unless_managed()
transaction.enter_transaction_management()
transaction.managed(True)
transaction.commit_unless_managed(using=self.db_alias)
transaction.enter_transaction_management(using=self.db_alias)
transaction.managed(True, using=self.db_alias)
def commit_transaction(self):
"""
@ -761,9 +986,8 @@ class DatabaseOperations(object):
"""
if self.dry_run:
return
transaction.commit()
transaction.leave_transaction_management()
transaction.commit(using=self.db_alias)
transaction.leave_transaction_management(using=self.db_alias)
def rollback_transaction(self):
"""
@ -772,8 +996,8 @@ class DatabaseOperations(object):
"""
if self.dry_run:
self.pending_transactions -= 1
transaction.rollback()
transaction.leave_transaction_management()
transaction.rollback(using=self.db_alias)
transaction.leave_transaction_management(using=self.db_alias)
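# Editor's note: South's migration runner brackets each migration in these
# helpers, roughly (sketch only; the real call sites live in south.migration):
#   db.start_transaction()
#   try:
#       ...  # run forwards() / backwards()
#       db.commit_transaction()
#   except:
#       db.rollback_transaction()
# In dry-run mode commits are skipped and the pending_transactions counter
# tracks unbalanced calls.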
def rollback_transactions_dry_run(self):
"""
@ -783,16 +1007,14 @@ class DatabaseOperations(object):
return
while self.pending_transactions > 0:
self.rollback_transaction()
if transaction.is_dirty():
if transaction.is_dirty(using=self.db_alias):
# Force an exception, if we're still in a dirty transaction.
# This means we are missing a COMMIT/ROLLBACK.
transaction.leave_transaction_management()
transaction.leave_transaction_management(using=self.db_alias)
def send_create_signal(self, app_label, model_names):
self.pending_create_signals.append((app_label, model_names))
def send_pending_create_signals(self, verbosity=0, interactive=False):
# Group app_labels together
signals = SortedDict()
@ -802,13 +1024,12 @@ class DatabaseOperations(object):
except KeyError:
signals[app_label] = list(model_names)
# Send only one signal per app.
for (app_label, model_names) in signals.iteritems():
for (app_label, model_names) in signals.items():
self.really_send_create_signal(app_label, list(set(model_names)),
verbosity=verbosity,
interactive=interactive)
self.pending_create_signals = []
def really_send_create_signal(self, app_label, model_names,
verbosity=0, interactive=False):
"""
@ -824,7 +1045,7 @@ class DatabaseOperations(object):
"""
if self.debug:
print " - Sending post_syncdb signal for %s: %s" % (app_label, model_names)
print(" - Sending post_syncdb signal for %s: %s" % (app_label, model_names))
app = models.get_app(app_label)
if not app:
@ -864,8 +1085,7 @@ class DatabaseOperations(object):
interactive=interactive,
)
def mock_model(self, model_name, db_table, db_tablespace='',
def mock_model(self, model_name, db_table, db_tablespace='',
pk_field_name='id', pk_field_type=models.AutoField,
pk_field_args=[], pk_field_kwargs={}):
"""
@ -908,6 +1128,33 @@ class DatabaseOperations(object):
MockModel._meta.model = MockModel
return MockModel
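# Editor's sketch: frozen migrations use mock models so a ForeignKey target
# need not be imported; e.g. (hypothetical app):
#   User = db.mock_model(model_name='User', db_table='auth_user',
#       db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField)
#   db.add_column('books_book', 'author', models.ForeignKey(User, null=True))
# Only the _meta attributes needed for SQL generation are populated.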
def _db_positive_type_for_alter_column(self, klass, field):
"""
A helper for subclasses overriding _db_type_for_alter_column:
Remove the check constraint from the type string for PositiveInteger
and PositiveSmallInteger fields.
@param klass: The type of the child (required to allow this to be used when it is subclassed)
@param field: The field to generate type for
"""
super_result = super(klass, self)._db_type_for_alter_column(field)
if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
return super_result.split(" ", 1)[0]
return super_result
def _alter_add_positive_check(self, klass, field, name, params, sqls):
"""
A helper for subclasses overriding _alter_add_column_mods:
Add a check constraint verifying positivity to PositiveInteger and
PositiveSmallInteger fields.
"""
super(klass, self)._alter_add_column_mods(field, name, params, sqls)
if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
uniq_hash = abs(hash(tuple(params.values())))
d = dict(
constraint = "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]),
check = "%s >= 0" % self.quote_name(name))
sqls.append((self.add_check_constraint_fragment % d, []))
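# Editor's note: for a PositiveIntegerField named 'age' this appends a
# fragment like (hash is hypothetical)
#   ADD CONSTRAINT CK_age_PSTV_1a2b3c CHECK ("age" >= 0)
# so backends whose _db_type_for_alter_column strips the inline CHECK can
# restore positivity as a named, separately droppable constraint.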
# Single-level flattening of lists
def flatten(ls):
@ -1,22 +1,95 @@
# MySQL-specific implementations for south
# Original author: Andrew Godwin
# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
from django.db import connection
from django.conf import settings
from south.db import generic
from south.db.generic import DryRunError, INVALID
from south.logger import get_logger
def delete_column_constraints(func):
"""
Decorates column operation functions for MySQL.
Deletes the constraints from the database and clears local cache.
"""
def _column_rm(self, table_name, column_name, *args, **opts):
# Delete foreign key constraints
try:
self.delete_foreign_key(table_name, column_name)
except ValueError:
pass # If no foreign key on column, OK because it checks first
# Delete constraints referring to this column
try:
reverse = self._lookup_reverse_constraint(table_name, column_name)
for cname, rtable, rcolumn in reverse:
self.delete_foreign_key(rtable, rcolumn)
except DryRunError:
pass
return func(self, table_name, column_name, *args, **opts)
return _column_rm
def copy_column_constraints(func):
"""
Decorates column operation functions for MySQL.
Determines existing constraints and copies them to a new column
"""
def _column_cp(self, table_name, column_old, column_new, *args, **opts):
# Copy foreign key constraint
try:
constraint = self._find_foreign_constraints(
table_name, column_old)[0]
refs = self._lookup_constraint_references(table_name, constraint)
if refs is not None:
(ftable, fcolumn) = refs
if ftable and fcolumn:
fk_sql = self.foreign_key_sql(
table_name, column_new, ftable, fcolumn)
get_logger().debug("Foreign key SQL: " + fk_sql)
self.add_deferred_sql(fk_sql)
except IndexError:
pass # No constraint exists so ignore
except DryRunError:
pass
# Copy constraints referring to this column
try:
reverse = self._lookup_reverse_constraint(table_name, column_old)
for cname, rtable, rcolumn in reverse:
fk_sql = self.foreign_key_sql(
rtable, rcolumn, table_name, column_new)
self.add_deferred_sql(fk_sql)
except DryRunError:
pass
return func(self, table_name, column_old, column_new, *args, **opts)
return _column_cp
def invalidate_table_constraints(func):
"""
For MySQL we grab all table constraints simultaneously, so invalidating
the whole per-database cache at once is effective.
It also solves the issue of invalidating constraints on referred tables.
"""
def _cache_clear(self, table, *args, **opts):
db_name = self._get_setting('NAME')
if db_name in self._constraint_cache:
del self._constraint_cache[db_name]
if db_name in self._reverse_cache:
del self._reverse_cache[db_name]
if db_name in self._constraint_references:
del self._constraint_references[db_name]
return func(self, table, *args, **opts)
return _cache_clear
class DatabaseOperations(generic.DatabaseOperations):
"""
MySQL implementation of database operations.
MySQL is an 'interesting' database; it has no DDL transaction support,
among other things. This can confuse people when they ask how they can
roll back - hence the dry runs, etc., found in the migration code.
Alex agrees, and Alex is always right.
[19:06] <Alex_Gaynor> Also, I want to restate once again that MySQL is a special database
(Still, if you want a key-value store with relational tendancies, go MySQL!)
MySQL has no DDL transaction support. This can confuse people when they ask
how to roll back - hence the dry runs, etc., found in the migration code.
"""
backend_name = "mysql"
alter_string_set_type = ''
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
@ -24,34 +97,113 @@ class DatabaseOperations(generic.DatabaseOperations):
drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
allows_combined_alters = False
has_ddl_transactions = False
has_check_constraints = False
delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
rename_table_sql = "RENAME TABLE %s TO %s;"
allows_combined_alters = False
has_check_constraints = False
raises_default_errors = False
geom_types = ['geometry', 'point', 'linestring', 'polygon']
text_types = ['text', 'blob']
def __init__(self, db_alias):
self._constraint_references = {}
self._reverse_cache = {}
super(DatabaseOperations, self).__init__(db_alias)
if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
self.create_table_sql = self.create_table_sql + ' ENGINE=%s' % self._get_setting('STORAGE_ENGINE')
def _is_valid_cache(self, db_name, table_name):
cache = self._constraint_cache
# we cache the whole db so if there are any tables table_name is valid
return db_name in cache and cache[db_name].get(table_name, None) is not INVALID
def _fill_constraint_cache(self, db_name, table_name):
# for MySQL grab all constraints for this database. It's just as cheap as a single column.
self._constraint_cache[db_name] = {}
self._constraint_cache[db_name][table_name] = {}
self._reverse_cache[db_name] = {}
self._constraint_references[db_name] = {}
name_query = """
SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
kc.`referenced_table_name`, kc.`referenced_column_name`
FROM information_schema.key_column_usage AS kc
WHERE
kc.table_schema = %s
"""
rows = self.execute(name_query, [db_name])
if not rows:
return
cnames = {}
for constraint, column, table, ref_table, ref_column in rows:
key = (table, constraint)
cnames.setdefault(key, set())
cnames[key].add((column, ref_table, ref_column))
type_query = """
SELECT c.constraint_name, c.table_name, c.constraint_type
FROM information_schema.table_constraints AS c
WHERE
c.table_schema = %s
"""
rows = self.execute(type_query, [db_name])
for constraint, table, kind in rows:
key = (table, constraint)
self._constraint_cache[db_name].setdefault(table, {})
try:
cols = cnames[key]
except KeyError:
cols = set()
for column_set in cols:
(column, ref_table, ref_column) = column_set
self._constraint_cache[db_name][table].setdefault(column, set())
if kind == 'FOREIGN KEY':
self._constraint_cache[db_name][table][column].add((kind,
constraint))
# Create constraint lookup, see constraint_references
self._constraint_references[db_name][(table,
constraint)] = (ref_table, ref_column)
# Create reverse table lookup, reverse_lookup
self._reverse_cache[db_name].setdefault(ref_table, {})
self._reverse_cache[db_name][ref_table].setdefault(ref_column,
set())
self._reverse_cache[db_name][ref_table][ref_column].add(
(constraint, table, column))
else:
self._constraint_cache[db_name][table][column].add((kind,
constraint))
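# Editor's sketch of the caches built above (values are illustrative):
#   _constraint_cache[db][table][column] = {('FOREIGN KEY', 'fk_name'), ...}
#   _constraint_references[db][(table, 'fk_name')] = (ref_table, ref_column)
#   _reverse_cache[db][ref_table][ref_column] = {('fk_name', table, column), ...}
# One pass over information_schema fills all three, so later per-column
# lookups stay in memory until the cache is invalidated.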
def connection_init(self):
"""
Run before any SQL to let database-specific config be sent as a command,
e.g. which storage engine (MySQL) or transaction serialisability level.
"""
cursor = self._get_connection().cursor()
if cursor.execute("SHOW variables WHERE Variable_Name='default_storage_engine';"):
engine_var = 'default_storage_engine'
else:
engine_var = 'storage_engine'
if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
cursor.execute("SET storage_engine=%s;" % self._get_setting('STORAGE_ENGINE'))
# Turn off foreign key checks, and turn them back on at the end
cursor.execute("SET FOREIGN_KEY_CHECKS=0;")
self.deferred_sql.append("SET FOREIGN_KEY_CHECKS=1;")
cursor.execute("SET %s=%s;" % (engine_var, self._get_setting('STORAGE_ENGINE')))
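# Editor's note: the engine comes from the database settings dict, e.g.
# (assuming the usual 'default' alias):
#   DATABASES = {'default': {..., 'STORAGE_ENGINE': 'INNODB'}}
# The SHOW VARIABLES probe above picks default_storage_engine on MySQL
# versions that renamed the old storage_engine variable.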
def start_transaction(self):
super(DatabaseOperations, self).start_transaction()
self.execute("SET FOREIGN_KEY_CHECKS=0;")
@copy_column_constraints
@delete_column_constraints
@invalidate_table_constraints
def rename_column(self, table_name, old, new):
if old == new or self.dry_run:
return []
rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old]
if not rows:
raise ValueError("No column '%s' in '%s'." % (old, table_name))
params = (
self.quote_name(table_name),
self.quote_name(old),
@ -62,108 +214,77 @@ class DatabaseOperations(generic.DatabaseOperations):
rows[0][4] and "%s" or "",
rows[0][5] or "",
)
sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params
if rows[0][4]:
self.execute(sql, (rows[0][4],))
else:
self.execute(sql)
@delete_column_constraints
def delete_column(self, table_name, name):
db_name = self._get_setting('NAME')
# See if there is a foreign key on this column
cursor = self._get_connection().cursor()
get_fkeyname_query = "SELECT tc.constraint_name FROM \
information_schema.table_constraints tc, \
information_schema.key_column_usage kcu \
WHERE tc.table_name=kcu.table_name \
AND tc.table_schema=kcu.table_schema \
AND tc.constraint_name=kcu.constraint_name \
AND tc.constraint_type='FOREIGN KEY' \
AND tc.table_schema='%s' \
AND tc.table_name='%s' \
AND kcu.column_name='%s'"
result = cursor.execute(get_fkeyname_query % (db_name, table_name, name))
# If a foreign key exists, we need to delete it first
if result > 0:
assert result == 1 # We should only have one result, otherwise there's Issues
fkey_name = cursor.fetchone()[0]
drop_query = "ALTER TABLE %s DROP FOREIGN KEY %s"
cursor.execute(drop_query % (self.quote_name(table_name), self.quote_name(fkey_name)))
super(DatabaseOperations, self).delete_column(table_name, name)
@invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
super(DatabaseOperations, self).rename_table(old_table_name,
table_name)
@invalidate_table_constraints
def delete_table(self, table_name):
super(DatabaseOperations, self).delete_table(table_name)
def _lookup_constraint_references(self, table_name, cname):
"""
Renames the table 'old_table_name' to 'table_name'.
Provided an existing table and constraint, returns tuple of (foreign
table, column)
"""
if old_table_name == table_name:
# No Operation
return
params = (self.quote_name(old_table_name), self.quote_name(table_name))
self.execute('RENAME TABLE %s TO %s;' % params)
def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
"""
Gets the names of the constraints affecting the given columns.
If columns is None, returns all constraints of the type on the table.
"""
if self.dry_run:
raise ValueError("Cannot get constraints for columns during a dry run.")
if columns is not None:
columns = set(columns)
db_name = self._get_setting('NAME')
# First, load all constraint->col mappings for this table.
rows = self.execute("""
SELECT kc.constraint_name, kc.column_name
FROM information_schema.key_column_usage AS kc
JOIN information_schema.table_constraints AS c ON
kc.table_schema = c.table_schema AND
kc.table_name = c.table_name AND
kc.constraint_name = c.constraint_name
WHERE
kc.table_schema = %s AND
kc.table_catalog IS NULL AND
kc.table_name = %s AND
c.constraint_type = %s
""", [db_name, table_name, type])
# Load into a dict
mapping = {}
for constraint, column in rows:
mapping.setdefault(constraint, set())
mapping[constraint].add(column)
# Find ones affecting these columns
for constraint, itscols in mapping.items():
if itscols == columns or columns is None:
yield constraint
try:
return self._constraint_references[db_name][(table_name, cname)]
except KeyError:
return None
def _lookup_reverse_constraint(self, table_name, column_name=None):
"""Look for the column referenced by a foreign constraint"""
db_name = self._get_setting('NAME')
if self.dry_run:
raise DryRunError("Cannot get constraints for columns.")
if not self._is_valid_cache(db_name, table_name):
# Piggy-back on lookup_constraint, ensures cache exists
self.lookup_constraint(db_name, table_name)
try:
table = self._reverse_cache[db_name][table_name]
if column_name == None:
return [(y, tuple(y)) for x, y in table.items()]
else:
return tuple(table[column_name])
except KeyError:
return []
def _field_sanity(self, field):
"""
This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
"""
if self._db_type_for_alter_column(field).upper() in ["BLOB", "TEXT", "LONGTEXT"]:
# MySQL also does not support defaults for geometry columns
type = self._db_type_for_alter_column(field).lower()
is_geom = True in [type.find(t) > -1 for t in self.geom_types]
is_text = True in [type.find(t) > -1 for t in self.text_types]
if is_geom or is_text:
field._suppress_default = True
return field
def _alter_set_defaults(self, field, name, params, sqls):
"""
MySQL does not support defaults on text or blob columns.
"""
type = params['type']
if not (type.endswith('text') or type.endswith('blob')):
# MySQL also does not support defaults for geometry columns
is_geom = True in [type.find(t) > -1 for t in self.geom_types]
is_text = True in [type.find(t) > -1 for t in self.text_types]
if not is_geom and not is_text:
super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls)
@ -1,17 +1,28 @@
from __future__ import print_function
import os.path
import sys
import re
import warnings
import cx_Oracle
from django.db import connection, models
from django.db.backends.util import truncate_name
from django.core.management.color import no_style
from django.db.backends.oracle.base import get_sequence_name
from django.db.models.fields import NOT_PROVIDED
from south.db import generic
from django.db.utils import DatabaseError
print >> sys.stderr, " ! WARNING: South's Oracle support is still alpha."
print >> sys.stderr, " ! Be wary of possible bugs."
# In revision r16016 function get_sequence_name has been transformed into
# method of DatabaseOperations class. To make code backward-compatible we
# need to handle both situations.
try:
from django.db.backends.oracle.base import get_sequence_name\
as original_get_sequence_name
except ImportError:
original_get_sequence_name = None
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
@ -19,98 +30,115 @@ class DatabaseOperations(generic.DatabaseOperations):
"""
backend_name = 'oracle'
alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY "%(column)s" %(type)s %(nullity)s;'
alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY "%(column)s" DEFAULT %(default)s;'
alter_string_set_type = 'ALTER TABLE %(table_name)s MODIFY %(column)s %(type)s %(nullity)s;'
alter_string_set_default = 'ALTER TABLE %(table_name)s MODIFY %(column)s DEFAULT %(default)s;'
alter_string_update_nulls_to_default = \
'UPDATE %(table_name)s SET %(column)s = %(default)s WHERE %(column)s IS NULL;'
add_column_string = 'ALTER TABLE %s ADD %s;'
delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
add_constraint_string = 'ALTER TABLE %(table_name)s ADD CONSTRAINT %(constraint)s %(clause)s'
allows_combined_alters = False
has_booleans = False
constraits_dict = {
'PRIMARY KEY': 'P',
'UNIQUE': 'U',
'CHECK': 'C',
'REFERENCES': 'R'
constraints_dict = {
'P': 'PRIMARY KEY',
'U': 'UNIQUE',
'C': 'CHECK',
'R': 'FOREIGN KEY'
}
table_names_cache = set()
def get_sequence_name(self, table_name):
if original_get_sequence_name is None:
return self._get_connection().ops._get_sequence_name(table_name)
else:
return original_get_sequence_name(table_name)
#TODO: This will cause very obscure bugs if anyone uses a column name or string value
# that looks like a column definition (with 'CHECK', 'DEFAULT' and/or 'NULL' in it)
# e.g. "CHECK MATE" varchar(10) DEFAULT 'NULL'
def adj_column_sql(self, col):
col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT [0|1])',
lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean field only
col = re.sub('(?P<not_null>NOT NULL) (?P<default>DEFAULT.+)',
lambda mo: '%s %s'%(mo.group('default'), mo.group('not_null')), col) #fix order of DEFAULT and NOT NULL
# Syntax fixes -- Oracle is picky about clause order
col = re.sub('(?P<constr>CHECK \(.*\))(?P<any>.*)(?P<default>DEFAULT \d+)',
lambda mo: '%s %s%s'%(mo.group('default'), mo.group('constr'), mo.group('any')), col) #syntax fix for boolean/integer field only
col = re.sub('(?P<not_null>(NOT )?NULL) (?P<misc>(.* )?)(?P<default>DEFAULT.+)',
lambda mo: '%s %s %s'%(mo.group('default'),mo.group('not_null'),mo.group('misc') or ''), col) #fix order of NULL/NOT NULL and DEFAULT
return col
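# Editor's sketch of the reordering (hypothetical column definition):
#   'NUMBER(1) CHECK ("flag" IN (0,1)) NOT NULL DEFAULT 0'
# becomes
#   'NUMBER(1) DEFAULT 0 CHECK ("flag" IN (0,1)) NOT NULL'
# i.e. DEFAULT is hoisted ahead of the CHECK and NULL/NOT NULL clauses,
# matching the clause order Oracle insists on.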
def check_m2m(self, table_name):
m2m_table_name = table_name
existing_tables = []
if not self.table_names_cache:
self.check_meta(table_name)
self.table_names_cache = set(connection.introspection.table_names())
tn = table_name.rsplit('_', 1)
while len(tn) == 2:
tn2qn = self.quote_name(tn[0], upper = False, check_m2m = False)
if tn2qn in self.table_names_cache:
m2m_table_name = table_name.replace(tn[0], tn2qn)
break
else:
if not existing_tables:
existing_tables = connection.introspection.table_names()
if tn2qn in existing_tables:
m2m_table_name = table_name.replace(tn[0], tn2qn)
break
tn = tn[0].rsplit('_', 1)
self.table_names_cache.add(m2m_table_name)
return m2m_table_name
def check_meta(self, table_name):
return table_name in [ m._meta.db_table for m in models.get_models() ] #caching provided by Django
def normalize_name(self, name):
"""
Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes.
"""
nn = self.quote_name(name)
if nn[0] == '"' and nn[-1] == '"':
nn = nn[1:-1]
return nn
def quote_name(self, name, upper=True, column = False, check_m2m = True):
if not column:
if check_m2m:
name = self.check_m2m(name)
if self.check_meta(name): #replication of Django flow for models where Meta.db_table is set by user
name = name.upper()
tn = truncate_name(name, connection.ops.max_name_length())
return upper and tn.upper() or tn.lower()
@generic.invalidate_table_constraints
def create_table(self, table_name, fields):
qn = self.quote_name(table_name, upper = False)
qn_upper = qn.upper()
qn = self.quote_name(table_name)
columns = []
autoinc_sql = ''
for field_name, field in fields:
col = self.column_sql(qn_upper, field_name, field)
field = self._field_sanity(field)
# avoid default values in CREATE TABLE statements (#925)
field._suppress_default = True
col = self.column_sql(table_name, field_name, field)
if not col:
continue
col = self.adj_column_sql(col)
columns.append(col)
if isinstance(field, models.AutoField):
autoinc_sql = connection.ops.autoinc_sql(self.check_meta(table_name) and table_name or qn, field_name)
autoinc_sql = connection.ops.autoinc_sql(table_name, field_name)
sql = 'CREATE TABLE %s (%s);' % (qn_upper, ', '.join([col for col in columns]))
sql = 'CREATE TABLE %s (%s);' % (qn, ', '.join([col for col in columns]))
self.execute(sql)
if autoinc_sql:
self.execute(autoinc_sql[0])
self.execute(autoinc_sql[1])
@generic.invalidate_table_constraints
def delete_table(self, table_name, cascade=True):
qn = self.quote_name(table_name, upper = False)
qn = self.quote_name(table_name)
# Note: PURGE is not valid syntax for Oracle 9i (it was added in 10)
if cascade:
self.execute('DROP TABLE %s CASCADE CONSTRAINTS PURGE;' % qn.upper())
self.execute('DROP TABLE %s CASCADE CONSTRAINTS;' % qn)
else:
self.execute('DROP TABLE %s;' % qn.upper())
self.execute('DROP SEQUENCE %s;'%get_sequence_name(qn))
self.execute('DROP TABLE %s;' % qn)
# If the table has an AutoField a sequence was created.
sequence_sql = """
DECLARE
i INTEGER;
BEGIN
SELECT COUNT(*) INTO i FROM USER_CATALOG
WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
IF i = 1 THEN
EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
END IF;
END;
/""" % {'sq_name': self.get_sequence_name(table_name)}
self.execute(sequence_sql)
@generic.invalidate_table_constraints
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
if self.dry_run:
if self.debug:
print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
return
def alter_column(self, table_name, name, field, explicit_name=True):
qn = self.quote_name(table_name)
# hook for the field to do any resolution prior to its attributes being queried
@ -122,9 +150,11 @@ class DatabaseOperations(generic.DatabaseOperations):
field.set_attributes_from_name(name)
if not explicit_name:
name = field.column
qn_col = self.quote_name(name, column = True)
qn_col = self.quote_name(name)
# First, change the type
# This will actually also add any CHECK constraints needed,
# since e.g. 'type' for a BooleanField is 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))'
params = {
'table_name':qn,
'column': qn_col,
@ -133,84 +163,183 @@ class DatabaseOperations(generic.DatabaseOperations):
'default': 'NULL'
}
if field.null:
params['nullity'] = ''
sqls = [self.alter_string_set_type % params]
params['nullity'] = 'NULL'
sql_templates = [
(self.alter_string_set_type, params, []),
(self.alter_string_set_default, params, []),
]
if not field.null and field.has_default():
params['default'] = field.get_default()
# Use default for rows that had nulls. To support the case where
# the new default does not fit the old type, we need to first change
# the column type to the new type, but null=True; then set the default;
# then complete the type change.
def change_params(**kw):
"A little helper for non-destructively changing the params"
p = params.copy()
p.update(kw)
return p
sql_templates[:0] = [
(self.alter_string_set_type, change_params(nullity='NULL'),[]),
(self.alter_string_update_nulls_to_default, change_params(default="%s"), [field.get_default()]),
]
sqls.append(self.alter_string_set_default % params)
#UNIQUE constraint
unique_constraint = list(self._constraints_affecting_columns(qn, [qn_col]))
if field.unique and not unique_constraint:
self.create_unique(qn, [qn_col])
elif not field.unique and unique_constraint:
self.delete_unique(qn, [qn_col])
#CHECK constraint is not handled
for sql in sqls:
if not ignore_constraints:
# drop CHECK constraints. Make sure this is executed before the ALTER TABLE statements
# generated above, since those statements recreate the constraints we delete here.
check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
for constraint in check_constraints:
self.execute(self.delete_check_sql % {
'table': self.quote_name(table_name),
'constraint': self.quote_name(constraint),
})
# Drop foreign constraints
try:
self.execute(sql)
except cx_Oracle.DatabaseError, exc:
if str(exc).find('ORA-01442') == -1:
self.delete_foreign_key(qn, qn_col)
except ValueError:
# There weren't any
pass
for sql_template, params, args in sql_templates:
try:
self.execute(sql_template % params, args, print_all_errors=False)
except DatabaseError as exc:
description = str(exc)
# Oracle complains if a column is already NULL/NOT NULL
if 'ORA-01442' in description or 'ORA-01451' in description:
# so we just drop NULL/NOT NULL part from target sql and retry
params['nullity'] = ''
sql = sql_template % params
self.execute(sql)
# Oracle also has issues if we try to change a regular column
# to a LOB or vice versa (also REF, object, VARRAY or nested
# table, but these don't come up much in Django apps)
elif 'ORA-22858' in description or 'ORA-22859' in description:
self._alter_column_lob_workaround(table_name, name, field)
else:
self._print_sql_error(exc, sql_template % params)
raise
def add_column(self, table_name, name, field, keep_default=True):
qn = self.quote_name(table_name, upper = False)
sql = self.column_sql(qn, name, field)
if not ignore_constraints:
# Add back FK constraints if needed
if field.rel: #and self.supports_foreign_keys:
self.add_deferred_sql(
self.foreign_key_sql(
qn[1:-1], # foreign_key_sql uses this as part of constraint name
qn_col[1:-1], # foreign_key_sql uses this as part of constraint name
field.rel.to._meta.db_table,
field.rel.to._meta.get_field(field.rel.field_name).column
)
)
def _alter_column_lob_workaround(self, table_name, name, field):
"""
Oracle refuses to change a column type from/to LOB to/from a regular
column. In Django, this shows up when the field is changed from/to
a TextField.
What we need to do instead is:
- Rename the original column
- Add the desired field as new
- Update the table to transfer values from old to new
- Drop old column
"""
renamed = self._generate_temp_name(name)
self.rename_column(table_name, name, renamed)
self.add_column(table_name, name, field, keep_default=False)
self.execute("UPDATE %s set %s=%s" % (
self.quote_name(table_name),
self.quote_name(name),
self.quote_name(renamed),
))
self.delete_column(table_name, renamed)
def _generate_temp_name(self, for_name):
suffix = hex(hash(for_name)).upper()[1:]
return self.normalize_name(for_name + "_" + suffix)
@generic.copy_column_constraints #TODO: Appears to be nulled by the delete decorator below...
@generic.delete_column_constraints
def rename_column(self, table_name, old, new):
if old == new:
# Short-circuit out
return []
self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % (
self.quote_name(table_name),
self.quote_name(old),
self.quote_name(new),
))
@generic.invalidate_table_constraints
def add_column(self, table_name, name, field, keep_default=False):
field = self._field_sanity(field)
sql = self.column_sql(table_name, name, field)
sql = self.adj_column_sql(sql)
if sql:
params = (
qn.upper(),
self.quote_name(table_name),
sql
)
sql = self.add_column_string % params
self.execute(sql)
# Now, drop the default if we need to
if not keep_default and field.default is not None:
if field.default is not None:
field.default = NOT_PROVIDED
self.alter_column(table_name, name, field, explicit_name=False)
self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
def delete_column(self, table_name, name):
return super(DatabaseOperations, self).delete_column(self.quote_name(table_name), name)
def lookup_constraint(self, db_name, table_name, column_name=None):
if column_name:
# Column names in the constraint cache come from the database,
# make sure we use the properly shortened/uppercased version
# for lookup.
column_name = self.normalize_name(column_name)
return super(DatabaseOperations, self).lookup_constraint(db_name, table_name, column_name)
def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
if columns:
columns = [self.normalize_name(c) for c in columns]
return super(DatabaseOperations, self)._constraints_affecting_columns(table_name, columns, type)
def _field_sanity(self, field):
"""
This particular override stops us sending DEFAULTs for BooleanField.
"""
if isinstance(field, models.BooleanField) and field.has_default():
field.default = int(field.to_python(field.get_default()))
# On Oracle, empty strings are null
if isinstance(field, (models.CharField, models.TextField)):
field.null = field.empty_strings_allowed
return field
def _constraints_affecting_columns(self, table_name, columns, type='UNIQUE'):
"""
Gets the names of the constraints affecting the given columns.
"""
qn = self.quote_name
if self.dry_run:
raise ValueError("Cannot get constraints for columns during a dry run.")
columns = set(columns)
def _default_value_workaround(self, value):
from datetime import date,time,datetime
if isinstance(value, (date,time,datetime)):
return "'%s'" % value
else:
return super(DatabaseOperations, self)._default_value_workaround(value)
def _fill_constraint_cache(self, db_name, table_name):
self._constraint_cache.setdefault(db_name, {})
self._constraint_cache[db_name][table_name] = {}
rows = self.execute("""
SELECT user_cons_columns.constraint_name, user_cons_columns.column_name
SELECT user_cons_columns.constraint_name,
user_cons_columns.column_name,
user_constraints.constraint_type
FROM user_constraints
JOIN user_cons_columns ON
user_constraints.table_name = user_cons_columns.table_name AND
user_constraints.constraint_name = user_cons_columns.constraint_name
WHERE user_constraints.table_name = '%s' AND
user_constraints.constraint_type = '%s'
""" % (qn(table_name), self.constraits_dict[type]))
# Load into a dict
mapping = {}
for constraint, column in rows:
mapping.setdefault(constraint, set())
mapping[constraint].add(column)
# Find ones affecting these columns
for constraint, itscols in mapping.items():
if itscols == columns:
yield constraint
WHERE user_constraints.table_name = '%s'
""" % self.normalize_name(table_name))
for constraint, column, kind in rows:
self._constraint_cache[db_name][table_name].setdefault(column, set())
self._constraint_cache[db_name][table_name][column].add((self.constraints_dict[kind], constraint))
return
@ -1,15 +1,36 @@
from __future__ import print_function
from django.db import connection, models
import uuid
from django.db.backends.util import truncate_name
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
PsycoPG2 implementation of database operations.
"""
backend_name = "postgres"
def create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for the index
Django's logic for naming field indexes is different in the
postgresql_psycopg2 backend, so we follow that for single-column
indexes.
"""
if len(column_names) == 1:
return truncate_name(
'%s_%s%s' % (table_name, column_names[0], suffix),
self._get_connection().ops.max_name_length()
)
return super(DatabaseOperations, self).create_index_name(table_name, column_names, suffix)
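# Editor's sketch (hypothetical names): single-column indexes keep Django's
# postgres naming,
#   db.create_index_name('books_book', ['title'])
#   -> 'books_book_title'   (truncated to max_name_length if needed)
# while multi-column indexes fall through to South's generic hashed scheme.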
@generic.copy_column_constraints
@generic.delete_column_constraints
def rename_column(self, table_name, old, new):
if old == new:
# Short-circuit out
@ -19,50 +40,57 @@ class DatabaseOperations(generic.DatabaseOperations):
self.quote_name(old),
self.quote_name(new),
))
@generic.invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
"will rename the table and an associated ID sequence and primary key index"
# First, rename the table
generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
# Then, try renaming the ID sequence
# (if you're using other AutoFields... your problem, unfortunately)
self.commit_transaction()
self.start_transaction()
try:
generic.DatabaseOperations.rename_table(self, old_table_name+"_id_seq", table_name+"_id_seq")
except:
if self.debug:
print " ~ No such sequence (ignoring error)"
self.rollback_transaction()
else:
self.commit_transaction()
self.start_transaction()
if self.execute(
"""
SELECT 1
FROM information_schema.sequences
WHERE sequence_name = %s
""",
[old_table_name + '_id_seq']
):
generic.DatabaseOperations.rename_table(self, old_table_name + "_id_seq", table_name + "_id_seq")
# Rename primary key index, will not rename other indices on
# the table that are used by django (e.g. foreign keys). Until we
# figure out how, you need to do this yourself.
try:
generic.DatabaseOperations.rename_table(self, old_table_name+"_pkey", table_name+ "_pkey")
except:
if self.debug:
print " ~ No such primary key (ignoring error)"
self.rollback_transaction()
else:
self.commit_transaction()
self.start_transaction()
pkey_index_names = self.execute(
"""
SELECT pg_index.indexrelid::regclass
FROM pg_index, pg_attribute
WHERE
indrelid = %s::regclass AND
pg_attribute.attrelid = indrelid AND
pg_attribute.attnum = any(pg_index.indkey)
AND indisprimary
""",
[table_name]
)
if old_table_name + "_pkey" in pkey_index_names:
generic.DatabaseOperations.rename_table(self, old_table_name + "_pkey", table_name + "_pkey")
def rename_index(self, old_index_name, index_name):
"Rename an index individually"
generic.DatabaseOperations.rename_table(self, old_index_name, index_name)
def _default_value_workaround(self, value):
"Support for UUIDs on psql"
if isinstance(value, uuid.UUID):
return str(value)
else:
return super(DatabaseOperations, self)._default_value_workaround(value)
def _db_type_for_alter_column(self, field):
"""
Returns a field's type suitable for ALTER COLUMN.
Strips CHECKs from PositiveSmallIntegerField and PositiveIntegerField
@param field: The field to generate type for
"""
super_result = super(DatabaseOperations, self)._db_type_for_alter_column(field)
if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField):
return super_result.split(" ")[0]
return super_result
return self._db_positive_type_for_alter_column(DatabaseOperations, field)
def _alter_add_column_mods(self, field, name, params, sqls):
return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)
@ -1 +0,0 @@
/*.pyc
@ -1,6 +1,16 @@
from datetime import date, datetime, time
from warnings import warn
from django.db import models
from django.db.models import fields
from south.db import generic
from south.db.generic import delete_column_constraints, invalidate_table_constraints, copy_column_constraints
from south.exceptions import ConstraintDropped
from south.utils.py3 import string_types
try:
from django.utils.encoding import smart_text # Django >= 1.5
except ImportError:
from django.utils.encoding import smart_unicode as smart_text # Django < 1.5
from django.core.management.color import no_style
class DatabaseOperations(generic.DatabaseOperations):
"""
@ -19,25 +29,35 @@ class DatabaseOperations(generic.DatabaseOperations):
drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
drop_constraint_string = 'ALTER TABLE %(table_name)s DROP CONSTRAINT %(constraint_name)s'
delete_column_string = 'ALTER TABLE %s DROP COLUMN %s'
#create_check_constraint_sql = "ALTER TABLE %(table)s " + \
# generic.DatabaseOperations.add_check_constraint_fragment
create_foreign_key_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s " + \
"FOREIGN KEY (%(column)s) REFERENCES %(target)s"
create_unique_sql = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s UNIQUE (%(columns)s)"
default_schema_name = "dbo"
has_booleans = False
@delete_column_constraints
def delete_column(self, table_name, name):
q_table_name, q_name = (self.quote_name(table_name), self.quote_name(name))
# Zap the indexes
for ind in self._find_indexes_for_column(table_name,name):
params = {'table_name':q_table_name, 'index_name': ind}
sql = self.drop_index_string % params
self.execute(sql, [])
# Zap the constraints
for const in self._find_constraints_for_column(table_name,name):
params = {'table_name':q_table_name, 'constraint_name': const}
sql = self.drop_constraint_string % params
self.execute(sql, [])
# Zap the indexes
for ind in self._find_indexes_for_column(table_name,name):
params = {'table_name':q_table_name, 'index_name': ind}
sql = self.drop_index_string % params
self.execute(sql, [])
# Zap default if exists
drop_default = self.drop_column_default_sql(table_name, name)
if drop_default:
@ -52,22 +72,22 @@ class DatabaseOperations(generic.DatabaseOperations):
sql = """
SELECT si.name, si.id, sik.colid, sc.name
FROM dbo.sysindexes SI WITH (NOLOCK)
INNER JOIN dbo.sysindexkeys SIK WITH (NOLOCK)
ON SIK.id = Si.id
AND SIK.indid = SI.indid
INNER JOIN dbo.syscolumns SC WITH (NOLOCK)
ON SI.id = SC.id
AND SIK.colid = SC.colid
WHERE SI.indid !=0
AND Si.id = OBJECT_ID('%s')
AND SC.name = '%s'
FROM dbo.sysindexes si WITH (NOLOCK)
INNER JOIN dbo.sysindexkeys sik WITH (NOLOCK)
ON sik.id = si.id
AND sik.indid = si.indid
INNER JOIN dbo.syscolumns sc WITH (NOLOCK)
ON si.id = sc.id
AND sik.colid = sc.colid
WHERE si.indid !=0
AND si.id = OBJECT_ID('%s')
AND sc.name = '%s'
"""
idx = self.execute(sql % (table_name, name), [])
return [i[0] for i in idx]
def _find_constraints_for_column(self, table_name, name):
def _find_constraints_for_column(self, table_name, name, just_names=True):
"""
Find the constraints that apply to a column, needed when deleting. Defaults not included.
This is more general than the parent _constraints_affecting_columns, as on MSSQL this
@ -75,35 +95,183 @@ class DatabaseOperations(generic.DatabaseOperations):
"""
sql = """
SELECT CONSTRAINT_NAME
FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE
WHERE CONSTRAINT_CATALOG = TABLE_CATALOG
AND CONSTRAINT_SCHEMA = TABLE_SCHEMA
AND TABLE_CATALOG = %s
AND TABLE_SCHEMA = %s
AND TABLE_NAME = %s
AND COLUMN_NAME = %s
SELECT CC.[CONSTRAINT_NAME]
,TC.[CONSTRAINT_TYPE]
,CHK.[CHECK_CLAUSE]
,RFD.TABLE_SCHEMA
,RFD.TABLE_NAME
,RFD.COLUMN_NAME
-- used for normalized names
,CC.TABLE_NAME
,CC.COLUMN_NAME
FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] TC
JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC
ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS CHK
ON CHK.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
AND CHK.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
AND CHK.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
AND 'CHECK' = TC.CONSTRAINT_TYPE
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS REF
ON REF.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG
AND REF.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA
AND REF.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
AND 'FOREIGN KEY' = TC.CONSTRAINT_TYPE
LEFT JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
ON RFD.CONSTRAINT_CATALOG = REF.UNIQUE_CONSTRAINT_CATALOG
AND RFD.CONSTRAINT_SCHEMA = REF.UNIQUE_CONSTRAINT_SCHEMA
AND RFD.CONSTRAINT_NAME = REF.UNIQUE_CONSTRAINT_NAME
WHERE CC.CONSTRAINT_CATALOG = CC.TABLE_CATALOG
AND CC.CONSTRAINT_SCHEMA = CC.TABLE_SCHEMA
AND CC.TABLE_CATALOG = %s
AND CC.TABLE_SCHEMA = %s
AND CC.TABLE_NAME = %s
AND CC.COLUMN_NAME = %s
"""
db_name = self._get_setting('name')
schema_name = self._get_schema_name()
cons = self.execute(sql, [db_name, schema_name, table_name, name])
return [c[0] for c in cons]
table = self.execute(sql, [db_name, schema_name, table_name, name])
if just_names:
return [r[0] for r in table]
all = {}
for r in table:
cons_name, type = r[:2]
if type=='PRIMARY KEY' or type=='UNIQUE':
cons = all.setdefault(cons_name, (type,[]))
sql = '''
SELECT COLUMN_NAME
FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE RFD
WHERE RFD.CONSTRAINT_CATALOG = %s
AND RFD.CONSTRAINT_SCHEMA = %s
AND RFD.TABLE_NAME = %s
AND RFD.CONSTRAINT_NAME = %s
'''
columns = self.execute(sql, [db_name, schema_name, table_name, cons_name])
cons[1].extend(col for col, in columns)
elif type=='CHECK':
cons = (type, r[2])
elif type=='FOREIGN KEY':
if cons_name in all:
raise NotImplementedError("Multiple-column foreign keys are not supported")
else:
cons = (type, r[3:6])
else:
raise NotImplementedError("Don't know how to handle constraints of type "+ type)
all[cons_name] = cons
return all
@invalidate_table_constraints
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
"""
Alters the given column name so it will match the given field.
Note that conversion between the two by the database must be possible.
Will not automatically add _id by default; to have this behaviour, pass
explicit_name=False.
@param table_name: The name of the table to add the column to
@param name: The name of the column to alter
@param field: The new field definition to use
"""
self._fix_field_definition(field)
if not ignore_constraints:
qn = self.quote_name
sch = qn(self._get_schema_name())
tab = qn(table_name)
table = ".".join([sch, tab])
try:
self.delete_foreign_key(table_name, name)
except ValueError:
# no FK constraint on this field. That's OK.
pass
constraints = self._find_constraints_for_column(table_name, name, False)
for constraint in constraints.keys():
params = dict(table_name = table,
constraint_name = qn(constraint))
sql = self.drop_constraint_string % params
self.execute(sql, [])
ret_val = super(DatabaseOperations, self).alter_column(table_name, name, field, explicit_name, ignore_constraints=True)
if not ignore_constraints:
for cname, (ctype,args) in constraints.items():
params = dict(table = table,
constraint = qn(cname))
if ctype=='UNIQUE':
params['columns'] = ", ".join(map(qn,args))
sql = self.create_unique_sql % params
elif ctype=='PRIMARY KEY':
params['columns'] = ", ".join(map(qn,args))
sql = self.create_primary_key_string % params
elif ctype=='FOREIGN KEY':
continue
# Foreign keys taken care of below
#target = "%s.%s(%s)" % tuple(map(qn,args))
#params.update(column = qn(name), target = target)
#sql = self.create_foreign_key_sql % params
elif ctype=='CHECK':
warn(ConstraintDropped("CHECK "+ args, table_name, name))
continue
#TODO: Some check constraints should be restored; but not before the generic
# backend restores them.
#params['check'] = args
#sql = self.create_check_constraint_sql % params
else:
raise NotImplementedError("Don't know how to handle constraints of type "+ ctype)
self.execute(sql, [])
# Create foreign key if necessary
if field.rel and self.supports_foreign_keys:
self.execute(
self.foreign_key_sql(
table_name,
field.column,
field.rel.to._meta.db_table,
field.rel.to._meta.get_field(field.rel.field_name).column
)
)
model = self.mock_model("FakeModelForIndexCreation", table_name)
for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
self.execute(stmt)
return ret_val
def _alter_set_defaults(self, field, name, params, sqls):
"Subcommand of alter_column that sets default values (overrideable)"
# First drop the current default if one exists
# Historically, we used to set defaults here.
# But since South 0.8, we don't ever set defaults on alter-column -- we only
# use database-level defaults as scaffolding when adding columns.
# However, we still sometimes need to remove defaults in alter-column.
table_name = self.quote_name(params['table_name'])
drop_default = self.drop_column_default_sql(table_name, name)
if drop_default:
sqls.append((drop_default, []))
# Next, set any default
def _value_to_unquoted_literal(self, field, value):
# Start with the field's own translation
conn = self._get_connection()
value = field.get_db_prep_save(value, connection=conn)
# This is still a Python object -- nobody expects to need a literal.
if isinstance(value, string_types):
return smart_text(value)
elif isinstance(value, (date,time,datetime)):
return value.isoformat()
else:
#TODO: Anybody else needs special translations?
return str(value)
def _default_value_workaround(self, value):
if isinstance(value, (date,time,datetime)):
return value.isoformat()
else:
return super(DatabaseOperations, self)._default_value_workaround(value)
if field.has_default(): # was: and not field.null
default = field.get_default()
sqls.append(('ADD DEFAULT %%s for %s' % (self.quote_name(name),), [default]))
#else:
# sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
def _quote_string(self, s):
return "'" + s.replace("'","''") + "'"
def drop_column_default_sql(self, table_name, name, q_name=None):
"MSSQL specific drop default, which is a pain"
@ -120,23 +288,59 @@ class DatabaseOperations(generic.DatabaseOperations):
return None
def _fix_field_definition(self, field):
if isinstance(field, fields.BooleanField):
if isinstance(field, (fields.BooleanField, fields.NullBooleanField)):
if field.default == True:
field.default = 1
if field.default == False:
field.default = 0
def add_column(self, table_name, name, field, keep_default=True):
self._fix_field_definition(field)
generic.DatabaseOperations.add_column(self, table_name, name, field, keep_default)
# This is copied from South's generic add_column, with two modifications:
# 1) The sql-server-specific call to _fix_field_definition
# 2) Removing a default, when needed, by calling drop_default and not the more general alter_column
@invalidate_table_constraints
def add_column(self, table_name, name, field, keep_default=False):
"""
Adds the column 'name' to the table 'table_name'.
Uses the 'field' parameter, a django.db.models.fields.Field instance,
to generate the necessary sql
@param table_name: The name of the table to add the column to
@param name: The name of the column to add
@param field: The field to use
"""
self._fix_field_definition(field)
sql = self.column_sql(table_name, name, field)
if sql:
params = (
self.quote_name(table_name),
sql,
)
sql = self.add_column_string % params
self.execute(sql)
# Now, drop the default if we need to
if not keep_default and field.default is not None:
field.default = fields.NOT_PROVIDED
#self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
self.drop_default(table_name, name, field)
@invalidate_table_constraints
def drop_default(self, table_name, name, field):
fragment = self.drop_column_default_sql(table_name, name)
if fragment:
table_name = self.quote_name(table_name)
sql = " ".join(["ALTER TABLE", table_name, fragment])
self.execute(sql)
@invalidate_table_constraints
def create_table(self, table_name, field_defs):
# Tweak stuff as needed
for _, f in field_defs:
self._fix_field_definition(f)
# Run
generic.DatabaseOperations.create_table(self, table_name, field_defs)
super(DatabaseOperations, self).create_table(table_name, field_defs)
def _find_referencing_fks(self, table_name):
"MSSQL does not support cascading FKs when dropping tables, so we need to implement it ourselves."
@ -166,6 +370,7 @@ class DatabaseOperations(generic.DatabaseOperations):
schema_name = self._get_schema_name()
return self.execute(sql, [db_name, schema_name, table_name])
@invalidate_table_constraints
def delete_table(self, table_name, cascade=True):
"""
Deletes the table 'table_name'.
@ -181,6 +386,8 @@ class DatabaseOperations(generic.DatabaseOperations):
cascade = False
super(DatabaseOperations, self).delete_table(table_name, cascade)
@copy_column_constraints
@delete_column_constraints
def rename_column(self, table_name, old, new):
"""
Renames the column of 'table_name' from 'old' to 'new'.
@ -193,6 +400,7 @@ class DatabaseOperations(generic.DatabaseOperations):
params = (table_name, self.quote_name(old), self.quote_name(new))
self.execute("EXEC sp_rename '%s.%s', %s, 'COLUMN'" % params)
@invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
"""
Renames the table 'old_table_name' to 'table_name'.
@ -204,15 +412,33 @@ class DatabaseOperations(generic.DatabaseOperations):
params = (self.quote_name(old_table_name), self.quote_name(table_name))
self.execute('EXEC sp_rename %s, %s' % params)
# Copied from South's psycopg2 backend
def _db_type_for_alter_column(self, field):
"""
Returns a field's type suitable for ALTER COLUMN.
Strips CHECKs from PositiveSmallIntegerField and PositiveIntegerField
@param field: The field to generate type for
"""
super_result = super(DatabaseOperations, self)._db_type_for_alter_column(field)
if isinstance(field, models.PositiveSmallIntegerField) or isinstance(field, models.PositiveIntegerField):
return super_result.split(" ")[0]
return super_result
def _db_type_for_alter_column(self, field):
return self._db_positive_type_for_alter_column(DatabaseOperations, field)
def _alter_add_column_mods(self, field, name, params, sqls):
return self._alter_add_positive_check(DatabaseOperations, field, name, params, sqls)
@invalidate_table_constraints
def delete_foreign_key(self, table_name, column):
super(DatabaseOperations, self).delete_foreign_key(table_name, column)
# A FK also implies a non-unique index
find_index_sql = """
SELECT i.name -- s.name, t.name, c.name
FROM sys.tables t
INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
INNER JOIN sys.indexes i ON i.object_id = t.object_id
INNER JOIN sys.index_columns ic ON ic.object_id = t.object_id
AND ic.index_id = i.index_id
INNER JOIN sys.columns c ON c.object_id = t.object_id
AND ic.column_id = c.column_id
WHERE i.is_unique=0 AND i.is_primary_key=0 AND i.is_unique_constraint=0
AND s.name = %s
AND t.name = %s
AND c.name = %s
"""
schema = self._get_schema_name()
indexes = self.execute(find_index_sql, [schema, table_name, column])
qn = self.quote_name
for index in (i[0] for i in indexes if i[0]): # "if i[0]" added because an empty name may be returned
self.execute("DROP INDEX %s on %s.%s" % (qn(index), qn(schema), qn(table_name) ))


@ -1,10 +1,5 @@
import inspect
import re
from django.db.models import ForeignKey
from south.db import generic
from django.core.management.commands import inspectdb
class DatabaseOperations(generic.DatabaseOperations):
@ -17,26 +12,44 @@ class DatabaseOperations(generic.DatabaseOperations):
# SQLite ignores several constraints. I wish I could.
supports_foreign_keys = False
has_check_constraints = False
has_booleans = False
def add_column(self, table_name, name, field, *args, **kwds):
"""
Adds a column.
"""
# If it's not nullable, and has no default, raise an error (SQLite is picky)
if (not field.null and
(not field.has_default() or field.get_default() is None) and
not field.empty_strings_allowed):
if (not field.null and
(not field.has_default() or field.get_default() is None) and
not field.empty_strings_allowed):
raise ValueError("You cannot add a null=False column without a default value.")
# Initialise the field.
field.set_attributes_from_name(name)
# We add columns by remaking the table; even though SQLite supports
# We add columns by remaking the table; even though SQLite supports
# adding columns, it doesn't support adding PRIMARY KEY or UNIQUE cols.
# We define fields with no default; a default will be used, though, to fill up the remade table
field_default = None
if not getattr(field, '_suppress_default', False):
default = field.get_default()
if default is not None:
field_default = "'%s'" % field.get_db_prep_save(default, connection=self._get_connection())
field._suppress_default = True
self._remake_table(table_name, added={
field.column: self._column_sql_for_create(table_name, name, field, False),
field.column: (self._column_sql_for_create(table_name, name, field, False), field_default)
})
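The remake that add_column delegates to boils down to SQLite's standard copy-and-swap shuffle. A stdlib-only sketch of the same sequence, with invented table and column names:

# Minimal sketch of the remake: create a replacement table with the
# new column, copy data across (filling the new column from the
# one-off default, selected as a literal), then swap the tables.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE book (id INTEGER PRIMARY KEY, title TEXT)")
conn.execute("INSERT INTO book (title) VALUES ('South')")

conn.execute("CREATE TABLE _south_new_book"
             " (id INTEGER PRIMARY KEY, title TEXT, rating INTEGER NOT NULL)")
conn.execute("INSERT INTO _south_new_book (id, title, rating)"
             " SELECT id, title, 3 AS rating FROM book")
conn.execute("DROP TABLE book")
conn.execute("ALTER TABLE _south_new_book RENAME TO book")

print(conn.execute("SELECT id, title, rating FROM book").fetchall())
# [(1, 'South', 3)]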
def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={},
primary_key_override=None, uniques_deleted=[]):
def _get_full_table_description(self, connection, cursor, table_name):
cursor.execute('PRAGMA table_info(%s)' % connection.ops.quote_name(table_name))
# cid, name, type, notnull, dflt_value, pk
return [{'name': field[1],
'type': field[2],
'null_ok': not field[3],
'dflt_value': field[4],
'pk': field[5] # undocumented
} for field in cursor.fetchall()]
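For reference, the PRAGMA's positional fields map straight onto the dict keys above; a stdlib-only demonstration:

# What PRAGMA table_info returns for a small table.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE book (id INTEGER PRIMARY KEY,"
             " title TEXT NOT NULL DEFAULT 'untitled')")
# Rows come back as (cid, name, type, notnull, dflt_value, pk)
for cid, name, coltype, notnull, dflt, pk in conn.execute("PRAGMA table_info(book)"):
    print(name, coltype, "null_ok=%s" % (not notnull), "default=%r" % dflt, "pk=%s" % pk)
# id INTEGER null_ok=True default=None pk=1
# title TEXT null_ok=False default="'untitled'" pk=0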
@generic.invalidate_table_constraints
def _remake_table(self, table_name, added={}, renames={}, deleted=[], altered={}, primary_key_override=None, uniques_deleted=[]):
"""
Given a table and three sets of changes (renames, deletes, alters),
recreates it with the modified schema.
@ -51,46 +64,63 @@ class DatabaseOperations(generic.DatabaseOperations):
cursor = self._get_connection().cursor()
# Get the index descriptions
indexes = self._get_connection().introspection.get_indexes(cursor, table_name)
multi_indexes = self._get_multi_indexes(table_name)
standalone_indexes = self._get_standalone_indexes(table_name)
# Work out new column defs.
for column_info in self._get_connection().introspection.get_table_description(cursor, table_name):
name = column_info[0]
for column_info in self._get_full_table_description(self._get_connection(), cursor, table_name):
name = column_info['name']
if name in deleted:
continue
# Get the type, ignoring PRIMARY KEY (we need to be consistent)
type = column_info[1].replace("PRIMARY KEY", "")
# Add on unique or primary key if needed.
if indexes[name]['unique'] and name not in uniques_deleted:
type += " UNIQUE"
type = column_info['type'].replace("PRIMARY KEY", "")
# Add on primary key, not null or unique if needed.
if (primary_key_override and primary_key_override == name) or \
(not primary_key_override and indexes[name]['primary_key']):
(not primary_key_override and name in indexes and
indexes[name]['primary_key']):
type += " PRIMARY KEY"
elif not column_info['null_ok']:
type += " NOT NULL"
if (name in indexes and indexes[name]['unique'] and
name not in uniques_deleted):
type += " UNIQUE"
if column_info['dflt_value'] is not None:
type += " DEFAULT " + column_info['dflt_value']
# Deal with a rename
if name in renames:
name = renames[name]
# Add to the defs
definitions[name] = type
# Add on altered columns
definitions.update(altered)
for name, type in altered.items():
if (primary_key_override and primary_key_override == name) or \
(not primary_key_override and name in indexes and
indexes[name]['primary_key']):
type += " PRIMARY KEY"
if (name in indexes and indexes[name]['unique'] and
name not in uniques_deleted):
type += " UNIQUE"
definitions[name] = type
# Add on the new columns
definitions.update(added)
for name, (type,_) in added.items():
if (primary_key_override and primary_key_override == name):
type += " PRIMARY KEY"
definitions[name] = type
# Alright, Make the table
self.execute("CREATE TABLE %s (%s)" % (
self.quote_name(temp_name),
", ".join(["%s %s" % (self.quote_name(cname), ctype) for cname, ctype in definitions.items()]),
))
# Copy over the data
self._copy_data(table_name, temp_name, renames)
self._copy_data(table_name, temp_name, renames, added)
# Delete the old table, move our new one over it
self.delete_table(table_name)
self.rename_table(temp_name, table_name)
# Recreate multi-valued indexes
# We can't do that earlier, since it's impossible to rename indexes
# and index name scope is global
self._make_multi_indexes(table_name, multi_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted)
self._make_standalone_indexes(table_name, standalone_indexes, renames=renames, deleted=deleted, uniques_deleted=uniques_deleted)
self.deferred_sql = [] # prevent double indexing
def _copy_data(self, src, dst, field_renames={}):
def _copy_data(self, src, dst, field_renames={}, added={}):
"Used to copy data into a new table"
# Make a list of all the fields to select
cursor = self._get_connection().cursor()
@ -106,6 +136,11 @@ class DatabaseOperations(generic.DatabaseOperations):
else:
continue
src_fields_new.append(self.quote_name(field))
for field, (_,default) in added.items():
if default is not None:
field = self.quote_name(field)
src_fields_new.append("%s as %s" % (default, field))
dst_fields_new.append(field)
# Copy over the data
self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % (
self.quote_name(dst),
@ -115,32 +150,38 @@ class DatabaseOperations(generic.DatabaseOperations):
))
def _create_unique(self, table_name, columns):
self.execute("CREATE UNIQUE INDEX %s ON %s(%s);" % (
self.quote_name('%s_%s' % (table_name, '__'.join(columns))),
self._create_index(table_name, columns, True)
def _create_index(self, table_name, columns, unique=False, index_name=None):
if index_name is None:
index_name = '%s_%s' % (table_name, '__'.join(columns))
self.execute("CREATE %sINDEX %s ON %s(%s);" % (
unique and "UNIQUE " or "",
self.quote_name(index_name),
self.quote_name(table_name),
', '.join(self.quote_name(c) for c in columns),
))
def _get_multi_indexes(self, table_name):
def _get_standalone_indexes(self, table_name):
indexes = []
cursor = self._get_connection().cursor()
cursor.execute('PRAGMA index_list(%s)' % self.quote_name(table_name))
# seq, name, unique
for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
if not unique:
continue
cursor.execute('PRAGMA index_info(%s)' % self.quote_name(index))
info = cursor.fetchall()
if len(info) == 1:
if len(info) == 1 and unique:
# This index is already specified in the CREATE TABLE columns
# specification
continue
columns = []
for field in info:
columns.append(field[2])
indexes.append(columns)
indexes.append((index, columns, unique))
return indexes
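The two PRAGMAs driving this are easy to poke at directly. A stdlib-only illustration (newer SQLite versions append extra index_list columns, so only the first three are read):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE book (id INTEGER PRIMARY KEY, title TEXT, author TEXT)")
conn.execute("CREATE UNIQUE INDEX book_title__author ON book (title, author)")
for row in conn.execute("PRAGMA index_list(book)"):
    name, unique = row[1], row[2]
    # index_info rows are (seqno, cid, column_name)
    columns = [info[2] for info in conn.execute("PRAGMA index_info(%s)" % name)]
    print(name, unique, columns)
# book_title__author 1 ['title', 'author']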
def _make_multi_indexes(self, table_name, indexes, deleted=[], renames={}, uniques_deleted=[]):
for index in indexes:
def _make_standalone_indexes(self, table_name, indexes, deleted=[], renames={}, uniques_deleted=[]):
for index_name, index, unique in indexes:
columns = []
for name in index:
@ -154,28 +195,39 @@ class DatabaseOperations(generic.DatabaseOperations):
name = renames[name]
columns.append(name)
if columns and columns != uniques_deleted:
self._create_unique(table_name, columns)
if columns and (set(columns) != set(uniques_deleted) or not unique):
self._create_index(table_name, columns, unique, index_name)
def _column_sql_for_create(self, table_name, name, field, explicit_name=True):
"Given a field and its name, returns the full type for the CREATE TABLE."
"Given a field and its name, returns the full type for the CREATE TABLE (without unique/pk)"
field.set_attributes_from_name(name)
if not explicit_name:
name = field.db_column
else:
field.column = name
sql = self.column_sql(table_name, name, field, with_name=False, field_prepared=True)
#if field.primary_key:
# sql += " PRIMARY KEY"
#if field.unique:
# sql += " UNIQUE"
# Remove keywords we don't want (this should be type only, not constraint)
if sql:
sql = sql.replace("PRIMARY KEY", "")
return sql
def alter_column(self, table_name, name, field, explicit_name=True):
def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
"""
Changes a column's SQL definition
Changes a column's SQL definition.
Note that this sqlite3 implementation ignores the ignore_constraints argument.
The argument is accepted for API compatibility with the generic
DatabaseOperations.alter_column() method.
"""
# Change nulls to default if needed
if not field.null and field.has_default():
params = {
"column": self.quote_name(name),
"table_name": self.quote_name(table_name)
}
self._update_nulls_to_default(params, field)
# Remake the table correctly
field._suppress_default = True
self._remake_table(table_name, altered={
name: self._column_sql_for_create(table_name, name, field, explicit_name),
})


@ -1,8 +1,12 @@
from traceback import format_exception
from __future__ import print_function
from traceback import format_exception, format_exc
class SouthError(RuntimeError):
pass
class SouthWarning(RuntimeWarning):
pass
class BrokenMigration(SouthError):
def __init__(self, migration, exc_info):
@ -10,6 +14,11 @@ class BrokenMigration(SouthError):
self.exc_info = exc_info
if self.exc_info:
self.traceback = ''.join(format_exception(*self.exc_info))
else:
try:
self.traceback = format_exc()
except AttributeError: # Python3 when there is no previous exception
self.traceback = None
def __str__(self):
return ("While loading migration '%(migration)s':\n"
@ -18,6 +27,8 @@ class BrokenMigration(SouthError):
class UnknownMigration(BrokenMigration):
def __str__(self):
if not hasattr(self, "traceback"):
self.traceback = ""
return ("Migration '%(migration)s' probably doesn't exist.\n"
'%(traceback)s' % self.__dict__)
@ -45,7 +56,7 @@ class MultiplePrefixMatches(SouthError):
self.matches = matches
def __str__(self):
self.matches_list = "\n ".join([unicode(m) for m in self.matches])
self.matches_list = "\n ".join([str(m) for m in self.matches])
return ("Prefix '%(prefix)s' matches more than one migration:\n"
" %(matches_list)s") % self.__dict__
@ -55,7 +66,7 @@ class GhostMigrations(SouthError):
self.ghosts = ghosts
def __str__(self):
self.ghosts_list = "\n ".join([unicode(m) for m in self.ghosts])
self.ghosts_list = "\n ".join([str(m) for m in self.ghosts])
return ("\n\n ! These migrations are in the database but not on disk:\n"
" %(ghosts_list)s\n"
" ! I'm not trusting myself; either fix this yourself by fiddling\n"
@ -68,7 +79,7 @@ class CircularDependency(SouthError):
self.trace = trace
def __str__(self):
trace = " -> ".join([unicode(s) for s in self.trace])
trace = " -> ".join([str(s) for s in self.trace])
return ("Found circular dependency:\n"
" %s") % trace
@ -98,7 +109,7 @@ class DependsOnUnknownMigration(SouthError):
self.depends_on = depends_on
def __str__(self):
print "Migration '%(migration)s' depends on unknown migration '%(depends_on)s'." % self.__dict__
print("Migration '%(migration)s' depends on unknown migration '%(depends_on)s'." % self.__dict__)
class DependsOnUnmigratedApplication(SouthError):
@ -135,3 +146,15 @@ class UnfreezeMeLater(Exception):
class ImpossibleORMUnfreeze(SouthError):
"""Raised if the ORM can't manage to unfreeze all the models in a linear fashion."""
pass
class ConstraintDropped(SouthWarning):
def __init__(self, constraint, table, column=None):
self.table = table
if column:
self.column = ".%s" % column
else:
self.column = ""
self.constraint = constraint
def __str__(self):
return "Constraint %(constraint)s was dropped from %(table)s%(column)s -- was this intended?" % self.__dict__


@ -1 +0,0 @@
/*.pyc


@ -2,11 +2,21 @@
Hacks for the Django 1.0/1.0.2 releases.
"""
import django
from django.conf import settings
from django.db import models
from django.db.models.loading import AppCache, cache
from django.db.backends.creation import BaseDatabaseCreation
from django.db.models.loading import cache
from django.core import management
from django.core.management.commands.flush import Command as FlushCommand
from django.utils.datastructures import SortedDict
from south.utils.py3 import string_types
class SkipFlushCommand(FlushCommand):
def handle_noargs(self, **options):
# no-op to avoid calling flush
return
class Hacks:
def set_installed_apps(self, apps):
@ -19,7 +29,7 @@ class Hacks:
# Make sure it contains strings
if apps:
assert isinstance(apps[0], basestring), "The argument to set_installed_apps must be a list of strings."
assert isinstance(apps[0], string_types), "The argument to set_installed_apps must be a list of strings."
# Monkeypatch in!
settings.INSTALLED_APPS, settings.OLD_INSTALLED_APPS = (
@ -41,14 +51,13 @@ class Hacks:
"""
Used to repopulate AppCache after fiddling with INSTALLED_APPS.
"""
a = AppCache()
a.loaded = False
a.handled = {}
a.postponed = []
a.app_store = SortedDict()
a.app_models = SortedDict()
a.app_errors = {}
a._populate()
cache.loaded = False
cache.handled = set() if django.VERSION >= (1, 6) else {}
cache.postponed = []
cache.app_store = SortedDict()
cache.app_models = SortedDict()
cache.app_errors = {}
cache._populate()
def clear_app_cache(self):
@ -72,4 +81,30 @@ class Hacks:
Rebuilds AppCache with the real model definitions.
"""
cache._populate()
def store_app_cache_state(self):
self.stored_app_cache_state = dict(**cache.__dict__)
def restore_app_cache_state(self):
cache.__dict__ = self.stored_app_cache_state
def patch_flush_during_test_db_creation(self):
"""
Patches BaseDatabaseCreation.create_test_db to not flush database
"""
def patch(f):
def wrapper(*args, **kwargs):
# hold onto the original and replace flush command with a no-op
original_flush_command = management._commands['flush']
try:
management._commands['flush'] = SkipFlushCommand()
# run create_test_db
return f(*args, **kwargs)
finally:
# unpatch flush back to the original
management._commands['flush'] = original_flush_command
return wrapper
BaseDatabaseCreation.create_test_db = patch(BaseDatabaseCreation.create_test_db)
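The wrap-and-restore pattern used here is worth seeing in isolation; a minimal, self-contained sketch with invented names:

# Decorator factory: temporarily swap a registry entry while the
# wrapped callable runs, restoring the original afterwards.
def swap_entry(registry, key, replacement):
    def patch(f):
        def wrapper(*args, **kwargs):
            original = registry[key]
            registry[key] = replacement
            try:
                return f(*args, **kwargs)
            finally:
                registry[key] = original
        return wrapper
    return patch

commands = {"flush": "real flush command"}

@swap_entry(commands, "flush", "no-op")
def create_test_db():
    return commands["flush"]

print(create_test_db())   # no-op
print(commands["flush"])  # real flush command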

View file

@ -1 +0,0 @@
/*.pyc


@ -3,6 +3,7 @@
# These imports trigger the lower-down files
import south.introspection_plugins.geodjango
import south.introspection_plugins.django_audit_log
import south.introspection_plugins.django_tagging
import south.introspection_plugins.django_taggit
import south.introspection_plugins.django_objectpermissions


@ -1,9 +1,11 @@
from django.conf import settings
from south.modelsinspector import add_introspection_rules
try:
from annoying.fields import AutoOneToOneField
except ImportError:
pass
else:
#django-annoying's AutoOneToOneField is essentially a OneToOneField.
add_introspection_rules([], ["^annoying\.fields\.AutoOneToOneField"])
if 'annoying' in settings.INSTALLED_APPS:
try:
from annoying.fields import AutoOneToOneField
except ImportError:
pass
else:
#django-annoying's AutoOneToOneField is essentially a OneToOneField.
add_introspection_rules([], ["^annoying\.fields\.AutoOneToOneField"])


@ -0,0 +1,30 @@
"""
South introspection rules for django-audit-log
"""
from django.contrib.auth.models import User
from django.conf import settings
from south.modelsinspector import add_introspection_rules
if "audit_log" in settings.INSTALLED_APPS:
try:
# Try and import the field so we can see if audit_log is available
from audit_log.models import fields
# Make sure the `to` and `null` parameters will be ignored
rules = [(
(fields.LastUserField,),
[],
{
'to': ['rel.to', {'default': User}],
'null': ['null', {'default': True}],
},
)]
# Add the rules for the `LastUserField`
add_introspection_rules(
rules,
['^audit_log\.models\.fields\.LastUserField'],
)
except ImportError:
pass
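The rule triple above follows South's general format: the field classes a rule applies to, specs for positional arguments, and a keyword map. A skeletal example with a hypothetical field class and regex:

# Hypothetical illustration of the add_introspection_rules() data
# format; MyField and the pattern below are made up.
class MyField(object):
    pass

rules = [(
    (MyField,),  # field classes this rule matches
    [],          # specs for positional constructor arguments (none here)
    {            # keyword name -> [attribute to introspect, options]
        "max_length": ["max_length", {"default": 100}],
    },
)]
# Registered with something like:
# add_introspection_rules(rules, [r"^myapp\.fields\.MyField"])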


@ -2,13 +2,15 @@
South introspection rules for django-objectpermissions
"""
from django.conf import settings
from south.modelsinspector import add_ignored_fields
try:
from objectpermissions.models import UserPermissionRelation, GroupPermissionRelation
except ImportError:
pass
else:
add_ignored_fields(["^objectpermissions\.models\.UserPermissionRelation",
"^objectpermissions\.models\.GroupPermissionRelation"])
if 'objectpermissions' in settings.INSTALLED_APPS:
try:
from objectpermissions.models import UserPermissionRelation, GroupPermissionRelation
except ImportError:
pass
else:
add_ignored_fields(["^objectpermissions\.models\.UserPermissionRelation",
"^objectpermissions\.models\.GroupPermissionRelation"])


@ -2,11 +2,13 @@
South introspection rules for django-taggit
"""
from django.conf import settings
from south.modelsinspector import add_ignored_fields
try:
from taggit.managers import TaggableManager
except ImportError:
pass
else:
add_ignored_fields(["^taggit\.managers"])
if 'taggit' in settings.INSTALLED_APPS:
try:
from taggit.managers import TaggableManager
except ImportError:
pass
else:
add_ignored_fields(["^taggit\.managers"])


@ -24,6 +24,7 @@ if has_gis:
"srid": ["srid", {"default": 4326}],
"spatial_index": ["spatial_index", {"default": True}],
"dim": ["dim", {"default": 2}],
"geography": ["geography", {"default": False}],
},
),
]


@ -7,10 +7,6 @@ class NullHandler(logging.Handler):
def emit(self, record):
pass
_logger = logging.getLogger("south")
_logger.addHandler(NullHandler())
_logger.setLevel(logging.DEBUG)
def get_logger():
"Attach a file handler to the logger if there isn't one already."
debug_on = getattr(settings, "SOUTH_LOGGING_ON", False)
@ -22,7 +18,7 @@ def get_logger():
_logger.addHandler(logging.FileHandler(logging_file))
_logger.setLevel(logging.DEBUG)
else:
raise IOError, "SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting."
raise IOError("SOUTH_LOGGING_ON is True. You also need a SOUTH_LOGGING_FILE setting.")
return _logger
@ -31,4 +27,12 @@ def close_logger():
for handler in _logger.handlers:
_logger.removeHandler(handler)
if isinstance(handler, logging.FileHandler):
handler.close()
handler.close()
def init_logger():
"Initialize the south logger"
logger = logging.getLogger("south")
logger.addHandler(NullHandler())
return logger
_logger = init_logger()
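For completeness, the two settings get_logger() consults would look like this in a project's settings.py (the file path is illustrative):

# settings.py -- illustrative values
SOUTH_LOGGING_ON = True                # without this, logging stays off
SOUTH_LOGGING_FILE = "/tmp/south.log"  # required once logging is on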


@ -1 +0,0 @@
/*.pyc


@ -1 +0,0 @@
/*.pyc


@ -9,6 +9,7 @@ from django.conf import settings
# Make sure the template loader cache is fixed _now_ (#448)
import django.template.loaders.app_directories
from south.hacks import hacks
from south.management.commands.syncdb import Command as SyncCommand
class MigrateAndSyncCommand(SyncCommand):
@ -30,4 +31,10 @@ def patch_for_test_db_setup():
# tests should always be up to date with the most recent model structure
management._commands['syncdb'] = 'django.core'
else:
management._commands['syncdb'] = MigrateAndSyncCommand()
management._commands['syncdb'] = MigrateAndSyncCommand()
# Avoid flushing data migrations.
# http://code.djangoproject.com/ticket/14661 introduced a change that flushed
# custom SQL during test database creation (thus flushing the data migrations).
# We patch flush to be a no-op during create_test_db, but still allow flushing
# after each test for non-transactional backends.
hacks.patch_flush_during_test_db_creation()


@ -2,6 +2,8 @@
Quick conversion command module.
"""
from __future__ import print_function
from optparse import make_option
import sys
@ -38,7 +40,7 @@ class Command(BaseCommand):
# Make sure we have an app
if not app:
print "Please specify an app to convert."
print("Please specify an app to convert.")
return
# See if the app exists
@ -46,14 +48,14 @@ class Command(BaseCommand):
try:
app_module = models.get_app(app)
except ImproperlyConfigured:
print "There is no enabled application matching '%s'." % app
print("There is no enabled application matching '%s'." % app)
return
# Try to get its list of models
model_list = models.get_models(app_module)
if not model_list:
print "This application has no models; this command is for applications that already have models syncdb'd."
print "Make some models, and then use ./manage.py startmigration %s --initial instead." % app
print("This application has no models; this command is for applications that already have models syncdb'd.")
print("Make some models, and then use ./manage.py schemamigration %s --initial instead." % app)
return
# Ask South if it thinks it's already got migrations
@ -62,7 +64,7 @@ class Command(BaseCommand):
except NoMigrations:
pass
else:
print "This application is already managed by South."
print("This application is already managed by South.")
return
# Finally! It seems we've got a candidate, so do the two-command trick
@ -87,7 +89,7 @@ class Command(BaseCommand):
delete_ghosts=options.get("delete_ghosts", False),
)
print
print "App '%s' converted. Note that South assumed the application's models matched the database" % app
print "(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app
print "directory, revert models.py so it matches the database, and try again."
print()
print("App '%s' converted. Note that South assumed the application's models matched the database" % app)
print("(i.e. you haven't changed it since last syncdb); if you have, you should delete the %s/migrations" % app)
print("directory, revert models.py so it matches the database, and try again.")


@ -2,6 +2,8 @@
Data migration creation command
"""
from __future__ import print_function
import sys
import os
import re
@ -32,6 +34,8 @@ class Command(BaseCommand):
usage_str = "Usage: ./manage.py datamigration appname migrationname [--stdout] [--freeze appname]"
def handle(self, app=None, name="", freeze_list=None, stdout=False, verbosity=1, **options):
verbosity = int(verbosity)
# Any supposed lists that are None become empty lists
freeze_list = freeze_list or []
@ -44,13 +48,19 @@ class Command(BaseCommand):
if re.search('[^_\w]', name) and name != "-":
self.error("Migration names should contain only alphanumeric characters and underscores.")
# if not name, there's an error
# If not name, there's an error
if not name:
self.error("You must provide a name for this migration\n" + self.usage_str)
self.error("You must provide a name for this migration.\n" + self.usage_str)
if not app:
self.error("You must provide an app to create a migration for.\n" + self.usage_str)
# Ensure that verbosity is not a string (Python 3)
try:
verbosity = int(verbosity)
except ValueError:
self.error("Verbosity must be an number.\n" + self.usage_str)
# Get the Migrations for this app (creating the migrations dir if needed)
migrations = Migrations(app, force_creation=True, verbose_creation=verbosity > 0)
@ -61,20 +71,20 @@ class Command(BaseCommand):
apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)
# So, what's in this file, then?
file_contents = MIGRATION_TEMPLATE % {
file_contents = self.get_migration_template() % {
"frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
"complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
}
# - is a special name which means 'print to stdout'
if name == "-":
print file_contents
print(file_contents)
# Write the migration file if the name isn't -
else:
fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
fp.write(file_contents)
fp.close()
print >>sys.stderr, "Created %s." % new_filename
print("Created %s." % new_filename, file=sys.stderr)
def calc_frozen_apps(self, migrations, freeze_list):
"""
@ -98,12 +108,15 @@ class Command(BaseCommand):
"""
Prints the error, and exits with the given code.
"""
print >>sys.stderr, message
print(message, file=sys.stderr)
sys.exit(code)
def get_migration_template(self):
return MIGRATION_TEMPLATE
MIGRATION_TEMPLATE = """# encoding: utf-8
import datetime
MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
@ -112,13 +125,15 @@ class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
def backwards(self, orm):
"Write your backwards methods here."
models = %(frozen_models)s
%(complete_apps)s
symmetrical = True
"""


@ -2,7 +2,11 @@
Outputs a graphviz dot file of the dependencies.
"""
from __future__ import print_function
from optparse import make_option
import re
import textwrap
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
@ -17,23 +21,43 @@ class Command(BaseCommand):
# Resolve dependencies
Migrations.calculate_dependencies()
colors = [ 'crimson', 'darkgreen', 'darkgoldenrod', 'navy',
'brown', 'darkorange', 'aquamarine', 'blueviolet']
color_index = 0
wrapper = textwrap.TextWrapper(width=40)
print "digraph G {"
print("digraph G {")
# Print each app in a cluster
#for migrations in all_migrations():
# print " subgraph %s {" % migrations.app_label()
# # Nodes inside here are linked
# print (" -> ".join(['"%s.%s"' % (migration.app_label(), migration.name()) for migration in migrations])) + ";"
# print " }"
# Group each app in a subgraph
for migrations in all_migrations():
print(" subgraph %s {" % migrations.app_label())
print(" node [color=%s];" % colors[color_index])
for migration in migrations:
# Munge the label - text wrap and change _ to spaces
label = "%s - %s" % (
migration.app_label(), migration.name())
label = re.sub(r"_+", " ", label)
label= "\\n".join(wrapper.wrap(label))
print(' "%s.%s" [label="%s"];' % (
migration.app_label(), migration.name(), label))
print(" }")
color_index = (color_index + 1) % len(colors)
# For every migration, print its links.
for migrations in all_migrations():
for migration in migrations:
for other in migration.dependencies:
print '"%s.%s" -> "%s.%s"' % (
# Added weight tends to keep migrations from the same app
# in vertical alignment
attrs = "[weight=2.0]"
# But the more interesting edges are those between apps
if other.app_label() != migration.app_label():
attrs = "[style=bold]"
print(' "%s.%s" -> "%s.%s" %s;' % (
other.app_label(), other.name(),
migration.app_label(), migration.name(),
)
attrs
))
print "}";
print("}");


@ -2,17 +2,18 @@
Migrate management command.
"""
import sys
from __future__ import print_function
import os.path, re, sys
from functools import reduce
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.conf import settings
from django.db import models
from django.utils.importlib import import_module
from south import migration
from south.migration import Migration, Migrations
from south.migration.utils import get_app_label
from south.migration import Migrations
from south.exceptions import NoMigrations
from south.db import DEFAULT_DB_ALIAS
@ -22,6 +23,8 @@ class Command(BaseCommand):
help='Run the specified migration for all apps.'),
make_option('--list', action='store_true', dest='show_list', default=False,
help='List migrations noting those that have been applied'),
make_option('--changes', action='store_true', dest='show_changes', default=False,
help='List changes for migrations'),
make_option('--skip', action='store_true', dest='skip', default=False,
help='Will skip over out-of-order missing migrations'),
make_option('--merge', action='store_true', dest='merge', default=False,
@ -51,7 +54,7 @@ class Command(BaseCommand):
help = "Runs migrations for all apps."
args = "[appname] [migrationname|zero] [--all] [--list] [--skip] [--merge] [--no-initial-data] [--fake] [--db-dry-run] [--database=dbalias]"
def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, show_list=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, **options):
def handle(self, app=None, target=None, skip=False, merge=False, backwards=False, fake=False, db_dry_run=False, show_list=False, show_changes=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, **options):
# NOTE: THIS IS DUPLICATED FROM django.core.management.commands.syncdb
# This code imports any module named 'management' in INSTALLED_APPS.
@ -60,8 +63,8 @@ class Command(BaseCommand):
# we need apps to behave correctly.
for app_name in settings.INSTALLED_APPS:
try:
__import__(app_name + '.management', {}, {}, [''])
except ImportError, exc:
import_module('.management', app_name)
except ImportError as exc:
msg = exc.args[0]
if not msg.startswith('No module named') or 'management' not in msg:
raise
@ -77,17 +80,20 @@ class Command(BaseCommand):
try:
apps = [Migrations(app)]
except NoMigrations:
print "The app '%s' does not appear to use migrations." % app
print "./manage.py migrate " + self.args
print("The app '%s' does not appear to use migrations." % app)
print("./manage.py migrate " + self.args)
return
else:
apps = list(migration.all_migrations())
# Do we need to show the list of migrations?
if show_list and apps:
list_migrations(apps, database)
list_migrations(apps, database, **options)
if show_changes and apps:
show_migration_changes(apps)
if not show_list:
if not (show_list or show_changes):
for app in apps:
result = migration.migrate_app(
@ -108,7 +114,7 @@ class Command(BaseCommand):
sys.exit(1) # Migration failed, so the command fails.
def list_migrations(apps, database = DEFAULT_DB_ALIAS):
def list_migrations(apps, database = DEFAULT_DB_ALIAS, **options):
"""
Prints a list of all available migrations, and which ones are currently applied.
Accepts a list of Migrations instances.
@ -117,21 +123,142 @@ def list_migrations(apps, database = DEFAULT_DB_ALIAS):
applied_migrations = MigrationHistory.objects.filter(app_name__in=[app.app_label() for app in apps])
if database != DEFAULT_DB_ALIAS:
applied_migrations = applied_migrations.using(database)
applied_migrations = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations]
applied_migrations_lookup = dict(('%s.%s' % (mi.app_name, mi.migration), mi) for mi in applied_migrations)
print
print()
for app in apps:
print " " + app.app_label()
print(" " + app.app_label())
# Get the migrations object
for migration in app:
if migration.app_label() + "." + migration.name() in applied_migrations:
print format_migration_list_item(migration.name())
full_name = migration.app_label() + "." + migration.name()
if full_name in applied_migrations_lookup:
applied_migration = applied_migrations_lookup[full_name]
print(format_migration_list_item(migration.name(), applied=applied_migration.applied, **options))
else:
print format_migration_list_item(migration.name(), applied=False)
print
print(format_migration_list_item(migration.name(), applied=False, **options))
print()
def show_migration_changes(apps):
"""
Prints the changes introduced by each migration, app by app.
Accepts a list of Migrations instances.
Much simpler, less clear, and much less robust version:
grep "ing " migrations/*.py
"""
for app in apps:
print(app.app_label())
# Get the migrations objects
migrations = [migration for migration in app]
# We use reduce() to compare consecutive migrations' models in pairs, not to compute a value
reduce(diff_migrations, migrations)
def format_migration_list_item(name, applied=True):
def format_migration_list_item(name, applied=True, **options):
if applied:
return ' (*) %s' % name
return ' ( ) %s' % name
if int(options.get('verbosity')) >= 2:
return ' (*) %-80s (applied %s)' % (name, applied)
else:
return ' (*) %s' % name
else:
return ' ( ) %s' % name
def diff_migrations(migration1, migration2):
def model_name(models, model):
return models[model].get('Meta', {}).get('object_name', model)
def field_name(models, model, field):
return '%s.%s' % (model_name(models, model), field)
print(" " + migration2.name())
models1 = migration1.migration_class().models
models2 = migration2.migration_class().models
# find new models
for model in models2.keys():
if not model in models1.keys():
print(' added model %s' % model_name(models2, model))
# find removed models
for model in models1.keys():
if not model in models2.keys():
print(' removed model %s' % model_name(models1, model))
# compare models
for model in models1:
if model in models2:
# find added fields
for field in models2[model]:
if not field in models1[model]:
print(' added field %s' % field_name(models2, model, field))
# find removed fields
for field in models1[model]:
if not field in models2[model]:
print(' removed field %s' % field_name(models1, model, field))
# compare fields
for field in models1[model]:
if field in models2[model]:
name = field_name(models1, model, field)
# compare field attributes
field_value1 = models1[model][field]
field_value2 = models2[model][field]
# if a field has become a class, or vice versa
if type(field_value1) != type(field_value2):
print(' type of %s changed from %s to %s' % (
name, field_value1, field_value2))
# if class
elif isinstance(field_value1, dict):
# print ' %s is a class' % name
pass
# else regular field
else:
type1, attr_list1, field_attrs1 = models1[model][field]
type2, attr_list2, field_attrs2 = models2[model][field]
if type1 != type2:
print(' %s type changed from %s to %s' % (
name, type1, type2))
if attr_list1 != []:
print(' %s list %s is not []' % (
name, attr_list1))
if attr_list2 != []:
print(' %s list %s is not []' % (
name, attr_list2))
if attr_list1 != attr_list2:
print(' %s list changed from %s to %s' % (
name, attr_list1, attr_list2))
# find added field attributes
for attr in field_attrs2:
if not attr in field_attrs1:
print(' added %s attribute %s=%s' % (
name, attr, field_attrs2[attr]))
# find removed field attributes
for attr in field_attrs1:
if not attr in field_attrs2:
print(' removed attribute %s(%s=%s)' % (
name, attr, field_attrs1[attr]))
# compare field attributes
for attr in field_attrs1:
if attr in field_attrs2:
value1 = field_attrs1[attr]
value2 = field_attrs2[attr]
if value1 != value2:
print(' %s attribute %s changed from %s to %s' % (
name, attr, value1, value2))
return migration2
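The reduce() call above is used purely for its visiting order: because diff_migrations returns its second argument, reduce walks consecutive pairs. A small standalone example:

from functools import reduce

def diff_pair(a, b):
    print("diff %s -> %s" % (a, b))
    return b  # returning b makes the next call (b, c)

reduce(diff_pair, ["0001_initial", "0002_add_author", "0003_add_rating"])
# diff 0001_initial -> 0002_add_author
# diff 0002_add_author -> 0003_add_rating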


@ -0,0 +1,67 @@
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command, CommandError
from django.core.management.base import BaseCommand
from django.conf import settings
from django.db.models import loading
from django.test import simple
from south.migration import Migrations
from south.exceptions import NoMigrations
from south.hacks import hacks
class Command(BaseCommand):
help = "Runs migrations for each app in turn, detecting missing depends_on values."
usage_str = "Usage: ./manage.py migrationcheck"
def handle(self, check_app_name=None, **options):
runner = simple.DjangoTestSuiteRunner(verbosity=0)
err_msg = "Failed to migrate %s; see output for hints at missing dependencies:\n"
hacks.patch_flush_during_test_db_creation()
failures = 0
if check_app_name is None:
app_names = settings.INSTALLED_APPS
else:
app_names = [check_app_name]
for app_name in app_names:
app_label = app_name.split(".")[-1]
if app_name == 'south':
continue
try:
Migrations(app_name)
except (NoMigrations, ImproperlyConfigured):
continue
app = loading.get_app(app_label)
verbosity = int(options.get('verbosity', 1))
if verbosity >= 1:
self.stderr.write("processing %s\n" % app_name)
old_config = runner.setup_databases()
try:
call_command('migrate', app_label, noinput=True, verbosity=verbosity)
for model in loading.get_models(app):
dummy = model._default_manager.exists()
except (KeyboardInterrupt, SystemExit):
raise
except Exception as e:
failures += 1
if verbosity >= 1:
self.stderr.write(err_msg % app_name)
self.stderr.write("%s\n" % e)
finally:
runner.teardown_databases(old_config)
if failures > 0:
raise CommandError("Missing depends_on found in %s app(s)." % failures)
self.stderr.write("No missing depends_on found.\n")
#
#for each app:
# start with blank db.
# syncdb only south (and contrib?)
#
# migrate a single app all the way up. any errors is missing depends_on.
# for all models of that app, try the default manager:
# from django.db.models import loading
# for m in loading.get_models(loading.get_app('a')):
# m._default_manager.exists()
# Any error is also a missing depends_on.


@ -2,6 +2,8 @@
Startmigration command, version 2.
"""
from __future__ import print_function
import sys
import os
import re
@ -17,10 +19,12 @@ except NameError:
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.conf import settings
from south.migration import Migrations
from south.migration import Migrations, migrate_app
from south.models import MigrationHistory
from south.exceptions import NoMigrations
from south.creator import changes, actions, freezer
from south.management.commands.datamigration import Command as DataCommand
@ -28,7 +32,7 @@ from south.management.commands.datamigration import Command as DataCommand
class Command(DataCommand):
option_list = DataCommand.option_list + (
make_option('--add-model', action='append', dest='added_model_list', type='string',
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'),
make_option('--add-field', action='append', dest='added_field_list', type='string',
help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--add-index', action='append', dest='added_index_list', type='string',
@ -39,11 +43,13 @@ class Command(DataCommand):
help='Attempt to automatically detect differences from the last migration.'),
make_option('--empty', action='store_true', dest='empty', default=False,
help='Make a blank migration.'),
make_option('--update', action='store_true', dest='update', default=False,
help='Update the most recent migration instead of creating a new one. Rollback this migration if it is already applied.'),
)
help = "Creates a new template schema migration for the given app"
usage_str = "Usage: ./manage.py schemamigration appname migrationname [--empty] [--initial] [--auto] [--add-model ModelName] [--add-field ModelName.field_name] [--stdout]"
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, freeze_list=None, initial=False, auto=False, stdout=False, added_index_list=None, verbosity=1, empty=False, **options):
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, freeze_list=None, initial=False, auto=False, stdout=False, added_index_list=None, verbosity=1, empty=False, update=False, **options):
# Any supposed lists that are None become empty lists
added_model_list = added_model_list or []
@ -68,15 +74,23 @@ class Command(DataCommand):
if not app:
self.error("You must provide an app to create a migration for.\n" + self.usage_str)
# See if the app exists
app = app.split(".")[-1]
try:
app_module = models.get_app(app)
except ImproperlyConfigured:
print("There is no enabled application matching '%s'." % app)
return
# Get the Migrations for this app (creating the migrations dir if needed)
migrations = Migrations(app, force_creation=True, verbose_creation=verbosity > 0)
migrations = Migrations(app, force_creation=True, verbose_creation=int(verbosity) > 0)
# What actions do we need to do?
if auto:
# Get the old migration
try:
last_migration = migrations[-1]
last_migration = migrations[-2 if update else -1]
except IndexError:
self.error("You cannot use --auto on an app with no migrations. Try --initial.")
# Make sure it has stored models
@ -114,19 +128,22 @@ class Command(DataCommand):
elif empty:
change_source = None
else:
print >>sys.stderr, "You have not passed any of --initial, --auto, --empty, --add-model, --add-field or --add-index."
print("You have not passed any of --initial, --auto, --empty, --add-model, --add-field or --add-index.", file=sys.stderr)
sys.exit(1)
# Validate this so we can access the last migration without worrying
if update and not migrations:
self.error("You cannot use --update on an app with no migrations.")
# if not name, there's an error
if not name:
if change_source:
name = change_source.suggest_name()
if update:
name = re.sub(r'^\d{4}_', '', migrations[-1].name())
if not name:
self.error("You must provide a name for this migration\n" + self.usage_str)
# See what filename is next in line. We assume they use numbers.
new_filename = migrations.next_filename(name)
# Get the actions, and then insert them into the actions lists
forwards_actions = []
backwards_actions = []
@ -141,7 +158,7 @@ class Command(DataCommand):
action = action_class(**params)
action.add_forwards(forwards_actions)
action.add_backwards(backwards_actions)
print >>sys.stderr, action.console_line()
print(action.console_line(), file=sys.stderr)
# Nowt happen? That's not good for --auto.
if auto and not forwards_actions:
@ -151,44 +168,62 @@ class Command(DataCommand):
apps_to_freeze = self.calc_frozen_apps(migrations, freeze_list)
# So, what's in this file, then?
file_contents = MIGRATION_TEMPLATE % {
"forwards": "\n".join(forwards_actions or ["pass"]),
"backwards": "\n".join(backwards_actions or ["pass"]),
file_contents = self.get_migration_template() % {
"forwards": "\n".join(forwards_actions or [" pass"]),
"backwards": "\n".join(backwards_actions or [" pass"]),
"frozen_models": freezer.freeze_apps_to_string(apps_to_freeze),
"complete_apps": apps_to_freeze and "complete_apps = [%s]" % (", ".join(map(repr, apps_to_freeze))) or ""
}
# Deal with update mode as late as possible; avoid a rollback while
# something else could still go wrong.
if update:
last_migration = migrations[-1]
if MigrationHistory.objects.filter(applied__isnull=False, app_name=app, migration=last_migration.name()):
print("Migration to be updated, %s, is already applied, rolling it back now..." % last_migration.name(), file=sys.stderr)
migrate_app(migrations, 'current-1', verbosity=verbosity)
for ext in ('py', 'pyc'):
old_filename = "%s.%s" % (os.path.join(migrations.migrations_dir(), last_migration.filename), ext)
if os.path.isfile(old_filename):
os.unlink(old_filename)
migrations.remove(last_migration)
# See what filename is next in line. We assume they use numbers.
new_filename = migrations.next_filename(name)
# - is a special name which means 'print to stdout'
if name == "-":
print file_contents
print(file_contents)
# Write the migration file if the name isn't -
else:
fp = open(os.path.join(migrations.migrations_dir(), new_filename), "w")
fp.write(file_contents)
fp.close()
verb = 'Updated' if update else 'Created'
if empty:
print >>sys.stderr, "Created %s. You must now edit this migration and add the code for each direction." % new_filename
print("%s %s. You must now edit this migration and add the code for each direction." % (verb, new_filename), file=sys.stderr)
else:
print >>sys.stderr, "Created %s. You can now apply this migration with: ./manage.py migrate %s" % (new_filename, app)
print("%s %s. You can now apply this migration with: ./manage.py migrate %s" % (verb, new_filename, app), file=sys.stderr)
def get_migration_template(self):
return MIGRATION_TEMPLATE
MIGRATION_TEMPLATE = """# encoding: utf-8
import datetime
MIGRATION_TEMPLATE = """# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
%(forwards)s
%(forwards)s
def backwards(self, orm):
%(backwards)s
%(backwards)s
models = %(frozen_models)s
%(complete_apps)s
"""
%(complete_apps)s"""


@ -2,6 +2,8 @@
Now-obsolete startmigration command.
"""
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
@ -10,7 +12,7 @@ from django.core.management.color import no_style
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--model', action='append', dest='added_model_list', type='string',
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --add-model parameters.'),
make_option('--add-field', action='append', dest='added_field_list', type='string',
help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--add-index', action='append', dest='added_index_list', type='string',
@ -28,4 +30,4 @@ class Command(BaseCommand):
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, stdout=False, added_index_list=None, **options):
print "The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands."
print("The 'startmigration' command is now deprecated; please use the new 'schemamigration' and 'datamigration' commands.")


@ -2,6 +2,8 @@
Overridden syncdb command
"""
from __future__ import print_function
import sys
from optparse import make_option
@ -49,7 +51,7 @@ class Command(NoArgsCommand):
for app_name in settings.INSTALLED_APPS:
try:
import_module('.management', app_name)
except ImportError, exc:
except ImportError as exc:
msg = exc.args[0]
if not msg.startswith('No module named') or 'management' not in msg:
raise
@ -74,7 +76,7 @@ class Command(NoArgsCommand):
# Run syncdb on only the ones needed
if verbosity:
print "Syncing..."
print("Syncing...")
old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
old_app_store, cache.app_store = cache.app_store, SortedDict([
@ -95,17 +97,19 @@ class Command(NoArgsCommand):
# Migrate if needed
if options.get('migrate', True):
if verbosity:
print "Migrating..."
print("Migrating...")
# convert from store_true to store_false
options['no_initial_data'] = not options.get('load_initial_data', True)
management.call_command('migrate', **options)
# Be obvious about what we did
if verbosity:
print "\nSynced:\n > %s" % "\n > ".join(apps_needing_sync)
print("\nSynced:\n > %s" % "\n > ".join(apps_needing_sync))
if options.get('migrate', True):
if verbosity:
print "\nMigrated:\n - %s" % "\n - ".join(apps_migrated)
print("\nMigrated:\n - %s" % "\n - ".join(apps_migrated))
else:
if verbosity:
print "\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated)
print "(use ./manage.py migrate to migrate these)"
print("\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated))
print("(use ./manage.py migrate to migrate these)")


@ -2,6 +2,8 @@
Main migration logic.
"""
from __future__ import print_function
import sys
from django.core.exceptions import ImproperlyConfigured
@ -50,16 +52,21 @@ def inner_problem_check(problems, done, verbosity):
"Takes a set of possible problems and gets the actual issues out of it."
result = []
for last, migration in problems:
checked = set([])
# 'Last' is the last applied migration. Step back from it until we
# either find nothing wrong, or we find something.
to_check = list(last.dependencies)
while to_check:
checking = to_check.pop()
if checking in checked:
continue
checked.add(checking)
if checking not in done:
# That's bad. Error.
if verbosity:
print (" ! Migration %s should not have been applied "
"before %s but was." % (last, checking))
print((" ! Migration %s should not have been applied "
"before %s but was." % (last, checking)))
result.append((last, checking))
else:
to_check.extend(checking.dependencies)
@ -142,44 +149,66 @@ def get_migrator(direction, db_dry_run, fake, load_initial_data):
direction = LoadInitialDataMigrator(migrator=direction)
return direction
def get_unapplied_migrations(migrations, applied_migrations):
applied_migration_names = ['%s.%s' % (mi.app_name,mi.migration) for mi in applied_migrations]
for migration in migrations:
is_applied = '%s.%s' % (migration.app_label(), migration.name()) in applied_migration_names
if not is_applied:
yield migration
def migrate_app(migrations, target_name=None, merge=False, fake=False, db_dry_run=False, yes=False, verbosity=0, load_initial_data=False, skip=False, database=DEFAULT_DB_ALIAS, delete_ghosts=False, ignore_ghosts=False, interactive=False):
app_label = migrations.app_label()
verbosity = int(verbosity)
# Fire off the pre-migrate signal
pre_migrate.send(None, app=app_label)
pre_migrate.send(None, app=app_label, verbosity=verbosity, interactive=verbosity, db=database)
# If there aren't any, quit quizzically
if not migrations:
print "? You have no migrations for the '%s' app. You might want some." % app_label
print("? You have no migrations for the '%s' app. You might want some." % app_label)
return
# Load the entire dependency graph
Migrations.calculate_dependencies()
# Check there's no strange ones in the database
applied = MigrationHistory.objects.filter(applied__isnull=False)
# If we're using a different database, use that
if database != DEFAULT_DB_ALIAS:
applied = applied.using(database)
south.db.db = south.db.dbs[database]
# We now have to make sure the migrations are all reloaded, as they'll
# have imported the old value of south.db.db.
Migrations.invalidate_all_modules()
applied_all = MigrationHistory.objects.filter(applied__isnull=False).order_by('applied').using(database)
applied = applied_all.filter(app_name=app_label).using(database)
south.db.db = south.db.dbs[database]
Migrations.invalidate_all_modules()
south.db.db.debug = (verbosity > 1)
applied = check_migration_histories(applied, delete_ghosts, ignore_ghosts)
if target_name == 'current-1':
if applied.count() > 1:
previous_migration = applied[applied.count() - 2]
if verbosity:
print('previous_migration: %s (applied: %s)' % (previous_migration.migration, previous_migration.applied))
target_name = previous_migration.migration
else:
if verbosity:
print('previous_migration: zero')
target_name = 'zero'
elif target_name == 'current+1':
try:
first_unapplied_migration = get_unapplied_migrations(migrations, applied).next()
target_name = first_unapplied_migration.name()
except StopIteration:
target_name = None
applied_all = check_migration_histories(applied_all, delete_ghosts, ignore_ghosts)
# Guess the target_name
target = migrations.guess_migration(target_name)
if verbosity:
if target_name not in ('zero', None) and target.name() != target_name:
print " - Soft matched migration %s to %s." % (target_name,
target.name())
print "Running migrations for %s:" % app_label
print(" - Soft matched migration %s to %s." % (target_name,
target.name()))
print("Running migrations for %s:" % app_label)
# Get the forwards and reverse dependencies for this target
direction, problems, workplan = get_direction(target, applied, migrations,
direction, problems, workplan = get_direction(target, applied_all, migrations,
verbosity, interactive)
if problems and not (merge or skip):
raise exceptions.InconsistentMigrationHistory(problems)
@ -191,15 +220,16 @@ def migrate_app(migrations, target_name=None, merge=False, fake=False, db_dry_ru
success = migrator.migrate_many(target, workplan, database)
# Finally, fire off the post-migrate signal
if success:
post_migrate.send(None, app=app_label)
elif verbosity:
# Say there's nothing.
print '- Nothing to migrate.'
post_migrate.send(None, app=app_label, verbosity=verbosity, interactive=verbosity, db=database)
else:
if verbosity:
# Say there's nothing.
print('- Nothing to migrate.')
# If we have initial data enabled, and we're at the most recent
# migration, do initial data.
# Note: We use a fake Forwards() migrator here. It's never used really.
if load_initial_data:
migrator = LoadInitialDataMigrator(migrator=Forwards(verbosity=verbosity))
migrator.load_initial_data(target)
migrator.load_initial_data(target, db=database)
# Send signal.
post_migrate.send(None, app=app_label)
post_migrate.send(None, app=app_label, verbosity=verbosity, interactive=verbosity, db=database)
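The extra arguments now sent with these signals can be consumed by an ordinary receiver; a sketch, assuming south.signals is importable and with an invented receiver name:

from south.signals import post_migrate

def announce(sender, app=None, verbosity=0, interactive=False, db=None, **kwargs):
    # Fires once per migrated app, with the database alias that was used
    if verbosity:
        print("finished migrating %s on %s" % (app, db))

post_migrate.connect(announce)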


@ -1,5 +1,8 @@
from __future__ import print_function
from collections import deque
import datetime
from imp import reload
import os
import re
import sys
@ -7,13 +10,14 @@ import sys
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.conf import settings
from django.utils import importlib
from south import exceptions
from south.migration.utils import depends, dfs, flatten, get_app_label
from south.orm import FakeORM
from south.utils import memoize, ask_for_it_by_name
from south.utils import memoize, ask_for_it_by_name, datetime_utils
from south.migration.utils import app_label_to_app_module
from south.utils.py3 import string_types, with_metaclass
def all_migrations(applications=None):
"""
@ -33,7 +37,7 @@ def all_migrations(applications=None):
def application_to_app_label(application):
"Works out the app label from either the app label, the app name, or the module"
if isinstance(application, basestring):
if isinstance(application, string_types):
app_label = application.split('.')[-1]
else:
app_label = application.__name__.split('.')[-1]
@ -66,13 +70,11 @@ class MigrationsMetaclass(type):
self.instances = {}
class Migrations(list):
class Migrations(with_metaclass(MigrationsMetaclass, list)):
"""
Holds a list of Migration objects for a particular app.
"""
__metaclass__ = MigrationsMetaclass
if getattr(settings, "SOUTH_USE_PYC", False):
MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
@ -93,14 +95,14 @@ class Migrations(list):
# Make the directory if it's not already there
if not os.path.isdir(migrations_dir):
if verbose:
print "Creating migrations directory at '%s'..." % migrations_dir
print("Creating migrations directory at '%s'..." % migrations_dir)
os.mkdir(migrations_dir)
# Same for __init__.py
init_path = os.path.join(migrations_dir, "__init__.py")
if not os.path.isfile(init_path):
# Touch the init py file
if verbose:
print "Creating __init__.py in '%s'..." % migrations_dir
print("Creating __init__.py in '%s'..." % migrations_dir)
open(init_path, "w").close()
def migrations_dir(self):
@ -111,11 +113,11 @@ class Migrations(list):
"""
module_path = self.migrations_module()
try:
module = __import__(module_path, {}, {}, [''])
module = importlib.import_module(module_path)
except ImportError:
# There's no migrations module made yet; guess!
try:
parent = __import__(".".join(module_path.split(".")[:-1]), {}, {}, [''])
parent = importlib.import_module(".".join(module_path.split(".")[:-1]))
except ImportError:
# The parent doesn't even exist, that's an issue.
raise exceptions.InvalidMigrationModule(
@ -149,12 +151,12 @@ class Migrations(list):
self._application = application
if not hasattr(application, 'migrations'):
try:
module = __import__(self.migrations_module(), {}, {}, [''])
module = importlib.import_module(self.migrations_module())
self._migrations = application.migrations = module
except ImportError:
if force_creation:
self.create_migrations_directory(verbose_creation)
module = __import__(self.migrations_module(), {}, {}, [''])
module = importlib.import_module(self.migrations_module())
self._migrations = application.migrations = module
else:
raise exceptions.NoMigrations(application)
@ -189,7 +191,7 @@ class Migrations(list):
return self._cache[name]
def __getitem__(self, value):
if isinstance(value, basestring):
if isinstance(value, string_types):
return self.migration(value)
return super(Migrations, self).__getitem__(value)
@ -269,7 +271,13 @@ class Migration(object):
return self.app_label() + ':' + self.name()
def __repr__(self):
return u'<Migration: %s>' % unicode(self)
return '<Migration: %s>' % str(self)
def __eq__(self, other):
return self.app_label() == other.app_label() and self.name() == other.name()
def __hash__(self):
return hash(str(self))
def app_label(self):
return self.migrations.app_label()
@ -292,13 +300,13 @@ class Migration(object):
except KeyError:
try:
migration = __import__(full_name, {}, {}, ['Migration'])
except ImportError, e:
except ImportError as e:
raise exceptions.UnknownMigration(self, sys.exc_info())
except Exception, e:
except Exception as e:
raise exceptions.BrokenMigration(self, sys.exc_info())
# Override some imports
migration._ = lambda x: x # Fake i18n
migration.datetime = datetime
migration.datetime = datetime_utils
return migration
migration = memoize(migration)
@ -411,6 +419,8 @@ class Migration(object):
return False
def prev_orm(self):
if getattr(self.migration_class(), 'symmetrical', False):
return self.orm()
previous = self.previous()
if previous is None:
# First migration? The 'previous ORM' is empty.
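The with_metaclass() change above is the standard py2/py3 compatibility move: `__metaclass__ = ...` is silently ignored on Python 3, so the metaclass must be injected through the base class list instead. A minimal, self-contained sketch of the pattern (south.utils.py3's helper presumably mirrors six.with_metaclass; this is an illustration of the idea, not a copy of South's code):

def with_metaclass(meta, *bases):
    # Build a temporary class whose metaclass intercepts creation of the
    # real class and hands it to the intended metaclass with the intended
    # bases -- this works identically on Python 2 and Python 3.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})

class Registry(type):
    "Toy metaclass standing in for MigrationsMetaclass."
    def __init__(self, name, bases, attrs):
        super(Registry, self).__init__(name, bases, attrs)
        self.instances = {}

class Migrations(with_metaclass(Registry, list)):
    pass

assert type(Migrations) is Registry   # metaclass applied on py2 and py3 alike
assert issubclass(Migrations, list)   # base class preserved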

View file

@ -1,5 +1,6 @@
from copy import copy
from cStringIO import StringIO
from __future__ import print_function
from copy import copy, deepcopy
import datetime
import inspect
import sys
@ -8,12 +9,14 @@ import traceback
from django.core.management import call_command
from django.core.management.commands import loaddata
from django.db import models
from django import VERSION as DJANGO_VERSION
import south.db
from south import exceptions
from south.db import DEFAULT_DB_ALIAS
from south.models import MigrationHistory
from south.signals import ran_migration
from south.utils.py3 import StringIO
class Migrator(object):
@ -27,7 +30,7 @@ class Migrator(object):
def print_title(self, target):
if self.verbosity:
print self.title(target)
print(self.title(target))
@staticmethod
def status(target):
@ -36,7 +39,7 @@ class Migrator(object):
def print_status(self, migration):
status = self.status(migration)
if self.verbosity and status:
print status
print(status)
@staticmethod
def orm(migration):
@ -70,50 +73,55 @@ class Migrator(object):
'%s\n'
' ! The South developers regret this has happened, and would\n'
' ! like to gently persuade you to consider a slightly\n'
' ! easier-to-deal-with DBMS.\n'
' ! easier-to-deal-with DBMS (one that supports DDL transactions)\n'
' ! NOTE: The error which caused the migration to fail is further up.'
) % extra_info
def run_migration(self, migration):
def run_migration(self, migration, database):
migration_function = self.direction(migration)
south.db.db.start_transaction()
try:
migration_function()
south.db.db.execute_deferred_sql()
if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator):
# record us as having done this in the same transaction,
# since we're not in a dry run
self.record(migration, database)
except:
south.db.db.rollback_transaction()
if not south.db.db.has_ddl_transactions:
print self.run_migration_error(migration)
print(self.run_migration_error(migration))
print("Error in migration: %s" % migration)
raise
else:
south.db.db.commit_transaction()
try:
south.db.db.commit_transaction()
except:
print("Error during commit in migration: %s" % migration)
raise
def run(self, migration):
def run(self, migration, database):
# Get the correct ORM.
south.db.db.current_orm = self.orm(migration)
# If the database doesn't support running DDL inside a transaction
# *cough*MySQL*cough* then do a dry run first.
if not south.db.db.has_ddl_transactions:
dry_run = DryRunMigrator(migrator=self, ignore_fail=False)
dry_run.run_migration(migration)
return self.run_migration(migration)
# If we're not already in a dry run, and the database doesn't support
# running DDL inside a transaction, *cough*MySQL*cough* then do a dry
# run first.
if not isinstance(getattr(self, '_wrapper', self), DryRunMigrator):
if not south.db.db.has_ddl_transactions:
dry_run = DryRunMigrator(migrator=self, ignore_fail=False)
dry_run.run_migration(migration, database)
return self.run_migration(migration, database)
def done_migrate(self, migration, database):
south.db.db.start_transaction()
try:
# Record us as having done this
self.record(migration, database)
except:
south.db.db.rollback_transaction()
raise
else:
south.db.db.commit_transaction()
def send_ran_migration(self, migration):
def send_ran_migration(self, migration, database):
ran_migration.send(None,
app=migration.app_label(),
migration=migration,
method=self.__class__.__name__.lower())
method=self.__class__.__name__.lower(),
verbosity=self.verbosity,
interactive=self.interactive,
db=database)
def migrate(self, migration, database):
"""
@ -122,9 +130,8 @@ class Migrator(object):
app = migration.migrations._migrations
migration_name = migration.name()
self.print_status(migration)
result = self.run(migration)
self.done_migrate(migration, database)
self.send_ran_migration(migration)
result = self.run(migration, database)
self.send_ran_migration(migration, database)
return result
def migrate_many(self, target, migrations, database):
@ -135,9 +142,10 @@ class MigratorWrapper(object):
def __init__(self, migrator, *args, **kwargs):
self._migrator = copy(migrator)
attributes = dict([(k, getattr(self, k))
for k in self.__class__.__dict__.iterkeys()
for k in self.__class__.__dict__
if not k.startswith('__')])
self._migrator.__dict__.update(attributes)
self._migrator.__dict__['_wrapper'] = self
def __getattr__(self, name):
return getattr(self._migrator, name)
@ -151,9 +159,11 @@ class DryRunMigrator(MigratorWrapper):
def _run_migration(self, migration):
if migration.no_dry_run():
if self.verbosity:
print " - Migration '%s' is marked for no-dry-run." % migration
print(" - Migration '%s' is marked for no-dry-run." % migration)
return
south.db.db.dry_run = True
# preserve the constraint cache as it can be mutated by the dry run
constraint_cache = deepcopy(south.db.db._constraint_cache)
if self._ignore_fail:
south.db.db.debug, old_debug = False, south.db.db.debug
pending_creates = south.db.db.get_pending_creates()
@ -171,8 +181,11 @@ class DryRunMigrator(MigratorWrapper):
south.db.db.debug = old_debug
south.db.db.clear_run_data(pending_creates)
south.db.db.dry_run = False
# restore the preserved constraint cache from before dry run was
# executed
south.db.db._constraint_cache = constraint_cache
def run_migration(self, migration):
def run_migration(self, migration, database):
try:
self._run_migration(migration)
except exceptions.FailedDryRun:
@ -180,30 +193,35 @@ class DryRunMigrator(MigratorWrapper):
return False
raise
def done_migrate(self, *args, **kwargs):
pass
def send_ran_migration(self, *args, **kwargs):
pass
class FakeMigrator(MigratorWrapper):
def run(self, migration):
def run(self, migration, database):
# Don't actually run, just record as if ran
self.record(migration, database)
if self.verbosity:
print ' (faked)'
print(' (faked)')
def send_ran_migration(self, *args, **kwargs):
pass
class LoadInitialDataMigrator(MigratorWrapper):
def load_initial_data(self, target, db='default'):
if target is None or target != target.migrations[-1]:
return
# Load initial data, if we ended up at target
if self.verbosity:
print " - Loading initial data for %s." % target.app_label()
print(" - Loading initial data for %s." % target.app_label())
if DJANGO_VERSION < (1, 6):
self.pre_1_6(target, db)
else:
self.post_1_6(target, db)
def pre_1_6(self, target, db):
# Override Django's get_apps call temporarily to only load from the
# current app
old_get_apps = models.get_apps
@ -216,6 +234,21 @@ class LoadInitialDataMigrator(MigratorWrapper):
models.get_apps = old_get_apps
loaddata.get_apps = old_get_apps
def post_1_6(self, target, db):
import django.db.models.loading
## build a new 'AppCache' object with just the app we care about.
old_cache = django.db.models.loading.cache
new_cache = django.db.models.loading.AppCache()
new_cache.get_apps = lambda: [new_cache.get_app(target.app_label())]
## monkeypatch
django.db.models.loading.cache = new_cache
try:
call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=db)
finally:
## unmonkeypatch
django.db.models.loading.cache = old_cache
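# An aside, not part of South: post_1_6() above is an instance of the
# swap-and-restore monkeypatching idiom -- replace a module-level object
# with a narrowed stand-in, do the work, and restore the original in a
# finally block so the patch can never leak. A self-contained sketch of
# the same shape (the state object and names here are made up):
import types

state = types.SimpleNamespace(cache="full app cache")

def run_with_patched_cache(new_cache, action):
    old_cache = state.cache
    state.cache = new_cache        # monkeypatch the module-level object
    try:
        return action()
    finally:
        state.cache = old_cache    # unmonkeypatch, even if action() raised

assert run_with_patched_cache("one-app cache", lambda: state.cache) == "one-app cache"
assert state.cache == "full app cache"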
def migrate_many(self, target, migrations, database):
migrator = self._migrator
result = migrator.__class__.migrate_many(migrator, target, migrations, database)
@ -254,7 +287,11 @@ class Forwards(Migrator):
def record(migration, database):
# Record us as having done this
record = MigrationHistory.for_migration(migration, database)
record.applied = datetime.datetime.utcnow()
try:
from django.utils.timezone import now
record.applied = now()
except ImportError:
record.applied = datetime.datetime.utcnow()
if database != DEFAULT_DB_ALIAS:
record.save(using=database)
else:

View file

@ -52,32 +52,43 @@ def flatten(*stack):
stack = deque(stack)
while stack:
try:
x = stack[0].next()
except AttributeError:
x = next(stack[0])
except TypeError:
stack[0] = iter(stack[0])
x = stack[0].next()
x = next(stack[0])
except StopIteration:
stack.popleft()
continue
if hasattr(x, '__iter__'):
if hasattr(x, '__iter__') and not isinstance(x, str):
stack.appendleft(x)
else:
yield x
dependency_cache = {}
def _dfs(start, get_children, path):
if (start, get_children) in dependency_cache:
return dependency_cache[(start, get_children)]
results = []
if start in path:
raise exceptions.CircularDependency(path[path.index(start):] + [start])
path.append(start)
yield start
results.append(start)
children = sorted(get_children(start), key=lambda x: str(x))
if children:
# We need to apply all the migrations this one depends on
yield (_dfs(n, get_children, path) for n in children)
# We need to apply all the migrations this one depends on
for n in children:
results = _dfs(n, get_children, path) + results
path.pop()
results = list(SortedSet(results))
dependency_cache[(start, get_children)] = results
return results
def dfs(start, get_children):
return flatten(_dfs(start, get_children, []))
return _dfs(start, get_children, [])
def depends(start, get_children):
result = SortedSet(reversed(list(dfs(start, get_children))))
return list(result)
return dfs(start, get_children)
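To make the rewritten _dfs()/depends() behaviour concrete, here is a toy run with a plain dict standing in for Migration objects (illustrative only; the real code also caches results per (start, get_children) and raises CircularDependency on cycles):

graph = {
    "app.0003": ["app.0002"],
    "app.0002": ["app.0001", "lib.0001"],
    "app.0001": [],
    "lib.0001": [],
}

def resolve(start):
    # Simplified _dfs(): every dependency ends up before the migration
    # that needs it, with later-sorted children landing earlier.
    order = [start]
    for child in sorted(graph[start]):
        order = resolve(child) + order
    # De-duplicate keeping first occurrences, like SortedSet(results).
    unique = []
    for item in order:
        if item not in unique:
            unique.append(item)
    return unique

print(resolve("app.0003"))  # ['lib.0001', 'app.0001', 'app.0002', 'app.0003']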

View file

@ -33,5 +33,5 @@ class MigrationHistory(models.Model):
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __unicode__(self):
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)

View file

@ -3,11 +3,14 @@ Like the old south.modelsparser, but using introspection where possible
rather than direct inspection of models.py.
"""
from __future__ import print_function
import datetime
import re
import decimal
from south.utils import get_attribute, auto_through
from south.utils.py3 import text_type
from django.db import models
from django.db.models.base import ModelBase, Model
@ -20,6 +23,38 @@ from django.utils import datetime_safe
NOISY = False
try:
from django.utils import timezone
except ImportError:
timezone = False
# Define any converter functions first to prevent NameErrors
def convert_on_delete_handler(value):
django_db_models_module = 'models' # relative to standard import 'django.db'
if hasattr(models, "PROTECT"):
if value in (models.CASCADE, models.PROTECT, models.DO_NOTHING, models.SET_DEFAULT):
# straightforward functions
return '%s.%s' % (django_db_models_module, value.__name__)
else:
# This is totally dependent on the implementation of django.db.models.deletion.SET
func_name = getattr(value, '__name__', None)
if func_name == 'set_on_delete':
# we must inspect the function closure to see what parameters were passed in
closure_contents = value.__closure__[0].cell_contents
if closure_contents is None:
return "%s.SET_NULL" % (django_db_models_module)
# simple function we can perhaps cope with:
elif hasattr(closure_contents, '__call__'):
raise ValueError("South does not support on_delete with SET(function) as values.")
else:
# Attempt to serialise the value
return "%s.SET(%s)" % (django_db_models_module, value_clean(closure_contents))
raise ValueError("%s was not recognized as a valid model deletion handler. Possible values: %s." % (value, ', '.join(f.__name__ for f in (models.CASCADE, models.PROTECT, models.SET, models.SET_NULL, models.SET_DEFAULT, models.DO_NOTHING))))
else:
raise ValueError("on_delete argument encountered in Django version that does not support it")
# Gives information about how to introspect certain fields.
# This is a list of triples; the first item is a list of fields it applies to,
# (note that isinstance is used, so superclasses are perfectly valid here)
@ -31,6 +66,7 @@ NOISY = False
# is an optional dict.
#
# The introspector uses the combination of all matching entries, in order.
introspection_details = [
(
(models.Field, ),
@ -50,12 +86,13 @@ introspection_details = [
(
(models.ForeignKey, models.OneToOneField),
[],
{
"to": ["rel.to", {}],
"to_field": ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}],
"related_name": ["rel.related_name", {"default": None}],
"db_index": ["db_index", {"default": True}],
},
dict([
("to", ["rel.to", {}]),
("to_field", ["rel.field_name", {"default_attr": "rel.to._meta.pk.name"}]),
("related_name", ["rel.related_name", {"default": None}]),
("db_index", ["db_index", {"default": True}]),
("on_delete", ["rel.on_delete", {"default": getattr(models, "CASCADE", None), "is_django_function": True, "converter": convert_on_delete_handler, "ignore_missing": True}])
])
),
(
(models.ManyToManyField,),
@ -85,6 +122,13 @@ introspection_details = [
"decimal_places": ["decimal_places", {"default": None}],
},
),
(
(models.SlugField, ),
[],
{
"db_index": ["db_index", {"default": True}],
},
),
(
(models.BooleanField, ),
[],
@ -120,6 +164,7 @@ allowed_fields = [
"^django\.db",
"^django\.contrib\.contenttypes\.generic",
"^django\.contrib\.localflavor",
"^django_localflavor_\w\w",
]
# Regexes of ignored fields (custom fields which look like fields, but have no column behind them)
@ -133,13 +178,11 @@ meta_details = {
"db_table": ["db_table", {"default_attr_concat": ["%s_%s", "app_label", "module_name"]}],
"db_tablespace": ["db_tablespace", {"default": settings.DEFAULT_TABLESPACE}],
"unique_together": ["unique_together", {"default": []}],
"index_together": ["index_together", {"default": [], "ignore_missing": True}],
"ordering": ["ordering", {"default": []}],
"proxy": ["proxy", {"default": False, "ignore_missing": True}],
}
# 2.4 compatability
any = lambda x: reduce(lambda y, z: y or z, x, False)
def add_introspection_rules(rules=[], patterns=[]):
"Allows you to add some introspection rules at runtime, e.g. for 3rd party apps."
@ -148,11 +191,13 @@ def add_introspection_rules(rules=[], patterns=[]):
allowed_fields.extend(patterns)
introspection_details.extend(rules)
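# Typical third-party usage of add_introspection_rules(), sketched below
# ("myapp", UpperCaseField and its "shout" kwarg are placeholder names):
#
#   from django.db import models
#   from south.modelsinspector import add_introspection_rules
#
#   class UpperCaseField(models.CharField):
#       def __init__(self, shout=False, *args, **kwargs):
#           self.shout = shout
#           super(UpperCaseField, self).__init__(*args, **kwargs)
#
#   # One rule triple: the classes it covers, positional args (none), and
#   # how to freeze the extra "shout" kwarg (read the attribute of the
#   # same name, omitting it when equal to the default). The pattern then
#   # whitelists the field for introspection.
#   add_introspection_rules(
#       rules=[(
#           (UpperCaseField,),
#           [],
#           {"shout": ["shout", {"default": False}]},
#       )],
#       patterns=["^myapp\.fields\.UpperCaseField$"],
#   )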
def add_ignored_fields(patterns):
"Allows you to add some ignore field patterns."
assert isinstance(patterns, (list, tuple))
ignored_fields.extend(patterns)
def can_ignore(field):
"""
Returns True if we know for certain that we can ignore this field, False
@ -164,6 +209,7 @@ def can_ignore(field):
return True
return False
def can_introspect(field):
"""
Returns True if we are allowed to introspect this field, False otherwise.
@ -216,9 +262,10 @@ def get_value(field, descriptor):
raise IsDefault
else:
raise
# Lazy-eval functions get eval'd.
if isinstance(value, Promise):
value = unicode(value)
value = text_type(value)
# If the value is the same as the default, omit it for clarity
if "default" in options and value == options['default']:
raise IsDefault
@ -241,16 +288,29 @@ def get_value(field, descriptor):
default_value = format % tuple(map(lambda x: get_attribute(field, x), attrs))
if value == default_value:
raise IsDefault
# Clean and return the value
return value_clean(value, options)
def value_clean(value, options={}):
"Takes a value and cleans it up (so e.g. it has timezone working right)"
# Lazy-eval functions get eval'd.
if isinstance(value, Promise):
value = text_type(value)
# Callables get called.
if callable(value) and not isinstance(value, ModelBase):
if not options.get('is_django_function', False) and callable(value) and not isinstance(value, ModelBase):
# Datetime.datetime.now is special, as we can access it from the eval
# context (and because it changes all the time; people will file bugs otherwise).
if value == datetime.datetime.now:
return "datetime.datetime.now"
if value == datetime.datetime.utcnow:
elif value == datetime.datetime.utcnow:
return "datetime.datetime.utcnow"
if value == datetime.date.today:
elif value == datetime.date.today:
return "datetime.date.today"
# In case we use Django's own now function, revert to datetime's
# original one since we'll deal with timezones on our own.
elif timezone and value == timezone.now:
return "datetime.datetime.now"
# All other callables get called.
value = value()
# Models get their own special repr()
@ -267,16 +327,32 @@ def get_value(field, descriptor):
# Make sure Decimal is converted down into a string
if isinstance(value, decimal.Decimal):
value = str(value)
# in case the value is timezone aware
datetime_types = (
datetime.datetime,
datetime.time,
datetime_safe.datetime,
)
if (timezone and isinstance(value, datetime_types) and
getattr(settings, 'USE_TZ', False) and
value is not None and timezone.is_aware(value)):
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
# datetime_safe has an improper repr value
if isinstance(value, datetime_safe.datetime):
value = datetime.datetime(*value.utctimetuple()[:7])
if isinstance(value, datetime_safe.date):
value = datetime.date(*value.timetuple()[:3])
# converting a date value to a datetime to be able to handle
# timezones later gracefully
elif isinstance(value, (datetime.date, datetime_safe.date)):
value = datetime.datetime(*value.timetuple()[:3])
# Now, apply the converter func if there is one
if "converter" in options:
value = options['converter'](value)
# Return the final value
return repr(value)
if options.get('is_django_function', False):
return value
else:
return repr(value)
def introspector(field):
@ -310,7 +386,7 @@ def get_model_fields(model, m2m=False):
# Go through all bases (that are themselves models, but not Model)
for base in model.__bases__:
if base != models.Model and issubclass(base, models.Model):
if hasattr(base, '_meta') and issubclass(base, models.Model):
if not base._meta.abstract:
# Looks like we need their fields, Ma.
inherited_fields.update(get_model_fields(base))
@ -327,7 +403,7 @@ def get_model_fields(model, m2m=False):
# Does it define a south_field_triple method?
if hasattr(field, "south_field_triple"):
if NOISY:
print " ( Nativing field: %s" % field.name
print(" ( Nativing field: %s" % field.name)
field_defs[field.name] = field.south_field_triple()
# Can we introspect it?
elif can_introspect(field):
@ -343,7 +419,7 @@ def get_model_fields(model, m2m=False):
# Shucks, no definition!
else:
if NOISY:
print " ( Nodefing field: %s" % field.name
print(" ( Nodefing field: %s" % field.name)
field_defs[field.name] = None
# If they've used the horrific hack that is order_with_respect_to, deal with
@ -371,7 +447,7 @@ def get_model_meta(model):
# This is called _ormbases as the _bases variable was previously used
# for a list of full class paths to bases, so we can't conflict.
for base in model.__bases__:
if base != models.Model and issubclass(base, models.Model):
if hasattr(base, '_meta') and issubclass(base, models.Model):
if not base._meta.abstract:
# OK, that matches our terms.
if "_ormbases" not in meta_def:

View file

@ -3,17 +3,19 @@ South's fake ORM; lets you not have to write SQL inside migrations.
Roughly emulates the real Django ORM, to a point.
"""
from __future__ import print_function
import inspect
import datetime
from django.db import models
from django.db.models.loading import cache
from django.core.exceptions import ImproperlyConfigured
from south.db import db
from south.utils import ask_for_it_by_name
from south.utils import ask_for_it_by_name, datetime_utils
from south.hacks import hacks
from south.exceptions import UnfreezeMeLater, ORMBaseNotIncluded, ImpossibleORMUnfreeze
from south.utils.py3 import string_types
class ModelsLocals(object):
@ -179,12 +181,16 @@ class _FakeORM(object):
"Evaluates the given code in the context of the migration file."
# Drag in the migration module's locals (hopefully including models.py)
fake_locals = dict(inspect.getmodule(self.cls).__dict__)
# Remove all models from that (i.e. from modern models.py), to stop pollution
for key, value in fake_locals.items():
if isinstance(value, type) and issubclass(value, models.Model) and hasattr(value, "_meta"):
del fake_locals[key]
# excluding all models from that (i.e. from modern models.py), to stop pollution
fake_locals = dict(
(key, value)
for key, value in inspect.getmodule(self.cls).__dict__.items()
if not (
isinstance(value, type)
and issubclass(value, models.Model)
and hasattr(value, "_meta")
)
)
# We add our models into the locals for the eval
fake_locals.update(dict([
@ -206,7 +212,7 @@ class _FakeORM(object):
fake_locals['_'] = lambda x: x
# Datetime; there should be no datetime direct accesses
fake_locals['datetime'] = datetime
fake_locals['datetime'] = datetime_utils
# Now, go through the requested imports and import them.
for name, value in extra_imports.items():
@ -228,7 +234,7 @@ class _FakeORM(object):
if name == "SouthFieldClass":
raise ValueError("Cannot import the required field '%s'" % value)
else:
print "WARNING: Cannot import '%s'" % value
print("WARNING: Cannot import '%s'" % value)
# Use ModelsLocals to make lookups work right for CapitalisedModels
fake_locals = ModelsLocals(fake_locals)
@ -249,7 +255,7 @@ class _FakeORM(object):
# OK, add it.
try:
results[key] = self.eval_in_context(code, app)
except (NameError, AttributeError), e:
except (NameError, AttributeError) as e:
raise ValueError("Cannot successfully create meta field '%s' for model '%s.%s': %s." % (
key, app, model, e
))
@ -266,7 +272,7 @@ class _FakeORM(object):
key = key.lower()
if key not in self.models:
raise ORMBaseNotIncluded("Cannot find ORM base %s" % key)
elif isinstance(self.models[key], basestring):
elif isinstance(self.models[key], string_types):
# Then the other model hasn't been unfrozen yet.
# We postpone ourselves; the situation will eventually resolve.
raise UnfreezeMeLater()
@ -295,7 +301,7 @@ class _FakeORM(object):
continue
elif not params:
raise ValueError("Field '%s' on model '%s.%s' has no definition." % (fname, app, name))
elif isinstance(params, (str, unicode)):
elif isinstance(params, string_types):
# It's a premade definition string! Let's hope it works...
code = params
extra_imports = {}
@ -365,7 +371,7 @@ class _FakeORM(object):
for fname, (code, extra_imports) in model._failed_fields.items():
try:
field = self.eval_in_context(code, app, extra_imports)
except (NameError, AttributeError, AssertionError, KeyError), e:
except (NameError, AttributeError, AssertionError, KeyError) as e:
# It's failed again. Complain.
raise ValueError("Cannot successfully create field '%s' for model '%s': %s." % (
fname, modelname, e

View file

@ -6,13 +6,13 @@ from django.dispatch import Signal
from django.conf import settings
# Sent at the start of the migration of an app
pre_migrate = Signal(providing_args=["app"])
pre_migrate = Signal(providing_args=["app", "verbosity", "interactive", "db"])
# Sent after each successful migration of an app
post_migrate = Signal(providing_args=["app"])
post_migrate = Signal(providing_args=["app", "verbosity", "interactive", "db"])
# Sent after each run of a particular migration in a direction
ran_migration = Signal(providing_args=["app","migration","method"])
ran_migration = Signal(providing_args=["app", "migration", "method", "verbosity", "interactive", "db"])
# Compatibility code for django.contrib.auth
# Is causing strange errors, removing for now (we might need to fix up orm first)
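For illustration, a receiver hooked to the enriched post_migrate signal might look like this (the handler and the app name are hypothetical):

from south.signals import post_migrate

def rebuild_search_index(sender, app, verbosity, interactive, db, **kwargs):
    # Runs once per migrated app; 'db' is the database alias that was used.
    if app == "blog" and verbosity:
        print("blog migrated on %r, rebuilding search index" % db)

post_migrate.connect(rebuild_search_index)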

6
south/test_shim.py Normal file
View file

@ -0,0 +1,6 @@
"""
This file is needed as Django 1.6 only finds tests in files labelled test_*,
and ignores tests/__init__.py.
"""
from south.tests import *

View file

@ -1 +0,0 @@
/*.pyc

View file

@ -1,10 +1,53 @@
from __future__ import print_function
import unittest
#import unittest
import os
import sys
from functools import wraps
from django.conf import settings
from south.hacks import hacks
# Make sure skipping tests is available.
try:
# easiest and best is unittest included in Django>=1.3
from django.utils import unittest
except ImportError:
# earlier django... use unittest from stdlib
import unittest
# however, skipUnless was only added in Python 2.7;
# if not available, we need to do something else
try:
skipUnless = unittest.skipUnless #@UnusedVariable
except AttributeError:
def skipUnless(condition, message):
def decorator(testfunc):
@wraps(testfunc)
def wrapper(self):
if condition:
# Apply method
testfunc(self)
else:
# The skip exceptions are not available either...
print("Skipping", testfunc.__name__,"--", message)
return wrapper
return decorator
# ditto for skipIf
try:
skipIf = unittest.skipIf #@UnusedVariable
except AttributeError:
def skipIf(condition, message):
def decorator(testfunc):
@wraps(testfunc)
def wrapper(self):
if condition:
print("Skipping", testfunc.__name__,"--", message)
else:
# Apply method
testfunc(self)
return wrapper
return decorator
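# With the two shims above in place, test classes can decorate methods
# uniformly whatever unittest implementation is running; a hypothetical
# example, mirroring how the db tests use them:
#
#   from south.db import db
#
#   class BackendTests(unittest.TestCase):
#
#       @skipUnless(db.has_check_constraints, "no CHECK constraint support")
#       def test_check_constraints(self):
#           ...
#
#       @skipIf(db.backend_name == "sqlite3", "not supported on SQLite")
#       def test_delete_unique(self):
#           ...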
# Add the tests directory so fakeapp is on sys.path
test_root = os.path.dirname(__file__)
sys.path.append(test_root)
@ -30,21 +73,23 @@ class Monkeypatcher(unittest.TestCase):
pass
return fake
def setUp(self):
"""
Changes the Django environment so we can run tests against our test apps.
"""
if getattr(self, 'installed_apps', None):
if hasattr(self, 'installed_apps'):
hacks.store_app_cache_state()
hacks.set_installed_apps(self.installed_apps)
# Make sure dependencies are calculated for new apps
Migrations._dependencies_done = False
def tearDown(self):
"""
Undoes what setUp did.
"""
if getattr(self, 'installed_apps', None):
if hasattr(self, 'installed_apps'):
hacks.reset_installed_apps()
hacks.restore_app_cache_state()
# Try importing all tests if asked for (then we can run 'em)
@ -55,7 +100,10 @@ except:
if not skiptest:
from south.tests.db import *
from south.tests.db_mysql import *
from south.tests.db_firebird import *
from south.tests.logic import *
from south.tests.autodetection import *
from south.tests.logger import *
from south.tests.inspector import *
from south.tests.freezer import *

View file

@ -1,6 +1,17 @@
import unittest
from south.tests import unittest
from south.creator.changes import AutoChanges
from south.creator.changes import AutoChanges, InitialChanges
from south.migration.base import Migrations
from south.tests import Monkeypatcher
from south.creator import freezer
from south.orm import FakeORM
from south.v2 import SchemaMigration
try:
from django.utils.six.moves import reload_module
except ImportError:
# Older django, no python3 support
reload_module = reload
class TestComparison(unittest.TestCase):
@ -230,4 +241,120 @@ class TestComparison(unittest.TestCase):
('django.db.models.fields.IntField', [], {'to':'hah'}),
),
True,
)
)
class TestNonManagedIgnored(Monkeypatcher):
installed_apps = ["non_managed"]
full_defs = {
'non_managed.legacy': {
'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {})
}
}
def test_not_added_init(self):
migrations = Migrations("non_managed")
changes = InitialChanges(migrations)
change_list = changes.get_changes()
if list(change_list):
self.fail("Initial migration creates table for non-managed model")
def test_not_added_auto(self):
empty_defs = { }
class EmptyMigration(SchemaMigration):
"Serves as fake previous migration"
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = empty_defs
complete_apps = ['non_managed']
migrations = Migrations("non_managed")
empty_orm = FakeORM(EmptyMigration, "non_managed")
changes = AutoChanges(
migrations = migrations,
old_defs = empty_defs,
old_orm = empty_orm,
new_defs = self.full_defs,
)
change_list = changes.get_changes()
if list(change_list):
self.fail("Auto migration creates table for non-managed model")
def test_not_deleted_auto(self):
empty_defs = { }
old_defs = freezer.freeze_apps(["non_managed"])
class InitialMigration(SchemaMigration):
"Serves as fake previous migration"
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = self.full_defs
complete_apps = ['non_managed']
migrations = Migrations("non_managed")
initial_orm = FakeORM(InitialMigration, "non_managed")
changes = AutoChanges(
migrations = migrations,
old_defs = self.full_defs,
old_orm = initial_orm,
new_defs = empty_defs,
)
change_list = changes.get_changes()
if list(change_list):
self.fail("Auto migration deletes table for non-managed model")
def test_not_modified_auto(self):
fake_defs = {
'non_managed.legacy': {
'Meta': {'object_name': 'Legacy', 'db_table': "'legacy_table'", 'managed': 'False'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True'}),
#'size': ('django.db.models.fields.IntegerField', [], {}) # The "change" is the addition of this field
}
}
class InitialMigration(SchemaMigration):
"Serves as fake previous migration"
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = fake_defs
complete_apps = ['non_managed']
from non_managed import models as dummy_import_to_force_loading_models # TODO: Does needing this indicate a bug in MonkeyPatcher?
reload_module(dummy_import_to_force_loading_models) # really force...
migrations = Migrations("non_managed")
initial_orm = FakeORM(InitialMigration, "non_managed")
changes = AutoChanges(
migrations = migrations,
old_defs = fake_defs,
old_orm = initial_orm,
new_defs = self.full_defs
)
change_list = changes.get_changes()
if list(change_list):
self.fail("Auto migration changes table for non-managed model")

View file

@ -1,7 +1,11 @@
import unittest
import datetime
from warnings import filterwarnings
from south.db import db
from django.db import connection, models
from south.db import db, generic
from django.db import connection, models, IntegrityError as DjangoIntegrityError
from south.tests import unittest, skipIf, skipUnless
from south.utils.py3 import text_type, with_metaclass
# Create a list of error classes from the various database libraries
errors = []
@ -12,6 +16,19 @@ except ImportError:
pass
errors = tuple(errors)
# On SQL Server, the backend's IntegrityError is not (a subclass of) Django's.
try:
from sql_server.pyodbc.base import IntegrityError as SQLServerIntegrityError
IntegrityError = (DjangoIntegrityError, SQLServerIntegrityError)
except ImportError:
IntegrityError = DjangoIntegrityError
try:
from south.db import mysql
except ImportError:
mysql = None
class TestOperations(unittest.TestCase):
"""
@ -21,6 +38,12 @@ class TestOperations(unittest.TestCase):
def setUp(self):
db.debug = False
try:
import MySQLdb
except ImportError:
pass
else:
filterwarnings('ignore', category=MySQLdb.Warning)
db.clear_deferred_sql()
db.start_transaction()
@ -42,22 +65,35 @@ class TestOperations(unittest.TestCase):
# Make sure we can't do the same query on an empty table
try:
cursor.execute("SELECT * FROM nottheretest1")
self.fail("Non-existent table could be selected!")
except:
pass
else:
self.fail("Non-existent table could be selected!")
@skipUnless(db.raises_default_errors, 'This database does not raise errors on missing defaults.')
def test_create_default(self):
"""
Test creation of tables, make sure defaults are not left in the database
"""
db.create_table("test_create_default", [('a', models.IntegerField()),
('b', models.IntegerField(default=17))])
cursor = connection.cursor()
self.assertRaises(IntegrityError, cursor.execute, "INSERT INTO test_create_default(a) VALUES (17)")
def test_delete(self):
"""
Test deletion of tables.
"""
cursor = connection.cursor()
db.create_table("test_deltable", [('email_confirmed', models.BooleanField(default=False))])
db.delete_table("test_deltable")
# Make sure it went
try:
cursor.execute("SELECT * FROM test1")
self.fail("Just-deleted table could be selected!")
cursor.execute("SELECT * FROM test_deltable")
except:
pass
else:
self.fail("Just-deleted table could be selected!")
def test_nonexistent_delete(self):
"""
@ -65,9 +101,10 @@ class TestOperations(unittest.TestCase):
"""
try:
db.delete_table("test_nonexistdeltable")
self.fail("Non-existent table could be deleted!")
except:
pass
else:
self.fail("Non-existent table could be deleted!")
def test_foreign_keys(self):
"""
@ -82,6 +119,22 @@ class TestOperations(unittest.TestCase):
('UNIQUE', models.ForeignKey(Test)),
])
db.execute_deferred_sql()
@skipUnless(db.supports_foreign_keys, 'Foreign keys can only be deleted on '
'engines that support them.')
def test_recursive_foreign_key_delete(self):
"""
Test that recursive foreign keys are deleted correctly (see #1065)
"""
Test = db.mock_model(model_name='Test', db_table='test_rec_fk_del',
db_tablespace='', pk_field_name='id',
pk_field_type=models.AutoField, pk_field_args=[])
db.create_table('test_rec_fk_del', [
('id', models.AutoField(primary_key=True, auto_created=True)),
('fk', models.ForeignKey(Test)),
])
db.execute_deferred_sql()
db.delete_foreign_key('test_rec_fk_del', 'fk_id')
def test_rename(self):
"""
@ -98,9 +151,10 @@ class TestOperations(unittest.TestCase):
db.start_transaction()
try:
cursor.execute("SELECT spam FROM test_rn")
self.fail("Just-renamed column could be selected!")
except:
pass
else:
self.fail("Just-renamed column could be selected!")
db.rollback_transaction()
db.delete_table("test_rn")
db.start_transaction()
@ -123,9 +177,10 @@ class TestOperations(unittest.TestCase):
db.start_transaction()
try:
cursor.execute("SELECT eggs FROM test_drn")
self.fail("Dry-renamed new column could be selected!")
except:
pass
else:
self.fail("Dry-renamed new column could be selected!")
db.rollback_transaction()
db.delete_table("test_drn")
db.start_transaction()
@ -145,9 +200,10 @@ class TestOperations(unittest.TestCase):
db.start_transaction()
try:
cursor.execute("SELECT spam FROM testtr")
self.fail("Just-renamed column could be selected!")
except:
pass
else:
self.fail("Just-renamed column could be selected!")
db.rollback_transaction()
db.delete_table("testtr2")
db.start_transaction()
@ -156,7 +212,6 @@ class TestOperations(unittest.TestCase):
"""
Test that % in a default gets escaped to %%.
"""
cursor = connection.cursor()
try:
db.create_table("testpind", [('cf', models.CharField(max_length=255, default="It should be 2%!"))])
except IndexError:
@ -205,28 +260,21 @@ class TestOperations(unittest.TestCase):
def test_primary_key_implicit(self):
"""
Tests changing primary key implicitly.
Tests that changing primary key implicitly fails.
"""
# This is ONLY important for SQLite. It's not a feature we support, but
# not implementing it means SQLite fails (due to the table-copying weirdness).
if db.backend_name != "sqlite3":
return
db.create_table("test_pki", [
('id', models.IntegerField(primary_key=True)),
('new_pkey', models.IntegerField()),
('eggs', models.IntegerField(unique=True)),
])
db.execute_deferred_sql()
# Remove the default primary key, and make eggs it
# Fiddle with alter_column to attempt to make it remove the primary key
db.alter_column("test_pki", "id", models.IntegerField())
db.alter_column("test_pki", "new_pkey", models.IntegerField(primary_key=True))
# Try inserting a now-valid row pair
# Try inserting a should-be-valid row pair
db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (1, 2, 3)")
db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (1, 3, 4)")
db.execute("INSERT INTO test_pki (id, new_pkey, eggs) VALUES (2, 2, 4)")
db.delete_table("test_pki")
def test_add_columns(self):
"""
@ -237,17 +285,51 @@ class TestOperations(unittest.TestCase):
('eggs', models.IntegerField()),
])
# Add a column
db.add_column("test_addc", "add1", models.IntegerField(default=3), keep_default=False)
# Add a FK with keep_default=False (#69)
db.add_column("test_addc", "add1", models.IntegerField(default=3))
User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
# insert some data so we can test the default value of the added fkey
db.execute("INSERT INTO test_addc (eggs, add1) VALUES (1, 2)")
db.add_column("test_addc", "user", models.ForeignKey(User, null=True), keep_default=False)
db.execute("INSERT INTO test_addc (spam, eggs, add1) VALUES (%s, 1, 2)", [False])
db.add_column("test_addc", "user", models.ForeignKey(User, null=True))
db.execute_deferred_sql()
# try selecting from the user_id column to make sure it was actually created
val = db.execute("SELECT user_id FROM test_addc")[0][0]
self.assertEquals(val, None)
db.delete_column("test_addc", "add1")
# make sure adding an indexed field works
db.add_column("test_addc", "add2", models.CharField(max_length=15, db_index=True, default='pi'))
db.execute_deferred_sql()
db.delete_table("test_addc")
def test_delete_columns(self):
"""
Test deleting columns
"""
db.create_table("test_delc", [
('spam', models.BooleanField(default=False)),
('eggs', models.IntegerField(db_index=True, unique=True)),
])
db.delete_column("test_delc", "eggs")
def test_add_nullbool_column(self):
"""
Test adding NullBoolean columns
"""
db.create_table("test_addnbc", [
('spam', models.BooleanField(default=False)),
('eggs', models.IntegerField()),
])
# Add a column
db.add_column("test_addnbc", "add1", models.NullBooleanField())
# Add a column with a default
db.add_column("test_addnbc", "add2", models.NullBooleanField(default=True))
# insert some data so we can test the default values of the added column
db.execute("INSERT INTO test_addnbc (spam, eggs) VALUES (%s, 1)", [False])
# try selecting from the new columns to make sure they were properly created
false, null1, null2 = db.execute("SELECT spam,add1,add2 FROM test_addnbc")[0][0:3]
self.assertIsNone(null1, "Null boolean field with no value inserted returns non-null")
self.assertIsNone(null2, "Null boolean field (added with default) with no value inserted returns non-null")
self.assertEquals(false, False)
db.delete_table("test_addnbc")
def test_alter_columns(self):
"""
@ -257,10 +339,78 @@ class TestOperations(unittest.TestCase):
('spam', models.BooleanField(default=False)),
('eggs', models.IntegerField()),
])
db.execute_deferred_sql()
# Change eggs to be a FloatField
db.alter_column("test_alterc", "eggs", models.FloatField())
db.execute_deferred_sql()
db.delete_table("test_alterc")
db.execute_deferred_sql()
def test_alter_char_default(self):
"""
Test altering column defaults with char fields
"""
db.create_table("test_altercd", [
('spam', models.CharField(max_length=30)),
('eggs', models.IntegerField()),
])
# Change spam default
db.alter_column("test_altercd", "spam", models.CharField(max_length=30, default="loof", null=True))
# Assert the default is not in the database
db.execute("INSERT INTO test_altercd (eggs) values (12)")
null = db.execute("SELECT spam FROM test_altercd")[0][0]
self.assertFalse(null, "Default for char field was installed into database")
# Change again to a column with default and not null
db.alter_column("test_altercd", "spam", models.CharField(max_length=30, default="loof", null=False))
# Assert the default is not in the database
if 'oracle' in db.backend_name:
# Oracle special treatment -- nulls are always allowed in char columns, so
# inserting doesn't raise an integrity error; we check again as above
db.execute("DELETE FROM test_altercd")
db.execute("INSERT INTO test_altercd (eggs) values (12)")
null = db.execute("SELECT spam FROM test_altercd")[0][0]
self.assertFalse(null, "Default for char field was installed into database")
else:
# For other backends, insert should now just fail
self.assertRaises(IntegrityError,
db.execute, "INSERT INTO test_altercd (eggs) values (12)")
@skipIf('oracle' in db.backend_name, "Oracle does not differentiate empty strings from null")
def test_default_empty_string(self):
"""
Test empty-string defaults with char fields
"""
db.create_table("test_cd_empty", [
('spam', models.CharField(max_length=30, default='')),
('eggs', models.CharField(max_length=30)),
])
# Create a record
db.execute("INSERT INTO test_cd_empty (spam, eggs) values ('1','2')")
# Add a column
db.add_column("test_cd_empty", "ham", models.CharField(max_length=30, default=''))
empty = db.execute("SELECT ham FROM test_cd_empty")[0][0]
self.assertEquals(empty, "", "Empty Default for char field isn't empty string")
@skipUnless('oracle' in db.backend_name, "Oracle does not differentiate empty strings from null")
def test_oracle_strings_null(self):
"""
Test that under Oracle, CharFields are created as null even when specified not-null,
because otherwise they would not be able to hold empty strings (which Oracle equates
with nulls).
Verify fix of #1269.
"""
db.create_table("test_ora_char_nulls", [
('spam', models.CharField(max_length=30, null=True)),
('eggs', models.CharField(max_length=30)),
])
db.add_column("test_ora_char_nulls", "ham", models.CharField(max_length=30))
db.alter_column("test_ora_char_nulls", "spam", models.CharField(max_length=30, null=False))
# So, by the look of it, we should now have three not-null columns
db.execute("INSERT INTO test_ora_char_nulls VALUES (NULL, NULL, NULL)")
def test_mysql_defaults(self):
"""
Test MySQL default handling for BLOB and TEXT.
@ -300,18 +450,18 @@ class TestOperations(unittest.TestCase):
db.delete_table("test_multiword")
@skipUnless(db.has_check_constraints, 'Only applies to databases that '
'support CHECK constraints.')
def test_alter_constraints(self):
"""
Tests that going from a PositiveIntegerField to an IntegerField drops
the constraint on the database.
"""
# Only applies to databases that support CHECK constraints
if not db.has_check_constraints:
return
# Make the test table
db.create_table("test_alterc", [
('num', models.PositiveIntegerField()),
])
db.execute_deferred_sql()
# Add in some test values
db.execute("INSERT INTO test_alterc (num) VALUES (1)")
db.execute("INSERT INTO test_alterc (num) VALUES (2)")
@ -326,21 +476,19 @@ class TestOperations(unittest.TestCase):
self.fail("Could insert a negative integer into a PositiveIntegerField.")
# Alter it to a normal IntegerField
db.alter_column("test_alterc", "num", models.IntegerField())
db.execute_deferred_sql()
# It should now work
db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
db.delete_table("test_alterc")
# We need to match up for tearDown
db.start_transaction()
@skipIf(db.backend_name == "sqlite3", "SQLite backend doesn't support this "
"yet.")
def test_unique(self):
"""
Tests creating/deleting unique constraints.
"""
# SQLite backend doesn't support this yet.
if db.backend_name == "sqlite3":
return
db.create_table("test_unique2", [
('id', models.AutoField(primary_key=True)),
])
@ -349,24 +497,32 @@ class TestOperations(unittest.TestCase):
('eggs', models.IntegerField()),
('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))),
])
db.execute_deferred_sql()
# Add a constraint
db.create_unique("test_unique", ["spam"])
db.execute_deferred_sql()
# Shouldn't do anything during dry-run
db.dry_run = True
db.delete_unique("test_unique", ["spam"])
db.dry_run = False
db.delete_unique("test_unique", ["spam"])
db.create_unique("test_unique", ["spam"])
db.commit_transaction()
db.start_transaction()
# Test it works
# Special preparations for Sql Server
if db.backend_name == "pyodbc":
db.execute("SET IDENTITY_INSERT test_unique2 ON;")
db.execute("INSERT INTO test_unique2 (id) VALUES (1)")
db.execute("INSERT INTO test_unique2 (id) VALUES (2)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (false, 1, 2)")
db.commit_transaction()
db.start_transaction()
# Test it works
TRUE = (True,)
FALSE = (False,)
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
try:
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 2, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 2, 1)", FALSE)
except:
db.rollback_transaction()
else:
@ -379,10 +535,10 @@ class TestOperations(unittest.TestCase):
db.start_transaction()
# Test similarly
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (false, 1, 2)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
try:
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 1, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", TRUE)
except:
db.rollback_transaction()
else:
@ -394,10 +550,10 @@ class TestOperations(unittest.TestCase):
db.create_unique("test_unique", ["spam", "eggs", "ham_id"])
db.start_transaction()
# Test similarly
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (false, 1, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", FALSE)
try:
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
except:
db.rollback_transaction()
else:
@ -405,9 +561,106 @@ class TestOperations(unittest.TestCase):
db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
db.start_transaction()
def test_alter_unique(self):
"""
Tests that unique constraints are not affected when
altering columns (that's handled by create_/delete_unique)
"""
db.create_table("test_alter_unique", [
('spam', models.IntegerField()),
('eggs', models.IntegerField(unique=True)),
])
db.execute_deferred_sql()
# Make sure the unique constraint is created
db.execute('INSERT INTO test_alter_unique (spam, eggs) VALUES (0, 42)')
db.commit_transaction()
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
except:
pass
else:
self.fail("Could insert the same integer twice into a unique field.")
db.rollback_transaction()
# Alter without unique=True (should not affect anything)
db.alter_column("test_alter_unique", "eggs", models.IntegerField())
# Insertion should still fail
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
except:
pass
else:
self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.")
db.rollback_transaction()
# Delete the unique index/constraint
if db.backend_name != "sqlite3":
db.delete_unique("test_alter_unique", ["eggs"])
db.delete_table("test_alter_unique")
db.start_transaction()
# Test multi-field constraint
db.create_table("test_alter_unique2", [
('spam', models.IntegerField()),
('eggs', models.IntegerField()),
])
db.create_unique('test_alter_unique2', ('spam', 'eggs'))
db.execute_deferred_sql()
db.execute('INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)')
db.commit_transaction()
# Verify that constraint works
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)")
except:
self.fail("Looks like multi-field unique constraint applied to only one field.")
db.rollback_transaction()
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)")
except:
self.fail("Looks like multi-field unique constraint applied to only one field.")
db.rollback_transaction()
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)")
except:
pass
else:
self.fail("Could insert the same pair twice into unique-together fields.")
db.rollback_transaction()
# Altering one column should not drop or modify multi-column constraint
db.alter_column("test_alter_unique2", "eggs", models.PositiveIntegerField())
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)")
except:
self.fail("Altering one column broken multi-column unique constraint.")
db.rollback_transaction()
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)")
except:
self.fail("Altering one column broken multi-column unique constraint.")
db.rollback_transaction()
db.start_transaction()
try:
db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)")
except:
pass
else:
self.fail("Could insert the same pair twice into unique-together fields after alter_column with unique=False.")
db.rollback_transaction()
db.delete_table("test_alter_unique2")
db.start_transaction()
def test_capitalised_constraints(self):
"""
Under PostgreSQL at least, capitalised constrains must be quoted.
Under PostgreSQL at least, capitalised constraints must be quoted.
"""
db.create_table("test_capconst", [
('SOMECOL', models.PositiveIntegerField(primary_key=True)),
@ -422,7 +675,85 @@ class TestOperations(unittest.TestCase):
db.create_table("test_textdef", [
('textcol', models.TextField(blank=True)),
])
def test_text_to_char(self):
"""
On Oracle, you can't simply ALTER TABLE MODIFY a textfield to a charfield
"""
value = "kawabanga"
db.create_table("test_text_to_char", [
('textcol', models.TextField()),
])
db.execute_deferred_sql()
db.execute("INSERT INTO test_text_to_char VALUES (%s)", [value])
db.alter_column("test_text_to_char", "textcol", models.CharField(max_length=100))
db.execute_deferred_sql()
after = db.execute("select * from test_text_to_char")[0][0]
self.assertEqual(value, after, "Change from text to char altered value [ %r != %r ]" % (value, after))
def test_char_to_text(self):
"""
On Oracle, you can't simply ALTER TABLE MODIFY a charfield to a textfield either
"""
value = "agnabawak"
db.create_table("test_char_to_text", [
('textcol', models.CharField(max_length=100)),
])
db.execute_deferred_sql()
db.execute("INSERT INTO test_char_to_text VALUES (%s)", [value])
db.alter_column("test_char_to_text", "textcol", models.TextField())
db.execute_deferred_sql()
after = db.execute("select * from test_char_to_text")[0][0]
after = text_type(after) # Oracle text fields return a sort of lazy string -- force evaluation
self.assertEqual(value, after, "Change from char to text altered value [ %r != %r ]" % (value, after))
@skipUnless(db.raises_default_errors, 'This database does not raise errors on missing defaults.')
def test_datetime_default(self):
"""
Test that defaults are correctly not created for datetime columns
"""
end_of_world = datetime.datetime(2012, 12, 21, 0, 0, 1)
try:
from django.utils import timezone
except ImportError:
pass
else:
from django.conf import settings
if getattr(settings, 'USE_TZ', False):
end_of_world = end_of_world.replace(tzinfo=timezone.utc)
db.create_table("test_datetime_def", [
('col0', models.IntegerField(null=True)),
('col1', models.DateTimeField(default=end_of_world)),
('col2', models.DateTimeField(null=True)),
])
db.execute_deferred_sql()
# insert a row
db.execute("INSERT INTO test_datetime_def (col0, col1, col2) values (null,%s,null)", [end_of_world])
db.alter_column("test_datetime_def", "col2", models.DateTimeField(default=end_of_world))
db.add_column("test_datetime_def", "col3", models.DateTimeField(default=end_of_world))
db.execute_deferred_sql()
db.commit_transaction()
# In the single existing row, we now expect col1=col2=col3=end_of_world...
db.start_transaction()
ends = db.execute("select col1,col2,col3 from test_datetime_def")[0]
self.failUnlessEqual(len(ends), 3)
for e in ends:
self.failUnlessEqual(e, end_of_world)
db.commit_transaction()
# ...but there should not be a default in the database for col1 or col3
for cols in ["col1,col2", "col2,col3"]:
db.start_transaction()
statement = "insert into test_datetime_def (col0,%s) values (null,%%s,%%s)" % cols
self.assertRaises(
IntegrityError,
db.execute, statement, [end_of_world, end_of_world]
)
db.rollback_transaction()
db.start_transaction() # To preserve the sanity and semantics of this test class
def test_add_unique_fk(self):
"""
Test adding a ForeignKey with unique=True or a OneToOneField
@ -431,7 +762,299 @@ class TestOperations(unittest.TestCase):
('spam', models.BooleanField(default=False))
])
db.add_column("test_add_unique_fk", "mock1", models.ForeignKey(db.mock_model('Mock', 'mock'), null=True, unique=True))
db.add_column("test_add_unique_fk", "mock2", models.OneToOneField(db.mock_model('Mock', 'mock'), null=True))
db.add_column("test_add_unique_fk", "mock1", models.ForeignKey(db.mock_model('User', 'auth_user'), null=True, unique=True))
db.add_column("test_add_unique_fk", "mock2", models.OneToOneField(db.mock_model('User', 'auth_user'), null=True))
db.execute_deferred_sql()
db.delete_table("test_add_unique_fk")
@skipUnless(db.has_check_constraints, 'Only applies to databases that '
'support CHECK constraints.')
def test_column_constraint(self):
"""
Tests that the value constraint of PositiveIntegerField is enforced at
the database level.
"""
db.create_table("test_column_constraint", [
('spam', models.PositiveIntegerField()),
])
db.execute_deferred_sql()
# Make sure we can't insert negative values
db.commit_transaction()
db.start_transaction()
try:
db.execute("INSERT INTO test_column_constraint VALUES (-42)")
except:
pass
else:
self.fail("Could insert a negative value into a PositiveIntegerField.")
db.rollback_transaction()
# remove constraint
db.alter_column("test_column_constraint", "spam", models.IntegerField())
db.execute_deferred_sql()
# make sure the insertion works now
db.execute('INSERT INTO test_column_constraint VALUES (-42)')
db.execute('DELETE FROM test_column_constraint')
# add it back again
db.alter_column("test_column_constraint", "spam", models.PositiveIntegerField())
db.execute_deferred_sql()
# it should fail again
db.start_transaction()
try:
db.execute("INSERT INTO test_column_constraint VALUES (-42)")
except:
pass
else:
self.fail("Could insert a negative value after changing an IntegerField to a PositiveIntegerField.")
db.rollback_transaction()
db.delete_table("test_column_constraint")
db.start_transaction()
def test_sql_defaults(self):
"""
Test that sql default value is correct for non-string field types.
Datetimes are handled in test_datetime_default.
"""
class CustomField(with_metaclass(models.SubfieldBase, models.CharField)):
description = 'CustomField'
def get_default(self):
if self.has_default():
if callable(self.default):
return self.default()
return self.default
return super(CustomField, self).get_default()
def get_prep_value(self, value):
if not value:
return value
return ','.join(map(str, value))
def to_python(self, value):
if not value or isinstance(value, list):
return value
return list(map(int, value.split(',')))
false_value = db.has_booleans and 'False' or '0'
defaults = (
(models.CharField(default='sukasuka'), 'DEFAULT \'sukasuka'),
(models.BooleanField(default=False), 'DEFAULT %s' % false_value),
(models.IntegerField(default=42), 'DEFAULT 42'),
(CustomField(default=[2012, 2018, 2021, 2036]), 'DEFAULT \'2012,2018,2021,2036')
)
for field, sql_test_str in defaults:
sql = db.column_sql('fish', 'YAAAAAAZ', field)
if sql_test_str not in sql:
self.fail("default sql value was not properly generated for field %r.\nSql was %s" % (field, sql))
def test_make_added_foreign_key_not_null(self):
# Table for FK to target
User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
# Table with no foreign key
db.create_table("test_fk", [
('eggs', models.IntegerField()),
])
db.execute_deferred_sql()
# Add foreign key
db.add_column("test_fk", 'foreik', models.ForeignKey(User, null=True))
db.execute_deferred_sql()
# Make the FK not null
db.alter_column("test_fk", "foreik_id", models.ForeignKey(User))
db.execute_deferred_sql()
def test_make_foreign_key_null(self):
# Table for FK to target
User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
# Table with no foreign key
db.create_table("test_make_fk_null", [
('eggs', models.IntegerField()),
('foreik', models.ForeignKey(User))
])
db.execute_deferred_sql()
# Make the FK null
db.alter_column("test_make_fk_null", "foreik_id", models.ForeignKey(User, null=True))
db.execute_deferred_sql()
def test_change_foreign_key_target(self):
# Tables for FK to target
User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
db.create_table("test_fk_changed_target", [
('eggs', models.IntegerField(primary_key=True)),
])
Egg = db.mock_model(model_name='Egg', db_table='test_fk_changed_target', db_tablespace='', pk_field_name='eggs', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
# Table with a foreign key to the wrong table
db.create_table("test_fk_changing", [
('egg', models.ForeignKey(User, null=True)),
])
db.execute_deferred_sql()
# Change foreign key pointing
db.alter_column("test_fk_changing", "egg_id", models.ForeignKey(Egg, null=True))
db.execute_deferred_sql()
# Test that it is pointing at the right table now
try:
non_user_id = db.execute("SELECT MAX(id) FROM auth_user")[0][0] + 1
except (TypeError, IndexError):
# MAX(id) returned NULL or there were no rows -- auth_user is empty, so any id works
non_user_id = 17
db.execute("INSERT INTO test_fk_changed_target (eggs) VALUES (%s)", [non_user_id])
db.execute("INSERT INTO test_fk_changing (egg_id) VALUES (%s)", [non_user_id])
db.commit_transaction()
db.start_transaction() # The test framework expects tests to end in transaction
def test_alter_double_indexed_column(self):
# Table for FK to target
User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
# Table with no foreign key
db.create_table("test_2indexed", [
('eggs', models.IntegerField()),
('foreik', models.ForeignKey(User))
])
db.create_unique("test_2indexed", ["eggs", "foreik_id"])
db.execute_deferred_sql()
# Make the FK null
db.alter_column("test_2indexed", "foreik_id", models.ForeignKey(User, null=True))
db.execute_deferred_sql()
class TestCacheGeneric(unittest.TestCase):
base_ops_cls = generic.DatabaseOperations
def setUp(self):
class CacheOps(self.base_ops_cls):
def __init__(self):
self._constraint_cache = {}
self.cache_filled = 0
self.settings = {'NAME': 'db'}
def _fill_constraint_cache(self, db, table):
self.cache_filled += 1
self._constraint_cache.setdefault(db, {})
self._constraint_cache[db].setdefault(table, {})
@generic.invalidate_table_constraints
def clear_con(self, table):
pass
@generic.copy_column_constraints
def cp_column(self, table, column_old, column_new):
pass
@generic.delete_column_constraints
def rm_column(self, table, column):
pass
@generic.copy_column_constraints
@generic.delete_column_constraints
def mv_column(self, table, column_old, column_new):
pass
def _get_setting(self, attr):
return self.settings[attr]
self.CacheOps = CacheOps
def test_cache(self):
ops = self.CacheOps()
self.assertEqual(0, ops.cache_filled)
self.assertFalse(ops.lookup_constraint('db', 'table'))
self.assertEqual(1, ops.cache_filled)
self.assertFalse(ops.lookup_constraint('db', 'table'))
self.assertEqual(1, ops.cache_filled)
ops.clear_con('table')
self.assertEqual(1, ops.cache_filled)
self.assertFalse(ops.lookup_constraint('db', 'table'))
self.assertEqual(2, ops.cache_filled)
self.assertFalse(ops.lookup_constraint('db', 'table', 'column'))
self.assertEqual(2, ops.cache_filled)
cache = ops._constraint_cache
cache['db']['table']['column'] = 'constraint'
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
self.assertEqual([('column', 'constraint')], ops.lookup_constraint('db', 'table'))
self.assertEqual(2, ops.cache_filled)
# invalidate_table_constraints
ops.clear_con('new_table')
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
self.assertEqual(2, ops.cache_filled)
self.assertFalse(ops.lookup_constraint('db', 'new_table'))
self.assertEqual(3, ops.cache_filled)
# delete_column_constraints
cache['db']['table']['column'] = 'constraint'
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
ops.rm_column('table', 'column')
self.assertEqual([], ops.lookup_constraint('db', 'table', 'column'))
self.assertEqual([], ops.lookup_constraint('db', 'table', 'noexist_column'))
# copy_column_constraints
cache['db']['table']['column'] = 'constraint'
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
ops.cp_column('table', 'column', 'column_new')
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column_new'))
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
# copy + delete
cache['db']['table']['column'] = 'constraint'
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column'))
ops.mv_column('table', 'column', 'column_new')
self.assertEqual('constraint', ops.lookup_constraint('db', 'table', 'column_new'))
self.assertEqual([], ops.lookup_constraint('db', 'table', 'column'))
def test_valid(self):
ops = self.CacheOps()
# none of these should vivify a table into a valid state
self.assertFalse(ops._is_valid_cache('db', 'table'))
self.assertFalse(ops._is_valid_cache('db', 'table'))
ops.clear_con('table')
self.assertFalse(ops._is_valid_cache('db', 'table'))
ops.rm_column('table', 'column')
self.assertFalse(ops._is_valid_cache('db', 'table'))
# these should change the cache state
ops.lookup_constraint('db', 'table')
self.assertTrue(ops._is_valid_cache('db', 'table'))
ops.lookup_constraint('db', 'table', 'column')
self.assertTrue(ops._is_valid_cache('db', 'table'))
ops.clear_con('table')
self.assertFalse(ops._is_valid_cache('db', 'table'))
def test_valid_implementation(self):
# generic fills the cache on a per-table basis
ops = self.CacheOps()
self.assertFalse(ops._is_valid_cache('db', 'table'))
self.assertFalse(ops._is_valid_cache('db', 'other_table'))
ops.lookup_constraint('db', 'table')
self.assertTrue(ops._is_valid_cache('db', 'table'))
self.assertFalse(ops._is_valid_cache('db', 'other_table'))
ops.lookup_constraint('db', 'other_table')
self.assertTrue(ops._is_valid_cache('db', 'table'))
self.assertTrue(ops._is_valid_cache('db', 'other_table'))
ops.clear_con('table')
self.assertFalse(ops._is_valid_cache('db', 'table'))
self.assertTrue(ops._is_valid_cache('db', 'other_table'))
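# A sketch (not part of the test suite) of how a backend method would opt in
# to the cache bookkeeping verified above -- the decorator drops whatever is
# cached for the table the method touches:
class ExampleOps(generic.DatabaseOperations):
    @generic.invalidate_table_constraints
    def scrub_table(self, table):
        # hypothetical DDL; anything cached for `table` is now invalidated
        pass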
if mysql:
class TestCacheMysql(TestCacheGeneric):
base_ops_cls = mysql.DatabaseOperations
def test_valid_implementation(self):
# mysql fills the cache on a per-db basis
ops = self.CacheOps()
self.assertFalse(ops._is_valid_cache('db', 'table'))
self.assertFalse(ops._is_valid_cache('db', 'other_table'))
ops.lookup_constraint('db', 'table')
self.assertTrue(ops._is_valid_cache('db', 'table'))
self.assertTrue(ops._is_valid_cache('db', 'other_table'))
ops.lookup_constraint('db', 'other_table')
self.assertTrue(ops._is_valid_cache('db', 'table'))
self.assertTrue(ops._is_valid_cache('db', 'other_table'))
ops.clear_con('table')
self.assertFalse(ops._is_valid_cache('db', 'table'))
self.assertTrue(ops._is_valid_cache('db', 'other_table'))

View file

@ -0,0 +1,39 @@
from django.db import models
from south.db import db
from south.tests import unittest, skipUnless
class FirebirdTests(unittest.TestCase):
"""
Tests Firebird-related issues
"""
def setUp(self):
print('=' * 80)
print('Begin Firebird test')
def tearDown(self):
print('End Firebird test')
print('=' * 80)
@skipUnless(db.backend_name == "firebird", "Firebird-only test")
def test_firebird_double_index_creation_1317(self):
"""
Tests foreign key creation, especially with uppercase (reserved-word) column names (see #61)
"""
Test = db.mock_model(model_name='Test',
db_table='test5a',
db_tablespace='',
pk_field_name='ID',
pk_field_type=models.AutoField,
pk_field_args=[]
)
db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
db.create_table("test5b", [
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('UNIQUE', models.ForeignKey(Test)),
])
db.execute_deferred_sql()

164
south/tests/db_mysql.py Normal file
View file

@ -0,0 +1,164 @@
# Additional MySQL-specific tests
# Written by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
# Based on tests by: aarranz
from south.tests import unittest, skipUnless
from south.db import db, generic, mysql
from django.db import connection, models
from south.utils.py3 import with_metaclass
# A class decorator may be used in lieu of this once Python 2.6 is the
# minimum supported version (class decorators were added in 2.6).
class TestMySQLOperationsMeta(type):
def __new__(mcs, name, bases, dict_):
decorator = skipUnless(db.backend_name == "mysql", 'MySQL-specific tests')
for key, method in dict_.items():
if key.startswith('test'):
dict_[key] = decorator(method)
return type.__new__(mcs, name, bases, dict_)
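# An illustrative, hypothetical class-decorator equivalent of the metaclass
# above, usable once Python 2.6 is the minimum supported version:
def skip_all_unless(condition, reason):
    def decorate(cls):
        for key, value in list(vars(cls).items()):
            if key.startswith('test') and callable(value):
                setattr(cls, key, skipUnless(condition, reason)(value))
        return cls
    return decorate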
class TestMySQLOperations(with_metaclass(TestMySQLOperationsMeta, unittest.TestCase)):
"""MySQL-specific tests"""
def setUp(self):
db.debug = False
db.clear_deferred_sql()
def tearDown(self):
pass
def _create_foreign_tables(self, main_name, reference_name):
# Create foreign table and model
Foreign = db.mock_model(model_name='Foreign', db_table=reference_name,
db_tablespace='', pk_field_name='id',
pk_field_type=models.AutoField,
pk_field_args=[])
db.create_table(reference_name, [
('id', models.AutoField(primary_key=True)),
])
# Create table with foreign key
db.create_table(main_name, [
('id', models.AutoField(primary_key=True)),
('foreign', models.ForeignKey(Foreign)),
])
return Foreign
def test_constraint_references(self):
"""Tests that referred table is reported accurately"""
main_table = 'test_cns_ref'
reference_table = 'test_cr_foreign'
db.start_transaction()
self._create_foreign_tables(main_table, reference_table)
db.execute_deferred_sql()
constraint = db._find_foreign_constraints(main_table, 'foreign_id')[0]
references = db._lookup_constraint_references(main_table, constraint)
self.assertEquals((reference_table, 'id'), references)
db.delete_table(main_table)
db.delete_table(reference_table)
def test_reverse_column_constraint(self):
"""Tests that referred column in a foreign key (ex. id) is found"""
main_table = 'test_reverse_ref'
reference_table = 'test_rr_foreign'
db.start_transaction()
self._create_foreign_tables(main_table, reference_table)
db.execute_deferred_sql()
inverse = db._lookup_reverse_constraint(reference_table, 'id')
(cname, rev_table, rev_column) = inverse[0]
self.assertEquals(main_table, rev_table)
self.assertEquals('foreign_id', rev_column)
db.delete_table(main_table)
db.delete_table(reference_table)
def test_delete_fk_column(self):
main_table = 'test_drop_foreign'
ref_table = 'test_df_ref'
self._create_foreign_tables(main_table, ref_table)
db.execute_deferred_sql()
constraints = db._find_foreign_constraints(main_table, 'foreign_id')
self.assertEquals(len(constraints), 1)
db.delete_column(main_table, 'foreign_id')
constraints = db._find_foreign_constraints(main_table, 'foreign_id')
self.assertEquals(len(constraints), 0)
db.delete_table(main_table)
db.delete_table(ref_table)
def test_rename_fk_column(self):
main_table = 'test_rename_foreign'
ref_table = 'test_rf_ref'
self._create_foreign_tables(main_table, ref_table)
db.execute_deferred_sql()
constraints = db._find_foreign_constraints(main_table, 'foreign_id')
self.assertEquals(len(constraints), 1)
db.rename_column(main_table, 'foreign_id', 'reference_id')
db.execute_deferred_sql() #Create constraints
constraints = db._find_foreign_constraints(main_table, 'reference_id')
self.assertEquals(len(constraints), 1)
db.delete_table(main_table)
db.delete_table(ref_table)
def test_rename_fk_inbound(self):
"""
Tests that the column referred to by an external column can be renamed.
Edge case, but also useful as a stepping stone to renaming tables.
"""
main_table = 'test_rename_fk_inbound'
ref_table = 'test_rfi_ref'
self._create_foreign_tables(main_table, ref_table)
db.execute_deferred_sql()
constraints = db._lookup_reverse_constraint(ref_table, 'id')
self.assertEquals(len(constraints), 1)
db.rename_column(ref_table, 'id', 'rfi_id')
db.execute_deferred_sql() #Create constraints
constraints = db._lookup_reverse_constraint(ref_table, 'rfi_id')
self.assertEquals(len(constraints), 1)
cname = db._find_foreign_constraints(main_table, 'foreign_id')[0]
(rtable, rcolumn) = db._lookup_constraint_references(main_table, cname)
self.assertEquals(rcolumn, 'rfi_id')
db.delete_table(main_table)
db.delete_table(ref_table)
def test_rename_constrained_table(self):
"""Renames a table with a foreign key column (towards another table)"""
main_table = 'test_rn_table'
ref_table = 'test_rt_ref'
renamed_table = 'test_renamed_table'
self._create_foreign_tables(main_table, ref_table)
db.execute_deferred_sql()
constraints = db._find_foreign_constraints(main_table, 'foreign_id')
self.assertEquals(len(constraints), 1)
db.rename_table(main_table, renamed_table)
db.execute_deferred_sql() #Create constraints
constraints = db._find_foreign_constraints(renamed_table, 'foreign_id')
self.assertEquals(len(constraints), 1)
(rtable, rcolumn) = db._lookup_constraint_references(
renamed_table, constraints[0])
self.assertEquals(rcolumn, 'id')
db.delete_table(renamed_table)
db.delete_table(ref_table)
def test_renamed_referenced_table(self):
"""Rename a table referred to in a foreign key"""
main_table = 'test_rn_refd_table'
ref_table = 'test_rrt_ref'
renamed_table = 'test_renamed_ref'
self._create_foreign_tables(main_table, ref_table)
db.execute_deferred_sql()
constraints = db._lookup_reverse_constraint(ref_table)
self.assertEquals(len(constraints), 1)
db.rename_table(ref_table, renamed_table)
db.execute_deferred_sql() #Create constraints
constraints = db._find_foreign_constraints(main_table, 'foreign_id')
self.assertEquals(len(constraints), 1)
(rtable, rcolumn) = db._lookup_constraint_references(
main_table, constraints[0])
self.assertEquals(renamed_table, rtable)
db.delete_table(main_table)
db.delete_table(renamed_table)

View file

@ -1 +0,0 @@
/*.pyc

View file

@ -1 +0,0 @@
/*.pyc

View file

@ -5,11 +5,11 @@ class Migration:
def forwards(self):
db.alter_column("southtest_spam", 'name', models.CharField(max_length=255, null=True))
db.alter_column("southtest_spam", 'weight', models.FloatField(null=True))
def backwards(self):
db.alter_column("southtest_spam", 'name', models.CharField(max_length=255))
db.alter_column("southtest_spam", 'weight', models.FloatField())
models = {
"fakeapp.bug135": {

View file

@ -3,17 +3,31 @@
from django.db import models
from django.contrib.auth.models import User as UserAlias
from south.modelsinspector import add_introspection_rules
on_delete_is_available = hasattr(models, "PROTECT") # models here is django.db.models
def default_func():
return "yays"
# An empty case.
class Other1(models.Model): pass
# Another one
class Other3(models.Model): pass
def get_sentinel_object():
"""
A function to return the object to be used in place of any deleted object,
when using the SET option for on_delete.
"""
# Create a new one, so we always have an instance to test with -- though an unsaved instance can't actually work as a real sentinel.
return Other3()
# Nastiness.
class HorribleModel(models.Model):
"A model to test the edge cases of model parsing"
ZERO, ONE = range(2)
ZERO, ONE = 0, 1
# First, some nice fields
name = models.CharField(max_length=255)
@ -24,6 +38,16 @@ class HorribleModel(models.Model):
o1 = models.ForeignKey(Other1)
o2 = models.ForeignKey('Other2')
if on_delete_is_available:
o_set_null_on_delete = models.ForeignKey('Other3', null=True, on_delete=models.SET_NULL)
o_cascade_delete = models.ForeignKey('Other3', null=True, on_delete=models.CASCADE, related_name="cascademe")
o_protect = models.ForeignKey('Other3', null=True, on_delete=models.PROTECT, related_name="dontcascademe")
o_default_on_delete = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET_DEFAULT, related_name="setmedefault")
o_set_on_delete_function = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET(get_sentinel_object), related_name="setsentinel")
o_set_on_delete_value = models.ForeignKey('Other3', null=True, default=1, on_delete=models.SET(get_sentinel_object()), related_name="setsentinelwithactualvalue") # dubious case
o_no_action_on_delete = models.ForeignKey('Other3', null=True, default=1, on_delete=models.DO_NOTHING, related_name="deletemeatyourperil")
# Now to something outside
user = models.ForeignKey(UserAlias, related_name="horribles")
@ -48,8 +72,40 @@ and love the bomb"""
multiline = \
models.TextField(
)
# Special case.
class Other2(models.Model):
# Try loading a field without a newline after it (inspect hates this)
close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
close_but_no_cigar = models.PositiveIntegerField(primary_key=True)
class CustomField(models.IntegerField):
def __init__(self, an_other_model, **kwargs):
super(CustomField, self).__init__(**kwargs)
self.an_other_model = an_other_model
add_introspection_rules([
(
[CustomField],
[],
{'an_other_model': ('an_other_model', {})},
),
], ['^south\.tests\.fakeapp\.models\.CustomField'])
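# The rule format above is ([field classes], [positional arg specs],
# {keyword arg specs}): each keyword spec maps a constructor kwarg to the
# attribute South reads back when freezing. A hypothetical second field for
# contrast, whose kwarg has a default and so may be omitted from the frozen
# definition:
class TaggedField(models.IntegerField):
    def __init__(self, tag="", **kwargs):
        super(TaggedField, self).__init__(**kwargs)
        self.tag = tag

add_introspection_rules([
    (
        [TaggedField],
        [],
        {'tag': ('tag', {'default': ""})},
    ),
], ['^south\.tests\.fakeapp\.models\.TaggedField'])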
class BaseModel(models.Model):
pass
class SubModel(BaseModel):
others = models.ManyToManyField(Other1)
custom = CustomField(Other2)
class CircularA(models.Model):
c = models.ForeignKey('CircularC')
class CircularB(models.Model):
a = models.ForeignKey(CircularA)
class CircularC(models.Model):
b = models.ForeignKey(CircularB)
class Recursive(models.Model):
self = models.ForeignKey('self')

15
south/tests/freezer.py Normal file
View file

@ -0,0 +1,15 @@
from south.tests import unittest
from south.creator.freezer import model_dependencies
from south.tests.fakeapp import models
class TestFreezer(unittest.TestCase):
def test_dependencies(self):
self.assertEqual(set(model_dependencies(models.SubModel)),
set([models.BaseModel, models.Other1, models.Other2]))
self.assertEqual(set(model_dependencies(models.CircularA)),
set([models.CircularA, models.CircularB, models.CircularC]))
self.assertEqual(set(model_dependencies(models.Recursive)),
set([models.Recursive]))
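# An informal usage sketch: model_dependencies() returns the model classes a
# frozen copy of SubModel would need -- its concrete base, the M2M target,
# and the model referenced by the custom field.
def _example_dependencies():
    deps = model_dependencies(models.SubModel)
    print(sorted(m.__name__ for m in deps))  # ['BaseModel', 'Other1', 'Other2']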

View file

@ -1,8 +1,13 @@
import unittest
from south.tests import Monkeypatcher
from south.modelsinspector import *
from fakeapp.models import HorribleModel
from south.tests import Monkeypatcher, skipUnless
from south.modelsinspector import (convert_on_delete_handler, get_value,
IsDefault, models, value_clean)
from fakeapp.models import HorribleModel, get_sentinel_object
on_delete_is_available = hasattr(models, "PROTECT") # models here is django.db.models
skipUnlessOnDeleteAvailable = skipUnless(on_delete_is_available, "not testing on_delete -- not available on Django<1.3")
class TestModelInspector(Monkeypatcher):
@ -46,4 +51,59 @@ class TestModelInspector(Monkeypatcher):
slug,
["unique", {"default": True}],
)
@skipUnlessOnDeleteAvailable
def test_get_value_on_delete(self):
# First validate the FK fields with on_delete options
o_set_null_on_delete = HorribleModel._meta.get_field_by_name("o_set_null_on_delete")[0]
o_cascade_delete = HorribleModel._meta.get_field_by_name("o_cascade_delete")[0]
o_protect = HorribleModel._meta.get_field_by_name("o_protect")[0]
o_default_on_delete = HorribleModel._meta.get_field_by_name("o_default_on_delete")[0]
o_set_on_delete_function = HorribleModel._meta.get_field_by_name("o_set_on_delete_function")[0]
o_set_on_delete_value = HorribleModel._meta.get_field_by_name("o_set_on_delete_value")[0]
o_no_action_on_delete = HorribleModel._meta.get_field_by_name("o_no_action_on_delete")[0]
# TODO this is repeated from the introspection_details in modelsinspector:
# better to refactor that so we can reference these settings, in case they
# must change at some point.
on_delete = ["rel.on_delete", {"default": models.CASCADE, "is_django_function": True, "converter": convert_on_delete_handler, }]
# Foreign Key cascade update/delete
self.assertRaises(
IsDefault,
get_value,
o_cascade_delete,
on_delete,
)
self.assertEqual(
get_value(o_protect, on_delete),
"models.PROTECT",
)
self.assertEqual(
get_value(o_no_action_on_delete, on_delete),
"models.DO_NOTHING",
)
self.assertEqual(
get_value(o_set_null_on_delete, on_delete),
"models.SET_NULL",
)
self.assertEqual(
get_value(o_default_on_delete, on_delete),
"models.SET_DEFAULT",
)
# For now o_set_on_delete raises, see modelsinspector.py
#self.assertEqual(
# get_value(o_set_on_delete_function, on_delete),
# "models.SET(get_sentinel_object)",
#)
self.assertRaises(
ValueError,
get_value,
o_set_on_delete_function,
on_delete,
)
self.assertEqual(
get_value(o_set_on_delete_value, on_delete),
"models.SET(%s)" % value_clean(get_sentinel_object()),
)

View file

@ -1,6 +1,9 @@
import io
import logging
import os
import unittest
import tempfile
from south.tests import unittest
import sys
from django.conf import settings
from django.db import connection, models
@ -20,13 +23,31 @@ class TestLogger(unittest.TestCase):
self.test_path = tempfile.mkstemp(suffix=".south.log")[1]
def test_db_execute_logging_nofile(self):
"Does logging degrade nicely if SOUTH_DEBUG_ON not set?"
"Does logging degrade nicely if SOUTH_LOGGING_ON not set?"
settings.SOUTH_LOGGING_ON = False # this needs to be set to False
# to avoid issues where other tests
# set this to True. settings is shared
# between these tests.
db.create_table("test9", [('email_confirmed', models.BooleanField(default=False))])
def test_db_execute_logging_off_with_basic_config(self):
"""
Does the south logger avoid outputting debug information with
south logging turned off and python logging configured with
a basic config?
"""
settings.SOUTH_LOGGING_ON = False
# Set root logger to capture WARNING and worse
logging_stream = io.StringIO()
logging.basicConfig(stream=logging_stream, level=logging.WARNING)
db.create_table("test12", [('email_confirmed', models.BooleanField(default=False))])
# since south logging is off, and our root logger is at WARNING
# we should not find DEBUG info in the log
self.assertEqual(logging_stream.getvalue(), '')
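# A hedged sketch of the settings these tests toggle; with logging enabled,
# South writes every executed statement to the named file (the path here is
# hypothetical):
def _example_enable_south_logging():
    settings.SOUTH_LOGGING_ON = True
    settings.SOUTH_LOGGING_FILE = "/tmp/south.log"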
def test_db_execute_logging_validfile(self):
"Does logging work when passing in a valid file?"
settings.SOUTH_LOGGING_ON = True
@ -59,5 +80,3 @@ class TestLogger(unittest.TestCase):
"test11",
[('email_confirmed', models.BooleanField(default=False))],
)

View file

@ -1,6 +1,11 @@
import unittest
from south.tests import unittest
import datetime
import sys
try:
set # builtin, python >=2.4
except NameError:
from sets import Set as set # in stdlib, python >=2.3
from south import exceptions
from south.migration import migrate_app
@ -507,18 +512,19 @@ class TestMigrationLogic(Monkeypatcher):
installed_apps = ["fakeapp", "otherfakeapp"]
def assertListEqual(self, list1, list2):
list1 = list(list1)
list2 = list(list2)
list1.sort()
list2.sort()
return self.assertEqual(list1, list2)
def setUp(self):
super(TestMigrationLogic, self).setUp()
MigrationHistory.objects.all().delete()
def assertListEqual(self, list1, list2, msg=None):
list1 = set(list1)
list2 = set(list2)
return self.assert_(list1 == list2, "%s is not equal to %s" % (list1, list2))
def test_find_ghost_migrations(self):
pass
def test_apply_migrations(self):
MigrationHistory.objects.all().delete()
migrations = Migrations("fakeapp")
# We should start with no migrations
@ -530,9 +536,9 @@ class TestMigrationLogic(Monkeypatcher):
# We should finish with all migrations
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
(u"fakeapp", u"0003_alter_spam"),),
(("fakeapp", "0001_spam"),
("fakeapp", "0002_eggs"),
("fakeapp", "0003_alter_spam"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
@ -544,7 +550,6 @@ class TestMigrationLogic(Monkeypatcher):
def test_migration_merge_forwards(self):
MigrationHistory.objects.all().delete()
migrations = Migrations("fakeapp")
# We should start with no migrations
@ -557,7 +562,7 @@ class TestMigrationLogic(Monkeypatcher):
# Did it go in?
self.assertListEqual(
((u"fakeapp", u"0002_eggs"),),
(("fakeapp", "0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
@ -570,7 +575,7 @@ class TestMigrationLogic(Monkeypatcher):
migrations, target_name='zero', fake=False)
try:
migrate_app(migrations, target_name=None, fake=False)
except exceptions.InconsistentMigrationHistory, e:
except exceptions.InconsistentMigrationHistory as e:
self.assertEqual(
[
(
@ -582,7 +587,7 @@ class TestMigrationLogic(Monkeypatcher):
)
try:
migrate_app(migrations, target_name="zero", fake=False)
except exceptions.InconsistentMigrationHistory, e:
except exceptions.InconsistentMigrationHistory as e:
self.assertEqual(
[
(
@ -595,7 +600,7 @@ class TestMigrationLogic(Monkeypatcher):
# Nothing should have changed (no merge mode!)
self.assertListEqual(
((u"fakeapp", u"0002_eggs"),),
(("fakeapp", "0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
@ -604,9 +609,9 @@ class TestMigrationLogic(Monkeypatcher):
# We should finish with all migrations
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
(u"fakeapp", u"0003_alter_spam"),),
(("fakeapp", "0001_spam"),
("fakeapp", "0002_eggs"),
("fakeapp", "0003_alter_spam"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
@ -620,7 +625,7 @@ class TestMigrationLogic(Monkeypatcher):
def test_alter_column_null(self):
def null_ok():
def null_ok(eat_exception=True):
from django.db import connection, transaction
# the DBAPI introspection module fails on postgres NULLs.
cursor = connection.cursor()
@ -628,14 +633,24 @@ class TestMigrationLogic(Monkeypatcher):
# SQLite has weird now()
if db.backend_name == "sqlite3":
now_func = "DATETIME('NOW')"
# So does SQLServer... should we be using a backend attribute?
elif db.backend_name == "pyodbc":
now_func = "GETDATE()"
elif db.backend_name == "oracle":
now_func = "SYSDATE"
else:
now_func = "NOW()"
try:
cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, 10.1, %s, NULL);" % now_func)
if db.backend_name == "pyodbc":
cursor.execute("SET IDENTITY_INSERT southtest_spam ON;")
cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, NULL, %s, 'whatever');" % now_func)
except:
transaction.rollback()
return False
if eat_exception:
transaction.rollback()
return False
else:
raise
else:
cursor.execute("DELETE FROM southtest_spam")
transaction.commit()
@ -648,27 +663,27 @@ class TestMigrationLogic(Monkeypatcher):
migrate_app(migrations, target_name="0002", fake=False)
self.failIf(null_ok())
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),),
(("fakeapp", "0001_spam"),
("fakeapp", "0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# after 0003, it should be NULL
migrate_app(migrations, target_name="0003", fake=False)
self.assert_(null_ok())
self.assert_(null_ok(False))
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
(u"fakeapp", u"0003_alter_spam"),),
(("fakeapp", "0001_spam"),
("fakeapp", "0002_eggs"),
("fakeapp", "0003_alter_spam"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
# make sure it is NOT NULL again
migrate_app(migrations, target_name="0002", fake=False)
self.failIf(null_ok(), 'name not null after migration')
self.failIf(null_ok(), 'weight not null after migration')
self.assertListEqual(
((u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),),
(("fakeapp", "0001_spam"),
("fakeapp", "0002_eggs"),),
MigrationHistory.objects.values_list("app_name", "migration"),
)
@ -799,7 +814,7 @@ class TestUtils(unittest.TestCase):
)
try:
depends(target, lambda n: graph[n])
except exceptions.CircularDependency, e:
except exceptions.CircularDependency as e:
self.assertEqual(trace, e.trace)
def test_depends_cycle(self):

View file

View file

@ -0,0 +1,16 @@
# -*- coding: UTF-8 -*-
"""
An app with an unmanaged model, used to test that South does not
try to manage it in any way
"""
from django.db import models
class Legacy(models.Model):
name = models.CharField(max_length=10)
size = models.IntegerField()
class Meta:
db_table = "legacy_table"
managed = False

View file

@ -1 +0,0 @@
/*.pyc

View file

@ -1 +0,0 @@
/*.pyc

View file

@ -5,28 +5,28 @@ Generally helpful utility functions.
def _ask_for_it_by_name(name):
"Returns an object referenced by absolute path."
bits = name.split(".")
bits = str(name).split(".")
## what if there is no absolute reference?
if len(bits)>1:
if len(bits) > 1:
modulename = ".".join(bits[:-1])
else:
modulename=bits[0]
modulename = bits[0]
module = __import__(modulename, {}, {}, bits[-1])
if len(bits) == 1:
return module
else:
return getattr(module, bits[-1])
def ask_for_it_by_name(name):
def ask_for_it_by_name(name):
"Returns an object referenced by absolute path. (Memoised outer wrapper)"
if name not in ask_for_it_by_name.cache:
ask_for_it_by_name.cache[name] = _ask_for_it_by_name(name)
return ask_for_it_by_name.cache[name]
ask_for_it_by_name.cache = {}
if name not in ask_for_it_by_name.cache:
ask_for_it_by_name.cache[name] = _ask_for_it_by_name(name)
return ask_for_it_by_name.cache[name]
ask_for_it_by_name.cache = {}
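# Usage sketch: repeated lookups of the same dotted path are served from the
# function-attribute cache rather than re-imported.
def _example_ask_for_it_by_name():
    TextField = ask_for_it_by_name("django.db.models.TextField")
    assert TextField is ask_for_it_by_name("django.db.models.TextField")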
def get_attribute(item, attribute):
@ -56,17 +56,17 @@ def memoize(function):
"Standard memoization decorator."
name = function.__name__
_name = '_' + name
def method(self):
if not hasattr(self, _name):
value = function(self)
setattr(self, _name, value)
return getattr(self, _name)
def invalidate():
if hasattr(method, _name):
delattr(method, _name)
method.__name__ = function.__name__
method.__doc__ = function.__doc__
method._invalidate = invalidate

View file

@ -0,0 +1,28 @@
from datetime import *
import django
from django.conf import settings
if django.VERSION[:2] >= (1, 4) and getattr(settings, 'USE_TZ', False):
from django.utils import timezone
from datetime import datetime as _datetime
class datetime(_datetime):
"""
A custom datetime.datetime class which acts as a compatibility
layer between South and Django 1.4's timezone aware datetime
instances.
It adds the default timezone (as configured in Django's settings)
automatically if no tzinfo is given.
"""
def __new__(cls, year, month, day,
hour=0, minute=0, second=0, microsecond=0, tzinfo=None):
dt = _datetime(year, month, day,
hour, minute, second, microsecond,
tzinfo=tzinfo)
if tzinfo is None:
default_timezone = timezone.get_default_timezone()
dt = timezone.make_aware(dt, default_timezone)
return dt
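# Illustrative behaviour (only when USE_TZ is enabled, per the guard above):
# naive constructor arguments come back localized to the default timezone.
def _example_aware_datetime():
    dt = datetime(2013, 12, 11, 14, 7)  # no tzinfo supplied
    assert dt.tzinfo is not None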

28
south/utils/py3.py Normal file
View file

@ -0,0 +1,28 @@
"""
Python 2 + 3 compatibility functions. This is a very small subset of six.
"""
import sys
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
text_type = str
raw_input = input
import io
StringIO = io.StringIO
else:
string_types = basestring,
text_type = unicode
raw_input = raw_input
import cStringIO
StringIO = cStringIO.StringIO
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("NewBase", (base,), {})

View file

@ -9,7 +9,9 @@ class BaseMigration(object):
def gf(self, field_name):
"Gets a field by absolute reference."
return ask_for_it_by_name(field_name)
field = ask_for_it_by_name(field_name)
field.model = FakeModel
return field
class SchemaMigration(BaseMigration):
pass
@ -17,3 +19,7 @@ class SchemaMigration(BaseMigration):
class DataMigration(BaseMigration):
# Data migrations shouldn't be dry-run
no_dry_run = True
class FakeModel(object):
"Fake model so error messages on fields don't explode"
pass
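# A hedged sketch of how frozen migrations call gf(): the dotted path
# resolves to the field class, which is then instantiated with the frozen
# keyword arguments.
def _example_gf():
    return SchemaMigration().gf('django.db.models.fields.CharField')(max_length=255)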