Display links to Lektury.Gazeta.pl on book pages and in the catalogue.
--- /dev/null
+# -*- coding: utf-8 -*-
+from south.db import db
+from django.db import models
+
+
+class Migration:
+ def forwards(self):
+ db.add_column('catalogue_tag', 'gazeta_link', models.CharField(blank=True, max_length=240))
+ db.add_column('catalogue_book', 'gazeta_link', models.CharField(blank=True, max_length=240))
+
+ def backwards(self):
+ db.delete_column('catalogue_tag', 'gazeta_link')
+ db.delete_column('catalogue_book', 'gazeta_link')
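Once applied, these columns back the gazeta_link model fields added below. With the South management commands included later in this changeset, the migration would be run roughly like this (a sketch; the 'catalogue' app label comes from this changeset):

    from django.core.management import call_command

    # Equivalent to: ./manage.py migrate catalogue
    call_command('migrate', 'catalogue')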
user = models.ForeignKey(User, blank=True, null=True)
book_count = models.IntegerField(_('book count'), default=0, blank=False, null=False)
+ gazeta_link = models.CharField(blank=True, max_length=240)
def has_description(self):
return len(self.description) > 0
_short_html = models.TextField(_('short HTML'), editable=False)
parent_number = models.IntegerField(_('parent number'), default=0)
extra_info = JSONField(_('extra information'))
+ gazeta_link = models.CharField(blank=True, max_length=240)
# Formats
xml_file = models.FileField(_('XML file'), upload_to=book_upload_path('xml'), blank=True)
South - Useable migrations for Django apps
"""
-__version__ = "0.3"
-__authors__ = ["Andrew Godwin <andrew@aeracode.org>", "Andy McCurdy <andy@andymccurdy.com>"]
\ No newline at end of file
+__version__ = "0.4"
+__authors__ = ["Andrew Godwin <andrew@aeracode.org>", "Andy McCurdy <andy@andymccurdy.com>"]
+import datetime
from django.core.management.color import no_style
from django.db import connection, transaction, models
from django.db.backends.util import truncate_name
+from django.db.models.fields import NOT_PROVIDED
from django.dispatch import dispatcher
from django.conf import settings
+
+def alias(attrname):
+ """
+ Returns a function which calls 'attrname' - for function aliasing.
+ We can't just use foo = bar, as this breaks subclassing.
+ """
+ def func(self, *args, **kwds):
+ return getattr(self, attrname)(*args, **kwds)
+ return func
+
+
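For illustration, a minimal sketch (hypothetical class names) of why a plain assignment such as the old "add_table = create_table" bypasses subclass overrides, while alias() dispatches through getattr at call time:

    class Base(object):
        def create_table(self):
            return "base"
        add_table_direct = create_table      # bound to Base.create_table for good
        add_table = alias('create_table')    # resolved on self at call time

    class Child(Base):
        def create_table(self):
            return "child"

    Child().add_table_direct()   # "base"  - the override is ignored
    Child().add_table()          # "child" - alias() picks up the override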
class DatabaseOperations(object):
"""
Some of this code comes from Django Evolution.
"""
+ # We assume the generic DB can handle DDL transactions. MySQL will change this.
+ has_ddl_transactions = True
+
def __init__(self):
self.debug = False
self.deferred_sql = []
-
+ self.dry_run = False
+ self.pending_create_signals = []
def execute(self, sql, params=[]):
"""
cursor = connection.cursor()
if self.debug:
print " = %s" % sql, params
+
+ if self.dry_run:
+ return []
+
cursor.execute(sql, params)
try:
return cursor.fetchall()
except:
return []
-
-
+
+
def add_deferred_sql(self, sql):
"""
Add a SQL statement to the deferred list, that won't be executed until
this instance's execute_deferred_sql method is run.
"""
self.deferred_sql.append(sql)
-
-
+
+
def execute_deferred_sql(self):
"""
Executes all deferred SQL, resetting the deferred_sql list
"""
for sql in self.deferred_sql:
self.execute(sql)
-
+
self.deferred_sql = []
+ def clear_deferred_sql(self):
+ """
+ Resets the deferred_sql list to empty.
+ """
+ self.deferred_sql = []
+
+
+ def clear_run_data(self):
+ """
+ Resets variables to how they should be before a run. Used for dry runs.
+ """
+ self.clear_deferred_sql()
+ self.pending_create_signals = []
+
+
def create_table(self, table_name, fields):
"""
Creates the table 'table_name'. 'fields' is a tuple of fields,
django.db.models.fields.Field object
"""
qn = connection.ops.quote_name
+
+ # allow fields to be a dictionary
+ # removed for now - philosophical reasons (this is almost certainly not what you want)
+ #try:
+ # fields = fields.items()
+ #except AttributeError:
+ # pass
+
columns = [
self.column_sql(table_name, field_name, field)
for field_name, field in fields
]
-
+
self.execute('CREATE TABLE %s (%s);' % (qn(table_name), ', '.join([col for col in columns if col])))
-
- add_table = create_table # Alias for consistency's sake
+
+ add_table = alias('create_table') # Alias for consistency's sake
def rename_table(self, old_table_name, table_name):
qn = connection.ops.quote_name
params = (qn(table_name), )
self.execute('DROP TABLE %s;' % params)
-
- drop_table = delete_table
+
+ drop_table = alias('delete_table')
- def add_column(self, table_name, name, field):
+ def clear_table(self, table_name):
+ """
+ Deletes all rows from 'table_name'.
+ """
+ qn = connection.ops.quote_name
+ params = (qn(table_name), )
+ self.execute('DELETE FROM %s;' % params)
+
+ add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
+
+ def add_column(self, table_name, name, field, keep_default=True):
"""
Adds the column 'name' to the table 'table_name'.
Uses the 'field' parameter, a django.db.models.fields.Field instance,
to generate the necessary sql
-
+
@param table_name: The name of the table to add the column to
@param name: The name of the column to add
@param field: The field to use
qn(table_name),
sql,
)
- sql = 'ALTER TABLE %s ADD COLUMN %s;' % params
+ sql = self.add_column_string % params
self.execute(sql)
-
-
+
+ # Now, drop the default if we need to
+ if not keep_default and field.default:
+ field.default = NOT_PROVIDED
+ self.alter_column(table_name, name, field, explicit_name=False)
+
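A usage sketch of keep_default (mirroring the test case added further down): with keep_default=False the default is used once to backfill existing rows, then dropped from the column definition via alter_column():

    from south.db import db
    from django.db import models

    # Existing rows receive 3; the DEFAULT clause is removed afterwards.
    db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False)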
alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
- alter_string_set_null = 'ALTER COLUMN %(column)s SET NOT NULL'
- alter_string_drop_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
-
- def alter_column(self, table_name, name, field):
+ alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
+ alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
+ allows_combined_alters = True
+
+ def alter_column(self, table_name, name, field, explicit_name=True):
"""
Alters the given column name so it will match the given field.
Note that conversion between the two by the database must be possible.
-
+ Will not automatically add _id by default; to have this behaviour, pass
+ explicit_name=False.
+
@param table_name: The name of the table to add the column to
@param name: The name of the column to alter
@param field: The new field definition to use
"""
-
+
# hook for the field to do any resolution prior to its attributes being queried
if hasattr(field, 'south_init'):
field.south_init()
-
+
qn = connection.ops.quote_name
+ # Add _id or whatever if we need to
+ if not explicit_name:
+ field.set_attributes_from_name(name)
+ name = field.column
+
# First, change the type
params = {
"column": qn(name),
"type": field.db_type(),
}
- sqls = [self.alter_string_set_type % params]
-
-
+
+ # SQLs is a list of (SQL, values) pairs.
+ sqls = [(self.alter_string_set_type % params, [])]
+
# Next, set any default
- params = (
- qn(name),
- )
-
if not field.null and field.has_default():
default = field.get_default()
- if isinstance(default, basestring):
- default = "'%s'" % default
- params += ("SET DEFAULT %s",)
+ sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (qn(name),), [default]))
else:
- params += ("DROP DEFAULT",)
-
- sqls.append('ALTER COLUMN %s %s ' % params)
-
-
+ sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (qn(name),), []))
+
+
# Next, nullity
params = {
"column": qn(name),
"type": field.db_type(),
}
if field.null:
- sqls.append(self.alter_string_drop_null % params)
+ sqls.append((self.alter_string_set_null % params, []))
else:
- sqls.append(self.alter_string_set_null % params)
-
-
+ sqls.append((self.alter_string_drop_null % params, []))
+
+
# TODO: Unique
-
- self.execute("ALTER TABLE %s %s;" % (qn(table_name), ", ".join(sqls)))
+
+ if self.allows_combined_alters:
+ sqls, values = zip(*sqls)
+ self.execute(
+ "ALTER TABLE %s %s;" % (qn(table_name), ", ".join(sqls)),
+ flatten(values),
+ )
+ else:
+ # Some databases, e.g. MySQL, don't like more than one alter at once.
+ for sql, values in sqls:
+ self.execute("ALTER TABLE %s %s;" % (qn(table_name), sql), values)
def column_sql(self, table_name, field_name, field, tablespace=''):
Creates the SQL snippet for a column. Used by add_column and add_table.
"""
qn = connection.ops.quote_name
-
+
field.set_attributes_from_name(field_name)
-
+
# hook for the field to do any resolution prior to its attributes being queried
if hasattr(field, 'south_init'):
field.south_init()
-
+
sql = field.db_type()
if sql:
field_output = [qn(field.column), sql]
if field.primary_key:
field_output.append('PRIMARY KEY')
elif field.unique:
- field_output.append('UNIQUE')
-
+ # Instead of using UNIQUE, add a unique index with a predictable name
+ self.add_deferred_sql(
+ self.create_index_sql(
+ table_name,
+ [field.column],
+ unique = True,
+ db_tablespace = tablespace,
+ )
+ )
+
tablespace = field.db_tablespace or tablespace
if tablespace and connection.features.supports_tablespaces and field.unique:
# We must specify the index tablespace inline, because we
# won't be generating a CREATE INDEX statement for this field.
field_output.append(connection.ops.tablespace_sql(tablespace, inline=True))
-
+
sql = ' '.join(field_output)
sqlparams = ()
# if the field is "NOT NULL" and a default value is provided, create the column with it
# this allows the addition of a NOT NULL field to a table with existing rows
if not field.null and field.has_default():
default = field.get_default()
+ # If the default is a callable, then call it!
+ if callable(default):
+ default = default()
+ # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
if isinstance(default, basestring):
default = "'%s'" % default.replace("'", "''")
+ elif isinstance(default, datetime.date):
+ default = "'%s'" % default
sql += " DEFAULT %s"
sqlparams = (default)
-
- if field.rel:
+
+ if field.rel and self.supports_foreign_keys:
self.add_deferred_sql(
self.foreign_key_sql(
table_name,
field.rel.to._meta.get_field(field.rel.field_name).column
)
)
-
+
if field.db_index and not field.unique:
self.add_deferred_sql(self.create_index_sql(table_name, [field.column]))
-
+
if hasattr(field, 'post_create_sql'):
style = no_style()
for stmt in field.post_create_sql(style, table_name):
return sql % sqlparams
else:
return None
-
+
+
+ supports_foreign_keys = True
+
def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
"""
Generates a full SQL statement to add a foreign key constraint
"""
+ qn = connection.ops.quote_name
constraint_name = '%s_refs_%s_%x' % (from_column_name, to_column_name, abs(hash((from_table_name, to_table_name))))
return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % (
- from_table_name,
- truncate_name(constraint_name, connection.ops.max_name_length()),
- from_column_name,
- to_table_name,
- to_column_name,
+ qn(from_table_name),
+ qn(truncate_name(constraint_name, connection.ops.max_name_length())),
+ qn(from_column_name),
+ qn(to_table_name),
+ qn(to_column_name),
connection.ops.deferrable_sql() # Django knows this
)
-
+
+
def create_index_name(self, table_name, column_names):
"""
Generate a unique name for the index
return '%s_%s%s' % (table_name, column_names[0], index_unique_name)
+
def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
"""
Generates a create index statement on 'table_name' for a list of 'column_names'
"""
+ qn = connection.ops.quote_name
if not column_names:
print "No column names supplied on which to create an index"
return ''
-
+
if db_tablespace and connection.features.supports_tablespaces:
tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
else:
tablespace_sql = ''
-
+
index_name = self.create_index_name(table_name, column_names)
qn = connection.ops.quote_name
return 'CREATE %sINDEX %s ON %s (%s)%s;' % (
unique and 'UNIQUE ' or '',
- index_name,
- table_name,
+ qn(index_name),
+ qn(table_name),
','.join([qn(field) for field in column_names]),
tablespace_sql
- )
-
+ )
+
def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
""" Executes a create index statement """
sql = self.create_index_sql(table_name, column_names, unique, db_tablespace)
self.execute(sql)
+ drop_index_string = 'DROP INDEX %(index_name)s'
+
def delete_index(self, table_name, column_names, db_tablespace=''):
"""
Deletes an index created with create_index.
This is possible using only columns due to the deterministic
index naming function which relies on column names.
"""
+ if isinstance(column_names, (str, unicode)):
+ column_names = [column_names]
name = self.create_index_name(table_name, column_names)
- sql = "DROP INDEX %s" % name
+ qn = connection.ops.quote_name
+ sql = self.drop_index_string % {"index_name": qn(name), "table_name": qn(table_name)}
self.execute(sql)
+ drop_index = alias('delete_index')
+
+ delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
def delete_column(self, table_name, name):
"""
"""
qn = connection.ops.quote_name
params = (qn(table_name), qn(name))
- self.execute('ALTER TABLE %s DROP COLUMN %s CASCADE;' % params, [])
+ self.execute(self.delete_column_string % params, [])
+
+ drop_column = alias('delete_column')
def rename_column(self, table_name, old, new):
Makes sure the following commands are inside a transaction.
Must be followed by a (commit|rollback)_transaction call.
"""
+ if self.dry_run:
+ return
transaction.commit_unless_managed()
transaction.enter_transaction_management()
transaction.managed(True)
Commits the current transaction.
Must be preceded by a start_transaction call.
"""
+ if self.dry_run:
+ return
transaction.commit()
transaction.leave_transaction_management()
Rolls back the current transaction.
Must be preceded by a start_transaction call.
"""
+ if self.dry_run:
+ return
transaction.rollback()
transaction.leave_transaction_management()
-
-
+
+
def send_create_signal(self, app_label, model_names):
+ self.pending_create_signals.append((app_label, model_names))
+
+
+ def send_pending_create_signals(self):
+ for (app_label, model_names) in self.pending_create_signals:
+ self.really_send_create_signal(app_label, model_names)
+ self.pending_create_signals = []
+
+
+ def really_send_create_signal(self, app_label, model_names):
"""
Sends a post_syncdb signal for the model specified.
-
+
If the model is not found (perhaps it's been deleted?),
no signal is sent.
-
+
TODO: The behavior of django.contrib.* apps seems flawed in that
they don't respect created_models. Rather, they blindly execute
over all models within the app sending the signal. This is a
patch we should push Django to make. For now, this should work.
"""
+ if self.debug:
+ print " - Sending post_syncdb signal for %s: %s" % (app_label, model_names)
app = models.get_app(app_label)
if not app:
return
-
+
created_models = []
for model_name in model_names:
model = models.get_model(app_label, model_name)
if model:
created_models.append(model)
-
+
if created_models:
# syncdb defaults -- perhaps take these as options?
verbosity = 1
interactive = True
-
+
if hasattr(dispatcher, "send"):
dispatcher.send(signal=models.signals.post_syncdb, sender=app,
- app=app, created_models=created_models,
- verbosity=verbosity, interactive=interactive)
+ app=app, created_models=created_models,
+ verbosity=verbosity, interactive=interactive)
else:
models.signals.post_syncdb.send(sender=app,
- app=app, created_models=created_models,
- verbosity=verbosity, interactive=interactive)
-
+ app=app, created_models=created_models,
+ verbosity=verbosity, interactive=interactive)
+
def mock_model(self, model_name, db_table, db_tablespace='',
- pk_field_name='id', pk_field_type=models.AutoField,
- pk_field_kwargs={}):
+ pk_field_name='id', pk_field_type=models.AutoField,
+ pk_field_args=[], pk_field_kwargs={}):
"""
Generates a MockModel class that provides enough information
to be used by a foreign key/many-to-many relationship.
-
+
Migrations should prefer to use these rather than actual models
as models could get deleted over time, but these can remain in
migration files forever.
if pk_field_type == models.AutoField:
pk_field_kwargs['primary_key'] = True
- self.pk = pk_field_type(**pk_field_kwargs)
+ self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
self.pk.set_attributes_from_name(pk_field_name)
self.abstract = False
MockModel._meta = MockOptions()
MockModel._meta.model = MockModel
return MockModel
+
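A usage sketch of mock_model(), borrowed from the test suite below; the add_column call is illustrative only. Mock models stand in for real models (here django.contrib.auth's User) so old migrations keep working even if the real model later changes or disappears:

    # Lightweight stand-in for auth.User, usable as a ForeignKey target.
    User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='',
                         pk_field_name='id', pk_field_type=models.AutoField,
                         pk_field_args=[], pk_field_kwargs={})
    # Illustrative: add the FK column that the catalogue Tag model declares.
    db.add_column('catalogue_tag', 'user', models.ForeignKey(User, blank=True, null=True))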
+# Single-level flattening of lists
+def flatten(ls):
+ nl = []
+ for l in ls:
+ nl += l
+ return nl
+
from django.db import connection
+from django.conf import settings
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
alter_string_set_type = ''
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
+ drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
+ allows_combined_alters = False
+ has_ddl_transactions = False
+
+ def execute(self, sql, params=[]):
+ if hasattr(settings, "DATABASE_STORAGE_ENGINE") and \
+ settings.DATABASE_STORAGE_ENGINE:
+ generic.DatabaseOperations.execute(self, "SET storage_engine=%s;" %
+ settings.DATABASE_STORAGE_ENGINE)
+ return generic.DatabaseOperations.execute(self, sql, params)
+ execute.__doc__ = generic.DatabaseOperations.execute.__doc__
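If the DATABASE_STORAGE_ENGINE hook above is wanted, a minimal settings sketch looks like this (the engine value is only an example):

    # settings.py -- makes South issue SET storage_engine before each statement.
    DATABASE_STORAGE_ENGINE = "INNODB"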
def rename_column(self, table_name, old, new):
- if old == new:
+ if old == new or self.dry_run:
return []
qn = connection.ops.quote_name
qn(table_name),
qn(old),
qn(new),
- "%s %s %s %s %s" % (
- rows[0][1],
- rows[0][2] == "YES" and "NULL" or "NOT NULL",
- rows[0][3] == "PRI" and "PRIMARY KEY" or "",
- rows[0][4] and "DEFAULT %s" % rows[0][4] or "",
- rows[0][5] or "",
- ),
+ rows[0][1],
+ rows[0][2] == "YES" and "NULL" or "NOT NULL",
+ rows[0][3] == "PRI" and "PRIMARY KEY" or "",
+ rows[0][4] and "DEFAULT " or "",
+ rows[0][4] and "%s" or "",
+ rows[0][5] or "",
)
- self.execute('ALTER TABLE %s CHANGE COLUMN %s %s %s;' % params)
-
-
+
+ sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s %s;' % params
+
+ if rows[0][4]:
+ self.execute(sql, (rows[0][4],))
+ else:
+ self.execute(sql)
+
+
def rename_table(self, old_table_name, table_name):
"""
Renames the table 'old_table_name' to 'table_name'.
return
qn = connection.ops.quote_name
params = (qn(old_table_name), qn(table_name))
- self.execute('RENAME TABLE %s TO %s;' % params)
\ No newline at end of file
+ self.execute('RENAME TABLE %s TO %s;' % params)
self.execute('ALTER TABLE %s RENAME COLUMN %s TO %s;' % params)
def rename_table(self, old_table_name, table_name):
+ "will rename the table and an associated ID sequence and primary key index"
# First, rename the table
generic.DatabaseOperations.rename_table(self, old_table_name, table_name)
# Then, try renaming the ID sequence
try:
generic.DatabaseOperations.rename_table(self, old_table_name+"_id_seq", table_name+"_id_seq")
except:
- print " ~ No such sequence (ignoring error)"
+ if self.debug:
+ print " ~ No such sequence (ignoring error)"
self.rollback_transaction()
else:
self.commit_transaction()
- self.start_transaction()
\ No newline at end of file
+ self.start_transaction()
+
+ # Rename primary key index, will not rename other indices on
+ # the table that are used by django (e.g. foreign keys). Until we
+ # figure out how, you need to do this yourself.
+ try:
+ generic.DatabaseOperations.rename_table(self, old_table_name+"_pkey", table_name+ "_pkey")
+ except:
+ if self.debug:
+ print " ~ No such primary key (ignoring error)"
+ self.rollback_transaction()
+ else:
+ self.commit_transaction()
+ self.start_transaction()
+
+
+ def rename_index(self, old_index_name, index_name):
+ "Rename an index individually"
+ generic.DatabaseOperations.rename_table(self, old_index_name, index_name)
--- /dev/null
+from django.db import connection
+from django.db.models.fields import *
+from south.db import generic
+
+class DatabaseOperations(generic.DatabaseOperations):
+ """
+ django-pyodbc (sql_server.pyodbc) implementation of database operations.
+ """
+
+ add_column_string = 'ALTER TABLE %s ADD %s;'
+ alter_string_set_type = 'ALTER COLUMN %(column)s %(type)s'
+ allows_combined_alters = False
+ delete_column_string = 'ALTER TABLE %s DROP COLUMN %s;'
+
+ def create_table(self, table_name, fields):
+ # Tweak stuff as needed
+ for name,f in fields:
+ if isinstance(f, BooleanField):
+ if f.default == True:
+ f.default = 1
+ if f.default == False:
+ f.default = 0
+
+ # Run
+ generic.DatabaseOperations.create_table(self, table_name, fields)
SQLite3 implementation of database operations.
"""
- def __init__(self):
- raise NotImplementedError("Support for SQLite3 is not yet complete.")
\ No newline at end of file
+ # SQLite ignores foreign key constraints. I wish I could.
+ supports_foreign_keys = False
+
+ # You can't add UNIQUE columns with an ALTER TABLE.
+ def add_column(self, table_name, name, field, *args, **kwds):
+ # Run ALTER TABLE with no unique column
+ unique, field._unique, field.db_index = field.unique, False, False
+ generic.DatabaseOperations.add_column(self, table_name, name, field, *args, **kwds)
+ # If it _was_ unique, make an index on it.
+ if unique:
+ self.create_index(table_name, [name], unique=True)
+
+ # SQLite doesn't have ALTER COLUMN
+ def alter_column(self, table_name, name, field, explicit_name=True):
+ """
+ Not supported under SQLite.
+ """
+ raise NotImplementedError("SQLite does not support altering columns.")
+
+ # Nor DROP COLUMN
+ def delete_column(self, table_name, name, field):
+ """
+ Not supported under SQLite.
+ """
+ raise NotImplementedError("SQLite does not support deleting columns.")
+
+ # Nor RENAME COLUMN
+ def rename_column(self, table_name, old, new):
+ """
+ Not supported under SQLite.
+ """
+ raise NotImplementedError("SQLite does not support renaming columns.")
\ No newline at end of file
+++ /dev/null
-To use this setup.py, make sure you checked out this trunk or branch into a directory called 'south', copy the setup.py into the directory above it, and off you go.
+++ /dev/null
-#!/usr/bin/python
-
-from setuptools import setup, find_packages
-
-setup(
- name='South',
- version='0.3',
- description='South: Migrations for Django',
- author='Andrew Godwin & Andy McCurdy',
- author_email='south@aeracode.org',
- url='http://south.aeracode.org/',
- packages=["south", "south.db", "south.management", "south.management.commands", "south.tests"],
-)
help='Only runs or rolls back the migration specified, and none around it.'),
make_option('--fake', action='store_true', dest='fake', default=False,
help="Pretends to do the migrations, but doesn't actually execute them."),
+
+ make_option('--db-dry-run', action='store_true', dest='db_dry_run', default=False,
+ help="Doesn't execute the SQL generated by the db methods, and doesn't store a record that the migration(s) occurred. Useful to test migrations before applying them."),
)
+ if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
+ option_list += (
+ make_option('--verbosity', action='store', dest='verbosity', default='1',
+ type='choice', choices=['0', '1', '2'],
+ help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+ )
help = "Runs migrations for all apps."
- def handle(self, app=None, target=None, skip=False, merge=False, only=False, backwards=False, fake=False, **options):
-
+ def handle(self, app=None, target=None, skip=False, merge=False, only=False, backwards=False, fake=False, db_dry_run=False, **options):
+
# Work out what the resolve mode is
resolve_mode = merge and "merge" or (skip and "skip" or None)
# Turn on db debugging
apps = [migration.get_app(app)]
else:
apps = migration.get_migrated_apps()
+ silent = options.get('verbosity', 0) == 0
for app in apps:
migration.migrate_app(
app,
resolve_mode = resolve_mode,
target_name = target,
fake = fake,
+ db_dry_run = db_dry_run,
+ silent = silent,
+ load_inital_data = True,
)
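For completeness, a sketch of driving the same dry run from Python instead of the shell (the 'catalogue' app label is an example):

    from django.core.management import call_command

    # Equivalent to: ./manage.py migrate catalogue --db-dry-run
    call_command('migrate', 'catalogue', db_dry_run=True)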
from django.db import models
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
from django.contrib.contenttypes.generic import GenericRelation
+from django.db.models.fields import FieldDoesNotExist
from optparse import make_option
from south import migration
import sys
option_list = BaseCommand.option_list + (
make_option('--model', action='append', dest='model_list', type='string',
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
+ make_option('--add-field', action='append', dest='field_list', type='string',
+ help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--initial', action='store_true', dest='initial', default=False,
help='Generate the initial schema for the app.'),
)
help = "Creates a new template migration for the given app"
- def handle(self, app=None, name="", model_list=None, initial=False, **options):
+ def handle(self, app=None, name="", model_list=None, field_list=None, initial=False, **options):
# If model_list is None, then it's an empty list
model_list = model_list or []
+ # If field_list is None, then it's an empty list
+ field_list = field_list or []
+
# make sure --model and --all aren't both specified
- if initial and model_list:
+ if initial and (model_list or field_list):
print "You cannot use --initial and other options together"
return
return
models_to_migrate.append(model)
-
+
+ # See what fields need to be included
+ fields_to_add = []
+ for field_spec in field_list:
+ model_name, field_name = field_spec.split(".", 1)
+ model = models.get_model(app, model_name)
+ if not model:
+ print "Couldn't find model '%s' in app '%s'" % (model_name, app)
+ return
+ try:
+ field = model._meta.get_field(field_name)
+ except FieldDoesNotExist:
+ print "Model '%s' doesn't have a field '%s'" % (model_name, field_name)
+ return
+ fields_to_add.append((model, field_name, field))
+
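The modelname.fieldname specs parsed above come from the new --add-field option; a sketch of invoking it (app, migration name and field spec are examples only):

    from django.core.management import call_command

    # Equivalent to: ./manage.py startmigration catalogue add_gazeta_link --add-field Book.gazeta_link
    call_command('startmigration', 'catalogue', 'add_gazeta_link',
                 field_list=['Book.gazeta_link'])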
# Make the migrations directory if it's not there
app_module_path = app_models_module.__name__.split('.')[0:-1]
try:
os.path.dirname(app_module.__file__),
"migrations",
)
+ # Make sure there's a migrations directory and __init__.py
if not os.path.isdir(migrations_dir):
print "Creating migrations directory at '%s'..." % migrations_dir
os.mkdir(migrations_dir)
+ init_path = os.path.join(migrations_dir, "__init__.py")
+ if not os.path.isfile(init_path):
# Touch the init py file
- open(os.path.join(migrations_dir, "__init__.py"), "w").close()
+ print "Creating __init__.py in '%s'..." % migrations_dir
+ open(init_path, "w").close()
# See what filename is next in line. We assume they use numbers.
migrations = migration.get_migration_names(migration.get_app(app))
highest_number = 0
)
# If there's a model, make the migration skeleton, else leave it bare
forwards, backwards = '', ''
+ if fields_to_add:
+ # First, do the added fields
+ for model, field_name, field in fields_to_add:
+ field_definition = generate_field_definition(model, field)
+
+ if isinstance(field, models.ManyToManyField):
+ # Make a mock model for each side
+ mock_model = "\n".join([
+ create_mock_model(model, " "),
+ create_mock_model(field.rel.to, " ")
+ ])
+ # And a field defn, that's actually a table creation
+ forwards += '''
+ # Mock Model
+%s
+ # Adding ManyToManyField '%s.%s'
+ db.create_table('%s', (
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('%s', models.ForeignKey(%s, null=False)),
+ ('%s', models.ForeignKey(%s, null=False))
+ )) ''' % (
+ mock_model,
+ model._meta.object_name,
+ field.name,
+ field.m2m_db_table(),
+ field.m2m_column_name()[:-3], # strip off the '_id' at the end
+ model._meta.object_name,
+ field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
+ field.rel.to._meta.object_name
+ )
+ backwards += '''
+ # Dropping ManyToManyField '%s.%s'
+ db.drop_table('%s')''' % (
+ model._meta.object_name,
+ field.name,
+ field.m2m_db_table()
+ )
+ continue
+ elif field.rel: # ForeignKey, etc.
+ mock_model = create_mock_model(field.rel.to, " ")
+ field_definition = related_field_definition(field, field_definition)
+ else:
+ mock_model = None
+
+ # If we can't get it (inspect madness?) then insert placeholder
+ if not field_definition:
+ print "Warning: Could not generate field definition for %s.%s, manual editing of migration required." % \
+ (model._meta.object_name, field.name)
+ field_definition = '<<< REPLACE THIS WITH FIELD DEFINITION FOR %s.%s >>>' % (model._meta.object_name, field.name)
+
+ if mock_model:
+ forwards += '''
+ # Mock model
+%s
+ ''' % (mock_model)
+
+ forwards += '''
+ # Adding field '%s.%s'
+ db.add_column(%r, %r, %s)
+ ''' % (
+ model._meta.object_name,
+ field.name,
+ model._meta.db_table,
+ field.name,
+ field_definition,
+ )
+ backwards += '''
+ # Deleting field '%s.%s'
+ db.delete_column(%r, %r)
+ ''' % (
+ model._meta.object_name,
+ field.name,
+ model._meta.db_table,
+ field.column,
+ )
+
if models_to_migrate:
+ # Now, do the added models
for model in models_to_migrate:
table_name = model._meta.db_table
mock_models = []
fields = []
for f in model._meta.local_fields:
- # look up the field definition to see how this was created
+
+ # Look up the field definition to see how this was created
field_definition = generate_field_definition(model, f)
- if field_definition:
+
+ # If it's a OneToOneField, and ends in _ptr, just use it
+ if isinstance(f, models.OneToOneField) and f.name.endswith("_ptr"):
+ mock_models.append(create_mock_model(f.rel.to, " "))
+ field_definition = "models.OneToOneField(%s)" % f.rel.to.__name__
+
+ # It's probably normal then
+ elif field_definition:
if isinstance(f, models.ForeignKey):
- mock_models.append(create_mock_model(f.rel.to))
+ mock_models.append(create_mock_model(f.rel.to, " "))
field_definition = related_field_definition(f, field_definition)
-
+
+ # Oh noes, no defn found
else:
print "Warning: Could not generate field definition for %s.%s, manual editing of migration required." % \
(model._meta.object_name, f.name)
+ print f, type(f)
field_definition = '<<< REPLACE THIS WITH FIELD DEFINITION FOR %s.%s >>>' % (model._meta.object_name, f.name)
forwards += '''
# Mock Models
- %s
- ''' % "\n ".join(mock_models)
+%s
+ ''' % "\n".join(mock_models)
forwards += '''
# Model '%s'
- db.create_table('%s', (
+ db.create_table(%r, (
%s
))''' % (
model._meta.object_name,
if m.rel.through:
continue
- mock_models = [create_mock_model(model), create_mock_model(m.rel.to)]
+ mock_models = [create_mock_model(model, " "), create_mock_model(m.rel.to, " ")]
forwards += '''
# Mock Models
- %s
+%s
# M2M field '%s.%s'
db.create_table('%s', (
('%s', models.ForeignKey(%s, null=False)),
('%s', models.ForeignKey(%s, null=False))
)) ''' % (
- "\n ".join(mock_models),
+ "\n".join(mock_models),
model._meta.object_name,
m.name,
m.m2m_db_table(),
"','".join(model._meta.object_name for model in models_to_migrate)
)
- else:
+ # Try sniffing the encoding using PEP 0263's method
+ encoding = None
+ first_two_lines = inspect.getsourcelines(app_models_module)[0][:2]
+ for line in first_two_lines:
+ if re.search("coding[:=]\s*([-\w.]+)", line):
+ encoding = line
+
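For reference, the kind of line the PEP 263 sniff above matches (and re-emits at the top of the generated migration) is the usual coding declaration, e.g.:

    # -*- coding: utf-8 -*-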
+ if (not forwards) and (not backwards):
forwards = '"Write your forwards migration here"'
backwards = '"Write your backwards migration here"'
fp = open(os.path.join(migrations_dir, new_filename), "w")
- fp.write("""
+ fp.write("""%s
from south.db import db
+from django.db import models
from %s.models import *
class Migration:
def backwards(self):
%s
-""" % ('.'.join(app_module_path), forwards, backwards))
+""" % (encoding or "", '.'.join(app_module_path), forwards, backwards))
fp.close()
print "Created %s." % new_filename
# the correct comment.
if test_field(stripped_definition):
return stripped_definition
-
- index = field_definition.index('#', index+1)
+
+ try:
+ index = field_definition.index('#', index+1)
+ except ValueError:
+ break
return field_definition
source = inspect.getsourcelines(model)
if not source:
raise Exception("Could not find source to model: '%s'" % (model.__name__))
-
+
# look for a line starting with the field name
start_field_re = re.compile(r'\s*%s\s*=\s*(.*)' % field.name)
for line in source[0]:
return field_definition
-def create_mock_model(model):
+def create_mock_model(model, indent=" "):
# produce a string representing the python syntax necessary for creating
# a mock model using the supplied real model
- if model._meta.pk.__class__.__module__ != 'django.db.models.fields':
+ if not model._meta.pk.__class__.__module__.startswith('django.db.models.fields'):
# we can fix this with some clever imports, but it doesn't seem necessary to
# spend time on just yet
- print "Can't generate a mock model for %s because it's primary key isn't a default django field" % model
+ print "Can't generate a mock model for %s because it's primary key isn't a default django field; it's type %s." % (model, model._meta.pk.__class__)
sys.exit()
- return "%s = db.mock_model(model_name='%s', db_table='%s', db_tablespace='%s', pk_field_name='%s', pk_field_type=models.%s)" % \
+ pk_field_args = []
+ pk_field_kwargs = {}
+ other_mocks = []
+ # If it's a OneToOneField or ForeignKey, take its first arg
+ if model._meta.pk.__class__.__name__ in ["OneToOneField", "ForeignKey"]:
+ if model._meta.pk.rel.to == model:
+ pk_field_args += ["'self'"]
+ else:
+ pk_field_args += [model._meta.pk.rel.to._meta.object_name]
+ other_mocks += [model._meta.pk.rel.to]
+
+ # Perhaps it has a max_length set?
+ if model._meta.pk.max_length:
+ pk_field_kwargs["max_length"] = model._meta.pk.max_length
+
+ return "%s%s%s = db.mock_model(model_name='%s', db_table='%s', db_tablespace='%s', pk_field_name='%s', pk_field_type=models.%s, pk_field_args=[%s], pk_field_kwargs=%r)" % \
(
+ "\n".join([create_mock_model(m, indent) for m in other_mocks]+[""]),
+ indent,
model._meta.object_name,
model._meta.object_name,
model._meta.db_table,
model._meta.db_tablespace,
model._meta.pk.name,
- model._meta.pk.__class__.__name__
- )
\ No newline at end of file
+ model._meta.pk.__class__.__name__,
+ ", ".join(pk_field_args),
+ pk_field_kwargs,
+ )
-from django.core.management.base import NoArgsCommand
+from django.core.management.base import NoArgsCommand, BaseCommand
from django.core.management.color import no_style
from django.utils.datastructures import SortedDict
from optparse import make_option
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
- make_option('--verbosity', action='store', dest='verbosity', default='1',
- type='choice', choices=['0', '1', '2'],
- help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
make_option('--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.'),
make_option('--migrate', action='store_true', dest='migrate', default=False,
help='Tells South to also perform migrations after the sync. Default for during testing, and other internal calls.'),
)
+ if '--verbosity' not in [opt.get_opt_string() for opt in BaseCommand.option_list]:
+ option_list += (
+ make_option('--verbosity', action='store', dest='verbosity', default='1',
+ type='choice', choices=['0', '1', '2'],
+ help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
+ )
help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created, except those which use migrations."
def handle_noargs(self, **options):
else:
# This is a migrated app, leave it
apps_migrated.append(app_name)
+ verbosity = int(options.get('verbosity', 0))
# Run syncdb on only the ones needed
- print "Syncing..."
+ if verbosity > 0:
+ print "Syncing..."
old_installed, settings.INSTALLED_APPS = settings.INSTALLED_APPS, apps_needing_sync
old_app_store, cache.app_store = cache.app_store, SortedDict([
(k, v) for (k, v) in cache.app_store.items()
cache.app_store = old_app_store
# Migrate if needed
if options.get('migrate', True):
- print "Migrating..."
- management.call_command('migrate')
+ if verbosity > 0:
+ print "Migrating..."
+ management.call_command('migrate', **options)
# Be obvious about what we did
- print "\nSynced:\n > %s" % "\n > ".join(apps_needing_sync)
+ if verbosity > 0:
+ print "\nSynced:\n > %s" % "\n > ".join(apps_needing_sync)
if options.get('migrate', True):
- print "\nMigrated:\n - %s" % "\n - ".join(apps_migrated)
+ if verbosity > 0:
+ print "\nMigrated:\n - %s" % "\n - ".join(apps_migrated)
else:
- print "\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated)
- print "(use ./manage.py migrate to migrate these)"
+ if verbosity > 0:
+ print "\nNot synced (use migrations):\n - %s" % "\n - ".join(apps_migrated)
+ print "(use ./manage.py migrate to migrate these)"
--- /dev/null
+from django.core import management
+from django.core.management.commands import test
+from django.core.management.commands import syncdb
+
+class Command(test.Command):
+
+ def handle(self, *args, **kwargs):
+ # point at the core syncdb command when creating tests
+ # tests should always be up to date with the most recent model structure
+ management.get_commands()
+ management._commands['syncdb'] = 'django.core'
+ super(Command, self).handle(*args, **kwargs)
\ No newline at end of file
import datetime
import os
import sys
+import traceback
from django.conf import settings
from django.db import models
+from django.core.exceptions import ImproperlyConfigured
+from django.core.management import call_command
from models import MigrationHistory
from south.db import db
return sorted([
filename[:-3]
for filename in os.listdir(os.path.dirname(app.__file__))
- if filename.endswith(".py") and filename != "__init__.py"
+ if filename.endswith(".py") and filename != "__init__.py" and not filename.startswith(".")
])
module = __import__(app.__name__ + "." + name, '', '', ['Migration'])
return module.Migration
except ImportError:
- raise ValueError("Migration %s:%s does not exist." % (get_app_name(app), name))
+ print " ! Migration %s:%s probably doesn't exist." % (get_app_name(app), name)
+ print " - Traceback:"
+ raise
def all_migrations():
return remove_duplicates(needed)
-def run_forwards(app, migrations, fake=False, silent=False):
+def run_migrations(toprint, torun, recorder, app, migrations, fake=False, db_dry_run=False, silent=False):
"""
Runs the specified migrations forwards, in order.
"""
for migration in migrations:
app_name = get_app_name(app)
if not silent:
- print " > %s: %s" % (app_name, migration)
+ print toprint % (app_name, migration)
klass = get_migration(app, migration)
+
if fake:
if not silent:
print " (faked)"
else:
- db.start_transaction()
+
+ # If the database doesn't support running DDL inside a transaction
+ # *cough*MySQL*cough* then do a dry run first.
+ if not db.has_ddl_transactions:
+ db.dry_run = True
+ db.debug, old_debug = False, db.debug
+ try:
+ getattr(klass(), torun)()
+ except:
+ traceback.print_exc()
+ print " ! Error found during dry run of migration! Aborting."
+ return False
+ db.debug = old_debug
+ db.clear_run_data()
+
+ db.dry_run = bool(db_dry_run)
+
+ if db.has_ddl_transactions:
+ db.start_transaction()
try:
- klass().forwards()
+ getattr(klass(), torun)()
db.execute_deferred_sql()
except:
- db.rollback_transaction()
- raise
+ if db.has_ddl_transactions:
+ db.rollback_transaction()
+ raise
+ else:
+ traceback.print_exc()
+ print " ! Error found during real run of migration! Aborting."
+ print
+ print " ! Since you have a database that does not support running"
+ print " ! schema-altering statements in transactions, we have had to"
+ print " ! leave it in an interim state between migrations."
+ if torun == "forwards":
+ print
+ print " ! You *might* be able to recover with:"
+ db.debug = db.dry_run = True
+ klass().backwards()
+ print
+ print " ! The South developers regret this has happened, and would"
+ print " ! like to gently persuade you to consider a slightly"
+ print " ! easier-to-deal-with DBMS."
+ return False
else:
- db.commit_transaction()
+ if db.has_ddl_transactions:
+ db.commit_transaction()
+
+ if not db_dry_run:
+ # Record us as having done this
+ recorder(app_name, migration)
+
+
+def run_forwards(app, migrations, fake=False, db_dry_run=False, silent=False):
+ """
+ Runs the specified migrations forwards, in order.
+ """
+
+ def record(app_name, migration):
# Record us as having done this
record = MigrationHistory.for_migration(app_name, migration)
record.applied = datetime.datetime.utcnow()
record.save()
-
-
-def run_backwards(app, migrations, ignore=[], fake=False, silent=False):
+
+ return run_migrations(
+ toprint = " > %s: %s",
+ torun = "forwards",
+ recorder = record,
+ app = app,
+ migrations = migrations,
+ fake = fake,
+ db_dry_run = db_dry_run,
+ silent = silent,
+ )
+
+
+def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, silent=False):
"""
Runs the specified migrations backwards, in order, skipping those
migrations in 'ignore'.
"""
- for migration in migrations:
- if migration not in ignore:
- app_name = get_app_name(app)
- if not silent:
- print " < %s: %s" % (app_name, migration)
- klass = get_migration(app, migration)
- if fake:
- if not silent:
- print " (faked)"
- else:
- db.start_transaction()
- try:
- klass().backwards()
- db.execute_deferred_sql()
- except:
- db.rollback_transaction()
- raise
- else:
- db.commit_transaction()
- # Record us as having not done this
- record = MigrationHistory.for_migration(app_name, migration)
- record.delete()
+
+ def record(app_name, migration):
+ # Record us as having not done this
+ record = MigrationHistory.for_migration(app_name, migration)
+ record.delete()
+
+ return run_migrations(
+ toprint = " < %s: %s",
+ torun = "backwards",
+ recorder = record,
+ app = app,
+ migrations = [x for x in migrations if x not in ignore],
+ fake = fake,
+ db_dry_run = db_dry_run,
+ silent = silent,
+ )
def right_side_of(x, y):
return problems
-def migrate_app(app, target_name=None, resolve_mode=None, fake=False, yes=False, silent=False):
+def migrate_app(app, target_name=None, resolve_mode=None, fake=False, db_dry_run=False, yes=False, silent=False, load_inital_data=False):
app_name = get_app_name(app)
tree = dependency_tree()
migrations = get_migration_names(app)
+ # If there aren't any, quit quizzically
+ if not migrations:
+ if not silent:
+ print "? You have no migrations for the '%s' app. You might want some." % app_name
+ return
+
if target_name not in migrations and target_name not in ["zero", None]:
matches = [x for x in migrations if x.startswith(target_name)]
if len(matches) == 1:
return
# Check there's no strange ones in the database
- ghost_migrations = [m for m in MigrationHistory.objects.filter(applied__isnull = False) if get_app(m.app_name) not in tree or m.migration not in tree[get_app(m.app_name)]]
+ ghost_migrations = []
+ for m in MigrationHistory.objects.filter(applied__isnull = False):
+ try:
+ if get_app(m.app_name) not in tree or m.migration not in tree[get_app(m.app_name)]:
+ ghost_migrations.append(m)
+ except ImproperlyConfigured:
+ pass
+
+
if ghost_migrations:
if not silent:
print " ! These migrations are in the database but not on disk:"
backwards = []
# Get the list of currently applied migrations from the db
- current_migrations = [(get_app(m.app_name), m.migration) for m in MigrationHistory.objects.filter(applied__isnull = False)]
+ current_migrations = []
+ for m in MigrationHistory.objects.filter(applied__isnull = False):
+ try:
+ current_migrations.append((get_app(m.app_name), m.migration))
+ except ImproperlyConfigured:
+ pass
direction = None
bad = False
if direction == 1:
if not silent:
print " - Migrating forwards to %s." % target_name
- for mapp, mname in forwards:
- if (mapp, mname) not in current_migrations:
- run_forwards(mapp, [mname], fake=fake, silent=silent)
+ try:
+ for mapp, mname in forwards:
+ if (mapp, mname) not in current_migrations:
+ result = run_forwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, silent=silent)
+ if result is False: # The migrations errored, but nicely.
+ return
+ finally:
+ # Call any pending post_syncdb signals
+ db.send_pending_create_signals()
+ # Now load initial data, only if we're really doing things and ended up at current
+ if not fake and not db_dry_run and load_inital_data and target_name == migrations[-1]:
+ print " - Loading initial data for %s." % app_name
+ # Override Django's get_apps call temporarily to only load from the
+ # current app
+ old_get_apps, models.get_apps = (
+ models.get_apps,
+ lambda: [models.get_app(get_app_name(app))],
+ )
+ # Load the initial fixture
+ call_command('loaddata', 'initial_data', verbosity=1)
+ # Un-override
+ models.get_apps = old_get_apps
elif direction == -1:
if not silent:
print " - Migrating backwards to just after %s." % target_name
for mapp, mname in backwards:
if (mapp, mname) in current_migrations:
- run_backwards(mapp, [mname], fake=fake, silent=silent)
+ run_backwards(mapp, [mname], fake=fake, db_dry_run=db_dry_run, silent=silent)
else:
if not silent:
- print "- Nothing to migrate."
\ No newline at end of file
+ print "- Nothing to migrate."
--- /dev/null
+#!/usr/bin/python
+
+from setuptools import setup, find_packages
+
+setup(
+ name='South',
+ version='0.4',
+ description='South: Migrations for Django',
+ long_description='South is an intelligent database migrations library for the Django web framework. It is database-independent and DVCS-friendly, and offers a whole host of other features.',
+ author='Andrew Godwin & Andy McCurdy',
+ author_email='south@aeracode.org',
+ url='http://south.aeracode.org/',
+ download_url='http://south.aeracode.org/wiki/Download',
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Framework :: Django",
+ "Intended Audience :: Developers",
+ "Intended Audience :: System Administrators",
+ "Intended Audience :: System Administrators",
+ "License :: OSI Approved :: Apache Software License",
+ "Operating System :: OS Independent",
+ "Topic :: Software Development"
+ ],
+ packages=["south", "south.db", "south.management", "south.management.commands", "south.tests", "south.tests.fakeapp", "south.tests.fakeapp.migrations"],
+ package_dir = {"south" : ""},
+)
def setUp(self):
db.debug = False
+ db.clear_deferred_sql()
def test_create(self):
"""
db.rollback_transaction()
db.start_transaction()
# Remove the table
- db.delete_table("test1")
+ db.drop_table("test1")
# Make sure it went
try:
cursor.execute("SELECT * FROM test1")
pass
db.rollback_transaction()
+ def test_foreign_keys(self):
+ """
+ Tests foreign key creation, especially uppercase (see #61)
+ """
+ Test = db.mock_model(model_name='Test', db_table='test5a',
+ db_tablespace='', pk_field_name='ID',
+ pk_field_type=models.AutoField, pk_field_args=[])
+ cursor = connection.cursor()
+ db.start_transaction()
+ db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
+ db.create_table("test5b", [
+ ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
+ ('UNIQUE', models.ForeignKey(Test)),
+ ])
+ db.execute_deferred_sql()
+ db.rollback_transaction()
+
def test_rename(self):
"""
Test column renaming
self.fail("Just-renamed column could be selected!")
except:
pass
- db.rollback_transaction()
\ No newline at end of file
+ db.rollback_transaction()
+ db.delete_table("test2")
+
+ def test_dry_rename(self):
+ """
+ Test column renaming while --dry-run is turned on (should do nothing)
+ See ticket #65
+ """
+ cursor = connection.cursor()
+ db.create_table("test2", [('spam', models.BooleanField(default=False))])
+ db.start_transaction()
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM test2")
+ # Rename it
+ db.dry_run = True
+ db.rename_column("test2", "spam", "eggs")
+ db.dry_run = False
+ cursor.execute("SELECT spam FROM test2")
+ try:
+ cursor.execute("SELECT eggs FROM test2")
+ self.fail("Dry-renamed new column could be selected!")
+ except:
+ pass
+ db.rollback_transaction()
+ db.delete_table("test2")
+
+ def test_table_rename(self):
+ """
+ Test table renaming
+ """
+ cursor = connection.cursor()
+ db.create_table("testtr", [('spam', models.BooleanField(default=False))])
+ db.start_transaction()
+ # Make sure we can select the column
+ cursor.execute("SELECT spam FROM testtr")
+ # Rename it
+ db.rename_table("testtr", "testtr2")
+ cursor.execute("SELECT spam FROM testtr2")
+ try:
+ cursor.execute("SELECT spam FROM testtr")
+ self.fail("Just-renamed column could be selected!")
+ except:
+ pass
+ db.rollback_transaction()
+ db.delete_table("testtr2")
+
+ def test_index(self):
+ """
+ Test the index operations
+ """
+ db.create_table("test3", [
+ ('SELECT', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField(unique=True)),
+ ])
+ db.execute_deferred_sql()
+ db.start_transaction()
+ # Add an index on that column
+ db.create_index("test3", ["SELECT"])
+ # Add another index on two columns
+ db.create_index("test3", ["SELECT", "eggs"])
+ # Delete them both
+ db.delete_index("test3", ["SELECT"])
+ db.delete_index("test3", ["SELECT", "eggs"])
+ # Delete the unique index
+ db.delete_index("test3", ["eggs"])
+ db.rollback_transaction()
+ db.delete_table("test3")
+
+ def test_alter(self):
+ """
+ Test altering columns/tables
+ """
+ db.create_table("test4", [
+ ('spam', models.BooleanField(default=False)),
+ ('eggs', models.IntegerField()),
+ ])
+ db.start_transaction()
+ # Add a column
+ db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False)
+ # Add a FK with keep_default=False (#69)
+ User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
+ db.add_column("test4", "user", models.ForeignKey(User), keep_default=False)
+
+ db.rollback_transaction()
+ db.delete_table("test4")
\ No newline at end of file
--- /dev/null
+from south.db import db
+from django.db import models
+
+class Migration:
+
+ def forwards(self):
+
+ db.alter_column("southtest_spam", 'name', models.CharField(max_length=255, null=True))
+
+ def backwards(self):
+
+ db.alter_column("southtest_spam", 'name', models.CharField(max_length=255))
app = self.create_test_app()
self.assertEqual(
- ["0001_spam", "0002_eggs"],
+ ["0001_spam", "0002_eggs", "0003_alter_spam"],
migration.get_migration_names(app),
)
# Can't use vanilla import, modules beginning with numbers aren't in grammar
M1 = __import__("fakeapp.migrations.0001_spam", {}, {}, ['Migration']).Migration
M2 = __import__("fakeapp.migrations.0002_eggs", {}, {}, ['Migration']).Migration
+ M3 = __import__("fakeapp.migrations.0003_alter_spam", {}, {}, ['Migration']).Migration
self.assertEqual(
- [M1, M2],
+ [M1, M2, M3],
list(migration.get_migration_classes(app)),
)
self.assertEqual(M1, migration.get_migration(app, "0001_spam"))
self.assertEqual(M2, migration.get_migration(app, "0002_eggs"))
- self.assertRaises(ValueError, migration.get_migration, app, "0001_jam")
+ self.assertRaises((ImportError, ValueError), migration.get_migration, app, "0001_jam")
def test_all_migrations(self):
{app: {
"0001_spam": migration.get_migration(app, "0001_spam"),
"0002_eggs": migration.get_migration(app, "0002_eggs"),
+ "0003_alter_spam": migration.get_migration(app, "0003_alter_spam"),
}},
migration.all_migrations(),
)
(
(u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
+ (u"fakeapp", u"0003_alter_spam"),
),
migration.MigrationHistory.objects.values_list("app_name", "migration"),
)
(
(u"fakeapp", u"0001_spam"),
(u"fakeapp", u"0002_eggs"),
+ (u"fakeapp", u"0003_alter_spam"),
),
migration.MigrationHistory.objects.values_list("app_name", "migration"),
)
# Now roll them backwards
+ migration.migrate_app(app, target_name="0002", resolve_mode=None, fake=False, silent=True)
migration.migrate_app(app, target_name="0001", resolve_mode=None, fake=True, silent=True)
migration.migrate_app(app, target_name="zero", resolve_mode=None, fake=False, silent=True)
# Finish with none
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+ def test_alter_column_null(self):
+ def null_ok():
+ from django.db import connection, transaction
+ # the DBAPI introspection module fails on postgres NULLs.
+ cursor = connection.cursor()
+ try:
+ cursor.execute("INSERT INTO southtest_spam (id, weight, expires, name) VALUES (100, 10.1, now(), NULL);")
+ except:
+ transaction.rollback()
+ return False
+ else:
+ cursor.execute("DELETE FROM southtest_spam")
+ transaction.commit()
+ return True
+
+ app = migration.get_app("fakeapp")
+ self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
+
+ # by default name is NOT NULL
+ migration.migrate_app(app, target_name="0002", resolve_mode=None, fake=False, silent=True)
+ self.failIf(null_ok())
+
+ # after 0003, it should be NULL
+ migration.migrate_app(app, target_name="0003", resolve_mode=None, fake=False, silent=True)
+ self.assert_(null_ok())
+
+ # make sure it is NOT NULL again
+ migration.migrate_app(app, target_name="0002", resolve_mode=None, fake=False, silent=True)
+ self.failIf(null_ok(), 'name not null after migration')
+
+ # finish with no migrations, otherwise other tests fail...
+ migration.migrate_app(app, target_name="zero", resolve_mode=None, fake=False, silent=True)
self.assertEqual(list(migration.MigrationHistory.objects.all()), [])
\ No newline at end of file
<a href="{{ tag.get_absolute_url }}">{{ tag }}</a>
{% endfor %}
</li>
+ </ul>
+ <h2>W innych miejscach</h2>
+ <ul>
<li><a href="{{ extra_info.about }}">Lektura na wiki projektu</a></li>
<li><a href="{{ extra_info.source_url }}">Lektura w CBN Polona</a></li>
- <li><a href="{{ book.xml_file.url }}">Kod źródłowy utworu (XML)</a></li>
+ {% if book.gazeta_link %}
+ <li><a href="{{ book.gazeta_link }}">Omówienie lektury w Lektury.Gazeta.pl</a></li>
+ {% endif %}
</ul>
</div>
<div id="themes-list">
Pobierz wszystkie książki z tej półki
</a>
{% endif %}
+ {% if last_tag.gazeta_link %}
+ <p><a href="{{ last_tag.gazeta_link }}">Przeczytaj omówienia lektur w Lektury.Gazeta.pl</a></p>
+ {% endif %}
<ol>
{% for book in object_list %}
<li>