Commit b51e6838 authored by Nelso Jost's avatar Nelso Jost

ADD: db migration management

parent dc150585
......@@ -54,6 +54,18 @@ gunicorn:
${VENVPY} prod/nginx-gunicorn/deploy.py --make-gunicorn-conf
${VENVDIR}/bin/gunicorn -c `pwd`/'.gunicorn.conf' manage:app
# --- Flask-Migrate / Alembic helpers (all run through the project venv) ---

# One-time setup: create the migrations/ directory and Alembic config.
migrate-init:
	${VENVPY} manage.py db init

# Autogenerate a migration script from detected model changes.
migrate-first:
	${VENVPY} manage.py db migrate

# Apply any pending migration scripts to the database.
migrate-upgrade:
	${VENVPY} manage.py db upgrade

# List the available `db` subcommands provided by Flask-Migrate.
migrate-help:
	${VENVPY} manage.py db --help

# Start the development server on ${PORT}.
run:
	${VENVPY} manage.py runserver --port ${PORT}
......
......@@ -27,12 +27,16 @@ from app.models import *
from app.dbmanage import DBInitCommand
from flask.ext.script import Manager, Shell
from flask_migrate import Migrate, MigrateCommand
from datetime import datetime
app = create_app()
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command("dbinit", DBInitCommand(app=app))
manager.add_command('db', MigrateCommand)
# short nicknames for commom db.session operations
for n in ('commit', 'rollback', 'flush'):
......
Generic single-database configuration.
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
config.set_main_option('sqlalchemy.url',
current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    The context is configured with only the database URL — no Engine and
    therefore no DBAPI is required.  Calls to context.execute() emit the
    generated SQL to the script output instead of running it.
    """
    context.configure(url=config.get_main_option("sqlalchemy.url"))
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] section of the config file and
    binds a live connection to the migration context.
    """

    def process_revision_directives(ctx, rev, directives):
        # Prevent an empty auto-migration from being written when the
        # schema has no changes.
        # reference: http://alembic.readthedocs.org/en/latest/cookbook.html
        if getattr(config.cmd_opts, 'autogenerate', False):
            if directives[0].upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        process_revision_directives=process_revision_directives,
        **current_app.extensions['migrate'].configure_args,
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if a migration fails.
        connection.close()
# Entry point: Alembic flips to offline mode when invoked with --sql.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
"""empty message
Revision ID: 0d44b1c7dd73
Revises:
Create Date: 2017-04-23 10:26:01.797141
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0d44b1c7dd73'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema for revision 0d44b1c7dd73.

    Tables are created parents-before-children so that every foreign key
    can resolve: lookup tables (expositions, measurements, roles, unities,
    users) first, then the dependent tables (boards, unitymeasurements,
    rawsensordata, sensors, boardsensors).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Lookup table: named expositions a board can be assigned to.
    op.create_table('expositions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=30), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    # Lookup table: measurable quantities with pt-BR/en labels.
    op.create_table('measurements',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.Column('label_br', sa.String(length=50), nullable=False),
        sa.Column('label_en', sa.String(length=50), nullable=False),
        sa.Column('short_br', sa.String(length=10), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('label_br'),
        sa.UniqueConstraint('label_en'),
        sa.UniqueConstraint('name'),
        sa.UniqueConstraint('short_br')
    )
    # Lookup table: user roles (not yet referenced by users here).
    op.create_table('roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=30), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    # Lookup table: measurement unities, with optional LaTeX rendering.
    op.create_table('unities',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('label', sa.String(length=30), nullable=False),
        sa.Column('latex_label', sa.String(length=150), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('label'),
        sa.UniqueConstraint('latex_label')
    )
    # Application accounts; password stored only as a hash.
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=30), nullable=False),
        sa.Column('email', sa.String(length=200), nullable=False),
        sa.Column('password_hash', sa.String(length=128), nullable=True),
        sa.Column('confirmed', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username')
    )
    # A physical sensor board owned by a user; lat/long kept as strings.
    op.create_table('boards',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('exposition_id', sa.Integer(), nullable=False),
        sa.Column('nickname', sa.String(length=30), nullable=False),
        sa.Column('latitude', sa.String(length=30), nullable=False),
        sa.Column('longitude', sa.String(length=30), nullable=False),
        sa.Column('description', sa.String(length=1000), nullable=True),
        sa.Column('_userhash', sa.String(length=128), nullable=True),
        sa.ForeignKeyConstraint(['exposition_id'], ['expositions.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('nickname')
    )
    # Association table: which unity applies to which measurement.
    op.create_table('unitymeasurements',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('measurement_id', sa.Integer(), nullable=False),
        sa.Column('unity_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['measurement_id'], ['measurements.id'], ),
        sa.ForeignKeyConstraint(['unity_id'], ['unities.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Raw readings posted by a board; values pickled as a blob.
    op.create_table('rawsensordata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('board_id', sa.Integer(), nullable=True),
        sa.Column('datetime', sa.DateTime(), nullable=True),
        sa.Column('sensor_values', sa.PickleType(), nullable=True),
        sa.ForeignKeyConstraint(['board_id'], ['boards.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Sensor models, each tied to one unity/measurement pair.
    op.create_table('sensors',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=20), nullable=True),
        sa.Column('description', sa.String(length=300), nullable=True),
        sa.Column('unitymeasurement_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['unitymeasurement_id'], ['unitymeasurements.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Association table: sensors mounted on a board, with an ordering.
    op.create_table('boardsensors',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('board_id', sa.Integer(), nullable=True),
        sa.Column('sensor_id', sa.Integer(), nullable=True),
        sa.Column('order', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['board_id'], ['boards.id'], ),
        sa.ForeignKeyConstraint(['sensor_id'], ['sensors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop every table created by upgrade().

    Tables are dropped children-before-parents (the exact reverse of the
    creation order) so no foreign-key constraint is violated.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in (
        'boardsensors',
        'sensors',
        'rawsensordata',
        'unitymeasurements',
        'boards',
        'users',
        'unities',
        'roles',
        'measurements',
        'expositions',
    ):
        op.drop_table(table_name)
    # ### end Alembic commands ###
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment