Get alembic up and running

I took a stab at a reasonable schema just to get started. For now I want
workflows that are working; I'll pound the thing into the correct shape
in time.
Eli Ribble 2016-05-02 08:44:54 -06:00
parent 3e4eab0802
commit 681a62bbf5
7 changed files with 304 additions and 0 deletions

alembic/README Normal file (1 addition)

@@ -0,0 +1 @@
Generic single-database configuration.

alembic/env.py Normal file (71 additions)

@@ -0,0 +1,71 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
from vanth.tables import metadata
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
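
The only project-specific wiring in env.py is the `from vanth.tables import metadata` import: pointing `target_metadata` at the application's own MetaData object is what lets `alembic revision --autogenerate` diff the declared schema against the live database. As a rough idea of what that module has to expose, here is a minimal sketch; the naming convention and the table definition are assumptions, with the `credit_card` columns mirroring the migration further down.

# Hypothetical sketch of vanth/tables.py. The only hard requirement from
# env.py's point of view is a module-level MetaData object named `metadata`.
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# A naming convention like this would let op.f('pk_credit_card') resolve
# consistently; the convention actually used by vanth is an assumption.
metadata = sa.MetaData(naming_convention={"pk": "pk_%(table_name)s"})

credit_card = sa.Table(
    'credit_card', metadata,
    sa.Column('uuid', postgresql.UUID(as_uuid=True), primary_key=True),
    sa.Column('brand', sa.String(20), nullable=False),
    sa.Column('token', sa.String(), nullable=False),
    # ...remaining columns follow the create_table call in the migration below
)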

alembic/script.py.mako Normal file (24 additions)

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
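
script.py.mako is the template Alembic renders each time a new revision file is generated, typically via `alembic revision --autogenerate -m "some message"`. The same step can be driven from Python through Alembic's command API; a minimal sketch, assuming an alembic.ini at the repository root:

# Sketch: create an autogenerated revision from Python rather than the CLI.
# The "alembic.ini" path is an assumption about this repository's layout.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.revision(cfg, message="create initial tables", autogenerate=True)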


@@ -0,0 +1,66 @@
"""create initial tables
Revision ID: 58ea73d3d07b
Revises:
Create Date: 2016-05-02 08:42:38.061749
"""
# revision identifiers, used by Alembic.
revision = '58ea73d3d07b'
down_revision = None
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
    op.create_table('credit_card',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('brand', sa.String(length=20), nullable=False),
        sa.Column('card_id', sa.String(length=100), nullable=False),
        sa.Column('country', sa.String(length=1024), nullable=False),
        sa.Column('cvc_check', sa.String(length=100), nullable=False),
        sa.Column('expiration_month', sa.Integer(), nullable=False),
        sa.Column('expiration_year', sa.Integer(), nullable=False),
        sa.Column('last_four', sa.Integer(), nullable=False),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('user_uri', sa.String(length=2048), nullable=False),
        sa.Column('created', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('deleted', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('uuid', name=op.f('pk_credit_card'))
    )
    op.create_table('ofxrecord',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('fid', sa.String(length=255), nullable=False),
        sa.Column('amount', sa.Float(), nullable=False),
        sa.Column('available', sa.Date(), nullable=True),
        sa.Column('name', sa.String(length=1024), nullable=False),
        sa.Column('posted', sa.Date(), nullable=True),
        sa.Column('memo', sa.String(length=2048), nullable=True),
        sa.Column('type', sa.String(length=255), nullable=True),
        sa.Column('created', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('uuid', name=op.f('pk_ofxrecord'))
    )
    op.create_table('ofxsource',
        sa.Column('uuid', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('fid', sa.String(length=255), nullable=False),
        sa.Column('bankid', sa.String(length=255), nullable=False),
        sa.Column('created', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('uuid', name=op.f('pk_ofxsource'))
    )
    op.create_table('ofxaccount',
    )


def downgrade():
    op.drop_table('ofxaccount')
    op.drop_table('ofxsource')
    op.drop_table('ofxrecord')
    op.drop_table('credit_card')
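
Applying this revision is `alembic upgrade head`, and `alembic downgrade base` reverses it via the drop_table calls above. For completeness, a hedged equivalent through the Python API, again assuming an alembic.ini at the repository root:

# Sketch: apply and roll back revision 58ea73d3d07b programmatically.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")           # assumed location of the ini file
command.upgrade(cfg, "58ea73d3d07b")  # or "head"
command.downgrade(cfg, "base")        # drops all four tables again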