final migration issues fixed
@@ -2,7 +2,7 @@

Revision ID: c770e08966b4
Revises: e7e4ff171f7a
Create Date: 2025-06-01 20:09:08.019884
Create Date: 2025-06-02 14:00:05.521776

"""
from alembic import op
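The docstring fields above correspond to module-level attributes that Alembic reads from the revision file. A minimal sketch of what the full header plausibly looks like (the docstring message is a placeholder; the extra imports are inferred from the sa, inspect, and postgresql references later in the diff):

"""<migration message>

Revision ID: c770e08966b4
Revises: e7e4ff171f7a
Create Date: 2025-06-02 14:00:05.521776

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects import postgresql

# Alembic reads these module-level names to build the revision graph;
# they mirror the Revision ID / Revises lines in the docstring above.
revision = 'c770e08966b4'
down_revision = 'e7e4ff171f7a'
branch_labels = None
depends_on = None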
@@ -23,28 +23,38 @@ def upgrade():
    inspector = inspect(conn)
    tables = inspector.get_table_names()

    if 'email_templates' not in tables:
        op.create_table('email_templates',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('name', sa.String(length=100), nullable=False),
            sa.Column('subject', sa.String(length=200), nullable=False),
            sa.Column('body', sa.Text(), nullable=False),
            sa.Column('created_at', sa.DateTime(), nullable=True),
            sa.Column('updated_at', sa.DateTime(), nullable=True),
            sa.Column('created_by', sa.Integer(), nullable=False),
            sa.Column('is_active', sa.Boolean(), nullable=True),
            sa.ForeignKeyConstraint(['created_by'], ['user.id'], ),
            sa.PrimaryKeyConstraint('id')
        )
    op.drop_table('notification')
    if 'notification' in tables:
        op.drop_table('notification')

    op.create_table('email_template',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('subject', sa.String(length=200), nullable=False),
        sa.Column('body', sa.Text(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )

    # Check for existing indexes before dropping them
    with op.batch_alter_table('events', schema=None) as batch_op:
        batch_op.alter_column('details',
            existing_type=postgresql.JSONB(astext_type=sa.Text()),
            type_=sa.JSON(),
            existing_nullable=True)
        batch_op.drop_index(batch_op.f('idx_events_event_type'))
        batch_op.drop_index(batch_op.f('idx_events_timestamp'))
        batch_op.drop_index(batch_op.f('idx_events_user_id'))

        # Get existing indexes
        indexes = inspector.get_indexes('events')
        index_names = [idx['name'] for idx in indexes]

        # Only drop indexes if they exist
        if 'idx_events_event_type' in index_names:
            batch_op.drop_index(batch_op.f('idx_events_event_type'))
        if 'idx_events_timestamp' in index_names:
            batch_op.drop_index(batch_op.f('idx_events_timestamp'))
        if 'idx_events_user_id' in index_names:
            batch_op.drop_index(batch_op.f('idx_events_user_id'))

    # ### end Alembic commands ###
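The recurring pattern in this upgrade, probing the live schema with SQLAlchemy's inspector before creating or dropping anything, can be factored into small helpers. A minimal sketch under that assumption (the helper names table_exists and index_exists are illustrative, not part of the commit):

from alembic import op
from sqlalchemy import inspect


def table_exists(name):
    """Return True if the table is already present in the target database."""
    return name in inspect(op.get_bind()).get_table_names()


def index_exists(table, index):
    """Return True if the named index exists on the given table."""
    return index in {ix['name'] for ix in inspect(op.get_bind()).get_indexes(table)}


def upgrade():
    # Guarded drop: safe to re-run against a database where the legacy
    # notification table has already been removed.
    if table_exists('notification'):
        op.drop_table('notification')

    # Guarded index drops inside a batch block, mirroring the events hunk.
    with op.batch_alter_table('events', schema=None) as batch_op:
        for name in ('idx_events_event_type', 'idx_events_timestamp', 'idx_events_user_id'):
            if index_exists('events', name):
                batch_op.drop_index(batch_op.f(name))

The same helpers could guard the create_table calls and the recreation of notification in downgrade() below.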
@@ -60,22 +70,5 @@ def downgrade():
            type_=postgresql.JSONB(astext_type=sa.Text()),
            existing_nullable=True)

    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()

    if 'notification' not in tables:
        op.create_table('notification',
            sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
            sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
            sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
            sa.Column('message', sa.TEXT(), autoincrement=False, nullable=False),
            sa.Column('type', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
            sa.Column('is_read', sa.BOOLEAN(), autoincrement=False, nullable=True),
            sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
            sa.Column('link', sa.VARCHAR(length=512), autoincrement=False, nullable=True),
            sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('notification_user_id_fkey')),
            sa.PrimaryKeyConstraint('id', name=op.f('notification_pkey'))
        )
    op.drop_table('email_templates')
    op.drop_table('email_template')
    # ### end Alembic commands ###
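To check which of the tables and indexes touched by this revision are actually present in a given database, the same inspector API works from a short standalone script; a sketch, with a placeholder connection URL:

from sqlalchemy import create_engine, inspect

# Placeholder URL; point it at the database the migration targets.
engine = create_engine('postgresql://localhost/appdb')
inspector = inspect(engine)

tables = inspector.get_table_names()
for name in ('notification', 'email_templates', 'email_template'):
    print(name, 'present' if name in tables else 'absent')

# Index names on events, as consulted by the upgrade guards.
if 'events' in tables:
    print([ix['name'] for ix in inspector.get_indexes('events')])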