Add source

alembic/README (Normal file, 1 line)
@@ -0,0 +1 @@
Generic single-database configuration.

alembic/env.py (Normal file, 114 lines)
@@ -0,0 +1,114 @@
"""
Alembic environment configuration for async SQLAlchemy
"""
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Import settings and models
import sys
from pathlib import Path

# Add project root to path
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

from shared.config import settings
from shared.database.models import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set target metadata for autogenerate
target_metadata = Base.metadata

# Override sqlalchemy.url from settings if not set in alembic.ini
if config.get_main_option("sqlalchemy.url") == "driver://user:pass@localhost/dbname":
    config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)


def run_migrations_offline() -> None:
    """
    Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """
    Run migrations with the given connection.
    """
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=False,  # Disable autogenerate type comparison; use explicit migration definitions
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """
    Run migrations in 'online' mode with async engine.
    """
    # Get database URL from settings
    database_url = settings.DATABASE_URL

    # Create async engine from config
    configuration = config.get_section(config.config_ini_section, {})
    configuration["sqlalchemy.url"] = database_url

    connectable = async_engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """
    Run migrations in 'online' mode.

    For async SQLAlchemy, we use asyncio to run async migrations.
    """
    import asyncio

    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
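
Because env.py resolves the database URL from shared.config.settings and adds the project root to sys.path itself, migrations can be driven from Python as well as from the alembic CLI. A minimal sketch, assuming an alembic.ini next to the alembic/ directory and a DATABASE_URL pointing at an async driver; the helper module name is hypothetical and not part of this commit:

# run_migrations.py - hypothetical helper, not included in this commit
from pathlib import Path

from alembic import command
from alembic.config import Config


def upgrade_to_head() -> None:
    # Load the project's alembic.ini (assumed to sit at the project root)
    cfg = Config(str(Path(__file__).parent / "alembic.ini"))
    # command.upgrade() runs env.py, which dispatches to the async online path above
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    upgrade_to_head()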

alembic/script.py.mako (Normal file, 28 lines)
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
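
This Mako template is rendered into a new file under alembic/versions/ whenever a revision is generated; the ${message}, ${up_revision}, ${upgrades}, and ${downgrades} placeholders are filled in by Alembic. A minimal sketch of triggering that programmatically instead of via the CLI; the script name and message are hypothetical:

# new_revision.py - hypothetical helper, not included in this commit
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed path to the project's Alembic config

# Renders alembic/script.py.mako into a new versions/ file. With
# autogenerate=True, Alembic diffs Base.metadata against the live database;
# note env.py sets compare_type=False, so column type changes are not detected.
command.revision(cfg, message="add example table", autogenerate=True)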

alembic/versions/7ac28bbbc5ee_initial_migration.py (Normal file, 114 lines)
@@ -0,0 +1,114 @@
"""Initial migration

Revision ID: 7ac28bbbc5ee
Revises:
Create Date: 2025-12-02 00:26:38.350265

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '7ac28bbbc5ee'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # Create users table
    op.create_table(
        'users',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=255), nullable=True),
        sa.Column('first_name', sa.String(length=255), nullable=True),
        sa.Column('last_name', sa.String(length=255), nullable=True),
        sa.Column('is_admin', sa.Boolean(), nullable=True),
        sa.Column('is_blocked', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('user_id', name=op.f('pk_users')),
        sa.UniqueConstraint('user_id', name=op.f('uq_users_user_id'))
    )
    op.create_index(op.f('ix_users_user_id'), 'users', ['user_id'], unique=False)

    # Create tasks table
    op.create_table(
        'tasks',
        sa.Column('id', sa.BigInteger(), nullable=False, autoincrement=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('task_type', sa.String(length=50), nullable=False),
        sa.Column('status', sa.String(length=50), nullable=True),
        sa.Column('url', sa.Text(), nullable=True),
        sa.Column('file_path', sa.String(length=500), nullable=True),
        sa.Column('progress', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.String(length=1000), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], name=op.f('fk_tasks_user_id_users')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_tasks'))
    )
    op.create_index(op.f('ix_tasks_created_at'), 'tasks', ['created_at'], unique=False)
    op.create_index(op.f('ix_tasks_status'), 'tasks', ['status'], unique=False)
    op.create_index(op.f('ix_tasks_user_id'), 'tasks', ['user_id'], unique=False)

    # Create downloads table
    op.create_table(
        'downloads',
        sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('task_id', sa.BigInteger(), nullable=False),
        sa.Column('url', sa.Text(), nullable=False),
        sa.Column('download_type', sa.String(length=50), nullable=False),
        sa.Column('file_path', sa.String(length=500), nullable=True),
        sa.Column('file_size', sa.Integer(), nullable=True),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['task_id'], ['tasks.id'], name=op.f('fk_downloads_task_id_tasks')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_downloads'))
    )
    op.create_index(op.f('ix_downloads_id'), 'downloads', ['id'], unique=False)

    # Create otp_codes table
    op.create_table(
        'otp_codes',
        sa.Column('id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=6), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('used', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], name=op.f('fk_otp_codes_user_id_users')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_otp_codes'))
    )
    op.create_index(op.f('ix_otp_codes_code'), 'otp_codes', ['code'], unique=False)
    op.create_index(op.f('ix_otp_codes_expires_at'), 'otp_codes', ['expires_at'], unique=False)
    op.create_index(op.f('ix_otp_codes_id'), 'otp_codes', ['id'], unique=False)
    op.create_index(op.f('ix_otp_codes_used'), 'otp_codes', ['used'], unique=False)
    op.create_index(op.f('ix_otp_codes_user_id'), 'otp_codes', ['user_id'], unique=False)


def downgrade() -> None:
    """Downgrade schema."""
    # Drop tables in reverse order (respecting foreign key constraints)
    op.drop_index(op.f('ix_otp_codes_user_id'), table_name='otp_codes')
    op.drop_index(op.f('ix_otp_codes_used'), table_name='otp_codes')
    op.drop_index(op.f('ix_otp_codes_id'), table_name='otp_codes')
    op.drop_index(op.f('ix_otp_codes_expires_at'), table_name='otp_codes')
    op.drop_index(op.f('ix_otp_codes_code'), table_name='otp_codes')
    op.drop_table('otp_codes')

    op.drop_index(op.f('ix_downloads_id'), table_name='downloads')
    op.drop_table('downloads')

    op.drop_index(op.f('ix_tasks_user_id'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_status'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_created_at'), table_name='tasks')
    op.drop_table('tasks')

    op.drop_index(op.f('ix_users_user_id'), table_name='users')
    op.drop_table('users')
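
The ORM models behind target_metadata live in shared/database/models.py, which is not part of this diff. The following is a hypothetical sketch, assuming SQLAlchemy 2.0 declarative models, of how the users table created above might be declared; the real module may differ, and defaults are not asserted here.

# shared/database/models.py - hypothetical sketch, not included in this commit
from datetime import datetime
from typing import Optional

from sqlalchemy import Boolean, DateTime, Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "users"

    # Mirrors the migration: integer primary key with a separate index (ix_users_user_id)
    user_id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
    username: Mapped[Optional[str]] = mapped_column(String(255))
    first_name: Mapped[Optional[str]] = mapped_column(String(255))
    last_name: Mapped[Optional[str]] = mapped_column(String(255))
    is_admin: Mapped[Optional[bool]] = mapped_column(Boolean)
    is_blocked: Mapped[Optional[bool]] = mapped_column(Boolean)
    created_at: Mapped[Optional[datetime]] = mapped_column(DateTime)
    updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime)

# Task, Download, and OTPCode models would follow the same pattern for the
# tasks, downloads, and otp_codes tables defined in the migration.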