Include full contents of all nested repositories

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-27 16:25:02 +01:00
parent 14ff8fd54c
commit 2401ed446f
7271 changed files with 1310112 additions and 6 deletions

View File

@@ -0,0 +1,94 @@
"""Alembic migration environment configuration for async SQLAlchemy."""
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from app.config import settings
from app.models import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Override sqlalchemy.url from application settings so the migration
# target always matches the running app's DATABASE_URL environment value.
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)
# Interpret the config file for Python logging.
# This configures loggers from the [loggers]/[handlers] sections, if any.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# MetaData of the application's declarative Base; Alembic's autogenerate
# diffs the live database against this to produce migration operations.
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Emits the migration SQL to the script output using only the
    configured database URL -- no Engine, and therefore no DBAPI, is
    needed. Parameter values are rendered inline (literal_binds).
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Configure the Alembic context on *connection* and run migrations.

    Type and server-default comparison are switched on so autogenerate
    detects altered column types and defaults, not just added or
    removed columns.
    """
    configure_opts = {
        "connection": connection,
        "target_metadata": target_metadata,
        "compare_type": True,
        "compare_server_default": True,
    }
    context.configure(**configure_opts)
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with an async engine.

    Builds an AsyncEngine from the [alembic] config section (using the
    sqlalchemy.url overridden at import time) and delegates to the
    synchronous migration logic via ``run_sync``.

    The engine is disposed in a ``finally`` block so its resources are
    released even when the migration itself raises; previously
    ``dispose()`` was skipped on failure.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Thin synchronous entry point: drives the async migration coroutine
    to completion on a fresh event loop.
    """
    asyncio.run(run_async_migrations())
# Alembic executes this module at import time: choose offline mode
# (emit SQL script) or online mode (apply against a live database)
# based on how the alembic command was invoked (--sql flag).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,110 @@
"""initial_schema
Revision ID: 4ca4b9958baf
Revises:
Create Date: 2025-12-02 18:50:17.377481
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '4ca4b9958baf'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema: tenants, agents, servers, tasks, events.

    Tables are created parent-first so foreign keys resolve. All child
    tables cascade-delete with their tenant; tasks keep their rows but
    NULL the agent reference, and events NULL the task reference, when
    the referenced row is deleted.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Root tenancy table; every other table references tenants.id.
    op.create_table('tenants',
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('domain', sa.String(length=255), nullable=True),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('domain')
    )
    # Tenant names are looked up directly and must be unique.
    op.create_index(op.f('ix_tenants_name'), 'tenants', ['name'], unique=True)
    # Agent processes owned by a tenant; deleted along with the tenant.
    op.create_table('agents',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('last_heartbeat', sa.DateTime(timezone=True), nullable=True),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_agents_tenant_id'), 'agents', ['tenant_id'], unique=False)
    # Managed servers per tenant; ip_address is 45 chars to fit IPv6.
    op.create_table('servers',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('hostname', sa.String(length=255), nullable=False),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_servers_tenant_id'), 'servers', ['tenant_id'], unique=False)
    # Work items: JSONB payload/result; agent link survives agent deletion
    # (SET NULL) so task history is preserved.
    op.create_table('tasks',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('agent_id', sa.Uuid(), nullable=True),
    sa.Column('type', sa.String(length=100), nullable=False),
    sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('result', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['agent_id'], ['agents.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tasks_agent_id'), 'tasks', ['agent_id'], unique=False)
    op.create_index(op.f('ix_tasks_status'), 'tasks', ['status'], unique=False)
    op.create_index(op.f('ix_tasks_tenant_id'), 'tasks', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_tasks_type'), 'tasks', ['type'], unique=False)
    # Append-only audit/event log; task link survives task deletion.
    op.create_table('events',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('task_id', sa.Uuid(), nullable=True),
    sa.Column('event_type', sa.String(length=100), nullable=False),
    sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.ForeignKeyConstraint(['task_id'], ['tasks.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_events_created_at'), 'events', ['created_at'], unique=False)
    op.create_index(op.f('ix_events_event_type'), 'events', ['event_type'], unique=False)
    op.create_index(op.f('ix_events_task_id'), 'events', ['task_id'], unique=False)
    op.create_index(op.f('ix_events_tenant_id'), 'events', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the initial schema, children first.

    Order is the reverse of upgrade(): events and tasks reference
    agents/tenants, so they must go before their parents. For each
    table, its indexes are dropped before the table itself.
    """
    tables_with_indexes = [
        ('events', ['ix_events_tenant_id', 'ix_events_task_id',
                    'ix_events_event_type', 'ix_events_created_at']),
        ('tasks', ['ix_tasks_type', 'ix_tasks_tenant_id',
                   'ix_tasks_status', 'ix_tasks_agent_id']),
        ('servers', ['ix_servers_tenant_id']),
        ('agents', ['ix_agents_tenant_id']),
        ('tenants', ['ix_tenants_name']),
    ]
    for table_name, index_names in tables_with_indexes:
        for index_name in index_names:
            op.drop_index(op.f(index_name), table_name=table_name)
        op.drop_table(table_name)

View File

@@ -0,0 +1,48 @@
"""add_agent_fields_and_nullable_tenant
Revision ID: add_agent_fields
Revises: 4ca4b9958baf
Create Date: 2025-12-02 19:30:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'add_agent_fields'
down_revision: Union[str, None] = '4ca4b9958baf'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add version/status/token columns to agents and relax tenant_id.

    The new columns carry server defaults so the ALTERs succeed on
    tables that already contain rows; tenant_id becomes nullable so an
    agent can register before being assigned to a tenant.
    """
    new_columns = (
        sa.Column('version', sa.String(length=50), nullable=False, server_default=''),
        sa.Column('status', sa.String(length=20), nullable=False, server_default='offline'),
        sa.Column('token', sa.Text(), nullable=False, server_default=''),
    )
    for column in new_columns:
        op.add_column('agents', column)
    # Status is filtered on frequently (e.g. listing online agents).
    op.create_index(op.f('ix_agents_status'), 'agents', ['status'], unique=False)
    # Agents may now exist without a tenant until claimed.
    op.alter_column('agents', 'tenant_id',
                    existing_type=sa.UUID(),
                    nullable=True)
def downgrade() -> None:
    """Reverse upgrade(): restore NOT NULL tenant_id and drop new columns.

    Restoring NOT NULL fails if any row still has a NULL tenant_id;
    such rows must be cleaned up or assigned a tenant first.
    """
    op.alter_column('agents', 'tenant_id',
                    existing_type=sa.UUID(),
                    nullable=False)
    op.drop_index(op.f('ix_agents_status'), table_name='agents')
    # Drop in reverse order of addition.
    for column_name in ('token', 'status', 'version'):
        op.drop_column('agents', column_name)

View File

@@ -0,0 +1,36 @@
"""add_dashboard_token_hash_to_tenants
Revision ID: add_dashboard_token_hash
Revises: add_registration_tokens
Create Date: 2025-01-05 12:00:00.000000
This migration adds dashboard_token_hash column to tenants table
for authenticating dashboard-to-orchestrator communication.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'add_dashboard_token_hash'
down_revision: Union[str, None] = 'add_registration_tokens'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add tenants.dashboard_token_hash for dashboard authentication.

    The column is nullable: NULL means no dashboard token has been
    issued for the tenant yet. 64 characters fits a SHA-256 hex digest.
    """
    token_hash_column = sa.Column(
        'dashboard_token_hash',
        sa.String(length=64),
        nullable=True,
        comment='SHA-256 hash of dashboard authentication token'
    )
    op.add_column('tenants', token_hash_column)
def downgrade() -> None:
    """Remove the dashboard token hash column from tenants."""
    op.drop_column('tenants', 'dashboard_token_hash')

View File

@@ -0,0 +1,94 @@
"""add_registration_tokens_and_agent_secret_hash
Revision ID: add_registration_tokens
Revises: add_agent_fields
Create Date: 2025-12-06 10:00:00.000000
This migration adds:
1. registration_tokens table for secure agent registration
2. secret_hash column to agents for new auth scheme
3. registration_token_id FK in agents to track token usage
"""
from typing import Sequence, Union
import hashlib
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'add_registration_tokens'
down_revision: Union[str, None] = 'add_agent_fields'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create registration_tokens and migrate agents to hashed secrets.

    Steps:
      1. Create the registration_tokens table (per-tenant hashed tokens
         with optional expiry, use limits, and manual revocation).
      2. Add agents.secret_hash for the new auth scheme.
      3. Add agents.registration_token_id to record which token
         enrolled each agent (audit trail, SET NULL on token deletion).
      4. Backfill secret_hash from the legacy plaintext agents.token so
         existing agents keep authenticating during the transition.
    """
    # 1. Create registration_tokens table
    op.create_table(
        'registration_tokens',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('token_hash', sa.String(length=64), nullable=False, comment='SHA-256 hash of the registration token'),
        sa.Column('description', sa.String(length=255), nullable=True, comment='Human-readable description for the token'),
        sa.Column('max_uses', sa.Integer(), nullable=False, server_default='1', comment='Maximum number of uses (0 = unlimited)'),
        sa.Column('use_count', sa.Integer(), nullable=False, server_default='0', comment='Current number of times this token has been used'),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True, comment='Optional expiration timestamp'),
        sa.Column('revoked', sa.Boolean(), nullable=False, server_default='false', comment='Whether this token has been manually revoked'),
        sa.Column('created_by', sa.String(length=255), nullable=True, comment='Identifier of who created this token (for audit)'),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    )
    op.create_index(op.f('ix_registration_tokens_tenant_id'), 'registration_tokens', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_registration_tokens_token_hash'), 'registration_tokens', ['token_hash'], unique=False)
    # 2. Add secret_hash column to agents (for new auth scheme).
    # Starts as '' and is backfilled below; kept NOT NULL so application
    # code never has to handle a missing hash.
    op.add_column(
        'agents',
        sa.Column('secret_hash', sa.String(length=64), nullable=False, server_default='', comment='SHA-256 hash of the agent secret')
    )
    # 3. Add registration_token_id FK to agents
    op.add_column(
        'agents',
        sa.Column('registration_token_id', sa.UUID(), nullable=True)
    )
    op.create_foreign_key(
        'fk_agents_registration_token_id',
        'agents', 'registration_tokens',
        ['registration_token_id'], ['id'],
        ondelete='SET NULL'
    )
    op.create_index(op.f('ix_agents_registration_token_id'), 'agents', ['registration_token_id'], unique=False)
    # 4. Migrate existing agent tokens to secret_hash.
    # Fix: materialize the SELECT with fetchall() before issuing UPDATEs.
    # Previously the loop issued UPDATEs while still streaming rows from
    # the same connection, which can fail or skip rows on drivers that
    # use server-side cursors. The SQL filter (token != '') already
    # excludes empty and NULL tokens, so no per-row check is needed.
    connection = op.get_bind()
    rows = connection.execute(
        sa.text("SELECT id, token FROM agents WHERE token != ''")
    ).fetchall()
    for agent_id, token in rows:
        connection.execute(
            sa.text("UPDATE agents SET secret_hash = :hash WHERE id = :id"),
            {"hash": hashlib.sha256(token.encode()).hexdigest(), "id": agent_id}
        )
def downgrade() -> None:
    """Undo upgrade() in reverse order.

    Note: hashed secrets are discarded; a subsequent upgrade() re-hashes
    whatever remains in the legacy agents.token column at that time.
    """
    # 1. Remove the agents -> registration_tokens link (index, FK, column).
    op.drop_index(op.f('ix_agents_registration_token_id'), table_name='agents')
    op.drop_constraint('fk_agents_registration_token_id', 'agents', type_='foreignkey')
    op.drop_column('agents', 'registration_token_id')
    # 2. Remove the new auth column.
    op.drop_column('agents', 'secret_hash')
    # 3. Drop the registration_tokens indexes, then the table itself.
    for index_name in ('ix_registration_tokens_token_hash',
                       'ix_registration_tokens_tenant_id'):
        op.drop_index(op.f(index_name), table_name='registration_tokens')
    op.drop_table('registration_tokens')