Include full contents of all nested repositories

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-27 16:25:02 +01:00
parent 14ff8fd54c
commit 2401ed446f
7271 changed files with 1310112 additions and 6 deletions

View File

@@ -0,0 +1,110 @@
"""initial_schema
Revision ID: 4ca4b9958baf
Revises:
Create Date: 2025-12-02 18:50:17.377481
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '4ca4b9958baf'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial multi-tenant schema: tenants, agents, servers, tasks, events.

    Tables are created parent-first (``tenants`` before its dependents) so the
    foreign keys can be declared inline. Every child table cascade-deletes with
    its tenant; ``tasks.agent_id`` and ``events.task_id`` are SET NULL so
    history survives deletion of the referenced row.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Root table: one row per tenant. `name` gets a unique index below;
    # `domain` is optionally set but must be unique when present.
    op.create_table('tenants',
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('domain', sa.String(length=255), nullable=True),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('domain')
    )
    op.create_index(op.f('ix_tenants_name'), 'tenants', ['name'], unique=True)
    # Agents belong to exactly one tenant; `last_heartbeat` is NULL until the
    # agent first reports in.
    op.create_table('agents',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('last_heartbeat', sa.DateTime(timezone=True), nullable=True),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_agents_tenant_id'), 'agents', ['tenant_id'], unique=False)
    # Servers tracked per tenant. ip_address is String(45) — wide enough for a
    # full IPv6 textual address.
    op.create_table('servers',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('hostname', sa.String(length=255), nullable=False),
    sa.Column('ip_address', sa.String(length=45), nullable=True),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_servers_tenant_id'), 'servers', ['tenant_id'], unique=False)
    # Tasks may outlive their agent (agent_id nullable + ON DELETE SET NULL);
    # payload/result are JSONB for schemaless task data.
    op.create_table('tasks',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('agent_id', sa.Uuid(), nullable=True),
    sa.Column('type', sa.String(length=100), nullable=False),
    sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('status', sa.String(length=50), nullable=False),
    sa.Column('result', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['agent_id'], ['agents.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    # Tasks are queried by tenant, agent, status and type — index all four.
    op.create_index(op.f('ix_tasks_agent_id'), 'tasks', ['agent_id'], unique=False)
    op.create_index(op.f('ix_tasks_status'), 'tasks', ['status'], unique=False)
    op.create_index(op.f('ix_tasks_tenant_id'), 'tasks', ['tenant_id'], unique=False)
    op.create_index(op.f('ix_tasks_type'), 'tasks', ['type'], unique=False)
    # NOTE(review): events has no updated_at column — presumably append-only;
    # confirm that is intentional before relying on it.
    op.create_table('events',
    sa.Column('tenant_id', sa.Uuid(), nullable=False),
    sa.Column('task_id', sa.Uuid(), nullable=True),
    sa.Column('event_type', sa.String(length=100), nullable=False),
    sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('id', sa.Uuid(), nullable=False),
    sa.ForeignKeyConstraint(['task_id'], ['tasks.id'], ondelete='SET NULL'),
    sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    # created_at is indexed to support time-range queries over the event log.
    op.create_index(op.f('ix_events_created_at'), 'events', ['created_at'], unique=False)
    op.create_index(op.f('ix_events_event_type'), 'events', ['event_type'], unique=False)
    op.create_index(op.f('ix_events_task_id'), 'events', ['task_id'], unique=False)
    op.create_index(op.f('ix_events_tenant_id'), 'events', ['tenant_id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the entire initial schema.

    Tables are dropped in reverse dependency order (events → tasks → servers
    → agents → tenants) so no foreign key constraint is violated; each
    table's indexes are dropped before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_events_tenant_id'), table_name='events')
    op.drop_index(op.f('ix_events_task_id'), table_name='events')
    op.drop_index(op.f('ix_events_event_type'), table_name='events')
    op.drop_index(op.f('ix_events_created_at'), table_name='events')
    op.drop_table('events')
    op.drop_index(op.f('ix_tasks_type'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_tenant_id'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_status'), table_name='tasks')
    op.drop_index(op.f('ix_tasks_agent_id'), table_name='tasks')
    op.drop_table('tasks')
    op.drop_index(op.f('ix_servers_tenant_id'), table_name='servers')
    op.drop_table('servers')
    op.drop_index(op.f('ix_agents_tenant_id'), table_name='agents')
    op.drop_table('agents')
    op.drop_index(op.f('ix_tenants_name'), table_name='tenants')
    op.drop_table('tenants')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,48 @@
"""add_agent_fields_and_nullable_tenant
Revision ID: add_agent_fields
Revises: 4ca4b9958baf
Create Date: 2025-12-02 19:30:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'add_agent_fields'
down_revision: Union[str, None] = '4ca4b9958baf'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Extend agents with version/status/token and make tenant_id optional."""
    # Server defaults let the NOT NULL columns be added to populated tables.
    new_columns = (
        sa.Column('version', sa.String(length=50), nullable=False, server_default=''),
        sa.Column('status', sa.String(length=20), nullable=False, server_default='offline'),
        sa.Column('token', sa.Text(), nullable=False, server_default=''),
    )
    for column in new_columns:
        op.add_column('agents', column)
    # Status is filtered on frequently, so give it an index.
    op.create_index(op.f('ix_agents_status'), 'agents', ['status'], unique=False)
    # Agents may now register before being assigned to a tenant.
    op.alter_column('agents', 'tenant_id', existing_type=sa.UUID(), nullable=True)
def downgrade() -> None:
    """Reverse upgrade(); fails if any agent row still has a NULL tenant_id."""
    op.alter_column('agents', 'tenant_id', existing_type=sa.UUID(), nullable=False)
    op.drop_index(op.f('ix_agents_status'), table_name='agents')
    # Remove the columns in reverse order of their addition.
    for column_name in ('token', 'status', 'version'):
        op.drop_column('agents', column_name)

View File

@@ -0,0 +1,36 @@
"""add_dashboard_token_hash_to_tenants
Revision ID: add_dashboard_token_hash
Revises: add_registration_tokens
Create Date: 2025-01-05 12:00:00.000000
This migration adds dashboard_token_hash column to tenants table
for authenticating dashboard-to-orchestrator communication.
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'add_dashboard_token_hash'
down_revision: Union[str, None] = 'add_registration_tokens'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add tenants.dashboard_token_hash for dashboard↔orchestrator auth."""
    # Nullable: existing tenants have no dashboard token until one is issued.
    token_hash_column = sa.Column(
        'dashboard_token_hash',
        sa.String(length=64),  # length of a hex-encoded SHA-256 digest
        nullable=True,
        comment='SHA-256 hash of dashboard authentication token',
    )
    op.add_column('tenants', token_hash_column)
def downgrade() -> None:
    """Remove the dashboard token hash column from tenants."""
    op.drop_column('tenants', 'dashboard_token_hash')

View File

@@ -0,0 +1,94 @@
"""add_registration_tokens_and_agent_secret_hash
Revision ID: add_registration_tokens
Revises: add_agent_fields
Create Date: 2025-12-06 10:00:00.000000
This migration adds:
1. registration_tokens table for secure agent registration
2. secret_hash column to agents for new auth scheme
3. registration_token_id FK in agents to track token usage
"""
from typing import Sequence, Union
import hashlib
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = 'add_registration_tokens'
down_revision: Union[str, None] = 'add_agent_fields'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Introduce registration tokens and hash-based agent authentication.

    Steps:
      1. Create the ``registration_tokens`` table (tenant-scoped,
         usage-limited, revocable).
      2. Add ``agents.secret_hash`` for the new auth scheme.
      3. Add ``agents.registration_token_id`` FK to track which token
         registered each agent.
      4. Backfill ``secret_hash`` from the legacy plaintext ``token`` column
         so existing agents keep authenticating during the transition.
    """
    # 1. Create registration_tokens table
    op.create_table(
        'registration_tokens',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('tenant_id', sa.UUID(), nullable=False),
        sa.Column('token_hash', sa.String(length=64), nullable=False, comment='SHA-256 hash of the registration token'),
        sa.Column('description', sa.String(length=255), nullable=True, comment='Human-readable description for the token'),
        sa.Column('max_uses', sa.Integer(), nullable=False, server_default='1', comment='Maximum number of uses (0 = unlimited)'),
        sa.Column('use_count', sa.Integer(), nullable=False, server_default='0', comment='Current number of times this token has been used'),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True, comment='Optional expiration timestamp'),
        sa.Column('revoked', sa.Boolean(), nullable=False, server_default='false', comment='Whether this token has been manually revoked'),
        sa.Column('created_by', sa.String(length=255), nullable=True, comment='Identifier of who created this token (for audit)'),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False, server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ondelete='CASCADE'),
    )
    op.create_index(op.f('ix_registration_tokens_tenant_id'), 'registration_tokens', ['tenant_id'], unique=False)
    # NOTE(review): token_hash lookups presumably expect at most one match —
    # consider unique=True here; left non-unique to preserve current behavior.
    op.create_index(op.f('ix_registration_tokens_token_hash'), 'registration_tokens', ['token_hash'], unique=False)
    # 2. Add secret_hash column to agents (for new auth scheme).
    # Server default '' keeps the NOT NULL constraint satisfiable on existing
    # rows; real values are backfilled in step 4.
    op.add_column(
        'agents',
        sa.Column('secret_hash', sa.String(length=64), nullable=False, server_default='', comment='SHA-256 hash of the agent secret')
    )
    # 3. Add registration_token_id FK to agents (SET NULL so deleting a token
    # does not delete the agents it registered).
    op.add_column(
        'agents',
        sa.Column('registration_token_id', sa.UUID(), nullable=True)
    )
    op.create_foreign_key(
        'fk_agents_registration_token_id',
        'agents', 'registration_tokens',
        ['registration_token_id'], ['id'],
        ondelete='SET NULL'
    )
    op.create_index(op.f('ix_agents_registration_token_id'), 'agents', ['registration_token_id'], unique=False)
    # 4. Migrate existing agent tokens to secret_hash so legacy agents keep
    # working during the transition. Hash in Python, then apply all updates in
    # a single executemany batch instead of one round trip per agent.
    connection = op.get_bind()
    rows = connection.execute(
        sa.text("SELECT id, token FROM agents WHERE token != ''")
    ).fetchall()
    update_params = [
        {"hash": hashlib.sha256(row.token.encode()).hexdigest(), "id": row.id}
        for row in rows
        if row.token  # defensive: skip NULL/empty even though the WHERE filters them
    ]
    if update_params:
        connection.execute(
            sa.text("UPDATE agents SET secret_hash = :hash WHERE id = :id"),
            update_params,
        )
def downgrade() -> None:
    """Undo upgrade(): strip agent auth columns, then drop the token table."""
    # Detach agents from registration_tokens first so the FK never dangles.
    op.drop_index(op.f('ix_agents_registration_token_id'), table_name='agents')
    op.drop_constraint('fk_agents_registration_token_id', 'agents', type_='foreignkey')
    for column_name in ('registration_token_id', 'secret_hash'):
        op.drop_column('agents', column_name)
    # Now the registration_tokens table and its indexes can go.
    for index_name in ('ix_registration_tokens_token_hash', 'ix_registration_tokens_tenant_id'):
        op.drop_index(op.f(index_name), table_name='registration_tokens')
    op.drop_table('registration_tokens')