feat: Initial Hub implementation
Complete LetsBe Hub service for license management and telemetry:

- Client and Instance CRUD APIs
- License key generation and validation (lb_inst_ format)
- Hub API key generation (hk_ format) for telemetry auth
- Instance activation endpoint
- Telemetry collection with privacy-first redactor
- Key rotation and suspend/reactivate functionality
- Alembic migrations for PostgreSQL
- Docker Compose deployment ready
- Comprehensive test suite

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
89
alembic/env.py
Normal file
89
alembic/env.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""Alembic migration environment configuration for async SQLAlchemy."""
|
||||
|
||||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.engine import Connection
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
|
||||
from app.config import settings
|
||||
from app.models import Base
|
||||
|
||||
# This is the Alembic Config object, which provides access to the
# values within the .ini file (alembic.ini) in use.
config = context.config

# Override sqlalchemy.url with the value from the application settings,
# so the connection string comes from the environment rather than the ini file.
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)

# Interpret the config file for Python logging.
# This sets up the loggers declared in alembic.ini.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# The application's model metadata; Alembic compares the live database
# against this for 'autogenerate' support.
target_metadata = Base.metadata
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and not an Engine, so a
    DBAPI does not even need to be installed. Calls to
    context.execute() emit the given string to the script output.
    """
    offline_options = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        # Render bound parameters inline so the emitted SQL is runnable as-is.
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Configure the Alembic context on *connection* and run migrations."""
    # compare_type / compare_server_default make autogenerate detect
    # column-type and server-default changes, not only added/dropped columns.
    configure_kwargs = dict(
        connection=connection,
        target_metadata=target_metadata,
        compare_type=True,
        compare_server_default=True,
    )
    context.configure(**configure_kwargs)
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with an async engine.

    Builds an AsyncEngine from the config section and runs the synchronous
    migration logic on a single connection via run_sync().
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot migration run: no pooling needed
    )

    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        # Dispose even when a migration fails, so the engine's resources
        # (connections, loop-bound tasks) are not leaked on the error path.
        await connectable.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Synchronous entry point invoked by Alembic; drives the async
    migration coroutine to completion on a fresh event loop.
    """
    asyncio.run(run_async_migrations())
# Alembic executes this module directly: pick offline (SQL script emission)
# or online (live database) mode based on how the alembic command was run.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
26
alembic/script.py.mako
Normal file
26
alembic/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
## Mako template Alembic renders when generating a new migration script.
## Lines starting with "##" are Mako comments and are stripped from output.
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
142
alembic/versions/001_initial_hub_schema.py
Normal file
142
alembic/versions/001_initial_hub_schema.py
Normal file
@@ -0,0 +1,142 @@
|
||||
"""Initial Hub schema with clients, instances, and usage samples.

Revision ID: 001
Revises:
Create Date: 2024-12-09

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "001"
down_revision: Union[str, None] = None  # base revision: nothing before this
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
||||
def upgrade() -> None:
    """Create the initial Hub schema: clients, instances, usage_samples."""
    # Create clients table — one row per customer account.
    op.create_table(
        "clients",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("contact_email", sa.String(length=255), nullable=True),
        sa.Column("billing_plan", sa.String(length=50), nullable=False, server_default="free"),
        sa.Column("status", sa.String(length=50), nullable=False, server_default="active"),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),  # DB-side timestamp (PostgreSQL now())
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Create instances table — one row per deployed orchestrator instance,
    # owned by a client; rows are removed when the owning client is deleted.
    op.create_table(
        "instances",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("client_id", sa.UUID(), nullable=False),
        sa.Column("instance_id", sa.String(length=255), nullable=False),
        # Licensing — only a hash of the key is stored, plus a short prefix
        # (presumably for display/lookup; verify against key-generation code).
        sa.Column("license_key_hash", sa.String(length=64), nullable=False),
        sa.Column("license_key_prefix", sa.String(length=12), nullable=False),
        sa.Column("license_status", sa.String(length=50), nullable=False, server_default="active"),
        sa.Column("license_issued_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("license_expires_at", sa.DateTime(timezone=True), nullable=True),
        # Activation state
        sa.Column("activated_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("last_activation_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("activation_count", sa.Integer(), nullable=False, server_default="0"),
        # Telemetry — hash of the per-instance Hub API key; NULL until issued
        sa.Column("hub_api_key_hash", sa.String(length=64), nullable=True),
        # Metadata
        sa.Column("region", sa.String(length=50), nullable=True),
        sa.Column("version", sa.String(length=50), nullable=True),
        sa.Column("last_seen_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("status", sa.String(length=50), nullable=False, server_default="pending"),
        # Timestamps
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
        sa.ForeignKeyConstraint(
            ["client_id"],
            ["clients.id"],
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # External instance identifier must be globally unique.
    op.create_index(
        op.f("ix_instances_instance_id"),
        "instances",
        ["instance_id"],
        unique=True,
    )

    # Create usage_samples table — per-tool aggregated usage over a time
    # window; cascades away with its instance.
    op.create_table(
        "usage_samples",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("instance_id", sa.UUID(), nullable=False),
        # Time window
        sa.Column("window_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("window_end", sa.DateTime(timezone=True), nullable=False),
        sa.Column("window_type", sa.String(length=20), nullable=False),
        # Tool (ONLY the name — no arguments/payloads, by privacy design)
        sa.Column("tool_name", sa.String(length=255), nullable=False),
        # Counts
        sa.Column("call_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("success_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("error_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("rate_limited_count", sa.Integer(), nullable=False, server_default="0"),
        # Duration stats (milliseconds)
        sa.Column("total_duration_ms", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("min_duration_ms", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("max_duration_ms", sa.Integer(), nullable=False, server_default="0"),
        sa.ForeignKeyConstraint(
            ["instance_id"],
            ["instances.id"],
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_usage_samples_instance_id"),
        "usage_samples",
        ["instance_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_usage_samples_tool_name"),
        "usage_samples",
        ["tool_name"],
        unique=False,
    )
def downgrade() -> None:
    """Drop the initial Hub schema in reverse dependency order."""
    # Indexes first, then the table that owns them.
    for index_name in ("ix_usage_samples_tool_name", "ix_usage_samples_instance_id"):
        op.drop_index(op.f(index_name), table_name="usage_samples")
    op.drop_table("usage_samples")
    op.drop_index(op.f("ix_instances_instance_id"), table_name="instances")
    op.drop_table("instances")
    op.drop_table("clients")
63
alembic/versions/002_add_telemetry_samples.py
Normal file
63
alembic/versions/002_add_telemetry_samples.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Add telemetry_samples table for aggregated orchestrator metrics.

Revision ID: 002
Revises: 001
Create Date: 2024-12-17

This table stores aggregated telemetry from orchestrator instances.
Uses a unique constraint on (instance_id, window_start) for de-duplication.
"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql  # JSONB is PostgreSQL-specific


# revision identifiers, used by Alembic.
revision: str = "002"
down_revision: Union[str, None] = "001"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
||||
def upgrade() -> None:
    """Create telemetry_samples for aggregated orchestrator metrics."""
    # Create telemetry_samples table — one row per instance per time window;
    # rows cascade away when the owning instance is deleted.
    op.create_table(
        "telemetry_samples",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("instance_id", sa.UUID(), nullable=False),
        # Time window
        sa.Column("window_start", sa.DateTime(timezone=True), nullable=False),
        sa.Column("window_end", sa.DateTime(timezone=True), nullable=False),
        # Orchestrator uptime at submission time
        sa.Column("uptime_seconds", sa.Integer(), nullable=False),
        # Aggregated metrics stored as JSONB (schema defined by the
        # orchestrator's telemetry payload — see the collection endpoint)
        sa.Column("metrics", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        # Foreign key and primary key
        sa.ForeignKeyConstraint(
            ["instance_id"],
            ["instances.id"],
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        # Unique constraint for de-duplication:
        # prevents double-counting if the orchestrator retries submissions.
        sa.UniqueConstraint(
            "instance_id",
            "window_start",
            name="uq_telemetry_instance_window",
        ),
    )
    # Index on instance_id for efficient per-instance queries
    op.create_index(
        op.f("ix_telemetry_samples_instance_id"),
        "telemetry_samples",
        ["instance_id"],
        unique=False,
    )
||||
def downgrade() -> None:
    """Remove the telemetry_samples table and its index."""
    op.drop_index(
        op.f("ix_telemetry_samples_instance_id"),
        table_name="telemetry_samples",
    )
    op.drop_table("telemetry_samples")
Reference in New Issue
Block a user