mirror of
https://github.com/mblanke/ThreatHunt.git
synced 2026-03-01 14:00:20 -05:00
- NetworkMap: hunt-scoped force-directed graph with click-to-inspect popover - NetworkMap: zoom/pan (wheel, drag, buttons), viewport transform - NetworkMap: clickable IP/Host/Domain/URL legend chips to filter node types - NetworkMap: brighter colors, 20% smaller nodes - DatasetViewer: IOC columns highlighted with colored headers + cell tinting - AUPScanner: hunt dropdown replacing dataset checkboxes, auto-select all - Rename 'Social Media (Personal)' theme to 'Social Media' with DB migration - Fix /api/hunts timeout: Dataset.rows lazy='noload' (was selectin cascade) - Add OS column mapping to normalizer - Full backend services, DB models, alembic migrations, new routes - New components: Dashboard, HuntManager, FileUpload, NetworkMap, etc. - Docker Compose deployment with nginx reverse proxy
211 lines
10 KiB
Python
211 lines
10 KiB
Python
"""initial schema
|
|
|
|
Revision ID: 9790f482da06
|
|
Revises:
|
|
Create Date: 2026-02-19 11:40:02.108830
|
|
|
|
"""
|
|
from typing import Sequence, Union
|
|
|
|
from alembic import op
|
|
import sqlalchemy as sa
|
|
|
|
|
|
# revision identifiers, used by Alembic.
|
|
revision: str = '9790f482da06'
|
|
down_revision: Union[str, Sequence[str], None] = None
|
|
branch_labels: Union[str, Sequence[str], None] = None
|
|
depends_on: Union[str, Sequence[str], None] = None
|
|
|
|
|
|
def upgrade() -> None:
    """Upgrade schema: create the initial tables for the threat-hunt app.

    Tables are created in FK-dependency order: users -> hunts -> datasets,
    then hypotheses / conversations / dataset_rows / enrichment_results /
    annotations / messages, which reference the earlier tables.

    NOTE(review): indexes are created through batch_alter_table —
    presumably for SQLite compatibility; confirm against the deployment DB.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # users: application accounts. Email is unique via a table constraint,
    # username via a unique index below.
    op.create_table('users',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('username', sa.String(length=64), nullable=False),
    sa.Column('email', sa.String(length=256), nullable=False),
    sa.Column('hashed_password', sa.String(length=256), nullable=False),
    sa.Column('role', sa.String(length=16), nullable=False),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email')
    )
    with op.batch_alter_table('users', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_users_username'), ['username'], unique=True)

    # hunts: top-level investigation containers, optionally owned by a user.
    # owner_id has no ondelete action, so deleting a user with hunts is
    # left to the default FK behavior of the backend.
    op.create_table('hunts',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('name', sa.String(length=256), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('status', sa.String(length=32), nullable=False),
    sa.Column('owner_id', sa.String(length=32), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # datasets: uploaded files plus parsing metadata (schema, encoding,
    # delimiter) and derived column info (normalized_columns, ioc_columns).
    # NOTE(review): uploaded_by looks like a users.id reference but has no
    # FK constraint, unlike annotations.author_id — confirm intentional.
    op.create_table('datasets',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('name', sa.String(length=256), nullable=False),
    sa.Column('filename', sa.String(length=512), nullable=False),
    sa.Column('source_tool', sa.String(length=64), nullable=True),
    sa.Column('row_count', sa.Integer(), nullable=False),
    sa.Column('column_schema', sa.JSON(), nullable=True),
    sa.Column('normalized_columns', sa.JSON(), nullable=True),
    sa.Column('ioc_columns', sa.JSON(), nullable=True),
    sa.Column('file_size_bytes', sa.Integer(), nullable=False),
    sa.Column('encoding', sa.String(length=32), nullable=True),
    sa.Column('delimiter', sa.String(length=4), nullable=True),
    sa.Column('time_range_start', sa.DateTime(timezone=True), nullable=True),
    sa.Column('time_range_end', sa.DateTime(timezone=True), nullable=True),
    sa.Column('hunt_id', sa.String(length=32), nullable=True),
    sa.Column('uploaded_by', sa.String(length=32), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['hunt_id'], ['hunts.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.create_index('ix_datasets_hunt', ['hunt_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_datasets_name'), ['name'], unique=False)

    # hypotheses: hunt-scoped hypotheses with optional MITRE technique tag
    # and evidence pointers (row ids stored as JSON, free-text notes).
    op.create_table('hypotheses',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('hunt_id', sa.String(length=32), nullable=True),
    sa.Column('title', sa.String(length=256), nullable=False),
    sa.Column('description', sa.Text(), nullable=True),
    sa.Column('mitre_technique', sa.String(length=32), nullable=True),
    sa.Column('status', sa.String(length=16), nullable=False),
    sa.Column('evidence_row_ids', sa.JSON(), nullable=True),
    sa.Column('evidence_notes', sa.Text(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['hunt_id'], ['hunts.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('hypotheses', schema=None) as batch_op:
        batch_op.create_index('ix_hypotheses_hunt', ['hunt_id'], unique=False)

    # conversations: chat threads optionally scoped to a hunt and/or dataset.
    op.create_table('conversations',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('title', sa.String(length=256), nullable=True),
    sa.Column('hunt_id', sa.String(length=32), nullable=True),
    sa.Column('dataset_id', sa.String(length=32), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ),
    sa.ForeignKeyConstraint(['hunt_id'], ['hunts.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # dataset_rows: one row per parsed record, raw and normalized payloads
    # as JSON. ondelete='CASCADE' removes rows at the DB level when the
    # parent dataset is deleted. Composite index supports paged reads by
    # (dataset_id, row_index).
    op.create_table('dataset_rows',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('dataset_id', sa.String(length=32), nullable=False),
    sa.Column('row_index', sa.Integer(), nullable=False),
    sa.Column('data', sa.JSON(), nullable=False),
    sa.Column('normalized_data', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('dataset_rows', schema=None) as batch_op:
        batch_op.create_index('ix_dataset_rows_dataset', ['dataset_id'], unique=False)
        batch_op.create_index('ix_dataset_rows_dataset_idx', ['dataset_id', 'row_index'], unique=False)

    # enrichment_results: cached IOC lookups keyed by (ioc_value, source),
    # with cached_at/expires_at for TTL-style expiry.
    op.create_table('enrichment_results',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('ioc_value', sa.String(length=512), nullable=False),
    sa.Column('ioc_type', sa.String(length=32), nullable=False),
    sa.Column('source', sa.String(length=32), nullable=False),
    sa.Column('verdict', sa.String(length=16), nullable=True),
    sa.Column('confidence', sa.Float(), nullable=True),
    sa.Column('raw_result', sa.JSON(), nullable=True),
    sa.Column('summary', sa.Text(), nullable=True),
    sa.Column('dataset_id', sa.String(length=32), nullable=True),
    sa.Column('cached_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True),
    sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('enrichment_results', schema=None) as batch_op:
        batch_op.create_index('ix_enrichment_ioc_source', ['ioc_value', 'source'], unique=False)
        batch_op.create_index(batch_op.f('ix_enrichment_results_ioc_value'), ['ioc_value'], unique=False)

    # annotations: analyst notes attached to a dataset and optionally a
    # specific row. ondelete='SET NULL' keeps the annotation if its row is
    # deleted, only clearing the row reference.
    op.create_table('annotations',
    sa.Column('id', sa.String(length=32), nullable=False),
    sa.Column('row_id', sa.Integer(), nullable=True),
    sa.Column('dataset_id', sa.String(length=32), nullable=True),
    sa.Column('author_id', sa.String(length=32), nullable=True),
    sa.Column('text', sa.Text(), nullable=False),
    sa.Column('severity', sa.String(length=16), nullable=False),
    sa.Column('tag', sa.String(length=32), nullable=True),
    sa.Column('highlight_color', sa.String(length=16), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
    sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ),
    sa.ForeignKeyConstraint(['row_id'], ['dataset_rows.id'], ondelete='SET NULL'),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('annotations', schema=None) as batch_op:
        batch_op.create_index('ix_annotations_dataset', ['dataset_id'], unique=False)
        batch_op.create_index('ix_annotations_row', ['row_id'], unique=False)

    # messages: conversation turns with model/latency telemetry.
    # ondelete='CASCADE' removes messages with their conversation.
    op.create_table('messages',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('conversation_id', sa.String(length=32), nullable=False),
    sa.Column('role', sa.String(length=16), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('model_used', sa.String(length=128), nullable=True),
    sa.Column('node_used', sa.String(length=64), nullable=True),
    sa.Column('token_count', sa.Integer(), nullable=True),
    sa.Column('latency_ms', sa.Integer(), nullable=True),
    sa.Column('response_meta', sa.JSON(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['conversation_id'], ['conversations.id'], ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.create_index('ix_messages_conversation', ['conversation_id'], unique=False)

    # ### end Alembic commands ###
|
|
|
|
|
|
def downgrade() -> None:
    """Downgrade schema: drop all tables created by upgrade().

    Drops run in the reverse of the creation order so that each table's
    FK dependents are removed before the table itself. Each table's
    explicitly-created indexes are dropped (via batch_alter_table) before
    the table is dropped.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.drop_index('ix_messages_conversation')

    op.drop_table('messages')
    with op.batch_alter_table('annotations', schema=None) as batch_op:
        batch_op.drop_index('ix_annotations_row')
        batch_op.drop_index('ix_annotations_dataset')

    op.drop_table('annotations')
    with op.batch_alter_table('enrichment_results', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_enrichment_results_ioc_value'))
        batch_op.drop_index('ix_enrichment_ioc_source')

    op.drop_table('enrichment_results')
    with op.batch_alter_table('dataset_rows', schema=None) as batch_op:
        batch_op.drop_index('ix_dataset_rows_dataset_idx')
        batch_op.drop_index('ix_dataset_rows_dataset')

    op.drop_table('dataset_rows')
    # conversations has no standalone indexes to drop first.
    op.drop_table('conversations')
    with op.batch_alter_table('hypotheses', schema=None) as batch_op:
        batch_op.drop_index('ix_hypotheses_hunt')

    op.drop_table('hypotheses')
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_datasets_name'))
        batch_op.drop_index('ix_datasets_hunt')

    op.drop_table('datasets')
    op.drop_table('hunts')
    with op.batch_alter_table('users', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_users_username'))

    op.drop_table('users')
    # ### end Alembic commands ###