Mirror of https://github.com/mblanke/ThreatHunt.git, synced 2026-03-01 14:00:20 -05:00
feat: interactive network map, IOC highlighting, AUP hunt selector, type filters
- NetworkMap: hunt-scoped force-directed graph with click-to-inspect popover
- NetworkMap: zoom/pan (wheel, drag, buttons), viewport transform
- NetworkMap: clickable IP/Host/Domain/URL legend chips to filter node types
- NetworkMap: brighter colors, 20% smaller nodes
- DatasetViewer: IOC columns highlighted with colored headers + cell tinting
- AUPScanner: hunt dropdown replacing dataset checkboxes, auto-select all
- Rename 'Social Media (Personal)' theme to 'Social Media' with DB migration
- Fix /api/hunts timeout: Dataset.rows lazy='noload' (was selectin cascade); see the sketch after this list
- Add OS column mapping to normalizer
- Full backend services, DB models, alembic migrations, new routes
- New components: Dashboard, HuntManager, FileUpload, NetworkMap, etc.
- Docker Compose deployment with nginx reverse proxy
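The /api/hunts fix above comes down to a single relationship option. A minimal sketch, assuming SQLAlchemy 2.x declarative models; the attribute names mirror the tables in the initial migration below, but the real models live in app.db.models, which this diff does not show.

# Sketch only: illustrates the lazy="noload" change described in the
# commit message. Model and attribute names are assumptions drawn from
# the migration; the actual app.db.models code is not in this diff.
from sqlalchemy import ForeignKey, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Dataset(Base):
    __tablename__ = "datasets"

    id: Mapped[str] = mapped_column(String(32), primary_key=True)
    # Was lazy="selectin": listing hunts cascaded into loading every row of
    # every dataset, which timed out /api/hunts. With lazy="noload" the
    # collection stays unloaded unless queried explicitly.
    rows: Mapped[list["DatasetRow"]] = relationship(lazy="noload")


class DatasetRow(Base):
    __tablename__ = "dataset_rows"

    id: Mapped[int] = mapped_column(primary_key=True)
    dataset_id: Mapped[str] = mapped_column(ForeignKey("datasets.id"))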
1
backend/alembic/README
Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
67
backend/alembic/env.py
Normal file
@@ -0,0 +1,67 @@
"""Alembic async env — autogenerate from app.db.models."""

import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# Alembic Config
config = context.config

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Import all models so autogenerate sees them
from app.db.engine import Base  # noqa: E402
from app.db import models as _models  # noqa: E402, F401

target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,  # required for SQLite ALTER TABLE
    )
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection):
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        render_as_batch=True,
    )
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Run migrations in 'online' mode with an async engine."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online() -> None:
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
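Not part of the commit, but a common companion to an async env.py like this one: applying migrations from code instead of the CLI. A minimal sketch; the ini path and database URL are illustrative assumptions, not values taken from this repository.

# Sketch: run the migrations above via Alembic's command API.
# "alembic.ini" and the sqlite+aiosqlite URL are assumptions.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", "sqlite+aiosqlite:///./threathunt.db")
command.upgrade(cfg, "head")  # env.py dispatches to run_migrations_online()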
28
backend/alembic/script.py.mako
Normal file
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
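The two revision files that follow are instances of this template, produced with alembic revision --autogenerate: each has the docstring and identifier block filled in, and the upgrade/downgrade bodies replaced with autogenerated commands.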
210
backend/alembic/versions/9790f482da06_initial_schema.py
Normal file
@@ -0,0 +1,210 @@
"""initial schema

Revision ID: 9790f482da06
Revises:
Create Date: 2026-02-19 11:40:02.108830

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '9790f482da06'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('username', sa.String(length=64), nullable=False),
        sa.Column('email', sa.String(length=256), nullable=False),
        sa.Column('hashed_password', sa.String(length=256), nullable=False),
        sa.Column('role', sa.String(length=16), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    with op.batch_alter_table('users', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_users_username'), ['username'], unique=True)

    op.create_table('hunts',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('status', sa.String(length=32), nullable=False),
        sa.Column('owner_id', sa.String(length=32), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('datasets',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('filename', sa.String(length=512), nullable=False),
        sa.Column('source_tool', sa.String(length=64), nullable=True),
        sa.Column('row_count', sa.Integer(), nullable=False),
        sa.Column('column_schema', sa.JSON(), nullable=True),
        sa.Column('normalized_columns', sa.JSON(), nullable=True),
        sa.Column('ioc_columns', sa.JSON(), nullable=True),
        sa.Column('file_size_bytes', sa.Integer(), nullable=False),
        sa.Column('encoding', sa.String(length=32), nullable=True),
        sa.Column('delimiter', sa.String(length=4), nullable=True),
        sa.Column('time_range_start', sa.DateTime(timezone=True), nullable=True),
        sa.Column('time_range_end', sa.DateTime(timezone=True), nullable=True),
        sa.Column('hunt_id', sa.String(length=32), nullable=True),
        sa.Column('uploaded_by', sa.String(length=32), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['hunt_id'], ['hunts.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.create_index('ix_datasets_hunt', ['hunt_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_datasets_name'), ['name'], unique=False)

    op.create_table('hypotheses',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('hunt_id', sa.String(length=32), nullable=True),
        sa.Column('title', sa.String(length=256), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('mitre_technique', sa.String(length=32), nullable=True),
        sa.Column('status', sa.String(length=16), nullable=False),
        sa.Column('evidence_row_ids', sa.JSON(), nullable=True),
        sa.Column('evidence_notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['hunt_id'], ['hunts.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('hypotheses', schema=None) as batch_op:
        batch_op.create_index('ix_hypotheses_hunt', ['hunt_id'], unique=False)

    op.create_table('conversations',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('title', sa.String(length=256), nullable=True),
        sa.Column('hunt_id', sa.String(length=32), nullable=True),
        sa.Column('dataset_id', sa.String(length=32), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ),
        sa.ForeignKeyConstraint(['hunt_id'], ['hunts.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('dataset_rows',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('dataset_id', sa.String(length=32), nullable=False),
        sa.Column('row_index', sa.Integer(), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('normalized_data', sa.JSON(), nullable=True),
        sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('dataset_rows', schema=None) as batch_op:
        batch_op.create_index('ix_dataset_rows_dataset', ['dataset_id'], unique=False)
        batch_op.create_index('ix_dataset_rows_dataset_idx', ['dataset_id', 'row_index'], unique=False)

    op.create_table('enrichment_results',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('ioc_value', sa.String(length=512), nullable=False),
        sa.Column('ioc_type', sa.String(length=32), nullable=False),
        sa.Column('source', sa.String(length=32), nullable=False),
        sa.Column('verdict', sa.String(length=16), nullable=True),
        sa.Column('confidence', sa.Float(), nullable=True),
        sa.Column('raw_result', sa.JSON(), nullable=True),
        sa.Column('summary', sa.Text(), nullable=True),
        sa.Column('dataset_id', sa.String(length=32), nullable=True),
        sa.Column('cached_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('expires_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('enrichment_results', schema=None) as batch_op:
        batch_op.create_index('ix_enrichment_ioc_source', ['ioc_value', 'source'], unique=False)
        batch_op.create_index(batch_op.f('ix_enrichment_results_ioc_value'), ['ioc_value'], unique=False)

    op.create_table('annotations',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('row_id', sa.Integer(), nullable=True),
        sa.Column('dataset_id', sa.String(length=32), nullable=True),
        sa.Column('author_id', sa.String(length=32), nullable=True),
        sa.Column('text', sa.Text(), nullable=False),
        sa.Column('severity', sa.String(length=16), nullable=False),
        sa.Column('tag', sa.String(length=32), nullable=True),
        sa.Column('highlight_color', sa.String(length=16), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
        sa.ForeignKeyConstraint(['dataset_id'], ['datasets.id'], ),
        sa.ForeignKeyConstraint(['row_id'], ['dataset_rows.id'], ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('annotations', schema=None) as batch_op:
        batch_op.create_index('ix_annotations_dataset', ['dataset_id'], unique=False)
        batch_op.create_index('ix_annotations_row', ['row_id'], unique=False)

    op.create_table('messages',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('conversation_id', sa.String(length=32), nullable=False),
        sa.Column('role', sa.String(length=16), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('model_used', sa.String(length=128), nullable=True),
        sa.Column('node_used', sa.String(length=64), nullable=True),
        sa.Column('token_count', sa.Integer(), nullable=True),
        sa.Column('latency_ms', sa.Integer(), nullable=True),
        sa.Column('response_meta', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['conversation_id'], ['conversations.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.create_index('ix_messages_conversation', ['conversation_id'], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.drop_index('ix_messages_conversation')

    op.drop_table('messages')
    with op.batch_alter_table('annotations', schema=None) as batch_op:
        batch_op.drop_index('ix_annotations_row')
        batch_op.drop_index('ix_annotations_dataset')

    op.drop_table('annotations')
    with op.batch_alter_table('enrichment_results', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_enrichment_results_ioc_value'))
        batch_op.drop_index('ix_enrichment_ioc_source')

    op.drop_table('enrichment_results')
    with op.batch_alter_table('dataset_rows', schema=None) as batch_op:
        batch_op.drop_index('ix_dataset_rows_dataset_idx')
        batch_op.drop_index('ix_dataset_rows_dataset')

    op.drop_table('dataset_rows')
    op.drop_table('conversations')
    with op.batch_alter_table('hypotheses', schema=None) as batch_op:
        batch_op.drop_index('ix_hypotheses_hunt')

    op.drop_table('hypotheses')
    with op.batch_alter_table('datasets', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_datasets_name'))
        batch_op.drop_index('ix_datasets_hunt')

    op.drop_table('datasets')
    op.drop_table('hunts')
    with op.batch_alter_table('users', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_users_username'))

    op.drop_table('users')
    # ### end Alembic commands ###
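The batch_alter_table blocks above, and render_as_batch=True in env.py, exist because SQLite supports only a narrow subset of ALTER TABLE; in batch mode Alembic rebuilds the table and copies the data across. A minimal sketch of the kind of operation this enables, using a hypothetical column change that is not part of this commit:

# Hypothetical example, not in this commit: widening a column on SQLite
# requires batch mode, which recreates the table behind the scenes.
import sqlalchemy as sa
from alembic import op


def example_upgrade() -> None:
    with op.batch_alter_table('users', schema=None) as batch_op:
        batch_op.alter_column('role', type_=sa.String(length=32))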
64
backend/alembic/versions/98ab619418bc_add_keyword_themes_and_keywords_tables.py
Normal file
@@ -0,0 +1,64 @@
"""add_keyword_themes_and_keywords_tables

Revision ID: 98ab619418bc
Revises: 9790f482da06
Create Date: 2026-02-19 12:01:38.174653

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '98ab619418bc'
down_revision: Union[str, Sequence[str], None] = '9790f482da06'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('keyword_themes',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('color', sa.String(length=16), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('is_builtin', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('keyword_themes', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_keyword_themes_name'), ['name'], unique=True)

    op.create_table('keywords',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('theme_id', sa.String(length=32), nullable=False),
        sa.Column('value', sa.String(length=256), nullable=False),
        sa.Column('is_regex', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(['theme_id'], ['keyword_themes.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('keywords', schema=None) as batch_op:
        batch_op.create_index('ix_keywords_theme', ['theme_id'], unique=False)
        batch_op.create_index('ix_keywords_value', ['value'], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('keywords', schema=None) as batch_op:
        batch_op.drop_index('ix_keywords_value')
        batch_op.drop_index('ix_keywords_theme')

    op.drop_table('keywords')
    with op.batch_alter_table('keyword_themes', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_keyword_themes_name'))

    op.drop_table('keyword_themes')
    # ### end Alembic commands ###
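The commit message also mentions renaming the 'Social Media (Personal)' theme to 'Social Media' via a DB migration, but that revision is not included in this diff. A hypothetical sketch of its core, assuming it is a plain data migration against the keyword_themes table created above:

# Hypothetical sketch of the theme-rename migration described in the
# commit message; the actual revision file is not shown in this diff.
import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    op.execute(
        sa.text("UPDATE keyword_themes SET name = :new WHERE name = :old")
        .bindparams(old="Social Media (Personal)", new="Social Media")
    )


def downgrade() -> None:
    op.execute(
        sa.text("UPDATE keyword_themes SET name = :old WHERE name = :new")
        .bindparams(old="Social Media (Personal)", new="Social Media")
    )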