两种模式的部署文件

main
mula.liu 2026-04-13 18:10:25 +08:00
parent e8932bec17
commit 7971182478
19 changed files with 702 additions and 894 deletions

View File

@ -22,13 +22,11 @@ PIP_TRUSTED_HOST=pypi.tuna.tsinghua.edu.cn
# Frontend package registry mirror (used by yarn, recommended in CN)
NPM_REGISTRY=https://registry.npmmirror.com
# Database (choose one: SQLite / PostgreSQL / MySQL)
# SQLite example:
# DATABASE_URL=sqlite:////app/data/nanobot_dashboard.db
# PostgreSQL example:
# DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
# MySQL example:
# DATABASE_URL=mysql+pymysql://user:password@127.0.0.1:3306/nanobot_dashboard
# Database for deploy-prod.sh
# This mode now supports external PostgreSQL only.
# Before running deploy-prod.sh, initialize the target database explicitly with:
# scripts/sql/create-tables.sql
# scripts/sql/init-data.sql
DATABASE_URL=postgresql+psycopg://postgres:change_me@127.0.0.1:5432/dashboard
DATABASE_POOL_SIZE=20
DATABASE_MAX_OVERFLOW=40

View File

@ -59,8 +59,8 @@ graph TD
## 默认资源
- 项目根目录 `data/templates/` 保存默认模板资源,会在初始化时同步到运行时数据目录
- 项目根目录 `data/skills/` 保存默认 skill 包,会在数据库初始化阶段自动注册到 `skill_market_item`
- 项目根目录 `data/templates/` 保存默认模板资源,后端运行时直接读取这里的文件,不再在启动阶段做复制或兜底回填
- 项目根目录 `data/skills/` 保存默认 skill 包,数据库初始化阶段会把这些默认 skill 注册到 `skill_market_item`
- `data/model/` 不包含语音识别模型文件;模型需要用户自行下载放入该目录或 `STT_MODEL_DIR` 指向的目录。
- 如果语音模型缺失,后端启动时会打印明确告警,但不会阻断服务启动。
@ -110,6 +110,7 @@ graph TD
1. 准备部署变量
- 复制 `.env.prod.example` 为 `.env.prod`(位于项目根目录)
- `data/` 会自动映射到宿主机项目根目录下的 `./data`
- `deploy-prod.sh` 现在要求使用外部 PostgreSQL,且目标库必须提前执行 `scripts/sql/create-tables.sql` 与 `scripts/sql/init-data.sql`
- 只需要配置绝对路径:
- `HOST_BOTS_WORKSPACE_ROOT`
- 如启用本地语音识别,请将 Whisper `.bin` 模型文件放到宿主机项目根目录的 `data/model/`
@ -127,6 +128,7 @@ graph TD
### 关键说明
- `backend` 不开放宿主机端口,仅在内部网络被 Nginx 访问。
- `deploy-prod.sh` 仅负责前后端容器部署,不会初始化外部数据库;外部 PostgreSQL 需要事先建表并导入初始化数据。
- 上传大小使用单一参数 `UPLOAD_MAX_MB` 控制(后端校验 + Nginx 限制)。
- 必须挂载 `/var/run/docker.sock`,否则后端无法操作 Bot 镜像与容器。
- `data/` 始终绑定到宿主机项目根目录下的 `./data`,其中模板、默认 skills、语音模型和运行数据都落在这里。
@ -145,6 +147,8 @@ graph TD
- `scripts/deploy-full.sh`
- `scripts/init-full-db.sh`
- `scripts/stop-full.sh`
- `scripts/sql/create-tables.sql`
- `scripts/sql/init-data.sql`
- `scripts/sql/init-postgres-bootstrap.sql`
- `scripts/sql/init-postgres-app.sql`
@ -172,7 +176,9 @@ graph TD
- 创建或更新业务账号
- 创建业务库并授权
- 修正 `public` schema 权限
- Dashboard 业务表本身仍由后端启动时自动执行 `SQLModel.metadata.create_all(...)` 与补列/索引对齐。
- 执行 `scripts/sql/create-tables.sql` 创建业务表
- 执行 `scripts/sql/init-data.sql` 初始化 `sys_setting` 与默认 skill 市场数据
- 后端启动时只做初始化完整性校验,不再自动补表、补列、补数据或迁移旧结构;缺库表、缺 `sys_setting`、缺模板文件都会直接报错。
### 停止

View File

@ -9,8 +9,7 @@ from core.docker_instance import docker_manager
from core.speech_service import inspect_speech_model_status
from core.settings import DATABASE_URL_DISPLAY, REDIS_ENABLED
from models.bot import BotInstance
from services.bot_storage_service import _migrate_bot_resources_store
from services.default_assets_service import ensure_default_skill_market_items, ensure_runtime_data_assets
from services.default_assets_service import validate_runtime_data_assets
from services.platform_service import get_speech_runtime_settings, prune_expired_activity_events
from services.runtime_service import docker_callback, set_main_loop
@ -31,24 +30,13 @@ def register_app_runtime(app: FastAPI) -> None:
current_loop = asyncio.get_running_loop()
app.state.main_loop = current_loop
set_main_loop(current_loop)
asset_report = ensure_runtime_data_assets()
if asset_report["templates_initialized"] or asset_report["skills_synchronized"]:
print(
"[init] 默认资源已同步 "
f"(templates={asset_report['templates_initialized']}, skills={asset_report['skills_synchronized']})"
)
validate_runtime_data_assets()
print("[init] data 目录校验通过")
init_database()
with Session(engine) as session:
skill_report = ensure_default_skill_market_items(session)
if skill_report["created"] or skill_report["updated"]:
print(
"[init] 默认 skills 已入库 "
f"(created={len(skill_report['created'])}, updated={len(skill_report['updated'])})"
)
prune_expired_activity_events(session, force=True)
bots = session.exec(select(BotInstance)).all()
for bot in bots:
_migrate_bot_resources_store(bot.id)
docker_manager.ensure_monitor(bot.id, docker_callback)
speech_settings = get_speech_runtime_settings()
model_status = inspect_speech_model_status()

View File

@ -1,5 +1,5 @@
from sqlalchemy import inspect, text
from sqlmodel import SQLModel, Session, create_engine
from sqlmodel import Session, create_engine
from core.settings import (
DATABASE_ECHO,
@ -10,13 +10,6 @@ from core.settings import (
DATABASE_URL,
)
# Ensure table models are registered in SQLModel metadata before create_all.
from models import auth as _auth_models # noqa: F401
from models import bot as _bot_models # noqa: F401
from models import platform as _platform_models # noqa: F401
from models import skill as _skill_models # noqa: F401
from models import topic as _topic_models # noqa: F401
_engine_kwargs = {
"echo": DATABASE_ECHO,
"pool_pre_ping": True,
@ -34,270 +27,69 @@ BOT_IMAGE_TABLE = "bot_image"
BOT_REQUEST_USAGE_TABLE = "bot_request_usage"
BOT_ACTIVITY_EVENT_TABLE = "bot_activity_event"
SYS_LOGIN_LOG_TABLE = "sys_login_log"
LEGACY_AUTH_LOGIN_LOG_TABLE = "auth_login_log"
SYS_SETTING_TABLE = "sys_setting"
POSTGRES_MIGRATION_LOCK_KEY = 2026031801
REQUIRED_TABLES = (
BOT_INSTANCE_TABLE,
BOT_MESSAGE_TABLE,
BOT_IMAGE_TABLE,
BOT_REQUEST_USAGE_TABLE,
BOT_ACTIVITY_EVENT_TABLE,
SYS_LOGIN_LOG_TABLE,
SYS_SETTING_TABLE,
"skill_market_item",
"bot_skill_install",
"topic_topic",
"topic_item",
)
REQUIRED_SYS_SETTING_KEYS = (
"page_size",
"chat_pull_page_size",
"command_auto_unlock_seconds",
"auth_token_ttl_hours",
"auth_token_max_active",
"upload_max_mb",
"allowed_attachment_extensions",
"workspace_download_extensions",
"speech_enabled",
"activity_event_retention_days",
)
def _quote_ident(name: str) -> str:
return f'"{str(name).replace(chr(34), chr(34) * 2)}"'
def _acquire_migration_lock():
    """Take the PostgreSQL advisory lock guarding schema migration.

    Returns the open connection holding the lock so the caller can release it
    later, or None when the dialect is not PostgreSQL (no locking needed).
    """
    if engine.dialect.name != "postgresql":
        return None
    lock_conn = engine.connect()
    lock_conn.execute(text("SELECT pg_advisory_lock(:key)"), {"key": POSTGRES_MIGRATION_LOCK_KEY})
    return lock_conn
def _release_migration_lock(lock_conn) -> None:
    """Release the advisory lock from _acquire_migration_lock and close its connection."""
    if lock_conn is not None:
        try:
            on_postgres = engine.dialect.name == "postgresql"
            if on_postgres:
                lock_conn.execute(text("SELECT pg_advisory_unlock(:key)"), {"key": POSTGRES_MIGRATION_LOCK_KEY})
        finally:
            # Always close, even if the unlock statement fails.
            lock_conn.close()
def _rename_table_if_needed(old_name: str, new_name: str) -> None:
def _validate_required_tables() -> None:
inspector = inspect(engine)
if not inspector.has_table(old_name) or inspector.has_table(new_name):
return
missing = [table_name for table_name in REQUIRED_TABLES if not inspector.has_table(table_name)]
if missing:
raise RuntimeError(
"Database schema is not initialized. "
f"Missing tables: {', '.join(missing)}. "
"Run scripts/init-full-db.sh or apply scripts/sql/create-tables.sql before starting the backend."
)
def _validate_required_sys_settings() -> None:
placeholders = ", ".join(f":k{i}" for i, _ in enumerate(REQUIRED_SYS_SETTING_KEYS))
params = {f"k{i}": key for i, key in enumerate(REQUIRED_SYS_SETTING_KEYS)}
with engine.connect() as conn:
conn.execute(text(f"ALTER TABLE {_quote_ident(old_name)} RENAME TO {_quote_ident(new_name)}"))
conn.commit()
def _ensure_botinstance_columns() -> None:
    """Add columns missing from an older bot_instance schema (idempotent upgrade)."""
    # Column name -> DDL fragment appended to ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "current_state": "TEXT DEFAULT 'IDLE'",
        "last_action": "TEXT",
        "image_tag": "TEXT DEFAULT 'nanobot-base:v0.1.4'",
        "access_password": "TEXT DEFAULT ''",
        "enabled": "BOOLEAN NOT NULL DEFAULT TRUE",
    }
    inspector = inspect(engine)
    if not inspector.has_table(BOT_INSTANCE_TABLE):
        # Fresh database: nothing to patch here.
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(BOT_INSTANCE_TABLE)
            if row.get("name")
        }
        for col, ddl in required_columns.items():
            if col in existing:
                continue
            conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} ADD COLUMN {col} {ddl}"))
        if "enabled" in existing:
            # Backfill NULLs left by an earlier, nullable version of the column.
            conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = TRUE WHERE enabled IS NULL"))
        conn.commit()
def _ensure_sys_setting_columns() -> None:
    """Add columns missing from an older sys_setting schema (idempotent upgrade)."""
    # Column name -> DDL fragment appended to ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "name": "TEXT NOT NULL DEFAULT ''",
        "category": "TEXT NOT NULL DEFAULT 'general'",
        "description": "TEXT NOT NULL DEFAULT ''",
        "value_type": "TEXT NOT NULL DEFAULT 'json'",
        "is_public": "BOOLEAN NOT NULL DEFAULT FALSE",
        "sort_order": "INTEGER NOT NULL DEFAULT 100",
    }
    inspector = inspect(engine)
    if not inspector.has_table(SYS_SETTING_TABLE):
        # Fresh database: nothing to patch here.
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(SYS_SETTING_TABLE)
            if row.get("name")
        }
        for col, ddl in required_columns.items():
            if col in existing:
                continue
            conn.execute(text(f"ALTER TABLE {SYS_SETTING_TABLE} ADD COLUMN {col} {ddl}"))
        conn.commit()
def _ensure_bot_request_usage_columns() -> None:
    """Add columns missing from an older bot_request_usage schema (idempotent upgrade)."""
    # Column name -> DDL fragment appended to ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "message_id": "INTEGER",
        "provider": "TEXT",
        "model": "TEXT",
    }
    inspector = inspect(engine)
    if not inspector.has_table(BOT_REQUEST_USAGE_TABLE):
        # Fresh database: nothing to patch here.
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(BOT_REQUEST_USAGE_TABLE)
            if row.get("name")
        }
        for col, ddl in required_columns.items():
            if col in existing:
                continue
            conn.execute(text(f"ALTER TABLE {BOT_REQUEST_USAGE_TABLE} ADD COLUMN {col} {ddl}"))
        conn.commit()
def _migrate_auth_login_log_table() -> None:
    # Legacy deployments named this table "auth_login_log"; rename it to the
    # current "sys_login_log" when the old name is still present.
    _rename_table_if_needed(LEGACY_AUTH_LOGIN_LOG_TABLE, SYS_LOGIN_LOG_TABLE)
def _ensure_auth_login_log_columns() -> None:
    """Add columns missing from an older sys_login_log schema (idempotent upgrade)."""
    # Column name -> DDL fragment appended to ALTER TABLE ... ADD COLUMN.
    required_columns = {
        "auth_type": "TEXT NOT NULL DEFAULT 'bot'",
        "token_hash": "TEXT",
        "auth_source": "TEXT NOT NULL DEFAULT ''",
        "revoke_reason": "TEXT",
        "device_info": "TEXT",
    }
    inspector = inspect(engine)
    if not inspector.has_table(SYS_LOGIN_LOG_TABLE):
        # Fresh database: nothing to patch here.
        return
    with engine.connect() as conn:
        existing = {
            str(row.get("name"))
            for row in inspect(conn).get_columns(SYS_LOGIN_LOG_TABLE)
            if row.get("name")
        }
        for col, ddl in required_columns.items():
            if col in existing:
                continue
            conn.execute(text(f"ALTER TABLE {SYS_LOGIN_LOG_TABLE} ADD COLUMN {col} {ddl}"))
        conn.commit()
def _ensure_topic_columns() -> None:
    """Add columns missing from older topic_topic / topic_item schemas (idempotent)."""
    # Table name -> (column name -> DDL fragment for ALTER TABLE ... ADD COLUMN).
    required_columns = {
        "topic_topic": {
            "name": "TEXT NOT NULL DEFAULT ''",
            "description": "TEXT NOT NULL DEFAULT ''",
            "is_active": "BOOLEAN NOT NULL DEFAULT TRUE",
            "is_default_fallback": "BOOLEAN NOT NULL DEFAULT FALSE",
            "routing_json": "TEXT NOT NULL DEFAULT '{}'",
            "view_schema_json": "TEXT NOT NULL DEFAULT '{}'",
            "created_at": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
            "updated_at": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
        },
        "topic_item": {
            "title": "TEXT NOT NULL DEFAULT ''",
            "level": "TEXT NOT NULL DEFAULT 'info'",
            "tags_json": "TEXT",
            "view_json": "TEXT",
            "source": "TEXT NOT NULL DEFAULT 'mcp'",
            "dedupe_key": "TEXT",
            "is_read": "BOOLEAN NOT NULL DEFAULT FALSE",
            "created_at": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
        },
    }
    inspector = inspect(engine)
    with engine.connect() as conn:
        for table_name, cols in required_columns.items():
            if not inspector.has_table(table_name):
                # Table absent on fresh installs; skip rather than create here.
                continue
            existing = {
                str(row.get("name"))
                for row in inspector.get_columns(table_name)
                if row.get("name")
            }
            for col, ddl in cols.items():
                if col in existing:
                    continue
                conn.execute(text(f"ALTER TABLE {table_name} ADD COLUMN {col} {ddl}"))
        conn.commit()
def _ensure_platform_indexes() -> None:
    """Create helper indexes for activity/usage lookups (idempotent via IF NOT EXISTS)."""
    inspector = inspect(engine)
    with engine.connect() as conn:
        if inspector.has_table(BOT_ACTIVITY_EVENT_TABLE):
            try:
                # Preferred: a partial index limited to rows that actually carry a request_id.
                conn.execute(
                    text(
                        f"""
                        CREATE INDEX IF NOT EXISTS idx_bot_activity_event_bot_id_request_present
                        ON {BOT_ACTIVITY_EVENT_TABLE} (bot_id)
                        WHERE request_id IS NOT NULL AND request_id <> ''
                        """
                    )
                )
            except Exception:
                # Fall back silently when the current database dialect does not support partial indexes.
                conn.execute(
                    text(
                        f"""
                        CREATE INDEX IF NOT EXISTS idx_bot_activity_event_bot_id
                        ON {BOT_ACTIVITY_EVENT_TABLE} (bot_id)
                        """
                    )
                )
        if inspector.has_table(BOT_REQUEST_USAGE_TABLE):
            conn.execute(
                text(
                    f"""
                    CREATE INDEX IF NOT EXISTS idx_bot_request_usage_started_at_bot_id
                    ON {BOT_REQUEST_USAGE_TABLE} (started_at, bot_id)
                    """
                )
            )
        conn.commit()
def align_postgres_sequences() -> None:
    """Reset PostgreSQL identity sequences to match each table's current MAX(id).

    Needed after rows were inserted with explicit ids (e.g. restored dumps),
    otherwise the next INSERT could collide on the primary key. No-op on
    non-PostgreSQL dialects.
    """
    if engine.dialect.name != "postgresql":
        return
    # (table, id column) pairs whose backing sequences are realigned.
    sequence_targets = [
        (SYS_LOGIN_LOG_TABLE, "id"),
        (BOT_MESSAGE_TABLE, "id"),
        (BOT_REQUEST_USAGE_TABLE, "id"),
        (BOT_ACTIVITY_EVENT_TABLE, "id"),
        ("skill_market_item", "id"),
        ("bot_skill_install", "id"),
    ]
    with engine.connect() as conn:
        for table_name, column_name in sequence_targets:
            # pg_get_serial_sequence yields NULL when the column has no sequence.
            seq_name = conn.execute(
                text("SELECT pg_get_serial_sequence(:table_name, :column_name)"),
                {"table_name": table_name, "column_name": column_name},
            ).scalar()
            if not seq_name:
                continue
            max_id = conn.execute(
                text(f'SELECT COALESCE(MAX("{column_name}"), 0) FROM "{table_name}"')
            ).scalar()
            max_id = int(max_id or 0)
            conn.execute(
                text("SELECT setval(:seq_name, :next_value, :is_called)"),
                {
                    "seq_name": seq_name,
                    # Empty table: set to 1 with is_called=False so the first
                    # nextval() returns 1 instead of 2.
                    "next_value": max_id if max_id > 0 else 1,
                    "is_called": max_id > 0,
                },
            )
        conn.commit()
rows = conn.execute(
text(f'SELECT key FROM "{SYS_SETTING_TABLE}" WHERE key IN ({placeholders})'),
params,
).scalars().all()
present = {str(row or "").strip() for row in rows if str(row or "").strip()}
missing = [key for key in REQUIRED_SYS_SETTING_KEYS if key not in present]
if missing:
raise RuntimeError(
"Database seed data is not initialized. "
f"Missing sys_setting keys: {', '.join(missing)}. "
"Run scripts/init-full-db.sh or apply scripts/sql/init-data.sql before starting the backend."
)
def init_database() -> None:
lock_conn = _acquire_migration_lock()
try:
_migrate_auth_login_log_table()
SQLModel.metadata.create_all(engine)
_ensure_auth_login_log_columns()
_ensure_sys_setting_columns()
_ensure_bot_request_usage_columns()
_ensure_botinstance_columns()
_ensure_topic_columns()
_ensure_platform_indexes()
align_postgres_sequences()
finally:
_release_migration_lock(lock_conn)
with engine.connect() as conn:
conn.execute(text("SELECT 1"))
_validate_required_tables()
_validate_required_sys_settings()
def get_session():

View File

@ -117,11 +117,8 @@ DATA_ROOT: Final[str] = _normalize_dir_path(os.getenv("DATA_ROOT", str(PROJECT_R
BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path(
os.getenv("BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots"))
)
BUNDLED_DATA_ROOT: Final[Path] = (PROJECT_ROOT / "data").resolve()
RUNTIME_DATA_ROOT: Final[Path] = Path(DATA_ROOT).resolve()
BUNDLED_TEMPLATES_ROOT: Final[Path] = (BUNDLED_DATA_ROOT / "templates").resolve()
RUNTIME_TEMPLATES_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "templates").resolve()
BUNDLED_SKILLS_ROOT: Final[Path] = (BUNDLED_DATA_ROOT / "skills").resolve()
RUNTIME_SKILLS_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "skills").resolve()
RUNTIME_MODEL_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "model").resolve()
@ -249,5 +246,3 @@ APP_RELOAD: Final[bool] = _env_bool("APP_RELOAD", False)
AGENT_MD_TEMPLATES_FILE: Final[Path] = RUNTIME_TEMPLATES_ROOT / "agent_md_templates.json"
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = RUNTIME_TEMPLATES_ROOT / "topic_presets.json"
BUNDLED_AGENT_MD_TEMPLATES_FILE: Final[Path] = BUNDLED_TEMPLATES_ROOT / "agent_md_templates.json"
BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = BUNDLED_TEMPLATES_ROOT / "topic_presets.json"

View File

@ -1,73 +0,0 @@
-- Topic subsystem schema (SQLite)
-- Apply manually before/after backend deployment if needed.
BEGIN TRANSACTION;
-- One row per (bot, topic_key): routing rules and the JSON view schema for a topic.
CREATE TABLE IF NOT EXISTS topic_topic (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    bot_id TEXT NOT NULL,
    topic_key TEXT NOT NULL,
    name TEXT NOT NULL DEFAULT '',
    description TEXT NOT NULL DEFAULT '',
    is_active INTEGER NOT NULL DEFAULT 1,
    is_default_fallback INTEGER NOT NULL DEFAULT 0,
    routing_json TEXT NOT NULL DEFAULT '{}',
    view_schema_json TEXT NOT NULL DEFAULT '{}',
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
);
-- Individual items published into a topic (source defaults to 'mcp').
CREATE TABLE IF NOT EXISTS topic_item (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    bot_id TEXT NOT NULL,
    topic_key TEXT NOT NULL,
    title TEXT NOT NULL DEFAULT '',
    content TEXT NOT NULL DEFAULT '',
    level TEXT NOT NULL DEFAULT 'info',
    tags_json TEXT,
    view_json TEXT,
    source TEXT NOT NULL DEFAULT 'mcp',
    dedupe_key TEXT,
    is_read INTEGER NOT NULL DEFAULT 0,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
);
-- A topic key must be unique within one bot.
CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_topic_bot_topic_key
    ON topic_topic(bot_id, topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id
    ON topic_topic(bot_id);
CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key
    ON topic_topic(topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback
    ON topic_topic(bot_id, is_default_fallback);
-- Item lookup indexes: by owner bot, topic, level, source, read flag and recency.
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id
    ON topic_item(bot_id);
CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key
    ON topic_item(topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_item_level
    ON topic_item(level);
CREATE INDEX IF NOT EXISTS idx_topic_item_source
    ON topic_item(source);
CREATE INDEX IF NOT EXISTS idx_topic_item_is_read
    ON topic_item(is_read);
CREATE INDEX IF NOT EXISTS idx_topic_item_created_at
    ON topic_item(created_at);
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at
    ON topic_item(bot_id, topic_key, created_at);
-- Supports de-duplication lookups per bot.
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe
    ON topic_item(bot_id, dedupe_key);
COMMIT;

View File

@ -29,7 +29,6 @@ __all__ = [
"_bot_data_root",
"_clear_bot_dashboard_direct_session",
"_clear_bot_sessions",
"_migrate_bot_resources_store",
"_normalize_env_params",
"_normalize_resource_limits",
"_read_bot_config",
@ -186,30 +185,21 @@ def write_bot_resource_limits(bot_id: str, cpu_cores: Any, memory_mb: Any, stora
def _read_bot_resources(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
cpu_raw: Any = None
memory_raw: Any = None
storage_raw: Any = None
data = _read_json_object(_resources_json_path(bot_id))
if data:
cpu_raw = data.get("cpuCores", data.get("cpu_cores"))
memory_raw = data.get("memoryMB", data.get("memory_mb"))
storage_raw = data.get("storageGB", data.get("storage_gb"))
if cpu_raw is None or memory_raw is None or storage_raw is None:
cfg = config_data if isinstance(config_data, dict) else _read_bot_config(bot_id)
runtime_cfg = cfg.get("runtime")
if isinstance(runtime_cfg, dict):
resources_raw = runtime_cfg.get("resources")
if isinstance(resources_raw, dict):
if cpu_raw is None:
cpu_raw = resources_raw.get("cpuCores", resources_raw.get("cpu_cores"))
if memory_raw is None:
memory_raw = resources_raw.get("memoryMB", resources_raw.get("memory_mb"))
if storage_raw is None:
storage_raw = resources_raw.get("storageGB", resources_raw.get("storage_gb"))
return _normalize_resource_limits(cpu_raw, memory_raw, storage_raw)
_ = config_data
path = _resources_json_path(bot_id)
if not os.path.isfile(path):
raise RuntimeError(
f"Missing bot resource file: {path}. "
"Please restore it or recreate the bot configuration; runtime compatibility fallback has been removed."
)
data = _read_json_object(path)
if not data:
raise RuntimeError(f"Invalid bot resource file: {path}.")
return _normalize_resource_limits(
data.get("cpuCores", data.get("cpu_cores")),
data.get("memoryMB", data.get("memory_mb")),
data.get("storageGB", data.get("storage_gb")),
)
def get_bot_resource_limits(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
@ -230,31 +220,6 @@ def get_bot_workspace_snapshot(bot_id: str, config_data: Optional[Dict[str, Any]
}
def _migrate_bot_resources_store(bot_id: str) -> None:
    """One-time migration: move legacy runtime.resources out of the bot config.

    Writes the legacy values into the dedicated resources.json store (only when
    that file does not exist yet), then strips the legacy "resources" section
    from the bot config and persists the cleaned config.
    """
    config_data = _read_bot_config(bot_id)
    runtime_cfg = config_data.get("runtime")
    resources_raw: Dict[str, Any] = {}
    if isinstance(runtime_cfg, dict):
        legacy_raw = runtime_cfg.get("resources")
        if isinstance(legacy_raw, dict):
            resources_raw = legacy_raw
    path = _resources_json_path(bot_id)
    if not os.path.isfile(path):
        # Seed the new store from the legacy values (keys accepted in both
        # camelCase and snake_case form).
        _write_bot_resources(
            bot_id,
            resources_raw.get("cpuCores", resources_raw.get("cpu_cores")),
            resources_raw.get("memoryMB", resources_raw.get("memory_mb")),
            resources_raw.get("storageGB", resources_raw.get("storage_gb")),
        )
    if isinstance(runtime_cfg, dict) and "resources" in runtime_cfg:
        runtime_cfg.pop("resources", None)
        # Drop the now-empty runtime section entirely.
        if not runtime_cfg:
            config_data.pop("runtime", None)
        _write_bot_config(bot_id, config_data)
def _env_store_path(bot_id: str) -> str:
    """Path of the bot's persisted environment-variable store (env.json)."""
    data_root = _bot_data_root(bot_id)
    return os.path.join(data_root, "env.json")

View File

@ -1,206 +1,42 @@
from __future__ import annotations
import json
import os
import re
import shutil
import zipfile
from pathlib import Path
from typing import Any, Dict, List
from sqlmodel import Session, select
from typing import Dict
from core.settings import (
AGENT_MD_TEMPLATES_FILE,
BUNDLED_AGENT_MD_TEMPLATES_FILE,
BUNDLED_SKILLS_ROOT,
BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE,
DATA_ROOT,
RUNTIME_MODEL_ROOT,
RUNTIME_SKILLS_ROOT,
RUNTIME_TEMPLATES_ROOT,
TOPIC_PRESETS_TEMPLATES_FILE,
)
from core.utils import (
_is_ignored_skill_zip_top_level,
_is_valid_top_level_skill_name,
_read_description_from_text,
_sanitize_skill_market_key,
)
from models.skill import SkillMarketItem
def _copy_if_missing(src: Path, dst: Path) -> bool:
if not src.exists() or not src.is_file():
return False
if src.resolve() == dst.resolve() if dst.exists() else False:
return False
if dst.exists():
return False
dst.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(src, dst)
return True
def _copy_if_different(src: Path, dst: Path) -> bool:
if not src.exists() or not src.is_file():
return False
if src.resolve() == dst.resolve() if dst.exists() else False:
return False
dst.parent.mkdir(parents=True, exist_ok=True)
if dst.exists():
try:
if src.stat().st_size == dst.stat().st_size and src.read_bytes() == dst.read_bytes():
return False
except Exception:
pass
shutil.copy2(src, dst)
return True
def _iter_bundled_skill_packages() -> List[Path]:
    """Return the bundled default skill ``.zip`` packages, sorted by path."""
    root = BUNDLED_SKILLS_ROOT
    if not (root.exists() and root.is_dir()):
        return []
    packages = [entry for entry in root.iterdir() if entry.is_file() and entry.suffix.lower() == ".zip"]
    packages.sort()
    return packages
def ensure_runtime_data_assets() -> Dict[str, int]:
    """Create the runtime data directories and seed templates/skills from the bundle.

    Returns counters: how many template files were initialized and how many
    bundled skill packages were synchronized into the runtime skills dir.
    """
    for directory in (Path(DATA_ROOT), RUNTIME_TEMPLATES_ROOT, RUNTIME_SKILLS_ROOT, RUNTIME_MODEL_ROOT):
        directory.mkdir(parents=True, exist_ok=True)
    template_count = 0
    skill_count = 0
    # Templates are only seeded when missing; skills are refreshed on content change.
    template_pairs = (
        (BUNDLED_AGENT_MD_TEMPLATES_FILE, AGENT_MD_TEMPLATES_FILE),
        (BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE, TOPIC_PRESETS_TEMPLATES_FILE),
    )
    for bundled, runtime in template_pairs:
        if _copy_if_missing(bundled, runtime):
            template_count += 1
    for package in _iter_bundled_skill_packages():
        if _copy_if_different(package, RUNTIME_SKILLS_ROOT / package.name):
            skill_count += 1
    return {
        "templates_initialized": template_count,
        "skills_synchronized": skill_count,
    }
def _extract_skill_zip_summary(zip_path: Path) -> Dict[str, Any]:
entry_names: List[str] = []
description = ""
with zipfile.ZipFile(zip_path) as archive:
members = archive.infolist()
file_members = [member for member in members if not member.is_dir()]
for member in file_members:
raw_name = str(member.filename or "").replace("\\", "/").lstrip("/")
if not raw_name:
continue
first = raw_name.split("/", 1)[0].strip()
if _is_ignored_skill_zip_top_level(first):
continue
if _is_valid_top_level_skill_name(first) and first not in entry_names:
entry_names.append(first)
candidates = sorted(
[
str(member.filename or "").replace("\\", "/").lstrip("/")
for member in file_members
if str(member.filename or "").replace("\\", "/").rsplit("/", 1)[-1].lower()
in {"skill.md", "readme.md"}
],
key=lambda value: (value.count("/"), value.lower()),
def _require_dir(path: Path, *, label: str) -> str:
resolved = path.resolve()
if not resolved.exists() or not resolved.is_dir():
raise RuntimeError(
f"Missing required {label} directory: {resolved}. "
"Please mount the project-root data directory to /app/data before starting the backend."
)
for candidate in candidates:
try:
with archive.open(candidate, "r") as file:
preview = file.read(4096).decode("utf-8", errors="ignore")
description = _read_description_from_text(preview)
if description:
break
except Exception:
continue
return str(resolved)
def _require_file(path: Path, *, label: str) -> str:
resolved = path.resolve()
if not resolved.exists() or not resolved.is_file():
raise RuntimeError(
f"Missing required {label} file: {resolved}. "
"Please restore the tracked files under data/templates before starting the backend."
)
return str(resolved)
def validate_runtime_data_assets() -> Dict[str, str]:
return {
"entry_names": entry_names,
"description": description,
"data_root": _require_dir(Path(DATA_ROOT), label="data"),
"templates_root": _require_dir(RUNTIME_TEMPLATES_ROOT, label="templates"),
"skills_root": _require_dir(RUNTIME_SKILLS_ROOT, label="skills"),
"agent_md_templates_file": _require_file(AGENT_MD_TEMPLATES_FILE, label="agent templates"),
"topic_presets_file": _require_file(TOPIC_PRESETS_TEMPLATES_FILE, label="topic presets"),
}
def _default_display_name(stem: str) -> str:
chunks = [chunk for chunk in re.split(r"[-_]+", str(stem or "").strip()) if chunk]
if not chunks:
return "Skill"
return " ".join(chunk.upper() if chunk.isupper() else chunk.capitalize() for chunk in chunks)
def _resolve_unique_skill_key(existing_keys: set[str], preferred_key: str) -> str:
    """Pick a market key based on *preferred_key* that is unique in *existing_keys*.

    Appends ``-2``, ``-3``, ... until the key is free. The chosen key is added
    to *existing_keys* as a side effect before being returned.
    """
    base_key = _sanitize_skill_market_key(preferred_key) or "skill"
    chosen = base_key
    suffix = 2
    while chosen in existing_keys:
        chosen = f"{base_key}-{suffix}"
        suffix += 1
    existing_keys.add(chosen)
    return chosen
def ensure_default_skill_market_items(session: Session) -> Dict[str, List[str]]:
    """Register the bundled default skill packages in the skill market table.

    Creates missing rows (keyed by zip filename) and refreshes size / entry /
    name metadata on existing rows. Commits only when something changed.
    Returns a report mapping "created" / "updated" to lists of zip filenames.
    """
    report: Dict[str, List[str]] = {"created": [], "updated": []}
    default_packages = _iter_bundled_skill_packages()
    if not default_packages:
        return report
    rows = session.exec(select(SkillMarketItem)).all()
    # Index current rows by zip filename; collect used keys for uniqueness checks.
    existing_by_zip = {str(row.zip_filename or "").strip(): row for row in rows if str(row.zip_filename or "").strip()}
    existing_keys = {str(row.skill_key or "").strip() for row in rows if str(row.skill_key or "").strip()}
    for bundled_path in default_packages:
        # Prefer the runtime copy (may have been synchronized) over the bundle.
        runtime_path = RUNTIME_SKILLS_ROOT / bundled_path.name
        source_path = runtime_path if runtime_path.exists() else bundled_path
        try:
            summary = _extract_skill_zip_summary(source_path)
        except Exception:
            # Skip unreadable/corrupt archives instead of failing startup.
            continue
        zip_filename = bundled_path.name
        entry_names_json = json.dumps(summary["entry_names"], ensure_ascii=False)
        display_name = _default_display_name((summary["entry_names"] or [bundled_path.stem])[0])
        zip_size_bytes = int(source_path.stat().st_size) if source_path.exists() else 0
        row = existing_by_zip.get(zip_filename)
        if row is None:
            # New package: insert a fresh market row with a unique skill key.
            row = SkillMarketItem(
                skill_key=_resolve_unique_skill_key(existing_keys, bundled_path.stem),
                display_name=display_name,
                description=str(summary["description"] or "").strip(),
                zip_filename=zip_filename,
                zip_size_bytes=zip_size_bytes,
                entry_names_json=entry_names_json,
            )
            session.add(row)
            existing_by_zip[zip_filename] = row
            report["created"].append(zip_filename)
            continue
        # Existing row: only touch fields that changed or are still empty.
        changed = False
        if int(row.zip_size_bytes or 0) != zip_size_bytes:
            row.zip_size_bytes = zip_size_bytes
            changed = True
        if str(row.entry_names_json or "") != entry_names_json:
            row.entry_names_json = entry_names_json
            changed = True
        if not str(row.display_name or "").strip():
            row.display_name = display_name
            changed = True
        if not str(row.description or "").strip() and str(summary["description"] or "").strip():
            row.description = str(summary["description"] or "").strip()
            changed = True
        if changed:
            report["updated"].append(zip_filename)
    if report["created"] or report["updated"]:
        session.commit()
    return report

View File

@ -13,9 +13,7 @@ from services.platform_settings_service import (
SETTING_KEYS,
SYSTEM_SETTING_DEFINITIONS,
create_or_update_system_setting,
default_platform_settings,
delete_system_setting,
ensure_default_system_settings,
get_activity_event_retention_days,
get_auth_token_max_active,
get_auth_token_ttl_hours,
@ -29,6 +27,7 @@ from services.platform_settings_service import (
get_workspace_download_extensions,
list_system_settings,
save_platform_settings,
validate_required_system_settings,
)
from services.platform_usage_service import (
bind_usage_message,

View File

@ -1,5 +1,4 @@
import json
import os
import re
from datetime import datetime
from typing import Any, Dict, List
@ -34,19 +33,6 @@ SETTING_KEYS = (
"speech_enabled",
)
PROTECTED_SETTING_KEYS = set(SETTING_KEYS) | {ACTIVITY_EVENT_RETENTION_SETTING_KEY}
DEPRECATED_SETTING_KEYS = {
"loading_page",
"speech_max_audio_seconds",
"speech_default_language",
"speech_force_simplified",
"speech_audio_preprocess",
"speech_audio_filter",
"speech_initial_prompt",
"sys_auth_token_ttl_days",
"auth_token_ttl_days",
"panel_session_ttl_days",
"bot_session_ttl_days",
}
SYSTEM_SETTING_DEFINITIONS: Dict[str, Dict[str, Any]] = {
"page_size": {
"name": "分页大小",
@ -144,15 +130,6 @@ SYSTEM_SETTING_DEFINITIONS: Dict[str, Dict[str, Any]] = {
def _utcnow() -> datetime:
return datetime.utcnow()
def _normalize_activity_event_retention_days(raw: Any) -> int:
try:
value = int(raw)
except Exception:
value = DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS
return max(1, min(3650, value))
def _normalize_extension(raw: Any) -> str:
text = str(raw or "").strip().lower()
if not text:
@ -177,77 +154,6 @@ def _normalize_extension_list(rows: Any) -> List[str]:
return normalized
def _legacy_env_int(name: str, default: int, min_value: int, max_value: int) -> int:
raw = os.getenv(name)
if raw is None:
return default
try:
value = int(str(raw).strip())
except Exception:
value = default
return max(min_value, min(max_value, value))
def _legacy_env_bool(name: str, default: bool) -> bool:
raw = os.getenv(name)
if raw is None:
return default
return str(raw).strip().lower() in {"1", "true", "yes", "on"}
def _legacy_env_extensions(name: str, default: List[str]) -> List[str]:
    """Read a delimiter-separated extension list from env var *name*.

    Unset returns a copy of *default*; otherwise tokens (split on commas,
    semicolons or whitespace) are normalized and de-duplicated in order.
    """
    raw = os.getenv(name)
    if raw is None:
        return list(default)
    normalized: List[str] = []
    for token in re.split(r"[,;\s]+", str(raw)):
        ext = _normalize_extension(token)
        if ext and ext not in normalized:
            normalized.append(ext)
    return normalized
def _bootstrap_platform_setting_values() -> Dict[str, Any]:
    """Build the bootstrap platform-setting values from legacy environment variables.

    Each entry falls back to the module default when the env var is unset;
    integer values are clamped to the same ranges the settings UI enforces.
    """
    return {
        "page_size": _legacy_env_int("PAGE_SIZE", DEFAULT_PAGE_SIZE, 1, 100),
        "chat_pull_page_size": _legacy_env_int(
            "CHAT_PULL_PAGE_SIZE",
            DEFAULT_CHAT_PULL_PAGE_SIZE,
            10,
            500,
        ),
        "command_auto_unlock_seconds": _legacy_env_int(
            "COMMAND_AUTO_UNLOCK_SECONDS",
            DEFAULT_COMMAND_AUTO_UNLOCK_SECONDS,
            1,
            600,
        ),
        "auth_token_ttl_hours": _legacy_env_int(
            "AUTH_TOKEN_TTL_HOURS",
            DEFAULT_AUTH_TOKEN_TTL_HOURS,
            1,
            720,
        ),
        "auth_token_max_active": _legacy_env_int(
            "AUTH_TOKEN_MAX_ACTIVE",
            DEFAULT_AUTH_TOKEN_MAX_ACTIVE,
            1,
            20,
        ),
        "upload_max_mb": _legacy_env_int("UPLOAD_MAX_MB", DEFAULT_UPLOAD_MAX_MB, 1, 2048),
        "allowed_attachment_extensions": _legacy_env_extensions(
            "ALLOWED_ATTACHMENT_EXTENSIONS",
            list(DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS),
        ),
        "workspace_download_extensions": _legacy_env_extensions(
            "WORKSPACE_DOWNLOAD_EXTENSIONS",
            list(DEFAULT_WORKSPACE_DOWNLOAD_EXTENSIONS),
        ),
        "speech_enabled": _legacy_env_bool("STT_ENABLED", STT_ENABLED_DEFAULT),
    }
def _normalize_setting_key(raw: Any) -> str:
text = str(raw or "").strip()
return re.sub(r"[^a-zA-Z0-9_.-]+", "_", text).strip("._-").lower()

View File

@ -21,7 +21,6 @@ from services.platform_settings_core import (
DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS,
SETTING_KEYS,
SYSTEM_SETTING_DEFINITIONS,
_bootstrap_platform_setting_values,
_normalize_extension_list,
_read_setting_value,
_upsert_setting_row,
@ -29,57 +28,42 @@ from services.platform_settings_core import (
from services.platform_system_settings_service import (
create_or_update_system_setting,
delete_system_setting,
ensure_default_system_settings,
get_activity_event_retention_days,
list_system_settings,
validate_required_system_settings,
)
def default_platform_settings() -> PlatformSettingsPayload:
    """Build the default settings payload from the bootstrap env values."""
    values = _bootstrap_platform_setting_values()
    # Integer-typed fields share the same int() coercion.
    int_fields = (
        "page_size",
        "chat_pull_page_size",
        "command_auto_unlock_seconds",
        "auth_token_ttl_hours",
        "auth_token_max_active",
        "upload_max_mb",
    )
    kwargs: Dict[str, Any] = {field: int(values[field]) for field in int_fields}
    # List fields are copied so callers cannot mutate the bootstrap dict.
    kwargs["allowed_attachment_extensions"] = list(values["allowed_attachment_extensions"])
    kwargs["workspace_download_extensions"] = list(values["workspace_download_extensions"])
    kwargs["speech_enabled"] = bool(values["speech_enabled"])
    return PlatformSettingsPayload(**kwargs)
def get_platform_settings(session: Session) -> PlatformSettingsPayload:
defaults = default_platform_settings()
ensure_default_system_settings(session)
validate_required_system_settings(session)
rows = session.exec(select(PlatformSetting).where(PlatformSetting.key.in_(SETTING_KEYS))).all()
data: Dict[str, Any] = {row.key: _read_setting_value(row) for row in rows}
merged = defaults.model_dump()
merged["page_size"] = max(1, min(100, int(data.get("page_size") or merged["page_size"])))
merged["chat_pull_page_size"] = max(10, min(500, int(data.get("chat_pull_page_size") or merged["chat_pull_page_size"])))
merged["command_auto_unlock_seconds"] = max(
1,
min(600, int(data.get("command_auto_unlock_seconds") or merged["command_auto_unlock_seconds"])),
)
merged["auth_token_ttl_hours"] = max(
1,
min(720, int(data.get("auth_token_ttl_hours") or merged["auth_token_ttl_hours"])),
)
merged["auth_token_max_active"] = max(
1,
min(20, int(data.get("auth_token_max_active") or merged["auth_token_max_active"])),
)
merged["upload_max_mb"] = int(data.get("upload_max_mb") or merged["upload_max_mb"])
merged["allowed_attachment_extensions"] = _normalize_extension_list(
data.get("allowed_attachment_extensions", merged["allowed_attachment_extensions"])
)
merged["workspace_download_extensions"] = _normalize_extension_list(
data.get("workspace_download_extensions", merged["workspace_download_extensions"])
)
merged["speech_enabled"] = bool(data.get("speech_enabled", merged["speech_enabled"]))
return PlatformSettingsPayload.model_validate(merged)
missing = [key for key in SETTING_KEYS if key not in data]
if missing:
raise RuntimeError(
"Database seed data is not initialized. "
f"Missing sys_setting keys: {', '.join(missing)}. "
"Run scripts/init-full-db.sh or apply scripts/sql/init-data.sql before starting the backend."
)
try:
return PlatformSettingsPayload.model_validate(
{
"page_size": max(1, min(100, int(data["page_size"]))),
"chat_pull_page_size": max(10, min(500, int(data["chat_pull_page_size"]))),
"command_auto_unlock_seconds": max(1, min(600, int(data["command_auto_unlock_seconds"]))),
"auth_token_ttl_hours": max(1, min(720, int(data["auth_token_ttl_hours"]))),
"auth_token_max_active": max(1, min(20, int(data["auth_token_max_active"]))),
"upload_max_mb": int(data["upload_max_mb"]),
"allowed_attachment_extensions": _normalize_extension_list(data["allowed_attachment_extensions"]),
"workspace_download_extensions": _normalize_extension_list(data["workspace_download_extensions"]),
"speech_enabled": bool(data["speech_enabled"]),
}
)
except Exception as exc:
raise RuntimeError(
"sys_setting contains invalid platform configuration values. "
"Fix the rows manually or reapply scripts/sql/init-data.sql."
) from exc
def save_platform_settings(session: Session, payload: PlatformSettingsPayload) -> PlatformSettingsPayload:

View File

@ -1,4 +1,3 @@
import json
from typing import Any, Dict, List
from sqlmodel import Session, select
@ -7,128 +6,35 @@ from models.platform import PlatformSetting
from schemas.platform import SystemSettingPayload
from services.platform_settings_core import (
ACTIVITY_EVENT_RETENTION_SETTING_KEY,
DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS,
DEPRECATED_SETTING_KEYS,
PROTECTED_SETTING_KEYS,
SETTING_KEYS,
SYSTEM_SETTING_DEFINITIONS,
_bootstrap_platform_setting_values,
_normalize_activity_event_retention_days,
_normalize_setting_key,
_read_setting_value,
_setting_item_from_row,
_upsert_setting_row,
_utcnow,
)
def _coerce_auth_ttl_hours_from_legacy(value: Any) -> int:
try:
normalized = int(value)
except Exception:
normalized = 0
return max(1, min(720, normalized * 24))
REQUIRED_SYSTEM_SETTING_KEYS = tuple(SYSTEM_SETTING_DEFINITIONS.keys())
def ensure_default_system_settings(session: Session) -> None:
    """Migrate legacy setting rows and guarantee every defined setting exists.

    Three passes over the ``sys_setting`` table:
    1. If a legacy aggregate row keyed ``"global"`` exists, explode its dict
       into one row per ``SETTING_KEYS`` entry, then delete it.
    2. Delete rows for ``DEPRECATED_SETTING_KEYS``, capturing a legacy
       day-based auth TTL (converted to hours) before it is removed.
    3. For every ``SYSTEM_SETTING_DEFINITIONS`` entry, insert a missing row
       or realign metadata on the existing one (protected keys are forced to
       match the definition; others are only backfilled when empty).
    Commits only when something actually changed.
    """
    bootstrap_values = _bootstrap_platform_setting_values()
    legacy_row = session.get(PlatformSetting, "global")
    if legacy_row is not None:
        # Best-effort read of the old aggregate payload; a corrupt row
        # degrades to an empty dict so migration still removes it.
        try:
            legacy_data = _read_setting_value(legacy_row)
        except Exception:
            legacy_data = {}
        if isinstance(legacy_data, dict):
            for key in SETTING_KEYS:
                meta = SYSTEM_SETTING_DEFINITIONS[key]
                # Value precedence: legacy aggregate -> env bootstrap -> definition default.
                _upsert_setting_row(
                    session,
                    key,
                    name=str(meta["name"]),
                    category=str(meta["category"]),
                    description=str(meta["description"]),
                    value_type=str(meta["value_type"]),
                    value=legacy_data.get(key, bootstrap_values.get(key, meta["value"])),
                    is_public=bool(meta["is_public"]),
                    sort_order=int(meta["sort_order"]),
                )
        session.delete(legacy_row)
        session.commit()
    # Hours converted from a deprecated day-based TTL row, if one is found.
    legacy_auth_ttl_hours = None
    dirty = False
    for key in DEPRECATED_SETTING_KEYS:
        legacy_row = session.get(PlatformSetting, key)
        if legacy_row is not None:
            # Only the first TTL-bearing deprecated row wins.
            if key in {"sys_auth_token_ttl_days", "auth_token_ttl_days"} and legacy_auth_ttl_hours is None:
                try:
                    legacy_auth_ttl_hours = _coerce_auth_ttl_hours_from_legacy(_read_setting_value(legacy_row))
                except Exception:
                    legacy_auth_ttl_hours = None
            session.delete(legacy_row)
            dirty = True
    for key, meta in SYSTEM_SETTING_DEFINITIONS.items():
        row = session.get(PlatformSetting, key)
        default_value = bootstrap_values.get(key, meta["value"])
        if key == "auth_token_ttl_hours" and legacy_auth_ttl_hours is not None:
            default_value = legacy_auth_ttl_hours
        if row is None:
            # Missing setting: create it with full metadata from the definition.
            _upsert_setting_row(
                session,
                key,
                name=str(meta["name"]),
                category=str(meta["category"]),
                description=str(meta["description"]),
                value_type=str(meta["value_type"]),
                value=default_value,
                is_public=bool(meta["is_public"]),
                sort_order=int(meta["sort_order"]),
            )
            dirty = True
            continue
        changed = False
        if key == "auth_token_ttl_hours" and legacy_auth_ttl_hours is not None:
            try:
                current_value = int(_read_setting_value(row))
            except Exception:
                current_value = int(meta["value"])
            # Adopt the migrated TTL only when the row still holds the stock
            # default (i.e. the operator never customized it).
            if current_value == int(meta["value"]) and legacy_auth_ttl_hours != current_value:
                row.value_type = str(meta["value_type"])
                row.value_json = json.dumps(legacy_auth_ttl_hours, ensure_ascii=False)
                changed = True
        for field in ("name", "category", "description", "value_type"):
            value = str(meta[field])
            if key in PROTECTED_SETTING_KEYS:
                # Protected keys: metadata must match the definition exactly.
                if getattr(row, field) != value:
                    setattr(row, field, value)
                    changed = True
            elif not getattr(row, field):
                # Unprotected keys: only backfill empty metadata fields.
                setattr(row, field, value)
                changed = True
        if key in PROTECTED_SETTING_KEYS:
            if int(getattr(row, "sort_order", 100) or 100) != int(meta["sort_order"]):
                row.sort_order = int(meta["sort_order"])
                changed = True
            if bool(getattr(row, "is_public", False)) != bool(meta["is_public"]):
                row.is_public = bool(meta["is_public"])
                changed = True
        elif getattr(row, "sort_order", None) is None:
            row.sort_order = int(meta["sort_order"])
            changed = True
        if key not in PROTECTED_SETTING_KEYS and getattr(row, "is_public", None) is None:
            row.is_public = bool(meta["is_public"])
            changed = True
        if changed:
            row.updated_at = _utcnow()
            session.add(row)
            dirty = True
    if dirty:
        session.commit()
def validate_required_system_settings(session: Session) -> None:
    """Fail fast when any required sys_setting row is absent.

    Raises RuntimeError listing the missing keys and pointing the operator
    at the offline seed scripts; does nothing when all keys are present.
    """
    query = select(PlatformSetting.key).where(PlatformSetting.key.in_(REQUIRED_SYSTEM_SETTING_KEYS))
    found = set()
    for raw_key in session.exec(query).all():
        cleaned = str(raw_key or "").strip()
        if cleaned:
            found.add(cleaned)
    absent = [key for key in REQUIRED_SYSTEM_SETTING_KEYS if key not in found]
    if absent:
        raise RuntimeError(
            "Database seed data is not initialized. "
            f"Missing sys_setting keys: {', '.join(absent)}. "
            "Run scripts/init-full-db.sh or apply scripts/sql/init-data.sql before starting the backend."
        )
def list_system_settings(session: Session, search: str = "") -> List[Dict[str, Any]]:
ensure_default_system_settings(session)
validate_required_system_settings(session)
stmt = select(PlatformSetting).order_by(PlatformSetting.sort_order.asc(), PlatformSetting.key.asc())
rows = session.exec(stmt).all()
keyword = str(search or "").strip().lower()
@ -146,7 +52,6 @@ def list_system_settings(session: Session, search: str = "") -> List[Dict[str, A
def create_or_update_system_setting(session: Session, payload: SystemSettingPayload) -> Dict[str, Any]:
ensure_default_system_settings(session)
normalized_key = _normalize_setting_key(payload.key)
definition = SYSTEM_SETTING_DEFINITIONS.get(normalized_key, {})
row = _upsert_setting_row(
@ -181,11 +86,19 @@ def delete_system_setting(session: Session, key: str) -> None:
def get_activity_event_retention_days(session: Session) -> int:
validate_required_system_settings(session)
row = session.get(PlatformSetting, ACTIVITY_EVENT_RETENTION_SETTING_KEY)
if row is None:
return DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS
raise RuntimeError(
"Database seed data is not initialized. "
f"Missing sys_setting key: {ACTIVITY_EVENT_RETENTION_SETTING_KEY}. "
"Run scripts/init-full-db.sh or apply scripts/sql/init-data.sql before starting the backend."
)
try:
value = _read_setting_value(row)
except Exception:
value = DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS
return _normalize_activity_event_retention_days(value)
value = int(_read_setting_value(row))
except Exception as exc:
raise RuntimeError(
f"sys_setting value is invalid for key: {ACTIVITY_EVENT_RETENTION_SETTING_KEY}. "
"Fix the row manually or reapply scripts/sql/init-data.sql."
) from exc
return max(1, min(3650, value))

View File

@ -1,32 +1,30 @@
from __future__ import annotations
from pathlib import Path
from typing import Any, Dict, List
from core.settings import (
AGENT_MD_TEMPLATES_FILE,
BUNDLED_AGENT_MD_TEMPLATES_FILE,
BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE,
TOPIC_PRESETS_TEMPLATES_FILE,
)
from core.settings import AGENT_MD_TEMPLATES_FILE, TOPIC_PRESETS_TEMPLATES_FILE
TEMPLATE_KEYS = ("agents_md", "soul_md", "user_md", "tools_md", "identity_md")
def _load_json_object(path: str, fallback_path: str = "") -> Dict[str, Any]:
def _load_json_object(path: Path, *, label: str) -> Dict[str, Any]:
import json
for candidate in [path, fallback_path]:
candidate = str(candidate or "").strip()
if not candidate:
continue
try:
with open(candidate, "r", encoding="utf-8") as file:
data = json.load(file)
if isinstance(data, dict):
return data
except Exception:
continue
return {}
target = Path(path).resolve()
if not target.is_file():
raise RuntimeError(
f"Missing required {label} file: {target}. "
"Please restore the tracked files under data/templates before starting the backend."
)
try:
with target.open("r", encoding="utf-8") as file:
data = json.load(file)
except Exception as exc:
raise RuntimeError(f"Invalid JSON in {label} file: {target}") from exc
if not isinstance(data, dict):
raise RuntimeError(f"{label} file must contain a JSON object: {target}")
return data
def _normalize_md_text(value: Any) -> str:
@ -45,12 +43,12 @@ def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
def get_agent_md_templates() -> Dict[str, str]:
raw = _load_json_object(str(AGENT_MD_TEMPLATES_FILE), str(BUNDLED_AGENT_MD_TEMPLATES_FILE))
raw = _load_json_object(AGENT_MD_TEMPLATES_FILE, label="agent templates")
return {key: _normalize_md_text(raw.get(key)) for key in TEMPLATE_KEYS}
def get_topic_presets() -> Dict[str, Any]:
raw = _load_json_object(str(TOPIC_PRESETS_TEMPLATES_FILE), str(BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE))
raw = _load_json_object(TOPIC_PRESETS_TEMPLATES_FILE, label="topic presets")
presets = raw.get("presets")
if not isinstance(presets, list):
return {"presets": []}

View File

@ -116,9 +116,9 @@ Dashboard 渠道对话历史(用于会话回放与反馈)。
## 4. 初始化与迁移策略
服务启动时(`backend/core/database.py`
数据库初始化改为离线显式执行
1. 使用 PostgreSQL Advisory Lock 确保多节点部署时的单实例初始化
2. `SQLModel.metadata.create_all(engine)` 自动创建缺失表
3. 执行列对齐检查,确保旧表结构平滑升级
4. 自动对齐 PostgreSQL Sequences 以防 ID 冲突
1. `scripts/sql/create-tables.sql` 负责创建业务表和索引
2. `scripts/sql/init-data.sql` 负责初始化 `sys_setting` 和默认 `skill_market_item` 数据
3. `scripts/init-full-db.sh` 在完整部署场景下会按顺序执行 PostgreSQL 引导 SQL、建表 SQL、初始化数据 SQL
4. 后端启动时(`backend/core/database.py`)只校验必需表和核心 `sys_setting` 是否已经存在;若缺失则直接中止启动,不再做运行时迁移或结构修复

View File

@ -5,6 +5,10 @@ ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ENV_FILE="${1:-$ROOT_DIR/.env.full}"
COMPOSE_FILE="$ROOT_DIR/docker-compose.full.yml"
DATA_DIR="$ROOT_DIR/data"
INIT_DB_SCRIPT="$ROOT_DIR/scripts/init-full-db.sh"
AGENT_TEMPLATES_FILE="$DATA_DIR/templates/agent_md_templates.json"
TOPIC_PRESETS_FILE="$DATA_DIR/templates/topic_presets.json"
SKILLS_DIR="$DATA_DIR/skills"
require_file() {
local path="$1"
@ -19,6 +23,19 @@ require_file() {
exit 1
}
require_dir() {
    # Abort deployment unless the given directory exists; an optional
    # second argument is printed as a hint for the operator.
    local path="$1"
    local hint="$2"
    if [[ ! -d "$path" ]]; then
        echo "Missing directory: $path"
        if [[ -n "$hint" ]]; then
            echo "$hint"
        fi
        exit 1
    fi
}
require_env() {
local name="$1"
if [[ -n "${!name:-}" ]]; then
@ -89,6 +106,10 @@ wait_for_health() {
require_file "$ENV_FILE" "Create it from: $ROOT_DIR/.env.full.example"
require_file "$COMPOSE_FILE" ""
require_file "$INIT_DB_SCRIPT" ""
require_file "$AGENT_TEMPLATES_FILE" "Expected tracked template file under project-root data/templates/"
require_file "$TOPIC_PRESETS_FILE" "Expected tracked template file under project-root data/templates/"
require_dir "$SKILLS_DIR" "Expected tracked skills directory under project-root data/skills/"
load_env_var HOST_BOTS_WORKSPACE_ROOT
load_env_var POSTGRES_SUPERUSER postgres
@ -125,7 +146,7 @@ wait_for_health "dashboard-nanobot-postgres" 120
wait_for_health "dashboard-nanobot-redis" 60
echo "[deploy-full] initializing application database"
"$ROOT_DIR/scripts/init-full-db.sh" "$ENV_FILE"
"$INIT_DB_SCRIPT" "$ENV_FILE"
echo "[deploy-full] starting backend and nginx"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d --build backend nginx

View File

@ -4,6 +4,81 @@ set -euo pipefail
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ENV_FILE="${1:-$ROOT_DIR/.env.prod}"
DATA_DIR="$ROOT_DIR/data"
COMPOSE_FILE="$ROOT_DIR/docker-compose.prod.yml"
AGENT_TEMPLATES_FILE="$DATA_DIR/templates/agent_md_templates.json"
TOPIC_PRESETS_FILE="$DATA_DIR/templates/topic_presets.json"
SKILLS_DIR="$DATA_DIR/skills"
require_file() {
    # Abort deployment unless the given file exists; an optional
    # second argument is printed as a hint for the operator.
    local path="$1"
    local hint="$2"
    if [[ ! -f "$path" ]]; then
        echo "Missing file: $path"
        if [[ -n "$hint" ]]; then
            echo "$hint"
        fi
        exit 1
    fi
}
require_dir() {
    # Abort deployment unless the given directory exists; an optional
    # second argument is printed as a hint for the operator.
    local path="$1"
    local hint="$2"
    if [[ ! -d "$path" ]]; then
        echo "Missing directory: $path"
        if [[ -n "$hint" ]]; then
            echo "$hint"
        fi
        exit 1
    fi
}
require_env() {
    # Abort unless the named shell variable is set to a non-empty value.
    local name="$1"
    if [[ -z "${!name:-}" ]]; then
        echo "Missing required env: $name"
        exit 1
    fi
}
read_env_value() {
    # Print the value of KEY from $ENV_FILE (dotenv format) and return 0,
    # or return 1 when the key is not found. Handles CRLF line endings,
    # blank lines, '#' comments, an optional 'export ' prefix, and strips
    # one matching pair of surrounding single or double quotes.
    local key="$1"
    local line=""
    local value=""
    # '|| [[ -n "$line" ]]' keeps the final line even without a trailing newline.
    while IFS= read -r line || [[ -n "$line" ]]; do
        line="${line%$'\r'}"
        # Skip lines that are empty or whitespace-only.
        [[ -z "${line//[[:space:]]/}" ]] && continue
        # Skip comment lines (leading '#').
        [[ "${line#\#}" != "$line" ]] && continue
        # Drop a leading 'export ' prefix if present.
        [[ "${line#export }" != "$line" ]] && line="${line#export }"
        # Only an exact "KEY=" prefix matches; otherwise keep scanning.
        [[ "$line" == "$key="* ]] || continue
        value="${line#*=}"
        # Unwrap "..." or '...' quoting around the whole value.
        if [[ "$value" =~ ^\"(.*)\"$ ]]; then
            value="${BASH_REMATCH[1]}"
        elif [[ "$value" =~ ^\'(.*)\'$ ]]; then
            value="${BASH_REMATCH[1]}"
        fi
        # printf (not echo) so the value is emitted verbatim, no newline.
        printf '%s' "$value"
        return 0
    done < "$ENV_FILE"
    return 1
}
load_env_var() {
    # Assign shell variable NAME from $ENV_FILE, falling back to an
    # optional default (second argument) when the key is absent or empty.
    local name="$1"
    local fallback="${2:-}"
    local resolved=""
    resolved="$(read_env_value "$name" || true)"
    if [[ -z "$resolved" ]]; then
        resolved="$fallback"
    fi
    printf -v "$name" '%s' "$resolved"
}
if [[ ! -f "$ENV_FILE" ]]; then
echo "Missing env file: $ENV_FILE"
@ -11,12 +86,41 @@ if [[ ! -f "$ENV_FILE" ]]; then
exit 1
fi
require_file "$COMPOSE_FILE" ""
require_file "$AGENT_TEMPLATES_FILE" "Expected tracked template file under project-root data/templates/"
require_file "$TOPIC_PRESETS_FILE" "Expected tracked template file under project-root data/templates/"
require_dir "$SKILLS_DIR" "Expected tracked skills directory under project-root data/skills/"
load_env_var HOST_BOTS_WORKSPACE_ROOT
load_env_var DATABASE_URL
load_env_var NGINX_PORT 8080
load_env_var REDIS_ENABLED false
load_env_var REDIS_URL
require_env HOST_BOTS_WORKSPACE_ROOT
require_env DATABASE_URL
require_env NGINX_PORT
if [[ "$DATABASE_URL" != postgresql* ]]; then
echo "Unsupported DATABASE_URL for deploy-prod.sh: $DATABASE_URL"
echo "deploy-prod.sh now supports external PostgreSQL only."
echo "If you need one-click PostgreSQL + Redis deployment, use scripts/deploy-full.sh."
exit 1
fi
if [[ "${REDIS_ENABLED,,}" =~ ^(1|true|yes|on)$ ]] && [[ -z "$REDIS_URL" ]]; then
echo "Missing required env: REDIS_URL"
exit 1
fi
echo "[deploy] using env: $ENV_FILE"
mkdir -p "$DATA_DIR" "$DATA_DIR/model"
docker compose --env-file "$ENV_FILE" -f "$ROOT_DIR/docker-compose.prod.yml" config -q
docker compose --env-file "$ENV_FILE" -f "$ROOT_DIR/docker-compose.prod.yml" up -d --build
mkdir -p "$DATA_DIR" "$DATA_DIR/model" "$HOST_BOTS_WORKSPACE_ROOT"
echo "[deploy] expecting external PostgreSQL to be pre-initialized with scripts/sql/create-tables.sql and scripts/sql/init-data.sql"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" config -q
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d --build
echo "[deploy] service status"
docker compose --env-file "$ENV_FILE" -f "$ROOT_DIR/docker-compose.prod.yml" ps
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" ps
echo "[deploy] done"

View File

@ -6,6 +6,8 @@ ENV_FILE="${1:-$ROOT_DIR/.env.full}"
COMPOSE_FILE="$ROOT_DIR/docker-compose.full.yml"
BOOTSTRAP_SQL="$ROOT_DIR/scripts/sql/init-postgres-bootstrap.sql"
APP_SQL="$ROOT_DIR/scripts/sql/init-postgres-app.sql"
SCHEMA_SQL="$ROOT_DIR/scripts/sql/create-tables.sql"
SEED_SQL="$ROOT_DIR/scripts/sql/init-data.sql"
require_file() {
local path="$1"
@ -65,6 +67,31 @@ load_env_var() {
printf -v "$name" '%s' "$value"
}
csv_to_json_array() {
    # Convert a comma-separated list into a JSON array string.
    # Each item is trimmed of surrounding whitespace; empty items are
    # dropped; backslashes and double quotes are escaped for JSON.
    #
    # Fix: the previous 'for item in $raw' relied on an unquoted expansion,
    # which performs pathname (glob) expansion per field — an entry such as
    # "*.pdf" could be replaced by matching filenames in the CWD. Splitting
    # with 'read -ra' avoids globbing and never touches the global IFS.
    # NOTE(review): values come from single-line env-file entries; 'read'
    # only consumes the first line, which is fine for that input.
    local raw="$1"
    local result="["
    local first=1
    local item=""
    local fields=()
    IFS=',' read -ra fields <<< "$raw"
    for item in "${fields[@]}"; do
        # Trim leading, then trailing, whitespace.
        item="${item#"${item%%[![:space:]]*}"}"
        item="${item%"${item##*[![:space:]]}"}"
        [[ -z "$item" ]] && continue
        # JSON-escape backslashes first, then double quotes.
        item="${item//\\/\\\\}"
        item="${item//\"/\\\"}"
        if (( first == 0 )); then
            result+=", "
        fi
        result+="\"$item\""
        first=0
    done
    result+="]"
    printf '%s' "$result"
}
wait_for_postgres() {
local timeout_seconds="${1:-120}"
local elapsed=0
@ -89,6 +116,8 @@ require_file "$ENV_FILE" "Create it from: $ROOT_DIR/.env.full.example"
require_file "$COMPOSE_FILE" ""
require_file "$BOOTSTRAP_SQL" ""
require_file "$APP_SQL" ""
require_file "$SCHEMA_SQL" ""
require_file "$SEED_SQL" ""
load_env_var POSTGRES_SUPERUSER postgres
load_env_var POSTGRES_SUPERPASSWORD
@ -96,6 +125,16 @@ load_env_var POSTGRES_BOOTSTRAP_DB postgres
load_env_var POSTGRES_APP_DB
load_env_var POSTGRES_APP_USER
load_env_var POSTGRES_APP_PASSWORD
load_env_var PAGE_SIZE 10
load_env_var CHAT_PULL_PAGE_SIZE 60
load_env_var COMMAND_AUTO_UNLOCK_SECONDS 10
load_env_var AUTH_TOKEN_TTL_HOURS 24
load_env_var AUTH_TOKEN_MAX_ACTIVE 2
load_env_var UPLOAD_MAX_MB 100
load_env_var ALLOWED_ATTACHMENT_EXTENSIONS
load_env_var WORKSPACE_DOWNLOAD_EXTENSIONS ".pdf,.doc,.docx,.xls,.xlsx,.xlsm,.ppt,.pptx,.odt,.ods,.odp,.wps"
load_env_var STT_ENABLED true
load_env_var ACTIVITY_EVENT_RETENTION_DAYS 7
require_env POSTGRES_SUPERUSER
require_env POSTGRES_SUPERPASSWORD
@ -132,4 +171,49 @@ docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec -T \
-d "$POSTGRES_APP_DB" \
-f - < "$APP_SQL"
echo "[init-full-db] applying application schema"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec -T \
-e PGPASSWORD="$POSTGRES_SUPERPASSWORD" \
postgres \
psql \
-v ON_ERROR_STOP=1 \
-U "$POSTGRES_SUPERUSER" \
-d "$POSTGRES_APP_DB" \
-f - < "$SCHEMA_SQL"
PAGE_SIZE_JSON="$PAGE_SIZE"
CHAT_PULL_PAGE_SIZE_JSON="$CHAT_PULL_PAGE_SIZE"
COMMAND_AUTO_UNLOCK_SECONDS_JSON="$COMMAND_AUTO_UNLOCK_SECONDS"
AUTH_TOKEN_TTL_HOURS_JSON="$AUTH_TOKEN_TTL_HOURS"
AUTH_TOKEN_MAX_ACTIVE_JSON="$AUTH_TOKEN_MAX_ACTIVE"
UPLOAD_MAX_MB_JSON="$UPLOAD_MAX_MB"
ALLOWED_ATTACHMENT_EXTENSIONS_JSON="$(csv_to_json_array "$ALLOWED_ATTACHMENT_EXTENSIONS")"
WORKSPACE_DOWNLOAD_EXTENSIONS_JSON="$(csv_to_json_array "$WORKSPACE_DOWNLOAD_EXTENSIONS")"
if [[ "${STT_ENABLED,,}" =~ ^(1|true|yes|on)$ ]]; then
SPEECH_ENABLED_JSON="true"
else
SPEECH_ENABLED_JSON="false"
fi
ACTIVITY_EVENT_RETENTION_DAYS_JSON="$ACTIVITY_EVENT_RETENTION_DAYS"
echo "[init-full-db] applying initial data"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec -T \
-e PGPASSWORD="$POSTGRES_SUPERPASSWORD" \
postgres \
psql \
-v ON_ERROR_STOP=1 \
-v page_size_json="$PAGE_SIZE_JSON" \
-v chat_pull_page_size_json="$CHAT_PULL_PAGE_SIZE_JSON" \
-v command_auto_unlock_seconds_json="$COMMAND_AUTO_UNLOCK_SECONDS_JSON" \
-v auth_token_ttl_hours_json="$AUTH_TOKEN_TTL_HOURS_JSON" \
-v auth_token_max_active_json="$AUTH_TOKEN_MAX_ACTIVE_JSON" \
-v upload_max_mb_json="$UPLOAD_MAX_MB_JSON" \
-v allowed_attachment_extensions_json="$ALLOWED_ATTACHMENT_EXTENSIONS_JSON" \
-v workspace_download_extensions_json="$WORKSPACE_DOWNLOAD_EXTENSIONS_JSON" \
-v speech_enabled_json="$SPEECH_ENABLED_JSON" \
-v activity_event_retention_days_json="$ACTIVITY_EVENT_RETENTION_DAYS_JSON" \
-U "$POSTGRES_SUPERUSER" \
-d "$POSTGRES_APP_DB" \
-f - < "$SEED_SQL"
echo "[init-full-db] done"

View File

@ -0,0 +1,228 @@
-- Offline schema bootstrap for the dashboard database.
-- Run explicitly (psql -f) before starting the backend; the backend no
-- longer creates tables at startup. Everything is idempotent
-- (IF NOT EXISTS) and wrapped in one transaction.
\set ON_ERROR_STOP on
BEGIN;
-- One managed bot per row: identity, workspace, and container state.
CREATE TABLE IF NOT EXISTS bot_instance (
    id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    enabled BOOLEAN NOT NULL DEFAULT TRUE,
    access_password TEXT NOT NULL DEFAULT '',
    workspace_dir TEXT NOT NULL UNIQUE,
    docker_status TEXT NOT NULL DEFAULT 'STOPPED',
    current_state TEXT DEFAULT 'IDLE',
    last_action TEXT,
    image_tag TEXT NOT NULL DEFAULT 'nanobot-base:v0.1.4',
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Registry of built bot Docker images, keyed by tag.
CREATE TABLE IF NOT EXISTS bot_image (
    tag TEXT PRIMARY KEY,
    image_id TEXT,
    version TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'READY',
    source_dir TEXT,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Chat transcript per bot; media_json holds attachment metadata,
-- feedback/feedback_at record user reactions to a message.
CREATE TABLE IF NOT EXISTS bot_message (
    id SERIAL PRIMARY KEY,
    bot_id TEXT NOT NULL REFERENCES bot_instance(id),
    role TEXT NOT NULL,
    text TEXT NOT NULL,
    media_json TEXT,
    feedback TEXT,
    feedback_at TIMESTAMP,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Issued auth tokens (panel and bot logins); token_hash is unique,
-- revoked_at/revoke_reason mark invalidated sessions.
CREATE TABLE IF NOT EXISTS sys_login_log (
    id SERIAL PRIMARY KEY,
    auth_type TEXT NOT NULL,
    token_hash TEXT NOT NULL,
    subject_id TEXT NOT NULL,
    bot_id TEXT,
    auth_source TEXT NOT NULL DEFAULT '',
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    expires_at TIMESTAMP NOT NULL,
    last_seen_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    revoked_at TIMESTAMP,
    revoke_reason TEXT,
    client_ip TEXT,
    user_agent TEXT,
    device_info TEXT
);
-- Key/value platform settings; value_json stores a JSON-encoded value
-- interpreted per value_type. Seeded by scripts/sql/init-data.sql.
CREATE TABLE IF NOT EXISTS sys_setting (
    key VARCHAR(120) PRIMARY KEY,
    name VARCHAR(200) NOT NULL DEFAULT '',
    category VARCHAR(64) NOT NULL DEFAULT 'general',
    description TEXT NOT NULL DEFAULT '',
    value_type VARCHAR(32) NOT NULL DEFAULT 'json',
    value_json TEXT NOT NULL DEFAULT '{}',
    is_public BOOLEAN NOT NULL DEFAULT FALSE,
    sort_order INTEGER NOT NULL DEFAULT 100,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Per-request LLM usage accounting (tokens, previews, status lifecycle).
CREATE TABLE IF NOT EXISTS bot_request_usage (
    id SERIAL PRIMARY KEY,
    bot_id TEXT NOT NULL REFERENCES bot_instance(id),
    message_id INTEGER,
    request_id VARCHAR(120) NOT NULL,
    channel VARCHAR(64) NOT NULL DEFAULT 'dashboard',
    status VARCHAR(32) NOT NULL DEFAULT 'PENDING',
    provider VARCHAR(120),
    model VARCHAR(255),
    token_source VARCHAR(32) NOT NULL DEFAULT 'estimated',
    input_tokens INTEGER NOT NULL DEFAULT 0,
    output_tokens INTEGER NOT NULL DEFAULT 0,
    total_tokens INTEGER NOT NULL DEFAULT 0,
    input_text_preview TEXT,
    output_text_preview TEXT,
    attachments_json TEXT,
    error_text TEXT,
    metadata_json TEXT,
    started_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    completed_at TIMESTAMP,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Operational event stream per bot (retention is bounded by the
-- activity_event_retention_days setting, pruned by the backend).
CREATE TABLE IF NOT EXISTS bot_activity_event (
    id SERIAL PRIMARY KEY,
    bot_id TEXT NOT NULL REFERENCES bot_instance(id),
    request_id VARCHAR(120),
    event_type VARCHAR(64) NOT NULL,
    channel VARCHAR(64) NOT NULL DEFAULT 'dashboard',
    detail TEXT,
    metadata_json TEXT,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Skill market catalog; one row per distributable skill zip.
CREATE TABLE IF NOT EXISTS skill_market_item (
    id SERIAL PRIMARY KEY,
    skill_key VARCHAR(120) NOT NULL,
    display_name VARCHAR(255) NOT NULL DEFAULT '',
    description TEXT NOT NULL DEFAULT '',
    zip_filename VARCHAR(255) NOT NULL,
    zip_size_bytes INTEGER NOT NULL DEFAULT 0,
    entry_names_json TEXT NOT NULL DEFAULT '[]',
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT uq_skill_market_item_skill_key UNIQUE (skill_key),
    CONSTRAINT uq_skill_market_item_zip_filename UNIQUE (zip_filename)
);
-- Which market skills are installed on which bot (one row per pair).
CREATE TABLE IF NOT EXISTS bot_skill_install (
    id SERIAL PRIMARY KEY,
    bot_id TEXT NOT NULL REFERENCES bot_instance(id),
    skill_market_item_id INTEGER NOT NULL REFERENCES skill_market_item(id),
    installed_entries_json TEXT NOT NULL DEFAULT '[]',
    source_zip_filename VARCHAR(255) NOT NULL DEFAULT '',
    status VARCHAR(32) NOT NULL DEFAULT 'INSTALLED',
    last_error TEXT,
    installed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT uq_bot_skill_install_bot_market UNIQUE (bot_id, skill_market_item_id)
);
-- Topic definitions per bot; topic_key is unique within a bot.
CREATE TABLE IF NOT EXISTS topic_topic (
    id SERIAL PRIMARY KEY,
    bot_id TEXT NOT NULL REFERENCES bot_instance(id),
    topic_key TEXT NOT NULL,
    name TEXT NOT NULL DEFAULT '',
    description TEXT NOT NULL DEFAULT '',
    is_active BOOLEAN NOT NULL DEFAULT TRUE,
    is_default_fallback BOOLEAN NOT NULL DEFAULT FALSE,
    routing_json TEXT NOT NULL DEFAULT '{}',
    view_schema_json TEXT NOT NULL DEFAULT '{}',
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT uq_topic_topic_bot_topic_key UNIQUE (bot_id, topic_key)
);
-- Individual topic entries; dedupe_key supports idempotent ingestion.
CREATE TABLE IF NOT EXISTS topic_item (
    id SERIAL PRIMARY KEY,
    bot_id TEXT NOT NULL REFERENCES bot_instance(id),
    topic_key TEXT NOT NULL,
    title TEXT NOT NULL DEFAULT '',
    content TEXT NOT NULL DEFAULT '',
    level TEXT NOT NULL DEFAULT 'info',
    tags_json TEXT,
    view_json TEXT,
    source TEXT NOT NULL DEFAULT 'mcp',
    dedupe_key TEXT,
    is_read BOOLEAN NOT NULL DEFAULT FALSE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
-- Secondary indexes for the query patterns used by the backend.
CREATE INDEX IF NOT EXISTS idx_bot_instance_enabled ON bot_instance (enabled);
CREATE INDEX IF NOT EXISTS idx_bot_instance_docker_status ON bot_instance (docker_status);
CREATE INDEX IF NOT EXISTS idx_bot_message_bot_id ON bot_message (bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_message_role ON bot_message (role);
CREATE INDEX IF NOT EXISTS idx_bot_message_feedback ON bot_message (feedback);
CREATE INDEX IF NOT EXISTS idx_bot_message_created_at ON bot_message (created_at);
CREATE UNIQUE INDEX IF NOT EXISTS idx_sys_login_log_token_hash ON sys_login_log (token_hash);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_auth_type ON sys_login_log (auth_type);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_subject_id ON sys_login_log (subject_id);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_bot_id ON sys_login_log (bot_id);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_auth_source ON sys_login_log (auth_source);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_created_at ON sys_login_log (created_at);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_expires_at ON sys_login_log (expires_at);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_last_seen_at ON sys_login_log (last_seen_at);
CREATE INDEX IF NOT EXISTS idx_sys_login_log_revoked_at ON sys_login_log (revoked_at);
CREATE INDEX IF NOT EXISTS idx_sys_setting_category ON sys_setting (category);
CREATE INDEX IF NOT EXISTS idx_sys_setting_is_public ON sys_setting (is_public);
CREATE INDEX IF NOT EXISTS idx_sys_setting_sort_order ON sys_setting (sort_order);
CREATE INDEX IF NOT EXISTS idx_sys_setting_updated_at ON sys_setting (updated_at);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_bot_id ON bot_request_usage (bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_message_id ON bot_request_usage (message_id);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_request_id ON bot_request_usage (request_id);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_channel ON bot_request_usage (channel);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_status ON bot_request_usage (status);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_started_at ON bot_request_usage (started_at);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_completed_at ON bot_request_usage (completed_at);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_created_at ON bot_request_usage (created_at);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_updated_at ON bot_request_usage (updated_at);
CREATE INDEX IF NOT EXISTS idx_bot_request_usage_started_at_bot_id ON bot_request_usage (started_at, bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_activity_event_bot_id ON bot_activity_event (bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_activity_event_request_id ON bot_activity_event (request_id);
CREATE INDEX IF NOT EXISTS idx_bot_activity_event_event_type ON bot_activity_event (event_type);
CREATE INDEX IF NOT EXISTS idx_bot_activity_event_channel ON bot_activity_event (channel);
CREATE INDEX IF NOT EXISTS idx_bot_activity_event_created_at ON bot_activity_event (created_at);
-- Partial index: only events that carry a non-empty request_id.
CREATE INDEX IF NOT EXISTS idx_bot_activity_event_bot_id_request_present
    ON bot_activity_event (bot_id)
    WHERE request_id IS NOT NULL AND request_id <> '';
CREATE INDEX IF NOT EXISTS idx_skill_market_item_created_at ON skill_market_item (created_at);
CREATE INDEX IF NOT EXISTS idx_skill_market_item_updated_at ON skill_market_item (updated_at);
CREATE INDEX IF NOT EXISTS idx_bot_skill_install_bot_id ON bot_skill_install (bot_id);
CREATE INDEX IF NOT EXISTS idx_bot_skill_install_skill_market_item_id ON bot_skill_install (skill_market_item_id);
CREATE INDEX IF NOT EXISTS idx_bot_skill_install_status ON bot_skill_install (status);
CREATE INDEX IF NOT EXISTS idx_bot_skill_install_installed_at ON bot_skill_install (installed_at);
CREATE INDEX IF NOT EXISTS idx_bot_skill_install_updated_at ON bot_skill_install (updated_at);
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id ON topic_topic (bot_id);
CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key ON topic_topic (topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_topic_created_at ON topic_topic (created_at);
CREATE INDEX IF NOT EXISTS idx_topic_topic_updated_at ON topic_topic (updated_at);
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback ON topic_topic (bot_id, is_default_fallback);
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id ON topic_item (bot_id);
CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key ON topic_item (topic_key);
CREATE INDEX IF NOT EXISTS idx_topic_item_level ON topic_item (level);
CREATE INDEX IF NOT EXISTS idx_topic_item_source ON topic_item (source);
CREATE INDEX IF NOT EXISTS idx_topic_item_is_read ON topic_item (is_read);
CREATE INDEX IF NOT EXISTS idx_topic_item_created_at ON topic_item (created_at);
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at ON topic_item (bot_id, topic_key, created_at);
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe ON topic_item (bot_id, dedupe_key);
COMMIT;

View File

@ -0,0 +1,64 @@
-- init-data script for psql: seed sys_setting and the default skill catalog.
-- Expects the *_json psql variables (page_size_json, upload_max_mb_json, ...)
-- to be supplied by the caller (psql -v), each already a JSON-encoded string;
-- they are interpolated below with :'var' (quoted-literal interpolation).
\set ON_ERROR_STOP on
BEGIN;
-- Upsert the platform settings catalog.
INSERT INTO sys_setting (
key,
name,
category,
description,
value_type,
value_json,
is_public,
sort_order,
created_at,
updated_at
)
VALUES
('page_size', '分页大小', 'ui', '平台各类列表默认每页条数。', 'integer', :'page_size_json', TRUE, 5, NOW(), NOW()),
('chat_pull_page_size', '对话懒加载条数', 'chat', 'Bot 对话区向上懒加载时每次读取的消息条数。', 'integer', :'chat_pull_page_size_json', TRUE, 8, NOW(), NOW()),
('command_auto_unlock_seconds', '发送按钮自动恢复秒数', 'chat', '对话发送后按钮保持停止态的最长秒数,超时后自动恢复为可发送状态。', 'integer', :'command_auto_unlock_seconds_json', TRUE, 9, NOW(), NOW()),
('auth_token_ttl_hours', '认证 Token 过期小时数', 'auth', 'Panel 与 Bot 登录 Token 的统一有效时长,单位小时。', 'integer', :'auth_token_ttl_hours_json', FALSE, 10, NOW(), NOW()),
('auth_token_max_active', '认证 Token 最大并发数', 'auth', '同一主体允许同时活跃的 Token 数量,超过时自动撤销最旧 Token。', 'integer', :'auth_token_max_active_json', FALSE, 11, NOW(), NOW()),
-- NOTE(review): both 'upload' settings share sort_order 20 — confirm the
-- intended relative ordering within the category.
('upload_max_mb', '上传大小限制', 'upload', '单文件上传大小限制,单位 MB。', 'integer', :'upload_max_mb_json', FALSE, 20, NOW(), NOW()),
('allowed_attachment_extensions', '允许附件后缀', 'upload', '允许上传的附件后缀列表,留空表示不限制。', 'json', :'allowed_attachment_extensions_json', FALSE, 20, NOW(), NOW()),
('workspace_download_extensions', '工作区下载后缀', 'workspace', '命中后缀的工作区文件默认走下载模式。', 'json', :'workspace_download_extensions_json', FALSE, 30, NOW(), NOW()),
('speech_enabled', '语音识别开关', 'speech', '控制 Bot 语音转写功能是否启用。', 'boolean', :'speech_enabled_json', TRUE, 32, NOW(), NOW()),
('activity_event_retention_days', '活动事件保留天数', 'maintenance', 'bot_activity_event 运维事件的保留天数,超期记录会自动清理。', 'integer', :'activity_event_retention_days_json', FALSE, 34, NOW(), NOW())
ON CONFLICT (key) DO UPDATE
SET
name = EXCLUDED.name,
category = EXCLUDED.category,
description = EXCLUDED.description,
value_type = EXCLUDED.value_type,
is_public = EXCLUDED.is_public,
sort_order = EXCLUDED.sort_order,
-- value_json and created_at are intentionally absent from this SET list:
-- on re-run the existing stored value (possibly operator-customized) and the
-- original creation timestamp are preserved; only the metadata is refreshed.
updated_at = NOW();
-- Upsert the default skill packages shipped under data/skills/ into the market.
INSERT INTO skill_market_item (
skill_key,
display_name,
description,
zip_filename,
zip_size_bytes,
entry_names_json,
created_at,
updated_at
)
VALUES
('nano-banana-pro', 'Nano Banana Pro', 'Generate and edit images with Nano Banana Pro.', 'nano-banana-pro.zip', 5317, '["nano-banana-pro"]', NOW(), NOW()),
('powerpoint-pptx', 'Powerpoint PPTX', 'Create and edit PowerPoint PPTX files.', 'powerpoint-pptx.zip', 3596, '["powerpoint-pptx"]', NOW(), NOW()),
('self-improving-agent', 'Self Improving Agent', 'Capture learnings, errors, and corrections for continuous improvement.', 'self-improving-agent.zip', 26866, '["self-improving-agent"]', NOW(), NOW()),
('stock-analysis', 'Stock Analysis', 'Analyze stocks and crypto with research and portfolio workflows.', 'stock-analysis.zip', 79536, '["stock-analysis"]', NOW(), NOW()),
('tavily-search', 'Tavily Search', 'AI-optimized web search skill powered by Tavily.', 'tavily-search.zip', 3459, '["tavily-search"]', NOW(), NOW()),
('writing', 'Writing', 'Adapt writing voice and improve clarity, structure, and style.', 'writing.zip', 8618, '["writing"]', NOW(), NOW())
ON CONFLICT (skill_key) DO UPDATE
SET
display_name = EXCLUDED.display_name,
description = EXCLUDED.description,
zip_filename = EXCLUDED.zip_filename,
zip_size_bytes = EXCLUDED.zip_size_bytes,
entry_names_json = EXCLUDED.entry_names_json,
-- created_at is deliberately not updated on conflict; only updated_at moves.
updated_at = NOW();
COMMIT;