Compare commits

...

3 Commits

Author SHA1 Message Date
mula.liu e8932bec17 Complete deployment files 2026-04-13 16:36:58 +08:00
mula.liu 4d7aa42a18 v0.1.4-p5 2026-04-13 15:54:26 +08:00
mula.liu 9ad37d0aa6 Added the full-stack deployment script 2026-04-13 15:47:23 +08:00
32 changed files with 945 additions and 341 deletions

View File

@@ -6,7 +6,14 @@ frontend/node_modules
frontend/dist
backend/venv
data
data/*
!data/templates/
!data/templates/**
!data/skills/
!data/skills/**
!data/model/
data/model/*
!data/model/README.md
workspace
**/__pycache__

.env.full.example 100644
View File

@@ -0,0 +1,81 @@
# Public exposed port (only nginx is exposed)
NGINX_PORT=8080
# Project data is always mounted from the repository root `./data`.
# Only workspace root still needs an absolute host path.
HOST_BOTS_WORKSPACE_ROOT=/opt/dashboard-nanobot/workspace/bots
# Optional custom image tags
BACKEND_IMAGE_TAG=latest
FRONTEND_IMAGE_TAG=latest
# Optional base images / mirrors
PYTHON_BASE_IMAGE=python:3.12-slim
NODE_BASE_IMAGE=node:22-alpine
NGINX_BASE_IMAGE=nginx:alpine
POSTGRES_IMAGE=postgres:16-alpine
REDIS_IMAGE=redis:7-alpine
# Python package index mirror (recommended in CN)
PIP_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple
PIP_TRUSTED_HOST=pypi.tuna.tsinghua.edu.cn
# Frontend package registry mirror (used by yarn, recommended in CN)
NPM_REGISTRY=https://registry.npmmirror.com
# Container timezone
TZ=Asia/Shanghai
# PostgreSQL bootstrap account.
# These values are used by the postgres container itself.
POSTGRES_SUPERUSER=postgres
POSTGRES_SUPERPASSWORD=change_me_pg_super_password
POSTGRES_BOOTSTRAP_DB=postgres
# Dashboard application database account.
# deploy-full.sh will call scripts/init-full-db.sh to create/update these idempotently.
POSTGRES_APP_DB=nanobot
POSTGRES_APP_USER=nanobot
POSTGRES_APP_PASSWORD=change_me_nanobot_password
DATABASE_POOL_SIZE=20
DATABASE_MAX_OVERFLOW=40
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=1800
# Redis cache (managed by docker-compose.full.yml)
REDIS_ENABLED=true
REDIS_DB=8
REDIS_PREFIX=nanobot
REDIS_DEFAULT_TTL=60
# Chat history page size for upward lazy loading (per request)
CHAT_PULL_PAGE_SIZE=60
COMMAND_AUTO_UNLOCK_SECONDS=10
DEFAULT_BOT_SYSTEM_TIMEZONE=Asia/Shanghai
# Panel access protection
PANEL_ACCESS_PASSWORD=change_me_panel_password
# Browser credential requests must use an explicit CORS allowlist.
# If frontend and backend are served under the same origin via nginx `/api` proxy,
# this can usually stay unset. Otherwise set the real dashboard origin(s).
# Example:
# CORS_ALLOWED_ORIGINS=https://dashboard.example.com
# Max upload size for backend validation (MB)
UPLOAD_MAX_MB=200
# Workspace files that should use direct download behavior in dashboard
WORKSPACE_DOWNLOAD_EXTENSIONS=.pdf,.doc,.docx,.xls,.xlsx,.xlsm,.ppt,.pptx,.odt,.ods,.odp,.wps,.stl,.scad,.zip,.rar
# Local speech-to-text (Whisper via whisper.cpp model file)
STT_ENABLED=true
STT_MODEL=ggml-small-q8_0.bin
STT_MODEL_DIR=/app/data/model
STT_DEVICE=cpu
STT_MAX_AUDIO_SECONDS=20
STT_DEFAULT_LANGUAGE=zh
STT_FORCE_SIMPLIFIED=true
STT_AUDIO_PREPROCESS=true
STT_AUDIO_FILTER=highpass=f=120,lowpass=f=7600,afftdn=nf=-20
STT_INITIAL_PROMPT=以下内容可能包含简体中文和英文术语。请优先输出简体中文,英文单词、缩写、品牌名和数字保持原文,不要翻译。
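The CORS note above asks for an explicit origin allowlist whenever the browser sends credentials. A minimal FastAPI sketch of that wiring (the origin and the middleware registration shown here are illustrative, not the repo's actual startup code):

```python
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["https://dashboard.example.com"],  # explicit origins; "*" does not work with credentials
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
```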

View File

@@ -1,9 +1,8 @@
# Public exposed port (only nginx is exposed)
NGINX_PORT=8080
# REQUIRED absolute host paths.
# They must exist and be writable by docker daemon.
HOST_DATA_ROOT=/opt/dashboard-nanobot/data
# Project data is always mounted from the repository root `./data`.
# Only workspace root still needs an absolute host path.
HOST_BOTS_WORKSPACE_ROOT=/opt/dashboard-nanobot/workspace/bots
# Optional custom image tags
@@ -25,7 +24,7 @@ NPM_REGISTRY=https://registry.npmmirror.com
# Database (choose one: SQLite / PostgreSQL / MySQL)
# SQLite example:
# DATABASE_URL=sqlite:///${HOST_DATA_ROOT}/nanobot_dashboard.db
# DATABASE_URL=sqlite:////app/data/nanobot_dashboard.db
# PostgreSQL example:
# DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
# MySQL example:
@@ -65,7 +64,7 @@ WORKSPACE_DOWNLOAD_EXTENSIONS=.pdf,.doc,.docx,.xls,.xlsx,.xlsm,.ppt,.pptx,.odt,.
# Local speech-to-text (Whisper via whisper.cpp model file)
STT_ENABLED=true
STT_MODEL=ggml-small-q8_0.bin
STT_MODEL_DIR=${HOST_DATA_ROOT}/model
STT_MODEL_DIR=/app/data/model
STT_DEVICE=cpu
STT_MAX_AUDIO_SECONDS=20
STT_DEFAULT_LANGUAGE=zh

.gitignore vendored
View File

@@ -30,7 +30,14 @@ backend/__pycache__/
backend/*.log
# Project runtime data (generated locally)
data/
data/*
!data/templates/
!data/templates/**
!data/skills/
!data/skills/**
!data/model/
data/model/*
!data/model/README.md
workspace/
engines/
@@ -49,6 +56,7 @@ frontend/coverage/
.env
.env.*
!.env.example
!.env.full.example
!.env.prod.example
backend/.env
frontend/.env

View File

@@ -13,7 +13,7 @@ Dashboard Nanobot is the control-plane project for `nanobot`, providing image management
- `USER.md`
- `TOOLS.md`
- `IDENTITY.md`
- Template management: system-level templates moved to file-based configuration (`backend/templates/agent_md_templates.json` and `backend/templates/topic_presets.json`).
- Template management: system-level templates moved to file-based configuration (`data/templates/agent_md_templates.json` and `data/templates/topic_presets.json`).
- 2D Ops Dashboard: bot list, start/stop, command dispatch, log streaming, telemetry.
- Global UI support: Light/Dark toggle, Chinese/English toggle.
@@ -57,6 +57,13 @@ graph TD
- Architecture design: `design/architecture.md`
- Database design: `design/database.md`

## Default Resources

- The repository root `data/templates/` holds the default template assets, which are synced into the runtime data directory during initialization.
- The repository root `data/skills/` holds the default skill packages, which are registered automatically into `skill_market_item` during database initialization.
- `data/model/` does not ship speech-recognition model files; download a model yourself and place it in this directory or in the directory pointed to by `STT_MODEL_DIR`.
- If the speech model is missing, the backend prints a clear warning at startup but does not block service startup.

## Environment Variable Configuration

- Backend:
@@ -102,10 +109,10 @@ graph TD
1. Prepare deployment variables
   - Copy `.env.prod.example` to `.env.prod` (in the project root)
   - Configure absolute paths:
     - `HOST_DATA_ROOT`
   - `data/` is mapped automatically to `./data` under the project root on the host
   - Only one absolute path still needs to be configured:
     - `HOST_BOTS_WORKSPACE_ROOT`
   - To enable local speech recognition, put the Whisper `.bin` model file into `${HOST_DATA_ROOT}/model/`
   - To enable local speech recognition, put the Whisper `.bin` model file into `data/model/` under the project root on the host,
     and point `STT_MODEL` at the full file name, e.g. `ggml-small-q8_0.bin`
   - On networks in China, configure the mirror settings:
     - `PIP_INDEX_URL`, `PIP_TRUSTED_HOST`
@@ -122,6 +129,57 @@ graph TD
- `backend` exposes no host port; it is reachable only by Nginx on the internal network.
- Upload size is controlled by the single parameter `UPLOAD_MAX_MB` (backend validation + Nginx limit).
- `/var/run/docker.sock` must be mounted; otherwise the backend cannot manage bot images and containers.
- `data/` is always bound to `./data` under the project root on the host; templates, default skills, speech models, and runtime data all live there.
- `HOST_BOTS_WORKSPACE_ROOT` must be an absolute host path, and `docker-compose.prod.yml` mounts it into the backend container at the *same* path.
  Reason: when the backend creates bot containers through the Docker API, the bind paths it passes are resolved on the host (see the sketch after this list).
- Speech recognition is currently based on `pywhispercpp==1.3.1` plus a Whisper `.bin` model file; `faster-whisper` is not used.
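A minimal sketch of why the same-path mount matters, using the `docker` Python SDK (the image tag and workspace path are hypothetical, not taken from the repo): the backend talks to the *host's* Docker daemon through the mounted socket, so any bind source it passes must be a path the host can see; mounting the workspace at an identical path inside the backend container lets it forward paths unchanged.

```python
import docker

client = docker.from_env()  # talks to the daemon behind /var/run/docker.sock

host_ws = "/opt/dashboard-nanobot/workspace/bots/bot-1"  # hypothetical bot workspace
client.containers.run(
    "nanobot:latest",  # hypothetical image tag
    detach=True,
    # The bind source is resolved by the host daemon, so it must be a
    # host-visible path -- hence the same-path mount for the backend itself.
    volumes={host_ws: {"bind": "/workspace", "mode": "rw"}},
)
```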
## Docker Full Deployment (bundled PostgreSQL / Redis)

This setup coexists with `deploy-prod.sh` and is meant for bringing up the frontend, backend, PostgreSQL, and Redis together on the target machine.

### Files

- `docker-compose.full.yml`
- `.env.full.example`
- `scripts/deploy-full.sh`
- `scripts/init-full-db.sh`
- `scripts/stop-full.sh`
- `scripts/sql/init-postgres-bootstrap.sql`
- `scripts/sql/init-postgres-app.sql`

### Startup Steps

1. Prepare deployment variables
   - Copy `.env.full.example` to `.env.full`
   - `data/` is mapped automatically to `./data` under the project root on the host
   - Required changes:
     - `HOST_BOTS_WORKSPACE_ROOT`
     - `POSTGRES_SUPERPASSWORD`
     - `POSTGRES_APP_PASSWORD`
     - `PANEL_ACCESS_PASSWORD`
   - To enable local speech recognition, put the Whisper `.bin` model file into `data/model/` under the project root on the host
2. Start the full stack
   - `./scripts/deploy-full.sh`
3. Access
   - `http://<host>:${NGINX_PORT}` (default `8080`)

### Initialization Notes

- `scripts/deploy-full.sh` first starts `postgres` / `redis`, then automatically calls `scripts/init-full-db.sh`.
- `scripts/init-full-db.sh` is responsible for:
  - Waiting for PostgreSQL to become ready
  - Creating or updating the application account
  - Creating the application database and granting privileges
  - Fixing `public` schema permissions
- The dashboard's own tables are still created by the backend at startup via `SQLModel.metadata.create_all(...)`, plus column/index alignment (see the sketch below).
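A minimal illustration of that startup behavior with SQLModel (the table here is hypothetical; the real models live under `backend/models/`):

```python
from sqlmodel import Field, SQLModel, create_engine

class DemoItem(SQLModel, table=True):  # hypothetical table, for illustration only
    id: int | None = Field(default=None, primary_key=True)
    name: str = ""

engine = create_engine("postgresql+psycopg://nanobot:password@localhost:5432/nanobot")
# Creates any missing tables; existing tables are left untouched, which is
# why column/index alignment needs a separate step.
SQLModel.metadata.create_all(engine)
```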
### Stop

- `./scripts/stop-full.sh`

### Caveats

- `deploy-prod.sh` and `deploy-full.sh` use two separate compose files but reuse the same container names, so the two stacks cannot run side by side on one machine.
- PostgreSQL data is persisted by default to `./data/postgres` under the project root on the host; Redis data to `./data/redis`.
- To keep only the frontend/backend containers, continue using `deploy-prod.sh`; to bundle the dependencies as well, use `deploy-full.sh`.

View File

@@ -20,6 +20,7 @@ RUN if [ -n "${PIP_INDEX_URL}" ]; then pip config set global.index-url "${PIP_IN
    && pip install -r requirements.txt
COPY backend/ /app/backend/
COPY data/ /app/data/
EXPOSE 8000

View File

@@ -1,5 +1,6 @@
from fastapi import APIRouter, HTTPException
from core.speech_service import inspect_speech_model_status
from core.utils import _get_default_system_timezone
from schemas.system import SystemTemplatesUpdateRequest
from services.platform_service import get_platform_settings_snapshot, get_speech_runtime_settings
@@ -17,6 +18,7 @@ def get_system_defaults():
    md_templates = get_agent_md_templates()
    platform_settings = get_platform_settings_snapshot()
    speech_settings = get_speech_runtime_settings()
    model_status = inspect_speech_model_status()
    return {
        "templates": md_templates,
        "limits": {
@@ -41,6 +43,9 @@ def get_system_defaults():
            "device": speech_settings["device"],
            "max_audio_seconds": speech_settings["max_audio_seconds"],
            "default_language": speech_settings["default_language"],
            "ready": model_status["ready"],
            "message": model_status["message"],
            "expected_path": model_status["expected_path"],
        },
    }
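With this change the defaults endpoint also reports model readiness. A hypothetical client-side check (the URL and the `speech` key name are assumptions; the hunk does not show the enclosing key):

```python
import requests

# Hypothetical URL; adjust host/port and route to the actual deployment.
resp = requests.get("http://localhost:8080/api/system/defaults", timeout=5)
speech = resp.json().get("speech", {})  # assumed enclosing key for the fields above
if not speech.get("ready", False):
    print(f"STT not ready: {speech.get('message')} (expected at {speech.get('expected_path')})")
```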

View File

@@ -6,10 +6,12 @@ from sqlmodel import Session, select
from core.cache import cache
from core.database import engine, init_database
from core.docker_instance import docker_manager
from core.speech_service import inspect_speech_model_status
from core.settings import DATABASE_URL_DISPLAY, REDIS_ENABLED
from models.bot import BotInstance
from services.bot_storage_service import _migrate_bot_resources_store
from services.platform_service import prune_expired_activity_events
from services.default_assets_service import ensure_default_skill_market_items, ensure_runtime_data_assets
from services.platform_service import get_speech_runtime_settings, prune_expired_activity_events
from services.runtime_service import docker_callback, set_main_loop
@@ -29,11 +31,31 @@ def register_app_runtime(app: FastAPI) -> None:
        current_loop = asyncio.get_running_loop()
        app.state.main_loop = current_loop
        set_main_loop(current_loop)
        asset_report = ensure_runtime_data_assets()
        if asset_report["templates_initialized"] or asset_report["skills_synchronized"]:
            print(
                "[init] 默认资源已同步 "
                f"(templates={asset_report['templates_initialized']}, skills={asset_report['skills_synchronized']})"
            )
        init_database()
        with Session(engine) as session:
            skill_report = ensure_default_skill_market_items(session)
            if skill_report["created"] or skill_report["updated"]:
                print(
                    "[init] 默认 skills 已入库 "
                    f"(created={len(skill_report['created'])}, updated={len(skill_report['updated'])})"
                )
            prune_expired_activity_events(session, force=True)
            bots = session.exec(select(BotInstance)).all()
            for bot in bots:
                _migrate_bot_resources_store(bot.id)
                docker_manager.ensure_monitor(bot.id, docker_callback)
        speech_settings = get_speech_runtime_settings()
        model_status = inspect_speech_model_status()
        if speech_settings["enabled"]:
            if model_status["ready"]:
                print(f"🎙️ 语音识别模型就绪: {model_status['resolved_path']}")
            else:
                hint = f",请将模型文件放到 {model_status['expected_path']}" if model_status["expected_path"] else ""
                print(f"⚠️ 语音识别模型未就绪: {model_status['message']}{hint}")
        print("✅ 启动自检完成")

View File

@@ -117,6 +117,13 @@ DATA_ROOT: Final[str] = _normalize_dir_path(os.getenv("DATA_ROOT", str(PROJECT_R
BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path(
    os.getenv("BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots"))
)
BUNDLED_DATA_ROOT: Final[Path] = (PROJECT_ROOT / "data").resolve()
RUNTIME_DATA_ROOT: Final[Path] = Path(DATA_ROOT).resolve()
BUNDLED_TEMPLATES_ROOT: Final[Path] = (BUNDLED_DATA_ROOT / "templates").resolve()
RUNTIME_TEMPLATES_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "templates").resolve()
BUNDLED_SKILLS_ROOT: Final[Path] = (BUNDLED_DATA_ROOT / "skills").resolve()
RUNTIME_SKILLS_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "skills").resolve()
RUNTIME_MODEL_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "model").resolve()

def _normalize_database_url(url: str) -> str:
@@ -205,7 +212,7 @@ DEFAULT_WORKSPACE_DOWNLOAD_EXTENSIONS: Final[tuple[str, ...]] = (
)
STT_ENABLED_DEFAULT: Final[bool] = True
STT_MODEL: Final[str] = str(os.getenv("STT_MODEL") or "ggml-small-q8_0.bin").strip()
_DEFAULT_STT_MODEL_DIR: Final[Path] = (Path(DATA_ROOT) / "model").resolve()
_DEFAULT_STT_MODEL_DIR: Final[Path] = RUNTIME_MODEL_ROOT
_configured_stt_model_dir = _normalize_dir_path(os.getenv("STT_MODEL_DIR", str(_DEFAULT_STT_MODEL_DIR)))
if _configured_stt_model_dir and not Path(_configured_stt_model_dir).exists() and _DEFAULT_STT_MODEL_DIR.exists():
    STT_MODEL_DIR: Final[str] = str(_DEFAULT_STT_MODEL_DIR)
@@ -240,6 +247,7 @@ APP_HOST: Final[str] = str(os.getenv("APP_HOST") or "0.0.0.0").strip()
APP_PORT: Final[int] = _env_int("APP_PORT", 8000, 1, 65535)
APP_RELOAD: Final[bool] = _env_bool("APP_RELOAD", False)
TEMPLATE_ROOT: Final[Path] = (BACKEND_ROOT / "templates").resolve()
AGENT_MD_TEMPLATES_FILE: Final[Path] = TEMPLATE_ROOT / "agent_md_templates.json"
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = TEMPLATE_ROOT / "topic_presets.json"
AGENT_MD_TEMPLATES_FILE: Final[Path] = RUNTIME_TEMPLATES_ROOT / "agent_md_templates.json"
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = RUNTIME_TEMPLATES_ROOT / "topic_presets.json"
BUNDLED_AGENT_MD_TEMPLATES_FILE: Final[Path] = BUNDLED_TEMPLATES_ROOT / "agent_md_templates.json"
BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = BUNDLED_TEMPLATES_ROOT / "topic_presets.json"

View File

@@ -24,6 +24,39 @@ class SpeechDurationError(SpeechServiceError):
    pass

def inspect_speech_model_status() -> Dict[str, Any]:
    service = WhisperSpeechService()
    model = str(STT_MODEL or "").strip()
    model_dir = str(STT_MODEL_DIR or "").strip()
    expected_path = ""
    if model:
        if any(sep in model for sep in ("/", "\\")):
            expected_path = str(Path(model).expanduser())
        elif model_dir:
            expected_path = str((Path(model_dir).expanduser() / model).resolve())
    try:
        resolved_path = service._resolve_model_source()
        return {
            "ready": True,
            "model": model,
            "model_dir": model_dir,
            "expected_path": expected_path or resolved_path,
            "resolved_path": resolved_path,
            "message": "",
        }
    except SpeechServiceError as exc:
        return {
            "ready": False,
            "model": model,
            "model_dir": model_dir,
            "expected_path": expected_path,
            "resolved_path": "",
            "message": str(exc),
        }

class WhisperSpeechService:
    def __init__(self) -> None:
        self._model: Any = None

View File

@@ -0,0 +1,206 @@
from __future__ import annotations
import json
import os
import re
import shutil
import zipfile
from pathlib import Path
from typing import Any, Dict, List
from sqlmodel import Session, select
from core.settings import (
    AGENT_MD_TEMPLATES_FILE,
    BUNDLED_AGENT_MD_TEMPLATES_FILE,
    BUNDLED_SKILLS_ROOT,
    BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE,
    DATA_ROOT,
    RUNTIME_MODEL_ROOT,
    RUNTIME_SKILLS_ROOT,
    RUNTIME_TEMPLATES_ROOT,
    TOPIC_PRESETS_TEMPLATES_FILE,
)
from core.utils import (
    _is_ignored_skill_zip_top_level,
    _is_valid_top_level_skill_name,
    _read_description_from_text,
    _sanitize_skill_market_key,
)
from models.skill import SkillMarketItem

def _copy_if_missing(src: Path, dst: Path) -> bool:
    if not src.exists() or not src.is_file():
        return False
    if dst.exists() and src.resolve() == dst.resolve():
        return False
    if dst.exists():
        return False
    dst.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, dst)
    return True

def _copy_if_different(src: Path, dst: Path) -> bool:
    if not src.exists() or not src.is_file():
        return False
    if dst.exists() and src.resolve() == dst.resolve():
        return False
    dst.parent.mkdir(parents=True, exist_ok=True)
    if dst.exists():
        try:
            if src.stat().st_size == dst.stat().st_size and src.read_bytes() == dst.read_bytes():
                return False
        except Exception:
            pass
    shutil.copy2(src, dst)
    return True

def _iter_bundled_skill_packages() -> List[Path]:
    if not BUNDLED_SKILLS_ROOT.exists() or not BUNDLED_SKILLS_ROOT.is_dir():
        return []
    return sorted(path for path in BUNDLED_SKILLS_ROOT.iterdir() if path.is_file() and path.suffix.lower() == ".zip")

def ensure_runtime_data_assets() -> Dict[str, int]:
    Path(DATA_ROOT).mkdir(parents=True, exist_ok=True)
    RUNTIME_TEMPLATES_ROOT.mkdir(parents=True, exist_ok=True)
    RUNTIME_SKILLS_ROOT.mkdir(parents=True, exist_ok=True)
    RUNTIME_MODEL_ROOT.mkdir(parents=True, exist_ok=True)
    templates_initialized = 0
    skills_synchronized = 0
    if _copy_if_missing(BUNDLED_AGENT_MD_TEMPLATES_FILE, AGENT_MD_TEMPLATES_FILE):
        templates_initialized += 1
    if _copy_if_missing(BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE, TOPIC_PRESETS_TEMPLATES_FILE):
        templates_initialized += 1
    for src in _iter_bundled_skill_packages():
        if _copy_if_different(src, RUNTIME_SKILLS_ROOT / src.name):
            skills_synchronized += 1
    return {
        "templates_initialized": templates_initialized,
        "skills_synchronized": skills_synchronized,
    }

def _extract_skill_zip_summary(zip_path: Path) -> Dict[str, Any]:
    entry_names: List[str] = []
    description = ""
    with zipfile.ZipFile(zip_path) as archive:
        members = archive.infolist()
        file_members = [member for member in members if not member.is_dir()]
        for member in file_members:
            raw_name = str(member.filename or "").replace("\\", "/").lstrip("/")
            if not raw_name:
                continue
            first = raw_name.split("/", 1)[0].strip()
            if _is_ignored_skill_zip_top_level(first):
                continue
            if _is_valid_top_level_skill_name(first) and first not in entry_names:
                entry_names.append(first)
        candidates = sorted(
            [
                str(member.filename or "").replace("\\", "/").lstrip("/")
                for member in file_members
                if str(member.filename or "").replace("\\", "/").rsplit("/", 1)[-1].lower()
                in {"skill.md", "readme.md"}
            ],
            key=lambda value: (value.count("/"), value.lower()),
        )
        for candidate in candidates:
            try:
                with archive.open(candidate, "r") as file:
                    preview = file.read(4096).decode("utf-8", errors="ignore")
                description = _read_description_from_text(preview)
                if description:
                    break
            except Exception:
                continue
    return {
        "entry_names": entry_names,
        "description": description,
    }

def _default_display_name(stem: str) -> str:
    chunks = [chunk for chunk in re.split(r"[-_]+", str(stem or "").strip()) if chunk]
    if not chunks:
        return "Skill"
    return " ".join(chunk.upper() if chunk.isupper() else chunk.capitalize() for chunk in chunks)

def _resolve_unique_skill_key(existing_keys: set[str], preferred_key: str) -> str:
    base_key = _sanitize_skill_market_key(preferred_key) or "skill"
    candidate = base_key
    counter = 2
    while candidate in existing_keys:
        candidate = f"{base_key}-{counter}"
        counter += 1
    existing_keys.add(candidate)
    return candidate

def ensure_default_skill_market_items(session: Session) -> Dict[str, List[str]]:
    report: Dict[str, List[str]] = {"created": [], "updated": []}
    default_packages = _iter_bundled_skill_packages()
    if not default_packages:
        return report
    rows = session.exec(select(SkillMarketItem)).all()
    existing_by_zip = {str(row.zip_filename or "").strip(): row for row in rows if str(row.zip_filename or "").strip()}
    existing_keys = {str(row.skill_key or "").strip() for row in rows if str(row.skill_key or "").strip()}
    for bundled_path in default_packages:
        runtime_path = RUNTIME_SKILLS_ROOT / bundled_path.name
        source_path = runtime_path if runtime_path.exists() else bundled_path
        try:
            summary = _extract_skill_zip_summary(source_path)
        except Exception:
            continue
        zip_filename = bundled_path.name
        entry_names_json = json.dumps(summary["entry_names"], ensure_ascii=False)
        display_name = _default_display_name((summary["entry_names"] or [bundled_path.stem])[0])
        zip_size_bytes = int(source_path.stat().st_size) if source_path.exists() else 0
        row = existing_by_zip.get(zip_filename)
        if row is None:
            row = SkillMarketItem(
                skill_key=_resolve_unique_skill_key(existing_keys, bundled_path.stem),
                display_name=display_name,
                description=str(summary["description"] or "").strip(),
                zip_filename=zip_filename,
                zip_size_bytes=zip_size_bytes,
                entry_names_json=entry_names_json,
            )
            session.add(row)
            existing_by_zip[zip_filename] = row
            report["created"].append(zip_filename)
            continue
        changed = False
        if int(row.zip_size_bytes or 0) != zip_size_bytes:
            row.zip_size_bytes = zip_size_bytes
            changed = True
        if str(row.entry_names_json or "") != entry_names_json:
            row.entry_names_json = entry_names_json
            changed = True
        if not str(row.display_name or "").strip():
            row.display_name = display_name
            changed = True
        if not str(row.description or "").strip() and str(summary["description"] or "").strip():
            row.description = str(summary["description"] or "").strip()
            changed = True
        if changed:
            report["updated"].append(zip_filename)
    if report["created"] or report["updated"]:
        session.commit()
    return report

View File

@@ -2,21 +2,30 @@ from __future__ import annotations
from typing import Any, Dict, List
from core.settings import AGENT_MD_TEMPLATES_FILE, TOPIC_PRESETS_TEMPLATES_FILE
from core.settings import (
    AGENT_MD_TEMPLATES_FILE,
    BUNDLED_AGENT_MD_TEMPLATES_FILE,
    BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE,
    TOPIC_PRESETS_TEMPLATES_FILE,
)

TEMPLATE_KEYS = ("agents_md", "soul_md", "user_md", "tools_md", "identity_md")

def _load_json_object(path: str) -> Dict[str, Any]:
def _load_json_object(path: str, fallback_path: str = "") -> Dict[str, Any]:
    import json

    try:
        with open(path, "r", encoding="utf-8") as file:
            data = json.load(file)
        if isinstance(data, dict):
            return data
    except Exception:
        pass
    for candidate in [path, fallback_path]:
        candidate = str(candidate or "").strip()
        if not candidate:
            continue
        try:
            with open(candidate, "r", encoding="utf-8") as file:
                data = json.load(file)
            if isinstance(data, dict):
                return data
        except Exception:
            continue
    return {}
@@ -36,12 +45,12 @@ def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:

def get_agent_md_templates() -> Dict[str, str]:
    raw = _load_json_object(str(AGENT_MD_TEMPLATES_FILE))
    raw = _load_json_object(str(AGENT_MD_TEMPLATES_FILE), str(BUNDLED_AGENT_MD_TEMPLATES_FILE))
    return {key: _normalize_md_text(raw.get(key)) for key in TEMPLATE_KEYS}

def get_topic_presets() -> Dict[str, Any]:
    raw = _load_json_object(str(TOPIC_PRESETS_TEMPLATES_FILE))
    raw = _load_json_object(str(TOPIC_PRESETS_TEMPLATES_FILE), str(BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE))
    presets = raw.get("presets")
    if not isinstance(presets, list):
        return {"presets": []}
@@ -68,4 +77,3 @@ def update_topic_presets(raw: Dict[str, Any]) -> Dict[str, Any]:

def get_agent_template_value(key: str) -> str:
    return get_agent_md_templates().get(key, "")

View File

@@ -0,0 +1,9 @@
# Speech Model Directory
This directory is reserved for local Whisper `.bin` model files and is intentionally not committed to source control.
Put the file configured by `STT_MODEL` here, for example:
- `ggml-small-q8_0.bin`
If the model file is missing, the backend will still start, but it will print a startup warning and speech transcription requests will not work until the file is added.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -4,4 +4,4 @@
    "user_md": "- 语言: 中文\n- 风格: 专业\n- 偏好: 简明且有步骤",
    "tools_md": "- 谨慎使用 shell\n- 修改文件后复核\n- 失败时说明原因并重试策略",
    "identity_md": "- 角色: 企业数字员工\n- 领域: 运维与任务执行"
}
}

View File

@@ -6,7 +6,7 @@
- Keep the assembly layer thin
- Keep business boundaries clear
- Prevent single-file, multi-responsibility bloat from reappearing
- Prevent single-file, multi-responsibility bloat
- Keep subsequent iteration on a low-risk, small-step verification path

This document takes effect as a **mandatory standard for subsequent development** from the moment it lands.

View File

@@ -0,0 +1,153 @@
services:
  postgres:
    image: ${POSTGRES_IMAGE:-postgres:16-alpine}
    container_name: dashboard-nanobot-postgres
    restart: unless-stopped
    environment:
      TZ: ${TZ:-Asia/Shanghai}
      POSTGRES_USER: ${POSTGRES_SUPERUSER:-postgres}
      POSTGRES_PASSWORD: ${POSTGRES_SUPERPASSWORD:?POSTGRES_SUPERPASSWORD is required}
      POSTGRES_DB: ${POSTGRES_BOOTSTRAP_DB:-postgres}
    volumes:
      - ./data/postgres:/var/lib/postgresql/data
    expose:
      - "5432"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U \"$${POSTGRES_USER}\" -d \"$${POSTGRES_DB}\""]
      interval: 10s
      timeout: 5s
      retries: 10
      start_period: 20s
    logging:
      driver: json-file
      options:
        max-size: "20m"
        max-file: "3"

  redis:
    image: ${REDIS_IMAGE:-redis:7-alpine}
    container_name: dashboard-nanobot-redis
    restart: unless-stopped
    environment:
      TZ: ${TZ:-Asia/Shanghai}
    command: ["redis-server", "--appendonly", "yes", "--save", "60", "1000"]
    volumes:
      - ./data/redis:/data
    expose:
      - "6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 10
      start_period: 10s
    logging:
      driver: json-file
      options:
        max-size: "20m"
        max-file: "3"

  backend:
    build:
      context: .
      dockerfile: backend/Dockerfile
      args:
        PYTHON_BASE_IMAGE: ${PYTHON_BASE_IMAGE:-python:3.12-slim}
        PIP_INDEX_URL: ${PIP_INDEX_URL:-https://pypi.org/simple}
        PIP_TRUSTED_HOST: ${PIP_TRUSTED_HOST:-}
    image: dashboard-nanobot/backend:${BACKEND_IMAGE_TAG:-latest}
    container_name: dashboard-nanobot-backend
    restart: unless-stopped
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    environment:
      TZ: ${TZ:-Asia/Shanghai}
      APP_HOST: 0.0.0.0
      APP_PORT: 8000
      APP_RELOAD: "false"
      DATABASE_ECHO: "false"
      DATABASE_POOL_SIZE: ${DATABASE_POOL_SIZE:-20}
      DATABASE_MAX_OVERFLOW: ${DATABASE_MAX_OVERFLOW:-40}
      DATABASE_POOL_TIMEOUT: ${DATABASE_POOL_TIMEOUT:-30}
      DATABASE_POOL_RECYCLE: ${DATABASE_POOL_RECYCLE:-1800}
      UPLOAD_MAX_MB: ${UPLOAD_MAX_MB:-100}
      DATA_ROOT: /app/data
      BOTS_WORKSPACE_ROOT: ${HOST_BOTS_WORKSPACE_ROOT}
      DATABASE_URL: postgresql+psycopg://${POSTGRES_APP_USER}:${POSTGRES_APP_PASSWORD}@postgres:5432/${POSTGRES_APP_DB}
      REDIS_ENABLED: ${REDIS_ENABLED:-true}
      REDIS_URL: redis://redis:6379/${REDIS_DB:-8}
      REDIS_PREFIX: ${REDIS_PREFIX:-dashboard_nanobot}
      REDIS_DEFAULT_TTL: ${REDIS_DEFAULT_TTL:-60}
      CHAT_PULL_PAGE_SIZE: ${CHAT_PULL_PAGE_SIZE:-60}
      COMMAND_AUTO_UNLOCK_SECONDS: ${COMMAND_AUTO_UNLOCK_SECONDS:-10}
      DEFAULT_BOT_SYSTEM_TIMEZONE: ${DEFAULT_BOT_SYSTEM_TIMEZONE:-Asia/Shanghai}
      PANEL_ACCESS_PASSWORD: ${PANEL_ACCESS_PASSWORD:-}
      CORS_ALLOWED_ORIGINS: ${CORS_ALLOWED_ORIGINS:-}
      WORKSPACE_DOWNLOAD_EXTENSIONS: ${WORKSPACE_DOWNLOAD_EXTENSIONS:-}
      STT_ENABLED: ${STT_ENABLED:-true}
      STT_MODEL: ${STT_MODEL:-ggml-small-q8_0.bin}
      STT_MODEL_DIR: ${STT_MODEL_DIR:-/app/data/model}
      STT_DEVICE: ${STT_DEVICE:-cpu}
      STT_MAX_AUDIO_SECONDS: ${STT_MAX_AUDIO_SECONDS:-20}
      STT_DEFAULT_LANGUAGE: ${STT_DEFAULT_LANGUAGE:-zh}
      STT_FORCE_SIMPLIFIED: ${STT_FORCE_SIMPLIFIED:-true}
      STT_AUDIO_PREPROCESS: ${STT_AUDIO_PREPROCESS:-true}
      STT_AUDIO_FILTER: ${STT_AUDIO_FILTER:-highpass=f=120,lowpass=f=7600,afftdn=nf=-20}
      STT_INITIAL_PROMPT: ${STT_INITIAL_PROMPT:-以下内容可能包含简体中文和英文术语。请优先输出简体中文,英文单词、缩写、品牌名和数字保持原文,不要翻译。}
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ./data:/app/data
      - ${HOST_BOTS_WORKSPACE_ROOT}:${HOST_BOTS_WORKSPACE_ROOT}
    expose:
      - "8000"
    healthcheck:
      test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8000/api/health', timeout=3).read()"]
      interval: 15s
      timeout: 5s
      retries: 5
      start_period: 20s
    logging:
      driver: json-file
      options:
        max-size: "20m"
        max-file: "3"

  nginx:
    build:
      context: ./frontend
      dockerfile: Dockerfile
      args:
        NODE_BASE_IMAGE: ${NODE_BASE_IMAGE:-node:22-alpine}
        NGINX_BASE_IMAGE: ${NGINX_BASE_IMAGE:-nginx:alpine}
        NPM_REGISTRY: ${NPM_REGISTRY:-https://registry.npmjs.org/}
        VITE_API_BASE: /api
        VITE_WS_BASE: /ws/monitor
    image: dashboard-nanobot/nginx:${FRONTEND_IMAGE_TAG:-latest}
    container_name: dashboard-nanobot-nginx
    restart: unless-stopped
    environment:
      TZ: ${TZ:-Asia/Shanghai}
      UPLOAD_MAX_MB: ${UPLOAD_MAX_MB:-100}
    depends_on:
      backend:
        condition: service_healthy
    ports:
      - "${NGINX_PORT}:80"
    healthcheck:
      test: ["CMD", "wget", "-q", "-O", "/dev/null", "http://127.0.0.1/"]
      interval: 15s
      timeout: 5s
      retries: 5
      start_period: 10s
    logging:
      driver: json-file
      options:
        max-size: "20m"
        max-file: "3"

networks:
  default:
    name: dashboard-nanobot-network

View File

@@ -20,7 +20,7 @@ services:
      DATABASE_POOL_TIMEOUT: ${DATABASE_POOL_TIMEOUT:-30}
      DATABASE_POOL_RECYCLE: ${DATABASE_POOL_RECYCLE:-1800}
      UPLOAD_MAX_MB: ${UPLOAD_MAX_MB:-100}
      DATA_ROOT: ${HOST_DATA_ROOT}
      DATA_ROOT: /app/data
      BOTS_WORKSPACE_ROOT: ${HOST_BOTS_WORKSPACE_ROOT}
      DATABASE_URL: ${DATABASE_URL:-}
      REDIS_ENABLED: ${REDIS_ENABLED:-false}
@@ -33,7 +33,7 @@ services:
      PANEL_ACCESS_PASSWORD: ${PANEL_ACCESS_PASSWORD:-}
      STT_ENABLED: ${STT_ENABLED:-true}
      STT_MODEL: ${STT_MODEL:-ggml-small-q8_0.bin}
      STT_MODEL_DIR: ${STT_MODEL_DIR:-${HOST_DATA_ROOT}/model}
      STT_MODEL_DIR: ${STT_MODEL_DIR:-/app/data/model}
      STT_DEVICE: ${STT_DEVICE:-cpu}
      STT_MAX_AUDIO_SECONDS: ${STT_MAX_AUDIO_SECONDS:-20}
      STT_DEFAULT_LANGUAGE: ${STT_DEFAULT_LANGUAGE:-zh}
@@ -43,7 +43,7 @@ services:
      STT_INITIAL_PROMPT: ${STT_INITIAL_PROMPT:-以下内容可能包含简体中文和英文术语。请优先输出简体中文,英文单词、缩写、品牌名和数字保持原文,不要翻译。}
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ${HOST_DATA_ROOT}:${HOST_DATA_ROOT}
      - ./data:/app/data
      - ${HOST_BOTS_WORKSPACE_ROOT}:${HOST_BOTS_WORKSPACE_ROOT}
    expose:
      - "8000"

View File

@@ -1,308 +0,0 @@
#!/usr/bin/env python3
from __future__ import annotations
import ast
import builtins
import dis
import importlib
import inspect
import pathlib
import pkgutil
import sys
from dataclasses import dataclass
from typing import Iterable, List, Sequence

PROJECT_ROOT = pathlib.Path(__file__).resolve().parents[1]
BACKEND_ROOT = PROJECT_ROOT / "backend"
ROUTER_MAX_LINES = 400
SERVICE_MAX_LINES = 500

@dataclass
class Finding:
    severity: str
    code: str
    path: str
    detail: str

def _iter_backend_py_files() -> Iterable[pathlib.Path]:
    for path in sorted(BACKEND_ROOT.rglob("*.py")):
        if "venv" in path.parts:
            continue
        yield path

def _iter_backend_pyc_files() -> Iterable[pathlib.Path]:
    for path in sorted(BACKEND_ROOT.rglob("*.pyc")):
        if "venv" in path.parts:
            continue
        yield path

def _module_name_from_path(path: pathlib.Path) -> str:
    rel = path.relative_to(BACKEND_ROOT).with_suffix("")
    return ".".join(rel.parts)

def _parse_ast(path: pathlib.Path) -> ast.AST | None:
    try:
        return ast.parse(path.read_text(encoding="utf-8"), filename=str(path))
    except Exception as exc:
        return ast.parse("", filename=f"{path} [parse failed: {exc}]")

def _check_import_all(paths: Sequence[pathlib.Path]) -> List[Finding]:
    findings: List[Finding] = []
    for path in paths:
        tree = _parse_ast(path)
        for node in ast.walk(tree):
            if isinstance(node, ast.ImportFrom) and any(alias.name == "*" for alias in node.names):
                findings.append(
                    Finding(
                        severity="ERROR",
                        code="import-star",
                        path=str(path.relative_to(PROJECT_ROOT)),
                        detail=f"wildcard import from '{node.module or ''}'",
                    )
                )
    return findings

def _check_settings_imports(paths: Sequence[pathlib.Path]) -> List[Finding]:
    findings: List[Finding] = []
    sys.path.insert(0, str(BACKEND_ROOT))
    settings = importlib.import_module("core.settings")
    available = set(dir(settings))
    for path in paths:
        tree = _parse_ast(path)
        for node in ast.walk(tree):
            if not isinstance(node, ast.ImportFrom) or node.module != "core.settings":
                continue
            for alias in node.names:
                if alias.name == "*":
                    continue
                if alias.name not in available:
                    findings.append(
                        Finding(
                            severity="ERROR",
                            code="settings-missing-symbol",
                            path=str(path.relative_to(PROJECT_ROOT)),
                            detail=f"imports missing symbol '{alias.name}' from core.settings",
                        )
                    )
    return findings

def _check_importability(paths: Sequence[pathlib.Path]) -> List[Finding]:
    findings: List[Finding] = []
    sys.path.insert(0, str(BACKEND_ROOT))
    for path in paths:
        if path.name == "__init__.py":
            continue
        module_name = _module_name_from_path(path)
        try:
            importlib.import_module(module_name)
        except Exception as exc:
            findings.append(
                Finding(
                    severity="ERROR",
                    code="module-import-failed",
                    path=str(path.relative_to(PROJECT_ROOT)),
                    detail=f"{module_name}: {exc.__class__.__name__}: {exc}",
                )
            )
    return findings

def _check_runtime_global_refs(paths: Sequence[pathlib.Path]) -> List[Finding]:
    findings: List[Finding] = []
    sys.path.insert(0, str(BACKEND_ROOT))
    builtin_names = set(dir(builtins))
    module_names = {
        _module_name_from_path(path)
        for path in paths
        if path.name != "__init__.py"
    }
    for module_name in sorted(module_names):
        try:
            module = importlib.import_module(module_name)
        except Exception:
            continue
        module_globals = module.__dict__

        def _check_function(obj: object, qualname: str) -> None:
            if module_name.startswith("models.") and qualname.endswith(".__init__"):
                return
            try:
                instructions = list(dis.get_instructions(obj))
            except TypeError:
                return
            for ins in instructions:
                if ins.opname not in {"LOAD_GLOBAL", "LOAD_NAME"}:
                    continue
                ref_name = str(ins.argval or "").strip()
                if not ref_name or ref_name in builtin_names or ref_name in module_globals:
                    continue
                findings.append(
                    Finding(
                        severity="ERROR",
                        code="runtime-missing-global",
                        path=module_name.replace(".", "/") + ".py",
                        detail=f"{qualname} uses missing global '{ref_name}' ({ins.opname})",
                    )
                )

        for _attr_name, obj in vars(module).items():
            if inspect.isfunction(obj) and obj.__module__ == module.__name__:
                _check_function(obj, obj.__qualname__)
            elif inspect.isclass(obj) and obj.__module__ == module.__name__:
                for _method_name, method in vars(obj).items():
                    if inspect.isfunction(method):
                        _check_function(method, f"{obj.__qualname__}.{method.__name__}")
    deduped: dict[tuple[str, str, str], Finding] = {}
    for finding in findings:
        key = (finding.code, finding.path, finding.detail)
        deduped[key] = finding
    return list(deduped.values())

def _check_app_factory() -> List[Finding]:
    findings: List[Finding] = []
    sys.path.insert(0, str(BACKEND_ROOT))
    try:
        from app_factory import create_app
    except Exception as exc:
        return [
            Finding(
                severity="ERROR",
                code="app-factory-import-failed",
                path="backend/app_factory.py",
                detail=f"{exc.__class__.__name__}: {exc}",
            )
        ]
    try:
        app = create_app()
    except Exception as exc:
        return [
            Finding(
                severity="ERROR",
                code="app-factory-create-failed",
                path="backend/app_factory.py",
                detail=f"{exc.__class__.__name__}: {exc}",
            )
        ]
    route_keys: set[tuple[str, str]] = set()
    duplicates: set[tuple[str, str]] = set()
    for route in getattr(app, "routes", []):
        path = str(getattr(route, "path", "") or "").strip()
        methods = sorted(getattr(route, "methods", []) or [])
        for method in methods:
            if method in {"HEAD", "OPTIONS"}:
                continue
            key = (method, path)
            if key in route_keys:
                duplicates.add(key)
            else:
                route_keys.add(key)
    for method, path in sorted(duplicates):
        findings.append(
            Finding(
                severity="ERROR",
                code="duplicate-route",
                path="backend/app_factory.py",
                detail=f"duplicate route registered for {method} {path}",
            )
        )
    return findings

def _source_path_for_pyc(path: pathlib.Path) -> pathlib.Path | None:
    if path.name == "__init__.cpython-312.pyc":
        return path.parent.parent / "__init__.py"
    if path.parent.name != "__pycache__":
        return None
    stem = path.name.split(".cpython-", 1)[0]
    return path.parent.parent / f"{stem}.py"

def _check_pyc_without_source(pyc_paths: Sequence[pathlib.Path]) -> List[Finding]:
    findings: List[Finding] = []
    for path in pyc_paths:
        source = _source_path_for_pyc(path)
        if source is None:
            continue
        if source.exists():
            continue
        findings.append(
            Finding(
                severity="ERROR",
                code="pyc-without-source",
                path=str(path.relative_to(PROJECT_ROOT)),
                detail=f"compiled module has no source file at {source.relative_to(PROJECT_ROOT)}",
            )
        )
    return findings

def _check_file_sizes(paths: Sequence[pathlib.Path]) -> List[Finding]:
    findings: List[Finding] = []
    for path in paths:
        rel = path.relative_to(BACKEND_ROOT)
        line_count = sum(1 for _ in path.open("r", encoding="utf-8"))
        if rel.parts[:1] == ("api",) and line_count > ROUTER_MAX_LINES:
            findings.append(
                Finding(
                    severity="WARN",
                    code="router-too-large",
                    path=str(path.relative_to(PROJECT_ROOT)),
                    detail=f"{line_count} lines exceeds router limit {ROUTER_MAX_LINES}",
                )
            )
        if rel.parts[:1] == ("services",) and line_count > SERVICE_MAX_LINES:
            findings.append(
                Finding(
                    severity="WARN",
                    code="service-too-large",
                    path=str(path.relative_to(PROJECT_ROOT)),
                    detail=f"{line_count} lines exceeds service limit {SERVICE_MAX_LINES}",
                )
            )
    return findings

def _print_findings(findings: Sequence[Finding]) -> None:
    if not findings:
        print("PASS backend integrity audit")
        return
    for finding in findings:
        print(f"[{finding.severity}] {finding.code} {finding.path} :: {finding.detail}")

def main() -> int:
    py_files = list(_iter_backend_py_files())
    pyc_files = list(_iter_backend_pyc_files())
    findings: List[Finding] = []
    findings.extend(_check_import_all(py_files))
    findings.extend(_check_settings_imports(py_files))
    findings.extend(_check_importability(py_files))
    findings.extend(_check_runtime_global_refs(py_files))
    findings.extend(_check_app_factory())
    findings.extend(_check_pyc_without_source(pyc_files))
    findings.extend(_check_file_sizes(py_files))
    findings.sort(key=lambda item: (item.severity != "ERROR", item.code, item.path))
    _print_findings(findings)
    return 1 if any(item.severity == "ERROR" for item in findings) else 0

if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,139 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ENV_FILE="${1:-$ROOT_DIR/.env.full}"
COMPOSE_FILE="$ROOT_DIR/docker-compose.full.yml"
DATA_DIR="$ROOT_DIR/data"

require_file() {
  local path="$1"
  local hint="$2"
  if [[ -f "$path" ]]; then
    return 0
  fi
  echo "Missing file: $path"
  if [[ -n "$hint" ]]; then
    echo "$hint"
  fi
  exit 1
}

require_env() {
  local name="$1"
  if [[ -n "${!name:-}" ]]; then
    return 0
  fi
  echo "Missing required env: $name"
  exit 1
}

read_env_value() {
  local key="$1"
  local line=""
  local value=""
  while IFS= read -r line || [[ -n "$line" ]]; do
    line="${line%$'\r'}"
    [[ -z "${line//[[:space:]]/}" ]] && continue
    [[ "${line#\#}" != "$line" ]] && continue
    [[ "${line#export }" != "$line" ]] && line="${line#export }"
    [[ "$line" == "$key="* ]] || continue
    value="${line#*=}"
    if [[ "$value" =~ ^\"(.*)\"$ ]]; then
      value="${BASH_REMATCH[1]}"
    elif [[ "$value" =~ ^\'(.*)\'$ ]]; then
      value="${BASH_REMATCH[1]}"
    fi
    printf '%s' "$value"
    return 0
  done < "$ENV_FILE"
  return 1
}

load_env_var() {
  local name="$1"
  local default_value="${2:-}"
  local value=""
  value="$(read_env_value "$name" || true)"
  if [[ -z "$value" ]]; then
    value="$default_value"
  fi
  printf -v "$name" '%s' "$value"
}

wait_for_health() {
  local container_name="$1"
  local timeout_seconds="$2"
  local elapsed=0
  local status=""
  while (( elapsed < timeout_seconds )); do
    status="$(
      docker inspect --format '{{if .State.Health}}{{.State.Health.Status}}{{else}}{{.State.Status}}{{end}}' "$container_name" 2>/dev/null || true
    )"
    if [[ "$status" == "healthy" || "$status" == "running" ]]; then
      echo "[deploy-full] $container_name is $status"
      return 0
    fi
    sleep 2
    elapsed=$((elapsed + 2))
  done
  echo "[deploy-full] timed out waiting for $container_name (last status: ${status:-unknown})"
  docker logs --tail 80 "$container_name" 2>/dev/null || true
  exit 1
}

require_file "$ENV_FILE" "Create it from: $ROOT_DIR/.env.full.example"
require_file "$COMPOSE_FILE" ""

load_env_var HOST_BOTS_WORKSPACE_ROOT
load_env_var POSTGRES_SUPERUSER postgres
load_env_var POSTGRES_SUPERPASSWORD
load_env_var POSTGRES_BOOTSTRAP_DB postgres
load_env_var POSTGRES_APP_DB
load_env_var POSTGRES_APP_USER
load_env_var POSTGRES_APP_PASSWORD
load_env_var NGINX_PORT 8080

require_env HOST_BOTS_WORKSPACE_ROOT
require_env POSTGRES_SUPERUSER
require_env POSTGRES_SUPERPASSWORD
require_env POSTGRES_BOOTSTRAP_DB
require_env POSTGRES_APP_DB
require_env POSTGRES_APP_USER
require_env POSTGRES_APP_PASSWORD
require_env NGINX_PORT

echo "[deploy-full] using env: $ENV_FILE"
mkdir -p \
  "$DATA_DIR" \
  "$DATA_DIR/postgres" \
  "$DATA_DIR/redis" \
  "$DATA_DIR/model" \
  "$HOST_BOTS_WORKSPACE_ROOT"

docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" config -q

echo "[deploy-full] starting postgres and redis"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d postgres redis
wait_for_health "dashboard-nanobot-postgres" 120
wait_for_health "dashboard-nanobot-redis" 60

echo "[deploy-full] initializing application database"
"$ROOT_DIR/scripts/init-full-db.sh" "$ENV_FILE"

echo "[deploy-full] starting backend and nginx"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d --build backend nginx
wait_for_health "dashboard-nanobot-backend" 180
wait_for_health "dashboard-nanobot-nginx" 120

echo "[deploy-full] service status"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" ps
echo "[deploy-full] done"

View File

@@ -3,6 +3,7 @@ set -euo pipefail
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ENV_FILE="${1:-$ROOT_DIR/.env.prod}"
DATA_DIR="$ROOT_DIR/data"

if [[ ! -f "$ENV_FILE" ]]; then
  echo "Missing env file: $ENV_FILE"
@@ -11,6 +12,7 @@ if [[ ! -f "$ENV_FILE" ]]; then
fi

echo "[deploy] using env: $ENV_FILE"
mkdir -p "$DATA_DIR" "$DATA_DIR/model"
docker compose --env-file "$ENV_FILE" -f "$ROOT_DIR/docker-compose.prod.yml" config -q
docker compose --env-file "$ENV_FILE" -f "$ROOT_DIR/docker-compose.prod.yml" up -d --build

View File

@@ -0,0 +1,135 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ENV_FILE="${1:-$ROOT_DIR/.env.full}"
COMPOSE_FILE="$ROOT_DIR/docker-compose.full.yml"
BOOTSTRAP_SQL="$ROOT_DIR/scripts/sql/init-postgres-bootstrap.sql"
APP_SQL="$ROOT_DIR/scripts/sql/init-postgres-app.sql"

require_file() {
  local path="$1"
  local hint="$2"
  if [[ -f "$path" ]]; then
    return 0
  fi
  echo "Missing file: $path"
  if [[ -n "$hint" ]]; then
    echo "$hint"
  fi
  exit 1
}

require_env() {
  local name="$1"
  if [[ -n "${!name:-}" ]]; then
    return 0
  fi
  echo "Missing required env: $name"
  exit 1
}

read_env_value() {
  local key="$1"
  local line=""
  local value=""
  while IFS= read -r line || [[ -n "$line" ]]; do
    line="${line%$'\r'}"
    [[ -z "${line//[[:space:]]/}" ]] && continue
    [[ "${line#\#}" != "$line" ]] && continue
    [[ "${line#export }" != "$line" ]] && line="${line#export }"
    [[ "$line" == "$key="* ]] || continue
    value="${line#*=}"
    if [[ "$value" =~ ^\"(.*)\"$ ]]; then
      value="${BASH_REMATCH[1]}"
    elif [[ "$value" =~ ^\'(.*)\'$ ]]; then
      value="${BASH_REMATCH[1]}"
    fi
    printf '%s' "$value"
    return 0
  done < "$ENV_FILE"
  return 1
}

load_env_var() {
  local name="$1"
  local default_value="${2:-}"
  local value=""
  value="$(read_env_value "$name" || true)"
  if [[ -z "$value" ]]; then
    value="$default_value"
  fi
  printf -v "$name" '%s' "$value"
}

wait_for_postgres() {
  local timeout_seconds="${1:-120}"
  local elapsed=0
  while (( elapsed < timeout_seconds )); do
    if docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec -T \
      -e PGPASSWORD="$POSTGRES_SUPERPASSWORD" \
      postgres \
      pg_isready -U "$POSTGRES_SUPERUSER" -d "$POSTGRES_BOOTSTRAP_DB" >/dev/null 2>&1; then
      return 0
    fi
    sleep 2
    elapsed=$((elapsed + 2))
  done
  echo "[init-full-db] timed out waiting for postgres"
  docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" logs --tail 100 postgres || true
  exit 1
}

require_file "$ENV_FILE" "Create it from: $ROOT_DIR/.env.full.example"
require_file "$COMPOSE_FILE" ""
require_file "$BOOTSTRAP_SQL" ""
require_file "$APP_SQL" ""

load_env_var POSTGRES_SUPERUSER postgres
load_env_var POSTGRES_SUPERPASSWORD
load_env_var POSTGRES_BOOTSTRAP_DB postgres
load_env_var POSTGRES_APP_DB
load_env_var POSTGRES_APP_USER
load_env_var POSTGRES_APP_PASSWORD

require_env POSTGRES_SUPERUSER
require_env POSTGRES_SUPERPASSWORD
require_env POSTGRES_BOOTSTRAP_DB
require_env POSTGRES_APP_DB
require_env POSTGRES_APP_USER
require_env POSTGRES_APP_PASSWORD

docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d postgres >/dev/null
wait_for_postgres 120

echo "[init-full-db] ensuring role/database exist"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec -T \
  -e PGPASSWORD="$POSTGRES_SUPERPASSWORD" \
  postgres \
  psql \
  -v ON_ERROR_STOP=1 \
  -v app_db="$POSTGRES_APP_DB" \
  -v app_user="$POSTGRES_APP_USER" \
  -v app_password="$POSTGRES_APP_PASSWORD" \
  -U "$POSTGRES_SUPERUSER" \
  -d "$POSTGRES_BOOTSTRAP_DB" \
  -f - < "$BOOTSTRAP_SQL"

echo "[init-full-db] ensuring schema privileges in $POSTGRES_APP_DB"
docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec -T \
  -e PGPASSWORD="$POSTGRES_SUPERPASSWORD" \
  postgres \
  psql \
  -v ON_ERROR_STOP=1 \
  -v app_user="$POSTGRES_APP_USER" \
  -U "$POSTGRES_SUPERUSER" \
  -d "$POSTGRES_APP_DB" \
  -f - < "$APP_SQL"

echo "[init-full-db] done"

View File

@@ -0,0 +1,4 @@
\set ON_ERROR_STOP on
SELECT format('ALTER SCHEMA public OWNER TO %I', :'app_user')\gexec
SELECT format('GRANT ALL ON SCHEMA public TO %I', :'app_user')\gexec

View File

@@ -0,0 +1,20 @@
\set ON_ERROR_STOP on

SELECT format('CREATE ROLE %I LOGIN PASSWORD %L', :'app_user', :'app_password')
WHERE NOT EXISTS (
    SELECT 1
    FROM pg_catalog.pg_roles
    WHERE rolname = :'app_user'
)\gexec

SELECT format('ALTER ROLE %I WITH LOGIN PASSWORD %L', :'app_user', :'app_password')\gexec

SELECT format('CREATE DATABASE %I OWNER %I', :'app_db', :'app_user')
WHERE NOT EXISTS (
    SELECT 1
    FROM pg_database
    WHERE datname = :'app_db'
)\gexec

SELECT format('ALTER DATABASE %I OWNER TO %I', :'app_db', :'app_user')\gexec
SELECT format('GRANT ALL PRIVILEGES ON DATABASE %I TO %I', :'app_db', :'app_user')\gexec
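The same check-then-create pattern, sketched with psycopg's SQL composition for anyone scripting this outside psql (the connection string and names are placeholders; the repo itself drives this through `init-full-db.sh` and the SQL above):

```python
import psycopg
from psycopg import sql

# CREATE DATABASE cannot run inside a transaction block, hence autocommit.
conn = psycopg.connect("postgresql://postgres:superpass@localhost:5432/postgres", autocommit=True)
with conn.cursor() as cur:
    cur.execute("SELECT 1 FROM pg_catalog.pg_roles WHERE rolname = %s", ("nanobot",))
    if cur.fetchone() is None:
        cur.execute(
            sql.SQL("CREATE ROLE {} LOGIN PASSWORD {}").format(
                sql.Identifier("nanobot"), sql.Literal("change_me")
            )
        )
    cur.execute("SELECT 1 FROM pg_database WHERE datname = %s", ("nanobot",))
    if cur.fetchone() is None:
        cur.execute(
            sql.SQL("CREATE DATABASE {} OWNER {}").format(
                sql.Identifier("nanobot"), sql.Identifier("nanobot")
            )
        )
```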

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
ENV_FILE="${1:-$ROOT_DIR/.env.full}"
COMPOSE_FILE="$ROOT_DIR/docker-compose.full.yml"

if [[ ! -f "$ENV_FILE" ]]; then
  echo "Missing env file: $ENV_FILE"
  echo "Create it from: $ROOT_DIR/.env.full.example"
  exit 1
fi

docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" down