Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
|
|
4cb568c754 | |
|
|
f20dabc58e | |
|
|
b8ca934bd1 | |
|
|
914436ac3b | |
|
|
da018d515d |
|
|
@ -6,15 +6,8 @@ frontend/node_modules
|
||||||
frontend/dist
|
frontend/dist
|
||||||
backend/venv
|
backend/venv
|
||||||
|
|
||||||
data/*
|
data
|
||||||
!data/templates/
|
workspace
|
||||||
!data/templates/**
|
|
||||||
!data/skills/
|
|
||||||
!data/skills/**
|
|
||||||
!data/model/
|
|
||||||
data/model/*
|
|
||||||
!data/model/README.md
|
|
||||||
/workspace
|
|
||||||
|
|
||||||
**/__pycache__
|
**/__pycache__
|
||||||
**/*.pyc
|
**/*.pyc
|
||||||
|
|
|
||||||
|
|
@ -1,84 +0,0 @@
|
||||||
# Public exposed port (only nginx is exposed)
|
|
||||||
NGINX_PORT=8080
|
|
||||||
|
|
||||||
# Project data is always mounted from the repository root `./data`.
|
|
||||||
# Only workspace root still needs an absolute host path.
|
|
||||||
HOST_BOTS_WORKSPACE_ROOT=/opt/dashboard-nanobot/workspace/bots
|
|
||||||
|
|
||||||
# Fixed Docker bridge subnet for the compose network.
|
|
||||||
# Change this if it conflicts with your host LAN / VPN / intranet routing.
|
|
||||||
DOCKER_NETWORK_NAME=dashboard-nanobot-network
|
|
||||||
DOCKER_NETWORK_SUBNET=172.20.0.0/16
|
|
||||||
|
|
||||||
# Optional custom image tags
|
|
||||||
BACKEND_IMAGE_TAG=latest
|
|
||||||
FRONTEND_IMAGE_TAG=latest
|
|
||||||
|
|
||||||
# Optional base images / mirrors
|
|
||||||
PYTHON_BASE_IMAGE=python:3.12-slim
|
|
||||||
NODE_BASE_IMAGE=node:22-alpine
|
|
||||||
NGINX_BASE_IMAGE=nginx:alpine
|
|
||||||
POSTGRES_IMAGE=postgres:16-alpine
|
|
||||||
REDIS_IMAGE=redis:7-alpine
|
|
||||||
|
|
||||||
# Python package index mirror (recommended in CN)
|
|
||||||
PIP_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple
|
|
||||||
PIP_TRUSTED_HOST=pypi.tuna.tsinghua.edu.cn
|
|
||||||
|
|
||||||
# Frontend package registry mirror (used by yarn, recommended in CN)
|
|
||||||
NPM_REGISTRY=https://registry.npmmirror.com
|
|
||||||
|
|
||||||
# Container timezone
|
|
||||||
TZ=Asia/Shanghai
|
|
||||||
|
|
||||||
# PostgreSQL bootstrap account.
|
|
||||||
# These values are used by the postgres container itself.
|
|
||||||
POSTGRES_SUPERUSER=postgres
|
|
||||||
POSTGRES_SUPERPASSWORD=change_me_pg_super_password
|
|
||||||
POSTGRES_BOOTSTRAP_DB=postgres
|
|
||||||
|
|
||||||
# Dashboard application database account.
|
|
||||||
# deploy-full.sh will call scripts/init-full-db.sh to create/update these idempotently.
|
|
||||||
POSTGRES_APP_DB=nanobot
|
|
||||||
POSTGRES_APP_USER=nanobot
|
|
||||||
POSTGRES_APP_PASSWORD=change_me_nanobot_password
|
|
||||||
DATABASE_POOL_SIZE=20
|
|
||||||
DATABASE_MAX_OVERFLOW=40
|
|
||||||
DATABASE_POOL_TIMEOUT=30
|
|
||||||
DATABASE_POOL_RECYCLE=1800
|
|
||||||
|
|
||||||
# Redis cache (managed by docker-compose.full.yml)
|
|
||||||
REDIS_ENABLED=true
|
|
||||||
REDIS_DB=8
|
|
||||||
REDIS_PREFIX=nanobot
|
|
||||||
REDIS_DEFAULT_TTL=60
|
|
||||||
|
|
||||||
# Default timezone injected into newly created bot runtime env (`TZ`).
|
|
||||||
# If unset, backend falls back to `TZ` and then `Asia/Shanghai`.
|
|
||||||
DEFAULT_BOT_SYSTEM_TIMEZONE=Asia/Shanghai
|
|
||||||
|
|
||||||
# Panel access protection (deployment secret, not stored in sys_setting)
|
|
||||||
PANEL_ACCESS_PASSWORD=change_me_panel_password
|
|
||||||
|
|
||||||
# Browser credential requests must use an explicit CORS allowlist (deployment security setting).
|
|
||||||
# If frontend and backend are served under the same origin via nginx `/api` proxy,
|
|
||||||
# this can usually stay unset. Otherwise set the real dashboard origin(s).
|
|
||||||
# Example:
|
|
||||||
# CORS_ALLOWED_ORIGINS=https://dashboard.example.com
|
|
||||||
|
|
||||||
# Nginx upload entry limit (MB).
|
|
||||||
# The backend business limit is stored in `sys_setting.upload_max_mb`;
|
|
||||||
# for full deployment this value is also used as the initial DB seed.
|
|
||||||
UPLOAD_MAX_MB=200
|
|
||||||
|
|
||||||
# Local speech-to-text (Whisper via whisper.cpp model file)
|
|
||||||
STT_ENABLED=true
|
|
||||||
STT_MODEL=ggml-small-q8_0.bin
|
|
||||||
STT_MODEL_DIR=/app/data/model
|
|
||||||
STT_DEVICE=cpu
|
|
||||||
STT_MAX_AUDIO_SECONDS=20
|
|
||||||
STT_DEFAULT_LANGUAGE=zh
|
|
||||||
STT_FORCE_SIMPLIFIED=true
|
|
||||||
STT_AUDIO_PREPROCESS=true
|
|
||||||
STT_AUDIO_FILTER=highpass=f=120,lowpass=f=7600,afftdn=nf=-20
|
|
||||||
STT_INITIAL_PROMPT=以下内容可能包含简体中文和英文术语。请优先输出简体中文,英文单词、缩写、品牌名和数字保持原文,不要翻译。
|
|
||||||
|
|
@ -1,14 +1,10 @@
|
||||||
# Public exposed port (only nginx is exposed)
|
# Public exposed port (only nginx is exposed)
|
||||||
NGINX_PORT=8082
|
NGINX_PORT=8080
|
||||||
|
|
||||||
# Project data is always mounted from the repository root `./data`.
|
# REQUIRED absolute host paths.
|
||||||
# Only workspace root still needs an absolute host path.
|
# They must exist and be writable by docker daemon.
|
||||||
HOST_BOTS_WORKSPACE_ROOT=/dep/dashboard-nanobot/workspace/bots
|
HOST_DATA_ROOT=/opt/dashboard-nanobot/data
|
||||||
|
HOST_BOTS_WORKSPACE_ROOT=/opt/dashboard-nanobot/workspace/bots
|
||||||
# Fixed Docker bridge subnet for the compose network.
|
|
||||||
# Change this if it conflicts with your host LAN / VPN / intranet routing.
|
|
||||||
DOCKER_NETWORK_NAME=dashboard-nanobot-network
|
|
||||||
DOCKER_NETWORK_SUBNET=172.20.0.0/16
|
|
||||||
|
|
||||||
# Optional custom image tags
|
# Optional custom image tags
|
||||||
BACKEND_IMAGE_TAG=latest
|
BACKEND_IMAGE_TAG=latest
|
||||||
|
|
@ -27,43 +23,43 @@ PIP_TRUSTED_HOST=pypi.tuna.tsinghua.edu.cn
|
||||||
# Frontend package registry mirror (used by yarn, recommended in CN)
|
# Frontend package registry mirror (used by yarn, recommended in CN)
|
||||||
NPM_REGISTRY=https://registry.npmmirror.com
|
NPM_REGISTRY=https://registry.npmmirror.com
|
||||||
|
|
||||||
DATABASE_URL=postgresql+psycopg://postgres:postgres@10.100.52.43:5433/nanobot
|
# Database (choose one: SQLite / PostgreSQL / MySQL)
|
||||||
|
# SQLite example:
|
||||||
|
# DATABASE_URL=sqlite:///${HOST_DATA_ROOT}/nanobot_dashboard.db
|
||||||
|
# PostgreSQL example:
|
||||||
|
# DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
|
||||||
|
# MySQL example:
|
||||||
|
# DATABASE_URL=mysql+pymysql://user:password@127.0.0.1:3306/nanobot_dashboard
|
||||||
|
DATABASE_URL=postgresql+psycopg://postgres:change_me@127.0.0.1:5432/dashboard
|
||||||
DATABASE_POOL_SIZE=20
|
DATABASE_POOL_SIZE=20
|
||||||
DATABASE_MAX_OVERFLOW=40
|
DATABASE_MAX_OVERFLOW=40
|
||||||
DATABASE_POOL_TIMEOUT=30
|
DATABASE_POOL_TIMEOUT=30
|
||||||
DATABASE_POOL_RECYCLE=1800
|
DATABASE_POOL_RECYCLE=1800
|
||||||
|
|
||||||
# Redis cache (optional)
|
# Redis cache (optional)
|
||||||
# REDIS_URL must be reachable from the backend container.
|
|
||||||
# In docker-compose.prod.yml, 127.0.0.1 points to the backend container itself, not the host machine.
|
|
||||||
REDIS_ENABLED=true
|
REDIS_ENABLED=true
|
||||||
REDIS_URL=redis://10.100.52.43:6380/8
|
REDIS_URL=redis://127.0.0.1:6379/8
|
||||||
REDIS_PASSWORD=Unis@123
|
|
||||||
REDIS_PREFIX=nanobot
|
REDIS_PREFIX=nanobot
|
||||||
REDIS_DEFAULT_TTL=60
|
REDIS_DEFAULT_TTL=60
|
||||||
|
|
||||||
# Default timezone injected into newly created bot runtime env (`TZ`).
|
# Chat history page size for upward lazy loading (per request)
|
||||||
# If unset, backend falls back to `TZ` and then `Asia/Shanghai`.
|
CHAT_PULL_PAGE_SIZE=60
|
||||||
|
COMMAND_AUTO_UNLOCK_SECONDS=10
|
||||||
DEFAULT_BOT_SYSTEM_TIMEZONE=Asia/Shanghai
|
DEFAULT_BOT_SYSTEM_TIMEZONE=Asia/Shanghai
|
||||||
|
|
||||||
# Panel access protection (deployment secret, not stored in sys_setting)
|
# Panel access protection
|
||||||
PANEL_ACCESS_PASSWORD=change_me_panel_password
|
PANEL_ACCESS_PASSWORD=change_me_panel_password
|
||||||
|
|
||||||
# Browser credential requests must use an explicit CORS allowlist (deployment security setting).
|
# Max upload size for backend validation (MB)
|
||||||
# If frontend and backend are served under the same origin via nginx `/api` proxy,
|
|
||||||
# this can usually stay unset. Otherwise set the real dashboard origin(s).
|
|
||||||
# Example:
|
|
||||||
# CORS_ALLOWED_ORIGINS=https://dashboard.example.com
|
|
||||||
|
|
||||||
# Nginx upload entry limit (MB).
|
|
||||||
# The backend business limit is stored in `sys_setting.upload_max_mb`;
|
|
||||||
# if you change the DB value later, remember to sync this nginx limit too.
|
|
||||||
UPLOAD_MAX_MB=200
|
UPLOAD_MAX_MB=200
|
||||||
|
|
||||||
|
# Workspace files that should use direct download behavior in dashboard
|
||||||
|
WORKSPACE_DOWNLOAD_EXTENSIONS=.pdf,.doc,.docx,.xls,.xlsx,.xlsm,.ppt,.pptx,.odt,.ods,.odp,.wps,.stl,.scad,.zip,.rar
|
||||||
|
|
||||||
# Local speech-to-text (Whisper via whisper.cpp model file)
|
# Local speech-to-text (Whisper via whisper.cpp model file)
|
||||||
STT_ENABLED=true
|
STT_ENABLED=true
|
||||||
STT_MODEL=ggml-small-q8_0.bin
|
STT_MODEL=ggml-small-q8_0.bin
|
||||||
STT_MODEL_DIR=/app/data/model
|
STT_MODEL_DIR=${HOST_DATA_ROOT}/model
|
||||||
STT_DEVICE=cpu
|
STT_DEVICE=cpu
|
||||||
STT_MAX_AUDIO_SECONDS=20
|
STT_MAX_AUDIO_SECONDS=20
|
||||||
STT_DEFAULT_LANGUAGE=zh
|
STT_DEFAULT_LANGUAGE=zh
|
||||||
|
|
|
||||||
|
|
@ -30,16 +30,9 @@ backend/__pycache__/
|
||||||
backend/*.log
|
backend/*.log
|
||||||
|
|
||||||
# Project runtime data (generated locally)
|
# Project runtime data (generated locally)
|
||||||
data/*
|
data/
|
||||||
!data/templates/
|
workspace/
|
||||||
!data/templates/**
|
engines/
|
||||||
!data/skills/
|
|
||||||
!data/skills/**
|
|
||||||
!data/model/
|
|
||||||
data/model/*
|
|
||||||
!data/model/README.md
|
|
||||||
/workspace/
|
|
||||||
/engines/
|
|
||||||
|
|
||||||
# Frontend (Vite/Node)
|
# Frontend (Vite/Node)
|
||||||
frontend/node_modules/
|
frontend/node_modules/
|
||||||
|
|
@ -56,7 +49,6 @@ frontend/coverage/
|
||||||
.env
|
.env
|
||||||
.env.*
|
.env.*
|
||||||
!.env.example
|
!.env.example
|
||||||
!.env.full.example
|
|
||||||
!.env.prod.example
|
!.env.prod.example
|
||||||
backend/.env
|
backend/.env
|
||||||
frontend/.env
|
frontend/.env
|
||||||
|
|
@ -68,4 +60,3 @@ npm-debug.log*
|
||||||
yarn-debug.log*
|
yarn-debug.log*
|
||||||
yarn-error.log*
|
yarn-error.log*
|
||||||
pnpm-debug.log*
|
pnpm-debug.log*
|
||||||
bot-images/nanobot-base-*
|
|
||||||
|
|
|
||||||
83
README.md
83
README.md
|
|
@ -13,7 +13,7 @@ Dashboard Nanobot 是面向 `nanobot` 的控制平面项目,提供镜像管理
|
||||||
- `USER.md`
|
- `USER.md`
|
||||||
- `TOOLS.md`
|
- `TOOLS.md`
|
||||||
- `IDENTITY.md`
|
- `IDENTITY.md`
|
||||||
- 模板管理:系统级模板改为文件化配置(`data/templates/agent_md_templates.json` 与 `data/templates/topic_presets.json`)。
|
- 模板管理:系统级模板改为文件化配置(`backend/templates/agent_md_templates.json` 与 `backend/templates/topic_presets.json`)。
|
||||||
- 2D 运维 Dashboard:Bot 列表、启停、命令发送、日志流、遥测。
|
- 2D 运维 Dashboard:Bot 列表、启停、命令发送、日志流、遥测。
|
||||||
- UI 全局支持:Light/Dark 切换、中文/English 切换。
|
- UI 全局支持:Light/Dark 切换、中文/English 切换。
|
||||||
|
|
||||||
|
|
@ -23,7 +23,7 @@ Dashboard Nanobot 是面向 `nanobot` 的控制平面项目,提供镜像管理
|
||||||
graph TD
|
graph TD
|
||||||
User((User)) --> Frontend[Frontend Control Plane]
|
User((User)) --> Frontend[Frontend Control Plane]
|
||||||
Frontend --> API[FastAPI Backend]
|
Frontend --> API[FastAPI Backend]
|
||||||
API --> DB[(PostgreSQL)]
|
API --> DB[(SQLite)]
|
||||||
API --> Docker[Docker Daemon]
|
API --> Docker[Docker Daemon]
|
||||||
|
|
||||||
Docker --> BotA[Bot Container A]
|
Docker --> BotA[Bot Container A]
|
||||||
|
|
@ -57,25 +57,17 @@ graph TD
|
||||||
- 架构设计:`design/architecture.md`
|
- 架构设计:`design/architecture.md`
|
||||||
- 数据库设计:`design/database.md`
|
- 数据库设计:`design/database.md`
|
||||||
|
|
||||||
## 默认资源
|
|
||||||
|
|
||||||
- 项目根目录 `data/templates/` 保存默认模板资源,后端运行时直接读取这里的文件,不再在启动阶段做复制或兜底回填。
|
|
||||||
- 项目根目录 `data/skills/` 保存默认 skill 包,数据库初始化阶段会把这些默认 skill 注册到 `skill_market_item`。
|
|
||||||
- `data/model/` 不包含语音识别模型文件;模型需要用户自行下载放入该目录或 `STT_MODEL_DIR` 指向的目录。
|
|
||||||
- 如果语音模型缺失,后端启动时会打印明确告警,但不会阻断服务启动。
|
|
||||||
|
|
||||||
## 环境变量配置
|
## 环境变量配置
|
||||||
|
|
||||||
- 后端:
|
- 后端:
|
||||||
- 示例文件:`backend/.env.example`
|
- 示例文件:`backend/.env.example`
|
||||||
- 本地配置:`backend/.env`
|
- 本地配置:`backend/.env`
|
||||||
- 关键项:
|
- 关键项:
|
||||||
- `DATABASE_URL`:数据库连接串(建议使用 PostgreSQL)
|
- `DATABASE_URL`:数据库连接串(三选一:SQLite / PostgreSQL / MySQL)
|
||||||
- `DATABASE_ECHO`:SQL 日志输出开关
|
- `DATABASE_ECHO`:SQL 日志输出开关
|
||||||
- 不提供自动数据迁移(如需升级迁移请离线完成后再切换连接串)
|
- 不提供自动数据迁移(如需升级迁移请离线完成后再切换连接串)
|
||||||
- `DATA_ROOT`、`BOTS_WORKSPACE_ROOT`:运行数据与 Bot 工作目录
|
- `DATA_ROOT`、`BOTS_WORKSPACE_ROOT`:运行数据与 Bot 工作目录
|
||||||
- `PANEL_ACCESS_PASSWORD`、`CORS_ALLOWED_ORIGINS`:仍属于部署层安全参数
|
- `DEFAULT_*_MD`:可选覆盖值(一般留空,推荐走模板文件)
|
||||||
- `DEFAULT_BOT_SYSTEM_TIMEZONE`:新建 Bot 默认注入的 `TZ`
|
|
||||||
- 前端:
|
- 前端:
|
||||||
- 示例文件:`frontend/.env.example`
|
- 示例文件:`frontend/.env.example`
|
||||||
- 本地配置:`frontend/.env`
|
- 本地配置:`frontend/.env`
|
||||||
|
|
@ -110,11 +102,10 @@ graph TD
|
||||||
|
|
||||||
1. 准备部署变量
|
1. 准备部署变量
|
||||||
- 复制 `.env.prod.example` 为 `.env.prod`(位于项目根目录)
|
- 复制 `.env.prod.example` 为 `.env.prod`(位于项目根目录)
|
||||||
- `data/` 会自动映射到宿主机项目根目录下的 `./data`
|
- 配置绝对路径:
|
||||||
- `deploy-prod.sh` 现在要求使用外部 PostgreSQL,且目标库必须提前执行 `scripts/sql/create-tables.sql` 与 `scripts/sql/init-data.sql`
|
- `HOST_DATA_ROOT`
|
||||||
- 只需要配置绝对路径:
|
|
||||||
- `HOST_BOTS_WORKSPACE_ROOT`
|
- `HOST_BOTS_WORKSPACE_ROOT`
|
||||||
- 如启用本地语音识别,请将 Whisper `.bin` 模型文件放到宿主机项目根目录的 `data/model/`
|
- 如启用本地语音识别,请将 Whisper `.bin` 模型文件放到 `${HOST_DATA_ROOT}/model/`
|
||||||
并让 `STT_MODEL` 指向完整文件名,例如 `ggml-small-q8_0.bin`
|
并让 `STT_MODEL` 指向完整文件名,例如 `ggml-small-q8_0.bin`
|
||||||
- 中国网络建议配置加速项:
|
- 中国网络建议配置加速项:
|
||||||
- `PIP_INDEX_URL`、`PIP_TRUSTED_HOST`
|
- `PIP_INDEX_URL`、`PIP_TRUSTED_HOST`
|
||||||
|
|
@ -129,66 +120,8 @@ graph TD
|
||||||
### 关键说明
|
### 关键说明
|
||||||
|
|
||||||
- `backend` 不开放宿主机端口,仅在内部网络被 Nginx 访问。
|
- `backend` 不开放宿主机端口,仅在内部网络被 Nginx 访问。
|
||||||
- `deploy-prod.sh` 仅负责前后端容器部署,不会初始化外部数据库;外部 PostgreSQL 需要事先建表并导入初始化数据。
|
- 上传大小使用单一参数 `UPLOAD_MAX_MB` 控制(后端校验 + Nginx 限制)。
|
||||||
- 如果启用 Redis,`REDIS_URL` 必须从 `backend` 容器内部可达;在 `docker-compose.prod.yml` 里使用 `127.0.0.1` 只会指向后端容器自己,不是宿主机。
|
|
||||||
- Redis 不可达时,通用缓存健康检查会显示 `degraded`;面板登录认证会自动回退到数据库登录态,不再因为缓存不可达直接报错。
|
|
||||||
- `UPLOAD_MAX_MB` 仅用于 Nginx 入口限制;后端业务校验值来自 `sys_setting.upload_max_mb`。
|
|
||||||
- 必须挂载 `/var/run/docker.sock`,否则后端无法操作 Bot 镜像与容器。
|
- 必须挂载 `/var/run/docker.sock`,否则后端无法操作 Bot 镜像与容器。
|
||||||
- `data/` 始终绑定到宿主机项目根目录下的 `./data`,其中模板、默认 skills、语音模型和运行数据都落在这里。
|
|
||||||
- `HOST_BOTS_WORKSPACE_ROOT` 必须是宿主机绝对路径,并且在 `docker-compose.prod.yml` 中以“同路径”挂载到后端容器。
|
- `HOST_BOTS_WORKSPACE_ROOT` 必须是宿主机绝对路径,并且在 `docker-compose.prod.yml` 中以“同路径”挂载到后端容器。
|
||||||
原因:后端通过 Docker API 创建 Bot 容器时,使用的是宿主机可见的 bind 路径。
|
原因:后端通过 Docker API 创建 Bot 容器时,使用的是宿主机可见的 bind 路径。
|
||||||
- 语音识别当前基于 `pywhispercpp==1.3.1` + Whisper `.bin` 模型文件,不使用 `faster-whisper`。
|
- 语音识别当前基于 `pywhispercpp==1.3.1` + Whisper `.bin` 模型文件,不使用 `faster-whisper`。
|
||||||
|
|
||||||
## Docker 完整部署(内置 PostgreSQL / Redis)
|
|
||||||
|
|
||||||
这套方案和 `deploy-prod.sh` 并存,适合目标机器上直接把前端、后端、PostgreSQL、Redis 一起拉起。
|
|
||||||
|
|
||||||
### 文件
|
|
||||||
|
|
||||||
- `docker-compose.full.yml`
|
|
||||||
- `.env.full.example`
|
|
||||||
- `scripts/deploy-full.sh`
|
|
||||||
- `scripts/init-full-db.sh`
|
|
||||||
- `scripts/stop-full.sh`
|
|
||||||
- `scripts/sql/create-tables.sql`
|
|
||||||
- `scripts/sql/init-data.sql`
|
|
||||||
- `scripts/sql/init-postgres-bootstrap.sql`
|
|
||||||
- `scripts/sql/init-postgres-app.sql`
|
|
||||||
|
|
||||||
### 启动步骤
|
|
||||||
|
|
||||||
1. 准备部署变量
|
|
||||||
- 复制 `.env.full.example` 为 `.env.full`
|
|
||||||
- `data/` 会自动映射到宿主机项目根目录下的 `./data`
|
|
||||||
- 必填修改:
|
|
||||||
- `HOST_BOTS_WORKSPACE_ROOT`
|
|
||||||
- `POSTGRES_SUPERPASSWORD`
|
|
||||||
- `POSTGRES_APP_PASSWORD`
|
|
||||||
- `PANEL_ACCESS_PASSWORD`
|
|
||||||
- 如启用本地语音识别,请将 Whisper `.bin` 模型文件放到宿主机项目根目录的 `data/model/`
|
|
||||||
2. 启动完整栈
|
|
||||||
- `./scripts/deploy-full.sh`
|
|
||||||
3. 访问
|
|
||||||
- `http://<host>:${NGINX_PORT}`(默认 `8080`)
|
|
||||||
|
|
||||||
### 初始化说明
|
|
||||||
|
|
||||||
- `scripts/deploy-full.sh` 会先启动 `postgres` / `redis`,然后自动调用 `scripts/init-full-db.sh`。
|
|
||||||
- `scripts/init-full-db.sh` 负责:
|
|
||||||
- 等待 PostgreSQL 就绪
|
|
||||||
- 创建或更新业务账号
|
|
||||||
- 创建业务库并授权
|
|
||||||
- 修正 `public` schema 权限
|
|
||||||
- 执行 `scripts/sql/create-tables.sql` 创建业务表
|
|
||||||
- 执行 `scripts/sql/init-data.sql` 初始化 `sys_setting` 与默认 skill 市场数据
|
|
||||||
- 后端启动时只做初始化完整性校验,不再自动补表、补列、补数据或迁移旧结构;缺库表、缺 `sys_setting`、缺模板文件都会直接报错。
|
|
||||||
|
|
||||||
### 停止
|
|
||||||
|
|
||||||
- `./scripts/stop-full.sh`
|
|
||||||
|
|
||||||
### 注意事项
|
|
||||||
|
|
||||||
- `deploy-prod.sh` 和 `deploy-full.sh` 使用的是两套 compose 文件,但复用了相同容器名,不能同时在同一台机器上并行启动。
|
|
||||||
- PostgreSQL 数据默认落盘到宿主机项目根目录 `./data/postgres`,Redis 数据默认落盘到 `./data/redis`。
|
|
||||||
- 如果你只想保留前后端容器,继续使用 `deploy-prod.sh`;如果希望把依赖也打包进来,使用 `deploy-full.sh`。
|
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,12 @@
|
||||||
# Runtime paths
|
# Runtime paths
|
||||||
DATA_ROOT=../data
|
DATA_ROOT=../data
|
||||||
BOTS_WORKSPACE_ROOT=../workspace/bots
|
BOTS_WORKSPACE_ROOT=../workspace/bots
|
||||||
# Optional: when backend itself runs inside docker-compose and bot containers
|
|
||||||
# should join that same user-defined network, set the network name here.
|
|
||||||
# Leave empty for local development to use Docker's default bridge network.
|
|
||||||
DOCKER_NETWORK_NAME=
|
|
||||||
|
|
||||||
# Database
|
# Database
|
||||||
# PostgreSQL is required:
|
# PostgreSQL:
|
||||||
DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
|
DATABASE_URL=postgresql+psycopg://user:password@127.0.0.1:5432/nanobot_dashboard
|
||||||
|
# MySQL:
|
||||||
|
# DATABASE_URL=mysql+pymysql://user:password@127.0.0.1:3306/nanobot_dashboard
|
||||||
# Show SQL statements in backend logs (debug only).
|
# Show SQL statements in backend logs (debug only).
|
||||||
DATABASE_ECHO=true
|
DATABASE_ECHO=true
|
||||||
DATABASE_POOL_SIZE=20
|
DATABASE_POOL_SIZE=20
|
||||||
|
|
@ -26,22 +24,19 @@ REDIS_DEFAULT_TTL=60
|
||||||
|
|
||||||
# Optional panel-level access password for all backend API/WS calls.
|
# Optional panel-level access password for all backend API/WS calls.
|
||||||
PANEL_ACCESS_PASSWORD=
|
PANEL_ACCESS_PASSWORD=
|
||||||
|
# The following platform-level items are now managed in sys_setting / 平台参数:
|
||||||
# Explicit CORS allowlist for browser credential requests.
|
|
||||||
# For local development, the backend defaults to common Vite dev origins.
|
|
||||||
# In production, prefer same-origin `/api` reverse proxy, or set your real dashboard origin explicitly.
|
|
||||||
# Example:
|
|
||||||
# CORS_ALLOWED_ORIGINS=http://localhost:5173,https://dashboard.example.com
|
|
||||||
# Default timezone injected into newly created bot runtime env (`TZ`).
|
|
||||||
DEFAULT_BOT_SYSTEM_TIMEZONE=Asia/Shanghai
|
|
||||||
|
|
||||||
# The following platform-level items are initialized by SQL and managed in sys_setting / 平台参数:
|
|
||||||
# - page_size
|
# - page_size
|
||||||
# - chat_pull_page_size
|
# - chat_pull_page_size
|
||||||
# - upload_max_mb
|
# - upload_max_mb
|
||||||
# - allowed_attachment_extensions
|
# - allowed_attachment_extensions
|
||||||
# - workspace_download_extensions
|
# - workspace_download_extensions
|
||||||
# - speech_enabled
|
# - speech_enabled
|
||||||
|
# - speech_max_audio_seconds
|
||||||
|
# - speech_default_language
|
||||||
|
# - speech_force_simplified
|
||||||
|
# - speech_audio_preprocess
|
||||||
|
# - speech_audio_filter
|
||||||
|
# - speech_initial_prompt
|
||||||
|
|
||||||
# Local speech-to-text (Whisper via whisper.cpp model file)
|
# Local speech-to-text (Whisper via whisper.cpp model file)
|
||||||
STT_MODEL=ggml-small-q8_0.bin
|
STT_MODEL=ggml-small-q8_0.bin
|
||||||
|
|
@ -52,3 +47,12 @@ STT_DEVICE=cpu
|
||||||
APP_HOST=0.0.0.0
|
APP_HOST=0.0.0.0
|
||||||
APP_PORT=8000
|
APP_PORT=8000
|
||||||
APP_RELOAD=true
|
APP_RELOAD=true
|
||||||
|
APP_LOG_LEVEL=warning
|
||||||
|
APP_ACCESS_LOG=false
|
||||||
|
|
||||||
|
# Optional overrides (fallback only; usually keep empty when using template files)
|
||||||
|
DEFAULT_AGENTS_MD=
|
||||||
|
DEFAULT_SOUL_MD=
|
||||||
|
DEFAULT_USER_MD=
|
||||||
|
DEFAULT_TOOLS_MD=
|
||||||
|
DEFAULT_IDENTITY_MD=
|
||||||
|
|
|
||||||
|
|
@ -20,7 +20,6 @@ RUN if [ -n "${PIP_INDEX_URL}" ]; then pip config set global.index-url "${PIP_IN
|
||||||
&& pip install -r requirements.txt
|
&& pip install -r requirements.txt
|
||||||
|
|
||||||
COPY backend/ /app/backend/
|
COPY backend/ /app/backend/
|
||||||
COPY data/ /app/data/
|
|
||||||
|
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,102 +0,0 @@
|
||||||
from fastapi import APIRouter, Depends, HTTPException
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from schemas.bot import (
|
|
||||||
BotEnvParamsUpdateRequest,
|
|
||||||
BotMcpConfigUpdateRequest,
|
|
||||||
BotToolsConfigUpdateRequest,
|
|
||||||
ChannelConfigRequest,
|
|
||||||
ChannelConfigUpdateRequest,
|
|
||||||
)
|
|
||||||
from services.bot_config_service import (
|
|
||||||
create_bot_channel_config,
|
|
||||||
delete_bot_channel_config,
|
|
||||||
get_bot_env_params_state,
|
|
||||||
get_bot_mcp_config_state,
|
|
||||||
get_bot_resources_snapshot,
|
|
||||||
get_bot_tools_config_state,
|
|
||||||
list_bot_channels_config,
|
|
||||||
reject_bot_tools_config_update,
|
|
||||||
update_bot_channel_config,
|
|
||||||
update_bot_env_params_state,
|
|
||||||
update_bot_mcp_config_state,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/resources")
|
|
||||||
def get_bot_resources(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
return get_bot_resources_snapshot(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/channels")
|
|
||||||
def list_bot_channels(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
return list_bot_channels_config(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/tools-config")
|
|
||||||
def get_bot_tools_config(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
return get_bot_tools_config_state(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}/tools-config")
|
|
||||||
def update_bot_tools_config(
|
|
||||||
bot_id: str,
|
|
||||||
payload: BotToolsConfigUpdateRequest,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return reject_bot_tools_config_update(session, bot_id=bot_id, payload=payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/mcp-config")
|
|
||||||
def get_bot_mcp_config(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
return get_bot_mcp_config_state(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}/mcp-config")
|
|
||||||
def update_bot_mcp_config(
|
|
||||||
bot_id: str,
|
|
||||||
payload: BotMcpConfigUpdateRequest,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return update_bot_mcp_config_state(session, bot_id=bot_id, payload=payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/env-params")
|
|
||||||
def get_bot_env_params(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
return get_bot_env_params_state(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}/env-params")
|
|
||||||
def update_bot_env_params(
|
|
||||||
bot_id: str,
|
|
||||||
payload: BotEnvParamsUpdateRequest,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return update_bot_env_params_state(session, bot_id=bot_id, payload=payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/channels")
|
|
||||||
def create_bot_channel(
|
|
||||||
bot_id: str,
|
|
||||||
payload: ChannelConfigRequest,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return create_bot_channel_config(session, bot_id=bot_id, payload=payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}/channels/{channel_id}")
|
|
||||||
def update_bot_channel(
|
|
||||||
bot_id: str,
|
|
||||||
channel_id: str,
|
|
||||||
payload: ChannelConfigUpdateRequest,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return update_bot_channel_config(session, bot_id=bot_id, channel_id=channel_id, payload=payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/api/bots/{bot_id}/channels/{channel_id}")
|
|
||||||
def delete_bot_channel(bot_id: str, channel_id: str, session: Session = Depends(get_session)):
|
|
||||||
return delete_bot_channel_config(session, bot_id=bot_id, channel_id=channel_id)
|
|
||||||
|
|
@ -1,68 +0,0 @@
|
||||||
from fastapi import APIRouter, Depends, HTTPException
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from services.bot_lifecycle_service import (
|
|
||||||
deactivate_bot_instance,
|
|
||||||
delete_bot_instance,
|
|
||||||
disable_bot_instance,
|
|
||||||
enable_bot_instance,
|
|
||||||
start_bot_instance,
|
|
||||||
stop_bot_instance,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/start")
|
|
||||||
async def start_bot(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
return await start_bot_instance(session, bot_id)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
except PermissionError as exc:
|
|
||||||
raise HTTPException(status_code=403, detail=str(exc)) from exc
|
|
||||||
except RuntimeError as exc:
|
|
||||||
raise HTTPException(status_code=500, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/stop")
|
|
||||||
def stop_bot(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
return stop_bot_instance(session, bot_id)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
except PermissionError as exc:
|
|
||||||
raise HTTPException(status_code=403, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/enable")
|
|
||||||
def enable_bot(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
return enable_bot_instance(session, bot_id)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/disable")
|
|
||||||
def disable_bot(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
return disable_bot_instance(session, bot_id)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/deactivate")
|
|
||||||
def deactivate_bot(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
return deactivate_bot_instance(session, bot_id)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/api/bots/{bot_id}")
|
|
||||||
def delete_bot(bot_id: str, delete_workspace: bool = True, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
return delete_bot_instance(session, bot_id, delete_workspace=delete_workspace)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
|
|
@ -1,103 +0,0 @@
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from schemas.bot import BotCreateRequest, BotPageAuthLoginRequest, BotUpdateRequest
|
|
||||||
from services.platform_auth_service import (
|
|
||||||
clear_bot_token_cookie,
|
|
||||||
create_bot_token,
|
|
||||||
resolve_bot_request_auth,
|
|
||||||
revoke_bot_token,
|
|
||||||
set_bot_token_cookie,
|
|
||||||
)
|
|
||||||
from services.bot_management_service import (
|
|
||||||
authenticate_bot_page_access,
|
|
||||||
create_bot_record,
|
|
||||||
get_bot_detail_cached,
|
|
||||||
list_bots_with_cache,
|
|
||||||
update_bot_record,
|
|
||||||
)
|
|
||||||
from services.image_service import list_registered_images
|
|
||||||
from services.provider_service import test_provider_connection
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/providers/test")
|
|
||||||
async def test_provider(payload: dict):
|
|
||||||
return await test_provider_connection(payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/providers/test")
|
|
||||||
async def test_bot_provider(bot_id: str, payload: dict, session: Session = Depends(get_session)):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return await test_provider_connection(payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/images")
|
|
||||||
def list_bot_images(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return list_registered_images(session)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots")
|
|
||||||
def create_bot(payload: BotCreateRequest, session: Session = Depends(get_session)):
|
|
||||||
return create_bot_record(session, payload=payload)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots")
|
|
||||||
def list_bots(session: Session = Depends(get_session)):
|
|
||||||
return list_bots_with_cache(session)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}")
|
|
||||||
def get_bot_detail(bot_id: str, session: Session = Depends(get_session)):
|
|
||||||
return get_bot_detail_cached(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/auth/login")
|
|
||||||
def login_bot_page(
|
|
||||||
bot_id: str,
|
|
||||||
payload: BotPageAuthLoginRequest,
|
|
||||||
request: Request,
|
|
||||||
response: Response,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
result = authenticate_bot_page_access(session, bot_id=bot_id, password=payload.password)
|
|
||||||
try:
|
|
||||||
raw_token = create_bot_token(session, request, bot_id)
|
|
||||||
except RuntimeError as exc:
|
|
||||||
raise HTTPException(status_code=503, detail=str(exc)) from exc
|
|
||||||
set_bot_token_cookie(response, request, bot_id, raw_token, session)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/auth/status")
|
|
||||||
def get_bot_auth_status(bot_id: str, request: Request, session: Session = Depends(get_session)):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
return {"enabled": False, "authenticated": False, "auth_source": None, "bot_id": bot_id}
|
|
||||||
principal = resolve_bot_request_auth(session, request, bot_id)
|
|
||||||
return {
|
|
||||||
"enabled": bool(str(bot.access_password or "").strip()),
|
|
||||||
"authenticated": bool(principal.authenticated),
|
|
||||||
"auth_source": principal.auth_source if principal.authenticated else None,
|
|
||||||
"bot_id": bot_id,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/auth/logout")
def logout_bot_page(bot_id: str, request: Request, response: Response, session: Session = Depends(get_session)):
    """Revoke the caller's bot-page token and clear the matching cookie."""
    revoke_bot_token(session, request, bot_id)
    clear_bot_token_cookie(response, bot_id)
    return {"success": True, "bot_id": bot_id}
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}")
def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depends(get_session)):
    """Apply a partial update to a bot record."""
    return update_bot_record(session, bot_id=bot_id, payload=payload)
|
|
||||||
|
|
@ -1,138 +0,0 @@
|
||||||
import logging
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, WebSocket, WebSocketDisconnect
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import engine, get_session
|
|
||||||
from core.docker_instance import docker_manager
|
|
||||||
from core.websocket_manager import manager
|
|
||||||
from services.bot_runtime_service import (
|
|
||||||
delete_cron_job as delete_cron_job_service,
|
|
||||||
ensure_monitor_websocket_access,
|
|
||||||
get_bot_logs as get_bot_logs_service,
|
|
||||||
list_cron_jobs as list_cron_jobs_service,
|
|
||||||
relogin_weixin as relogin_weixin_service,
|
|
||||||
start_cron_job as start_cron_job_service,
|
|
||||||
stop_cron_job as stop_cron_job_service,
|
|
||||||
)
|
|
||||||
from services.runtime_service import docker_callback
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
logger = logging.getLogger("dashboard.backend")
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/logs")
def get_bot_logs(
    bot_id: str,
    tail: Optional[int] = 300,
    offset: int = 0,
    limit: Optional[int] = None,
    reverse: bool = False,
    session: Session = Depends(get_session),
):
    """Fetch log lines for a bot's runtime.

    Raises:
        HTTPException: 404 when the bot cannot be found (LookupError from service).
    """
    try:
        return get_bot_logs_service(
            session,
            bot_id=bot_id,
            tail=tail,
            offset=offset,
            limit=limit,
            reverse=reverse,
        )
    except LookupError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/weixin/relogin")
async def relogin_weixin(bot_id: str, session: Session = Depends(get_session)):
    """Trigger a WeChat (weixin) re-login for the bot.

    Error mapping:
        LookupError / ValueError -> 404 (missing bot or invalid relogin target)
        RuntimeError -> 500 (relogin attempt failed at runtime)
    """
    try:
        return await relogin_weixin_service(session, bot_id=bot_id)
    except (LookupError, ValueError) as exc:
        # Both exception types previously had byte-identical, duplicated
        # handlers mapping to 404 — merged into a single tuple handler.
        raise HTTPException(status_code=404, detail=str(exc)) from exc
    except RuntimeError as exc:
        raise HTTPException(status_code=500, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/cron/jobs")
def list_cron_jobs(bot_id: str, include_disabled: bool = True, session: Session = Depends(get_session)):
    """List a bot's cron jobs; 404 when the bot is unknown."""
    try:
        return list_cron_jobs_service(session, bot_id=bot_id, include_disabled=include_disabled)
    except LookupError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc


@router.post("/api/bots/{bot_id}/cron/jobs/{job_id}/stop")
def stop_cron_job(bot_id: str, job_id: str, session: Session = Depends(get_session)):
    """Disable a cron job; 404 when bot or job is unknown."""
    try:
        return stop_cron_job_service(session, bot_id=bot_id, job_id=job_id)
    except LookupError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc


@router.post("/api/bots/{bot_id}/cron/jobs/{job_id}/start")
def start_cron_job(bot_id: str, job_id: str, session: Session = Depends(get_session)):
    """Enable a cron job; 404 when bot or job is unknown."""
    try:
        return start_cron_job_service(session, bot_id=bot_id, job_id=job_id)
    except LookupError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc


@router.delete("/api/bots/{bot_id}/cron/jobs/{job_id}")
def delete_cron_job(bot_id: str, job_id: str, session: Session = Depends(get_session)):
    """Remove a cron job; 404 when bot or job is unknown."""
    try:
        return delete_cron_job_service(session, bot_id=bot_id, job_id=job_id)
    except LookupError as exc:
        raise HTTPException(status_code=404, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.websocket("/ws/monitor/{bot_id}")
async def websocket_endpoint(websocket: WebSocket, bot_id: str):
    """Serve the monitor WebSocket stream for one bot.

    Access is checked once before accepting the connection and again on every
    inbound client message, so revoked credentials close the stream promptly
    (code 4401) and a deleted bot closes it with 4404.
    """
    # Gate the connection before handing it to the connection manager.
    with Session(engine) as db:
        try:
            ensure_monitor_websocket_access(db, websocket, bot_id)
        except PermissionError:
            await websocket.close(code=4401, reason="Bot or panel authentication required")
            return
        except LookupError:
            await websocket.close(code=4404, reason="Bot not found")
            return

    connected = False
    try:
        await manager.connect(bot_id, websocket)
        connected = True
    except Exception as exc:
        logger.warning("websocket connect failed bot_id=%s detail=%s", bot_id, exc)
        try:
            await websocket.close(code=1011, reason="WebSocket accept failed")
        except Exception:
            # Socket may already be gone; nothing more to do.
            pass
        return

    # Make sure a docker monitor is feeding events for this bot.
    docker_manager.ensure_monitor(bot_id, docker_callback)
    try:
        while True:
            await websocket.receive_text()
            # Re-validate auth for each client message.
            with Session(engine) as db:
                try:
                    ensure_monitor_websocket_access(db, websocket, bot_id)
                except PermissionError:
                    await websocket.close(code=4401, reason="Authentication expired")
                    return
                except LookupError:
                    await websocket.close(code=4404, reason="Bot not found")
                    return
    except WebSocketDisconnect:
        pass
    except RuntimeError as exc:
        # Starlette raises RuntimeError for operations on an already-closed
        # socket; suppress that noise and log only unexpected runtime errors.
        msg = str(exc or "").lower()
        if "need to call \"accept\" first" not in msg and "not connected" not in msg:
            logger.exception("websocket runtime error bot_id=%s", bot_id)
    except Exception:
        logger.exception("websocket unexpected error bot_id=%s", bot_id)
    finally:
        if connected:
            manager.disconnect(bot_id, websocket)
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
import logging
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, Request, UploadFile
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from core.speech_service import WhisperSpeechService
|
|
||||||
from services.speech_transcribe_service import transcribe_bot_speech_upload
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
logger = logging.getLogger("dashboard.backend")
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/speech/transcribe")
async def transcribe_bot_speech(
    bot_id: str,
    request: Request,
    file: UploadFile = File(...),
    language: Optional[str] = Form(None),
    session: Session = Depends(get_session),
):
    """Transcribe an uploaded audio file via the app-wide Whisper service.

    Raises:
        HTTPException: 500 when the shared speech service was never initialized.
    """
    speech_service = getattr(request.app.state, "speech_service", None)
    if not isinstance(speech_service, WhisperSpeechService):
        raise HTTPException(status_code=500, detail="Speech service is not initialized")
    return await transcribe_bot_speech_upload(
        session,
        bot_id,
        upload=file,
        language=language,
        speech_service=speech_service,
        logger=logger,
    )
|
|
||||||
|
|
@ -1,79 +0,0 @@
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from schemas.bot import MessageFeedbackRequest
|
|
||||||
from services.chat_history_service import (
|
|
||||||
clear_bot_messages_payload,
|
|
||||||
clear_dashboard_direct_session_payload,
|
|
||||||
delete_bot_message_payload,
|
|
||||||
list_bot_messages_by_date_payload,
|
|
||||||
list_bot_messages_page_payload,
|
|
||||||
list_bot_messages_payload,
|
|
||||||
update_bot_message_feedback_payload,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/messages")
def list_bot_messages(bot_id: str, limit: int = 200, session: Session = Depends(get_session)):
    """Return the most recent chat messages for a bot."""
    return list_bot_messages_payload(session, bot_id, limit=limit)


@router.get("/api/bots/{bot_id}/messages/page")
def list_bot_messages_page(
    bot_id: str,
    limit: Optional[int] = None,
    before_id: Optional[int] = None,
    session: Session = Depends(get_session),
):
    """Return a page of messages older than ``before_id`` (cursor pagination)."""
    return list_bot_messages_page_payload(session, bot_id, limit=limit, before_id=before_id)


@router.get("/api/bots/{bot_id}/messages/by-date")
def list_bot_messages_by_date(
    bot_id: str,
    date: str,
    tz_offset_minutes: Optional[int] = None,
    limit: Optional[int] = None,
    session: Session = Depends(get_session),
):
    """Return messages for one calendar day, shifted by the client's UTC offset."""
    return list_bot_messages_by_date_payload(
        session,
        bot_id,
        date=date,
        tz_offset_minutes=tz_offset_minutes,
        limit=limit,
    )


@router.put("/api/bots/{bot_id}/messages/{message_id}/feedback")
def update_bot_message_feedback(
    bot_id: str,
    message_id: int,
    payload: MessageFeedbackRequest,
    session: Session = Depends(get_session),
):
    """Attach user feedback to a single message."""
    return update_bot_message_feedback_payload(session, bot_id, message_id, payload.feedback)


@router.delete("/api/bots/{bot_id}/messages/{message_id}")
def delete_bot_message(
    bot_id: str,
    message_id: int,
    session: Session = Depends(get_session),
):
    """Delete one message from the bot's history."""
    return delete_bot_message_payload(session, bot_id, message_id)


@router.delete("/api/bots/{bot_id}/messages")
def clear_bot_messages(bot_id: str, session: Session = Depends(get_session)):
    """Delete the bot's entire message history."""
    return clear_bot_messages_payload(session, bot_id)


@router.post("/api/bots/{bot_id}/sessions/dashboard-direct/clear")
def clear_bot_dashboard_direct_session(bot_id: str, session: Session = Depends(get_session)):
    """Reset the dashboard's direct-chat session with the bot."""
    return clear_dashboard_direct_session_payload(session, bot_id)
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
from typing import Any, Dict, Tuple
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Body, Depends
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from services.chat_command_service import send_bot_command
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
def _parse_command_payload(payload: Dict[str, Any] | None) -> Tuple[str, Any]:
|
|
||||||
body = payload if isinstance(payload, dict) else {}
|
|
||||||
return str(body.get("command") or ""), body.get("attachments")
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/command")
def send_command(
    bot_id: str,
    payload: Dict[str, Any] | None = Body(default=None),
    session: Session = Depends(get_session),
):
    """Forward a chat command (plus optional attachments) to a bot."""
    command, attachments = _parse_command_payload(payload)
    return send_bot_command(
        session,
        bot_id,
        command=command,
        attachments=attachments,
    )
|
|
||||||
|
|
@ -0,0 +1,124 @@
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, File, Form, UploadFile
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from core.database import get_session
|
||||||
|
from models.bot import NanobotImage
|
||||||
|
|
||||||
|
from api.dashboard_router_support import DashboardRouterDeps
|
||||||
|
|
||||||
|
|
||||||
|
def build_dashboard_assets_router(*, deps: DashboardRouterDeps) -> APIRouter:
    """Build the router covering image management, provider tests and the skill market.

    All handlers delegate to the services supplied via ``deps``; this factory
    only wires routes to those services.
    """
    router = APIRouter()

    @router.get("/api/images", response_model=List[NanobotImage])
    def list_images(session: Session = Depends(get_session)):
        """List registered nanobot images."""
        return deps.image_service.list_images(session=session)

    @router.delete("/api/images/{tag:path}")
    def delete_image(tag: str, session: Session = Depends(get_session)):
        """Delete one image by tag; ``:path`` allows slashes in the tag."""
        return deps.image_service.delete_image(session=session, tag=tag)

    @router.get("/api/docker-images")
    def list_docker_images(repository: str = "nanobot-base"):
        """List local docker images for a repository."""
        return deps.image_service.list_docker_images(repository=repository)

    @router.post("/api/images/register")
    def register_image(payload: dict, session: Session = Depends(get_session)):
        """Register an existing docker image with the dashboard."""
        return deps.image_service.register_image(session=session, payload=payload)

    @router.post("/api/providers/test")
    async def test_provider(payload: dict):
        """Probe an LLM provider configuration for connectivity."""
        return await deps.provider_test_service.test_provider(payload=payload)

    @router.get("/api/platform/skills")
    def list_skill_market(session: Session = Depends(get_session)):
        """List platform-wide skill-market items."""
        return deps.skill_service.list_market_items(session=session)

    @router.post("/api/platform/skills")
    async def create_skill_market_item(
        skill_key: str = Form(""),
        display_name: str = Form(""),
        description: str = Form(""),
        file: UploadFile = File(...),
        session: Session = Depends(get_session),
    ):
        """Create a skill-market item from an uploaded archive."""
        return await deps.skill_service.create_market_item(
            session=session,
            skill_key=skill_key,
            display_name=display_name,
            description=description,
            file=file,
        )

    @router.put("/api/platform/skills/{skill_id}")
    async def update_skill_market_item(
        skill_id: int,
        skill_key: str = Form(""),
        display_name: str = Form(""),
        description: str = Form(""),
        file: Optional[UploadFile] = File(None),
        session: Session = Depends(get_session),
    ):
        """Update a skill-market item; the archive upload is optional."""
        return await deps.skill_service.update_market_item(
            session=session,
            skill_id=skill_id,
            skill_key=skill_key,
            display_name=display_name,
            description=description,
            file=file,
        )

    @router.delete("/api/platform/skills/{skill_id}")
    def delete_skill_market_item(skill_id: int, session: Session = Depends(get_session)):
        """Remove a skill-market item."""
        return deps.skill_service.delete_market_item(session=session, skill_id=skill_id)

    @router.get("/api/bots/{bot_id}/skills")
    def list_bot_skills(bot_id: str, session: Session = Depends(get_session)):
        """List skills installed in a bot's workspace."""
        return deps.skill_service.list_workspace_skills_for_bot(
            session=session,
            bot_id=bot_id,
            resolve_edge_state_context=deps.resolve_edge_state_context,
            logger=deps.logger,
        )

    @router.get("/api/bots/{bot_id}/skill-market")
    def list_bot_skill_market(bot_id: str, session: Session = Depends(get_session)):
        """List market items as seen from one bot's perspective."""
        return deps.skill_service.list_bot_market_items_for_bot(
            session=session,
            bot_id=bot_id,
            resolve_edge_state_context=deps.resolve_edge_state_context,
            logger=deps.logger,
        )

    @router.post("/api/bots/{bot_id}/skill-market/{skill_id}/install")
    def install_bot_skill_from_market(bot_id: str, skill_id: int, session: Session = Depends(get_session)):
        """Install a market skill into the bot's workspace (with checks)."""
        return deps.skill_service.install_market_item_for_bot_checked(
            session=session,
            bot_id=bot_id,
            skill_id=skill_id,
            resolve_edge_state_context=deps.resolve_edge_state_context,
            logger=deps.logger,
        )

    @router.post("/api/bots/{bot_id}/skills/upload")
    async def upload_bot_skill_zip(bot_id: str, file: UploadFile = File(...), session: Session = Depends(get_session)):
        """Install a skill from a user-uploaded zip archive."""
        return await deps.skill_service.upload_bot_skill_zip_for_bot(
            session=session,
            bot_id=bot_id,
            file=file,
            resolve_edge_state_context=deps.resolve_edge_state_context,
            logger=deps.logger,
        )

    @router.delete("/api/bots/{bot_id}/skills/{skill_name}")
    def delete_bot_skill(bot_id: str, skill_name: str, session: Session = Depends(get_session)):
        """Remove an installed skill from the bot's workspace."""
        return deps.skill_service.delete_workspace_skill_for_bot(
            session=session,
            bot_id=bot_id,
            skill_name=skill_name,
            resolve_edge_state_context=deps.resolve_edge_state_context,
        )

    return router
|
||||||
|
|
@ -0,0 +1,153 @@
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from core.database import get_session
|
||||||
|
from schemas.dashboard import (
|
||||||
|
BotCreateRequest,
|
||||||
|
BotDeployRequest,
|
||||||
|
BotEnvParamsUpdateRequest,
|
||||||
|
BotMcpConfigUpdateRequest,
|
||||||
|
BotToolsConfigUpdateRequest,
|
||||||
|
BotUpdateRequest,
|
||||||
|
ChannelConfigRequest,
|
||||||
|
ChannelConfigUpdateRequest,
|
||||||
|
)
|
||||||
|
|
||||||
|
from api.dashboard_router_support import DashboardRouterDeps
|
||||||
|
|
||||||
|
|
||||||
|
def build_dashboard_bot_admin_router(*, deps: DashboardRouterDeps) -> APIRouter:
    """Build the router for bot CRUD, lifecycle, channels, configs and cron jobs.

    Every handler delegates to a service on ``deps``; handlers that need
    shared application state pull it from ``request.app.state``.
    """
    router = APIRouter()

    @router.post("/api/bots")
    def create_bot(payload: BotCreateRequest, session: Session = Depends(get_session)):
        """Create a new bot record."""
        return deps.bot_lifecycle_service.create_bot(session=session, payload=payload)

    @router.get("/api/bots")
    def list_bots(request: Request, session: Session = Depends(get_session)):
        """List bots visible to the current dashboard user."""
        # sys_user_id is set by the auth middleware; 0 means "no user".
        current_user_id = int(getattr(request.state, "sys_user_id", 0) or 0)
        return deps.bot_query_service.list_bots(app_state=request.app.state, session=session, current_user_id=current_user_id)

    @router.get("/api/bots/{bot_id}")
    def get_bot_detail(bot_id: str, request: Request, session: Session = Depends(get_session)):
        """Return the detail payload for one bot."""
        return deps.bot_query_service.get_bot_detail(app_state=request.app.state, session=session, bot_id=bot_id)

    @router.get("/api/bots/{bot_id}/resources")
    def get_bot_resources(bot_id: str, request: Request, session: Session = Depends(get_session)):
        """Return runtime resource usage for one bot."""
        return deps.bot_query_service.get_bot_resources(app_state=request.app.state, session=session, bot_id=bot_id)

    @router.put("/api/bots/{bot_id}")
    def update_bot(bot_id: str, payload: BotUpdateRequest, session: Session = Depends(get_session)):
        """Apply a partial update to a bot record."""
        return deps.bot_lifecycle_service.update_bot(session=session, bot_id=bot_id, payload=payload)

    @router.post("/api/bots/{bot_id}/deploy")
    async def deploy_bot(bot_id: str, payload: BotDeployRequest, request: Request, session: Session = Depends(get_session)):
        """Deploy a bot onto a runtime node, optionally auto-starting it."""
        return await deps.bot_lifecycle_service.deploy_bot(
            app_state=request.app.state,
            session=session,
            bot_id=bot_id,
            node_id=payload.node_id,
            runtime_kind=payload.runtime_kind,
            image_tag=payload.image_tag,
            auto_start=bool(payload.auto_start),
        )

    @router.post("/api/bots/{bot_id}/start")
    async def start_bot(bot_id: str, request: Request, session: Session = Depends(get_session)):
        """Start a deployed bot."""
        return await deps.bot_lifecycle_service.start_bot(app_state=request.app.state, session=session, bot_id=bot_id)

    @router.post("/api/bots/{bot_id}/stop")
    def stop_bot(bot_id: str, request: Request, session: Session = Depends(get_session)):
        """Stop a running bot."""
        return deps.bot_lifecycle_service.stop_bot(app_state=request.app.state, session=session, bot_id=bot_id)

    @router.post("/api/bots/{bot_id}/enable")
    def enable_bot(bot_id: str, session: Session = Depends(get_session)):
        """Mark a bot as enabled."""
        return deps.bot_lifecycle_service.enable_bot(session=session, bot_id=bot_id)

    @router.post("/api/bots/{bot_id}/disable")
    def disable_bot(bot_id: str, request: Request, session: Session = Depends(get_session)):
        """Mark a bot as disabled."""
        return deps.bot_lifecycle_service.disable_bot(app_state=request.app.state, session=session, bot_id=bot_id)

    @router.post("/api/bots/{bot_id}/deactivate")
    def deactivate_bot(bot_id: str, request: Request, session: Session = Depends(get_session)):
        """Deactivate a bot (stronger than disable; exact semantics live in the service)."""
        return deps.bot_lifecycle_service.deactivate_bot(app_state=request.app.state, session=session, bot_id=bot_id)

    @router.delete("/api/bots/{bot_id}")
    def delete_bot(bot_id: str, request: Request, delete_workspace: bool = True, session: Session = Depends(get_session)):
        """Delete a bot, by default also removing its workspace files."""
        return deps.bot_lifecycle_service.delete_bot(
            app_state=request.app.state,
            session=session,
            bot_id=bot_id,
            delete_workspace=delete_workspace,
        )

    @router.get("/api/bots/{bot_id}/channels")
    def list_bot_channels(bot_id: str, session: Session = Depends(get_session)):
        """List a bot's channel configurations."""
        return deps.bot_channel_service.list_channels(session=session, bot_id=bot_id)

    @router.post("/api/bots/{bot_id}/channels")
    def create_bot_channel(bot_id: str, payload: ChannelConfigRequest, session: Session = Depends(get_session)):
        """Create a channel configuration for a bot."""
        return deps.bot_channel_service.create_channel(session=session, bot_id=bot_id, payload=payload)

    @router.put("/api/bots/{bot_id}/channels/{channel_id}")
    def update_bot_channel(bot_id: str, channel_id: str, payload: ChannelConfigUpdateRequest, session: Session = Depends(get_session)):
        """Update one channel configuration."""
        return deps.bot_channel_service.update_channel(
            session=session,
            bot_id=bot_id,
            channel_id=channel_id,
            payload=payload,
        )

    @router.delete("/api/bots/{bot_id}/channels/{channel_id}")
    def delete_bot_channel(bot_id: str, channel_id: str, session: Session = Depends(get_session)):
        """Delete one channel configuration."""
        return deps.bot_channel_service.delete_channel(session=session, bot_id=bot_id, channel_id=channel_id)

    @router.get("/api/bots/{bot_id}/tools-config")
    def get_bot_tools_config(bot_id: str, session: Session = Depends(get_session)):
        """Return the bot's tools configuration."""
        return deps.bot_query_service.get_tools_config(session=session, bot_id=bot_id)

    @router.put("/api/bots/{bot_id}/tools-config")
    def update_bot_tools_config(bot_id: str, payload: BotToolsConfigUpdateRequest, session: Session = Depends(get_session)):
        """Replace the bot's tools configuration."""
        return deps.bot_query_service.update_tools_config(session=session, bot_id=bot_id, payload=payload)

    @router.get("/api/bots/{bot_id}/mcp-config")
    def get_bot_mcp_config(bot_id: str, session: Session = Depends(get_session)):
        """Return the bot's MCP server configuration."""
        return deps.bot_config_state_service.get_mcp_config_for_bot(session=session, bot_id=bot_id)

    @router.put("/api/bots/{bot_id}/mcp-config")
    def update_bot_mcp_config(bot_id: str, payload: BotMcpConfigUpdateRequest, session: Session = Depends(get_session)):
        """Replace the bot's MCP server configuration."""
        return deps.bot_config_state_service.update_mcp_config_for_bot(
            session=session,
            bot_id=bot_id,
            mcp_servers=payload.mcp_servers,
        )

    @router.get("/api/bots/{bot_id}/env-params")
    def get_bot_env_params(bot_id: str, session: Session = Depends(get_session)):
        """Return the bot's environment parameters."""
        return deps.bot_config_state_service.get_env_params_for_bot(session=session, bot_id=bot_id)

    @router.put("/api/bots/{bot_id}/env-params")
    def update_bot_env_params(bot_id: str, payload: BotEnvParamsUpdateRequest, session: Session = Depends(get_session)):
        """Replace the bot's environment parameters."""
        return deps.bot_config_state_service.update_env_params_for_bot(
            session=session,
            bot_id=bot_id,
            env_params=payload.env_params,
        )

    @router.get("/api/bots/{bot_id}/cron/jobs")
    def list_cron_jobs(bot_id: str, include_disabled: bool = True, session: Session = Depends(get_session)):
        """List the bot's cron jobs."""
        return deps.bot_config_state_service.list_cron_jobs_for_bot(
            session=session,
            bot_id=bot_id,
            include_disabled=include_disabled,
        )

    @router.post("/api/bots/{bot_id}/cron/jobs/{job_id}/stop")
    def stop_cron_job(bot_id: str, job_id: str, session: Session = Depends(get_session)):
        """Disable one cron job."""
        return deps.bot_config_state_service.stop_cron_job_for_bot(session=session, bot_id=bot_id, job_id=job_id)

    @router.delete("/api/bots/{bot_id}/cron/jobs/{job_id}")
    def delete_cron_job(bot_id: str, job_id: str, session: Session = Depends(get_session)):
        """Remove one cron job."""
        return deps.bot_config_state_service.delete_cron_job_for_bot(session=session, bot_id=bot_id, job_id=job_id)

    return router
|
||||||
|
|
@ -0,0 +1,197 @@
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, File, Form, Request, UploadFile, WebSocket
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from core.database import get_session
|
||||||
|
from schemas.dashboard import (
|
||||||
|
CommandRequest,
|
||||||
|
MessageFeedbackRequest,
|
||||||
|
WorkspaceFileUpdateRequest,
|
||||||
|
)
|
||||||
|
|
||||||
|
from api.dashboard_router_support import DashboardRouterDeps
|
||||||
|
|
||||||
|
|
||||||
|
def build_dashboard_bot_io_router(*, deps: DashboardRouterDeps) -> APIRouter:
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.post("/api/bots/{bot_id}/command")
|
||||||
|
def send_command(bot_id: str, payload: CommandRequest, request: Request, session: Session = Depends(get_session)):
|
||||||
|
return deps.runtime_service.send_command_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
payload=payload,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/messages")
|
||||||
|
def list_bot_messages(bot_id: str, limit: int = 200, session: Session = Depends(get_session)):
|
||||||
|
return deps.bot_message_service.list_messages(session=session, bot_id=bot_id, limit=limit)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/messages/page")
|
||||||
|
def list_bot_messages_page(bot_id: str, limit: Optional[int] = None, before_id: Optional[int] = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.bot_message_service.list_messages_page(
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
limit=limit,
|
||||||
|
before_id=before_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/messages/by-date")
|
||||||
|
def list_bot_messages_by_date(
|
||||||
|
bot_id: str,
|
||||||
|
date: str,
|
||||||
|
tz_offset_minutes: Optional[int] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
session: Session = Depends(get_session),
|
||||||
|
):
|
||||||
|
return deps.bot_message_service.list_messages_by_date(
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
date=date,
|
||||||
|
tz_offset_minutes=tz_offset_minutes,
|
||||||
|
limit=limit,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.put("/api/bots/{bot_id}/messages/{message_id}/feedback")
|
||||||
|
def update_bot_message_feedback(bot_id: str, message_id: int, payload: MessageFeedbackRequest, session: Session = Depends(get_session)):
|
||||||
|
return deps.bot_message_service.update_feedback(
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
message_id=message_id,
|
||||||
|
feedback=payload.feedback,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.delete("/api/bots/{bot_id}/messages")
|
||||||
|
def clear_bot_messages(bot_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
return deps.runtime_service.clear_messages_for_bot(app_state=request.app.state, session=session, bot_id=bot_id)
|
||||||
|
|
||||||
|
@router.post("/api/bots/{bot_id}/sessions/dashboard-direct/clear")
|
||||||
|
def clear_bot_dashboard_direct_session(bot_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
return deps.runtime_service.clear_dashboard_direct_session_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/logs")
|
||||||
|
def get_bot_logs(bot_id: str, tail: int = 300, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.runtime_service.get_logs_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
tail=tail,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/workspace/tree")
|
||||||
|
def get_workspace_tree(bot_id: str, path: Optional[str] = None, recursive: bool = False, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.list_tree_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
recursive=recursive,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/workspace/file")
|
||||||
|
def read_workspace_file(bot_id: str, path: str, max_bytes: int = 200000, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.read_file_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
max_bytes=max_bytes,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.put("/api/bots/{bot_id}/workspace/file")
|
||||||
|
def update_workspace_file(bot_id: str, path: str, payload: WorkspaceFileUpdateRequest, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.write_markdown_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
content=str(payload.content or ""),
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/workspace/download")
|
||||||
|
def download_workspace_file(bot_id: str, path: str, download: bool = False, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.serve_file_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
download=download,
|
||||||
|
request=request,
|
||||||
|
public=False,
|
||||||
|
redirect_html_to_raw=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/public/bots/{bot_id}/workspace/download")
|
||||||
|
def public_download_workspace_file(bot_id: str, path: str, download: bool = False, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.serve_file_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
download=download,
|
||||||
|
request=request,
|
||||||
|
public=True,
|
||||||
|
redirect_html_to_raw=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/api/bots/{bot_id}/workspace/raw/{path:path}")
|
||||||
|
def raw_workspace_file(bot_id: str, path: str, download: bool = False, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.serve_file_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
download=download,
|
||||||
|
request=request,
|
||||||
|
public=False,
|
||||||
|
redirect_html_to_raw=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.get("/public/bots/{bot_id}/workspace/raw/{path:path}")
|
||||||
|
def public_raw_workspace_file(bot_id: str, path: str, download: bool = False, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return deps.workspace_service.serve_file_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
download=download,
|
||||||
|
request=request,
|
||||||
|
public=True,
|
||||||
|
redirect_html_to_raw=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.post("/api/bots/{bot_id}/workspace/upload")
|
||||||
|
async def upload_workspace_files(bot_id: str, files: List[UploadFile] = File(...), path: Optional[str] = None, request: Request = None, session: Session = Depends(get_session)):
|
||||||
|
return await deps.workspace_service.upload_files_for_bot(
|
||||||
|
app_state=request.app.state,
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
files=files,
|
||||||
|
path=path,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.post("/api/bots/{bot_id}/speech/transcribe")
|
||||||
|
async def transcribe_bot_speech(
|
||||||
|
bot_id: str,
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
language: Optional[str] = Form(None),
|
||||||
|
session: Session = Depends(get_session),
|
||||||
|
):
|
||||||
|
return await deps.speech_transcription_service.transcribe(
|
||||||
|
session=session,
|
||||||
|
bot_id=bot_id,
|
||||||
|
file=file,
|
||||||
|
language=language,
|
||||||
|
)
|
||||||
|
|
||||||
|
@router.websocket("/ws/monitor/{bot_id}")
|
||||||
|
async def websocket_endpoint(websocket: WebSocket, bot_id: str):
|
||||||
|
await deps.app_lifecycle_service.handle_websocket(websocket, bot_id)
|
||||||
|
|
||||||
|
return router
|
||||||
|
|
@ -0,0 +1,46 @@
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from api.dashboard_assets_router import build_dashboard_assets_router
|
||||||
|
from api.dashboard_bot_admin_router import build_dashboard_bot_admin_router
|
||||||
|
from api.dashboard_bot_io_router import build_dashboard_bot_io_router
|
||||||
|
from api.dashboard_router_support import DashboardRouterDeps
|
||||||
|
|
||||||
|
|
||||||
|
def build_dashboard_router(
|
||||||
|
*,
|
||||||
|
image_service,
|
||||||
|
provider_test_service,
|
||||||
|
bot_lifecycle_service,
|
||||||
|
bot_query_service,
|
||||||
|
bot_channel_service,
|
||||||
|
skill_service,
|
||||||
|
bot_config_state_service,
|
||||||
|
runtime_service,
|
||||||
|
bot_message_service,
|
||||||
|
workspace_service,
|
||||||
|
speech_transcription_service,
|
||||||
|
app_lifecycle_service,
|
||||||
|
resolve_edge_state_context,
|
||||||
|
logger,
|
||||||
|
) -> APIRouter:
|
||||||
|
deps = DashboardRouterDeps(
|
||||||
|
image_service=image_service,
|
||||||
|
provider_test_service=provider_test_service,
|
||||||
|
bot_lifecycle_service=bot_lifecycle_service,
|
||||||
|
bot_query_service=bot_query_service,
|
||||||
|
bot_channel_service=bot_channel_service,
|
||||||
|
skill_service=skill_service,
|
||||||
|
bot_config_state_service=bot_config_state_service,
|
||||||
|
runtime_service=runtime_service,
|
||||||
|
bot_message_service=bot_message_service,
|
||||||
|
workspace_service=workspace_service,
|
||||||
|
speech_transcription_service=speech_transcription_service,
|
||||||
|
app_lifecycle_service=app_lifecycle_service,
|
||||||
|
resolve_edge_state_context=resolve_edge_state_context,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
router = APIRouter()
|
||||||
|
router.include_router(build_dashboard_assets_router(deps=deps))
|
||||||
|
router.include_router(build_dashboard_bot_admin_router(deps=deps))
|
||||||
|
router.include_router(build_dashboard_bot_io_router(deps=deps))
|
||||||
|
return router
|
||||||
|
|
@ -0,0 +1,20 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Callable
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class DashboardRouterDeps:
|
||||||
|
image_service: Any
|
||||||
|
provider_test_service: Any
|
||||||
|
bot_lifecycle_service: Any
|
||||||
|
bot_query_service: Any
|
||||||
|
bot_channel_service: Any
|
||||||
|
skill_service: Any
|
||||||
|
bot_config_state_service: Any
|
||||||
|
runtime_service: Any
|
||||||
|
bot_message_service: Any
|
||||||
|
workspace_service: Any
|
||||||
|
speech_transcription_service: Any
|
||||||
|
app_lifecycle_service: Any
|
||||||
|
resolve_edge_state_context: Callable[[str], Any]
|
||||||
|
logger: Any
|
||||||
|
|
@ -1,46 +0,0 @@
|
||||||
from fastapi import APIRouter, HTTPException
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
from core.cache import auth_cache, cache
|
|
||||||
from core.database import engine
|
|
||||||
from core.settings import DATABASE_ENGINE, REDIS_ENABLED, REDIS_PREFIX, REDIS_URL
|
|
||||||
from models.bot import BotInstance
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/health")
|
|
||||||
def get_health():
|
|
||||||
try:
|
|
||||||
with Session(engine) as session:
|
|
||||||
session.exec(select(BotInstance).limit(1)).first()
|
|
||||||
return {"status": "ok", "database": DATABASE_ENGINE}
|
|
||||||
except Exception as exc:
|
|
||||||
raise HTTPException(status_code=503, detail=f"database check failed: {exc}") from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/health/cache")
|
|
||||||
def get_cache_health():
|
|
||||||
redis_url = str(REDIS_URL or "").strip()
|
|
||||||
configured = bool(REDIS_ENABLED and redis_url)
|
|
||||||
client_enabled = bool(getattr(cache, "enabled", False))
|
|
||||||
reachable = bool(cache.ping()) if client_enabled else False
|
|
||||||
status = "ok"
|
|
||||||
if configured and not reachable:
|
|
||||||
status = "degraded"
|
|
||||||
return {
|
|
||||||
"status": status,
|
|
||||||
"cache": {
|
|
||||||
"configured": configured,
|
|
||||||
"enabled": client_enabled,
|
|
||||||
"reachable": reachable,
|
|
||||||
"prefix": REDIS_PREFIX,
|
|
||||||
"status": str(getattr(cache, "status", "") or ""),
|
|
||||||
"detail": str(getattr(cache, "status_detail", "") or ""),
|
|
||||||
},
|
|
||||||
"auth_store": {
|
|
||||||
"enabled": bool(getattr(auth_cache, "enabled", False)),
|
|
||||||
"status": str(getattr(auth_cache, "status", "") or ""),
|
|
||||||
"detail": str(getattr(auth_cache, "status_detail", "") or ""),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
@ -1,31 +0,0 @@
|
||||||
from typing import Dict
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from services.image_service import (
|
|
||||||
delete_registered_image,
|
|
||||||
list_docker_images_by_repository,
|
|
||||||
list_registered_images,
|
|
||||||
register_image as register_image_record,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/images")
|
|
||||||
def list_images(session: Session = Depends(get_session)):
|
|
||||||
return list_registered_images(session)
|
|
||||||
|
|
||||||
@router.delete("/api/images/{tag:path}")
|
|
||||||
def delete_image(tag: str, session: Session = Depends(get_session)):
|
|
||||||
return delete_registered_image(session, tag=tag)
|
|
||||||
|
|
||||||
@router.get("/api/docker-images")
|
|
||||||
def list_docker_images(repository: str = "nanobot-base"):
|
|
||||||
return list_docker_images_by_repository(repository)
|
|
||||||
|
|
||||||
@router.post("/api/images/register")
|
|
||||||
def register_image(payload: dict, session: Session = Depends(get_session)):
|
|
||||||
return register_image_record(session, payload)
|
|
||||||
|
|
@ -1,55 +0,0 @@
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from core.settings import PANEL_ACCESS_PASSWORD
|
|
||||||
from schemas.system import PanelLoginRequest
|
|
||||||
from services.platform_auth_service import (
|
|
||||||
clear_panel_token_cookie,
|
|
||||||
create_panel_token,
|
|
||||||
resolve_panel_request_auth,
|
|
||||||
revoke_panel_token,
|
|
||||||
set_panel_token_cookie,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/panel/auth/status")
|
|
||||||
def get_panel_auth_status(request: Request, session: Session = Depends(get_session)):
|
|
||||||
configured = str(PANEL_ACCESS_PASSWORD or "").strip()
|
|
||||||
principal = resolve_panel_request_auth(session, request)
|
|
||||||
return {
|
|
||||||
"enabled": bool(configured),
|
|
||||||
"authenticated": bool(principal.authenticated),
|
|
||||||
"auth_source": principal.auth_source if principal.authenticated else None,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/panel/auth/login")
|
|
||||||
def panel_login(
|
|
||||||
payload: PanelLoginRequest,
|
|
||||||
request: Request,
|
|
||||||
response: Response,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
configured = str(PANEL_ACCESS_PASSWORD or "").strip()
|
|
||||||
if not configured:
|
|
||||||
clear_panel_token_cookie(response)
|
|
||||||
return {"success": True, "enabled": False}
|
|
||||||
supplied = str(payload.password or "").strip()
|
|
||||||
if supplied != configured:
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid panel access password")
|
|
||||||
try:
|
|
||||||
raw_token = create_panel_token(session, request)
|
|
||||||
except RuntimeError as exc:
|
|
||||||
raise HTTPException(status_code=503, detail=str(exc)) from exc
|
|
||||||
set_panel_token_cookie(response, request, raw_token, session)
|
|
||||||
return {"success": True, "enabled": True, "authenticated": True}
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/panel/auth/logout")
|
|
||||||
def panel_logout(request: Request, response: Response, session: Session = Depends(get_session)):
|
|
||||||
revoke_panel_token(session, request)
|
|
||||||
clear_panel_token_cookie(response)
|
|
||||||
return {"success": True}
|
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from api.platform_overview_router import router as platform_overview_router
|
||||||
|
from api.platform_settings_router import router as platform_settings_router
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
router.include_router(platform_overview_router)
|
||||||
|
router.include_router(platform_settings_router)
|
||||||
|
|
@ -0,0 +1,159 @@
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from core.database import get_session
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.target import ProviderTarget
|
||||||
|
from services.node_registry_service import ManagedNode
|
||||||
|
|
||||||
|
from api.platform_node_support import (
|
||||||
|
edge_node_self_with_native_preflight,
|
||||||
|
managed_node_from_payload,
|
||||||
|
normalize_node_payload,
|
||||||
|
serialize_node,
|
||||||
|
)
|
||||||
|
from api.platform_shared import (
|
||||||
|
cached_platform_nodes_payload,
|
||||||
|
invalidate_platform_nodes_cache,
|
||||||
|
invalidate_platform_overview_cache,
|
||||||
|
logger,
|
||||||
|
store_platform_nodes_payload,
|
||||||
|
)
|
||||||
|
from clients.edge.errors import log_edge_failure
|
||||||
|
from schemas.platform import ManagedNodePayload
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/nodes")
|
||||||
|
def list_platform_nodes(request: Request, session: Session = Depends(get_session)):
|
||||||
|
cached_payload = cached_platform_nodes_payload()
|
||||||
|
if cached_payload is not None:
|
||||||
|
return cached_payload
|
||||||
|
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "list_nodes"):
|
||||||
|
return {"items": []}
|
||||||
|
resolve_edge_client = getattr(request.app.state, "resolve_edge_client", None)
|
||||||
|
refreshed_items = []
|
||||||
|
for node in node_registry.list_nodes():
|
||||||
|
metadata = dict(node.metadata or {})
|
||||||
|
refresh_failed = False
|
||||||
|
if callable(resolve_edge_client) and str(metadata.get("transport_kind") or "").strip().lower() == "edge" and bool(node.enabled):
|
||||||
|
try:
|
||||||
|
client = resolve_edge_client(
|
||||||
|
ProviderTarget(
|
||||||
|
node_id=node.node_id,
|
||||||
|
transport_kind="edge",
|
||||||
|
runtime_kind=str(metadata.get("runtime_kind") or "docker"),
|
||||||
|
core_adapter=str(metadata.get("core_adapter") or "nanobot"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
node_self = edge_node_self_with_native_preflight(client=client, node=node)
|
||||||
|
node = node_registry.mark_node_seen(
|
||||||
|
session,
|
||||||
|
node_id=node.node_id,
|
||||||
|
display_name=str(node.display_name or node_self.get("display_name") or node.node_id),
|
||||||
|
capabilities=dict(node_self.get("capabilities") or {}),
|
||||||
|
resources=dict(node_self.get("resources") or {}),
|
||||||
|
)
|
||||||
|
except Exception as exc:
|
||||||
|
refresh_failed = True
|
||||||
|
log_edge_failure(
|
||||||
|
logger,
|
||||||
|
key=f"platform-node-refresh:{node.node_id}",
|
||||||
|
exc=exc,
|
||||||
|
message=f"Failed to refresh edge node metadata for node_id={node.node_id}",
|
||||||
|
)
|
||||||
|
refreshed_items.append((node, refresh_failed))
|
||||||
|
return store_platform_nodes_payload([
|
||||||
|
serialize_node(node, refresh_failed=refresh_failed)
|
||||||
|
for node, refresh_failed in refreshed_items
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/nodes/{node_id}")
|
||||||
|
def get_platform_node(node_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
normalized_node_id = str(node_id or "").strip().lower()
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "get_node"):
|
||||||
|
raise HTTPException(status_code=500, detail="node registry is unavailable")
|
||||||
|
node = node_registry.get_node(normalized_node_id)
|
||||||
|
if node is None:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
|
||||||
|
return serialize_node(node)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/nodes")
|
||||||
|
def create_platform_node(payload: ManagedNodePayload, request: Request, session: Session = Depends(get_session)):
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "get_node"):
|
||||||
|
raise HTTPException(status_code=500, detail="node registry is unavailable")
|
||||||
|
normalized = normalize_node_payload(payload)
|
||||||
|
if node_registry.get_node(normalized.node_id) is not None:
|
||||||
|
raise HTTPException(status_code=409, detail=f"Node already exists: {normalized.node_id}")
|
||||||
|
node = node_registry.upsert_node(session, managed_node_from_payload(normalized))
|
||||||
|
invalidate_platform_overview_cache()
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
return serialize_node(node)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/api/platform/nodes/{node_id}")
|
||||||
|
def update_platform_node(node_id: str, payload: ManagedNodePayload, request: Request, session: Session = Depends(get_session)):
|
||||||
|
normalized_node_id = str(node_id or "").strip().lower()
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "get_node"):
|
||||||
|
raise HTTPException(status_code=500, detail="node registry is unavailable")
|
||||||
|
existing = node_registry.get_node(normalized_node_id)
|
||||||
|
if existing is None:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
|
||||||
|
normalized = normalize_node_payload(payload)
|
||||||
|
if normalized.node_id != normalized_node_id:
|
||||||
|
raise HTTPException(status_code=400, detail="node_id cannot be changed")
|
||||||
|
node = node_registry.upsert_node(
|
||||||
|
session,
|
||||||
|
ManagedNode(
|
||||||
|
node_id=normalized_node_id,
|
||||||
|
display_name=normalized.display_name,
|
||||||
|
base_url=normalized.base_url,
|
||||||
|
enabled=bool(normalized.enabled),
|
||||||
|
auth_token=normalized.auth_token or existing.auth_token,
|
||||||
|
metadata={
|
||||||
|
"transport_kind": normalized.transport_kind,
|
||||||
|
"runtime_kind": normalized.runtime_kind,
|
||||||
|
"core_adapter": normalized.core_adapter,
|
||||||
|
"workspace_root": normalized.workspace_root,
|
||||||
|
"native_command": normalized.native_command,
|
||||||
|
"native_workdir": normalized.native_workdir,
|
||||||
|
"native_sandbox_mode": normalized.native_sandbox_mode,
|
||||||
|
},
|
||||||
|
capabilities=dict(existing.capabilities or {}),
|
||||||
|
resources=dict(existing.resources or {}),
|
||||||
|
last_seen_at=existing.last_seen_at,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
invalidate_platform_overview_cache()
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
return serialize_node(node)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/api/platform/nodes/{node_id}")
|
||||||
|
def delete_platform_node(node_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
normalized_node_id = str(node_id or "").strip().lower()
|
||||||
|
if normalized_node_id == "local":
|
||||||
|
raise HTTPException(status_code=400, detail="Local node cannot be deleted")
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "get_node"):
|
||||||
|
raise HTTPException(status_code=500, detail="node registry is unavailable")
|
||||||
|
if node_registry.get_node(normalized_node_id) is None:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
|
||||||
|
attached_bot_ids = session.exec(select(BotInstance.id).where(BotInstance.node_id == normalized_node_id)).all()
|
||||||
|
if attached_bot_ids:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Node {normalized_node_id} still has bots assigned: {', '.join(str(item) for item in attached_bot_ids[:5])}",
|
||||||
|
)
|
||||||
|
node_registry.delete_node(session, normalized_node_id)
|
||||||
|
invalidate_platform_overview_cache()
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
return {"status": "deleted", "node_id": normalized_node_id}
|
||||||
|
|
@ -0,0 +1,119 @@
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from clients.edge.http import HttpEdgeClient
|
||||||
|
from core.database import get_session
|
||||||
|
from schemas.platform import ManagedNodePayload
|
||||||
|
|
||||||
|
from api.platform_node_support import (
|
||||||
|
managed_node_from_payload,
|
||||||
|
normalize_node_payload,
|
||||||
|
test_edge_connectivity,
|
||||||
|
test_edge_native_preflight,
|
||||||
|
)
|
||||||
|
from api.platform_shared import invalidate_platform_nodes_cache
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/nodes/test")
|
||||||
|
def test_platform_node(payload: ManagedNodePayload, request: Request):
|
||||||
|
normalized = normalize_node_payload(payload)
|
||||||
|
temp_node = managed_node_from_payload(normalized)
|
||||||
|
result = test_edge_connectivity(
|
||||||
|
lambda _target: HttpEdgeClient(
|
||||||
|
node=temp_node,
|
||||||
|
http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
|
||||||
|
async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
|
||||||
|
),
|
||||||
|
temp_node,
|
||||||
|
)
|
||||||
|
return result.model_dump()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/nodes/native/preflight")
|
||||||
|
def test_platform_node_native_preflight(payload: ManagedNodePayload, request: Request):
|
||||||
|
normalized = normalize_node_payload(payload)
|
||||||
|
temp_node = managed_node_from_payload(normalized)
|
||||||
|
result = test_edge_native_preflight(
|
||||||
|
lambda _target: HttpEdgeClient(
|
||||||
|
node=temp_node,
|
||||||
|
http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
|
||||||
|
async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
|
||||||
|
),
|
||||||
|
temp_node,
|
||||||
|
native_command=str(normalized.native_command or "").strip() or None,
|
||||||
|
native_workdir=str(normalized.native_workdir or "").strip() or None,
|
||||||
|
)
|
||||||
|
return result.model_dump()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/nodes/{node_id}/test")
|
||||||
|
def test_saved_platform_node(node_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
normalized_node_id = str(node_id or "").strip().lower()
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "get_node"):
|
||||||
|
raise HTTPException(status_code=500, detail="node registry is unavailable")
|
||||||
|
node = node_registry.get_node(normalized_node_id)
|
||||||
|
if node is None:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
|
||||||
|
transport_kind = str((node.metadata or {}).get("transport_kind") or "edge").strip().lower()
|
||||||
|
if transport_kind != "edge":
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
raise HTTPException(status_code=400, detail="Only edge transport is supported")
|
||||||
|
result = test_edge_connectivity(
|
||||||
|
lambda _target: HttpEdgeClient(
|
||||||
|
node=node,
|
||||||
|
http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
|
||||||
|
async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
|
||||||
|
),
|
||||||
|
node,
|
||||||
|
)
|
||||||
|
if result.ok:
|
||||||
|
node_registry.mark_node_seen(
|
||||||
|
session,
|
||||||
|
node_id=node.node_id,
|
||||||
|
display_name=str(node.display_name or result.node_self.get("display_name") or node.node_id) if result.node_self else node.display_name,
|
||||||
|
capabilities=dict(result.node_self.get("capabilities") or {}) if result.node_self else dict(node.capabilities or {}),
|
||||||
|
resources=dict(result.node_self.get("resources") or {}) if result.node_self else dict(getattr(node, "resources", {}) or {}),
|
||||||
|
)
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
return result.model_dump()
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/nodes/{node_id}/native/preflight")
|
||||||
|
def test_saved_platform_node_native_preflight(node_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
normalized_node_id = str(node_id or "").strip().lower()
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is None or not hasattr(node_registry, "get_node"):
|
||||||
|
raise HTTPException(status_code=500, detail="node registry is unavailable")
|
||||||
|
node = node_registry.get_node(normalized_node_id)
|
||||||
|
if node is None:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Managed node not found: {normalized_node_id}")
|
||||||
|
transport_kind = str((node.metadata or {}).get("transport_kind") or "edge").strip().lower()
|
||||||
|
if transport_kind != "edge":
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
raise HTTPException(status_code=400, detail="Only edge transport is supported")
|
||||||
|
metadata = dict(node.metadata or {})
|
||||||
|
result = test_edge_native_preflight(
|
||||||
|
lambda _target: HttpEdgeClient(
|
||||||
|
node=node,
|
||||||
|
http_client_factory=lambda: httpx.Client(timeout=10.0, trust_env=False),
|
||||||
|
async_http_client_factory=lambda: httpx.AsyncClient(timeout=10.0, trust_env=False),
|
||||||
|
),
|
||||||
|
node,
|
||||||
|
native_command=str(metadata.get("native_command") or "").strip() or None,
|
||||||
|
native_workdir=str(metadata.get("native_workdir") or "").strip() or None,
|
||||||
|
)
|
||||||
|
if result.status == "online" and result.node_self:
|
||||||
|
node_registry.mark_node_seen(
|
||||||
|
session,
|
||||||
|
node_id=node.node_id,
|
||||||
|
display_name=str(node.display_name or result.node_self.get("display_name") or node.node_id),
|
||||||
|
capabilities=dict(result.node_self.get("capabilities") or {}),
|
||||||
|
resources=dict(result.node_self.get("resources") or {}),
|
||||||
|
)
|
||||||
|
invalidate_platform_nodes_cache()
|
||||||
|
return result.model_dump()
|
||||||
|
|
@ -0,0 +1,57 @@
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from clients.edge.errors import log_edge_failure
|
||||||
|
from core.database import get_session
|
||||||
|
from providers.selector import get_runtime_provider
|
||||||
|
from providers.target import ProviderTarget
|
||||||
|
from services.platform_overview_service import build_node_resource_overview
|
||||||
|
|
||||||
|
from api.platform_shared import logger
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/nodes/{node_id}/resources")
|
||||||
|
def get_platform_node_resources(node_id: str, request: Request, session: Session = Depends(get_session)):
|
||||||
|
normalized_node_id = str(node_id or "").strip().lower()
|
||||||
|
node_registry = getattr(request.app.state, "node_registry_service", None)
|
||||||
|
if node_registry is not None and hasattr(node_registry, "get_node"):
|
||||||
|
node = node_registry.get_node(normalized_node_id)
|
||||||
|
if node is not None:
|
||||||
|
metadata = dict(getattr(node, "metadata", {}) or {})
|
||||||
|
if str(metadata.get("transport_kind") or "").strip().lower() == "edge":
|
||||||
|
resolve_edge_client = getattr(request.app.state, "resolve_edge_client", None)
|
||||||
|
if callable(resolve_edge_client):
|
||||||
|
base = build_node_resource_overview(session, node_id=normalized_node_id, read_runtime=None)
|
||||||
|
client = resolve_edge_client(
|
||||||
|
ProviderTarget(
|
||||||
|
node_id=normalized_node_id,
|
||||||
|
transport_kind="edge",
|
||||||
|
runtime_kind=str(metadata.get("runtime_kind") or "docker"),
|
||||||
|
core_adapter=str(metadata.get("core_adapter") or "nanobot"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
resource_report = dict(client.get_node_resources() or {})
|
||||||
|
except Exception as exc:
|
||||||
|
log_edge_failure(
|
||||||
|
logger,
|
||||||
|
key=f"platform-node-resources:{normalized_node_id}",
|
||||||
|
exc=exc,
|
||||||
|
message=f"Failed to load edge node resources for node_id={normalized_node_id}",
|
||||||
|
)
|
||||||
|
return base
|
||||||
|
base["resources"] = dict(resource_report.get("resources") or resource_report)
|
||||||
|
if resource_report:
|
||||||
|
base["node_report"] = resource_report
|
||||||
|
return base
|
||||||
|
|
||||||
|
def _read_runtime(bot):
|
||||||
|
provider = get_runtime_provider(request.app.state, bot)
|
||||||
|
status = str(provider.get_runtime_status(bot_id=str(bot.id or "")) or "STOPPED").upper()
|
||||||
|
runtime = dict(provider.get_resource_snapshot(bot_id=str(bot.id or "")) or {})
|
||||||
|
runtime.setdefault("docker_status", status)
|
||||||
|
return status, runtime
|
||||||
|
|
||||||
|
return build_node_resource_overview(session, node_id=normalized_node_id, read_runtime=_read_runtime)
|
||||||
|
|
@ -0,0 +1,251 @@
|
||||||
|
import shlex
|
||||||
|
import time
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from clients.edge.errors import log_edge_failure, summarize_edge_exception
|
||||||
|
from clients.edge.http import HttpEdgeClient
|
||||||
|
from providers.target import ProviderTarget
|
||||||
|
from schemas.platform import (
|
||||||
|
ManagedNodeConnectivityResult,
|
||||||
|
ManagedNodeNativePreflightResult,
|
||||||
|
ManagedNodePayload,
|
||||||
|
)
|
||||||
|
from services.node_registry_service import ManagedNode
|
||||||
|
|
||||||
|
from api.platform_shared import logger
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_native_sandbox_mode(raw_value: Any) -> str:
|
||||||
|
text = str(raw_value or "").strip().lower()
|
||||||
|
if text in {"workspace", "sandbox", "strict"}:
|
||||||
|
return "workspace"
|
||||||
|
if text in {"full_access", "full-access", "danger-full-access", "escape"}:
|
||||||
|
return "full_access"
|
||||||
|
return "inherit"
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_node_payload(payload: ManagedNodePayload) -> ManagedNodePayload:
|
||||||
|
normalized_node_id = str(payload.node_id or "").strip().lower()
|
||||||
|
if not normalized_node_id:
|
||||||
|
raise HTTPException(status_code=400, detail="node_id is required")
|
||||||
|
transport_kind = str(payload.transport_kind or "edge").strip().lower() or "edge"
|
||||||
|
if transport_kind != "edge":
|
||||||
|
raise HTTPException(status_code=400, detail="Only edge transport is supported")
|
||||||
|
runtime_kind = str(payload.runtime_kind or "docker").strip().lower() or "docker"
|
||||||
|
core_adapter = str(payload.core_adapter or "nanobot").strip().lower() or "nanobot"
|
||||||
|
native_sandbox_mode = normalize_native_sandbox_mode(payload.native_sandbox_mode)
|
||||||
|
base_url = str(payload.base_url or "").strip()
|
||||||
|
if transport_kind == "edge" and not base_url:
|
||||||
|
raise HTTPException(status_code=400, detail="base_url is required for edge nodes")
|
||||||
|
return payload.model_copy(
|
||||||
|
update={
|
||||||
|
"node_id": normalized_node_id,
|
||||||
|
"display_name": str(payload.display_name or normalized_node_id).strip() or normalized_node_id,
|
||||||
|
"base_url": base_url,
|
||||||
|
"auth_token": str(payload.auth_token or "").strip(),
|
||||||
|
"transport_kind": transport_kind,
|
||||||
|
"runtime_kind": runtime_kind,
|
||||||
|
"core_adapter": core_adapter,
|
||||||
|
"workspace_root": str(payload.workspace_root or "").strip(),
|
||||||
|
"native_command": str(payload.native_command or "").strip(),
|
||||||
|
"native_workdir": str(payload.native_workdir or "").strip(),
|
||||||
|
"native_sandbox_mode": native_sandbox_mode,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def managed_node_from_payload(payload: ManagedNodePayload) -> ManagedNode:
    """Build a persistent ManagedNode record from an (unnormalized) API payload.

    Normalization (and its validation errors) is delegated to
    normalize_node_payload; transport/runtime details land in metadata.
    """
    node = normalize_node_payload(payload)
    metadata = {
        "transport_kind": node.transport_kind,
        "runtime_kind": node.runtime_kind,
        "core_adapter": node.core_adapter,
        "workspace_root": node.workspace_root,
        "native_command": node.native_command,
        "native_workdir": node.native_workdir,
        "native_sandbox_mode": node.native_sandbox_mode,
    }
    return ManagedNode(
        node_id=node.node_id,
        display_name=node.display_name,
        base_url=node.base_url,
        enabled=bool(node.enabled),
        auth_token=node.auth_token,
        metadata=metadata,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def node_status(node: ManagedNode, *, refresh_failed: bool = False) -> str:
    """Derive the display status for a managed node.

    Order of precedence: disabled -> unknown (non-edge transport) ->
    offline (the last refresh attempt failed) -> online/unknown depending
    on whether the node has ever been seen.
    """
    if not node.enabled:
        return "disabled"
    metadata = node.metadata or {}
    transport = str(metadata.get("transport_kind") or "edge").strip().lower()
    if transport != "edge":
        # Only edge transport is actively probed, so anything else is opaque.
        return "unknown"
    if refresh_failed:
        return "offline"
    if node.last_seen_at:
        return "online"
    return "unknown"
|
||||||
|
|
||||||
|
|
||||||
|
def serialize_node(node: ManagedNode, *, refresh_failed: bool = False) -> Dict[str, Any]:
    """Flatten a ManagedNode (plus derived status) into a JSON-friendly dict.

    Well-known metadata keys are promoted to top-level string fields; the
    raw metadata dict is also included verbatim under "metadata".
    """
    metadata = dict(node.metadata or {})

    def _meta(key: str, fallback: str = "") -> str:
        return str(metadata.get(key) or fallback)

    return {
        "node_id": node.node_id,
        "display_name": node.display_name,
        "base_url": node.base_url,
        "enabled": bool(node.enabled),
        "transport_kind": _meta("transport_kind"),
        "runtime_kind": _meta("runtime_kind"),
        "core_adapter": _meta("core_adapter"),
        "workspace_root": _meta("workspace_root"),
        "native_command": _meta("native_command"),
        "native_workdir": _meta("native_workdir"),
        "native_sandbox_mode": _meta("native_sandbox_mode", "inherit"),
        "metadata": metadata,
        "capabilities": dict(node.capabilities or {}),
        # getattr: older ManagedNode rows may predate the resources field.
        "resources": dict(getattr(node, "resources", {}) or {}),
        "last_seen_at": node.last_seen_at,
        "status": node_status(node, refresh_failed=refresh_failed),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def split_native_command(raw_command: Optional[str]) -> list[str]:
    """Split a native launcher command string into argv-style tokens.

    Shell-style quoting rules apply. None/blank input yields []. A string
    shlex cannot parse (e.g. an unterminated quote) is returned whole as a
    single-element list rather than raising.
    """
    text = str(raw_command or "").strip()
    if not text:
        return []
    try:
        tokens = shlex.split(text)
    except Exception:
        # Unparseable command line: fall back to passing it through as-is.
        return [text]
    cleaned: list[str] = []
    for token in tokens:
        stripped = str(token or "").strip()
        if stripped:
            cleaned.append(stripped)
    return cleaned
|
||||||
|
|
||||||
|
|
||||||
|
def runtime_native_supported(node_self: Dict[str, Any]) -> bool:
    """Return True only when the node self-report flags native runtime support.

    The flag must be the literal boolean True; truthy stand-ins such as
    "yes" or 1 do not count.
    """
    runtime_caps = (node_self.get("capabilities") or {}).get("runtime") or {}
    return runtime_caps.get("native") is True
|
||||||
|
|
||||||
|
|
||||||
|
def edge_node_self_with_native_preflight(*, client: HttpEdgeClient, node: ManagedNode) -> Dict[str, Any]:
    """Fetch the node's heartbeat self-report, optionally enriched with native-preflight data.

    The preflight probe only runs when native execution is plausibly
    configured (a native command or workdir is set in metadata, or
    runtime_kind is "native"). A failing probe is logged and the plain
    heartbeat payload is returned unchanged — enrichment is best-effort.
    """
    node_self = dict(client.heartbeat_node() or {})
    metadata = dict(node.metadata or {})
    # Blank strings collapse to None so the edge client can apply its own defaults.
    native_command = str(metadata.get("native_command") or "").strip() or None
    native_workdir = str(metadata.get("native_workdir") or "").strip() or None
    runtime_kind = str(metadata.get("runtime_kind") or "docker").strip().lower()
    should_probe = bool(native_command or native_workdir or runtime_kind == "native")
    if not should_probe:
        return node_self
    try:
        preflight = dict(client.preflight_native(native_command=native_command, native_workdir=native_workdir) or {})
    except Exception as exc:
        # Deduplicated failure logging keyed by node id; heartbeat data still returned.
        log_edge_failure(
            logger,
            key=f"platform-node-native-preflight:{node.node_id}",
            exc=exc,
            message=f"Failed to run native preflight for node_id={node.node_id}",
        )
        return node_self
    # Merge probe results into capabilities.process without dropping anything
    # the heartbeat already reported.
    caps = dict(node_self.get("capabilities") or {})
    process_caps = dict(caps.get("process") or {})
    if preflight.get("command"):
        # Only overwrite the resolved command when the probe produced one.
        process_caps["command"] = list(preflight.get("command") or [])
    process_caps["available"] = bool(preflight.get("ok"))
    process_caps["command_available"] = bool(preflight.get("command_available"))
    process_caps["workdir_exists"] = bool(preflight.get("workdir_exists"))
    process_caps["workdir"] = str(preflight.get("workdir") or "")
    process_caps["detail"] = str(preflight.get("detail") or "")
    caps["process"] = process_caps
    node_self["capabilities"] = caps
    # Raw probe result is exposed alongside the merged capabilities.
    node_self["native_preflight"] = preflight
    return node_self
|
||||||
|
|
||||||
|
|
||||||
|
def test_edge_connectivity(resolve_edge_client, node: ManagedNode) -> ManagedNodeConnectivityResult:
    """Probe a managed edge node and report online/offline plus round-trip latency.

    Any exception — client resolution, heartbeat, or native preflight — is
    summarized into an offline result rather than propagated to the caller.
    """
    started = time.perf_counter()
    try:
        client = resolve_edge_client(
            ProviderTarget(
                node_id=node.node_id,
                transport_kind="edge",
                runtime_kind=str((node.metadata or {}).get("runtime_kind") or "docker"),
                core_adapter=str((node.metadata or {}).get("core_adapter") or "nanobot"),
            )
        )
        node_self = edge_node_self_with_native_preflight(client=client, node=node)
        # Clamp to >= 1ms so "0" never reads as "not measured".
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))
        return ManagedNodeConnectivityResult(
            ok=True,
            status="online",
            latency_ms=latency_ms,
            detail="dashboard-edge reachable",
            node_self=node_self,
        )
    except Exception as exc:
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))
        return ManagedNodeConnectivityResult(
            ok=False,
            status="offline",
            latency_ms=latency_ms,
            detail=summarize_edge_exception(exc),
            node_self=None,
        )
|
||||||
|
|
||||||
|
|
||||||
|
def test_edge_native_preflight(
    resolve_edge_client,
    node: ManagedNode,
    *,
    native_command: Optional[str] = None,
    native_workdir: Optional[str] = None,
) -> ManagedNodeNativePreflightResult:
    """Run a native-launcher preflight against an edge node and report the outcome.

    On any failure the result is marked offline and the locally parsed
    command/workdir hints are echoed back, so callers can still display
    what would have been probed.
    """
    started = time.perf_counter()
    # Parse hints up front so they are available in the failure branch too.
    command_hint = split_native_command(native_command)
    workdir_hint = str(native_workdir or "").strip()
    try:
        client = resolve_edge_client(
            ProviderTarget(
                node_id=node.node_id,
                transport_kind="edge",
                runtime_kind=str((node.metadata or {}).get("runtime_kind") or "docker"),
                core_adapter=str((node.metadata or {}).get("core_adapter") or "nanobot"),
            )
        )
        node_self = dict(client.heartbeat_node() or {})
        preflight = dict(
            client.preflight_native(
                native_command=native_command,
                native_workdir=native_workdir,
            ) or {}
        )
        # Clamp to >= 1ms so "0" never reads as "not measured".
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))
        command = [str(item or "").strip() for item in list(preflight.get("command") or []) if str(item or "").strip()]
        workdir = str(preflight.get("workdir") or "")
        detail = str(preflight.get("detail") or "")
        if not detail:
            # Synthesize a human-readable detail when the edge returned none.
            detail = "native launcher ready" if bool(preflight.get("ok")) else "native launcher not ready"
        return ManagedNodeNativePreflightResult(
            ok=bool(preflight.get("ok")),
            status="online",
            latency_ms=latency_ms,
            detail=detail,
            command=command,
            workdir=workdir,
            command_available=bool(preflight.get("command_available")),
            workdir_exists=bool(preflight.get("workdir_exists")),
            runtime_native_supported=runtime_native_supported(node_self),
            node_self=node_self,
        )
    except Exception as exc:
        latency_ms = max(1, int((time.perf_counter() - started) * 1000))
        return ManagedNodeNativePreflightResult(
            ok=False,
            status="offline",
            latency_ms=latency_ms,
            detail=summarize_edge_exception(exc),
            command=command_hint,
            workdir=workdir_hint,
            command_available=False,
            # With no workdir hint there is nothing to check, so it is not
            # reported as missing.
            workdir_exists=False if workdir_hint else True,
            runtime_native_supported=False,
            node_self=None,
        )
|
||||||
|
|
@ -0,0 +1,11 @@
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from api.platform_node_catalog_router import router as platform_node_catalog_router
|
||||||
|
from api.platform_node_probe_router import router as platform_node_probe_router
|
||||||
|
from api.platform_node_resource_router import router as platform_node_resource_router
|
||||||
|
|
||||||
|
# Aggregate the platform-node sub-routers into one mountable router.
router = APIRouter()
router.include_router(platform_node_catalog_router)
router.include_router(platform_node_probe_router)
router.include_router(platform_node_resource_router)
|
||||||
|
|
||||||
|
|
@ -0,0 +1,79 @@
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from api.platform_shared import (
|
||||||
|
apply_platform_runtime_changes,
|
||||||
|
cached_platform_overview_payload,
|
||||||
|
invalidate_platform_nodes_cache,
|
||||||
|
invalidate_platform_overview_cache,
|
||||||
|
store_platform_overview_payload,
|
||||||
|
)
|
||||||
|
from core.database import get_session
|
||||||
|
from providers.selector import get_runtime_provider
|
||||||
|
from services.platform_activity_service import list_activity_events
|
||||||
|
from services.platform_analytics_service import build_dashboard_analytics
|
||||||
|
from services.platform_overview_service import build_platform_overview
|
||||||
|
from services.platform_usage_service import list_usage
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/overview")
def get_platform_overview(request: Request, session: Session = Depends(get_session)):
    """Return the platform overview, served from the short-lived cache when possible."""
    cached_payload = cached_platform_overview_payload()
    if cached_payload is not None:
        return cached_payload

    def _read_runtime(bot):
        # Resolve this bot's runtime provider and read status + resource snapshot.
        provider = get_runtime_provider(request.app.state, bot)
        status = str(provider.get_runtime_status(bot_id=str(bot.id or "")) or "STOPPED").upper()
        runtime = dict(provider.get_resource_snapshot(bot_id=str(bot.id or "")) or {})
        # Keep a docker_status field even for providers that don't set one.
        runtime.setdefault("docker_status", status)
        return status, runtime

    payload = build_platform_overview(session, read_runtime=_read_runtime)
    # store_platform_overview_payload caches and returns the payload unchanged.
    return store_platform_overview_payload(payload)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/cache/clear")
def clear_platform_cache():
    """Invalidate the platform-level caches (overview payload, then node list)."""
    for invalidate in (invalidate_platform_overview_cache, invalidate_platform_nodes_cache):
        invalidate()
    return {"status": "cleared"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/reload")
def reload_platform_runtime(request: Request):
    """Flush platform caches and reset runtime-dependent services."""
    apply_platform_runtime_changes(request)
    return {"status": "reloaded"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/usage")
def get_platform_usage(
    bot_id: Optional[str] = None,  # None means usage across all bots
    limit: int = 100,
    offset: int = 0,
    session: Session = Depends(get_session),
):
    """Return paginated usage records, optionally filtered to one bot."""
    return list_usage(session, bot_id=bot_id, limit=limit, offset=offset)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/dashboard-analytics")
def get_platform_dashboard_analytics(
    since_days: int = 7,     # lookback window for aggregation
    events_limit: int = 20,  # cap on recent events included in the payload
    session: Session = Depends(get_session),
):
    """Return aggregated dashboard analytics for the recent period."""
    return build_dashboard_analytics(session, since_days=since_days, events_limit=events_limit)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/events")
def get_platform_events(
    bot_id: Optional[str] = None,  # None means events across all bots
    limit: int = 100,
    offset: int = 0,
    session: Session = Depends(get_session),
):
    """Return paginated activity events, optionally filtered to one bot."""
    return list_activity_events(session, bot_id=bot_id, limit=limit, offset=offset)
|
||||||
|
|
@ -1,125 +1,9 @@
|
||||||
from typing import Optional
|
from fastapi import APIRouter
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
from api.platform_admin_router import router as platform_admin_router
|
||||||
from sqlmodel import Session
|
from api.platform_nodes_router import router as platform_nodes_router
|
||||||
|
|
||||||
from bootstrap.app_runtime import reload_platform_runtime
|
|
||||||
from core.cache import cache
|
|
||||||
from core.database import get_session
|
|
||||||
from schemas.platform import PlatformSettingsPayload, SystemSettingPayload
|
|
||||||
from services.platform_activity_service import get_bot_activity_stats, list_activity_events
|
|
||||||
from services.platform_login_log_service import list_login_logs
|
|
||||||
from services.platform_overview_service import build_platform_overview
|
|
||||||
from services.platform_settings_service import get_platform_settings, save_platform_settings
|
|
||||||
from services.platform_system_settings_service import (
|
|
||||||
create_or_update_system_setting,
|
|
||||||
delete_system_setting,
|
|
||||||
list_system_settings,
|
|
||||||
)
|
|
||||||
from services.platform_usage_service import list_usage
|
|
||||||
|
|
||||||
router = APIRouter()
|
router = APIRouter()
|
||||||
|
router.include_router(platform_admin_router)
|
||||||
|
router.include_router(platform_nodes_router)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/overview")
|
|
||||||
def get_platform_overview(request: Request, session: Session = Depends(get_session)):
|
|
||||||
docker_manager = getattr(request.app.state, "docker_manager", None)
|
|
||||||
return build_platform_overview(session, docker_manager)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/settings")
|
|
||||||
def get_platform_settings_api(session: Session = Depends(get_session)):
|
|
||||||
return get_platform_settings(session).model_dump()
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/platform/settings")
|
|
||||||
def update_platform_settings_api(payload: PlatformSettingsPayload, request: Request, session: Session = Depends(get_session)):
|
|
||||||
result = save_platform_settings(session, payload).model_dump()
|
|
||||||
reload_platform_runtime(request.app)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/platform/cache/clear")
|
|
||||||
def clear_platform_cache():
|
|
||||||
cache.delete_prefix("")
|
|
||||||
return {"status": "cleared"}
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/platform/reload")
|
|
||||||
def reload_platform_runtime_api(request: Request):
|
|
||||||
reload_platform_runtime(request.app)
|
|
||||||
return {"status": "reloaded"}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/usage")
|
|
||||||
def get_platform_usage(
|
|
||||||
bot_id: Optional[str] = None,
|
|
||||||
limit: int = 100,
|
|
||||||
offset: int = 0,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return list_usage(session, bot_id=bot_id, limit=limit, offset=offset)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/activity-stats")
|
|
||||||
def get_platform_activity_stats(session: Session = Depends(get_session)):
|
|
||||||
return {"items": get_bot_activity_stats(session)}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/events")
|
|
||||||
def get_platform_events(bot_id: Optional[str] = None, limit: int = 100, session: Session = Depends(get_session)):
|
|
||||||
return {"items": list_activity_events(session, bot_id=bot_id, limit=limit)}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/login-logs")
|
|
||||||
def get_platform_login_logs(
|
|
||||||
search: str = "",
|
|
||||||
auth_type: str = "",
|
|
||||||
status: str = "all",
|
|
||||||
limit: int = 50,
|
|
||||||
offset: int = 0,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
return list_login_logs(
|
|
||||||
session,
|
|
||||||
search=search,
|
|
||||||
auth_type=auth_type,
|
|
||||||
status=status,
|
|
||||||
limit=limit,
|
|
||||||
offset=offset,
|
|
||||||
).model_dump()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/system-settings")
|
|
||||||
def get_system_settings(search: str = "", session: Session = Depends(get_session)):
|
|
||||||
return {"items": list_system_settings(session, search=search)}
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/platform/system-settings")
|
|
||||||
def create_system_setting(payload: SystemSettingPayload, request: Request, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
result = create_or_update_system_setting(session, payload)
|
|
||||||
reload_platform_runtime(request.app)
|
|
||||||
return result
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/platform/system-settings/{key}")
|
|
||||||
def update_system_setting(key: str, payload: SystemSettingPayload, request: Request, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
result = create_or_update_system_setting(session, payload.model_copy(update={"key": key}))
|
|
||||||
reload_platform_runtime(request.app)
|
|
||||||
return result
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/api/platform/system-settings/{key}")
|
|
||||||
def remove_system_setting(key: str, request: Request, session: Session = Depends(get_session)):
|
|
||||||
try:
|
|
||||||
delete_system_setting(session, key)
|
|
||||||
reload_platform_runtime(request.app)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
||||||
return {"status": "deleted", "key": key}
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,71 @@
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from api.platform_shared import apply_platform_runtime_changes
|
||||||
|
from core.database import get_session
|
||||||
|
from schemas.platform import PlatformSettingsPayload, SystemSettingPayload
|
||||||
|
from services.platform_settings_service import (
|
||||||
|
create_or_update_system_setting,
|
||||||
|
delete_system_setting,
|
||||||
|
get_platform_settings,
|
||||||
|
list_system_settings,
|
||||||
|
save_platform_settings,
|
||||||
|
)
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/settings")
def get_platform_settings_api(session: Session = Depends(get_session)):
    """Return the persisted platform settings as a plain dict."""
    return get_platform_settings(session).model_dump()
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/api/platform/settings")
def update_platform_settings_api(
    payload: PlatformSettingsPayload,
    request: Request,
    session: Session = Depends(get_session),
):
    """Persist new platform settings and flush runtime caches so they take effect."""
    result = save_platform_settings(session, payload).model_dump()
    # Invalidate caches / reset dependent services only after the write succeeds.
    apply_platform_runtime_changes(request)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/platform/system-settings")
def get_system_settings(search: str = "", session: Session = Depends(get_session)):
    """List system settings, optionally filtered by a search string."""
    return {"items": list_system_settings(session, search=search)}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/platform/system-settings")
def create_system_setting(payload: SystemSettingPayload, request: Request, session: Session = Depends(get_session)):
    """Create (or upsert) a system setting, then propagate the change to the runtime."""
    try:
        result = create_or_update_system_setting(session, payload)
        apply_platform_runtime_changes(request)
        return result
    except ValueError as exc:
        # Service-layer validation failures map to HTTP 400.
        raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/api/platform/system-settings/{key}")
def update_system_setting(
    key: str,
    payload: SystemSettingPayload,
    request: Request,
    session: Session = Depends(get_session),
):
    """Update the system setting at *key*; the path key overrides any key in the body."""
    try:
        result = create_or_update_system_setting(session, payload.model_copy(update={"key": key}))
        apply_platform_runtime_changes(request)
        return result
    except ValueError as exc:
        # Service-layer validation failures map to HTTP 400.
        raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/api/platform/system-settings/{key}")
def remove_system_setting(key: str, request: Request, session: Session = Depends(get_session)):
    """Delete the system setting at *key* and propagate the change to the runtime."""
    try:
        delete_system_setting(session, key)
        apply_platform_runtime_changes(request)
    except ValueError as exc:
        # Service-layer validation failures (e.g. unknown key) map to HTTP 400.
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    return {"status": "deleted", "key": key}
|
||||||
|
|
@ -0,0 +1,54 @@
|
||||||
|
import logging
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastapi import Request
|
||||||
|
|
||||||
|
from core.cache import cache
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
PLATFORM_OVERVIEW_CACHE_KEY = "platform:overview"
|
||||||
|
PLATFORM_OVERVIEW_CACHE_TTL_SECONDS = 15
|
||||||
|
PLATFORM_NODES_CACHE_KEY = "platform:nodes:list"
|
||||||
|
PLATFORM_NODES_CACHE_TTL_SECONDS = 20
|
||||||
|
|
||||||
|
|
||||||
|
def cached_platform_overview_payload() -> Optional[Dict[str, Any]]:
    """Return the cached overview payload, or None when absent or malformed."""
    cached = cache.get_json(PLATFORM_OVERVIEW_CACHE_KEY)
    if isinstance(cached, dict):
        return cached
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def store_platform_overview_payload(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Cache the overview payload (short TTL) and return it unchanged for chaining."""
    cache.set_json(PLATFORM_OVERVIEW_CACHE_KEY, payload, ttl=PLATFORM_OVERVIEW_CACHE_TTL_SECONDS)
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
def invalidate_platform_overview_cache() -> None:
    """Drop the cached platform overview payload."""
    cache.delete(PLATFORM_OVERVIEW_CACHE_KEY)
|
||||||
|
|
||||||
|
|
||||||
|
def cached_platform_nodes_payload() -> Optional[Dict[str, Any]]:
    """Return the cached node-list payload ({"items": [...]}) or None if missing/malformed."""
    cached = cache.get_json(PLATFORM_NODES_CACHE_KEY)
    if isinstance(cached, dict):
        items = cached.get("items")
        if isinstance(items, list):
            # Rebuild the wrapper so callers never see extra cached keys.
            return {"items": items}
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def store_platform_nodes_payload(items: list[Dict[str, Any]]) -> Dict[str, Any]:
    """Wrap the node list as {"items": ...}, cache it (short TTL), and return the wrapper."""
    payload = {"items": items}
    cache.set_json(PLATFORM_NODES_CACHE_KEY, payload, ttl=PLATFORM_NODES_CACHE_TTL_SECONDS)
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
def invalidate_platform_nodes_cache() -> None:
    """Drop the cached managed-node list."""
    cache.delete(PLATFORM_NODES_CACHE_KEY)
|
||||||
|
|
||||||
|
|
||||||
|
def apply_platform_runtime_changes(request: Request) -> None:
    """Propagate a settings change: drop platform caches and reset dependent services."""
    invalidate_platform_overview_cache()
    invalidate_platform_nodes_cache()
    # The speech service (if the app exposes one with reset_runtime) caches
    # runtime configuration, so it must be reset after settings change.
    speech_service = getattr(request.app.state, "speech_service", None)
    if speech_service is not None and hasattr(speech_service, "reset_runtime"):
        speech_service.reset_runtime()
|
||||||
|
|
@ -1,100 +0,0 @@
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from services.skill_market_service import (
|
|
||||||
create_skill_market_item_record,
|
|
||||||
delete_skill_market_item_record,
|
|
||||||
install_skill_market_item_for_bot,
|
|
||||||
list_bot_skill_market_items,
|
|
||||||
list_skill_market_items,
|
|
||||||
update_skill_market_item_record,
|
|
||||||
)
|
|
||||||
from services.skill_service import (
|
|
||||||
delete_workspace_skill_entry,
|
|
||||||
list_bot_skills as list_workspace_bot_skills,
|
|
||||||
upload_bot_skill_zip_to_workspace,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/platform/skills")
def list_skill_market(session: Session = Depends(get_session)):
    """List all skill-market entries."""
    return list_skill_market_items(session)
|
|
||||||
|
|
||||||
@router.post("/api/platform/skills")
async def create_skill_market_item(
    skill_key: str = Form(""),
    display_name: str = Form(""),
    description: str = Form(""),
    file: UploadFile = File(...),  # skill package upload (required)
    session: Session = Depends(get_session),
):
    """Create a skill-market entry from form metadata plus an uploaded package."""
    return await create_skill_market_item_record(
        session,
        skill_key=skill_key,
        display_name=display_name,
        description=description,
        upload=file,
    )
|
|
||||||
|
|
||||||
@router.put("/api/platform/skills/{skill_id}")
async def update_skill_market_item(
    skill_id: int,
    skill_key: str = Form(""),
    display_name: str = Form(""),
    description: str = Form(""),
    file: Optional[UploadFile] = File(None),  # optional replacement package
    session: Session = Depends(get_session),
):
    """Update a skill-market entry; the package file is only replaced when provided."""
    return await update_skill_market_item_record(
        session,
        skill_id=skill_id,
        skill_key=skill_key,
        display_name=display_name,
        description=description,
        upload=file,
    )
|
|
||||||
|
|
||||||
@router.delete("/api/platform/skills/{skill_id}")
def delete_skill_market_item(skill_id: int, session: Session = Depends(get_session)):
    """Delete a skill-market entry by id."""
    return delete_skill_market_item_record(session, skill_id=skill_id)
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/skills")
def list_bot_skills(bot_id: str, session: Session = Depends(get_session)):
    """List the skills present in a bot's workspace (404 when the bot is unknown)."""
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise HTTPException(status_code=404, detail="Bot not found")
    return list_workspace_bot_skills(bot_id)
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/skill-market")
def list_bot_skill_market(bot_id: str, session: Session = Depends(get_session)):
    """List skill-market entries from this bot's perspective (404 when the bot is unknown)."""
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise HTTPException(status_code=404, detail="Bot not found")
    return list_bot_skill_market_items(session, bot_id=bot_id)
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/skill-market/{skill_id}/install")
def install_bot_skill_from_market(bot_id: str, skill_id: int, session: Session = Depends(get_session)):
    """Install a skill-market entry into a bot's workspace (404 when the bot is unknown)."""
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise HTTPException(status_code=404, detail="Bot not found")
    return install_skill_market_item_for_bot(session, bot_id=bot_id, skill_id=skill_id)
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/skills/upload")
async def upload_bot_skill_zip(bot_id: str, file: UploadFile = File(...), session: Session = Depends(get_session)):
    """Upload a zipped skill directly into a bot's workspace (404 when the bot is unknown)."""
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise HTTPException(status_code=404, detail="Bot not found")
    return await upload_bot_skill_zip_to_workspace(bot_id, upload=file)
|
|
||||||
|
|
||||||
@router.delete("/api/bots/{bot_id}/skills/{skill_name}")
def delete_bot_skill(bot_id: str, skill_name: str, session: Session = Depends(get_session)):
    """Remove a named skill from a bot's workspace (404 when the bot is unknown)."""
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise HTTPException(status_code=404, detail="Bot not found")
    return delete_workspace_skill_entry(bot_id, skill_name=skill_name)
|
|
||||||
|
|
@ -0,0 +1,230 @@
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from core.database import get_session
|
||||||
|
from models.sys_auth import SysUser
|
||||||
|
from schemas.sys_auth import (
|
||||||
|
SysAuthBootstrapResponse,
|
||||||
|
SysAuthLoginRequest,
|
||||||
|
SysProfileUpdateRequest,
|
||||||
|
SysAuthStatusResponse,
|
||||||
|
SysRoleGrantBootstrapResponse,
|
||||||
|
SysRoleListResponse,
|
||||||
|
SysRoleSummaryResponse,
|
||||||
|
SysRoleUpsertRequest,
|
||||||
|
SysUserCreateRequest,
|
||||||
|
SysUserListResponse,
|
||||||
|
SysUserSummaryResponse,
|
||||||
|
SysUserUpdateRequest,
|
||||||
|
)
|
||||||
|
from services.sys_auth_service import (
|
||||||
|
DEFAULT_ADMIN_USERNAME,
|
||||||
|
authenticate_user,
|
||||||
|
build_user_bootstrap,
|
||||||
|
create_sys_role,
|
||||||
|
create_sys_user,
|
||||||
|
delete_sys_role,
|
||||||
|
delete_sys_user,
|
||||||
|
issue_user_token,
|
||||||
|
list_role_grant_bootstrap,
|
||||||
|
list_sys_roles,
|
||||||
|
list_sys_users,
|
||||||
|
resolve_user_by_token,
|
||||||
|
revoke_user_token,
|
||||||
|
update_sys_role,
|
||||||
|
update_sys_user,
|
||||||
|
update_current_sys_user_profile,
|
||||||
|
)
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_auth_token(request: Request) -> str:
    """Pull the session token from `Authorization: Bearer`, `X-Auth-Token`, or `?auth_token=`.

    The bearer header wins; header and query fallbacks are tried in that
    order. Returns "" when no token is present.
    """
    bearer = str(request.headers.get("authorization") or "").strip()
    if bearer.lower().startswith("bearer "):
        return bearer[len("bearer "):].strip()
    fallback = request.headers.get("x-auth-token") or request.query_params.get("auth_token") or ""
    return str(fallback).strip()
|
||||||
|
|
||||||
|
|
||||||
|
def _require_current_user(request: Request, session: Session) -> SysUser:
    """Resolve the authenticated, active user for this request or raise 401.

    Prefers a user id already attached to request.state (presumably by auth
    middleware — confirm); falls back to resolving the raw token extracted
    from the request.
    """
    state_user_id = getattr(request.state, "sys_user_id", None)
    if state_user_id:
        user = session.get(SysUser, state_user_id)
        if user is not None and bool(user.is_active):
            return user
    token = _extract_auth_token(request)
    # NOTE(review): this path does not re-check is_active — assumes
    # resolve_user_by_token only returns active users; confirm.
    user = resolve_user_by_token(session, token)
    if user is None:
        raise HTTPException(status_code=401, detail="Authentication required")
    return user
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/sys/auth/status", response_model=SysAuthStatusResponse)
def get_sys_auth_status(session: Session = Depends(get_session)):
    """Report auth availability, the current user count, and the default admin username."""
    # NOTE(review): loads every SysUser row just to count them; a COUNT query
    # would be cheaper if the user table grows large.
    user_count = len(session.exec(select(SysUser)).all())
    return SysAuthStatusResponse(
        enabled=True,
        user_count=user_count,
        default_username=DEFAULT_ADMIN_USERNAME,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/sys/auth/login", response_model=SysAuthBootstrapResponse)
def login_sys_user(payload: SysAuthLoginRequest, session: Session = Depends(get_session)):
    """Authenticate by username/password, issue a session token, and return bootstrap data."""
    # Usernames are compared case-insensitively.
    username = str(payload.username or "").strip().lower()
    password = str(payload.password or "")
    user = authenticate_user(session, username, password)
    if user is None:
        raise HTTPException(status_code=401, detail="Invalid username or password")
    try:
        token, expires_at = issue_user_token(session, user)
    except RuntimeError as exc:
        # Token issuance failure is surfaced as 503 rather than a generic 500.
        raise HTTPException(status_code=503, detail=str(exc)) from exc
    return SysAuthBootstrapResponse.model_validate(build_user_bootstrap(session, user, token=token, expires_at=expires_at))
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/sys/auth/logout")
def logout_sys_user(request: Request, session: Session = Depends(get_session)):
    """Revoke the caller's token. Always reports success, even for unknown tokens."""
    token = _extract_auth_token(request)
    user = resolve_user_by_token(session, token)
    if user is not None:
        # Only revoke tokens that actually resolve to a user.
        revoke_user_token(token)
    return {"success": True}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/sys/auth/me", response_model=SysAuthBootstrapResponse)
|
||||||
|
def get_current_sys_user(request: Request, session: Session = Depends(get_session)):
|
||||||
|
user = _require_current_user(request, session)
|
||||||
|
return SysAuthBootstrapResponse.model_validate(build_user_bootstrap(session, user))
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/api/sys/auth/me", response_model=SysAuthBootstrapResponse)
|
||||||
|
def update_current_sys_user(
|
||||||
|
payload: SysProfileUpdateRequest,
|
||||||
|
request: Request,
|
||||||
|
session: Session = Depends(get_session),
|
||||||
|
):
|
||||||
|
current_user = _require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
user = update_current_sys_user_profile(
|
||||||
|
session,
|
||||||
|
user_id=int(current_user.id or 0),
|
||||||
|
display_name=payload.display_name,
|
||||||
|
password=payload.password,
|
||||||
|
)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return SysAuthBootstrapResponse.model_validate(build_user_bootstrap(session, user))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/sys/users", response_model=SysUserListResponse)
|
||||||
|
def list_sys_users_api(request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
return SysUserListResponse(items=[SysUserSummaryResponse.model_validate(item) for item in list_sys_users(session)])
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/sys/users", response_model=SysUserSummaryResponse)
|
||||||
|
def create_sys_user_api(payload: SysUserCreateRequest, request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
item = create_sys_user(
|
||||||
|
session,
|
||||||
|
username=payload.username,
|
||||||
|
display_name=payload.display_name,
|
||||||
|
password=payload.password,
|
||||||
|
role_id=int(payload.role_id),
|
||||||
|
is_active=bool(payload.is_active),
|
||||||
|
bot_ids=list(payload.bot_ids or []),
|
||||||
|
)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return SysUserSummaryResponse.model_validate(item)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/api/sys/users/{user_id}", response_model=SysUserSummaryResponse)
|
||||||
|
def update_sys_user_api(user_id: int, payload: SysUserUpdateRequest, request: Request, session: Session = Depends(get_session)):
|
||||||
|
current_user = _require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
item = update_sys_user(
|
||||||
|
session,
|
||||||
|
user_id=int(user_id),
|
||||||
|
display_name=payload.display_name,
|
||||||
|
password=payload.password,
|
||||||
|
role_id=int(payload.role_id),
|
||||||
|
is_active=bool(payload.is_active),
|
||||||
|
bot_ids=list(payload.bot_ids or []),
|
||||||
|
acting_user_id=int(current_user.id or 0),
|
||||||
|
)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return SysUserSummaryResponse.model_validate(item)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/api/sys/users/{user_id}")
|
||||||
|
def delete_sys_user_api(user_id: int, request: Request, session: Session = Depends(get_session)):
|
||||||
|
current_user = _require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
delete_sys_user(session, user_id=int(user_id), acting_user_id=int(current_user.id or 0))
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return {"success": True}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/sys/roles", response_model=SysRoleListResponse)
|
||||||
|
def list_sys_roles_api(request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
return SysRoleListResponse(items=[SysRoleSummaryResponse.model_validate(item) for item in list_sys_roles(session)])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/sys/roles/grants/bootstrap", response_model=SysRoleGrantBootstrapResponse)
|
||||||
|
def list_sys_role_grants_bootstrap_api(request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
return SysRoleGrantBootstrapResponse.model_validate(list_role_grant_bootstrap(session))
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/sys/roles", response_model=SysRoleSummaryResponse)
|
||||||
|
def create_sys_role_api(payload: SysRoleUpsertRequest, request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
item = create_sys_role(
|
||||||
|
session,
|
||||||
|
role_key=payload.role_key,
|
||||||
|
name=payload.name,
|
||||||
|
description=payload.description,
|
||||||
|
is_active=bool(payload.is_active),
|
||||||
|
sort_order=int(payload.sort_order),
|
||||||
|
menu_keys=list(payload.menu_keys or []),
|
||||||
|
permission_keys=list(payload.permission_keys or []),
|
||||||
|
)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return SysRoleSummaryResponse.model_validate(item)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/api/sys/roles/{role_id}", response_model=SysRoleSummaryResponse)
|
||||||
|
def update_sys_role_api(role_id: int, payload: SysRoleUpsertRequest, request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
item = update_sys_role(
|
||||||
|
session,
|
||||||
|
role_id=int(role_id),
|
||||||
|
name=payload.name,
|
||||||
|
description=payload.description,
|
||||||
|
is_active=bool(payload.is_active),
|
||||||
|
sort_order=int(payload.sort_order),
|
||||||
|
menu_keys=list(payload.menu_keys or []),
|
||||||
|
permission_keys=list(payload.permission_keys or []),
|
||||||
|
)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return SysRoleSummaryResponse.model_validate(item)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/api/sys/roles/{role_id}")
|
||||||
|
def delete_sys_role_api(role_id: int, request: Request, session: Session = Depends(get_session)):
|
||||||
|
_require_current_user(request, session)
|
||||||
|
try:
|
||||||
|
delete_sys_role(session, role_id=int(role_id))
|
||||||
|
except ValueError as exc:
|
||||||
|
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||||
|
return {"success": True}
|
||||||
|
|
@ -1,73 +0,0 @@
|
||||||
from fastapi import APIRouter, HTTPException
|
|
||||||
|
|
||||||
from core.speech_service import inspect_speech_model_status
|
|
||||||
from core.utils import _get_default_system_timezone
|
|
||||||
from schemas.system import SystemTemplatesUpdateRequest
|
|
||||||
from services.platform_settings_service import get_platform_settings_snapshot, get_speech_runtime_settings
|
|
||||||
from services.template_service import (
|
|
||||||
get_agent_md_templates,
|
|
||||||
get_topic_presets,
|
|
||||||
update_agent_md_templates,
|
|
||||||
update_topic_presets,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
@router.get("/api/system/defaults")
|
|
||||||
def get_system_defaults():
|
|
||||||
md_templates = get_agent_md_templates()
|
|
||||||
platform_settings = get_platform_settings_snapshot()
|
|
||||||
speech_settings = get_speech_runtime_settings()
|
|
||||||
model_status = inspect_speech_model_status()
|
|
||||||
return {
|
|
||||||
"templates": md_templates,
|
|
||||||
"limits": {
|
|
||||||
"upload_max_mb": platform_settings.upload_max_mb,
|
|
||||||
},
|
|
||||||
"workspace": {
|
|
||||||
"download_extensions": list(platform_settings.workspace_download_extensions),
|
|
||||||
"allowed_attachment_extensions": list(platform_settings.allowed_attachment_extensions),
|
|
||||||
},
|
|
||||||
"bot": {
|
|
||||||
"system_timezone": _get_default_system_timezone(),
|
|
||||||
},
|
|
||||||
"chat": {
|
|
||||||
"pull_page_size": platform_settings.chat_pull_page_size,
|
|
||||||
"page_size": platform_settings.page_size,
|
|
||||||
},
|
|
||||||
"topic_presets": get_topic_presets()["presets"],
|
|
||||||
"speech": {
|
|
||||||
"enabled": speech_settings["enabled"],
|
|
||||||
"model": speech_settings["model"],
|
|
||||||
"device": speech_settings["device"],
|
|
||||||
"max_audio_seconds": speech_settings["max_audio_seconds"],
|
|
||||||
"default_language": speech_settings["default_language"],
|
|
||||||
"ready": model_status["ready"],
|
|
||||||
"message": model_status["message"],
|
|
||||||
"expected_path": model_status["expected_path"],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
@router.get("/api/system/templates")
|
|
||||||
def get_system_templates():
|
|
||||||
return {
|
|
||||||
"agent_md_templates": get_agent_md_templates(),
|
|
||||||
"topic_presets": get_topic_presets(),
|
|
||||||
}
|
|
||||||
|
|
||||||
@router.put("/api/system/templates")
|
|
||||||
def update_system_templates(payload: SystemTemplatesUpdateRequest):
|
|
||||||
if payload.agent_md_templates is not None:
|
|
||||||
update_agent_md_templates(payload.agent_md_templates.model_dump())
|
|
||||||
|
|
||||||
if payload.topic_presets is not None:
|
|
||||||
try:
|
|
||||||
update_topic_presets(payload.topic_presets)
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
||||||
|
|
||||||
return {
|
|
||||||
"status": "ok",
|
|
||||||
"agent_md_templates": get_agent_md_templates(),
|
|
||||||
"topic_presets": get_topic_presets(),
|
|
||||||
}
|
|
||||||
|
|
@ -0,0 +1,29 @@
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from schemas.dashboard import SystemTemplatesUpdateRequest
|
||||||
|
|
||||||
|
|
||||||
|
def build_system_runtime_router(*, system_service) -> APIRouter:
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
@router.get("/api/system/defaults")
|
||||||
|
def get_system_defaults():
|
||||||
|
return system_service.get_system_defaults()
|
||||||
|
|
||||||
|
@router.get("/api/system/templates")
|
||||||
|
def get_system_templates():
|
||||||
|
return system_service.get_system_templates()
|
||||||
|
|
||||||
|
@router.put("/api/system/templates")
|
||||||
|
def update_system_templates(payload: SystemTemplatesUpdateRequest):
|
||||||
|
return system_service.update_system_templates(payload=payload)
|
||||||
|
|
||||||
|
@router.get("/api/health")
|
||||||
|
def get_health():
|
||||||
|
return system_service.get_health()
|
||||||
|
|
||||||
|
@router.get("/api/health/cache")
|
||||||
|
def get_cache_health():
|
||||||
|
return system_service.get_cache_health()
|
||||||
|
|
||||||
|
return router
|
||||||
|
|
@ -1,24 +1,42 @@
|
||||||
from typing import Any, Dict, Optional
|
import json
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
from fastapi import APIRouter, Depends, HTTPException
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from sqlmodel import Session
|
from sqlalchemy import func
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
from core.database import get_session
|
from core.database import get_session
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from models.topic import TopicItem, TopicTopic
|
||||||
from services.topic_service import (
|
from services.topic_service import (
|
||||||
create_topic,
|
_TOPIC_KEY_RE,
|
||||||
delete_topic,
|
_list_topics,
|
||||||
delete_topic_item,
|
_normalize_topic_key,
|
||||||
get_topic_item_stats,
|
_topic_item_to_dict,
|
||||||
list_topic_items,
|
_topic_to_dict,
|
||||||
list_topics,
|
|
||||||
mark_topic_item_read,
|
|
||||||
update_topic,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
router = APIRouter()
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
def _count_topic_items(
|
||||||
|
session: Session,
|
||||||
|
bot_id: str,
|
||||||
|
topic_key: Optional[str] = None,
|
||||||
|
unread_only: bool = False,
|
||||||
|
) -> int:
|
||||||
|
stmt = select(func.count()).select_from(TopicItem).where(TopicItem.bot_id == bot_id)
|
||||||
|
normalized_topic_key = _normalize_topic_key(topic_key or "")
|
||||||
|
if normalized_topic_key:
|
||||||
|
stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
|
||||||
|
if unread_only:
|
||||||
|
stmt = stmt.where(TopicItem.is_read == False) # noqa: E712
|
||||||
|
value = session.exec(stmt).one()
|
||||||
|
return int(value or 0)
|
||||||
|
|
||||||
|
|
||||||
class TopicCreateRequest(BaseModel):
|
class TopicCreateRequest(BaseModel):
|
||||||
topic_key: str
|
topic_key: str
|
||||||
name: Optional[str] = None
|
name: Optional[str] = None
|
||||||
|
|
@ -38,31 +56,112 @@ class TopicUpdateRequest(BaseModel):
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/topics")
|
@router.get("/api/bots/{bot_id}/topics")
|
||||||
def list_bot_topics(bot_id: str, session: Session = Depends(get_session)):
|
def list_bot_topics(bot_id: str, session: Session = Depends(get_session)):
|
||||||
return list_topics(session, bot_id)
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
return _list_topics(session, bot_id)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/topics")
|
@router.post("/api/bots/{bot_id}/topics")
|
||||||
def create_bot_topic(bot_id: str, payload: TopicCreateRequest, session: Session = Depends(get_session)):
|
def create_bot_topic(bot_id: str, payload: TopicCreateRequest, session: Session = Depends(get_session)):
|
||||||
return create_topic(
|
bot = session.get(BotInstance, bot_id)
|
||||||
session,
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
topic_key = _normalize_topic_key(payload.topic_key)
|
||||||
|
if not topic_key:
|
||||||
|
raise HTTPException(status_code=400, detail="topic_key is required")
|
||||||
|
if not _TOPIC_KEY_RE.fullmatch(topic_key):
|
||||||
|
raise HTTPException(status_code=400, detail="invalid topic_key")
|
||||||
|
exists = session.exec(
|
||||||
|
select(TopicTopic)
|
||||||
|
.where(TopicTopic.bot_id == bot_id)
|
||||||
|
.where(TopicTopic.topic_key == topic_key)
|
||||||
|
.limit(1)
|
||||||
|
).first()
|
||||||
|
if exists:
|
||||||
|
raise HTTPException(status_code=400, detail=f"Topic already exists: {topic_key}")
|
||||||
|
|
||||||
|
now = datetime.utcnow()
|
||||||
|
row = TopicTopic(
|
||||||
bot_id=bot_id,
|
bot_id=bot_id,
|
||||||
topic_key=payload.topic_key,
|
topic_key=topic_key,
|
||||||
name=payload.name,
|
name=str(payload.name or topic_key).strip() or topic_key,
|
||||||
description=payload.description,
|
description=str(payload.description or "").strip(),
|
||||||
is_active=payload.is_active,
|
is_active=bool(payload.is_active),
|
||||||
routing=payload.routing,
|
is_default_fallback=False,
|
||||||
view_schema=payload.view_schema,
|
routing_json=json.dumps(payload.routing or {}, ensure_ascii=False),
|
||||||
|
view_schema_json=json.dumps(payload.view_schema or {}, ensure_ascii=False),
|
||||||
|
created_at=now,
|
||||||
|
updated_at=now,
|
||||||
)
|
)
|
||||||
|
session.add(row)
|
||||||
|
session.commit()
|
||||||
|
session.refresh(row)
|
||||||
|
return _topic_to_dict(row)
|
||||||
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}/topics/{topic_key}")
|
@router.put("/api/bots/{bot_id}/topics/{topic_key}")
|
||||||
def update_bot_topic(bot_id: str, topic_key: str, payload: TopicUpdateRequest, session: Session = Depends(get_session)):
|
def update_bot_topic(bot_id: str, topic_key: str, payload: TopicUpdateRequest, session: Session = Depends(get_session)):
|
||||||
return update_topic(session, bot_id=bot_id, topic_key=topic_key, updates=payload.model_dump(exclude_unset=True))
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
normalized_key = _normalize_topic_key(topic_key)
|
||||||
|
if not normalized_key:
|
||||||
|
raise HTTPException(status_code=400, detail="topic_key is required")
|
||||||
|
row = session.exec(
|
||||||
|
select(TopicTopic)
|
||||||
|
.where(TopicTopic.bot_id == bot_id)
|
||||||
|
.where(TopicTopic.topic_key == normalized_key)
|
||||||
|
.limit(1)
|
||||||
|
).first()
|
||||||
|
if not row:
|
||||||
|
raise HTTPException(status_code=404, detail="Topic not found")
|
||||||
|
|
||||||
|
update_data = payload.model_dump(exclude_unset=True)
|
||||||
|
if "name" in update_data:
|
||||||
|
row.name = str(update_data.get("name") or "").strip() or row.topic_key
|
||||||
|
if "description" in update_data:
|
||||||
|
row.description = str(update_data.get("description") or "").strip()
|
||||||
|
if "is_active" in update_data:
|
||||||
|
row.is_active = bool(update_data.get("is_active"))
|
||||||
|
if "routing" in update_data:
|
||||||
|
row.routing_json = json.dumps(update_data.get("routing") or {}, ensure_ascii=False)
|
||||||
|
if "view_schema" in update_data:
|
||||||
|
row.view_schema_json = json.dumps(update_data.get("view_schema") or {}, ensure_ascii=False)
|
||||||
|
row.is_default_fallback = False
|
||||||
|
row.updated_at = datetime.utcnow()
|
||||||
|
session.add(row)
|
||||||
|
session.commit()
|
||||||
|
session.refresh(row)
|
||||||
|
return _topic_to_dict(row)
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/api/bots/{bot_id}/topics/{topic_key}")
|
@router.delete("/api/bots/{bot_id}/topics/{topic_key}")
|
||||||
def delete_bot_topic(bot_id: str, topic_key: str, session: Session = Depends(get_session)):
|
def delete_bot_topic(bot_id: str, topic_key: str, session: Session = Depends(get_session)):
|
||||||
return delete_topic(session, bot_id=bot_id, topic_key=topic_key)
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
normalized_key = _normalize_topic_key(topic_key)
|
||||||
|
if not normalized_key:
|
||||||
|
raise HTTPException(status_code=400, detail="topic_key is required")
|
||||||
|
row = session.exec(
|
||||||
|
select(TopicTopic)
|
||||||
|
.where(TopicTopic.bot_id == bot_id)
|
||||||
|
.where(TopicTopic.topic_key == normalized_key)
|
||||||
|
.limit(1)
|
||||||
|
).first()
|
||||||
|
if not row:
|
||||||
|
raise HTTPException(status_code=404, detail="Topic not found")
|
||||||
|
items = session.exec(
|
||||||
|
select(TopicItem)
|
||||||
|
.where(TopicItem.bot_id == bot_id)
|
||||||
|
.where(TopicItem.topic_key == normalized_key)
|
||||||
|
).all()
|
||||||
|
for item in items:
|
||||||
|
session.delete(item)
|
||||||
|
session.delete(row)
|
||||||
|
session.commit()
|
||||||
|
return {"status": "deleted", "bot_id": bot_id, "topic_key": normalized_key}
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/topic-items")
|
@router.get("/api/bots/{bot_id}/topic-items")
|
||||||
|
|
@ -73,19 +172,97 @@ def list_bot_topic_items(
|
||||||
limit: int = 50,
|
limit: int = 50,
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
):
|
):
|
||||||
return list_topic_items(session, bot_id=bot_id, topic_key=topic_key, cursor=cursor, limit=limit)
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
normalized_limit = max(1, min(int(limit or 50), 100))
|
||||||
|
stmt = select(TopicItem).where(TopicItem.bot_id == bot_id)
|
||||||
|
normalized_topic_key = _normalize_topic_key(topic_key or "")
|
||||||
|
if normalized_topic_key:
|
||||||
|
stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
|
||||||
|
if cursor is not None:
|
||||||
|
normalized_cursor = int(cursor)
|
||||||
|
if normalized_cursor > 0:
|
||||||
|
stmt = stmt.where(TopicItem.id < normalized_cursor)
|
||||||
|
rows = session.exec(
|
||||||
|
stmt.order_by(TopicItem.id.desc()).limit(normalized_limit + 1)
|
||||||
|
).all()
|
||||||
|
next_cursor: Optional[int] = None
|
||||||
|
if len(rows) > normalized_limit:
|
||||||
|
next_cursor = rows[-1].id
|
||||||
|
rows = rows[:normalized_limit]
|
||||||
|
return {
|
||||||
|
"bot_id": bot_id,
|
||||||
|
"topic_key": normalized_topic_key or None,
|
||||||
|
"items": [_topic_item_to_dict(row) for row in rows],
|
||||||
|
"next_cursor": next_cursor,
|
||||||
|
"unread_count": _count_topic_items(session, bot_id, normalized_topic_key, unread_only=True),
|
||||||
|
"total_unread_count": _count_topic_items(session, bot_id, unread_only=True),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/topic-items/stats")
|
@router.get("/api/bots/{bot_id}/topic-items/stats")
|
||||||
def get_bot_topic_item_stats(bot_id: str, session: Session = Depends(get_session)):
|
def get_bot_topic_item_stats(bot_id: str, session: Session = Depends(get_session)):
|
||||||
return get_topic_item_stats(session, bot_id=bot_id)
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
latest_item = session.exec(
|
||||||
|
select(TopicItem)
|
||||||
|
.where(TopicItem.bot_id == bot_id)
|
||||||
|
.order_by(TopicItem.id.desc())
|
||||||
|
.limit(1)
|
||||||
|
).first()
|
||||||
|
return {
|
||||||
|
"bot_id": bot_id,
|
||||||
|
"total_count": _count_topic_items(session, bot_id),
|
||||||
|
"unread_count": _count_topic_items(session, bot_id, unread_only=True),
|
||||||
|
"latest_item_id": int(latest_item.id or 0) if latest_item and latest_item.id else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/topic-items/{item_id}/read")
|
@router.post("/api/bots/{bot_id}/topic-items/{item_id}/read")
|
||||||
def mark_bot_topic_item_read(bot_id: str, item_id: int, session: Session = Depends(get_session)):
|
def mark_bot_topic_item_read(bot_id: str, item_id: int, session: Session = Depends(get_session)):
|
||||||
return mark_topic_item_read(session, bot_id=bot_id, item_id=item_id)
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
row = session.exec(
|
||||||
|
select(TopicItem)
|
||||||
|
.where(TopicItem.bot_id == bot_id)
|
||||||
|
.where(TopicItem.id == item_id)
|
||||||
|
.limit(1)
|
||||||
|
).first()
|
||||||
|
if not row:
|
||||||
|
raise HTTPException(status_code=404, detail="Topic item not found")
|
||||||
|
if not bool(row.is_read):
|
||||||
|
row.is_read = True
|
||||||
|
session.add(row)
|
||||||
|
session.commit()
|
||||||
|
session.refresh(row)
|
||||||
|
return {
|
||||||
|
"status": "updated",
|
||||||
|
"bot_id": bot_id,
|
||||||
|
"item": _topic_item_to_dict(row),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/api/bots/{bot_id}/topic-items/{item_id}")
|
@router.delete("/api/bots/{bot_id}/topic-items/{item_id}")
|
||||||
def delete_bot_topic_item(bot_id: str, item_id: int, session: Session = Depends(get_session)):
|
def delete_bot_topic_item(bot_id: str, item_id: int, session: Session = Depends(get_session)):
|
||||||
return delete_topic_item(session, bot_id=bot_id, item_id=item_id)
|
bot = session.get(BotInstance, bot_id)
|
||||||
|
if not bot:
|
||||||
|
raise HTTPException(status_code=404, detail="Bot not found")
|
||||||
|
row = session.exec(
|
||||||
|
select(TopicItem)
|
||||||
|
.where(TopicItem.bot_id == bot_id)
|
||||||
|
.where(TopicItem.id == item_id)
|
||||||
|
.limit(1)
|
||||||
|
).first()
|
||||||
|
if not row:
|
||||||
|
raise HTTPException(status_code=404, detail="Topic item not found")
|
||||||
|
payload = _topic_item_to_dict(row)
|
||||||
|
session.delete(row)
|
||||||
|
session.commit()
|
||||||
|
return {
|
||||||
|
"status": "deleted",
|
||||||
|
"bot_id": bot_id,
|
||||||
|
"item": payload,
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,146 +0,0 @@
|
||||||
from typing import List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, File, HTTPException, Request, UploadFile
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.database import get_session
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from schemas.system import WorkspaceFileUpdateRequest
|
|
||||||
from services.workspace_service import (
|
|
||||||
get_workspace_tree_data,
|
|
||||||
read_workspace_text_file,
|
|
||||||
serve_workspace_file,
|
|
||||||
update_workspace_markdown_file,
|
|
||||||
upload_workspace_files_to_workspace,
|
|
||||||
)
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/workspace/tree")
|
|
||||||
def get_workspace_tree(
|
|
||||||
bot_id: str,
|
|
||||||
path: Optional[str] = None,
|
|
||||||
recursive: bool = False,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return get_workspace_tree_data(bot_id, path=path, recursive=recursive)
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/workspace/file")
|
|
||||||
def read_workspace_file(
|
|
||||||
bot_id: str,
|
|
||||||
path: str,
|
|
||||||
max_bytes: int = 200000,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return read_workspace_text_file(bot_id, path=path, max_bytes=max_bytes)
|
|
||||||
|
|
||||||
@router.put("/api/bots/{bot_id}/workspace/file")
|
|
||||||
def update_workspace_file(
|
|
||||||
bot_id: str,
|
|
||||||
path: str,
|
|
||||||
payload: WorkspaceFileUpdateRequest,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return update_workspace_markdown_file(bot_id, path=path, content=payload.content)
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/workspace/download")
|
|
||||||
def download_workspace_file(
|
|
||||||
bot_id: str,
|
|
||||||
path: str,
|
|
||||||
download: bool = False,
|
|
||||||
request: Request = None,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return serve_workspace_file(
|
|
||||||
bot_id=bot_id,
|
|
||||||
path=path,
|
|
||||||
download=download,
|
|
||||||
request=request,
|
|
||||||
public=False,
|
|
||||||
redirect_html_to_raw=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
@router.get("/public/bots/{bot_id}/workspace/download")
|
|
||||||
def public_download_workspace_file(
|
|
||||||
bot_id: str,
|
|
||||||
path: str,
|
|
||||||
download: bool = False,
|
|
||||||
request: Request = None,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return serve_workspace_file(
|
|
||||||
bot_id=bot_id,
|
|
||||||
path=path,
|
|
||||||
download=download,
|
|
||||||
request=request,
|
|
||||||
public=True,
|
|
||||||
redirect_html_to_raw=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
@router.get("/api/bots/{bot_id}/workspace/raw/{path:path}")
|
|
||||||
def raw_workspace_file(
|
|
||||||
bot_id: str,
|
|
||||||
path: str,
|
|
||||||
download: bool = False,
|
|
||||||
request: Request = None,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return serve_workspace_file(
|
|
||||||
bot_id=bot_id,
|
|
||||||
path=path,
|
|
||||||
download=download,
|
|
||||||
request=request,
|
|
||||||
public=False,
|
|
||||||
redirect_html_to_raw=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
@router.get("/public/bots/{bot_id}/workspace/raw/{path:path}")
|
|
||||||
def public_raw_workspace_file(
|
|
||||||
bot_id: str,
|
|
||||||
path: str,
|
|
||||||
download: bool = False,
|
|
||||||
request: Request = None,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return serve_workspace_file(
|
|
||||||
bot_id=bot_id,
|
|
||||||
path=path,
|
|
||||||
download=download,
|
|
||||||
request=request,
|
|
||||||
public=True,
|
|
||||||
redirect_html_to_raw=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
@router.post("/api/bots/{bot_id}/workspace/upload")
|
|
||||||
async def upload_workspace_files(
|
|
||||||
bot_id: str,
|
|
||||||
files: List[UploadFile] = File(...),
|
|
||||||
path: Optional[str] = None,
|
|
||||||
session: Session = Depends(get_session),
|
|
||||||
):
|
|
||||||
bot = session.get(BotInstance, bot_id)
|
|
||||||
if not bot:
|
|
||||||
raise HTTPException(status_code=404, detail="Bot not found")
|
|
||||||
return await upload_workspace_files_to_workspace(bot_id, files=files, path=path)
|
|
||||||
|
|
@ -1,64 +1,85 @@
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI, Request
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
|
||||||
from api.bot_config_router import router as bot_config_router
|
|
||||||
from api.bot_management_router import router as bot_management_router
|
|
||||||
from api.bot_router import router as bot_router
|
|
||||||
from api.bot_runtime_router import router as bot_runtime_router
|
|
||||||
from api.bot_speech_router import router as bot_speech_router
|
|
||||||
from api.chat_history_router import router as chat_history_router
|
|
||||||
from api.chat_router import router as chat_router
|
|
||||||
from api.health_router import router as health_router
|
|
||||||
from api.image_router import router as image_router
|
|
||||||
from api.panel_auth_router import router as panel_auth_router
|
|
||||||
from api.platform_router import router as platform_router
|
from api.platform_router import router as platform_router
|
||||||
from api.skill_router import router as skill_router
|
from api.sys_router import router as sys_router
|
||||||
from api.system_router import router as system_router
|
from api.system_runtime_router import build_system_runtime_router
|
||||||
from api.topic_router import router as topic_router
|
from api.topic_router import router as topic_router
|
||||||
from api.workspace_router import router as workspace_router
|
from bootstrap.app_runtime import assemble_app_runtime
|
||||||
from bootstrap.app_runtime import register_app_runtime
|
from core.config_manager import BotConfigManager
|
||||||
from core.auth_middleware import AuthAccessMiddleware
|
from core.docker_manager import BotDockerManager
|
||||||
from core.docker_instance import docker_manager
|
from core.settings import BOTS_WORKSPACE_ROOT, DATA_ROOT
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT, CORS_ALLOWED_ORIGINS, DATA_ROOT
|
|
||||||
from core.speech_service import WhisperSpeechService
|
from core.speech_service import WhisperSpeechService
|
||||||
|
|
||||||
|
app = FastAPI(title="Dashboard Nanobot API")
|
||||||
|
logger = logging.getLogger("dashboard.backend")
|
||||||
|
LAST_ACTION_MAX_LENGTH = 16000
|
||||||
|
|
||||||
def create_app() -> FastAPI:
|
|
||||||
app = FastAPI(title="Dashboard Nanobot API")
|
|
||||||
|
|
||||||
speech_service = WhisperSpeechService()
|
def _normalize_last_action_text(value: Any) -> str:
|
||||||
app.state.docker_manager = docker_manager
|
text = str(value or "").replace("\r\n", "\n").replace("\r", "\n").strip()
|
||||||
app.state.speech_service = speech_service
|
if not text:
|
||||||
|
return ""
|
||||||
|
text = re.sub(r"\n{4,}", "\n\n\n", text)
|
||||||
|
return text[:LAST_ACTION_MAX_LENGTH]
|
||||||
|
|
||||||
app.add_middleware(AuthAccessMiddleware)
|
|
||||||
app.add_middleware(
|
|
||||||
CORSMiddleware,
|
|
||||||
allow_origins=list(CORS_ALLOWED_ORIGINS),
|
|
||||||
allow_methods=["*"],
|
|
||||||
allow_headers=["*"],
|
|
||||||
allow_credentials=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
app.include_router(panel_auth_router)
|
def _apply_log_noise_guard() -> None:
|
||||||
app.include_router(health_router)
|
for name in (
|
||||||
app.include_router(platform_router)
|
"httpx",
|
||||||
app.include_router(topic_router)
|
"httpcore",
|
||||||
app.include_router(system_router)
|
"uvicorn.access",
|
||||||
app.include_router(image_router)
|
"watchfiles.main",
|
||||||
app.include_router(skill_router)
|
"watchfiles.watcher",
|
||||||
app.include_router(chat_router)
|
):
|
||||||
app.include_router(chat_history_router)
|
logging.getLogger(name).setLevel(logging.WARNING)
|
||||||
app.include_router(bot_speech_router)
|
|
||||||
app.include_router(workspace_router)
|
|
||||||
app.include_router(bot_config_router)
|
|
||||||
app.include_router(bot_runtime_router)
|
|
||||||
app.include_router(bot_management_router)
|
|
||||||
app.include_router(bot_router)
|
|
||||||
|
|
||||||
os.makedirs(BOTS_WORKSPACE_ROOT, exist_ok=True)
|
|
||||||
os.makedirs(DATA_ROOT, exist_ok=True)
|
|
||||||
|
|
||||||
register_app_runtime(app)
|
_apply_log_noise_guard()
|
||||||
return app
|
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origins=["*"],
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
app.include_router(topic_router)
|
||||||
|
app.include_router(platform_router)
|
||||||
|
app.include_router(sys_router)
|
||||||
|
|
||||||
|
os.makedirs(BOTS_WORKSPACE_ROOT, exist_ok=True)
|
||||||
|
os.makedirs(DATA_ROOT, exist_ok=True)
|
||||||
|
|
||||||
|
docker_manager = BotDockerManager(host_data_root=BOTS_WORKSPACE_ROOT)
|
||||||
|
config_manager = BotConfigManager(host_data_root=BOTS_WORKSPACE_ROOT)
|
||||||
|
speech_service = WhisperSpeechService()
|
||||||
|
app.state.docker_manager = docker_manager
|
||||||
|
app.state.speech_service = speech_service
|
||||||
|
BOT_ID_PATTERN = re.compile(r"^[A-Za-z0-9_]+$")
|
||||||
|
|
||||||
|
runtime_assembly = assemble_app_runtime(
|
||||||
|
app=app,
|
||||||
|
logger=logger,
|
||||||
|
bots_workspace_root=BOTS_WORKSPACE_ROOT,
|
||||||
|
data_root=DATA_ROOT,
|
||||||
|
docker_manager=docker_manager,
|
||||||
|
config_manager=config_manager,
|
||||||
|
speech_service=speech_service,
|
||||||
|
bot_id_pattern=BOT_ID_PATTERN,
|
||||||
|
)
|
||||||
|
app.include_router(build_system_runtime_router(system_service=runtime_assembly.system_service))
|
||||||
|
|
||||||
|
|
||||||
|
@app.middleware("http")
|
||||||
|
async def bot_access_password_guard(request: Request, call_next):
|
||||||
|
return await runtime_assembly.dashboard_auth_service.guard(request, call_next)
|
||||||
|
|
||||||
|
|
||||||
|
@app.on_event("startup")
|
||||||
|
async def on_startup():
|
||||||
|
await runtime_assembly.app_lifecycle_service.on_startup()
|
||||||
|
|
|
||||||
|
|
@ -1,53 +1,482 @@
|
||||||
import asyncio
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Dict
|
||||||
from fastapi import FastAPI
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
|
from clients.edge.errors import is_expected_edge_offline_error, log_edge_failure, summarize_edge_exception
|
||||||
from core.cache import cache
|
from core.cache import cache
|
||||||
from core.database import engine, init_database
|
from core.database import engine, init_database
|
||||||
from core.docker_instance import docker_manager
|
from core.settings import (
|
||||||
from core.speech_service import inspect_speech_model_status
|
AGENT_MD_TEMPLATES_FILE,
|
||||||
from core.settings import DATABASE_URL_DISPLAY, REDIS_ENABLED
|
DATABASE_ECHO,
|
||||||
from models.bot import BotInstance
|
DATABASE_ENGINE,
|
||||||
from services.default_assets_service import validate_runtime_data_assets
|
DATABASE_URL_DISPLAY,
|
||||||
from services.platform_activity_service import prune_expired_activity_events
|
DEFAULT_AGENTS_MD,
|
||||||
from services.platform_settings_service import get_speech_runtime_settings
|
DEFAULT_BOT_SYSTEM_TIMEZONE,
|
||||||
from services.runtime_service import docker_callback, set_main_loop
|
DEFAULT_IDENTITY_MD,
|
||||||
|
DEFAULT_SOUL_MD,
|
||||||
|
DEFAULT_TOOLS_MD,
|
||||||
|
DEFAULT_USER_MD,
|
||||||
|
PROJECT_ROOT,
|
||||||
|
REDIS_ENABLED,
|
||||||
|
REDIS_PREFIX,
|
||||||
|
REDIS_URL,
|
||||||
|
TOPIC_PRESET_TEMPLATES,
|
||||||
|
TOPIC_PRESETS_TEMPLATES_FILE,
|
||||||
|
load_agent_md_templates,
|
||||||
|
load_topic_presets_template,
|
||||||
|
)
|
||||||
|
from providers.provision.edge import EdgeProvisionProvider
|
||||||
|
from providers.provision.local import LocalProvisionProvider
|
||||||
|
from providers.registry import ProviderRegistry
|
||||||
|
from providers.runtime.edge import EdgeRuntimeProvider
|
||||||
|
from providers.runtime.local import LocalRuntimeProvider
|
||||||
|
from providers.selector import get_provision_provider, get_runtime_provider
|
||||||
|
from providers.target import ProviderTarget, normalize_provider_target, provider_target_from_config, provider_target_to_dict
|
||||||
|
from providers.workspace.edge import EdgeWorkspaceProvider
|
||||||
|
from providers.workspace.local import LocalWorkspaceProvider
|
||||||
|
from services.app_lifecycle_service import AppLifecycleService
|
||||||
|
from services.bot_channel_service import BotChannelService
|
||||||
|
from services.bot_command_service import BotCommandService
|
||||||
|
from services.bot_config_state_service import BotConfigStateService
|
||||||
|
from services.bot_infra_service import BotInfraService
|
||||||
|
from services.bot_lifecycle_service import BotLifecycleService
|
||||||
|
from services.bot_message_service import BotMessageService
|
||||||
|
from services.bot_query_service import BotQueryService
|
||||||
|
from services.bot_runtime_snapshot_service import BotRuntimeSnapshotService
|
||||||
|
from services.dashboard_auth_service import DashboardAuthService
|
||||||
|
from services.image_service import ImageService
|
||||||
|
from services.node_registry_service import NodeRegistryService
|
||||||
|
from services.platform_activity_service import (
|
||||||
|
prune_expired_activity_events,
|
||||||
|
record_activity_event,
|
||||||
|
)
|
||||||
|
from services.platform_settings_service import (
|
||||||
|
get_chat_pull_page_size,
|
||||||
|
get_platform_settings_snapshot,
|
||||||
|
get_speech_runtime_settings,
|
||||||
|
)
|
||||||
|
from services.platform_usage_service import (
|
||||||
|
bind_usage_message,
|
||||||
|
create_usage_request,
|
||||||
|
fail_latest_usage,
|
||||||
|
finalize_usage_from_packet,
|
||||||
|
)
|
||||||
|
from services.provider_test_service import ProviderTestService
|
||||||
|
from services.runtime_event_service import RuntimeEventService
|
||||||
|
from services.runtime_service import RuntimeService
|
||||||
|
from services.skill_service import SkillService
|
||||||
|
from services.system_service import SystemService
|
||||||
|
from services.topic_runtime import publish_runtime_topic_packet
|
||||||
|
from services.workspace_service import WorkspaceService
|
||||||
|
from bootstrap.app_runtime_support import (
|
||||||
|
attach_runtime_services,
|
||||||
|
build_image_runtime_service,
|
||||||
|
build_speech_transcription_runtime_service,
|
||||||
|
build_system_runtime_service,
|
||||||
|
include_dashboard_api,
|
||||||
|
reconcile_image_registry,
|
||||||
|
register_provider_runtime,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def reload_platform_runtime(app: FastAPI) -> None:
|
@dataclass
|
||||||
cache.delete_prefix("")
|
class AppRuntimeAssembly:
|
||||||
speech_service = getattr(app.state, "speech_service", None)
|
dashboard_auth_service: DashboardAuthService
|
||||||
if speech_service is not None and hasattr(speech_service, "reset_runtime"):
|
system_service: SystemService
|
||||||
speech_service.reset_runtime()
|
app_lifecycle_service: AppLifecycleService
|
||||||
|
def assemble_app_runtime(
|
||||||
|
*,
|
||||||
|
app: Any,
|
||||||
|
logger: Any,
|
||||||
|
bots_workspace_root: str,
|
||||||
|
data_root: str,
|
||||||
|
docker_manager: Any,
|
||||||
|
config_manager: Any,
|
||||||
|
speech_service: Any,
|
||||||
|
bot_id_pattern: Any,
|
||||||
|
) -> AppRuntimeAssembly:
|
||||||
|
node_registry_service = NodeRegistryService()
|
||||||
|
skill_service = SkillService()
|
||||||
|
dashboard_auth_service = DashboardAuthService(engine=engine)
|
||||||
|
provider_registry = ProviderRegistry()
|
||||||
|
|
||||||
|
bot_infra_service = BotInfraService(
|
||||||
|
app=app,
|
||||||
|
engine=engine,
|
||||||
|
config_manager=config_manager,
|
||||||
|
node_registry_service=node_registry_service,
|
||||||
|
logger=logger,
|
||||||
|
bots_workspace_root=bots_workspace_root,
|
||||||
|
default_soul_md=DEFAULT_SOUL_MD,
|
||||||
|
default_agents_md=DEFAULT_AGENTS_MD,
|
||||||
|
default_user_md=DEFAULT_USER_MD,
|
||||||
|
default_tools_md=DEFAULT_TOOLS_MD,
|
||||||
|
default_identity_md=DEFAULT_IDENTITY_MD,
|
||||||
|
default_bot_system_timezone=DEFAULT_BOT_SYSTEM_TIMEZONE,
|
||||||
|
normalize_provider_target=normalize_provider_target,
|
||||||
|
provider_target_from_config=provider_target_from_config,
|
||||||
|
provider_target_to_dict=provider_target_to_dict,
|
||||||
|
resolve_provider_bundle_key=lambda target: provider_registry.resolve_bundle_key(target),
|
||||||
|
get_provision_provider=get_provision_provider,
|
||||||
|
read_env_store=lambda bot_id: bot_config_state_service.read_env_store(bot_id),
|
||||||
|
read_bot_runtime_snapshot=lambda bot: _read_bot_runtime_snapshot(bot),
|
||||||
|
normalize_media_list=lambda raw, bot_id: _normalize_media_list(raw, bot_id),
|
||||||
|
)
|
||||||
|
node_registry_service.register_node(bot_infra_service.local_managed_node())
|
||||||
|
app.state.node_registry_service = node_registry_service
|
||||||
|
|
||||||
def register_app_runtime(app: FastAPI) -> None:
|
_read_bot_config = bot_infra_service.read_bot_config
|
||||||
@app.on_event("startup")
|
_write_bot_config = bot_infra_service.write_bot_config
|
||||||
async def _on_startup() -> None:
|
_default_provider_target = bot_infra_service.default_provider_target
|
||||||
redis_state = "Disabled"
|
_read_bot_provider_target = bot_infra_service.read_bot_provider_target
|
||||||
if REDIS_ENABLED:
|
_resolve_bot_provider_target_for_instance = bot_infra_service.resolve_bot_provider_target_for_instance
|
||||||
redis_state = "Connected" if cache.enabled else f"Unavailable ({cache.status})"
|
_clear_provider_target_override = bot_infra_service.clear_provider_target_override
|
||||||
print(
|
_apply_provider_target_to_bot = bot_infra_service.apply_provider_target_to_bot
|
||||||
f"🚀 Dashboard Backend 启动中... (DB: {DATABASE_URL_DISPLAY}, REDIS: {redis_state})"
|
_local_managed_node = bot_infra_service.local_managed_node
|
||||||
)
|
_provider_target_from_node = bot_infra_service.provider_target_from_node
|
||||||
current_loop = asyncio.get_running_loop()
|
_node_display_name = bot_infra_service.node_display_name
|
||||||
app.state.main_loop = current_loop
|
_node_metadata = bot_infra_service.node_metadata
|
||||||
set_main_loop(current_loop)
|
_serialize_provider_target_summary = bot_infra_service.serialize_provider_target_summary
|
||||||
validate_runtime_data_assets()
|
_resolve_edge_client = bot_infra_service.resolve_edge_client
|
||||||
print("[init] data 目录校验通过")
|
_resolve_edge_state_context = bot_infra_service.resolve_edge_state_context
|
||||||
init_database()
|
_read_edge_state_data = bot_infra_service.read_edge_state_data
|
||||||
with Session(engine) as session:
|
_write_edge_state_data = bot_infra_service.write_edge_state_data
|
||||||
prune_expired_activity_events(session, force=True)
|
_read_bot_resources = bot_infra_service.read_bot_resources
|
||||||
bots = session.exec(select(BotInstance)).all()
|
_migrate_bot_resources_store = bot_infra_service.migrate_bot_resources_store
|
||||||
for bot in bots:
|
_normalize_channel_extra = bot_infra_service.normalize_channel_extra
|
||||||
docker_manager.ensure_monitor(bot.id, docker_callback)
|
_read_global_delivery_flags = bot_infra_service.read_global_delivery_flags
|
||||||
speech_settings = get_speech_runtime_settings()
|
_channel_api_to_cfg = bot_infra_service.channel_api_to_cfg
|
||||||
model_status = inspect_speech_model_status()
|
_get_bot_channels_from_config = bot_infra_service.get_bot_channels_from_config
|
||||||
if speech_settings["enabled"]:
|
_normalize_initial_channels = bot_infra_service.normalize_initial_channels
|
||||||
if model_status["ready"]:
|
_parse_message_media = bot_infra_service.parse_message_media
|
||||||
print(f"🎙️ 语音识别模型就绪: {model_status['resolved_path']}")
|
_normalize_env_params = bot_infra_service.normalize_env_params
|
||||||
else:
|
_get_default_system_timezone = bot_infra_service.get_default_system_timezone
|
||||||
hint = f",请将模型文件放到 {model_status['expected_path']}" if model_status["expected_path"] else ""
|
_normalize_system_timezone = bot_infra_service.normalize_system_timezone
|
||||||
print(f"⚠️ 语音识别模型未就绪: {model_status['message']}{hint}")
|
_resolve_bot_env_params = bot_infra_service.resolve_bot_env_params
|
||||||
print("✅ 启动自检完成")
|
_safe_float = bot_infra_service.safe_float
|
||||||
|
_safe_int = bot_infra_service.safe_int
|
||||||
|
_normalize_resource_limits = bot_infra_service.normalize_resource_limits
|
||||||
|
_sync_workspace_channels = bot_infra_service.sync_workspace_channels
|
||||||
|
_set_bot_provider_target = bot_infra_service.set_bot_provider_target
|
||||||
|
_sync_bot_workspace_via_provider = bot_infra_service.sync_bot_workspace_via_provider
|
||||||
|
_workspace_root = bot_infra_service.workspace_root
|
||||||
|
_cron_store_path = bot_infra_service.cron_store_path
|
||||||
|
_env_store_path = bot_infra_service.env_store_path
|
||||||
|
_clear_bot_sessions = bot_infra_service.clear_bot_sessions
|
||||||
|
_clear_bot_dashboard_direct_session = bot_infra_service.clear_bot_dashboard_direct_session
|
||||||
|
_ensure_provider_target_supported = bot_infra_service.ensure_provider_target_supported
|
||||||
|
_resolve_workspace_path = bot_infra_service.resolve_workspace_path
|
||||||
|
_calc_dir_size_bytes = bot_infra_service.calc_dir_size_bytes
|
||||||
|
_is_video_attachment_path = bot_infra_service.is_video_attachment_path
|
||||||
|
_is_visual_attachment_path = bot_infra_service.is_visual_attachment_path
|
||||||
|
|
||||||
|
bot_config_state_service = BotConfigStateService(
|
||||||
|
read_edge_state_data=_read_edge_state_data,
|
||||||
|
write_edge_state_data=_write_edge_state_data,
|
||||||
|
read_bot_config=_read_bot_config,
|
||||||
|
write_bot_config=_write_bot_config,
|
||||||
|
invalidate_bot_detail_cache=lambda *args, **kwargs: _invalidate_bot_detail_cache(*args, **kwargs),
|
||||||
|
env_store_path=_env_store_path,
|
||||||
|
cron_store_path=_cron_store_path,
|
||||||
|
normalize_env_params=_normalize_env_params,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _write_env_store(bot_id: str, env_params: Dict[str, str]) -> None:
|
||||||
|
bot_config_state_service.write_env_store(bot_id, env_params)
|
||||||
|
|
||||||
|
local_provision_provider = LocalProvisionProvider(sync_workspace_func=_sync_workspace_channels)
|
||||||
|
local_runtime_provider = LocalRuntimeProvider(
|
||||||
|
docker_manager=docker_manager,
|
||||||
|
on_state_change=lambda *args, **kwargs: docker_callback(*args, **kwargs),
|
||||||
|
provision_provider=local_provision_provider,
|
||||||
|
read_runtime_snapshot=lambda *args, **kwargs: _read_bot_runtime_snapshot(*args, **kwargs),
|
||||||
|
resolve_env_params=_resolve_bot_env_params,
|
||||||
|
write_env_store=_write_env_store,
|
||||||
|
invalidate_bot_cache=lambda *args, **kwargs: _invalidate_bot_detail_cache(*args, **kwargs),
|
||||||
|
record_agent_loop_ready_warning=lambda *args, **kwargs: _record_agent_loop_ready_warning(*args, **kwargs),
|
||||||
|
safe_float=_safe_float,
|
||||||
|
safe_int=_safe_int,
|
||||||
|
)
|
||||||
|
local_workspace_provider = LocalWorkspaceProvider()
|
||||||
|
edge_provision_provider = EdgeProvisionProvider(
|
||||||
|
read_provider_target=_read_bot_provider_target,
|
||||||
|
resolve_edge_client=_resolve_edge_client,
|
||||||
|
read_runtime_snapshot=lambda *args, **kwargs: _read_bot_runtime_snapshot(*args, **kwargs),
|
||||||
|
read_bot_channels=_get_bot_channels_from_config,
|
||||||
|
read_node_metadata=_node_metadata,
|
||||||
|
)
|
||||||
|
edge_runtime_provider = EdgeRuntimeProvider(
|
||||||
|
read_provider_target=_read_bot_provider_target,
|
||||||
|
resolve_edge_client=_resolve_edge_client,
|
||||||
|
read_runtime_snapshot=lambda *args, **kwargs: _read_bot_runtime_snapshot(*args, **kwargs),
|
||||||
|
resolve_env_params=_resolve_bot_env_params,
|
||||||
|
read_bot_channels=_get_bot_channels_from_config,
|
||||||
|
read_node_metadata=_node_metadata,
|
||||||
|
)
|
||||||
|
edge_workspace_provider = EdgeWorkspaceProvider(
|
||||||
|
read_provider_target=_read_bot_provider_target,
|
||||||
|
resolve_edge_client=_resolve_edge_client,
|
||||||
|
read_node_metadata=_node_metadata,
|
||||||
|
)
|
||||||
|
local_provider_target = ProviderTarget(
|
||||||
|
node_id="local",
|
||||||
|
transport_kind="edge",
|
||||||
|
runtime_kind="docker",
|
||||||
|
core_adapter="nanobot",
|
||||||
|
)
|
||||||
|
register_provider_runtime(
|
||||||
|
app=app,
|
||||||
|
provider_registry=provider_registry,
|
||||||
|
local_provider_target=local_provider_target,
|
||||||
|
local_provision_provider=local_provision_provider,
|
||||||
|
local_runtime_provider=local_runtime_provider,
|
||||||
|
local_workspace_provider=local_workspace_provider,
|
||||||
|
edge_provision_provider=edge_provision_provider,
|
||||||
|
edge_runtime_provider=edge_runtime_provider,
|
||||||
|
edge_workspace_provider=edge_workspace_provider,
|
||||||
|
resolve_bot_provider_target_for_instance=_resolve_bot_provider_target_for_instance,
|
||||||
|
resolve_edge_client=_resolve_edge_client,
|
||||||
|
)
|
||||||
|
|
||||||
|
bot_runtime_snapshot_service = BotRuntimeSnapshotService(
|
||||||
|
engine=engine,
|
||||||
|
logger=logger,
|
||||||
|
docker_manager=docker_manager,
|
||||||
|
default_soul_md=DEFAULT_SOUL_MD,
|
||||||
|
default_agents_md=DEFAULT_AGENTS_MD,
|
||||||
|
default_user_md=DEFAULT_USER_MD,
|
||||||
|
default_tools_md=DEFAULT_TOOLS_MD,
|
||||||
|
default_identity_md=DEFAULT_IDENTITY_MD,
|
||||||
|
workspace_root=_workspace_root,
|
||||||
|
resolve_edge_state_context=_resolve_edge_state_context,
|
||||||
|
read_bot_config=_read_bot_config,
|
||||||
|
resolve_bot_env_params=_resolve_bot_env_params,
|
||||||
|
resolve_bot_provider_target_for_instance=_resolve_bot_provider_target_for_instance,
|
||||||
|
read_global_delivery_flags=_read_global_delivery_flags,
|
||||||
|
safe_float=_safe_float,
|
||||||
|
safe_int=_safe_int,
|
||||||
|
get_default_system_timezone=_get_default_system_timezone,
|
||||||
|
read_bot_resources=_read_bot_resources,
|
||||||
|
node_display_name=_node_display_name,
|
||||||
|
get_runtime_provider=get_runtime_provider,
|
||||||
|
invalidate_bot_detail_cache=lambda *args, **kwargs: _invalidate_bot_detail_cache(*args, **kwargs),
|
||||||
|
record_activity_event=record_activity_event,
|
||||||
|
)
|
||||||
|
_read_bot_runtime_snapshot = bot_runtime_snapshot_service.read_bot_runtime_snapshot
|
||||||
|
_serialize_bot = bot_runtime_snapshot_service.serialize_bot
|
||||||
|
_serialize_bot_list_item = bot_runtime_snapshot_service.serialize_bot_list_item
|
||||||
|
_refresh_bot_runtime_status = bot_runtime_snapshot_service.refresh_bot_runtime_status
|
||||||
|
_record_agent_loop_ready_warning = bot_runtime_snapshot_service.record_agent_loop_ready_warning
|
||||||
|
|
||||||
|
runtime_event_service = RuntimeEventService(
|
||||||
|
app=app,
|
||||||
|
engine=engine,
|
||||||
|
cache=cache,
|
||||||
|
logger=logger,
|
||||||
|
publish_runtime_topic_packet=publish_runtime_topic_packet,
|
||||||
|
bind_usage_message=bind_usage_message,
|
||||||
|
finalize_usage_from_packet=finalize_usage_from_packet,
|
||||||
|
workspace_root=_workspace_root,
|
||||||
|
parse_message_media=_parse_message_media,
|
||||||
|
)
|
||||||
|
_normalize_media_list = runtime_event_service.normalize_media_list
|
||||||
|
_persist_runtime_packet = runtime_event_service.persist_runtime_packet
|
||||||
|
_broadcast_runtime_packet = runtime_event_service.broadcast_runtime_packet
|
||||||
|
docker_callback = runtime_event_service.docker_callback
|
||||||
|
_cache_key_bots_list = runtime_event_service.cache_key_bots_list
|
||||||
|
_cache_key_bot_detail = runtime_event_service.cache_key_bot_detail
|
||||||
|
_cache_key_bot_messages = runtime_event_service.cache_key_bot_messages
|
||||||
|
_cache_key_bot_messages_page = runtime_event_service.cache_key_bot_messages_page
|
||||||
|
_serialize_bot_message_row = runtime_event_service.serialize_bot_message_row
|
||||||
|
_resolve_local_day_range = runtime_event_service.resolve_local_day_range
|
||||||
|
_cache_key_images = runtime_event_service.cache_key_images
|
||||||
|
_invalidate_bot_detail_cache = runtime_event_service.invalidate_bot_detail_cache
|
||||||
|
_invalidate_bot_messages_cache = runtime_event_service.invalidate_bot_messages_cache
|
||||||
|
_invalidate_images_cache = runtime_event_service.invalidate_images_cache
|
||||||
|
|
||||||
|
bot_command_service = BotCommandService(
|
||||||
|
read_runtime_snapshot=_read_bot_runtime_snapshot,
|
||||||
|
normalize_media_list=_normalize_media_list,
|
||||||
|
resolve_workspace_path=_resolve_workspace_path,
|
||||||
|
is_visual_attachment_path=_is_visual_attachment_path,
|
||||||
|
is_video_attachment_path=_is_video_attachment_path,
|
||||||
|
create_usage_request=create_usage_request,
|
||||||
|
record_activity_event=record_activity_event,
|
||||||
|
fail_latest_usage=fail_latest_usage,
|
||||||
|
persist_runtime_packet=_persist_runtime_packet,
|
||||||
|
get_main_loop=lambda app_state: getattr(app_state, "main_loop", None),
|
||||||
|
broadcast_packet=_broadcast_runtime_packet,
|
||||||
|
)
|
||||||
|
workspace_service = WorkspaceService()
|
||||||
|
runtime_service = RuntimeService(
|
||||||
|
command_service=bot_command_service,
|
||||||
|
resolve_runtime_provider=get_runtime_provider,
|
||||||
|
clear_bot_sessions=_clear_bot_sessions,
|
||||||
|
clear_dashboard_direct_session_file=_clear_bot_dashboard_direct_session,
|
||||||
|
invalidate_bot_detail_cache=_invalidate_bot_detail_cache,
|
||||||
|
invalidate_bot_messages_cache=_invalidate_bot_messages_cache,
|
||||||
|
record_activity_event=record_activity_event,
|
||||||
|
)
|
||||||
|
app_lifecycle_service = AppLifecycleService(
|
||||||
|
app=app,
|
||||||
|
engine=engine,
|
||||||
|
cache=cache,
|
||||||
|
logger=logger,
|
||||||
|
project_root=PROJECT_ROOT,
|
||||||
|
database_engine=DATABASE_ENGINE,
|
||||||
|
database_echo=DATABASE_ECHO,
|
||||||
|
database_url_display=DATABASE_URL_DISPLAY,
|
||||||
|
redis_enabled=REDIS_ENABLED,
|
||||||
|
init_database=init_database,
|
||||||
|
node_registry_service=node_registry_service,
|
||||||
|
local_managed_node=_local_managed_node,
|
||||||
|
prune_expired_activity_events=prune_expired_activity_events,
|
||||||
|
migrate_bot_resources_store=_migrate_bot_resources_store,
|
||||||
|
resolve_bot_provider_target_for_instance=_resolve_bot_provider_target_for_instance,
|
||||||
|
default_provider_target=_default_provider_target,
|
||||||
|
set_bot_provider_target=_set_bot_provider_target,
|
||||||
|
apply_provider_target_to_bot=_apply_provider_target_to_bot,
|
||||||
|
normalize_provider_target=normalize_provider_target,
|
||||||
|
runtime_service=runtime_service,
|
||||||
|
runtime_event_service=runtime_event_service,
|
||||||
|
clear_provider_target_overrides=bot_infra_service.clear_provider_target_overrides,
|
||||||
|
)
|
||||||
|
bot_query_service = BotQueryService(
|
||||||
|
cache=cache,
|
||||||
|
cache_key_bots_list=_cache_key_bots_list,
|
||||||
|
cache_key_bot_detail=_cache_key_bot_detail,
|
||||||
|
refresh_bot_runtime_status=_refresh_bot_runtime_status,
|
||||||
|
serialize_bot=_serialize_bot,
|
||||||
|
serialize_bot_list_item=_serialize_bot_list_item,
|
||||||
|
read_bot_resources=_read_bot_resources,
|
||||||
|
resolve_bot_provider_target=_resolve_bot_provider_target_for_instance,
|
||||||
|
get_runtime_provider=get_runtime_provider,
|
||||||
|
workspace_root=_workspace_root,
|
||||||
|
calc_dir_size_bytes=_calc_dir_size_bytes,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
bot_channel_service = BotChannelService(
|
||||||
|
read_bot_config=_read_bot_config,
|
||||||
|
write_bot_config=_write_bot_config,
|
||||||
|
sync_bot_workspace_via_provider=_sync_bot_workspace_via_provider,
|
||||||
|
invalidate_bot_detail_cache=_invalidate_bot_detail_cache,
|
||||||
|
get_bot_channels_from_config=_get_bot_channels_from_config,
|
||||||
|
normalize_channel_extra=_normalize_channel_extra,
|
||||||
|
channel_api_to_cfg=_channel_api_to_cfg,
|
||||||
|
read_global_delivery_flags=_read_global_delivery_flags,
|
||||||
|
)
|
||||||
|
bot_message_service = BotMessageService(
|
||||||
|
cache=cache,
|
||||||
|
cache_key_bot_messages=_cache_key_bot_messages,
|
||||||
|
cache_key_bot_messages_page=_cache_key_bot_messages_page,
|
||||||
|
serialize_bot_message_row=_serialize_bot_message_row,
|
||||||
|
resolve_local_day_range=_resolve_local_day_range,
|
||||||
|
invalidate_bot_messages_cache=_invalidate_bot_messages_cache,
|
||||||
|
get_chat_pull_page_size=get_chat_pull_page_size,
|
||||||
|
)
|
||||||
|
speech_transcription_service = build_speech_transcription_runtime_service(
|
||||||
|
data_root=data_root,
|
||||||
|
speech_service=speech_service,
|
||||||
|
get_speech_runtime_settings=get_speech_runtime_settings,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
image_service = build_image_runtime_service(
|
||||||
|
cache=cache,
|
||||||
|
cache_key_images=_cache_key_images,
|
||||||
|
invalidate_images_cache=_invalidate_images_cache,
|
||||||
|
docker_manager=docker_manager,
|
||||||
|
reconcile_image_registry_fn=lambda session: reconcile_image_registry(session, docker_manager=docker_manager),
|
||||||
|
)
|
||||||
|
provider_test_service = ProviderTestService()
|
||||||
|
system_service = build_system_runtime_service(
|
||||||
|
engine=engine,
|
||||||
|
cache=cache,
|
||||||
|
database_engine=DATABASE_ENGINE,
|
||||||
|
redis_enabled=REDIS_ENABLED,
|
||||||
|
redis_url=REDIS_URL,
|
||||||
|
redis_prefix=REDIS_PREFIX,
|
||||||
|
agent_md_templates_file=str(AGENT_MD_TEMPLATES_FILE),
|
||||||
|
topic_presets_templates_file=str(TOPIC_PRESETS_TEMPLATES_FILE),
|
||||||
|
default_soul_md=DEFAULT_SOUL_MD,
|
||||||
|
default_agents_md=DEFAULT_AGENTS_MD,
|
||||||
|
default_user_md=DEFAULT_USER_MD,
|
||||||
|
default_tools_md=DEFAULT_TOOLS_MD,
|
||||||
|
default_identity_md=DEFAULT_IDENTITY_MD,
|
||||||
|
topic_preset_templates=TOPIC_PRESET_TEMPLATES,
|
||||||
|
get_default_system_timezone=_get_default_system_timezone,
|
||||||
|
load_agent_md_templates=load_agent_md_templates,
|
||||||
|
load_topic_presets_template=load_topic_presets_template,
|
||||||
|
get_platform_settings_snapshot=get_platform_settings_snapshot,
|
||||||
|
get_speech_runtime_settings=get_speech_runtime_settings,
|
||||||
|
)
|
||||||
|
bot_lifecycle_service = BotLifecycleService(
|
||||||
|
bot_id_pattern=bot_id_pattern,
|
||||||
|
runtime_service=runtime_service,
|
||||||
|
refresh_bot_runtime_status=_refresh_bot_runtime_status,
|
||||||
|
resolve_bot_provider_target=_resolve_bot_provider_target_for_instance,
|
||||||
|
provider_target_from_node=_provider_target_from_node,
|
||||||
|
default_provider_target=_default_provider_target,
|
||||||
|
ensure_provider_target_supported=_ensure_provider_target_supported,
|
||||||
|
require_ready_image=image_service.require_ready_image,
|
||||||
|
sync_bot_workspace_via_provider=_sync_bot_workspace_via_provider,
|
||||||
|
apply_provider_target_to_bot=_apply_provider_target_to_bot,
|
||||||
|
serialize_provider_target_summary=_serialize_provider_target_summary,
|
||||||
|
serialize_bot=_serialize_bot,
|
||||||
|
node_display_name=_node_display_name,
|
||||||
|
invalidate_bot_detail_cache=_invalidate_bot_detail_cache,
|
||||||
|
record_activity_event=record_activity_event,
|
||||||
|
normalize_env_params=_normalize_env_params,
|
||||||
|
normalize_system_timezone=_normalize_system_timezone,
|
||||||
|
normalize_resource_limits=_normalize_resource_limits,
|
||||||
|
write_env_store=_write_env_store,
|
||||||
|
resolve_bot_env_params=_resolve_bot_env_params,
|
||||||
|
clear_provider_target_override=_clear_provider_target_override,
|
||||||
|
normalize_initial_channels=_normalize_initial_channels,
|
||||||
|
is_expected_edge_offline_error=is_expected_edge_offline_error,
|
||||||
|
summarize_edge_exception=summarize_edge_exception,
|
||||||
|
resolve_edge_client=_resolve_edge_client,
|
||||||
|
node_metadata=_node_metadata,
|
||||||
|
log_edge_failure=log_edge_failure,
|
||||||
|
invalidate_bot_messages_cache=_invalidate_bot_messages_cache,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
|
||||||
|
attach_runtime_services(
|
||||||
|
app=app,
|
||||||
|
bot_command_service=bot_command_service,
|
||||||
|
bot_lifecycle_service=bot_lifecycle_service,
|
||||||
|
app_lifecycle_service=app_lifecycle_service,
|
||||||
|
bot_query_service=bot_query_service,
|
||||||
|
bot_channel_service=bot_channel_service,
|
||||||
|
bot_message_service=bot_message_service,
|
||||||
|
bot_runtime_snapshot_service=bot_runtime_snapshot_service,
|
||||||
|
image_service=image_service,
|
||||||
|
provider_test_service=provider_test_service,
|
||||||
|
runtime_event_service=runtime_event_service,
|
||||||
|
speech_transcription_service=speech_transcription_service,
|
||||||
|
system_service=system_service,
|
||||||
|
workspace_service=workspace_service,
|
||||||
|
runtime_service=runtime_service,
|
||||||
|
)
|
||||||
|
include_dashboard_api(
|
||||||
|
app=app,
|
||||||
|
image_service=image_service,
|
||||||
|
provider_test_service=provider_test_service,
|
||||||
|
bot_lifecycle_service=bot_lifecycle_service,
|
||||||
|
bot_query_service=bot_query_service,
|
||||||
|
bot_channel_service=bot_channel_service,
|
||||||
|
skill_service=skill_service,
|
||||||
|
bot_config_state_service=bot_config_state_service,
|
||||||
|
runtime_service=runtime_service,
|
||||||
|
bot_message_service=bot_message_service,
|
||||||
|
workspace_service=workspace_service,
|
||||||
|
speech_transcription_service=speech_transcription_service,
|
||||||
|
app_lifecycle_service=app_lifecycle_service,
|
||||||
|
resolve_edge_state_context=_resolve_edge_state_context,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
|
||||||
|
return AppRuntimeAssembly(
|
||||||
|
dashboard_auth_service=dashboard_auth_service,
|
||||||
|
system_service=system_service,
|
||||||
|
app_lifecycle_service=app_lifecycle_service,
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,231 @@
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from api.dashboard_router import build_dashboard_router
|
||||||
|
from models.bot import NanobotImage
|
||||||
|
from services.image_service import ImageService
|
||||||
|
from services.speech_transcription_service import SpeechTranscriptionService
|
||||||
|
from services.system_service import SystemService
|
||||||
|
|
||||||
|
|
||||||
|
def reconcile_image_registry(session: Session, *, docker_manager: Any) -> None:
    """Align persisted NanobotImage rows with the images Docker actually has.

    Rows whose tag is present in Docker are marked READY (refreshing the
    stored image id when the Docker client can resolve it); all other rows
    are marked UNKNOWN. All changes are committed in one transaction.
    """
    for record in session.exec(select(NanobotImage)).all():
        if not docker_manager.has_image(record.tag):
            record.status = "UNKNOWN"
            session.add(record)
            continue
        try:
            client = docker_manager.client
            if client:
                resolved = client.images.get(record.tag)
                if resolved:
                    record.image_id = resolved.id
        except Exception:
            # Best effort: keep the previously stored image id on lookup failure.
            pass
        record.status = "READY"
        session.add(record)
    session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def register_provider_runtime(
    *,
    app: Any,
    provider_registry: Any,
    local_provider_target: Any,
    local_provision_provider: Any,
    local_runtime_provider: Any,
    local_workspace_provider: Any,
    edge_provision_provider: Any,
    edge_runtime_provider: Any,
    edge_workspace_provider: Any,
    resolve_bot_provider_target_for_instance: Any,
    resolve_edge_client: Any,
) -> None:
    """Register provider bundles and publish provider state on ``app.state``.

    The local bundle is registered under the local target's key. The edge
    bundle is identical for every supported edge runtime kind, so it is
    registered once per kind ("docker", then "native") instead of duplicating
    the call site. Defaults and resolver callables are then exposed on
    ``app.state`` for request handlers.
    """
    provider_registry.register_bundle(
        key=local_provider_target.key,
        runtime_provider=local_runtime_provider,
        workspace_provider=local_workspace_provider,
        provision_provider=local_provision_provider,
    )
    # Same edge providers for both runtime kinds; only the key differs.
    target_cls = type(local_provider_target)
    for runtime_kind in ("docker", "native"):
        provider_registry.register_bundle(
            key=target_cls(
                node_id="local",
                transport_kind="edge",
                runtime_kind=runtime_kind,
                core_adapter="nanobot",
            ).key,
            runtime_provider=edge_runtime_provider,
            workspace_provider=edge_workspace_provider,
            provision_provider=edge_provision_provider,
        )
    # Defaults mirror the local target so callers can fall back to it.
    app.state.provider_default_node_id = local_provider_target.node_id
    app.state.provider_default_transport_kind = local_provider_target.transport_kind
    app.state.provider_default_runtime_kind = local_provider_target.runtime_kind
    app.state.provider_default_core_adapter = local_provider_target.core_adapter
    app.state.provider_registry = provider_registry
    app.state.resolve_bot_provider_target = resolve_bot_provider_target_for_instance
    app.state.resolve_edge_client = resolve_edge_client
    app.state.edge_provision_provider = edge_provision_provider
    app.state.edge_runtime_provider = edge_runtime_provider
    app.state.edge_workspace_provider = edge_workspace_provider
    app.state.provision_provider = local_provision_provider
    app.state.runtime_provider = local_runtime_provider
    app.state.workspace_provider = local_workspace_provider
|
||||||
|
|
||||||
|
|
||||||
|
def build_speech_transcription_runtime_service(
    *,
    data_root: str,
    speech_service: Any,
    get_speech_runtime_settings: Any,
    logger: Any,
) -> SpeechTranscriptionService:
    """Construct the speech-transcription service from its runtime collaborators."""
    service_kwargs = {
        "data_root": data_root,
        "speech_service": speech_service,
        "get_speech_runtime_settings": get_speech_runtime_settings,
        "logger": logger,
    }
    return SpeechTranscriptionService(**service_kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def build_image_runtime_service(
    *,
    cache: Any,
    cache_key_images: Any,
    invalidate_images_cache: Any,
    docker_manager: Any,
    reconcile_image_registry_fn: Any,
) -> ImageService:
    """Assemble the image service from its caching and Docker collaborators."""
    service_kwargs = {
        "cache": cache,
        "cache_key_images": cache_key_images,
        "invalidate_images_cache": invalidate_images_cache,
        # The service expects the callable under its own parameter name.
        "reconcile_image_registry": reconcile_image_registry_fn,
        "docker_manager": docker_manager,
    }
    return ImageService(**service_kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def build_system_runtime_service(
    *,
    engine: Any,
    cache: Any,
    database_engine: str,
    redis_enabled: bool,
    redis_url: str,
    redis_prefix: str,
    agent_md_templates_file: str,
    topic_presets_templates_file: str,
    default_soul_md: str,
    default_agents_md: str,
    default_user_md: str,
    default_tools_md: str,
    default_identity_md: str,
    topic_preset_templates: Any,
    get_default_system_timezone: Any,
    load_agent_md_templates: Any,
    load_topic_presets_template: Any,
    get_platform_settings_snapshot: Any,
    get_speech_runtime_settings: Any,
) -> SystemService:
    """Build the system service, forwarding every collaborator verbatim.

    Pure pass-through factory: each keyword argument maps 1:1 onto the
    SystemService constructor parameter of the same name.
    """
    service_config = {
        "engine": engine,
        "cache": cache,
        "database_engine": database_engine,
        "redis_enabled": redis_enabled,
        "redis_url": redis_url,
        "redis_prefix": redis_prefix,
        "agent_md_templates_file": agent_md_templates_file,
        "topic_presets_templates_file": topic_presets_templates_file,
        "default_soul_md": default_soul_md,
        "default_agents_md": default_agents_md,
        "default_user_md": default_user_md,
        "default_tools_md": default_tools_md,
        "default_identity_md": default_identity_md,
        "topic_preset_templates": topic_preset_templates,
        "get_default_system_timezone": get_default_system_timezone,
        "load_agent_md_templates": load_agent_md_templates,
        "load_topic_presets_template": load_topic_presets_template,
        "get_platform_settings_snapshot": get_platform_settings_snapshot,
        "get_speech_runtime_settings": get_speech_runtime_settings,
    }
    return SystemService(**service_config)
|
||||||
|
|
||||||
|
|
||||||
|
def attach_runtime_services(
    *,
    app: Any,
    bot_command_service: Any,
    bot_lifecycle_service: Any,
    app_lifecycle_service: Any,
    bot_query_service: Any,
    bot_channel_service: Any,
    bot_message_service: Any,
    bot_runtime_snapshot_service: Any,
    image_service: Any,
    provider_test_service: Any,
    runtime_event_service: Any,
    speech_transcription_service: Any,
    system_service: Any,
    workspace_service: Any,
    runtime_service: Any,
) -> None:
    """Expose each runtime service on ``app.state`` under its parameter name."""
    services = {
        "bot_command_service": bot_command_service,
        "bot_lifecycle_service": bot_lifecycle_service,
        "app_lifecycle_service": app_lifecycle_service,
        "bot_query_service": bot_query_service,
        "bot_channel_service": bot_channel_service,
        "bot_message_service": bot_message_service,
        "bot_runtime_snapshot_service": bot_runtime_snapshot_service,
        "image_service": image_service,
        "provider_test_service": provider_test_service,
        "runtime_event_service": runtime_event_service,
        "speech_transcription_service": speech_transcription_service,
        "system_service": system_service,
        "workspace_service": workspace_service,
        "runtime_service": runtime_service,
    }
    for attribute, service in services.items():
        setattr(app.state, attribute, service)
|
||||||
|
|
||||||
|
|
||||||
|
def include_dashboard_api(
    *,
    app: Any,
    image_service: Any,
    provider_test_service: Any,
    bot_lifecycle_service: Any,
    bot_query_service: Any,
    bot_channel_service: Any,
    skill_service: Any,
    bot_config_state_service: Any,
    runtime_service: Any,
    bot_message_service: Any,
    workspace_service: Any,
    speech_transcription_service: Any,
    app_lifecycle_service: Any,
    resolve_edge_state_context: Any,
    logger: Any,
) -> None:
    """Build the dashboard API router from the given services and mount it on *app*."""
    dashboard_router = build_dashboard_router(
        image_service=image_service,
        provider_test_service=provider_test_service,
        bot_lifecycle_service=bot_lifecycle_service,
        bot_query_service=bot_query_service,
        bot_channel_service=bot_channel_service,
        skill_service=skill_service,
        bot_config_state_service=bot_config_state_service,
        runtime_service=runtime_service,
        bot_message_service=bot_message_service,
        workspace_service=workspace_service,
        speech_transcription_service=speech_transcription_service,
        app_lifecycle_service=app_lifecycle_service,
        resolve_edge_state_context=resolve_edge_state_context,
        logger=logger,
    )
    app.include_router(dashboard_router)
|
||||||
|
|
@ -1,97 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
|
|
||||||
class RouteAccessMode(str, Enum):
    """Access-policy classification for a request route.

    Subclasses ``str`` so members compare and serialize as their plain
    string values.
    """

    # No authentication required (exact public paths, auth endpoints,
    # and anything outside /api/).
    PUBLIC = "public"
    # Requires a panel session; the default for /api/ routes and for
    # destructive bot actions.
    PANEL_ONLY = "panel_only"
    # Bot-scoped API routes: either a bot session or a panel session.
    BOT_OR_PANEL = "bot_or_panel"
    # Returned for /public/bots/... routes — NOTE(review): presumably public
    # with optional bot/panel context; confirm against the consuming middleware.
    PUBLIC_BOT_OR_PANEL = "public_bot_or_panel"
|
|
||||||
|
|
||||||
|
|
||||||
# Exact request paths that are always publicly reachable (no auth).
_PUBLIC_EXACT_PATHS = {
    "/api/health",
    "/api/health/cache",
    "/api/system/defaults",
}

# Leading path segments identifying the panel auth route family (/api/panel/auth/...).
_PANEL_AUTH_SEGMENTS = ("api", "panel", "auth")
# Leading segments of public bot routes (/public/bots/<bot_id>/...).
_BOT_PUBLIC_SEGMENTS = ("public", "bots")
# Leading segments of bot-scoped API routes (/api/bots/<bot_id>/...).
_BOT_API_SEGMENTS = ("api", "bots")
# Segment directly after the bot id that marks a bot auth endpoint.
_BOT_AUTH_SEGMENT = "auth"
# POST actions on a bot that only a panel session may perform.
_BOT_PANEL_ONLY_ACTIONS = {"enable", "disable", "deactivate"}
# Bot auth actions that must remain publicly reachable.
_BOT_PUBLIC_AUTH_ACTIONS = {"login", "logout", "status"}
|
|
||||||
|
|
||||||
|
|
||||||
def _path_segments(path: str) -> list[str]:
|
|
||||||
raw = str(path or "").strip().strip("/")
|
|
||||||
if not raw:
|
|
||||||
return []
|
|
||||||
return [segment for segment in raw.split("/") if segment]
|
|
||||||
|
|
||||||
|
|
||||||
def extract_bot_id(path: str) -> Optional[str]:
    """Return the bot id segment from a bot-scoped path, or None if absent."""
    segments = _path_segments(path)
    is_bot_scoped = (
        len(segments) >= 3
        and tuple(segments[:2]) in {_BOT_API_SEGMENTS, _BOT_PUBLIC_SEGMENTS}
    )
    if is_bot_scoped:
        candidate = str(segments[2] or "").strip()
        if candidate:
            return candidate
    return None
|
|
||||||
|
|
||||||
|
|
||||||
def _is_panel_auth_route(segments: list[str]) -> bool:
    """True when *segments* starts with the panel auth prefix (api/panel/auth)."""
    return segments[:3] == list(_PANEL_AUTH_SEGMENTS)
|
|
||||||
|
|
||||||
|
|
||||||
def _is_public_bot_route(segments: list[str]) -> bool:
    """True for public bot routes: public/bots/<bot_id>/..."""
    return len(segments) >= 3 and segments[:2] == list(_BOT_PUBLIC_SEGMENTS)
|
|
||||||
|
|
||||||
|
|
||||||
def _is_bot_auth_route(segments: list[str]) -> bool:
    """True for public bot auth endpoints: api/bots/<bot_id>/auth/<action>."""
    if len(segments) < 5:
        return False
    if tuple(segments[:2]) != _BOT_API_SEGMENTS:
        return False
    return segments[3] == _BOT_AUTH_SEGMENT and segments[4] in _BOT_PUBLIC_AUTH_ACTIONS
|
|
||||||
|
|
||||||
|
|
||||||
def _is_panel_only_bot_action(segments: list[str], method: str) -> bool:
    """True for bot mutations only a panel session may perform.

    Panel-only operations are DELETE on the bot itself
    (api/bots/<bot_id>) and POST on enable/disable/deactivate actions.
    """
    if len(segments) < 3 or tuple(segments[:2]) != _BOT_API_SEGMENTS:
        return False
    if method == "DELETE":
        return len(segments) == 3
    if method == "POST":
        return len(segments) >= 4 and segments[3] in _BOT_PANEL_ONLY_ACTIONS
    return False
|
|
||||||
|
|
||||||
|
|
||||||
def _is_bot_scoped_api_route(segments: list[str]) -> bool:
    """True for any bot-scoped API route: api/bots/<bot_id>/..."""
    return len(segments) >= 3 and tuple(segments[:2]) == _BOT_API_SEGMENTS
|
|
||||||
|
|
||||||
|
|
||||||
def resolve_route_access_mode(path: str, method: str) -> RouteAccessMode:
    """Classify a request path + HTTP method into its access policy.

    Order matters: exact public paths and auth endpoints win, then public
    bot routes, then panel-only bot actions, then generic bot-scoped API
    routes. Remaining /api/ paths are panel-only; everything else is public.
    """
    raw_path = str(path or "").strip()
    verb = str(method or "GET").strip().upper()
    segments = _path_segments(raw_path)

    if (
        raw_path in _PUBLIC_EXACT_PATHS
        or _is_panel_auth_route(segments)
        or _is_bot_auth_route(segments)
    ):
        return RouteAccessMode.PUBLIC
    if _is_public_bot_route(segments):
        return RouteAccessMode.PUBLIC_BOT_OR_PANEL
    if _is_panel_only_bot_action(segments, verb):
        return RouteAccessMode.PANEL_ONLY
    if _is_bot_scoped_api_route(segments):
        return RouteAccessMode.BOT_OR_PANEL
    return RouteAccessMode.PANEL_ONLY if raw_path.startswith("/api/") else RouteAccessMode.PUBLIC
|
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
# Client package for dashboard-edge integrations.
|
||||||
|
|
@ -0,0 +1,168 @@
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import Request, UploadFile
|
||||||
|
from fastapi.responses import Response
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
|
||||||
|
|
||||||
|
class EdgeClient(ABC):
    """Abstract transport for operating bots and workspaces on a dashboard-edge node.

    Concrete implementations carry out each operation against a specific edge
    node. All operations are keyword-only; most return plain ``dict`` payloads
    whose exact schema is defined by the edge API.
    """

    @abstractmethod
    async def start_bot(self, *, bot: BotInstance, start_payload: Dict[str, Any]) -> Dict[str, Any]:
        """Start *bot* on the edge node using *start_payload*; return a status mapping."""
        raise NotImplementedError

    @abstractmethod
    def stop_bot(self, *, bot: BotInstance) -> Dict[str, Any]:
        """Stop *bot* on the edge node; return a status mapping."""
        raise NotImplementedError

    @abstractmethod
    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Deliver *command* (optionally with media references) to the bot.

        The optional string result is implementation-defined; may be None.
        """
        raise NotImplementedError

    @abstractmethod
    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent log lines for the bot."""
        raise NotImplementedError

    @abstractmethod
    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure monitoring is active for the bot; return whether it is ensured."""
        raise NotImplementedError

    @abstractmethod
    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Return up to *limit* monitor packets with sequence numbers after *after_seq*."""
        raise NotImplementedError

    @abstractmethod
    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the bot's runtime status as a string."""
        raise NotImplementedError

    @abstractmethod
    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return a resource-usage snapshot for the bot."""
        raise NotImplementedError

    @abstractmethod
    def get_node_resources(self) -> Dict[str, Any]:
        """Return resource information for the edge node itself."""
        raise NotImplementedError

    @abstractmethod
    def get_node_self(self) -> Dict[str, Any]:
        """Return the edge node's self-description payload."""
        raise NotImplementedError

    @abstractmethod
    def preflight_native(self, *, native_command: Optional[str] = None, native_workdir: Optional[str] = None) -> Dict[str, Any]:
        """Run a preflight check for native runtime execution; return its result payload."""
        raise NotImplementedError

    @abstractmethod
    def read_state(
        self,
        *,
        bot_id: str,
        state_key: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Read the state entry *state_key* for the bot; return its payload."""
        raise NotImplementedError

    @abstractmethod
    def write_state(
        self,
        *,
        bot_id: str,
        state_key: str,
        data: Dict[str, Any],
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Write *data* under *state_key* for the bot; return the resulting payload."""
        raise NotImplementedError

    @abstractmethod
    def sync_bot_workspace(
        self,
        *,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Synchronize the bot's workspace, optionally applying the given overrides."""
        raise NotImplementedError

    @abstractmethod
    def purge_workspace(self, *, bot_id: str, workspace_root: Optional[str] = None) -> Dict[str, Any]:
        """Purge the bot's workspace; return a status mapping."""
        raise NotImplementedError

    @abstractmethod
    def list_tree(
        self,
        *,
        bot_id: str,
        path: Optional[str] = None,
        recursive: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """List the workspace tree at *path* (optionally recursive); return its payload."""
        raise NotImplementedError

    @abstractmethod
    def read_file(
        self,
        *,
        bot_id: str,
        path: str,
        max_bytes: int = 200000,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Read up to *max_bytes* of the workspace file at *path*; return its payload."""
        raise NotImplementedError

    @abstractmethod
    def write_markdown(
        self,
        *,
        bot_id: str,
        path: str,
        content: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Write markdown *content* to the workspace file at *path*."""
        raise NotImplementedError

    @abstractmethod
    def write_text_file(
        self,
        *,
        bot_id: str,
        path: str,
        content: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Write plain-text *content* to the workspace file at *path*."""
        raise NotImplementedError

    @abstractmethod
    async def upload_files(
        self,
        *,
        bot_id: str,
        files: List[UploadFile],
        path: Optional[str] = None,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Upload *files* into the bot's workspace (optionally under *path*)."""
        raise NotImplementedError

    @abstractmethod
    def delete_workspace_path(
        self,
        *,
        bot_id: str,
        path: str,
        workspace_root: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Delete the workspace entry at *path*; return a status mapping."""
        raise NotImplementedError

    @abstractmethod
    def serve_file(
        self,
        *,
        bot_id: str,
        path: str,
        download: bool,
        request: Request,
        public: bool = False,
        redirect_html_to_raw: bool = False,
        workspace_root: Optional[str] = None,
    ) -> Response:
        """Serve the workspace file at *path* as an HTTP response for *request*.

        Flag semantics (download/public/redirect_html_to_raw) are defined by
        the implementation — confirm against the concrete client.
        """
        raise NotImplementedError
|
||||||
|
|
@ -0,0 +1,84 @@
|
||||||
|
import logging
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
# Guards concurrent access to _OFFLINE_LOGGED_AT across threads.
_OFFLINE_LOG_LOCK = threading.Lock()
# Per-key time.monotonic() timestamp of the last emitted "edge offline" log line.
_OFFLINE_LOGGED_AT: dict[str, float] = {}
# Default minimum interval (seconds) between repeated offline log lines per key.
_DEFAULT_LOG_COOLDOWN_SECONDS = 60.0
|
||||||
|
|
||||||
|
|
||||||
|
def describe_edge_node(node: Any) -> str:
    """Human-readable label for an edge node: "name (id)", name, id, or a fallback."""
    name = str(getattr(node, "display_name", "") or "").strip()
    identifier = str(getattr(node, "node_id", "") or "").strip()
    if name and identifier and name != identifier:
        return f"{name} ({identifier})"
    return name or identifier or "unknown edge node"
|
||||||
|
|
||||||
|
|
||||||
|
def summarize_edge_exception(exc: Exception) -> str:
    """Short (max 400 chars) text for *exc*, preferring its ``detail`` attribute."""
    detail = getattr(exc, "detail", None)
    source = exc if detail is None else detail
    text = str(source).strip()
    # Fall back to the exception class name when there is no message at all.
    return text[:400] if text else exc.__class__.__name__
|
||||||
|
|
||||||
|
|
||||||
|
def edge_transport_http_exception(exc: httpx.RequestError, *, node: Any) -> HTTPException:
    """Map an httpx transport failure against *node* to a 502 HTTPException."""
    node_label = describe_edge_node(node)
    if isinstance(exc, httpx.TimeoutException):
        message = f"dashboard-edge timed out for node {node_label}"
    else:
        cause = str(exc).strip() or exc.__class__.__name__
        message = f"dashboard-edge is unreachable for node {node_label}: {cause}"
    # Cap the detail length to keep responses bounded.
    return HTTPException(status_code=502, detail=message[:400])
|
||||||
|
|
||||||
|
|
||||||
|
def is_expected_edge_offline_error(exc: Exception) -> bool:
    """Whether *exc* looks like a routine "edge node offline" failure.

    Transport-level httpx errors always qualify; HTTPExceptions qualify only
    with a gateway-ish status (502/503/504) and an offline-flavored detail.
    """
    if isinstance(exc, httpx.RequestError):
        return True
    if not isinstance(exc, HTTPException):
        return False
    status = int(getattr(exc, "status_code", 0) or 0)
    if status not in {502, 503, 504}:
        return False
    offline_markers = (
        "dashboard-edge is unreachable",
        "dashboard-edge timed out",
        "connection refused",
        "request failed before receiving a response",
        "name or service not known",
        "nodename nor servname provided",
        "temporary failure in name resolution",
    )
    detail = summarize_edge_exception(exc).lower()
    return any(marker in detail for marker in offline_markers)
|
||||||
|
|
||||||
|
|
||||||
|
def log_edge_failure(
    logger: logging.Logger,
    *,
    key: str,
    exc: Exception,
    message: str,
    cooldown_seconds: float = _DEFAULT_LOG_COOLDOWN_SECONDS,
) -> None:
    """Log an edge failure appropriately.

    Expected offline errors are logged at INFO, rate-limited per *key*;
    anything else is logged with a full traceback.
    """
    detail = summarize_edge_exception(exc)
    if not is_expected_edge_offline_error(exc):
        logger.exception("%s detail=%s", message, detail)
        return
    if _should_emit_offline_log(key=key, cooldown_seconds=cooldown_seconds):
        logger.info("%s detail=%s", message, detail)
|
||||||
|
|
||||||
|
|
||||||
|
def _should_emit_offline_log(*, key: str, cooldown_seconds: float) -> bool:
    """Thread-safely decide whether the offline log for *key* is outside its cooldown.

    Updates the per-key timestamp when emission is allowed.
    """
    normalized = str(key or "edge-offline").strip() or "edge-offline"
    # Enforce a floor of 1 second even for falsy/zero cooldowns.
    cooldown = max(1.0, float(cooldown_seconds or _DEFAULT_LOG_COOLDOWN_SECONDS))
    now = time.monotonic()
    with _OFFLINE_LOG_LOCK:
        elapsed = now - _OFFLINE_LOGGED_AT.get(normalized, 0.0)
        if elapsed < cooldown:
            return False
        _OFFLINE_LOGGED_AT[normalized] = now
        return True
|
||||||
|
|
@ -0,0 +1,543 @@
|
||||||
|
import mimetypes
|
||||||
|
import os
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from fastapi import HTTPException, Request, UploadFile
|
||||||
|
from fastapi.responses import RedirectResponse, Response
|
||||||
|
|
||||||
|
from clients.edge.base import EdgeClient
|
||||||
|
from clients.edge.errors import edge_transport_http_exception
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from schemas.edge import (
|
||||||
|
EdgeCommandRequest,
|
||||||
|
EdgeLogsResponse,
|
||||||
|
EdgeNativePreflightRequest,
|
||||||
|
EdgeNativePreflightResponse,
|
||||||
|
EdgeNodeHeartbeatResponse,
|
||||||
|
EdgeMonitorPacketsResponse,
|
||||||
|
EdgeMarkdownWriteRequest,
|
||||||
|
EdgeMonitorEnsureResponse,
|
||||||
|
EdgeNodeResourcesResponse,
|
||||||
|
EdgeNodeSelfResponse,
|
||||||
|
EdgeStateResponse,
|
||||||
|
EdgeStateWriteRequest,
|
||||||
|
EdgeStartBotRequest,
|
||||||
|
EdgeStatusResponse,
|
||||||
|
EdgeWorkspaceSyncRequest,
|
||||||
|
)
|
||||||
|
from services.node_registry_service import ManagedNode
|
||||||
|
|
||||||
|
EDGE_AUTH_HEADER = "x-dashboard-edge-token"
|
||||||
|
|
||||||
|
|
||||||
|
class HttpEdgeClient(EdgeClient):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
node: ManagedNode,
|
||||||
|
http_client_factory: Optional[Callable[[], httpx.Client]] = None,
|
||||||
|
async_http_client_factory: Optional[Callable[[], httpx.AsyncClient]] = None,
|
||||||
|
) -> None:
|
||||||
|
self._node = node
|
||||||
|
self._http_client_factory = http_client_factory or (lambda: httpx.Client(timeout=15.0, trust_env=False))
|
||||||
|
self._async_http_client_factory = async_http_client_factory or (
|
||||||
|
lambda: httpx.AsyncClient(timeout=15.0, trust_env=False)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def start_bot(self, *, bot: BotInstance, start_payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
payload = await self._async_request_json(
|
||||||
|
"POST",
|
||||||
|
f"/api/edge/bots/{bot.id}/start",
|
||||||
|
json=EdgeStartBotRequest.model_validate(start_payload).model_dump(),
|
||||||
|
)
|
||||||
|
return EdgeStatusResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def stop_bot(self, *, bot: BotInstance) -> Dict[str, Any]:
|
||||||
|
payload = self._request_json("POST", f"/api/edge/bots/{bot.id}/stop")
|
||||||
|
return EdgeStatusResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
|
||||||
|
self._request_json(
|
||||||
|
"POST",
|
||||||
|
f"/api/edge/bots/{bot_id}/command",
|
||||||
|
json=EdgeCommandRequest(command=command, media=list(media or [])).model_dump(),
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
|
||||||
|
payload = self._request_json(
|
||||||
|
"GET",
|
||||||
|
f"/api/edge/bots/{bot_id}/logs",
|
||||||
|
params={"tail": max(1, int(tail or 300))},
|
||||||
|
)
|
||||||
|
return EdgeLogsResponse.model_validate(payload).logs
|
||||||
|
|
||||||
|
def ensure_monitor(self, *, bot_id: str) -> bool:
|
||||||
|
payload = self._request_json("POST", f"/api/edge/bots/{bot_id}/monitor/ensure")
|
||||||
|
return bool(EdgeMonitorEnsureResponse.model_validate(payload).ensured)
|
||||||
|
|
||||||
|
def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
|
||||||
|
payload = self._request_json(
|
||||||
|
"GET",
|
||||||
|
f"/api/edge/bots/{bot_id}/monitor/packets",
|
||||||
|
params={"after_seq": max(0, int(after_seq or 0)), "limit": max(1, int(limit or 200))},
|
||||||
|
)
|
||||||
|
parsed = EdgeMonitorPacketsResponse.model_validate(payload)
|
||||||
|
rows: List[Dict[str, Any]] = []
|
||||||
|
for item in parsed.packets or []:
|
||||||
|
rows.append(item.model_dump())
|
||||||
|
return rows
|
||||||
|
|
||||||
|
def get_runtime_status(self, *, bot_id: str) -> str:
|
||||||
|
payload = self._request_json("GET", f"/api/edge/bots/{bot_id}/runtime/status")
|
||||||
|
return str(payload.get("status") or "STOPPED").upper()
|
||||||
|
|
||||||
|
def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
|
||||||
|
return self._request_json("GET", f"/api/edge/bots/{bot_id}/resources")
|
||||||
|
|
||||||
|
def get_node_resources(self) -> Dict[str, Any]:
|
||||||
|
payload = self._request_json("GET", "/api/edge/node/resources")
|
||||||
|
return EdgeNodeResourcesResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def get_node_self(self) -> Dict[str, Any]:
|
||||||
|
payload = self._request_json("GET", "/api/edge/node/self")
|
||||||
|
return EdgeNodeSelfResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def heartbeat_node(self) -> Dict[str, Any]:
|
||||||
|
payload = self._request_json("POST", "/api/edge/node/heartbeat")
|
||||||
|
return EdgeNodeHeartbeatResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def preflight_native(self, *, native_command: Optional[str] = None, native_workdir: Optional[str] = None) -> Dict[str, Any]:
|
||||||
|
payload = self._request_json(
|
||||||
|
"POST",
|
||||||
|
"/api/edge/runtime/native/preflight",
|
||||||
|
json=EdgeNativePreflightRequest(
|
||||||
|
native_command=str(native_command or "").strip() or None,
|
||||||
|
native_workdir=str(native_workdir or "").strip() or None,
|
||||||
|
).model_dump(),
|
||||||
|
)
|
||||||
|
return EdgeNativePreflightResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def read_state(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
state_key: str,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {}
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
payload = self._request_json(
|
||||||
|
"GET",
|
||||||
|
f"/api/edge/bots/{bot_id}/state/{state_key}",
|
||||||
|
params=params or None,
|
||||||
|
)
|
||||||
|
return EdgeStateResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def write_state(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
state_key: str,
|
||||||
|
data: Dict[str, Any],
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
payload = self._request_json(
|
||||||
|
"PUT",
|
||||||
|
f"/api/edge/bots/{bot_id}/state/{state_key}",
|
||||||
|
json=EdgeStateWriteRequest(
|
||||||
|
data=dict(data or {}),
|
||||||
|
workspace_root=str(workspace_root or "").strip() or None,
|
||||||
|
).model_dump(),
|
||||||
|
)
|
||||||
|
return EdgeStateResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def sync_bot_workspace(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
channels_override: Optional[List[Dict[str, Any]]] = None,
|
||||||
|
global_delivery_override: Optional[Dict[str, Any]] = None,
|
||||||
|
runtime_overrides: Optional[Dict[str, Any]] = None,
|
||||||
|
) -> None:
|
||||||
|
self._request_json(
|
||||||
|
"POST",
|
||||||
|
f"/api/edge/bots/{bot_id}/workspace/sync",
|
||||||
|
json=EdgeWorkspaceSyncRequest(
|
||||||
|
channels_override=channels_override,
|
||||||
|
global_delivery_override=global_delivery_override,
|
||||||
|
runtime_overrides=runtime_overrides,
|
||||||
|
).model_dump(),
|
||||||
|
)
|
||||||
|
|
||||||
|
def purge_workspace(self, *, bot_id: str, workspace_root: Optional[str] = None) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {}
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
payload = self._request_json(
|
||||||
|
"POST",
|
||||||
|
f"/api/edge/bots/{bot_id}/workspace/purge",
|
||||||
|
params=params or None,
|
||||||
|
)
|
||||||
|
return EdgeStatusResponse.model_validate(payload).model_dump()
|
||||||
|
|
||||||
|
def list_tree(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
recursive: bool = False,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {"recursive": bool(recursive)}
|
||||||
|
if path:
|
||||||
|
params["path"] = path
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
return self._request_json("GET", f"/api/edge/bots/{bot_id}/workspace/tree", params=params)
|
||||||
|
|
||||||
|
def read_file(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: str,
|
||||||
|
max_bytes: int = 200000,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {"path": path, "max_bytes": max(4096, int(max_bytes or 200000))}
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
return self._request_json(
|
||||||
|
"GET",
|
||||||
|
f"/api/edge/bots/{bot_id}/workspace/file",
|
||||||
|
params=params,
|
||||||
|
)
|
||||||
|
|
||||||
|
def write_markdown(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: str,
|
||||||
|
content: str,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {"path": path}
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
return self._request_json(
|
||||||
|
"PUT",
|
||||||
|
f"/api/edge/bots/{bot_id}/workspace/file/markdown",
|
||||||
|
params=params,
|
||||||
|
json=EdgeMarkdownWriteRequest(content=str(content or "")).model_dump(),
|
||||||
|
)
|
||||||
|
|
||||||
|
def write_text_file(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: str,
|
||||||
|
content: str,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {"path": path}
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
return self._request_json(
|
||||||
|
"PUT",
|
||||||
|
f"/api/edge/bots/{bot_id}/workspace/file/text",
|
||||||
|
params=params,
|
||||||
|
json=EdgeMarkdownWriteRequest(content=str(content or "")).model_dump(),
|
||||||
|
)
|
||||||
|
|
||||||
|
async def upload_files(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
files: List[UploadFile],
|
||||||
|
path: Optional[str] = None,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
base_url = self._require_base_url()
|
||||||
|
multipart_files = []
|
||||||
|
response: httpx.Response | None = None
|
||||||
|
try:
|
||||||
|
async with self._async_http_client_factory() as client:
|
||||||
|
for upload in files:
|
||||||
|
await upload.seek(0)
|
||||||
|
multipart_files.append(
|
||||||
|
(
|
||||||
|
"files",
|
||||||
|
(
|
||||||
|
upload.filename or "upload.bin",
|
||||||
|
upload.file,
|
||||||
|
upload.content_type or "application/octet-stream",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
response = await client.request(
|
||||||
|
method="POST",
|
||||||
|
url=f"{base_url}/api/edge/bots/{quote(bot_id, safe='')}/workspace/upload",
|
||||||
|
headers=self._headers(),
|
||||||
|
params=self._workspace_upload_params(path=path, workspace_root=workspace_root),
|
||||||
|
files=multipart_files,
|
||||||
|
)
|
||||||
|
except httpx.RequestError as exc:
|
||||||
|
raise edge_transport_http_exception(exc, node=self._node) from exc
|
||||||
|
finally:
|
||||||
|
for upload in files:
|
||||||
|
await upload.close()
|
||||||
|
if response is None:
|
||||||
|
raise HTTPException(status_code=502, detail="dashboard-edge upload request failed before receiving a response")
|
||||||
|
return self._parse_json_response(response)
|
||||||
|
|
||||||
|
def delete_workspace_path(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: str,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
params: Dict[str, Any] = {"path": path}
|
||||||
|
if workspace_root:
|
||||||
|
params["workspace_root"] = str(workspace_root).strip()
|
||||||
|
return self._request_json(
|
||||||
|
"DELETE",
|
||||||
|
f"/api/edge/bots/{bot_id}/workspace/file",
|
||||||
|
params=params,
|
||||||
|
)
|
||||||
|
|
||||||
|
def upload_local_files(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
local_paths: List[str],
|
||||||
|
path: Optional[str] = None,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
if not local_paths:
|
||||||
|
return {"bot_id": bot_id, "files": []}
|
||||||
|
base_url = self._require_base_url()
|
||||||
|
multipart_files = []
|
||||||
|
handles = []
|
||||||
|
response: httpx.Response | None = None
|
||||||
|
try:
|
||||||
|
for local_path in local_paths:
|
||||||
|
normalized = os.path.abspath(os.path.expanduser(str(local_path or "").strip()))
|
||||||
|
if not os.path.isfile(normalized):
|
||||||
|
raise HTTPException(status_code=400, detail=f"Local upload file not found: {local_path}")
|
||||||
|
handle = open(normalized, "rb")
|
||||||
|
handles.append(handle)
|
||||||
|
multipart_files.append(
|
||||||
|
(
|
||||||
|
"files",
|
||||||
|
(
|
||||||
|
os.path.basename(normalized),
|
||||||
|
handle,
|
||||||
|
mimetypes.guess_type(normalized)[0] or "application/octet-stream",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
with self._http_client_factory() as client:
|
||||||
|
response = client.request(
|
||||||
|
method="POST",
|
||||||
|
url=f"{base_url}/api/edge/bots/{quote(bot_id, safe='')}/workspace/upload",
|
||||||
|
headers=self._headers(),
|
||||||
|
params=self._workspace_upload_params(path=path, workspace_root=workspace_root),
|
||||||
|
files=multipart_files,
|
||||||
|
)
|
||||||
|
except OSError as exc:
|
||||||
|
raise HTTPException(status_code=500, detail=f"Failed to open local upload file: {exc.strerror or str(exc)}") from exc
|
||||||
|
except httpx.RequestError as exc:
|
||||||
|
raise edge_transport_http_exception(exc, node=self._node) from exc
|
||||||
|
finally:
|
||||||
|
for handle in handles:
|
||||||
|
try:
|
||||||
|
handle.close()
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
if response is None:
|
||||||
|
raise HTTPException(status_code=502, detail="dashboard-edge upload request failed before receiving a response")
|
||||||
|
return self._parse_json_response(response)
|
||||||
|
|
||||||
|
def serve_file(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: str,
|
||||||
|
download: bool,
|
||||||
|
request: Request,
|
||||||
|
public: bool = False,
|
||||||
|
redirect_html_to_raw: bool = False,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> Response:
|
||||||
|
media_type, _ = mimetypes.guess_type(path)
|
||||||
|
if redirect_html_to_raw and not download and str(media_type or "").startswith("text/html"):
|
||||||
|
raw_url = self._build_dashboard_raw_url(bot_id=bot_id, path=path, public=public)
|
||||||
|
if raw_url:
|
||||||
|
return RedirectResponse(url=raw_url, status_code=307)
|
||||||
|
|
||||||
|
base_url = self._require_base_url()
|
||||||
|
url = self._build_edge_file_url(
|
||||||
|
bot_id=bot_id,
|
||||||
|
path=path,
|
||||||
|
download=download,
|
||||||
|
raw=not redirect_html_to_raw,
|
||||||
|
workspace_root=workspace_root,
|
||||||
|
)
|
||||||
|
headers = self._headers()
|
||||||
|
range_header = request.headers.get("range", "").strip()
|
||||||
|
if range_header and not download:
|
||||||
|
headers["range"] = range_header
|
||||||
|
try:
|
||||||
|
with self._http_client_factory() as client:
|
||||||
|
response = client.request(
|
||||||
|
method="GET",
|
||||||
|
url=f"{base_url}{url}",
|
||||||
|
headers=headers,
|
||||||
|
)
|
||||||
|
except httpx.RequestError as exc:
|
||||||
|
raise edge_transport_http_exception(exc, node=self._node) from exc
|
||||||
|
self._raise_for_status(response)
|
||||||
|
return Response(
|
||||||
|
content=response.content,
|
||||||
|
status_code=response.status_code,
|
||||||
|
media_type=response.headers.get("content-type") or "application/octet-stream",
|
||||||
|
headers=self._response_proxy_headers(response),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _request_json(
|
||||||
|
self,
|
||||||
|
method: str,
|
||||||
|
path: str,
|
||||||
|
*,
|
||||||
|
params: Optional[Dict[str, Any]] = None,
|
||||||
|
json: Optional[Dict[str, Any]] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
base_url = self._require_base_url()
|
||||||
|
try:
|
||||||
|
with self._http_client_factory() as client:
|
||||||
|
response = client.request(
|
||||||
|
method=method.upper(),
|
||||||
|
url=f"{base_url}{path}",
|
||||||
|
headers=self._headers(),
|
||||||
|
params=params,
|
||||||
|
json=json,
|
||||||
|
)
|
||||||
|
except httpx.RequestError as exc:
|
||||||
|
raise edge_transport_http_exception(exc, node=self._node) from exc
|
||||||
|
return self._parse_json_response(response)
|
||||||
|
|
||||||
|
async def _async_request_json(
|
||||||
|
self,
|
||||||
|
method: str,
|
||||||
|
path: str,
|
||||||
|
*,
|
||||||
|
params: Optional[Dict[str, Any]] = None,
|
||||||
|
json: Optional[Dict[str, Any]] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
base_url = self._require_base_url()
|
||||||
|
try:
|
||||||
|
async with self._async_http_client_factory() as client:
|
||||||
|
response = await client.request(
|
||||||
|
method=method.upper(),
|
||||||
|
url=f"{base_url}{path}",
|
||||||
|
headers=self._headers(),
|
||||||
|
params=params,
|
||||||
|
json=json,
|
||||||
|
)
|
||||||
|
except httpx.RequestError as exc:
|
||||||
|
raise edge_transport_http_exception(exc, node=self._node) from exc
|
||||||
|
return self._parse_json_response(response)
|
||||||
|
|
||||||
|
def _headers(self) -> Dict[str, str]:
|
||||||
|
headers = {"accept": "application/json"}
|
||||||
|
token = str(self._node.auth_token or "").strip()
|
||||||
|
if token:
|
||||||
|
headers[EDGE_AUTH_HEADER] = token
|
||||||
|
return headers
|
||||||
|
|
||||||
|
def _require_base_url(self) -> str:
|
||||||
|
base_url = str(self._node.base_url or "").strip().rstrip("/")
|
||||||
|
if not base_url:
|
||||||
|
raise self._not_implemented("connect to node")
|
||||||
|
return base_url
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _raise_for_status(response: httpx.Response) -> None:
|
||||||
|
try:
|
||||||
|
response.raise_for_status()
|
||||||
|
except httpx.HTTPStatusError as exc:
|
||||||
|
detail = exc.response.text.strip() or str(exc)
|
||||||
|
raise HTTPException(status_code=502, detail=f"dashboard-edge request failed: {detail[:400]}") from exc
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _parse_json_response(cls, response: httpx.Response) -> Dict[str, Any]:
|
||||||
|
cls._raise_for_status(response)
|
||||||
|
try:
|
||||||
|
payload = response.json()
|
||||||
|
except Exception as exc:
|
||||||
|
raise HTTPException(status_code=502, detail="dashboard-edge returned invalid JSON") from exc
|
||||||
|
if not isinstance(payload, dict):
|
||||||
|
raise HTTPException(status_code=502, detail="dashboard-edge returned unexpected payload")
|
||||||
|
return payload
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _build_dashboard_raw_url(bot_id: str, path: str, public: bool) -> str:
|
||||||
|
normalized = "/".join(part for part in str(path or "").strip().split("/") if part)
|
||||||
|
if not normalized:
|
||||||
|
return ""
|
||||||
|
prefix = "/public" if public else "/api"
|
||||||
|
return f"{prefix}/bots/{quote(bot_id, safe='')}/workspace/raw/{quote(normalized, safe='/')}"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _build_edge_file_url(
|
||||||
|
*,
|
||||||
|
bot_id: str,
|
||||||
|
path: str,
|
||||||
|
download: bool,
|
||||||
|
raw: bool,
|
||||||
|
workspace_root: Optional[str] = None,
|
||||||
|
) -> str:
|
||||||
|
workspace_root_qs = ""
|
||||||
|
normalized_workspace_root = str(workspace_root or "").strip()
|
||||||
|
if normalized_workspace_root:
|
||||||
|
workspace_root_qs = f"&workspace_root={quote(normalized_workspace_root, safe='/')}"
|
||||||
|
if raw:
|
||||||
|
normalized = "/".join(part for part in str(path or "").strip().split("/") if part)
|
||||||
|
if not normalized:
|
||||||
|
raise HTTPException(status_code=400, detail="invalid workspace path")
|
||||||
|
return (
|
||||||
|
f"/api/edge/bots/{quote(bot_id, safe='')}/workspace/raw/"
|
||||||
|
f"{quote(normalized, safe='/')}?download={'true' if download else 'false'}{workspace_root_qs}"
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
f"/api/edge/bots/{quote(bot_id, safe='')}/workspace/download"
|
||||||
|
f"?path={quote(str(path or ''), safe='/')}&download={'true' if download else 'false'}{workspace_root_qs}"
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _workspace_upload_params(*, path: Optional[str], workspace_root: Optional[str]) -> Optional[Dict[str, Any]]:
|
||||||
|
params: Dict[str, Any] = {}
|
||||||
|
if path:
|
||||||
|
params["path"] = path
|
||||||
|
normalized_workspace_root = str(workspace_root or "").strip()
|
||||||
|
if normalized_workspace_root:
|
||||||
|
params["workspace_root"] = normalized_workspace_root
|
||||||
|
return params or None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _response_proxy_headers(response: httpx.Response) -> Dict[str, str]:
|
||||||
|
kept: Dict[str, str] = {}
|
||||||
|
for name in ("accept-ranges", "content-disposition", "content-length", "content-range", "cache-control"):
|
||||||
|
value = response.headers.get(name)
|
||||||
|
if value:
|
||||||
|
kept[name] = value
|
||||||
|
return kept
|
||||||
|
|
||||||
|
def _not_implemented(self, capability: str) -> HTTPException:
|
||||||
|
node_label = self._node.display_name or self._node.node_id
|
||||||
|
return HTTPException(status_code=501, detail=f"dashboard-edge {capability} is not implemented yet for node {node_label}")
|
||||||
|
|
@ -1,50 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from fastapi import Request
|
|
||||||
from fastapi.responses import JSONResponse
|
|
||||||
from sqlmodel import Session
|
|
||||||
from starlette.middleware.base import BaseHTTPMiddleware
|
|
||||||
|
|
||||||
from bootstrap.auth_access import RouteAccessMode, extract_bot_id, resolve_route_access_mode
|
|
||||||
from core.database import engine
|
|
||||||
from services.platform_auth_service import (
|
|
||||||
resolve_bot_request_auth,
|
|
||||||
resolve_panel_request_auth,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _unauthorized(detail: str) -> JSONResponse:
|
|
||||||
return JSONResponse(status_code=401, content={"detail": detail})
|
|
||||||
|
|
||||||
|
|
||||||
class AuthAccessMiddleware(BaseHTTPMiddleware):
|
|
||||||
async def dispatch(self, request: Request, call_next):
|
|
||||||
if request.method.upper() == "OPTIONS":
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
path = request.url.path
|
|
||||||
access_mode = resolve_route_access_mode(path, request.method)
|
|
||||||
if access_mode == RouteAccessMode.PUBLIC:
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
bot_id = extract_bot_id(path)
|
|
||||||
with Session(engine) as session:
|
|
||||||
panel_principal = resolve_panel_request_auth(session, request)
|
|
||||||
if panel_principal.authenticated:
|
|
||||||
request.state.auth_principal = panel_principal
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
if access_mode == RouteAccessMode.PANEL_ONLY:
|
|
||||||
return _unauthorized("Panel authentication required")
|
|
||||||
|
|
||||||
if not bot_id:
|
|
||||||
return _unauthorized("Bot authentication required")
|
|
||||||
|
|
||||||
bot_principal = resolve_bot_request_auth(session, request, bot_id)
|
|
||||||
if bot_principal.authenticated:
|
|
||||||
request.state.auth_principal = bot_principal
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
if access_mode == RouteAccessMode.PUBLIC_BOT_OR_PANEL:
|
|
||||||
return _unauthorized("Bot or panel authentication required to access this resource")
|
|
||||||
return _unauthorized("Bot or panel authentication required")
|
|
||||||
|
|
@ -1,5 +1,3 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
@ -12,32 +10,18 @@ except Exception: # pragma: no cover
|
||||||
|
|
||||||
|
|
||||||
class RedisCache:
|
class RedisCache:
|
||||||
def __init__(self, *, prefix_override: Optional[str] = None, default_ttl_override: Optional[int] = None):
|
def __init__(self):
|
||||||
self.prefix = str(prefix_override or REDIS_PREFIX).strip() or REDIS_PREFIX
|
self.enabled = bool(REDIS_ENABLED and REDIS_URL and Redis is not None)
|
||||||
self.default_ttl = int(default_ttl_override if default_ttl_override is not None else REDIS_DEFAULT_TTL)
|
self.prefix = REDIS_PREFIX
|
||||||
self.enabled = False
|
self.default_ttl = int(REDIS_DEFAULT_TTL)
|
||||||
self.status = "disabled"
|
|
||||||
self.status_detail = ""
|
|
||||||
self._client: Optional["Redis"] = None
|
self._client: Optional["Redis"] = None
|
||||||
if not REDIS_ENABLED:
|
if self.enabled:
|
||||||
return
|
try:
|
||||||
if not REDIS_URL:
|
self._client = Redis.from_url(REDIS_URL, decode_responses=True)
|
||||||
self.status = "missing_url"
|
self._client.ping()
|
||||||
return
|
except Exception:
|
||||||
if Redis is None:
|
self.enabled = False
|
||||||
self.status = "client_unavailable"
|
self._client = None
|
||||||
self.status_detail = "redis python package is not installed"
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
self._client = Redis.from_url(REDIS_URL, decode_responses=True)
|
|
||||||
self._client.ping()
|
|
||||||
self.enabled = True
|
|
||||||
self.status = "connected"
|
|
||||||
except Exception as exc:
|
|
||||||
self.enabled = False
|
|
||||||
self._client = None
|
|
||||||
self.status = "connection_failed"
|
|
||||||
self.status_detail = str(exc or "").strip()[:200]
|
|
||||||
|
|
||||||
def _full_key(self, key: str) -> str:
|
def _full_key(self, key: str) -> str:
|
||||||
return f"{self.prefix}:{key}"
|
return f"{self.prefix}:{key}"
|
||||||
|
|
@ -50,28 +34,11 @@ class RedisCache:
|
||||||
except Exception:
|
except Exception:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def get(self, key: str) -> Optional[str]:
|
|
||||||
if not self.enabled or self._client is None:
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
return self._client.get(self._full_key(key))
|
|
||||||
except Exception:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def set(self, key: str, value: str, ttl: Optional[int] = None) -> None:
|
|
||||||
if not self.enabled or self._client is None:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
ttl_seconds = int(ttl if ttl is not None else self.default_ttl)
|
|
||||||
self._client.setex(self._full_key(key), ttl_seconds, str(value))
|
|
||||||
except Exception:
|
|
||||||
return
|
|
||||||
|
|
||||||
def get_json(self, key: str) -> Any:
|
def get_json(self, key: str) -> Any:
|
||||||
if not self.enabled or self._client is None:
|
if not self.enabled or self._client is None:
|
||||||
return None
|
return None
|
||||||
try:
|
try:
|
||||||
raw = self.get(key)
|
raw = self._client.get(self._full_key(key))
|
||||||
if not raw:
|
if not raw:
|
||||||
return None
|
return None
|
||||||
return json.loads(raw)
|
return json.loads(raw)
|
||||||
|
|
@ -82,46 +49,11 @@ class RedisCache:
|
||||||
if not self.enabled or self._client is None:
|
if not self.enabled or self._client is None:
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
self.set(key, json.dumps(value, ensure_ascii=False, default=str), ttl=ttl)
|
self._client.setex(
|
||||||
except Exception:
|
self._full_key(key),
|
||||||
return
|
int(ttl if ttl is not None else self.default_ttl),
|
||||||
|
json.dumps(value, ensure_ascii=False, default=str),
|
||||||
def sadd(self, key: str, *members: str) -> None:
|
)
|
||||||
if not self.enabled or self._client is None:
|
|
||||||
return
|
|
||||||
normalized = [str(member or "").strip() for member in members if str(member or "").strip()]
|
|
||||||
if not normalized:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
self._client.sadd(self._full_key(key), *normalized)
|
|
||||||
except Exception:
|
|
||||||
return
|
|
||||||
|
|
||||||
def srem(self, key: str, *members: str) -> None:
|
|
||||||
if not self.enabled or self._client is None:
|
|
||||||
return
|
|
||||||
normalized = [str(member or "").strip() for member in members if str(member or "").strip()]
|
|
||||||
if not normalized:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
self._client.srem(self._full_key(key), *normalized)
|
|
||||||
except Exception:
|
|
||||||
return
|
|
||||||
|
|
||||||
def smembers(self, key: str) -> set[str]:
|
|
||||||
if not self.enabled or self._client is None:
|
|
||||||
return set()
|
|
||||||
try:
|
|
||||||
rows = self._client.smembers(self._full_key(key))
|
|
||||||
return {str(row or "").strip() for row in rows if str(row or "").strip()}
|
|
||||||
except Exception:
|
|
||||||
return set()
|
|
||||||
|
|
||||||
def expire(self, key: str, ttl: int) -> None:
|
|
||||||
if not self.enabled or self._client is None:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
self._client.expire(self._full_key(key), max(1, int(ttl)))
|
|
||||||
except Exception:
|
except Exception:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
@ -153,4 +85,4 @@ class RedisCache:
|
||||||
|
|
||||||
|
|
||||||
cache = RedisCache()
|
cache = RedisCache()
|
||||||
auth_cache = RedisCache(prefix_override=f"{REDIS_PREFIX}_auth")
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,265 @@
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
from core.settings import (
|
||||||
|
DEFAULT_AGENTS_MD,
|
||||||
|
DEFAULT_IDENTITY_MD,
|
||||||
|
DEFAULT_SOUL_MD,
|
||||||
|
DEFAULT_TOOLS_MD,
|
||||||
|
DEFAULT_USER_MD,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BotConfigManager:
|
||||||
|
def __init__(self, host_data_root: str):
|
||||||
|
self.host_data_root = host_data_root
|
||||||
|
|
||||||
|
def update_workspace(self, bot_id: str, bot_data: Dict[str, Any], channels: List[Dict[str, Any]]):
|
||||||
|
"""Generate/update nanobot workspace files and config.json."""
|
||||||
|
bot_dir = os.path.join(self.host_data_root, bot_id)
|
||||||
|
dot_nanobot_dir = os.path.join(bot_dir, ".nanobot")
|
||||||
|
workspace_dir = os.path.join(dot_nanobot_dir, "workspace")
|
||||||
|
memory_dir = os.path.join(workspace_dir, "memory")
|
||||||
|
skills_dir = os.path.join(workspace_dir, "skills")
|
||||||
|
|
||||||
|
for d in [dot_nanobot_dir, workspace_dir, memory_dir, skills_dir]:
|
||||||
|
os.makedirs(d, exist_ok=True)
|
||||||
|
|
||||||
|
raw_provider_name = (bot_data.get("llm_provider") or "openrouter").strip().lower()
|
||||||
|
provider_name = raw_provider_name
|
||||||
|
model_name = (bot_data.get("llm_model") or "openai/gpt-4o-mini").strip()
|
||||||
|
api_key = (bot_data.get("api_key") or "").strip()
|
||||||
|
api_base = (bot_data.get("api_base") or "").strip() or None
|
||||||
|
|
||||||
|
provider_alias = {
|
||||||
|
"aliyun": "dashscope",
|
||||||
|
"qwen": "dashscope",
|
||||||
|
"aliyun-qwen": "dashscope",
|
||||||
|
"moonshot": "kimi",
|
||||||
|
# Xunfei Spark provides OpenAI-compatible endpoint.
|
||||||
|
"xunfei": "openai",
|
||||||
|
"iflytek": "openai",
|
||||||
|
"xfyun": "openai",
|
||||||
|
}
|
||||||
|
provider_name = provider_alias.get(provider_name, provider_name)
|
||||||
|
if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"}:
|
||||||
|
if model_name and "/" not in model_name:
|
||||||
|
model_name = f"openai/{model_name}"
|
||||||
|
|
||||||
|
provider_cfg: Dict[str, Any] = {
|
||||||
|
"apiKey": api_key,
|
||||||
|
}
|
||||||
|
if api_base:
|
||||||
|
provider_cfg["apiBase"] = api_base
|
||||||
|
|
||||||
|
channels_cfg: Dict[str, Any] = {
|
||||||
|
"sendProgress": bool(bot_data.get("send_progress", False)),
|
||||||
|
"sendToolHints": bool(bot_data.get("send_tool_hints", False)),
|
||||||
|
}
|
||||||
|
|
||||||
|
existing_config: Dict[str, Any] = {}
|
||||||
|
config_path = os.path.join(dot_nanobot_dir, "config.json")
|
||||||
|
if os.path.isfile(config_path):
|
||||||
|
try:
|
||||||
|
with open(config_path, "r", encoding="utf-8") as f:
|
||||||
|
loaded = json.load(f)
|
||||||
|
if isinstance(loaded, dict):
|
||||||
|
existing_config = loaded
|
||||||
|
except Exception:
|
||||||
|
existing_config = {}
|
||||||
|
|
||||||
|
existing_tools = existing_config.get("tools")
|
||||||
|
tools_cfg: Dict[str, Any] = dict(existing_tools) if isinstance(existing_tools, dict) else {}
|
||||||
|
native_sandbox_mode = self._normalize_native_sandbox_mode(bot_data.get("native_sandbox_mode"))
|
||||||
|
if native_sandbox_mode == "workspace":
|
||||||
|
tools_cfg["restrictToWorkspace"] = True
|
||||||
|
elif native_sandbox_mode == "full_access":
|
||||||
|
tools_cfg["restrictToWorkspace"] = False
|
||||||
|
if "mcp_servers" in bot_data:
|
||||||
|
mcp_servers = bot_data.get("mcp_servers")
|
||||||
|
if isinstance(mcp_servers, dict):
|
||||||
|
tools_cfg["mcpServers"] = mcp_servers
|
||||||
|
|
||||||
|
config_data: Dict[str, Any] = {
|
||||||
|
"agents": {
|
||||||
|
"defaults": {
|
||||||
|
"model": model_name,
|
||||||
|
"temperature": float(bot_data.get("temperature") or 0.2),
|
||||||
|
"topP": float(bot_data.get("top_p") or 1.0),
|
||||||
|
"maxTokens": int(bot_data.get("max_tokens") or 8192),
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"providers": {
|
||||||
|
provider_name: provider_cfg,
|
||||||
|
},
|
||||||
|
"channels": channels_cfg,
|
||||||
|
}
|
||||||
|
if tools_cfg:
|
||||||
|
config_data["tools"] = tools_cfg
|
||||||
|
|
||||||
|
existing_channels = existing_config.get("channels")
|
||||||
|
existing_dashboard_cfg = (
|
||||||
|
existing_channels.get("dashboard")
|
||||||
|
if isinstance(existing_channels, dict) and isinstance(existing_channels.get("dashboard"), dict)
|
||||||
|
else {}
|
||||||
|
)
|
||||||
|
dashboard_cfg: Dict[str, Any] = {
|
||||||
|
"enabled": True,
|
||||||
|
"host": "0.0.0.0",
|
||||||
|
"port": 9000,
|
||||||
|
"allowFrom": ["*"],
|
||||||
|
}
|
||||||
|
for key in ("host", "port", "allowFrom"):
|
||||||
|
if key in existing_dashboard_cfg:
|
||||||
|
dashboard_cfg[key] = existing_dashboard_cfg[key]
|
||||||
|
channels_cfg["dashboard"] = dashboard_cfg
|
||||||
|
|
||||||
|
for channel in channels:
|
||||||
|
channel_type = (channel.get("channel_type") or "").strip()
|
||||||
|
if not channel_type:
|
||||||
|
continue
|
||||||
|
raw_extra = channel.get("extra_config")
|
||||||
|
extra: Dict[str, Any] = {}
|
||||||
|
if isinstance(raw_extra, str) and raw_extra.strip():
|
||||||
|
try:
|
||||||
|
parsed = json.loads(raw_extra)
|
||||||
|
if isinstance(parsed, dict):
|
||||||
|
extra = parsed
|
||||||
|
except Exception:
|
||||||
|
extra = {}
|
||||||
|
elif isinstance(raw_extra, dict):
|
||||||
|
extra = raw_extra
|
||||||
|
|
||||||
|
# Dashboard channel is deprecated in DB routing. Global flags now come from bot fields.
|
||||||
|
if channel_type == "dashboard":
|
||||||
|
continue
|
||||||
|
|
||||||
|
enabled = bool(channel.get("is_active", True))
|
||||||
|
external = channel.get("external_app_id", "") or ""
|
||||||
|
secret = channel.get("app_secret", "") or ""
|
||||||
|
|
||||||
|
if channel_type == "telegram":
|
||||||
|
channels_cfg["telegram"] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"token": secret,
|
||||||
|
"proxy": extra.get("proxy", ""),
|
||||||
|
"replyToMessage": bool(extra.get("replyToMessage", False)),
|
||||||
|
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
|
||||||
|
if channel_type == "feishu":
|
||||||
|
channels_cfg["feishu"] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"appId": external,
|
||||||
|
"appSecret": secret,
|
||||||
|
"encryptKey": extra.get("encryptKey", ""),
|
||||||
|
"verificationToken": extra.get("verificationToken", ""),
|
||||||
|
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
|
||||||
|
if channel_type == "dingtalk":
|
||||||
|
channels_cfg["dingtalk"] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"clientId": external,
|
||||||
|
"clientSecret": secret,
|
||||||
|
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
|
||||||
|
if channel_type == "slack":
|
||||||
|
channels_cfg["slack"] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"mode": extra.get("mode", "socket"),
|
||||||
|
"botToken": external,
|
||||||
|
"appToken": secret,
|
||||||
|
"replyInThread": bool(extra.get("replyInThread", True)),
|
||||||
|
"groupPolicy": extra.get("groupPolicy", "mention"),
|
||||||
|
"groupAllowFrom": extra.get("groupAllowFrom", []),
|
||||||
|
"reactEmoji": extra.get("reactEmoji", "eyes"),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
|
||||||
|
if channel_type == "qq":
|
||||||
|
channels_cfg["qq"] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"appId": external,
|
||||||
|
"secret": secret,
|
||||||
|
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
|
||||||
|
if channel_type == "email":
|
||||||
|
channels_cfg["email"] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"consentGranted": bool(extra.get("consentGranted", False)),
|
||||||
|
"imapHost": extra.get("imapHost", ""),
|
||||||
|
"imapPort": max(1, min(int(extra.get("imapPort", 993) or 993), 65535)),
|
||||||
|
"imapUsername": extra.get("imapUsername", ""),
|
||||||
|
"imapPassword": extra.get("imapPassword", ""),
|
||||||
|
"imapMailbox": extra.get("imapMailbox", "INBOX"),
|
||||||
|
"imapUseSsl": bool(extra.get("imapUseSsl", True)),
|
||||||
|
"smtpHost": extra.get("smtpHost", ""),
|
||||||
|
"smtpPort": max(1, min(int(extra.get("smtpPort", 587) or 587), 65535)),
|
||||||
|
"smtpUsername": extra.get("smtpUsername", ""),
|
||||||
|
"smtpPassword": extra.get("smtpPassword", ""),
|
||||||
|
"smtpUseTls": bool(extra.get("smtpUseTls", True)),
|
||||||
|
"smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
|
||||||
|
"fromAddress": extra.get("fromAddress", ""),
|
||||||
|
"autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
|
||||||
|
"pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds", 30) or 30)),
|
||||||
|
"markSeen": bool(extra.get("markSeen", True)),
|
||||||
|
"maxBodyChars": max(1, int(extra.get("maxBodyChars", 12000) or 12000)),
|
||||||
|
"subjectPrefix": extra.get("subjectPrefix", "Re: "),
|
||||||
|
"allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Fallback for future custom channels.
|
||||||
|
channels_cfg[channel_type] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"appId": external,
|
||||||
|
"appSecret": secret,
|
||||||
|
**extra,
|
||||||
|
}
|
||||||
|
|
||||||
|
with open(config_path, "w", encoding="utf-8") as f:
|
||||||
|
json.dump(config_data, f, indent=4, ensure_ascii=False)
|
||||||
|
|
||||||
|
bootstrap_files = {
|
||||||
|
"AGENTS.md": bot_data.get("agents_md") or DEFAULT_AGENTS_MD,
|
||||||
|
"SOUL.md": bot_data.get("soul_md") or bot_data.get("system_prompt") or DEFAULT_SOUL_MD,
|
||||||
|
"USER.md": bot_data.get("user_md") or DEFAULT_USER_MD,
|
||||||
|
"TOOLS.md": bot_data.get("tools_md") or DEFAULT_TOOLS_MD,
|
||||||
|
"IDENTITY.md": bot_data.get("identity_md") or DEFAULT_IDENTITY_MD,
|
||||||
|
}
|
||||||
|
|
||||||
|
for filename, content in bootstrap_files.items():
|
||||||
|
file_path = os.path.join(workspace_dir, filename)
|
||||||
|
with open(file_path, "w", encoding="utf-8") as f:
|
||||||
|
f.write(str(content).strip() + "\n")
|
||||||
|
|
||||||
|
return dot_nanobot_dir
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_allow_from(raw: Any) -> List[str]:
|
||||||
|
rows: List[str] = []
|
||||||
|
if isinstance(raw, list):
|
||||||
|
for item in raw:
|
||||||
|
text = str(item or "").strip()
|
||||||
|
if text and text not in rows:
|
||||||
|
rows.append(text)
|
||||||
|
if not rows:
|
||||||
|
return ["*"]
|
||||||
|
return rows
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_native_sandbox_mode(raw_value: Any) -> str:
|
||||||
|
text = str(raw_value or "").strip().lower()
|
||||||
|
if text in {"workspace", "sandbox", "strict"}:
|
||||||
|
return "workspace"
|
||||||
|
if text in {"full_access", "full-access", "danger-full-access", "escape"}:
|
||||||
|
return "full_access"
|
||||||
|
return "inherit"
|
||||||
|
|
@ -1,8 +1,9 @@
|
||||||
from sqlalchemy import inspect, text
|
from sqlalchemy import inspect, text
|
||||||
from sqlmodel import Session, create_engine
|
from sqlmodel import SQLModel, Session, create_engine
|
||||||
|
|
||||||
from core.settings import (
|
from core.settings import (
|
||||||
DATABASE_ECHO,
|
DATABASE_ECHO,
|
||||||
|
DATABASE_ENGINE,
|
||||||
DATABASE_MAX_OVERFLOW,
|
DATABASE_MAX_OVERFLOW,
|
||||||
DATABASE_POOL_RECYCLE,
|
DATABASE_POOL_RECYCLE,
|
||||||
DATABASE_POOL_SIZE,
|
DATABASE_POOL_SIZE,
|
||||||
|
|
@ -10,14 +11,26 @@ from core.settings import (
|
||||||
DATABASE_URL,
|
DATABASE_URL,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Ensure table models are registered in SQLModel metadata before create_all.
|
||||||
|
from models import bot as _bot_models # noqa: F401
|
||||||
|
from models import platform as _platform_models # noqa: F401
|
||||||
|
from models import skill as _skill_models # noqa: F401
|
||||||
|
from models import sys_auth as _sys_auth_models # noqa: F401
|
||||||
|
from models import topic as _topic_models # noqa: F401
|
||||||
|
from services.sys_auth_service import seed_sys_auth
|
||||||
|
|
||||||
_engine_kwargs = {
|
_engine_kwargs = {
|
||||||
"echo": DATABASE_ECHO,
|
"echo": DATABASE_ECHO,
|
||||||
"pool_pre_ping": True,
|
|
||||||
"pool_size": DATABASE_POOL_SIZE,
|
|
||||||
"max_overflow": DATABASE_MAX_OVERFLOW,
|
|
||||||
"pool_timeout": DATABASE_POOL_TIMEOUT,
|
|
||||||
"pool_recycle": DATABASE_POOL_RECYCLE,
|
|
||||||
}
|
}
|
||||||
|
_engine_kwargs.update(
|
||||||
|
{
|
||||||
|
"pool_pre_ping": True,
|
||||||
|
"pool_size": DATABASE_POOL_SIZE,
|
||||||
|
"max_overflow": DATABASE_MAX_OVERFLOW,
|
||||||
|
"pool_timeout": DATABASE_POOL_TIMEOUT,
|
||||||
|
"pool_recycle": DATABASE_POOL_RECYCLE,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
engine = create_engine(DATABASE_URL, **_engine_kwargs)
|
engine = create_engine(DATABASE_URL, **_engine_kwargs)
|
||||||
|
|
||||||
|
|
@ -26,69 +39,791 @@ BOT_MESSAGE_TABLE = "bot_message"
|
||||||
BOT_IMAGE_TABLE = "bot_image"
|
BOT_IMAGE_TABLE = "bot_image"
|
||||||
BOT_REQUEST_USAGE_TABLE = "bot_request_usage"
|
BOT_REQUEST_USAGE_TABLE = "bot_request_usage"
|
||||||
BOT_ACTIVITY_EVENT_TABLE = "bot_activity_event"
|
BOT_ACTIVITY_EVENT_TABLE = "bot_activity_event"
|
||||||
SYS_LOGIN_LOG_TABLE = "sys_login_log"
|
|
||||||
SYS_SETTING_TABLE = "sys_setting"
|
SYS_SETTING_TABLE = "sys_setting"
|
||||||
REQUIRED_TABLES = (
|
MANAGED_NODE_TABLE = "managed_node"
|
||||||
BOT_INSTANCE_TABLE,
|
POSTGRES_MIGRATION_LOCK_KEY = 2026031801
|
||||||
BOT_MESSAGE_TABLE,
|
MYSQL_MIGRATION_LOCK_NAME = "dashboard_nanobot_schema_migration"
|
||||||
BOT_IMAGE_TABLE,
|
LEGACY_TABLE_PAIRS = [
|
||||||
BOT_REQUEST_USAGE_TABLE,
|
("botinstance", BOT_INSTANCE_TABLE),
|
||||||
BOT_ACTIVITY_EVENT_TABLE,
|
("botmessage", BOT_MESSAGE_TABLE),
|
||||||
SYS_LOGIN_LOG_TABLE,
|
("nanobotimage", BOT_IMAGE_TABLE),
|
||||||
SYS_SETTING_TABLE,
|
("platformsetting", SYS_SETTING_TABLE),
|
||||||
"skill_market_item",
|
("botrequestusage", BOT_REQUEST_USAGE_TABLE),
|
||||||
"bot_skill_install",
|
("botactivityevent", BOT_ACTIVITY_EVENT_TABLE),
|
||||||
"topic_topic",
|
]
|
||||||
"topic_item",
|
|
||||||
)
|
|
||||||
|
|
||||||
REQUIRED_SYS_SETTING_KEYS = (
|
|
||||||
"page_size",
|
|
||||||
"chat_pull_page_size",
|
|
||||||
"auth_token_ttl_hours",
|
|
||||||
"auth_token_max_active",
|
|
||||||
"upload_max_mb",
|
|
||||||
"allowed_attachment_extensions",
|
|
||||||
"workspace_download_extensions",
|
|
||||||
"speech_enabled",
|
|
||||||
"activity_event_retention_days",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_required_tables() -> None:
|
def _quote_ident(name: str) -> str:
|
||||||
|
if engine.dialect.name == "mysql":
|
||||||
|
return f"`{str(name).replace('`', '``')}`"
|
||||||
|
return f'"{str(name).replace(chr(34), chr(34) * 2)}"'
|
||||||
|
|
||||||
|
|
||||||
|
def _rename_table_if_needed(old_name: str, new_name: str) -> None:
|
||||||
inspector = inspect(engine)
|
inspector = inspect(engine)
|
||||||
missing = [table_name for table_name in REQUIRED_TABLES if not inspector.has_table(table_name)]
|
if not inspector.has_table(old_name) or inspector.has_table(new_name):
|
||||||
if missing:
|
return
|
||||||
raise RuntimeError(
|
dialect = engine.dialect.name
|
||||||
"Database schema is not initialized. "
|
|
||||||
f"Missing tables: {', '.join(missing)}. "
|
|
||||||
"Run scripts/init-full-db.sh or apply scripts/sql/create-tables.sql before starting the backend."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_required_sys_settings() -> None:
|
|
||||||
placeholders = ", ".join(f":k{i}" for i, _ in enumerate(REQUIRED_SYS_SETTING_KEYS))
|
|
||||||
params = {f"k{i}": key for i, key in enumerate(REQUIRED_SYS_SETTING_KEYS)}
|
|
||||||
with engine.connect() as conn:
|
with engine.connect() as conn:
|
||||||
rows = conn.execute(
|
if dialect == "mysql":
|
||||||
text(f'SELECT key FROM "{SYS_SETTING_TABLE}" WHERE key IN ({placeholders})'),
|
conn.execute(text(f"RENAME TABLE `{old_name}` TO `{new_name}`"))
|
||||||
params,
|
else:
|
||||||
).scalars().all()
|
conn.execute(text(f'ALTER TABLE "{old_name}" RENAME TO "{new_name}"'))
|
||||||
present = {str(row or "").strip() for row in rows if str(row or "").strip()}
|
conn.commit()
|
||||||
missing = [key for key in REQUIRED_SYS_SETTING_KEYS if key not in present]
|
|
||||||
if missing:
|
|
||||||
raise RuntimeError(
|
def _rename_legacy_tables() -> None:
|
||||||
"Database seed data is not initialized. "
|
_rename_table_if_needed("botinstance", BOT_INSTANCE_TABLE)
|
||||||
f"Missing sys_setting keys: {', '.join(missing)}. "
|
_rename_table_if_needed("botmessage", BOT_MESSAGE_TABLE)
|
||||||
"Run scripts/init-full-db.sh or apply scripts/sql/init-data.sql before starting the backend."
|
_rename_table_if_needed("nanobotimage", BOT_IMAGE_TABLE)
|
||||||
|
_rename_table_if_needed("platformsetting", SYS_SETTING_TABLE)
|
||||||
|
_rename_table_if_needed("botrequestusage", BOT_REQUEST_USAGE_TABLE)
|
||||||
|
_rename_table_if_needed("botactivityevent", BOT_ACTIVITY_EVENT_TABLE)
|
||||||
|
|
||||||
|
|
||||||
|
def _acquire_migration_lock():
|
||||||
|
if engine.dialect.name == "postgresql":
|
||||||
|
conn = engine.connect()
|
||||||
|
conn.execute(text("SELECT pg_advisory_lock(:key)"), {"key": POSTGRES_MIGRATION_LOCK_KEY})
|
||||||
|
return conn
|
||||||
|
if engine.dialect.name == "mysql":
|
||||||
|
conn = engine.connect()
|
||||||
|
acquired = conn.execute(
|
||||||
|
text("SELECT GET_LOCK(:name, :timeout)"),
|
||||||
|
{"name": MYSQL_MIGRATION_LOCK_NAME, "timeout": 120},
|
||||||
|
).scalar()
|
||||||
|
if int(acquired or 0) != 1:
|
||||||
|
conn.close()
|
||||||
|
raise RuntimeError("Failed to acquire schema migration lock")
|
||||||
|
return conn
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _release_migration_lock(lock_conn) -> None:
|
||||||
|
if lock_conn is None:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
if engine.dialect.name == "postgresql":
|
||||||
|
lock_conn.execute(text("SELECT pg_advisory_unlock(:key)"), {"key": POSTGRES_MIGRATION_LOCK_KEY})
|
||||||
|
elif engine.dialect.name == "mysql":
|
||||||
|
lock_conn.execute(text("SELECT RELEASE_LOCK(:name)"), {"name": MYSQL_MIGRATION_LOCK_NAME})
|
||||||
|
finally:
|
||||||
|
lock_conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
def _table_row_count(table_name: str) -> int:
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(table_name):
|
||||||
|
return 0
|
||||||
|
with engine.connect() as conn:
|
||||||
|
value = conn.execute(text(f"SELECT COUNT(*) FROM {_quote_ident(table_name)}")).scalar()
|
||||||
|
return int(value or 0)
|
||||||
|
|
||||||
|
|
||||||
|
def _copy_legacy_table_rows(old_name: str, new_name: str) -> None:
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(old_name) or not inspector.has_table(new_name):
|
||||||
|
return
|
||||||
|
if _table_row_count(old_name) <= 0:
|
||||||
|
return
|
||||||
|
|
||||||
|
old_columns = {
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspector.get_columns(old_name)
|
||||||
|
if row.get("name")
|
||||||
|
}
|
||||||
|
new_columns = [
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspector.get_columns(new_name)
|
||||||
|
if row.get("name")
|
||||||
|
]
|
||||||
|
shared_columns = [col for col in new_columns if col in old_columns]
|
||||||
|
if not shared_columns:
|
||||||
|
return
|
||||||
|
pk = inspector.get_pk_constraint(new_name) or {}
|
||||||
|
pk_columns = [
|
||||||
|
str(col)
|
||||||
|
for col in (pk.get("constrained_columns") or [])
|
||||||
|
if col and col in shared_columns and col in old_columns
|
||||||
|
]
|
||||||
|
if not pk_columns:
|
||||||
|
return
|
||||||
|
|
||||||
|
columns_sql = ", ".join(_quote_ident(col) for col in shared_columns)
|
||||||
|
join_sql = " AND ".join(
|
||||||
|
f'n.{_quote_ident(col)} = o.{_quote_ident(col)}'
|
||||||
|
for col in pk_columns
|
||||||
|
)
|
||||||
|
null_check_col = _quote_ident(pk_columns[0])
|
||||||
|
with engine.connect() as conn:
|
||||||
|
conn.execute(
|
||||||
|
text(
|
||||||
|
f"INSERT INTO {_quote_ident(new_name)} ({columns_sql}) "
|
||||||
|
f"SELECT {', '.join(f'o.{_quote_ident(col)}' for col in shared_columns)} "
|
||||||
|
f"FROM {_quote_ident(old_name)} o "
|
||||||
|
f"LEFT JOIN {_quote_ident(new_name)} n ON {join_sql} "
|
||||||
|
f"WHERE n.{null_check_col} IS NULL"
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _migrate_legacy_table_rows() -> None:
|
||||||
|
for old_name, new_name in LEGACY_TABLE_PAIRS:
|
||||||
|
_copy_legacy_table_rows(old_name, new_name)
|
||||||
|
|
||||||
|
|
||||||
|
def _topic_fk_target(table_name: str, constrained_column: str = "bot_id") -> str | None:
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(table_name):
|
||||||
|
return None
|
||||||
|
for fk in inspector.get_foreign_keys(table_name):
|
||||||
|
cols = [str(col) for col in (fk.get("constrained_columns") or []) if col]
|
||||||
|
if cols == [constrained_column]:
|
||||||
|
referred = fk.get("referred_table")
|
||||||
|
return str(referred) if referred else None
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _repair_postgres_topic_foreign_keys() -> None:
|
||||||
|
if engine.dialect.name != "postgresql":
|
||||||
|
return
|
||||||
|
targets = {
|
||||||
|
"topic_topic": "topic_topic_bot_id_fkey",
|
||||||
|
"topic_item": "topic_item_bot_id_fkey",
|
||||||
|
}
|
||||||
|
with engine.connect() as conn:
|
||||||
|
changed = False
|
||||||
|
for table_name, constraint_name in targets.items():
|
||||||
|
if _topic_fk_target(table_name) == BOT_INSTANCE_TABLE:
|
||||||
|
continue
|
||||||
|
conn.execute(
|
||||||
|
text(
|
||||||
|
f'ALTER TABLE {_quote_ident(table_name)} '
|
||||||
|
f'DROP CONSTRAINT IF EXISTS {_quote_ident(constraint_name)}'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
text(
|
||||||
|
f'ALTER TABLE {_quote_ident(table_name)} '
|
||||||
|
f'ADD CONSTRAINT {_quote_ident(constraint_name)} '
|
||||||
|
f'FOREIGN KEY ({_quote_ident("bot_id")}) '
|
||||||
|
f'REFERENCES {_quote_ident(BOT_INSTANCE_TABLE)}({_quote_ident("id")}) '
|
||||||
|
f'ON DELETE CASCADE'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
changed = True
|
||||||
|
if changed:
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _legacy_rows_missing_in_new(old_name: str, new_name: str) -> int:
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(old_name) or not inspector.has_table(new_name):
|
||||||
|
return 0
|
||||||
|
pk = inspector.get_pk_constraint(new_name) or {}
|
||||||
|
pk_columns = [
|
||||||
|
str(col)
|
||||||
|
for col in (pk.get("constrained_columns") or [])
|
||||||
|
if col
|
||||||
|
]
|
||||||
|
if not pk_columns:
|
||||||
|
return _table_row_count(old_name)
|
||||||
|
join_sql = " AND ".join(
|
||||||
|
f'n.{_quote_ident(col)} = o.{_quote_ident(col)}'
|
||||||
|
for col in pk_columns
|
||||||
|
)
|
||||||
|
null_check_col = _quote_ident(pk_columns[0])
|
||||||
|
with engine.connect() as conn:
|
||||||
|
value = conn.execute(
|
||||||
|
text(
|
||||||
|
f'SELECT COUNT(*) FROM {_quote_ident(old_name)} o '
|
||||||
|
f'LEFT JOIN {_quote_ident(new_name)} n ON {join_sql} '
|
||||||
|
f'WHERE n.{null_check_col} IS NULL'
|
||||||
|
)
|
||||||
|
).scalar()
|
||||||
|
return int(value or 0)
|
||||||
|
|
||||||
|
|
||||||
|
def _drop_legacy_tables() -> None:
|
||||||
|
droppable = [
|
||||||
|
old_name
|
||||||
|
for old_name, new_name in LEGACY_TABLE_PAIRS
|
||||||
|
if _legacy_rows_missing_in_new(old_name, new_name) <= 0
|
||||||
|
]
|
||||||
|
if not droppable:
|
||||||
|
return
|
||||||
|
with engine.connect() as conn:
|
||||||
|
for old_name in droppable:
|
||||||
|
if engine.dialect.name == "postgresql":
|
||||||
|
conn.execute(text(f'DROP TABLE IF EXISTS {_quote_ident(old_name)} CASCADE'))
|
||||||
|
else:
|
||||||
|
conn.execute(text(f'DROP TABLE IF EXISTS {_quote_ident(old_name)}'))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_botinstance_columns() -> None:
|
||||||
|
dialect = engine.dialect.name
|
||||||
|
required_columns = {
|
||||||
|
"current_state": {
|
||||||
|
"postgresql": "TEXT DEFAULT 'IDLE'",
|
||||||
|
"mysql": "VARCHAR(64) DEFAULT 'IDLE'",
|
||||||
|
},
|
||||||
|
"last_action": {
|
||||||
|
"postgresql": "TEXT",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"image_tag": {
|
||||||
|
"postgresql": "TEXT DEFAULT 'nanobot-base:v0.1.4'",
|
||||||
|
"mysql": "VARCHAR(255) DEFAULT 'nanobot-base:v0.1.4'",
|
||||||
|
},
|
||||||
|
"access_password": {
|
||||||
|
"postgresql": "TEXT DEFAULT ''",
|
||||||
|
"mysql": "VARCHAR(255) DEFAULT ''",
|
||||||
|
},
|
||||||
|
"enabled": {
|
||||||
|
"postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
|
||||||
|
"mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
|
||||||
|
},
|
||||||
|
"node_id": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'local'",
|
||||||
|
"mysql": "VARCHAR(120) NOT NULL DEFAULT 'local'",
|
||||||
|
},
|
||||||
|
"transport_kind": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'direct'",
|
||||||
|
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'direct'",
|
||||||
|
},
|
||||||
|
"runtime_kind": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'docker'",
|
||||||
|
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'docker'",
|
||||||
|
},
|
||||||
|
"core_adapter": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'nanobot'",
|
||||||
|
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'nanobot'",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(BOT_INSTANCE_TABLE):
|
||||||
|
return
|
||||||
|
with engine.connect() as conn:
|
||||||
|
existing = {
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspect(conn).get_columns(BOT_INSTANCE_TABLE)
|
||||||
|
if row.get("name")
|
||||||
|
}
|
||||||
|
for col, ddl_map in required_columns.items():
|
||||||
|
if col in existing:
|
||||||
|
continue
|
||||||
|
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
|
||||||
|
conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} ADD COLUMN {col} {ddl}"))
|
||||||
|
if "enabled" in existing:
|
||||||
|
conn.execute(text(f"UPDATE {BOT_INSTANCE_TABLE} SET enabled = TRUE WHERE enabled IS NULL"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_botinstance_indexes() -> None:
|
||||||
|
required_indexes = [
|
||||||
|
("idx_bot_instance_enabled", BOT_INSTANCE_TABLE, ["enabled"]),
|
||||||
|
("idx_bot_instance_docker_status", BOT_INSTANCE_TABLE, ["docker_status"]),
|
||||||
|
("idx_bot_instance_node_id", BOT_INSTANCE_TABLE, ["node_id"]),
|
||||||
|
("idx_bot_instance_transport_kind", BOT_INSTANCE_TABLE, ["transport_kind"]),
|
||||||
|
("idx_bot_instance_runtime_kind", BOT_INSTANCE_TABLE, ["runtime_kind"]),
|
||||||
|
("idx_bot_instance_core_adapter", BOT_INSTANCE_TABLE, ["core_adapter"]),
|
||||||
|
("idx_bot_instance_node_transport_runtime", BOT_INSTANCE_TABLE, ["node_id", "transport_kind", "runtime_kind"]),
|
||||||
|
]
|
||||||
|
inspector = inspect(engine)
|
||||||
|
with engine.connect() as conn:
|
||||||
|
if not inspector.has_table(BOT_INSTANCE_TABLE):
|
||||||
|
return
|
||||||
|
existing = {
|
||||||
|
str(item.get("name"))
|
||||||
|
for item in inspector.get_indexes(BOT_INSTANCE_TABLE)
|
||||||
|
if item.get("name")
|
||||||
|
}
|
||||||
|
for name, table_name, columns in required_indexes:
|
||||||
|
if name in existing:
|
||||||
|
continue
|
||||||
|
conn.execute(text(f"CREATE INDEX {name} ON {table_name} ({', '.join(columns)})"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _drop_legacy_botinstance_columns() -> None:
|
||||||
|
legacy_columns = [
|
||||||
|
"avatar_model",
|
||||||
|
"avatar_skin",
|
||||||
|
"system_prompt",
|
||||||
|
"soul_md",
|
||||||
|
"agents_md",
|
||||||
|
"user_md",
|
||||||
|
"tools_md",
|
||||||
|
"tools_config_json",
|
||||||
|
"identity_md",
|
||||||
|
"llm_provider",
|
||||||
|
"llm_model",
|
||||||
|
"api_key",
|
||||||
|
"api_base",
|
||||||
|
"temperature",
|
||||||
|
"top_p",
|
||||||
|
"max_tokens",
|
||||||
|
"presence_penalty",
|
||||||
|
"frequency_penalty",
|
||||||
|
"send_progress",
|
||||||
|
"send_tool_hints",
|
||||||
|
"bot_env_json",
|
||||||
|
]
|
||||||
|
with engine.connect() as conn:
|
||||||
|
existing = {
|
||||||
|
str(col.get("name"))
|
||||||
|
for col in inspect(conn).get_columns(BOT_INSTANCE_TABLE)
|
||||||
|
if col.get("name")
|
||||||
|
}
|
||||||
|
for col in legacy_columns:
|
||||||
|
if col not in existing:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
if engine.dialect.name == "mysql":
|
||||||
|
conn.execute(text(f"ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN `{col}`"))
|
||||||
|
else:
|
||||||
|
conn.execute(text(f'ALTER TABLE {BOT_INSTANCE_TABLE} DROP COLUMN IF EXISTS "{col}"'))
|
||||||
|
except Exception:
|
||||||
|
# Keep startup resilient on mixed/legacy database engines.
|
||||||
|
continue
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _drop_legacy_skill_tables() -> None:
|
||||||
|
"""Drop deprecated skill registry tables (moved to workspace filesystem mode)."""
|
||||||
|
with engine.connect() as conn:
|
||||||
|
conn.execute(text("DROP TABLE IF EXISTS botskillmapping"))
|
||||||
|
conn.execute(text("DROP TABLE IF EXISTS skillregistry"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_sys_setting_columns() -> None:
|
||||||
|
dialect = engine.dialect.name
|
||||||
|
required_columns = {
|
||||||
|
"name": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "VARCHAR(200) NOT NULL DEFAULT ''",
|
||||||
|
},
|
||||||
|
"category": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'general'",
|
||||||
|
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'general'",
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"value_type": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'json'",
|
||||||
|
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'json'",
|
||||||
|
},
|
||||||
|
"is_public": {
|
||||||
|
"postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
|
||||||
|
"mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
|
||||||
|
},
|
||||||
|
"sort_order": {
|
||||||
|
"postgresql": "INTEGER NOT NULL DEFAULT 100",
|
||||||
|
"mysql": "INTEGER NOT NULL DEFAULT 100",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(SYS_SETTING_TABLE):
|
||||||
|
return
|
||||||
|
with engine.connect() as conn:
|
||||||
|
existing = {
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspect(conn).get_columns(SYS_SETTING_TABLE)
|
||||||
|
if row.get("name")
|
||||||
|
}
|
||||||
|
for col, ddl_map in required_columns.items():
|
||||||
|
if col in existing:
|
||||||
|
continue
|
||||||
|
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
|
||||||
|
conn.execute(text(f"ALTER TABLE {SYS_SETTING_TABLE} ADD COLUMN {col} {ddl}"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_bot_request_usage_columns() -> None:
|
||||||
|
dialect = engine.dialect.name
|
||||||
|
required_columns = {
|
||||||
|
"message_id": {
|
||||||
|
"postgresql": "INTEGER",
|
||||||
|
"mysql": "INTEGER",
|
||||||
|
},
|
||||||
|
"provider": {
|
||||||
|
"postgresql": "TEXT",
|
||||||
|
"mysql": "VARCHAR(120)",
|
||||||
|
},
|
||||||
|
"model": {
|
||||||
|
"postgresql": "TEXT",
|
||||||
|
"mysql": "VARCHAR(255)",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(BOT_REQUEST_USAGE_TABLE):
|
||||||
|
return
|
||||||
|
with engine.connect() as conn:
|
||||||
|
existing = {
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspect(conn).get_columns(BOT_REQUEST_USAGE_TABLE)
|
||||||
|
if row.get("name")
|
||||||
|
}
|
||||||
|
for col, ddl_map in required_columns.items():
|
||||||
|
if col in existing:
|
||||||
|
continue
|
||||||
|
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
|
||||||
|
conn.execute(text(f"ALTER TABLE {BOT_REQUEST_USAGE_TABLE} ADD COLUMN {col} {ddl}"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_managed_node_columns() -> None:
|
||||||
|
dialect = engine.dialect.name
|
||||||
|
required_columns = {
|
||||||
|
"display_name": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "VARCHAR(200) NOT NULL DEFAULT ''",
|
||||||
|
},
|
||||||
|
"base_url": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "TEXT NOT NULL",
|
||||||
|
},
|
||||||
|
"enabled": {
|
||||||
|
"postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
|
||||||
|
"mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
|
||||||
|
},
|
||||||
|
"auth_token": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "TEXT NOT NULL",
|
||||||
|
},
|
||||||
|
"transport_kind": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'direct'",
|
||||||
|
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'direct'",
|
||||||
|
},
|
||||||
|
"runtime_kind": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'docker'",
|
||||||
|
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'docker'",
|
||||||
|
},
|
||||||
|
"core_adapter": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'nanobot'",
|
||||||
|
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'nanobot'",
|
||||||
|
},
|
||||||
|
"metadata_json": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"capabilities_json": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"resources_json": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"last_seen_at": {
|
||||||
|
"postgresql": "TIMESTAMP",
|
||||||
|
"mysql": "DATETIME",
|
||||||
|
},
|
||||||
|
"created_at": {
|
||||||
|
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
},
|
||||||
|
"updated_at": {
|
||||||
|
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
inspector = inspect(engine)
|
||||||
|
if not inspector.has_table(MANAGED_NODE_TABLE):
|
||||||
|
return
|
||||||
|
with engine.connect() as conn:
|
||||||
|
existing = {
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspect(conn).get_columns(MANAGED_NODE_TABLE)
|
||||||
|
if row.get("name")
|
||||||
|
}
|
||||||
|
for col, ddl_map in required_columns.items():
|
||||||
|
if col in existing:
|
||||||
|
continue
|
||||||
|
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
|
||||||
|
conn.execute(text(f"ALTER TABLE {MANAGED_NODE_TABLE} ADD COLUMN {col} {ddl}"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_managed_node_indexes() -> None:
|
||||||
|
required_indexes = [
|
||||||
|
("idx_managed_node_enabled", MANAGED_NODE_TABLE, ["enabled"]),
|
||||||
|
("idx_managed_node_transport_kind", MANAGED_NODE_TABLE, ["transport_kind"]),
|
||||||
|
("idx_managed_node_runtime_kind", MANAGED_NODE_TABLE, ["runtime_kind"]),
|
||||||
|
("idx_managed_node_core_adapter", MANAGED_NODE_TABLE, ["core_adapter"]),
|
||||||
|
("idx_managed_node_last_seen_at", MANAGED_NODE_TABLE, ["last_seen_at"]),
|
||||||
|
]
|
||||||
|
inspector = inspect(engine)
|
||||||
|
with engine.connect() as conn:
|
||||||
|
if not inspector.has_table(MANAGED_NODE_TABLE):
|
||||||
|
return
|
||||||
|
existing = {
|
||||||
|
str(item.get("name"))
|
||||||
|
for item in inspector.get_indexes(MANAGED_NODE_TABLE)
|
||||||
|
if item.get("name")
|
||||||
|
}
|
||||||
|
for name, table_name, columns in required_indexes:
|
||||||
|
if name in existing:
|
||||||
|
continue
|
||||||
|
conn.execute(text(f"CREATE INDEX {name} ON {table_name} ({', '.join(columns)})"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_topic_columns() -> None:
|
||||||
|
dialect = engine.dialect.name
|
||||||
|
required_columns = {
|
||||||
|
"topic_topic": {
|
||||||
|
"name": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "VARCHAR(255) NOT NULL DEFAULT ''",
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"is_active": {
|
||||||
|
"postgresql": "BOOLEAN NOT NULL DEFAULT TRUE",
|
||||||
|
"mysql": "BOOLEAN NOT NULL DEFAULT TRUE",
|
||||||
|
},
|
||||||
|
"is_default_fallback": {
|
||||||
|
"postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
|
||||||
|
"mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
|
||||||
|
},
|
||||||
|
"routing_json": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"view_schema_json": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT '{}'",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"created_at": {
|
||||||
|
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
},
|
||||||
|
"updated_at": {
|
||||||
|
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"topic_item": {
|
||||||
|
"title": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT ''",
|
||||||
|
"mysql": "VARCHAR(2000) NOT NULL DEFAULT ''",
|
||||||
|
},
|
||||||
|
"level": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'info'",
|
||||||
|
"mysql": "VARCHAR(32) NOT NULL DEFAULT 'info'",
|
||||||
|
},
|
||||||
|
"tags_json": {
|
||||||
|
"postgresql": "TEXT",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"view_json": {
|
||||||
|
"postgresql": "TEXT",
|
||||||
|
"mysql": "LONGTEXT",
|
||||||
|
},
|
||||||
|
"source": {
|
||||||
|
"postgresql": "TEXT NOT NULL DEFAULT 'mcp'",
|
||||||
|
"mysql": "VARCHAR(64) NOT NULL DEFAULT 'mcp'",
|
||||||
|
},
|
||||||
|
"dedupe_key": {
|
||||||
|
"postgresql": "TEXT",
|
||||||
|
"mysql": "VARCHAR(200)",
|
||||||
|
},
|
||||||
|
"is_read": {
|
||||||
|
"postgresql": "BOOLEAN NOT NULL DEFAULT FALSE",
|
||||||
|
"mysql": "BOOLEAN NOT NULL DEFAULT FALSE",
|
||||||
|
},
|
||||||
|
"created_at": {
|
||||||
|
"postgresql": "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
"mysql": "DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
inspector = inspect(engine)
|
||||||
|
with engine.connect() as conn:
|
||||||
|
for table_name, cols in required_columns.items():
|
||||||
|
if not inspector.has_table(table_name):
|
||||||
|
continue
|
||||||
|
existing = {
|
||||||
|
str(row.get("name"))
|
||||||
|
for row in inspector.get_columns(table_name)
|
||||||
|
if row.get("name")
|
||||||
|
}
|
||||||
|
for col, ddl_map in cols.items():
|
||||||
|
if col in existing:
|
||||||
|
continue
|
||||||
|
ddl = ddl_map.get(dialect) or ddl_map.get("postgresql")
|
||||||
|
conn.execute(text(f"ALTER TABLE {table_name} ADD COLUMN {col} {ddl}"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_topic_indexes() -> None:
|
||||||
|
required_indexes = [
|
||||||
|
("uq_topic_topic_bot_topic_key", "topic_topic", ["bot_id", "topic_key"], True),
|
||||||
|
("idx_topic_topic_bot_id", "topic_topic", ["bot_id"], False),
|
||||||
|
("idx_topic_topic_topic_key", "topic_topic", ["topic_key"], False),
|
||||||
|
("idx_topic_topic_bot_fallback", "topic_topic", ["bot_id", "is_default_fallback"], False),
|
||||||
|
("idx_topic_item_bot_id", "topic_item", ["bot_id"], False),
|
||||||
|
("idx_topic_item_topic_key", "topic_item", ["topic_key"], False),
|
||||||
|
("idx_topic_item_level", "topic_item", ["level"], False),
|
||||||
|
("idx_topic_item_source", "topic_item", ["source"], False),
|
||||||
|
("idx_topic_item_is_read", "topic_item", ["is_read"], False),
|
||||||
|
("idx_topic_item_created_at", "topic_item", ["created_at"], False),
|
||||||
|
("idx_topic_item_bot_topic_created_at", "topic_item", ["bot_id", "topic_key", "created_at"], False),
|
||||||
|
("idx_topic_item_bot_dedupe", "topic_item", ["bot_id", "dedupe_key"], False),
|
||||||
|
]
|
||||||
|
inspector = inspect(engine)
|
||||||
|
with engine.connect() as conn:
|
||||||
|
for name, table_name, columns, unique in required_indexes:
|
||||||
|
if not inspector.has_table(table_name):
|
||||||
|
continue
|
||||||
|
existing = {
|
||||||
|
str(item.get("name"))
|
||||||
|
for item in inspector.get_indexes(table_name)
|
||||||
|
if item.get("name")
|
||||||
|
}
|
||||||
|
existing.update(
|
||||||
|
str(item.get("name"))
|
||||||
|
for item in inspector.get_unique_constraints(table_name)
|
||||||
|
if item.get("name")
|
||||||
|
)
|
||||||
|
if name in existing:
|
||||||
|
continue
|
||||||
|
unique_sql = "UNIQUE " if unique else ""
|
||||||
|
cols_sql = ", ".join(columns)
|
||||||
|
conn.execute(text(f"CREATE {unique_sql}INDEX {name} ON {table_name} ({cols_sql})"))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _drop_obsolete_topic_tables() -> None:
|
||||||
|
with engine.connect() as conn:
|
||||||
|
if engine.dialect.name == "postgresql":
|
||||||
|
conn.execute(text('DROP TABLE IF EXISTS "topic_bot_settings"'))
|
||||||
|
elif engine.dialect.name == "mysql":
|
||||||
|
conn.execute(text("DROP TABLE IF EXISTS `topic_bot_settings`"))
|
||||||
|
else:
|
||||||
|
conn.execute(text('DROP TABLE IF EXISTS "topic_bot_settings"'))
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def _cleanup_legacy_default_topics() -> None:
|
||||||
|
"""
|
||||||
|
Remove legacy auto-created fallback topic rows from early topic-feed design.
|
||||||
|
|
||||||
|
Historical rows look like:
|
||||||
|
- topic_key = inbox
|
||||||
|
- name = Inbox
|
||||||
|
- description = Default topic for uncategorized items
|
||||||
|
- routing_json contains "Fallback topic"
|
||||||
|
"""
|
||||||
|
with engine.connect() as conn:
|
||||||
|
legacy_rows = conn.execute(
|
||||||
|
text(
|
||||||
|
"""
|
||||||
|
SELECT bot_id, topic_key
|
||||||
|
FROM topic_topic
|
||||||
|
WHERE lower(coalesce(topic_key, '')) = 'inbox'
|
||||||
|
AND lower(coalesce(name, '')) = 'inbox'
|
||||||
|
AND lower(coalesce(description, '')) = 'default topic for uncategorized items'
|
||||||
|
AND lower(coalesce(routing_json, '')) LIKE '%fallback topic%'
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
).fetchall()
|
||||||
|
if not legacy_rows:
|
||||||
|
return
|
||||||
|
for row in legacy_rows:
|
||||||
|
bot_id = str(row[0] or "").strip()
|
||||||
|
topic_key = str(row[1] or "").strip().lower()
|
||||||
|
if not bot_id or not topic_key:
|
||||||
|
continue
|
||||||
|
conn.execute(
|
||||||
|
text(
|
||||||
|
"""
|
||||||
|
DELETE FROM topic_item
|
||||||
|
WHERE bot_id = :bot_id AND lower(coalesce(topic_key, '')) = :topic_key
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
{"bot_id": bot_id, "topic_key": topic_key},
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
text(
|
||||||
|
"""
|
||||||
|
DELETE FROM topic_topic
|
||||||
|
WHERE bot_id = :bot_id AND lower(coalesce(topic_key, '')) = :topic_key
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
{"bot_id": bot_id, "topic_key": topic_key},
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def align_postgres_sequences() -> None:
|
||||||
|
if engine.dialect.name != "postgresql":
|
||||||
|
return
|
||||||
|
sequence_targets = [
|
||||||
|
(BOT_MESSAGE_TABLE, "id"),
|
||||||
|
(BOT_REQUEST_USAGE_TABLE, "id"),
|
||||||
|
(BOT_ACTIVITY_EVENT_TABLE, "id"),
|
||||||
|
("skill_market_item", "id"),
|
||||||
|
("bot_skill_install", "id"),
|
||||||
|
]
|
||||||
|
with engine.connect() as conn:
|
||||||
|
for table_name, column_name in sequence_targets:
|
||||||
|
seq_name = conn.execute(
|
||||||
|
text("SELECT pg_get_serial_sequence(:table_name, :column_name)"),
|
||||||
|
{"table_name": table_name, "column_name": column_name},
|
||||||
|
).scalar()
|
||||||
|
if not seq_name:
|
||||||
|
continue
|
||||||
|
max_id = conn.execute(
|
||||||
|
text(f'SELECT COALESCE(MAX("{column_name}"), 0) FROM "{table_name}"')
|
||||||
|
).scalar()
|
||||||
|
max_id = int(max_id or 0)
|
||||||
|
conn.execute(
|
||||||
|
text("SELECT setval(:seq_name, :next_value, :is_called)"),
|
||||||
|
{
|
||||||
|
"seq_name": seq_name,
|
||||||
|
"next_value": max_id if max_id > 0 else 1,
|
||||||
|
"is_called": max_id > 0,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
def init_database() -> None:
|
def init_database() -> None:
|
||||||
with engine.connect() as conn:
|
lock_conn = _acquire_migration_lock()
|
||||||
conn.execute(text("SELECT 1"))
|
try:
|
||||||
_validate_required_tables()
|
_rename_legacy_tables()
|
||||||
_validate_required_sys_settings()
|
SQLModel.metadata.create_all(engine)
|
||||||
|
_migrate_legacy_table_rows()
|
||||||
|
_drop_legacy_skill_tables()
|
||||||
|
_ensure_sys_setting_columns()
|
||||||
|
_ensure_bot_request_usage_columns()
|
||||||
|
_ensure_managed_node_columns()
|
||||||
|
_ensure_botinstance_columns()
|
||||||
|
_ensure_botinstance_indexes()
|
||||||
|
_ensure_managed_node_indexes()
|
||||||
|
_drop_legacy_botinstance_columns()
|
||||||
|
_repair_postgres_topic_foreign_keys()
|
||||||
|
_ensure_topic_columns()
|
||||||
|
_ensure_topic_indexes()
|
||||||
|
_drop_obsolete_topic_tables()
|
||||||
|
_cleanup_legacy_default_topics()
|
||||||
|
_drop_legacy_tables()
|
||||||
|
align_postgres_sequences()
|
||||||
|
with Session(engine) as session:
|
||||||
|
seed_sys_auth(session)
|
||||||
|
finally:
|
||||||
|
_release_migration_lock(lock_conn)
|
||||||
|
|
||||||
|
|
||||||
def get_session():
|
def get_session():
|
||||||
|
|
|
||||||
|
|
@ -1,7 +0,0 @@
|
||||||
from core.docker_manager import BotDockerManager
|
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT, DOCKER_NETWORK_NAME
|
|
||||||
|
|
||||||
docker_manager = BotDockerManager(
|
|
||||||
host_data_root=BOTS_WORKSPACE_ROOT,
|
|
||||||
network_name=DOCKER_NETWORK_NAME,
|
|
||||||
)
|
|
||||||
|
|
@ -11,23 +11,7 @@ import docker
|
||||||
|
|
||||||
|
|
||||||
class BotDockerManager:
|
class BotDockerManager:
|
||||||
_RUNTIME_BOOTSTRAP_LABEL_KEY = "dashboard.runtime_bootstrap"
|
def __init__(self, host_data_root: str, base_image: str = "nanobot-base:v0.1.4"):
|
||||||
_RUNTIME_BOOTSTRAP_LABEL_VALUE = "env-json-v1"
|
|
||||||
_DASHBOARD_READY_LOG_MARKERS = (
|
|
||||||
"nanobot.channels.dashboard:start",
|
|
||||||
"dashboard channel 代理已上线",
|
|
||||||
)
|
|
||||||
_DASHBOARD_FAILURE_LOG_MARKERS = (
|
|
||||||
"failed to start channel dashboard",
|
|
||||||
"dashboard channel not available",
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
host_data_root: str,
|
|
||||||
base_image: str = "nanobot-base",
|
|
||||||
network_name: str = "",
|
|
||||||
):
|
|
||||||
try:
|
try:
|
||||||
self.client = docker.from_env(timeout=6)
|
self.client = docker.from_env(timeout=6)
|
||||||
self.client.version()
|
self.client.version()
|
||||||
|
|
@ -38,11 +22,8 @@ class BotDockerManager:
|
||||||
|
|
||||||
self.host_data_root = host_data_root
|
self.host_data_root = host_data_root
|
||||||
self.base_image = base_image
|
self.base_image = base_image
|
||||||
self.network_name = str(network_name or "").strip()
|
|
||||||
self.active_monitors = {}
|
self.active_monitors = {}
|
||||||
self._last_delivery_error: Dict[str, str] = {}
|
self._last_delivery_error: Dict[str, str] = {}
|
||||||
self._storage_limit_supported: Optional[bool] = None
|
|
||||||
self._storage_limit_warning_emitted = False
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _normalize_resource_limits(
|
def _normalize_resource_limits(
|
||||||
|
|
@ -107,282 +88,6 @@ class BotDockerManager:
|
||||||
print(f"[DockerManager] list_images_by_repo failed: {e}")
|
print(f"[DockerManager] list_images_by_repo failed: {e}")
|
||||||
return rows
|
return rows
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _docker_error_message(exc: Exception) -> str:
|
|
||||||
explanation = getattr(exc, "explanation", None)
|
|
||||||
if isinstance(explanation, bytes):
|
|
||||||
try:
|
|
||||||
explanation = explanation.decode("utf-8", errors="replace")
|
|
||||||
except Exception:
|
|
||||||
explanation = str(explanation)
|
|
||||||
if explanation:
|
|
||||||
return str(explanation)
|
|
||||||
response = getattr(exc, "response", None)
|
|
||||||
text = getattr(response, "text", None)
|
|
||||||
if text:
|
|
||||||
return str(text)
|
|
||||||
return str(exc)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _is_unsupported_storage_opt_error(cls, exc: Exception) -> bool:
|
|
||||||
message = cls._docker_error_message(exc).lower()
|
|
||||||
if "storage-opt" not in message and "storage opt" not in message:
|
|
||||||
return False
|
|
||||||
markers = (
|
|
||||||
"overlay over xfs",
|
|
||||||
"overlay2 over xfs",
|
|
||||||
"pquota",
|
|
||||||
"project quota",
|
|
||||||
"storage driver does not support",
|
|
||||||
"xfs",
|
|
||||||
)
|
|
||||||
return any(marker in message for marker in markers)
|
|
||||||
|
|
||||||
def _cleanup_container_if_exists(self, container_name: str) -> None:
|
|
||||||
if not self.client:
|
|
||||||
return
|
|
||||||
try:
|
|
||||||
container = self.client.containers.get(container_name)
|
|
||||||
container.remove(force=True)
|
|
||||||
except docker.errors.NotFound:
|
|
||||||
pass
|
|
||||||
except Exception as e:
|
|
||||||
print(f"[DockerManager] failed to cleanup container {container_name}: {e}")
|
|
||||||
|
|
||||||
def _resolve_container_network(self) -> str:
|
|
||||||
if not self.client or not self.network_name:
|
|
||||||
return "bridge"
|
|
||||||
try:
|
|
||||||
self.client.networks.get(self.network_name)
|
|
||||||
return self.network_name
|
|
||||||
except docker.errors.NotFound:
|
|
||||||
print(f"[DockerManager] network '{self.network_name}' not found; falling back to bridge")
|
|
||||||
except Exception as e:
|
|
||||||
print(f"[DockerManager] failed to inspect network '{self.network_name}': {e}; falling back to bridge")
|
|
||||||
return "bridge"
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _container_uses_network(container: Any, network_name: str) -> bool:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
network_settings = attrs.get("NetworkSettings") or {}
|
|
||||||
networks = network_settings.get("Networks") or {}
|
|
||||||
if network_name in networks:
|
|
||||||
return True
|
|
||||||
if network_name == "bridge" and not networks and str(network_settings.get("IPAddress") or "").strip():
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _get_container_network_ip(container: Any, preferred_network: str = "") -> str:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
network_settings = attrs.get("NetworkSettings") or {}
|
|
||||||
networks = network_settings.get("Networks") or {}
|
|
||||||
|
|
||||||
if preferred_network:
|
|
||||||
preferred = networks.get(preferred_network) or {}
|
|
||||||
preferred_ip = str(preferred.get("IPAddress") or "").strip()
|
|
||||||
if preferred_ip:
|
|
||||||
return preferred_ip
|
|
||||||
|
|
||||||
for network in networks.values():
|
|
||||||
ip_address = str((network or {}).get("IPAddress") or "").strip()
|
|
||||||
if ip_address:
|
|
||||||
return ip_address
|
|
||||||
|
|
||||||
return str(network_settings.get("IPAddress") or "").strip()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _container_uses_expected_bootstrap(cls, container: Any) -> bool:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
config = attrs.get("Config") or {}
|
|
||||||
labels = config.get("Labels") or {}
|
|
||||||
return str(labels.get(cls._RUNTIME_BOOTSTRAP_LABEL_KEY) or "").strip() == cls._RUNTIME_BOOTSTRAP_LABEL_VALUE
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _runtime_bootstrap_entrypoint() -> List[str]:
|
|
||||||
bootstrap_code = "\n".join(
|
|
||||||
[
|
|
||||||
"import json",
|
|
||||||
"import os",
|
|
||||||
"import pathlib",
|
|
||||||
"import re",
|
|
||||||
"",
|
|
||||||
"path = pathlib.Path('/root/.nanobot/env.json')",
|
|
||||||
"pattern = re.compile(r'^[A-Z_][A-Z0-9_]{0,127}$')",
|
|
||||||
"data = {}",
|
|
||||||
"if path.is_file():",
|
|
||||||
" try:",
|
|
||||||
" data = json.loads(path.read_text(encoding='utf-8'))",
|
|
||||||
" except Exception:",
|
|
||||||
" data = {}",
|
|
||||||
"if not isinstance(data, dict):",
|
|
||||||
" data = {}",
|
|
||||||
"for raw_key, raw_value in data.items():",
|
|
||||||
" key = str(raw_key or '').strip().upper()",
|
|
||||||
" if not pattern.fullmatch(key):",
|
|
||||||
" continue",
|
|
||||||
" os.environ[key] = str(raw_value or '').strip()",
|
|
||||||
"os.execvp('nanobot', ['nanobot', 'gateway'])",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
return [
|
|
||||||
"python",
|
|
||||||
"-c",
|
|
||||||
bootstrap_code,
|
|
||||||
]
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _container_has_mount(container: Any, source: str, destination: str) -> bool:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
mounts = attrs.get("Mounts") or []
|
|
||||||
expected_source = os.path.normpath(source)
|
|
||||||
expected_destination = str(destination or "").strip()
|
|
||||||
for mount in mounts:
|
|
||||||
if not isinstance(mount, dict):
|
|
||||||
continue
|
|
||||||
current_source = os.path.normpath(str(mount.get("Source") or ""))
|
|
||||||
current_destination = str(mount.get("Destination") or "").strip()
|
|
||||||
if current_source != expected_source or current_destination != expected_destination:
|
|
||||||
continue
|
|
||||||
if mount.get("RW") is False:
|
|
||||||
continue
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _desired_memory_bytes(memory_mb: int) -> int:
|
|
||||||
return int(memory_mb) * 1024 * 1024 if int(memory_mb or 0) > 0 else 0
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _desired_storage_bytes(storage_gb: int) -> Optional[int]:
|
|
||||||
storage = int(storage_gb or 0)
|
|
||||||
if storage <= 0:
|
|
||||||
return None
|
|
||||||
return storage * 1024 * 1024 * 1024
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _get_container_cpu_cores(container: Any) -> float:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
host_cfg = attrs.get("HostConfig") or {}
|
|
||||||
nano_cpus = int(host_cfg.get("NanoCpus") or 0)
|
|
||||||
if nano_cpus > 0:
|
|
||||||
return nano_cpus / 1_000_000_000
|
|
||||||
cpu_quota = int(host_cfg.get("CpuQuota") or 0)
|
|
||||||
cpu_period = int(host_cfg.get("CpuPeriod") or 0)
|
|
||||||
if cpu_quota > 0 and cpu_period > 0:
|
|
||||||
return cpu_quota / cpu_period
|
|
||||||
return 0.0
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _normalize_image_id(raw: Any) -> str:
|
|
||||||
text = str(raw or "").strip().lower()
|
|
||||||
if text.startswith("sha256:"):
|
|
||||||
return text[7:]
|
|
||||||
return text
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _get_container_image_id(cls, container: Any) -> str:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
image_id = attrs.get("Image")
|
|
||||||
if image_id:
|
|
||||||
return cls._normalize_image_id(image_id)
|
|
||||||
image = getattr(container, "image", None)
|
|
||||||
return cls._normalize_image_id(getattr(image, "id", ""))
|
|
||||||
|
|
||||||
def _resolve_image_id(self, image_ref: str) -> str:
|
|
||||||
if not self.client:
|
|
||||||
return ""
|
|
||||||
try:
|
|
||||||
image = self.client.images.get(image_ref)
|
|
||||||
except Exception as e:
|
|
||||||
print(f"[DockerManager] failed to resolve image id for {image_ref}: {e}")
|
|
||||||
return ""
|
|
||||||
return self._normalize_image_id(getattr(image, "id", ""))
|
|
||||||
|
|
||||||
def _container_storage_matches(self, actual_storage_bytes: Optional[int], desired_storage_gb: int) -> bool:
|
|
||||||
expected_storage_bytes = self._desired_storage_bytes(desired_storage_gb)
|
|
||||||
if expected_storage_bytes is None:
|
|
||||||
return actual_storage_bytes in {None, 0}
|
|
||||||
if actual_storage_bytes == expected_storage_bytes:
|
|
||||||
return True
|
|
||||||
return actual_storage_bytes is None and self._storage_limit_supported is not True
|
|
||||||
|
|
||||||
def _container_matches_runtime(
|
|
||||||
self,
|
|
||||||
container: Any,
|
|
||||||
*,
|
|
||||||
image_id: str,
|
|
||||||
cpu_cores: float,
|
|
||||||
memory_mb: int,
|
|
||||||
storage_gb: int,
|
|
||||||
bot_workspace: str,
|
|
||||||
network_name: str,
|
|
||||||
) -> bool:
|
|
||||||
attrs = getattr(container, "attrs", {}) or {}
|
|
||||||
host_cfg = attrs.get("HostConfig") or {}
|
|
||||||
current_image_id = self._get_container_image_id(container)
|
|
||||||
desired_image_id = self._normalize_image_id(image_id)
|
|
||||||
if not desired_image_id or not current_image_id or current_image_id != desired_image_id:
|
|
||||||
return False
|
|
||||||
if not self._container_uses_expected_bootstrap(container):
|
|
||||||
return False
|
|
||||||
if not self._container_uses_network(container, network_name):
|
|
||||||
return False
|
|
||||||
if not self._container_has_mount(container, bot_workspace, "/root/.nanobot"):
|
|
||||||
return False
|
|
||||||
|
|
||||||
actual_memory_bytes = int(host_cfg.get("Memory") or 0)
|
|
||||||
if actual_memory_bytes != self._desired_memory_bytes(memory_mb):
|
|
||||||
return False
|
|
||||||
|
|
||||||
desired_cpu = float(cpu_cores or 0)
|
|
||||||
actual_cpu = self._get_container_cpu_cores(container)
|
|
||||||
if abs(actual_cpu - desired_cpu) > 0.01:
|
|
||||||
return False
|
|
||||||
|
|
||||||
storage_opt = host_cfg.get("StorageOpt") or {}
|
|
||||||
actual_storage_bytes = self._parse_size_to_bytes(storage_opt.get("size"))
|
|
||||||
if not self._container_storage_matches(actual_storage_bytes, storage_gb):
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _run_container_with_storage_fallback(
|
|
||||||
self,
|
|
||||||
bot_id: str,
|
|
||||||
container_name: str,
|
|
||||||
storage_gb: int,
|
|
||||||
**base_kwargs: Any,
|
|
||||||
):
|
|
||||||
if not self.client:
|
|
||||||
raise RuntimeError("Docker client is not available")
|
|
||||||
if storage_gb <= 0:
|
|
||||||
return self.client.containers.run(**base_kwargs)
|
|
||||||
if self._storage_limit_supported is False:
|
|
||||||
return self.client.containers.run(**base_kwargs)
|
|
||||||
|
|
||||||
try:
|
|
||||||
container = self.client.containers.run(
|
|
||||||
storage_opt={"size": f"{storage_gb}G"},
|
|
||||||
**base_kwargs,
|
|
||||||
)
|
|
||||||
self._storage_limit_supported = True
|
|
||||||
return container
|
|
||||||
except Exception as exc:
|
|
||||||
if not self._is_unsupported_storage_opt_error(exc):
|
|
||||||
raise
|
|
||||||
self._storage_limit_supported = False
|
|
||||||
if not self._storage_limit_warning_emitted:
|
|
||||||
print(
|
|
||||||
"[DockerManager] storage limit not supported by current Docker storage driver; "
|
|
||||||
f"falling back to unlimited container filesystem size. Details: {self._docker_error_message(exc)}"
|
|
||||||
)
|
|
||||||
self._storage_limit_warning_emitted = True
|
|
||||||
else:
|
|
||||||
print(f"[DockerManager] storage limit skipped for {bot_id}: unsupported by current Docker storage driver")
|
|
||||||
self._cleanup_container_if_exists(container_name)
|
|
||||||
return self.client.containers.run(**base_kwargs)
|
|
||||||
|
|
||||||
def start_bot(
|
def start_bot(
|
||||||
self,
|
self,
|
||||||
bot_id: str,
|
bot_id: str,
|
||||||
|
|
@ -401,30 +106,22 @@ class BotDockerManager:
|
||||||
if not self.has_image(image):
|
if not self.has_image(image):
|
||||||
print(f"❌ 错误: 镜像不存在: {image}")
|
print(f"❌ 错误: 镜像不存在: {image}")
|
||||||
return False
|
return False
|
||||||
desired_image_id = self._resolve_image_id(image)
|
|
||||||
if not desired_image_id:
|
|
||||||
print(f"❌ 错误: 无法解析镜像 ID: {image}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
bot_workspace = os.path.join(self.host_data_root, bot_id, ".nanobot")
|
bot_workspace = os.path.join(self.host_data_root, bot_id, ".nanobot")
|
||||||
container_name = f"worker_{bot_id}"
|
container_name = f"worker_{bot_id}"
|
||||||
os.makedirs(bot_workspace, exist_ok=True)
|
os.makedirs(bot_workspace, exist_ok=True)
|
||||||
cpu, memory, storage = self._normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
|
cpu, memory, storage = self._normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
|
||||||
target_network = self._resolve_container_network()
|
|
||||||
base_kwargs = {
|
base_kwargs = {
|
||||||
"image": image,
|
"image": image,
|
||||||
"name": container_name,
|
"name": container_name,
|
||||||
"detach": True,
|
"detach": True,
|
||||||
"stdin_open": True,
|
"stdin_open": True,
|
||||||
"tty": True,
|
"tty": True,
|
||||||
"entrypoint": self._runtime_bootstrap_entrypoint(),
|
"environment": env_vars or {},
|
||||||
"labels": {
|
|
||||||
self._RUNTIME_BOOTSTRAP_LABEL_KEY: self._RUNTIME_BOOTSTRAP_LABEL_VALUE,
|
|
||||||
},
|
|
||||||
"volumes": {
|
"volumes": {
|
||||||
bot_workspace: {"bind": "/root/.nanobot", "mode": "rw"},
|
bot_workspace: {"bind": "/root/.nanobot", "mode": "rw"},
|
||||||
},
|
},
|
||||||
"network": target_network,
|
"network_mode": "bridge",
|
||||||
}
|
}
|
||||||
if memory > 0:
|
if memory > 0:
|
||||||
base_kwargs["mem_limit"] = f"{memory}m"
|
base_kwargs["mem_limit"] = f"{memory}m"
|
||||||
|
|
@ -435,46 +132,27 @@ class BotDockerManager:
|
||||||
try:
|
try:
|
||||||
container = self.client.containers.get(container_name)
|
container = self.client.containers.get(container_name)
|
||||||
container.reload()
|
container.reload()
|
||||||
runtime_matches = self._container_matches_runtime(
|
if container.status == "running":
|
||||||
container,
|
|
||||||
image_id=desired_image_id,
|
|
||||||
cpu_cores=cpu,
|
|
||||||
memory_mb=memory,
|
|
||||||
storage_gb=storage,
|
|
||||||
bot_workspace=bot_workspace,
|
|
||||||
network_name=target_network,
|
|
||||||
)
|
|
||||||
if container.status in {"running", "restarting"} and runtime_matches:
|
|
||||||
if on_state_change:
|
if on_state_change:
|
||||||
self.ensure_monitor(bot_id, on_state_change)
|
self.ensure_monitor(bot_id, on_state_change)
|
||||||
return True
|
return True
|
||||||
if container.status in {"running", "restarting"}:
|
container.remove(force=True)
|
||||||
if not self._container_uses_network(container, target_network):
|
|
||||||
print(
|
|
||||||
f"[DockerManager] recreating {container_name} to switch network "
|
|
||||||
f"from current attachment to '{target_network}'"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
print(f"[DockerManager] recreating {container_name} because container config no longer matches desired runtime")
|
|
||||||
container.remove(force=True)
|
|
||||||
elif runtime_matches:
|
|
||||||
container.start()
|
|
||||||
if on_state_change:
|
|
||||||
self.ensure_monitor(bot_id, on_state_change)
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print(f"[DockerManager] recreating {container_name} because container config no longer matches desired runtime")
|
|
||||||
container.remove(force=True)
|
|
||||||
except docker.errors.NotFound:
|
except docker.errors.NotFound:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
container = None
|
container = None
|
||||||
container = self._run_container_with_storage_fallback(
|
if storage > 0:
|
||||||
bot_id,
|
try:
|
||||||
container_name,
|
container = self.client.containers.run(
|
||||||
storage,
|
storage_opt={"size": f"{storage}G"},
|
||||||
**base_kwargs,
|
**base_kwargs,
|
||||||
)
|
)
|
||||||
|
except Exception as e:
|
||||||
|
# Some Docker engines (e.g. Desktop/overlay2) may not support size storage option.
|
||||||
|
print(f"[DockerManager] storage limit not applied for {bot_id}: {e}")
|
||||||
|
container = self.client.containers.run(**base_kwargs)
|
||||||
|
else:
|
||||||
|
container = self.client.containers.run(**base_kwargs)
|
||||||
|
|
||||||
if on_state_change:
|
if on_state_change:
|
||||||
monitor_thread = threading.Thread(
|
monitor_thread = threading.Thread(
|
||||||
|
|
@ -516,17 +194,14 @@ class BotDockerManager:
|
||||||
print(f"[DockerManager] Error ensuring monitor for {bot_id}: {e}")
|
print(f"[DockerManager] Error ensuring monitor for {bot_id}: {e}")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def stop_bot(self, bot_id: str, remove: bool = False) -> bool:
|
def stop_bot(self, bot_id: str) -> bool:
|
||||||
if not self.client:
|
if not self.client:
|
||||||
return False
|
return False
|
||||||
container_name = f"worker_{bot_id}"
|
container_name = f"worker_{bot_id}"
|
||||||
try:
|
try:
|
||||||
container = self.client.containers.get(container_name)
|
container = self.client.containers.get(container_name)
|
||||||
container.reload()
|
container.stop(timeout=5)
|
||||||
if str(container.status or "").strip().lower() in {"running", "restarting", "paused"}:
|
container.remove()
|
||||||
container.stop(timeout=5)
|
|
||||||
if remove:
|
|
||||||
container.remove()
|
|
||||||
self.active_monitors.pop(bot_id, None)
|
self.active_monitors.pop(bot_id, None)
|
||||||
return True
|
return True
|
||||||
except docker.errors.NotFound:
|
except docker.errors.NotFound:
|
||||||
|
|
@ -544,11 +219,6 @@ class BotDockerManager:
|
||||||
media_paths = [str(v).strip().replace("\\", "/") for v in (media or []) if str(v).strip()]
|
media_paths = [str(v).strip().replace("\\", "/") for v in (media or []) if str(v).strip()]
|
||||||
self._last_delivery_error.pop(bot_id, None)
|
self._last_delivery_error.pop(bot_id, None)
|
||||||
|
|
||||||
if not self._wait_for_dashboard_ready(bot_id):
|
|
||||||
if bot_id not in self._last_delivery_error:
|
|
||||||
self._last_delivery_error[bot_id] = "Dashboard channel is not ready"
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Primary path on Docker Desktop/Mac: execute curl inside container namespace.
|
# Primary path on Docker Desktop/Mac: execute curl inside container namespace.
|
||||||
for attempt in range(3):
|
for attempt in range(3):
|
||||||
if self._send_command_via_exec(bot_id, command, media_paths):
|
if self._send_command_via_exec(bot_id, command, media_paths):
|
||||||
|
|
@ -567,45 +237,6 @@ class BotDockerManager:
|
||||||
def get_last_delivery_error(self, bot_id: str) -> str:
|
def get_last_delivery_error(self, bot_id: str) -> str:
|
||||||
return str(self._last_delivery_error.get(bot_id, "") or "").strip()
|
return str(self._last_delivery_error.get(bot_id, "") or "").strip()
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _log_indicates_dashboard_ready(cls, line: str) -> bool:
|
|
||||||
lowered = str(line or "").strip().lower()
|
|
||||||
return any(marker in lowered for marker in cls._DASHBOARD_READY_LOG_MARKERS)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _log_indicates_dashboard_failure(cls, line: str) -> bool:
|
|
||||||
lowered = str(line or "").strip().lower()
|
|
||||||
return any(marker in lowered for marker in cls._DASHBOARD_FAILURE_LOG_MARKERS)
|
|
||||||
|
|
||||||
def _wait_for_dashboard_ready(
|
|
||||||
self,
|
|
||||||
bot_id: str,
|
|
||||||
timeout_seconds: float = 15.0,
|
|
||||||
poll_interval_seconds: float = 0.5,
|
|
||||||
) -> bool:
|
|
||||||
deadline = time.monotonic() + max(1.0, timeout_seconds)
|
|
||||||
while time.monotonic() < deadline:
|
|
||||||
status = self.get_bot_status(bot_id)
|
|
||||||
if status != "RUNNING":
|
|
||||||
self._last_delivery_error[bot_id] = f"Container status is {status.lower()}"
|
|
||||||
return False
|
|
||||||
|
|
||||||
logs = self.get_recent_logs(bot_id, tail=200)
|
|
||||||
for line in logs:
|
|
||||||
if self._log_indicates_dashboard_failure(line):
|
|
||||||
detail = str(line or "").strip()
|
|
||||||
self._last_delivery_error[bot_id] = detail[:300] if detail else "Dashboard channel failed to start"
|
|
||||||
return False
|
|
||||||
if self._log_indicates_dashboard_ready(line):
|
|
||||||
return True
|
|
||||||
|
|
||||||
time.sleep(max(0.1, poll_interval_seconds))
|
|
||||||
|
|
||||||
self._last_delivery_error[bot_id] = (
|
|
||||||
f"Dashboard channel was not ready within {int(max(1.0, timeout_seconds))}s"
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_bot_status(self, bot_id: str) -> str:
|
def get_bot_status(self, bot_id: str) -> str:
|
||||||
"""Return normalized runtime status from Docker: RUNNING or STOPPED."""
|
"""Return normalized runtime status from Docker: RUNNING or STOPPED."""
|
||||||
if not self.client:
|
if not self.client:
|
||||||
|
|
@ -890,8 +521,7 @@ class BotDockerManager:
|
||||||
container_name = f"worker_{bot_id}"
|
container_name = f"worker_{bot_id}"
|
||||||
payload = {"message": command, "media": media or []}
|
payload = {"message": command, "media": media or []}
|
||||||
container = self.client.containers.get(container_name)
|
container = self.client.containers.get(container_name)
|
||||||
container.reload()
|
ip_address = container.attrs["NetworkSettings"]["IPAddress"] or "127.0.0.1"
|
||||||
ip_address = self._get_container_network_ip(container, preferred_network=self.network_name) or "127.0.0.1"
|
|
||||||
target_url = f"http://{ip_address}:9000/chat"
|
target_url = f"http://{ip_address}:9000/chat"
|
||||||
|
|
||||||
with httpx.Client(timeout=4.0) as client:
|
with httpx.Client(timeout=4.0) as client:
|
||||||
|
|
@ -908,65 +538,19 @@ class BotDockerManager:
|
||||||
self._last_delivery_error[bot_id] = reason
|
self._last_delivery_error[bot_id] = reason
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _read_log_lines_with_client(self, client, bot_id: str, tail: Optional[int] = None) -> List[str]:
|
def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]:
|
||||||
container = client.containers.get(f"worker_{bot_id}")
|
|
||||||
raw = container.logs(tail=max(1, int(tail))) if tail is not None else container.logs()
|
|
||||||
if isinstance(raw, (bytes, bytearray)):
|
|
||||||
text = raw.decode("utf-8", errors="ignore")
|
|
||||||
else:
|
|
||||||
text = str(raw or "")
|
|
||||||
return [line for line in text.splitlines() if line.strip()]
|
|
||||||
|
|
||||||
def _read_log_lines(self, bot_id: str, tail: Optional[int] = None) -> List[str]:
|
|
||||||
if not self.client:
|
if not self.client:
|
||||||
return []
|
return []
|
||||||
|
container_name = f"worker_{bot_id}"
|
||||||
try:
|
try:
|
||||||
return self._read_log_lines_with_client(self.client, bot_id, tail=tail)
|
container = self.client.containers.get(container_name)
|
||||||
|
raw = container.logs(tail=max(1, int(tail)))
|
||||||
|
text = raw.decode("utf-8", errors="ignore")
|
||||||
|
return [line for line in text.splitlines() if line.strip()]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"[DockerManager] Error reading logs for {bot_id}: {e}")
|
print(f"[DockerManager] Error reading logs for {bot_id}: {e}")
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def get_recent_logs(self, bot_id: str, tail: int = 300) -> List[str]:
|
|
||||||
return self._read_log_lines(bot_id, tail=max(1, int(tail)))
|
|
||||||
|
|
||||||
def get_logs_page(
|
|
||||||
self,
|
|
||||||
bot_id: str,
|
|
||||||
offset: int = 0,
|
|
||||||
limit: int = 50,
|
|
||||||
reverse: bool = True,
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
safe_offset = max(0, int(offset))
|
|
||||||
safe_limit = max(1, int(limit))
|
|
||||||
if reverse:
|
|
||||||
# Docker logs API supports tail but not arbitrary offsets. For reverse pagination
|
|
||||||
# we only read the minimal newest slice needed for the requested page.
|
|
||||||
tail_count = safe_offset + safe_limit + 1
|
|
||||||
lines = self._read_log_lines(bot_id, tail=tail_count)
|
|
||||||
ordered = list(reversed(lines))
|
|
||||||
page = ordered[safe_offset:safe_offset + safe_limit]
|
|
||||||
has_more = len(lines) > safe_offset + safe_limit
|
|
||||||
return {
|
|
||||||
"logs": page,
|
|
||||||
"total": None,
|
|
||||||
"offset": safe_offset,
|
|
||||||
"limit": safe_limit,
|
|
||||||
"has_more": has_more,
|
|
||||||
"reverse": reverse,
|
|
||||||
}
|
|
||||||
|
|
||||||
lines = self._read_log_lines(bot_id, tail=None)
|
|
||||||
total = len(lines)
|
|
||||||
page = lines[safe_offset:safe_offset + safe_limit]
|
|
||||||
return {
|
|
||||||
"logs": page,
|
|
||||||
"total": total,
|
|
||||||
"offset": safe_offset,
|
|
||||||
"limit": safe_limit,
|
|
||||||
"has_more": safe_offset + safe_limit < total,
|
|
||||||
"reverse": reverse,
|
|
||||||
}
|
|
||||||
|
|
||||||
def _monitor_container_logs(self, bot_id: str, container, callback: Callable[[str, dict], None]):
|
def _monitor_container_logs(self, bot_id: str, container, callback: Callable[[str, dict], None]):
|
||||||
try:
|
try:
|
||||||
buffer = ""
|
buffer = ""
|
||||||
|
|
@ -1119,6 +703,12 @@ class BotDockerManager:
|
||||||
if response_match:
|
if response_match:
|
||||||
channel = response_match.group(1).strip().lower()
|
channel = response_match.group(1).strip().lower()
|
||||||
action_msg = response_match.group(2).strip()
|
action_msg = response_match.group(2).strip()
|
||||||
|
if channel == "dashboard":
|
||||||
|
return {
|
||||||
|
"type": "ASSISTANT_MESSAGE",
|
||||||
|
"channel": "dashboard",
|
||||||
|
"text": action_msg[:4000],
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
"type": "AGENT_STATE",
|
"type": "AGENT_STATE",
|
||||||
"channel": channel,
|
"channel": channel,
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,7 @@
|
||||||
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import shutil
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Final
|
from typing import Final
|
||||||
from urllib.parse import urlsplit, urlunsplit
|
from urllib.parse import urlsplit, urlunsplit
|
||||||
|
|
@ -29,6 +31,13 @@ for _k, _v in _prod_env_values.items():
|
||||||
os.environ[_k] = str(_v)
|
os.environ[_k] = str(_v)
|
||||||
|
|
||||||
|
|
||||||
|
def _env_text(name: str, default: str) -> str:
|
||||||
|
raw = os.getenv(name)
|
||||||
|
if raw is None:
|
||||||
|
return default
|
||||||
|
return str(raw).replace("\\n", "\n")
|
||||||
|
|
||||||
|
|
||||||
def _env_bool(name: str, default: bool) -> bool:
|
def _env_bool(name: str, default: bool) -> bool:
|
||||||
raw = os.getenv(name)
|
raw = os.getenv(name)
|
||||||
if raw is None:
|
if raw is None:
|
||||||
|
|
@ -76,32 +85,6 @@ def _env_extensions(name: str, default: tuple[str, ...]) -> tuple[str, ...]:
|
||||||
return tuple(rows)
|
return tuple(rows)
|
||||||
|
|
||||||
|
|
||||||
def _normalize_origin(raw: str) -> str:
|
|
||||||
text = str(raw or "").strip()
|
|
||||||
if not text:
|
|
||||||
return ""
|
|
||||||
try:
|
|
||||||
parsed = urlsplit(text)
|
|
||||||
except Exception:
|
|
||||||
return ""
|
|
||||||
scheme = str(parsed.scheme or "").strip().lower()
|
|
||||||
netloc = str(parsed.netloc or "").strip().lower()
|
|
||||||
if scheme not in {"http", "https"} or not netloc:
|
|
||||||
return ""
|
|
||||||
return urlunsplit((scheme, netloc, "", "", ""))
|
|
||||||
|
|
||||||
|
|
||||||
def _env_origins(name: str, default: tuple[str, ...]) -> tuple[str, ...]:
|
|
||||||
raw = os.getenv(name)
|
|
||||||
source = list(default) if raw is None else re.split(r"[,;\s]+", str(raw))
|
|
||||||
rows: list[str] = []
|
|
||||||
for item in source:
|
|
||||||
origin = _normalize_origin(item)
|
|
||||||
if origin and origin not in rows:
|
|
||||||
rows.append(origin)
|
|
||||||
return tuple(rows)
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_dir_path(path_value: str) -> str:
|
def _normalize_dir_path(path_value: str) -> str:
|
||||||
raw = str(path_value or "").strip()
|
raw = str(path_value or "").strip()
|
||||||
if not raw:
|
if not raw:
|
||||||
|
|
@ -113,32 +96,35 @@ def _normalize_dir_path(path_value: str) -> str:
|
||||||
return str((BACKEND_ROOT / p).resolve())
|
return str((BACKEND_ROOT / p).resolve())
|
||||||
|
|
||||||
|
|
||||||
|
def _load_json_object(path: Path) -> dict[str, object]:
|
||||||
|
try:
|
||||||
|
with open(path, "r", encoding="utf-8") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
if isinstance(data, dict):
|
||||||
|
return data
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def _read_template_md(raw: object) -> str:
|
||||||
|
if raw is None:
|
||||||
|
return ""
|
||||||
|
return str(raw).replace("\r\n", "\n").strip()
|
||||||
|
|
||||||
|
|
||||||
DATA_ROOT: Final[str] = _normalize_dir_path(os.getenv("DATA_ROOT", str(PROJECT_ROOT / "data")))
|
DATA_ROOT: Final[str] = _normalize_dir_path(os.getenv("DATA_ROOT", str(PROJECT_ROOT / "data")))
|
||||||
BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path(
|
BOTS_WORKSPACE_ROOT: Final[str] = _normalize_dir_path(
|
||||||
os.getenv("BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots"))
|
os.getenv("BOTS_WORKSPACE_ROOT", str(PROJECT_ROOT / "workspace" / "bots"))
|
||||||
)
|
)
|
||||||
RUNTIME_DATA_ROOT: Final[Path] = Path(DATA_ROOT).resolve()
|
|
||||||
RUNTIME_TEMPLATES_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "templates").resolve()
|
|
||||||
RUNTIME_SKILLS_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "skills").resolve()
|
|
||||||
RUNTIME_MODEL_ROOT: Final[Path] = (RUNTIME_DATA_ROOT / "model").resolve()
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_database_url(url: str) -> str:
|
def _normalize_database_url(url: str) -> str:
|
||||||
raw = str(url or "").strip()
|
return str(url or "").strip()
|
||||||
prefix = "sqlite:///"
|
|
||||||
if not raw.startswith(prefix):
|
|
||||||
return raw
|
|
||||||
path_part = raw[len(prefix) :]
|
|
||||||
if not path_part or path_part.startswith("/"):
|
|
||||||
return raw
|
|
||||||
abs_path = (BACKEND_ROOT / path_part).resolve()
|
|
||||||
return f"{prefix}{abs_path.as_posix()}"
|
|
||||||
|
|
||||||
|
|
||||||
def _database_engine(url: str) -> str:
|
def _database_engine(url: str) -> str:
|
||||||
raw = str(url or "").strip().lower()
|
raw = str(url or "").strip().lower()
|
||||||
if raw.startswith("sqlite"):
|
|
||||||
return "sqlite"
|
|
||||||
if raw.startswith("postgresql"):
|
if raw.startswith("postgresql"):
|
||||||
return "postgresql"
|
return "postgresql"
|
||||||
if raw.startswith("mysql"):
|
if raw.startswith("mysql"):
|
||||||
|
|
@ -152,7 +138,7 @@ def _database_engine(url: str) -> str:
|
||||||
|
|
||||||
def _mask_database_url(url: str) -> str:
|
def _mask_database_url(url: str) -> str:
|
||||||
raw = str(url or "").strip()
|
raw = str(url or "").strip()
|
||||||
if not raw or raw.startswith("sqlite"):
|
if not raw:
|
||||||
return raw
|
return raw
|
||||||
try:
|
try:
|
||||||
parsed = urlsplit(raw)
|
parsed = urlsplit(raw)
|
||||||
|
|
@ -174,10 +160,11 @@ def _mask_database_url(url: str) -> str:
|
||||||
|
|
||||||
_db_env = str(os.getenv("DATABASE_URL") or "").strip()
|
_db_env = str(os.getenv("DATABASE_URL") or "").strip()
|
||||||
if not _db_env:
|
if not _db_env:
|
||||||
raise RuntimeError("DATABASE_URL is not set in environment. PostgreSQL is required.")
|
raise RuntimeError("DATABASE_URL is required")
|
||||||
|
|
||||||
DATABASE_URL: Final[str] = _normalize_database_url(_db_env)
|
DATABASE_URL: Final[str] = _normalize_database_url(_db_env)
|
||||||
DATABASE_ENGINE: Final[str] = _database_engine(DATABASE_URL)
|
DATABASE_ENGINE: Final[str] = _database_engine(DATABASE_URL)
|
||||||
|
if DATABASE_ENGINE not in {"postgresql", "mysql"}:
|
||||||
|
raise RuntimeError(f"Unsupported DATABASE_URL engine: {DATABASE_ENGINE}")
|
||||||
DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL)
|
DATABASE_URL_DISPLAY: Final[str] = _mask_database_url(DATABASE_URL)
|
||||||
DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True)
|
DATABASE_ECHO: Final[bool] = _env_bool("DATABASE_ECHO", True)
|
||||||
DATABASE_POOL_SIZE: Final[int] = _env_int("DATABASE_POOL_SIZE", 20, 1, 200)
|
DATABASE_POOL_SIZE: Final[int] = _env_int("DATABASE_POOL_SIZE", 20, 1, 200)
|
||||||
|
|
@ -187,8 +174,7 @@ DATABASE_POOL_RECYCLE: Final[int] = _env_int("DATABASE_POOL_RECYCLE", 1800, 30,
|
||||||
DEFAULT_UPLOAD_MAX_MB: Final[int] = 100
|
DEFAULT_UPLOAD_MAX_MB: Final[int] = 100
|
||||||
DEFAULT_PAGE_SIZE: Final[int] = 10
|
DEFAULT_PAGE_SIZE: Final[int] = 10
|
||||||
DEFAULT_CHAT_PULL_PAGE_SIZE: Final[int] = 60
|
DEFAULT_CHAT_PULL_PAGE_SIZE: Final[int] = 60
|
||||||
DEFAULT_AUTH_TOKEN_TTL_HOURS: Final[int] = _env_int("AUTH_TOKEN_TTL_HOURS", 24, 1, 720)
|
DEFAULT_COMMAND_AUTO_UNLOCK_SECONDS: Final[int] = _env_int("COMMAND_AUTO_UNLOCK_SECONDS", 10, 1, 600)
|
||||||
DEFAULT_AUTH_TOKEN_MAX_ACTIVE: Final[int] = _env_int("AUTH_TOKEN_MAX_ACTIVE", 2, 1, 20)
|
|
||||||
DEFAULT_BOT_SYSTEM_TIMEZONE: Final[str] = str(
|
DEFAULT_BOT_SYSTEM_TIMEZONE: Final[str] = str(
|
||||||
os.getenv("DEFAULT_BOT_SYSTEM_TIMEZONE") or os.getenv("TZ") or "Asia/Shanghai"
|
os.getenv("DEFAULT_BOT_SYSTEM_TIMEZONE") or os.getenv("TZ") or "Asia/Shanghai"
|
||||||
).strip() or "Asia/Shanghai"
|
).strip() or "Asia/Shanghai"
|
||||||
|
|
@ -208,7 +194,7 @@ DEFAULT_WORKSPACE_DOWNLOAD_EXTENSIONS: Final[tuple[str, ...]] = (
|
||||||
)
|
)
|
||||||
STT_ENABLED_DEFAULT: Final[bool] = True
|
STT_ENABLED_DEFAULT: Final[bool] = True
|
||||||
STT_MODEL: Final[str] = str(os.getenv("STT_MODEL") or "ggml-small-q8_0.bin").strip()
|
STT_MODEL: Final[str] = str(os.getenv("STT_MODEL") or "ggml-small-q8_0.bin").strip()
|
||||||
_DEFAULT_STT_MODEL_DIR: Final[Path] = RUNTIME_MODEL_ROOT
|
_DEFAULT_STT_MODEL_DIR: Final[Path] = (Path(DATA_ROOT) / "model").resolve()
|
||||||
_configured_stt_model_dir = _normalize_dir_path(os.getenv("STT_MODEL_DIR", str(_DEFAULT_STT_MODEL_DIR)))
|
_configured_stt_model_dir = _normalize_dir_path(os.getenv("STT_MODEL_DIR", str(_DEFAULT_STT_MODEL_DIR)))
|
||||||
if _configured_stt_model_dir and not Path(_configured_stt_model_dir).exists() and _DEFAULT_STT_MODEL_DIR.exists():
|
if _configured_stt_model_dir and not Path(_configured_stt_model_dir).exists() and _DEFAULT_STT_MODEL_DIR.exists():
|
||||||
STT_MODEL_DIR: Final[str] = str(_DEFAULT_STT_MODEL_DIR)
|
STT_MODEL_DIR: Final[str] = str(_DEFAULT_STT_MODEL_DIR)
|
||||||
|
|
@ -228,21 +214,74 @@ REDIS_ENABLED: Final[bool] = _env_bool("REDIS_ENABLED", False)
|
||||||
REDIS_URL: Final[str] = str(os.getenv("REDIS_URL") or "").strip()
|
REDIS_URL: Final[str] = str(os.getenv("REDIS_URL") or "").strip()
|
||||||
REDIS_PREFIX: Final[str] = str(os.getenv("REDIS_PREFIX") or "dashboard_nanobot").strip() or "dashboard_nanobot"
|
REDIS_PREFIX: Final[str] = str(os.getenv("REDIS_PREFIX") or "dashboard_nanobot").strip() or "dashboard_nanobot"
|
||||||
REDIS_DEFAULT_TTL: Final[int] = _env_int("REDIS_DEFAULT_TTL", 60, 1, 86400)
|
REDIS_DEFAULT_TTL: Final[int] = _env_int("REDIS_DEFAULT_TTL", 60, 1, 86400)
|
||||||
PANEL_ACCESS_PASSWORD: Final[str] = str(os.getenv("PANEL_ACCESS_PASSWORD") or "").strip()
|
JWT_ALGORITHM: Final[str] = "HS256"
|
||||||
CORS_ALLOWED_ORIGINS: Final[tuple[str, ...]] = _env_origins(
|
JWT_SECRET: Final[str] = str(
|
||||||
"CORS_ALLOWED_ORIGINS",
|
os.getenv("JWT_SECRET")
|
||||||
(
|
or f"{PROJECT_ROOT.name}:{REDIS_PREFIX}:jwt"
|
||||||
"http://localhost:5173",
|
).strip()
|
||||||
"http://127.0.0.1:5173",
|
|
||||||
"http://localhost:4173",
|
|
||||||
"http://127.0.0.1:4173",
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
APP_HOST: Final[str] = str(os.getenv("APP_HOST") or "0.0.0.0").strip()
|
LEGACY_TEMPLATE_ROOT: Final[Path] = (BACKEND_ROOT / "templates").resolve()
|
||||||
APP_PORT: Final[int] = _env_int("APP_PORT", 8000, 1, 65535)
|
TEMPLATE_ROOT: Final[Path] = (Path(DATA_ROOT) / "templates").resolve()
|
||||||
APP_RELOAD: Final[bool] = _env_bool("APP_RELOAD", False)
|
TEMPLATE_ROOT.mkdir(parents=True, exist_ok=True)
|
||||||
DOCKER_NETWORK_NAME: Final[str] = str(os.getenv("DOCKER_NETWORK_NAME") or "").strip()
|
|
||||||
|
|
||||||
AGENT_MD_TEMPLATES_FILE: Final[Path] = RUNTIME_TEMPLATES_ROOT / "agent_md_templates.json"
|
|
||||||
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = RUNTIME_TEMPLATES_ROOT / "topic_presets.json"
|
def _resolve_template_file(filename: str) -> Path:
|
||||||
|
target = (TEMPLATE_ROOT / filename).resolve()
|
||||||
|
legacy = (LEGACY_TEMPLATE_ROOT / filename).resolve()
|
||||||
|
if target.exists():
|
||||||
|
return target
|
||||||
|
if legacy.exists():
|
||||||
|
try:
|
||||||
|
shutil.copy2(legacy, target)
|
||||||
|
return target
|
||||||
|
except Exception:
|
||||||
|
return legacy
|
||||||
|
return target
|
||||||
|
|
||||||
|
|
||||||
|
AGENT_MD_TEMPLATES_FILE: Final[Path] = _resolve_template_file("agent_md_templates.json")
|
||||||
|
TOPIC_PRESETS_TEMPLATES_FILE: Final[Path] = _resolve_template_file("topic_presets.json")
|
||||||
|
|
||||||
|
_agent_md_templates_raw = _load_json_object(AGENT_MD_TEMPLATES_FILE)
|
||||||
|
DEFAULT_AGENTS_MD: Final[str] = _env_text(
|
||||||
|
"DEFAULT_AGENTS_MD",
|
||||||
|
_read_template_md(_agent_md_templates_raw.get("agents_md")),
|
||||||
|
).strip()
|
||||||
|
DEFAULT_SOUL_MD: Final[str] = _env_text(
|
||||||
|
"DEFAULT_SOUL_MD",
|
||||||
|
_read_template_md(_agent_md_templates_raw.get("soul_md")),
|
||||||
|
).strip()
|
||||||
|
DEFAULT_USER_MD: Final[str] = _env_text(
|
||||||
|
"DEFAULT_USER_MD",
|
||||||
|
_read_template_md(_agent_md_templates_raw.get("user_md")),
|
||||||
|
).strip()
|
||||||
|
DEFAULT_TOOLS_MD: Final[str] = _env_text(
|
||||||
|
"DEFAULT_TOOLS_MD",
|
||||||
|
_read_template_md(_agent_md_templates_raw.get("tools_md")),
|
||||||
|
).strip()
|
||||||
|
DEFAULT_IDENTITY_MD: Final[str] = _env_text(
|
||||||
|
"DEFAULT_IDENTITY_MD",
|
||||||
|
_read_template_md(_agent_md_templates_raw.get("identity_md")),
|
||||||
|
).strip()
|
||||||
|
|
||||||
|
_topic_presets_raw = _load_json_object(TOPIC_PRESETS_TEMPLATES_FILE)
|
||||||
|
_topic_presets_list = _topic_presets_raw.get("presets")
|
||||||
|
TOPIC_PRESET_TEMPLATES: Final[list[dict[str, object]]] = [
|
||||||
|
dict(row) for row in (_topic_presets_list if isinstance(_topic_presets_list, list) else []) if isinstance(row, dict)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def load_agent_md_templates() -> dict[str, str]:
|
||||||
|
raw = _load_json_object(AGENT_MD_TEMPLATES_FILE)
|
||||||
|
rows: dict[str, str] = {}
|
||||||
|
for key in ("agents_md", "soul_md", "user_md", "tools_md", "identity_md"):
|
||||||
|
rows[key] = _read_template_md(raw.get(key))
|
||||||
|
return rows
|
||||||
|
|
||||||
|
|
||||||
|
def load_topic_presets_template() -> dict[str, object]:
|
||||||
|
raw = _load_json_object(TOPIC_PRESETS_TEMPLATES_FILE)
|
||||||
|
presets = raw.get("presets")
|
||||||
|
if not isinstance(presets, list):
|
||||||
|
return {"presets": []}
|
||||||
|
return {"presets": [dict(row) for row in presets if isinstance(row, dict)]}
|
||||||
|
|
|
||||||
|
|
@ -24,39 +24,6 @@ class SpeechDurationError(SpeechServiceError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def inspect_speech_model_status() -> Dict[str, Any]:
|
|
||||||
service = WhisperSpeechService()
|
|
||||||
model = str(STT_MODEL or "").strip()
|
|
||||||
model_dir = str(STT_MODEL_DIR or "").strip()
|
|
||||||
expected_path = ""
|
|
||||||
|
|
||||||
if model:
|
|
||||||
if any(sep in model for sep in ("/", "\\")):
|
|
||||||
expected_path = str(Path(model).expanduser())
|
|
||||||
elif model_dir:
|
|
||||||
expected_path = str((Path(model_dir).expanduser() / model).resolve())
|
|
||||||
|
|
||||||
try:
|
|
||||||
resolved_path = service._resolve_model_source()
|
|
||||||
return {
|
|
||||||
"ready": True,
|
|
||||||
"model": model,
|
|
||||||
"model_dir": model_dir,
|
|
||||||
"expected_path": expected_path or resolved_path,
|
|
||||||
"resolved_path": resolved_path,
|
|
||||||
"message": "",
|
|
||||||
}
|
|
||||||
except SpeechServiceError as exc:
|
|
||||||
return {
|
|
||||||
"ready": False,
|
|
||||||
"model": model,
|
|
||||||
"model_dir": model_dir,
|
|
||||||
"expected_path": expected_path,
|
|
||||||
"resolved_path": "",
|
|
||||||
"message": str(exc),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class WhisperSpeechService:
|
class WhisperSpeechService:
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
self._model: Any = None
|
self._model: Any = None
|
||||||
|
|
|
||||||
|
|
@ -1,160 +0,0 @@
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import json
|
|
||||||
from datetime import datetime, timezone, timedelta
|
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
from zoneinfo import ZoneInfo
|
|
||||||
from fastapi import HTTPException
|
|
||||||
from core.settings import DEFAULT_BOT_SYSTEM_TIMEZONE
|
|
||||||
|
|
||||||
_ENV_KEY_RE = re.compile(r"^[A-Z_][A-Z0-9_]{0,127}$")
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"_calc_dir_size_bytes",
|
|
||||||
"_get_default_system_timezone",
|
|
||||||
"_is_ignored_skill_zip_top_level",
|
|
||||||
"_is_image_attachment_path",
|
|
||||||
"_is_valid_top_level_skill_name",
|
|
||||||
"_is_video_attachment_path",
|
|
||||||
"_is_visual_attachment_path",
|
|
||||||
"_normalize_env_params",
|
|
||||||
"_normalize_system_timezone",
|
|
||||||
"_parse_env_params",
|
|
||||||
"_parse_json_string_list",
|
|
||||||
"_read_description_from_text",
|
|
||||||
"_resolve_local_day_range",
|
|
||||||
"_safe_float",
|
|
||||||
"_safe_int",
|
|
||||||
"_sanitize_skill_market_key",
|
|
||||||
"_sanitize_zip_filename",
|
|
||||||
"_workspace_stat_ctime_iso",
|
|
||||||
]
|
|
||||||
|
|
||||||
def _resolve_local_day_range(date_text: str, tz_offset_minutes: Optional[int]) -> tuple[datetime, datetime]:
|
|
||||||
try:
|
|
||||||
local_day = datetime.strptime(str(date_text or "").strip(), "%Y-%m-%d")
|
|
||||||
except ValueError as exc:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid date, expected YYYY-MM-DD") from exc
|
|
||||||
|
|
||||||
offset = timedelta(minutes=tz_offset_minutes if tz_offset_minutes is not None else 0)
|
|
||||||
utc_start = (local_day).replace(tzinfo=timezone.utc) + offset
|
|
||||||
utc_end = utc_start + timedelta(days=1)
|
|
||||||
return utc_start, utc_end
|
|
||||||
|
|
||||||
def _sanitize_zip_filename(name: str) -> str:
|
|
||||||
s = str(name or "").strip()
|
|
||||||
s = re.sub(r"[^a-zA-Z0-9._-]", "_", s)
|
|
||||||
return s if s else "upload.zip"
|
|
||||||
|
|
||||||
def _normalize_env_params(raw: Any) -> Dict[str, str]:
|
|
||||||
if not isinstance(raw, dict):
|
|
||||||
return {}
|
|
||||||
res: Dict[str, str] = {}
|
|
||||||
for k, v in raw.items():
|
|
||||||
ks = str(k).strip()
|
|
||||||
if _ENV_KEY_RE.match(ks):
|
|
||||||
res[ks] = str(v or "").strip()
|
|
||||||
return res
|
|
||||||
|
|
||||||
def _get_default_system_timezone() -> str:
|
|
||||||
return str(DEFAULT_BOT_SYSTEM_TIMEZONE or "Asia/Shanghai").strip()
|
|
||||||
|
|
||||||
def _normalize_system_timezone(raw: Any) -> str:
|
|
||||||
s = str(raw or "").strip()
|
|
||||||
if not s:
|
|
||||||
return _get_default_system_timezone()
|
|
||||||
try:
|
|
||||||
ZoneInfo(s)
|
|
||||||
return s
|
|
||||||
except Exception:
|
|
||||||
return _get_default_system_timezone()
|
|
||||||
|
|
||||||
def _safe_float(raw: Any, default: float) -> float:
|
|
||||||
try:
|
|
||||||
return float(raw)
|
|
||||||
except (ValueError, TypeError):
|
|
||||||
return default
|
|
||||||
|
|
||||||
def _safe_int(raw: Any, default: int) -> int:
|
|
||||||
try:
|
|
||||||
return int(raw)
|
|
||||||
except (ValueError, TypeError):
|
|
||||||
return default
|
|
||||||
|
|
||||||
def _parse_env_params(raw: Any) -> Dict[str, str]:
|
|
||||||
if isinstance(raw, dict):
|
|
||||||
return _normalize_env_params(raw)
|
|
||||||
if isinstance(raw, str):
|
|
||||||
try:
|
|
||||||
parsed = json.loads(raw)
|
|
||||||
return _normalize_env_params(parsed)
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
return {}
|
|
||||||
|
|
||||||
def _is_valid_top_level_skill_name(name: str) -> bool:
|
|
||||||
return bool(re.match(r"^[a-zA-Z0-9_-]+$", name))
|
|
||||||
|
|
||||||
def _parse_json_string_list(raw: Any) -> List[str]:
|
|
||||||
if not raw:
|
|
||||||
return []
|
|
||||||
if isinstance(raw, list):
|
|
||||||
return [str(v) for v in raw]
|
|
||||||
if isinstance(raw, str):
|
|
||||||
try:
|
|
||||||
parsed = json.loads(raw)
|
|
||||||
if isinstance(parsed, list):
|
|
||||||
return [str(v) for v in parsed]
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
return []
|
|
||||||
|
|
||||||
def _is_ignored_skill_zip_top_level(name: str) -> bool:
|
|
||||||
return name.startswith(".") or name.startswith("__") or name in {"venv", "node_modules"}
|
|
||||||
|
|
||||||
def _read_description_from_text(text: str) -> str:
|
|
||||||
if not text:
|
|
||||||
return ""
|
|
||||||
lines = text.strip().split("\n")
|
|
||||||
for line in lines:
|
|
||||||
s = line.strip()
|
|
||||||
if s and not s.startswith("#"):
|
|
||||||
return s[:200]
|
|
||||||
return ""
|
|
||||||
|
|
||||||
def _sanitize_skill_market_key(key: str) -> str:
|
|
||||||
s = str(key or "").strip().lower()
|
|
||||||
s = re.sub(r"[^a-z0-9_-]", "_", s)
|
|
||||||
return s
|
|
||||||
|
|
||||||
def _calc_dir_size_bytes(path: str) -> int:
|
|
||||||
total = 0
|
|
||||||
try:
|
|
||||||
for root, dirs, files in os.walk(path):
|
|
||||||
for f in files:
|
|
||||||
fp = os.path.join(root, f)
|
|
||||||
if not os.path.islink(fp):
|
|
||||||
total += os.path.getsize(fp)
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
return total
|
|
||||||
|
|
||||||
def _is_image_attachment_path(path: str) -> bool:
|
|
||||||
ext = (os.path.splitext(path)[1] or "").lower()
|
|
||||||
return ext in {".png", ".jpg", ".jpeg", ".gif", ".webp", ".svg", ".bmp"}
|
|
||||||
|
|
||||||
def _is_video_attachment_path(path: str) -> bool:
|
|
||||||
ext = (os.path.splitext(path)[1] or "").lower()
|
|
||||||
return ext in {".mp4", ".mov", ".avi", ".mkv", ".webm"}
|
|
||||||
|
|
||||||
def _is_visual_attachment_path(path: str) -> bool:
|
|
||||||
return _is_image_attachment_path(path) or _is_video_attachment_path(path)
|
|
||||||
|
|
||||||
def _workspace_stat_ctime_iso(stat: os.stat_result) -> str:
|
|
||||||
ts = getattr(stat, "st_birthtime", None)
|
|
||||||
if ts is None:
|
|
||||||
ts = getattr(stat, "st_ctime", None)
|
|
||||||
try:
|
|
||||||
return datetime.fromtimestamp(float(ts), tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
|
||||||
except Exception:
|
|
||||||
return datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat().replace("+00:00", "Z")
|
|
||||||
|
|
@ -1,27 +0,0 @@
|
||||||
from typing import Any, Dict, List
|
|
||||||
from fastapi import WebSocket
|
|
||||||
|
|
||||||
class WSConnectionManager:
|
|
||||||
def __init__(self):
|
|
||||||
self.connections: Dict[str, List[WebSocket]] = {}
|
|
||||||
|
|
||||||
async def connect(self, bot_id: str, websocket: WebSocket):
|
|
||||||
await websocket.accept()
|
|
||||||
self.connections.setdefault(bot_id, []).append(websocket)
|
|
||||||
|
|
||||||
def disconnect(self, bot_id: str, websocket: WebSocket):
|
|
||||||
conns = self.connections.get(bot_id, [])
|
|
||||||
if websocket in conns:
|
|
||||||
conns.remove(websocket)
|
|
||||||
if not conns and bot_id in self.connections:
|
|
||||||
del self.connections[bot_id]
|
|
||||||
|
|
||||||
async def broadcast(self, bot_id: str, data: Dict[str, Any]):
|
|
||||||
conns = list(self.connections.get(bot_id, []))
|
|
||||||
for ws in conns:
|
|
||||||
try:
|
|
||||||
await ws.send_json(data)
|
|
||||||
except Exception:
|
|
||||||
self.disconnect(bot_id, ws)
|
|
||||||
|
|
||||||
manager = WSConnectionManager()
|
|
||||||
|
|
@ -0,0 +1,73 @@
|
||||||
|
-- Topic subsystem schema (SQLite)
|
||||||
|
-- Apply manually before/after backend deployment if needed.
|
||||||
|
|
||||||
|
BEGIN TRANSACTION;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS topic_topic (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
bot_id TEXT NOT NULL,
|
||||||
|
topic_key TEXT NOT NULL,
|
||||||
|
name TEXT NOT NULL DEFAULT '',
|
||||||
|
description TEXT NOT NULL DEFAULT '',
|
||||||
|
is_active INTEGER NOT NULL DEFAULT 1,
|
||||||
|
is_default_fallback INTEGER NOT NULL DEFAULT 0,
|
||||||
|
routing_json TEXT NOT NULL DEFAULT '{}',
|
||||||
|
view_schema_json TEXT NOT NULL DEFAULT '{}',
|
||||||
|
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS topic_item (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
bot_id TEXT NOT NULL,
|
||||||
|
topic_key TEXT NOT NULL,
|
||||||
|
title TEXT NOT NULL DEFAULT '',
|
||||||
|
content TEXT NOT NULL DEFAULT '',
|
||||||
|
level TEXT NOT NULL DEFAULT 'info',
|
||||||
|
tags_json TEXT,
|
||||||
|
view_json TEXT,
|
||||||
|
source TEXT NOT NULL DEFAULT 'mcp',
|
||||||
|
dedupe_key TEXT,
|
||||||
|
is_read INTEGER NOT NULL DEFAULT 0,
|
||||||
|
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY(bot_id) REFERENCES bot_instance(id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS uq_topic_topic_bot_topic_key
|
||||||
|
ON topic_topic(bot_id, topic_key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_id
|
||||||
|
ON topic_topic(bot_id);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_topic_topic_key
|
||||||
|
ON topic_topic(topic_key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_topic_bot_fallback
|
||||||
|
ON topic_topic(bot_id, is_default_fallback);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_id
|
||||||
|
ON topic_item(bot_id);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_topic_key
|
||||||
|
ON topic_item(topic_key);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_level
|
||||||
|
ON topic_item(level);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_source
|
||||||
|
ON topic_item(source);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_is_read
|
||||||
|
ON topic_item(is_read);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_created_at
|
||||||
|
ON topic_item(created_at);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_topic_created_at
|
||||||
|
ON topic_item(bot_id, topic_key, created_at);
|
||||||
|
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_topic_item_bot_dedupe
|
||||||
|
ON topic_item(bot_id, dedupe_key);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
|
|
@ -1,14 +1,39 @@
|
||||||
from app_factory import create_app
|
import os
|
||||||
from core.settings import APP_HOST, APP_PORT, APP_RELOAD
|
|
||||||
|
from app_factory import app
|
||||||
|
|
||||||
|
|
||||||
|
def _main_server_options() -> tuple[str, int, bool, str, bool]:
|
||||||
|
host = str(os.getenv("APP_HOST", "0.0.0.0") or "0.0.0.0").strip() or "0.0.0.0"
|
||||||
|
try:
|
||||||
|
port = int(os.getenv("APP_PORT", "8000"))
|
||||||
|
except Exception:
|
||||||
|
port = 8000
|
||||||
|
port = max(1, min(port, 65535))
|
||||||
|
reload_flag = str(os.getenv("APP_RELOAD", "true")).strip().lower() in {"1", "true", "yes", "on"}
|
||||||
|
log_level = str(os.getenv("APP_LOG_LEVEL", "warning") or "warning").strip().lower() or "warning"
|
||||||
|
access_log = str(os.getenv("APP_ACCESS_LOG", "false")).strip().lower() in {"1", "true", "yes", "on"}
|
||||||
|
return host, port, reload_flag, log_level, access_log
|
||||||
|
|
||||||
app = create_app()
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
import uvicorn
|
import uvicorn
|
||||||
|
|
||||||
if APP_RELOAD:
|
host, port, reload_flag, log_level, access_log = _main_server_options()
|
||||||
# Use import string to support hot-reloading
|
if reload_flag:
|
||||||
uvicorn.run("main:app", host=APP_HOST, port=APP_PORT, reload=True)
|
uvicorn.run(
|
||||||
|
"main:app",
|
||||||
|
host=host,
|
||||||
|
port=port,
|
||||||
|
reload=True,
|
||||||
|
log_level=log_level,
|
||||||
|
access_log=access_log,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
# Use app object for faster/direct startup
|
uvicorn.run(
|
||||||
uvicorn.run(app, host=APP_HOST, port=APP_PORT)
|
app,
|
||||||
|
host=host,
|
||||||
|
port=port,
|
||||||
|
log_level=log_level,
|
||||||
|
access_log=access_log,
|
||||||
|
)
|
||||||
|
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from sqlmodel import Field, SQLModel
|
|
||||||
|
|
||||||
|
|
||||||
class AuthLoginLog(SQLModel, table=True):
|
|
||||||
__tablename__ = "sys_login_log"
|
|
||||||
|
|
||||||
id: Optional[int] = Field(default=None, primary_key=True)
|
|
||||||
auth_type: str = Field(index=True) # panel | bot
|
|
||||||
token_hash: str = Field(index=True, unique=True)
|
|
||||||
subject_id: str = Field(index=True)
|
|
||||||
bot_id: Optional[str] = Field(default=None, index=True)
|
|
||||||
auth_source: str = Field(default="", index=True)
|
|
||||||
created_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
|
||||||
expires_at: datetime = Field(index=True)
|
|
||||||
last_seen_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
|
||||||
revoked_at: Optional[datetime] = Field(default=None, index=True)
|
|
||||||
revoke_reason: Optional[str] = Field(default=None)
|
|
||||||
client_ip: Optional[str] = Field(default=None)
|
|
||||||
user_agent: Optional[str] = Field(default=None)
|
|
||||||
device_info: Optional[str] = Field(default=None)
|
|
||||||
|
|
@ -13,7 +13,11 @@ class BotInstance(SQLModel, table=True):
|
||||||
docker_status: str = Field(default="STOPPED", index=True)
|
docker_status: str = Field(default="STOPPED", index=True)
|
||||||
current_state: Optional[str] = Field(default="IDLE")
|
current_state: Optional[str] = Field(default="IDLE")
|
||||||
last_action: Optional[str] = Field(default=None)
|
last_action: Optional[str] = Field(default=None)
|
||||||
image_tag: str = Field(default="nanobot-base") # 记录该机器人使用的镜像版本
|
image_tag: str = Field(default="nanobot-base:v0.1.4") # 记录该机器人使用的镜像版本
|
||||||
|
node_id: str = Field(default="local", index=True)
|
||||||
|
transport_kind: str = Field(default="direct", index=True)
|
||||||
|
runtime_kind: str = Field(default="docker", index=True)
|
||||||
|
core_adapter: str = Field(default="nanobot", index=True)
|
||||||
created_at: datetime = Field(default_factory=datetime.utcnow)
|
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
updated_at: datetime = Field(default_factory=datetime.utcnow)
|
updated_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
|
|
||||||
|
|
@ -32,7 +36,7 @@ class BotMessage(SQLModel, table=True):
|
||||||
class NanobotImage(SQLModel, table=True):
|
class NanobotImage(SQLModel, table=True):
|
||||||
__tablename__ = "bot_image"
|
__tablename__ = "bot_image"
|
||||||
|
|
||||||
tag: str = Field(primary_key=True) # e.g., nanobot-base
|
tag: str = Field(primary_key=True) # e.g., nanobot-base:v0.1.4
|
||||||
image_id: Optional[str] = Field(default=None) # Docker 内部的 Image ID
|
image_id: Optional[str] = Field(default=None) # Docker 内部的 Image ID
|
||||||
version: str # e.g., 0.1.4
|
version: str # e.g., 0.1.4
|
||||||
status: str = Field(default="READY") # READY, BUILDING, ERROR
|
status: str = Field(default="READY") # READY, BUILDING, ERROR
|
||||||
|
|
|
||||||
|
|
@ -19,6 +19,25 @@ class PlatformSetting(SQLModel, table=True):
|
||||||
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||||
|
|
||||||
|
|
||||||
|
class ManagedNodeRecord(SQLModel, table=True):
|
||||||
|
__tablename__ = "managed_node"
|
||||||
|
|
||||||
|
node_id: str = Field(primary_key=True, max_length=120)
|
||||||
|
display_name: str = Field(default="", max_length=200)
|
||||||
|
base_url: str = Field(default="")
|
||||||
|
enabled: bool = Field(default=True, index=True)
|
||||||
|
auth_token: str = Field(default="")
|
||||||
|
transport_kind: str = Field(default="direct", max_length=32, index=True)
|
||||||
|
runtime_kind: str = Field(default="docker", max_length=32, index=True)
|
||||||
|
core_adapter: str = Field(default="nanobot", max_length=64, index=True)
|
||||||
|
metadata_json: str = Field(default="{}")
|
||||||
|
capabilities_json: str = Field(default="{}")
|
||||||
|
resources_json: str = Field(default="{}")
|
||||||
|
last_seen_at: Optional[datetime] = Field(default=None, index=True)
|
||||||
|
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
|
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||||
|
|
||||||
|
|
||||||
class BotRequestUsage(SQLModel, table=True):
|
class BotRequestUsage(SQLModel, table=True):
|
||||||
__tablename__ = "bot_request_usage"
|
__tablename__ = "bot_request_usage"
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,115 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from sqlalchemy import UniqueConstraint
|
||||||
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
|
class SysRole(SQLModel, table=True):
|
||||||
|
__tablename__ = "sys_role"
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("role_key", name="uq_sys_role_role_key"),
|
||||||
|
)
|
||||||
|
|
||||||
|
id: Optional[int] = Field(default=None, primary_key=True)
|
||||||
|
role_key: str = Field(index=True, max_length=64)
|
||||||
|
name: str = Field(default="", max_length=120)
|
||||||
|
description: str = Field(default="")
|
||||||
|
is_active: bool = Field(default=True, index=True)
|
||||||
|
sort_order: int = Field(default=100, index=True)
|
||||||
|
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
|
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||||
|
|
||||||
|
|
||||||
|
class SysUser(SQLModel, table=True):
|
||||||
|
__tablename__ = "sys_user"
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("username", name="uq_sys_user_username"),
|
||||||
|
)
|
||||||
|
|
||||||
|
id: Optional[int] = Field(default=None, primary_key=True)
|
||||||
|
username: str = Field(index=True, max_length=64)
|
||||||
|
display_name: str = Field(default="", max_length=120)
|
||||||
|
password_hash: str = Field(default="", max_length=255)
|
||||||
|
password_salt: str = Field(default="", max_length=64)
|
||||||
|
role_id: Optional[int] = Field(default=None, foreign_key="sys_role.id", index=True)
|
||||||
|
is_active: bool = Field(default=True, index=True)
|
||||||
|
last_login_at: Optional[datetime] = Field(default=None, index=True)
|
||||||
|
current_token_hash: Optional[str] = Field(default=None, index=True, max_length=255)
|
||||||
|
current_token_expires_at: Optional[datetime] = Field(default=None, index=True)
|
||||||
|
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
|
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||||
|
|
||||||
|
|
||||||
|
class SysMenu(SQLModel, table=True):
|
||||||
|
__tablename__ = "sys_menu"
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("menu_key", name="uq_sys_menu_menu_key"),
|
||||||
|
)
|
||||||
|
|
||||||
|
id: Optional[int] = Field(default=None, primary_key=True)
|
||||||
|
menu_key: str = Field(index=True, max_length=64)
|
||||||
|
parent_key: str = Field(default="", index=True, max_length=64)
|
||||||
|
title: str = Field(default="", max_length=120)
|
||||||
|
title_en: str = Field(default="", max_length=120)
|
||||||
|
menu_type: str = Field(default="item", max_length=32, index=True)
|
||||||
|
route_path: str = Field(default="", max_length=255)
|
||||||
|
icon: str = Field(default="", max_length=64)
|
||||||
|
permission_key: str = Field(default="", max_length=120)
|
||||||
|
visible: bool = Field(default=True, index=True)
|
||||||
|
sort_order: int = Field(default=100, index=True)
|
||||||
|
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
|
updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||||
|
|
||||||
|
|
||||||
|
class SysPermission(SQLModel, table=True):
    """Fine-grained permission record, optionally scoped to a menu node."""

    __tablename__ = "sys_permission"
    __table_args__ = (
        UniqueConstraint("permission_key", name="uq_sys_permission_permission_key"),
    )

    id: Optional[int] = Field(default=None, primary_key=True)
    # Unique machine key for the permission (unique constraint above).
    permission_key: str = Field(index=True, max_length=120)
    # Human-readable name.
    name: str = Field(default="", max_length=120)
    # menu_key of the owning menu entry; empty when not menu-scoped.
    menu_key: str = Field(default="", index=True, max_length=64)
    # Action verb this permission covers (default "view").
    action: str = Field(default="view", max_length=32, index=True)
    description: str = Field(default="")
    sort_order: int = Field(default=100, index=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow, index=True)
||||||
|
class SysRoleMenu(SQLModel, table=True):
    """Association table mapping roles to visible menu nodes (many-to-many)."""

    __tablename__ = "sys_role_menu"
    __table_args__ = (
        # Each (role, menu) pair may appear at most once.
        UniqueConstraint("role_id", "menu_id", name="uq_sys_role_menu_role_menu"),
    )

    id: Optional[int] = Field(default=None, primary_key=True)
    role_id: int = Field(foreign_key="sys_role.id", index=True)
    menu_id: int = Field(foreign_key="sys_menu.id", index=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
||||||
|
class SysRolePermission(SQLModel, table=True):
    """Association table mapping roles to granted permissions (many-to-many)."""

    __tablename__ = "sys_role_permission"
    __table_args__ = (
        # Each (role, permission) pair may appear at most once.
        UniqueConstraint("role_id", "permission_id", name="uq_sys_role_permission_role_permission"),
    )

    id: Optional[int] = Field(default=None, primary_key=True)
    role_id: int = Field(foreign_key="sys_role.id", index=True)
    permission_id: int = Field(foreign_key="sys_permission.id", index=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
||||||
|
class SysUserBot(SQLModel, table=True):
    """Association table granting a user access to a bot instance."""

    __tablename__ = "sys_user_bot"
    __table_args__ = (
        # Each (user, bot) pair may appear at most once.
        UniqueConstraint("user_id", "bot_id", name="uq_sys_user_bot_user_bot"),
    )

    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="sys_user.id", index=True)
    # Bot ids are string keys, unlike the integer ids of the sys_* tables.
    bot_id: str = Field(foreign_key="bot_instance.id", index=True, max_length=120)
    created_at: datetime = Field(default_factory=datetime.utcnow)
|
|
@ -1,3 +1 @@
|
||||||
from providers.bot_workspace_provider import BotWorkspaceProvider
|
# Provider package for runtime/workspace/provision abstractions.
|
||||||
|
|
||||||
__all__ = ["BotWorkspaceProvider"]
|
|
||||||
|
|
|
||||||
|
|
@ -1,283 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
from typing import Any, Dict, List
|
|
||||||
|
|
||||||
# Map vendor / marketing provider names to the canonical provider keys used in
# the generated runtime config. Names not listed here pass through unchanged
# (see _normalize_provider_name).
_PROVIDER_ALIAS_MAP = {
    "aliyun": "dashscope",
    "qwen": "dashscope",
    "aliyun-qwen": "dashscope",
    "moonshot": "kimi",
    # OpenAI-compatible endpoints are routed through the generic "openai" key.
    "xunfei": "openai",
    "iflytek": "openai",
    "xfyun": "openai",
    "vllm": "openai",
}
|
|
||||||
|
|
||||||
# Markdown files the dashboard owns inside each bot workspace; write_workspace
# rewrites exactly this set on every sync.
_MANAGED_WORKSPACE_FILES = ("AGENTS.md", "SOUL.md", "USER.md", "TOOLS.md", "IDENTITY.md")
|
|
||||||
|
|
||||||
|
|
||||||
def _require_text(raw: Any, *, field: str) -> str:
|
|
||||||
value = str(raw if raw is not None else "").strip()
|
|
||||||
if not value:
|
|
||||||
raise RuntimeError(f"Missing required bot runtime field: {field}")
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_markdown_text(raw: Any, *, field: str) -> str:
|
|
||||||
if raw is None:
|
|
||||||
raise RuntimeError(f"Missing required workspace markdown field: {field}")
|
|
||||||
return str(raw).replace("\r\n", "\n").strip() + "\n"
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_provider_name(raw_provider_name: str) -> tuple[str, str]:
    """Return ``(normalized, canonical)`` provider names.

    The normalized name is the lowercased, stripped input; the canonical name
    resolves known aliases via _PROVIDER_ALIAS_MAP, falling back to the
    normalized name itself. Raises RuntimeError on a blank input.
    """
    lowered = raw_provider_name.strip().lower()
    if not lowered:
        raise RuntimeError("Missing required bot runtime field: llm_provider")
    return lowered, _PROVIDER_ALIAS_MAP.get(lowered, lowered)
|
||||||
|
|
||||||
|
|
||||||
def _normalize_allow_from(raw: Any) -> List[str]:
|
|
||||||
rows: List[str] = []
|
|
||||||
if isinstance(raw, list):
|
|
||||||
for item in raw:
|
|
||||||
text = str(item or "").strip()
|
|
||||||
if text and text not in rows:
|
|
||||||
rows.append(text)
|
|
||||||
return rows or ["*"]
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_extra_config(raw: Any) -> Dict[str, Any]:
|
|
||||||
if raw is None:
|
|
||||||
return {}
|
|
||||||
if not isinstance(raw, dict):
|
|
||||||
raise RuntimeError("Channel extra_config must be an object")
|
|
||||||
return dict(raw)
|
|
||||||
|
|
||||||
|
|
||||||
def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
|
|
||||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
|
||||||
tmp_path = f"{path}.tmp"
|
|
||||||
with open(tmp_path, "w", encoding="utf-8") as file:
|
|
||||||
json.dump(payload, file, ensure_ascii=False, indent=2)
|
|
||||||
os.replace(tmp_path, path)
|
|
||||||
|
|
||||||
|
|
||||||
def _write_text_atomic(path: str, content: str) -> None:
|
|
||||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
|
||||||
tmp_path = f"{path}.tmp"
|
|
||||||
with open(tmp_path, "w", encoding="utf-8") as file:
|
|
||||||
file.write(content)
|
|
||||||
os.replace(tmp_path, path)
|
|
||||||
|
|
||||||
|
|
||||||
class BotWorkspaceProvider:
    """Materializes a bot's on-disk runtime workspace under the host data root.

    write_workspace builds the ``.nanobot/config.json`` runtime configuration
    (provider, model defaults, per-channel settings) and the managed workspace
    markdown files for a single bot.
    """

    def __init__(self, host_data_root: str):
        # Root directory on the host under which every bot gets its own
        # ``<host_data_root>/<bot_id>/.nanobot`` tree.
        self.host_data_root = host_data_root

    def write_workspace(self, bot_id: str, bot_data: Dict[str, Any], channels: List[Dict[str, Any]]) -> str:
        """Write config.json and managed markdown files for *bot_id*.

        Args:
            bot_id: Bot identifier; becomes the directory name under the data root.
            bot_data: Flat mapping of runtime fields (llm_provider, llm_model,
                api_key, api_base, temperature, top_p, max_tokens, flags,
                optional mcp_servers, and the *_md markdown bodies).
            channels: Channel rows, each with channel_type, is_active,
                external_app_id, app_secret, and an optional extra_config dict.

        Returns:
            Absolute path of the bot's ``.nanobot`` directory.

        Raises:
            RuntimeError: on missing/blank required fields or malformed
                extra_config / mcp_servers.
        """
        # Required text fields fail fast with a RuntimeError naming the field.
        raw_provider_name, provider_name = _normalize_provider_name(_require_text(bot_data.get("llm_provider"), field="llm_provider"))
        model_name = _require_text(bot_data.get("llm_model"), field="llm_model")
        api_key = _require_text(bot_data.get("api_key"), field="api_key")
        api_base = _require_text(bot_data.get("api_base"), field="api_base")
        # Numeric/boolean fields are coerced directly; note float(None)/int(None)
        # raise TypeError, so these are effectively required as well.
        temperature = float(bot_data.get("temperature"))
        top_p = float(bot_data.get("top_p"))
        max_tokens = int(bot_data.get("max_tokens"))
        send_progress = bool(bot_data.get("send_progress"))
        send_tool_hints = bool(bot_data.get("send_tool_hints"))

        # Directory layout: <root>/<bot_id>/.nanobot/workspace/{memory,skills}
        bot_root = os.path.join(self.host_data_root, bot_id)
        dot_nanobot_dir = os.path.join(bot_root, ".nanobot")
        workspace_dir = os.path.join(dot_nanobot_dir, "workspace")
        memory_dir = os.path.join(workspace_dir, "memory")
        skills_dir = os.path.join(workspace_dir, "skills")
        for path in (dot_nanobot_dir, workspace_dir, memory_dir, skills_dir):
            os.makedirs(path, exist_ok=True)

        provider_cfg: Dict[str, Any] = {
            "apiKey": api_key,
            "apiBase": api_base,
        }
        # For OpenAI-compatible vendors, remember the original vendor name so
        # the dashboard can still display it after aliasing.
        if raw_provider_name in {"xunfei", "iflytek", "xfyun", "vllm"}:
            provider_cfg["dashboardProviderAlias"] = raw_provider_name

        effective_model_name = model_name
        # iFlytek-family models routed through "openai" get an "openai/" prefix
        # unless the model name already carries a namespace.
        if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"} and "/" not in model_name:
            effective_model_name = f"openai/{model_name}"

        # Base runtime config; the dashboard channel is always enabled and
        # bound to port 9000 inside the bot's container/runtime.
        config_data: Dict[str, Any] = {
            "agents": {
                "defaults": {
                    "model": effective_model_name,
                    "temperature": temperature,
                    "topP": top_p,
                    "maxTokens": max_tokens,
                }
            },
            "providers": {
                provider_name: provider_cfg,
            },
            "channels": {
                "sendProgress": send_progress,
                "sendToolHints": send_tool_hints,
                "dashboard": {
                    "enabled": True,
                    "host": "0.0.0.0",
                    "port": 9000,
                    "allowFrom": ["*"],
                },
            },
        }

        # Optional MCP tool servers pass through verbatim (must be a mapping).
        mcp_servers = bot_data.get("mcp_servers")
        if mcp_servers is not None:
            if not isinstance(mcp_servers, dict):
                raise RuntimeError("mcp_servers must be an object")
            config_data["tools"] = {"mcpServers": mcp_servers}

        # Translate each channel row into the runtime's channel config shape.
        # Known channel types get an explicit field mapping; unknown types fall
        # through to a generic appId/appSecret + extra_config spread at the end.
        channels_cfg = config_data["channels"]
        for channel in channels:
            channel_type = str(channel.get("channel_type") or "").strip().lower()
            # "dashboard" is always managed above; blank types are skipped.
            if not channel_type or channel_type == "dashboard":
                continue
            extra = _normalize_extra_config(channel.get("extra_config"))
            enabled = bool(channel.get("is_active"))
            external_app_id = str(channel.get("external_app_id") or "").strip()
            app_secret = str(channel.get("app_secret") or "").strip()

            if channel_type == "telegram":
                channels_cfg["telegram"] = {
                    "enabled": enabled,
                    "token": app_secret,
                    "proxy": str(extra.get("proxy") or "").strip(),
                    "replyToMessage": bool(extra.get("replyToMessage")),
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue

            if channel_type == "feishu":
                channels_cfg["feishu"] = {
                    "enabled": enabled,
                    "appId": external_app_id,
                    "appSecret": app_secret,
                    "encryptKey": str(extra.get("encryptKey") or "").strip(),
                    "verificationToken": str(extra.get("verificationToken") or "").strip(),
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue

            if channel_type == "dingtalk":
                channels_cfg["dingtalk"] = {
                    "enabled": enabled,
                    "clientId": external_app_id,
                    "clientSecret": app_secret,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue

            if channel_type == "slack":
                channels_cfg["slack"] = {
                    "enabled": enabled,
                    # Socket mode is the default transport.
                    "mode": str(extra.get("mode") or "socket"),
                    "botToken": external_app_id,
                    "appToken": app_secret,
                    "replyInThread": bool(extra.get("replyInThread", True)),
                    "groupPolicy": str(extra.get("groupPolicy") or "mention"),
                    "groupAllowFrom": extra.get("groupAllowFrom") if isinstance(extra.get("groupAllowFrom"), list) else [],
                    "reactEmoji": str(extra.get("reactEmoji") or "eyes"),
                }
                continue

            if channel_type == "qq":
                channels_cfg["qq"] = {
                    "enabled": enabled,
                    "appId": external_app_id,
                    "secret": app_secret,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue

            if channel_type == "wecom":
                wecom_cfg: Dict[str, Any] = {
                    "enabled": enabled,
                    "botId": external_app_id,
                    "secret": app_secret,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                # welcomeMessage is only emitted when non-empty.
                welcome_message = str(extra.get("welcomeMessage") or "").strip()
                if welcome_message:
                    wecom_cfg["welcomeMessage"] = welcome_message
                channels_cfg["wecom"] = wecom_cfg
                continue

            if channel_type == "weixin":
                weixin_cfg: Dict[str, Any] = {
                    "enabled": enabled,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                # All weixin extras are optional and only emitted when set.
                route_tag = str(extra.get("routeTag") or "").strip()
                if route_tag:
                    weixin_cfg["routeTag"] = route_tag
                state_dir = str(extra.get("stateDir") or "").strip()
                if state_dir:
                    weixin_cfg["stateDir"] = state_dir
                base_url = str(extra.get("baseUrl") or "").strip()
                if base_url:
                    weixin_cfg["baseUrl"] = base_url
                cdn_base_url = str(extra.get("cdnBaseUrl") or "").strip()
                if cdn_base_url:
                    weixin_cfg["cdnBaseUrl"] = cdn_base_url
                # Accept both camelCase and snake_case spellings; clamp to >= 1.
                poll_timeout = extra.get("pollTimeout", extra.get("poll_timeout"))
                if poll_timeout not in {None, ""}:
                    weixin_cfg["pollTimeout"] = max(1, int(poll_timeout))
                channels_cfg["weixin"] = weixin_cfg
                continue

            if channel_type == "email":
                channels_cfg["email"] = {
                    "enabled": enabled,
                    "consentGranted": bool(extra.get("consentGranted")),
                    "imapHost": str(extra.get("imapHost") or "").strip(),
                    # Ports clamped to the valid TCP range; defaults 993/587.
                    "imapPort": max(1, min(int(extra.get("imapPort") or 993), 65535)),
                    "imapUsername": str(extra.get("imapUsername") or "").strip(),
                    "imapPassword": str(extra.get("imapPassword") or "").strip(),
                    "imapMailbox": str(extra.get("imapMailbox") or "INBOX"),
                    "imapUseSsl": bool(extra.get("imapUseSsl", True)),
                    "smtpHost": str(extra.get("smtpHost") or "").strip(),
                    "smtpPort": max(1, min(int(extra.get("smtpPort") or 587), 65535)),
                    "smtpUsername": str(extra.get("smtpUsername") or "").strip(),
                    "smtpPassword": str(extra.get("smtpPassword") or "").strip(),
                    "smtpUseTls": bool(extra.get("smtpUseTls", True)),
                    "smtpUseSsl": bool(extra.get("smtpUseSsl")),
                    "fromAddress": str(extra.get("fromAddress") or "").strip(),
                    "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
                    # Poll at least every 5 seconds.
                    "pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds") or 30)),
                    "markSeen": bool(extra.get("markSeen", True)),
                    "maxBodyChars": max(1, int(extra.get("maxBodyChars") or 12000)),
                    "subjectPrefix": str(extra.get("subjectPrefix") or "Re: "),
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue

            # Fallback for unrecognized channel types: generic credentials plus
            # a verbatim spread of extra_config (which may override the keys
            # above, since it is spread last).
            channels_cfg[channel_type] = {
                "enabled": enabled,
                "appId": external_app_id,
                "appSecret": app_secret,
                **extra,
            }

        _write_json_atomic(os.path.join(dot_nanobot_dir, "config.json"), config_data)

        # All managed markdown files are required; missing ones raise before
        # any file is written.
        workspace_files = {
            "AGENTS.md": _normalize_markdown_text(bot_data.get("agents_md"), field="agents_md"),
            "SOUL.md": _normalize_markdown_text(bot_data.get("soul_md"), field="soul_md"),
            "USER.md": _normalize_markdown_text(bot_data.get("user_md"), field="user_md"),
            "TOOLS.md": _normalize_markdown_text(bot_data.get("tools_md"), field="tools_md"),
            "IDENTITY.md": _normalize_markdown_text(bot_data.get("identity_md"), field="identity_md"),
        }
        for filename in _MANAGED_WORKSPACE_FILES:
            _write_text_atomic(os.path.join(workspace_dir, filename), workspace_files[filename])

        return dot_nanobot_dir
|
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
from providers.provision.base import ProvisionProvider
|
||||||
|
from providers.provision.local import LocalProvisionProvider
|
||||||
|
|
||||||
|
__all__ = ["ProvisionProvider", "LocalProvisionProvider"]
|
||||||
|
|
@ -0,0 +1,18 @@
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
|
||||||
|
class ProvisionProvider(ABC):
    """Abstract interface for pushing a bot's workspace/config to its runtime."""

    @abstractmethod
    def sync_bot_workspace(
        self,
        *,
        session: Session,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Synchronize the workspace for *bot_id*.

        The three ``*_override`` mappings, when provided, take precedence over
        the stored values an implementation would otherwise read.
        """
        raise NotImplementedError
||||||
|
|
@ -0,0 +1,105 @@
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from clients.edge.base import EdgeClient
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.provision.base import ProvisionProvider
|
||||||
|
from providers.target import ProviderTarget
|
||||||
|
|
||||||
|
|
||||||
|
class EdgeProvisionProvider(ProvisionProvider):
    """Provision provider that pushes workspace state through a remote edge client.

    All project-specific lookups (target resolution, client creation, runtime
    and channel snapshots, node metadata) are injected as callables so this
    class carries no direct service dependencies.
    """

    def __init__(
        self,
        *,
        read_provider_target: Callable[[str], ProviderTarget],
        resolve_edge_client: Callable[[ProviderTarget], EdgeClient],
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        read_bot_channels: Callable[[BotInstance], List[Dict[str, Any]]],
        read_node_metadata: Callable[[str], Dict[str, Any]],
    ) -> None:
        self._read_provider_target = read_provider_target
        self._resolve_edge_client = resolve_edge_client
        self._read_runtime_snapshot = read_runtime_snapshot
        self._read_bot_channels = read_bot_channels
        self._read_node_metadata = read_node_metadata

    def sync_bot_workspace(
        self,
        *,
        session: Session,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Merge overrides onto the bot's runtime snapshot and sync via edge.

        Merge order (later wins): stored snapshot -> caller runtime_overrides
        -> node-level overrides from node metadata.

        Raises:
            HTTPException: 404 when the bot does not exist; 400 (from
                _client_for_target) when the target is not edge-transport.
        """
        bot = session.get(BotInstance, bot_id)
        if bot is None:
            raise HTTPException(status_code=404, detail="Bot not found")
        snapshot = dict(self._read_runtime_snapshot(bot))
        merged_runtime = dict(snapshot)
        if isinstance(runtime_overrides, dict):
            for key, value in runtime_overrides.items():
                if key in {"api_key", "llm_provider", "llm_model"}:
                    # Required credential/model fields: a blank override is
                    # ignored so it cannot wipe a valid snapshot value.
                    text = str(value or "").strip()
                    if not text:
                        continue
                    merged_runtime[key] = text
                    continue
                if key == "api_base":
                    # api_base may legitimately be overridden to empty.
                    merged_runtime[key] = str(value or "").strip()
                    continue
                merged_runtime[key] = value
        target = self._read_provider_target(bot_id)
        # Node-level settings win over both snapshot and caller overrides.
        merged_runtime.update(self._node_runtime_overrides(target.node_id, target.runtime_kind))

        # Fill delivery flags from the merged runtime only where the caller
        # did not specify them explicitly.
        resolved_delivery = dict(global_delivery_override or {})
        if "sendProgress" not in resolved_delivery:
            resolved_delivery["sendProgress"] = bool(merged_runtime.get("send_progress", False))
        if "sendToolHints" not in resolved_delivery:
            resolved_delivery["sendToolHints"] = bool(merged_runtime.get("send_tool_hints", False))

        self._client_for_target(target).sync_bot_workspace(
            bot_id=bot_id,
            channels_override=channels_override if channels_override is not None else self._read_bot_channels(bot),
            global_delivery_override=resolved_delivery,
            runtime_overrides=merged_runtime,
        )

    def _client_for_bot(self, bot_id: str) -> EdgeClient:
        # Convenience wrapper: resolve the target first, then its client.
        target = self._read_provider_target(bot_id)
        return self._client_for_target(target)

    def _client_for_target(self, target: ProviderTarget) -> EdgeClient:
        # This provider only supports edge transport; reject anything else.
        if target.transport_kind != "edge":
            raise HTTPException(status_code=400, detail=f"edge provision provider requires edge transport, got {target.transport_kind}")
        return self._resolve_edge_client(target)

    def _node_runtime_overrides(self, node_id: str, runtime_kind: str) -> Dict[str, str]:
        """Extract runtime override fields from the node's stored metadata.

        Always honors ``workspace_root``; native-sandbox settings are applied
        only when *runtime_kind* is "native".
        """
        metadata = dict(self._read_node_metadata(str(node_id or "").strip().lower()) or {})
        payload: Dict[str, str] = {}
        workspace_root = str(metadata.get("workspace_root") or "").strip()
        if workspace_root:
            payload["workspace_root"] = workspace_root
        if str(runtime_kind or "").strip().lower() != "native":
            return payload
        native_sandbox_mode = self._normalize_native_sandbox_mode(metadata.get("native_sandbox_mode"))
        # "inherit" means "no override" and is therefore omitted.
        if native_sandbox_mode != "inherit":
            payload["native_sandbox_mode"] = native_sandbox_mode
        native_command = str(metadata.get("native_command") or "").strip()
        native_workdir = str(metadata.get("native_workdir") or "").strip()
        if native_command:
            payload["native_command"] = native_command
        if native_workdir:
            payload["native_workdir"] = native_workdir
        return payload

    @staticmethod
    def _normalize_native_sandbox_mode(raw_value: Any) -> str:
        """Collapse assorted sandbox-mode spellings to workspace/full_access/inherit."""
        text = str(raw_value or "").strip().lower()
        if text in {"workspace", "sandbox", "strict"}:
            return "workspace"
        if text in {"full_access", "full-access", "danger-full-access", "escape"}:
            return "full_access"
        # Anything unrecognized (including empty) falls back to "inherit".
        return "inherit"
||||||
|
|
@ -0,0 +1,34 @@
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from providers.provision.base import ProvisionProvider
|
||||||
|
|
||||||
|
|
||||||
|
class LocalProvisionProvider(ProvisionProvider):
    """Provision provider that delegates to an in-process sync function."""

    def __init__(
        self,
        *,
        sync_workspace_func: Callable[
            [Session, str, Optional[List[Dict[str, Any]]], Optional[Dict[str, Any]], Optional[Dict[str, Any]]],
            None,
        ],
    ) -> None:
        # The sync function is injected to avoid a hard import dependency on
        # the concrete workspace-sync implementation.
        self._sync_workspace_func = sync_workspace_func

    def sync_bot_workspace(
        self,
        *,
        session: Session,
        bot_id: str,
        channels_override: Optional[List[Dict[str, Any]]] = None,
        global_delivery_override: Optional[Dict[str, Any]] = None,
        runtime_overrides: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Forward all arguments positionally to the injected sync function."""
        self._sync_workspace_func(
            session,
            bot_id,
            channels_override,
            global_delivery_override,
            runtime_overrides,
        )
||||||
|
|
@ -0,0 +1,47 @@
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Dict, Optional
|
||||||
|
|
||||||
|
from providers.target import ProviderTarget
|
||||||
|
|
||||||
|
from providers.provision.base import ProvisionProvider
|
||||||
|
from providers.runtime.base import RuntimeProvider
|
||||||
|
from providers.workspace.base import WorkspaceProvider
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ProviderRegistry:
    """Registry of runtime/workspace/provision providers keyed by bundle key."""

    runtime: Dict[str, RuntimeProvider] = field(default_factory=dict)
    workspace: Dict[str, WorkspaceProvider] = field(default_factory=dict)
    provision: Dict[str, ProvisionProvider] = field(default_factory=dict)

    def register_bundle(
        self,
        *,
        key: str,
        runtime_provider: RuntimeProvider,
        workspace_provider: WorkspaceProvider,
        provision_provider: ProvisionProvider,
    ) -> None:
        """Register all three provider kinds under the same bundle *key*."""
        self.runtime[key] = runtime_provider
        self.workspace[key] = workspace_provider
        self.provision[key] = provision_provider

    def resolve_bundle_key(self, target: ProviderTarget) -> Optional[str]:
        """Resolve the registry key matching *target*, or None.

        Tries an exact match on ``target.key`` first, then scans for any
        complete bundle (present in all three maps) whose key encodes the
        same transport/runtime/core-adapter triple.
        """
        exact = target.key
        if exact in self.runtime and exact in self.workspace and exact in self.provision:
            return exact

        for key in self.runtime.keys():
            # Only consider complete bundles registered in all three maps.
            if key not in self.workspace or key not in self.provision:
                continue
            # NOTE(review): keys appear to encode
            # "<prefix>:<transport>:<runtime>:<core_adapter>" where the adapter
            # segment may itself contain ":" — confirm the key format at the
            # registration site.
            parts = str(key or "").split(":")
            if len(parts) < 4:
                continue
            _, transport_kind, runtime_kind, core_adapter = parts[0], parts[1], parts[2], ":".join(parts[3:])
            if (
                str(transport_kind or "").strip().lower() == str(target.transport_kind or "").strip().lower()
                and str(runtime_kind or "").strip().lower() == str(target.runtime_kind or "").strip().lower()
                and str(core_adapter or "").strip().lower() == str(target.core_adapter or "").strip().lower()
            ):
                return key
        return None
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
from providers.runtime.base import RuntimeProvider
|
||||||
|
from providers.runtime.local import LocalRuntimeProvider
|
||||||
|
|
||||||
|
__all__ = ["RuntimeProvider", "LocalRuntimeProvider"]
|
||||||
|
|
@ -0,0 +1,40 @@
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
|
||||||
|
|
||||||
|
class RuntimeProvider(ABC):
    """Abstract interface for controlling a bot's runtime lifecycle."""

    @abstractmethod
    async def start_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Start *bot* and return implementation-defined start details."""
        raise NotImplementedError

    @abstractmethod
    def stop_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Stop *bot* and return implementation-defined stop details."""
        raise NotImplementedError

    @abstractmethod
    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Send a command (optionally with media references) to the running bot."""
        raise NotImplementedError

    @abstractmethod
    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent log lines for the bot."""
        raise NotImplementedError

    @abstractmethod
    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure a monitor exists for the bot; return whether one is active."""
        raise NotImplementedError

    @abstractmethod
    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Return monitor packets with sequence numbers greater than *after_seq*."""
        raise NotImplementedError

    @abstractmethod
    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the bot's runtime status string."""
        raise NotImplementedError

    @abstractmethod
    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return a snapshot of the bot's resource usage."""
        raise NotImplementedError
||||||
|
|
@ -0,0 +1,136 @@
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from clients.edge.base import EdgeClient
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.runtime.base import RuntimeProvider
|
||||||
|
from providers.target import ProviderTarget, provider_target_to_dict
|
||||||
|
|
||||||
|
|
||||||
|
class EdgeRuntimeProvider(RuntimeProvider):
    """Runtime provider that drives a bot's lifecycle on a remote edge node.

    Like EdgeProvisionProvider, every project-specific lookup is injected as a
    callable; all operations resolve the bot's provider target and forward to
    the corresponding edge client.
    """

    def __init__(
        self,
        *,
        read_provider_target: Callable[[str], ProviderTarget],
        resolve_edge_client: Callable[[ProviderTarget], EdgeClient],
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        resolve_env_params: Callable[[str], Dict[str, str]],
        read_bot_channels: Callable[[BotInstance], List[Dict[str, Any]]],
        read_node_metadata: Callable[[str], Dict[str, Any]],
    ) -> None:
        self._read_provider_target = read_provider_target
        self._resolve_edge_client = resolve_edge_client
        self._read_runtime_snapshot = read_runtime_snapshot
        self._resolve_env_params = resolve_env_params
        self._read_bot_channels = read_bot_channels
        self._read_node_metadata = read_node_metadata

    async def start_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Sync the workspace to the edge node, start the bot, mark it RUNNING.

        Raises:
            HTTPException: 400 on a missing bot id, 403 when the bot is
                disabled, 400 (from _client_for_target) on non-edge transport.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")
        runtime_snapshot = self._read_runtime_snapshot(bot)
        target = self._read_provider_target(bot_id)
        client = self._client_for_target(target)
        node_runtime_overrides = self._node_runtime_overrides(target.node_id, target.runtime_kind)
        # Merge order (later wins): stored snapshot -> target fields ->
        # node-level overrides.
        workspace_runtime = {
            **dict(runtime_snapshot),
            **provider_target_to_dict(target),
            **node_runtime_overrides,
        }
        # Push the workspace before starting so the runtime boots with
        # up-to-date config/channels.
        client.sync_bot_workspace(
            bot_id=bot_id,
            channels_override=self._read_bot_channels(bot),
            global_delivery_override={
                "sendProgress": bool(runtime_snapshot.get("send_progress")),
                "sendToolHints": bool(runtime_snapshot.get("send_tool_hints")),
            },
            runtime_overrides=workspace_runtime,
        )
        result = await client.start_bot(
            bot=bot,
            start_payload={
                "image_tag": bot.image_tag,
                "runtime_kind": target.runtime_kind,
                "env_vars": self._resolve_env_params(bot_id),
                "cpu_cores": runtime_snapshot.get("cpu_cores"),
                "memory_mb": runtime_snapshot.get("memory_mb"),
                "storage_gb": runtime_snapshot.get("storage_gb"),
                **node_runtime_overrides,
            },
        )
        # Status is only persisted after the client call succeeds.
        bot.docker_status = "RUNNING"
        session.add(bot)
        session.commit()
        return result

    def stop_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Stop the bot on its edge node and mark it STOPPED in the database."""
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")
        result = self._client_for_bot(bot_id).stop_bot(bot=bot)
        bot.docker_status = "STOPPED"
        session.add(bot)
        session.commit()
        return result

    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Forward a command to the bot's edge client."""
        return self._client_for_bot(bot_id).deliver_command(bot_id=bot_id, command=command, media=media)

    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Fetch recent log lines from the bot's edge client."""
        return self._client_for_bot(bot_id).get_recent_logs(bot_id=bot_id, tail=tail)

    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure a monitor is running on the edge node; coerce result to bool."""
        return bool(self._client_for_bot(bot_id).ensure_monitor(bot_id=bot_id))

    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        # A None/falsy client response is normalized to an empty list.
        return list(self._client_for_bot(bot_id).get_monitor_packets(bot_id=bot_id, after_seq=after_seq, limit=limit) or [])

    def get_runtime_status(self, *, bot_id: str) -> str:
        # Normalize to an upper-case status string; missing status -> STOPPED.
        return str(self._client_for_bot(bot_id).get_runtime_status(bot_id=bot_id) or "STOPPED").upper()

    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        # A None/falsy client response is normalized to an empty dict.
        return dict(self._client_for_bot(bot_id).get_resource_snapshot(bot_id=bot_id) or {})

    def _client_for_bot(self, bot_id: str) -> EdgeClient:
        # Convenience wrapper: resolve the target first, then its client.
        target = self._read_provider_target(bot_id)
        return self._client_for_target(target)

    def _client_for_target(self, target: ProviderTarget) -> EdgeClient:
        # This provider only supports edge transport; reject anything else.
        if target.transport_kind != "edge":
            raise HTTPException(status_code=400, detail=f"edge runtime provider requires edge transport, got {target.transport_kind}")
        return self._resolve_edge_client(target)

    def _node_runtime_overrides(self, node_id: str, runtime_kind: str) -> Dict[str, str]:
        """Extract runtime override fields from the node's stored metadata.

        Always honors ``workspace_root``; native-sandbox settings are applied
        only when *runtime_kind* is "native". (Duplicated in
        EdgeProvisionProvider — NOTE(review): candidate for a shared helper.)
        """
        metadata = dict(self._read_node_metadata(str(node_id or "").strip().lower()) or {})
        payload: Dict[str, str] = {}
        workspace_root = str(metadata.get("workspace_root") or "").strip()
        if workspace_root:
            payload["workspace_root"] = workspace_root
        if str(runtime_kind or "").strip().lower() != "native":
            return payload
        native_sandbox_mode = self._normalize_native_sandbox_mode(metadata.get("native_sandbox_mode"))
        # "inherit" means "no override" and is therefore omitted.
        if native_sandbox_mode != "inherit":
            payload["native_sandbox_mode"] = native_sandbox_mode
        native_command = str(metadata.get("native_command") or "").strip()
        native_workdir = str(metadata.get("native_workdir") or "").strip()
        if native_command:
            payload["native_command"] = native_command
        if native_workdir:
            payload["native_workdir"] = native_workdir
        return payload

    @staticmethod
    def _normalize_native_sandbox_mode(raw_value: Any) -> str:
        """Collapse assorted sandbox-mode spellings to workspace/full_access/inherit."""
        text = str(raw_value or "").strip().lower()
        if text in {"workspace", "sandbox", "strict"}:
            return "workspace"
        if text in {"full_access", "full-access", "danger-full-access", "escape"}:
            return "full_access"
        # Anything unrecognized (including empty) falls back to "inherit".
        return "inherit"
|
||||||
|
|
@ -0,0 +1,117 @@
|
||||||
|
import asyncio
|
||||||
|
from typing import Any, Awaitable, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.provision.base import ProvisionProvider
|
||||||
|
from providers.runtime.base import RuntimeProvider
|
||||||
|
from services.platform_activity_service import record_activity_event
|
||||||
|
|
||||||
|
|
||||||
|
class LocalRuntimeProvider(RuntimeProvider):
    """Runtime provider that runs bots as containers on the local Docker host.

    All container operations are delegated to the injected ``docker_manager``;
    the remaining callables decouple this provider from the service layer
    (workspace provisioning, env resolution, cache invalidation, readiness
    warning recording, numeric coercion helpers).
    """

    def __init__(
        self,
        *,
        docker_manager: Any,
        on_state_change: Callable[[str, dict], None],
        provision_provider: ProvisionProvider,
        read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
        resolve_env_params: Callable[[str], Dict[str, str]],
        write_env_store: Callable[[str, Dict[str, str]], None],
        invalidate_bot_cache: Callable[[str], None],
        record_agent_loop_ready_warning: Callable[[str], Awaitable[None]],
        safe_float: Callable[[Any, float], float],
        safe_int: Callable[[Any, int], int],
    ) -> None:
        self._docker_manager = docker_manager
        self._on_state_change = on_state_change
        self._provision_provider = provision_provider
        self._read_runtime_snapshot = read_runtime_snapshot
        self._resolve_env_params = resolve_env_params
        self._write_env_store = write_env_store
        self._invalidate_bot_cache = invalidate_bot_cache
        self._record_agent_loop_ready_warning = record_agent_loop_ready_warning
        self._safe_float = safe_float
        self._safe_int = safe_int

    async def start_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Provision, start, and verify the bot's local container.

        Raises HTTPException 400 for a missing id, 403 when the bot is
        disabled, and 500 when the container fails to start or is not
        RUNNING shortly after startup. Persists the resulting docker_status
        on *bot* and records a "bot_started" activity event on success.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")

        # Make the workspace and env store reflect current config before boot.
        self._provision_provider.sync_bot_workspace(session=session, bot_id=bot_id)
        runtime_snapshot = self._read_runtime_snapshot(bot)
        env_params = self._resolve_env_params(bot_id)
        self._write_env_store(bot_id, env_params)
        success = self._docker_manager.start_bot(
            bot_id,
            image_tag=bot.image_tag,
            on_state_change=self._on_state_change,
            env_vars=env_params,
            # Resource limits fall back to 1 CPU / 1 GiB / 10 GB when unset.
            cpu_cores=self._safe_float(runtime_snapshot.get("cpu_cores"), 1.0),
            memory_mb=self._safe_int(runtime_snapshot.get("memory_mb"), 1024),
            storage_gb=self._safe_int(runtime_snapshot.get("storage_gb"), 10),
        )
        if not success:
            bot.docker_status = "STOPPED"
            session.add(bot)
            session.commit()
            raise HTTPException(status_code=500, detail=f"Failed to start container with image {bot.image_tag}")

        # Re-read the live status: the container may have died right after start.
        actual_status = self._docker_manager.get_bot_status(bot_id)
        bot.docker_status = actual_status
        if actual_status != "RUNNING":
            session.add(bot)
            session.commit()
            self._invalidate_bot_cache(bot_id)
            raise HTTPException(
                status_code=500,
                detail="Bot container failed shortly after startup. Check bot logs/config.",
            )

        # Fire-and-forget readiness warning check.
        # NOTE(review): the task handle is not retained; confirm the running
        # loop keeps a reference so the task is not garbage-collected early.
        asyncio.create_task(self._record_agent_loop_ready_warning(bot_id))
        session.add(bot)
        record_activity_event(session, bot_id, "bot_started", channel="system", detail=f"Container started for {bot_id}")
        session.commit()
        self._invalidate_bot_cache(bot_id)
        return {"status": "started"}

    def stop_bot(self, *, session: Session, bot: BotInstance) -> Dict[str, Any]:
        """Stop the bot's container, persist STOPPED, and record a "bot_stopped" event.

        Raises HTTPException 400 for a missing id and 403 when the bot is disabled.
        """
        bot_id = str(bot.id or "").strip()
        if not bot_id:
            raise HTTPException(status_code=400, detail="Bot id is required")
        if not bool(getattr(bot, "enabled", True)):
            raise HTTPException(status_code=403, detail="Bot is disabled. Enable it first.")

        self._docker_manager.stop_bot(bot_id)
        bot.docker_status = "STOPPED"
        session.add(bot)
        record_activity_event(session, bot_id, "bot_stopped", channel="system", detail=f"Container stopped for {bot_id}")
        session.commit()
        self._invalidate_bot_cache(bot_id)
        return {"status": "stopped"}

    def deliver_command(self, *, bot_id: str, command: str, media: Optional[List[str]] = None) -> Optional[str]:
        """Send a command to the running bot; return an error message, or None on success."""
        success = self._docker_manager.send_command(bot_id, command, media=media)
        if success:
            return None
        return self._docker_manager.get_last_delivery_error(bot_id) or "command delivery failed"

    def get_recent_logs(self, *, bot_id: str, tail: int = 300) -> List[str]:
        """Return up to *tail* recent container log lines ([] when unavailable)."""
        return list(self._docker_manager.get_recent_logs(bot_id, tail=tail) or [])

    def ensure_monitor(self, *, bot_id: str) -> bool:
        """Ensure the docker manager is monitoring this bot; True when ensured."""
        return bool(self._docker_manager.ensure_monitor(bot_id, self._on_state_change))

    def get_monitor_packets(self, *, bot_id: str, after_seq: int = 0, limit: int = 200) -> List[Dict[str, Any]]:
        """Local runtime exposes no monitor packet feed; always returns []."""
        return []

    def get_runtime_status(self, *, bot_id: str) -> str:
        """Return the container status upper-cased, defaulting to "STOPPED"."""
        return str(self._docker_manager.get_bot_status(bot_id) or "STOPPED").upper()

    def get_resource_snapshot(self, *, bot_id: str) -> Dict[str, Any]:
        """Return the container's resource usage snapshot ({} when unavailable)."""
        return dict(self._docker_manager.get_bot_resource_snapshot(bot_id) or {})
||||||
|
|
@ -0,0 +1,59 @@
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.registry import ProviderRegistry
|
||||||
|
from providers.provision.base import ProvisionProvider
|
||||||
|
from providers.runtime.base import RuntimeProvider
|
||||||
|
from providers.target import resolve_provider_target
|
||||||
|
from providers.workspace.base import WorkspaceProvider
|
||||||
|
|
||||||
|
|
||||||
|
def _require_provider(value: Any, label: str):
|
||||||
|
if value is None:
|
||||||
|
raise RuntimeError(f"{label} is not configured")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def _get_registry(app_state: Any) -> ProviderRegistry | None:
    """Fetch the optional ProviderRegistry from app state, validating its type.

    Returns None when no registry is attached; raises RuntimeError when the
    attached object is not a ProviderRegistry.
    """
    registry = getattr(app_state, "provider_registry", None)
    if registry is None:
        return None
    if isinstance(registry, ProviderRegistry):
        return registry
    raise RuntimeError("provider registry is misconfigured")
|
|
||||||
|
|
||||||
|
def get_runtime_provider(app_state: Any, bot: BotInstance) -> RuntimeProvider:
    """Resolve the runtime provider for *bot*.

    Prefers the registry bundle matching the bot's provider target; falls back
    to the legacy single ``app_state.runtime_provider`` when no registry exists.
    """
    registry = _get_registry(app_state)
    if registry is None:
        return _require_provider(getattr(app_state, "runtime_provider", None), "runtime provider")
    target = resolve_provider_target(app_state, bot)
    bundle_key = registry.resolve_bundle_key(target)
    provider = registry.runtime.get(bundle_key) if bundle_key else None
    if provider is None:
        raise RuntimeError(f"runtime provider is not configured for target {target.key}")
    return provider
|
|
||||||
|
|
||||||
|
def get_workspace_provider(app_state: Any, bot: BotInstance) -> WorkspaceProvider:
    """Resolve the workspace provider for *bot*.

    Prefers the registry bundle matching the bot's provider target; falls back
    to the legacy single ``app_state.workspace_provider`` when no registry exists.
    """
    registry = _get_registry(app_state)
    if registry is None:
        return _require_provider(getattr(app_state, "workspace_provider", None), "workspace provider")
    target = resolve_provider_target(app_state, bot)
    bundle_key = registry.resolve_bundle_key(target)
    provider = registry.workspace.get(bundle_key) if bundle_key else None
    if provider is None:
        raise RuntimeError(f"workspace provider is not configured for target {target.key}")
    return provider
|
|
||||||
|
|
||||||
|
def get_provision_provider(app_state: Any, bot: BotInstance) -> ProvisionProvider:
    """Resolve the provision provider for *bot*.

    Prefers the registry bundle matching the bot's provider target; falls back
    to the legacy single ``app_state.provision_provider`` when no registry exists.
    """
    registry = _get_registry(app_state)
    if registry is None:
        return _require_provider(getattr(app_state, "provision_provider", None), "provision provider")
    target = resolve_provider_target(app_state, bot)
    bundle_key = registry.resolve_bundle_key(target)
    provider = registry.provision.get(bundle_key) if bundle_key else None
    if provider is None:
        raise RuntimeError(f"provision provider is not configured for target {target.key}")
    return provider
|
|
@ -0,0 +1,118 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
|
||||||
|
DEFAULT_NODE_ID = "local"
|
||||||
|
DEFAULT_TRANSPORT_KIND = "direct"
|
||||||
|
DEFAULT_RUNTIME_KIND = "docker"
|
||||||
|
DEFAULT_CORE_ADAPTER = "nanobot"
|
||||||
|
TARGET_CONFIG_KEY = "dashboardRuntime"
|
||||||
|
SUPPORTED_TRANSPORT_KINDS = {"direct", "edge"}
|
||||||
|
SUPPORTED_RUNTIME_KINDS = {"docker", "native"}
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_target_part(value: Any, fallback: str) -> str:
|
||||||
|
text = str(value or "").strip().lower()
|
||||||
|
return text or fallback
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ProviderTarget:
    """Immutable addressing tuple describing where and how a bot runs."""

    node_id: str = DEFAULT_NODE_ID
    transport_kind: str = DEFAULT_TRANSPORT_KIND
    runtime_kind: str = DEFAULT_RUNTIME_KIND
    core_adapter: str = DEFAULT_CORE_ADAPTER

    @property
    def key(self) -> str:
        """Colon-joined canonical identity, e.g. "local:direct:docker:nanobot"."""
        return f"{self.node_id}:{self.transport_kind}:{self.runtime_kind}:{self.core_adapter}"
||||||
|
|
||||||
|
|
||||||
|
def normalize_provider_target(value: Any, fallback: ProviderTarget | None = None) -> ProviderTarget:
    """Coerce *value* (ProviderTarget, dict, or anything else) into a ProviderTarget.

    Dicts accept both snake_case and camelCase keys. Transport/runtime kinds
    outside the supported sets fall back to the base target's values; empty
    or missing parts inherit from *fallback* (or the library defaults).
    """
    base = fallback or ProviderTarget()

    if isinstance(value, ProviderTarget):
        raw_node_id = value.node_id
        raw_transport_kind = value.transport_kind
        raw_runtime_kind = value.runtime_kind
        raw_core_adapter = value.core_adapter
    elif isinstance(value, dict):
        raw_node_id = value.get("node_id", value.get("nodeId"))
        raw_transport_kind = value.get("transport_kind", value.get("transportKind"))
        raw_runtime_kind = value.get("runtime_kind", value.get("runtimeKind"))
        raw_core_adapter = value.get("core_adapter", value.get("coreAdapter"))
    else:
        raw_node_id = raw_transport_kind = raw_runtime_kind = raw_core_adapter = None

    transport_kind = _normalize_target_part(raw_transport_kind, base.transport_kind)
    if transport_kind not in SUPPORTED_TRANSPORT_KINDS:
        transport_kind = base.transport_kind

    runtime_kind = _normalize_target_part(raw_runtime_kind, base.runtime_kind)
    if runtime_kind not in SUPPORTED_RUNTIME_KINDS:
        runtime_kind = base.runtime_kind

    return ProviderTarget(
        node_id=_normalize_target_part(raw_node_id, base.node_id),
        transport_kind=transport_kind,
        runtime_kind=runtime_kind,
        core_adapter=_normalize_target_part(raw_core_adapter, base.core_adapter),
    )
||||||
|
|
||||||
|
|
||||||
|
def provider_target_to_dict(target: ProviderTarget) -> dict[str, str]:
    """Serialize *target* into its snake_case dict form."""
    fields = ("node_id", "transport_kind", "runtime_kind", "core_adapter")
    return {name: getattr(target, name) for name in fields}
|
|
||||||
|
|
||||||
|
def provider_target_from_config(config_data: Any, fallback: ProviderTarget | None = None) -> ProviderTarget:
    """Read the persisted target out of a bot config dict, normalizing with *fallback*.

    Non-dict config data yields the fallback/default target.
    """
    raw_target = config_data.get(TARGET_CONFIG_KEY) if isinstance(config_data, dict) else None
    return normalize_provider_target(raw_target, fallback=fallback)
||||||
|
|
||||||
|
|
||||||
|
def write_provider_target_config(config_data: dict[str, Any], target: ProviderTarget) -> dict[str, Any]:
    """Persist *target* under TARGET_CONFIG_KEY in camelCase form; returns *config_data*."""
    serialized = {
        "nodeId": target.node_id,
        "transportKind": target.transport_kind,
        "runtimeKind": target.runtime_kind,
        "coreAdapter": target.core_adapter,
    }
    config_data[TARGET_CONFIG_KEY] = serialized
    return config_data
||||||
|
|
||||||
|
|
||||||
|
def resolve_provider_target(app_state: Any, bot: BotInstance) -> ProviderTarget:
    """Determine the effective provider target for *bot*.

    App-state ``provider_default_*`` attributes seed the fallback target.
    A ``resolve_bot_provider_target`` hook on app state, when callable, wins;
    otherwise the bot's own node/transport/runtime/adapter columns are used.
    """
    default_spec = (
        ("node_id", "provider_default_node_id", DEFAULT_NODE_ID),
        ("transport_kind", "provider_default_transport_kind", DEFAULT_TRANSPORT_KIND),
        ("runtime_kind", "provider_default_runtime_kind", DEFAULT_RUNTIME_KIND),
        ("core_adapter", "provider_default_core_adapter", DEFAULT_CORE_ADAPTER),
    )
    fallback = ProviderTarget(**{
        field: _normalize_target_part(getattr(app_state, attr, None), default)
        for field, attr, default in default_spec
    })

    resolver = getattr(app_state, "resolve_bot_provider_target", None)
    if callable(resolver):
        return normalize_provider_target(resolver(bot), fallback=fallback)

    bot_parts = {
        "node_id": getattr(bot, "node_id", None),
        "transport_kind": getattr(bot, "transport_kind", None),
        "runtime_kind": getattr(bot, "runtime_kind", None),
        "core_adapter": getattr(bot, "core_adapter", None),
    }
    return normalize_provider_target(bot_parts, fallback=fallback)
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
fastapi==0.110.0
|
fastapi==0.110.0
|
||||||
uvicorn[standard]==0.27.1
|
uvicorn==0.27.1
|
||||||
docker==7.0.0
|
docker==7.0.0
|
||||||
sqlmodel==0.0.16
|
sqlmodel==0.0.16
|
||||||
pydantic==2.6.3
|
pydantic==2.6.3
|
||||||
|
|
@ -15,5 +15,7 @@ watchfiles==0.21.0
|
||||||
urllib3==1.26.18
|
urllib3==1.26.18
|
||||||
requests==2.31.0
|
requests==2.31.0
|
||||||
redis==5.0.8
|
redis==5.0.8
|
||||||
|
bcrypt==4.2.1
|
||||||
|
PyJWT==2.10.1
|
||||||
opencc-purepy==1.1.0
|
opencc-purepy==1.1.0
|
||||||
pywhispercpp==1.3.1
|
pywhispercpp==1.3.1
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
from typing import Optional, Dict, Any, List
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
from pydantic import BaseModel, ConfigDict
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
class ChannelConfigRequest(BaseModel):
|
class ChannelConfigRequest(BaseModel):
|
||||||
|
|
@ -22,47 +22,47 @@ class ChannelConfigUpdateRequest(BaseModel):
|
||||||
|
|
||||||
|
|
||||||
class BotCreateRequest(BaseModel):
|
class BotCreateRequest(BaseModel):
|
||||||
model_config = ConfigDict(extra="forbid")
|
|
||||||
|
|
||||||
id: str
|
id: str
|
||||||
name: str
|
name: str
|
||||||
enabled: Optional[bool] = True
|
enabled: Optional[bool] = True
|
||||||
image_tag: str
|
|
||||||
access_password: Optional[str] = None
|
|
||||||
llm_provider: str
|
llm_provider: str
|
||||||
llm_model: str
|
llm_model: str
|
||||||
api_key: str
|
api_key: str
|
||||||
api_base: str
|
image_tag: Optional[str] = None
|
||||||
|
system_prompt: Optional[str] = None
|
||||||
|
api_base: Optional[str] = None
|
||||||
temperature: float = 0.2
|
temperature: float = 0.2
|
||||||
top_p: float = 1.0
|
top_p: float = 1.0
|
||||||
max_tokens: int = 8192
|
max_tokens: int = 8192
|
||||||
cpu_cores: float = 1.0
|
cpu_cores: float = 1.0
|
||||||
memory_mb: int = 1024
|
memory_mb: int = 1024
|
||||||
storage_gb: int = 10
|
storage_gb: int = 10
|
||||||
system_timezone: str
|
system_timezone: Optional[str] = None
|
||||||
soul_md: str
|
soul_md: Optional[str] = None
|
||||||
agents_md: str
|
agents_md: Optional[str] = None
|
||||||
user_md: str
|
user_md: Optional[str] = None
|
||||||
tools_md: str
|
tools_md: Optional[str] = None
|
||||||
tools_config: Optional[Dict[str, Any]] = None
|
tools_config: Optional[Dict[str, Any]] = None
|
||||||
env_params: Optional[Dict[str, str]] = None
|
env_params: Optional[Dict[str, str]] = None
|
||||||
identity_md: str
|
identity_md: Optional[str] = None
|
||||||
channels: Optional[List[ChannelConfigRequest]] = None
|
channels: Optional[List[ChannelConfigRequest]] = None
|
||||||
send_progress: Optional[bool] = None
|
send_progress: Optional[bool] = None
|
||||||
send_tool_hints: Optional[bool] = None
|
send_tool_hints: Optional[bool] = None
|
||||||
|
node_id: Optional[str] = None
|
||||||
|
transport_kind: Optional[str] = None
|
||||||
|
runtime_kind: Optional[str] = None
|
||||||
|
core_adapter: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
class BotUpdateRequest(BaseModel):
|
class BotUpdateRequest(BaseModel):
|
||||||
model_config = ConfigDict(extra="forbid")
|
|
||||||
|
|
||||||
name: Optional[str] = None
|
name: Optional[str] = None
|
||||||
enabled: Optional[bool] = None
|
enabled: Optional[bool] = None
|
||||||
image_tag: Optional[str] = None
|
|
||||||
access_password: Optional[str] = None
|
|
||||||
llm_provider: Optional[str] = None
|
llm_provider: Optional[str] = None
|
||||||
llm_model: Optional[str] = None
|
llm_model: Optional[str] = None
|
||||||
api_key: Optional[str] = None
|
api_key: Optional[str] = None
|
||||||
api_base: Optional[str] = None
|
api_base: Optional[str] = None
|
||||||
|
image_tag: Optional[str] = None
|
||||||
|
system_prompt: Optional[str] = None
|
||||||
temperature: Optional[float] = None
|
temperature: Optional[float] = None
|
||||||
top_p: Optional[float] = None
|
top_p: Optional[float] = None
|
||||||
max_tokens: Optional[int] = None
|
max_tokens: Optional[int] = None
|
||||||
|
|
@ -70,8 +70,8 @@ class BotUpdateRequest(BaseModel):
|
||||||
memory_mb: Optional[int] = None
|
memory_mb: Optional[int] = None
|
||||||
storage_gb: Optional[int] = None
|
storage_gb: Optional[int] = None
|
||||||
system_timezone: Optional[str] = None
|
system_timezone: Optional[str] = None
|
||||||
agents_md: Optional[str] = None
|
|
||||||
soul_md: Optional[str] = None
|
soul_md: Optional[str] = None
|
||||||
|
agents_md: Optional[str] = None
|
||||||
user_md: Optional[str] = None
|
user_md: Optional[str] = None
|
||||||
tools_md: Optional[str] = None
|
tools_md: Optional[str] = None
|
||||||
tools_config: Optional[Dict[str, Any]] = None
|
tools_config: Optional[Dict[str, Any]] = None
|
||||||
|
|
@ -79,6 +79,17 @@ class BotUpdateRequest(BaseModel):
|
||||||
identity_md: Optional[str] = None
|
identity_md: Optional[str] = None
|
||||||
send_progress: Optional[bool] = None
|
send_progress: Optional[bool] = None
|
||||||
send_tool_hints: Optional[bool] = None
|
send_tool_hints: Optional[bool] = None
|
||||||
|
node_id: Optional[str] = None
|
||||||
|
transport_kind: Optional[str] = None
|
||||||
|
runtime_kind: Optional[str] = None
|
||||||
|
core_adapter: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class BotDeployRequest(BaseModel):
    """Request body for deploying a bot onto a specific managed node."""

    node_id: str
    runtime_kind: Optional[str] = None  # None presumably means "use node default" — TODO confirm
    image_tag: Optional[str] = None
    auto_start: bool = False  # start the bot immediately after deployment
|
|
||||||
|
|
||||||
class BotToolsConfigUpdateRequest(BaseModel):
|
class BotToolsConfigUpdateRequest(BaseModel):
|
||||||
|
|
@ -93,10 +104,6 @@ class BotEnvParamsUpdateRequest(BaseModel):
|
||||||
env_params: Optional[Dict[str, str]] = None
|
env_params: Optional[Dict[str, str]] = None
|
||||||
|
|
||||||
|
|
||||||
class BotPageAuthLoginRequest(BaseModel):
|
|
||||||
password: str
|
|
||||||
|
|
||||||
|
|
||||||
class CommandRequest(BaseModel):
|
class CommandRequest(BaseModel):
|
||||||
command: Optional[str] = None
|
command: Optional[str] = None
|
||||||
attachments: Optional[List[str]] = None
|
attachments: Optional[List[str]] = None
|
||||||
|
|
@ -104,3 +111,12 @@ class CommandRequest(BaseModel):
|
||||||
|
|
||||||
class MessageFeedbackRequest(BaseModel):
|
class MessageFeedbackRequest(BaseModel):
|
||||||
feedback: Optional[str] = None
|
feedback: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class WorkspaceFileUpdateRequest(BaseModel):
    """Request body that replaces a workspace file's full content."""

    content: str
||||||
|
|
||||||
|
|
||||||
|
class SystemTemplatesUpdateRequest(BaseModel):
    """Partial update of system-level agent templates and topic presets (None = leave unchanged)."""

    agent_md_templates: Optional[Dict[str, str]] = None
    topic_presets: Optional[Dict[str, Any]] = None
||||||
|
|
@ -0,0 +1,128 @@
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
NODE_PROTOCOL_VERSION = "1"  # bump when the dashboard<->edge wire format changes


class EdgeNodeIdentityBase(BaseModel):
    """Identity fields shared by edge node protocol messages."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"


class EdgeStatusResponse(BaseModel):
    """Minimal status acknowledgement from an edge endpoint."""

    status: str


class EdgeStateWriteRequest(BaseModel):
    """Payload for persisting a bot state blob on the edge node."""

    data: Dict[str, Any] = Field(default_factory=dict)
    workspace_root: Optional[str] = None


class EdgeStateResponse(BaseModel):
    """A stored state blob keyed by bot id and state key."""

    bot_id: str
    state_key: str
    data: Dict[str, Any] = Field(default_factory=dict)


class EdgeNativePreflightRequest(BaseModel):
    """Inputs for checking that a native (non-docker) launch is possible."""

    native_command: Optional[str] = None
    native_workdir: Optional[str] = None


class EdgeNativePreflightResponse(BaseModel):
    """Result of a native launch preflight check on the edge node."""

    ok: bool = False
    command: List[str] = Field(default_factory=list)
    workdir: str = ""
    command_available: bool = False
    workdir_exists: bool = False
    detail: str = ""


class EdgeStartBotRequest(BaseModel):
    """Everything the edge node needs to start a bot runtime."""

    image_tag: str
    runtime_kind: str = "docker"
    env_vars: Dict[str, str] = Field(default_factory=dict)
    workspace_root: Optional[str] = None
    native_command: Optional[str] = None  # only meaningful for runtime_kind == "native"
    native_workdir: Optional[str] = None
    cpu_cores: float = 1.0
    memory_mb: int = 1024
    storage_gb: int = 10


class EdgeCommandRequest(BaseModel):
    """A user command (plus media references) forwarded to a bot."""

    command: str
    media: List[str] = Field(default_factory=list)


class EdgeLogsResponse(BaseModel):
    """Recent log lines for one bot."""

    bot_id: str
    logs: List[str] = Field(default_factory=list)


class EdgeMonitorEnsureResponse(BaseModel):
    """Acknowledgement that a monitor stream exists for a bot."""

    ensured: bool = False


class EdgeMonitorPacket(BaseModel):
    """One captured monitor packet, ordered by *seq* within a bot."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str = ""
    bot_id: str = ""
    seq: int = 0
    captured_at: str = ""
    packet: Dict[str, Any] = Field(default_factory=dict)


class EdgeMonitorPacketsResponse(BaseModel):
    """A page of monitor packets newer than a requested sequence number."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str = ""
    bot_id: str
    latest_seq: int = 0
    packets: List[EdgeMonitorPacket] = Field(default_factory=list)


class EdgeWorkspaceSyncRequest(BaseModel):
    """Optional overrides applied when syncing a bot workspace onto the node."""

    channels_override: Optional[List[Dict[str, Any]]] = None
    global_delivery_override: Optional[Dict[str, Any]] = None
    runtime_overrides: Optional[Dict[str, Any]] = None


class EdgeMarkdownWriteRequest(BaseModel):
    """Raw markdown content to write into a bot workspace file."""

    content: str = ""


class EdgeNodeResourcesResponse(BaseModel):
    """Node identity plus a point-in-time resource usage report."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str = ""
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""


class EdgeNodeSelfResponse(BaseModel):
    """Full self-description of an edge node: identity, capabilities, resources."""

    protocol_version: str = NODE_PROTOCOL_VERSION
    node_id: str
    display_name: str
    service: str = "dashboard-edge"
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    capabilities: Dict[str, Any] = Field(default_factory=dict)
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""


class EdgeNodeHeartbeatResponse(EdgeNodeIdentityBase):
    """Periodic heartbeat: identity base plus capabilities/resources snapshot."""

    capabilities: Dict[str, Any] = Field(default_factory=dict)
    resources: Dict[str, Any] = Field(default_factory=dict)
    reported_at: str = ""
|
|
@ -3,15 +3,29 @@ from typing import Any, Dict, List, Optional
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class LoadingPageSettings(BaseModel):
    """Texts shown on the platform loading/splash page."""

    title: str = "Dashboard Nanobot"
    subtitle: str = "平台正在准备管理面板"
    description: str = "请稍候,正在加载 Bot 平台数据。"
||||||
|
|
||||||
|
|
||||||
class PlatformSettingsPayload(BaseModel):
|
class PlatformSettingsPayload(BaseModel):
|
||||||
page_size: int = Field(default=10, ge=1, le=100)
|
page_size: int = Field(default=10, ge=1, le=100)
|
||||||
chat_pull_page_size: int = Field(default=60, ge=10, le=500)
|
chat_pull_page_size: int = Field(default=60, ge=10, le=500)
|
||||||
auth_token_ttl_hours: int = Field(default=24, ge=1, le=720)
|
command_auto_unlock_seconds: int = Field(default=10, ge=1, le=600)
|
||||||
auth_token_max_active: int = Field(default=2, ge=1, le=20)
|
|
||||||
upload_max_mb: int = Field(default=100, ge=1, le=2048)
|
upload_max_mb: int = Field(default=100, ge=1, le=2048)
|
||||||
allowed_attachment_extensions: List[str] = Field(default_factory=list)
|
allowed_attachment_extensions: List[str] = Field(default_factory=list)
|
||||||
workspace_download_extensions: List[str] = Field(default_factory=list)
|
workspace_download_extensions: List[str] = Field(default_factory=list)
|
||||||
speech_enabled: bool = True
|
speech_enabled: bool = True
|
||||||
|
speech_max_audio_seconds: int = Field(default=20, ge=5, le=600)
|
||||||
|
speech_default_language: str = Field(default="zh", min_length=1, max_length=16)
|
||||||
|
speech_force_simplified: bool = True
|
||||||
|
speech_audio_preprocess: bool = True
|
||||||
|
speech_audio_filter: str = Field(default="highpass=f=120,lowpass=f=7600,afftdn=nf=-20")
|
||||||
|
speech_initial_prompt: str = Field(
|
||||||
|
default="以下内容可能包含简体中文和英文术语。请优先输出简体中文,英文单词、缩写、品牌名和数字保持原文,不要翻译。"
|
||||||
|
)
|
||||||
|
loading_page: LoadingPageSettings = Field(default_factory=LoadingPageSettings)
|
||||||
|
|
||||||
|
|
||||||
class PlatformUsageItem(BaseModel):
|
class PlatformUsageItem(BaseModel):
|
||||||
|
|
@ -41,19 +55,6 @@ class PlatformUsageSummary(BaseModel):
|
||||||
total_tokens: int
|
total_tokens: int
|
||||||
|
|
||||||
|
|
||||||
class PlatformUsageAnalyticsSeries(BaseModel):
|
|
||||||
model: str
|
|
||||||
total_requests: int
|
|
||||||
daily_counts: List[int]
|
|
||||||
|
|
||||||
|
|
||||||
class PlatformUsageAnalytics(BaseModel):
|
|
||||||
window_days: int
|
|
||||||
days: List[str]
|
|
||||||
total_requests: int
|
|
||||||
series: List[PlatformUsageAnalyticsSeries]
|
|
||||||
|
|
||||||
|
|
||||||
class PlatformUsageResponse(BaseModel):
|
class PlatformUsageResponse(BaseModel):
|
||||||
summary: PlatformUsageSummary
|
summary: PlatformUsageSummary
|
||||||
items: List[PlatformUsageItem]
|
items: List[PlatformUsageItem]
|
||||||
|
|
@ -61,32 +62,6 @@ class PlatformUsageResponse(BaseModel):
|
||||||
limit: int
|
limit: int
|
||||||
offset: int
|
offset: int
|
||||||
has_more: bool
|
has_more: bool
|
||||||
analytics: PlatformUsageAnalytics
|
|
||||||
|
|
||||||
|
|
||||||
class PlatformLoginLogItem(BaseModel):
|
|
||||||
id: int
|
|
||||||
auth_type: str
|
|
||||||
subject_id: str
|
|
||||||
bot_id: Optional[str] = None
|
|
||||||
auth_source: str
|
|
||||||
client_ip: Optional[str] = None
|
|
||||||
user_agent: Optional[str] = None
|
|
||||||
device_info: Optional[str] = None
|
|
||||||
created_at: str
|
|
||||||
last_seen_at: Optional[str] = None
|
|
||||||
expires_at: Optional[str] = None
|
|
||||||
revoked_at: Optional[str] = None
|
|
||||||
revoke_reason: Optional[str] = None
|
|
||||||
status: str
|
|
||||||
|
|
||||||
|
|
||||||
class PlatformLoginLogResponse(BaseModel):
|
|
||||||
items: List[PlatformLoginLogItem]
|
|
||||||
total: int
|
|
||||||
limit: int
|
|
||||||
offset: int
|
|
||||||
has_more: bool
|
|
||||||
|
|
||||||
|
|
||||||
class PlatformActivityItem(BaseModel):
|
class PlatformActivityItem(BaseModel):
|
||||||
|
|
@ -100,6 +75,36 @@ class PlatformActivityItem(BaseModel):
|
||||||
created_at: str
|
created_at: str
|
||||||
|
|
||||||
|
|
||||||
|
class PlatformActivityListResponse(BaseModel):
    """Paged list of platform activity events."""

    items: List[PlatformActivityItem] = Field(default_factory=list)
    total: int = 0
    limit: int = 20
    offset: int = 0
    has_more: bool = False


class PlatformDashboardUsagePoint(BaseModel):
    """One time-bucketed usage sample for a model series."""

    bucket_at: str
    label: str
    call_count: int = 0


class PlatformDashboardUsageSeries(BaseModel):
    """Usage time series for a single model."""

    model: str
    total_calls: int = 0
    points: List[PlatformDashboardUsagePoint] = Field(default_factory=list)


class PlatformDashboardAnalyticsResponse(BaseModel):
    """Dashboard analytics payload: totals, per-model series, and recent events."""

    total_request_count: int = 0
    total_model_count: int = 0
    granularity: str = "day"  # bucket width of the series points
    since_days: int = 7
    events_page_size: int = 20
    series: List[PlatformDashboardUsageSeries] = Field(default_factory=list)
    recent_events: List[PlatformActivityItem] = Field(default_factory=list)
||||||
|
|
||||||
|
|
||||||
class SystemSettingPayload(BaseModel):
|
class SystemSettingPayload(BaseModel):
|
||||||
key: str
|
key: str
|
||||||
name: str = ""
|
name: str = ""
|
||||||
|
|
@ -122,3 +127,39 @@ class SystemSettingItem(BaseModel):
|
||||||
sort_order: int = 100
|
sort_order: int = 100
|
||||||
created_at: str
|
created_at: str
|
||||||
updated_at: str
|
updated_at: str
|
||||||
|
|
||||||
|
|
||||||
|
class ManagedNodePayload(BaseModel):
    """Payload describing a managed runtime node registration.

    The ``native_*`` fields only apply when the node runs bots as native
    processes rather than containers.
    """

    node_id: str
    display_name: str = ""
    base_url: str = ""
    enabled: bool = True
    auth_token: str = ""
    transport_kind: str = "edge"
    runtime_kind: str = "docker"
    core_adapter: str = "nanobot"
    workspace_root: str = ""
    native_command: str = ""
    native_workdir: str = ""
    native_sandbox_mode: str = "inherit"
|
||||||
|
|
||||||
|
|
||||||
|
class ManagedNodeConnectivityResult(BaseModel):
    """Outcome of probing a managed node's reachability."""

    ok: bool
    status: str
    latency_ms: int = 0
    detail: str = ""
    # Raw self-description returned by the node, if any.
    node_self: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ManagedNodeNativePreflightResult(BaseModel):
    """Outcome of a native-runtime preflight check on a managed node."""

    ok: bool
    status: str
    latency_ms: int = 0
    detail: str = ""
    command: List[str] = Field(default_factory=list)  # resolved native command argv
    workdir: str = ""
    command_available: bool = False
    workdir_exists: bool = False
    runtime_native_supported: bool = False
    # Raw self-description returned by the node, if any.
    node_self: Optional[Dict[str, Any]] = None
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,153 @@
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class SysAuthLoginRequest(BaseModel):
    """Credentials submitted to the system-auth login endpoint."""

    username: str
    password: str
|
||||||
|
|
||||||
|
|
||||||
|
class SysAuthMenuItem(BaseModel):
    """One navigation menu entry; recursive via ``children``.

    The forward reference in ``children`` is resolved by the
    ``model_rebuild()`` call at module bottom.
    """

    menu_key: str
    parent_key: str = ""
    title: str
    title_en: str = ""
    menu_type: str = "item"
    route_path: str = ""
    icon: str = ""
    permission_key: str = ""
    sort_order: int = 100
    children: List["SysAuthMenuItem"] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysAuthRolePayload(BaseModel):
    """Minimal role reference embedded in user payloads."""

    id: int = 0
    role_key: str
    name: str
|
||||||
|
|
||||||
|
|
||||||
|
class SysAuthUserPayload(BaseModel):
    """Authenticated user identity with an optional attached role."""

    id: int
    username: str
    display_name: str
    role: Optional[SysAuthRolePayload] = None
|
||||||
|
|
||||||
|
|
||||||
|
class SysAssignedBotPayload(BaseModel):
    """Summary of a bot instance assigned to a user."""

    id: str
    name: str
    enabled: bool = True
    node_id: str = ""
    node_display_name: str = ""
    docker_status: str = "STOPPED"
    image_tag: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class SysRoleSummaryResponse(BaseModel):
    """Role summary including its granted menu and permission keys."""

    id: int
    role_key: str
    name: str
    description: str = ""
    is_active: bool = True
    sort_order: int = 100
    user_count: int = 0  # number of users holding this role
    menu_keys: List[str] = Field(default_factory=list)
    permission_keys: List[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysRoleListResponse(BaseModel):
    """List wrapper for role summaries."""

    items: List[SysRoleSummaryResponse] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysRoleUpsertRequest(BaseModel):
    """Request body for creating or updating a role and its grants."""

    role_key: str
    name: str
    description: str = ""
    is_active: bool = True
    sort_order: int = 100
    menu_keys: List[str] = Field(default_factory=list)
    permission_keys: List[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysRoleGrantMenuItem(BaseModel):
    """Menu entry shown in the role-grant editor; recursive via ``children``.

    The forward reference is resolved by ``model_rebuild()`` at module bottom.
    """

    menu_key: str
    parent_key: str = ""
    title: str
    title_en: str = ""
    menu_type: str = "item"
    route_path: str = ""
    icon: str = ""
    sort_order: int = 100
    children: List["SysRoleGrantMenuItem"] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysPermissionSummaryResponse(BaseModel):
    """Permission summary, optionally tied to a menu via ``menu_key``."""

    id: int
    permission_key: str
    name: str
    menu_key: str = ""
    action: str = "view"
    description: str = ""
    sort_order: int = 100
|
||||||
|
|
||||||
|
|
||||||
|
class SysRoleGrantBootstrapResponse(BaseModel):
    """Everything the role-grant editor needs: menu tree plus permissions."""

    menus: List[SysRoleGrantMenuItem] = Field(default_factory=list)
    permissions: List[SysPermissionSummaryResponse] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysUserSummaryResponse(BaseModel):
    """User summary with role reference and assigned bot ids."""

    id: int
    username: str
    display_name: str
    is_active: bool = True
    last_login_at: Optional[str] = None
    role: Optional[SysAuthRolePayload] = None
    bot_ids: List[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysUserListResponse(BaseModel):
    """List wrapper for user summaries."""

    items: List[SysUserSummaryResponse] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysUserCreateRequest(BaseModel):
    """Request body for creating a user (password is required on create)."""

    username: str
    display_name: str
    password: str
    role_id: int
    is_active: bool = True
    bot_ids: List[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysUserUpdateRequest(BaseModel):
    """Request body for updating a user.

    An empty ``password`` presumably leaves the current password unchanged —
    TODO confirm against the handler.
    """

    display_name: str
    password: str = ""
    role_id: int
    is_active: bool = True
    bot_ids: List[str] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysProfileUpdateRequest(BaseModel):
    """Self-service profile update; empty ``password`` means no change
    (presumably — verify against the handler)."""

    display_name: str
    password: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
class SysAuthBootstrapResponse(BaseModel):
    """Post-login bootstrap bundle: token, identity, menus, permissions."""

    token: str = ""
    expires_at: Optional[str] = None
    user: SysAuthUserPayload
    menus: List[SysAuthMenuItem] = Field(default_factory=list)
    permissions: List[str] = Field(default_factory=list)
    home_path: str = "/dashboard"  # default landing route after login
    assigned_bots: List[SysAssignedBotPayload] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class SysAuthStatusResponse(BaseModel):
    """System-auth status probe result."""

    enabled: bool = True
    user_count: int = 0
    default_username: str = "admin"
|
||||||
|
|
||||||
|
|
||||||
|
# Resolve the self-referential "children" forward references now that the
# recursive menu models are fully defined (pydantic v2 API).
SysAuthMenuItem.model_rebuild()
SysRoleGrantMenuItem.model_rebuild()
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
from typing import Optional, Dict, Any
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceFileUpdateRequest(BaseModel):
    """Request body carrying the full new content of a workspace file."""

    content: str
|
|
||||||
|
|
||||||
|
|
||||||
class PanelLoginRequest(BaseModel):
    """Password-only login request for the panel."""

    password: str
|
|
||||||
|
|
||||||
|
|
||||||
class AgentMdTemplatesPayload(BaseModel):
    """Optional markdown template documents for an agent; ``None`` fields
    are left untouched."""

    agents_md: Optional[str] = None
    soul_md: Optional[str] = None
    user_md: Optional[str] = None
    tools_md: Optional[str] = None
    identity_md: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
class SystemTemplatesUpdateRequest(BaseModel):
    """Partial update of system-level templates; ``None`` means no change."""

    agent_md_templates: Optional[AgentMdTemplatesPayload] = None
    topic_presets: Optional[Dict[str, Any]] = None
|
|
||||||
|
|
@ -0,0 +1,165 @@
|
||||||
|
import asyncio
|
||||||
|
from typing import Any, Callable
|
||||||
|
|
||||||
|
from fastapi import HTTPException, WebSocket, WebSocketDisconnect
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from models.platform import BotRequestUsage
|
||||||
|
|
||||||
|
|
||||||
|
class AppLifecycleService:
    """Application lifecycle glue: startup restoration and the per-bot
    runtime-event WebSocket endpoint.

    Every collaborator (engine, cache, registry, runtime services, and the
    provider-target helpers) is injected, so this class has no import-time
    coupling to the application module.
    """

    def __init__(
        self,
        *,
        app: Any,
        engine: Any,
        cache: Any,
        logger: Any,
        project_root: str,
        database_engine: str,
        database_echo: Any,
        database_url_display: str,
        redis_enabled: bool,
        init_database: Callable[[], None],
        node_registry_service: Any,
        local_managed_node: Callable[[], Any],
        prune_expired_activity_events: Callable[..., int],
        migrate_bot_resources_store: Callable[[str], None],
        resolve_bot_provider_target_for_instance: Callable[[Any], Any],
        default_provider_target: Callable[[], Any],
        set_bot_provider_target: Callable[[str, Any], None],
        apply_provider_target_to_bot: Callable[[Any, Any], None],
        normalize_provider_target: Callable[[Any], Any],
        runtime_service: Any,
        runtime_event_service: Any,
        clear_provider_target_overrides: Callable[[], None],
    ) -> None:
        # Store injected collaborators verbatim; no work happens here.
        self._app = app
        self._engine = engine
        self._cache = cache
        self._logger = logger
        self._project_root = project_root
        self._database_engine = database_engine
        self._database_echo = database_echo
        self._database_url_display = database_url_display
        self._redis_enabled = redis_enabled
        self._init_database = init_database
        self._node_registry_service = node_registry_service
        self._local_managed_node = local_managed_node
        self._prune_expired_activity_events = prune_expired_activity_events
        self._migrate_bot_resources_store = migrate_bot_resources_store
        self._resolve_bot_provider_target_for_instance = resolve_bot_provider_target_for_instance
        self._default_provider_target = default_provider_target
        self._set_bot_provider_target = set_bot_provider_target
        self._apply_provider_target_to_bot = apply_provider_target_to_bot
        self._normalize_provider_target = normalize_provider_target
        self._runtime_service = runtime_service
        self._runtime_event_service = runtime_event_service
        self._clear_provider_target_overrides = clear_provider_target_overrides

    async def on_startup(self) -> None:
        """Run one-time startup work: init DB, reconcile provider targets,
        and restore runtime monitors for bots marked RUNNING."""
        # Remember the main event loop so background threads can schedule
        # broadcasts onto it later.
        self._app.state.main_loop = asyncio.get_running_loop()
        self._clear_provider_target_overrides()
        self._logger.info(
            "startup project_root=%s db_engine=%s db_echo=%s db_url=%s redis=%s",
            self._project_root,
            self._database_engine,
            self._database_echo,
            self._database_url_display,
            "enabled" if self._cache.ping() else ("disabled" if self._redis_enabled else "not_configured"),
        )
        self._init_database()
        # Empty prefix: flush the entire cache namespace on startup.
        self._cache.delete_prefix("")
        with Session(self._engine) as session:
            self._node_registry_service.load_from_session(session)
            # Ensure the local node is always present in the registry.
            self._node_registry_service.upsert_node(session, self._local_managed_node())
            pruned_events = self._prune_expired_activity_events(session, force=True)
            if pruned_events > 0:
                session.commit()
            target_dirty = False
            for bot in session.exec(select(BotInstance)).all():
                self._migrate_bot_resources_store(bot.id)
                target = self._resolve_bot_provider_target_for_instance(bot)
                # Force every target onto the "edge" transport, keeping its
                # node/runtime/adapter fields.
                if str(target.transport_kind or "").strip().lower() != "edge":
                    target = self._normalize_provider_target(
                        {
                            "node_id": target.node_id,
                            "transport_kind": "edge",
                            "runtime_kind": target.runtime_kind,
                            "core_adapter": target.core_adapter,
                        },
                        fallback=self._default_provider_target(),
                    )
                self._set_bot_provider_target(bot.id, target)
                # Persist the target back onto the bot row only when any of
                # the four identifying fields actually differ.
                if (
                    str(getattr(bot, "node_id", "") or "").strip().lower() != target.node_id
                    or str(getattr(bot, "transport_kind", "") or "").strip().lower() != target.transport_kind
                    or str(getattr(bot, "runtime_kind", "") or "").strip().lower() != target.runtime_kind
                    or str(getattr(bot, "core_adapter", "") or "").strip().lower() != target.core_adapter
                ):
                    self._apply_provider_target_to_bot(bot, target)
                    session.add(bot)
                    target_dirty = True
            if target_dirty:
                session.commit()
            running_bots = session.exec(select(BotInstance).where(BotInstance.docker_status == "RUNNING")).all()
            for bot in running_bots:
                try:
                    self._runtime_service.ensure_monitor(app_state=self._app.state, bot=bot)
                    # Resume packet sync for the most recent PENDING usage
                    # request, if one exists for this bot.
                    pending_usage = session.exec(
                        select(BotRequestUsage)
                        .where(BotRequestUsage.bot_id == str(bot.id or "").strip())
                        .where(BotRequestUsage.status == "PENDING")
                        .order_by(BotRequestUsage.started_at.desc(), BotRequestUsage.id.desc())
                        .limit(1)
                    ).first()
                    if pending_usage and str(getattr(pending_usage, "request_id", "") or "").strip():
                        self._runtime_service.sync_edge_monitor_packets(
                            app_state=self._app.state,
                            bot=bot,
                            request_id=str(pending_usage.request_id or "").strip(),
                        )
                except HTTPException as exc:
                    # Runtime backend unavailable for this bot: log and move on.
                    self._logger.warning(
                        "Skip runtime monitor restore on startup for bot_id=%s due to unavailable runtime backend: %s",
                        str(bot.id or ""),
                        str(getattr(exc, "detail", "") or exc),
                    )
                except Exception:
                    # Never let one bot's failure abort startup for the rest.
                    self._logger.exception("Failed to restore runtime monitor on startup for bot_id=%s", str(bot.id or ""))

    async def handle_websocket(self, websocket: WebSocket, bot_id: str) -> None:
        """Serve the runtime-event WebSocket for one bot until disconnect.

        Closes with 4404 for unknown bots and 1011 when the accept fails;
        inbound text frames are drained and ignored.
        """
        with Session(self._engine) as session:
            bot = session.get(BotInstance, bot_id)
            if not bot:
                await websocket.close(code=4404, reason="Bot not found")
                return

        connected = False
        try:
            await self._runtime_event_service.manager.connect(bot_id, websocket)
            connected = True
        except Exception as exc:
            self._logger.warning("websocket connect failed bot_id=%s detail=%s", bot_id, exc)
            try:
                await websocket.close(code=1011, reason="WebSocket accept failed")
            except Exception:
                # Best effort: socket may already be closed.
                pass
            return

        self._runtime_service.ensure_monitor(app_state=websocket.app.state, bot=bot)
        try:
            # Keep the connection open; incoming frames are ignored.
            while True:
                await websocket.receive_text()
        except WebSocketDisconnect:
            pass
        except RuntimeError as exc:
            # Starlette raises these benign errors on races around close;
            # only log unexpected RuntimeErrors.
            msg = str(exc or "").lower()
            if "need to call \"accept\" first" not in msg and "not connected" not in msg:
                self._logger.exception("websocket runtime error bot_id=%s", bot_id)
        except Exception:
            self._logger.exception("websocket unexpected error bot_id=%s", bot_id)
        finally:
            if connected:
                self._runtime_event_service.manager.disconnect(bot_id, websocket)
|
||||||
|
|
@ -0,0 +1,175 @@
|
||||||
|
from typing import Any, Callable, Dict, List
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
|
||||||
|
# Callable aliases for the collaborators injected into BotChannelService.
ReadBotConfig = Callable[[str], Dict[str, Any]]                 # bot_id -> config dict
WriteBotConfig = Callable[[str, Dict[str, Any]], None]          # bot_id, config -> persist
SyncBotWorkspace = Callable[[Session, BotInstance], None]
InvalidateBotCache = Callable[[str], None]                      # bot_id -> drop cached detail
GetBotChannels = Callable[[BotInstance], List[Dict[str, Any]]]
NormalizeChannelExtra = Callable[[Any], Dict[str, Any]]
ChannelApiToCfg = Callable[[Dict[str, Any]], Dict[str, Any]]    # API row -> config entry
ReadGlobalDeliveryFlags = Callable[[Any], tuple[bool, bool]]    # -> (sendProgress, sendToolHints)
|
||||||
|
|
||||||
|
|
||||||
|
class BotChannelService:
    """CRUD over a bot's channel entries, which live under the "channels"
    mapping of the bot's config file (not in the database).

    The "dashboard" channel is built-in: it cannot be created, modified, or
    deleted through this service. All dependencies are injected callables.
    """

    def __init__(
        self,
        *,
        read_bot_config: ReadBotConfig,
        write_bot_config: WriteBotConfig,
        sync_bot_workspace_via_provider: SyncBotWorkspace,
        invalidate_bot_detail_cache: InvalidateBotCache,
        get_bot_channels_from_config: GetBotChannels,
        normalize_channel_extra: NormalizeChannelExtra,
        channel_api_to_cfg: ChannelApiToCfg,
        read_global_delivery_flags: ReadGlobalDeliveryFlags,
    ) -> None:
        # Store injected collaborators verbatim.
        self._read_bot_config = read_bot_config
        self._write_bot_config = write_bot_config
        self._sync_bot_workspace_via_provider = sync_bot_workspace_via_provider
        self._invalidate_bot_detail_cache = invalidate_bot_detail_cache
        self._get_bot_channels_from_config = get_bot_channels_from_config
        self._normalize_channel_extra = normalize_channel_extra
        self._channel_api_to_cfg = channel_api_to_cfg
        self._read_global_delivery_flags = read_global_delivery_flags

    def _require_bot(self, *, session: Session, bot_id: str) -> BotInstance:
        """Fetch the bot row or raise HTTP 404."""
        bot = session.get(BotInstance, bot_id)
        if not bot:
            raise HTTPException(status_code=404, detail="Bot not found")
        return bot

    def list_channels(self, *, session: Session, bot_id: str) -> List[Dict[str, Any]]:
        """Return all channel rows derived from the bot's config."""
        bot = self._require_bot(session=session, bot_id=bot_id)
        return self._get_bot_channels_from_config(bot)

    def create_channel(self, *, session: Session, bot_id: str, payload: Any) -> Dict[str, Any]:
        """Create a new channel of a type not yet present on the bot.

        Raises 400 for a missing type, for "dashboard" (built-in), or for a
        duplicate channel type; 404 for an unknown bot.
        """
        bot = self._require_bot(session=session, bot_id=bot_id)

        ctype = str(getattr(payload, "channel_type", "") or "").strip().lower()
        if not ctype:
            raise HTTPException(status_code=400, detail="channel_type is required")
        if ctype == "dashboard":
            raise HTTPException(status_code=400, detail="dashboard channel is built-in and cannot be created manually")
        current_rows = self._get_bot_channels_from_config(bot)
        if any(str(row.get("channel_type") or "").lower() == ctype for row in current_rows):
            raise HTTPException(status_code=400, detail=f"Channel already exists: {ctype}")

        # Channel id equals its type; ports are clamped into [1, 65535].
        new_row = {
            "id": ctype,
            "bot_id": bot_id,
            "channel_type": ctype,
            "external_app_id": str(getattr(payload, "external_app_id", "") or "").strip() or f"{ctype}-{bot_id}",
            "app_secret": str(getattr(payload, "app_secret", "") or "").strip(),
            "internal_port": max(1, min(int(getattr(payload, "internal_port", 8080) or 8080), 65535)),
            "is_active": bool(getattr(payload, "is_active", True)),
            "extra_config": self._normalize_channel_extra(getattr(payload, "extra_config", None)),
            "locked": False,
        }

        config_data = self._read_bot_config(bot_id)
        channels_cfg = config_data.get("channels")
        if not isinstance(channels_cfg, dict):
            channels_cfg = {}
            config_data["channels"] = channels_cfg
        channels_cfg[ctype] = self._channel_api_to_cfg(new_row)
        self._write_bot_config(bot_id, config_data)
        # Push the updated config to the bot's workspace, then drop caches.
        self._sync_bot_workspace_via_provider(session, bot)
        self._invalidate_bot_detail_cache(bot_id)
        return new_row

    def update_channel(
        self,
        *,
        session: Session,
        bot_id: str,
        channel_id: str,
        payload: Any,
    ) -> Dict[str, Any]:
        """Update an existing channel; only fields present in ``payload``
        (``exclude_unset``) are applied.

        Raises 404 for unknown bot/channel; 400 when touching the built-in
        dashboard channel, renaming onto an existing type, or disabling the
        dashboard channel.
        """
        bot = self._require_bot(session=session, bot_id=bot_id)
        channel_key = str(channel_id or "").strip().lower()
        rows = self._get_bot_channels_from_config(bot)
        row = next((r for r in rows if str(r.get("id") or "").lower() == channel_key), None)
        if not row:
            raise HTTPException(status_code=404, detail="Channel not found")
        if str(row.get("channel_type") or "").strip().lower() == "dashboard" or bool(row.get("locked")):
            raise HTTPException(status_code=400, detail="dashboard channel is built-in and cannot be modified")

        update_data = payload.model_dump(exclude_unset=True)
        existing_type = str(row.get("channel_type") or "").strip().lower()
        new_type = existing_type
        if "channel_type" in update_data and update_data["channel_type"] is not None:
            new_type = str(update_data["channel_type"]).strip().lower()
            if not new_type:
                raise HTTPException(status_code=400, detail="channel_type cannot be empty")
            if existing_type == "dashboard" and new_type != "dashboard":
                raise HTTPException(status_code=400, detail="dashboard channel type cannot be changed")
            if new_type != existing_type and any(str(r.get("channel_type") or "").lower() == new_type for r in rows):
                raise HTTPException(status_code=400, detail=f"Channel already exists: {new_type}")

        # Apply only fields explicitly provided (and non-None) in the payload.
        if "external_app_id" in update_data and update_data["external_app_id"] is not None:
            row["external_app_id"] = str(update_data["external_app_id"]).strip()
        if "app_secret" in update_data and update_data["app_secret"] is not None:
            row["app_secret"] = str(update_data["app_secret"]).strip()
        if "internal_port" in update_data and update_data["internal_port"] is not None:
            row["internal_port"] = max(1, min(int(update_data["internal_port"]), 65535))
        if "is_active" in update_data and update_data["is_active"] is not None:
            next_active = bool(update_data["is_active"])
            if existing_type == "dashboard" and not next_active:
                raise HTTPException(status_code=400, detail="dashboard channel must remain enabled")
            row["is_active"] = next_active
        if "extra_config" in update_data:
            row["extra_config"] = self._normalize_channel_extra(update_data.get("extra_config"))
        # Id tracks the (possibly renamed) type; only dashboard is locked.
        row["channel_type"] = new_type
        row["id"] = new_type
        row["locked"] = new_type == "dashboard"

        config_data = self._read_bot_config(bot_id)
        channels_cfg = config_data.get("channels")
        if not isinstance(channels_cfg, dict):
            channels_cfg = {}
            config_data["channels"] = channels_cfg
        # Global delivery flags are stored at the top of the channels map;
        # the dashboard channel's extra_config may override them.
        current_send_progress, current_send_tool_hints = self._read_global_delivery_flags(channels_cfg)
        if new_type == "dashboard":
            extra = self._normalize_channel_extra(row.get("extra_config"))
            channels_cfg["sendProgress"] = bool(extra.get("sendProgress", current_send_progress))
            channels_cfg["sendToolHints"] = bool(extra.get("sendToolHints", current_send_tool_hints))
        else:
            channels_cfg["sendProgress"] = current_send_progress
            channels_cfg["sendToolHints"] = current_send_tool_hints
        # Dashboard never gets its own config entry.
        channels_cfg.pop("dashboard", None)
        # Drop the old entry when the type was renamed.
        if existing_type != "dashboard" and existing_type in channels_cfg and existing_type != new_type:
            channels_cfg.pop(existing_type, None)
        if new_type != "dashboard":
            channels_cfg[new_type] = self._channel_api_to_cfg(row)
        self._write_bot_config(bot_id, config_data)
        session.commit()
        self._sync_bot_workspace_via_provider(session, bot)
        self._invalidate_bot_detail_cache(bot_id)
        return row

    def delete_channel(self, *, session: Session, bot_id: str, channel_id: str) -> Dict[str, Any]:
        """Delete a channel by id; the dashboard channel cannot be deleted.

        Raises 404 for unknown bot/channel, 400 for dashboard.
        """
        bot = self._require_bot(session=session, bot_id=bot_id)
        channel_key = str(channel_id or "").strip().lower()
        rows = self._get_bot_channels_from_config(bot)
        row = next((r for r in rows if str(r.get("id") or "").lower() == channel_key), None)
        if not row:
            raise HTTPException(status_code=404, detail="Channel not found")
        if str(row.get("channel_type") or "").lower() == "dashboard":
            raise HTTPException(status_code=400, detail="dashboard channel cannot be deleted")

        config_data = self._read_bot_config(bot_id)
        channels_cfg = config_data.get("channels")
        if not isinstance(channels_cfg, dict):
            channels_cfg = {}
            config_data["channels"] = channels_cfg
        channels_cfg.pop(str(row.get("channel_type") or "").lower(), None)
        self._write_bot_config(bot_id, config_data)
        session.commit()
        self._sync_bot_workspace_via_provider(session, bot)
        self._invalidate_bot_detail_cache(bot_id)
        return {"status": "deleted"}
|
||||||
|
|
@ -0,0 +1,333 @@
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.runtime.base import RuntimeProvider
|
||||||
|
|
||||||
|
|
||||||
|
class BotCommandService:
|
||||||
|
def __init__(
    self,
    *,
    read_runtime_snapshot: Callable[[BotInstance], Dict[str, Any]],
    normalize_media_list: Callable[[Any, str], List[str]],
    resolve_workspace_path: Callable[[str, Optional[str]], tuple[str, str]],
    is_visual_attachment_path: Callable[[str], bool],
    is_video_attachment_path: Callable[[str], bool],
    create_usage_request: Callable[..., str],
    record_activity_event: Callable[..., None],
    fail_latest_usage: Callable[[Session, str, str], None],
    persist_runtime_packet: Callable[[str, Dict[str, Any]], Optional[int]],
    get_main_loop: Callable[[Any], Any],
    broadcast_packet: Callable[[str, Dict[str, Any], Any], None],
) -> None:
    """Store injected collaborators and set up the edge-monitor sync
    bookkeeping (per-(bot, request) worker threads plus a dedup map of the
    last seen packet seq per bot)."""
    self._read_runtime_snapshot = read_runtime_snapshot
    self._normalize_media_list = normalize_media_list
    self._resolve_workspace_path = resolve_workspace_path
    self._is_visual_attachment_path = is_visual_attachment_path
    self._is_video_attachment_path = is_video_attachment_path
    self._create_usage_request = create_usage_request
    self._record_activity_event = record_activity_event
    self._fail_latest_usage = fail_latest_usage
    self._persist_runtime_packet = persist_runtime_packet
    self._get_main_loop = get_main_loop
    self._broadcast_packet = broadcast_packet
    # One sync thread per (bot_id, request key); guarded by _monitor_sync_lock.
    # NOTE(review): entries for finished threads are never pruned here —
    # confirm cleanup happens elsewhere or the map grows with each request.
    self._monitor_sync_threads: Dict[tuple[str, str], threading.Thread] = {}
    self._monitor_sync_lock = threading.Lock()
    # Guards _monitor_sync_last_seq, the per-bot high-water mark used to
    # deduplicate monitor packets across sync threads.
    self._monitor_sync_seq_lock = threading.Lock()
    self._monitor_sync_last_seq: Dict[str, int] = {}
|
||||||
|
|
||||||
|
def execute(
    self,
    *,
    session: Session,
    bot_id: str,
    bot: BotInstance,
    payload: Any,
    runtime_provider: RuntimeProvider,
    app_state: Any,
) -> Dict[str, Any]:
    """Deliver a dashboard command (text and/or attachments) to the bot.

    Records a usage request and an activity event, echoes the user command
    to connected dashboard WebSockets, delivers via the runtime provider,
    and on edge transports kicks off background monitor-packet sync.

    Raises 400 when both command and attachments are empty or an attachment
    file is missing (non-edge only); 502 when delivery fails.
    """
    runtime_snapshot = self._read_runtime_snapshot(bot)
    attachments = self._normalize_media_list(getattr(payload, "attachments", None), bot_id)
    command = str(getattr(payload, "command", None) or "").strip()
    if not command and not attachments:
        raise HTTPException(status_code=400, detail="Command or attachments is required")

    checked_attachments: List[str] = []
    transport_kind = str(getattr(bot, "transport_kind", "") or "").strip().lower()
    for rel in attachments:
        # Edge transports host files remotely, so local existence checks
        # only apply to non-edge bots.
        if transport_kind != "edge":
            _, target = self._resolve_workspace_path(bot_id, rel)
            if not os.path.isfile(target):
                raise HTTPException(status_code=400, detail=f"attachment not found: {rel}")
        checked_attachments.append(rel)
    # Rebase workspace-relative paths onto the bot container's mount point.
    delivery_media = [f"/root/.nanobot/workspace/{p.lstrip('/')}" for p in checked_attachments]

    display_command = command if command else "[attachment message]"
    delivery_command = self._build_delivery_command(command=command, checked_attachments=checked_attachments)

    request_id = self._create_usage_request(
        session,
        bot_id,
        display_command,
        attachments=checked_attachments,
        channel="dashboard",
        metadata={"attachment_count": len(checked_attachments)},
        provider=str(runtime_snapshot.get("llm_provider") or "").strip() or None,
        model=str(runtime_snapshot.get("llm_model") or "").strip() or None,
    )
    self._record_activity_event(
        session,
        bot_id,
        "command_submitted",
        request_id=request_id,
        channel="dashboard",
        detail="command submitted",
        metadata={
            "attachment_count": len(checked_attachments),
            "has_text": bool(command),
        },
    )
    session.commit()

    # Echo the user's command to the dashboard stream and persist it so
    # history survives reconnects.
    outbound_user_packet: Optional[Dict[str, Any]] = None
    if display_command or checked_attachments:
        outbound_user_packet = {
            "type": "USER_COMMAND",
            "channel": "dashboard",
            "text": display_command,
            "media": checked_attachments,
            "request_id": request_id,
        }
        self._persist_runtime_packet(bot_id, outbound_user_packet)

    loop = self._get_main_loop(app_state)
    if loop and loop.is_running() and outbound_user_packet:
        self._broadcast_packet(bot_id, outbound_user_packet, loop)

    # deliver_command returns None on success, an error detail string on failure.
    detail = runtime_provider.deliver_command(bot_id=bot_id, command=delivery_command, media=delivery_media)
    if detail is not None:
        self._fail_latest_usage(session, bot_id, detail or "command delivery failed")
        self._record_activity_event(
            session,
            bot_id,
            "command_failed",
            request_id=request_id,
            channel="dashboard",
            detail=(detail or "command delivery failed")[:400],
        )
        session.commit()
        if loop and loop.is_running():
            # Surface the failure to connected dashboards as an ERROR state.
            self._broadcast_packet(
                bot_id,
                {
                    "type": "AGENT_STATE",
                    "channel": "dashboard",
                    "payload": {
                        "state": "ERROR",
                        "action_msg": detail or "command delivery failed",
                    },
                },
                loop,
            )
        raise HTTPException(
            status_code=502,
            detail=f"Failed to deliver command to bot dashboard channel{': ' + detail if detail else ''}",
        )

    # Edge-only (no-op for other providers): pull monitor packets produced
    # by this request in a background thread.
    self._maybe_sync_edge_monitor_packets(
        runtime_provider=runtime_provider,
        bot_id=bot_id,
        request_id=request_id,
        after_seq=self._resolve_monitor_baseline_seq(runtime_provider, bot_id),
        app_state=app_state,
    )
    return {"success": True}
|
||||||
|
|
||||||
|
def _maybe_sync_edge_monitor_packets(
    self,
    *,
    runtime_provider: RuntimeProvider,
    bot_id: str,
    request_id: str,
    after_seq: int,
    app_state: Any,
) -> None:
    """Start a background monitor-packet sync thread for an edge bot.

    No-op unless the provider class is EdgeRuntimeProvider, and at most one
    live thread exists per (bot_id, request key).
    """
    # Provider detection is by class name, not isinstance, to avoid
    # importing the edge provider here.
    provider_name = runtime_provider.__class__.__name__.strip().lower()
    if provider_name != "edgeruntimeprovider":
        return
    bot_key = str(bot_id or "").strip()
    if not bot_key:
        return
    # Requests without an id are keyed by their starting sequence number.
    request_key = str(request_id or "").strip() or f"seq:{int(after_seq or 0)}"
    thread_key = (bot_key, request_key)
    with self._monitor_sync_lock:
        existing = self._monitor_sync_threads.get(thread_key)
        if existing and existing.is_alive():
            # A sync for this (bot, request) is already running.
            return
        thread = threading.Thread(
            target=self._sync_edge_monitor_packets,
            args=(runtime_provider, bot_key, request_id, after_seq, app_state),
            daemon=True,
        )
        self._monitor_sync_threads[thread_key] = thread
        thread.start()
|
||||||
|
|
||||||
|
def sync_edge_monitor_packets(
    self,
    *,
    runtime_provider: RuntimeProvider,
    bot_id: str,
    request_id: str,
    app_state: Any,
) -> None:
    """Public entry point: replay the monitor stream from the very beginning.

    Delegates to _maybe_sync_edge_monitor_packets with after_seq=0 so the
    worker picks up every packet the runtime still holds.
    """
    self._maybe_sync_edge_monitor_packets(
        runtime_provider=runtime_provider,
        bot_id=bot_id,
        request_id=request_id,
        after_seq=0,
        app_state=app_state,
    )
|
||||||
|
|
||||||
|
def _sync_edge_monitor_packets(
    self,
    runtime_provider: RuntimeProvider,
    bot_id: str,
    request_id: str,
    after_seq: int,
    app_state: Any,
) -> None:
    """Background worker: poll the edge runtime and fan out monitor packets.

    Polls get_monitor_packets every 0.5 s for up to ~18 seconds, persists
    each new packet, broadcasts it to dashboard listeners when the main
    event loop is running, then removes itself from the thread registry.
    """
    loop = self._get_main_loop(app_state)
    last_seq = max(0, int(after_seq or 0))
    # Hard deadline: a single sync runs at most ~18 seconds.
    deadline = time.monotonic() + 18.0
    request_id_norm = str(request_id or "").strip()
    try:
        while time.monotonic() < deadline:
            try:
                rows = runtime_provider.get_monitor_packets(bot_id=bot_id, after_seq=last_seq, limit=200)
            except Exception:
                # Provider may be temporarily unreachable; pause and retry.
                time.sleep(0.5)
                continue

            for row in rows or []:
                try:
                    seq = int(row.get("seq") or 0)
                except Exception:
                    seq = 0

                packet = dict(row.get("packet") or {})
                if not packet:
                    continue
                packet_type = str(packet.get("type") or "").strip().upper()
                packet_request_id = str(packet.get("request_id") or "").strip()
                # USER_COMMAND packets echo what we sent; never re-broadcast them.
                if packet_type == "USER_COMMAND":
                    continue
                # When both sides carry a request id, only forward packets for this request.
                if packet_type in {"ASSISTANT_MESSAGE", "BUS_EVENT"} and request_id_norm and packet_request_id and packet_request_id != request_id_norm:
                    continue

                # Cross-thread dedupe: only the first thread to mark a seq forwards it.
                if not self._mark_monitor_seq(bot_id, seq):
                    continue
                last_seq = max(last_seq, seq)

                self._persist_runtime_packet(bot_id, packet)
                if loop and loop.is_running():
                    self._broadcast_packet(bot_id, packet, loop)
            time.sleep(0.5)
    finally:
        # Unregister this thread so a later request can start a fresh sync.
        with self._monitor_sync_lock:
            request_key = request_id_norm or f"seq:{int(after_seq or 0)}"
            existing = self._monitor_sync_threads.get((bot_id, request_key))
            if existing is threading.current_thread():
                self._monitor_sync_threads.pop((bot_id, request_key), None)
|
||||||
|
|
||||||
|
def _resolve_monitor_baseline_seq(self, runtime_provider: RuntimeProvider, bot_id: str) -> int:
    """Pick the seq to start syncing after, skipping already-known packets.

    Returns the max of what the runtime currently reports and what this
    process has already recorded; on provider errors it falls back to the
    locally recorded value.
    """
    try:
        rows = runtime_provider.get_monitor_packets(bot_id=bot_id, after_seq=0, limit=1000)
    except Exception:
        return self._get_monitor_seq(bot_id)

    def _seq_of(row) -> int:
        try:
            return int(row.get("seq") or 0)
        except Exception:
            return 0

    newest = max((_seq_of(row) for row in rows or []), default=0)
    return max(newest, self._get_monitor_seq(bot_id))
|
||||||
|
|
||||||
|
def _mark_monitor_seq(self, bot_id: str, seq: int) -> bool:
|
||||||
|
if seq <= 0:
|
||||||
|
return False
|
||||||
|
bot_key = str(bot_id or "").strip()
|
||||||
|
with self._monitor_sync_seq_lock:
|
||||||
|
current = int(self._monitor_sync_last_seq.get(bot_key, 0) or 0)
|
||||||
|
if seq <= current:
|
||||||
|
return False
|
||||||
|
self._monitor_sync_last_seq[bot_key] = seq
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _get_monitor_seq(self, bot_id: str) -> int:
|
||||||
|
bot_key = str(bot_id or "").strip()
|
||||||
|
with self._monitor_sync_seq_lock:
|
||||||
|
return int(self._monitor_sync_last_seq.get(bot_key, 0) or 0)
|
||||||
|
|
||||||
|
def _build_delivery_command(self, *, command: str, checked_attachments: List[str]) -> str:
    """Compose the command text actually delivered to the bot.

    Without attachments the command passes through unchanged (empty
    commands become a "[attachment message]" placeholder). With
    attachments, an attachment list plus handling instructions is
    appended; all-visual (image/video) attachments get multimodal-specific
    guidance. If the command already mentions every attachment path, it
    is delivered as-is.
    """
    display_command = command if command else "[attachment message]"
    delivery_command = display_command
    if not checked_attachments:
        return delivery_command

    attachment_block = "\n".join(f"- {p}" for p in checked_attachments)
    # assumes _is_visual_attachment_path / _is_video_attachment_path classify
    # by file extension — defined elsewhere in this class; confirm there.
    all_visual = all(self._is_visual_attachment_path(p) for p in checked_attachments)
    if all_visual:
        has_video = any(self._is_video_attachment_path(p) for p in checked_attachments)
        media_label = "图片/视频" if has_video else "图片"
        capability_hint = (
            "1) 附件已随请求附带;图片在可用时可直接作为多模态输入理解,视频请按附件路径处理。\n"
            if has_video
            else "1) 附件中的图片已作为多模态输入提供,优先直接理解并回答。\n"
        )
        if command:
            # Visual attachments with a user command: append multimodal handling rules.
            return (
                f"{command}\n\n"
                "[Attached files]\n"
                f"{attachment_block}\n\n"
                "【附件处理要求】\n"
                f"{capability_hint}"
                "2) 若当前模型或接口不支持直接理解该附件,请明确说明后再调用工具解析。\n"
                "3) 除非用户明确要求,不要先调用工具读取附件文件。\n"
                "4) 回复语言必须遵循 USER.md;若未指定,则与用户当前输入语言保持一致。\n"
                "5) 仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
            )
        # Visual attachments with no command: ask the bot to analyze them.
        return (
            "请先处理已附带的附件列表:\n"
            f"{attachment_block}\n\n"
            f"请直接分析已附带的{media_label}并总结关键信息。\n"
            f"{'图片在可用时可直接作为多模态输入理解,视频请按附件路径处理。' if has_video else ''}\n"
            "若当前模型或接口不支持直接理解该附件,请明确说明后再调用工具解析。\n"
            "回复语言必须遵循 USER.md;若未指定,则与用户当前输入语言保持一致。\n"
            "仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
        )

    # Non-visual attachments: only append the list when the command does not
    # already reference every attachment path.
    command_has_paths = all(p in command for p in checked_attachments) if command else False
    if command and not command_has_paths:
        return (
            f"{command}\n\n"
            "[Attached files]\n"
            f"{attachment_block}\n\n"
            "Please process the attached file(s) listed above when answering this request.\n"
            "Reply language must follow USER.md. If not specified, use the same language as the user input."
        )
    if not command:
        return (
            "Please process the uploaded file(s) listed below:\n"
            f"{attachment_block}\n\n"
            "Reply language must follow USER.md. If not specified, use the same language as the user input."
        )
    return delivery_command
|
||||||
|
|
@ -1,380 +0,0 @@
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
from fastapi import HTTPException
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.docker_instance import docker_manager
|
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from schemas.bot import (
|
|
||||||
BotEnvParamsUpdateRequest,
|
|
||||||
BotMcpConfigUpdateRequest,
|
|
||||||
ChannelConfigRequest,
|
|
||||||
ChannelConfigUpdateRequest,
|
|
||||||
)
|
|
||||||
from services.bot_service import (
|
|
||||||
channel_api_to_config,
|
|
||||||
list_bot_channels_from_config,
|
|
||||||
normalize_channel_extra,
|
|
||||||
read_global_delivery_flags,
|
|
||||||
sync_bot_workspace_channels,
|
|
||||||
)
|
|
||||||
from services.bot_mcp_service import (
|
|
||||||
_merge_mcp_servers_preserving_extras,
|
|
||||||
_normalize_mcp_servers,
|
|
||||||
)
|
|
||||||
from services.bot_storage_service import (
|
|
||||||
get_bot_resource_limits,
|
|
||||||
get_bot_workspace_snapshot,
|
|
||||||
normalize_bot_env_params,
|
|
||||||
read_bot_config_data,
|
|
||||||
read_bot_env_params,
|
|
||||||
write_bot_config_data,
|
|
||||||
write_bot_env_params,
|
|
||||||
)
|
|
||||||
from services.cache_service import _invalidate_bot_detail_cache
|
|
||||||
|
|
||||||
# Workspace markdown files whose lifecycle is managed by the dashboard
# (snapshotted/restored around config writes).
MANAGED_WORKSPACE_FILENAMES = ("AGENTS.md", "SOUL.md", "USER.md", "TOOLS.md", "IDENTITY.md")
|
|
||||||
|
|
||||||
|
|
||||||
def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:
    """Fetch a bot row by primary key, raising HTTP 404 when absent."""
    instance = session.get(BotInstance, bot_id)
    if not instance:
        raise HTTPException(status_code=404, detail="Bot not found")
    return instance
|
|
||||||
|
|
||||||
|
|
||||||
def _read_bot_config_object(bot_id: str) -> Dict[str, Any]:
    """Load the bot's config payload, coercing any non-dict data to {}."""
    payload = read_bot_config_data(bot_id)
    if isinstance(payload, dict):
        return payload
    return {}
|
|
||||||
|
|
||||||
|
|
||||||
def _read_bot_tools_cfg(bot_id: str) -> tuple[Dict[str, Any], Dict[str, Any]]:
    """Return (config, config["tools"]), materializing the tools dict when missing."""
    config_data = _read_bot_config_object(bot_id)
    section = config_data.get("tools")
    if not isinstance(section, dict):
        section = {}
    # Re-attach so callers may mutate `section` and persist `config_data`.
    config_data["tools"] = section
    return config_data, section
|
|
||||||
|
|
||||||
|
|
||||||
def _read_bot_channels_cfg(bot_id: str) -> tuple[Dict[str, Any], Dict[str, Any]]:
    """Return (config, config["channels"]), materializing the channels dict when missing."""
    config_data = _read_bot_config_object(bot_id)
    section = config_data.get("channels")
    if not isinstance(section, dict):
        section = {}
    # Re-attach so callers may mutate `section` and persist `config_data`.
    config_data["channels"] = section
    return config_data, section
|
|
||||||
|
|
||||||
|
|
||||||
def _managed_bot_file_paths(bot_id: str) -> Dict[str, str]:
    """Map logical file keys to absolute paths of dashboard-managed bot files.

    Keys are "config", "resources", and one "workspace:<filename>" entry per
    managed workspace markdown file.
    """
    bot_root = os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot")
    workspace_root = os.path.join(bot_root, "workspace")
    paths = {
        "config": os.path.join(bot_root, "config.json"),
        "resources": os.path.join(bot_root, "resources.json"),
    }
    for filename in MANAGED_WORKSPACE_FILENAMES:
        # Bug fix: the key was previously a constant string, so every managed
        # workspace file collapsed onto one dict slot and only the last file
        # was snapshotted/restored. Key each file by its own name.
        paths[f"workspace:{filename}"] = os.path.join(workspace_root, filename)
    return paths
|
|
||||||
|
|
||||||
|
|
||||||
def _snapshot_managed_bot_files(bot_id: str) -> Dict[str, Optional[bytes]]:
    """Capture the current bytes of every managed bot file (None = absent)."""
    contents: Dict[str, Optional[bytes]] = {}
    for key, path in _managed_bot_file_paths(bot_id).items():
        if not os.path.isfile(path):
            contents[key] = None
            continue
        with open(path, "rb") as fh:
            contents[key] = fh.read()
    return contents
|
|
||||||
|
|
||||||
|
|
||||||
def _restore_managed_bot_files(bot_id: str, snapshot: Dict[str, Optional[bytes]]) -> None:
    """Roll managed bot files back to a previously captured snapshot.

    A None payload means the file did not exist at snapshot time, so any
    current file is removed. Writes go through a temp file + os.replace so
    each restore is atomic.
    """
    for key, path in _managed_bot_file_paths(bot_id).items():
        payload = snapshot.get(key)
        if payload is None:
            if os.path.exists(path):
                os.remove(path)
            continue
        os.makedirs(os.path.dirname(path), exist_ok=True)
        staging = f"{path}.tmp"
        with open(staging, "wb") as fh:
            fh.write(payload)
        os.replace(staging, path)
|
|
||||||
|
|
||||||
|
|
||||||
def _write_bot_config_state(
    session: Session,
    *,
    bot_id: str,
    config_data: Dict[str, Any],
    sync_workspace: bool = False,
) -> None:
    """Persist config.json (optionally syncing workspace channel files) safely.

    When sync_workspace is True, managed files are snapshotted first; any
    failure during the write/sync restores the snapshot and rolls back the
    DB session before re-raising, keeping disk and DB consistent.
    """
    managed_file_snapshot = _snapshot_managed_bot_files(bot_id) if sync_workspace else None
    try:
        write_bot_config_data(bot_id, config_data)
        if sync_workspace:
            sync_bot_workspace_channels(session, bot_id)
    except Exception:
        if managed_file_snapshot is not None:
            _restore_managed_bot_files(bot_id, managed_file_snapshot)
        session.rollback()
        raise
    # Cached detail views are stale after any successful config write.
    _invalidate_bot_detail_cache(bot_id)
|
|
||||||
|
|
||||||
|
|
||||||
def _find_channel_row(rows: list[Dict[str, Any]], channel_id: str) -> Dict[str, Any]:
    """Locate a channel row by case-insensitive id, raising HTTP 404 on miss."""
    wanted = str(channel_id or "").strip().lower()
    found = None
    for item in rows:
        if str(item.get("id") or "").lower() == wanted:
            found = item
            break
    if not found:
        raise HTTPException(status_code=404, detail="Channel not found")
    return found
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_resources_snapshot(session: Session, *, bot_id: str) -> Dict[str, Any]:
    """Aggregate configured limits, live Docker stats, and workspace usage.

    Raises HTTP 404 (via _get_bot_or_404) when the bot row does not exist.
    """
    bot = _get_bot_or_404(session, bot_id)

    configured = get_bot_resource_limits(bot_id)
    runtime = docker_manager.get_bot_resource_snapshot(bot_id)
    workspace = get_bot_workspace_snapshot(bot_id)
    workspace_root = str(workspace.get("path") or "")
    workspace_bytes = int(workspace.get("usage_bytes") or 0)
    configured_storage_bytes = int(workspace.get("configured_limit_bytes") or 0)
    workspace_percent = 0.0
    # Only compute a percentage when a positive storage limit is configured.
    if configured_storage_bytes > 0:
        workspace_percent = (workspace_bytes / configured_storage_bytes) * 100.0

    limits = runtime.get("limits") or {}
    # A zero or absent limit means "unlimited" (not enforced).
    cpu_limited = (limits.get("cpu_cores") or 0) > 0
    memory_limited = (limits.get("memory_bytes") or 0) > 0
    storage_limited = bool(limits.get("storage_bytes")) or bool(limits.get("storage_opt_raw"))

    return {
        "bot_id": bot_id,
        # Prefer the live Docker status; fall back to the DB column.
        "docker_status": runtime.get("docker_status") or bot.docker_status,
        "configured": configured,
        "runtime": runtime,
        "workspace": {
            "path": workspace_root,
            "usage_bytes": workspace_bytes,
            "configured_limit_bytes": configured_storage_bytes if configured_storage_bytes > 0 else None,
            "usage_percent": max(0.0, workspace_percent),
        },
        "enforcement": {
            "cpu_limited": cpu_limited,
            "memory_limited": memory_limited,
            "storage_limited": storage_limited,
        },
        "note": (
            "Resource value 0 means unlimited. CPU/Memory limits come from Docker HostConfig and are enforced by cgroup. "
            "Storage limit depends on Docker storage driver support."
        ),
        "collected_at": datetime.utcnow().isoformat() + "Z",
    }
|
|
||||||
|
|
||||||
|
|
||||||
def list_bot_channels_config(session: Session, *, bot_id: str):
    """Return the channel rows derived from the bot's config file (404s first)."""
    return list_bot_channels_from_config(_get_bot_or_404(session, bot_id))
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_tools_config_state(session: Session, *, bot_id: str) -> Dict[str, Any]:
    """Report that dashboard-managed tools config is disabled for this bot."""
    _get_bot_or_404(session, bot_id)
    return {
        "bot_id": bot_id,
        "tools_config": {},
        "managed_by_dashboard": False,
        "hint": "Tools config is disabled in dashboard. Configure tool-related env vars manually.",
    }
|
|
||||||
|
|
||||||
|
|
||||||
def reject_bot_tools_config_update(
    session: Session,
    *,
    bot_id: str,
    payload: Any,
) -> None:
    """Always reject tools-config writes (after confirming the bot exists)."""
    _get_bot_or_404(session, bot_id)
    raise HTTPException(
        status_code=400,
        detail="Tools config is no longer managed by dashboard. Please set required env vars manually.",
    )
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_mcp_config_state(session: Session, *, bot_id: str) -> Dict[str, Any]:
    """Return the sanitized MCP server map stored in the bot's tools config."""
    _get_bot_or_404(session, bot_id)
    _, tools_cfg = _read_bot_tools_cfg(bot_id)
    servers = _normalize_mcp_servers(tools_cfg.get("mcpServers"))
    return {
        "bot_id": bot_id,
        "mcp_servers": servers,
        "locked_servers": [],
        "restart_required": True,
    }
|
|
||||||
|
|
||||||
|
|
||||||
def update_bot_mcp_config_state(
    session: Session,
    *,
    bot_id: str,
    payload: BotMcpConfigUpdateRequest,
) -> Dict[str, Any]:
    """Merge the requested MCP servers into config.json and persist it.

    Extra per-server keys already present in the config are preserved; the
    response echoes the sanitized view of what was written.
    """
    _get_bot_or_404(session, bot_id)
    config_data, tools_cfg = _read_bot_tools_cfg(bot_id)
    incoming = _normalize_mcp_servers(payload.mcp_servers or {})
    merged = _merge_mcp_servers_preserving_extras(tools_cfg.get("mcpServers"), incoming)
    tools_cfg["mcpServers"] = merged
    sanitized_after_save = _normalize_mcp_servers(tools_cfg.get("mcpServers"))
    _write_bot_config_state(session, bot_id=bot_id, config_data=config_data)
    return {
        "status": "updated",
        "bot_id": bot_id,
        "mcp_servers": sanitized_after_save,
        "locked_servers": [],
        "restart_required": True,
    }
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_env_params_state(session: Session, *, bot_id: str) -> Dict[str, Any]:
    """Return the bot's stored env parameter overrides (404s first)."""
    _get_bot_or_404(session, bot_id)
    return {"bot_id": bot_id, "env_params": read_bot_env_params(bot_id)}
|
|
||||||
|
|
||||||
|
|
||||||
def update_bot_env_params_state(
    session: Session,
    *,
    bot_id: str,
    payload: BotEnvParamsUpdateRequest,
) -> Dict[str, Any]:
    """Normalize and persist env params, invalidating the cached detail view."""
    _get_bot_or_404(session, bot_id)
    cleaned = normalize_bot_env_params(payload.env_params)
    write_bot_env_params(bot_id, cleaned)
    _invalidate_bot_detail_cache(bot_id)
    return {
        "status": "updated",
        "bot_id": bot_id,
        "env_params": cleaned,
        "restart_required": True,
    }
|
|
||||||
|
|
||||||
|
|
||||||
def create_bot_channel_config(
    session: Session,
    *,
    bot_id: str,
    payload: ChannelConfigRequest,
) -> Dict[str, Any]:
    """Add a new channel (at most one per channel_type) to the bot's config.

    Rejects the built-in "dashboard" type and duplicate types; persists via
    _write_bot_config_state with a workspace sync.
    """
    bot = _get_bot_or_404(session, bot_id)

    ctype = (payload.channel_type or "").strip().lower()
    if not ctype:
        raise HTTPException(status_code=400, detail="channel_type is required")
    if ctype == "dashboard":
        raise HTTPException(status_code=400, detail="dashboard channel is built-in and cannot be created manually")
    current_rows = list_bot_channels_from_config(bot)
    if any(str(row.get("channel_type") or "").lower() == ctype for row in current_rows):
        raise HTTPException(status_code=400, detail=f"Channel already exists: {ctype}")

    new_row = {
        # The channel id doubles as its type: one channel instance per type.
        "id": ctype,
        "bot_id": bot_id,
        "channel_type": ctype,
        "external_app_id": (payload.external_app_id or "").strip() or f"{ctype}-{bot_id}",
        "app_secret": (payload.app_secret or "").strip(),
        # Clamp to the valid TCP port range; defaults to 8080.
        "internal_port": max(1, min(int(payload.internal_port or 8080), 65535)),
        "is_active": bool(payload.is_active),
        "extra_config": normalize_channel_extra(payload.extra_config),
        "locked": False,
    }

    config_data, channels_cfg = _read_bot_channels_cfg(bot_id)
    channels_cfg[ctype] = channel_api_to_config(new_row)
    _write_bot_config_state(session, bot_id=bot_id, config_data=config_data, sync_workspace=True)
    return new_row
|
|
||||||
|
|
||||||
|
|
||||||
def update_bot_channel_config(
    session: Session,
    *,
    bot_id: str,
    channel_id: str,
    payload: ChannelConfigUpdateRequest,
) -> Dict[str, Any]:
    """Apply a partial update to one channel row and rewrite the config file.

    The built-in dashboard channel (and any locked row) cannot be modified;
    channel_type renames are validated against duplicates. Global delivery
    flags (sendProgress / sendToolHints) live at the channels level and are
    re-written on every update.
    """
    bot = _get_bot_or_404(session, bot_id)

    rows = list_bot_channels_from_config(bot)
    row = _find_channel_row(rows, channel_id)
    if str(row.get("channel_type") or "").strip().lower() == "dashboard" or bool(row.get("locked")):
        raise HTTPException(status_code=400, detail="dashboard channel is built-in and cannot be modified")

    # Only fields the caller explicitly sent are applied (PATCH semantics).
    update_data = payload.model_dump(exclude_unset=True)
    existing_type = str(row.get("channel_type") or "").strip().lower()
    new_type = existing_type
    if "channel_type" in update_data and update_data["channel_type"] is not None:
        new_type = str(update_data["channel_type"]).strip().lower()
        if not new_type:
            raise HTTPException(status_code=400, detail="channel_type cannot be empty")
        if existing_type == "dashboard" and new_type != "dashboard":
            raise HTTPException(status_code=400, detail="dashboard channel type cannot be changed")
        if new_type != existing_type and any(str(r.get("channel_type") or "").lower() == new_type for r in rows):
            raise HTTPException(status_code=400, detail=f"Channel already exists: {new_type}")

    if "external_app_id" in update_data and update_data["external_app_id"] is not None:
        row["external_app_id"] = str(update_data["external_app_id"]).strip()
    if "app_secret" in update_data and update_data["app_secret"] is not None:
        row["app_secret"] = str(update_data["app_secret"]).strip()
    if "internal_port" in update_data and update_data["internal_port"] is not None:
        # Clamp to the valid TCP port range.
        row["internal_port"] = max(1, min(int(update_data["internal_port"]), 65535))
    if "is_active" in update_data and update_data["is_active"] is not None:
        next_active = bool(update_data["is_active"])
        if existing_type == "dashboard" and not next_active:
            raise HTTPException(status_code=400, detail="dashboard channel must remain enabled")
        row["is_active"] = next_active
    if "extra_config" in update_data:
        row["extra_config"] = normalize_channel_extra(update_data.get("extra_config"))
    row["channel_type"] = new_type
    row["id"] = new_type
    row["locked"] = new_type == "dashboard"

    config_data, channels_cfg = _read_bot_channels_cfg(bot_id)
    current_send_progress, current_send_tool_hints = read_global_delivery_flags(channels_cfg)
    if new_type == "dashboard":
        # Dashboard updates may override the global delivery flags via extra_config.
        extra = normalize_channel_extra(row.get("extra_config"))
        channels_cfg["sendProgress"] = bool(extra.get("sendProgress", current_send_progress))
        channels_cfg["sendToolHints"] = bool(extra.get("sendToolHints", current_send_tool_hints))
    else:
        channels_cfg["sendProgress"] = current_send_progress
        channels_cfg["sendToolHints"] = current_send_tool_hints
    # The dashboard channel is implicit and never stored as its own entry.
    channels_cfg.pop("dashboard", None)
    if existing_type != "dashboard" and existing_type in channels_cfg and existing_type != new_type:
        channels_cfg.pop(existing_type, None)
    if new_type != "dashboard":
        channels_cfg[new_type] = channel_api_to_config(row)
    _write_bot_config_state(session, bot_id=bot_id, config_data=config_data, sync_workspace=True)
    return row
|
|
||||||
|
|
||||||
|
|
||||||
def delete_bot_channel_config(
    session: Session,
    *,
    bot_id: str,
    channel_id: str,
) -> Dict[str, Any]:
    """Remove a non-dashboard channel from the bot's config and resync the workspace."""
    bot = _get_bot_or_404(session, bot_id)

    row = _find_channel_row(list_bot_channels_from_config(bot), channel_id)
    ctype = str(row.get("channel_type") or "").lower()
    if ctype == "dashboard":
        raise HTTPException(status_code=400, detail="dashboard channel cannot be deleted")

    config_data, channels_cfg = _read_bot_channels_cfg(bot_id)
    channels_cfg.pop(ctype, None)
    _write_bot_config_state(session, bot_id=bot_id, config_data=config_data, sync_workspace=True)
    return {"status": "deleted"}
|
|
||||||
|
|
@ -0,0 +1,320 @@
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Callable, Dict, List
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from models.bot import BotInstance
|
||||||
|
|
||||||
|
|
||||||
|
# Callable contracts injected into BotConfigStateService so the service stays
# decoupled from concrete storage / cache implementations.
ReadEdgeStateData = Callable[..., Dict[str, Any]]  # keyword call: (bot_id=, state_key=, default_payload=)
WriteEdgeStateData = Callable[..., bool]  # returns True when the edge store accepted the write
ReadBotConfig = Callable[[str], Dict[str, Any]]  # bot_id -> config payload
WriteBotConfig = Callable[[str, Dict[str, Any]], None]
InvalidateBotCache = Callable[[str], None]
PathResolver = Callable[[str], str]  # bot_id -> filesystem path
NormalizeEnvParams = Callable[[Any], Dict[str, str]]
|
||||||
|
|
||||||
|
|
||||||
|
class BotConfigStateService:
    """Reads and writes per-bot config state (env params, MCP servers, cron jobs).

    Prefers an edge state store when available and falls back to local JSON
    files; all collaborators are injected as callables.
    """

    # MCP server names must be short identifier/filesystem-safe tokens.
    _MCP_SERVER_NAME_RE = re.compile(r"^[A-Za-z0-9._-]{1,64}$")
|
||||||
|
|
||||||
|
def __init__(
    self,
    *,
    read_edge_state_data: ReadEdgeStateData,
    write_edge_state_data: WriteEdgeStateData,
    read_bot_config: ReadBotConfig,
    write_bot_config: WriteBotConfig,
    invalidate_bot_detail_cache: InvalidateBotCache,
    env_store_path: PathResolver,
    cron_store_path: PathResolver,
    normalize_env_params: NormalizeEnvParams,
) -> None:
    """Store the injected collaborators; no I/O happens here."""
    self._read_edge_state_data = read_edge_state_data
    self._write_edge_state_data = write_edge_state_data
    self._read_bot_config = read_bot_config
    self._write_bot_config = write_bot_config
    self._invalidate_bot_detail_cache = invalidate_bot_detail_cache
    self._env_store_path = env_store_path
    self._cron_store_path = cron_store_path
    self._normalize_env_params = normalize_env_params
|
||||||
|
|
||||||
|
def _require_bot(self, *, session: Session, bot_id: str) -> BotInstance:
    """Load the bot row or raise HTTP 404."""
    found = session.get(BotInstance, bot_id)
    if not found:
        raise HTTPException(status_code=404, detail="Bot not found")
    return found
|
||||||
|
|
||||||
|
def read_env_store(self, bot_id: str) -> Dict[str, str]:
    """Load env params, preferring the edge state store over the local JSON file.

    Any local-file read or parse failure degrades to an empty mapping.
    """
    edge_payload = self._read_edge_state_data(bot_id=bot_id, state_key="env", default_payload={})
    if edge_payload:
        return self._normalize_env_params(edge_payload)

    store_path = self._env_store_path(bot_id)
    if not os.path.isfile(store_path):
        return {}
    try:
        with open(store_path, "r", encoding="utf-8") as fh:
            return self._normalize_env_params(json.load(fh))
    except Exception:
        return {}
|
||||||
|
|
||||||
|
def write_env_store(self, bot_id: str, env_params: Dict[str, str]) -> None:
    """Persist env params to the edge store, falling back to an atomic local write."""
    cleaned = self._normalize_env_params(env_params)
    if self._write_edge_state_data(bot_id=bot_id, state_key="env", data=cleaned):
        return
    target = self._env_store_path(bot_id)
    os.makedirs(os.path.dirname(target), exist_ok=True)
    # Temp file + os.replace keeps the store readable even if we crash mid-write.
    staging = f"{target}.tmp"
    with open(staging, "w", encoding="utf-8") as fh:
        json.dump(cleaned, fh, ensure_ascii=False, indent=2)
    os.replace(staging, target)
|
||||||
|
|
||||||
|
def get_env_params(self, bot_id: str) -> Dict[str, Any]:
    """API-shaped view of the bot's env params."""
    return {"bot_id": bot_id, "env_params": self.read_env_store(bot_id)}
|
||||||
|
|
||||||
|
def get_env_params_for_bot(self, *, session: Session, bot_id: str) -> Dict[str, Any]:
    """Like get_env_params, but 404s first when the bot row is missing."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.get_env_params(bot_id)
|
||||||
|
|
||||||
|
def update_env_params(self, bot_id: str, env_params: Any) -> Dict[str, Any]:
    """Normalize, persist, and report the bot's env params (restart required)."""
    cleaned = self._normalize_env_params(env_params)
    self.write_env_store(bot_id, cleaned)
    self._invalidate_bot_detail_cache(bot_id)
    return {
        "status": "updated",
        "bot_id": bot_id,
        "env_params": cleaned,
        "restart_required": True,
    }
|
||||||
|
|
||||||
|
def update_env_params_for_bot(self, *, session: Session, bot_id: str, env_params: Any) -> Dict[str, Any]:
    """Like update_env_params, but 404s first when the bot row is missing."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.update_env_params(bot_id, env_params)
|
||||||
|
|
||||||
|
def normalize_mcp_servers(self, raw: Any) -> Dict[str, Dict[str, Any]]:
    """Sanitize a raw MCP server mapping into the canonical shape.

    Drops entries with invalid names, non-dict configs, or missing URLs;
    coerces the transport type to streamableHttp/sse, header keys/values to
    stripped strings, and toolTimeout into [1, 600] (default 60).
    """
    if not isinstance(raw, dict):
        return {}
    sanitized: Dict[str, Dict[str, Any]] = {}
    for raw_name, cfg in raw.items():
        name = str(raw_name or "").strip()
        if not name or not self._MCP_SERVER_NAME_RE.fullmatch(name) or not isinstance(cfg, dict):
            continue

        url = str(cfg.get("url") or "").strip()
        if not url:
            continue

        transport = str(cfg.get("type") or "streamableHttp").strip()
        if transport not in {"streamableHttp", "sse"}:
            transport = "streamableHttp"

        headers: Dict[str, str] = {}
        raw_headers = cfg.get("headers")
        if isinstance(raw_headers, dict):
            for header_key, header_value in raw_headers.items():
                header_name = str(header_key or "").strip()
                if header_name:
                    headers[header_name] = str(header_value or "").strip()

        try:
            timeout = int(cfg.get("toolTimeout", 60))
        except Exception:
            timeout = 60
        timeout = min(600, max(1, timeout))

        sanitized[name] = {
            "type": transport,
            "url": url,
            "headers": headers,
            "toolTimeout": timeout,
        }
    return sanitized
|
||||||
|
|
||||||
|
def _merge_mcp_servers_preserving_extras(
|
||||||
|
self,
|
||||||
|
current_raw: Any,
|
||||||
|
normalized: Dict[str, Dict[str, Any]],
|
||||||
|
) -> Dict[str, Dict[str, Any]]:
|
||||||
|
current_map = current_raw if isinstance(current_raw, dict) else {}
|
||||||
|
merged: Dict[str, Dict[str, Any]] = {}
|
||||||
|
for name, normalized_cfg in normalized.items():
|
||||||
|
base = current_map.get(name)
|
||||||
|
base_cfg = dict(base) if isinstance(base, dict) else {}
|
||||||
|
next_cfg = dict(base_cfg)
|
||||||
|
next_cfg.update(normalized_cfg)
|
||||||
|
merged[name] = next_cfg
|
||||||
|
return merged
|
||||||
|
|
||||||
|
def _sanitize_mcp_servers_in_config_data(self, config_data: Dict[str, Any]) -> Dict[str, Dict[str, Any]]:
|
||||||
|
if not isinstance(config_data, dict):
|
||||||
|
return {}
|
||||||
|
tools_cfg = config_data.get("tools")
|
||||||
|
if not isinstance(tools_cfg, dict):
|
||||||
|
tools_cfg = {}
|
||||||
|
current_raw = tools_cfg.get("mcpServers")
|
||||||
|
normalized = self.normalize_mcp_servers(current_raw)
|
||||||
|
merged = self._merge_mcp_servers_preserving_extras(current_raw, normalized)
|
||||||
|
tools_cfg["mcpServers"] = merged
|
||||||
|
config_data["tools"] = tools_cfg
|
||||||
|
return merged
|
||||||
|
|
||||||
|
def get_mcp_config(self, bot_id: str) -> Dict[str, Any]:
    """API-shaped view of the bot's sanitized MCP server configuration."""
    config_data = self._read_bot_config(bot_id)
    tools_section = config_data.get("tools") if isinstance(config_data, dict) else {}
    if not isinstance(tools_section, dict):
        tools_section = {}
    return {
        "bot_id": bot_id,
        "mcp_servers": self.normalize_mcp_servers(tools_section.get("mcpServers")),
        "locked_servers": [],
        "restart_required": True,
    }
|
||||||
|
|
||||||
|
def get_mcp_config_for_bot(self, *, session: Session, bot_id: str) -> Dict[str, Any]:
    """Like get_mcp_config, but 404s first when the bot row is missing."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.get_mcp_config(bot_id)
|
||||||
|
|
||||||
|
def update_mcp_config(self, bot_id: str, mcp_servers: Any) -> Dict[str, Any]:
    """Merge the requested MCP servers into the bot's config and persist it.

    Extra per-server keys already in the config are preserved; the config is
    re-sanitized before write and the detail cache is invalidated. The
    response echoes the sanitized view of what was written.
    """
    config_data = self._read_bot_config(bot_id)
    if not isinstance(config_data, dict):
        config_data = {}
    tools_cfg = config_data.get("tools")
    if not isinstance(tools_cfg, dict):
        tools_cfg = {}
    normalized_mcp_servers = self.normalize_mcp_servers(mcp_servers or {})
    current_mcp_servers = tools_cfg.get("mcpServers")
    merged_mcp_servers = self._merge_mcp_servers_preserving_extras(current_mcp_servers, normalized_mcp_servers)
    tools_cfg["mcpServers"] = merged_mcp_servers
    config_data["tools"] = tools_cfg
    # Final sanitation pass mutates config_data to the canonical on-disk shape.
    sanitized_after_save = self._sanitize_mcp_servers_in_config_data(config_data)
    self._write_bot_config(bot_id, config_data)
    self._invalidate_bot_detail_cache(bot_id)
    return {
        "status": "updated",
        "bot_id": bot_id,
        "mcp_servers": self.normalize_mcp_servers(sanitized_after_save),
        "locked_servers": [],
        "restart_required": True,
    }
|
||||||
|
|
||||||
|
def update_mcp_config_for_bot(self, *, session: Session, bot_id: str, mcp_servers: Any) -> Dict[str, Any]:
    """Session-aware wrapper: 404 when the bot row is missing, then apply the MCP update."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.update_mcp_config(bot_id, mcp_servers)
|
||||||
|
|
||||||
|
def read_cron_store(self, bot_id: str) -> Dict[str, Any]:
    """Load the cron job store for *bot_id*.

    Prefers the edge-synced state blob; falls back to the local JSON
    file. Always returns a dict shaped like
    ``{"version": int >= 1, "jobs": list}``.
    """
    data = self._read_edge_state_data(
        bot_id=bot_id,
        state_key="cron",
        default_payload={"version": 1, "jobs": []},
    )
    if isinstance(data, dict) and data:
        jobs = data.get("jobs")
        try:
            version = int(data.get("version", 1) or 1)
        except Exception:
            version = 1
        return {
            "version": max(1, version),
            "jobs": jobs if isinstance(jobs, list) else [],
        }

    # Local-file fallback when no edge state is available.
    path = self._cron_store_path(bot_id)
    if not os.path.isfile(path):
        return {"version": 1, "jobs": []}
    try:
        with open(path, "r", encoding="utf-8") as handle:
            payload = json.load(handle)
        if not isinstance(payload, dict):
            return {"version": 1, "jobs": []}
        if not isinstance(payload.get("jobs"), list):
            payload["jobs"] = []
        payload.setdefault("version", 1)
        return payload
    except Exception:
        # Corrupt or unreadable store — treat as empty rather than fail.
        return {"version": 1, "jobs": []}
|
||||||
|
|
||||||
|
def write_cron_store(self, bot_id: str, store: Dict[str, Any]) -> None:
    """Persist the cron store for *bot_id*, normalizing its shape first.

    Writes through the edge state channel when that succeeds; otherwise
    writes the local JSON file atomically (tmp file + ``os.replace``).
    """
    normalized = dict(store) if isinstance(store, dict) else {}
    if not isinstance(normalized.get("jobs"), list):
        normalized["jobs"] = []
    try:
        normalized["version"] = max(1, int(normalized.get("version", 1) or 1))
    except Exception:
        normalized["version"] = 1

    if self._write_edge_state_data(bot_id=bot_id, state_key="cron", data=normalized):
        return

    path = self._cron_store_path(bot_id)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    tmp_path = f"{path}.tmp"
    with open(tmp_path, "w", encoding="utf-8") as handle:
        json.dump(normalized, handle, ensure_ascii=False, indent=2)
    # Atomic swap so readers never observe a half-written store.
    os.replace(tmp_path, path)
|
||||||
|
|
||||||
|
def list_cron_jobs(self, bot_id: str, include_disabled: bool = True) -> Dict[str, Any]:
    """Return the bot's cron jobs ordered by next scheduled run time."""
    store = self.read_cron_store(bot_id)
    selected = [
        row
        for row in store.get("jobs", [])
        if isinstance(row, dict) and (include_disabled or bool(row.get("enabled", True)))
    ]

    def _next_run_ms(row: Dict[str, Any]) -> int:
        # Jobs without a scheduled nextRunAtMs sort last (2**62 sentinel).
        return int(((row.get("state") or {}).get("nextRunAtMs")) or 2**62)

    selected.sort(key=_next_run_ms)
    return {"bot_id": bot_id, "version": int(store.get("version", 1) or 1), "jobs": selected}
|
||||||
|
|
||||||
|
def list_cron_jobs_for_bot(self, *, session: Session, bot_id: str, include_disabled: bool = True) -> Dict[str, Any]:
    """Session-aware wrapper: 404 when the bot row is missing, then list its cron jobs."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.list_cron_jobs(bot_id, include_disabled=include_disabled)
|
||||||
|
|
||||||
|
def stop_cron_job(self, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Disable (but keep) cron job *job_id* for *bot_id*.

    Raises:
        HTTPException: 404 when no job with that id exists in the store.
    """
    store = self.read_cron_store(bot_id)
    jobs = store.get("jobs", [])
    if not isinstance(jobs, list):
        jobs = []
    found = None
    for row in jobs:
        if isinstance(row, dict) and str(row.get("id")) == job_id:
            found = row
            break
    if not found:
        raise HTTPException(status_code=404, detail="Cron job not found")
    found["enabled"] = False
    # BUG FIX: the previous datetime.utcnow().timestamp() produced a skewed
    # epoch on non-UTC hosts — utcnow() is naive, so .timestamp() re-interprets
    # the UTC wall time as *local* time. A naive datetime.now() converts to the
    # correct Unix epoch because it really is local wall time.
    found["updatedAtMs"] = int(datetime.now().timestamp() * 1000)
    self.write_cron_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
    return {"status": "stopped", "job_id": job_id}
|
||||||
|
|
||||||
|
def stop_cron_job_for_bot(self, *, session: Session, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Session-aware wrapper: 404 when the bot row is missing, then disable the job."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.stop_cron_job(bot_id, job_id)
|
||||||
|
|
||||||
|
def delete_cron_job(self, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Permanently remove cron job *job_id*; 404 when it does not exist."""
    store = self.read_cron_store(bot_id)
    jobs = store.get("jobs", [])
    if not isinstance(jobs, list):
        jobs = []
    remaining = [row for row in jobs if not (isinstance(row, dict) and str(row.get("id")) == job_id)]
    # Nothing filtered out means the id was never there.
    if len(remaining) == len(jobs):
        raise HTTPException(status_code=404, detail="Cron job not found")
    self.write_cron_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": remaining})
    return {"status": "deleted", "job_id": job_id}
|
||||||
|
|
||||||
|
def delete_cron_job_for_bot(self, *, session: Session, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Session-aware wrapper: 404 when the bot row is missing, then delete the job."""
    self._require_bot(session=session, bot_id=bot_id)
    return self.delete_cron_job(bot_id, job_id)
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,159 +1,611 @@
|
||||||
import asyncio
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
from typing import Any, Dict
|
from datetime import datetime
|
||||||
|
from typing import Any, Callable, Dict, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
from sqlmodel import Session, select
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
from core.docker_instance import docker_manager
|
from core.settings import (
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT
|
BOTS_WORKSPACE_ROOT,
|
||||||
|
DEFAULT_AGENTS_MD,
|
||||||
|
DEFAULT_IDENTITY_MD,
|
||||||
|
DEFAULT_SOUL_MD,
|
||||||
|
DEFAULT_TOOLS_MD,
|
||||||
|
DEFAULT_USER_MD,
|
||||||
|
)
|
||||||
from models.bot import BotInstance, BotMessage
|
from models.bot import BotInstance, BotMessage
|
||||||
from models.platform import BotActivityEvent, BotRequestUsage
|
from models.platform import BotActivityEvent, BotRequestUsage
|
||||||
from models.skill import BotSkillInstall
|
from models.skill import BotSkillInstall
|
||||||
from models.topic import TopicItem, TopicTopic
|
from models.topic import TopicItem, TopicTopic
|
||||||
from services.bot_service import (
|
from providers.target import ProviderTarget, normalize_provider_target, provider_target_to_dict
|
||||||
_safe_float,
|
from services.runtime_service import RuntimeService
|
||||||
_safe_int,
|
|
||||||
read_bot_runtime_snapshot,
|
# Signatures of the collaborators injected into BotLifecycleService.
# Named aliases document the constructor's contract in one place.

# Runtime / provider-target resolution.
RefreshBotRuntimeStatus = Callable[[Any, BotInstance], str]
ResolveBotProviderTarget = Callable[[BotInstance], ProviderTarget]
ProviderTargetFromNode = Callable[[Optional[str]], Optional[ProviderTarget]]
DefaultProviderTarget = Callable[[], ProviderTarget]
EnsureProviderTargetSupported = Callable[[ProviderTarget], None]
RequireReadyImage = Callable[..., Any]
SyncBotWorkspaceViaProvider = Callable[..., None]
ApplyProviderTargetToBot = Callable[[BotInstance, ProviderTarget], None]

# Serialization and display helpers.
SerializeProviderTargetSummary = Callable[[ProviderTarget], Dict[str, Any]]
SerializeBot = Callable[[BotInstance], Dict[str, Any]]
NodeDisplayName = Callable[[str], str]

# Caching and activity recording.
InvalidateBotCache = Callable[[str], None]
RecordActivityEvent = Callable[..., None]

# Normalization and env-store helpers.
NormalizeEnvParams = Callable[[Any], Dict[str, str]]
NormalizeSystemTimezone = Callable[[Any], str]
NormalizeResourceLimits = Callable[[Any, Any, Any], Dict[str, Any]]
WriteEnvStore = Callable[[str, Dict[str, str]], None]
ResolveBotEnvParams = Callable[[str], Dict[str, str]]
ClearProviderTargetOverride = Callable[[str], None]
NormalizeInitialChannels = Callable[[str, Any], Any]

# Edge-node error handling and clients.
ExpectedEdgeOfflineError = Callable[[Exception], bool]
SummarizeEdgeException = Callable[[Exception], str]
ResolveEdgeClient = Callable[[ProviderTarget], Any]
NodeMetadata = Callable[[str], Dict[str, Any]]
LogEdgeFailure = Callable[..., None]
InvalidateBotMessagesCache = Callable[[str], None]
|
||||||
|
|
||||||
|
|
||||||
def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:
|
class BotLifecycleService:
|
||||||
bot = session.get(BotInstance, bot_id)
|
def __init__(
    self,
    *,
    bot_id_pattern: re.Pattern[str],
    runtime_service: RuntimeService,
    refresh_bot_runtime_status: RefreshBotRuntimeStatus,
    resolve_bot_provider_target: ResolveBotProviderTarget,
    provider_target_from_node: ProviderTargetFromNode,
    default_provider_target: DefaultProviderTarget,
    ensure_provider_target_supported: EnsureProviderTargetSupported,
    require_ready_image: RequireReadyImage,
    sync_bot_workspace_via_provider: SyncBotWorkspaceViaProvider,
    apply_provider_target_to_bot: ApplyProviderTargetToBot,
    serialize_provider_target_summary: SerializeProviderTargetSummary,
    serialize_bot: SerializeBot,
    node_display_name: NodeDisplayName,
    invalidate_bot_detail_cache: InvalidateBotCache,
    record_activity_event: RecordActivityEvent,
    normalize_env_params: NormalizeEnvParams,
    normalize_system_timezone: NormalizeSystemTimezone,
    normalize_resource_limits: NormalizeResourceLimits,
    write_env_store: WriteEnvStore,
    resolve_bot_env_params: ResolveBotEnvParams,
    clear_provider_target_override: ClearProviderTargetOverride,
    normalize_initial_channels: NormalizeInitialChannels,
    is_expected_edge_offline_error: ExpectedEdgeOfflineError,
    summarize_edge_exception: SummarizeEdgeException,
    resolve_edge_client: ResolveEdgeClient,
    node_metadata: NodeMetadata,
    log_edge_failure: LogEdgeFailure,
    invalidate_bot_messages_cache: InvalidateBotMessagesCache,
    logger: logging.Logger,
) -> None:
    """Wire up the service with its injected collaborators.

    Every dependency is passed in as a callable (see the module-level
    aliases) so the lifecycle logic stays decoupled from the concrete
    provider/runtime/cache implementations. Each argument is stored
    unchanged on a matching private attribute.
    """
    self._bot_id_pattern = bot_id_pattern
    self._runtime_service = runtime_service
    self._refresh_bot_runtime_status = refresh_bot_runtime_status
    self._resolve_bot_provider_target = resolve_bot_provider_target
    self._provider_target_from_node = provider_target_from_node
    self._default_provider_target = default_provider_target
    self._ensure_provider_target_supported = ensure_provider_target_supported
    self._require_ready_image = require_ready_image
    self._sync_bot_workspace_via_provider = sync_bot_workspace_via_provider
    self._apply_provider_target_to_bot = apply_provider_target_to_bot
    self._serialize_provider_target_summary = serialize_provider_target_summary
    self._serialize_bot = serialize_bot
    self._node_display_name = node_display_name
    self._invalidate_bot_detail_cache = invalidate_bot_detail_cache
    self._record_activity_event = record_activity_event
    self._normalize_env_params = normalize_env_params
    self._normalize_system_timezone = normalize_system_timezone
    self._normalize_resource_limits = normalize_resource_limits
    self._write_env_store = write_env_store
    self._resolve_bot_env_params = resolve_bot_env_params
    self._clear_provider_target_override = clear_provider_target_override
    self._normalize_initial_channels = normalize_initial_channels
    self._is_expected_edge_offline_error = is_expected_edge_offline_error
    self._summarize_edge_exception = summarize_edge_exception
    self._resolve_edge_client = resolve_edge_client
    self._node_metadata = node_metadata
    self._log_edge_failure = log_edge_failure
    self._invalidate_bot_messages_cache = invalidate_bot_messages_cache
    self._logger = logger
|
||||||
|
|
||||||
|
def _require_bot(self, *, session: Session, bot_id: str) -> BotInstance:
    """Fetch the bot row for *bot_id* or abort the request with 404."""
    record = session.get(BotInstance, bot_id)
    if not record:
        raise HTTPException(status_code=404, detail="Bot not found")
    return record
|
||||||
|
|
||||||
async def start_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
|
def create_bot(self, *, session: Session, payload: Any) -> Dict[str, Any]:
    """Create a bot: validate, persist the DB row, then sync its workspace.

    Validation failures raise HTTPException 400/409 before anything is
    written. After the row is committed, the workspace sync runs through
    the provider: an expected "edge offline" failure leaves the bot in a
    pending-sync state (with a warning activity event), while any other
    sync failure rolls the new row back and surfaces a 502.
    """
    normalized_bot_id = str(getattr(payload, "id", "") or "").strip()
    if not normalized_bot_id:
        raise HTTPException(status_code=400, detail="Bot ID is required")
    if not self._bot_id_pattern.fullmatch(normalized_bot_id):
        raise HTTPException(status_code=400, detail="Bot ID can only contain letters, numbers, and underscores")
    if session.get(BotInstance, normalized_bot_id):
        raise HTTPException(status_code=409, detail=f"Bot ID already exists: {normalized_bot_id}")

    normalized_env_params = self._normalize_env_params(getattr(payload, "env_params", None))
    try:
        normalized_env_params["TZ"] = self._normalize_system_timezone(getattr(payload, "system_timezone", None))
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc

    provider_target = normalize_provider_target(
        {
            "node_id": getattr(payload, "node_id", None),
            "transport_kind": getattr(payload, "transport_kind", None),
            "runtime_kind": getattr(payload, "runtime_kind", None),
            "core_adapter": getattr(payload, "core_adapter", None),
        },
        fallback=self._provider_target_from_node(getattr(payload, "node_id", None)) or self._default_provider_target(),
    )
    self._ensure_provider_target_supported(provider_target)

    normalized_image_tag = str(getattr(payload, "image_tag", "") or "").strip()
    if provider_target.runtime_kind == "docker":
        # Docker runtimes need the image pulled and ready before creation.
        self._require_ready_image(session, normalized_image_tag, require_local_image=True)

    bot = BotInstance(
        id=normalized_bot_id,
        name=getattr(payload, "name", None),
        enabled=bool(getattr(payload, "enabled", True)) if getattr(payload, "enabled", None) is not None else True,
        access_password="",
        image_tag=normalized_image_tag,
        node_id=provider_target.node_id,
        transport_kind=provider_target.transport_kind,
        runtime_kind=provider_target.runtime_kind,
        core_adapter=provider_target.core_adapter,
        workspace_dir=os.path.join(BOTS_WORKSPACE_ROOT, normalized_bot_id),
    )
    session.add(bot)
    session.commit()
    session.refresh(bot)

    resource_limits = self._normalize_resource_limits(
        getattr(payload, "cpu_cores", None),
        getattr(payload, "memory_mb", None),
        getattr(payload, "storage_gb", None),
    )

    def _opt_flag(name: str) -> bool:
        # Optional boolean payload field; absent/None means False.
        value = getattr(payload, name, None)
        return bool(value) if value is not None else False

    workspace_synced = True
    sync_error_detail = ""
    try:
        self._write_env_store(normalized_bot_id, normalized_env_params)
        self._sync_bot_workspace_via_provider(
            session,
            bot,
            target_override=provider_target,
            channels_override=self._normalize_initial_channels(normalized_bot_id, getattr(payload, "channels", None)),
            global_delivery_override={
                "sendProgress": _opt_flag("send_progress"),
                "sendToolHints": _opt_flag("send_tool_hints"),
            },
            runtime_overrides={
                "llm_provider": getattr(payload, "llm_provider", None),
                "llm_model": getattr(payload, "llm_model", None),
                "api_key": getattr(payload, "api_key", None),
                "api_base": getattr(payload, "api_base", "") or "",
                "temperature": getattr(payload, "temperature", None),
                "top_p": getattr(payload, "top_p", None),
                "max_tokens": getattr(payload, "max_tokens", None),
                "cpu_cores": resource_limits["cpu_cores"],
                "memory_mb": resource_limits["memory_mb"],
                "storage_gb": resource_limits["storage_gb"],
                "node_id": provider_target.node_id,
                "transport_kind": provider_target.transport_kind,
                "runtime_kind": provider_target.runtime_kind,
                "core_adapter": provider_target.core_adapter,
                # system_prompt and soul_md mirror each other; either one
                # supplied by the caller fills both, else the default applies.
                "system_prompt": getattr(payload, "system_prompt", None) or getattr(payload, "soul_md", None) or DEFAULT_SOUL_MD,
                "soul_md": getattr(payload, "soul_md", None) or getattr(payload, "system_prompt", None) or DEFAULT_SOUL_MD,
                "agents_md": getattr(payload, "agents_md", None) or DEFAULT_AGENTS_MD,
                "user_md": getattr(payload, "user_md", None) or DEFAULT_USER_MD,
                "tools_md": getattr(payload, "tools_md", None) or DEFAULT_TOOLS_MD,
                "identity_md": getattr(payload, "identity_md", None) or DEFAULT_IDENTITY_MD,
                "send_progress": _opt_flag("send_progress"),
                "send_tool_hints": _opt_flag("send_tool_hints"),
            },
        )
    except Exception as exc:
        if self._is_expected_edge_offline_error(exc):
            # Node unreachable: keep the bot, flag the sync as pending.
            workspace_synced = False
            sync_error_detail = self._summarize_edge_exception(exc)
            self._logger.info(
                "Create bot pending sync due to offline edge bot_id=%s node=%s detail=%s",
                normalized_bot_id,
                provider_target.node_id,
                sync_error_detail,
            )
        else:
            # Unexpected sync failure: undo the creation, then surface 502.
            detail = self._summarize_edge_exception(exc)
            try:
                doomed = session.get(BotInstance, normalized_bot_id)
                if doomed is not None:
                    session.delete(doomed)
                    session.commit()
                self._clear_provider_target_override(normalized_bot_id)
            except Exception:
                session.rollback()
            raise HTTPException(status_code=502, detail=f"Failed to initialize bot workspace: {detail}") from exc

    session.refresh(bot)
    self._record_activity_event(
        session,
        normalized_bot_id,
        "bot_created",
        channel="system",
        detail=f"Bot {normalized_bot_id} created",
        metadata={
            "image_tag": normalized_image_tag,
            "workspace_synced": workspace_synced,
            "sync_error": sync_error_detail if not workspace_synced else "",
        },
    )
    if not workspace_synced:
        self._record_activity_event(
            session,
            normalized_bot_id,
            "bot_warning",
            channel="system",
            detail="Bot created, but node is offline. Workspace sync is pending.",
            metadata={"sync_error": sync_error_detail, "node_id": provider_target.node_id},
        )
    session.commit()
    self._invalidate_bot_detail_cache(normalized_bot_id)
    return self._serialize_bot(bot)
|
||||||
|
|
||||||
|
def update_bot(self, *, session: Session, bot_id: str, payload: Any) -> Dict[str, Any]:
    """Apply a partial update to a bot's DB fields, runtime config, and env.

    Execution-target and image changes are rejected with 400 — those go
    through the deploy endpoint. Runtime-config fields are routed to the
    provider workspace sync; env changes are written after a successful
    sync and rolled back if the DB commit fails.
    """
    bot = self._require_bot(session=session, bot_id=bot_id)
    update_data = payload.model_dump(exclude_unset=True)

    env_params = update_data.pop("env_params", None) if isinstance(update_data, dict) else None
    system_timezone = update_data.pop("system_timezone", None) if isinstance(update_data, dict) else None
    normalized_system_timezone: Optional[str] = None
    if system_timezone is not None:
        try:
            normalized_system_timezone = self._normalize_system_timezone(system_timezone)
        except ValueError as exc:
            raise HTTPException(status_code=400, detail=str(exc)) from exc

    runtime_overrides: Dict[str, Any] = {}
    if isinstance(update_data, dict):
        # tools_config is managed by the MCP endpoints, never by update.
        update_data.pop("tools_config", None)

    runtime_fields = {
        "llm_provider",
        "llm_model",
        "api_key",
        "api_base",
        "temperature",
        "top_p",
        "max_tokens",
        "cpu_cores",
        "memory_mb",
        "storage_gb",
        "soul_md",
        "agents_md",
        "user_md",
        "tools_md",
        "identity_md",
        "send_progress",
        "send_tool_hints",
        "system_prompt",
    }
    execution_target_fields = {"node_id", "transport_kind", "runtime_kind", "core_adapter"}
    deploy_only_fields = {"image_tag", *execution_target_fields}
    if deploy_only_fields & set(update_data.keys()):
        raise HTTPException(
            status_code=400,
            detail=f"Use /api/bots/{bot_id}/deploy for execution target or image changes",
        )
    for field in runtime_fields:
        if field in update_data:
            runtime_overrides[field] = update_data.pop(field)

    # Target changes are deploy-only; kept None here for the sync call below.
    next_target: Optional[ProviderTarget] = None

    # Blank LLM credential/model strings mean "leave unchanged" — drop them.
    for text_field in ("llm_provider", "llm_model", "api_key"):
        if text_field in runtime_overrides:
            text = str(runtime_overrides.get(text_field) or "").strip()
            if not text:
                runtime_overrides.pop(text_field, None)
            else:
                runtime_overrides[text_field] = text
    if "api_base" in runtime_overrides:
        runtime_overrides["api_base"] = str(runtime_overrides.get("api_base") or "").strip()

    # Keep the system_prompt/soul_md pair mirrored.
    if "system_prompt" in runtime_overrides and "soul_md" not in runtime_overrides:
        runtime_overrides["soul_md"] = runtime_overrides["system_prompt"]
    if "soul_md" in runtime_overrides and "system_prompt" not in runtime_overrides:
        runtime_overrides["system_prompt"] = runtime_overrides["soul_md"]
    if {"cpu_cores", "memory_mb", "storage_gb"} & set(runtime_overrides.keys()):
        normalized_resources = self._normalize_resource_limits(
            runtime_overrides.get("cpu_cores"),
            runtime_overrides.get("memory_mb"),
            runtime_overrides.get("storage_gb"),
        )
        runtime_overrides.update(normalized_resources)

    # Only these columns are stored directly on the DB row.
    db_fields = {"name", "enabled"}
    for key, value in update_data.items():
        if key in db_fields:
            setattr(bot, key, value)

    previous_env_params: Optional[Dict[str, str]] = None
    next_env_params: Optional[Dict[str, str]] = None
    if env_params is not None or normalized_system_timezone is not None:
        previous_env_params = self._resolve_bot_env_params(bot_id)
        next_env_params = dict(previous_env_params)
        if env_params is not None:
            next_env_params = self._normalize_env_params(env_params)
        if normalized_system_timezone is not None:
            next_env_params["TZ"] = normalized_system_timezone

    global_delivery_override: Optional[Dict[str, Any]] = None
    if "send_progress" in runtime_overrides or "send_tool_hints" in runtime_overrides:
        global_delivery_override = {}
        if "send_progress" in runtime_overrides:
            global_delivery_override["sendProgress"] = bool(runtime_overrides.get("send_progress"))
        if "send_tool_hints" in runtime_overrides:
            global_delivery_override["sendToolHints"] = bool(runtime_overrides.get("send_tool_hints"))

    # Sync the workspace first; only persist env/DB state once it succeeds.
    self._sync_bot_workspace_via_provider(
        session,
        bot,
        target_override=next_target,
        runtime_overrides=runtime_overrides if runtime_overrides else None,
        global_delivery_override=global_delivery_override,
    )
    try:
        if next_env_params is not None:
            self._write_env_store(bot_id, next_env_params)
        if next_target is not None:
            self._apply_provider_target_to_bot(bot, next_target)
        session.add(bot)
        session.commit()
    except Exception:
        session.rollback()
        # Restore the previous env store so DB and env stay consistent.
        if previous_env_params is not None:
            self._write_env_store(bot_id, previous_env_params)
        raise

    session.refresh(bot)
    self._invalidate_bot_detail_cache(bot_id)
    return self._serialize_bot(bot)
|
||||||
|
|
||||||
|
async def start_bot(self, *, app_state: Any, session: Session, bot_id: str) -> Dict[str, Any]:
    """Start the bot's runtime after verifying the bot exists (404 otherwise)."""
    bot = self._require_bot(session=session, bot_id=bot_id)
    return await self._runtime_service.start_bot(app_state=app_state, session=session, bot=bot)
|
||||||
|
|
||||||
|
def stop_bot(self, *, app_state: Any, session: Session, bot_id: str) -> Dict[str, Any]:
    """Stop the bot's runtime after verifying the bot exists (404 otherwise)."""
    bot = self._require_bot(session=session, bot_id=bot_id)
    return self._runtime_service.stop_bot(app_state=app_state, session=session, bot=bot)
|
||||||
|
|
||||||
|
def enable_bot(self, *, session: Session, bot_id: str) -> Dict[str, Any]:
    """Mark the bot as enabled, record the activity event, and invalidate caches."""
    bot = self._require_bot(session=session, bot_id=bot_id)
    bot.enabled = True
    session.add(bot)
    self._record_activity_event(session, bot_id, "bot_enabled", channel="system", detail=f"Bot {bot_id} enabled")
    session.commit()
    self._invalidate_bot_detail_cache(bot_id)
    return {"status": "enabled", "enabled": True}
|
||||||
|
|
||||||
|
def disable_bot(self, *, app_state: Any, session: Session, bot_id: str) -> Dict[str, Any]:
    """Disable the bot via the shared inactive-transition path."""
    bot = self._require_bot(session=session, bot_id=bot_id)
    self._set_inactive(
        app_state=app_state,
        session=session,
        bot=bot,
        activity_type="bot_disabled",
        detail="disabled",
    )
    return {"status": "disabled", "enabled": False}
|
||||||
|
|
||||||
|
def deactivate_bot(self, *, app_state: Any, session: Session, bot_id: str) -> Dict[str, Any]:
    """Deactivate the bot via the shared inactive-transition path."""
    bot = self._require_bot(session=session, bot_id=bot_id)
    self._set_inactive(
        app_state=app_state,
        session=session,
        bot=bot,
        activity_type="bot_deactivated",
        detail="deactivated",
    )
    return {"status": "deactivated"}
|
||||||
|
|
||||||
|
def delete_bot(
    self,
    *,
    app_state: Any,
    session: Session,
    bot_id: str,
    delete_workspace: bool = True,
) -> Dict[str, Any]:
    """Delete a bot: stop it, optionally purge its workspace, drop all rows.

    Workspace purging is best-effort: an edge purge failure is logged and
    reflected in ``workspace_deleted`` rather than aborting the delete.
    """
    bot = self._require_bot(session=session, bot_id=bot_id)
    target = self._resolve_bot_provider_target(bot)

    # Best-effort stop; deletion proceeds even if the runtime is gone.
    try:
        self._runtime_service.stop_bot(app_state=app_state, session=session, bot=bot)
    except Exception:
        pass

    workspace_deleted = not bool(delete_workspace)
    if delete_workspace:
        if target.transport_kind == "edge":
            try:
                workspace_root = str(self._node_metadata(target.node_id).get("workspace_root") or "").strip() or None
                purge_result = self._resolve_edge_client(target).purge_workspace(
                    bot_id=bot_id,
                    workspace_root=workspace_root,
                )
                workspace_deleted = str(purge_result.get("status") or "").strip().lower() in {"deleted", "not_found"}
            except Exception as exc:
                self._log_edge_failure(
                    self._logger,
                    key=f"bot-delete-workspace:{bot_id}",
                    exc=exc,
                    message=f"Failed to purge edge workspace for bot_id={bot_id}",
                )
                workspace_deleted = False

        # Local workspace cleanup (NOTE(review): nesting reconstructed —
        # presumed guarded by delete_workspace so an opt-out never deletes).
        local_workspace = os.path.join(BOTS_WORKSPACE_ROOT, bot_id)
        if os.path.isdir(local_workspace):
            shutil.rmtree(local_workspace, ignore_errors=True)
            workspace_deleted = True

    # Cascade-delete every table that references this bot.
    for model in (BotMessage, TopicItem, TopicTopic, BotRequestUsage, BotActivityEvent, BotSkillInstall):
        for row in session.exec(select(model).where(model.bot_id == bot_id)).all():
            session.delete(row)

    session.delete(bot)
    session.commit()
    self._clear_provider_target_override(bot_id)
    self._invalidate_bot_detail_cache(bot_id)
    self._invalidate_bot_messages_cache(bot_id)
    return {"status": "deleted", "workspace_deleted": workspace_deleted}
|
||||||
|
|
||||||
|
async def deploy_bot(
    self,
    *,
    app_state: Any,
    session: Session,
    bot_id: str,
    node_id: str,
    runtime_kind: Optional[str] = None,
    image_tag: Optional[str] = None,
    auto_start: bool = False,
) -> Dict[str, Any]:
    """Move a stopped bot to another managed node and/or onto a new image.

    Raises HTTP 409 when the bot is still running, HTTP 400 when the target
    node is unknown or nothing would change. When ``auto_start`` is set the
    bot is started again after the deploy. Returns a summary dict with the
    previous/next targets and image tags.
    """
    bot = self._require_bot(session=session, bot_id=bot_id)

    # Refresh persisted runtime status before making any decision.
    live_status = self._refresh_bot_runtime_status(app_state, bot)
    session.add(bot)
    session.commit()
    if live_status == "RUNNING":
        raise HTTPException(status_code=409, detail="Stop the bot before deploy or migrate")

    current_target = self._resolve_bot_provider_target(bot)
    base_target = self._provider_target_from_node(node_id)
    if base_target is None:
        raise HTTPException(status_code=400, detail=f"Managed node not found: {node_id}")
    next_target = normalize_provider_target(
        {"node_id": node_id, "runtime_kind": runtime_kind},
        fallback=base_target,
    )
    self._ensure_provider_target_supported(next_target)

    current_image = str(bot.image_tag or "").strip()
    new_image = str(image_tag or "").strip()
    if next_target.runtime_kind == "docker":
        # Docker targets keep the existing image when none was requested.
        new_image = new_image or current_image
    image_changed = new_image != str(bot.image_tag or "").strip()
    target_changed = next_target.key != current_target.key
    if not image_changed and not target_changed:
        raise HTTPException(status_code=400, detail="No deploy changes detected")

    if next_target.runtime_kind == "docker":
        self._require_ready_image(session, new_image, require_local_image=True)

    # Push workspace content to the new provider target before switching over.
    self._sync_bot_workspace_via_provider(
        session,
        bot,
        target_override=next_target,
        runtime_overrides=provider_target_to_dict(next_target),
    )

    previous_image_tag = str(bot.image_tag or "").strip()
    bot.image_tag = new_image
    self._apply_provider_target_to_bot(bot, next_target)
    bot.updated_at = datetime.utcnow()
    session.add(bot)
    self._record_activity_event(
        session,
        bot_id,
        "bot_deployed",
        channel="system",
        detail=(
            f"Bot {bot_id} deployed to {self._node_display_name(next_target.node_id)}"
            if target_changed
            else f"Bot {bot_id} redeployed with image {new_image}"
        ),
        metadata={
            "previous_target": self._serialize_provider_target_summary(current_target),
            "next_target": self._serialize_provider_target_summary(next_target),
            "previous_image_tag": previous_image_tag,
            "image_tag": new_image,
            "auto_start": bool(auto_start),
        },
    )
    session.commit()
    session.refresh(bot)

    started = False
    if bool(auto_start):
        await self._runtime_service.start_bot(app_state=app_state, session=session, bot=bot)
        session.refresh(bot)
        started = True

    self._invalidate_bot_detail_cache(bot_id)
    return {
        "status": "deployed",
        "bot": self._serialize_bot(bot),
        "started": started,
        "image_tag": new_image,
        "previous_image_tag": previous_image_tag,
        "previous_target": self._serialize_provider_target_summary(current_target),
        "next_target": self._serialize_provider_target_summary(next_target),
    }
|
||||||
|
|
||||||
|
def _set_inactive(
    self,
    *,
    app_state: Any,
    session: Session,
    bot: BotInstance,
    activity_type: str,
    detail: str,
) -> None:
    """Best-effort stop the bot, then persist it as disabled/stopped.

    Records an activity event of ``activity_type`` with the given ``detail``
    suffix and invalidates the detail cache. Stop failures are deliberately
    swallowed: the bot must end up marked inactive either way.
    """
    bot_id = str(bot.id or "").strip()
    try:
        self._runtime_service.stop_bot(app_state=app_state, session=session, bot=bot)
    except Exception:
        # Best-effort: the runtime may already be gone; still mark inactive.
        pass
    bot.enabled = False
    # Fix: the merge-conflict residue duplicated this assignment (and the
    # session.add/commit below) and left a stray unconditional
    # `raise RuntimeError(...)` that made every call fail after commit.
    bot.docker_status = "STOPPED"
    # Preserve an ERROR state; anything else falls back to IDLE.
    if str(bot.current_state or "").upper() not in {"ERROR"}:
        bot.current_state = "IDLE"
    session.add(bot)
    self._record_activity_event(session, bot_id, activity_type, channel="system", detail=f"Bot {bot_id} {detail}")
    session.commit()
    self._invalidate_bot_detail_cache(bot_id)
|
||||||
|
|
||||||
actual_status = docker_manager.get_bot_status(bot_id)
|
|
||||||
bot.docker_status = actual_status
|
|
||||||
if actual_status != "RUNNING":
|
|
||||||
session.add(bot)
|
|
||||||
session.commit()
|
|
||||||
_invalidate_bot_detail_cache(bot_id)
|
|
||||||
raise RuntimeError("Bot container failed shortly after startup. Check bot logs/config.")
|
|
||||||
|
|
||||||
asyncio.create_task(record_agent_loop_ready_warning(bot_id))
|
|
||||||
session.add(bot)
|
|
||||||
record_activity_event(session, bot_id, "bot_started", channel="system", detail=f"Container started for {bot_id}")
|
|
||||||
session.commit()
|
|
||||||
_invalidate_bot_detail_cache(bot_id)
|
|
||||||
return {"status": "started"}
|
|
||||||
|
|
||||||
|
|
||||||
def stop_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
    """Stop a bot's container; a disabled bot cannot be operated on."""
    record = _get_bot_or_404(session, bot_id)
    if not bool(getattr(record, "enabled", True)):
        raise PermissionError("Bot is disabled. Enable it first.")

    docker_manager.stop_bot(bot_id)
    record.docker_status = "STOPPED"
    session.add(record)
    record_activity_event(session, bot_id, "bot_stopped", channel="system", detail=f"Container stopped for {bot_id}")
    session.commit()
    _invalidate_bot_detail_cache(bot_id)
    return {"status": "stopped"}
|
|
||||||
|
|
||||||
|
|
||||||
def enable_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
    """Turn the bot's enabled flag on and record the change."""
    record = _get_bot_or_404(session, bot_id)
    record.enabled = True
    session.add(record)
    record_activity_event(session, bot_id, "bot_enabled", channel="system", detail=f"Bot {bot_id} enabled")
    session.commit()
    _invalidate_bot_detail_cache(bot_id)
    return {"status": "enabled", "enabled": True}
|
|
||||||
|
|
||||||
|
|
||||||
def disable_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
    """Stop the bot's container and mark the bot disabled."""
    record = _get_bot_or_404(session, bot_id)
    docker_manager.stop_bot(bot_id)
    record.enabled = False
    record.docker_status = "STOPPED"
    # Preserve an ERROR state; anything else falls back to IDLE.
    if str(record.current_state or "").upper() not in {"ERROR"}:
        record.current_state = "IDLE"
    session.add(record)
    record_activity_event(session, bot_id, "bot_disabled", channel="system", detail=f"Bot {bot_id} disabled")
    session.commit()
    _invalidate_bot_detail_cache(bot_id)
    return {"status": "disabled", "enabled": False}
|
|
||||||
|
|
||||||
|
|
||||||
def deactivate_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
    """Stop the bot's container and flag it deactivated (disabled + stopped)."""
    record = _get_bot_or_404(session, bot_id)
    docker_manager.stop_bot(bot_id)
    record.enabled = False
    record.docker_status = "STOPPED"
    # Preserve an ERROR state; anything else falls back to IDLE.
    if str(record.current_state or "").upper() not in {"ERROR"}:
        record.current_state = "IDLE"
    session.add(record)
    record_activity_event(session, bot_id, "bot_deactivated", channel="system", detail=f"Bot {bot_id} deactivated")
    session.commit()
    _invalidate_bot_detail_cache(bot_id)
    return {"status": "deactivated"}
|
|
||||||
|
|
||||||
|
|
||||||
def delete_bot_instance(session: Session, bot_id: str, delete_workspace: bool = True) -> Dict[str, Any]:
    """Remove a bot: container, all related rows, and (optionally) its workspace."""
    record = _get_bot_or_404(session, bot_id)
    docker_manager.stop_bot(bot_id, remove=True)

    # Purge every table that carries per-bot rows before deleting the bot itself.
    related_models = (
        BotMessage,
        TopicItem,
        TopicTopic,
        BotRequestUsage,
        BotActivityEvent,
        BotSkillInstall,
    )
    for model in related_models:
        for row in session.exec(select(model).where(model.bot_id == bot_id)).all():
            session.delete(row)

    session.delete(record)
    session.commit()

    if delete_workspace:
        workspace_root = os.path.join(BOTS_WORKSPACE_ROOT, bot_id)
        if os.path.isdir(workspace_root):
            shutil.rmtree(workspace_root, ignore_errors=True)

    _invalidate_bot_detail_cache(bot_id)
    _invalidate_bot_messages_cache(bot_id)
    return {"status": "deleted", "workspace_deleted": bool(delete_workspace)}
|
|
||||||
|
|
|
||||||
|
|
@ -1,339 +0,0 @@
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
|
|
||||||
from fastapi import HTTPException
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
from core.cache import cache
|
|
||||||
from core.docker_instance import docker_manager
|
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT
|
|
||||||
from models.bot import BotInstance, NanobotImage
|
|
||||||
from schemas.bot import BotCreateRequest, BotUpdateRequest
|
|
||||||
from services.bot_service import (
|
|
||||||
normalize_initial_bot_channels,
|
|
||||||
normalize_bot_system_timezone,
|
|
||||||
resolve_bot_runtime_env_params,
|
|
||||||
serialize_bot_detail,
|
|
||||||
serialize_bot_list_entry,
|
|
||||||
sync_bot_workspace_channels,
|
|
||||||
)
|
|
||||||
from services.bot_storage_service import (
|
|
||||||
normalize_bot_env_params,
|
|
||||||
normalize_bot_resource_limits,
|
|
||||||
write_bot_env_params,
|
|
||||||
write_bot_resource_limits,
|
|
||||||
)
|
|
||||||
from services.cache_service import _cache_key_bot_detail, _cache_key_bots_list, _invalidate_bot_detail_cache
|
|
||||||
from services.platform_activity_service import record_activity_event
|
|
||||||
|
|
||||||
BOT_ID_PATTERN = re.compile(r"^[A-Za-z0-9_]+$")
|
|
||||||
MANAGED_WORKSPACE_FILENAMES = ("AGENTS.md", "SOUL.md", "USER.md", "TOOLS.md", "IDENTITY.md")
|
|
||||||
|
|
||||||
|
|
||||||
def _managed_bot_file_paths(bot_id: str) -> Dict[str, str]:
    """Map snapshot keys to absolute paths of the files managed for a bot.

    Keys are "config"/"env"/"resources" for the .nanobot control files, plus
    one "workspace:<filename>" entry per managed workspace markdown file.
    """
    bot_root = os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot")
    workspace_root = os.path.join(bot_root, "workspace")
    paths = {
        "config": os.path.join(bot_root, "config.json"),
        "env": os.path.join(bot_root, "env.json"),
        "resources": os.path.join(bot_root, "resources.json"),
    }
    for filename in MANAGED_WORKSPACE_FILENAMES:
        # Fix: the key must include the filename. Previously the f-string had a
        # garbled constant key, so every iteration overwrote the same entry and
        # snapshot/restore only ever covered the last managed workspace file.
        paths[f"workspace:{filename}"] = os.path.join(workspace_root, filename)
    return paths
|
|
||||||
|
|
||||||
|
|
||||||
def _snapshot_managed_bot_files(bot_id: str) -> Dict[str, Optional[bytes]]:
    """Capture current bytes of every managed bot file; None marks a missing file."""
    result: Dict[str, Optional[bytes]] = {}
    for key, path in _managed_bot_file_paths(bot_id).items():
        if not os.path.isfile(path):
            result[key] = None
            continue
        with open(path, "rb") as handle:
            result[key] = handle.read()
    return result
|
|
||||||
|
|
||||||
|
|
||||||
def _restore_managed_bot_files(bot_id: str, snapshot: Dict[str, Optional[bytes]]) -> None:
    """Write managed bot files back to a previously captured snapshot.

    Entries recorded as None are deleted; others are rewritten atomically via
    a sibling temp file followed by os.replace.
    """
    for key, path in _managed_bot_file_paths(bot_id).items():
        content = snapshot.get(key)
        if content is None:
            if os.path.exists(path):
                os.remove(path)
            continue
        os.makedirs(os.path.dirname(path), exist_ok=True)
        staging_path = f"{path}.tmp"
        with open(staging_path, "wb") as handle:
            handle.write(content)
        os.replace(staging_path, path)
|
|
||||||
|
|
||||||
|
|
||||||
def _cleanup_bot_workspace_root(bot_id: str) -> None:
    """Delete the bot's entire on-disk workspace tree, ignoring errors."""
    root = os.path.join(BOTS_WORKSPACE_ROOT, bot_id)
    if os.path.isdir(root):
        shutil.rmtree(root, ignore_errors=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _require_runtime_text(raw: Any, *, field: str) -> str:
|
|
||||||
value = str(raw if raw is not None else "").strip()
|
|
||||||
if not value:
|
|
||||||
raise HTTPException(status_code=400, detail=f"{field} is required")
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[str, Any]:
    """Create a bot row plus its on-disk workspace; roll back both on failure.

    Validates the bot id, the requested image (registered, READY, present
    locally), required LLM runtime fields, and the timezone, then writes the
    env/resource files and syncs workspace channels inside one transaction.
    """
    new_id = str(payload.id or "").strip()
    if not new_id:
        raise HTTPException(status_code=400, detail="Bot ID is required")
    if not BOT_ID_PATTERN.fullmatch(new_id):
        raise HTTPException(status_code=400, detail="Bot ID can only contain letters, numbers, and underscores")
    if session.get(BotInstance, new_id):
        raise HTTPException(status_code=409, detail=f"Bot ID already exists: {new_id}")

    image_row = session.get(NanobotImage, payload.image_tag)
    if not image_row:
        raise HTTPException(status_code=400, detail=f"Image not registered in DB: {payload.image_tag}")
    if image_row.status != "READY":
        raise HTTPException(status_code=400, detail=f"Image status is not READY: {payload.image_tag} ({image_row.status})")
    if not docker_manager.has_image(payload.image_tag):
        raise HTTPException(status_code=400, detail=f"Docker image not found locally: {payload.image_tag}")

    env_params = normalize_bot_env_params(payload.env_params)
    try:
        env_params["TZ"] = normalize_bot_system_timezone(payload.system_timezone)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc

    llm_provider = _require_runtime_text(payload.llm_provider, field="llm_provider")
    llm_model = _require_runtime_text(payload.llm_model, field="llm_model")
    api_key = _require_runtime_text(payload.api_key, field="api_key")
    api_base = _require_runtime_text(payload.api_base, field="api_base")

    bot = BotInstance(
        id=new_id,
        name=payload.name,
        enabled=bool(payload.enabled) if payload.enabled is not None else True,
        access_password=str(payload.access_password or ""),
        image_tag=payload.image_tag,
        workspace_dir=os.path.join(BOTS_WORKSPACE_ROOT, new_id),
    )
    limits = normalize_bot_resource_limits(payload.cpu_cores, payload.memory_mb, payload.storage_gb)
    send_progress = bool(payload.send_progress) if payload.send_progress is not None else False
    send_tool_hints = bool(payload.send_tool_hints) if payload.send_tool_hints is not None else False
    try:
        session.add(bot)
        session.flush()
        write_bot_env_params(new_id, env_params)
        write_bot_resource_limits(
            new_id,
            limits["cpu_cores"],
            limits["memory_mb"],
            limits["storage_gb"],
        )
        sync_bot_workspace_channels(
            session,
            new_id,
            channels_override=normalize_initial_bot_channels(new_id, payload.channels),
            global_delivery_override={
                "sendProgress": send_progress,
                "sendToolHints": send_tool_hints,
            },
            runtime_overrides={
                "llm_provider": llm_provider,
                "llm_model": llm_model,
                "api_key": api_key,
                "api_base": api_base,
                "temperature": payload.temperature,
                "top_p": payload.top_p,
                "max_tokens": payload.max_tokens,
                "cpu_cores": limits["cpu_cores"],
                "memory_mb": limits["memory_mb"],
                "storage_gb": limits["storage_gb"],
                "soul_md": payload.soul_md,
                "agents_md": payload.agents_md,
                "user_md": payload.user_md,
                "tools_md": payload.tools_md,
                "identity_md": payload.identity_md,
                "send_progress": send_progress,
                "send_tool_hints": send_tool_hints,
            },
        )
        record_activity_event(
            session,
            new_id,
            "bot_created",
            channel="system",
            detail=f"Bot {new_id} created",
            metadata={"image_tag": payload.image_tag},
        )
        session.commit()
        session.refresh(bot)
    except Exception:
        # Keep DB and filesystem consistent: undo the row and remove any
        # partially written workspace files.
        session.rollback()
        _cleanup_bot_workspace_root(new_id)
        raise
    _invalidate_bot_detail_cache(new_id)
    return serialize_bot_detail(bot)
|
|
||||||
|
|
||||||
|
|
||||||
def list_bots_with_cache(session: Session) -> List[Dict[str, Any]]:
    """List all bots (cached for 30s), reconciling stored docker status first."""
    hit = cache.get_json(_cache_key_bots_list())
    if isinstance(hit, list):
        return hit
    instances = session.exec(
        select(BotInstance).order_by(BotInstance.created_at.desc(), BotInstance.id.asc())
    ).all()
    status_changed = False
    for instance in instances:
        live_status = docker_manager.get_bot_status(instance.id)
        if instance.docker_status == live_status:
            continue
        instance.docker_status = live_status
        # A bot that is no longer running drops back to IDLE unless it errored.
        if live_status != "RUNNING" and str(instance.current_state or "").upper() not in {"ERROR"}:
            instance.current_state = "IDLE"
        session.add(instance)
        status_changed = True
    if status_changed:
        session.commit()
        for instance in instances:
            session.refresh(instance)
    rows = [serialize_bot_list_entry(instance) for instance in instances]
    cache.set_json(_cache_key_bots_list(), rows, ttl=30)
    return rows
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_detail_cached(session: Session, *, bot_id: str) -> Dict[str, Any]:
    """Serve the bot detail payload from cache, falling back to the DB (TTL 30s)."""
    hit = cache.get_json(_cache_key_bot_detail(bot_id))
    if isinstance(hit, dict):
        return hit
    record = session.get(BotInstance, bot_id)
    if not record:
        raise HTTPException(status_code=404, detail="Bot not found")
    detail = serialize_bot_detail(record)
    cache.set_json(_cache_key_bot_detail(bot_id), detail, ttl=30)
    return detail
|
|
||||||
|
|
||||||
|
|
||||||
def authenticate_bot_page_access(session: Session, *, bot_id: str, password: str) -> Dict[str, Any]:
    """Check a bot page password; password protection is off when none is set."""
    record = session.get(BotInstance, bot_id)
    if not record:
        raise HTTPException(status_code=404, detail="Bot not found")

    stored = str(record.access_password or "").strip()
    if not stored:
        # No password configured: access is open for this bot.
        return {"ok": True, "enabled": False, "bot_id": bot_id}

    supplied = str(password or "").strip()
    if not supplied:
        raise HTTPException(status_code=401, detail="Bot access password required")
    if supplied != stored:
        raise HTTPException(status_code=401, detail="Invalid bot access password")
    return {"ok": True, "enabled": True, "bot_id": bot_id}
|
|
||||||
|
|
||||||
|
|
||||||
def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateRequest) -> Dict[str, Any]:
    """Apply a partial update to a bot and its workspace files transactionally.

    Managed on-disk files are snapshotted first so a failed DB commit can
    restore them; runtime/LLM fields are routed through the workspace sync
    rather than stored on the row.
    """
    record = session.get(BotInstance, bot_id)
    if not record:
        raise HTTPException(status_code=404, detail="Bot not found")
    file_snapshot = _snapshot_managed_bot_files(bot_id)

    changes = payload.model_dump(exclude_unset=True)
    if "image_tag" in changes and changes["image_tag"]:
        image_tag = str(changes["image_tag"]).strip()
        image_row = session.get(NanobotImage, image_tag)
        if not image_row:
            raise HTTPException(status_code=400, detail=f"Image not registered in DB: {image_tag}")
        if image_row.status != "READY":
            raise HTTPException(status_code=400, detail=f"Image status is not READY: {image_tag} ({image_row.status})")
        if not docker_manager.has_image(image_tag):
            raise HTTPException(status_code=400, detail=f"Docker image not found locally: {image_tag}")

    env_params = None
    raw_timezone = None
    if isinstance(changes, dict):
        env_params = changes.pop("env_params", None)
        raw_timezone = changes.pop("system_timezone", None)
        changes.pop("tools_config", None)
    normalized_system_timezone: Optional[str] = None
    if raw_timezone is not None:
        try:
            normalized_system_timezone = normalize_bot_system_timezone(raw_timezone)
        except ValueError as exc:
            raise HTTPException(status_code=400, detail=str(exc)) from exc

    # Runtime/LLM settings are not columns: extract them for the workspace sync.
    runtime_field_names = (
        "llm_provider",
        "llm_model",
        "api_key",
        "api_base",
        "temperature",
        "top_p",
        "max_tokens",
        "cpu_cores",
        "memory_mb",
        "storage_gb",
        "soul_md",
        "agents_md",
        "user_md",
        "tools_md",
        "identity_md",
        "send_progress",
        "send_tool_hints",
    )
    runtime_overrides: Dict[str, Any] = {}
    for name in runtime_field_names:
        if name in changes:
            runtime_overrides[name] = changes.pop(name)

    for text_field in ("llm_provider", "llm_model", "api_key", "api_base"):
        if text_field in runtime_overrides:
            runtime_overrides[text_field] = _require_runtime_text(
                runtime_overrides.get(text_field),
                field=text_field,
            )
    if {"cpu_cores", "memory_mb", "storage_gb"} & set(runtime_overrides.keys()):
        runtime_overrides.update(
            normalize_bot_resource_limits(
                runtime_overrides.get("cpu_cores"),
                runtime_overrides.get("memory_mb"),
                runtime_overrides.get("storage_gb"),
            )
        )

    # Only a small whitelist of plain columns may be set directly on the row.
    for column, value in changes.items():
        if column in {"name", "image_tag", "access_password", "enabled"}:
            setattr(record, column, value)
    try:
        session.add(record)
        session.flush()

        if env_params is not None or normalized_system_timezone is not None:
            next_env_params = resolve_bot_runtime_env_params(bot_id)
            if env_params is not None:
                next_env_params = normalize_bot_env_params(env_params)
            if normalized_system_timezone is not None:
                next_env_params["TZ"] = normalized_system_timezone
            write_bot_env_params(bot_id, next_env_params)

        global_delivery_override: Optional[Dict[str, Any]] = None
        if "send_progress" in runtime_overrides or "send_tool_hints" in runtime_overrides:
            global_delivery_override = {}
            if "send_progress" in runtime_overrides:
                global_delivery_override["sendProgress"] = bool(runtime_overrides.get("send_progress"))
            if "send_tool_hints" in runtime_overrides:
                global_delivery_override["sendToolHints"] = bool(runtime_overrides.get("send_tool_hints"))

        sync_bot_workspace_channels(
            session,
            bot_id,
            runtime_overrides=runtime_overrides if runtime_overrides else None,
            global_delivery_override=global_delivery_override,
        )
        session.commit()
        session.refresh(record)
    except Exception:
        # Roll back the row AND the on-disk files so both stay consistent.
        session.rollback()
        _restore_managed_bot_files(bot_id, file_snapshot)
        reloaded = session.get(BotInstance, bot_id)
        if reloaded:
            session.refresh(reloaded)
            record = reloaded
        raise
    _invalidate_bot_detail_cache(bot_id)
    return serialize_bot_detail(record)
|
|
||||||
|
|
@ -1,71 +0,0 @@
|
||||||
import re
|
|
||||||
from typing import Any, Dict
|
|
||||||
|
|
||||||
_MCP_SERVER_NAME_RE = re.compile(r"[A-Za-z0-9][A-Za-z0-9._-]{0,63}")
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_mcp_servers(raw: Any) -> Dict[str, Dict[str, Any]]:
    """Validate a raw MCP-server mapping, keeping only well-formed entries.

    Entries are dropped when the name fails the allowed pattern, the config is
    not a dict, or no URL is present. Transport type falls back to
    "streamableHttp" and toolTimeout is clamped to the 1..600 second range.
    """
    if not isinstance(raw, dict):
        return {}
    normalized: Dict[str, Dict[str, Any]] = {}
    for raw_name, cfg in raw.items():
        name = str(raw_name or "").strip()
        if not name or not _MCP_SERVER_NAME_RE.fullmatch(name) or not isinstance(cfg, dict):
            continue
        url = str(cfg.get("url") or "").strip()
        if not url:
            continue
        transport = str(cfg.get("type") or "streamableHttp").strip()
        if transport not in {"streamableHttp", "sse"}:
            transport = "streamableHttp"
        headers: Dict[str, str] = {}
        raw_headers = cfg.get("headers")
        if isinstance(raw_headers, dict):
            for raw_key, raw_value in raw_headers.items():
                header_name = str(raw_key or "").strip()
                if header_name:
                    headers[header_name] = str(raw_value or "").strip()
        try:
            timeout = int(cfg.get("toolTimeout", 60))
        except Exception:
            timeout = 60
        normalized[name] = {
            "type": transport,
            "url": url,
            "headers": headers,
            "toolTimeout": max(1, min(timeout, 600)),
        }
    return normalized
|
|
||||||
|
|
||||||
|
|
||||||
def _merge_mcp_servers_preserving_extras(
|
|
||||||
current_raw: Any,
|
|
||||||
normalized: Dict[str, Dict[str, Any]],
|
|
||||||
) -> Dict[str, Dict[str, Any]]:
|
|
||||||
current_map = current_raw if isinstance(current_raw, dict) else {}
|
|
||||||
merged: Dict[str, Dict[str, Any]] = {}
|
|
||||||
for name, normalized_cfg in normalized.items():
|
|
||||||
base = current_map.get(name)
|
|
||||||
base_cfg = dict(base) if isinstance(base, dict) else {}
|
|
||||||
next_cfg = dict(base_cfg)
|
|
||||||
next_cfg.update(normalized_cfg)
|
|
||||||
merged[name] = next_cfg
|
|
||||||
return merged
|
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_mcp_servers_in_config_data(config_data: Dict[str, Any]) -> Dict[str, Dict[str, Any]]:
    """Normalize config_data["tools"]["mcpServers"] in place; return the merged map."""
    if not isinstance(config_data, dict):
        return {}
    tools_section = config_data.get("tools")
    if not isinstance(tools_section, dict):
        tools_section = {}
    raw_servers = tools_section.get("mcpServers")
    merged = _merge_mcp_servers_preserving_extras(raw_servers, _normalize_mcp_servers(raw_servers))
    tools_section["mcpServers"] = merged
    config_data["tools"] = tools_section
    return merged
|
|
||||||
|
|
@ -0,0 +1,246 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Callable, Dict, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from models.bot import BotInstance, BotMessage
|
||||||
|
|
||||||
|
CacheKeyMessages = Callable[[str, int], str]
|
||||||
|
CacheKeyMessagesPage = Callable[[str, int, Optional[int]], str]
|
||||||
|
SerializeMessageRow = Callable[[str, BotMessage], Dict[str, Any]]
|
||||||
|
ResolveLocalDayRange = Callable[[str, Optional[int]], tuple[datetime, datetime]]
|
||||||
|
InvalidateMessagesCache = Callable[[str], None]
|
||||||
|
GetChatPullPageSize = Callable[[], int]
|
||||||
|
|
||||||
|
|
||||||
|
class BotMessageService:
|
||||||
|
def __init__(
    self,
    *,
    cache: Any,
    cache_key_bot_messages: CacheKeyMessages,
    cache_key_bot_messages_page: CacheKeyMessagesPage,
    serialize_bot_message_row: SerializeMessageRow,
    resolve_local_day_range: ResolveLocalDayRange,
    invalidate_bot_messages_cache: InvalidateMessagesCache,
    get_chat_pull_page_size: GetChatPullPageSize,
) -> None:
    """Store the injected collaborators; all real work is delegated to them."""
    # Cache backend plus the key builders for the two message cache shapes.
    self._cache = cache
    self._cache_key_bot_messages = cache_key_bot_messages
    self._cache_key_bot_messages_page = cache_key_bot_messages_page
    self._invalidate_bot_messages_cache = invalidate_bot_messages_cache
    # Serialization / time helpers and paging configuration.
    self._serialize_bot_message_row = serialize_bot_message_row
    self._resolve_local_day_range = resolve_local_day_range
    self._get_chat_pull_page_size = get_chat_pull_page_size
|
||||||
|
|
||||||
|
def _require_bot(self, *, session: Session, bot_id: str) -> BotInstance:
    """Fetch the bot row for *bot_id* or fail with HTTP 404."""
    found = session.get(BotInstance, bot_id)
    if not found:
        raise HTTPException(status_code=404, detail="Bot not found")
    return found
|
||||||
|
|
||||||
|
def list_messages(self, *, session: Session, bot_id: str, limit: int = 200) -> list[Dict[str, Any]]:
    """Return up to *limit* most-recent messages, oldest first (cached 30s)."""
    self._require_bot(session=session, bot_id=bot_id)
    capped = max(1, min(int(limit), 500))
    key = self._cache_key_bot_messages(bot_id, capped)
    hit = self._cache.get_json(key)
    if isinstance(hit, list):
        return hit
    # Newest-first query, then reverse so the caller sees chronological order.
    recent = session.exec(
        select(BotMessage)
        .where(BotMessage.bot_id == bot_id)
        .order_by(BotMessage.created_at.desc(), BotMessage.id.desc())
        .limit(capped)
    ).all()
    serialized = [self._serialize_bot_message_row(bot_id, row) for row in reversed(recent)]
    self._cache.set_json(key, serialized, ttl=30)
    return serialized
|
||||||
|
|
||||||
|
def list_messages_page(
    self,
    *,
    session: Session,
    bot_id: str,
    limit: Optional[int] = None,
    before_id: Optional[int] = None,
) -> Dict[str, Any]:
    """Return one keyset-paginated page of messages older than *before_id*.

    Items are ordered oldest-first within the page; ``next_before_id`` is
    the cursor for fetching the next (older) page and ``has_more`` tells
    whether such a page exists. Cached for 30 seconds per (limit, cursor).
    """
    self._require_bot(session=session, bot_id=bot_id)
    requested = limit if limit is not None else self._get_chat_pull_page_size()
    page_size = max(1, min(int(requested), 500))
    cursor = int(before_id) if isinstance(before_id, int) and before_id > 0 else None
    cache_key = self._cache_key_bot_messages_page(bot_id, page_size, cursor)
    hit = self._cache.get_json(cache_key)
    if isinstance(hit, dict) and isinstance(hit.get("items"), list):
        return hit

    # Fetch one extra row beyond the page so we can detect another page
    # without a second COUNT query.
    query = (
        select(BotMessage)
        .where(BotMessage.bot_id == bot_id)
        .order_by(BotMessage.created_at.desc(), BotMessage.id.desc())
        .limit(page_size + 1)
    )
    if cursor is not None:
        query = query.where(BotMessage.id < cursor)

    newest_first = session.exec(query).all()
    has_more = len(newest_first) > page_size
    if has_more:
        newest_first = newest_first[:page_size]
    items = [
        self._serialize_bot_message_row(bot_id, row) for row in reversed(newest_first)
    ]
    payload = {
        "items": items,
        "has_more": bool(has_more),
        # Rows are newest-first here, so the last one is the oldest on the
        # page — exactly the cursor for the next page.
        "next_before_id": newest_first[-1].id if newest_first else None,
        "limit": page_size,
    }
    self._cache.set_json(cache_key, payload, ttl=30)
    return payload
|
||||||
|
|
||||||
|
def list_messages_by_date(
    self,
    *,
    session: Session,
    bot_id: str,
    date: str,
    tz_offset_minutes: Optional[int] = None,
    limit: Optional[int] = None,
) -> Dict[str, Any]:
    """Return a window of messages centered on the first message of a local day.

    Resolves *date* (plus optional timezone offset) to a UTC range, picks an
    anchor message — the first message inside the day, or the nearest message
    just outside it — and returns a few messages before the anchor plus the
    rest after it, chronologically ordered.

    Returns a dict with ``items``, ``anchor_id``, ``resolved_ts`` (epoch ms of
    the anchor), ``matched_exact_date``, ``has_more_before``/``has_more_after``.
    Raises HTTP 404 (via ``_require_bot``) when the bot does not exist.
    """
    self._require_bot(session=session, bot_id=bot_id)
    utc_start, utc_end = self._resolve_local_day_range(date, tz_offset_minutes)
    configured_limit = max(60, self._get_chat_pull_page_size())
    # Window size is clamped to [12, 240]; a small slice goes before the
    # anchor, the remainder (minus the anchor itself) after it.
    safe_limit = max(12, min(int(limit if limit is not None else configured_limit), 240))
    before_limit = max(3, min(18, safe_limit // 4))
    after_limit = max(0, safe_limit - before_limit - 1)

    # Preferred anchor: the earliest message created within the requested day.
    exact_anchor = session.exec(
        select(BotMessage)
        .where(
            BotMessage.bot_id == bot_id,
            BotMessage.created_at >= utc_start,
            BotMessage.created_at < utc_end,
        )
        .order_by(BotMessage.created_at.asc(), BotMessage.id.asc())
        .limit(1)
    ).first()

    anchor = exact_anchor
    matched_exact_date = exact_anchor is not None
    if anchor is None:
        # No message on that day: look at the closest neighbors on either side.
        next_row = session.exec(
            select(BotMessage)
            .where(BotMessage.bot_id == bot_id, BotMessage.created_at >= utc_end)
            .order_by(BotMessage.created_at.asc(), BotMessage.id.asc())
            .limit(1)
        ).first()
        prev_row = session.exec(
            select(BotMessage)
            .where(BotMessage.bot_id == bot_id, BotMessage.created_at < utc_start)
            .order_by(BotMessage.created_at.desc(), BotMessage.id.desc())
            .limit(1)
        ).first()

        if next_row and prev_row:
            # Both sides exist: anchor on whichever is closer to the day
            # boundary (ties go to the later message).
            gap_after = next_row.created_at - utc_end
            gap_before = utc_start - prev_row.created_at
            anchor = next_row if gap_after <= gap_before else prev_row
        else:
            anchor = next_row or prev_row

    if anchor is None or anchor.id is None:
        # Bot has no messages at all — empty window.
        return {
            "items": [],
            "anchor_id": None,
            "resolved_ts": None,
            "matched_exact_date": False,
            "has_more_before": False,
            "has_more_after": False,
        }

    # Context rows around the anchor, by id keyset on either side.
    before_rows = session.exec(
        select(BotMessage)
        .where(BotMessage.bot_id == bot_id, BotMessage.id < anchor.id)
        .order_by(BotMessage.created_at.desc(), BotMessage.id.desc())
        .limit(before_limit)
    ).all()
    after_rows = session.exec(
        select(BotMessage)
        .where(BotMessage.bot_id == bot_id, BotMessage.id > anchor.id)
        .order_by(BotMessage.created_at.asc(), BotMessage.id.asc())
        .limit(after_limit)
    ).all()

    # before_rows were fetched newest-first; reverse for chronological order.
    ordered = list(reversed(before_rows)) + [anchor] + after_rows
    first_row = ordered[0] if ordered else None
    last_row = ordered[-1] if ordered else None

    # Existence probes (id-only, LIMIT 1) for older/newer rows beyond the window.
    has_more_before = False
    if first_row is not None and first_row.id is not None:
        has_more_before = (
            session.exec(
                select(BotMessage.id)
                .where(BotMessage.bot_id == bot_id, BotMessage.id < first_row.id)
                .order_by(BotMessage.id.desc())
                .limit(1)
            ).first()
            is not None
        )

    has_more_after = False
    if last_row is not None and last_row.id is not None:
        has_more_after = (
            session.exec(
                select(BotMessage.id)
                .where(BotMessage.bot_id == bot_id, BotMessage.id > last_row.id)
                .order_by(BotMessage.id.asc())
                .limit(1)
            ).first()
            is not None
        )

    return {
        "items": [self._serialize_bot_message_row(bot_id, row) for row in ordered],
        "anchor_id": anchor.id,
        "resolved_ts": int(anchor.created_at.timestamp() * 1000),
        "matched_exact_date": matched_exact_date,
        "has_more_before": has_more_before,
        "has_more_after": has_more_after,
    }
|
||||||
|
|
||||||
|
def update_feedback(
    self,
    *,
    session: Session,
    bot_id: str,
    message_id: int,
    feedback: Optional[str],
) -> Dict[str, Any]:
    """Set or clear the up/down feedback on an assistant message.

    ``feedback`` of ``None``/``""``/``"none"``/``"null"`` clears any existing
    feedback; ``"up"``/``"down"`` records it with a UTC timestamp; anything
    else is rejected with HTTP 400. Invalidates the bot's message caches.
    """
    self._require_bot(session=session, bot_id=bot_id)
    message = session.get(BotMessage, message_id)
    if not message or message.bot_id != bot_id:
        raise HTTPException(status_code=404, detail="Message not found")
    if message.role != "assistant":
        raise HTTPException(status_code=400, detail="Only assistant messages support feedback")

    normalized = str(feedback or "").strip().lower()
    if normalized in {"up", "down"}:
        message.feedback = normalized
        message.feedback_at = datetime.utcnow()
    elif normalized in {"", "none", "null"}:
        # Empty-ish values mean "clear the feedback entirely".
        message.feedback = None
        message.feedback_at = None
    else:
        raise HTTPException(status_code=400, detail="feedback must be 'up' or 'down'")

    session.add(message)
    session.commit()
    self._invalidate_bot_messages_cache(bot_id)
    return {
        "status": "updated",
        "bot_id": bot_id,
        "message_id": message.id,
        "feedback": message.feedback,
        "feedback_at": message.feedback_at.isoformat() if message.feedback_at else None,
    }
|
||||||
|
|
@ -0,0 +1,180 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Callable, Dict, Optional
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from clients.edge.errors import log_edge_failure
|
||||||
|
from models.bot import BotInstance
|
||||||
|
|
||||||
|
# Injected-dependency signatures for BotQueryService (constructor keyword args).
CacheKeyBotsList = Callable[[Optional[int]], str]  # user id -> bots-list cache key
CacheKeyBotDetail = Callable[[str], str]  # bot id -> bot-detail cache key
RefreshBotRuntimeStatus = Callable[[Any, BotInstance], str]  # (app_state, bot) -> docker status
SerializeBot = Callable[[BotInstance], Dict[str, Any]]  # full detail payload
SerializeBotListItem = Callable[[BotInstance], Dict[str, Any]]  # lightweight list payload
ReadBotResources = Callable[[str], Dict[str, Any]]  # bot id -> configured resource limits
ResolveBotProviderTarget = Callable[[BotInstance], Any]  # bot -> provider/transport target
WorkspaceRoot = Callable[[str], str]  # bot id -> workspace directory path
CalcDirSizeBytes = Callable[[str], int]  # directory path -> total size in bytes
|
||||||
|
|
||||||
|
|
||||||
|
class BotQueryService:
    """Read-side queries for bot instances: listing, detail, and resource views.

    All collaborators are injected as callables (see the module-level type
    aliases), which keeps this service free of direct infrastructure imports.
    List/detail responses are cached for 30 seconds.
    """

    def __init__(
        self,
        *,
        cache: Any,
        cache_key_bots_list: CacheKeyBotsList,
        cache_key_bot_detail: CacheKeyBotDetail,
        refresh_bot_runtime_status: RefreshBotRuntimeStatus,
        serialize_bot: SerializeBot,
        serialize_bot_list_item: SerializeBotListItem,
        read_bot_resources: ReadBotResources,
        resolve_bot_provider_target: ResolveBotProviderTarget,
        get_runtime_provider: Callable[[Any, BotInstance], Any],
        workspace_root: WorkspaceRoot,
        calc_dir_size_bytes: CalcDirSizeBytes,
        logger: Any,
    ) -> None:
        """Store the injected collaborators; no I/O happens here."""
        self._cache = cache
        self._cache_key_bots_list = cache_key_bots_list
        self._cache_key_bot_detail = cache_key_bot_detail
        self._refresh_bot_runtime_status = refresh_bot_runtime_status
        self._serialize_bot = serialize_bot
        self._serialize_bot_list_item = serialize_bot_list_item
        self._read_bot_resources = read_bot_resources
        self._resolve_bot_provider_target = resolve_bot_provider_target
        self._get_runtime_provider = get_runtime_provider
        self._workspace_root = workspace_root
        self._calc_dir_size_bytes = calc_dir_size_bytes
        self._logger = logger

    def _require_bot(self, *, session: Session, bot_id: str) -> BotInstance:
        """Load the bot row for *bot_id*, or abort with HTTP 404."""
        bot = session.get(BotInstance, bot_id)
        if not bot:
            raise HTTPException(status_code=404, detail="Bot not found")
        return bot

    def list_bots(self, *, app_state: Any, session: Session, current_user_id: int) -> list[Dict[str, Any]]:
        """List all bots the current user may access, with fresh runtime status.

        Results are cached per user for 30 seconds. Runtime status for each
        bot is refreshed; rows whose status or state changed are persisted
        before serialization. Raises HTTP 401 when the user row is missing.
        """
        # Imported lazily — presumably to avoid a circular import; confirm
        # against module layout before hoisting.
        from models.sys_auth import SysUser
        from services.sys_auth_service import list_accessible_bots_for_user

        cached = self._cache.get_json(self._cache_key_bots_list(current_user_id))
        if isinstance(cached, list):
            return cached
        current_user = session.get(SysUser, current_user_id) if current_user_id > 0 else None
        if current_user is None:
            raise HTTPException(status_code=401, detail="Authentication required")
        bots = list_accessible_bots_for_user(session, current_user)
        dirty = False
        for bot in bots:
            previous_status = str(bot.docker_status or "").upper()
            previous_state = str(bot.current_state or "")
            # Refresh may mutate bot.docker_status / bot.current_state in place.
            actual_status = self._refresh_bot_runtime_status(app_state, bot)
            if previous_status != actual_status or previous_state != str(bot.current_state or ""):
                session.add(bot)
                dirty = True
        if dirty:
            # Single commit for all changed rows, then refresh to pick up
            # any DB-side changes before serializing.
            session.commit()
            for bot in bots:
                session.refresh(bot)
        rows = [self._serialize_bot_list_item(bot) for bot in bots]
        self._cache.set_json(self._cache_key_bots_list(current_user_id), rows, ttl=30)
        return rows

    def get_bot_detail(self, *, app_state: Any, session: Session, bot_id: str) -> Dict[str, Any]:
        """Return the detailed payload for one bot, refreshing runtime status.

        Cached for 30 seconds per bot; HTTP 404 when the bot does not exist.
        """
        cached = self._cache.get_json(self._cache_key_bot_detail(bot_id))
        if isinstance(cached, dict):
            return cached
        bot = self._require_bot(session=session, bot_id=bot_id)
        previous_status = str(bot.docker_status or "").upper()
        previous_state = str(bot.current_state or "")
        actual_status = self._refresh_bot_runtime_status(app_state, bot)
        if previous_status != actual_status or previous_state != str(bot.current_state or ""):
            # Persist only when the refresh actually changed something.
            session.add(bot)
            session.commit()
            session.refresh(bot)
        row = self._serialize_bot(bot)
        self._cache.set_json(self._cache_key_bot_detail(bot_id), row, ttl=30)
        return row

    def get_bot_resources(self, *, app_state: Any, session: Session, bot_id: str) -> Dict[str, Any]:
        """Return configured vs. actual resource usage for one bot.

        Fetches a live snapshot from the runtime provider (falling back to an
        empty snapshot on failure), reconciles the bot's stored docker status
        with it, and computes workspace usage against the configured storage
        limit. Not cached.
        """
        bot = self._require_bot(session=session, bot_id=bot_id)
        configured = self._read_bot_resources(bot_id)
        try:
            runtime = self._get_runtime_provider(app_state, bot).get_resource_snapshot(bot_id=bot_id)
        except Exception as exc:
            # Best-effort: log and degrade to an empty snapshot with the last
            # known docker status rather than failing the whole endpoint.
            log_edge_failure(
                self._logger,
                key=f"bot-resources:{bot_id}",
                exc=exc,
                message=f"Failed to refresh bot resources for bot_id={bot_id}",
            )
            runtime = {"usage": {}, "limits": {}, "docker_status": str(bot.docker_status or "STOPPED").upper()}
        runtime_status = str(runtime.get("docker_status") or "").upper()
        previous_status = str(bot.docker_status or "").upper()
        previous_state = str(bot.current_state or "")
        if runtime_status:
            bot.docker_status = runtime_status
            # A non-running container cannot be busy; park the state at IDLE
            # unless it is already flagged as an error.
            if runtime_status != "RUNNING" and str(bot.current_state or "").upper() not in {"ERROR"}:
                bot.current_state = "IDLE"
        if previous_status != str(bot.docker_status or "").upper() or previous_state != str(bot.current_state or ""):
            session.add(bot)
            session.commit()
            session.refresh(bot)
        target = self._resolve_bot_provider_target(bot)
        usage_payload = dict(runtime.get("usage") or {})
        # Prefer usage reported by the runtime; fall back to walking the local
        # workspace directory when the runtime reported nothing.
        workspace_bytes = int(usage_payload.get("container_rw_bytes") or usage_payload.get("workspace_used_bytes") or 0)
        workspace_root = ""
        if workspace_bytes <= 0:
            workspace_root = self._workspace_root(bot_id)
            workspace_bytes = self._calc_dir_size_bytes(workspace_root)
        elif target.transport_kind != "edge":
            # Local bots still expose their workspace path even when the
            # runtime already provided a usage figure.
            workspace_root = self._workspace_root(bot_id)
        configured_storage_bytes = int(configured.get("storage_gb", 0) or 0) * 1024 * 1024 * 1024
        workspace_percent = 0.0
        if configured_storage_bytes > 0:
            workspace_percent = (workspace_bytes / configured_storage_bytes) * 100.0

        limits = runtime.get("limits") or {}
        cpu_limited = (limits.get("cpu_cores") or 0) > 0
        memory_limited = (limits.get("memory_bytes") or 0) > 0
        storage_limited = bool(limits.get("storage_bytes")) or bool(limits.get("storage_opt_raw"))

        return {
            "bot_id": bot_id,
            "docker_status": runtime.get("docker_status") or bot.docker_status,
            "configured": configured,
            "runtime": runtime,
            "workspace": {
                "path": workspace_root or None,
                "usage_bytes": workspace_bytes,
                "configured_limit_bytes": configured_storage_bytes if configured_storage_bytes > 0 else None,
                "usage_percent": max(0.0, workspace_percent),
            },
            "enforcement": {
                "cpu_limited": cpu_limited,
                "memory_limited": memory_limited,
                "storage_limited": storage_limited,
            },
            "note": (
                "Resource value 0 means unlimited. CPU/Memory limits come from Docker HostConfig and are enforced by cgroup. "
                "Storage limit depends on Docker storage driver support."
            ),
            "collected_at": datetime.utcnow().isoformat() + "Z",
        }

    def get_tools_config(self, *, session: Session, bot_id: str) -> Dict[str, Any]:
        """Return a stub tools-config payload; dashboard-managed tools are disabled."""
        self._require_bot(session=session, bot_id=bot_id)
        return {
            "bot_id": bot_id,
            "tools_config": {},
            "managed_by_dashboard": False,
            "hint": "Tools config is disabled in dashboard. Configure tool-related env vars manually.",
        }

    def update_tools_config(self, *, session: Session, bot_id: str, payload: Any) -> Dict[str, Any]:
        """Reject tools-config updates with HTTP 400 (feature removed from dashboard)."""
        self._require_bot(session=session, bot_id=bot_id)
        raise HTTPException(
            status_code=400,
            detail="Tools config is no longer managed by dashboard. Please set required env vars manually.",
        )
|
||||||
|
|
@ -1,214 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import time
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
from zoneinfo import ZoneInfo
|
|
||||||
|
|
||||||
from fastapi import WebSocket
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.docker_instance import docker_manager
|
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from services.bot_lifecycle_service import start_bot_instance, stop_bot_instance
|
|
||||||
from services.bot_service import list_bot_channels_from_config
|
|
||||||
from services.bot_storage_service import (
|
|
||||||
read_bot_config_data,
|
|
||||||
read_bot_cron_jobs_store,
|
|
||||||
write_bot_config_data,
|
|
||||||
write_bot_cron_jobs_store,
|
|
||||||
)
|
|
||||||
from services.platform_auth_service import resolve_bot_websocket_auth, resolve_panel_websocket_auth
|
|
||||||
|
|
||||||
|
|
||||||
def _now_ms() -> int:
|
|
||||||
return int(time.time() * 1000)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_bot_or_raise(session: Session, bot_id: str) -> BotInstance:
    """Fetch the BotInstance row for *bot_id*; raise LookupError when absent."""
    instance = session.get(BotInstance, bot_id)
    if not instance:
        raise LookupError("Bot not found")
    return instance
|
|
||||||
|
|
||||||
|
|
||||||
def _weixin_state_file_path(bot_id: str) -> Path:
    """Location of the persisted WeChat account/login state for *bot_id*."""
    bot_workspace = Path(BOTS_WORKSPACE_ROOT) / bot_id
    return bot_workspace.joinpath(".nanobot", "weixin", "account.json")
|
|
||||||
|
|
||||||
|
|
||||||
def _compute_cron_next_run(schedule: Dict[str, Any], now_ms: Optional[int] = None) -> Optional[int]:
|
|
||||||
current_ms = int(now_ms or _now_ms())
|
|
||||||
kind = str(schedule.get("kind") or "").strip().lower()
|
|
||||||
|
|
||||||
if kind == "at":
|
|
||||||
at_ms = int(schedule.get("atMs") or 0)
|
|
||||||
return at_ms if at_ms > current_ms else None
|
|
||||||
|
|
||||||
if kind == "every":
|
|
||||||
every_ms = int(schedule.get("everyMs") or 0)
|
|
||||||
return current_ms + every_ms if every_ms > 0 else None
|
|
||||||
|
|
||||||
if kind == "cron":
|
|
||||||
expr = str(schedule.get("expr") or "").strip()
|
|
||||||
if not expr:
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
from croniter import croniter
|
|
||||||
|
|
||||||
tz_name = str(schedule.get("tz") or "").strip()
|
|
||||||
tz = ZoneInfo(tz_name) if tz_name else datetime.now().astimezone().tzinfo
|
|
||||||
base_dt = datetime.fromtimestamp(current_ms / 1000, tz=tz)
|
|
||||||
next_dt = croniter(expr, base_dt).get_next(datetime)
|
|
||||||
return int(next_dt.timestamp() * 1000)
|
|
||||||
except Exception:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_logs(
    session: Session,
    *,
    bot_id: str,
    tail: Optional[int] = 300,
    offset: int = 0,
    limit: Optional[int] = None,
    reverse: bool = False,
) -> Dict[str, Any]:
    """Return container logs for a bot: paged when *limit* is given, tailed otherwise.

    Raises LookupError (via ``_get_bot_or_raise``) when the bot does not exist.
    """
    _get_bot_or_raise(session, bot_id)
    if limit is None:
        # Tail mode: last N lines, defaulting to 300 when tail is falsy.
        line_count = max(1, int(tail or 300))
        recent = docker_manager.get_recent_logs(bot_id, tail=line_count)
        return {"bot_id": bot_id, "logs": recent}
    # Page mode: clamp offset/limit to sane bounds before delegating.
    page = docker_manager.get_logs_page(
        bot_id,
        offset=max(0, int(offset)),
        limit=max(1, int(limit)),
        reverse=bool(reverse),
    )
    return {"bot_id": bot_id, **page}
|
|
||||||
|
|
||||||
|
|
||||||
async def relogin_weixin(session: Session, *, bot_id: str) -> Dict[str, Any]:
    """Force a fresh WeChat login for a bot by wiping its persisted login state.

    Steps: verify the bot has a weixin channel configured, delete the stored
    account state file, drop any cached token from the bot config, then
    restart the container if it is currently running so the channel logs in
    again from scratch.

    Raises:
        LookupError: bot does not exist.
        ValueError: no weixin channel is configured for the bot.
        RuntimeError: the state file exists but could not be removed.
    """
    bot = _get_bot_or_raise(session, bot_id)
    weixin_channel = next(
        (
            row
            for row in list_bot_channels_from_config(bot)
            if str(row.get("channel_type") or "").strip().lower() == "weixin"
        ),
        None,
    )
    if not weixin_channel:
        raise ValueError("Weixin channel not found")

    # Remove the persisted account state so the channel must re-authenticate.
    state_file = _weixin_state_file_path(bot_id)
    removed = False
    try:
        if state_file.is_file():
            state_file.unlink()
            removed = True
    except Exception as exc:
        raise RuntimeError(f"Failed to remove weixin state: {exc}") from exc

    # Also drop any cached token from the bot's config; only rewrite the
    # config file when a token was actually present.
    config_data = read_bot_config_data(bot_id)
    channels_cfg = config_data.get("channels") if isinstance(config_data, dict) else {}
    weixin_cfg = channels_cfg.get("weixin") if isinstance(channels_cfg, dict) else None
    if isinstance(weixin_cfg, dict) and "token" in weixin_cfg:
        weixin_cfg.pop("token", None)
        write_bot_config_data(bot_id, config_data)

    # Restart a running bot so the cleaned state takes effect immediately.
    restarted = False
    if str(bot.docker_status or "").upper() == "RUNNING":
        stop_bot_instance(session, bot_id)
        await start_bot_instance(session, bot_id)
        restarted = True

    return {
        "status": "relogin_started",
        "bot_id": bot_id,
        "removed_state": removed,
        "restarted": restarted,
    }
|
|
||||||
|
|
||||||
|
|
||||||
def list_cron_jobs(session: Session, *, bot_id: str, include_disabled: bool = True) -> Dict[str, Any]:
    """List a bot's cron jobs, soonest scheduled run first (unscheduled last).

    Non-dict store entries are skipped; disabled jobs are skipped unless
    *include_disabled* is true. Raises LookupError when the bot is missing.
    """
    _get_bot_or_raise(session, bot_id)
    store = read_bot_cron_jobs_store(bot_id)

    def _keep(entry: Any) -> bool:
        if not isinstance(entry, dict):
            return False
        return include_disabled or bool(entry.get("enabled", True))

    def _next_run(entry: Dict[str, Any]) -> int:
        # Jobs with no scheduled next run sort after every scheduled job.
        return int(((entry.get("state") or {}).get("nextRunAtMs")) or 2**62)

    jobs = sorted((entry for entry in store.get("jobs", []) if _keep(entry)), key=_next_run)
    return {"bot_id": bot_id, "version": int(store.get("version", 1) or 1), "jobs": jobs}
|
|
||||||
|
|
||||||
|
|
||||||
def stop_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Disable a cron job, clear its scheduled next run, and persist the store.

    Raises LookupError when the bot or the job does not exist.
    """
    _get_bot_or_raise(session, bot_id)
    store = read_bot_cron_jobs_store(bot_id)
    jobs = store.get("jobs", [])
    if not isinstance(jobs, list):
        jobs = []
    target = None
    for entry in jobs:
        if isinstance(entry, dict) and str(entry.get("id")) == job_id:
            target = entry
            break
    if not target:
        raise LookupError("Cron job not found")
    target["enabled"] = False
    target["updatedAtMs"] = _now_ms()
    job_state = target.get("state")
    if not isinstance(job_state, dict):
        job_state = {}
    target["state"] = job_state
    # A stopped job must never fire; wipe any pending schedule.
    job_state["nextRunAtMs"] = None
    write_bot_cron_jobs_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
    return {"status": "stopped", "job_id": job_id}
|
|
||||||
|
|
||||||
|
|
||||||
def start_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Enable a cron job, recompute its next run time, and persist the store.

    Raises LookupError when the bot or the job does not exist.
    """
    _get_bot_or_raise(session, bot_id)
    store = read_bot_cron_jobs_store(bot_id)
    jobs = store.get("jobs", [])
    if not isinstance(jobs, list):
        jobs = []
    target = None
    for entry in jobs:
        if isinstance(entry, dict) and str(entry.get("id")) == job_id:
            target = entry
            break
    if not target:
        raise LookupError("Cron job not found")
    target["enabled"] = True
    target["updatedAtMs"] = _now_ms()
    job_state = target.get("state")
    if not isinstance(job_state, dict):
        job_state = {}
    target["state"] = job_state
    # Re-derive the next fire time from the job's schedule definition.
    schedule = target.get("schedule")
    job_state["nextRunAtMs"] = _compute_cron_next_run(schedule if isinstance(schedule, dict) else {})
    write_bot_cron_jobs_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
    return {"status": "started", "job_id": job_id}
|
|
||||||
|
|
||||||
|
|
||||||
def delete_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, Any]:
    """Remove a cron job from the bot's store and persist the result.

    Raises LookupError when the bot or the job does not exist.
    """
    _get_bot_or_raise(session, bot_id)
    store = read_bot_cron_jobs_store(bot_id)
    jobs = store.get("jobs", [])
    if not isinstance(jobs, list):
        jobs = []
    remaining = []
    for entry in jobs:
        if isinstance(entry, dict) and str(entry.get("id")) == job_id:
            continue
        remaining.append(entry)
    if len(remaining) == len(jobs):
        # Nothing was filtered out, so the job id was never present.
        raise LookupError("Cron job not found")
    write_bot_cron_jobs_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": remaining})
    return {"status": "deleted", "job_id": job_id}
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_monitor_websocket_access(session: Session, websocket: WebSocket, bot_id: str) -> BotInstance:
    """Authorize a monitor websocket: panel credentials first, bot credentials as fallback.

    Raises PermissionError when neither authentication succeeds, LookupError
    when the bot does not exist.
    """
    auth = resolve_panel_websocket_auth(session, websocket)
    if not auth.authenticated:
        auth = resolve_bot_websocket_auth(session, websocket, bot_id)
        if not auth.authenticated:
            raise PermissionError("Bot or panel authentication required")
    return _get_bot_or_raise(session, bot_id)
|
|
||||||
|
|
@ -0,0 +1,288 @@
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
from typing import Any, Callable, Dict
|
||||||
|
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from clients.edge.errors import log_edge_failure
|
||||||
|
from models.bot import BotInstance
|
||||||
|
from providers.target import provider_target_to_dict
|
||||||
|
|
||||||
|
|
||||||
|
class BotRuntimeSnapshotService:
|
||||||
|
# Log line marking the bot runtime as ready. NOTE(review): presumably matched
# against container log output to detect startup completion — the consuming
# code is not visible in this chunk; confirm before relying on it.
_AGENT_LOOP_READY_MARKER = "Agent loop started"
|
||||||
|
|
||||||
|
def __init__(
    self,
    *,
    engine: Any,
    logger: Any,
    docker_manager: Any,
    default_soul_md: str,
    default_agents_md: str,
    default_user_md: str,
    default_tools_md: str,
    default_identity_md: str,
    workspace_root: Callable[[str], str],
    resolve_edge_state_context: Callable[[str], Any],
    read_bot_config: Callable[[str], Dict[str, Any]],
    resolve_bot_env_params: Callable[[str], Dict[str, str]],
    resolve_bot_provider_target_for_instance: Callable[[BotInstance], Any],
    read_global_delivery_flags: Callable[[Any], tuple[bool, bool]],
    safe_float: Callable[[Any, float], float],
    safe_int: Callable[[Any, int], int],
    get_default_system_timezone: Callable[[], str],
    read_bot_resources: Callable[[str, Any], Dict[str, Any]],
    node_display_name: Callable[[str], str],
    get_runtime_provider: Callable[[Any, BotInstance], Any],
    invalidate_bot_detail_cache: Callable[[str], None],
    record_activity_event: Callable[..., None],
) -> None:
    """Store injected collaborators and default markdown templates.

    Pure dependency injection — no I/O is performed here. The ``default_*_md``
    strings are the fallbacks used when a bot's workspace markdown files are
    missing or unreadable; the callables abstract away config, workspace,
    provider, and cache access.
    """
    self._engine = engine
    self._logger = logger
    self._docker_manager = docker_manager
    self._default_soul_md = default_soul_md
    self._default_agents_md = default_agents_md
    self._default_user_md = default_user_md
    self._default_tools_md = default_tools_md
    self._default_identity_md = default_identity_md
    self._workspace_root = workspace_root
    self._resolve_edge_state_context = resolve_edge_state_context
    self._read_bot_config = read_bot_config
    self._resolve_bot_env_params = resolve_bot_env_params
    self._resolve_bot_provider_target_for_instance = resolve_bot_provider_target_for_instance
    self._read_global_delivery_flags = read_global_delivery_flags
    self._safe_float = safe_float
    self._safe_int = safe_int
    self._get_default_system_timezone = get_default_system_timezone
    self._read_bot_resources = read_bot_resources
    self._node_display_name = node_display_name
    self._get_runtime_provider = get_runtime_provider
    self._invalidate_bot_detail_cache = invalidate_bot_detail_cache
    self._record_activity_event = record_activity_event
|
||||||
|
|
||||||
|
def read_workspace_md(self, bot_id: str, filename: str, default_value: str) -> str:
    """Read a workspace markdown file for *bot_id*, falling back to *default_value*.

    When the bot's state lives on an edge node, the file is fetched through the
    edge client; otherwise it is read from the local workspace directory. Any
    failure — missing file, transport error, or a non-markdown payload — yields
    *default_value* instead of raising.
    """
    edge_context = self._resolve_edge_state_context(bot_id)
    if edge_context is not None:
        client, workspace_root, node_id = edge_context
        try:
            payload = client.read_file(
                bot_id=bot_id,
                path=filename,
                max_bytes=1_000_000,
                workspace_root=workspace_root,
            )
            if bool(payload.get("is_markdown")):
                content = payload.get("content")
                if isinstance(content, str):
                    return content.strip()
        except Exception as exc:
            # Fix: include the actual filename in the failure key/message
            # (previously a literal "(unknown)" placeholder) so failures for
            # different files are logged and deduplicated separately.
            log_edge_failure(
                self._logger,
                key=f"workspace-md-read:{node_id}:{bot_id}:{filename}",
                exc=exc,
                message=f"Failed to read edge workspace markdown for bot_id={bot_id}, file={filename}",
            )
        return default_value
    path = os.path.join(self._workspace_root(bot_id), filename)
    if not os.path.isfile(path):
        return default_value
    try:
        with open(path, "r", encoding="utf-8") as file:
            return file.read().strip()
    except Exception:
        # Unreadable local file (permissions, encoding) — use the fallback.
        return default_value
|
||||||
|
|
||||||
|
def read_bot_runtime_snapshot(self, bot: BotInstance) -> Dict[str, Any]:
    """Assemble a flat runtime-configuration snapshot for *bot*.

    Merges the bot's stored config (provider, agent defaults, channels), its
    resolved env params, resource limits, provider target, and the workspace
    markdown files (with baked-in defaults) into one dict used for
    serialization. Read-only: nothing is persisted.
    """
    config_data = self._read_bot_config(bot.id)
    env_params = self._resolve_bot_env_params(bot.id)
    target = self._resolve_bot_provider_target_for_instance(bot)

    # Only the first configured provider entry is considered.
    provider_name = ""
    provider_cfg: Dict[str, Any] = {}
    providers_cfg = config_data.get("providers")
    if isinstance(providers_cfg, dict):
        for p_name, p_cfg in providers_cfg.items():
            provider_name = str(p_name or "").strip()
            if isinstance(p_cfg, dict):
                provider_cfg = p_cfg
            break

    agents_defaults: Dict[str, Any] = {}
    agents_cfg = config_data.get("agents")
    if isinstance(agents_cfg, dict):
        defaults = agents_cfg.get("defaults")
        if isinstance(defaults, dict):
            agents_defaults = defaults

    channels_cfg = config_data.get("channels")
    send_progress, send_tool_hints = self._read_global_delivery_flags(channels_cfg)

    llm_provider = provider_name or "dashscope"
    llm_model = str(agents_defaults.get("model") or "")
    api_key = str(provider_cfg.get("apiKey") or "").strip()
    api_base = str(provider_cfg.get("apiBase") or "").strip()
    api_base_lower = api_base.lower()
    # Special case: an "openai"-compatible provider pointed at the iFlytek
    # Spark endpoint is really the xunfei provider.
    if llm_provider == "openai" and ("spark-api-open.xf-yun.com" in api_base_lower or "xf-yun.com" in api_base_lower):
        llm_provider = "xunfei"

    soul_md = self.read_workspace_md(bot.id, "SOUL.md", self._default_soul_md)
    resources = self._read_bot_resources(bot.id, config_data=config_data)
    return {
        **provider_target_to_dict(target),
        "llm_provider": llm_provider,
        "llm_model": llm_model,
        "api_key": api_key,
        "api_base": api_base,
        "temperature": self._safe_float(agents_defaults.get("temperature"), 0.2),
        "top_p": self._safe_float(agents_defaults.get("topP"), 1.0),
        "max_tokens": self._safe_int(agents_defaults.get("maxTokens"), 8192),
        "cpu_cores": resources["cpu_cores"],
        "memory_mb": resources["memory_mb"],
        "storage_gb": resources["storage_gb"],
        "system_timezone": env_params.get("TZ") or self._get_default_system_timezone(),
        "send_progress": send_progress,
        "send_tool_hints": send_tool_hints,
        "soul_md": soul_md,
        "agents_md": self.read_workspace_md(bot.id, "AGENTS.md", self._default_agents_md),
        "user_md": self.read_workspace_md(bot.id, "USER.md", self._default_user_md),
        "tools_md": self.read_workspace_md(bot.id, "TOOLS.md", self._default_tools_md),
        "identity_md": self.read_workspace_md(bot.id, "IDENTITY.md", self._default_identity_md),
        # system_prompt mirrors SOUL.md for backward compatibility with
        # consumers that still read the old key.
        "system_prompt": soul_md,
    }
|
||||||
|
|
||||||
|
def serialize_bot(self, bot: BotInstance) -> Dict[str, Any]:
|
||||||
|
runtime = self.read_bot_runtime_snapshot(bot)
|
||||||
|
target = self._resolve_bot_provider_target_for_instance(bot)
|
||||||
|
return {
|
||||||
|
"id": bot.id,
|
||||||
|
"name": bot.name,
|
||||||
|
"enabled": bool(getattr(bot, "enabled", True)),
|
||||||
|
"avatar_model": "base",
|
||||||
|
"avatar_skin": "blue_suit",
|
||||||
|
"image_tag": bot.image_tag,
|
||||||
|
"llm_provider": runtime.get("llm_provider") or "",
|
||||||
|
"llm_model": runtime.get("llm_model") or "",
|
||||||
|
"system_prompt": runtime.get("system_prompt") or "",
|
||||||
|
"api_base": runtime.get("api_base") or "",
|
||||||
|
"temperature": self._safe_float(runtime.get("temperature"), 0.2),
|
||||||
|
"top_p": self._safe_float(runtime.get("top_p"), 1.0),
|
||||||
|
"max_tokens": self._safe_int(runtime.get("max_tokens"), 8192),
|
||||||
|
"cpu_cores": self._safe_float(runtime.get("cpu_cores"), 1.0),
|
||||||
|
"memory_mb": self._safe_int(runtime.get("memory_mb"), 1024),
|
||||||
|
"storage_gb": self._safe_int(runtime.get("storage_gb"), 10),
|
||||||
|
"system_timezone": str(runtime.get("system_timezone") or self._get_default_system_timezone()),
|
||||||
|
"send_progress": bool(runtime.get("send_progress")),
|
||||||
|
"send_tool_hints": bool(runtime.get("send_tool_hints")),
|
||||||
|
"node_id": target.node_id,
|
||||||
|
"node_display_name": self._node_display_name(target.node_id),
|
||||||
|
"transport_kind": target.transport_kind,
|
||||||
|
"runtime_kind": target.runtime_kind,
|
||||||
|
"core_adapter": target.core_adapter,
|
||||||
|
"soul_md": runtime.get("soul_md") or "",
|
||||||
|
"agents_md": runtime.get("agents_md") or "",
|
||||||
|
"user_md": runtime.get("user_md") or "",
|
||||||
|
"tools_md": runtime.get("tools_md") or "",
|
||||||
|
"identity_md": runtime.get("identity_md") or "",
|
||||||
|
"workspace_dir": bot.workspace_dir,
|
||||||
|
"docker_status": bot.docker_status,
|
||||||
|
"current_state": bot.current_state,
|
||||||
|
"last_action": bot.last_action,
|
||||||
|
"created_at": bot.created_at,
|
||||||
|
"updated_at": bot.updated_at,
|
||||||
|
}
|
||||||
|
|
||||||
|
def serialize_bot_list_item(self, bot: BotInstance) -> Dict[str, Any]:
    """Build the compact list-view payload for a bot.

    A subset of serialize_bot: identity, provider/model names, placement
    target, and current status — but no prompts, documents, or credentials.
    """
    snapshot = self.read_bot_runtime_snapshot(bot)
    target = self._resolve_bot_provider_target_for_instance(bot)

    row: Dict[str, Any] = {
        "id": bot.id,
        "name": bot.name,
        "enabled": bool(getattr(bot, "enabled", True)),
        "image_tag": bot.image_tag,
    }
    row["llm_provider"] = snapshot.get("llm_provider") or ""
    row["llm_model"] = snapshot.get("llm_model") or ""
    row["node_id"] = target.node_id
    row["node_display_name"] = self._node_display_name(target.node_id)
    row["transport_kind"] = target.transport_kind
    row["runtime_kind"] = target.runtime_kind
    row["core_adapter"] = target.core_adapter
    row["docker_status"] = bot.docker_status
    row["current_state"] = bot.current_state
    row["last_action"] = bot.last_action
    row["updated_at"] = bot.updated_at
    return row
def refresh_bot_runtime_status(self, app_state: Any, bot: BotInstance) -> str:
    """Query the runtime provider for the bot's live status and sync it onto
    the ORM row.

    Returns the uppercased status string. On any provider failure the
    previously stored status is returned unchanged (best-effort refresh).
    """
    fallback = str(bot.docker_status or "STOPPED").upper()
    try:
        provider = self._get_runtime_provider(app_state, bot)
        raw = provider.get_runtime_status(bot_id=str(bot.id or ""))
    except Exception as exc:
        # Keep the stale status instead of propagating runtime-provider errors;
        # log_edge_failure rate-limits by key so repeated failures don't spam.
        log_edge_failure(
            self._logger,
            key=f"bot-runtime-status:{bot.id}",
            exc=exc,
            message=f"Failed to refresh runtime status for bot_id={bot.id}",
        )
        return fallback

    status = str(raw or "STOPPED").upper()
    bot.docker_status = status
    if status != "RUNNING" and str(bot.current_state or "").upper() != "ERROR":
        # A non-running container cannot be mid-task; reset unless it errored.
        bot.current_state = "IDLE"
    return status
async def wait_for_agent_loop_ready(
    self,
    bot_id: str,
    timeout_seconds: float = 12.0,
    poll_interval_seconds: float = 0.5,
) -> bool:
    """Poll the container's recent logs until the agent-loop ready marker
    appears or the timeout elapses.

    Returns True when the (case-insensitive) marker was seen, False on timeout.
    The timeout is floored at 1s and the poll interval at 0.1s.
    """
    needle = self._AGENT_LOOP_READY_MARKER.lower()
    interval = max(0.1, poll_interval_seconds)
    deadline = time.monotonic() + max(1.0, timeout_seconds)

    while time.monotonic() < deadline:
        recent = self._docker_manager.get_recent_logs(bot_id, tail=200)
        if any(needle in str(entry or "").lower() for entry in recent):
            return True
        await asyncio.sleep(interval)
    return False
async def record_agent_loop_ready_warning(
    self,
    bot_id: str,
    timeout_seconds: float = 12.0,
    poll_interval_seconds: float = 0.5,
) -> None:
    """Wait for the agent-loop ready marker; if the container keeps running
    without ever emitting it, persist a 'bot_warning' activity event so the
    dashboard surfaces the likely misconfiguration.

    Never raises — all failures are logged and swallowed, since this runs as
    a background task after bot start.
    """
    try:
        became_ready = await self.wait_for_agent_loop_ready(
            bot_id,
            timeout_seconds=timeout_seconds,
            poll_interval_seconds=poll_interval_seconds,
        )
        if became_ready:
            return
        # A container that already stopped is reported elsewhere; only warn
        # about a still-running container that never became ready.
        if self._docker_manager.get_bot_status(bot_id) != "RUNNING":
            return

        detail = (
            "Bot container started, but ready marker was not found in logs within "
            f"{int(timeout_seconds)}s. Check bot logs or MCP config if the bot stays unavailable."
        )
        self._logger.warning("bot_id=%s agent loop ready marker not found within %ss", bot_id, timeout_seconds)

        with Session(self._engine) as background_session:
            if not background_session.get(BotInstance, bot_id):
                return  # bot was deleted while we were waiting
            self._record_activity_event(
                background_session,
                bot_id,
                "bot_warning",
                channel="system",
                detail=detail,
                metadata={
                    "kind": "agent_loop_ready_timeout",
                    "marker": self._AGENT_LOOP_READY_MARKER,
                    "timeout_seconds": timeout_seconds,
                },
            )
            background_session.commit()
        self._invalidate_bot_detail_cache(bot_id)
    except Exception:
        self._logger.exception("Failed to record agent loop readiness warning for bot_id=%s", bot_id)
@ -1,549 +0,0 @@
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
from zoneinfo import ZoneInfo
|
|
||||||
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from providers.bot_workspace_provider import BotWorkspaceProvider
|
|
||||||
from schemas.bot import ChannelConfigRequest
|
|
||||||
from services.bot_storage_service import (
|
|
||||||
_normalize_env_params,
|
|
||||||
_read_bot_config,
|
|
||||||
_read_bot_resources,
|
|
||||||
_read_env_store,
|
|
||||||
_safe_float,
|
|
||||||
_safe_int,
|
|
||||||
_workspace_root,
|
|
||||||
normalize_bot_resource_limits,
|
|
||||||
write_bot_resource_limits,
|
|
||||||
)
|
|
||||||
|
|
||||||
workspace_provider = BotWorkspaceProvider(host_data_root=BOTS_WORKSPACE_ROOT)
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_bot_system_timezone(raw: Any) -> str:
    """Validate and canonicalize an IANA timezone name.

    Returns the stripped zone name. Raises ValueError when the value is
    empty or not a zone known to the system tz database.
    """
    name = str(raw or "").strip()
    if not name:
        raise ValueError("System timezone is required")
    try:
        ZoneInfo(name)  # instantiation is the validity check
    except Exception as exc:
        raise ValueError("Invalid system timezone. Use an IANA timezone such as Asia/Shanghai.") from exc
    return name
def resolve_bot_runtime_env_params(bot_id: str, raw: Optional[Dict[str, str]] = None) -> Dict[str, str]:
    """Return the normalized env map for a bot.

    When *raw* is not a dict, the persisted env store is loaded instead.
    TZ is mandatory: a missing key raises RuntimeError and the value is
    canonicalized/validated via normalize_bot_system_timezone.
    """
    source = raw if isinstance(raw, dict) else _read_env_store(bot_id)
    env_params = _normalize_env_params(source)
    if "TZ" not in env_params:
        raise RuntimeError(f"Missing required TZ in bot env settings: {bot_id}")
    env_params["TZ"] = normalize_bot_system_timezone(env_params.get("TZ"))
    return env_params
def normalize_channel_extra(raw: Any) -> Dict[str, Any]:
    """Coerce an extra-config payload to a dict; any non-dict becomes {}."""
    return raw if isinstance(raw, dict) else {}
def _normalize_allow_from(raw: Any) -> List[str]:
|
|
||||||
rows: List[str] = []
|
|
||||||
if isinstance(raw, list):
|
|
||||||
for item in raw:
|
|
||||||
text = str(item or "").strip()
|
|
||||||
if text and text not in rows:
|
|
||||||
rows.append(text)
|
|
||||||
return rows or ["*"]
|
|
||||||
|
|
||||||
|
|
||||||
def read_global_delivery_flags(channels_cfg: Any) -> tuple[bool, bool]:
    """Extract the global (sendProgress, sendToolHints) flags from a channels
    config mapping. Non-dict input yields (False, False)."""
    if isinstance(channels_cfg, dict):
        progress = bool(channels_cfg.get("sendProgress"))
        tool_hints = bool(channels_cfg.get("sendToolHints"))
        return progress, tool_hints
    return False, False
def channel_config_to_api(bot_id: str, channel_type: str, cfg: Dict[str, Any]) -> Dict[str, Any]:
    """Convert one channel section of the workspace config into the API row shape.

    Each supported channel type maps its own credential fields onto the generic
    (external_app_id, app_secret, extra_config) triple; unknown channel types
    pass through with credential-ish keys extracted heuristically and the rest
    forwarded verbatim in extra_config.

    Fix: numeric fields now go through _safe_int so a corrupted stored value
    (e.g. port: "abc") degrades to the default instead of raising ValueError
    and crashing channel listing.
    """
    ctype = str(channel_type or "").strip().lower()
    enabled = bool(cfg.get("enabled", True))
    port = max(1, min(_safe_int(cfg.get("port", 8080) or 8080, 8080), 65535))
    extra: Dict[str, Any] = {}
    external_app_id = ""
    app_secret = ""

    if ctype == "feishu":
        external_app_id = str(cfg.get("appId") or "")
        app_secret = str(cfg.get("appSecret") or "")
        extra = {
            "encryptKey": cfg.get("encryptKey", ""),
            "verificationToken": cfg.get("verificationToken", ""),
            "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
        }
    elif ctype == "dingtalk":
        external_app_id = str(cfg.get("clientId") or "")
        app_secret = str(cfg.get("clientSecret") or "")
        extra = {"allowFrom": _normalize_allow_from(cfg.get("allowFrom", []))}
    elif ctype == "telegram":
        app_secret = str(cfg.get("token") or "")
        extra = {
            "proxy": cfg.get("proxy", ""),
            "replyToMessage": bool(cfg.get("replyToMessage", False)),
            "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
        }
    elif ctype == "slack":
        external_app_id = str(cfg.get("botToken") or "")
        app_secret = str(cfg.get("appToken") or "")
        extra = {
            "mode": cfg.get("mode", "socket"),
            "replyInThread": bool(cfg.get("replyInThread", True)),
            "groupPolicy": cfg.get("groupPolicy", "mention"),
            "groupAllowFrom": cfg.get("groupAllowFrom", []),
            "reactEmoji": cfg.get("reactEmoji", "eyes"),
        }
    elif ctype == "qq":
        external_app_id = str(cfg.get("appId") or "")
        app_secret = str(cfg.get("secret") or "")
        extra = {"allowFrom": _normalize_allow_from(cfg.get("allowFrom", []))}
    elif ctype == "wecom":
        external_app_id = str(cfg.get("botId") or "")
        app_secret = str(cfg.get("secret") or "")
        extra = {
            "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
            "welcomeMessage": str(cfg.get("welcomeMessage") or ""),
        }
    elif ctype == "weixin":
        app_secret = ""
        extra = {
            # Login state persists as a file inside the bot workspace.
            "hasSavedState": (Path(BOTS_WORKSPACE_ROOT) / bot_id / ".nanobot" / "weixin" / "account.json").is_file(),
        }
    elif ctype == "email":
        extra = {
            "consentGranted": bool(cfg.get("consentGranted", False)),
            "imapHost": str(cfg.get("imapHost") or ""),
            "imapPort": _safe_int(cfg.get("imapPort") or 993, 993),
            "imapUsername": str(cfg.get("imapUsername") or ""),
            "imapPassword": str(cfg.get("imapPassword") or ""),
            "imapMailbox": str(cfg.get("imapMailbox") or "INBOX"),
            "imapUseSsl": bool(cfg.get("imapUseSsl", True)),
            "smtpHost": str(cfg.get("smtpHost") or ""),
            "smtpPort": _safe_int(cfg.get("smtpPort") or 587, 587),
            "smtpUsername": str(cfg.get("smtpUsername") or ""),
            "smtpPassword": str(cfg.get("smtpPassword") or ""),
            "smtpUseTls": bool(cfg.get("smtpUseTls", True)),
            "smtpUseSsl": bool(cfg.get("smtpUseSsl", False)),
            "fromAddress": str(cfg.get("fromAddress") or ""),
            "autoReplyEnabled": bool(cfg.get("autoReplyEnabled", True)),
            "pollIntervalSeconds": _safe_int(cfg.get("pollIntervalSeconds") or 30, 30),
            "markSeen": bool(cfg.get("markSeen", True)),
            "maxBodyChars": _safe_int(cfg.get("maxBodyChars") or 12000, 12000),
            "subjectPrefix": str(cfg.get("subjectPrefix") or "Re: "),
            "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
        }
    else:
        # Unknown channel type: pull credentials from the common key aliases.
        external_app_id = str(
            cfg.get("appId") or cfg.get("clientId") or cfg.get("botToken") or cfg.get("externalAppId") or ""
        )
        app_secret = str(
            cfg.get("appSecret")
            or cfg.get("clientSecret")
            or cfg.get("secret")
            or cfg.get("token")
            or cfg.get("appToken")
            or ""
        )
        extra = {
            key: value
            for key, value in cfg.items()
            if key
            not in {
                "enabled",
                "port",
                "appId",
                "clientId",
                "botToken",
                "externalAppId",
                "appSecret",
                "clientSecret",
                "secret",
                "token",
                "appToken",
            }
        }

    return {
        "id": ctype,
        "bot_id": bot_id,
        "channel_type": ctype,
        "external_app_id": external_app_id,
        "app_secret": app_secret,
        "internal_port": port,
        "is_active": enabled,
        "extra_config": extra,
        # Only the built-in dashboard channel is locked against edits.
        "locked": ctype == "dashboard",
    }
def channel_api_to_config(row: Dict[str, Any]) -> Dict[str, Any]:
    """Inverse of channel_config_to_api: map an API row back into the channel
    section stored in the workspace config.

    Fix: numeric fields now go through _safe_int so a corrupted row value
    (e.g. internal_port: "abc") degrades to the default instead of raising
    ValueError and aborting the workspace write.
    """
    ctype = str(row.get("channel_type") or "").strip().lower()
    enabled = bool(row.get("is_active", True))
    extra = normalize_channel_extra(row.get("extra_config"))
    external_app_id = str(row.get("external_app_id") or "")
    app_secret = str(row.get("app_secret") or "")
    port = max(1, min(_safe_int(row.get("internal_port") or 8080, 8080), 65535))

    if ctype == "feishu":
        return {
            "enabled": enabled,
            "appId": external_app_id,
            "appSecret": app_secret,
            "encryptKey": extra.get("encryptKey", ""),
            "verificationToken": extra.get("verificationToken", ""),
            "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
        }
    if ctype == "dingtalk":
        return {
            "enabled": enabled,
            "clientId": external_app_id,
            "clientSecret": app_secret,
            "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
        }
    if ctype == "telegram":
        return {
            "enabled": enabled,
            "token": app_secret,
            "proxy": extra.get("proxy", ""),
            "replyToMessage": bool(extra.get("replyToMessage", False)),
            "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
        }
    if ctype == "slack":
        return {
            "enabled": enabled,
            "mode": extra.get("mode", "socket"),
            "botToken": external_app_id,
            "appToken": app_secret,
            "replyInThread": bool(extra.get("replyInThread", True)),
            "groupPolicy": extra.get("groupPolicy", "mention"),
            "groupAllowFrom": extra.get("groupAllowFrom", []),
            "reactEmoji": extra.get("reactEmoji", "eyes"),
        }
    if ctype == "qq":
        return {
            "enabled": enabled,
            "appId": external_app_id,
            "secret": app_secret,
            "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
        }
    if ctype == "wecom":
        return {
            "enabled": enabled,
            "botId": external_app_id,
            "secret": app_secret,
            "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
            "welcomeMessage": str(extra.get("welcomeMessage") or ""),
        }
    if ctype == "weixin":
        return {
            "enabled": enabled,
            "token": app_secret,
        }
    if ctype == "email":
        return {
            "enabled": enabled,
            "consentGranted": bool(extra.get("consentGranted", False)),
            "imapHost": str(extra.get("imapHost") or ""),
            "imapPort": max(1, min(_safe_int(extra.get("imapPort") or 993, 993), 65535)),
            "imapUsername": str(extra.get("imapUsername") or ""),
            "imapPassword": str(extra.get("imapPassword") or ""),
            "imapMailbox": str(extra.get("imapMailbox") or "INBOX"),
            "imapUseSsl": bool(extra.get("imapUseSsl", True)),
            "smtpHost": str(extra.get("smtpHost") or ""),
            "smtpPort": max(1, min(_safe_int(extra.get("smtpPort") or 587, 587), 65535)),
            "smtpUsername": str(extra.get("smtpUsername") or ""),
            "smtpPassword": str(extra.get("smtpPassword") or ""),
            "smtpUseTls": bool(extra.get("smtpUseTls", True)),
            "smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
            "fromAddress": str(extra.get("fromAddress") or ""),
            "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
            "pollIntervalSeconds": max(5, _safe_int(extra.get("pollIntervalSeconds") or 30, 30)),
            "markSeen": bool(extra.get("markSeen", True)),
            "maxBodyChars": max(1, _safe_int(extra.get("maxBodyChars") or 12000, 12000)),
            "subjectPrefix": str(extra.get("subjectPrefix") or "Re: "),
            "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
        }

    # Unknown channel type: keep the extra payload and overlay generic fields.
    merged = dict(extra)
    merged.update(
        {
            "enabled": enabled,
            "appId": external_app_id,
            "appSecret": app_secret,
            "port": port,
        }
    )
    return merged
def list_bot_channels_from_config(bot: BotInstance) -> List[Dict[str, Any]]:
    """List all channel rows for a bot.

    The always-on dashboard pseudo-channel comes first (locked, port 9000),
    followed by one row per configured external channel. The two global
    delivery flags live on the channels mapping itself and are surfaced in
    the dashboard row's extra_config.
    """
    config_data = _read_bot_config(bot.id)
    channels_cfg = config_data.get("channels")
    if not isinstance(channels_cfg, dict):
        channels_cfg = {}
    send_progress, send_tool_hints = read_global_delivery_flags(channels_cfg)

    dashboard_row: Dict[str, Any] = {
        "id": "dashboard",
        "bot_id": bot.id,
        "channel_type": "dashboard",
        "external_app_id": f"dashboard-{bot.id}",
        "app_secret": "",
        "internal_port": 9000,
        "is_active": True,
        "extra_config": {
            "sendProgress": send_progress,
            "sendToolHints": send_tool_hints,
        },
        "locked": True,
    }

    rows: List[Dict[str, Any]] = [dashboard_row]
    reserved = {"sendProgress", "sendToolHints", "dashboard"}
    for ctype, cfg in channels_cfg.items():
        if ctype in reserved or not isinstance(cfg, dict):
            continue  # skip the global flag keys and malformed sections
        rows.append(channel_config_to_api(bot.id, ctype, cfg))
    return rows
def normalize_initial_bot_channels(bot_id: str, channels: Optional[List[ChannelConfigRequest]]) -> List[Dict[str, Any]]:
    """Normalize channel requests supplied at bot-creation time into config rows.

    Duplicate channel types and the reserved 'dashboard' channel are skipped;
    a missing external app id falls back to "<type>-<bot_id>".
    """
    normalized: List[Dict[str, Any]] = []
    seen_types: set[str] = set()
    for request in channels or []:
        ctype = (request.channel_type or "").strip().lower()
        if not ctype or ctype == "dashboard" or ctype in seen_types:
            continue
        seen_types.add(ctype)
        app_id = (request.external_app_id or "").strip()
        normalized.append(
            {
                "id": ctype,
                "bot_id": bot_id,
                "channel_type": ctype,
                "external_app_id": app_id or f"{ctype}-{bot_id}",
                "app_secret": (request.app_secret or "").strip(),
                "internal_port": max(1, min(int(request.internal_port or 8080), 65535)),
                "is_active": bool(request.is_active),
                "extra_config": normalize_channel_extra(request.extra_config),
                "locked": False,
            }
        )
    return normalized
def _read_workspace_md(bot_id: str, filename: str) -> str:
    """Read a required markdown file from the bot workspace, stripped.

    Raises RuntimeError when the file is missing or unreadable — these files
    are mandatory parts of a provisioned workspace.
    """
    path = os.path.join(_workspace_root(bot_id), filename)
    if not os.path.isfile(path):
        raise RuntimeError(f"Missing required workspace file: {path}")
    try:
        with open(path, "r", encoding="utf-8") as handle:
            content = handle.read()
    except Exception as exc:
        raise RuntimeError(f"Failed to read workspace file: {path}") from exc
    return content.strip()
def _select_first_provider(config_data: Dict[str, Any], bot_id: str) -> tuple[str, Dict[str, Any]]:
    """Return (name, cfg) of the first dict-valued provider entry.

    Raises RuntimeError when no usable provider is configured.
    """
    providers_cfg = config_data.get("providers")
    provider_name = ""
    provider_cfg: Dict[str, Any] = {}
    if isinstance(providers_cfg, dict):
        for p_name, p_cfg in providers_cfg.items():
            provider_name = str(p_name or "").strip()
            if isinstance(p_cfg, dict):
                provider_cfg = p_cfg
                break
    if not provider_name or not provider_cfg:
        raise RuntimeError(f"Missing provider configuration in bot config: {bot_id}")
    return provider_name, provider_cfg


def _select_agents_defaults(config_data: Dict[str, Any], bot_id: str) -> Dict[str, Any]:
    """Return the non-empty agents.defaults mapping; raise when absent."""
    agents_cfg = config_data.get("agents")
    if isinstance(agents_cfg, dict):
        defaults = agents_cfg.get("defaults")
        if isinstance(defaults, dict) and defaults:
            return defaults
    raise RuntimeError(f"Missing agents.defaults in bot config: {bot_id}")


def _resolve_provider_alias(llm_provider: str, provider_cfg: Dict[str, Any], api_base: str) -> str:
    """Map a generic 'openai' provider to its concrete dashboard alias.

    Uses the explicit dashboardProviderAlias when present (xunfei/iflytek/
    xfyun/vllm), otherwise falls back to recognizing the iFlytek Spark host
    in the API base URL. (The original double host check was redundant:
    "spark-api-open.xf-yun.com" is subsumed by "xf-yun.com".)
    """
    if llm_provider != "openai":
        return llm_provider
    alias = str(provider_cfg.get("dashboardProviderAlias") or "").strip().lower()
    if alias in {"xunfei", "iflytek", "xfyun", "vllm"}:
        return "xunfei" if alias in {"iflytek", "xfyun"} else alias
    if "xf-yun.com" in api_base.lower():
        return "xunfei"
    return llm_provider


def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
    """Assemble the full runtime snapshot for *bot* from its workspace config,
    env store, and required markdown documents.

    Raises RuntimeError when any mandatory piece (provider, model, apiKey,
    apiBase, agents.defaults, TZ, workspace files) is missing or malformed.
    """
    config_data = _read_bot_config(bot.id)
    env_params = resolve_bot_runtime_env_params(bot.id)

    provider_name, provider_cfg = _select_first_provider(config_data, bot.id)
    agents_defaults = _select_agents_defaults(config_data, bot.id)
    send_progress, send_tool_hints = read_global_delivery_flags(config_data.get("channels"))

    llm_model = str(agents_defaults.get("model") or "")
    api_key = str(provider_cfg.get("apiKey") or "").strip()
    api_base = str(provider_cfg.get("apiBase") or "").strip()
    if not llm_model:
        raise RuntimeError(f"Missing model in bot config: {bot.id}")
    if not api_key:
        raise RuntimeError(f"Missing apiKey in bot config: {bot.id}")
    if not api_base:
        raise RuntimeError(f"Missing apiBase in bot config: {bot.id}")

    llm_provider = _resolve_provider_alias(provider_name or "", provider_cfg, api_base)

    tools_cfg = config_data.get("tools")
    if tools_cfg is not None and not isinstance(tools_cfg, dict):
        raise RuntimeError(f"Invalid tools configuration in bot config: {bot.id}")
    mcp_servers = tools_cfg.get("mcpServers") if isinstance(tools_cfg, dict) else None

    soul_md = _read_workspace_md(bot.id, "SOUL.md")
    resources = _read_bot_resources(bot.id, config_data=config_data)
    return {
        "llm_provider": llm_provider,
        "llm_model": llm_model,
        "api_key": api_key,
        "api_base": api_base,
        "temperature": _safe_float(agents_defaults.get("temperature"), 0.2),
        "top_p": _safe_float(agents_defaults.get("topP"), 1.0),
        "max_tokens": _safe_int(agents_defaults.get("maxTokens"), 8192),
        "cpu_cores": resources["cpu_cores"],
        "memory_mb": resources["memory_mb"],
        "storage_gb": resources["storage_gb"],
        "system_timezone": env_params["TZ"],
        "send_progress": send_progress,
        "send_tool_hints": send_tool_hints,
        "soul_md": soul_md,
        "agents_md": _read_workspace_md(bot.id, "AGENTS.md"),
        "user_md": _read_workspace_md(bot.id, "USER.md"),
        "tools_md": _read_workspace_md(bot.id, "TOOLS.md"),
        "identity_md": _read_workspace_md(bot.id, "IDENTITY.md"),
        "mcp_servers": mcp_servers if isinstance(mcp_servers, dict) else None,
    }
def serialize_bot_detail(bot: BotInstance) -> Dict[str, Any]:
    """Serialize a bot plus its full runtime snapshot for the detail endpoint."""
    runtime = read_bot_runtime_snapshot(bot)
    # ISO timestamps get an explicit Z suffix for the frontend.
    # NOTE(review): assumes created_at/updated_at are naive UTC datetimes —
    # an aware value would serialize as "...+00:00Z"; confirm against the model.
    created_at = bot.created_at.isoformat() + "Z" if bot.created_at else None
    updated_at = bot.updated_at.isoformat() + "Z" if bot.updated_at else None

    payload: Dict[str, Any] = {
        "id": bot.id,
        "name": bot.name,
        "enabled": bool(getattr(bot, "enabled", True)),
        "access_password": bot.access_password or "",
        "has_access_password": bool(str(bot.access_password or "").strip()),
        # Avatar selection is not configurable yet; fixed defaults.
        "avatar_model": "base",
        "avatar_skin": "blue_suit",
        "image_tag": bot.image_tag,
    }
    for field in (
        "llm_provider",
        "llm_model",
        "api_key",
        "api_base",
        "temperature",
        "top_p",
        "max_tokens",
        "cpu_cores",
        "memory_mb",
        "storage_gb",
        "system_timezone",
        "send_progress",
        "send_tool_hints",
        "soul_md",
        "agents_md",
        "user_md",
        "tools_md",
        "identity_md",
    ):
        payload[field] = runtime[field]
    payload.update(
        {
            "workspace_dir": bot.workspace_dir,
            "docker_status": bot.docker_status,
            "current_state": bot.current_state,
            "last_action": bot.last_action,
            "created_at": created_at,
            "updated_at": updated_at,
        }
    )
    return payload
def serialize_bot_list_entry(bot: BotInstance) -> Dict[str, Any]:
    """Serialize the compact list-view row for a bot (no runtime snapshot)."""

    def iso_z(value: Optional[datetime]) -> Optional[str]:
        # NOTE(review): assumes naive UTC datetimes; an aware value would
        # serialize as "...+00:00Z".
        return value.isoformat() + "Z" if value else None

    return {
        "id": bot.id,
        "name": bot.name,
        "enabled": bool(getattr(bot, "enabled", True)),
        "has_access_password": bool(str(bot.access_password or "").strip()),
        "image_tag": bot.image_tag,
        "docker_status": bot.docker_status,
        "current_state": bot.current_state,
        "last_action": bot.last_action,
        "created_at": iso_z(bot.created_at),
        "updated_at": iso_z(bot.updated_at),
    }
def _has_bot_workspace_config(bot_id: str) -> bool:
    """True when the bot already has a persisted workspace config file."""
    config_path = Path(BOTS_WORKSPACE_ROOT) / bot_id / ".nanobot" / "config.json"
    return config_path.is_file()
def sync_bot_workspace_channels(
    session: Session,
    bot_id: str,
    channels_override: Optional[List[Dict[str, Any]]] = None,
    global_delivery_override: Optional[Dict[str, Any]] = None,
    runtime_overrides: Optional[Dict[str, Any]] = None,
) -> None:
    """Rewrite the bot workspace (config, channels, resource limits) from the
    current snapshot, optionally layering overrides on top.

    Raises RuntimeError when the bot does not exist, or when no workspace
    config exists yet and no runtime_overrides were supplied to seed one.
    """
    bot = session.get(BotInstance, bot_id)
    if not bot:
        raise RuntimeError(f"Bot not found: {bot_id}")

    has_existing_config = _has_bot_workspace_config(bot_id)
    if has_existing_config:
        # Start from the persisted snapshot, then layer overrides.
        bot_data: Dict[str, Any] = dict(read_bot_runtime_snapshot(bot))
    elif isinstance(runtime_overrides, dict):
        # First-time provisioning: overrides are the only data source.
        bot_data = {}
    else:
        raise RuntimeError(f"Missing required bot config for workspace sync: {bot_id}")
    if isinstance(runtime_overrides, dict):
        bot_data.update(runtime_overrides)

    bot_data.update(
        normalize_bot_resource_limits(
            bot_data.get("cpu_cores"),
            bot_data.get("memory_mb"),
            bot_data.get("storage_gb"),
        )
    )

    send_progress = bool(bot_data.get("send_progress", False))
    send_tool_hints = bool(bot_data.get("send_tool_hints", False))
    if isinstance(global_delivery_override, dict):
        # Only the keys actually present override the snapshot values.
        if "sendProgress" in global_delivery_override:
            send_progress = bool(global_delivery_override.get("sendProgress"))
        if "sendToolHints" in global_delivery_override:
            send_tool_hints = bool(global_delivery_override.get("sendToolHints"))

    if channels_override is not None:
        channels_data = channels_override
    elif has_existing_config:
        channels_data = list_bot_channels_from_config(bot)
    else:
        channels_data = []

    bot_data["send_progress"] = send_progress
    bot_data["send_tool_hints"] = send_tool_hints

    normalized_channels: List[Dict[str, Any]] = []
    for row in channels_data:
        ctype = str(row.get("channel_type") or "").strip().lower()
        if not ctype or ctype == "dashboard":
            continue  # the dashboard pseudo-channel is implicit, never persisted
        normalized_channels.append(
            {
                "channel_type": ctype,
                "external_app_id": str(row.get("external_app_id") or ""),
                "app_secret": str(row.get("app_secret") or ""),
                "internal_port": max(1, min(int(row.get("internal_port") or 8080), 65535)),
                "is_active": bool(row.get("is_active", True)),
                "extra_config": normalize_channel_extra(row.get("extra_config")),
            }
        )

    workspace_provider.write_workspace(bot_id=bot_id, bot_data=bot_data, channels=normalized_channels)
    write_bot_resource_limits(bot_id, bot_data.get("cpu_cores"), bot_data.get("memory_mb"), bot_data.get("storage_gb"))
@ -1,305 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
from core.utils import _calc_dir_size_bytes
|
|
||||||
from core.settings import BOTS_WORKSPACE_ROOT
|
|
||||||
|
|
||||||
_ENV_KEY_RE = re.compile(r"^[A-Z_][A-Z0-9_]{0,127}$")
|
|
||||||
_BYTES_PER_GB = 1024 * 1024 * 1024
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"get_bot_data_root",
|
|
||||||
"normalize_bot_env_params",
|
|
||||||
"normalize_bot_resource_limits",
|
|
||||||
"read_bot_config_data",
|
|
||||||
"read_bot_cron_jobs_store",
|
|
||||||
"read_bot_env_params",
|
|
||||||
"get_bot_resource_limits",
|
|
||||||
"get_bot_workspace_root",
|
|
||||||
"get_bot_workspace_snapshot",
|
|
||||||
"get_bot_workspace_usage_bytes",
|
|
||||||
"write_bot_config_data",
|
|
||||||
"write_bot_cron_jobs_store",
|
|
||||||
"write_bot_env_params",
|
|
||||||
"write_bot_resource_limits",
|
|
||||||
"_bot_data_root",
|
|
||||||
"_clear_bot_dashboard_direct_session",
|
|
||||||
"_clear_bot_sessions",
|
|
||||||
"_normalize_env_params",
|
|
||||||
"_normalize_resource_limits",
|
|
||||||
"_read_bot_config",
|
|
||||||
"_read_bot_resources",
|
|
||||||
"_read_cron_store",
|
|
||||||
"_read_env_store",
|
|
||||||
"_safe_float",
|
|
||||||
"_safe_int",
|
|
||||||
"_workspace_root",
|
|
||||||
"_write_bot_config",
|
|
||||||
"_write_bot_resources",
|
|
||||||
"_write_cron_store",
|
|
||||||
"_write_env_store",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_workspace_root(bot_id: str) -> str:
    """Public accessor: absolute path of the bot's agent workspace."""
    return _workspace_root(bot_id)


def _workspace_root(bot_id: str) -> str:
    """Absolute path of ``<BOTS_WORKSPACE_ROOT>/<bot_id>/.nanobot/workspace``."""
    parts = (BOTS_WORKSPACE_ROOT, bot_id, ".nanobot", "workspace")
    return os.path.abspath(os.path.join(*parts))


def get_bot_data_root(bot_id: str) -> str:
    """Public accessor: absolute path of the bot's ``.nanobot`` data dir."""
    return _bot_data_root(bot_id)


def _bot_data_root(bot_id: str) -> str:
    """Absolute path of ``<BOTS_WORKSPACE_ROOT>/<bot_id>/.nanobot``."""
    parts = (BOTS_WORKSPACE_ROOT, bot_id, ".nanobot")
    return os.path.abspath(os.path.join(*parts))
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_float(raw: Any, default: float) -> float:
|
|
||||||
try:
|
|
||||||
return float(raw)
|
|
||||||
except Exception:
|
|
||||||
return default
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_int(raw: Any, default: int) -> int:
|
|
||||||
try:
|
|
||||||
return int(raw)
|
|
||||||
except Exception:
|
|
||||||
return default
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_resource_limits(cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> Dict[str, Any]:
|
|
||||||
cpu = _safe_float(cpu_cores, 1.0)
|
|
||||||
mem = _safe_int(memory_mb, 1024)
|
|
||||||
storage = _safe_int(storage_gb, 10)
|
|
||||||
if cpu < 0:
|
|
||||||
cpu = 1.0
|
|
||||||
if mem < 0:
|
|
||||||
mem = 1024
|
|
||||||
if storage < 0:
|
|
||||||
storage = 10
|
|
||||||
normalized_cpu = 0.0 if cpu == 0 else min(16.0, max(0.1, cpu))
|
|
||||||
normalized_mem = 0 if mem == 0 else min(65536, max(256, mem))
|
|
||||||
normalized_storage = 0 if storage == 0 else min(1024, max(1, storage))
|
|
||||||
return {
|
|
||||||
"cpu_cores": normalized_cpu,
|
|
||||||
"memory_mb": normalized_mem,
|
|
||||||
"storage_gb": normalized_storage,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_bot_resource_limits(cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> Dict[str, Any]:
    """Public wrapper around the private resource-limit normalizer."""
    return _normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_env_params(raw: Any) -> Dict[str, str]:
    """Sanitize a raw env mapping into ``{VALID_UPPER_KEY: stripped_str}``.

    Keys are upper-cased and must match ``_ENV_KEY_RE`` (upper
    snake-case, max 128 chars); invalid keys are dropped silently.
    Fix: only a None value maps to "" — previously any falsy value
    (e.g. 0 or False) was collapsed to an empty string.
    """
    if not isinstance(raw, dict):
        return {}
    rows: Dict[str, str] = {}
    for key, value in raw.items():
        normalized_key = str(key or "").strip().upper()
        if not normalized_key or not _ENV_KEY_RE.fullmatch(normalized_key):
            continue
        rows[normalized_key] = ("" if value is None else str(value)).strip()
    return rows
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_bot_env_params(raw: Any) -> Dict[str, str]:
    """Public wrapper around the private env-param sanitizer."""
    return _normalize_env_params(raw)
|
|
||||||
|
|
||||||
|
|
||||||
def _read_json_object(path: str) -> Dict[str, Any]:
|
|
||||||
if not os.path.isfile(path):
|
|
||||||
raise RuntimeError(f"Missing required JSON file: {path}")
|
|
||||||
try:
|
|
||||||
with open(path, "r", encoding="utf-8") as file:
|
|
||||||
data = json.load(file)
|
|
||||||
except Exception as exc:
|
|
||||||
raise RuntimeError(f"Invalid JSON file: {path}") from exc
|
|
||||||
if not isinstance(data, dict):
|
|
||||||
raise RuntimeError(f"JSON file must contain an object: {path}")
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
def _read_json_value(path: str) -> Any:
|
|
||||||
if not os.path.isfile(path):
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
with open(path, "r", encoding="utf-8") as file:
|
|
||||||
return json.load(file)
|
|
||||||
except Exception:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
|
|
||||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
|
||||||
tmp_path = f"{path}.tmp"
|
|
||||||
with open(tmp_path, "w", encoding="utf-8") as file:
|
|
||||||
json.dump(payload, file, ensure_ascii=False, indent=2)
|
|
||||||
os.replace(tmp_path, path)
|
|
||||||
|
|
||||||
|
|
||||||
def _config_json_path(bot_id: str) -> str:
    """Location of the bot's ``config.json`` inside its data root."""
    return os.path.join(_bot_data_root(bot_id), "config.json")


def _read_bot_config(bot_id: str) -> Dict[str, Any]:
    """Load ``config.json``; raises RuntimeError when missing or invalid."""
    return _read_json_object(_config_json_path(bot_id))


def read_bot_config_data(bot_id: str) -> Dict[str, Any]:
    """Public wrapper over ``_read_bot_config``."""
    return _read_bot_config(bot_id)


def _write_bot_config(bot_id: str, config_data: Dict[str, Any]) -> None:
    """Atomically persist ``config.json``."""
    _write_json_atomic(_config_json_path(bot_id), config_data)


def write_bot_config_data(bot_id: str, config_data: Dict[str, Any]) -> None:
    """Public wrapper over ``_write_bot_config``."""
    _write_bot_config(bot_id, config_data)
|
|
||||||
|
|
||||||
|
|
||||||
def _resources_json_path(bot_id: str) -> str:
    """Location of the bot's ``resources.json`` inside its data root."""
    return os.path.join(_bot_data_root(bot_id), "resources.json")


def _write_bot_resources(bot_id: str, cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> None:
    """Normalize and atomically persist the resource limits.

    The on-disk file uses camelCase keys (``cpuCores`` etc.).
    """
    limits = _normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
    payload = {
        "cpuCores": limits["cpu_cores"],
        "memoryMB": limits["memory_mb"],
        "storageGB": limits["storage_gb"],
    }
    _write_json_atomic(_resources_json_path(bot_id), payload)


def write_bot_resource_limits(bot_id: str, cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> None:
    """Public wrapper over ``_write_bot_resources``."""
    _write_bot_resources(bot_id, cpu_cores, memory_mb, storage_gb)


def _read_bot_resources(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Read ``resources.json`` and normalize.

    Accepts both camelCase and snake_case keys. *config_data* is kept
    only for signature compatibility and is ignored.
    """
    del config_data  # retained for interface compatibility only
    raw = _read_json_object(_resources_json_path(bot_id))
    cpu = raw.get("cpuCores", raw.get("cpu_cores"))
    mem = raw.get("memoryMB", raw.get("memory_mb"))
    storage = raw.get("storageGB", raw.get("storage_gb"))
    return _normalize_resource_limits(cpu, mem, storage)


def get_bot_resource_limits(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Public wrapper over ``_read_bot_resources``."""
    return _read_bot_resources(bot_id, config_data=config_data)
|
|
||||||
|
|
||||||
|
|
||||||
def get_bot_workspace_usage_bytes(bot_id: str) -> int:
    """Current on-disk size of the bot's workspace directory, in bytes."""
    return _calc_dir_size_bytes(_workspace_root(bot_id))


def get_bot_workspace_snapshot(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Summarize the workspace: path, usage, and configured size cap.

    ``configured_limit_bytes`` is None when storage is unlimited (0 GB).
    """
    limits = get_bot_resource_limits(bot_id, config_data=config_data)
    limit_bytes = int(limits.get("storage_gb") or 0) * _BYTES_PER_GB
    return {
        "path": get_bot_workspace_root(bot_id),
        "usage_bytes": get_bot_workspace_usage_bytes(bot_id),
        "configured_limit_bytes": limit_bytes if limit_bytes > 0 else None,
    }
|
|
||||||
|
|
||||||
|
|
||||||
def _env_store_path(bot_id: str) -> str:
    """Location of the bot's ``env.json`` inside its data root."""
    return os.path.join(_bot_data_root(bot_id), "env.json")


def _read_env_store(bot_id: str) -> Dict[str, str]:
    """Load and sanitize ``env.json``; raises RuntimeError when missing/invalid."""
    return _normalize_env_params(_read_json_object(_env_store_path(bot_id)))


def read_bot_env_params(bot_id: str) -> Dict[str, str]:
    """Public wrapper over ``_read_env_store``."""
    return _read_env_store(bot_id)


def _write_env_store(bot_id: str, env_params: Dict[str, str]) -> None:
    """Sanitize and atomically persist ``env.json``."""
    _write_json_atomic(_env_store_path(bot_id), _normalize_env_params(env_params))


def write_bot_env_params(bot_id: str, env_params: Dict[str, str]) -> None:
    """Public wrapper over ``_write_env_store``."""
    _write_env_store(bot_id, env_params)
|
|
||||||
|
|
||||||
|
|
||||||
def _cron_store_path(bot_id: str) -> str:
    """Location of the cron job store inside the bot's workspace."""
    return os.path.join(_workspace_root(bot_id), "cron", "jobs.json")


def _normalize_cron_store_payload(raw: Any) -> Dict[str, Any]:
    """Coerce any stored cron payload into ``{"version": int, "jobs": [dict]}``.

    Accepts a bare list (legacy layout), a dict carrying a ``jobs``
    list, or anything else (treated as an empty store). Job rows that
    are not dicts are dropped.
    """
    if isinstance(raw, list):
        return {"version": 1, "jobs": [job for job in raw if isinstance(job, dict)]}
    if not isinstance(raw, dict):
        return {"version": 1, "jobs": []}
    jobs_raw = raw.get("jobs")
    jobs = [job for job in jobs_raw if isinstance(job, dict)] if isinstance(jobs_raw, list) else []
    return {"version": _safe_int(raw.get("version"), 1), "jobs": jobs}


def _read_cron_store(bot_id: str) -> Dict[str, Any]:
    """Load the cron store, normalizing missing/invalid files to empty."""
    return _normalize_cron_store_payload(_read_json_value(_cron_store_path(bot_id)))


def read_bot_cron_jobs_store(bot_id: str) -> Dict[str, Any]:
    """Public wrapper over ``_read_cron_store``."""
    return _read_cron_store(bot_id)


def _write_cron_store(bot_id: str, store: Dict[str, Any]) -> None:
    """Normalize and atomically persist the cron store."""
    _write_json_atomic(_cron_store_path(bot_id), _normalize_cron_store_payload(store))


def write_bot_cron_jobs_store(bot_id: str, store: Dict[str, Any]) -> None:
    """Public wrapper over ``_write_cron_store``."""
    _write_cron_store(bot_id, store)
|
|
||||||
|
|
||||||
|
|
||||||
def _sessions_root(bot_id: str) -> str:
    """Directory holding per-channel session transcript files (*.jsonl)."""
    return os.path.join(_workspace_root(bot_id), "sessions")


def _clear_bot_sessions(bot_id: str) -> int:
    """Delete every ``*.jsonl`` session file directly under the sessions dir.

    Best-effort: entries that cannot be inspected or removed are
    skipped. Returns the number of files deleted.

    Improvement: ``os.scandir`` needs a single stat per entry, where the
    previous ``os.listdir`` + ``os.path.isfile`` pairing did two.
    """
    root = _sessions_root(bot_id)
    if not os.path.isdir(root):
        return 0
    deleted = 0
    with os.scandir(root) as entries:
        for entry in entries:
            if not entry.name.lower().endswith(".jsonl"):
                continue
            try:
                if not entry.is_file():
                    continue
                os.remove(entry.path)
                deleted += 1
            except OSError:
                # Best-effort cleanup: skip files we cannot remove.
                continue
    return deleted


def _clear_bot_dashboard_direct_session(bot_id: str) -> Dict[str, Any]:
    """Truncate (or create) the ``dashboard_direct.jsonl`` session file.

    Returns the file path and whether the file existed beforehand.
    """
    root = _sessions_root(bot_id)
    os.makedirs(root, exist_ok=True)
    path = os.path.join(root, "dashboard_direct.jsonl")
    existed = os.path.exists(path)
    # Opening with "w" truncates an existing file and creates a missing one.
    with open(path, "w", encoding="utf-8"):
        pass
    return {"path": path, "existed": existed}
|
|
||||||
|
|
@ -1,28 +0,0 @@
|
||||||
from typing import Optional
|
|
||||||
from core.cache import cache
|
|
||||||
|
|
||||||
def _cache_key_bots_list() -> str:
|
|
||||||
return "bot:list:v3"
|
|
||||||
|
|
||||||
def _cache_key_bot_detail(bot_id: str) -> str:
|
|
||||||
return f"bot:detail:v3:{bot_id}"
|
|
||||||
|
|
||||||
def _cache_key_bot_messages(bot_id: str, limit: int) -> str:
|
|
||||||
return f"bot:messages:list:v2:{bot_id}:limit:{limit}"
|
|
||||||
|
|
||||||
def _cache_key_bot_messages_page(bot_id: str, limit: int, before_id: Optional[int]) -> str:
|
|
||||||
cursor = str(int(before_id)) if isinstance(before_id, int) and before_id > 0 else "latest"
|
|
||||||
return f"bot:messages:page:v2:{bot_id}:before:{cursor}:limit:{limit}"
|
|
||||||
|
|
||||||
def _cache_key_images() -> str:
|
|
||||||
return "images:list"
|
|
||||||
|
|
||||||
def _invalidate_bot_detail_cache(bot_id: str) -> None:
    """Drop both the bot-list cache and this bot's detail cache entry."""
    cache.delete(_cache_key_bots_list(), _cache_key_bot_detail(bot_id))


def _invalidate_bot_messages_cache(bot_id: str) -> None:
    """Drop every cached message list and page for this bot."""
    for prefix in (
        f"bot:messages:list:v2:{bot_id}:",
        f"bot:messages:page:v2:{bot_id}:",
    ):
        cache.delete_prefix(prefix)


def _invalidate_images_cache() -> None:
    """Drop the cached image list."""
    cache.delete(_cache_key_images())
|
|
||||||
|
|
@ -1,202 +0,0 @@
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from typing import Any, Dict, List
|
|
||||||
|
|
||||||
from fastapi import HTTPException
|
|
||||||
from sqlmodel import Session
|
|
||||||
|
|
||||||
from core.docker_instance import docker_manager
|
|
||||||
from core.utils import _is_video_attachment_path, _is_visual_attachment_path
|
|
||||||
from models.bot import BotInstance
|
|
||||||
from services.bot_service import read_bot_runtime_snapshot
|
|
||||||
from services.platform_activity_service import record_activity_event
|
|
||||||
from services.platform_usage_service import create_usage_request, fail_latest_usage
|
|
||||||
from services.runtime_service import broadcast_runtime_packet, persist_runtime_packet
|
|
||||||
from services.workspace_service import resolve_workspace_path
|
|
||||||
|
|
||||||
logger = logging.getLogger("dashboard.backend")
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_message_media_item(value: Any) -> str:
|
|
||||||
return str(value or "").strip().replace("\\", "/").lstrip("/")
|
|
||||||
|
|
||||||
|
|
||||||
def _normalize_message_media_list(raw: Any) -> List[str]:
|
|
||||||
if not isinstance(raw, list):
|
|
||||||
return []
|
|
||||||
rows: List[str] = []
|
|
||||||
for value in raw:
|
|
||||||
normalized = _normalize_message_media_item(value)
|
|
||||||
if normalized:
|
|
||||||
rows.append(normalized)
|
|
||||||
return rows
|
|
||||||
|
|
||||||
|
|
||||||
def _build_delivery_command(command: str, checked_attachments: List[str]) -> str:
    """Build the final prompt text delivered to the bot for a command.

    Combines the user's text *command* with an attachment list. Three
    shapes are produced, chosen by attachment type and presence of text:
      * all attachments visual (image/video): Chinese-language handling
        instructions are appended (with or without user text);
      * mixed/non-visual attachments with text: an English
        "[Attached files]" section is appended unless the command
        already names every attachment path;
      * attachments without text: an English "process the files" prompt.
    With no attachments the command passes through unchanged.

    NOTE(review): the prompt strings are runtime payloads sent to the
    agent — they must not be reworded casually.
    """
    # No attachments: nothing to augment.
    if not checked_attachments:
        return command

    # Bulleted list of attachment paths, one per line.
    attachment_block = "\n".join(f"- {path}" for path in checked_attachments)
    # Purely visual attachments (images and/or videos) get the
    # multimodal-input instruction variant.
    if all(_is_visual_attachment_path(path) for path in checked_attachments):
        has_video = any(_is_video_attachment_path(path) for path in checked_attachments)
        media_label = "图片/视频" if has_video else "图片"
        # First numbered rule differs depending on whether videos are present.
        capability_hint = (
            "1) 附件已随请求附带;图片在可用时可直接作为多模态输入理解,视频请按附件路径处理。\n"
            if has_video
            else "1) 附件中的图片已作为多模态输入提供,优先直接理解并回答。\n"
        )
        if command:
            # User text + visual attachments: append the attachment list
            # and the full numbered handling requirements.
            return (
                f"{command}\n\n"
                "[Attached files]\n"
                f"{attachment_block}\n\n"
                "【附件处理要求】\n"
                f"{capability_hint}"
                "2) 若当前模型或接口不支持直接理解该附件,请明确说明后再调用工具解析。\n"
                "3) 除非用户明确要求,不要先调用工具读取附件文件。\n"
                "4) 回复语言必须遵循 USER.md;若未指定,则与用户当前输入语言保持一致。\n"
                "5) 仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
            )
        # Visual attachments with no user text: ask for a summary.
        return (
            "请先处理已附带的附件列表:\n"
            f"{attachment_block}\n\n"
            f"请直接分析已附带的{media_label}并总结关键信息。\n"
            f"{'图片在可用时可直接作为多模态输入理解,视频请按附件路径处理。' if has_video else ''}\n"
            "若当前模型或接口不支持直接理解该附件,请明确说明后再调用工具解析。\n"
            "回复语言必须遵循 USER.md;若未指定,则与用户当前输入语言保持一致。\n"
            "仅基于可见内容回答;看不清或无法确认的部分请明确说明,不要猜测。"
        )

    # Non-visual (or mixed) attachments: only append the list when the
    # command does not already mention every attachment path.
    command_has_paths = all(path in command for path in checked_attachments) if command else False
    if command and not command_has_paths:
        return (
            f"{command}\n\n"
            "[Attached files]\n"
            f"{attachment_block}\n\n"
            "Please process the attached file(s) listed above when answering this request.\n"
            "Reply language must follow USER.md. If not specified, use the same language as the user input."
        )
    if not command:
        # Attachments only, no user text.
        return (
            "Please process the uploaded file(s) listed below:\n"
            f"{attachment_block}\n\n"
            "Reply language must follow USER.md. If not specified, use the same language as the user input."
        )
    # Command already references every attachment: pass through unchanged.
    return command
|
|
||||||
|
|
||||||
|
|
||||||
def send_bot_command(session: Session, bot_id: str, command: str, attachments: Any) -> Dict[str, Any]:
    """Validate, record, and deliver a dashboard command to a bot container.

    Flow: look up the bot, normalize/verify attachments against the
    workspace, record a usage request + activity event (committed before
    delivery), persist/broadcast the outbound packet, then hand the
    command to the Docker manager. On delivery failure the usage request
    is failed, a failure event is committed, an error state is broadcast,
    and a 502 is raised. Unexpected errors roll back and raise a 500.

    Raises:
        HTTPException: 404 unknown bot, 400 bad input or missing
            attachment file, 502 delivery failure, 500 anything else.
    """
    request_id = ""
    try:
        bot = session.get(BotInstance, bot_id)
        if not bot:
            raise HTTPException(status_code=404, detail="Bot not found")
        # Snapshot provider/model so the usage request records what the
        # bot is currently configured to run.
        runtime_snapshot = read_bot_runtime_snapshot(bot)

        normalized_attachments = _normalize_message_media_list(attachments)
        text_command = str(command or "").strip()
        # Either text or at least one attachment must be present.
        if not text_command and not normalized_attachments:
            raise HTTPException(status_code=400, detail="Command or attachments is required")

        # Every attachment path must resolve to an existing file inside
        # the bot's workspace; reject the whole request otherwise.
        checked_attachments: List[str] = []
        for rel_path in normalized_attachments:
            _, target = resolve_workspace_path(bot_id, rel_path)
            if not os.path.isfile(target):
                raise HTTPException(status_code=400, detail=f"attachment not found: {rel_path}")
            checked_attachments.append(rel_path)
        # Paths as seen from inside the container's mounted workspace.
        delivery_media = [f"/root/.nanobot/workspace/{path.lstrip('/')}" for path in checked_attachments]

        # What the dashboard shows vs. what is actually delivered (the
        # delivery text may carry attachment-handling instructions).
        display_command = text_command if text_command else "[attachment message]"
        delivery_command = _build_delivery_command(text_command, checked_attachments)

        # Record the request and a "submitted" event, committing BEFORE
        # attempting delivery so the attempt is durable even if delivery
        # fails below.
        request_id = create_usage_request(
            session,
            bot_id,
            display_command,
            attachments=checked_attachments,
            channel="dashboard",
            metadata={"attachment_count": len(checked_attachments)},
            provider=str(runtime_snapshot.get("llm_provider") or "").strip() or None,
            model=str(runtime_snapshot.get("llm_model") or "").strip() or None,
        )
        record_activity_event(
            session,
            bot_id,
            "command_submitted",
            request_id=request_id,
            channel="dashboard",
            detail="command submitted",
            metadata={"attachment_count": len(checked_attachments), "has_text": bool(text_command)},
        )
        session.commit()

        # Persist and broadcast the user's outbound packet so dashboard
        # clients see the command immediately.
        outbound_user_packet: Dict[str, Any] | None = None
        if display_command or checked_attachments:
            outbound_user_packet = {
                "type": "USER_COMMAND",
                "channel": "dashboard",
                "text": display_command,
                "media": checked_attachments,
                "request_id": request_id,
            }
            persist_runtime_packet(bot_id, outbound_user_packet)

        if outbound_user_packet:
            broadcast_runtime_packet(bot_id, outbound_user_packet)

        success = docker_manager.send_command(bot_id, delivery_command, media=delivery_media)
        if success:
            return {"success": True}

        # Delivery failed: mark the usage request failed, record the
        # event, broadcast an error state, then surface a 502.
        detail = docker_manager.get_last_delivery_error(bot_id)
        fail_latest_usage(session, bot_id, detail or "command delivery failed")
        record_activity_event(
            session,
            bot_id,
            "command_failed",
            request_id=request_id,
            channel="dashboard",
            detail=(detail or "command delivery failed")[:400],
        )
        session.commit()
        broadcast_runtime_packet(
            bot_id,
            {
                "type": "AGENT_STATE",
                "channel": "dashboard",
                "payload": {
                    "state": "ERROR",
                    "action_msg": detail or "command delivery failed",
                },
            },
        )
        raise HTTPException(
            status_code=502,
            detail=f"Failed to deliver command to bot dashboard channel{': ' + detail if detail else ''}",
        )
    except HTTPException:
        # Intentional HTTP errors pass through untouched.
        raise
    except Exception as exc:
        logger.exception("send_bot_command failed for bot_id=%s", bot_id)
        # Roll back whatever the failed step left pending; rollback
        # itself is best-effort.
        try:
            session.rollback()
        except Exception:
            pass
        # If a usage request was already created, try to mark it failed
        # in a fresh transaction (also best-effort).
        if request_id:
            try:
                fail_latest_usage(session, bot_id, str(exc))
                record_activity_event(
                    session,
                    bot_id,
                    "command_failed",
                    request_id=request_id,
                    channel="dashboard",
                    detail=str(exc)[:400],
                )
                session.commit()
            except Exception:
                try:
                    session.rollback()
                except Exception:
                    pass
        raise HTTPException(status_code=500, detail=f"Failed to process bot command: {exc}") from exc
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue