diff --git a/backend/api/bot_router.py b/backend/api/bot_router.py
index 8fed3fd..52889f9 100644
--- a/backend/api/bot_router.py
+++ b/backend/api/bot_router.py
@@ -16,9 +16,9 @@ from services.bot_management_service import (
create_bot_record,
get_bot_detail_cached,
list_bots_with_cache,
- test_provider_connection,
update_bot_record,
)
+from services.provider_service import test_provider_connection
router = APIRouter()
diff --git a/backend/api/health_router.py b/backend/api/health_router.py
new file mode 100644
index 0000000..be3622c
--- /dev/null
+++ b/backend/api/health_router.py
@@ -0,0 +1,39 @@
+from fastapi import APIRouter, HTTPException
+from sqlmodel import Session, select
+
+from core.cache import cache
+from core.database import engine
+from core.settings import DATABASE_ENGINE, REDIS_ENABLED, REDIS_PREFIX, REDIS_URL
+from models.bot import BotInstance
+
+router = APIRouter()
+
+
+@router.get("/api/health")
+def get_health():
+ try:
+ with Session(engine) as session:
+ session.exec(select(BotInstance).limit(1)).first()
+ return {"status": "ok", "database": DATABASE_ENGINE}
+ except Exception as exc:
+ raise HTTPException(status_code=503, detail=f"database check failed: {exc}") from exc
+
+
+@router.get("/api/health/cache")
+def get_cache_health():
+ redis_url = str(REDIS_URL or "").strip()
+ configured = bool(REDIS_ENABLED and redis_url)
+ client_enabled = bool(getattr(cache, "enabled", False))
+ reachable = bool(cache.ping()) if client_enabled else False
+ status = "ok"
+ if configured and not reachable:
+ status = "degraded"
+ return {
+ "status": status,
+ "cache": {
+ "configured": configured,
+ "enabled": client_enabled,
+ "reachable": reachable,
+ "prefix": REDIS_PREFIX,
+ },
+ }
diff --git a/backend/api/image_router.py b/backend/api/image_router.py
index 333c5d4..674b90c 100644
--- a/backend/api/image_router.py
+++ b/backend/api/image_router.py
@@ -1,121 +1,31 @@
-from typing import Any, Dict, List
+from typing import Dict
-from fastapi import APIRouter, Depends, HTTPException
-from sqlmodel import Session, select
+from fastapi import APIRouter, Depends
+from sqlmodel import Session
-from core.cache import cache
from core.database import get_session
-from core.docker_instance import docker_manager
-from models.bot import BotInstance, NanobotImage
-from services.cache_service import _cache_key_images, _invalidate_images_cache
+from services.image_service import (
+ delete_registered_image,
+ list_docker_images_by_repository,
+ list_registered_images,
+ register_image as register_image_record,
+)
router = APIRouter()
-def _serialize_image(row: NanobotImage) -> Dict[str, Any]:
- created_at = row.created_at.isoformat() + "Z" if row.created_at else None
- return {
- "tag": row.tag,
- "image_id": row.image_id,
- "version": row.version,
- "status": row.status,
- "source_dir": row.source_dir,
- "created_at": created_at,
- }
-
-
-def _reconcile_registered_images(session: Session) -> None:
- rows = session.exec(select(NanobotImage)).all()
- dirty = False
- for row in rows:
- docker_exists = docker_manager.has_image(row.tag)
- next_status = "READY" if docker_exists else "ERROR"
- next_image_id = row.image_id
- if docker_exists and docker_manager.client:
- try:
- next_image_id = docker_manager.client.images.get(row.tag).id
- except Exception:
- next_image_id = row.image_id
- if row.status != next_status or row.image_id != next_image_id:
- row.status = next_status
- row.image_id = next_image_id
- session.add(row)
- dirty = True
- if dirty:
- session.commit()
-
-
@router.get("/api/images")
def list_images(session: Session = Depends(get_session)):
- cached = cache.get_json(_cache_key_images())
- if isinstance(cached, list) and all(isinstance(row, dict) for row in cached):
- return cached
- if isinstance(cached, list):
- _invalidate_images_cache()
- try:
- _reconcile_registered_images(session)
- except Exception as exc:
- # Docker status probing should not break image management in dev mode.
- print(f"[image_router] reconcile images skipped: {exc}")
- rows = session.exec(select(NanobotImage).order_by(NanobotImage.created_at.desc())).all()
- payload = [_serialize_image(row) for row in rows]
- cache.set_json(_cache_key_images(), payload, ttl=60)
- return payload
+ return list_registered_images(session)
@router.delete("/api/images/{tag:path}")
def delete_image(tag: str, session: Session = Depends(get_session)):
- image = session.get(NanobotImage, tag)
- if not image:
- raise HTTPException(status_code=404, detail="Image not found")
-
- # 检查是否有机器人正在使用此镜像
- bots_using = session.exec(select(BotInstance).where(BotInstance.image_tag == tag)).all()
- if bots_using:
- raise HTTPException(status_code=400, detail=f"Cannot delete image: {len(bots_using)} bots are using it.")
-
- session.delete(image)
- session.commit()
- _invalidate_images_cache()
- return {"status": "deleted"}
+ return delete_registered_image(session, tag=tag)
@router.get("/api/docker-images")
def list_docker_images(repository: str = "nanobot-base"):
- rows = docker_manager.list_images_by_repo(repository)
- return rows
+ return list_docker_images_by_repository(repository)
@router.post("/api/images/register")
def register_image(payload: dict, session: Session = Depends(get_session)):
- tag = (payload.get("tag") or "").strip()
- source_dir = (payload.get("source_dir") or "manual").strip() or "manual"
- if not tag:
- raise HTTPException(status_code=400, detail="tag is required")
-
- if not docker_manager.has_image(tag):
- raise HTTPException(status_code=404, detail=f"Docker image not found: {tag}")
-
- version = tag.split(":")[-1].removeprefix("v") if ":" in tag else tag
- try:
- docker_img = docker_manager.client.images.get(tag) if docker_manager.client else None
- image_id = docker_img.id if docker_img else None
- except Exception:
- image_id = None
-
- row = session.get(NanobotImage, tag)
- if not row:
- row = NanobotImage(
- tag=tag,
- version=version,
- status="READY",
- source_dir=source_dir,
- image_id=image_id,
- )
- else:
- row.version = version
- row.status = "READY"
- row.source_dir = source_dir
- row.image_id = image_id
- session.add(row)
- session.commit()
- session.refresh(row)
- _invalidate_images_cache()
- return _serialize_image(row)
+ return register_image_record(session, payload)
diff --git a/backend/api/panel_auth_router.py b/backend/api/panel_auth_router.py
new file mode 100644
index 0000000..b25d2d9
--- /dev/null
+++ b/backend/api/panel_auth_router.py
@@ -0,0 +1,55 @@
+from fastapi import APIRouter, Depends, HTTPException, Request, Response
+from sqlmodel import Session
+
+from core.database import get_session
+from core.settings import PANEL_ACCESS_PASSWORD
+from schemas.system import PanelLoginRequest
+from services.platform_auth_service import (
+ clear_panel_token_cookie,
+ create_panel_token,
+ resolve_panel_request_auth,
+ revoke_panel_token,
+ set_panel_token_cookie,
+)
+
+router = APIRouter()
+
+
+@router.get("/api/panel/auth/status")
+def get_panel_auth_status(request: Request, session: Session = Depends(get_session)):
+ configured = str(PANEL_ACCESS_PASSWORD or "").strip()
+ principal = resolve_panel_request_auth(session, request)
+ return {
+ "enabled": bool(configured),
+ "authenticated": bool(principal.authenticated),
+ "auth_source": principal.auth_source if principal.authenticated else None,
+ }
+
+
+@router.post("/api/panel/auth/login")
+def panel_login(
+ payload: PanelLoginRequest,
+ request: Request,
+ response: Response,
+ session: Session = Depends(get_session),
+):
+ configured = str(PANEL_ACCESS_PASSWORD or "").strip()
+ if not configured:
+ clear_panel_token_cookie(response)
+ return {"success": True, "enabled": False}
+ supplied = str(payload.password or "").strip()
+ if supplied != configured:
+ raise HTTPException(status_code=401, detail="Invalid panel access password")
+ try:
+ raw_token = create_panel_token(session, request)
+ except RuntimeError as exc:
+ raise HTTPException(status_code=503, detail=str(exc)) from exc
+ set_panel_token_cookie(response, request, raw_token, session)
+ return {"success": True, "enabled": True, "authenticated": True}
+
+
+@router.post("/api/panel/auth/logout")
+def panel_logout(request: Request, response: Response, session: Session = Depends(get_session)):
+ revoke_panel_token(session, request)
+ clear_panel_token_cookie(response)
+ return {"success": True}
diff --git a/backend/api/platform_router.py b/backend/api/platform_router.py
index 587b3d6..f306045 100644
--- a/backend/api/platform_router.py
+++ b/backend/api/platform_router.py
@@ -3,6 +3,7 @@ from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlmodel import Session
+from bootstrap.app_runtime import reload_platform_runtime
from core.cache import cache
from core.database import get_session
from schemas.platform import PlatformSettingsPayload, SystemSettingPayload
@@ -22,13 +23,6 @@ from services.platform_service import (
router = APIRouter()
-def _apply_platform_runtime_changes(request: Request) -> None:
- cache.delete_prefix("")
- speech_service = getattr(request.app.state, "speech_service", None)
- if speech_service is not None and hasattr(speech_service, "reset_runtime"):
- speech_service.reset_runtime()
-
-
@router.get("/api/platform/overview")
def get_platform_overview(request: Request, session: Session = Depends(get_session)):
docker_manager = getattr(request.app.state, "docker_manager", None)
@@ -43,7 +37,7 @@ def get_platform_settings_api(session: Session = Depends(get_session)):
@router.put("/api/platform/settings")
def update_platform_settings_api(payload: PlatformSettingsPayload, request: Request, session: Session = Depends(get_session)):
result = save_platform_settings(session, payload).model_dump()
- _apply_platform_runtime_changes(request)
+ reload_platform_runtime(request.app)
return result
@@ -54,8 +48,8 @@ def clear_platform_cache():
@router.post("/api/platform/reload")
-def reload_platform_runtime(request: Request):
- _apply_platform_runtime_changes(request)
+def reload_platform_runtime_api(request: Request):
+ reload_platform_runtime(request.app)
return {"status": "reloaded"}
@@ -107,7 +101,7 @@ def get_system_settings(search: str = "", session: Session = Depends(get_session
def create_system_setting(payload: SystemSettingPayload, request: Request, session: Session = Depends(get_session)):
try:
result = create_or_update_system_setting(session, payload)
- _apply_platform_runtime_changes(request)
+ reload_platform_runtime(request.app)
return result
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
@@ -117,7 +111,7 @@ def create_system_setting(payload: SystemSettingPayload, request: Request, sessi
def update_system_setting(key: str, payload: SystemSettingPayload, request: Request, session: Session = Depends(get_session)):
try:
result = create_or_update_system_setting(session, payload.model_copy(update={"key": key}))
- _apply_platform_runtime_changes(request)
+ reload_platform_runtime(request.app)
return result
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
@@ -127,7 +121,7 @@ def update_system_setting(key: str, payload: SystemSettingPayload, request: Requ
def remove_system_setting(key: str, request: Request, session: Session = Depends(get_session)):
try:
delete_system_setting(session, key)
- _apply_platform_runtime_changes(request)
+ reload_platform_runtime(request.app)
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
return {"status": "deleted", "key": key}
diff --git a/backend/api/system_router.py b/backend/api/system_router.py
index e4ea186..6de2288 100644
--- a/backend/api/system_router.py
+++ b/backend/api/system_router.py
@@ -1,21 +1,7 @@
-from typing import Any, Dict
+from fastapi import APIRouter, HTTPException
-from fastapi import APIRouter, Depends, HTTPException, Request, Response
-from sqlmodel import Session, select
-
-from core.database import engine, get_session
-from core.settings import DATABASE_ENGINE, PANEL_ACCESS_PASSWORD, REDIS_ENABLED, REDIS_PREFIX, REDIS_URL
from core.utils import _get_default_system_timezone
-from models.bot import BotInstance
-from schemas.system import PanelLoginRequest, SystemTemplatesUpdateRequest
-from core.cache import cache
-from services.platform_auth_service import (
- clear_panel_token_cookie,
- create_panel_token,
- resolve_panel_request_auth,
- revoke_panel_token,
- set_panel_token_cookie,
-)
+from schemas.system import SystemTemplatesUpdateRequest
from services.platform_service import get_platform_settings_snapshot, get_speech_runtime_settings
from services.template_service import (
get_agent_md_templates,
@@ -26,40 +12,6 @@ from services.template_service import (
router = APIRouter()
-
-@router.get("/api/panel/auth/status")
-def get_panel_auth_status(request: Request, session: Session = Depends(get_session)):
- configured = str(PANEL_ACCESS_PASSWORD or "").strip()
- principal = resolve_panel_request_auth(session, request)
- return {
- "enabled": bool(configured),
- "authenticated": bool(principal.authenticated),
- "auth_source": principal.auth_source if principal.authenticated else None,
- }
-
-@router.post("/api/panel/auth/login")
-def panel_login(payload: PanelLoginRequest, request: Request, response: Response, session: Session = Depends(get_session)):
- configured = str(PANEL_ACCESS_PASSWORD or "").strip()
- if not configured:
- clear_panel_token_cookie(response)
- return {"success": True, "enabled": False}
- supplied = str(payload.password or "").strip()
- if supplied != configured:
- raise HTTPException(status_code=401, detail="Invalid panel access password")
- try:
- raw_token = create_panel_token(session, request)
- except RuntimeError as exc:
- raise HTTPException(status_code=503, detail=str(exc)) from exc
- set_panel_token_cookie(response, request, raw_token, session)
- return {"success": True, "enabled": True, "authenticated": True}
-
-
-@router.post("/api/panel/auth/logout")
-def panel_logout(request: Request, response: Response, session: Session = Depends(get_session)):
- revoke_panel_token(session, request)
- clear_panel_token_cookie(response)
- return {"success": True}
-
@router.get("/api/system/defaults")
def get_system_defaults():
md_templates = get_agent_md_templates()
@@ -115,31 +67,3 @@ def update_system_templates(payload: SystemTemplatesUpdateRequest):
"agent_md_templates": get_agent_md_templates(),
"topic_presets": get_topic_presets(),
}
-
-@router.get("/api/health")
-def get_health():
- try:
- with Session(engine) as session:
- session.exec(select(BotInstance).limit(1)).first()
- return {"status": "ok", "database": DATABASE_ENGINE}
- except Exception as e:
- raise HTTPException(status_code=503, detail=f"database check failed: {e}")
-
-@router.get("/api/health/cache")
-def get_cache_health():
- redis_url = str(REDIS_URL or "").strip()
- configured = bool(REDIS_ENABLED and redis_url)
- client_enabled = bool(getattr(cache, "enabled", False))
- reachable = bool(cache.ping()) if client_enabled else False
- status = "ok"
- if configured and not reachable:
- status = "degraded"
- return {
- "status": status,
- "cache": {
- "configured": configured,
- "enabled": client_enabled,
- "reachable": reachable,
- "prefix": REDIS_PREFIX,
- },
- }
diff --git a/backend/api/topic_router.py b/backend/api/topic_router.py
index fcc7421..ae9e8d9 100644
--- a/backend/api/topic_router.py
+++ b/backend/api/topic_router.py
@@ -1,42 +1,24 @@
-import json
-from datetime import datetime
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, Optional
-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends
from pydantic import BaseModel
-from sqlalchemy import func
-from sqlmodel import Session, select
+from sqlmodel import Session
from core.database import get_session
-from models.bot import BotInstance
-from models.topic import TopicItem, TopicTopic
from services.topic_service import (
- _TOPIC_KEY_RE,
- _list_topics,
- _normalize_topic_key,
- _topic_item_to_dict,
- _topic_to_dict,
+ create_topic,
+ delete_topic,
+ delete_topic_item,
+ get_topic_item_stats,
+ list_topic_items,
+ list_topics,
+ mark_topic_item_read,
+ update_topic,
)
router = APIRouter()
-def _count_topic_items(
- session: Session,
- bot_id: str,
- topic_key: Optional[str] = None,
- unread_only: bool = False,
-) -> int:
- stmt = select(func.count()).select_from(TopicItem).where(TopicItem.bot_id == bot_id)
- normalized_topic_key = _normalize_topic_key(topic_key or "")
- if normalized_topic_key:
- stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
- if unread_only:
- stmt = stmt.where(TopicItem.is_read == False) # noqa: E712
- value = session.exec(stmt).one()
- return int(value or 0)
-
-
class TopicCreateRequest(BaseModel):
topic_key: str
name: Optional[str] = None
@@ -56,112 +38,31 @@ class TopicUpdateRequest(BaseModel):
@router.get("/api/bots/{bot_id}/topics")
def list_bot_topics(bot_id: str, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- return _list_topics(session, bot_id)
+ return list_topics(session, bot_id)
@router.post("/api/bots/{bot_id}/topics")
def create_bot_topic(bot_id: str, payload: TopicCreateRequest, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- topic_key = _normalize_topic_key(payload.topic_key)
- if not topic_key:
- raise HTTPException(status_code=400, detail="topic_key is required")
- if not _TOPIC_KEY_RE.fullmatch(topic_key):
- raise HTTPException(status_code=400, detail="invalid topic_key")
- exists = session.exec(
- select(TopicTopic)
- .where(TopicTopic.bot_id == bot_id)
- .where(TopicTopic.topic_key == topic_key)
- .limit(1)
- ).first()
- if exists:
- raise HTTPException(status_code=400, detail=f"Topic already exists: {topic_key}")
-
- now = datetime.utcnow()
- row = TopicTopic(
+ return create_topic(
+ session,
bot_id=bot_id,
- topic_key=topic_key,
- name=str(payload.name or topic_key).strip() or topic_key,
- description=str(payload.description or "").strip(),
- is_active=bool(payload.is_active),
- is_default_fallback=False,
- routing_json=json.dumps(payload.routing or {}, ensure_ascii=False),
- view_schema_json=json.dumps(payload.view_schema or {}, ensure_ascii=False),
- created_at=now,
- updated_at=now,
+ topic_key=payload.topic_key,
+ name=payload.name,
+ description=payload.description,
+ is_active=payload.is_active,
+ routing=payload.routing,
+ view_schema=payload.view_schema,
)
- session.add(row)
- session.commit()
- session.refresh(row)
- return _topic_to_dict(row)
@router.put("/api/bots/{bot_id}/topics/{topic_key}")
def update_bot_topic(bot_id: str, topic_key: str, payload: TopicUpdateRequest, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- normalized_key = _normalize_topic_key(topic_key)
- if not normalized_key:
- raise HTTPException(status_code=400, detail="topic_key is required")
- row = session.exec(
- select(TopicTopic)
- .where(TopicTopic.bot_id == bot_id)
- .where(TopicTopic.topic_key == normalized_key)
- .limit(1)
- ).first()
- if not row:
- raise HTTPException(status_code=404, detail="Topic not found")
-
- update_data = payload.model_dump(exclude_unset=True)
- if "name" in update_data:
- row.name = str(update_data.get("name") or "").strip() or row.topic_key
- if "description" in update_data:
- row.description = str(update_data.get("description") or "").strip()
- if "is_active" in update_data:
- row.is_active = bool(update_data.get("is_active"))
- if "routing" in update_data:
- row.routing_json = json.dumps(update_data.get("routing") or {}, ensure_ascii=False)
- if "view_schema" in update_data:
- row.view_schema_json = json.dumps(update_data.get("view_schema") or {}, ensure_ascii=False)
- row.is_default_fallback = False
- row.updated_at = datetime.utcnow()
- session.add(row)
- session.commit()
- session.refresh(row)
- return _topic_to_dict(row)
+ return update_topic(session, bot_id=bot_id, topic_key=topic_key, updates=payload.model_dump(exclude_unset=True))
@router.delete("/api/bots/{bot_id}/topics/{topic_key}")
def delete_bot_topic(bot_id: str, topic_key: str, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- normalized_key = _normalize_topic_key(topic_key)
- if not normalized_key:
- raise HTTPException(status_code=400, detail="topic_key is required")
- row = session.exec(
- select(TopicTopic)
- .where(TopicTopic.bot_id == bot_id)
- .where(TopicTopic.topic_key == normalized_key)
- .limit(1)
- ).first()
- if not row:
- raise HTTPException(status_code=404, detail="Topic not found")
- items = session.exec(
- select(TopicItem)
- .where(TopicItem.bot_id == bot_id)
- .where(TopicItem.topic_key == normalized_key)
- ).all()
- for item in items:
- session.delete(item)
- session.delete(row)
- session.commit()
- return {"status": "deleted", "bot_id": bot_id, "topic_key": normalized_key}
+ return delete_topic(session, bot_id=bot_id, topic_key=topic_key)
@router.get("/api/bots/{bot_id}/topic-items")
@@ -172,97 +73,19 @@ def list_bot_topic_items(
limit: int = 50,
session: Session = Depends(get_session),
):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- normalized_limit = max(1, min(int(limit or 50), 100))
- stmt = select(TopicItem).where(TopicItem.bot_id == bot_id)
- normalized_topic_key = _normalize_topic_key(topic_key or "")
- if normalized_topic_key:
- stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
- if cursor is not None:
- normalized_cursor = int(cursor)
- if normalized_cursor > 0:
- stmt = stmt.where(TopicItem.id < normalized_cursor)
- rows = session.exec(
- stmt.order_by(TopicItem.id.desc()).limit(normalized_limit + 1)
- ).all()
- next_cursor: Optional[int] = None
- if len(rows) > normalized_limit:
- next_cursor = rows[-1].id
- rows = rows[:normalized_limit]
- return {
- "bot_id": bot_id,
- "topic_key": normalized_topic_key or None,
- "items": [_topic_item_to_dict(row) for row in rows],
- "next_cursor": next_cursor,
- "unread_count": _count_topic_items(session, bot_id, normalized_topic_key, unread_only=True),
- "total_unread_count": _count_topic_items(session, bot_id, unread_only=True),
- }
+ return list_topic_items(session, bot_id=bot_id, topic_key=topic_key, cursor=cursor, limit=limit)
@router.get("/api/bots/{bot_id}/topic-items/stats")
def get_bot_topic_item_stats(bot_id: str, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- latest_item = session.exec(
- select(TopicItem)
- .where(TopicItem.bot_id == bot_id)
- .order_by(TopicItem.id.desc())
- .limit(1)
- ).first()
- return {
- "bot_id": bot_id,
- "total_count": _count_topic_items(session, bot_id),
- "unread_count": _count_topic_items(session, bot_id, unread_only=True),
- "latest_item_id": int(latest_item.id or 0) if latest_item and latest_item.id else None,
- }
+ return get_topic_item_stats(session, bot_id=bot_id)
@router.post("/api/bots/{bot_id}/topic-items/{item_id}/read")
def mark_bot_topic_item_read(bot_id: str, item_id: int, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- row = session.exec(
- select(TopicItem)
- .where(TopicItem.bot_id == bot_id)
- .where(TopicItem.id == item_id)
- .limit(1)
- ).first()
- if not row:
- raise HTTPException(status_code=404, detail="Topic item not found")
- if not bool(row.is_read):
- row.is_read = True
- session.add(row)
- session.commit()
- session.refresh(row)
- return {
- "status": "updated",
- "bot_id": bot_id,
- "item": _topic_item_to_dict(row),
- }
+ return mark_topic_item_read(session, bot_id=bot_id, item_id=item_id)
@router.delete("/api/bots/{bot_id}/topic-items/{item_id}")
def delete_bot_topic_item(bot_id: str, item_id: int, session: Session = Depends(get_session)):
- bot = session.get(BotInstance, bot_id)
- if not bot:
- raise HTTPException(status_code=404, detail="Bot not found")
- row = session.exec(
- select(TopicItem)
- .where(TopicItem.bot_id == bot_id)
- .where(TopicItem.id == item_id)
- .limit(1)
- ).first()
- if not row:
- raise HTTPException(status_code=404, detail="Topic item not found")
- payload = _topic_item_to_dict(row)
- session.delete(row)
- session.commit()
- return {
- "status": "deleted",
- "bot_id": bot_id,
- "item": payload,
- }
+ return delete_topic_item(session, bot_id=bot_id, item_id=item_id)
diff --git a/backend/app_factory.py b/backend/app_factory.py
index 0cda5a8..c27a682 100644
--- a/backend/app_factory.py
+++ b/backend/app_factory.py
@@ -10,14 +10,16 @@ from api.bot_runtime_router import router as bot_runtime_router
from api.bot_speech_router import router as bot_speech_router
from api.chat_history_router import router as chat_history_router
from api.chat_router import router as chat_router
+from api.health_router import router as health_router
from api.image_router import router as image_router
+from api.panel_auth_router import router as panel_auth_router
from api.platform_router import router as platform_router
from api.skill_router import router as skill_router
from api.system_router import router as system_router
from api.topic_router import router as topic_router
from api.workspace_router import router as workspace_router
from bootstrap.app_runtime import register_app_runtime
-from core.auth_middleware import PasswordProtectionMiddleware
+from core.auth_middleware import AuthAccessMiddleware
from core.docker_instance import docker_manager
from core.settings import BOTS_WORKSPACE_ROOT, CORS_ALLOWED_ORIGINS, DATA_ROOT
from core.speech_service import WhisperSpeechService
@@ -30,7 +32,7 @@ def create_app() -> FastAPI:
app.state.docker_manager = docker_manager
app.state.speech_service = speech_service
- app.add_middleware(PasswordProtectionMiddleware)
+ app.add_middleware(AuthAccessMiddleware)
app.add_middleware(
CORSMiddleware,
allow_origins=list(CORS_ALLOWED_ORIGINS),
@@ -39,6 +41,8 @@ def create_app() -> FastAPI:
allow_credentials=True,
)
+ app.include_router(panel_auth_router)
+ app.include_router(health_router)
app.include_router(platform_router)
app.include_router(topic_router)
app.include_router(system_router)
diff --git a/backend/bootstrap/app_runtime.py b/backend/bootstrap/app_runtime.py
index 971711f..6922840 100644
--- a/backend/bootstrap/app_runtime.py
+++ b/backend/bootstrap/app_runtime.py
@@ -3,6 +3,7 @@ import asyncio
from fastapi import FastAPI
from sqlmodel import Session, select
+from core.cache import cache
from core.database import engine, init_database
from core.docker_instance import docker_manager
from core.settings import DATABASE_URL_DISPLAY, REDIS_ENABLED
@@ -12,6 +13,13 @@ from services.platform_service import prune_expired_activity_events
from services.runtime_service import docker_callback, set_main_loop
+def reload_platform_runtime(app: FastAPI) -> None:
+ cache.delete_prefix("")
+ speech_service = getattr(app.state, "speech_service", None)
+ if speech_service is not None and hasattr(speech_service, "reset_runtime"):
+ speech_service.reset_runtime()
+
+
def register_app_runtime(app: FastAPI) -> None:
@app.on_event("startup")
async def _on_startup() -> None:
diff --git a/backend/bootstrap/auth_access.py b/backend/bootstrap/auth_access.py
index 54d28ab..4d21c7d 100644
--- a/backend/bootstrap/auth_access.py
+++ b/backend/bootstrap/auth_access.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import re
from enum import Enum
from typing import Optional
@@ -12,46 +11,84 @@ class RouteAccessMode(str, Enum):
PUBLIC_BOT_OR_PANEL = "public_bot_or_panel"
-_BOT_ID_API_RE = re.compile(r"^/api/bots/([^/]+)(?:/.*)?$")
-_BOT_ID_PUBLIC_RE = re.compile(r"^/public/bots/([^/]+)(?:/.*)?$")
-_BOT_PANEL_ONLY_ROUTE_METHODS = [
- (re.compile(r"^/api/bots/[^/]+$"), {"DELETE"}),
- (re.compile(r"^/api/bots/[^/]+/(?:enable|disable|deactivate)$"), {"POST"}),
-]
-
-_PUBLIC_PATHS = {
- "/api/panel/auth/status",
- "/api/panel/auth/login",
- "/api/panel/auth/logout",
+_PUBLIC_EXACT_PATHS = {
"/api/health",
"/api/health/cache",
"/api/system/defaults",
}
-_BOT_PUBLIC_AUTH_RE = re.compile(r"^/api/bots/[^/]+/auth/(?:login|logout|status)$")
+_PANEL_AUTH_SEGMENTS = ("api", "panel", "auth")
+_BOT_PUBLIC_SEGMENTS = ("public", "bots")
+_BOT_API_SEGMENTS = ("api", "bots")
+_BOT_AUTH_SEGMENT = "auth"
+_BOT_PANEL_ONLY_ACTIONS = {"enable", "disable", "deactivate"}
+_BOT_PUBLIC_AUTH_ACTIONS = {"login", "logout", "status"}
+
+
+def _path_segments(path: str) -> list[str]:
+ raw = str(path or "").strip().strip("/")
+ if not raw:
+ return []
+ return [segment for segment in raw.split("/") if segment]
def extract_bot_id(path: str) -> Optional[str]:
- raw = str(path or "").strip()
- match = _BOT_ID_API_RE.match(raw) or _BOT_ID_PUBLIC_RE.match(raw)
- if not match or not match.group(1):
+ segments = _path_segments(path)
+ if len(segments) < 3:
return None
- return match.group(1).strip() or None
+ if tuple(segments[:2]) not in {_BOT_API_SEGMENTS, _BOT_PUBLIC_SEGMENTS}:
+ return None
+ bot_id = str(segments[2] or "").strip()
+ return bot_id or None
+
+
+def _is_panel_auth_route(segments: list[str]) -> bool:
+ return tuple(segments[:3]) == _PANEL_AUTH_SEGMENTS
+
+
+def _is_public_bot_route(segments: list[str]) -> bool:
+ return tuple(segments[:2]) == _BOT_PUBLIC_SEGMENTS and len(segments) >= 3
+
+
+def _is_bot_auth_route(segments: list[str]) -> bool:
+ return (
+ tuple(segments[:2]) == _BOT_API_SEGMENTS
+ and len(segments) >= 5
+ and segments[3] == _BOT_AUTH_SEGMENT
+ and segments[4] in _BOT_PUBLIC_AUTH_ACTIONS
+ )
+
+
+def _is_panel_only_bot_action(segments: list[str], method: str) -> bool:
+ if tuple(segments[:2]) != _BOT_API_SEGMENTS or len(segments) < 3:
+ return False
+ if len(segments) == 3 and method == "DELETE":
+ return True
+    return len(segments) == 4 and method == "POST" and segments[3] in _BOT_PANEL_ONLY_ACTIONS
+
+
+def _is_bot_scoped_api_route(segments: list[str]) -> bool:
+ return tuple(segments[:2]) == _BOT_API_SEGMENTS and len(segments) >= 3
def resolve_route_access_mode(path: str, method: str) -> RouteAccessMode:
raw_path = str(path or "").strip()
verb = str(method or "GET").strip().upper()
+ segments = _path_segments(raw_path)
- if raw_path in _PUBLIC_PATHS or _BOT_PUBLIC_AUTH_RE.fullmatch(raw_path):
+ if raw_path in _PUBLIC_EXACT_PATHS:
return RouteAccessMode.PUBLIC
- if raw_path.startswith("/public/bots/"):
+ if _is_panel_auth_route(segments) or _is_bot_auth_route(segments):
+ return RouteAccessMode.PUBLIC
+
+ if _is_public_bot_route(segments):
return RouteAccessMode.PUBLIC_BOT_OR_PANEL
- if _BOT_ID_API_RE.fullmatch(raw_path):
- if any(pattern.fullmatch(raw_path) and verb in methods for pattern, methods in _BOT_PANEL_ONLY_ROUTE_METHODS):
- return RouteAccessMode.PANEL_ONLY
+ if _is_panel_only_bot_action(segments, verb):
+ return RouteAccessMode.PANEL_ONLY
+
+ if _is_bot_scoped_api_route(segments):
return RouteAccessMode.BOT_OR_PANEL
if raw_path.startswith("/api/"):
diff --git a/backend/core/auth_middleware.py b/backend/core/auth_middleware.py
index 72c6d55..f1c7701 100644
--- a/backend/core/auth_middleware.py
+++ b/backend/core/auth_middleware.py
@@ -17,7 +17,7 @@ def _unauthorized(detail: str) -> JSONResponse:
return JSONResponse(status_code=401, content={"detail": detail})
-class PasswordProtectionMiddleware(BaseHTTPMiddleware):
+class AuthAccessMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next):
if request.method.upper() == "OPTIONS":
return await call_next(request)
diff --git a/backend/services/bot_channel_service.py b/backend/services/bot_channel_service.py
deleted file mode 100644
index 6423ab8..0000000
--- a/backend/services/bot_channel_service.py
+++ /dev/null
@@ -1,366 +0,0 @@
-from pathlib import Path
-from typing import Any, Dict, List, Optional
-
-from sqlmodel import Session
-
-from core.config_manager import BotConfigManager
-from core.settings import BOTS_WORKSPACE_ROOT
-from models.bot import BotInstance
-from schemas.bot import ChannelConfigRequest
-from services.bot_storage_service import (
- _normalize_resource_limits,
- _read_bot_config,
- _write_bot_resources,
-)
-from services.template_service import get_agent_md_templates
-
-config_manager = BotConfigManager(host_data_root=BOTS_WORKSPACE_ROOT)
-
-
-def _normalize_channel_extra(raw: Any) -> Dict[str, Any]:
- if not isinstance(raw, dict):
- return {}
- return raw
-
-
-def _normalize_allow_from(raw: Any) -> List[str]:
- rows: List[str] = []
- if isinstance(raw, list):
- for item in raw:
- text = str(item or "").strip()
- if text and text not in rows:
- rows.append(text)
- return rows or ["*"]
-
-
-def _read_global_delivery_flags(channels_cfg: Any) -> tuple[bool, bool]:
- if not isinstance(channels_cfg, dict):
- return False, False
- send_progress = channels_cfg.get("sendProgress")
- send_tool_hints = channels_cfg.get("sendToolHints")
- dashboard_cfg = channels_cfg.get("dashboard")
- if isinstance(dashboard_cfg, dict):
- if send_progress is None and "sendProgress" in dashboard_cfg:
- send_progress = dashboard_cfg.get("sendProgress")
- if send_tool_hints is None and "sendToolHints" in dashboard_cfg:
- send_tool_hints = dashboard_cfg.get("sendToolHints")
- return bool(send_progress), bool(send_tool_hints)
-
-
-def _channel_cfg_to_api_dict(bot_id: str, ctype: str, cfg: Dict[str, Any]) -> Dict[str, Any]:
- ctype = str(ctype or "").strip().lower()
- enabled = bool(cfg.get("enabled", True))
- port = max(1, min(int(cfg.get("port", 8080) or 8080), 65535))
- extra: Dict[str, Any] = {}
- external_app_id = ""
- app_secret = ""
-
- if ctype == "feishu":
- external_app_id = str(cfg.get("appId") or "")
- app_secret = str(cfg.get("appSecret") or "")
- extra = {
- "encryptKey": cfg.get("encryptKey", ""),
- "verificationToken": cfg.get("verificationToken", ""),
- "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
- }
- elif ctype == "dingtalk":
- external_app_id = str(cfg.get("clientId") or "")
- app_secret = str(cfg.get("clientSecret") or "")
- extra = {"allowFrom": _normalize_allow_from(cfg.get("allowFrom", []))}
- elif ctype == "telegram":
- app_secret = str(cfg.get("token") or "")
- extra = {
- "proxy": cfg.get("proxy", ""),
- "replyToMessage": bool(cfg.get("replyToMessage", False)),
- "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
- }
- elif ctype == "slack":
- external_app_id = str(cfg.get("botToken") or "")
- app_secret = str(cfg.get("appToken") or "")
- extra = {
- "mode": cfg.get("mode", "socket"),
- "replyInThread": bool(cfg.get("replyInThread", True)),
- "groupPolicy": cfg.get("groupPolicy", "mention"),
- "groupAllowFrom": cfg.get("groupAllowFrom", []),
- "reactEmoji": cfg.get("reactEmoji", "eyes"),
- }
- elif ctype == "qq":
- external_app_id = str(cfg.get("appId") or "")
- app_secret = str(cfg.get("secret") or "")
- extra = {"allowFrom": _normalize_allow_from(cfg.get("allowFrom", []))}
- elif ctype == "weixin":
- app_secret = ""
- extra = {
- "hasSavedState": (Path(BOTS_WORKSPACE_ROOT) / bot_id / ".nanobot" / "weixin" / "account.json").is_file(),
- }
- elif ctype == "email":
- extra = {
- "consentGranted": bool(cfg.get("consentGranted", False)),
- "imapHost": str(cfg.get("imapHost") or ""),
- "imapPort": int(cfg.get("imapPort") or 993),
- "imapUsername": str(cfg.get("imapUsername") or ""),
- "imapPassword": str(cfg.get("imapPassword") or ""),
- "imapMailbox": str(cfg.get("imapMailbox") or "INBOX"),
- "imapUseSsl": bool(cfg.get("imapUseSsl", True)),
- "smtpHost": str(cfg.get("smtpHost") or ""),
- "smtpPort": int(cfg.get("smtpPort") or 587),
- "smtpUsername": str(cfg.get("smtpUsername") or ""),
- "smtpPassword": str(cfg.get("smtpPassword") or ""),
- "smtpUseTls": bool(cfg.get("smtpUseTls", True)),
- "smtpUseSsl": bool(cfg.get("smtpUseSsl", False)),
- "fromAddress": str(cfg.get("fromAddress") or ""),
- "autoReplyEnabled": bool(cfg.get("autoReplyEnabled", True)),
- "pollIntervalSeconds": int(cfg.get("pollIntervalSeconds") or 30),
- "markSeen": bool(cfg.get("markSeen", True)),
- "maxBodyChars": int(cfg.get("maxBodyChars") or 12000),
- "subjectPrefix": str(cfg.get("subjectPrefix") or "Re: "),
- "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
- }
- else:
- external_app_id = str(cfg.get("appId") or cfg.get("clientId") or cfg.get("botToken") or cfg.get("externalAppId") or "")
- app_secret = str(cfg.get("appSecret") or cfg.get("clientSecret") or cfg.get("secret") or cfg.get("token") or cfg.get("appToken") or "")
- extra = {
- key: value
- for key, value in cfg.items()
- if key not in {"enabled", "port", "appId", "clientId", "botToken", "externalAppId", "appSecret", "clientSecret", "secret", "token", "appToken"}
- }
-
- return {
- "id": ctype,
- "bot_id": bot_id,
- "channel_type": ctype,
- "external_app_id": external_app_id,
- "app_secret": app_secret,
- "internal_port": port,
- "is_active": enabled,
- "extra_config": extra,
- "locked": ctype == "dashboard",
- }
-
-
-def _channel_api_to_cfg(row: Dict[str, Any]) -> Dict[str, Any]:
- ctype = str(row.get("channel_type") or "").strip().lower()
- enabled = bool(row.get("is_active", True))
- extra = _normalize_channel_extra(row.get("extra_config"))
- external_app_id = str(row.get("external_app_id") or "")
- app_secret = str(row.get("app_secret") or "")
- port = max(1, min(int(row.get("internal_port") or 8080), 65535))
-
- if ctype == "feishu":
- return {
- "enabled": enabled,
- "appId": external_app_id,
- "appSecret": app_secret,
- "encryptKey": extra.get("encryptKey", ""),
- "verificationToken": extra.get("verificationToken", ""),
- "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
- }
- if ctype == "dingtalk":
- return {
- "enabled": enabled,
- "clientId": external_app_id,
- "clientSecret": app_secret,
- "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
- }
- if ctype == "telegram":
- return {
- "enabled": enabled,
- "token": app_secret,
- "proxy": extra.get("proxy", ""),
- "replyToMessage": bool(extra.get("replyToMessage", False)),
- "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
- }
- if ctype == "slack":
- return {
- "enabled": enabled,
- "mode": extra.get("mode", "socket"),
- "botToken": external_app_id,
- "appToken": app_secret,
- "replyInThread": bool(extra.get("replyInThread", True)),
- "groupPolicy": extra.get("groupPolicy", "mention"),
- "groupAllowFrom": extra.get("groupAllowFrom", []),
- "reactEmoji": extra.get("reactEmoji", "eyes"),
- }
- if ctype == "qq":
- return {
- "enabled": enabled,
- "appId": external_app_id,
- "secret": app_secret,
- "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
- }
- if ctype == "weixin":
- return {
- "enabled": enabled,
- "token": app_secret,
- }
- if ctype == "email":
- return {
- "enabled": enabled,
- "consentGranted": bool(extra.get("consentGranted", False)),
- "imapHost": str(extra.get("imapHost") or ""),
- "imapPort": max(1, min(int(extra.get("imapPort") or 993), 65535)),
- "imapUsername": str(extra.get("imapUsername") or ""),
- "imapPassword": str(extra.get("imapPassword") or ""),
- "imapMailbox": str(extra.get("imapMailbox") or "INBOX"),
- "imapUseSsl": bool(extra.get("imapUseSsl", True)),
- "smtpHost": str(extra.get("smtpHost") or ""),
- "smtpPort": max(1, min(int(extra.get("smtpPort") or 587), 65535)),
- "smtpUsername": str(extra.get("smtpUsername") or ""),
- "smtpPassword": str(extra.get("smtpPassword") or ""),
- "smtpUseTls": bool(extra.get("smtpUseTls", True)),
- "smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
- "fromAddress": str(extra.get("fromAddress") or ""),
- "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
- "pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds") or 30)),
- "markSeen": bool(extra.get("markSeen", True)),
- "maxBodyChars": max(1, int(extra.get("maxBodyChars") or 12000)),
- "subjectPrefix": str(extra.get("subjectPrefix") or "Re: "),
- "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
- }
- merged = dict(extra)
- merged.update(
- {
- "enabled": enabled,
- "appId": external_app_id,
- "appSecret": app_secret,
- "port": port,
- }
- )
- return merged
-
-
-def _get_bot_channels_from_config(bot: BotInstance) -> List[Dict[str, Any]]:
- config_data = _read_bot_config(bot.id)
- channels_cfg = config_data.get("channels")
- if not isinstance(channels_cfg, dict):
- channels_cfg = {}
- send_progress, send_tool_hints = _read_global_delivery_flags(channels_cfg)
- rows: List[Dict[str, Any]] = [
- {
- "id": "dashboard",
- "bot_id": bot.id,
- "channel_type": "dashboard",
- "external_app_id": f"dashboard-{bot.id}",
- "app_secret": "",
- "internal_port": 9000,
- "is_active": True,
- "extra_config": {
- "sendProgress": send_progress,
- "sendToolHints": send_tool_hints,
- },
- "locked": True,
- }
- ]
- for ctype, cfg in channels_cfg.items():
- if ctype in {"sendProgress", "sendToolHints", "dashboard"} or not isinstance(cfg, dict):
- continue
- rows.append(_channel_cfg_to_api_dict(bot.id, ctype, cfg))
- return rows
-
-
-def _normalize_initial_channels(bot_id: str, channels: Optional[List[ChannelConfigRequest]]) -> List[Dict[str, Any]]:
- rows: List[Dict[str, Any]] = []
- seen_types: set[str] = set()
- for channel in channels or []:
- ctype = (channel.channel_type or "").strip().lower()
- if not ctype or ctype == "dashboard" or ctype in seen_types:
- continue
- seen_types.add(ctype)
- rows.append(
- {
- "id": ctype,
- "bot_id": bot_id,
- "channel_type": ctype,
- "external_app_id": (channel.external_app_id or "").strip() or f"{ctype}-{bot_id}",
- "app_secret": (channel.app_secret or "").strip(),
- "internal_port": max(1, min(int(channel.internal_port or 8080), 65535)),
- "is_active": bool(channel.is_active),
- "extra_config": _normalize_channel_extra(channel.extra_config),
- "locked": False,
- }
- )
- return rows
-
-
-def _sync_workspace_channels_impl(
- session: Session,
- bot_id: str,
- snapshot: Dict[str, Any],
- *,
- channels_override: Optional[List[Dict[str, Any]]] = None,
- global_delivery_override: Optional[Dict[str, Any]] = None,
- runtime_overrides: Optional[Dict[str, Any]] = None,
-) -> None:
- bot = session.get(BotInstance, bot_id)
- if not bot:
- return
- template_defaults = get_agent_md_templates()
- bot_data: Dict[str, Any] = {
- "name": bot.name,
- "system_prompt": snapshot.get("system_prompt") or template_defaults.get("soul_md", ""),
- "soul_md": snapshot.get("soul_md") or template_defaults.get("soul_md", ""),
- "agents_md": snapshot.get("agents_md") or template_defaults.get("agents_md", ""),
- "user_md": snapshot.get("user_md") or template_defaults.get("user_md", ""),
- "tools_md": snapshot.get("tools_md") or template_defaults.get("tools_md", ""),
- "identity_md": snapshot.get("identity_md") or template_defaults.get("identity_md", ""),
- "llm_provider": snapshot.get("llm_provider") or "",
- "llm_model": snapshot.get("llm_model") or "",
- "api_key": snapshot.get("api_key") or "",
- "api_base": snapshot.get("api_base") or "",
- "temperature": snapshot.get("temperature"),
- "top_p": snapshot.get("top_p"),
- "max_tokens": snapshot.get("max_tokens"),
- "cpu_cores": snapshot.get("cpu_cores"),
- "memory_mb": snapshot.get("memory_mb"),
- "storage_gb": snapshot.get("storage_gb"),
- "send_progress": bool(snapshot.get("send_progress")),
- "send_tool_hints": bool(snapshot.get("send_tool_hints")),
- }
- if isinstance(runtime_overrides, dict):
- for key, value in runtime_overrides.items():
- if key in {"api_key", "llm_provider", "llm_model"}:
- text = str(value or "").strip()
- if not text:
- continue
- bot_data[key] = text
- continue
- if key == "api_base":
- bot_data[key] = str(value or "").strip()
- continue
- bot_data[key] = value
-
- resources = _normalize_resource_limits(
- bot_data.get("cpu_cores"),
- bot_data.get("memory_mb"),
- bot_data.get("storage_gb"),
- )
- bot_data.update(resources)
- send_progress = bool(bot_data.get("send_progress", False))
- send_tool_hints = bool(bot_data.get("send_tool_hints", False))
- if isinstance(global_delivery_override, dict):
- if "sendProgress" in global_delivery_override:
- send_progress = bool(global_delivery_override.get("sendProgress"))
- if "sendToolHints" in global_delivery_override:
- send_tool_hints = bool(global_delivery_override.get("sendToolHints"))
-
- channels_data = channels_override if channels_override is not None else _get_bot_channels_from_config(bot)
- bot_data["send_progress"] = send_progress
- bot_data["send_tool_hints"] = send_tool_hints
- normalized_channels: List[Dict[str, Any]] = []
- for row in channels_data:
- ctype = str(row.get("channel_type") or "").strip().lower()
- if not ctype or ctype == "dashboard":
- continue
- normalized_channels.append(
- {
- "channel_type": ctype,
- "external_app_id": str(row.get("external_app_id") or ""),
- "app_secret": str(row.get("app_secret") or ""),
- "internal_port": max(1, min(int(row.get("internal_port") or 8080), 65535)),
- "is_active": bool(row.get("is_active", True)),
- "extra_config": _normalize_channel_extra(row.get("extra_config")),
- }
- )
-
- config_manager.update_workspace(bot_id=bot_id, bot_data=bot_data, channels=normalized_channels)
- _write_bot_resources(bot_id, bot_data.get("cpu_cores"), bot_data.get("memory_mb"), bot_data.get("storage_gb"))
diff --git a/backend/services/bot_config_service.py b/backend/services/bot_config_service.py
index de6b3b7..e08aafe 100644
--- a/backend/services/bot_config_service.py
+++ b/backend/services/bot_config_service.py
@@ -1,11 +1,12 @@
+import os
from datetime import datetime
-from typing import Any, Dict
+from typing import Any, Dict, Optional
from fastapi import HTTPException
from sqlmodel import Session
from core.docker_instance import docker_manager
-from core.utils import _calc_dir_size_bytes
+from core.settings import BOTS_WORKSPACE_ROOT
from models.bot import BotInstance
from schemas.bot import (
BotEnvParamsUpdateRequest,
@@ -13,28 +14,30 @@ from schemas.bot import (
ChannelConfigRequest,
ChannelConfigUpdateRequest,
)
-from services.bot_channel_service import (
- _channel_api_to_cfg,
- _get_bot_channels_from_config,
- _normalize_channel_extra,
- _read_global_delivery_flags,
+from services.bot_service import (
+ channel_api_to_config,
+ list_bot_channels_from_config,
+ normalize_channel_extra,
+ read_global_delivery_flags,
+ sync_bot_workspace_channels,
)
-from services.bot_service import _sync_workspace_channels
from services.bot_mcp_service import (
_merge_mcp_servers_preserving_extras,
_normalize_mcp_servers,
)
from services.bot_storage_service import (
- _normalize_env_params,
- _read_bot_config,
- _read_bot_resources,
- _read_env_store,
- _workspace_root,
- _write_bot_config,
- _write_env_store,
+ get_bot_resource_limits,
+ get_bot_workspace_snapshot,
+ normalize_bot_env_params,
+ read_bot_config_data,
+ read_bot_env_params,
+ write_bot_config_data,
+ write_bot_env_params,
)
from services.cache_service import _invalidate_bot_detail_cache
+MANAGED_WORKSPACE_FILENAMES = ("AGENTS.md", "SOUL.md", "USER.md", "TOOLS.md", "IDENTITY.md")
+
def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:
bot = session.get(BotInstance, bot_id)
@@ -43,14 +46,103 @@ def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:
return bot
+def _read_bot_config_object(bot_id: str) -> Dict[str, Any]:
+ config_data = read_bot_config_data(bot_id)
+ return config_data if isinstance(config_data, dict) else {}
+
+
+def _read_bot_tools_cfg(bot_id: str) -> tuple[Dict[str, Any], Dict[str, Any]]:
+ config_data = _read_bot_config_object(bot_id)
+ tools_cfg = config_data.get("tools")
+ if not isinstance(tools_cfg, dict):
+ tools_cfg = {}
+ config_data["tools"] = tools_cfg
+ return config_data, tools_cfg
+
+
+def _read_bot_channels_cfg(bot_id: str) -> tuple[Dict[str, Any], Dict[str, Any]]:
+ config_data = _read_bot_config_object(bot_id)
+ channels_cfg = config_data.get("channels")
+ if not isinstance(channels_cfg, dict):
+ channels_cfg = {}
+ config_data["channels"] = channels_cfg
+ return config_data, channels_cfg
+
+
+def _managed_bot_file_paths(bot_id: str) -> Dict[str, str]:
+ bot_root = os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot")
+ workspace_root = os.path.join(bot_root, "workspace")
+ paths = {
+ "config": os.path.join(bot_root, "config.json"),
+ "resources": os.path.join(bot_root, "resources.json"),
+ }
+ for filename in MANAGED_WORKSPACE_FILENAMES:
+ paths[f"workspace:{filename}"] = os.path.join(workspace_root, filename)
+ return paths
+
+
+def _snapshot_managed_bot_files(bot_id: str) -> Dict[str, Optional[bytes]]:
+ snapshot: Dict[str, Optional[bytes]] = {}
+ for key, path in _managed_bot_file_paths(bot_id).items():
+ if os.path.isfile(path):
+ with open(path, "rb") as file:
+ snapshot[key] = file.read()
+ else:
+ snapshot[key] = None
+ return snapshot
+
+
+def _restore_managed_bot_files(bot_id: str, snapshot: Dict[str, Optional[bytes]]) -> None:
+ for key, path in _managed_bot_file_paths(bot_id).items():
+ payload = snapshot.get(key)
+ if payload is None:
+ if os.path.exists(path):
+ os.remove(path)
+ continue
+ os.makedirs(os.path.dirname(path), exist_ok=True)
+ tmp_path = f"{path}.tmp"
+ with open(tmp_path, "wb") as file:
+ file.write(payload)
+ os.replace(tmp_path, path)
+
+
+def _write_bot_config_state(
+ session: Session,
+ *,
+ bot_id: str,
+ config_data: Dict[str, Any],
+ sync_workspace: bool = False,
+) -> None:
+ managed_file_snapshot = _snapshot_managed_bot_files(bot_id) if sync_workspace else None
+ try:
+ write_bot_config_data(bot_id, config_data)
+ if sync_workspace:
+ sync_bot_workspace_channels(session, bot_id)
+ except Exception:
+ if managed_file_snapshot is not None:
+ _restore_managed_bot_files(bot_id, managed_file_snapshot)
+ session.rollback()
+ raise
+ _invalidate_bot_detail_cache(bot_id)
+
+
+def _find_channel_row(rows: list[Dict[str, Any]], channel_id: str) -> Dict[str, Any]:
+ channel_key = str(channel_id or "").strip().lower()
+ row = next((item for item in rows if str(item.get("id") or "").lower() == channel_key), None)
+ if not row:
+ raise HTTPException(status_code=404, detail="Channel not found")
+ return row
+
+
def get_bot_resources_snapshot(session: Session, *, bot_id: str) -> Dict[str, Any]:
bot = _get_bot_or_404(session, bot_id)
- configured = _read_bot_resources(bot_id)
+ configured = get_bot_resource_limits(bot_id)
runtime = docker_manager.get_bot_resource_snapshot(bot_id)
- workspace_root = _workspace_root(bot_id)
- workspace_bytes = _calc_dir_size_bytes(workspace_root)
- configured_storage_bytes = int(configured.get("storage_gb", 0) or 0) * 1024 * 1024 * 1024
+ workspace = get_bot_workspace_snapshot(bot_id)
+ workspace_root = str(workspace.get("path") or "")
+ workspace_bytes = int(workspace.get("usage_bytes") or 0)
+ configured_storage_bytes = int(workspace.get("configured_limit_bytes") or 0)
workspace_percent = 0.0
if configured_storage_bytes > 0:
workspace_percent = (workspace_bytes / configured_storage_bytes) * 100.0
@@ -86,7 +178,7 @@ def get_bot_resources_snapshot(session: Session, *, bot_id: str) -> Dict[str, An
def list_bot_channels_config(session: Session, *, bot_id: str):
bot = _get_bot_or_404(session, bot_id)
- return _get_bot_channels_from_config(bot)
+ return list_bot_channels_from_config(bot)
def get_bot_tools_config_state(session: Session, *, bot_id: str) -> Dict[str, Any]:
@@ -114,10 +206,7 @@ def reject_bot_tools_config_update(
def get_bot_mcp_config_state(session: Session, *, bot_id: str) -> Dict[str, Any]:
_get_bot_or_404(session, bot_id)
- config_data = _read_bot_config(bot_id)
- tools_cfg = config_data.get("tools") if isinstance(config_data, dict) else {}
- if not isinstance(tools_cfg, dict):
- tools_cfg = {}
+ _config_data, tools_cfg = _read_bot_tools_cfg(bot_id)
mcp_servers = _normalize_mcp_servers(tools_cfg.get("mcpServers"))
return {
"bot_id": bot_id,
@@ -134,20 +223,13 @@ def update_bot_mcp_config_state(
payload: BotMcpConfigUpdateRequest,
) -> Dict[str, Any]:
_get_bot_or_404(session, bot_id)
- config_data = _read_bot_config(bot_id)
- if not isinstance(config_data, dict):
- config_data = {}
- tools_cfg = config_data.get("tools")
- if not isinstance(tools_cfg, dict):
- tools_cfg = {}
+ config_data, tools_cfg = _read_bot_tools_cfg(bot_id)
normalized_mcp_servers = _normalize_mcp_servers(payload.mcp_servers or {})
current_mcp_servers = tools_cfg.get("mcpServers")
merged_mcp_servers = _merge_mcp_servers_preserving_extras(current_mcp_servers, normalized_mcp_servers)
tools_cfg["mcpServers"] = merged_mcp_servers
- config_data["tools"] = tools_cfg
sanitized_after_save = _normalize_mcp_servers(tools_cfg.get("mcpServers"))
- _write_bot_config(bot_id, config_data)
- _invalidate_bot_detail_cache(bot_id)
+ _write_bot_config_state(session, bot_id=bot_id, config_data=config_data)
return {
"status": "updated",
"bot_id": bot_id,
@@ -161,7 +243,7 @@ def get_bot_env_params_state(session: Session, *, bot_id: str) -> Dict[str, Any]
_get_bot_or_404(session, bot_id)
return {
"bot_id": bot_id,
- "env_params": _read_env_store(bot_id),
+ "env_params": read_bot_env_params(bot_id),
}
@@ -172,8 +254,8 @@ def update_bot_env_params_state(
payload: BotEnvParamsUpdateRequest,
) -> Dict[str, Any]:
_get_bot_or_404(session, bot_id)
- normalized = _normalize_env_params(payload.env_params)
- _write_env_store(bot_id, normalized)
+ normalized = normalize_bot_env_params(payload.env_params)
+ write_bot_env_params(bot_id, normalized)
_invalidate_bot_detail_cache(bot_id)
return {
"status": "updated",
@@ -196,7 +278,7 @@ def create_bot_channel_config(
raise HTTPException(status_code=400, detail="channel_type is required")
if ctype == "dashboard":
raise HTTPException(status_code=400, detail="dashboard channel is built-in and cannot be created manually")
- current_rows = _get_bot_channels_from_config(bot)
+ current_rows = list_bot_channels_from_config(bot)
if any(str(row.get("channel_type") or "").lower() == ctype for row in current_rows):
raise HTTPException(status_code=400, detail=f"Channel already exists: {ctype}")
@@ -208,19 +290,13 @@ def create_bot_channel_config(
"app_secret": (payload.app_secret or "").strip(),
"internal_port": max(1, min(int(payload.internal_port or 8080), 65535)),
"is_active": bool(payload.is_active),
- "extra_config": _normalize_channel_extra(payload.extra_config),
+ "extra_config": normalize_channel_extra(payload.extra_config),
"locked": False,
}
- config_data = _read_bot_config(bot_id)
- channels_cfg = config_data.get("channels")
- if not isinstance(channels_cfg, dict):
- channels_cfg = {}
- config_data["channels"] = channels_cfg
- channels_cfg[ctype] = _channel_api_to_cfg(new_row)
- _write_bot_config(bot_id, config_data)
- _sync_workspace_channels(session, bot_id)
- _invalidate_bot_detail_cache(bot_id)
+ config_data, channels_cfg = _read_bot_channels_cfg(bot_id)
+ channels_cfg[ctype] = channel_api_to_config(new_row)
+ _write_bot_config_state(session, bot_id=bot_id, config_data=config_data, sync_workspace=True)
return new_row
@@ -233,11 +309,8 @@ def update_bot_channel_config(
) -> Dict[str, Any]:
bot = _get_bot_or_404(session, bot_id)
- channel_key = str(channel_id or "").strip().lower()
- rows = _get_bot_channels_from_config(bot)
- row = next((r for r in rows if str(r.get("id") or "").lower() == channel_key), None)
- if not row:
- raise HTTPException(status_code=404, detail="Channel not found")
+ rows = list_bot_channels_from_config(bot)
+ row = _find_channel_row(rows, channel_id)
if str(row.get("channel_type") or "").strip().lower() == "dashboard" or bool(row.get("locked")):
raise HTTPException(status_code=400, detail="dashboard channel is built-in and cannot be modified")
@@ -265,19 +338,15 @@ def update_bot_channel_config(
raise HTTPException(status_code=400, detail="dashboard channel must remain enabled")
row["is_active"] = next_active
if "extra_config" in update_data:
- row["extra_config"] = _normalize_channel_extra(update_data.get("extra_config"))
+ row["extra_config"] = normalize_channel_extra(update_data.get("extra_config"))
row["channel_type"] = new_type
row["id"] = new_type
row["locked"] = new_type == "dashboard"
- config_data = _read_bot_config(bot_id)
- channels_cfg = config_data.get("channels")
- if not isinstance(channels_cfg, dict):
- channels_cfg = {}
- config_data["channels"] = channels_cfg
- current_send_progress, current_send_tool_hints = _read_global_delivery_flags(channels_cfg)
+ config_data, channels_cfg = _read_bot_channels_cfg(bot_id)
+ current_send_progress, current_send_tool_hints = read_global_delivery_flags(channels_cfg)
if new_type == "dashboard":
- extra = _normalize_channel_extra(row.get("extra_config"))
+ extra = normalize_channel_extra(row.get("extra_config"))
channels_cfg["sendProgress"] = bool(extra.get("sendProgress", current_send_progress))
channels_cfg["sendToolHints"] = bool(extra.get("sendToolHints", current_send_tool_hints))
else:
@@ -287,11 +356,8 @@ def update_bot_channel_config(
if existing_type != "dashboard" and existing_type in channels_cfg and existing_type != new_type:
channels_cfg.pop(existing_type, None)
if new_type != "dashboard":
- channels_cfg[new_type] = _channel_api_to_cfg(row)
- _write_bot_config(bot_id, config_data)
- session.commit()
- _sync_workspace_channels(session, bot_id)
- _invalidate_bot_detail_cache(bot_id)
+ channels_cfg[new_type] = channel_api_to_config(row)
+ _write_bot_config_state(session, bot_id=bot_id, config_data=config_data, sync_workspace=True)
return row
@@ -303,22 +369,12 @@ def delete_bot_channel_config(
) -> Dict[str, Any]:
bot = _get_bot_or_404(session, bot_id)
- channel_key = str(channel_id or "").strip().lower()
- rows = _get_bot_channels_from_config(bot)
- row = next((r for r in rows if str(r.get("id") or "").lower() == channel_key), None)
- if not row:
- raise HTTPException(status_code=404, detail="Channel not found")
+ rows = list_bot_channels_from_config(bot)
+ row = _find_channel_row(rows, channel_id)
if str(row.get("channel_type") or "").lower() == "dashboard":
raise HTTPException(status_code=400, detail="dashboard channel cannot be deleted")
- config_data = _read_bot_config(bot_id)
- channels_cfg = config_data.get("channels")
- if not isinstance(channels_cfg, dict):
- channels_cfg = {}
- config_data["channels"] = channels_cfg
+ config_data, channels_cfg = _read_bot_channels_cfg(bot_id)
channels_cfg.pop(str(row.get("channel_type") or "").lower(), None)
- _write_bot_config(bot_id, config_data)
- session.commit()
- _sync_workspace_channels(session, bot_id)
- _invalidate_bot_detail_cache(bot_id)
+ _write_bot_config_state(session, bot_id=bot_id, config_data=config_data, sync_workspace=True)
return {"status": "deleted"}
diff --git a/backend/services/bot_lifecycle_service.py b/backend/services/bot_lifecycle_service.py
index 0f538d3..d1f4a90 100644
--- a/backend/services/bot_lifecycle_service.py
+++ b/backend/services/bot_lifecycle_service.py
@@ -12,16 +12,16 @@ from models.platform import BotActivityEvent, BotRequestUsage
from models.skill import BotSkillInstall
from models.topic import TopicItem, TopicTopic
from services.bot_service import (
- _read_bot_runtime_snapshot,
- _resolve_bot_env_params,
_safe_float,
_safe_int,
- _sync_workspace_channels,
+ read_bot_runtime_snapshot,
+ resolve_bot_runtime_env_params,
+ sync_bot_workspace_channels,
)
-from services.bot_storage_service import _write_env_store
+from services.bot_storage_service import write_bot_env_params
from services.cache_service import _invalidate_bot_detail_cache, _invalidate_bot_messages_cache
from services.platform_service import record_activity_event
-from services.runtime_service import _record_agent_loop_ready_warning, docker_callback
+from services.runtime_service import docker_callback, record_agent_loop_ready_warning
def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:
@@ -36,10 +36,10 @@ async def start_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
if not bool(getattr(bot, "enabled", True)):
raise PermissionError("Bot is disabled. Enable it first.")
- _sync_workspace_channels(session, bot_id)
- runtime_snapshot = _read_bot_runtime_snapshot(bot)
- env_params = _resolve_bot_env_params(bot_id)
- _write_env_store(bot_id, env_params)
+ sync_bot_workspace_channels(session, bot_id)
+ runtime_snapshot = read_bot_runtime_snapshot(bot)
+ env_params = resolve_bot_runtime_env_params(bot_id)
+ write_bot_env_params(bot_id, env_params)
success = docker_manager.start_bot(
bot_id,
image_tag=bot.image_tag,
@@ -63,7 +63,7 @@ async def start_bot_instance(session: Session, bot_id: str) -> Dict[str, Any]:
_invalidate_bot_detail_cache(bot_id)
raise RuntimeError("Bot container failed shortly after startup. Check bot logs/config.")
- asyncio.create_task(_record_agent_loop_ready_warning(bot_id))
+ asyncio.create_task(record_agent_loop_ready_warning(bot_id))
session.add(bot)
record_activity_event(session, bot_id, "bot_started", channel="system", detail=f"Container started for {bot_id}")
session.commit()
diff --git a/backend/services/bot_management_service.py b/backend/services/bot_management_service.py
index 3d6cdc7..8273025 100644
--- a/backend/services/bot_management_service.py
+++ b/backend/services/bot_management_service.py
@@ -3,7 +3,6 @@ import re
import shutil
from typing import Any, Dict, List, Optional
-import httpx
from fastapi import HTTPException
from sqlmodel import Session, select
@@ -13,19 +12,21 @@ from core.settings import BOTS_WORKSPACE_ROOT
from models.bot import BotInstance, NanobotImage
from schemas.bot import BotCreateRequest, BotUpdateRequest
from services.bot_service import (
- _normalize_env_params,
- _normalize_initial_channels,
- _normalize_resource_limits,
- _normalize_system_timezone,
- _provider_defaults,
- _resolve_bot_env_params,
- _serialize_bot,
- _serialize_bot_list_item,
- _sync_workspace_channels,
+ normalize_initial_bot_channels,
+ normalize_bot_system_timezone,
+ resolve_bot_runtime_env_params,
+ serialize_bot_detail,
+ serialize_bot_list_entry,
+ sync_bot_workspace_channels,
+)
+from services.bot_storage_service import (
+ normalize_bot_env_params,
+ normalize_bot_resource_limits,
+ write_bot_env_params,
)
-from services.bot_storage_service import _write_env_store
from services.cache_service import _cache_key_bot_detail, _cache_key_bots_list, _invalidate_bot_detail_cache
from services.platform_service import record_activity_event
+from services.provider_service import get_provider_defaults
from services.template_service import get_agent_md_templates
BOT_ID_PATTERN = re.compile(r"^[A-Za-z0-9_]+$")
@@ -76,60 +77,6 @@ def _cleanup_bot_workspace_root(bot_id: str) -> None:
shutil.rmtree(bot_root, ignore_errors=True)
-async def test_provider_connection(payload: Dict[str, Any]) -> Dict[str, Any]:
- provider = (payload.get("provider") or "").strip()
- api_key = (payload.get("api_key") or "").strip()
- model = (payload.get("model") or "").strip()
- api_base = (payload.get("api_base") or "").strip()
-
- if not provider or not api_key:
- raise HTTPException(status_code=400, detail="provider and api_key are required")
-
- normalized_provider, default_base = _provider_defaults(provider)
- base = (api_base or default_base).rstrip("/")
- if normalized_provider not in {"openrouter", "dashscope", "kimi", "minimax", "openai", "deepseek"}:
- raise HTTPException(status_code=400, detail=f"provider not supported for test: {provider}")
- if not base:
- raise HTTPException(status_code=400, detail=f"api_base is required for provider: {provider}")
-
- headers = {"Authorization": f"Bearer {api_key}"}
- timeout = httpx.Timeout(20.0, connect=10.0)
- url = f"{base}/models"
- try:
- async with httpx.AsyncClient(timeout=timeout) as client:
- response = await client.get(url, headers=headers)
- if response.status_code >= 400:
- return {
- "ok": False,
- "provider": normalized_provider,
- "status_code": response.status_code,
- "detail": response.text[:500],
- }
- data = response.json()
- models_raw = data.get("data", []) if isinstance(data, dict) else []
- model_ids: List[str] = [
- str(item["id"]) for item in models_raw[:20] if isinstance(item, dict) and item.get("id")
- ]
- return {
- "ok": True,
- "provider": normalized_provider,
- "endpoint": url,
- "models_preview": model_ids[:8],
- "model_hint": (
- "model_found"
- if model and any(model in item for item in model_ids)
- else ("model_not_listed" if model else "")
- ),
- }
- except Exception as exc:
- return {
- "ok": False,
- "provider": normalized_provider,
- "endpoint": url,
- "detail": str(exc),
- }
-
-
def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[str, Any]:
normalized_bot_id = str(payload.id or "").strip()
if not normalized_bot_id:
@@ -147,9 +94,9 @@ def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[st
if not docker_manager.has_image(payload.image_tag):
raise HTTPException(status_code=400, detail=f"Docker image not found locally: {payload.image_tag}")
- normalized_env_params = _normalize_env_params(payload.env_params)
+ normalized_env_params = normalize_bot_env_params(payload.env_params)
try:
- normalized_env_params["TZ"] = _normalize_system_timezone(payload.system_timezone)
+ normalized_env_params["TZ"] = normalize_bot_system_timezone(payload.system_timezone)
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
@@ -162,15 +109,15 @@ def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[st
workspace_dir=os.path.join(BOTS_WORKSPACE_ROOT, normalized_bot_id),
)
template_defaults = get_agent_md_templates()
- resource_limits = _normalize_resource_limits(payload.cpu_cores, payload.memory_mb, payload.storage_gb)
+ resource_limits = normalize_bot_resource_limits(payload.cpu_cores, payload.memory_mb, payload.storage_gb)
try:
session.add(bot)
session.flush()
- _write_env_store(normalized_bot_id, normalized_env_params)
- _sync_workspace_channels(
+ write_bot_env_params(normalized_bot_id, normalized_env_params)
+ sync_bot_workspace_channels(
session,
normalized_bot_id,
- channels_override=_normalize_initial_channels(normalized_bot_id, payload.channels),
+ channels_override=normalize_initial_bot_channels(normalized_bot_id, payload.channels),
global_delivery_override={
"sendProgress": bool(payload.send_progress) if payload.send_progress is not None else False,
"sendToolHints": bool(payload.send_tool_hints) if payload.send_tool_hints is not None else False,
@@ -211,7 +158,7 @@ def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[st
_cleanup_bot_workspace_root(normalized_bot_id)
raise
_invalidate_bot_detail_cache(normalized_bot_id)
- return _serialize_bot(bot)
+ return serialize_bot_detail(bot)
def list_bots_with_cache(session: Session) -> List[Dict[str, Any]]:
@@ -234,7 +181,7 @@ def list_bots_with_cache(session: Session) -> List[Dict[str, Any]]:
session.commit()
for bot in bots:
session.refresh(bot)
- rows = [_serialize_bot_list_item(bot) for bot in bots]
+ rows = [serialize_bot_list_entry(bot) for bot in bots]
cache.set_json(_cache_key_bots_list(), rows, ttl=30)
return rows
@@ -246,7 +193,7 @@ def get_bot_detail_cached(session: Session, *, bot_id: str) -> Dict[str, Any]:
bot = session.get(BotInstance, bot_id)
if not bot:
raise HTTPException(status_code=404, detail="Bot not found")
- row = _serialize_bot(bot)
+ row = serialize_bot_detail(bot)
cache.set_json(_cache_key_bot_detail(bot_id), row, ttl=30)
return row
@@ -290,7 +237,7 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
normalized_system_timezone: Optional[str] = None
if system_timezone is not None:
try:
- normalized_system_timezone = _normalize_system_timezone(system_timezone)
+ normalized_system_timezone = normalize_bot_system_timezone(system_timezone)
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
@@ -335,7 +282,7 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
runtime_overrides["system_prompt"] = runtime_overrides["soul_md"]
if {"cpu_cores", "memory_mb", "storage_gb"} & set(runtime_overrides.keys()):
runtime_overrides.update(
- _normalize_resource_limits(
+ normalize_bot_resource_limits(
runtime_overrides.get("cpu_cores"),
runtime_overrides.get("memory_mb"),
runtime_overrides.get("storage_gb"),
@@ -350,12 +297,12 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
session.flush()
if env_params is not None or normalized_system_timezone is not None:
- next_env_params = _resolve_bot_env_params(bot_id)
+ next_env_params = resolve_bot_runtime_env_params(bot_id)
if env_params is not None:
- next_env_params = _normalize_env_params(env_params)
+ next_env_params = normalize_bot_env_params(env_params)
if normalized_system_timezone is not None:
next_env_params["TZ"] = normalized_system_timezone
- _write_env_store(bot_id, next_env_params)
+ write_bot_env_params(bot_id, next_env_params)
global_delivery_override: Optional[Dict[str, Any]] = None
if "send_progress" in runtime_overrides or "send_tool_hints" in runtime_overrides:
@@ -365,7 +312,7 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
if "send_tool_hints" in runtime_overrides:
global_delivery_override["sendToolHints"] = bool(runtime_overrides.get("send_tool_hints"))
- _sync_workspace_channels(
+ sync_bot_workspace_channels(
session,
bot_id,
runtime_overrides=runtime_overrides if runtime_overrides else None,
@@ -382,4 +329,4 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
bot = refreshed_bot
raise
_invalidate_bot_detail_cache(bot_id)
- return _serialize_bot(bot)
+ return serialize_bot_detail(bot)
diff --git a/backend/services/bot_runtime_service.py b/backend/services/bot_runtime_service.py
index 5ab4a3d..b1717a3 100644
--- a/backend/services/bot_runtime_service.py
+++ b/backend/services/bot_runtime_service.py
@@ -12,9 +12,14 @@ from sqlmodel import Session
from core.docker_instance import docker_manager
from core.settings import BOTS_WORKSPACE_ROOT
from models.bot import BotInstance
-from services.bot_channel_service import _get_bot_channels_from_config
from services.bot_lifecycle_service import start_bot_instance, stop_bot_instance
-from services.bot_storage_service import _read_bot_config, _read_cron_store, _write_bot_config, _write_cron_store
+from services.bot_service import list_bot_channels_from_config
+from services.bot_storage_service import (
+ read_bot_config_data,
+ read_bot_cron_jobs_store,
+ write_bot_config_data,
+ write_bot_cron_jobs_store,
+)
from services.platform_auth_service import resolve_bot_websocket_auth, resolve_panel_websocket_auth
@@ -90,7 +95,7 @@ async def relogin_weixin(session: Session, *, bot_id: str) -> Dict[str, Any]:
weixin_channel = next(
(
row
- for row in _get_bot_channels_from_config(bot)
+ for row in list_bot_channels_from_config(bot)
if str(row.get("channel_type") or "").strip().lower() == "weixin"
),
None,
@@ -107,12 +112,12 @@ async def relogin_weixin(session: Session, *, bot_id: str) -> Dict[str, Any]:
except Exception as exc:
raise RuntimeError(f"Failed to remove weixin state: {exc}") from exc
- config_data = _read_bot_config(bot_id)
+ config_data = read_bot_config_data(bot_id)
channels_cfg = config_data.get("channels") if isinstance(config_data, dict) else {}
weixin_cfg = channels_cfg.get("weixin") if isinstance(channels_cfg, dict) else None
if isinstance(weixin_cfg, dict) and "token" in weixin_cfg:
weixin_cfg.pop("token", None)
- _write_bot_config(bot_id, config_data)
+ write_bot_config_data(bot_id, config_data)
restarted = False
if str(bot.docker_status or "").upper() == "RUNNING":
@@ -130,7 +135,7 @@ async def relogin_weixin(session: Session, *, bot_id: str) -> Dict[str, Any]:
def list_cron_jobs(session: Session, *, bot_id: str, include_disabled: bool = True) -> Dict[str, Any]:
_get_bot_or_raise(session, bot_id)
- store = _read_cron_store(bot_id)
+ store = read_bot_cron_jobs_store(bot_id)
rows = []
for row in store.get("jobs", []):
if not isinstance(row, dict):
@@ -145,7 +150,7 @@ def list_cron_jobs(session: Session, *, bot_id: str, include_disabled: bool = Tr
def stop_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, Any]:
_get_bot_or_raise(session, bot_id)
- store = _read_cron_store(bot_id)
+ store = read_bot_cron_jobs_store(bot_id)
jobs = store.get("jobs", [])
if not isinstance(jobs, list):
jobs = []
@@ -159,13 +164,13 @@ def stop_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, An
state = {}
found["state"] = state
state["nextRunAtMs"] = None
- _write_cron_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
+ write_bot_cron_jobs_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
return {"status": "stopped", "job_id": job_id}
def start_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, Any]:
_get_bot_or_raise(session, bot_id)
- store = _read_cron_store(bot_id)
+ store = read_bot_cron_jobs_store(bot_id)
jobs = store.get("jobs", [])
if not isinstance(jobs, list):
jobs = []
@@ -180,20 +185,20 @@ def start_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, A
found["state"] = state
schedule = found.get("schedule")
state["nextRunAtMs"] = _compute_cron_next_run(schedule if isinstance(schedule, dict) else {})
- _write_cron_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
+ write_bot_cron_jobs_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": jobs})
return {"status": "started", "job_id": job_id}
def delete_cron_job(session: Session, *, bot_id: str, job_id: str) -> Dict[str, Any]:
_get_bot_or_raise(session, bot_id)
- store = _read_cron_store(bot_id)
+ store = read_bot_cron_jobs_store(bot_id)
jobs = store.get("jobs", [])
if not isinstance(jobs, list):
jobs = []
kept = [row for row in jobs if not (isinstance(row, dict) and str(row.get("id")) == job_id)]
if len(kept) == len(jobs):
raise LookupError("Cron job not found")
- _write_cron_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": kept})
+ write_bot_cron_jobs_store(bot_id, {"version": int(store.get("version", 1) or 1), "jobs": kept})
return {"status": "deleted", "job_id": job_id}
diff --git a/backend/services/bot_service.py b/backend/services/bot_service.py
index c169d2c..334a791 100644
--- a/backend/services/bot_service.py
+++ b/backend/services/bot_service.py
@@ -1,81 +1,32 @@
import os
+from datetime import datetime
+from pathlib import Path
from typing import Any, Dict, List, Optional
-from datetime import datetime, timezone
from zoneinfo import ZoneInfo
from sqlmodel import Session
-from core.settings import DEFAULT_BOT_SYSTEM_TIMEZONE
+from core.config_manager import BotConfigManager
+from core.settings import BOTS_WORKSPACE_ROOT, DEFAULT_BOT_SYSTEM_TIMEZONE
from models.bot import BotInstance
+from schemas.bot import ChannelConfigRequest
from services.bot_storage_service import (
- _bot_data_root,
- _clear_bot_dashboard_direct_session,
- _clear_bot_sessions,
- _migrate_bot_resources_store,
_normalize_env_params,
- _normalize_resource_limits,
_read_bot_config,
_read_bot_resources,
- _read_cron_store,
_read_env_store,
_safe_float,
_safe_int,
_workspace_root,
- _write_bot_config,
- _write_bot_resources,
- _write_cron_store,
- _write_env_store,
-)
-from services.bot_channel_service import (
- _channel_api_to_cfg,
- _get_bot_channels_from_config,
- _normalize_channel_extra,
- _normalize_initial_channels,
- _read_global_delivery_flags,
- _sync_workspace_channels_impl,
-)
-from services.bot_mcp_service import (
- _merge_mcp_servers_preserving_extras,
- _normalize_mcp_servers,
- _sanitize_mcp_servers_in_config_data,
+ normalize_bot_resource_limits,
+ write_bot_resource_limits,
)
from services.template_service import get_agent_md_templates
-__all__ = [
- "_bot_data_root",
- "_channel_api_to_cfg",
- "_clear_bot_dashboard_direct_session",
- "_clear_bot_sessions",
- "_get_bot_channels_from_config",
- "_migrate_bot_resources_store",
- "_normalize_channel_extra",
- "_normalize_env_params",
- "_normalize_initial_channels",
- "_normalize_mcp_servers",
- "_normalize_resource_limits",
- "_normalize_system_timezone",
- "_provider_defaults",
- "_read_bot_config",
- "_read_bot_resources",
- "_read_bot_runtime_snapshot",
- "_read_cron_store",
- "_read_env_store",
- "_read_global_delivery_flags",
- "_resolve_bot_env_params",
- "_safe_float",
- "_safe_int",
- "_sanitize_mcp_servers_in_config_data",
- "_serialize_bot",
- "_serialize_bot_list_item",
- "_sync_workspace_channels",
- "_workspace_root",
- "_write_bot_config",
- "_write_bot_resources",
- "_write_cron_store",
- "_write_env_store",
- "_merge_mcp_servers_preserving_extras",
-]
-def _get_default_system_timezone() -> str:
+config_manager = BotConfigManager(host_data_root=BOTS_WORKSPACE_ROOT)
+
+
+def get_default_bot_system_timezone() -> str:
value = str(DEFAULT_BOT_SYSTEM_TIMEZONE or "").strip() or "Asia/Shanghai"
try:
ZoneInfo(value)
@@ -84,10 +35,10 @@ def _get_default_system_timezone() -> str:
return "Asia/Shanghai"
-def _normalize_system_timezone(raw: Any) -> str:
+def normalize_bot_system_timezone(raw: Any) -> str:
value = str(raw or "").strip()
if not value:
- return _get_default_system_timezone()
+ return get_default_bot_system_timezone()
try:
ZoneInfo(value)
except Exception as exc:
@@ -95,47 +46,316 @@ def _normalize_system_timezone(raw: Any) -> str:
return value
-def _resolve_bot_env_params(bot_id: str, raw: Optional[Dict[str, str]] = None) -> Dict[str, str]:
+def resolve_bot_runtime_env_params(bot_id: str, raw: Optional[Dict[str, str]] = None) -> Dict[str, str]:
env_params = _normalize_env_params(raw if isinstance(raw, dict) else _read_env_store(bot_id))
try:
- env_params["TZ"] = _normalize_system_timezone(env_params.get("TZ"))
+ env_params["TZ"] = normalize_bot_system_timezone(env_params.get("TZ"))
except ValueError:
- env_params["TZ"] = _get_default_system_timezone()
+ env_params["TZ"] = get_default_bot_system_timezone()
return env_params
-def _provider_defaults(provider: str) -> tuple[str, str]:
- normalized = provider.lower().strip()
- if normalized in {"openai"}:
- return "openai", "https://api.openai.com/v1"
- if normalized in {"openrouter"}:
- return "openrouter", "https://openrouter.ai/api/v1"
- if normalized in {"dashscope", "aliyun", "qwen", "aliyun-qwen"}:
- return "dashscope", "https://dashscope.aliyuncs.com/compatible-mode/v1"
- if normalized in {"deepseek"}:
- return "deepseek", "https://api.deepseek.com/v1"
- if normalized in {"xunfei", "iflytek", "xfyun"}:
- return "openai", "https://spark-api-open.xf-yun.com/v1"
- if normalized in {"vllm"}:
- return "openai", ""
- if normalized in {"kimi", "moonshot"}:
- return "kimi", "https://api.moonshot.cn/v1"
- if normalized in {"minimax"}:
- return "minimax", "https://api.minimax.chat/v1"
- return normalized, ""
+def normalize_channel_extra(raw: Any) -> Dict[str, Any]:
+ if not isinstance(raw, dict):
+ return {}
+ return raw
+
+
+def _normalize_allow_from(raw: Any) -> List[str]:
+ rows: List[str] = []
+ if isinstance(raw, list):
+ for item in raw:
+ text = str(item or "").strip()
+ if text and text not in rows:
+ rows.append(text)
+ return rows or ["*"]
+
+
+def read_global_delivery_flags(channels_cfg: Any) -> tuple[bool, bool]:
+ if not isinstance(channels_cfg, dict):
+ return False, False
+ send_progress = channels_cfg.get("sendProgress")
+ send_tool_hints = channels_cfg.get("sendToolHints")
+ dashboard_cfg = channels_cfg.get("dashboard")
+ if isinstance(dashboard_cfg, dict):
+ if send_progress is None and "sendProgress" in dashboard_cfg:
+ send_progress = dashboard_cfg.get("sendProgress")
+ if send_tool_hints is None and "sendToolHints" in dashboard_cfg:
+ send_tool_hints = dashboard_cfg.get("sendToolHints")
+ return bool(send_progress), bool(send_tool_hints)
+
+
+def channel_config_to_api(bot_id: str, channel_type: str, cfg: Dict[str, Any]) -> Dict[str, Any]:
+ ctype = str(channel_type or "").strip().lower()
+ enabled = bool(cfg.get("enabled", True))
+ port = max(1, min(int(cfg.get("port", 8080) or 8080), 65535))
+ extra: Dict[str, Any] = {}
+ external_app_id = ""
+ app_secret = ""
+
+ if ctype == "feishu":
+ external_app_id = str(cfg.get("appId") or "")
+ app_secret = str(cfg.get("appSecret") or "")
+ extra = {
+ "encryptKey": cfg.get("encryptKey", ""),
+ "verificationToken": cfg.get("verificationToken", ""),
+ "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
+ }
+ elif ctype == "dingtalk":
+ external_app_id = str(cfg.get("clientId") or "")
+ app_secret = str(cfg.get("clientSecret") or "")
+ extra = {"allowFrom": _normalize_allow_from(cfg.get("allowFrom", []))}
+ elif ctype == "telegram":
+ app_secret = str(cfg.get("token") or "")
+ extra = {
+ "proxy": cfg.get("proxy", ""),
+ "replyToMessage": bool(cfg.get("replyToMessage", False)),
+ "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
+ }
+ elif ctype == "slack":
+ external_app_id = str(cfg.get("botToken") or "")
+ app_secret = str(cfg.get("appToken") or "")
+ extra = {
+ "mode": cfg.get("mode", "socket"),
+ "replyInThread": bool(cfg.get("replyInThread", True)),
+ "groupPolicy": cfg.get("groupPolicy", "mention"),
+ "groupAllowFrom": cfg.get("groupAllowFrom", []),
+ "reactEmoji": cfg.get("reactEmoji", "eyes"),
+ }
+ elif ctype == "qq":
+ external_app_id = str(cfg.get("appId") or "")
+ app_secret = str(cfg.get("secret") or "")
+ extra = {"allowFrom": _normalize_allow_from(cfg.get("allowFrom", []))}
+ elif ctype == "weixin":
+ app_secret = ""
+ extra = {
+ "hasSavedState": (Path(BOTS_WORKSPACE_ROOT) / bot_id / ".nanobot" / "weixin" / "account.json").is_file(),
+ }
+ elif ctype == "email":
+ extra = {
+ "consentGranted": bool(cfg.get("consentGranted", False)),
+ "imapHost": str(cfg.get("imapHost") or ""),
+ "imapPort": int(cfg.get("imapPort") or 993),
+ "imapUsername": str(cfg.get("imapUsername") or ""),
+ "imapPassword": str(cfg.get("imapPassword") or ""),
+ "imapMailbox": str(cfg.get("imapMailbox") or "INBOX"),
+ "imapUseSsl": bool(cfg.get("imapUseSsl", True)),
+ "smtpHost": str(cfg.get("smtpHost") or ""),
+ "smtpPort": int(cfg.get("smtpPort") or 587),
+ "smtpUsername": str(cfg.get("smtpUsername") or ""),
+ "smtpPassword": str(cfg.get("smtpPassword") or ""),
+ "smtpUseTls": bool(cfg.get("smtpUseTls", True)),
+ "smtpUseSsl": bool(cfg.get("smtpUseSsl", False)),
+ "fromAddress": str(cfg.get("fromAddress") or ""),
+ "autoReplyEnabled": bool(cfg.get("autoReplyEnabled", True)),
+ "pollIntervalSeconds": int(cfg.get("pollIntervalSeconds") or 30),
+ "markSeen": bool(cfg.get("markSeen", True)),
+ "maxBodyChars": int(cfg.get("maxBodyChars") or 12000),
+ "subjectPrefix": str(cfg.get("subjectPrefix") or "Re: "),
+ "allowFrom": _normalize_allow_from(cfg.get("allowFrom", [])),
+ }
+ else:
+ external_app_id = str(
+ cfg.get("appId") or cfg.get("clientId") or cfg.get("botToken") or cfg.get("externalAppId") or ""
+ )
+ app_secret = str(
+ cfg.get("appSecret")
+ or cfg.get("clientSecret")
+ or cfg.get("secret")
+ or cfg.get("token")
+ or cfg.get("appToken")
+ or ""
+ )
+ extra = {
+ key: value
+ for key, value in cfg.items()
+ if key
+ not in {
+ "enabled",
+ "port",
+ "appId",
+ "clientId",
+ "botToken",
+ "externalAppId",
+ "appSecret",
+ "clientSecret",
+ "secret",
+ "token",
+ "appToken",
+ }
+ }
+
+ return {
+ "id": ctype,
+ "bot_id": bot_id,
+ "channel_type": ctype,
+ "external_app_id": external_app_id,
+ "app_secret": app_secret,
+ "internal_port": port,
+ "is_active": enabled,
+ "extra_config": extra,
+ "locked": ctype == "dashboard",
+ }
+
+
+def channel_api_to_config(row: Dict[str, Any]) -> Dict[str, Any]:
+ ctype = str(row.get("channel_type") or "").strip().lower()
+ enabled = bool(row.get("is_active", True))
+ extra = normalize_channel_extra(row.get("extra_config"))
+ external_app_id = str(row.get("external_app_id") or "")
+ app_secret = str(row.get("app_secret") or "")
+ port = max(1, min(int(row.get("internal_port") or 8080), 65535))
+
+ if ctype == "feishu":
+ return {
+ "enabled": enabled,
+ "appId": external_app_id,
+ "appSecret": app_secret,
+ "encryptKey": extra.get("encryptKey", ""),
+ "verificationToken": extra.get("verificationToken", ""),
+ "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
+ }
+ if ctype == "dingtalk":
+ return {
+ "enabled": enabled,
+ "clientId": external_app_id,
+ "clientSecret": app_secret,
+ "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
+ }
+ if ctype == "telegram":
+ return {
+ "enabled": enabled,
+ "token": app_secret,
+ "proxy": extra.get("proxy", ""),
+ "replyToMessage": bool(extra.get("replyToMessage", False)),
+ "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
+ }
+ if ctype == "slack":
+ return {
+ "enabled": enabled,
+ "mode": extra.get("mode", "socket"),
+ "botToken": external_app_id,
+ "appToken": app_secret,
+ "replyInThread": bool(extra.get("replyInThread", True)),
+ "groupPolicy": extra.get("groupPolicy", "mention"),
+ "groupAllowFrom": extra.get("groupAllowFrom", []),
+ "reactEmoji": extra.get("reactEmoji", "eyes"),
+ }
+ if ctype == "qq":
+ return {
+ "enabled": enabled,
+ "appId": external_app_id,
+ "secret": app_secret,
+ "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
+ }
+ if ctype == "weixin":
+ return {
+ "enabled": enabled,
+ "token": app_secret,
+ }
+ if ctype == "email":
+ return {
+ "enabled": enabled,
+ "consentGranted": bool(extra.get("consentGranted", False)),
+ "imapHost": str(extra.get("imapHost") or ""),
+ "imapPort": max(1, min(int(extra.get("imapPort") or 993), 65535)),
+ "imapUsername": str(extra.get("imapUsername") or ""),
+ "imapPassword": str(extra.get("imapPassword") or ""),
+ "imapMailbox": str(extra.get("imapMailbox") or "INBOX"),
+ "imapUseSsl": bool(extra.get("imapUseSsl", True)),
+ "smtpHost": str(extra.get("smtpHost") or ""),
+ "smtpPort": max(1, min(int(extra.get("smtpPort") or 587), 65535)),
+ "smtpUsername": str(extra.get("smtpUsername") or ""),
+ "smtpPassword": str(extra.get("smtpPassword") or ""),
+ "smtpUseTls": bool(extra.get("smtpUseTls", True)),
+ "smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
+ "fromAddress": str(extra.get("fromAddress") or ""),
+ "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
+ "pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds") or 30)),
+ "markSeen": bool(extra.get("markSeen", True)),
+ "maxBodyChars": max(1, int(extra.get("maxBodyChars") or 12000)),
+ "subjectPrefix": str(extra.get("subjectPrefix") or "Re: "),
+ "allowFrom": _normalize_allow_from(extra.get("allowFrom", [])),
+ }
+ merged = dict(extra)
+ merged.update(
+ {
+ "enabled": enabled,
+ "appId": external_app_id,
+ "appSecret": app_secret,
+ "port": port,
+ }
+ )
+ return merged
+
+
+def list_bot_channels_from_config(bot: BotInstance) -> List[Dict[str, Any]]:
+ config_data = _read_bot_config(bot.id)
+ channels_cfg = config_data.get("channels")
+ if not isinstance(channels_cfg, dict):
+ channels_cfg = {}
+ send_progress, send_tool_hints = read_global_delivery_flags(channels_cfg)
+ rows: List[Dict[str, Any]] = [
+ {
+ "id": "dashboard",
+ "bot_id": bot.id,
+ "channel_type": "dashboard",
+ "external_app_id": f"dashboard-{bot.id}",
+ "app_secret": "",
+ "internal_port": 9000,
+ "is_active": True,
+ "extra_config": {
+ "sendProgress": send_progress,
+ "sendToolHints": send_tool_hints,
+ },
+ "locked": True,
+ }
+ ]
+ for ctype, cfg in channels_cfg.items():
+ if ctype in {"sendProgress", "sendToolHints", "dashboard"} or not isinstance(cfg, dict):
+ continue
+ rows.append(channel_config_to_api(bot.id, ctype, cfg))
+ return rows
+
+
+def normalize_initial_bot_channels(bot_id: str, channels: Optional[List[ChannelConfigRequest]]) -> List[Dict[str, Any]]:
+ rows: List[Dict[str, Any]] = []
+ seen_types: set[str] = set()
+ for channel in channels or []:
+ ctype = (channel.channel_type or "").strip().lower()
+ if not ctype or ctype == "dashboard" or ctype in seen_types:
+ continue
+ seen_types.add(ctype)
+ rows.append(
+ {
+ "id": ctype,
+ "bot_id": bot_id,
+ "channel_type": ctype,
+ "external_app_id": (channel.external_app_id or "").strip() or f"{ctype}-{bot_id}",
+ "app_secret": (channel.app_secret or "").strip(),
+ "internal_port": max(1, min(int(channel.internal_port or 8080), 65535)),
+ "is_active": bool(channel.is_active),
+ "extra_config": normalize_channel_extra(channel.extra_config),
+ "locked": False,
+ }
+ )
+ return rows
+
+
def _read_workspace_md(bot_id: str, filename: str, default_value: str) -> str:
path = os.path.join(_workspace_root(bot_id), filename)
if not os.path.isfile(path):
return default_value
try:
- with open(path, "r", encoding="utf-8") as f:
- return f.read().strip()
+ with open(path, "r", encoding="utf-8") as file:
+ return file.read().strip()
except Exception:
return default_value
-def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
+
+def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
config_data = _read_bot_config(bot.id)
- env_params = _resolve_bot_env_params(bot.id)
+ env_params = resolve_bot_runtime_env_params(bot.id)
template_defaults = get_agent_md_templates()
provider_name = ""
@@ -156,7 +376,7 @@ def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
agents_defaults = defaults
channels_cfg = config_data.get("channels")
- send_progress, send_tool_hints = _read_global_delivery_flags(channels_cfg)
+ send_progress, send_tool_hints = read_global_delivery_flags(channels_cfg)
llm_provider = provider_name or ""
llm_model = str(agents_defaults.get("model") or "")
@@ -182,7 +402,7 @@ def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
"cpu_cores": resources["cpu_cores"],
"memory_mb": resources["memory_mb"],
"storage_gb": resources["storage_gb"],
- "system_timezone": env_params.get("TZ") or _get_default_system_timezone(),
+ "system_timezone": env_params.get("TZ") or get_default_bot_system_timezone(),
"send_progress": send_progress,
"send_tool_hints": send_tool_hints,
"soul_md": soul_md,
@@ -193,8 +413,9 @@ def _read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
"system_prompt": soul_md,
}
-def _serialize_bot(bot: BotInstance) -> Dict[str, Any]:
- runtime = _read_bot_runtime_snapshot(bot)
+
+def serialize_bot_detail(bot: BotInstance) -> Dict[str, Any]:
+ runtime = read_bot_runtime_snapshot(bot)
created_at = bot.created_at.isoformat() + "Z" if bot.created_at else None
updated_at = bot.updated_at.isoformat() + "Z" if bot.updated_at else None
return {
@@ -216,7 +437,7 @@ def _serialize_bot(bot: BotInstance) -> Dict[str, Any]:
"cpu_cores": _safe_float(runtime.get("cpu_cores"), 1.0),
"memory_mb": _safe_int(runtime.get("memory_mb"), 1024),
"storage_gb": _safe_int(runtime.get("storage_gb"), 10),
- "system_timezone": str(runtime.get("system_timezone") or _get_default_system_timezone()),
+ "system_timezone": str(runtime.get("system_timezone") or get_default_bot_system_timezone()),
"send_progress": bool(runtime.get("send_progress")),
"send_tool_hints": bool(runtime.get("send_tool_hints")),
"soul_md": runtime.get("soul_md") or "",
@@ -232,7 +453,8 @@ def _serialize_bot(bot: BotInstance) -> Dict[str, Any]:
"updated_at": updated_at,
}
-def _serialize_bot_list_item(bot: BotInstance) -> Dict[str, Any]:
+
+def serialize_bot_list_entry(bot: BotInstance) -> Dict[str, Any]:
created_at = bot.created_at.isoformat() + "Z" if bot.created_at else None
updated_at = bot.updated_at.isoformat() + "Z" if bot.updated_at else None
return {
@@ -248,7 +470,8 @@ def _serialize_bot_list_item(bot: BotInstance) -> Dict[str, Any]:
"updated_at": updated_at,
}
-def _sync_workspace_channels(
+
+def sync_bot_workspace_channels(
session: Session,
bot_id: str,
channels_override: Optional[List[Dict[str, Any]]] = None,
@@ -258,12 +481,75 @@ def _sync_workspace_channels(
bot = session.get(BotInstance, bot_id)
if not bot:
return
- snapshot = _read_bot_runtime_snapshot(bot)
- _sync_workspace_channels_impl(
- session,
- bot_id,
- snapshot,
- channels_override=channels_override,
- global_delivery_override=global_delivery_override,
- runtime_overrides=runtime_overrides,
+
+ snapshot = read_bot_runtime_snapshot(bot)
+ template_defaults = get_agent_md_templates()
+ bot_data: Dict[str, Any] = {
+ "name": bot.name,
+ "system_prompt": snapshot.get("system_prompt") or template_defaults.get("soul_md", ""),
+ "soul_md": snapshot.get("soul_md") or template_defaults.get("soul_md", ""),
+ "agents_md": snapshot.get("agents_md") or template_defaults.get("agents_md", ""),
+ "user_md": snapshot.get("user_md") or template_defaults.get("user_md", ""),
+ "tools_md": snapshot.get("tools_md") or template_defaults.get("tools_md", ""),
+ "identity_md": snapshot.get("identity_md") or template_defaults.get("identity_md", ""),
+ "llm_provider": snapshot.get("llm_provider") or "",
+ "llm_model": snapshot.get("llm_model") or "",
+ "api_key": snapshot.get("api_key") or "",
+ "api_base": snapshot.get("api_base") or "",
+ "temperature": snapshot.get("temperature"),
+ "top_p": snapshot.get("top_p"),
+ "max_tokens": snapshot.get("max_tokens"),
+ "cpu_cores": snapshot.get("cpu_cores"),
+ "memory_mb": snapshot.get("memory_mb"),
+ "storage_gb": snapshot.get("storage_gb"),
+ "send_progress": bool(snapshot.get("send_progress")),
+ "send_tool_hints": bool(snapshot.get("send_tool_hints")),
+ }
+ if isinstance(runtime_overrides, dict):
+ for key, value in runtime_overrides.items():
+ if key in {"api_key", "llm_provider", "llm_model"}:
+ text = str(value or "").strip()
+ if not text:
+ continue
+ bot_data[key] = text
+ continue
+ if key == "api_base":
+ bot_data[key] = str(value or "").strip()
+ continue
+ bot_data[key] = value
+
+ resources = normalize_bot_resource_limits(
+ bot_data.get("cpu_cores"),
+ bot_data.get("memory_mb"),
+ bot_data.get("storage_gb"),
)
+ bot_data.update(resources)
+ send_progress = bool(bot_data.get("send_progress", False))
+ send_tool_hints = bool(bot_data.get("send_tool_hints", False))
+ if isinstance(global_delivery_override, dict):
+ if "sendProgress" in global_delivery_override:
+ send_progress = bool(global_delivery_override.get("sendProgress"))
+ if "sendToolHints" in global_delivery_override:
+ send_tool_hints = bool(global_delivery_override.get("sendToolHints"))
+
+ channels_data = channels_override if channels_override is not None else list_bot_channels_from_config(bot)
+ bot_data["send_progress"] = send_progress
+ bot_data["send_tool_hints"] = send_tool_hints
+ normalized_channels: List[Dict[str, Any]] = []
+ for row in channels_data:
+ ctype = str(row.get("channel_type") or "").strip().lower()
+ if not ctype or ctype == "dashboard":
+ continue
+ normalized_channels.append(
+ {
+ "channel_type": ctype,
+ "external_app_id": str(row.get("external_app_id") or ""),
+ "app_secret": str(row.get("app_secret") or ""),
+ "internal_port": max(1, min(int(row.get("internal_port") or 8080), 65535)),
+ "is_active": bool(row.get("is_active", True)),
+ "extra_config": normalize_channel_extra(row.get("extra_config")),
+ }
+ )
+
+ config_manager.update_workspace(bot_id=bot_id, bot_data=bot_data, channels=normalized_channels)
+ write_bot_resource_limits(bot_id, bot_data.get("cpu_cores"), bot_data.get("memory_mb"), bot_data.get("storage_gb"))
diff --git a/backend/services/bot_storage_service.py b/backend/services/bot_storage_service.py
index 569f338..03c7e99 100644
--- a/backend/services/bot_storage_service.py
+++ b/backend/services/bot_storage_service.py
@@ -5,11 +5,27 @@ import os
import re
from typing import Any, Dict, Optional
+from core.utils import _calc_dir_size_bytes
from core.settings import BOTS_WORKSPACE_ROOT
_ENV_KEY_RE = re.compile(r"^[A-Z_][A-Z0-9_]{0,127}$")
+_BYTES_PER_GB = 1024 * 1024 * 1024
__all__ = [
+ "get_bot_data_root",
+ "normalize_bot_env_params",
+ "normalize_bot_resource_limits",
+ "read_bot_config_data",
+ "read_bot_cron_jobs_store",
+ "read_bot_env_params",
+ "get_bot_resource_limits",
+ "get_bot_workspace_root",
+ "get_bot_workspace_snapshot",
+ "get_bot_workspace_usage_bytes",
+ "write_bot_config_data",
+ "write_bot_cron_jobs_store",
+ "write_bot_env_params",
+ "write_bot_resource_limits",
"_bot_data_root",
"_clear_bot_dashboard_direct_session",
"_clear_bot_sessions",
@@ -30,10 +46,18 @@ __all__ = [
]
+def get_bot_workspace_root(bot_id: str) -> str:
+ return _workspace_root(bot_id)
+
+
def _workspace_root(bot_id: str) -> str:
return os.path.abspath(os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot", "workspace"))
+def get_bot_data_root(bot_id: str) -> str:
+ return _bot_data_root(bot_id)
+
+
def _bot_data_root(bot_id: str) -> str:
return os.path.abspath(os.path.join(BOTS_WORKSPACE_ROOT, bot_id, ".nanobot"))
@@ -72,6 +96,10 @@ def _normalize_resource_limits(cpu_cores: Any, memory_mb: Any, storage_gb: Any)
}
+def normalize_bot_resource_limits(cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> Dict[str, Any]:
+ return _normalize_resource_limits(cpu_cores, memory_mb, storage_gb)
+
+
def _normalize_env_params(raw: Any) -> Dict[str, str]:
if not isinstance(raw, dict):
return {}
@@ -84,6 +112,10 @@ def _normalize_env_params(raw: Any) -> Dict[str, str]:
return rows
+def normalize_bot_env_params(raw: Any) -> Dict[str, str]:
+ return _normalize_env_params(raw)
+
+
def _read_json_object(path: str) -> Dict[str, Any]:
if not os.path.isfile(path):
return {}
@@ -121,10 +153,18 @@ def _read_bot_config(bot_id: str) -> Dict[str, Any]:
return _read_json_object(_config_json_path(bot_id))
+def read_bot_config_data(bot_id: str) -> Dict[str, Any]:
+ return _read_bot_config(bot_id)
+
+
def _write_bot_config(bot_id: str, config_data: Dict[str, Any]) -> None:
_write_json_atomic(_config_json_path(bot_id), config_data)
+def write_bot_config_data(bot_id: str, config_data: Dict[str, Any]) -> None:
+ _write_bot_config(bot_id, config_data)
+
+
def _resources_json_path(bot_id: str) -> str:
return os.path.join(_bot_data_root(bot_id), "resources.json")
@@ -141,6 +181,10 @@ def _write_bot_resources(bot_id: str, cpu_cores: Any, memory_mb: Any, storage_gb
)
+def write_bot_resource_limits(bot_id: str, cpu_cores: Any, memory_mb: Any, storage_gb: Any) -> None:
+ _write_bot_resources(bot_id, cpu_cores, memory_mb, storage_gb)
+
+
def _read_bot_resources(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
cpu_raw: Any = None
memory_raw: Any = None
@@ -168,6 +212,24 @@ def _read_bot_resources(bot_id: str, config_data: Optional[Dict[str, Any]] = Non
return _normalize_resource_limits(cpu_raw, memory_raw, storage_raw)
+def get_bot_resource_limits(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ return _read_bot_resources(bot_id, config_data=config_data)
+
+
+def get_bot_workspace_usage_bytes(bot_id: str) -> int:
+ return _calc_dir_size_bytes(_workspace_root(bot_id))
+
+
+def get_bot_workspace_snapshot(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+ resources = get_bot_resource_limits(bot_id, config_data=config_data)
+ configured_limit_bytes = int(resources.get("storage_gb") or 0) * _BYTES_PER_GB
+ return {
+ "path": get_bot_workspace_root(bot_id),
+ "usage_bytes": get_bot_workspace_usage_bytes(bot_id),
+ "configured_limit_bytes": configured_limit_bytes if configured_limit_bytes > 0 else None,
+ }
+
+
def _migrate_bot_resources_store(bot_id: str) -> None:
config_data = _read_bot_config(bot_id)
runtime_cfg = config_data.get("runtime")
@@ -201,10 +263,18 @@ def _read_env_store(bot_id: str) -> Dict[str, str]:
return _normalize_env_params(_read_json_object(_env_store_path(bot_id)))
+def read_bot_env_params(bot_id: str) -> Dict[str, str]:
+ return _read_env_store(bot_id)
+
+
def _write_env_store(bot_id: str, env_params: Dict[str, str]) -> None:
_write_json_atomic(_env_store_path(bot_id), _normalize_env_params(env_params))
+def write_bot_env_params(bot_id: str, env_params: Dict[str, str]) -> None:
+ _write_env_store(bot_id, env_params)
+
+
def _cron_store_path(bot_id: str) -> str:
return os.path.join(_workspace_root(bot_id), "cron", "jobs.json")
@@ -229,11 +299,19 @@ def _read_cron_store(bot_id: str) -> Dict[str, Any]:
return _normalize_cron_store_payload(_read_json_value(_cron_store_path(bot_id)))
+def read_bot_cron_jobs_store(bot_id: str) -> Dict[str, Any]:
+ return _read_cron_store(bot_id)
+
+
def _write_cron_store(bot_id: str, store: Dict[str, Any]) -> None:
normalized = _normalize_cron_store_payload(store)
_write_json_atomic(_cron_store_path(bot_id), normalized)
+def write_bot_cron_jobs_store(bot_id: str, store: Dict[str, Any]) -> None:
+ _write_cron_store(bot_id, store)
+
+
def _sessions_root(bot_id: str) -> str:
return os.path.join(_workspace_root(bot_id), "sessions")
diff --git a/backend/services/chat_command_service.py b/backend/services/chat_command_service.py
index 3ad8495..b7efe02 100644
--- a/backend/services/chat_command_service.py
+++ b/backend/services/chat_command_service.py
@@ -6,16 +6,16 @@ from fastapi import HTTPException
from sqlmodel import Session
from core.docker_instance import docker_manager
+from core.utils import _is_video_attachment_path, _is_visual_attachment_path
from models.bot import BotInstance
-from services.bot_service import _read_bot_runtime_snapshot
+from services.bot_service import read_bot_runtime_snapshot
from services.platform_service import (
create_usage_request,
fail_latest_usage,
record_activity_event,
)
-from services.runtime_service import _persist_runtime_packet, _queue_runtime_broadcast
-from services.workspace_service import _resolve_workspace_path
-from core.utils import _is_video_attachment_path, _is_visual_attachment_path
+from services.runtime_service import broadcast_runtime_packet, persist_runtime_packet
+from services.workspace_service import resolve_workspace_path
logger = logging.getLogger("dashboard.backend")
@@ -94,7 +94,7 @@ def send_bot_command(session: Session, bot_id: str, command: str, attachments: A
bot = session.get(BotInstance, bot_id)
if not bot:
raise HTTPException(status_code=404, detail="Bot not found")
- runtime_snapshot = _read_bot_runtime_snapshot(bot)
+ runtime_snapshot = read_bot_runtime_snapshot(bot)
normalized_attachments = _normalize_message_media_list(attachments)
text_command = str(command or "").strip()
@@ -103,7 +103,7 @@ def send_bot_command(session: Session, bot_id: str, command: str, attachments: A
checked_attachments: List[str] = []
for rel_path in normalized_attachments:
- _, target = _resolve_workspace_path(bot_id, rel_path)
+ _, target = resolve_workspace_path(bot_id, rel_path)
if not os.path.isfile(target):
raise HTTPException(status_code=400, detail=f"attachment not found: {rel_path}")
checked_attachments.append(rel_path)
@@ -142,10 +142,10 @@ def send_bot_command(session: Session, bot_id: str, command: str, attachments: A
"media": checked_attachments,
"request_id": request_id,
}
- _persist_runtime_packet(bot_id, outbound_user_packet)
+ persist_runtime_packet(bot_id, outbound_user_packet)
if outbound_user_packet:
- _queue_runtime_broadcast(bot_id, outbound_user_packet)
+ broadcast_runtime_packet(bot_id, outbound_user_packet)
success = docker_manager.send_command(bot_id, delivery_command, media=delivery_media)
if success:
@@ -162,7 +162,7 @@ def send_bot_command(session: Session, bot_id: str, command: str, attachments: A
detail=(detail or "command delivery failed")[:400],
)
session.commit()
- _queue_runtime_broadcast(
+ broadcast_runtime_packet(
bot_id,
{
"type": "AGENT_STATE",
diff --git a/backend/services/chat_history_service.py b/backend/services/chat_history_service.py
index 4113179..a3908bb 100644
--- a/backend/services/chat_history_service.py
+++ b/backend/services/chat_history_service.py
@@ -10,7 +10,11 @@ from core.cache import cache
from core.docker_instance import docker_manager
from core.utils import _resolve_local_day_range
from models.bot import BotInstance, BotMessage
-from services.bot_storage_service import _clear_bot_dashboard_direct_session, _clear_bot_sessions, _workspace_root
+from services.bot_storage_service import (
+ _clear_bot_dashboard_direct_session,
+ _clear_bot_sessions,
+ get_bot_workspace_root,
+)
from services.cache_service import (
_cache_key_bot_messages,
_cache_key_bot_messages_page,
@@ -33,7 +37,7 @@ def _normalize_message_media_item(bot_id: str, value: Any) -> str:
return ""
if raw.startswith("/root/.nanobot/workspace/"):
return raw[len("/root/.nanobot/workspace/") :].lstrip("/")
- root = _workspace_root(bot_id)
+ root = get_bot_workspace_root(bot_id)
if os.path.isabs(raw):
try:
if os.path.commonpath([root, raw]) == root:
diff --git a/backend/services/image_service.py b/backend/services/image_service.py
new file mode 100644
index 0000000..274afe4
--- /dev/null
+++ b/backend/services/image_service.py
@@ -0,0 +1,116 @@
+import logging
+from typing import Any, Dict, List
+
+from fastapi import HTTPException
+from sqlmodel import Session, select
+
+from core.cache import cache
+from core.docker_instance import docker_manager
+from models.bot import BotInstance, NanobotImage
+from services.cache_service import _cache_key_images, _invalidate_images_cache
+
+logger = logging.getLogger("dashboard.backend")
+
+
+def _serialize_image(row: NanobotImage) -> Dict[str, Any]:
+ created_at = row.created_at.isoformat() + "Z" if row.created_at else None
+ return {
+ "tag": row.tag,
+ "image_id": row.image_id,
+ "version": row.version,
+ "status": row.status,
+ "source_dir": row.source_dir,
+ "created_at": created_at,
+ }
+
+
+def _reconcile_registered_images(session: Session) -> None:
+ rows = session.exec(select(NanobotImage)).all()
+ dirty = False
+ for row in rows:
+ docker_exists = docker_manager.has_image(row.tag)
+ next_status = "READY" if docker_exists else "ERROR"
+ next_image_id = row.image_id
+ if docker_exists and docker_manager.client:
+ try:
+ next_image_id = docker_manager.client.images.get(row.tag).id
+ except Exception:
+ next_image_id = row.image_id
+ if row.status != next_status or row.image_id != next_image_id:
+ row.status = next_status
+ row.image_id = next_image_id
+ session.add(row)
+ dirty = True
+ if dirty:
+ session.commit()
+
+
+def list_registered_images(session: Session) -> List[Dict[str, Any]]:
+ cached = cache.get_json(_cache_key_images())
+ if isinstance(cached, list) and all(isinstance(row, dict) for row in cached):
+ return cached
+ if isinstance(cached, list):
+ _invalidate_images_cache()
+ try:
+ _reconcile_registered_images(session)
+ except Exception as exc:
+ logger.warning("image reconcile skipped: %s", exc)
+ rows = session.exec(select(NanobotImage).order_by(NanobotImage.created_at.desc())).all()
+ payload = [_serialize_image(row) for row in rows]
+ cache.set_json(_cache_key_images(), payload, ttl=60)
+ return payload
+
+
+def delete_registered_image(session: Session, *, tag: str) -> Dict[str, Any]:
+ image = session.get(NanobotImage, tag)
+ if not image:
+ raise HTTPException(status_code=404, detail="Image not found")
+
+ bots_using = session.exec(select(BotInstance).where(BotInstance.image_tag == tag)).all()
+ if bots_using:
+ raise HTTPException(status_code=400, detail=f"Cannot delete image: {len(bots_using)} bots are using it.")
+
+ session.delete(image)
+ session.commit()
+ _invalidate_images_cache()
+ return {"status": "deleted"}
+
+
+def list_docker_images_by_repository(repository: str = "nanobot-base") -> List[Dict[str, Any]]:
+ return docker_manager.list_images_by_repo(repository)
+
+
+def register_image(session: Session, payload: Dict[str, Any]) -> Dict[str, Any]:
+ tag = str(payload.get("tag") or "").strip()
+ source_dir = str(payload.get("source_dir") or "manual").strip() or "manual"
+ if not tag:
+ raise HTTPException(status_code=400, detail="tag is required")
+ if not docker_manager.has_image(tag):
+ raise HTTPException(status_code=404, detail=f"Docker image not found: {tag}")
+
+ version = tag.split(":")[-1].removeprefix("v") if ":" in tag else tag
+ try:
+ docker_img = docker_manager.client.images.get(tag) if docker_manager.client else None
+ image_id = docker_img.id if docker_img else None
+ except Exception:
+ image_id = None
+
+ row = session.get(NanobotImage, tag)
+ if not row:
+ row = NanobotImage(
+ tag=tag,
+ version=version,
+ status="READY",
+ source_dir=source_dir,
+ image_id=image_id,
+ )
+ else:
+ row.version = version
+ row.status = "READY"
+ row.source_dir = source_dir
+ row.image_id = image_id
+ session.add(row)
+ session.commit()
+ session.refresh(row)
+ _invalidate_images_cache()
+ return _serialize_image(row)
diff --git a/backend/services/platform_overview_service.py b/backend/services/platform_overview_service.py
index f5e9994..5a00cc5 100644
--- a/backend/services/platform_overview_service.py
+++ b/backend/services/platform_overview_service.py
@@ -2,9 +2,8 @@ from typing import Any, Dict
from sqlmodel import Session, select
-from core.utils import _calc_dir_size_bytes
from models.bot import BotInstance, NanobotImage
-from services.bot_storage_service import _read_bot_resources, _workspace_root
+from services.bot_storage_service import get_bot_resource_limits, get_bot_workspace_snapshot
from services.platform_activity_service import (
get_bot_activity_stats,
list_activity_events,
@@ -39,15 +38,15 @@ def build_platform_overview(session: Session, docker_manager: Any) -> Dict[str,
for bot in bots:
enabled = bool(getattr(bot, "enabled", True))
runtime_status = docker_manager.get_bot_status(bot.id) if docker_manager else str(bot.docker_status or "STOPPED")
- resources = _read_bot_resources(bot.id)
+ resources = get_bot_resource_limits(bot.id)
runtime = (
docker_manager.get_bot_resource_snapshot(bot.id)
if docker_manager
else {"usage": {}, "limits": {}, "docker_status": runtime_status}
)
- workspace_root = _workspace_root(bot.id)
- workspace_used = _calc_dir_size_bytes(workspace_root)
- workspace_limit = int(resources["storage_gb"] or 0) * 1024 * 1024 * 1024
+ workspace = get_bot_workspace_snapshot(bot.id, config_data=None)
+ workspace_used = int(workspace.get("usage_bytes") or 0)
+ workspace_limit = int(workspace.get("configured_limit_bytes") or 0)
configured_cpu_total += float(resources["cpu_cores"] or 0)
configured_memory_total += int(resources["memory_mb"] or 0) * 1024 * 1024
diff --git a/backend/services/platform_runtime_settings_service.py b/backend/services/platform_runtime_settings_service.py
deleted file mode 100644
index 03a4b0c..0000000
--- a/backend/services/platform_runtime_settings_service.py
+++ /dev/null
@@ -1,151 +0,0 @@
-from typing import Any, Dict, List
-
-from sqlmodel import Session, select
-
-from core.database import engine
-from core.settings import (
- DEFAULT_STT_AUDIO_FILTER,
- DEFAULT_STT_AUDIO_PREPROCESS,
- DEFAULT_STT_DEFAULT_LANGUAGE,
- DEFAULT_STT_FORCE_SIMPLIFIED,
- DEFAULT_STT_INITIAL_PROMPT,
- DEFAULT_STT_MAX_AUDIO_SECONDS,
- STT_DEVICE,
- STT_MODEL,
-)
-from models.platform import PlatformSetting
-from schemas.platform import PlatformSettingsPayload
-from services.platform_settings_core import (
- SETTING_KEYS,
- SYSTEM_SETTING_DEFINITIONS,
- _bootstrap_platform_setting_values,
- _normalize_extension_list,
- _read_setting_value,
- _upsert_setting_row,
-)
-from services.platform_system_settings_service import ensure_default_system_settings
-
-
-def default_platform_settings() -> PlatformSettingsPayload:
- bootstrap = _bootstrap_platform_setting_values()
- return PlatformSettingsPayload(
- page_size=int(bootstrap["page_size"]),
- chat_pull_page_size=int(bootstrap["chat_pull_page_size"]),
- command_auto_unlock_seconds=int(bootstrap["command_auto_unlock_seconds"]),
- auth_token_ttl_hours=int(bootstrap["auth_token_ttl_hours"]),
- auth_token_max_active=int(bootstrap["auth_token_max_active"]),
- upload_max_mb=int(bootstrap["upload_max_mb"]),
- allowed_attachment_extensions=list(bootstrap["allowed_attachment_extensions"]),
- workspace_download_extensions=list(bootstrap["workspace_download_extensions"]),
- speech_enabled=bool(bootstrap["speech_enabled"]),
- )
-
-
-def get_platform_settings(session: Session) -> PlatformSettingsPayload:
- defaults = default_platform_settings()
- ensure_default_system_settings(session)
- rows = session.exec(select(PlatformSetting).where(PlatformSetting.key.in_(SETTING_KEYS))).all()
- data: Dict[str, Any] = {row.key: _read_setting_value(row) for row in rows}
-
- merged = defaults.model_dump()
- merged["page_size"] = max(1, min(100, int(data.get("page_size") or merged["page_size"])))
- merged["chat_pull_page_size"] = max(10, min(500, int(data.get("chat_pull_page_size") or merged["chat_pull_page_size"])))
- merged["command_auto_unlock_seconds"] = max(
- 1,
- min(600, int(data.get("command_auto_unlock_seconds") or merged["command_auto_unlock_seconds"])),
- )
- merged["auth_token_ttl_hours"] = max(
- 1,
- min(720, int(data.get("auth_token_ttl_hours") or merged["auth_token_ttl_hours"])),
- )
- merged["auth_token_max_active"] = max(
- 1,
- min(20, int(data.get("auth_token_max_active") or merged["auth_token_max_active"])),
- )
- merged["upload_max_mb"] = int(data.get("upload_max_mb") or merged["upload_max_mb"])
- merged["allowed_attachment_extensions"] = _normalize_extension_list(
- data.get("allowed_attachment_extensions", merged["allowed_attachment_extensions"])
- )
- merged["workspace_download_extensions"] = _normalize_extension_list(
- data.get("workspace_download_extensions", merged["workspace_download_extensions"])
- )
- merged["speech_enabled"] = bool(data.get("speech_enabled", merged["speech_enabled"]))
- return PlatformSettingsPayload.model_validate(merged)
-
-
-def save_platform_settings(session: Session, payload: PlatformSettingsPayload) -> PlatformSettingsPayload:
- normalized = PlatformSettingsPayload(
- page_size=max(1, min(100, int(payload.page_size))),
- chat_pull_page_size=max(10, min(500, int(payload.chat_pull_page_size))),
- command_auto_unlock_seconds=max(1, min(600, int(payload.command_auto_unlock_seconds))),
- auth_token_ttl_hours=max(1, min(720, int(payload.auth_token_ttl_hours))),
- auth_token_max_active=max(1, min(20, int(payload.auth_token_max_active))),
- upload_max_mb=payload.upload_max_mb,
- allowed_attachment_extensions=_normalize_extension_list(payload.allowed_attachment_extensions),
- workspace_download_extensions=_normalize_extension_list(payload.workspace_download_extensions),
- speech_enabled=bool(payload.speech_enabled),
- )
- payload_by_key = normalized.model_dump()
- for key in SETTING_KEYS:
- definition = SYSTEM_SETTING_DEFINITIONS[key]
- _upsert_setting_row(
- session,
- key,
- name=str(definition["name"]),
- category=str(definition["category"]),
- description=str(definition["description"]),
- value_type=str(definition["value_type"]),
- value=payload_by_key[key],
- is_public=bool(definition["is_public"]),
- sort_order=int(definition["sort_order"]),
- )
- session.commit()
- return normalized
-
-
-def get_platform_settings_snapshot() -> PlatformSettingsPayload:
- with Session(engine) as session:
- return get_platform_settings(session)
-
-
-def get_upload_max_mb() -> int:
- return get_platform_settings_snapshot().upload_max_mb
-
-
-def get_allowed_attachment_extensions() -> List[str]:
- return get_platform_settings_snapshot().allowed_attachment_extensions
-
-
-def get_workspace_download_extensions() -> List[str]:
- return get_platform_settings_snapshot().workspace_download_extensions
-
-
-def get_page_size() -> int:
- return get_platform_settings_snapshot().page_size
-
-
-def get_chat_pull_page_size() -> int:
- return get_platform_settings_snapshot().chat_pull_page_size
-
-
-def get_auth_token_ttl_hours(session: Session) -> int:
- return get_platform_settings(session).auth_token_ttl_hours
-
-
-def get_auth_token_max_active(session: Session) -> int:
- return get_platform_settings(session).auth_token_max_active
-
-
-def get_speech_runtime_settings() -> Dict[str, Any]:
- settings = get_platform_settings_snapshot()
- return {
- "enabled": bool(settings.speech_enabled),
- "max_audio_seconds": int(DEFAULT_STT_MAX_AUDIO_SECONDS),
- "default_language": str(DEFAULT_STT_DEFAULT_LANGUAGE or "zh").strip().lower() or "zh",
- "force_simplified": bool(DEFAULT_STT_FORCE_SIMPLIFIED),
- "audio_preprocess": bool(DEFAULT_STT_AUDIO_PREPROCESS),
- "audio_filter": str(DEFAULT_STT_AUDIO_FILTER or "").strip(),
- "initial_prompt": str(DEFAULT_STT_INITIAL_PROMPT or "").strip(),
- "model": STT_MODEL,
- "device": STT_DEVICE,
- }
diff --git a/backend/services/platform_settings_service.py b/backend/services/platform_settings_service.py
index 6b2cbc9..a2bd252 100644
--- a/backend/services/platform_settings_service.py
+++ b/backend/services/platform_settings_service.py
@@ -1,23 +1,30 @@
-from services.platform_runtime_settings_service import (
- get_auth_token_max_active,
- get_auth_token_ttl_hours,
- default_platform_settings,
- get_allowed_attachment_extensions,
- get_chat_pull_page_size,
- get_page_size,
- get_platform_settings,
- get_platform_settings_snapshot,
- get_speech_runtime_settings,
- get_upload_max_mb,
- get_workspace_download_extensions,
- save_platform_settings,
+from typing import Any, Dict, List
+
+from sqlmodel import Session, select
+
+from core.database import engine
+from core.settings import (
+ DEFAULT_STT_AUDIO_FILTER,
+ DEFAULT_STT_AUDIO_PREPROCESS,
+ DEFAULT_STT_DEFAULT_LANGUAGE,
+ DEFAULT_STT_FORCE_SIMPLIFIED,
+ DEFAULT_STT_INITIAL_PROMPT,
+ DEFAULT_STT_MAX_AUDIO_SECONDS,
+ STT_DEVICE,
+ STT_MODEL,
)
+from models.platform import PlatformSetting
+from schemas.platform import PlatformSettingsPayload
from services.platform_settings_core import (
ACTIVITY_EVENT_RETENTION_SETTING_KEY,
DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS,
DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS,
SETTING_KEYS,
SYSTEM_SETTING_DEFINITIONS,
+ _bootstrap_platform_setting_values,
+ _normalize_extension_list,
+ _read_setting_value,
+ _upsert_setting_row,
)
from services.platform_system_settings_service import (
create_or_update_system_setting,
@@ -26,3 +33,128 @@ from services.platform_system_settings_service import (
get_activity_event_retention_days,
list_system_settings,
)
+
+
+def default_platform_settings() -> PlatformSettingsPayload:
+ bootstrap = _bootstrap_platform_setting_values()
+ return PlatformSettingsPayload(
+ page_size=int(bootstrap["page_size"]),
+ chat_pull_page_size=int(bootstrap["chat_pull_page_size"]),
+ command_auto_unlock_seconds=int(bootstrap["command_auto_unlock_seconds"]),
+ auth_token_ttl_hours=int(bootstrap["auth_token_ttl_hours"]),
+ auth_token_max_active=int(bootstrap["auth_token_max_active"]),
+ upload_max_mb=int(bootstrap["upload_max_mb"]),
+ allowed_attachment_extensions=list(bootstrap["allowed_attachment_extensions"]),
+ workspace_download_extensions=list(bootstrap["workspace_download_extensions"]),
+ speech_enabled=bool(bootstrap["speech_enabled"]),
+ )
+
+
+def get_platform_settings(session: Session) -> PlatformSettingsPayload:
+ defaults = default_platform_settings()
+ ensure_default_system_settings(session)
+ rows = session.exec(select(PlatformSetting).where(PlatformSetting.key.in_(SETTING_KEYS))).all()
+ data: Dict[str, Any] = {row.key: _read_setting_value(row) for row in rows}
+
+ merged = defaults.model_dump()
+ merged["page_size"] = max(1, min(100, int(data.get("page_size") or merged["page_size"])))
+ merged["chat_pull_page_size"] = max(10, min(500, int(data.get("chat_pull_page_size") or merged["chat_pull_page_size"])))
+ merged["command_auto_unlock_seconds"] = max(
+ 1,
+ min(600, int(data.get("command_auto_unlock_seconds") or merged["command_auto_unlock_seconds"])),
+ )
+ merged["auth_token_ttl_hours"] = max(
+ 1,
+ min(720, int(data.get("auth_token_ttl_hours") or merged["auth_token_ttl_hours"])),
+ )
+ merged["auth_token_max_active"] = max(
+ 1,
+ min(20, int(data.get("auth_token_max_active") or merged["auth_token_max_active"])),
+ )
+ merged["upload_max_mb"] = int(data.get("upload_max_mb") or merged["upload_max_mb"])
+ merged["allowed_attachment_extensions"] = _normalize_extension_list(
+ data.get("allowed_attachment_extensions", merged["allowed_attachment_extensions"])
+ )
+ merged["workspace_download_extensions"] = _normalize_extension_list(
+ data.get("workspace_download_extensions", merged["workspace_download_extensions"])
+ )
+ merged["speech_enabled"] = bool(data.get("speech_enabled", merged["speech_enabled"]))
+ return PlatformSettingsPayload.model_validate(merged)
+
+
+def save_platform_settings(session: Session, payload: PlatformSettingsPayload) -> PlatformSettingsPayload:
+ normalized = PlatformSettingsPayload(
+ page_size=max(1, min(100, int(payload.page_size))),
+ chat_pull_page_size=max(10, min(500, int(payload.chat_pull_page_size))),
+ command_auto_unlock_seconds=max(1, min(600, int(payload.command_auto_unlock_seconds))),
+ auth_token_ttl_hours=max(1, min(720, int(payload.auth_token_ttl_hours))),
+ auth_token_max_active=max(1, min(20, int(payload.auth_token_max_active))),
+ upload_max_mb=payload.upload_max_mb,
+ allowed_attachment_extensions=_normalize_extension_list(payload.allowed_attachment_extensions),
+ workspace_download_extensions=_normalize_extension_list(payload.workspace_download_extensions),
+ speech_enabled=bool(payload.speech_enabled),
+ )
+ payload_by_key = normalized.model_dump()
+ for key in SETTING_KEYS:
+ definition = SYSTEM_SETTING_DEFINITIONS[key]
+ _upsert_setting_row(
+ session,
+ key,
+ name=str(definition["name"]),
+ category=str(definition["category"]),
+ description=str(definition["description"]),
+ value_type=str(definition["value_type"]),
+ value=payload_by_key[key],
+ is_public=bool(definition["is_public"]),
+ sort_order=int(definition["sort_order"]),
+ )
+ session.commit()
+ return normalized
+
+
+def get_platform_settings_snapshot() -> PlatformSettingsPayload:
+ with Session(engine) as session:
+ return get_platform_settings(session)
+
+
+def get_upload_max_mb() -> int:
+ return get_platform_settings_snapshot().upload_max_mb
+
+
+def get_allowed_attachment_extensions() -> List[str]:
+ return get_platform_settings_snapshot().allowed_attachment_extensions
+
+
+def get_workspace_download_extensions() -> List[str]:
+ return get_platform_settings_snapshot().workspace_download_extensions
+
+
+def get_page_size() -> int:
+ return get_platform_settings_snapshot().page_size
+
+
+def get_chat_pull_page_size() -> int:
+ return get_platform_settings_snapshot().chat_pull_page_size
+
+
+def get_auth_token_ttl_hours(session: Session) -> int:
+ return get_platform_settings(session).auth_token_ttl_hours
+
+
+def get_auth_token_max_active(session: Session) -> int:
+ return get_platform_settings(session).auth_token_max_active
+
+
+def get_speech_runtime_settings() -> Dict[str, Any]:
+ settings = get_platform_settings_snapshot()
+ return {
+ "enabled": bool(settings.speech_enabled),
+ "max_audio_seconds": int(DEFAULT_STT_MAX_AUDIO_SECONDS),
+ "default_language": str(DEFAULT_STT_DEFAULT_LANGUAGE or "zh").strip().lower() or "zh",
+ "force_simplified": bool(DEFAULT_STT_FORCE_SIMPLIFIED),
+ "audio_preprocess": bool(DEFAULT_STT_AUDIO_PREPROCESS),
+ "audio_filter": str(DEFAULT_STT_AUDIO_FILTER or "").strip(),
+ "initial_prompt": str(DEFAULT_STT_INITIAL_PROMPT or "").strip(),
+ "model": STT_MODEL,
+ "device": STT_DEVICE,
+ }
diff --git a/backend/services/provider_service.py b/backend/services/provider_service.py
new file mode 100644
index 0000000..c856548
--- /dev/null
+++ b/backend/services/provider_service.py
@@ -0,0 +1,79 @@
+from typing import Any, Dict, List
+
+import httpx
+from fastapi import HTTPException
+
+
+def get_provider_defaults(provider: str) -> tuple[str, str]:
+ normalized = str(provider or "").lower().strip()
+ if normalized in {"openai"}:
+ return "openai", "https://api.openai.com/v1"
+ if normalized in {"openrouter"}:
+ return "openrouter", "https://openrouter.ai/api/v1"
+ if normalized in {"dashscope", "aliyun", "qwen", "aliyun-qwen"}:
+ return "dashscope", "https://dashscope.aliyuncs.com/compatible-mode/v1"
+ if normalized in {"deepseek"}:
+ return "deepseek", "https://api.deepseek.com/v1"
+ if normalized in {"xunfei", "iflytek", "xfyun"}:
+ return "openai", "https://spark-api-open.xf-yun.com/v1"
+ if normalized in {"vllm"}:
+ return "openai", ""
+ if normalized in {"kimi", "moonshot"}:
+ return "kimi", "https://api.moonshot.cn/v1"
+ if normalized in {"minimax"}:
+ return "minimax", "https://api.minimax.chat/v1"
+ return normalized, ""
+
+
+async def test_provider_connection(payload: Dict[str, Any]) -> Dict[str, Any]:
+ provider = str(payload.get("provider") or "").strip()
+ api_key = str(payload.get("api_key") or "").strip()
+ model = str(payload.get("model") or "").strip()
+ api_base = str(payload.get("api_base") or "").strip()
+
+ if not provider or not api_key:
+ raise HTTPException(status_code=400, detail="provider and api_key are required")
+
+ normalized_provider, default_base = get_provider_defaults(provider)
+ base = (api_base or default_base).rstrip("/")
+ if normalized_provider not in {"openrouter", "dashscope", "kimi", "minimax", "openai", "deepseek"}:
+ raise HTTPException(status_code=400, detail=f"provider not supported for test: {provider}")
+ if not base:
+ raise HTTPException(status_code=400, detail=f"api_base is required for provider: {provider}")
+
+ headers = {"Authorization": f"Bearer {api_key}"}
+ timeout = httpx.Timeout(20.0, connect=10.0)
+ url = f"{base}/models"
+ try:
+ async with httpx.AsyncClient(timeout=timeout) as client:
+ response = await client.get(url, headers=headers)
+ if response.status_code >= 400:
+ return {
+ "ok": False,
+ "provider": normalized_provider,
+ "status_code": response.status_code,
+ "detail": response.text[:500],
+ }
+ data = response.json()
+ models_raw = data.get("data", []) if isinstance(data, dict) else []
+ model_ids: List[str] = [
+ str(item["id"]) for item in models_raw[:20] if isinstance(item, dict) and item.get("id")
+ ]
+ return {
+ "ok": True,
+ "provider": normalized_provider,
+ "endpoint": url,
+ "models_preview": model_ids[:8],
+ "model_hint": (
+ "model_found"
+ if model and any(model in item for item in model_ids)
+ else ("model_not_listed" if model else "")
+ ),
+ }
+ except Exception as exc:
+ return {
+ "ok": False,
+ "provider": normalized_provider,
+ "endpoint": url,
+ "detail": str(exc),
+ }
diff --git a/backend/services/runtime_service.py b/backend/services/runtime_service.py
index dc9abb5..6acb5f7 100644
--- a/backend/services/runtime_service.py
+++ b/backend/services/runtime_service.py
@@ -13,7 +13,7 @@ from core.database import engine
from core.docker_instance import docker_manager
from core.websocket_manager import manager
from models.bot import BotInstance, BotMessage
-from services.bot_service import _workspace_root
+from services.bot_storage_service import get_bot_workspace_root
from services.cache_service import _invalidate_bot_detail_cache, _invalidate_bot_messages_cache
from services.platform_service import bind_usage_message, finalize_usage_from_packet, record_activity_event
from services.topic_runtime import publish_runtime_topic_packet
@@ -41,6 +41,10 @@ def _queue_runtime_broadcast(bot_id: str, packet: Dict[str, Any]) -> None:
asyncio.run_coroutine_threadsafe(manager.broadcast(bot_id, packet), loop)
+def broadcast_runtime_packet(bot_id: str, packet: Dict[str, Any]) -> None:
+ _queue_runtime_broadcast(bot_id, packet)
+
+
def _normalize_packet_channel(packet: Dict[str, Any]) -> str:
raw = str(packet.get("channel") or packet.get("source") or "").strip().lower()
if raw in {"dashboard", "dashboard_channel", "dashboard-channel"}:
@@ -54,7 +58,7 @@ def _normalize_media_item(bot_id: str, value: Any) -> str:
return ""
if raw.startswith("/root/.nanobot/workspace/"):
return raw[len("/root/.nanobot/workspace/") :].lstrip("/")
- root = _workspace_root(bot_id)
+ root = get_bot_workspace_root(bot_id)
if os.path.isabs(raw):
try:
if os.path.commonpath([root, raw]) == root:
@@ -205,6 +209,10 @@ def _persist_runtime_packet(bot_id: str, packet: Dict[str, Any]) -> Optional[int
return persisted_message_id
+def persist_runtime_packet(bot_id: str, packet: Dict[str, Any]) -> Optional[int]:
+ return _persist_runtime_packet(bot_id, packet)
+
+
def docker_callback(bot_id: str, packet: Dict[str, Any]) -> None:
packet_type = str(packet.get("type", "")).upper()
if packet_type == "RAW_LOG":
@@ -272,3 +280,15 @@ async def _record_agent_loop_ready_warning(
_invalidate_bot_detail_cache(bot_id)
except Exception:
logger.exception("Failed to record agent loop readiness warning for bot_id=%s", bot_id)
+
+
+async def record_agent_loop_ready_warning(
+ bot_id: str,
+ timeout_seconds: float = 12.0,
+ poll_interval_seconds: float = 0.5,
+) -> None:
+ await _record_agent_loop_ready_warning(
+ bot_id,
+ timeout_seconds=timeout_seconds,
+ poll_interval_seconds=poll_interval_seconds,
+ )
diff --git a/backend/services/skill_market_service.py b/backend/services/skill_market_service.py
index f3c64dd..9b57e83 100644
--- a/backend/services/skill_market_service.py
+++ b/backend/services/skill_market_service.py
@@ -19,7 +19,7 @@ from core.utils import (
)
from models.skill import BotSkillInstall, SkillMarketItem
from services.platform_service import get_platform_settings_snapshot
-from services.skill_service import _install_skill_zip_into_workspace, _skills_root
+from services.skill_service import get_bot_skills_root, install_skill_zip_into_workspace
def _skill_market_root() -> str:
@@ -341,7 +341,7 @@ def list_bot_skill_market_items(session: Session, *, bot_id: str) -> List[Dict[s
else (
install_lookup[int(item.id or 0)].status == "INSTALLED"
and all(
- os.path.exists(os.path.join(_skills_root(bot_id), name))
+ os.path.exists(os.path.join(get_bot_skills_root(bot_id), name))
for name in _parse_json_string_list(install_lookup[int(item.id or 0)].installed_entries_json)
)
)
@@ -378,7 +378,7 @@ def install_skill_market_item_for_bot(
).first()
try:
- install_result = _install_skill_zip_into_workspace(bot_id, zip_path)
+ install_result = install_skill_zip_into_workspace(bot_id, zip_path)
now = datetime.utcnow()
if not install_row:
install_row = BotSkillInstall(
diff --git a/backend/services/skill_service.py b/backend/services/skill_service.py
index c4af051..5170918 100644
--- a/backend/services/skill_service.py
+++ b/backend/services/skill_service.py
@@ -11,12 +11,16 @@ from core.utils import (
_is_ignored_skill_zip_top_level,
_is_valid_top_level_skill_name,
)
-from services.bot_storage_service import _workspace_root
+from services.bot_storage_service import get_bot_workspace_root
from services.platform_service import get_platform_settings_snapshot
+def get_bot_skills_root(bot_id: str) -> str:
+ return _skills_root(bot_id)
+
+
def _skills_root(bot_id: str) -> str:
- return os.path.join(_workspace_root(bot_id), "skills")
+ return os.path.join(get_bot_workspace_root(bot_id), "skills")
def _read_skill_description(entry_path: str) -> str:
candidates: List[str] = []
@@ -139,6 +143,10 @@ def _install_skill_zip_into_workspace(bot_id: str, zip_path: str) -> Dict[str, A
}
+def install_skill_zip_into_workspace(bot_id: str, zip_path: str) -> Dict[str, Any]:
+ return _install_skill_zip_into_workspace(bot_id, zip_path)
+
+
def list_bot_skills(bot_id: str) -> List[Dict[str, Any]]:
return _list_workspace_skills(bot_id)
diff --git a/backend/services/topic_runtime/bridge.py b/backend/services/topic_runtime/bridge.py
index de1cf7b..c6e1034 100644
--- a/backend/services/topic_runtime/bridge.py
+++ b/backend/services/topic_runtime/bridge.py
@@ -3,7 +3,7 @@ from typing import Any, Dict, Optional
from sqlmodel import Session
-from services.topic_service import _topic_publish_internal
+from services.topic_service import publish_topic_item
from .publisher import build_topic_publish_payload
@@ -30,6 +30,6 @@ def publish_runtime_topic_packet(
try:
with Session(engine) as session:
- _topic_publish_internal(session, bot_id, topic_payload)
+ publish_topic_item(session, bot_id, topic_payload)
except Exception:
logger.exception("topic auto publish failed for bot %s packet %s", bot_id, packet_type)
diff --git a/backend/services/topic_service.py b/backend/services/topic_service.py
index 0de79c7..9bb7833 100644
--- a/backend/services/topic_service.py
+++ b/backend/services/topic_service.py
@@ -3,13 +3,16 @@ import re
from datetime import datetime
from typing import Any, Dict, List, Optional
+from fastapi import HTTPException
from sqlmodel import Session, select
+from models.bot import BotInstance
from models.topic import TopicItem, TopicTopic
TOPIC_DEDUPE_WINDOW_SECONDS = 10 * 60
TOPIC_LEVEL_SET = {"info", "warn", "error", "success"}
_TOPIC_KEY_RE = re.compile(r"^[a-z0-9][a-z0-9_.-]{0,63}$")
+TOPIC_KEY_RE = _TOPIC_KEY_RE
def _as_bool(value: Any) -> bool:
@@ -101,6 +104,13 @@ def _topic_get_row(session: Session, bot_id: str, topic_key: str) -> Optional[To
).first()
+def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:
+ bot = session.get(BotInstance, bot_id)
+ if not bot:
+ raise HTTPException(status_code=404, detail="Bot not found")
+ return bot
+
+
def _normalize_topic_keywords(raw: Any) -> List[str]:
rows: List[str] = []
if isinstance(raw, list):
@@ -338,3 +348,217 @@ def _topic_publish_internal(session: Session, bot_id: str, payload: Dict[str, An
"item": _topic_item_to_dict(item),
"route": route_result,
}
+
+
+def normalize_topic_key(raw: Any) -> str:
+ return _normalize_topic_key(raw)
+
+
+def list_topics(session: Session, bot_id: str) -> List[Dict[str, Any]]:
+ _get_bot_or_404(session, bot_id)
+ return _list_topics(session, bot_id)
+
+
+def create_topic(
+ session: Session,
+ *,
+ bot_id: str,
+ topic_key: str,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ is_active: bool = True,
+ routing: Optional[Dict[str, Any]] = None,
+ view_schema: Optional[Dict[str, Any]] = None,
+) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ normalized_key = _normalize_topic_key(topic_key)
+ if not normalized_key:
+ raise HTTPException(status_code=400, detail="topic_key is required")
+ if not TOPIC_KEY_RE.fullmatch(normalized_key):
+ raise HTTPException(status_code=400, detail="invalid topic_key")
+ exists = _topic_get_row(session, bot_id, normalized_key)
+ if exists:
+ raise HTTPException(status_code=400, detail=f"Topic already exists: {normalized_key}")
+
+ now = datetime.utcnow()
+ row = TopicTopic(
+ bot_id=bot_id,
+ topic_key=normalized_key,
+ name=str(name or normalized_key).strip() or normalized_key,
+ description=str(description or "").strip(),
+ is_active=bool(is_active),
+ is_default_fallback=False,
+ routing_json=json.dumps(routing or {}, ensure_ascii=False),
+ view_schema_json=json.dumps(view_schema or {}, ensure_ascii=False),
+ created_at=now,
+ updated_at=now,
+ )
+ session.add(row)
+ session.commit()
+ session.refresh(row)
+ return _topic_to_dict(row)
+
+
+def update_topic(
+ session: Session,
+ *,
+ bot_id: str,
+ topic_key: str,
+ updates: Dict[str, Any],
+) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ normalized_key = _normalize_topic_key(topic_key)
+ if not normalized_key:
+ raise HTTPException(status_code=400, detail="topic_key is required")
+ row = _topic_get_row(session, bot_id, normalized_key)
+ if not row:
+ raise HTTPException(status_code=404, detail="Topic not found")
+
+ if "name" in updates:
+ row.name = str(updates.get("name") or "").strip() or row.topic_key
+ if "description" in updates:
+ row.description = str(updates.get("description") or "").strip()
+ if "is_active" in updates:
+ row.is_active = bool(updates.get("is_active"))
+ if "routing" in updates:
+ row.routing_json = json.dumps(updates.get("routing") or {}, ensure_ascii=False)
+ if "view_schema" in updates:
+ row.view_schema_json = json.dumps(updates.get("view_schema") or {}, ensure_ascii=False)
+ row.is_default_fallback = False
+ row.updated_at = datetime.utcnow()
+ session.add(row)
+ session.commit()
+ session.refresh(row)
+ return _topic_to_dict(row)
+
+
+def delete_topic(session: Session, *, bot_id: str, topic_key: str) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ normalized_key = _normalize_topic_key(topic_key)
+ if not normalized_key:
+ raise HTTPException(status_code=400, detail="topic_key is required")
+ row = _topic_get_row(session, bot_id, normalized_key)
+ if not row:
+ raise HTTPException(status_code=404, detail="Topic not found")
+ items = session.exec(
+ select(TopicItem)
+ .where(TopicItem.bot_id == bot_id)
+ .where(TopicItem.topic_key == normalized_key)
+ ).all()
+ for item in items:
+ session.delete(item)
+ session.delete(row)
+ session.commit()
+ return {"status": "deleted", "bot_id": bot_id, "topic_key": normalized_key}
+
+
+def _count_topic_items(
+ session: Session,
+ bot_id: str,
+ topic_key: Optional[str] = None,
+ unread_only: bool = False,
+) -> int:
+ stmt = select(TopicItem).where(TopicItem.bot_id == bot_id)
+ normalized_topic_key = _normalize_topic_key(topic_key or "")
+ if normalized_topic_key:
+ stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
+ rows = session.exec(stmt).all()
+ if unread_only:
+ return sum(1 for row in rows if not bool(row.is_read))
+ return len(rows)
+
+
+def list_topic_items(
+ session: Session,
+ *,
+ bot_id: str,
+ topic_key: Optional[str] = None,
+ cursor: Optional[int] = None,
+ limit: int = 50,
+) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ normalized_limit = max(1, min(int(limit or 50), 100))
+ stmt = select(TopicItem).where(TopicItem.bot_id == bot_id)
+ normalized_topic_key = _normalize_topic_key(topic_key or "")
+ if normalized_topic_key:
+ stmt = stmt.where(TopicItem.topic_key == normalized_topic_key)
+ if cursor is not None:
+ normalized_cursor = int(cursor)
+ if normalized_cursor > 0:
+ stmt = stmt.where(TopicItem.id < normalized_cursor)
+ rows = session.exec(stmt.order_by(TopicItem.id.desc()).limit(normalized_limit + 1)).all()
+ next_cursor: Optional[int] = None
+ if len(rows) > normalized_limit:
+ next_cursor = rows[normalized_limit - 1].id
+ rows = rows[:normalized_limit]
+ return {
+ "bot_id": bot_id,
+ "topic_key": normalized_topic_key or None,
+ "items": [_topic_item_to_dict(row) for row in rows],
+ "next_cursor": next_cursor,
+ "unread_count": _count_topic_items(session, bot_id, normalized_topic_key, unread_only=True),
+ "total_unread_count": _count_topic_items(session, bot_id, unread_only=True),
+ }
+
+
+def get_topic_item_stats(session: Session, *, bot_id: str) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ latest_item = session.exec(
+ select(TopicItem)
+ .where(TopicItem.bot_id == bot_id)
+ .order_by(TopicItem.id.desc())
+ .limit(1)
+ ).first()
+ return {
+ "bot_id": bot_id,
+ "total_count": _count_topic_items(session, bot_id),
+ "unread_count": _count_topic_items(session, bot_id, unread_only=True),
+ "latest_item_id": int(latest_item.id or 0) if latest_item and latest_item.id else None,
+ }
+
+
+def mark_topic_item_read(session: Session, *, bot_id: str, item_id: int) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ row = session.exec(
+ select(TopicItem)
+ .where(TopicItem.bot_id == bot_id)
+ .where(TopicItem.id == item_id)
+ .limit(1)
+ ).first()
+ if not row:
+ raise HTTPException(status_code=404, detail="Topic item not found")
+ if not bool(row.is_read):
+ row.is_read = True
+ session.add(row)
+ session.commit()
+ session.refresh(row)
+ return {
+ "status": "updated",
+ "bot_id": bot_id,
+ "item": _topic_item_to_dict(row),
+ }
+
+
+def delete_topic_item(session: Session, *, bot_id: str, item_id: int) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ row = session.exec(
+ select(TopicItem)
+ .where(TopicItem.bot_id == bot_id)
+ .where(TopicItem.id == item_id)
+ .limit(1)
+ ).first()
+ if not row:
+ raise HTTPException(status_code=404, detail="Topic item not found")
+ payload = _topic_item_to_dict(row)
+ session.delete(row)
+ session.commit()
+ return {
+ "status": "deleted",
+ "bot_id": bot_id,
+ "item": payload,
+ }
+
+
+def publish_topic_item(session: Session, bot_id: str, payload: Dict[str, Any]) -> Dict[str, Any]:
+ _get_bot_or_404(session, bot_id)
+ return _topic_publish_internal(session, bot_id, payload)
diff --git a/backend/services/workspace_service.py b/backend/services/workspace_service.py
index 487b8c2..3b4dfa2 100644
--- a/backend/services/workspace_service.py
+++ b/backend/services/workspace_service.py
@@ -9,7 +9,7 @@ from fastapi import HTTPException, Request, UploadFile
from fastapi.responses import FileResponse, RedirectResponse, Response, StreamingResponse
from core.utils import _workspace_stat_ctime_iso
-from services.bot_storage_service import _workspace_root
+from services.bot_storage_service import get_bot_workspace_root
from services.platform_service import get_platform_settings_snapshot
TEXT_PREVIEW_EXTENSIONS = {
@@ -32,7 +32,7 @@ TEXT_PREVIEW_EXTENSIONS = {
MARKDOWN_EXTENSIONS = {".md", ".markdown"}
def _resolve_workspace_path(bot_id: str, rel_path: Optional[str] = None) -> tuple[str, str]:
- root = _workspace_root(bot_id)
+ root = get_bot_workspace_root(bot_id)
rel = (rel_path or "").strip().replace("\\", "/")
target = os.path.abspath(os.path.join(root, rel))
if os.path.commonpath([root, target]) != root:
@@ -40,6 +40,10 @@ def _resolve_workspace_path(bot_id: str, rel_path: Optional[str] = None) -> tupl
return root, target
+def resolve_workspace_path(bot_id: str, rel_path: Optional[str] = None) -> tuple[str, str]:
+ return _resolve_workspace_path(bot_id, rel_path)
+
+
def _write_text_atomic(target: str, content: str) -> None:
os.makedirs(os.path.dirname(target), exist_ok=True)
tmp = f"{target}.tmp"
@@ -249,7 +253,7 @@ def get_workspace_tree_data(
path: Optional[str] = None,
recursive: bool = False,
) -> Dict[str, Any]:
- root = _workspace_root(bot_id)
+ root = get_bot_workspace_root(bot_id)
if not os.path.isdir(root):
return {"bot_id": bot_id, "root": root, "cwd": "", "parent": None, "entries": []}
diff --git a/design/code-structure-standards.md b/design/code-structure-standards.md
index 64cd687..dd892fb 100644
--- a/design/code-structure-standards.md
+++ b/design/code-structure-standards.md
@@ -161,7 +161,7 @@
### 2.6 前端禁止事项
-- 禁止再次把页面做成“一个文件管状态、接口、弹层、列表、详情、搜索、分页”
+- 禁止把页面做成“一个文件管状态、接口、弹层、列表、详情、搜索、分页”
- 禁止把样式、业务逻辑、视图结构三者重新耦合回单文件
- 禁止创建无明确职责的超通用组件
- 禁止为减少行数而做不可读的过度抽象
@@ -226,12 +226,6 @@ Router 不允许承担:
- 数据库表间拼装
- 本地文件系统读写细节
-Router 文件体量规则:
-
-- 目标:`< 300` 行
-- 可接受上限:`400` 行
-- 超过 `400` 行必须拆成子 router,并由装配层统一 `include_router`
-
### 3.4 Service 规范
Service 必须按业务域内聚组织,而不是为了压缩行数而机械切碎。
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 4b66c61..f43eee6 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -68,26 +68,18 @@ function AppShell() {
const showNavRail = route.kind !== 'bot' && !compactMode;
const showAppNavDrawerEntry = route.kind !== 'bot' && compactMode;
const showBotPanelDrawerEntry = route.kind === 'bot' && compactMode;
+ const appNavDrawerVisible = showAppNavDrawerEntry && appNavDrawerOpen;
+ const botPanelDrawerVisible = showBotPanelDrawerEntry && botPanelDrawerOpen;
+ const activeCompactPanelTab = showBotPanelDrawerEntry ? botCompactPanelTab : 'chat';
const useCompactSimpleHeader = showBotPanelDrawerEntry || showAppNavDrawerEntry;
const headerTitle = showBotPanelDrawerEntry
- ? (botCompactPanelTab === 'runtime' ? t.botPanels.runtime : t.botPanels.chat)
+ ? (activeCompactPanelTab === 'runtime' ? t.botPanels.runtime : t.botPanels.chat)
: routeMeta.title;
useEffect(() => {
document.title = `${t.title} - ${route.kind === 'bot' ? botDocumentTitle : routeMeta.title}`;
}, [botDocumentTitle, route.kind, routeMeta.title, t.title]);
- useEffect(() => {
- if (!showBotPanelDrawerEntry) {
- setBotPanelDrawerOpen(false);
- setBotCompactPanelTab('chat');
- }
- }, [forcedBotId, showBotPanelDrawerEntry]);
-
- useEffect(() => {
- if (!showAppNavDrawerEntry) setAppNavDrawerOpen(false);
- }, [route.kind, showAppNavDrawerEntry]);
-
const botPanelLabels = t.botPanels;
const drawerBotName = String(forcedBot?.name || '').trim() || defaultLoadingTitle;
const drawerBotId = String(forcedBotId || '').trim() || '-';
@@ -152,7 +144,7 @@ function AppShell() {