v0.1.4-p5

main
mula.liu 2026-04-14 10:04:12 +08:00
parent a6ec2368f4
commit fb461a7f5b
42 changed files with 713 additions and 802 deletions

View File

@ -69,15 +69,6 @@ def delete_bot_message(
return delete_bot_message_payload(session, bot_id, message_id)
@router.post("/api/bots/{bot_id}/messages/{message_id}/delete")
def delete_bot_message_post(
bot_id: str,
message_id: int,
session: Session = Depends(get_session),
):
return delete_bot_message_payload(session, bot_id, message_id)
@router.delete("/api/bots/{bot_id}/messages")
def clear_bot_messages(bot_id: str, session: Session = Depends(get_session)):
return clear_bot_messages_payload(session, bot_id)

View File

@ -7,18 +7,16 @@ from bootstrap.app_runtime import reload_platform_runtime
from core.cache import cache
from core.database import get_session
from schemas.platform import PlatformSettingsPayload, SystemSettingPayload
from services.platform_service import (
build_platform_overview,
from services.platform_activity_service import get_bot_activity_stats, list_activity_events
from services.platform_login_log_service import list_login_logs
from services.platform_overview_service import build_platform_overview
from services.platform_settings_service import get_platform_settings, save_platform_settings
from services.platform_system_settings_service import (
create_or_update_system_setting,
delete_system_setting,
get_bot_activity_stats,
get_platform_settings,
list_system_settings,
list_login_logs,
list_activity_events,
list_usage,
save_platform_settings,
)
from services.platform_usage_service import list_usage
router = APIRouter()

View File

@ -3,7 +3,7 @@ from fastapi import APIRouter, HTTPException
from core.speech_service import inspect_speech_model_status
from core.utils import _get_default_system_timezone
from schemas.system import SystemTemplatesUpdateRequest
from services.platform_service import get_platform_settings_snapshot, get_speech_runtime_settings
from services.platform_settings_service import get_platform_settings_snapshot, get_speech_runtime_settings
from services.template_service import (
get_agent_md_templates,
get_topic_presets,
@ -36,7 +36,7 @@ def get_system_defaults():
"page_size": platform_settings.page_size,
"command_auto_unlock_seconds": platform_settings.command_auto_unlock_seconds,
},
"topic_presets": get_topic_presets().get("presets", []),
"topic_presets": get_topic_presets()["presets"],
"speech": {
"enabled": speech_settings["enabled"],
"model": speech_settings["model"],

View File

@ -10,7 +10,8 @@ from core.speech_service import inspect_speech_model_status
from core.settings import DATABASE_URL_DISPLAY, REDIS_ENABLED
from models.bot import BotInstance
from services.default_assets_service import validate_runtime_data_assets
from services.platform_service import get_speech_runtime_settings, prune_expired_activity_events
from services.platform_activity_service import prune_expired_activity_events
from services.platform_settings_service import get_speech_runtime_settings
from services.runtime_service import docker_callback, set_main_loop

View File

@ -1,310 +0,0 @@
import json
import os
from typing import Any, Dict, List
from services.template_service import get_agent_md_templates
def _provider_default_api_base(provider: str) -> str:
normalized = str(provider or "").strip().lower()
if normalized == "openai":
return "https://api.openai.com/v1"
if normalized == "openrouter":
return "https://openrouter.ai/api/v1"
if normalized in {"dashscope", "aliyun", "qwen", "aliyun-qwen"}:
return "https://dashscope.aliyuncs.com/compatible-mode/v1"
if normalized == "deepseek":
return "https://api.deepseek.com/v1"
if normalized in {"xunfei", "iflytek", "xfyun"}:
return "https://spark-api-open.xf-yun.com/v1"
if normalized in {"kimi", "moonshot"}:
return "https://api.moonshot.cn/v1"
if normalized == "minimax":
return "https://api.minimax.chat/v1"
return ""
class BotConfigManager:
    """Generates and maintains a bot's nanobot workspace on disk.

    For each bot id under ``host_data_root`` this writes the
    ``.nanobot/config.json`` runtime configuration plus the bootstrap
    markdown files inside ``.nanobot/workspace``. Missing fields in
    ``bot_data`` fall back to the previously written config and to the
    platform templates, so partial updates are merge-style.
    """

    def __init__(self, host_data_root: str):
        # Root directory that holds one sub-directory per bot id.
        self.host_data_root = host_data_root

    def update_workspace(self, bot_id: str, bot_data: Dict[str, Any], channels: List[Dict[str, Any]]):
        """Generate/update nanobot workspace files and config.json."""
        bot_dir = os.path.join(self.host_data_root, bot_id)
        dot_nanobot_dir = os.path.join(bot_dir, ".nanobot")
        workspace_dir = os.path.join(dot_nanobot_dir, "workspace")
        memory_dir = os.path.join(workspace_dir, "memory")
        skills_dir = os.path.join(workspace_dir, "skills")
        for d in [dot_nanobot_dir, workspace_dir, memory_dir, skills_dir]:
            os.makedirs(d, exist_ok=True)
        template_defaults = get_agent_md_templates()
        # Best-effort load of the existing config so fields not present in
        # bot_data can fall back to the previous values.
        existing_config: Dict[str, Any] = {}
        config_path = os.path.join(dot_nanobot_dir, "config.json")
        if os.path.isfile(config_path):
            try:
                with open(config_path, "r", encoding="utf-8") as f:
                    loaded = json.load(f)
                if isinstance(loaded, dict):
                    existing_config = loaded
            except Exception:
                existing_config = {}
        # Take the first provider entry as the fallback source (configs
        # written by this manager carry a single provider).
        existing_provider_name = ""
        existing_provider_cfg: Dict[str, Any] = {}
        existing_model_name = ""
        providers_cfg = existing_config.get("providers")
        if isinstance(providers_cfg, dict):
            for provider_name, provider_cfg in providers_cfg.items():
                existing_provider_name = str(provider_name or "").strip().lower()
                if isinstance(provider_cfg, dict):
                    existing_provider_cfg = provider_cfg
                break
        agents_cfg = existing_config.get("agents")
        if isinstance(agents_cfg, dict):
            defaults_cfg = agents_cfg.get("defaults")
            if isinstance(defaults_cfg, dict):
                existing_model_name = str(defaults_cfg.get("model") or "").strip()
        # New values win; otherwise keep whatever the previous config had.
        raw_provider_name = (bot_data.get("llm_provider") or existing_provider_name).strip().lower()
        provider_name = raw_provider_name
        model_name = (bot_data.get("llm_model") or existing_model_name).strip()
        api_key = (bot_data.get("api_key") or existing_provider_cfg.get("apiKey") or "").strip()
        api_base = (bot_data.get("api_base") or existing_provider_cfg.get("apiBase") or "").strip() or None
        # Dashboard-facing aliases collapse to the canonical provider key
        # that nanobot understands.
        provider_alias = {
            "aliyun": "dashscope",
            "qwen": "dashscope",
            "aliyun-qwen": "dashscope",
            "moonshot": "kimi",
            # Xunfei Spark provides OpenAI-compatible endpoint.
            "xunfei": "openai",
            "iflytek": "openai",
            "xfyun": "openai",
            "vllm": "openai",
        }
        provider_name = provider_alias.get(provider_name, provider_name)
        if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"}:
            # Spark models are addressed through an explicit "openai/" prefix.
            if model_name and "/" not in model_name:
                model_name = f"openai/{model_name}"
        if not api_base:
            # Try the raw alias first, then the canonical provider name.
            api_base = _provider_default_api_base(raw_provider_name) or _provider_default_api_base(provider_name) or None
        provider_cfg: Dict[str, Any] = {
            "apiKey": api_key,
        }
        if raw_provider_name in {"xunfei", "iflytek", "xfyun", "vllm"}:
            # Remember the original alias so the dashboard can display it.
            provider_cfg["dashboardProviderAlias"] = raw_provider_name
        if api_base:
            provider_cfg["apiBase"] = api_base
        channels_cfg: Dict[str, Any] = {
            "sendProgress": bool(bot_data.get("send_progress", False)),
            "sendToolHints": bool(bot_data.get("send_tool_hints", False)),
        }
        # Preserve existing tool settings; MCP servers override when given.
        existing_tools = existing_config.get("tools")
        tools_cfg: Dict[str, Any] = dict(existing_tools) if isinstance(existing_tools, dict) else {}
        if "mcp_servers" in bot_data:
            mcp_servers = bot_data.get("mcp_servers")
            if isinstance(mcp_servers, dict):
                tools_cfg["mcpServers"] = mcp_servers
        config_data: Dict[str, Any] = {
            "agents": {
                "defaults": {
                    "model": model_name,
                    "temperature": float(bot_data.get("temperature") or 0.2),
                    "topP": float(bot_data.get("top_p") or 1.0),
                    "maxTokens": int(bot_data.get("max_tokens") or 8192),
                }
            },
            "providers": {provider_name: provider_cfg} if provider_name else {},
            "channels": channels_cfg,
        }
        if tools_cfg:
            config_data["tools"] = tools_cfg
        existing_channels = existing_config.get("channels")
        existing_dashboard_cfg = (
            existing_channels.get("dashboard")
            if isinstance(existing_channels, dict) and isinstance(existing_channels.get("dashboard"), dict)
            else {}
        )
        # Dashboard channel is always enabled; host/port/allowFrom survive
        # from the previous config when present.
        dashboard_cfg: Dict[str, Any] = {
            "enabled": True,
            "host": "0.0.0.0",
            "port": 9000,
            "allowFrom": ["*"],
        }
        for key in ("host", "port", "allowFrom"):
            if key in existing_dashboard_cfg:
                dashboard_cfg[key] = existing_dashboard_cfg[key]
        channels_cfg["dashboard"] = dashboard_cfg
        # Translate each DB channel row into its nanobot channel section.
        for channel in channels:
            channel_type = (channel.get("channel_type") or "").strip()
            if not channel_type:
                continue
            # extra_config may arrive as a JSON string or a dict; bad JSON
            # silently degrades to an empty dict.
            raw_extra = channel.get("extra_config")
            extra: Dict[str, Any] = {}
            if isinstance(raw_extra, str) and raw_extra.strip():
                try:
                    parsed = json.loads(raw_extra)
                    if isinstance(parsed, dict):
                        extra = parsed
                except Exception:
                    extra = {}
            elif isinstance(raw_extra, dict):
                extra = raw_extra
            # Dashboard channel is deprecated in DB routing. Global flags now come from bot fields.
            if channel_type == "dashboard":
                continue
            enabled = bool(channel.get("is_active", True))
            external = channel.get("external_app_id", "") or ""
            secret = channel.get("app_secret", "") or ""
            if channel_type == "telegram":
                channels_cfg["telegram"] = {
                    "enabled": enabled,
                    "token": secret,
                    "proxy": extra.get("proxy", ""),
                    "replyToMessage": bool(extra.get("replyToMessage", False)),
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue
            if channel_type == "feishu":
                channels_cfg["feishu"] = {
                    "enabled": enabled,
                    "appId": external,
                    "appSecret": secret,
                    "encryptKey": extra.get("encryptKey", ""),
                    "verificationToken": extra.get("verificationToken", ""),
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue
            if channel_type == "dingtalk":
                channels_cfg["dingtalk"] = {
                    "enabled": enabled,
                    "clientId": external,
                    "clientSecret": secret,
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue
            if channel_type == "slack":
                channels_cfg["slack"] = {
                    "enabled": enabled,
                    "mode": extra.get("mode", "socket"),
                    "botToken": external,
                    "appToken": secret,
                    "replyInThread": bool(extra.get("replyInThread", True)),
                    "groupPolicy": extra.get("groupPolicy", "mention"),
                    "groupAllowFrom": extra.get("groupAllowFrom", []),
                    "reactEmoji": extra.get("reactEmoji", "eyes"),
                }
                continue
            if channel_type == "qq":
                channels_cfg["qq"] = {
                    "enabled": enabled,
                    "appId": external,
                    "secret": secret,
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue
            if channel_type == "weixin":
                # WeChat settings are optional; only non-empty values are
                # written so nanobot defaults apply otherwise.
                weixin_cfg: Dict[str, Any] = {
                    "enabled": enabled,
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                route_tag = str(extra.get("routeTag") or "").strip()
                if route_tag:
                    weixin_cfg["routeTag"] = route_tag
                state_dir = str(extra.get("stateDir") or "").strip()
                if state_dir:
                    weixin_cfg["stateDir"] = state_dir
                base_url = str(extra.get("baseUrl") or "").strip()
                if base_url:
                    weixin_cfg["baseUrl"] = base_url
                cdn_base_url = str(extra.get("cdnBaseUrl") or "").strip()
                if cdn_base_url:
                    weixin_cfg["cdnBaseUrl"] = cdn_base_url
                # Accept both camelCase and snake_case for the poll timeout;
                # non-numeric values are ignored.
                poll_timeout = extra.get("pollTimeout", extra.get("poll_timeout"))
                if poll_timeout not in {None, ""}:
                    try:
                        weixin_cfg["pollTimeout"] = max(1, int(poll_timeout))
                    except (TypeError, ValueError):
                        pass
                channels_cfg["weixin"] = weixin_cfg
                continue
            if channel_type == "email":
                # Numeric fields are clamped to sane ranges (ports 1-65535,
                # poll interval >= 5s, body size >= 1 char).
                channels_cfg["email"] = {
                    "enabled": enabled,
                    "consentGranted": bool(extra.get("consentGranted", False)),
                    "imapHost": extra.get("imapHost", ""),
                    "imapPort": max(1, min(int(extra.get("imapPort", 993) or 993), 65535)),
                    "imapUsername": extra.get("imapUsername", ""),
                    "imapPassword": extra.get("imapPassword", ""),
                    "imapMailbox": extra.get("imapMailbox", "INBOX"),
                    "imapUseSsl": bool(extra.get("imapUseSsl", True)),
                    "smtpHost": extra.get("smtpHost", ""),
                    "smtpPort": max(1, min(int(extra.get("smtpPort", 587) or 587), 65535)),
                    "smtpUsername": extra.get("smtpUsername", ""),
                    "smtpPassword": extra.get("smtpPassword", ""),
                    "smtpUseTls": bool(extra.get("smtpUseTls", True)),
                    "smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
                    "fromAddress": extra.get("fromAddress", ""),
                    "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
                    "pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds", 30) or 30)),
                    "markSeen": bool(extra.get("markSeen", True)),
                    "maxBodyChars": max(1, int(extra.get("maxBodyChars", 12000) or 12000)),
                    "subjectPrefix": extra.get("subjectPrefix", "Re: "),
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue
            # Fallback for future custom channels.
            channels_cfg[channel_type] = {
                "enabled": enabled,
                "appId": external,
                "appSecret": secret,
                **extra,
            }
        with open(config_path, "w", encoding="utf-8") as f:
            json.dump(config_data, f, indent=4, ensure_ascii=False)
        # Workspace markdown falls back to the platform templates when the
        # bot does not supply its own content.
        bootstrap_files = {
            "AGENTS.md": bot_data.get("agents_md") or template_defaults.get("agents_md", ""),
            "SOUL.md": bot_data.get("soul_md") or bot_data.get("system_prompt") or template_defaults.get("soul_md", ""),
            "USER.md": bot_data.get("user_md") or template_defaults.get("user_md", ""),
            "TOOLS.md": bot_data.get("tools_md") or template_defaults.get("tools_md", ""),
            "IDENTITY.md": bot_data.get("identity_md") or template_defaults.get("identity_md", ""),
        }
        for filename, content in bootstrap_files.items():
            file_path = os.path.join(workspace_dir, filename)
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(str(content).strip() + "\n")
        return dot_nanobot_dir

    @staticmethod
    def _normalize_allow_from(raw: Any) -> List[str]:
        # De-duplicate while preserving order; empty/non-list input means
        # "allow all" (wildcard).
        rows: List[str] = []
        if isinstance(raw, list):
            for item in raw:
                text = str(item or "").strip()
                if text and text not in rows:
                    rows.append(text)
        if not rows:
            return ["*"]
        return rows

View File

@ -9,7 +9,7 @@ from pathlib import Path
from typing import Any, Dict, Optional
from core.settings import STT_DEVICE, STT_MODEL, STT_MODEL_DIR
from services.platform_service import get_speech_runtime_settings
from services.platform_settings_service import get_speech_runtime_settings
class SpeechServiceError(RuntimeError):

View File

@ -0,0 +1,3 @@
from providers.bot_workspace_provider import BotWorkspaceProvider
__all__ = ["BotWorkspaceProvider"]

View File

@ -0,0 +1,270 @@
from __future__ import annotations
import json
import os
from typing import Any, Dict, List
# Maps dashboard-facing provider aliases to the canonical provider key used
# inside nanobot's config.json. Xunfei/iFlytek Spark and vLLM expose
# OpenAI-compatible endpoints, so they collapse onto the "openai" key.
_PROVIDER_ALIAS_MAP = {
    "aliyun": "dashscope",
    "qwen": "dashscope",
    "aliyun-qwen": "dashscope",
    "moonshot": "kimi",
    "xunfei": "openai",
    "iflytek": "openai",
    "xfyun": "openai",
    "vllm": "openai",
}
# Workspace markdown files this provider owns and rewrites on every sync.
_MANAGED_WORKSPACE_FILES = ("AGENTS.md", "SOUL.md", "USER.md", "TOOLS.md", "IDENTITY.md")
def _require_text(raw: Any, *, field: str) -> str:
value = str(raw if raw is not None else "").strip()
if not value:
raise RuntimeError(f"Missing required bot runtime field: {field}")
return value
def _normalize_markdown_text(raw: Any, *, field: str) -> str:
if raw is None:
raise RuntimeError(f"Missing required workspace markdown field: {field}")
return str(raw).replace("\r\n", "\n").strip() + "\n"
def _normalize_provider_name(raw_provider_name: str) -> tuple[str, str]:
    """Return ``(normalized, canonical)`` provider names.

    The normalized name is lower-cased and stripped; the canonical name is
    the alias-resolved key used in config.json. Raises RuntimeError on a
    blank provider.
    """
    normalized = raw_provider_name.strip().lower()
    if not normalized:
        raise RuntimeError("Missing required bot runtime field: llm_provider")
    return normalized, _PROVIDER_ALIAS_MAP.get(normalized, normalized)
def _normalize_allow_from(raw: Any) -> List[str]:
rows: List[str] = []
if isinstance(raw, list):
for item in raw:
text = str(item or "").strip()
if text and text not in rows:
rows.append(text)
return rows or ["*"]
def _normalize_extra_config(raw: Any) -> Dict[str, Any]:
if raw is None:
return {}
if not isinstance(raw, dict):
raise RuntimeError("Channel extra_config must be an object")
return dict(raw)
def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
os.makedirs(os.path.dirname(path), exist_ok=True)
tmp_path = f"{path}.tmp"
with open(tmp_path, "w", encoding="utf-8") as file:
json.dump(payload, file, ensure_ascii=False, indent=2)
os.replace(tmp_path, path)
def _write_text_atomic(path: str, content: str) -> None:
os.makedirs(os.path.dirname(path), exist_ok=True)
tmp_path = f"{path}.tmp"
with open(tmp_path, "w", encoding="utf-8") as file:
file.write(content)
os.replace(tmp_path, path)
class BotWorkspaceProvider:
    """Writes a bot's nanobot runtime workspace (config.json + markdown).

    Unlike a merge-based updater, this provider requires every runtime
    field to be present in ``bot_data`` and rewrites all managed files from
    scratch, using atomic temp-file replacement for each write.
    """

    def __init__(self, host_data_root: str):
        # Root directory that holds one sub-directory per bot id.
        self.host_data_root = host_data_root

    def write_workspace(self, bot_id: str, bot_data: Dict[str, Any], channels: List[Dict[str, Any]]) -> str:
        """Materialize the workspace for *bot_id*; return its .nanobot dir.

        Raises RuntimeError when a required runtime field, markdown field,
        or channel extra_config is missing or malformed.
        """
        # Required LLM runtime fields — fail fast when absent or blank.
        raw_provider_name, provider_name = _normalize_provider_name(_require_text(bot_data.get("llm_provider"), field="llm_provider"))
        model_name = _require_text(bot_data.get("llm_model"), field="llm_model")
        api_key = _require_text(bot_data.get("api_key"), field="api_key")
        api_base = _require_text(bot_data.get("api_base"), field="api_base")
        # These raise TypeError/ValueError on missing or non-numeric input
        # by design: sampling parameters are mandatory here.
        temperature = float(bot_data.get("temperature"))
        top_p = float(bot_data.get("top_p"))
        max_tokens = int(bot_data.get("max_tokens"))
        send_progress = bool(bot_data.get("send_progress"))
        send_tool_hints = bool(bot_data.get("send_tool_hints"))
        bot_root = os.path.join(self.host_data_root, bot_id)
        dot_nanobot_dir = os.path.join(bot_root, ".nanobot")
        workspace_dir = os.path.join(dot_nanobot_dir, "workspace")
        memory_dir = os.path.join(workspace_dir, "memory")
        skills_dir = os.path.join(workspace_dir, "skills")
        for path in (dot_nanobot_dir, workspace_dir, memory_dir, skills_dir):
            os.makedirs(path, exist_ok=True)
        provider_cfg: Dict[str, Any] = {
            "apiKey": api_key,
            "apiBase": api_base,
        }
        if raw_provider_name in {"xunfei", "iflytek", "xfyun", "vllm"}:
            # Preserve the dashboard-facing alias for providers that
            # collapse onto the "openai" canonical key.
            provider_cfg["dashboardProviderAlias"] = raw_provider_name
        effective_model_name = model_name
        if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"} and "/" not in model_name:
            # Spark models are addressed through an explicit "openai/" prefix.
            effective_model_name = f"openai/{model_name}"
        config_data: Dict[str, Any] = {
            "agents": {
                "defaults": {
                    "model": effective_model_name,
                    "temperature": temperature,
                    "topP": top_p,
                    "maxTokens": max_tokens,
                }
            },
            "providers": {
                provider_name: provider_cfg,
            },
            "channels": {
                "sendProgress": send_progress,
                "sendToolHints": send_tool_hints,
                # Dashboard channel is always on with fixed host/port.
                "dashboard": {
                    "enabled": True,
                    "host": "0.0.0.0",
                    "port": 9000,
                    "allowFrom": ["*"],
                },
            },
        }
        mcp_servers = bot_data.get("mcp_servers")
        if mcp_servers is not None:
            if not isinstance(mcp_servers, dict):
                raise RuntimeError("mcp_servers must be an object")
            config_data["tools"] = {"mcpServers": mcp_servers}
        channels_cfg = config_data["channels"]
        # Translate each DB channel row into its nanobot channel section;
        # the deprecated "dashboard" row type is ignored.
        for channel in channels:
            channel_type = str(channel.get("channel_type") or "").strip().lower()
            if not channel_type or channel_type == "dashboard":
                continue
            extra = _normalize_extra_config(channel.get("extra_config"))
            enabled = bool(channel.get("is_active"))
            external_app_id = str(channel.get("external_app_id") or "").strip()
            app_secret = str(channel.get("app_secret") or "").strip()
            if channel_type == "telegram":
                channels_cfg["telegram"] = {
                    "enabled": enabled,
                    "token": app_secret,
                    "proxy": str(extra.get("proxy") or "").strip(),
                    "replyToMessage": bool(extra.get("replyToMessage")),
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue
            if channel_type == "feishu":
                channels_cfg["feishu"] = {
                    "enabled": enabled,
                    "appId": external_app_id,
                    "appSecret": app_secret,
                    "encryptKey": str(extra.get("encryptKey") or "").strip(),
                    "verificationToken": str(extra.get("verificationToken") or "").strip(),
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue
            if channel_type == "dingtalk":
                channels_cfg["dingtalk"] = {
                    "enabled": enabled,
                    "clientId": external_app_id,
                    "clientSecret": app_secret,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue
            if channel_type == "slack":
                channels_cfg["slack"] = {
                    "enabled": enabled,
                    "mode": str(extra.get("mode") or "socket"),
                    "botToken": external_app_id,
                    "appToken": app_secret,
                    "replyInThread": bool(extra.get("replyInThread", True)),
                    "groupPolicy": str(extra.get("groupPolicy") or "mention"),
                    "groupAllowFrom": extra.get("groupAllowFrom") if isinstance(extra.get("groupAllowFrom"), list) else [],
                    "reactEmoji": str(extra.get("reactEmoji") or "eyes"),
                }
                continue
            if channel_type == "qq":
                channels_cfg["qq"] = {
                    "enabled": enabled,
                    "appId": external_app_id,
                    "secret": app_secret,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue
            if channel_type == "weixin":
                # Optional WeChat settings are only written when non-empty
                # so nanobot defaults apply otherwise.
                weixin_cfg: Dict[str, Any] = {
                    "enabled": enabled,
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                route_tag = str(extra.get("routeTag") or "").strip()
                if route_tag:
                    weixin_cfg["routeTag"] = route_tag
                state_dir = str(extra.get("stateDir") or "").strip()
                if state_dir:
                    weixin_cfg["stateDir"] = state_dir
                base_url = str(extra.get("baseUrl") or "").strip()
                if base_url:
                    weixin_cfg["baseUrl"] = base_url
                cdn_base_url = str(extra.get("cdnBaseUrl") or "").strip()
                if cdn_base_url:
                    weixin_cfg["cdnBaseUrl"] = cdn_base_url
                # Accept both camelCase and snake_case keys for the poll
                # timeout; non-numeric values raise (strict mode).
                poll_timeout = extra.get("pollTimeout", extra.get("poll_timeout"))
                if poll_timeout not in {None, ""}:
                    weixin_cfg["pollTimeout"] = max(1, int(poll_timeout))
                channels_cfg["weixin"] = weixin_cfg
                continue
            if channel_type == "email":
                # Numeric fields are clamped to sane ranges (ports 1-65535,
                # poll interval >= 5s, body size >= 1 char).
                channels_cfg["email"] = {
                    "enabled": enabled,
                    "consentGranted": bool(extra.get("consentGranted")),
                    "imapHost": str(extra.get("imapHost") or "").strip(),
                    "imapPort": max(1, min(int(extra.get("imapPort") or 993), 65535)),
                    "imapUsername": str(extra.get("imapUsername") or "").strip(),
                    "imapPassword": str(extra.get("imapPassword") or "").strip(),
                    "imapMailbox": str(extra.get("imapMailbox") or "INBOX"),
                    "imapUseSsl": bool(extra.get("imapUseSsl", True)),
                    "smtpHost": str(extra.get("smtpHost") or "").strip(),
                    "smtpPort": max(1, min(int(extra.get("smtpPort") or 587), 65535)),
                    "smtpUsername": str(extra.get("smtpUsername") or "").strip(),
                    "smtpPassword": str(extra.get("smtpPassword") or "").strip(),
                    "smtpUseTls": bool(extra.get("smtpUseTls", True)),
                    "smtpUseSsl": bool(extra.get("smtpUseSsl")),
                    "fromAddress": str(extra.get("fromAddress") or "").strip(),
                    "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
                    "pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds") or 30)),
                    "markSeen": bool(extra.get("markSeen", True)),
                    "maxBodyChars": max(1, int(extra.get("maxBodyChars") or 12000)),
                    "subjectPrefix": str(extra.get("subjectPrefix") or "Re: "),
                    "allowFrom": _normalize_allow_from(extra.get("allowFrom")),
                }
                continue
            # Fallback for unknown/custom channel types: pass extras through.
            channels_cfg[channel_type] = {
                "enabled": enabled,
                "appId": external_app_id,
                "appSecret": app_secret,
                **extra,
            }
        _write_json_atomic(os.path.join(dot_nanobot_dir, "config.json"), config_data)
        # Managed markdown files are always rewritten from bot_data; each
        # field is mandatory (no template fallback in this provider).
        workspace_files = {
            "AGENTS.md": _normalize_markdown_text(bot_data.get("agents_md"), field="agents_md"),
            "SOUL.md": _normalize_markdown_text(bot_data.get("soul_md"), field="soul_md"),
            "USER.md": _normalize_markdown_text(bot_data.get("user_md"), field="user_md"),
            "TOOLS.md": _normalize_markdown_text(bot_data.get("tools_md"), field="tools_md"),
            "IDENTITY.md": _normalize_markdown_text(bot_data.get("identity_md"), field="identity_md"),
        }
        for filename in _MANAGED_WORKSPACE_FILES:
            _write_text_atomic(os.path.join(workspace_dir, filename), workspace_files[filename])
        return dot_nanobot_dir

View File

@ -1,5 +1,6 @@
from typing import Optional, Dict, Any, List
from pydantic import BaseModel
from pydantic import BaseModel, ConfigDict
class ChannelConfigRequest(BaseModel):
@ -21,6 +22,8 @@ class ChannelConfigUpdateRequest(BaseModel):
class BotCreateRequest(BaseModel):
model_config = ConfigDict(extra="forbid")
id: str
name: str
enabled: Optional[bool] = True
@ -29,28 +32,29 @@ class BotCreateRequest(BaseModel):
llm_provider: str
llm_model: str
api_key: str
api_base: Optional[str] = None
system_prompt: Optional[str] = None
api_base: str
temperature: float = 0.2
top_p: float = 1.0
max_tokens: int = 8192
cpu_cores: float = 1.0
memory_mb: int = 1024
storage_gb: int = 10
system_timezone: Optional[str] = None
soul_md: Optional[str] = None
agents_md: Optional[str] = None
user_md: Optional[str] = None
tools_md: Optional[str] = None
system_timezone: str
soul_md: str
agents_md: str
user_md: str
tools_md: str
tools_config: Optional[Dict[str, Any]] = None
env_params: Optional[Dict[str, str]] = None
identity_md: Optional[str] = None
identity_md: str
channels: Optional[List[ChannelConfigRequest]] = None
send_progress: Optional[bool] = None
send_tool_hints: Optional[bool] = None
class BotUpdateRequest(BaseModel):
model_config = ConfigDict(extra="forbid")
name: Optional[str] = None
enabled: Optional[bool] = None
image_tag: Optional[str] = None
@ -66,7 +70,6 @@ class BotUpdateRequest(BaseModel):
memory_mb: Optional[int] = None
storage_gb: Optional[int] = None
system_timezone: Optional[str] = None
system_prompt: Optional[str] = None
agents_md: Optional[str] = None
soul_md: Optional[str] = None
user_md: Optional[str] = None

View File

@ -20,7 +20,7 @@ from services.bot_service import (
)
from services.bot_storage_service import write_bot_env_params
from services.cache_service import _invalidate_bot_detail_cache, _invalidate_bot_messages_cache
from services.platform_service import record_activity_event
from services.platform_activity_service import record_activity_event
from services.runtime_service import docker_callback, record_agent_loop_ready_warning

View File

@ -26,9 +26,7 @@ from services.bot_storage_service import (
write_bot_resource_limits,
)
from services.cache_service import _cache_key_bot_detail, _cache_key_bots_list, _invalidate_bot_detail_cache
from services.platform_service import record_activity_event
from services.provider_service import get_provider_defaults
from services.template_service import get_agent_md_templates
from services.platform_activity_service import record_activity_event
BOT_ID_PATTERN = re.compile(r"^[A-Za-z0-9_]+$")
MANAGED_WORKSPACE_FILENAMES = ("AGENTS.md", "SOUL.md", "USER.md", "TOOLS.md", "IDENTITY.md")
@ -78,6 +76,13 @@ def _cleanup_bot_workspace_root(bot_id: str) -> None:
shutil.rmtree(bot_root, ignore_errors=True)
def _require_runtime_text(raw: Any, *, field: str) -> str:
    """Coerce *raw* to a stripped string; raise HTTP 400 when empty/missing."""
    value = str(raw if raw is not None else "").strip()
    if not value:
        raise HTTPException(status_code=400, detail=f"{field} is required")
    return value
def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[str, Any]:
normalized_bot_id = str(payload.id or "").strip()
if not normalized_bot_id:
@ -101,6 +106,11 @@ def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[st
except ValueError as exc:
raise HTTPException(status_code=400, detail=str(exc)) from exc
llm_provider = _require_runtime_text(payload.llm_provider, field="llm_provider")
llm_model = _require_runtime_text(payload.llm_model, field="llm_model")
api_key = _require_runtime_text(payload.api_key, field="api_key")
api_base = _require_runtime_text(payload.api_base, field="api_base")
bot = BotInstance(
id=normalized_bot_id,
name=payload.name,
@ -109,7 +119,6 @@ def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[st
image_tag=payload.image_tag,
workspace_dir=os.path.join(BOTS_WORKSPACE_ROOT, normalized_bot_id),
)
template_defaults = get_agent_md_templates()
resource_limits = normalize_bot_resource_limits(payload.cpu_cores, payload.memory_mb, payload.storage_gb)
try:
session.add(bot)
@ -130,22 +139,21 @@ def create_bot_record(session: Session, *, payload: BotCreateRequest) -> Dict[st
"sendToolHints": bool(payload.send_tool_hints) if payload.send_tool_hints is not None else False,
},
runtime_overrides={
"llm_provider": payload.llm_provider,
"llm_model": payload.llm_model,
"api_key": payload.api_key,
"api_base": payload.api_base or "",
"llm_provider": llm_provider,
"llm_model": llm_model,
"api_key": api_key,
"api_base": api_base,
"temperature": payload.temperature,
"top_p": payload.top_p,
"max_tokens": payload.max_tokens,
"cpu_cores": resource_limits["cpu_cores"],
"memory_mb": resource_limits["memory_mb"],
"storage_gb": resource_limits["storage_gb"],
"system_prompt": payload.system_prompt or payload.soul_md or template_defaults.get("soul_md", ""),
"soul_md": payload.soul_md or payload.system_prompt or template_defaults.get("soul_md", ""),
"agents_md": payload.agents_md or template_defaults.get("agents_md", ""),
"user_md": payload.user_md or template_defaults.get("user_md", ""),
"tools_md": payload.tools_md or template_defaults.get("tools_md", ""),
"identity_md": payload.identity_md or template_defaults.get("identity_md", ""),
"soul_md": payload.soul_md,
"agents_md": payload.agents_md,
"user_md": payload.user_md,
"tools_md": payload.tools_md,
"identity_md": payload.identity_md,
"send_progress": bool(payload.send_progress) if payload.send_progress is not None else False,
"send_tool_hints": bool(payload.send_tool_hints) if payload.send_tool_hints is not None else False,
},
@ -266,7 +274,6 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
"identity_md",
"send_progress",
"send_tool_hints",
"system_prompt",
}
runtime_overrides: Dict[str, Any] = {}
update_data.pop("tools_config", None) if isinstance(update_data, dict) else None
@ -274,19 +281,12 @@ def update_bot_record(session: Session, *, bot_id: str, payload: BotUpdateReques
if field in update_data:
runtime_overrides[field] = update_data.pop(field)
for text_field in ("llm_provider", "llm_model", "api_key"):
for text_field in ("llm_provider", "llm_model", "api_key", "api_base"):
if text_field in runtime_overrides:
text = str(runtime_overrides.get(text_field) or "").strip()
if not text:
runtime_overrides.pop(text_field, None)
else:
runtime_overrides[text_field] = text
if "api_base" in runtime_overrides:
runtime_overrides["api_base"] = str(runtime_overrides.get("api_base") or "").strip()
if "system_prompt" in runtime_overrides and "soul_md" not in runtime_overrides:
runtime_overrides["soul_md"] = runtime_overrides["system_prompt"]
if "soul_md" in runtime_overrides and "system_prompt" not in runtime_overrides:
runtime_overrides["system_prompt"] = runtime_overrides["soul_md"]
runtime_overrides[text_field] = _require_runtime_text(
runtime_overrides.get(text_field),
field=text_field,
)
if {"cpu_cores", "memory_mb", "storage_gb"} & set(runtime_overrides.keys()):
runtime_overrides.update(
normalize_bot_resource_limits(

View File

@ -6,9 +6,9 @@ from zoneinfo import ZoneInfo
from sqlmodel import Session
from core.config_manager import BotConfigManager
from core.settings import BOTS_WORKSPACE_ROOT, DEFAULT_BOT_SYSTEM_TIMEZONE
from core.settings import BOTS_WORKSPACE_ROOT
from models.bot import BotInstance
from providers.bot_workspace_provider import BotWorkspaceProvider
from schemas.bot import ChannelConfigRequest
from services.bot_storage_service import (
_normalize_env_params,
@ -21,24 +21,14 @@ from services.bot_storage_service import (
normalize_bot_resource_limits,
write_bot_resource_limits,
)
from services.template_service import get_agent_md_templates
config_manager = BotConfigManager(host_data_root=BOTS_WORKSPACE_ROOT)
def get_default_bot_system_timezone() -> str:
    """Return the configured default bot timezone, validated via ZoneInfo.

    Falls back to "Asia/Shanghai" when the setting is unset, blank, or not
    a recognizable IANA timezone name.
    """
    candidate = str(DEFAULT_BOT_SYSTEM_TIMEZONE or "").strip() or "Asia/Shanghai"
    try:
        ZoneInfo(candidate)
    except Exception:
        return "Asia/Shanghai"
    return candidate
workspace_provider = BotWorkspaceProvider(host_data_root=BOTS_WORKSPACE_ROOT)
def normalize_bot_system_timezone(raw: Any) -> str:
value = str(raw or "").strip()
if not value:
return get_default_bot_system_timezone()
raise ValueError("System timezone is required")
try:
ZoneInfo(value)
except Exception as exc:
@ -48,10 +38,9 @@ def normalize_bot_system_timezone(raw: Any) -> str:
def resolve_bot_runtime_env_params(bot_id: str, raw: Optional[Dict[str, str]] = None) -> Dict[str, str]:
env_params = _normalize_env_params(raw if isinstance(raw, dict) else _read_env_store(bot_id))
try:
env_params["TZ"] = normalize_bot_system_timezone(env_params.get("TZ"))
except ValueError:
env_params["TZ"] = get_default_bot_system_timezone()
if "TZ" not in env_params:
raise RuntimeError(f"Missing required TZ in bot env settings: {bot_id}")
env_params["TZ"] = normalize_bot_system_timezone(env_params.get("TZ"))
return env_params
@ -74,15 +63,7 @@ def _normalize_allow_from(raw: Any) -> List[str]:
def read_global_delivery_flags(channels_cfg: Any) -> tuple[bool, bool]:
if not isinstance(channels_cfg, dict):
return False, False
send_progress = channels_cfg.get("sendProgress")
send_tool_hints = channels_cfg.get("sendToolHints")
dashboard_cfg = channels_cfg.get("dashboard")
if isinstance(dashboard_cfg, dict):
if send_progress is None and "sendProgress" in dashboard_cfg:
send_progress = dashboard_cfg.get("sendProgress")
if send_tool_hints is None and "sendToolHints" in dashboard_cfg:
send_tool_hints = dashboard_cfg.get("sendToolHints")
return bool(send_progress), bool(send_tool_hints)
return bool(channels_cfg.get("sendProgress")), bool(channels_cfg.get("sendToolHints"))
def channel_config_to_api(bot_id: str, channel_type: str, cfg: Dict[str, Any]) -> Dict[str, Any]:
@ -342,21 +323,20 @@ def normalize_initial_bot_channels(bot_id: str, channels: Optional[List[ChannelC
return rows
def _read_workspace_md(bot_id: str, filename: str, default_value: str) -> str:
def _read_workspace_md(bot_id: str, filename: str) -> str:
path = os.path.join(_workspace_root(bot_id), filename)
if not os.path.isfile(path):
return default_value
raise RuntimeError(f"Missing required workspace file: {path}")
try:
with open(path, "r", encoding="utf-8") as file:
return file.read().strip()
except Exception:
return default_value
except Exception as exc:
raise RuntimeError(f"Failed to read workspace file: {path}") from exc
def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
config_data = _read_bot_config(bot.id)
env_params = resolve_bot_runtime_env_params(bot.id)
template_defaults = get_agent_md_templates()
provider_name = ""
provider_cfg: Dict[str, Any] = {}
@ -367,6 +347,8 @@ def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
if isinstance(p_cfg, dict):
provider_cfg = p_cfg
break
if not provider_name or not provider_cfg:
raise RuntimeError(f"Missing provider configuration in bot config: {bot.id}")
agents_defaults: Dict[str, Any] = {}
agents_cfg = config_data.get("agents")
@ -374,6 +356,8 @@ def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
defaults = agents_cfg.get("defaults")
if isinstance(defaults, dict):
agents_defaults = defaults
if not agents_defaults:
raise RuntimeError(f"Missing agents.defaults in bot config: {bot.id}")
channels_cfg = config_data.get("channels")
send_progress, send_tool_hints = read_global_delivery_flags(channels_cfg)
@ -382,6 +366,12 @@ def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
llm_model = str(agents_defaults.get("model") or "")
api_key = str(provider_cfg.get("apiKey") or "").strip()
api_base = str(provider_cfg.get("apiBase") or "").strip()
if not llm_model:
raise RuntimeError(f"Missing model in bot config: {bot.id}")
if not api_key:
raise RuntimeError(f"Missing apiKey in bot config: {bot.id}")
if not api_base:
raise RuntimeError(f"Missing apiBase in bot config: {bot.id}")
api_base_lower = api_base.lower()
provider_alias = str(provider_cfg.get("dashboardProviderAlias") or "").strip().lower()
if llm_provider == "openai" and provider_alias in {"xunfei", "iflytek", "xfyun", "vllm"}:
@ -389,7 +379,12 @@ def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
elif llm_provider == "openai" and ("spark-api-open.xf-yun.com" in api_base_lower or "xf-yun.com" in api_base_lower):
llm_provider = "xunfei"
soul_md = _read_workspace_md(bot.id, "SOUL.md", template_defaults.get("soul_md", ""))
tools_cfg = config_data.get("tools")
if tools_cfg is not None and not isinstance(tools_cfg, dict):
raise RuntimeError(f"Invalid tools configuration in bot config: {bot.id}")
mcp_servers = tools_cfg.get("mcpServers") if isinstance(tools_cfg, dict) else None
soul_md = _read_workspace_md(bot.id, "SOUL.md")
resources = _read_bot_resources(bot.id, config_data=config_data)
return {
"llm_provider": llm_provider,
@ -402,15 +397,15 @@ def read_bot_runtime_snapshot(bot: BotInstance) -> Dict[str, Any]:
"cpu_cores": resources["cpu_cores"],
"memory_mb": resources["memory_mb"],
"storage_gb": resources["storage_gb"],
"system_timezone": env_params.get("TZ") or get_default_bot_system_timezone(),
"system_timezone": env_params["TZ"],
"send_progress": send_progress,
"send_tool_hints": send_tool_hints,
"soul_md": soul_md,
"agents_md": _read_workspace_md(bot.id, "AGENTS.md", template_defaults.get("agents_md", "")),
"user_md": _read_workspace_md(bot.id, "USER.md", template_defaults.get("user_md", "")),
"tools_md": _read_workspace_md(bot.id, "TOOLS.md", template_defaults.get("tools_md", "")),
"identity_md": _read_workspace_md(bot.id, "IDENTITY.md", template_defaults.get("identity_md", "")),
"system_prompt": soul_md,
"agents_md": _read_workspace_md(bot.id, "AGENTS.md"),
"user_md": _read_workspace_md(bot.id, "USER.md"),
"tools_md": _read_workspace_md(bot.id, "TOOLS.md"),
"identity_md": _read_workspace_md(bot.id, "IDENTITY.md"),
"mcp_servers": mcp_servers if isinstance(mcp_servers, dict) else None,
}
@ -427,24 +422,23 @@ def serialize_bot_detail(bot: BotInstance) -> Dict[str, Any]:
"avatar_model": "base",
"avatar_skin": "blue_suit",
"image_tag": bot.image_tag,
"llm_provider": runtime.get("llm_provider") or "",
"llm_model": runtime.get("llm_model") or "",
"system_prompt": runtime.get("system_prompt") or "",
"api_base": runtime.get("api_base") or "",
"temperature": _safe_float(runtime.get("temperature"), 0.2),
"top_p": _safe_float(runtime.get("top_p"), 1.0),
"max_tokens": _safe_int(runtime.get("max_tokens"), 8192),
"cpu_cores": _safe_float(runtime.get("cpu_cores"), 1.0),
"memory_mb": _safe_int(runtime.get("memory_mb"), 1024),
"storage_gb": _safe_int(runtime.get("storage_gb"), 10),
"system_timezone": str(runtime.get("system_timezone") or get_default_bot_system_timezone()),
"send_progress": bool(runtime.get("send_progress")),
"send_tool_hints": bool(runtime.get("send_tool_hints")),
"soul_md": runtime.get("soul_md") or "",
"agents_md": runtime.get("agents_md") or "",
"user_md": runtime.get("user_md") or "",
"tools_md": runtime.get("tools_md") or "",
"identity_md": runtime.get("identity_md") or "",
"llm_provider": runtime["llm_provider"],
"llm_model": runtime["llm_model"],
"api_base": runtime["api_base"],
"temperature": runtime["temperature"],
"top_p": runtime["top_p"],
"max_tokens": runtime["max_tokens"],
"cpu_cores": runtime["cpu_cores"],
"memory_mb": runtime["memory_mb"],
"storage_gb": runtime["storage_gb"],
"system_timezone": runtime["system_timezone"],
"send_progress": runtime["send_progress"],
"send_tool_hints": runtime["send_tool_hints"],
"soul_md": runtime["soul_md"],
"agents_md": runtime["agents_md"],
"user_md": runtime["user_md"],
"tools_md": runtime["tools_md"],
"identity_md": runtime["identity_md"],
"workspace_dir": bot.workspace_dir,
"docker_status": bot.docker_status,
"current_state": bot.current_state,
@ -480,43 +474,12 @@ def sync_bot_workspace_channels(
) -> None:
bot = session.get(BotInstance, bot_id)
if not bot:
return
raise RuntimeError(f"Bot not found: {bot_id}")
snapshot = read_bot_runtime_snapshot(bot)
template_defaults = get_agent_md_templates()
bot_data: Dict[str, Any] = {
"name": bot.name,
"system_prompt": snapshot.get("system_prompt") or template_defaults.get("soul_md", ""),
"soul_md": snapshot.get("soul_md") or template_defaults.get("soul_md", ""),
"agents_md": snapshot.get("agents_md") or template_defaults.get("agents_md", ""),
"user_md": snapshot.get("user_md") or template_defaults.get("user_md", ""),
"tools_md": snapshot.get("tools_md") or template_defaults.get("tools_md", ""),
"identity_md": snapshot.get("identity_md") or template_defaults.get("identity_md", ""),
"llm_provider": snapshot.get("llm_provider") or "",
"llm_model": snapshot.get("llm_model") or "",
"api_key": snapshot.get("api_key") or "",
"api_base": snapshot.get("api_base") or "",
"temperature": snapshot.get("temperature"),
"top_p": snapshot.get("top_p"),
"max_tokens": snapshot.get("max_tokens"),
"cpu_cores": snapshot.get("cpu_cores"),
"memory_mb": snapshot.get("memory_mb"),
"storage_gb": snapshot.get("storage_gb"),
"send_progress": bool(snapshot.get("send_progress")),
"send_tool_hints": bool(snapshot.get("send_tool_hints")),
}
bot_data: Dict[str, Any] = dict(snapshot)
if isinstance(runtime_overrides, dict):
for key, value in runtime_overrides.items():
if key in {"api_key", "llm_provider", "llm_model"}:
text = str(value or "").strip()
if not text:
continue
bot_data[key] = text
continue
if key == "api_base":
bot_data[key] = str(value or "").strip()
continue
bot_data[key] = value
bot_data.update(runtime_overrides)
resources = normalize_bot_resource_limits(
bot_data.get("cpu_cores"),
@ -551,5 +514,5 @@ def sync_bot_workspace_channels(
}
)
config_manager.update_workspace(bot_id=bot_id, bot_data=bot_data, channels=normalized_channels)
workspace_provider.write_workspace(bot_id=bot_id, bot_data=bot_data, channels=normalized_channels)
write_bot_resource_limits(bot_id, bot_data.get("cpu_cores"), bot_data.get("memory_mb"), bot_data.get("storage_gb"))

View File

@ -117,13 +117,15 @@ def normalize_bot_env_params(raw: Any) -> Dict[str, str]:
def _read_json_object(path: str) -> Dict[str, Any]:
if not os.path.isfile(path):
return {}
raise RuntimeError(f"Missing required JSON file: {path}")
try:
with open(path, "r", encoding="utf-8") as file:
data = json.load(file)
return data if isinstance(data, dict) else {}
except Exception:
return {}
except Exception as exc:
raise RuntimeError(f"Invalid JSON file: {path}") from exc
if not isinstance(data, dict):
raise RuntimeError(f"JSON file must contain an object: {path}")
return data
def _read_json_value(path: str) -> Any:
@ -187,14 +189,7 @@ def write_bot_resource_limits(bot_id: str, cpu_cores: Any, memory_mb: Any, stora
def _read_bot_resources(bot_id: str, config_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
_ = config_data
path = _resources_json_path(bot_id)
if not os.path.isfile(path):
raise RuntimeError(
f"Missing bot resource file: {path}. "
"Please restore it or recreate the bot configuration; runtime compatibility fallback has been removed."
)
data = _read_json_object(path)
if not data:
raise RuntimeError(f"Invalid bot resource file: {path}.")
return _normalize_resource_limits(
data.get("cpuCores", data.get("cpu_cores")),
data.get("memoryMB", data.get("memory_mb")),

View File

@ -9,11 +9,8 @@ from core.docker_instance import docker_manager
from core.utils import _is_video_attachment_path, _is_visual_attachment_path
from models.bot import BotInstance
from services.bot_service import read_bot_runtime_snapshot
from services.platform_service import (
create_usage_request,
fail_latest_usage,
record_activity_event,
)
from services.platform_activity_service import record_activity_event
from services.platform_usage_service import create_usage_request, fail_latest_usage
from services.runtime_service import broadcast_runtime_packet, persist_runtime_packet
from services.workspace_service import resolve_workspace_path

View File

@ -21,7 +21,8 @@ from services.cache_service import (
_invalidate_bot_detail_cache,
_invalidate_bot_messages_cache,
)
from services.platform_service import get_chat_pull_page_size, record_activity_event
from services.platform_activity_service import record_activity_event
from services.platform_settings_service import get_chat_pull_page_size
def _get_bot_or_404(session: Session, bot_id: str) -> BotInstance:

View File

@ -1,39 +0,0 @@
from services.platform_activity_service import (
get_bot_activity_stats,
list_activity_events,
prune_expired_activity_events,
record_activity_event,
)
from services.platform_login_log_service import list_login_logs
from services.platform_overview_service import build_platform_overview
from services.platform_settings_service import (
ACTIVITY_EVENT_RETENTION_SETTING_KEY,
DEFAULT_ACTIVITY_EVENT_RETENTION_DAYS,
DEFAULT_ALLOWED_ATTACHMENT_EXTENSIONS,
SETTING_KEYS,
SYSTEM_SETTING_DEFINITIONS,
create_or_update_system_setting,
delete_system_setting,
get_activity_event_retention_days,
get_auth_token_max_active,
get_auth_token_ttl_hours,
get_allowed_attachment_extensions,
get_chat_pull_page_size,
get_page_size,
get_platform_settings,
get_platform_settings_snapshot,
get_speech_runtime_settings,
get_upload_max_mb,
get_workspace_download_extensions,
list_system_settings,
save_platform_settings,
validate_required_system_settings,
)
from services.platform_usage_service import (
bind_usage_message,
create_usage_request,
estimate_tokens,
fail_latest_usage,
finalize_usage_from_packet,
list_usage,
)

View File

@ -34,8 +34,8 @@ async def test_provider_connection(payload: Dict[str, Any]) -> Dict[str, Any]:
if not provider or not api_key:
raise HTTPException(status_code=400, detail="provider and api_key are required")
normalized_provider, default_base = get_provider_defaults(provider)
base = (api_base or default_base).rstrip("/")
normalized_provider, _ = get_provider_defaults(provider)
base = api_base.rstrip("/")
if normalized_provider not in {"openrouter", "dashscope", "kimi", "minimax", "openai", "deepseek"}:
raise HTTPException(status_code=400, detail=f"provider not supported for test: {provider}")
if not base:

View File

@ -15,7 +15,8 @@ from core.websocket_manager import manager
from models.bot import BotInstance, BotMessage
from services.bot_storage_service import get_bot_workspace_root
from services.cache_service import _invalidate_bot_detail_cache, _invalidate_bot_messages_cache
from services.platform_service import bind_usage_message, finalize_usage_from_packet, record_activity_event
from services.platform_activity_service import record_activity_event
from services.platform_usage_service import bind_usage_message, finalize_usage_from_packet
from services.topic_runtime import publish_runtime_topic_packet
logger = logging.getLogger("dashboard.backend")

View File

@ -18,7 +18,7 @@ from core.utils import (
_sanitize_zip_filename,
)
from models.skill import BotSkillInstall, SkillMarketItem
from services.platform_service import get_platform_settings_snapshot
from services.platform_settings_service import get_platform_settings_snapshot
from services.skill_service import get_bot_skills_root, install_skill_zip_into_workspace

View File

@ -12,7 +12,7 @@ from core.utils import (
_is_valid_top_level_skill_name,
)
from services.bot_storage_service import get_bot_workspace_root
from services.platform_service import get_platform_settings_snapshot
from services.platform_settings_service import get_platform_settings_snapshot
def get_bot_skills_root(bot_id: str) -> str:

View File

@ -14,7 +14,7 @@ from core.speech_service import (
WhisperSpeechService,
)
from models.bot import BotInstance
from services.platform_service import get_speech_runtime_settings
from services.platform_settings_service import get_speech_runtime_settings
async def transcribe_bot_speech_upload(

View File

@ -44,6 +44,12 @@ def _write_json_atomic(path: str, payload: Dict[str, Any]) -> None:
def get_agent_md_templates() -> Dict[str, str]:
raw = _load_json_object(AGENT_MD_TEMPLATES_FILE, label="agent templates")
missing_keys = [key for key in TEMPLATE_KEYS if key not in raw]
if missing_keys:
raise RuntimeError(
"Agent template file is missing required keys: "
f"{', '.join(missing_keys)}. File: {Path(AGENT_MD_TEMPLATES_FILE).resolve()}"
)
return {key: _normalize_md_text(raw.get(key)) for key in TEMPLATE_KEYS}
@ -51,8 +57,17 @@ def get_topic_presets() -> Dict[str, Any]:
raw = _load_json_object(TOPIC_PRESETS_TEMPLATES_FILE, label="topic presets")
presets = raw.get("presets")
if not isinstance(presets, list):
return {"presets": []}
return {"presets": [dict(row) for row in presets if isinstance(row, dict)]}
raise RuntimeError(
f"Topic presets file must contain a presets array: {Path(TOPIC_PRESETS_TEMPLATES_FILE).resolve()}"
)
invalid_rows = [index for index, row in enumerate(presets) if not isinstance(row, dict)]
if invalid_rows:
raise RuntimeError(
"Topic presets file contains non-object entries at indexes: "
f"{', '.join(str(index) for index in invalid_rows)}. "
f"File: {Path(TOPIC_PRESETS_TEMPLATES_FILE).resolve()}"
)
return {"presets": [dict(row) for row in presets]}
def update_agent_md_templates(raw: Dict[str, Any]) -> Dict[str, str]:
@ -63,15 +78,21 @@ def update_agent_md_templates(raw: Dict[str, Any]) -> Dict[str, str]:
def update_topic_presets(raw: Dict[str, Any]) -> Dict[str, Any]:
presets = raw.get("presets") if isinstance(raw, dict) else None
if presets is None:
payload: Dict[str, List[Dict[str, Any]]] = {"presets": []}
elif isinstance(presets, list):
payload = {"presets": [dict(row) for row in presets if isinstance(row, dict)]}
else:
if not isinstance(presets, list):
raise ValueError("topic_presets.presets must be an array")
invalid_rows = [index for index, row in enumerate(presets) if not isinstance(row, dict)]
if invalid_rows:
raise ValueError(
"topic_presets.presets must contain objects only; invalid indexes: "
+ ", ".join(str(index) for index in invalid_rows)
)
payload: Dict[str, List[Dict[str, Any]]] = {"presets": [dict(row) for row in presets]}
_write_json_atomic(str(TOPIC_PRESETS_TEMPLATES_FILE), payload)
return payload
def get_agent_template_value(key: str) -> str:
return get_agent_md_templates().get(key, "")
templates = get_agent_md_templates()
if key not in templates:
raise KeyError(f"Unknown agent template key: {key}")
return templates[key]

View File

@ -10,7 +10,7 @@ from fastapi.responses import FileResponse, RedirectResponse, Response, Streamin
from core.utils import _workspace_stat_ctime_iso
from services.bot_storage_service import get_bot_workspace_root
from services.platform_service import get_platform_settings_snapshot
from services.platform_settings_service import get_platform_settings_snapshot
TEXT_PREVIEW_EXTENSIONS = {
"",

View File

@ -216,9 +216,12 @@ export function BaseConfigModal({
<label className="field-label">{isZh ? '系统时区' : 'System Timezone'}</label>
<LucentSelect
value={editForm.system_timezone || defaultSystemTimezone}
value={editForm.system_timezone}
onChange={(e) => onEditFormChange({ system_timezone: e.target.value })}
>
<option value="" disabled>
{isZh ? '请选择系统时区' : 'Select system timezone'}
</option>
{systemTimezoneOptions.map((option) => (
<option key={option.value} value={option.value}>
{option.value}
@ -228,6 +231,11 @@ export function BaseConfigModal({
<div className="field-label">
{isZh ? '提示:系统时区保存后,在 Bot 重启后生效。' : 'Tip: timezone changes take effect after the bot restarts.'}
</div>
{defaultSystemTimezone ? (
<div className="field-label">
{isZh ? `系统默认值:${defaultSystemTimezone}` : `System default: ${defaultSystemTimezone}`}
</div>
) : null}
<div className="section-mini-title" style={{ marginTop: 10 }}>
{isZh ? '硬件资源' : 'Hardware Resources'}

View File

@ -1,12 +1,13 @@
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useMemo, useRef, useState } from 'react';
import { ChevronLeft, ChevronRight, Hammer, RefreshCw } from 'lucide-react';
import '../../../components/skill-market/SkillMarketShared.css';
import type { BotSkillMarketItem } from '../../platform/types';
import { LucentIconButton } from '../../../components/lucent/LucentIconButton';
import { useLucentPrompt } from '../../../components/lucent/useLucentPrompt';
import { ProtectedSearchInput } from '../../../components/ProtectedSearchInput';
import { resolveApiErrorMessage } from '../../../shared/http/apiErrors';
import { ModalCardShell } from '../../../shared/ui/ModalCardShell';
import { fetchPreferredPlatformPageSize } from '../../platform/api/settings';
import { readCachedPlatformPageSize } from '../../../utils/platformPageSize';
interface SkillMarketInstallModalProps {
isZh: boolean;
@ -38,23 +39,37 @@ function SkillMarketInstallModalContent({
onInstall,
formatBytes,
}: Omit<SkillMarketInstallModalProps, 'open'>) {
const { notify } = useLucentPrompt();
const [search, setSearch] = useState('');
const [page, setPage] = useState(1);
const [pageSize, setPageSize] = useState(() => readCachedPlatformPageSize(10));
const [pageSize, setPageSize] = useState(10);
const initializedRef = useRef(false);
useEffect(() => {
if (initializedRef.current) {
return undefined;
}
initializedRef.current = true;
let cancelled = false;
void onRefresh();
void (async () => {
const nextPageSize = await fetchPreferredPlatformPageSize(10);
if (!cancelled) {
setPageSize(nextPageSize);
try {
const nextPageSize = await fetchPreferredPlatformPageSize();
if (!cancelled) {
setPageSize(nextPageSize);
}
} catch (error: unknown) {
if (!cancelled) {
notify(resolveApiErrorMessage(error, isZh ? '读取分页配置失败。' : 'Failed to load page size setting.'), {
tone: 'error',
});
}
}
})();
return () => {
cancelled = true;
};
}, [onRefresh]);
}, [isZh, notify, onRefresh]);
const filteredItems = useMemo(() => {
const keyword = search.trim().toLowerCase();

View File

@ -45,6 +45,7 @@ export function useBotDashboardModule({
const [botListPageSize, setBotListPageSize] = useState(10);
const [chatPullPageSize, setChatPullPageSize] = useState(60);
const [commandAutoUnlockSeconds, setCommandAutoUnlockSeconds] = useState(10);
const isZh = locale === 'zh';
const botSearchInputName = `nbot-search-${useId().replace(/:/g, '-')}`;
const workspaceSearchInputName = `nbot-workspace-search-${useId().replace(/:/g, '-')}`;
const {
@ -58,6 +59,8 @@ export function useBotDashboardModule({
voiceMaxSeconds,
workspaceDownloadExtensions,
} = useDashboardSystemDefaults({
isZh,
notify,
setBotListPageSize,
setChatPullPageSize,
setCommandAutoUnlockSeconds,
@ -108,7 +111,6 @@ export function useBotDashboardModule({
});
const messages = selectedBot?.messages || [];
const events = selectedBot?.events || [];
const isZh = locale === 'zh';
const t = pickLocale(locale, { 'zh-cn': dashboardZhCn, en: dashboardEn });
const lc = isZh ? channelsZhCn : channelsEn;
const passwordToggleLabels = isZh
@ -178,7 +180,6 @@ export function useBotDashboardModule({
updateEditForm,
updateParamDraft,
} = useDashboardBotEditor({
defaultSystemTimezone,
ensureSelectedBotDetail,
isZh,
notify,

View File

@ -55,7 +55,6 @@ interface NotifyOptions {
}
interface UseDashboardBotEditorOptions {
defaultSystemTimezone: string;
ensureSelectedBotDetail: () => Promise<BotState | undefined>;
isZh: boolean;
notify: (message: string, options?: NotifyOptions) => void;
@ -68,7 +67,6 @@ interface UseDashboardBotEditorOptions {
}
export function useDashboardBotEditor({
defaultSystemTimezone,
ensureSelectedBotDetail,
isZh,
notify,
@ -100,7 +98,7 @@ export function useDashboardBotEditor({
llm_model: bot.llm_model || '',
image_tag: bot.image_tag || '',
api_key: '',
api_base: bot.api_base || getLlmProviderDefaultApiBase(provider),
api_base: bot.api_base || '',
temperature: clampTemperature(bot.temperature ?? 0.2),
top_p: bot.top_p ?? 1,
max_tokens: clampMaxTokens(bot.max_tokens ?? 8192),
@ -109,7 +107,7 @@ export function useDashboardBotEditor({
storage_gb: clampStorageGb(bot.storage_gb ?? 10),
system_timezone: bot.system_timezone || '',
agents_md: bot.agents_md || '',
soul_md: bot.soul_md || bot.system_prompt || '',
soul_md: bot.soul_md || '',
user_md: bot.user_md || '',
tools_md: bot.tools_md || '',
identity_md: bot.identity_md || '',
@ -154,11 +152,6 @@ export function useDashboardBotEditor({
setShowAgentModal(false);
}, []);
useEffect(() => {
if (!defaultSystemTimezone || editForm.system_timezone) return;
updateEditForm({ system_timezone: defaultSystemTimezone });
}, [defaultSystemTimezone, editForm.system_timezone, updateEditForm]);
useEffect(() => {
if (!selectedBotId) return;
if (showBaseModal || showParamModal || showAgentModal) return;
@ -207,7 +200,7 @@ export function useDashboardBotEditor({
provider: editForm.llm_provider,
model: editForm.llm_model,
api_key: editForm.api_key.trim(),
api_base: editForm.api_base || undefined,
api_base: editForm.api_base.trim(),
});
if (res.data?.ok) {
const preview = (res.data.models_preview || []).slice(0, 3).join(', ');
@ -233,10 +226,15 @@ export function useDashboardBotEditor({
try {
const payload: Record<string, string | number> = {};
if (mode === 'base') {
const normalizedSystemTimezone = editForm.system_timezone.trim();
if (!normalizedSystemTimezone) {
notify(isZh ? '系统时区不能为空。' : 'System timezone is required.', { tone: 'warning' });
return;
}
payload.name = editForm.name;
payload.access_password = editForm.access_password;
payload.image_tag = editForm.image_tag;
payload.system_timezone = editForm.system_timezone.trim() || defaultSystemTimezone;
payload.system_timezone = normalizedSystemTimezone;
const normalizedImageTag = String(editForm.image_tag || '').trim();
const selectedImage = availableImages.find((row) => String(row.tag || '').trim() === normalizedImageTag);
const selectedImageStatus = String(selectedImage?.status || '').toUpperCase();
@ -265,7 +263,7 @@ export function useDashboardBotEditor({
if (mode === 'params') {
payload.llm_provider = editForm.llm_provider;
payload.llm_model = editForm.llm_model;
payload.api_base = editForm.api_base;
payload.api_base = editForm.api_base.trim();
if (editForm.api_key.trim()) payload.api_key = editForm.api_key.trim();
payload.temperature = clampTemperature(Number(editForm.temperature));
payload.top_p = Number(editForm.top_p);
@ -298,7 +296,6 @@ export function useDashboardBotEditor({
}, [
availableImages,
closeModals,
defaultSystemTimezone,
editForm,
isZh,
notify,

View File

@ -192,41 +192,6 @@ export function useDashboardChatMessageActions({
removeConversationMessageLocally(message, targetMessageId);
notify(t.deleteMessageDone, { tone: 'success' });
} catch (error: unknown) {
if (axios.isAxiosError(error) && error.response?.status === 404) {
try {
await axios.post(`${APP_ENDPOINTS.apiBase}/bots/${selectedBotId}/messages/${targetMessageId}/delete`);
removeConversationMessageLocally(message, targetMessageId);
notify(t.deleteMessageDone, { tone: 'success' });
return;
} catch {
// continue to secondary re-match fallback below
}
}
if (axios.isAxiosError(error) && error.response?.status === 404) {
const refreshedMessageId = Number(await resolveMessageIdFromLatest(message));
if (Number.isFinite(refreshedMessageId) && refreshedMessageId > 0 && refreshedMessageId !== targetMessageId) {
try {
await deleteConversationMessageOnServer(refreshedMessageId);
removeConversationMessageLocally(message, refreshedMessageId);
notify(t.deleteMessageDone, { tone: 'success' });
return;
} catch (retryError: unknown) {
if (axios.isAxiosError(retryError) && retryError.response?.status === 404) {
try {
await axios.post(`${APP_ENDPOINTS.apiBase}/bots/${selectedBotId}/messages/${refreshedMessageId}/delete`);
removeConversationMessageLocally(message, refreshedMessageId);
notify(t.deleteMessageDone, { tone: 'success' });
return;
} catch (postRetryError: unknown) {
notify(resolveApiErrorMessage(postRetryError, t.deleteMessageFail), { tone: 'error' });
return;
}
}
notify(resolveApiErrorMessage(retryError, t.deleteMessageFail), { tone: 'error' });
return;
}
}
}
notify(resolveApiErrorMessage(error, t.deleteMessageFail), { tone: 'error' });
} finally {
setDeletingMessageIdMap((prev) => {

View File

@ -73,6 +73,38 @@ export function useDashboardSkillsConfig({
setShowSkillsModal(true);
}, [closeRuntimeMenu, loadBotSkills, selectedBot]);
const closeSkillsModal = useCallback(() => {
setSkillAddMenuOpen(false);
setShowSkillsModal(false);
}, []);
const refreshSkills = useCallback(() => {
if (!selectedBot) return undefined;
return loadBotSkills(selectedBot.id);
}, [loadBotSkills, selectedBot]);
const openSkillMarketplace = useCallback(() => {
if (!selectedBot) return;
setSkillAddMenuOpen(false);
setShowSkillMarketInstallModal(true);
}, [selectedBot]);
const closeSkillMarketInstallModal = useCallback(() => {
setShowSkillMarketInstallModal(false);
}, []);
const refreshSkillMarketplace = useCallback(async () => {
if (!selectedBot) return;
await loadMarketSkills(selectedBot.id);
}, [loadMarketSkills, selectedBot]);
const handleInstallMarketSkill = useCallback(async (skill: BotSkillMarketItem) => {
await installMarketSkill(skill);
if (selectedBot) {
await loadBotSkills(selectedBot.id);
}
}, [installMarketSkill, loadBotSkills, selectedBot]);
const resetSkillsPanels = useCallback(() => {
setShowSkillsModal(false);
setShowSkillMarketInstallModal(false);
@ -89,21 +121,13 @@ export function useDashboardSkillsConfig({
skillZipPickerRef,
skillAddMenuRef,
skillAddMenuOpen,
onClose: () => {
setSkillAddMenuOpen(false);
setShowSkillsModal(false);
},
onRefreshSkills: () => (selectedBot ? loadBotSkills(selectedBot.id) : undefined),
onClose: closeSkillsModal,
onRefreshSkills: refreshSkills,
onRemoveSkill: removeBotSkill,
onPickSkillZip: onPickSkillZip as (event: ChangeEvent<HTMLInputElement>) => void,
onSetSkillAddMenuOpen: setSkillAddMenuOpen as Dispatch<SetStateAction<boolean>>,
onTriggerSkillZipUpload: triggerSkillZipUpload,
onOpenSkillMarketplace: async () => {
if (!selectedBot) return;
setSkillAddMenuOpen(false);
await loadMarketSkills(selectedBot.id);
setShowSkillMarketInstallModal(true);
},
onOpenSkillMarketplace: openSkillMarketplace,
};
const skillMarketInstallModalProps = {
@ -112,17 +136,9 @@ export function useDashboardSkillsConfig({
items: marketSkills,
loading: isMarketSkillsLoading,
installingId: typeof marketSkillInstallingId === 'number' ? marketSkillInstallingId : null,
onClose: () => setShowSkillMarketInstallModal(false),
onRefresh: async () => {
if (!selectedBot) return;
await loadMarketSkills(selectedBot.id);
},
onInstall: async (skill: BotSkillMarketItem) => {
await installMarketSkill(skill);
if (selectedBot) {
await loadBotSkills(selectedBot.id);
}
},
onClose: closeSkillMarketInstallModal,
onRefresh: refreshSkillMarketplace,
onInstall: handleInstallMarketSkill,
formatBytes,
};

View File

@ -1,5 +1,6 @@
import { useCallback, useEffect, useState, type Dispatch, type SetStateAction } from 'react';
import { resolveApiErrorMessage } from '../../../shared/http/apiErrors';
import { parseAllowedAttachmentExtensions, parseWorkspaceDownloadExtensions } from '../../../shared/workspace/utils';
import { normalizePlatformPageSize } from '../../../utils/platformPageSize';
import { fetchDashboardSystemDefaults } from '../api/system';
@ -7,6 +8,8 @@ import { parseTopicPresets } from '../topic/topicPresetUtils';
import type { SystemDefaultsResponse, TopicPresetTemplate } from '../types';
interface UseDashboardSystemDefaultsOptions {
isZh: boolean;
notify: (message: string, options?: { tone?: 'error' | 'success' | 'warning' | 'info' }) => void;
setBotListPageSize: Dispatch<SetStateAction<number>>;
setChatPullPageSize?: Dispatch<SetStateAction<number>>;
setCommandAutoUnlockSeconds?: Dispatch<SetStateAction<number>>;
@ -38,6 +41,8 @@ function resolveCommandAutoUnlockSeconds(raw: unknown) {
}
export function useDashboardSystemDefaults({
isZh,
notify,
setBotListPageSize,
setChatPullPageSize,
setCommandAutoUnlockSeconds,
@ -59,9 +64,7 @@ export function useDashboardSystemDefaults({
const nextSystemTimezone = String(data?.bot?.system_timezone || '').trim();
setDefaultSystemTimezone(nextSystemTimezone);
setBotListPageSize((prev) =>
normalizePlatformPageSize(data?.chat?.page_size, normalizePlatformPageSize(prev, 10)),
);
setBotListPageSize(normalizePlatformPageSize(data?.chat?.page_size, 10));
setChatPullPageSize?.(resolveChatPullPageSize(data?.chat?.pull_page_size));
setCommandAutoUnlockSeconds?.(resolveCommandAutoUnlockSeconds(data?.chat?.command_auto_unlock_seconds));
setAllowedAttachmentExtensions(
@ -91,8 +94,16 @@ export function useDashboardSystemDefaults({
const data = await fetchDashboardSystemDefaults();
if (!alive) return;
applySystemDefaults(data);
} catch {
// Keep current UI state when system defaults are unavailable.
} catch (error: unknown) {
if (alive) {
notify(
resolveApiErrorMessage(
error,
isZh ? '读取系统默认配置失败。' : 'Failed to load system defaults.',
),
{ tone: 'error' },
);
}
} finally {
if (alive) {
setBotListPageSizeReady(true);
@ -103,34 +114,26 @@ export function useDashboardSystemDefaults({
return () => {
alive = false;
};
}, [applySystemDefaults]);
}, [applySystemDefaults, isZh, notify]);
const refreshAttachmentPolicy = useCallback(async () => {
try {
const data = await fetchDashboardSystemDefaults();
const nextUploadMaxMb = resolveUploadMaxMb(data?.limits?.upload_max_mb);
const nextAllowedAttachmentExtensions = parseAllowedAttachmentExtensions(
data?.workspace?.allowed_attachment_extensions,
);
const nextWorkspaceDownloadExtensions = parseWorkspaceDownloadExtensions(
data?.workspace?.download_extensions,
);
setUploadMaxMb(nextUploadMaxMb);
setAllowedAttachmentExtensions(nextAllowedAttachmentExtensions);
setWorkspaceDownloadExtensions(nextWorkspaceDownloadExtensions);
return {
uploadMaxMb: nextUploadMaxMb,
allowedAttachmentExtensions: nextAllowedAttachmentExtensions,
workspaceDownloadExtensions: nextWorkspaceDownloadExtensions,
};
} catch {
return {
uploadMaxMb,
allowedAttachmentExtensions,
workspaceDownloadExtensions,
};
}
}, [allowedAttachmentExtensions, uploadMaxMb, workspaceDownloadExtensions]);
const data = await fetchDashboardSystemDefaults();
const nextUploadMaxMb = resolveUploadMaxMb(data?.limits?.upload_max_mb);
const nextAllowedAttachmentExtensions = parseAllowedAttachmentExtensions(
data?.workspace?.allowed_attachment_extensions,
);
const nextWorkspaceDownloadExtensions = parseWorkspaceDownloadExtensions(
data?.workspace?.download_extensions,
);
setUploadMaxMb(nextUploadMaxMb);
setAllowedAttachmentExtensions(nextAllowedAttachmentExtensions);
setWorkspaceDownloadExtensions(nextWorkspaceDownloadExtensions);
return {
uploadMaxMb: nextUploadMaxMb,
allowedAttachmentExtensions: nextAllowedAttachmentExtensions,
workspaceDownloadExtensions: nextWorkspaceDownloadExtensions,
};
}, []);
return {
allowedAttachmentExtensions,

View File

@ -110,9 +110,12 @@ export function BotWizardBaseStep({
<div className="section-mini-title">{isZh ? '基础信息' : 'Basic Info'}</div>
<label className="field-label">{isZh ? '系统时区' : 'System Timezone'}</label>
<LucentSelect
value={form.system_timezone || defaultSystemTimezone}
value={form.system_timezone}
onChange={(e) => setForm((prev) => ({ ...prev, system_timezone: e.target.value }))}
>
<option value="" disabled>
{isZh ? '请选择系统时区' : 'Select system timezone'}
</option>
{systemTimezoneOptions.map((option) => (
<option key={option.value} value={option.value}>
{option.value}
@ -122,6 +125,11 @@ export function BotWizardBaseStep({
<div className="field-label">
{isZh ? '提示:系统时区保存后,在 Bot 重启后生效。' : 'Tip: timezone changes take effect after the bot restarts.'}
</div>
{defaultSystemTimezone ? (
<div className="field-label">
{isZh ? `系统默认值:${defaultSystemTimezone}` : `System default: ${defaultSystemTimezone}`}
</div>
) : null}
<div className="section-mini-title" style={{ marginTop: 10 }}>
{isZh ? '资源配额' : 'Resource Limits'}

View File

@ -36,10 +36,15 @@ export function BotWizardReviewStep({
<div>CPU: {Number(form.cpu_cores) === 0 ? (isZh ? '不限' : 'Unlimited') : form.cpu_cores}</div>
<div>{isZh ? '内存' : 'Memory'}: {Number(form.memory_mb) === 0 ? (isZh ? '不限' : 'Unlimited') : `${form.memory_mb} MB`}</div>
<div>{isZh ? '存储' : 'Storage'}: {Number(form.storage_gb) === 0 ? (isZh ? '不限' : 'Unlimited') : `${form.storage_gb} GB`}</div>
<div>{isZh ? '系统时区' : 'System Timezone'}: {form.system_timezone || defaultSystemTimezone}</div>
<div>{isZh ? '系统时区' : 'System Timezone'}: {form.system_timezone || '-'}</div>
<div>{ui.channels}: {configuredChannelsLabel}</div>
<div>{ui.tools}: {envEntries.map(([key]) => key).join(', ') || '-'}</div>
</div>
{defaultSystemTimezone ? (
<div className="field-label">
{isZh ? `系统默认值:${defaultSystemTimezone}` : `System default: ${defaultSystemTimezone}`}
</div>
) : null}
<label>
<input type="checkbox" checked={autoStart} onChange={(e) => setAutoStart(e.target.checked)} style={{ marginRight: 8 }} />
{ui.autoStart}

View File

@ -77,6 +77,14 @@ const tabMap: Record<AgentTab, keyof BotWizardForm> = {
IDENTITY: 'identity_md',
};
// All-empty placeholder for the `defaultTemplates` state: holds the per-agent
// markdown template fields before the system-default templates are fetched.
const EMPTY_DEFAULT_TEMPLATES: Pick<BotWizardForm, 'soul_md' | 'agents_md' | 'user_md' | 'tools_md' | 'identity_md'> = {
  soul_md: '',
  agents_md: '',
  user_md: '',
  tools_md: '',
  identity_md: '',
};
export function useBotWizard({
ui,
notify,
@ -97,7 +105,7 @@ export function useBotWizard({
const [envDraftValue, setEnvDraftValue] = useState('');
const [newChannelType, setNewChannelType] = useState<ChannelType | ''>('');
const [form, setForm] = useState<BotWizardForm>(initialForm);
const [defaultAgentsTemplate, setDefaultAgentsTemplate] = useState('');
const [defaultTemplates, setDefaultTemplates] = useState(EMPTY_DEFAULT_TEMPLATES);
const [maxTokensDraft, setMaxTokensDraft] = useState(String(initialForm.max_tokens));
const [cpuCoresDraft, setCpuCoresDraft] = useState(String(initialForm.cpu_cores));
const [memoryMbDraft, setMemoryMbDraft] = useState(String(initialForm.memory_mb));
@ -128,6 +136,7 @@ export function useBotWizard({
() => (form.channels.length > 0 ? form.channels.map((channel) => channel.channel_type).join(', ') : '-'),
[form.channels],
);
const defaultAgentsTemplate = defaultTemplates.agents_md;
useEffect(() => {
const loadSystemDefaults = async () => {
@ -135,24 +144,30 @@ export function useBotWizard({
const res = await axios.get<SystemDefaultsResponse>(`${APP_ENDPOINTS.apiBase}/system/defaults`);
const tpl = res.data?.templates || {};
const nextSystemTimezone = String(res.data?.bot?.system_timezone || '').trim();
const agentsTemplate = String(tpl.agents_md || '').trim();
const nextTemplates = {
soul_md: String(tpl.soul_md || '').trim(),
agents_md: String(tpl.agents_md || '').trim(),
user_md: String(tpl.user_md || '').trim(),
tools_md: String(tpl.tools_md || '').trim(),
identity_md: String(tpl.identity_md || '').trim(),
};
setDefaultSystemTimezone(nextSystemTimezone);
setDefaultAgentsTemplate(agentsTemplate);
setDefaultTemplates(nextTemplates);
setForm((prev) => ({
...prev,
system_timezone: prev.system_timezone || nextSystemTimezone,
soul_md: String(tpl.soul_md || '').trim() || prev.soul_md,
agents_md: agentsTemplate,
user_md: String(tpl.user_md || '').trim() || prev.user_md,
tools_md: String(tpl.tools_md || '').trim() || prev.tools_md,
identity_md: String(tpl.identity_md || '').trim() || prev.identity_md,
soul_md: prev.soul_md || nextTemplates.soul_md,
agents_md: prev.agents_md || nextTemplates.agents_md,
user_md: prev.user_md || nextTemplates.user_md,
tools_md: prev.tools_md || nextTemplates.tools_md,
identity_md: prev.identity_md || nextTemplates.identity_md,
}));
} catch {
// Keep current editor values when defaults are unavailable.
} catch (error: unknown) {
notify(resolveApiErrorMessage(error, ui.createFailed), { tone: 'error' });
}
};
void loadSystemDefaults();
}, []);
}, [notify, ui.createFailed]);
useEffect(() => {
const raw = String(form.id || '').trim();
@ -257,7 +272,7 @@ export function useBotWizard({
notify(ui.botIdChecking, { tone: 'warning' });
return;
}
if (!form.name || !form.api_key || !form.image_tag || !form.llm_model) {
if (!form.name || !form.api_key || !form.image_tag || !form.llm_model || !form.llm_provider || !form.api_base.trim() || !form.system_timezone.trim()) {
notify(ui.requiredBase, { tone: 'warning' });
return;
}
@ -285,7 +300,7 @@ export function useBotWizard({
provider: form.llm_provider,
model: form.llm_model,
api_key: form.api_key,
api_base: form.api_base || undefined,
api_base: form.api_base.trim(),
});
if (res.data?.ok) {
@ -308,22 +323,27 @@ export function useBotWizard({
createAbortControllerRef.current = controller;
setIsSubmitting(true);
try {
const normalizedApiBase = form.api_base.trim();
const normalizedSystemTimezone = form.system_timezone.trim();
if (!form.id.trim() || !form.name.trim() || !form.llm_provider.trim() || !form.llm_model.trim() || !form.api_key.trim() || !normalizedApiBase || !form.image_tag.trim() || !normalizedSystemTimezone) {
notify(ui.requiredBase, { tone: 'warning' });
return;
}
await axios.post(`${APP_ENDPOINTS.apiBase}/bots`, {
id: form.id,
name: form.name,
llm_provider: form.llm_provider,
llm_model: form.llm_model,
api_key: form.api_key,
api_base: form.api_base || undefined,
api_base: normalizedApiBase,
image_tag: form.image_tag,
system_prompt: form.soul_md,
temperature: clampTemperature(Number(form.temperature)),
top_p: Number(form.top_p),
max_tokens: Number(form.max_tokens),
cpu_cores: Number(form.cpu_cores),
memory_mb: Number(form.memory_mb),
storage_gb: Number(form.storage_gb),
system_timezone: form.system_timezone.trim() || defaultSystemTimezone,
system_timezone: normalizedSystemTimezone,
soul_md: form.soul_md,
agents_md: form.agents_md,
user_md: form.user_md,
@ -347,7 +367,11 @@ export function useBotWizard({
}
onCreated?.();
onGoDashboard?.();
setForm({ ...initialForm, system_timezone: defaultSystemTimezone });
setForm({
...initialForm,
system_timezone: defaultSystemTimezone,
...defaultTemplates,
});
setMaxTokensDraft(String(initialForm.max_tokens));
setCpuCoresDraft(String(initialForm.cpu_cores));
setMemoryMbDraft(String(initialForm.memory_mb));

View File

@ -56,9 +56,15 @@ export function PlatformLoginLogPage({ isZh }: PlatformLoginLogPageProps) {
useEffect(() => {
void (async () => {
setPageSize(await fetchPreferredPlatformPageSize(10));
try {
setPageSize(await fetchPreferredPlatformPageSize());
} catch (error: unknown) {
notify(resolveApiErrorMessage(error, isZh ? '读取分页配置失败。' : 'Failed to load page size setting.'), {
tone: 'error',
});
}
})();
}, []);
}, [isZh, notify]);
useEffect(() => {
setPage(1);

View File

@ -1,11 +1,7 @@
import axios from 'axios';
import { APP_ENDPOINTS } from '../../../config/env';
import {
normalizePlatformPageSize,
readCachedPlatformPageSize,
writeCachedPlatformPageSize,
} from '../../../utils/platformPageSize';
import { normalizePlatformPageSize } from '../../../utils/platformPageSize';
import type { PlatformLoginLogResponse, PlatformSettings, SystemSettingItem } from '../types';
export interface SystemSettingsResponse {
@ -39,16 +35,9 @@ export function fetchPlatformLoginLogs(params: {
.then((res) => res.data);
}
export async function fetchPreferredPlatformPageSize(fallback = 10) {
const cachedFallback = readCachedPlatformPageSize(fallback);
try {
const data = await fetchPlatformSettings();
const normalized = normalizePlatformPageSize(data?.page_size, cachedFallback);
writeCachedPlatformPageSize(normalized);
return normalized;
} catch {
return cachedFallback;
}
/**
 * Fetch the operator-configured platform page size, normalized to the
 * valid range. Rejects when the settings request fails (callers handle it).
 */
export function fetchPreferredPlatformPageSize() {
  return fetchPlatformSettings().then((settings) =>
    normalizePlatformPageSize(settings?.page_size, 10),
  );
}
export function fetchPlatformSystemSettings() {

View File

@ -105,11 +105,19 @@ function PlatformSettingsView({
try {
const data = await fetchPlatformSettings();
onSaved(data);
} catch {
// Ignore snapshot refresh failures here; the table is still the source of truth in the view.
} catch (error: unknown) {
notify(resolveApiErrorMessage(error, isZh ? '读取平台设置失败。' : 'Failed to load platform settings.'), {
tone: 'error',
});
}
setPageSize(await fetchPreferredPlatformPageSize(10));
}, [onSaved]);
try {
setPageSize(await fetchPreferredPlatformPageSize());
} catch (error: unknown) {
notify(resolveApiErrorMessage(error, isZh ? '读取分页配置失败。' : 'Failed to load page size setting.'), {
tone: 'error',
});
}
}, [isZh, notify, onSaved]);
useEffect(() => {
setPage(1);

View File

@ -14,7 +14,6 @@ import {
fetchPlatformSkillMarket,
updatePlatformSkillMarketItem,
} from '../api/skills';
import { readCachedPlatformPageSize } from '../../../utils/platformPageSize';
interface SkillMarketManagerPageProps {
isZh: boolean;
@ -69,7 +68,7 @@ function SkillMarketManagerView({
const [draft, setDraft] = useState<SkillDraft>(buildEmptyDraft);
const [editorOpen, setEditorOpen] = useState(false);
const [page, setPage] = useState(1);
const [pageSize, setPageSize] = useState(() => readCachedPlatformPageSize(10));
const [pageSize, setPageSize] = useState(10);
const infoTitle = isZh ? '技能市场管理说明' : 'Skill Marketplace Management';
const infoDescription = isZh
? '技能市场仅接收人工上传的 ZIP 技能包。平台将统一保存技能元数据与归档文件,并为 Bot 安装提供标准化来源,不再自动扫描 /data/skills 目录。'
@ -95,9 +94,15 @@ function SkillMarketManagerView({
void loadRows();
setPage(1);
void (async () => {
setPageSize(await fetchPreferredPlatformPageSize(10));
try {
setPageSize(await fetchPreferredPlatformPageSize());
} catch (error: unknown) {
notify(resolveApiErrorMessage(error, isZh ? '读取分页配置失败。' : 'Failed to load page size setting.'), {
tone: 'error',
});
}
})();
}, [loadRows]);
}, [isZh, loadRows, notify]);
useEffect(() => {
setPage(1);

View File

@ -3,11 +3,7 @@ import axios from 'axios';
import { APP_ENDPOINTS } from '../../../config/env';
import { resolveApiErrorMessage } from '../../../shared/http/apiErrors';
import {
normalizePlatformPageSize,
readCachedPlatformPageSize,
writeCachedPlatformPageSize,
} from '../../../utils/platformPageSize';
import { normalizePlatformPageSize } from '../../../utils/platformPageSize';
import type {
BotActivityStatsItem,
PlatformOverviewResponse,
@ -37,18 +33,14 @@ export function usePlatformOverviewState({
const [usageLoading, setUsageLoading] = useState(false);
const [activityStatsData, setActivityStatsData] = useState<BotActivityStatsItem[] | null>(null);
const [activityLoading, setActivityLoading] = useState(false);
const [platformPageSize, setPlatformPageSize] = useState(() => readCachedPlatformPageSize(10));
const [platformPageSize, setPlatformPageSize] = useState(10);
const loadOverview = useCallback(async () => {
setLoading(true);
try {
const res = await axios.get<PlatformOverviewResponse>(`${APP_ENDPOINTS.apiBase}/platform/overview`);
setOverview(res.data);
const normalizedPageSize = normalizePlatformPageSize(
res.data?.settings?.page_size,
readCachedPlatformPageSize(10),
);
writeCachedPlatformPageSize(normalizedPageSize);
const normalizedPageSize = normalizePlatformPageSize(res.data?.settings?.page_size, 10);
setPlatformPageSize(normalizedPageSize);
} catch (error: unknown) {
notify(resolveApiErrorMessage(error, isZh ? '读取平台总览失败。' : 'Failed to load platform overview.'), { tone: 'error' });

View File

@ -87,85 +87,85 @@ export function useWorkspaceAttachments({
const onPickAttachments = useCallback(async (event: ChangeEvent<HTMLInputElement>) => {
if (!selectedBotId || !event.target.files || event.target.files.length === 0) return;
const files = Array.from(event.target.files);
const latestAttachmentPolicy = await refreshAttachmentPolicy();
const effectiveUploadMaxMb = latestAttachmentPolicy.uploadMaxMb;
const effectiveAllowedAttachmentExtensions = [...latestAttachmentPolicy.allowedAttachmentExtensions];
const effectiveAllowedAttachmentExtensionSet = new Set(effectiveAllowedAttachmentExtensions);
if (effectiveAllowedAttachmentExtensionSet.size > 0) {
const disallowed = files.filter((file) => {
const name = String(file.name || '').trim().toLowerCase();
const dot = name.lastIndexOf('.');
const ext = dot >= 0 ? name.slice(dot) : '';
return !ext || !effectiveAllowedAttachmentExtensionSet.has(ext);
});
if (disallowed.length > 0) {
const names = disallowed.map((file) => String(file.name || '').trim() || 'unknown').slice(0, 3).join(', ');
notify(t.uploadTypeNotAllowed(names, effectiveAllowedAttachmentExtensions.join(', ')), { tone: 'warning' });
event.target.value = '';
return;
}
}
if (effectiveUploadMaxMb > 0) {
const maxBytes = effectiveUploadMaxMb * 1024 * 1024;
const tooLarge = files.filter((file) => Number(file.size) > maxBytes);
if (tooLarge.length > 0) {
const names = tooLarge.map((file) => String(file.name || '').trim() || 'unknown').slice(0, 3).join(', ');
notify(t.uploadTooLarge(names, effectiveUploadMaxMb), { tone: 'warning' });
event.target.value = '';
return;
}
}
const mediaFiles: File[] = [];
const normalFiles: File[] = [];
files.forEach((file) => {
if (isMediaUploadFile(file)) {
mediaFiles.push(file);
} else {
normalFiles.push(file);
}
});
const totalBytes = files.reduce((sum, file) => sum + Math.max(0, Number(file.size) || 0), 0);
let uploadedBytes = 0;
const uploadedPaths: string[] = [];
const uploadBatch = async (batchFiles: File[], path: 'media' | 'uploads') => {
if (batchFiles.length === 0) return;
const batchBytes = batchFiles.reduce((sum, file) => sum + Math.max(0, Number(file.size) || 0), 0);
const formData = new FormData();
batchFiles.forEach((file) => formData.append('files', file));
const res = await axios.post<WorkspaceUploadResponse>(
`${APP_ENDPOINTS.apiBase}/bots/${selectedBotId}/workspace/upload`,
formData,
{
params: { path },
onUploadProgress: (progressEvent) => {
const loaded = Number(progressEvent.loaded || 0);
if (!Number.isFinite(loaded) || loaded < 0 || totalBytes <= 0) {
setAttachmentUploadPercent(null);
return;
}
const cappedLoaded = Math.max(0, Math.min(batchBytes, loaded));
const pct = Math.max(0, Math.min(100, Math.round(((uploadedBytes + cappedLoaded) / totalBytes) * 100)));
setAttachmentUploadPercent(pct);
},
},
);
const uploaded = normalizeAttachmentPaths((res.data?.files || []).map((file) => file.path));
uploadedPaths.push(...uploaded);
uploadedBytes += batchBytes;
if (totalBytes > 0) {
const pct = Math.max(0, Math.min(100, Math.round((uploadedBytes / totalBytes) * 100)));
setAttachmentUploadPercent(pct);
}
};
setIsUploadingAttachments(true);
setAttachmentUploadPercent(0);
try {
const latestAttachmentPolicy = await refreshAttachmentPolicy();
const effectiveUploadMaxMb = latestAttachmentPolicy.uploadMaxMb;
const effectiveAllowedAttachmentExtensions = [...latestAttachmentPolicy.allowedAttachmentExtensions];
const effectiveAllowedAttachmentExtensionSet = new Set(effectiveAllowedAttachmentExtensions);
if (effectiveAllowedAttachmentExtensionSet.size > 0) {
const disallowed = files.filter((file) => {
const name = String(file.name || '').trim().toLowerCase();
const dot = name.lastIndexOf('.');
const ext = dot >= 0 ? name.slice(dot) : '';
return !ext || !effectiveAllowedAttachmentExtensionSet.has(ext);
});
if (disallowed.length > 0) {
const names = disallowed.map((file) => String(file.name || '').trim() || 'unknown').slice(0, 3).join(', ');
notify(t.uploadTypeNotAllowed(names, effectiveAllowedAttachmentExtensions.join(', ')), { tone: 'warning' });
event.target.value = '';
return;
}
}
if (effectiveUploadMaxMb > 0) {
const maxBytes = effectiveUploadMaxMb * 1024 * 1024;
const tooLarge = files.filter((file) => Number(file.size) > maxBytes);
if (tooLarge.length > 0) {
const names = tooLarge.map((file) => String(file.name || '').trim() || 'unknown').slice(0, 3).join(', ');
notify(t.uploadTooLarge(names, effectiveUploadMaxMb), { tone: 'warning' });
event.target.value = '';
return;
}
}
const mediaFiles: File[] = [];
const normalFiles: File[] = [];
files.forEach((file) => {
if (isMediaUploadFile(file)) {
mediaFiles.push(file);
} else {
normalFiles.push(file);
}
});
const totalBytes = files.reduce((sum, file) => sum + Math.max(0, Number(file.size) || 0), 0);
let uploadedBytes = 0;
const uploadedPaths: string[] = [];
const uploadBatch = async (batchFiles: File[], path: 'media' | 'uploads') => {
if (batchFiles.length === 0) return;
const batchBytes = batchFiles.reduce((sum, file) => sum + Math.max(0, Number(file.size) || 0), 0);
const formData = new FormData();
batchFiles.forEach((file) => formData.append('files', file));
const res = await axios.post<WorkspaceUploadResponse>(
`${APP_ENDPOINTS.apiBase}/bots/${selectedBotId}/workspace/upload`,
formData,
{
params: { path },
onUploadProgress: (progressEvent) => {
const loaded = Number(progressEvent.loaded || 0);
if (!Number.isFinite(loaded) || loaded < 0 || totalBytes <= 0) {
setAttachmentUploadPercent(null);
return;
}
const cappedLoaded = Math.max(0, Math.min(batchBytes, loaded));
const pct = Math.max(0, Math.min(100, Math.round(((uploadedBytes + cappedLoaded) / totalBytes) * 100)));
setAttachmentUploadPercent(pct);
},
},
);
const uploaded = normalizeAttachmentPaths((res.data?.files || []).map((file) => file.path));
uploadedPaths.push(...uploaded);
uploadedBytes += batchBytes;
if (totalBytes > 0) {
const pct = Math.max(0, Math.min(100, Math.round((uploadedBytes / totalBytes) * 100)));
setAttachmentUploadPercent(pct);
}
};
setIsUploadingAttachments(true);
setAttachmentUploadPercent(0);
await uploadBatch(mediaFiles, 'media');
await uploadBatch(normalFiles, 'uploads');
if (uploadedPaths.length > 0) {

View File

@ -85,7 +85,6 @@ export const useAppStore = create<AppStore>((set) => ({
user_md: preferDefined(bot.user_md, prev?.user_md),
tools_md: preferDefined(bot.tools_md, prev?.tools_md),
identity_md: preferDefined(bot.identity_md, prev?.identity_md),
system_prompt: preferDefined(bot.system_prompt, prev?.system_prompt),
access_password: preferDefined(bot.access_password, prev?.access_password),
has_access_password: preferDefined(bot.has_access_password, prev?.has_access_password),
logs: prev?.logs ?? [],

View File

@ -28,7 +28,6 @@ export interface BotState {
image_tag?: string;
llm_provider?: string;
llm_model?: string;
system_prompt?: string;
api_base?: string;
temperature?: number;
top_p?: number;

View File

@ -1,29 +1,5 @@
const PLATFORM_PAGE_SIZE_STORAGE_KEY = 'nanobot-platform-page-size';
/**
 * Coerce an arbitrary value into a usable page size.
 * Non-numeric, non-finite, or non-positive inputs yield `fallback`;
 * everything else is floored to an integer and clamped into [1, 100].
 */
export function normalizePlatformPageSize(value: unknown, fallback = 10) {
  const numeric = Number(value);
  if (!Number.isFinite(numeric) || numeric <= 0) {
    return fallback;
  }
  const floored = Math.floor(numeric);
  return Math.min(100, Math.max(1, floored));
}
/**
 * Read the cached platform page size from localStorage.
 * Returns `fallback` during SSR (no `window`), when storage access throws
 * (privacy mode / disabled storage), or when the stored value is invalid.
 */
export function readCachedPlatformPageSize(fallback = 10) {
  // SSR guard: there is no localStorage to consult without a window.
  if (typeof window === 'undefined') {
    return fallback;
  }
  try {
    const cached = window.localStorage.getItem(PLATFORM_PAGE_SIZE_STORAGE_KEY);
    return normalizePlatformPageSize(cached, fallback);
  } catch {
    return fallback;
  }
}
/**
 * Persist a normalized platform page size to localStorage.
 * Best-effort: a missing `window` (SSR) or a storage failure
 * (quota, privacy mode) is silently ignored.
 */
export function writeCachedPlatformPageSize(value: unknown) {
  if (typeof window === 'undefined') return;
  // Normalization never throws, so it can happen outside the try block.
  const normalized = String(normalizePlatformPageSize(value, 10));
  try {
    window.localStorage.setItem(PLATFORM_PAGE_SIZE_STORAGE_KEY, normalized);
  } catch {
    // Ignore localStorage write failures.
  }
}