2026-03-01 20:04:42 +00:00
|
|
|
import json
|
2026-03-01 16:26:03 +00:00
|
|
|
import os
|
|
|
|
|
from typing import Any, Dict, List
|
|
|
|
|
|
2026-03-31 04:31:47 +00:00
|
|
|
from services.template_service import get_agent_md_templates
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _provider_default_api_base(provider: str) -> str:
|
|
|
|
|
normalized = str(provider or "").strip().lower()
|
|
|
|
|
if normalized == "openai":
|
|
|
|
|
return "https://api.openai.com/v1"
|
|
|
|
|
if normalized == "openrouter":
|
|
|
|
|
return "https://openrouter.ai/api/v1"
|
|
|
|
|
if normalized in {"dashscope", "aliyun", "qwen", "aliyun-qwen"}:
|
|
|
|
|
return "https://dashscope.aliyuncs.com/compatible-mode/v1"
|
|
|
|
|
if normalized == "deepseek":
|
|
|
|
|
return "https://api.deepseek.com/v1"
|
|
|
|
|
if normalized in {"xunfei", "iflytek", "xfyun"}:
|
|
|
|
|
return "https://spark-api-open.xf-yun.com/v1"
|
|
|
|
|
if normalized in {"kimi", "moonshot"}:
|
|
|
|
|
return "https://api.moonshot.cn/v1"
|
|
|
|
|
if normalized == "minimax":
|
|
|
|
|
return "https://api.minimax.chat/v1"
|
|
|
|
|
return ""
|
2026-03-01 16:26:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class BotConfigManager:
    """Maintains each bot's on-disk nanobot workspace and ``config.json``.

    All bot state lives under ``<host_data_root>/<bot_id>/.nanobot``; this
    class (re)generates the workspace layout, channel/provider config and
    the bootstrap markdown files from database-sourced bot data.
    """

    def __init__(self, host_data_root: str):
        """
        Args:
            host_data_root: Root directory under which per-bot data
                folders (``<host_data_root>/<bot_id>``) are created.
        """
        self.host_data_root = host_data_root
|
    def update_workspace(self, bot_id: str, bot_data: Dict[str, Any], channels: List[Dict[str, Any]]):
        """Generate/update nanobot workspace files and config.json.

        Creates the ``.nanobot/workspace`` directory tree for *bot_id*,
        merges new settings from *bot_data*/*channels* over any existing
        ``config.json`` (preserving prior provider credentials, model name,
        tools config and dashboard host/port/allowFrom), writes the merged
        config back, and (re)writes the bootstrap markdown files.

        Args:
            bot_id: Bot identifier; used as the per-bot folder name.
            bot_data: Bot settings (LLM provider/model, API credentials,
                sampling params, markdown overrides, feature flags).
            channels: Channel rows, each with ``channel_type``,
                ``external_app_id``, ``app_secret``, ``is_active`` and an
                optional ``extra_config`` (JSON string or dict).

        Returns:
            The absolute path of the bot's ``.nanobot`` directory.
        """
        # Workspace layout: <root>/<bot_id>/.nanobot/workspace/{memory,skills}
        bot_dir = os.path.join(self.host_data_root, bot_id)
        dot_nanobot_dir = os.path.join(bot_dir, ".nanobot")
        workspace_dir = os.path.join(dot_nanobot_dir, "workspace")
        memory_dir = os.path.join(workspace_dir, "memory")
        skills_dir = os.path.join(workspace_dir, "skills")

        for d in [dot_nanobot_dir, workspace_dir, memory_dir, skills_dir]:
            os.makedirs(d, exist_ok=True)

        template_defaults = get_agent_md_templates()

        # Load any previous config so unspecified values survive an update.
        existing_config: Dict[str, Any] = {}
        config_path = os.path.join(dot_nanobot_dir, "config.json")
        if os.path.isfile(config_path):
            try:
                with open(config_path, "r", encoding="utf-8") as f:
                    loaded = json.load(f)
                    if isinstance(loaded, dict):
                        existing_config = loaded
            except Exception:
                # Corrupt/unreadable config: fall back to a clean slate.
                existing_config = {}

        # Extract the previously configured provider and model (if any).
        existing_provider_name = ""
        existing_provider_cfg: Dict[str, Any] = {}
        existing_model_name = ""
        providers_cfg = existing_config.get("providers")
        if isinstance(providers_cfg, dict):
            # NOTE(review): only the first provider entry is inspected —
            # confirm multi-provider configs are not expected here.
            for provider_name, provider_cfg in providers_cfg.items():
                existing_provider_name = str(provider_name or "").strip().lower()
                if isinstance(provider_cfg, dict):
                    existing_provider_cfg = provider_cfg
                break

        agents_cfg = existing_config.get("agents")
        if isinstance(agents_cfg, dict):
            defaults_cfg = agents_cfg.get("defaults")
            if isinstance(defaults_cfg, dict):
                existing_model_name = str(defaults_cfg.get("model") or "").strip()

        # New values win; existing config is only a fallback.
        raw_provider_name = (bot_data.get("llm_provider") or existing_provider_name).strip().lower()
        provider_name = raw_provider_name
        model_name = (bot_data.get("llm_model") or existing_model_name).strip()
        api_key = (bot_data.get("api_key") or existing_provider_cfg.get("apiKey") or "").strip()
        api_base = (bot_data.get("api_base") or existing_provider_cfg.get("apiBase") or "").strip() or None

        # Canonicalize dashboard-facing provider names to nanobot providers.
        provider_alias = {
            "aliyun": "dashscope",
            "qwen": "dashscope",
            "aliyun-qwen": "dashscope",
            "moonshot": "kimi",
            # Xunfei Spark provides OpenAI-compatible endpoint.
            "xunfei": "openai",
            "iflytek": "openai",
            "xfyun": "openai",
            "vllm": "openai",
        }
        provider_name = provider_alias.get(provider_name, provider_name)
        # Xunfei models routed through the openai provider need an
        # explicit "openai/" model prefix unless one is already present.
        if provider_name == "openai" and raw_provider_name in {"xunfei", "iflytek", "xfyun"}:
            if model_name and "/" not in model_name:
                model_name = f"openai/{model_name}"
        # Fill in a well-known default endpoint when none was supplied;
        # prefer the raw (pre-alias) name so e.g. xunfei gets its own URL.
        if not api_base:
            api_base = _provider_default_api_base(raw_provider_name) or _provider_default_api_base(provider_name) or None

        provider_cfg: Dict[str, Any] = {
            "apiKey": api_key,
        }
        # Remember which dashboard alias produced this provider entry.
        if raw_provider_name in {"xunfei", "iflytek", "xfyun", "vllm"}:
            provider_cfg["dashboardProviderAlias"] = raw_provider_name
        if api_base:
            provider_cfg["apiBase"] = api_base

        # Global channel flags come from bot-level fields.
        channels_cfg: Dict[str, Any] = {
            "sendProgress": bool(bot_data.get("send_progress", False)),
            "sendToolHints": bool(bot_data.get("send_tool_hints", False)),
        }

        # Preserve existing tools config; only overwrite mcpServers when
        # the caller explicitly provided an mcp_servers mapping.
        existing_tools = existing_config.get("tools")
        tools_cfg: Dict[str, Any] = dict(existing_tools) if isinstance(existing_tools, dict) else {}
        if "mcp_servers" in bot_data:
            mcp_servers = bot_data.get("mcp_servers")
            if isinstance(mcp_servers, dict):
                tools_cfg["mcpServers"] = mcp_servers

        config_data: Dict[str, Any] = {
            "agents": {
                "defaults": {
                    "model": model_name,
                    # Falsy values (None/0/"") fall back to the defaults below.
                    "temperature": float(bot_data.get("temperature") or 0.2),
                    "topP": float(bot_data.get("top_p") or 1.0),
                    "maxTokens": int(bot_data.get("max_tokens") or 8192),
                }
            },
            "providers": {provider_name: provider_cfg} if provider_name else {},
            "channels": channels_cfg,
        }
        if tools_cfg:
            config_data["tools"] = tools_cfg

        # Dashboard channel is always enabled; host/port/allowFrom are
        # carried over from any previous config so manual edits survive.
        existing_channels = existing_config.get("channels")
        existing_dashboard_cfg = (
            existing_channels.get("dashboard")
            if isinstance(existing_channels, dict) and isinstance(existing_channels.get("dashboard"), dict)
            else {}
        )
        dashboard_cfg: Dict[str, Any] = {
            "enabled": True,
            "host": "0.0.0.0",
            "port": 9000,
            "allowFrom": ["*"],
        }
        for key in ("host", "port", "allowFrom"):
            if key in existing_dashboard_cfg:
                dashboard_cfg[key] = existing_dashboard_cfg[key]
        # channels_cfg is already referenced by config_data, so this
        # mutation is reflected in the JSON written below.
        channels_cfg["dashboard"] = dashboard_cfg

        # Translate each DB channel row into its nanobot channel config.
        for channel in channels:
            channel_type = (channel.get("channel_type") or "").strip()
            if not channel_type:
                continue
            # extra_config may arrive as a JSON string or a dict; anything
            # else (or unparsable JSON) degrades to an empty mapping.
            raw_extra = channel.get("extra_config")
            extra: Dict[str, Any] = {}
            if isinstance(raw_extra, str) and raw_extra.strip():
                try:
                    parsed = json.loads(raw_extra)
                    if isinstance(parsed, dict):
                        extra = parsed
                except Exception:
                    extra = {}
            elif isinstance(raw_extra, dict):
                extra = raw_extra

            # Dashboard channel is deprecated in DB routing. Global flags now come from bot fields.
            if channel_type == "dashboard":
                continue

            enabled = bool(channel.get("is_active", True))
            external = channel.get("external_app_id", "") or ""
            secret = channel.get("app_secret", "") or ""

            if channel_type == "telegram":
                channels_cfg["telegram"] = {
                    "enabled": enabled,
                    "token": secret,
                    "proxy": extra.get("proxy", ""),
                    "replyToMessage": bool(extra.get("replyToMessage", False)),
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue

            if channel_type == "feishu":
                channels_cfg["feishu"] = {
                    "enabled": enabled,
                    "appId": external,
                    "appSecret": secret,
                    "encryptKey": extra.get("encryptKey", ""),
                    "verificationToken": extra.get("verificationToken", ""),
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue

            if channel_type == "dingtalk":
                channels_cfg["dingtalk"] = {
                    "enabled": enabled,
                    "clientId": external,
                    "clientSecret": secret,
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue

            if channel_type == "slack":
                channels_cfg["slack"] = {
                    "enabled": enabled,
                    "mode": extra.get("mode", "socket"),
                    "botToken": external,
                    "appToken": secret,
                    "replyInThread": bool(extra.get("replyInThread", True)),
                    "groupPolicy": extra.get("groupPolicy", "mention"),
                    "groupAllowFrom": extra.get("groupAllowFrom", []),
                    "reactEmoji": extra.get("reactEmoji", "eyes"),
                }
                continue

            if channel_type == "qq":
                channels_cfg["qq"] = {
                    "enabled": enabled,
                    "appId": external,
                    "secret": secret,
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue

            if channel_type == "weixin":
                weixin_cfg: Dict[str, Any] = {
                    "enabled": enabled,
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                # Optional keys are only emitted when non-empty.
                route_tag = str(extra.get("routeTag") or "").strip()
                if route_tag:
                    weixin_cfg["routeTag"] = route_tag
                state_dir = str(extra.get("stateDir") or "").strip()
                if state_dir:
                    weixin_cfg["stateDir"] = state_dir
                base_url = str(extra.get("baseUrl") or "").strip()
                if base_url:
                    weixin_cfg["baseUrl"] = base_url
                cdn_base_url = str(extra.get("cdnBaseUrl") or "").strip()
                if cdn_base_url:
                    weixin_cfg["cdnBaseUrl"] = cdn_base_url
                # Accept both camelCase and snake_case; clamp to >= 1 and
                # silently drop non-numeric values.
                poll_timeout = extra.get("pollTimeout", extra.get("poll_timeout"))
                if poll_timeout not in {None, ""}:
                    try:
                        weixin_cfg["pollTimeout"] = max(1, int(poll_timeout))
                    except (TypeError, ValueError):
                        pass
                channels_cfg["weixin"] = weixin_cfg
                continue

            if channel_type == "email":
                channels_cfg["email"] = {
                    "enabled": enabled,
                    "consentGranted": bool(extra.get("consentGranted", False)),
                    "imapHost": extra.get("imapHost", ""),
                    # Ports clamped to the valid TCP range 1-65535.
                    "imapPort": max(1, min(int(extra.get("imapPort", 993) or 993), 65535)),
                    "imapUsername": extra.get("imapUsername", ""),
                    "imapPassword": extra.get("imapPassword", ""),
                    "imapMailbox": extra.get("imapMailbox", "INBOX"),
                    "imapUseSsl": bool(extra.get("imapUseSsl", True)),
                    "smtpHost": extra.get("smtpHost", ""),
                    "smtpPort": max(1, min(int(extra.get("smtpPort", 587) or 587), 65535)),
                    "smtpUsername": extra.get("smtpUsername", ""),
                    "smtpPassword": extra.get("smtpPassword", ""),
                    "smtpUseTls": bool(extra.get("smtpUseTls", True)),
                    "smtpUseSsl": bool(extra.get("smtpUseSsl", False)),
                    "fromAddress": extra.get("fromAddress", ""),
                    "autoReplyEnabled": bool(extra.get("autoReplyEnabled", True)),
                    # Poll at least every 5 seconds; body size at least 1 char.
                    "pollIntervalSeconds": max(5, int(extra.get("pollIntervalSeconds", 30) or 30)),
                    "markSeen": bool(extra.get("markSeen", True)),
                    "maxBodyChars": max(1, int(extra.get("maxBodyChars", 12000) or 12000)),
                    "subjectPrefix": extra.get("subjectPrefix", "Re: "),
                    "allowFrom": self._normalize_allow_from(extra.get("allowFrom", [])),
                }
                continue

            # Fallback for future custom channels.
            channels_cfg[channel_type] = {
                "enabled": enabled,
                "appId": external,
                "appSecret": secret,
                **extra,
            }

        with open(config_path, "w", encoding="utf-8") as f:
            json.dump(config_data, f, indent=4, ensure_ascii=False)

        # Bootstrap markdown files: explicit bot values win, then templates.
        bootstrap_files = {
            "AGENTS.md": bot_data.get("agents_md") or template_defaults.get("agents_md", ""),
            "SOUL.md": bot_data.get("soul_md") or bot_data.get("system_prompt") or template_defaults.get("soul_md", ""),
            "USER.md": bot_data.get("user_md") or template_defaults.get("user_md", ""),
            "TOOLS.md": bot_data.get("tools_md") or template_defaults.get("tools_md", ""),
            "IDENTITY.md": bot_data.get("identity_md") or template_defaults.get("identity_md", ""),
        }

        for filename, content in bootstrap_files.items():
            file_path = os.path.join(workspace_dir, filename)
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(str(content).strip() + "\n")

        return dot_nanobot_dir
|
2026-03-10 05:47:28 +00:00
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
|
def _normalize_allow_from(raw: Any) -> List[str]:
|
|
|
|
|
rows: List[str] = []
|
|
|
|
|
if isinstance(raw, list):
|
|
|
|
|
for item in raw:
|
|
|
|
|
text = str(item or "").strip()
|
|
|
|
|
if text and text not in rows:
|
|
|
|
|
rows.append(text)
|
|
|
|
|
if not rows:
|
|
|
|
|
return ["*"]
|
|
|
|
|
return rows
|