v0.1.4-p5
parent
9ad37d0aa6
commit
4d7aa42a18
|
|
@ -35,9 +35,9 @@ POSTGRES_BOOTSTRAP_DB=postgres
|
|||
|
||||
# Dashboard application database account.
|
||||
# deploy-full.sh will call scripts/init-full-db.sh to create/update these idempotently.
|
||||
POSTGRES_APP_DB=dashboard
|
||||
POSTGRES_APP_USER=dashboard
|
||||
POSTGRES_APP_PASSWORD=change_me_dashboard_password
|
||||
POSTGRES_APP_DB=nanobot
|
||||
POSTGRES_APP_USER=nanobot
|
||||
POSTGRES_APP_PASSWORD=change_me_nanobot_password
|
||||
DATABASE_POOL_SIZE=20
|
||||
DATABASE_MAX_OVERFLOW=40
|
||||
DATABASE_POOL_TIMEOUT=30
|
||||
|
|
|
|||
|
|
@ -1,7 +0,0 @@
|
|||
{
|
||||
"agents_md": "- 优先完成任务目标\n- 操作前先说明意图\n- 输出必须可执行\n\n## 默认输出规范\n\n- 每次执行任务时,在 workspace 中创建新目录保存本次输出。\n- 输出内容默认采用 Markdown(.md)格式。\n- 最终报告需求.md和.htm双格式。",
|
||||
"soul_md": "你是专业的企业数字员工,表达清晰、可执行。",
|
||||
"user_md": "- 语言: 中文\n- 风格: 专业\n- 偏好: 简明且有步骤",
|
||||
"tools_md": "- 谨慎使用 shell\n- 修改文件后复核\n- 失败时说明原因并重试策略",
|
||||
"identity_md": "- 角色: 企业数字员工\n- 领域: 运维与任务执行"
|
||||
}
|
||||
|
|
@ -1,134 +0,0 @@
|
|||
{
|
||||
"presets": [
|
||||
{
|
||||
"id": "politics",
|
||||
"topic_key": "politics_news",
|
||||
"name": "时政新闻",
|
||||
"description": "沉淀国内外时政动态、政策发布与重大公共治理事件,便于集中查看。",
|
||||
"routing_purpose": "收录与政府决策、政策法规、外交事务及公共治理相关的关键信息。",
|
||||
"routing_include_when": [
|
||||
"时政",
|
||||
"政策",
|
||||
"法规",
|
||||
"国务院",
|
||||
"政府",
|
||||
"部委",
|
||||
"人大",
|
||||
"政协",
|
||||
"外交",
|
||||
"国际关系",
|
||||
"白宫",
|
||||
"总统",
|
||||
"议会",
|
||||
"election",
|
||||
"policy"
|
||||
],
|
||||
"routing_exclude_when": [
|
||||
"娱乐",
|
||||
"明星",
|
||||
"综艺",
|
||||
"体育",
|
||||
"游戏",
|
||||
"购物",
|
||||
"种草",
|
||||
"广告"
|
||||
],
|
||||
"routing_examples_positive": [
|
||||
"国务院发布新一轮宏观政策措施。",
|
||||
"外交部就国际热点事件发布声明。",
|
||||
"某国总统宣布新的对外政策方向。"
|
||||
],
|
||||
"routing_examples_negative": [
|
||||
"某明星新剧开播引发热议。",
|
||||
"某球队转会新闻与赛果分析。",
|
||||
"数码产品促销与购物推荐汇总。"
|
||||
],
|
||||
"routing_priority": 85
|
||||
},
|
||||
{
|
||||
"id": "finance",
|
||||
"topic_key": "finance_market",
|
||||
"name": "财经信息",
|
||||
"description": "聚合宏观经济、市场波动、公司财报与监管政策等财经信息。",
|
||||
"routing_purpose": "沉淀与资本市场、行业景气、资产价格相关的关键结论与风险提示。",
|
||||
"routing_include_when": [
|
||||
"财经",
|
||||
"金融",
|
||||
"股市",
|
||||
"A股",
|
||||
"港股",
|
||||
"美股",
|
||||
"债券",
|
||||
"汇率",
|
||||
"利率",
|
||||
"通胀",
|
||||
"GDP",
|
||||
"财报",
|
||||
"央行",
|
||||
"market",
|
||||
"earnings"
|
||||
],
|
||||
"routing_exclude_when": [
|
||||
"娱乐",
|
||||
"体育",
|
||||
"游戏",
|
||||
"影视",
|
||||
"八卦",
|
||||
"生活方式",
|
||||
"旅行攻略"
|
||||
],
|
||||
"routing_examples_positive": [
|
||||
"央行公布最新利率决议并释放政策信号。",
|
||||
"上市公司发布季度财报并上调全年指引。",
|
||||
"美元指数走强导致主要货币普遍承压。"
|
||||
],
|
||||
"routing_examples_negative": [
|
||||
"某综艺节目收视排名变化。",
|
||||
"某球员转会传闻引发讨论。",
|
||||
"新游上线玩法测评。"
|
||||
],
|
||||
"routing_priority": 80
|
||||
},
|
||||
{
|
||||
"id": "tech",
|
||||
"topic_key": "tech_updates",
|
||||
"name": "技术资讯",
|
||||
"description": "追踪 AI、云计算、开源社区与开发工具链的最新技术资讯。",
|
||||
"routing_purpose": "沉淀技术发布、版本升级、兼容性变更与工程实践建议。",
|
||||
"routing_include_when": [
|
||||
"技术",
|
||||
"开源",
|
||||
"AI",
|
||||
"模型",
|
||||
"大语言模型",
|
||||
"MCP",
|
||||
"API",
|
||||
"SDK",
|
||||
"发布",
|
||||
"版本",
|
||||
"升级",
|
||||
"breaking change",
|
||||
"security advisory"
|
||||
],
|
||||
"routing_exclude_when": [
|
||||
"娱乐",
|
||||
"体育",
|
||||
"美食",
|
||||
"旅游",
|
||||
"情感",
|
||||
"八卦"
|
||||
],
|
||||
"routing_examples_positive": [
|
||||
"某主流框架发布新版本并调整默认配置。",
|
||||
"开源项目披露高危安全漏洞并给出修复方案。",
|
||||
"AI 模型服务更新 API,返回结构发生变化。"
|
||||
],
|
||||
"routing_examples_negative": [
|
||||
"某艺人参加活动造型盘点。",
|
||||
"旅游目的地打卡攻略合集。",
|
||||
"比赛结果预测与竞猜。"
|
||||
],
|
||||
"routing_priority": 75
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -1,308 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import ast
|
||||
import builtins
|
||||
import dis
|
||||
import importlib
|
||||
import inspect
|
||||
import pathlib
|
||||
import pkgutil
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from typing import Iterable, List, Sequence
|
||||
|
||||
|
||||
PROJECT_ROOT = pathlib.Path(__file__).resolve().parents[1]
|
||||
BACKEND_ROOT = PROJECT_ROOT / "backend"
|
||||
|
||||
ROUTER_MAX_LINES = 400
|
||||
SERVICE_MAX_LINES = 500
|
||||
|
||||
|
||||
@dataclass
class Finding:
    """A single audit result reported by one of the integrity checks."""

    # "ERROR" findings make main() exit non-zero; "WARN" findings are
    # printed but do not fail the audit.
    severity: str
    # Short machine-readable check identifier, e.g. "import-star".
    code: str
    # Offending file path (relative to the project root where available;
    # the runtime-global check reports a module-derived path instead).
    path: str
    # Human-readable explanation of the problem.
    detail: str
|
||||
|
||||
|
||||
def _iter_backend_py_files() -> Iterable[pathlib.Path]:
    """Yield every backend ``*.py`` file in sorted order, skipping venvs."""
    candidates = sorted(BACKEND_ROOT.rglob("*.py"))
    yield from (p for p in candidates if "venv" not in p.parts)
|
||||
|
||||
|
||||
def _iter_backend_pyc_files() -> Iterable[pathlib.Path]:
    """Yield every compiled ``*.pyc`` file under the backend tree.

    Anything inside a ``venv`` directory is ignored, mirroring the
    ``*.py`` iterator.
    """
    for candidate in sorted(BACKEND_ROOT.rglob("*.pyc")):
        if "venv" in candidate.parts:
            continue
        yield candidate
|
||||
|
||||
|
||||
def _module_name_from_path(path: pathlib.Path) -> str:
    """Translate a backend file path into its dotted module name."""
    relative = path.relative_to(BACKEND_ROOT)
    parts = relative.with_suffix("").parts
    return ".".join(parts)
|
||||
|
||||
|
||||
def _parse_ast(path: pathlib.Path) -> ast.AST | None:
|
||||
try:
|
||||
return ast.parse(path.read_text(encoding="utf-8"), filename=str(path))
|
||||
except Exception as exc:
|
||||
return ast.parse("", filename=f"{path} [parse failed: {exc}]")
|
||||
|
||||
|
||||
def _check_import_all(paths: Sequence[pathlib.Path]) -> List[Finding]:
    """Flag every ``from X import *`` statement found in *paths*."""
    findings: List[Finding] = []
    for path in paths:
        for node in ast.walk(_parse_ast(path)):
            if not isinstance(node, ast.ImportFrom):
                continue
            if all(alias.name != "*" for alias in node.names):
                continue
            findings.append(
                Finding(
                    severity="ERROR",
                    code="import-star",
                    path=str(path.relative_to(PROJECT_ROOT)),
                    detail=f"wildcard import from '{node.module or ''}'",
                )
            )
    return findings
|
||||
|
||||
|
||||
def _check_settings_imports(paths: Sequence[pathlib.Path]) -> List[Finding]:
    """Verify every symbol imported from ``core.settings`` actually exists.

    Imports the real ``core.settings`` module and compares each
    ``from core.settings import X`` statement in *paths* against its
    ``dir()``; missing symbols become ERROR findings.
    """
    findings: List[Finding] = []
    # Make backend packages importable. NOTE(review): the path is prepended
    # on every call and never removed afterwards.
    sys.path.insert(0, str(BACKEND_ROOT))
    # An import failure here propagates to the caller unhandled.
    settings = importlib.import_module("core.settings")
    available = set(dir(settings))
    for path in paths:
        tree = _parse_ast(path)
        for node in ast.walk(tree):
            if not isinstance(node, ast.ImportFrom) or node.module != "core.settings":
                continue
            for alias in node.names:
                if alias.name == "*":
                    # Wildcard imports are reported by the import-star check.
                    continue
                if alias.name not in available:
                    findings.append(
                        Finding(
                            severity="ERROR",
                            code="settings-missing-symbol",
                            path=str(path.relative_to(PROJECT_ROOT)),
                            detail=f"imports missing symbol '{alias.name}' from core.settings",
                        )
                    )
    return findings
|
||||
|
||||
|
||||
def _check_importability(paths: Sequence[pathlib.Path]) -> List[Finding]:
    """Import every backend module (skipping ``__init__.py``) and report failures."""
    sys.path.insert(0, str(BACKEND_ROOT))
    failures: List[Finding] = []
    targets = [p for p in paths if p.name != "__init__.py"]
    for source in targets:
        dotted = _module_name_from_path(source)
        try:
            importlib.import_module(dotted)
        except Exception as exc:
            failures.append(
                Finding(
                    severity="ERROR",
                    code="module-import-failed",
                    path=str(source.relative_to(PROJECT_ROOT)),
                    detail=f"{dotted}: {type(exc).__name__}: {exc}",
                )
            )
    return failures
|
||||
|
||||
|
||||
def _check_runtime_global_refs(paths: Sequence[pathlib.Path]) -> List[Finding]:
    """Detect functions whose bytecode references globals that do not exist.

    Each importable backend module is scanned with :mod:`dis`: any
    ``LOAD_GLOBAL``/``LOAD_NAME`` whose target is neither a builtin nor a
    module global becomes an ERROR finding. Results are de-duplicated by
    (code, path, detail).
    """
    findings: List[Finding] = []
    sys.path.insert(0, str(BACKEND_ROOT))
    builtin_names = set(dir(builtins))

    module_names = {
        _module_name_from_path(path)
        for path in paths
        if path.name != "__init__.py"
    }

    for module_name in sorted(module_names):
        try:
            module = importlib.import_module(module_name)
        except Exception:
            # Import failures are already reported by the importability check.
            continue

        module_globals = module.__dict__

        # Closure over module_name / module_globals of the current module.
        def _check_function(obj: object, qualname: str) -> None:
            # NOTE(review): models.* __init__ methods are exempted —
            # presumably generated constructors whose globals resolve
            # dynamically; confirm against the backend models package.
            if module_name.startswith("models.") and qualname.endswith(".__init__"):
                return
            try:
                instructions = list(dis.get_instructions(obj))
            except TypeError:
                # Object has no Python bytecode; nothing to scan.
                return
            for ins in instructions:
                if ins.opname not in {"LOAD_GLOBAL", "LOAD_NAME"}:
                    continue
                ref_name = str(ins.argval or "").strip()
                if not ref_name or ref_name in builtin_names or ref_name in module_globals:
                    continue
                findings.append(
                    Finding(
                        severity="ERROR",
                        code="runtime-missing-global",
                        path=module_name.replace(".", "/") + ".py",
                        detail=f"{qualname} uses missing global '{ref_name}' ({ins.opname})",
                    )
                )

        # Scan top-level functions and methods defined in this module
        # (imported names are skipped via the __module__ comparison).
        for _attr_name, obj in vars(module).items():
            if inspect.isfunction(obj) and obj.__module__ == module.__name__:
                _check_function(obj, obj.__qualname__)
            elif inspect.isclass(obj) and obj.__module__ == module.__name__:
                for _method_name, method in vars(obj).items():
                    if inspect.isfunction(method):
                        _check_function(method, f"{obj.__qualname__}.{method.__name__}")

    # De-duplicate: a dict keyed by (code, path, detail) keeps the last
    # occurrence of each repeated finding.
    deduped: dict[tuple[str, str, str], Finding] = {}
    for finding in findings:
        key = (finding.code, finding.path, finding.detail)
        deduped[key] = finding
    return list(deduped.values())
|
||||
|
||||
|
||||
def _check_app_factory() -> List[Finding]:
    """Build the application via ``create_app`` and audit its routes.

    Reports an ERROR when the factory cannot be imported or invoked, and
    one ERROR per (method, path) pair registered more than once.
    NOTE(review): routes are inspected via ``path``/``methods`` attributes,
    which suggests a Starlette/FastAPI app — confirm against app_factory.
    """
    findings: List[Finding] = []
    sys.path.insert(0, str(BACKEND_ROOT))
    try:
        from app_factory import create_app
    except Exception as exc:
        return [
            Finding(
                severity="ERROR",
                code="app-factory-import-failed",
                path="backend/app_factory.py",
                detail=f"{exc.__class__.__name__}: {exc}",
            )
        ]

    try:
        app = create_app()
    except Exception as exc:
        return [
            Finding(
                severity="ERROR",
                code="app-factory-create-failed",
                path="backend/app_factory.py",
                detail=f"{exc.__class__.__name__}: {exc}",
            )
        ]

    # Collect every (method, path) pair; a second registration of the same
    # pair counts as a duplicate route.
    route_keys: set[tuple[str, str]] = set()
    duplicates: set[tuple[str, str]] = set()
    for route in getattr(app, "routes", []):
        path = str(getattr(route, "path", "") or "").strip()
        methods = sorted(getattr(route, "methods", []) or [])
        for method in methods:
            # NOTE(review): HEAD/OPTIONS are skipped, presumably because the
            # framework auto-registers them and they would yield false
            # duplicates — confirm.
            if method in {"HEAD", "OPTIONS"}:
                continue
            key = (method, path)
            if key in route_keys:
                duplicates.add(key)
            else:
                route_keys.add(key)

    for method, path in sorted(duplicates):
        findings.append(
            Finding(
                severity="ERROR",
                code="duplicate-route",
                path="backend/app_factory.py",
                detail=f"duplicate route registered for {method} {path}",
            )
        )
    return findings
|
||||
|
||||
|
||||
def _source_path_for_pyc(path: pathlib.Path) -> pathlib.Path | None:
|
||||
if path.name == "__init__.cpython-312.pyc":
|
||||
return path.parent.parent / "__init__.py"
|
||||
if path.parent.name != "__pycache__":
|
||||
return None
|
||||
stem = path.name.split(".cpython-", 1)[0]
|
||||
return path.parent.parent / f"{stem}.py"
|
||||
|
||||
|
||||
def _check_pyc_without_source(pyc_paths: Sequence[pathlib.Path]) -> List[Finding]:
    """Report compiled modules whose source file no longer exists."""
    orphans: List[Finding] = []
    for compiled in pyc_paths:
        expected = _source_path_for_pyc(compiled)
        if expected is None or expected.exists():
            continue
        orphans.append(
            Finding(
                severity="ERROR",
                code="pyc-without-source",
                path=str(compiled.relative_to(PROJECT_ROOT)),
                detail=f"compiled module has no source file at {expected.relative_to(PROJECT_ROOT)}",
            )
        )
    return orphans
|
||||
|
||||
|
||||
def _check_file_sizes(paths: Sequence[pathlib.Path]) -> List[Finding]:
    """Warn when an api router or service module exceeds its line budget.

    Budgets come from ``ROUTER_MAX_LINES`` / ``SERVICE_MAX_LINES``;
    violations are WARN (not ERROR) findings.
    """
    # Top-level backend directory -> (finding code, noun, line budget).
    # A single table replaces the two copy-pasted branches of the original.
    limits = {
        "api": ("router-too-large", "router", ROUTER_MAX_LINES),
        "services": ("service-too-large", "service", SERVICE_MAX_LINES),
    }
    findings: List[Finding] = []
    for path in paths:
        rel = path.relative_to(BACKEND_ROOT)
        rule = limits.get(rel.parts[0])
        if rule is None:
            # No budget applies, so skip counting (the original opened and
            # counted every file regardless).
            continue
        code, noun, budget = rule
        # Use a context manager: the original counted lines via a generator
        # over path.open(), leaving the file handle to the garbage collector.
        with path.open("r", encoding="utf-8") as handle:
            line_count = sum(1 for _ in handle)
        if line_count <= budget:
            continue
        findings.append(
            Finding(
                severity="WARN",
                code=code,
                path=str(path.relative_to(PROJECT_ROOT)),
                detail=f"{line_count} lines exceeds {noun} limit {budget}",
            )
        )
    return findings
|
||||
|
||||
|
||||
def _print_findings(findings: Sequence[Finding]) -> None:
|
||||
if not findings:
|
||||
print("PASS backend integrity audit")
|
||||
return
|
||||
for finding in findings:
|
||||
print(f"[{finding.severity}] {finding.code} {finding.path} :: {finding.detail}")
|
||||
|
||||
|
||||
def main() -> int:
    """Run every audit check, print the combined findings, and return a code.

    Returns ``1`` when any ERROR-severity finding exists (suitable as a
    process exit status), ``0`` otherwise. WARN findings are printed but do
    not fail the audit.
    """
    py_files = list(_iter_backend_py_files())
    pyc_files = list(_iter_backend_pyc_files())
    findings: List[Finding] = []
    findings.extend(_check_import_all(py_files))
    findings.extend(_check_settings_imports(py_files))
    findings.extend(_check_importability(py_files))
    findings.extend(_check_runtime_global_refs(py_files))
    findings.extend(_check_app_factory())
    findings.extend(_check_pyc_without_source(pyc_files))
    findings.extend(_check_file_sizes(py_files))
    # ERROR rows sort first (False < True), then by code and path.
    findings.sort(key=lambda item: (item.severity != "ERROR", item.code, item.path))
    _print_findings(findings)
    return 1 if any(item.severity == "ERROR" for item in findings) else 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate the audit result as the process exit status.
    raise SystemExit(main())
|
||||
Loading…
Reference in New Issue