# dashboard-nanobot/backend/services/default_assets_service.py
from __future__ import annotations
import json
import os
import re
import shutil
import zipfile
from pathlib import Path
from typing import Any, Dict, List
from sqlmodel import Session, select
from core.settings import (
AGENT_MD_TEMPLATES_FILE,
BUNDLED_AGENT_MD_TEMPLATES_FILE,
BUNDLED_SKILLS_ROOT,
BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE,
DATA_ROOT,
RUNTIME_MODEL_ROOT,
RUNTIME_SKILLS_ROOT,
RUNTIME_TEMPLATES_ROOT,
TOPIC_PRESETS_TEMPLATES_FILE,
)
from core.utils import (
_is_ignored_skill_zip_top_level,
_is_valid_top_level_skill_name,
_read_description_from_text,
_sanitize_skill_market_key,
)
from models.skill import SkillMarketItem
def _copy_if_missing(src: Path, dst: Path) -> bool:
if not src.exists() or not src.is_file():
return False
if src.resolve() == dst.resolve() if dst.exists() else False:
return False
if dst.exists():
return False
dst.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(src, dst)
return True
def _copy_if_different(src: Path, dst: Path) -> bool:
if not src.exists() or not src.is_file():
return False
if src.resolve() == dst.resolve() if dst.exists() else False:
return False
dst.parent.mkdir(parents=True, exist_ok=True)
if dst.exists():
try:
if src.stat().st_size == dst.stat().st_size and src.read_bytes() == dst.read_bytes():
return False
except Exception:
pass
shutil.copy2(src, dst)
return True
def _iter_bundled_skill_packages() -> List[Path]:
    """Return the bundled ``.zip`` skill packages, sorted by path."""
    root = BUNDLED_SKILLS_ROOT
    if not root.exists() or not root.is_dir():
        return []
    packages = [entry for entry in root.iterdir() if entry.is_file() and entry.suffix.lower() == ".zip"]
    packages.sort()
    return packages
def ensure_runtime_data_assets() -> Dict[str, int]:
    """Make sure the runtime data directories exist and are seeded.

    Template files are copied from the bundled assets only when missing;
    skill packages are kept byte-for-byte in sync with the bundled copies.

    Returns:
        Counters: ``templates_initialized`` and ``skills_synchronized``.
    """
    for directory in (Path(DATA_ROOT), RUNTIME_TEMPLATES_ROOT, RUNTIME_SKILLS_ROOT, RUNTIME_MODEL_ROOT):
        directory.mkdir(parents=True, exist_ok=True)

    template_pairs = (
        (BUNDLED_AGENT_MD_TEMPLATES_FILE, AGENT_MD_TEMPLATES_FILE),
        (BUNDLED_TOPIC_PRESETS_TEMPLATES_FILE, TOPIC_PRESETS_TEMPLATES_FILE),
    )
    templates_initialized = sum(
        1 for bundled, runtime in template_pairs if _copy_if_missing(bundled, runtime)
    )
    skills_synchronized = sum(
        1
        for package in _iter_bundled_skill_packages()
        if _copy_if_different(package, RUNTIME_SKILLS_ROOT / package.name)
    )
    return {
        "templates_initialized": templates_initialized,
        "skills_synchronized": skills_synchronized,
    }
def _extract_skill_zip_summary(zip_path: Path) -> Dict[str, Any]:
    """Inspect a skill ``.zip`` archive and summarize its contents.

    Returns:
        Dict with:
        - ``entry_names``: top-level names in archive order, with ignored
          and invalid names filtered out, de-duplicated.
        - ``description``: first non-empty description extracted from a
          ``SKILL.md``/``README.md`` member (shallowest path, then
          alphabetical, wins), or "" when none found.

    Raises:
        zipfile.BadZipFile / OSError: if the archive cannot be opened.
    """
    entry_names: List[str] = []
    description = ""
    with zipfile.ZipFile(zip_path) as archive:
        file_members = [member for member in archive.infolist() if not member.is_dir()]

        def _normalized(member: zipfile.ZipInfo) -> str:
            # Normalize separators and strip any leading slash for matching.
            return str(member.filename or "").replace("\\", "/").lstrip("/")

        for member in file_members:
            raw_name = _normalized(member)
            if not raw_name:
                continue
            first = raw_name.split("/", 1)[0].strip()
            if _is_ignored_skill_zip_top_level(first):
                continue
            if _is_valid_top_level_skill_name(first) and first not in entry_names:
                entry_names.append(first)

        # Candidate doc files, shallowest path first, then alphabetical.
        # Keep the ZipInfo alongside the normalized name: opening by the
        # normalized string would fail (KeyError) for members whose stored
        # name contains backslashes or a leading slash.
        candidates = sorted(
            (
                (name, member)
                for member in file_members
                for name in (_normalized(member),)
                if name.rsplit("/", 1)[-1].lower() in {"skill.md", "readme.md"}
            ),
            key=lambda pair: (pair[0].count("/"), pair[0].lower()),
        )
        for _, member in candidates:
            try:
                with archive.open(member, "r") as file:
                    # Only a short preview is needed to pull a description.
                    preview = file.read(4096).decode("utf-8", errors="ignore")
                description = _read_description_from_text(preview)
                if description:
                    break
            except Exception:
                # Best effort: a single unreadable member should not stop
                # us from trying the next candidate.
                continue
    return {
        "entry_names": entry_names,
        "description": description,
    }
def _default_display_name(stem: str) -> str:
chunks = [chunk for chunk in re.split(r"[-_]+", str(stem or "").strip()) if chunk]
if not chunks:
return "Skill"
return " ".join(chunk.upper() if chunk.isupper() else chunk.capitalize() for chunk in chunks)
def _resolve_unique_skill_key(existing_keys: set[str], preferred_key: str) -> str:
    """Pick a sanitized key based on ``preferred_key`` not in ``existing_keys``.

    Collisions are resolved by appending ``-2``, ``-3``, ... to the base key.
    Note: the chosen key is added to ``existing_keys`` (the set is mutated).
    """
    base_key = _sanitize_skill_market_key(preferred_key) or "skill"
    candidate = base_key
    suffix = 1
    while candidate in existing_keys:
        suffix += 1
        candidate = f"{base_key}-{suffix}"
    existing_keys.add(candidate)
    return candidate
def ensure_default_skill_market_items(session: Session) -> Dict[str, List[str]]:
    """Seed/refresh SkillMarketItem rows from the bundled skill packages.

    Returns ``{"created": [...], "updated": [...]}`` listing the zip
    filenames touched. Commits the session only when something changed.
    """
    report: Dict[str, List[str]] = {"created": [], "updated": []}
    default_packages = _iter_bundled_skill_packages()
    if not default_packages:
        return report
    rows = session.exec(select(SkillMarketItem)).all()
    # Index existing rows by zip filename; collect used keys so new rows
    # get unique skill keys.
    existing_by_zip = {str(row.zip_filename or "").strip(): row for row in rows if str(row.zip_filename or "").strip()}
    existing_keys = {str(row.skill_key or "").strip() for row in rows if str(row.skill_key or "").strip()}
    for bundled_path in default_packages:
        # Prefer the runtime copy of the package when present; fall back to
        # the bundled original.
        runtime_path = RUNTIME_SKILLS_ROOT / bundled_path.name
        source_path = runtime_path if runtime_path.exists() else bundled_path
        try:
            summary = _extract_skill_zip_summary(source_path)
        except Exception:
            # Unreadable/corrupt archive: skip it rather than fail the seed.
            continue
        zip_filename = bundled_path.name
        entry_names_json = json.dumps(summary["entry_names"], ensure_ascii=False)
        # Display name derives from the first entry name, else the zip stem.
        display_name = _default_display_name((summary["entry_names"] or [bundled_path.stem])[0])
        zip_size_bytes = int(source_path.stat().st_size) if source_path.exists() else 0
        row = existing_by_zip.get(zip_filename)
        if row is None:
            # No row for this package yet: create one.
            row = SkillMarketItem(
                skill_key=_resolve_unique_skill_key(existing_keys, bundled_path.stem),
                display_name=display_name,
                description=str(summary["description"] or "").strip(),
                zip_filename=zip_filename,
                zip_size_bytes=zip_size_bytes,
                entry_names_json=entry_names_json,
            )
            session.add(row)
            existing_by_zip[zip_filename] = row
            report["created"].append(zip_filename)
            continue
        # Existing row: refresh derived fields; display name and description
        # are only filled in when currently blank (never overwritten).
        changed = False
        if int(row.zip_size_bytes or 0) != zip_size_bytes:
            row.zip_size_bytes = zip_size_bytes
            changed = True
        if str(row.entry_names_json or "") != entry_names_json:
            row.entry_names_json = entry_names_json
            changed = True
        if not str(row.display_name or "").strip():
            row.display_name = display_name
            changed = True
        if not str(row.description or "").strip() and str(summary["description"] or "").strip():
            row.description = str(summary["description"] or "").strip()
            changed = True
        if changed:
            report["updated"].append(zip_filename)
    if report["created"] or report["updated"]:
        session.commit()
    return report