// NOTE: The lines below are artifacts of the web viewer this file was copied
// from (path, size, and a confusable-Unicode warning), preserved as a comment
// so the module remains valid TypeScript.
// imeeting/frontend/src/pages/business/RealtimeAsrSession.tsx
// 861 lines, 32 KiB, TypeScript
// Viewer warning: "This file contains ambiguous Unicode characters" — the
// source mixes CJK text with Latin; verify rendering before assuming exactness.

import { useEffect, useMemo, useRef, useState } from "react";
import {
Alert,
Avatar,
Badge,
Button,
Card,
Col,
Empty,
Row,
Space,
Statistic,
Tag,
Typography,
message,
} from "antd";
import {
AudioOutlined,
ClockCircleOutlined,
PauseCircleOutlined,
PlayCircleOutlined,
SoundOutlined,
SyncOutlined,
UserOutlined,
} from "@ant-design/icons";
import { useNavigate, useParams } from "react-router-dom";
import dayjs from "dayjs";
import PageHeader from "../../components/shared/PageHeader";
import {
appendRealtimeTranscripts,
completeRealtimeMeeting,
getMeetingDetail,
getRealtimeMeetingSessionStatus,
getTranscripts,
openRealtimeMeetingSocketSession,
pauseRealtimeMeeting,
type MeetingTranscriptVO,
type MeetingVO,
type RealtimeMeetingSessionStatus,
type RealtimeTranscriptItemDTO,
type RealtimeSocketSessionVO,
} from "../../api/business/meeting";
const { Text, Title } = Typography;
// Microphone capture format: 16 kHz mono. Samples are forwarded to the ASR
// websocket in CHUNK_SIZE slices (1280 samples ≈ 80 ms at 16 kHz) —
// presumably the frame size the realtime service expects; confirm with backend.
const SAMPLE_RATE = 16000;
const CHUNK_SIZE = 1280;
// Speaker field on a websocket payload: absent, a plain label string, or a
// structured { name, user_id } object.
type WsSpeaker = string | { name?: string; user_id?: string | number } | undefined;
// Union of message shapes seen on the realtime websocket: proxy envelopes
// ("partial" / "segment" / "proxy_ready" / "error", payload under `data`) and
// the raw engine payload (top-level `text` / `is_final` / `timestamp`).
type WsMessage = {
type?: string;
code?: number | string;
message?: string;
data?: {
text?: string;
is_final?: boolean;
start?: number; // segment start in seconds (converted via toMs below)
end?: number; // segment end in seconds (converted via toMs below)
speaker_id?: string;
speaker_name?: string;
user_id?: string | number | null;
};
text?: string;
is_final?: boolean;
speaker?: WsSpeaker;
timestamp?: number[][]; // [start, end] pairs, used directly as millisecond offsets
};
// One rendered transcript row (a finalized segment or the streaming card).
type TranscriptCard = {
id: string;
speakerName: string;
userId?: string | number;
text: string;
startTime?: number; // milliseconds
endTime?: number; // milliseconds
final: boolean;
};
// Locally cached configuration needed to (re)start the realtime session;
// persisted in sessionStorage under getSessionKey(meetingId).
type RealtimeMeetingSessionDraft = {
meetingId: number;
meetingTitle: string;
asrModelName: string;
summaryModelName: string;
asrModelId: number;
mode: string; // e.g. "2pass" — the default applied elsewhere in this file
language: string; // e.g. "auto"
useSpkId: number; // 1 = speaker diarization on, 0 = off
enablePunctuation: boolean;
enableItn: boolean;
enableTextRefine: boolean;
saveAudio: boolean;
hotwords: Array<{ hotword: string; weight: number }>;
};
function getSessionKey(meetingId: number) {
  // sessionStorage key under which this meeting's realtime draft is cached.
  return "realtimeMeetingSession:" + String(meetingId);
}
function buildDraftFromStatus(meetingId: number, meeting: MeetingVO | null, status?: RealtimeMeetingSessionStatus | null): RealtimeMeetingSessionDraft | null {
  // Rebuild a local session draft from the backend's resume config so that a
  // reloaded tab (or another device) can continue a paused realtime session.
  // Returns null when the status carries no usable ASR model id.
  const config = status?.resumeConfig;
  if (!config || !config.asrModelId) {
    return null;
  }
  const fallbackTitle = `实时会议 ${meetingId}`;
  return {
    meetingId,
    meetingTitle: meeting?.title || fallbackTitle,
    asrModelName: "ASR",
    summaryModelName: "LLM",
    asrModelId: config.asrModelId,
    mode: config.mode || "2pass",
    language: config.language || "auto",
    useSpkId: config.useSpkId ? 1 : 0,
    enablePunctuation: config.enablePunctuation !== false,
    enableItn: config.enableItn !== false,
    enableTextRefine: Boolean(config.enableTextRefine),
    saveAudio: Boolean(config.saveAudio),
    hotwords: config.hotwords || [],
  };
}
function floatTo16BitPCM(input: Float32Array) {
  // Convert normalized float samples in [-1, 1] into little-endian signed
  // 16-bit PCM — the binary format sent over the ASR websocket.
  const view = new DataView(new ArrayBuffer(input.length * 2));
  input.forEach((sample, index) => {
    const clamped = Math.min(1, Math.max(-1, sample));
    // Asymmetric scaling: negative range reaches -32768, positive tops at 32767.
    const scaled = clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff;
    view.setInt16(index * 2, scaled, true);
  });
  return view.buffer;
}
function resolveSpeaker(speaker?: WsSpeaker) {
  // Normalize the speaker field of a websocket payload (absent, plain string,
  // or { name, user_id } object) into a consistent display structure.
  //
  // Fix: a numeric user_id of 0 is a legitimate identifier; the previous
  // truthiness check collapsed it to the anonymous "spk_0" / "Unknown"
  // speaker. An empty-string id is still treated as absent, as before.
  if (!speaker) {
    return { speakerId: "spk_0", speakerName: "Unknown", userId: undefined };
  }
  if (typeof speaker === "string") {
    return { speakerId: speaker, speakerName: speaker, userId: undefined };
  }
  const rawId = speaker.user_id;
  const hasId = rawId !== undefined && rawId !== null && rawId !== "";
  return {
    speakerId: hasId ? String(rawId) : "spk_0",
    speakerName: speaker.name || (hasId ? String(rawId) : "Unknown"),
    userId: speaker.user_id,
  };
}
function formatClock(totalSeconds: number) {
  // Render an elapsed-seconds counter as HH:MM:SS, omitting the hour part
  // while it is zero (MM:SS).
  const pad = (n: number) => n.toString().padStart(2, "0");
  const hours = Math.floor(totalSeconds / 3600);
  const minutes = Math.floor((totalSeconds % 3600) / 60);
  const seconds = totalSeconds % 60;
  const tail = `${pad(minutes)}:${pad(seconds)}`;
  return hours > 0 ? `${pad(hours)}:${tail}` : tail;
}
function formatTranscriptTime(ms?: number) {
  // Millisecond offset -> "MM:SS"; placeholder string when the offset is
  // missing.
  if (ms == null) {
    return "--:--";
  }
  const totalSeconds = Math.floor(ms / 1000);
  const mm = String(Math.floor(totalSeconds / 60)).padStart(2, "0");
  const ss = String(totalSeconds % 60).padStart(2, "0");
  return `${mm}:${ss}`;
}
function toMs(value?: number) {
  // Seconds (possibly fractional) -> integer milliseconds; missing or NaN
  // input passes through as undefined.
  if (value == null || Number.isNaN(value)) {
    return undefined;
  }
  return Math.round(value * 1000);
}
function buildRealtimeProxyWsUrl(socketSession: RealtimeSocketSessionVO) {
  // Derive ws/wss from the current page protocol and append the one-time
  // session token to the proxy path returned by the backend.
  const secure = window.location.protocol === "https:";
  const scheme = secure ? "wss" : "ws";
  const token = encodeURIComponent(socketSession.sessionToken);
  return `${scheme}://${window.location.host}${socketSession.path}?sessionToken=${token}`;
}
function normalizeWsMessage(payload: WsMessage) {
  // Collapse the two wire formats — proxy "partial"/"segment" envelopes and
  // the raw engine payload — into one shape the render loop understands, or
  // null when the payload carries no transcript text.
  if (payload.type === "partial" || payload.type === "segment") {
    const data = payload.data ?? {};
    const speaker: WsSpeaker = {
      name: data.speaker_name,
      user_id: data.user_id ?? data.speaker_id,
    };
    return {
      text: data.text || "",
      // "segment" frames are always final; "partial" frames may still carry
      // an explicit is_final flag.
      isFinal: payload.type === "segment" || !!data.is_final,
      speaker,
      startTime: toMs(data.start),
      endTime: toMs(data.end),
    };
  }
  if (!payload.text) {
    return null;
  }
  const stamps = payload.timestamp;
  return {
    text: payload.text,
    isFinal: !!payload.is_final,
    speaker: payload.speaker,
    startTime: stamps?.[0]?.[0],
    endTime: stamps?.[stamps.length - 1]?.[1],
  };
}
export function RealtimeAsrSession() {
// Realtime ASR meeting console: opens a proxied WebSocket to the recognition
// service, streams microphone audio to it as 16-bit PCM, renders partial and
// final transcripts, and drives the pause / resume / complete lifecycle of
// the realtime meeting session.
const navigate = useNavigate();
const { id } = useParams<{ id: string }>();
const meetingId = Number(id);
// Server-side entities and page state.
const [meeting, setMeeting] = useState<MeetingVO | null>(null);
const [sessionDraft, setSessionDraft] = useState<RealtimeMeetingSessionDraft | null>(null);
const [loading, setLoading] = useState(true);
const [recording, setRecording] = useState(false);
const [connecting, setConnecting] = useState(false);
const [finishing, setFinishing] = useState(false);
const [pausing, setPausing] = useState(false);
const [statusText, setStatusText] = useState("待开始");
// Current (non-final) hypothesis text and the speaker label shown with it.
const [streamingText, setStreamingText] = useState("");
const [streamingSpeaker, setStreamingSpeaker] = useState("Unknown");
const [transcripts, setTranscripts] = useState<TranscriptCard[]>([]);
const [audioLevel, setAudioLevel] = useState(0);
const [elapsedSeconds, setElapsedSeconds] = useState(0);
const [sessionStatus, setSessionStatus] = useState<RealtimeMeetingSessionStatus | null>(null);
// Live handles for the audio pipeline and websocket; refs so event handlers
// always see the current objects without triggering re-renders.
const transcriptRef = useRef<HTMLDivElement | null>(null);
const wsRef = useRef<WebSocket | null>(null);
const audioContextRef = useRef<AudioContext | null>(null);
const processorRef = useRef<ScriptProcessorNode | null>(null);
const audioSourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
const streamRef = useRef<MediaStream | null>(null);
const audioBufferRef = useRef<number[]>([]);
// Guards: completeOnceRef prevents double completion/pause on unload;
// sessionStartedRef dedupes the "proxy_ready" handshake frame.
const completeOnceRef = useRef(false);
const startedAtRef = useRef<number | null>(null);
const sessionStartedRef = useRef(false);
const finalTranscriptCount = transcripts.length;
// Live character count = all finalized text plus the streaming hypothesis.
const totalTranscriptChars = useMemo(
() => transcripts.reduce((sum, item) => sum + item.text.length, 0) + streamingText.length,
[streamingText, transcripts],
);
const statusColor = recording ? "#1677ff" : connecting || finishing ? "#faad14" : "#94a3b8";
// True when another tab/device currently holds the realtime connection.
const hasRemoteActiveConnection = Boolean(sessionStatus?.activeConnection) && !recording && !connecting;
// Initial load: fetch meeting detail, existing transcripts, and realtime
// session status in parallel; restore (or rebuild from server state) the
// local session draft and derive a human-readable status line.
useEffect(() => {
if (!meetingId || Number.isNaN(meetingId)) {
return;
}
const loadData = async () => {
setLoading(true);
try {
const stored = sessionStorage.getItem(getSessionKey(meetingId));
const parsedDraft = stored ? JSON.parse(stored) : null;
const [detailRes, transcriptRes, statusRes] = await Promise.all([
getMeetingDetail(meetingId),
getTranscripts(meetingId),
getRealtimeMeetingSessionStatus(meetingId),
]);
const detail = detailRes.data.data;
const realtimeStatus = statusRes.data.data;
setMeeting(detail);
setSessionStatus(realtimeStatus);
// Prefer the draft cached in this browser; fall back to the server's
// resume config so a different tab/device can pick up the session.
const fallbackDraft = buildDraftFromStatus(meetingId, detail, realtimeStatus);
const resolvedDraft = parsedDraft || fallbackDraft;
setSessionDraft(resolvedDraft);
if (resolvedDraft) {
sessionStorage.setItem(getSessionKey(meetingId), JSON.stringify(resolvedDraft));
}
if (realtimeStatus?.status === "PAUSED_RESUMABLE") {
setStatusText(`已暂停,可在 ${Math.max(1, Math.ceil((realtimeStatus.remainingSeconds || 0) / 60))} 分钟内继续`);
} else if (realtimeStatus?.status === "PAUSED_EMPTY") {
setStatusText("已暂停,可继续识别");
} else if (realtimeStatus?.status === "ACTIVE" && realtimeStatus?.activeConnection) {
setStatusText("当前会议已有活跃实时连接");
} else if (realtimeStatus?.status === "COMPLETING") {
setStatusText("正在生成总结");
}
setTranscripts(
(transcriptRes.data.data || []).map((item: MeetingTranscriptVO) => ({
id: String(item.id),
speakerName: item.speakerName || item.speakerId || "发言人",
text: item.content,
startTime: item.startTime,
endTime: item.endTime,
final: true,
})),
);
} catch {
message.error("加载实时会议失败");
} finally {
setLoading(false);
}
};
void loadData();
}, [meetingId]);
// Tick the elapsed-time counter once a second while recording.
useEffect(() => {
if (!recording) {
setElapsedSeconds(0);
return;
}
const timer = window.setInterval(() => {
if (startedAtRef.current) {
setElapsedSeconds(Math.floor((Date.now() - startedAtRef.current) / 1000));
}
}, 1000);
return () => window.clearInterval(timer);
}, [recording]);
// Keep the transcript panel scrolled to the newest entry.
useEffect(() => {
if (!transcriptRef.current) {
return;
}
transcriptRef.current.scrollTop = transcriptRef.current.scrollHeight;
}, [streamingText, transcripts]);
// Best-effort protection when the tab closes: tell the ASR socket speaking
// has stopped, then ask the backend to pause the session. keepalive lets the
// POST outlive the page unload; failures are ignored.
useEffect(() => {
const handlePageHide = () => {
if (!meetingId || completeOnceRef.current) {
return;
}
const token = localStorage.getItem("accessToken");
if (wsRef.current?.readyState === WebSocket.OPEN) {
wsRef.current.send(JSON.stringify({ is_speaking: false }));
}
fetch(`/api/biz/meeting/${meetingId}/realtime/pause`, {
method: "POST",
keepalive: true,
headers: {
"Content-Type": "application/json",
...(token ? { Authorization: `Bearer ${token}` } : {}),
},
body: JSON.stringify({}),
}).catch(() => undefined);
};
window.addEventListener("pagehide", handlePageHide);
return () => window.removeEventListener("pagehide", handlePageHide);
}, [meetingId]);
// Tear down the microphone capture chain and release all audio resources.
const shutdownAudioPipeline = async () => {
processorRef.current?.disconnect();
audioSourceRef.current?.disconnect();
if (streamRef.current) {
streamRef.current.getTracks().forEach((track) => track.stop());
}
if (audioContextRef.current && audioContextRef.current.state !== "closed") {
await audioContextRef.current.close();
}
streamRef.current = null;
processorRef.current = null;
audioSourceRef.current = null;
audioContextRef.current = null;
audioBufferRef.current = [];
setAudioLevel(0);
};
// Abort the session after an unrecoverable socket or microphone error.
const handleFatalRealtimeError = async (errorMessage: string) => {
setConnecting(false);
setRecording(false);
setStatusText("连接失败");
sessionStartedRef.current = false;
wsRef.current?.close();
wsRef.current = null;
await shutdownAudioPipeline();
startedAtRef.current = null;
message.error(errorMessage);
};
// Open the microphone and wire getUserMedia -> ScriptProcessor so captured
// samples are buffered, chunked to CHUNK_SIZE, and sent to the websocket as
// 16-bit PCM. Throws when not in a secure context (getUserMedia requirement).
// NOTE(review): ScriptProcessorNode is deprecated in favor of AudioWorklet;
// kept unchanged here.
const startAudioPipeline = async () => {
if (!window.isSecureContext || !navigator.mediaDevices?.getUserMedia) {
throw new Error("当前浏览器环境不支持麦克风访问。请使用 localhost 或 HTTPS 域名访问系统。");
}
const stream = await navigator.mediaDevices.getUserMedia({
audio: {
channelCount: 1,
echoCancellation: true,
noiseSuppression: true,
},
});
const audioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const source = audioContext.createMediaStreamSource(stream);
const processor = audioContext.createScriptProcessor(4096, 1, 1);
streamRef.current = stream;
audioContextRef.current = audioContext;
audioSourceRef.current = source;
processorRef.current = processor;
processor.onaudioprocess = (event) => {
const input = event.inputBuffer.getChannelData(0);
let maxAmplitude = 0;
for (let i = 0; i < input.length; i += 1) {
const amplitude = Math.abs(input[i]);
if (amplitude > maxAmplitude) {
maxAmplitude = amplitude;
}
audioBufferRef.current.push(input[i]);
}
// Peak amplitude of the frame drives the level meter (clamped to 0-100).
setAudioLevel(Math.min(100, Math.round(maxAmplitude * 180)));
// Drain the sample buffer in fixed-size chunks while connected.
while (audioBufferRef.current.length >= CHUNK_SIZE) {
const chunk = audioBufferRef.current.slice(0, CHUNK_SIZE);
audioBufferRef.current = audioBufferRef.current.slice(CHUNK_SIZE);
if (wsRef.current?.readyState === WebSocket.OPEN) {
wsRef.current.send(floatTo16BitPCM(new Float32Array(chunk)));
}
}
};
source.connect(processor);
processor.connect(audioContext.destination);
};
// Persist one finalized transcript segment to the backend as it arrives.
const saveFinalTranscript = async (normalized: {
text: string;
speaker?: WsSpeaker;
startTime?: number;
endTime?: number;
}) => {
if (!normalized.text || !meetingId) {
return;
}
const speaker = resolveSpeaker(normalized.speaker);
const item: RealtimeTranscriptItemDTO = {
speakerId: speaker.speakerId,
speakerName: speaker.speakerName,
content: normalized.text,
startTime: normalized.startTime,
endTime: normalized.endTime,
};
await appendRealtimeTranscripts(meetingId, [item]);
};
// Pause: stop audio and socket locally, then mark the session paused on the
// backend so it can be resumed later.
const handlePause = async () => {
if (!meetingId || pausing || finishing || (!recording && !connecting)) {
return;
}
setPausing(true);
setStatusText("暂停识别中...");
try {
if (wsRef.current?.readyState === WebSocket.OPEN) {
wsRef.current.send(JSON.stringify({ is_speaking: false }));
}
wsRef.current?.close();
wsRef.current = null;
sessionStartedRef.current = false;
await shutdownAudioPipeline();
const pauseRes = await pauseRealtimeMeeting(meetingId);
setSessionStatus(pauseRes.data.data);
setRecording(false);
setConnecting(false);
startedAtRef.current = null;
setStatusText(pauseRes.data.data?.hasTranscript ? "已暂停,可继续识别" : "已暂停,当前还没有转录内容");
message.success("实时识别已暂停");
} catch (error) {
setStatusText("暂停失败");
message.error(error instanceof Error ? error.message : "暂停实时识别失败");
} finally {
setPausing(false);
}
};
// Start (or resume): open the backend socket session, wait for the proxy to
// report the upstream ASR service ready, then start microphone capture.
const handleStart = async () => {
if (!sessionDraft?.asrModelId) {
message.error("未找到实时识别配置,请返回创建页重新进入");
return;
}
if (recording || connecting) {
return;
}
setConnecting(true);
setStatusText("连接识别服务...");
sessionStartedRef.current = false;
try {
const socketSessionRes = await openRealtimeMeetingSocketSession(meetingId, {
asrModelId: sessionDraft.asrModelId,
mode: sessionDraft.mode || "2pass",
language: sessionDraft.language || "auto",
useSpkId: sessionDraft.useSpkId,
enablePunctuation: sessionDraft.enablePunctuation !== false,
enableItn: sessionDraft.enableItn !== false,
enableTextRefine: !!sessionDraft.enableTextRefine,
saveAudio: !!sessionDraft.saveAudio,
hotwords: sessionDraft.hotwords || [],
});
const socketSession = socketSessionRes.data.data;
const socket = new WebSocket(buildRealtimeProxyWsUrl(socketSession));
socket.binaryType = "arraybuffer";
wsRef.current = socket;
socket.onopen = () => {
setStatusText("识别服务连接中,等待第三方服务就绪...");
};
socket.onmessage = (event) => {
try {
const payload = JSON.parse(event.data) as WsMessage;
// Handshake: the proxy says the upstream service is ready — send the
// start message exactly once, then bring up the audio pipeline.
if (payload.type === "proxy_ready") {
if (sessionStartedRef.current) {
return;
}
sessionStartedRef.current = true;
setStatusText("启动音频采集中...");
socket.send(JSON.stringify(socketSession.startMessage || {}));
void startAudioPipeline()
.then(() => {
startedAtRef.current = Date.now();
setConnecting(false);
setRecording(true);
setSessionStatus((prev) => prev ? { ...prev, status: "ACTIVE", activeConnection: true } : prev);
setStatusText("实时识别中");
})
.catch((error) => {
void handleFatalRealtimeError(error instanceof Error ? error.message : "启动麦克风失败");
});
return;
}
// Error frames carry a code or type "error" plus a message — fatal.
if ((payload.code || payload.type === "error") && payload.message) {
setStatusText(payload.message);
void handleFatalRealtimeError(payload.message);
return;
}
const normalized = normalizeWsMessage(payload);
if (!normalized) {
return;
}
const speaker = resolveSpeaker(normalized.speaker);
if (normalized.isFinal) {
// Final segment: append to the list, clear the streaming hypothesis,
// and persist it to the backend in the background.
setTranscripts((prev) => [
...prev,
{
id: `${Date.now()}-${Math.random()}`,
speakerName: speaker.speakerName,
userId: speaker.userId,
text: normalized.text,
startTime: normalized.startTime,
endTime: normalized.endTime,
final: true,
},
]);
setStreamingText("");
setStreamingSpeaker("Unknown");
void saveFinalTranscript(normalized);
} else {
setStreamingText(normalized.text);
setStreamingSpeaker(speaker.speakerName);
}
} catch {
// ignore invalid payload
}
};
socket.onerror = () => {
void handleFatalRealtimeError("实时识别 WebSocket 连接失败");
};
socket.onclose = () => {
setConnecting(false);
setRecording(false);
sessionStartedRef.current = false;
setSessionStatus((prev) => prev ? { ...prev, activeConnection: false } : prev);
};
} catch (error) {
setConnecting(false);
setStatusText("启动失败");
sessionStartedRef.current = false;
message.error(error instanceof Error ? error.message : "启动实时识别失败");
}
};
// Finish the meeting: close audio/socket, ask the backend to complete the
// session and start summary generation. completeOnceRef guards against double
// submission and is rolled back if completion fails.
const handleStop = async (navigateAfterStop = true) => {
if (!meetingId || completeOnceRef.current) {
return;
}
completeOnceRef.current = true;
setFinishing(true);
setStatusText("结束会议中...");
if (wsRef.current?.readyState === WebSocket.OPEN) {
wsRef.current.send(JSON.stringify({ is_speaking: false }));
}
wsRef.current?.close();
wsRef.current = null;
sessionStartedRef.current = false;
await shutdownAudioPipeline();
try {
await completeRealtimeMeeting(meetingId, {});
sessionStorage.removeItem(getSessionKey(meetingId));
setSessionStatus((prev) => prev ? { ...prev, status: "COMPLETING", canResume: false, activeConnection: false } : prev);
setStatusText("已提交总结任务");
message.success("实时会议已结束,正在生成总结");
if (navigateAfterStop) {
navigate(`/meetings/${meetingId}`);
}
} catch (error) {
completeOnceRef.current = false;
const errorMessage = error instanceof Error ? error.message : "结束会议失败";
// Backend rejects completion while no transcript exists (matched here by
// the error text); surface that as a resumable state, not a hard failure.
if (errorMessage.includes("当前还没有转录内容")) {
try {
const statusRes = await getRealtimeMeetingSessionStatus(meetingId);
setSessionStatus(statusRes.data.data);
} catch {
// ignore status refresh failure
}
setStatusText("当前还没有转录内容,可继续识别");
} else {
setStatusText("结束失败");
}
} finally {
setRecording(false);
setFinishing(false);
startedAtRef.current = null;
sessionStartedRef.current = false;
}
};
// Loading and missing-meeting placeholders.
if (loading) {
return (
<div style={{ padding: 24 }}>
<Card bordered={false} style={{ borderRadius: 18 }}>
<div style={{ textAlign: "center", padding: "96px 0" }}>
<SyncOutlined spin />
</div>
</Card>
</div>
);
}
if (!meeting) {
return (
<div style={{ padding: 24 }}>
<Card bordered={false} style={{ borderRadius: 18 }}>
<Empty description="会议不存在" />
</Card>
</div>
);
}
// Main layout: left control/configuration column + right transcript stream.
// NOTE(review): several JSX elements below render with empty children (e.g.
// the warning-action Button and some label <Text>/<Title> nodes) — their text
// appears to have been stripped from this copy; confirm against the original.
return (
<div style={{ height: "100%", display: "flex", flexDirection: "column", overflow: "hidden" }}>
<style>{`
.ant-list-item.transcript-row,
.live-transcript-row {
display: grid !important;
grid-template-columns: 72px minmax(0, 1fr);
justify-content: flex-start !important;
align-items: flex-start !important;
gap: 12px;
padding: 12px 0;
border-bottom: 0;
}
.transcript-time {
position: relative;
padding-top: 10px;
color: #58627f;
font-size: 14px;
font-weight: 700;
}
.transcript-time::after {
content: "";
display: inline-block;
width: 8px;
height: 8px;
margin-left: 8px;
border-radius: 50%;
background: #6e76ff;
vertical-align: middle;
}
.transcript-row:not(:last-child) .transcript-time::before,
.live-transcript-row:not(:last-child) .transcript-time::before {
content: "";
position: absolute;
top: 30px;
left: 38px;
width: 1px;
height: calc(100% + 12px);
background: rgba(218, 223, 243, 0.96);
}
.transcript-entry {
justify-self: start;
display: flex;
flex-direction: column;
gap: 10px;
width: 100%;
min-width: 0;
}
.transcript-meta {
display: flex;
align-items: center;
gap: 8px;
flex-wrap: wrap;
color: #8e98b8;
}
.transcript-avatar {
background: linear-gradient(135deg, #7a84ff, #9363ff) !important;
}
.transcript-speaker {
color: #5e698d;
font-weight: 700;
}
.transcript-bubble {
display: block;
width: 100%;
box-sizing: border-box;
padding: 14px 18px;
border-radius: 16px;
background: #ffffff;
border: 1px solid rgba(234, 238, 248, 1);
box-shadow: 0 12px 28px rgba(137, 149, 193, 0.08);
color: #3f496a;
line-height: 1.86;
white-space: pre-wrap;
}
`}</style>
<PageHeader
title={meeting.title || "实时识别中"}
subtitle={`会议编号 #${meeting.id} · ${dayjs(meeting.meetingTime).format("YYYY-MM-DD HH:mm")}`}
extra={<Badge color={statusColor} text={statusText} />}
/>
<div style={{ flex: 1, minHeight: 0, overflow: "hidden" }}>
{!sessionDraft ? (
<Card bordered={false} style={{ borderRadius: 18 }}>
<Alert
type="warning"
showIcon
message="缺少实时识别启动配置"
description="这个会议的实时识别配置没有保存在当前浏览器中,请返回创建页重新进入。"
action={<Button size="small" onClick={() => navigate("/meeting-live-create")}></Button>}
/>
</Card>
) : (
<Row gutter={16} style={{ height: "100%" }}>
<Col xs={24} xl={7} style={{ height: "100%" }}>
<Card
bordered={false}
style={{ height: "100%", borderRadius: 18, boxShadow: "0 8px 22px rgba(15,23,42,0.05)" }}
bodyStyle={{ height: "100%", padding: 16, display: "flex", flexDirection: "column" }}
>
<Space direction="vertical" size={16} style={{ width: "100%" }}>
<div style={{ padding: 14, borderRadius: 16, background: "linear-gradient(135deg, #0f172a 0%, #1e40af 60%, #60a5fa 100%)", color: "#fff" }}>
<Space direction="vertical" size={8}>
<Tag color="blue" style={{ width: "fit-content", margin: 0 }}>LIVE SESSION</Tag>
<Title level={4} style={{ color: "#fff", margin: 0 }}></Title>
<Text style={{ color: "rgba(255,255,255,0.82)" }}></Text>
</Space>
</div>
<Space style={{ width: "100%" }}>
<Button type="primary" icon={<PlayCircleOutlined />} disabled={recording || connecting || finishing || pausing || hasRemoteActiveConnection} loading={connecting} onClick={() => void handleStart()} style={{ flex: 1, height: 42 }}>
{sessionStatus?.status === "ACTIVE" && hasRemoteActiveConnection ? "连接占用中" : sessionStatus?.status === "PAUSED_EMPTY" || sessionStatus?.status === "PAUSED_RESUMABLE" ? "继续识别" : "开始识别"}
</Button>
<Button icon={<PauseCircleOutlined />} disabled={(!recording && !connecting) || finishing || pausing} loading={pausing} onClick={() => void handlePause()} style={{ flex: 1, height: 42 }}>
</Button>
<Button danger icon={<PauseCircleOutlined />} disabled={(!recording && !connecting && !sessionStatus?.hasTranscript) || finishing || pausing} loading={finishing} onClick={() => void handleStop(true)} style={{ flex: 1, height: 42 }}>
</Button>
</Space>
<Row gutter={[12, 12]}>
<Col span={12}><Statistic title="已识别片段" value={finalTranscriptCount} /></Col>
<Col span={12}><Statistic title="实时字数" value={totalTranscriptChars} /></Col>
<Col span={12}><Statistic title="已录时长" value={formatClock(elapsedSeconds)} prefix={<ClockCircleOutlined />} /></Col>
<Col span={12}><Statistic title="说话人区分" value={sessionDraft.useSpkId ? "开启" : "关闭"} /></Col>
</Row>
</Space>
<div style={{ marginTop: 12, padding: 14, borderRadius: 14, background: "#fafcff", border: "1px solid #edf2ff" }}>
<Space direction="vertical" size={10} style={{ width: "100%" }}>
<div style={{ display: "flex", justifyContent: "space-between" }}><Text type="secondary">ASR </Text><Text strong>{sessionDraft.asrModelName}</Text></div>
<div style={{ display: "flex", justifyContent: "space-between" }}><Text type="secondary"></Text><Text strong>{sessionDraft.summaryModelName}</Text></div>
<div style={{ display: "flex", justifyContent: "space-between" }}><Text type="secondary"></Text><Text strong>{sessionDraft.mode}</Text></div>
<div style={{ display: "flex", justifyContent: "space-between" }}><Text type="secondary"></Text><Text strong>{sessionDraft.hotwords.length}</Text></div>
<div>
<Text type="secondary"></Text>
<div style={{ marginTop: 8, height: 10, borderRadius: 999, background: "#e2e8f0", overflow: "hidden" }}>
<div style={{ width: `${audioLevel}%`, height: "100%", background: "linear-gradient(90deg, #38bdf8, #2563eb)" }} />
</div>
</div>
</Space>
</div>
<div style={{ marginTop: "auto" }}>
<Alert type="info" showIcon message="异常关闭保护" description="最终转录会实时写入会议;页面关闭时会优先尝试暂停会议。当前还没有转录内容时,结束会议会被拦截并保留空会话。" />
</div>
</Card>
</Col>
<Col xs={24} xl={17} style={{ height: "100%" }}>
<Card bordered={false} style={{ borderRadius: 18, boxShadow: "0 8px 22px rgba(15,23,42,0.05)", height: "100%" }} bodyStyle={{ padding: 0, height: "100%", display: "flex", flexDirection: "column" }}>
<div style={{ padding: "16px 20px", borderBottom: "1px solid #f0f0f0", display: "flex", alignItems: "center", justifyContent: "space-between", gap: 12, flexShrink: 0 }}>
<div>
<Title level={4} style={{ margin: 0 }}></Title>
<Text type="secondary">稿</Text>
</div>
<Space wrap>
<Tag icon={<SoundOutlined />} color={recording ? "processing" : sessionStatus?.status === "ACTIVE" && hasRemoteActiveConnection ? "processing" : sessionStatus?.status === "PAUSED_RESUMABLE" || sessionStatus?.status === "PAUSED_EMPTY" ? "warning" : "default"}>{recording ? "采集中" : connecting ? "连接中" : sessionStatus?.status === "ACTIVE" && hasRemoteActiveConnection ? "连接占用中" : sessionStatus?.status === "PAUSED_RESUMABLE" || sessionStatus?.status === "PAUSED_EMPTY" ? "已暂停" : "待命"}</Tag>
<Tag color="blue">{sessionDraft.asrModelName}</Tag>
</Space>
</div>
<div ref={transcriptRef} style={{ flex: 1, minHeight: 0, overflowY: "auto", padding: 18, background: "linear-gradient(180deg, #f8fafc 0%, #ffffff 65%, #f8fafc 100%)" }}>
{transcripts.length === 0 && !streamingText ? (
<div style={{ height: "100%", display: "flex", alignItems: "center", justifyContent: "center" }}>
<Empty description={hasRemoteActiveConnection ? "当前会议已有活跃连接,请先关闭旧连接后再继续。" : "会议已创建,点击左侧开始识别即可进入转写。"} />
</div>
) : (
<Space direction="vertical" size={12} style={{ width: "100%" }}>
{transcripts.map((item) => (
<div key={item.id} className="live-transcript-row">
<div className="transcript-time">{formatTranscriptTime(item.startTime)}</div>
<div className="transcript-entry">
<div className="transcript-meta">
<Avatar icon={<UserOutlined />} className="transcript-avatar" />
<span className="transcript-speaker">{item.speakerName}</span>
{item.userId ? <Tag color="blue">UID: {item.userId}</Tag> : null}
<Text type="secondary">{formatTranscriptTime(item.startTime)} - {formatTranscriptTime(item.endTime)}</Text>
</div>
<div className="transcript-bubble">{item.text}</div>
</div>
</div>
))}
{streamingText ? (
<div className="live-transcript-row">
<div className="transcript-time">--:--</div>
<div className="transcript-entry">
<div className="transcript-meta">
<Avatar icon={<UserOutlined />} className="transcript-avatar" />
<span className="transcript-speaker">{streamingSpeaker}</span>
<Tag color="processing">稿</Tag>
</div>
<div className="transcript-bubble">{streamingText}</div>
</div>
</div>
) : null}
</Space>
)}
</div>
</Card>
</Col>
</Row>
)}
</div>
</div>
);
}
export default RealtimeAsrSession;