Compare commits

...

2 Commits

2 changed files with 21 additions and 16 deletions

View File

@@ -5,7 +5,7 @@ const CONFIG = {
apiUrl: 'https://open.bigmodel.cn/api/paas/v4/chat/completions',
apiKey: '2259e33a1357460abe17919aaf81e73d.K44a8LPQTmFM5PKm', // SECURITY(review): live API credential committed to client-side source — revoke this key and serve requests through a backend that holds the secret instead
model: 'glm-4.5-air',
thinkingModel: 'glm-4-flash-thinking', // 思考模型
thinkingModel: 'glm-z1-flash', // 智谱思考模型
maxTokens: 2048
};
@@ -395,10 +395,12 @@ async function streamGenerate(userMsgIndex) {
sendBtn.disabled = true;
const aiMessageIndex = currentConversation.messages.length;
// 只有开启深度思考时才添加 thinking 字段
currentConversation.messages.push({
role: 'assistant',
content: '',
thinking: '' // 思考内容
...(enableThinking ? { thinking: '' } : {})
});
renderMessages();
@@ -406,8 +408,14 @@ async function streamGenerate(userMsgIndex) {
const contentEl = lastMessageEl.querySelector('.message-content');
const thinkingEl = lastMessageEl.querySelector('.thinking-content');
// 深度思考模式:思考块默认展开
if (enableThinking && thinkingEl) {
const thinkingBlock = lastMessageEl.querySelector('.thinking-block');
if (thinkingBlock) thinkingBlock.classList.add('expanded');
thinkingEl.innerHTML = '<span class="streaming-cursor">思考中...</span>';
}
contentEl.innerHTML = '<span class="streaming-cursor">▌</span>';
if (thinkingEl) thinkingEl.innerHTML = '<span class="streaming-cursor">▌</span>';
try {
// 根据开关选择模型
@@ -437,7 +445,7 @@ async function streamGenerate(userMsgIndex) {
const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = '';
let thinkingComplete = false; // 思考是否完成
let thinkingOutputStarted = false; // 正式内容是否开始输出
while (true) {
const { done, value } = await reader.read();
@@ -457,24 +465,21 @@ async function streamGenerate(userMsgIndex) {
const delta = data.choices?.[0]?.delta;
if (delta) {
// 处理思考内容
if (delta.reasoning_content || delta.thinking) {
// 只有开启深度思考时才处理思考内容
if (enableThinking && (delta.reasoning_content || delta.thinking)) {
const thinkingChunk = delta.reasoning_content || delta.thinking;
currentConversation.messages[aiMessageIndex].thinking += thinkingChunk;
if (thinkingEl) {
thinkingEl.innerHTML = renderMarkdown(currentConversation.messages[aiMessageIndex].thinking) + '<span class="streaming-cursor">▌</span>';
// 思考时展开
const thinkingBlock = lastMessageEl.querySelector('.thinking-block');
if (thinkingBlock) thinkingBlock.classList.add('expanded');
}
scrollToBottom();
}
// 处理正式回复内容
if (delta.content) {
// 如果开始输出正式内容,说明思考完成
if (currentConversation.messages[aiMessageIndex].thinking && !thinkingComplete) {
thinkingComplete = true;
// 如果开启深度思考且开始输出正式内容,折叠思考块
if (enableThinking && !thinkingOutputStarted && currentConversation.messages[aiMessageIndex].thinking) {
thinkingOutputStarted = true;
// 折叠思考内容
const thinkingBlock = lastMessageEl.querySelector('.thinking-block');
if (thinkingBlock) thinkingBlock.classList.remove('expanded');
@@ -492,7 +497,7 @@ async function streamGenerate(userMsgIndex) {
}
// 最终渲染
if (thinkingEl && currentConversation.messages[aiMessageIndex].thinking) {
if (thinkingEl && enableThinking && currentConversation.messages[aiMessageIndex].thinking) {
thinkingEl.innerHTML = renderMarkdown(currentConversation.messages[aiMessageIndex].thinking);
}
contentEl.innerHTML = renderMarkdown(currentConversation.messages[aiMessageIndex].content);

View File

@@ -8,12 +8,12 @@
<meta http-equiv="Pragma" content="no-cache">
<meta http-equiv="Expires" content="0">
<title>AI助手</title>
<link rel="stylesheet" href="style.css?v=2.2.0">
<link rel="stylesheet" href="style.css?v=2.2.2">
<link rel="manifest" href="manifest.json">
</head>
<body>
<div id="app"></div>
<script src="marked.min.js?v=2.2.0"></script>
<script src="app.js?v=2.2.0"></script>
<script src="marked.min.js?v=2.2.2"></script>
<script src="app.js?v=2.2.2"></script>
</body>
</html>