
Commit eb77009

feat: optimize ui & streaming config

1 parent b86e747 commit eb77009

12 files changed: +971 -317 lines

examples/serve-astack/backend/.env.example

Lines changed: 17 additions & 1 deletion
@@ -30,4 +30,20 @@ MATH_AGENT_TEMPERATURE=0.3
 TEXT_AGENT_TEMPERATURE=0.7
 
 # Regular chat temperature (0-1)
-CHAT_TEMPERATURE=0.7
+CHAT_TEMPERATURE=0.7
+
+# ===== Streaming performance settings =====
+# Maximum number of LLM tokens
+LLM_MAX_TOKENS=2048
+
+# Streaming delay (milliseconds)
+# 0 = no delay (fastest)
+# 1-5 = barely perceptible delay
+# 10-20 = natural typing feel
+# 50+ = slow, demo-style pacing
+STREAMING_DELAY_MS=0
+
+# Streaming mode
+# true = stream character by character (finer-grained, slightly slower)
+# false = stream word by word (recommended, better performance)
+STREAM_BY_CHARACTER=false
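
Because the delay is applied per chunk, the total artificial latency grows linearly with the length of the response. A minimal TypeScript sketch of that arithmetic (the helper name and the chunk counts are illustrative, not part of the commit):

// Estimate the artificial latency added by STREAMING_DELAY_MS.
// Illustrative helper; not part of this commit.
function estimateStreamingDelay(chunkCount: number, delayMs: number): number {
  // Each chunk waits delayMs before the next write, so the overhead is linear.
  return chunkCount * delayMs;
}

// A ~300-word reply split by words yields roughly 600 chunks (words plus whitespace runs).
console.log(estimateStreamingDelay(600, 0));   // 0 ms     - no artificial delay
console.log(estimateStreamingDelay(600, 5));   // 3000 ms  - barely perceptible per chunk, ~3 s overall
console.log(estimateStreamingDelay(600, 20));  // 12000 ms - typing feel, noticeably slower overall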

examples/serve-astack/backend/src/routes/chat.ts

Lines changed: 27 additions & 10 deletions
@@ -2,6 +2,14 @@ import type { FastifyInstance } from 'fastify';
 import { classifyIntent, getStreamingAgentByIntent } from '../agents/index.js';
 import { createLLMClient, chatWithLLMStreaming } from '../services/llm.js';
 
+// Streaming configuration
+const STREAMING_CONFIG = {
+  // Delay per token/character in milliseconds, configurable via environment variable
+  delayPerToken: parseInt(process.env.STREAMING_DELAY_MS || '0'),
+  // Stream character by character if true, otherwise word by word
+  streamByCharacter: process.env.STREAM_BY_CHARACTER === 'true',
+};
+
 // AI SDK 5.0 compatible types
 interface UIMessagePart {
   type: 'text';
@@ -65,9 +73,9 @@ export default async function chatRoutes(fastify: FastifyInstance) {
     let completionTokens = 0;
 
     try {
-      // Real streaming from LLM
+      // Real streaming from LLM with optimized chunking
       for await (const chunk of chatWithLLMStreaming(llmClient, llmMessages)) {
-        // Send each chunk as it arrives from LLM
+        // Send each chunk as it arrives from LLM - no artificial delays
         const textPart = `0:${JSON.stringify(chunk)}\n`;
         reply.raw.write(textPart);
         completionTokens += chunk.length;
@@ -137,14 +145,23 @@ export default async function chatRoutes(fastify: FastifyInstance) {
         case 'assistant_message': {
           if (chunk.content) {
             fullContent = chunk.content;
-            // Send text chunks character by character using Text Parts
-            const chars = chunk.content.split('');
-            for (const char of chars) {
-              // Text Part: 0:string\n
-              const textPart = `0:${JSON.stringify(char)}\n`;
-              reply.raw.write(textPart);
-              // Small delay for streaming effect
-              await new Promise(resolve => setTimeout(resolve, 50));
+
+            // Smart streaming: character-level or word-level depending on config
+            const chunks = STREAMING_CONFIG.streamByCharacter
+              ? chunk.content.split('')
+              : chunk.content.split(/(\s+)/);
+
+            for (const textChunk of chunks) {
+              if (textChunk) { // skip empty strings
+                // Text Part: 0:string\n
+                const textPart = `0:${JSON.stringify(textChunk)}\n`;
+                reply.raw.write(textPart);
+
+                // Configurable delay
+                if (STREAMING_CONFIG.delayPerToken > 0) {
+                  await new Promise(resolve => setTimeout(resolve, STREAMING_CONFIG.delayPerToken));
+                }
+              }
             }
           }
           break;
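
The word-level path relies on the capture group in /(\s+)/: the whitespace runs come back as their own chunks, so the client can concatenate everything it receives and recover the original text byte for byte, while the `if (textChunk)` guard drops the empty strings that split can emit at the edges. A short sketch of what the two modes produce (values shown are what these exact split calls return, not output from the commit):

// How the two split modes behave (mirrors the split calls in chat.ts above).
const content = 'Hello  world\n';

// Character mode: one chunk per character, including spaces and newlines.
const byChar = content.split('');
// ['H', 'e', 'l', 'l', 'o', ' ', ' ', 'w', 'o', 'r', 'l', 'd', '\n']

// Word mode: the capture group keeps whitespace runs as separate chunks,
// so joining the chunks reproduces the original text exactly.
const byWord = content.split(/(\s+)/).filter(Boolean); // filter plays the role of `if (textChunk)`
// ['Hello', '  ', 'world', '\n']

console.log(byWord.join('') === content); // true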

examples/serve-astack/backend/src/services/llm.ts

Lines changed: 2 additions & 0 deletions
@@ -16,6 +16,8 @@ export function createLLMClient(): ModelProvider {
     apiKey,
     model: 'deepseek-chat',
     temperature: 0.7,
+    // Parameter to optimize streaming performance
+    maxTokens: parseInt(process.env.LLM_MAX_TOKENS || '2048'),
   });
 
   return model as ModelProvider;
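
Note that parseInt(process.env.LLM_MAX_TOKENS || '2048') only falls back when the variable is unset or empty; a non-numeric value such as LLM_MAX_TOKENS=abc yields NaN. A hypothetical stricter variant of the same parsing, not part of this commit, would also cover that case:

// Hypothetical helper, not part of the commit: parse an integer env var with a safe fallback.
function intFromEnv(name: string, fallback: number): number {
  const parsed = parseInt(process.env[name] ?? '', 10);
  // parseInt returns NaN for unset or non-numeric values; use the fallback in that case.
  return Number.isNaN(parsed) ? fallback : parsed;
}

const maxTokens = intFromEnv('LLM_MAX_TOKENS', 2048);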

examples/serve-astack/frontend/package.json

Lines changed: 5 additions & 0 deletions
@@ -13,12 +13,17 @@
     "@ai-sdk/react": "^1.0.18",
     "@radix-ui/react-scroll-area": "^1.0.5",
     "@radix-ui/react-slot": "^1.0.2",
+    "@tailwindcss/typography": "^0.5.19",
     "class-variance-authority": "^0.7.0",
     "clsx": "^2.1.0",
     "lucide-react": "^0.263.1",
     "next": "15.5.4",
     "react": "19.1.0",
     "react-dom": "19.1.0",
+    "react-markdown": "^10.1.0",
+    "rehype-highlight": "^7.0.2",
+    "rehype-raw": "^7.0.0",
+    "remark-gfm": "^4.0.1",
     "shiki": "^3.13.0",
     "streamdown": "^1.3.0"
   },
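
The new frontend dependencies point at Markdown rendering of assistant messages with GFM support and highlight.js-based code highlighting. A hedged sketch of how they could be wired together; the component name and class names are assumptions, not taken from this commit:

// Sketch only: wires react-markdown, remark-gfm, rehype-raw and rehype-highlight together.
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import rehypeRaw from 'rehype-raw';
import rehypeHighlight from 'rehype-highlight';

export function AssistantMarkdown({ content }: { content: string }) {
  return (
    // `prose` classes come from @tailwindcss/typography; .hljs colors come from highlight.css.
    <div className="prose prose-invert max-w-none">
      <ReactMarkdown
        remarkPlugins={[remarkGfm]}
        rehypePlugins={[rehypeRaw, rehypeHighlight]}
      >
        {content}
      </ReactMarkdown>
    </div>
  );
}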
examples/serve-astack/frontend/src/app/highlight.css

Lines changed: 92 additions & 0 deletions
@@ -0,0 +1,92 @@
+/* GitHub Dark theme for highlight.js */
+.hljs {
+  color: #e6edf3;
+  background: #0d1117;
+}
+
+.hljs-doctag,
+.hljs-keyword,
+.hljs-meta .hljs-keyword,
+.hljs-template-tag,
+.hljs-template-variable,
+.hljs-type,
+.hljs-variable.language_ {
+  color: #ff7b72;
+}
+
+.hljs-title,
+.hljs-title.class_,
+.hljs-title.class_.inherited__,
+.hljs-title.function_ {
+  color: #d2a8ff;
+}
+
+.hljs-attr,
+.hljs-attribute,
+.hljs-literal,
+.hljs-meta,
+.hljs-number,
+.hljs-operator,
+.hljs-variable,
+.hljs-selector-attr,
+.hljs-selector-class,
+.hljs-selector-id {
+  color: #79c0ff;
+}
+
+.hljs-regexp,
+.hljs-string,
+.hljs-meta .hljs-string {
+  color: #a5d6ff;
+}
+
+.hljs-built_in,
+.hljs-symbol {
+  color: #ffa657;
+}
+
+.hljs-comment,
+.hljs-code,
+.hljs-formula {
+  color: #8b949e;
+}
+
+.hljs-name,
+.hljs-quote,
+.hljs-selector-tag,
+.hljs-selector-pseudo {
+  color: #7ee787;
+}
+
+.hljs-subst {
+  color: #e6edf3;
+}
+
+.hljs-section {
+  color: #1f6feb;
+  font-weight: bold;
+}
+
+.hljs-bullet {
+  color: #f2cc60;
+}
+
+.hljs-emphasis {
+  color: #e6edf3;
+  font-style: italic;
+}
+
+.hljs-strong {
+  color: #e6edf3;
+  font-weight: bold;
+}
+
+.hljs-addition {
+  color: #aff5b4;
+  background-color: #033a16;
+}
+
+.hljs-deletion {
+  color: #ffdcd7;
+  background-color: #67060c;
+}

examples/serve-astack/frontend/src/app/layout.tsx

Lines changed: 1 addition & 0 deletions
@@ -1,6 +1,7 @@
 import type { Metadata } from 'next';
 import { Geist, Geist_Mono } from 'next/font/google';
 import './globals.css';
+import './highlight.css';
 import React from 'react';
 
 const geistSans = Geist({
