feat: 后台添加大模型配置管理页面
- 新增 /admin/llm_config 页面 - 支持配置API地址、Key、模型名称、参数 - 支持测试连接和恢复默认配置 - 配置保存到数据库,翻译服务动态读取 - 所有后台页面侧边栏添加入口
This commit is contained in:
108
admin.py
108
admin.py
@@ -661,4 +661,110 @@ def api_user_add_package(user_id):
|
||||
'id': user_package.id,
|
||||
'name': user_package.package_name,
|
||||
'remaining': user_package.remaining_count
|
||||
}})
|
||||
}})
|
||||
|
||||
|
||||
# ==================== LLM model configuration ====================
@admin_bp.route('/llm_config')
@admin_required
def llm_config():
    """Render the LLM configuration admin page.

    The effective configuration is the DB override (DynamicConfig) when
    present, falling back to the static defaults in config.LLM_CONFIG.
    Delegates to get_llm_config() so this page and the translation
    service always read the configuration through the same code path
    instead of duplicating the merge logic here.
    """
    return render_template('admin/llm_config.html', config=get_llm_config())
|
||||
|
||||
|
||||
@admin_bp.route('/llm_config/save', methods=['POST'])
@admin_required
def save_llm_config():
    """Persist the LLM configuration to the database.

    Expects a JSON body with api_base / api_key / model (strings) and
    max_tokens / chunk_size / timeout (integers).  Returns HTTP 400 when
    the body is missing or not valid JSON; otherwise stores each field
    via DynamicConfig under the 'llm' category and records an audit log
    entry, then returns {'success': True}.
    """
    # silent=True yields None instead of raising on a bad body, so we can
    # answer with a clean 400 rather than an unhandled error.
    data = request.get_json(silent=True)
    if not data:
        return jsonify({'success': False, 'error': 'Invalid or missing JSON body'}), 400

    user_id = session.get('user_id')

    # (config key, payload key, value type) — drives the save loop below
    # instead of six copy-pasted DynamicConfig.set calls.
    fields = [
        ('llm_api_base', 'api_base', None),
        ('llm_api_key', 'api_key', None),
        ('llm_model', 'model', None),
        ('llm_max_tokens', 'max_tokens', 'int'),
        ('llm_chunk_size', 'chunk_size', 'int'),
        ('llm_timeout', 'timeout', 'int'),
    ]
    for config_key, payload_key, value_type in fields:
        if value_type:
            DynamicConfig.set(config_key, data.get(payload_key), category='llm',
                              value_type=value_type, user_id=user_id)
        else:
            DynamicConfig.set(config_key, data.get(payload_key), category='llm',
                              user_id=user_id)

    # Audit trail for the configuration change.
    # NOTE(review): username is hard-coded to 'admin'; if the session stores
    # the real username, consider reading it from there — confirm.
    log = OperationLog(
        user_id=user_id,
        username='admin',
        action='update_llm_config',
        detail='更新大模型配置'
    )
    db.session.add(log)
    db.session.commit()

    return jsonify({'success': True})
|
||||
|
||||
|
||||
@admin_bp.route('/llm_config/test', methods=['POST'])
@admin_required
def test_llm_connection():
    """Probe an LLM endpoint with a minimal chat request.

    Uses the api_base / api_key / model from the JSON body — not the
    saved configuration — so the admin can verify new values before
    saving them.  Any failure (import error, network, auth, bad model)
    is reported back as {'success': False, 'error': ...} instead of a
    server error.
    """
    payload = request.json

    try:
        from openai import OpenAI

        # Throwaway client built from the submitted (unsaved) values.
        client = OpenAI(
            api_key=payload.get('api_key', 'sk-test'),
            base_url=payload.get('api_base'),
        )

        # One-shot completion; 10 tokens / 10s keep the probe cheap.
        completion = client.chat.completions.create(
            model=payload.get('model'),
            messages=[{"role": "user", "content": "Hello"}],
            max_tokens=10,
            timeout=10,
        )

        snippet = completion.choices[0].message.content[:50] if completion.choices else 'OK'
        return jsonify({
            'success': True,
            'model': payload.get('model'),
            'response': snippet,
        })

    except Exception as e:
        # Surface the failure reason to the admin UI.
        return jsonify({'success': False, 'error': str(e)})
|
||||
|
||||
|
||||
@admin_bp.route('/llm_config/reset', methods=['POST'])
@admin_required
def reset_llm_config():
    """Restore the default LLM configuration.

    Deletes every DB override in the 'llm' category; readers such as
    get_llm_config() then fall back to the static defaults in
    config.LLM_CONFIG automatically.  (The previous version imported
    LLM_CONFIG here without using it — removed.)
    """
    # Bulk delete in one query; no need to load the rows first.
    DynamicConfig.query.filter_by(category='llm').delete()
    db.session.commit()

    return jsonify({'success': True})
|
||||
|
||||
|
||||
# ==================== Current LLM configuration (for use by other modules) ====================
def get_llm_config():
    """Return the effective LLM configuration as a dict.

    Each value is the DB override (DynamicConfig key 'llm_<name>') when
    present, otherwise the static default from config.LLM_CONFIG.
    Keys: api_base, api_key, model, max_tokens, chunk_size, timeout.
    """
    from config import LLM_CONFIG

    keys = ('api_base', 'api_key', 'model', 'max_tokens', 'chunk_size', 'timeout')
    return {
        key: DynamicConfig.get(f'llm_{key}', LLM_CONFIG.get(key))
        for key in keys
    }
|
||||
Reference in New Issue
Block a user