feat: 备用大模型接口管理功能
- 新增 BackupLLMConfig 数据模型存储备用大模型配置 - 支持手动新增、编辑、删除备用大模型接口 - 支持测试连接功能 - 大模型配置页面静态表格改为动态管理的备用接口链接 - 默认初始化5个常用大模型服务商配置
This commit is contained in:
206
admin.py
206
admin.py
@@ -10,7 +10,7 @@ import json
|
||||
|
||||
from models import (db, User, Translation, TranslationCache, GuestTranslation,
|
||||
SystemConfig, OperationLog, DataPackage, UserPackage, DynamicConfig,
|
||||
UserTypeConfig, MembershipPlanConfig)
|
||||
UserTypeConfig, MembershipPlanConfig, BackupLLMConfig)
|
||||
from config import USER_LIMITS, MEMBERSHIP_PLANS
|
||||
|
||||
admin_bp = Blueprint('admin', __name__, url_prefix='/admin')
|
||||
@@ -1217,4 +1217,206 @@ def get_membership_plan(plan_key):
|
||||
def get_all_membership_plans():
    """Return all active membership plan configs as dicts, ordered by sort_order.

    Fix: the original body contained a duplicated, unreachable second
    ``return`` statement (dead code); it is removed here.
    """
    plans = (MembershipPlanConfig.query
             .filter_by(is_active=True)
             .order_by(MembershipPlanConfig.sort_order)
             .all())
    return [p.to_dict() for p in plans]
|
||||
|
||||
|
||||
# ==================== 备用大模型接口管理 ====================
|
||||
@admin_bp.route('/backup-llm')
@admin_required
def backup_llm_list():
    """Render the backup LLM provider list page, seeding defaults when empty."""
    ordered = BackupLLMConfig.query.order_by(BackupLLMConfig.sort_order)
    configs = ordered.all()

    if not configs:
        # First visit: populate the table with the built-in provider presets,
        # then re-run the query to pick up the freshly inserted rows.
        init_default_backup_llm()
        configs = ordered.all()

    return render_template('admin/backup_llm.html', configs=configs)
|
||||
|
||||
|
||||
@admin_bp.route('/backup-llm/add', methods=['GET', 'POST'])
@admin_required
def add_backup_llm():
    """Create a backup LLM provider entry.

    GET renders the empty form; POST accepts either a JSON payload or a
    classic form submission, persists the new config, writes an audit log
    entry, and responds with JSON or a redirect to match the request type.

    Fixes over the previous version:
    - ``is_active`` now genuinely defaults to True when the field is absent
      (the old expression evaluated ``None == 'true'`` -> False in that case)
      and also accepts common form truthy strings such as ``'on'``/``'1'``.
    - A non-numeric or empty ``sort_order`` form value no longer raises
      ValueError (HTTP 500); it falls back to 0.
    """
    if request.method != 'POST':
        return render_template('admin/backup_llm_form.html', config=None)

    data = request.json if request.is_json else request.form

    # Accept JSON booleans as-is; coerce form strings; default to enabled
    # when the field is absent entirely.
    raw_active = data.get('is_active')
    if isinstance(raw_active, bool):
        is_active = raw_active
    elif raw_active is None:
        is_active = True
    else:
        is_active = str(raw_active).strip().lower() in ('true', '1', 'on', 'yes')

    # Guard against empty/garbage sort_order from form input.
    try:
        sort_order = int(data.get('sort_order', 0))
    except (TypeError, ValueError):
        sort_order = 0

    config = BackupLLMConfig(
        provider_name=data.get('provider_name'),
        api_base=data.get('api_base'),
        api_key=data.get('api_key'),
        model=data.get('model'),
        is_active=is_active,
        sort_order=sort_order,
        description=data.get('description'),
    )
    db.session.add(config)
    db.session.commit()

    # Audit trail for the admin action.
    log = OperationLog(
        user_id=session.get('user_id'),
        username='admin',
        action='add_backup_llm',
        target=config.provider_name,
        detail=json.dumps(config.to_dict(), ensure_ascii=False)
    )
    db.session.add(log)
    db.session.commit()

    if request.is_json:
        return jsonify({'success': True, 'config': config.to_dict()})
    flash('备用大模型接口已添加', 'success')
    return redirect(url_for('admin.backup_llm_list'))
|
||||
|
||||
|
||||
@admin_bp.route('/backup-llm/<int:config_id>/edit', methods=['GET', 'POST'])
@admin_required
def edit_backup_llm(config_id):
    """Edit an existing backup LLM provider entry.

    GET renders the form pre-filled with the current config; POST accepts
    JSON or form data, applies only the provided fields, and writes an
    audit log entry. Returns 404 when ``config_id`` does not exist.

    Fixes over the previous version:
    - ``is_active``: a form submission without the field no longer silently
      deactivates the config (the old expression reduced to
      ``None == 'true'`` -> False); the current value is kept instead.
      Form truthy strings ``'on'``/``'1'`` are now also accepted.
    - A non-numeric ``sort_order`` no longer raises ValueError (HTTP 500);
      the current value is kept.
    """
    config = BackupLLMConfig.query.get_or_404(config_id)

    if request.method != 'POST':
        return render_template('admin/backup_llm_form.html', config=config)

    data = request.json if request.is_json else request.form

    config.provider_name = data.get('provider_name', config.provider_name)
    config.api_base = data.get('api_base', config.api_base)
    config.api_key = data.get('api_key', config.api_key)
    config.model = data.get('model', config.model)

    # Only touch is_active when the client actually sent the field.
    raw_active = data.get('is_active')
    if isinstance(raw_active, bool):
        config.is_active = raw_active
    elif raw_active is not None:
        config.is_active = str(raw_active).strip().lower() in ('true', '1', 'on', 'yes')

    try:
        config.sort_order = int(data.get('sort_order', config.sort_order))
    except (TypeError, ValueError):
        pass  # keep the existing sort_order on malformed input

    config.description = data.get('description', config.description)
    db.session.commit()

    # Audit trail for the admin action.
    log = OperationLog(
        user_id=session.get('user_id'),
        username='admin',
        action='edit_backup_llm',
        target=config.provider_name,
        detail=json.dumps(config.to_dict(), ensure_ascii=False)
    )
    db.session.add(log)
    db.session.commit()

    if request.is_json:
        return jsonify({'success': True, 'config': config.to_dict()})
    flash('备用大模型接口已更新', 'success')
    return redirect(url_for('admin.backup_llm_list'))
|
||||
|
||||
|
||||
@admin_bp.route('/backup-llm/<int:config_id>/delete', methods=['POST'])
@admin_required
def delete_backup_llm(config_id):
    """Delete a backup LLM provider config and record the action.

    Returns 404 when ``config_id`` does not exist; otherwise always
    responds with ``{'success': True}``.
    """
    config = BackupLLMConfig.query.get_or_404(config_id)

    # Capture the name before the row is gone so the audit entry has a target.
    provider_name = config.provider_name
    db.session.delete(config)
    db.session.commit()

    audit = OperationLog(
        user_id=session.get('user_id'),
        username='admin',
        action='delete_backup_llm',
        target=provider_name
    )
    db.session.add(audit)
    db.session.commit()

    return jsonify({'success': True})
|
||||
|
||||
|
||||
@admin_bp.route('/backup-llm/<int:config_id>/toggle', methods=['POST'])
@admin_required
def toggle_backup_llm(config_id):
    """Flip the enabled flag of a backup LLM provider config.

    Returns 404 for an unknown ``config_id``; otherwise responds with the
    new state so the UI can update without reloading.
    """
    cfg = BackupLLMConfig.query.get_or_404(config_id)
    cfg.is_active = not cfg.is_active
    db.session.commit()
    return jsonify({'success': True, 'is_active': cfg.is_active})
|
||||
|
||||
|
||||
@admin_bp.route('/backup-llm/<int:config_id>/test', methods=['POST'])
@admin_required
def test_backup_llm(config_id):
    """Probe a backup LLM endpoint with a one-shot chat completion.

    Sends a minimal "Hello" request against the configured base URL and
    model, and reports success plus a short response preview, or the error
    string on failure. Returns 404 for an unknown ``config_id``.

    Fix over the previous version: ``message.content`` may legitimately be
    ``None`` (e.g. tool-call-only responses); slicing ``None`` raised and
    misreported a successful call as a failure.
    """
    config = BackupLLMConfig.query.get_or_404(config_id)

    try:
        # Local import: the SDK is only needed for this probe endpoint.
        from openai import OpenAI

        client = OpenAI(
            # Some endpoints (e.g. a local LM Studio server) accept any key.
            api_key=config.api_key or 'sk-test',
            base_url=config.api_base,
        )

        model = config.model or 'default'

        # Minimal, cheap request — just prove the endpoint answers.
        response = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": "Hello"}],
            max_tokens=10,
            timeout=10,
        )

        if response.choices:
            preview = (response.choices[0].message.content or '')[:50]
        else:
            preview = 'OK'

        return jsonify({
            'success': True,
            'provider': config.provider_name,
            'model': model,
            'response': preview
        })

    except Exception as e:
        # Surface the failure reason to the admin UI instead of a 500.
        return jsonify({'success': False, 'error': str(e)})
|
||||
|
||||
|
||||
@admin_bp.route('/backup-llm/init', methods=['POST'])
@admin_required
def init_backup_llm():
    """Manually (re)seed the default backup LLM provider presets."""
    init_default_backup_llm()
    return jsonify({'success': True})
|
||||
|
||||
|
||||
def init_default_backup_llm():
    """Seed the BackupLLMConfig table with well-known provider presets.

    Rows are matched by ``provider_name``; existing entries are left
    untouched, so calling this repeatedly is safe (idempotent inserts).
    """
    presets = (
        # (provider_name, api_base, api_key, model, sort_order)
        ('本地LM Studio', 'http://localhost:1234/v1', None, None, 0),
        ('OpenAI', 'https://api.openai.com/v1', None, 'gpt-4', 1),
        ('DeepSeek', 'https://api.deepseek.com/v1', None, 'deepseek-chat', 2),
        ('阿里百炼', 'https://dashscope.aliyuncs.com/compatible-mode/v1', None, 'qwen-turbo', 3),
        ('SiliconFlow', 'https://api.siliconflow.cn/v1', None, 'Qwen/Qwen2.5-72B-Instruct', 4),
    )

    for provider_name, api_base, api_key, model, sort_order in presets:
        already_present = BackupLLMConfig.query.filter_by(
            provider_name=provider_name).first()
        if already_present:
            continue
        db.session.add(BackupLLMConfig(
            provider_name=provider_name,
            api_base=api_base,
            api_key=api_key,
            model=model,
            is_active=True,
            sort_order=sort_order,
            description=f'{provider_name} 默认接口',
        ))

    db.session.commit()
|
||||
|
||||
|
||||
def get_backup_llm_configs():
    """Return all enabled backup LLM configs as dicts, ordered by sort_order.

    Intended for consumption by other modules (e.g. the translation
    fallback chain), hence the plain-dict return shape.
    """
    active = (BackupLLMConfig.query
              .filter_by(is_active=True)
              .order_by(BackupLLMConfig.sort_order))
    return [cfg.to_dict() for cfg in active.all()]
|
||||
Reference in New Issue
Block a user