release: v1.0.10 — 腾讯云 Coding Plan 套餐支持

This commit is contained in:
10000ge10000
2026-03-08 10:57:39 +08:00
parent 67f04e6bd0
commit f55442b0d7
6 changed files with 303 additions and 88 deletions

View File

@@ -144,7 +144,31 @@ auth_set_apikey() {
d.profiles[process.env._AP_PROFILE]={
type:'api_key',
provider:process.env._AP_PROVIDER,
token:process.env._AP_KEY
key:process.env._AP_KEY
};
fs.writeFileSync(f,JSON.stringify(d,null,2));
" 2>/dev/null
chown openclaw:openclaw "$auth_file" 2>/dev/null || true
}
# ── GitHub Copilot Token 写入 auth-profiles.json (type:token) ──
# GitHub Copilot 使用 token 类型而非 api_key,OpenClaw 会自动兑换 Copilot session token
# 用法: auth_set_copilot_token <github_token>
auth_set_copilot_token() {
local github_token="$1"
local auth_dir="${OC_STATE_DIR}/agents/main/agent"
local auth_file="${auth_dir}/auth-profiles.json"
mkdir -p "$auth_dir"
chown -R openclaw:openclaw "${OC_STATE_DIR}/agents" 2>/dev/null || true
_AP_TOKEN="$github_token" "$NODE_BIN" -e "
const fs=require('fs'),f='${auth_file}';
let d={version:1,profiles:{},usageStats:{}};
try{d=JSON.parse(fs.readFileSync(f,'utf8'));}catch(e){}
if(!d.profiles)d.profiles={};
d.profiles['github-copilot:github']={
type:'token',
provider:'github-copilot',
token:process.env._AP_TOKEN
};
fs.writeFileSync(f,JSON.stringify(d,null,2));
" 2>/dev/null
@@ -246,6 +270,45 @@ register_codingplan_provider() {
chown openclaw:openclaw "$CONFIG_FILE" 2>/dev/null || true
}
# ── 注册腾讯云 Coding Plan 提供商 (多模型批量注册) ──
# 用法: register_lkeap_codingplan_provider <api_key>
# 按腾讯云官方文档 (https://cloud.tencent.com/document/product/1772/128949) 注册
# provider name: lkeap, Base URL: https://api.lkeap.cloud.tencent.com/coding/v3
register_lkeap_codingplan_provider() {
local api_key="$1"
_RCP_KEY="$api_key" "$NODE_BIN" -e "
const fs=require('fs');
let d={};
try{d=JSON.parse(fs.readFileSync('${CONFIG_FILE}','utf8'));}catch(e){}
if(!d.models)d.models={};
if(!d.models.providers)d.models.providers={};
d.models.mode='merge';
d.models.providers['lkeap']={
baseUrl:'https://api.lkeap.cloud.tencent.com/coding/v3',
apiKey:process.env._RCP_KEY,
api:'openai-completions',
models:[
{id:'tc-code-latest',name:'Auto (智能匹配最优模型)',reasoning:false,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:128000,maxTokens:8192},
{id:'hunyuan-2.0-instruct',name:'Tencent HY 2.0 Instruct',reasoning:false,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:128000,maxTokens:16000},
{id:'hunyuan-2.0-thinking',name:'Tencent HY 2.0 Think',reasoning:true,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:128000,maxTokens:64000},
{id:'hunyuan-t1',name:'Hunyuan-T1',reasoning:true,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:32000,maxTokens:64000},
{id:'hunyuan-turbos',name:'Hunyuan-TurboS',reasoning:false,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:32000,maxTokens:16000},
{id:'minimax-m2.5',name:'MiniMax-M2.5',reasoning:false,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:204800,maxTokens:131072},
{id:'kimi-k2.5',name:'Kimi-K2.5',reasoning:false,input:['text','image'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:262144,maxTokens:32768},
{id:'glm-5',name:'GLM-5',reasoning:false,input:['text'],cost:{input:0,output:0,cacheRead:0,cacheWrite:0},contextWindow:202752,maxTokens:8192}
]
};
if(!d.agents)d.agents={};
if(!d.agents.defaults)d.agents.defaults={};
if(!d.agents.defaults.models)d.agents.defaults.models={};
['tc-code-latest','hunyuan-2.0-instruct','hunyuan-2.0-thinking','hunyuan-t1','hunyuan-turbos','minimax-m2.5','kimi-k2.5','glm-5'].forEach(m=>{
d.agents.defaults.models['lkeap/'+m]={};
});
fs.writeFileSync('${CONFIG_FILE}',JSON.stringify(d,null,2));
" 2>/dev/null
chown openclaw:openclaw "$CONFIG_FILE" 2>/dev/null || true
}
# ── 辅助函数 ──
# 清理输入: 去除 ANSI 转义序列、不可见字符,只保留 ASCII 可打印字符
@@ -402,7 +465,8 @@ configure_model() {
echo -e " ${CYAN}10)${NC} Groq (Llama 4, Llama 3.3)"
echo -e " ${CYAN}11)${NC} 硅基流动 SiliconFlow"
echo -e " ${CYAN}12)${NC} Ollama (本地模型,无需 API Key)"
echo -e " ${CYAN}13)${NC} 自定义 OpenAI 兼容 API"
echo -e " ${CYAN}13)${NC} 腾讯云 Coding Plan (HY T1/TurboS/GLM-5/Kimi)"
echo -e " ${CYAN}14)${NC} 自定义 OpenAI 兼容 API"
echo -e " ${CYAN}0)${NC} 返回"
echo ""
prompt_with_default "请选择" "1" choice
@@ -465,15 +529,19 @@ configure_model() {
echo -e " ${CYAN}可用模型:${NC}"
echo -e " ${CYAN}a)${NC} claude-sonnet-4-20250514 — Claude Sonnet 4 (推荐)"
echo -e " ${CYAN}b)${NC} claude-opus-4-20250514 — Claude Opus 4 顶级推理"
echo -e " ${CYAN}c)${NC} claude-haiku-4-20250514 — Claude Haiku 4 轻量快速"
echo -e " ${CYAN}d)${NC} 手动输入模型名"
echo -e " ${CYAN}c)${NC} claude-haiku-4-5 — Claude Haiku 4.5 轻量快速"
echo -e " ${CYAN}d)${NC} claude-sonnet-4.5 — Claude Sonnet 4.5"
echo -e " ${CYAN}e)${NC} claude-sonnet-4.6 — Claude Sonnet 4.6"
echo -e " ${CYAN}f)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择模型" "a" model_choice
case "$model_choice" in
a) model_name="claude-sonnet-4-20250514" ;;
b) model_name="claude-opus-4-20250514" ;;
c) model_name="claude-haiku-4-20250514" ;;
d) prompt_with_default "请输入模型名称" "claude-sonnet-4-20250514" model_name ;;
c) model_name="claude-haiku-4-5" ;;
d) model_name="claude-sonnet-4-5" ;;
e) model_name="claude-sonnet-4-6" ;;
f) prompt_with_default "请输入模型名称" "claude-sonnet-4-20250514" model_name ;;
*) model_name="claude-sonnet-4-20250514" ;;
esac
register_and_set_model "anthropic/${model_name}"
@@ -490,19 +558,21 @@ configure_model() {
auth_set_apikey google "$api_key"
echo ""
echo -e " ${CYAN}可用模型:${NC}"
echo -e " ${CYAN}a)${NC} gemini-2.5-pro — 旗舰推理 (推荐)"
echo -e " ${CYAN}b)${NC} gemini-2.5-flash — 快速均衡"
echo -e " ${CYAN}c)${NC} gemini-2.5-flash-lite — 极速低成本"
echo -e " ${CYAN}d)${NC} gemini-3-flash — Gemini 3 预览"
echo -e " ${CYAN}e)${NC} 手动输入模型名"
echo -e " ${CYAN}a)${NC} gemini-2.5-pro — 旗舰推理 (推荐)"
echo -e " ${CYAN}b)${NC} gemini-2.5-flash — 快速均衡"
echo -e " ${CYAN}c)${NC} gemini-2.5-flash-lite — 极速低成本"
echo -e " ${CYAN}d)${NC} gemini-3-flash-preview — Gemini 3 Flash 预览"
echo -e " ${CYAN}e)${NC} gemini-3-pro-preview — Gemini 3 Pro 预览"
echo -e " ${CYAN}f)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择模型" "a" model_choice
case "$model_choice" in
a) model_name="gemini-2.5-pro" ;;
b) model_name="gemini-2.5-flash" ;;
c) model_name="gemini-2.5-flash-lite" ;;
d) model_name="gemini-3-flash" ;;
e) prompt_with_default "请输入模型名称" "gemini-2.5-pro" model_name ;;
d) model_name="gemini-3-flash-preview" ;;
e) model_name="gemini-3-pro-preview" ;;
f) prompt_with_default "请输入模型名称" "gemini-2.5-pro" model_name ;;
*) model_name="gemini-2.5-pro" ;;
esac
register_and_set_model "google/${model_name}"
@@ -573,52 +643,56 @@ configure_model() {
echo ""
echo -e " ${BOLD}GitHub Copilot 配置${NC}"
echo -e " ${YELLOW}需要有效的 GitHub Copilot 订阅 (Free/Pro/Business 均可)${NC}"
echo -e " ${YELLOW}使用 OAuth 自动认证,无需手动输入 Token${NC}"
echo ""
echo -e " ${CYAN}配置方式:${NC}"
echo -e " ${CYAN}a)${NC} 通过 OAuth 授权登录 (推荐)"
echo -e " ${CYAN}b)${NC} 手动输入 GitHub Token (ghp_...)"
echo -e " ${CYAN}启动 GitHub Copilot OAuth 登录 (Device Flow)...${NC}"
echo -e " ${DIM}请在浏览器中打开显示的 URL输入授权码完成登录${NC}"
echo ""
prompt_with_default "请选择" "a" copilot_mode
case "$copilot_mode" in
a)
echo ""
echo -e " ${CYAN}启用 Copilot Proxy 插件...${NC}"
enable_auth_plugins
echo -e " ${CYAN}启动 GitHub Copilot OAuth 授权...${NC}"
oc_cmd models auth login --provider copilot-proxy --set-default || echo -e " ${YELLOW}OAuth 授权已退出${NC}"
echo ""
ask_restart
;;
b|*)
echo ""
echo -e " ${YELLOW}获取 Token: https://github.com/settings/tokens (需 copilot 权限)${NC}"
echo ""
prompt_with_default "请输入 GitHub Token (ghp_...)" "" api_key
if [ -n "$api_key" ]; then
auth_set_apikey github-copilot "$api_key" "github-copilot:github"
echo ""
echo -e " ${CYAN}可用模型:${NC}"
echo -e " ${CYAN}a)${NC} github-copilot/claude-sonnet-4 — Claude Sonnet 4 (推荐)"
echo -e " ${CYAN}b)${NC} github-copilot/gpt-5.2 — GPT-5.2"
echo -e " ${CYAN}c)${NC} github-copilot/gemini-2.5-pro — Gemini 2.5 Pro"
echo -e " ${CYAN}d)${NC} github-copilot/o3 — o3"
echo -e " ${CYAN}e)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择模型" "a" model_choice
case "$model_choice" in
a) model_name="github-copilot/claude-sonnet-4" ;;
b) model_name="github-copilot/gpt-5.2" ;;
c) model_name="github-copilot/gemini-2.5-pro" ;;
d) model_name="github-copilot/o3" ;;
e) prompt_with_default "请输入模型名称" "github-copilot/claude-sonnet-4" model_name ;;
*) model_name="github-copilot/claude-sonnet-4" ;;
esac
register_and_set_model "$model_name"
echo -e " ${GREEN}✅ GitHub Copilot 已配置,活跃模型: ${model_name}${NC}"
fi
;;
esac
if oc_cmd models auth login-github-copilot --yes; then
echo ""
echo -e " ${GREEN}✅ GitHub Copilot OAuth 认证成功${NC}"
echo ""
echo -e " ${CYAN}选择默认模型:${NC}"
echo ""
echo -e " ${CYAN}── GPT 系列 ──${NC}"
echo -e " ${CYAN}a)${NC} github-copilot/gpt-4.1 — GPT-4.1 ${GREEN}(推荐)${NC}"
echo -e " ${CYAN}b)${NC} github-copilot/gpt-4o — GPT-4o"
echo -e " ${CYAN}c)${NC} github-copilot/gpt-5 — GPT-5"
echo -e " ${CYAN}d)${NC} github-copilot/gpt-5-mini — GPT-5 mini"
echo -e " ${CYAN}e)${NC} github-copilot/gpt-5.1 — GPT-5.1"
echo -e " ${CYAN}f)${NC} github-copilot/gpt-5.2 — GPT-5.2"
echo -e " ${CYAN}g)${NC} github-copilot/gpt-5.2-codex — GPT-5.2 Codex"
echo ""
echo -e " ${CYAN}── Claude 系列 ──${NC}"
echo -e " ${CYAN}h)${NC} github-copilot/claude-sonnet-4 — Claude Sonnet 4"
echo -e " ${CYAN}i)${NC} github-copilot/claude-sonnet-4.5 — Claude Sonnet 4.5"
echo -e " ${CYAN}j)${NC} github-copilot/claude-sonnet-4.6 — Claude Sonnet 4.6"
echo ""
echo -e " ${CYAN}── Gemini 系列 ──${NC}"
echo -e " ${CYAN}k)${NC} github-copilot/gemini-2.5-pro — Gemini 2.5 Pro"
echo ""
echo -e " ${CYAN}m)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择模型" "a" model_choice
case "$model_choice" in
a) model_name="github-copilot/gpt-4.1" ;;
b) model_name="github-copilot/gpt-4o" ;;
c) model_name="github-copilot/gpt-5" ;;
d) model_name="github-copilot/gpt-5-mini" ;;
e) model_name="github-copilot/gpt-5.1" ;;
f) model_name="github-copilot/gpt-5.2" ;;
g) model_name="github-copilot/gpt-5.2-codex" ;;
h) model_name="github-copilot/claude-sonnet-4" ;;
i) model_name="github-copilot/claude-sonnet-4.5" ;;
j) model_name="github-copilot/claude-sonnet-4.6" ;;
k) model_name="github-copilot/gemini-2.5-pro" ;;
m) prompt_with_default "请输入模型名称" "github-copilot/gpt-4.1" model_name ;;
*) model_name="github-copilot/gpt-4.1" ;;
esac
register_and_set_model "$model_name"
echo -e " ${GREEN}✅ 活跃模型已设置: ${model_name}${NC}"
else
echo -e " ${YELLOW}OAuth 授权已退出或失败${NC}"
fi
;;
8)
echo ""
@@ -641,6 +715,7 @@ configure_model() {
oc_cmd models auth login --provider qwen-portal --set-default || echo -e " ${YELLOW}OAuth 授权已退出${NC}"
echo ""
ask_restart
return
;;
b)
echo ""
@@ -755,19 +830,26 @@ configure_model() {
if [ -n "$api_key" ]; then
echo ""
echo -e " ${CYAN}可用模型:${NC}"
echo -e " ${CYAN}a)${NC} grok-3 — Grok 3 旗舰"
echo -e " ${CYAN}b)${NC} grok-3-mini — Grok 3 Mini"
echo -e " ${CYAN}c)${NC} 手动输入模型名"
echo -e " ${CYAN}a)${NC} grok-4 — Grok 4 旗舰 (推荐)"
echo -e " ${CYAN}b)${NC} grok-4-fast — Grok 4 Fast"
echo -e " ${CYAN}c)${NC} grok-3 — Grok 3"
echo -e " ${CYAN}d)${NC} grok-3-fast — Grok 3 Fast"
echo -e " ${CYAN}e)${NC} grok-3-mini — Grok 3 Mini"
echo -e " ${CYAN}f)${NC} grok-3-mini-fast — Grok 3 Mini Fast"
echo -e " ${CYAN}g)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择模型" "a" model_choice
case "$model_choice" in
a) model_name="grok-3" ;;
b) model_name="grok-3-mini" ;;
c) prompt_with_default "请输入模型名称" "grok-3" model_name ;;
*) model_name="grok-3" ;;
a) model_name="grok-4" ;;
b) model_name="grok-4-fast" ;;
c) model_name="grok-3" ;;
d) model_name="grok-3-fast" ;;
e) model_name="grok-3-mini" ;;
f) model_name="grok-3-mini-fast" ;;
g) prompt_with_default "请输入模型名称" "grok-4" model_name ;;
*) model_name="grok-4" ;;
esac
auth_set_apikey xai "$api_key"
register_custom_provider xai "https://api.x.ai/v1" "$api_key" "$model_name" "$model_name"
register_and_set_model "xai/${model_name}"
echo -e " ${GREEN}✅ xAI Grok 已配置,活跃模型: xai/${model_name}${NC}"
fi
@@ -782,21 +864,26 @@ configure_model() {
if [ -n "$api_key" ]; then
echo ""
echo -e " ${CYAN}可用模型:${NC}"
echo -e " ${CYAN}a)${NC} llama-4-maverick-17b-128e — Llama 4 Maverick (推荐)"
echo -e " ${CYAN}b)${NC} llama-3.3-70b-versatile — Llama 3.3 70B"
echo -e " ${CYAN}c)${NC} llama-3.1-8b-instant — Llama 3.1 8B (极速)"
echo -e " ${CYAN}d)${NC} 手动输入模型名"
echo -e " ${CYAN}a)${NC} meta-llama/llama-4-maverick-17b-128e-instruct — Llama 4 Maverick (推荐)"
echo -e " ${CYAN}b)${NC} meta-llama/llama-4-scout-17b-16e-instruct — Llama 4 Scout"
echo -e " ${CYAN}c)${NC} moonshotai/kimi-k2-instruct — Kimi K2"
echo -e " ${CYAN}d)${NC} qwen/qwen3-32b — 通义千问 Qwen3 32B"
echo -e " ${CYAN}e)${NC} llama-3.3-70b-versatile — Llama 3.3 70B"
echo -e " ${CYAN}f)${NC} llama-3.1-8b-instant — Llama 3.1 8B (极速)"
echo -e " ${CYAN}g)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择模型" "a" model_choice
case "$model_choice" in
a) model_name="llama-4-maverick-17b-128e" ;;
b) model_name="llama-3.3-70b-versatile" ;;
c) model_name="llama-3.1-8b-instant" ;;
d) prompt_with_default "请输入模型名称" "llama-4-maverick-17b-128e" model_name ;;
*) model_name="llama-4-maverick-17b-128e" ;;
a) model_name="meta-llama/llama-4-maverick-17b-128e-instruct" ;;
b) model_name="meta-llama/llama-4-scout-17b-16e-instruct" ;;
c) model_name="moonshotai/kimi-k2-instruct" ;;
d) model_name="qwen/qwen3-32b" ;;
e) model_name="llama-3.3-70b-versatile" ;;
f) model_name="llama-3.1-8b-instant" ;;
g) prompt_with_default "请输入模型名称" "meta-llama/llama-4-maverick-17b-128e-instruct" model_name ;;
*) model_name="meta-llama/llama-4-maverick-17b-128e-instruct" ;;
esac
auth_set_apikey groq "$api_key"
register_custom_provider groq "https://api.groq.com/openai/v1" "$api_key" "$model_name" "$model_name"
register_and_set_model "groq/${model_name}"
echo -e " ${GREEN}✅ Groq 已配置,活跃模型: groq/${model_name}${NC}"
fi
@@ -947,6 +1034,55 @@ configure_model() {
fi
;;
13)
echo ""
echo -e " ${BOLD}腾讯云大模型 Coding Plan 套餐配置${NC}"
echo ""
echo -e " ${YELLOW}订阅/管理套餐: https://hunyuan.cloud.tencent.com/#/app/subscription${NC}"
echo -e " ${YELLOW}获取 API Key: 在上方页面创建 Coding Plan 专属 Key (sk-sp-...)${NC}"
echo -e " ${DIM}文档: https://cloud.tencent.com/document/product/1772/128947${NC}"
echo ""
prompt_with_default "请输入 Coding Plan API Key (sk-sp-...)" "" api_key
if [ -n "$api_key" ]; then
echo ""
echo -e " ${CYAN}可用模型 (Coding Plan 套餐内):${NC}"
echo -e " ${CYAN}── 智能推荐 ──${NC}"
echo -e " ${CYAN}a)${NC} tc-code-latest — 自动路由 (由平台选择最佳模型) ${GREEN}★ 推荐${NC}"
echo -e " ${CYAN}── 推理模型 ──${NC}"
echo -e " ${CYAN}b)${NC} hunyuan-t1 — 混元 T1 深度推理"
echo -e " ${CYAN}c)${NC} hunyuan-2.0-thinking — 混元 2.0 Thinking"
echo -e " ${CYAN}── 旗舰模型 ──${NC}"
echo -e " ${CYAN}d)${NC} hunyuan-turbos — 混元 TurboS 旗舰"
echo -e " ${CYAN}e)${NC} hunyuan-2.0-instruct — 混元 2.0 Instruct"
echo -e " ${CYAN}── 第三方模型 ──${NC}"
echo -e " ${CYAN}f)${NC} glm-5 — 智谱 GLM-5"
echo -e " ${CYAN}g)${NC} kimi-k2.5 — Moonshot Kimi K2.5"
echo -e " ${CYAN}h)${NC} minimax-m2.5 — MiniMax M2.5"
echo -e " ${CYAN}────────────${NC}"
echo -e " ${CYAN}z)${NC} 手动输入模型名"
echo ""
prompt_with_default "请选择默认模型" "a" model_choice
case "$model_choice" in
a) model_name="tc-code-latest" ;;
b) model_name="hunyuan-t1" ;;
c) model_name="hunyuan-2.0-thinking" ;;
d) model_name="hunyuan-turbos" ;;
e) model_name="hunyuan-2.0-instruct" ;;
f) model_name="glm-5" ;;
g) model_name="kimi-k2.5" ;;
h) model_name="minimax-m2.5" ;;
z) prompt_with_default "请输入模型名称" "tc-code-latest" model_name ;;
*) model_name="tc-code-latest" ;;
esac
echo ""
echo -e " ${CYAN}正在注册腾讯云 Coding Plan 提供商 (含全部套餐模型)...${NC}"
auth_set_apikey lkeap "$api_key"
register_lkeap_codingplan_provider "$api_key"
register_and_set_model "lkeap/${model_name}"
echo -e " ${GREEN}✅ 腾讯云 Coding Plan 已配置,活跃模型: lkeap/${model_name}${NC}"
echo -e " ${DIM}提示: 套餐内全部模型已注册,可随时在 WebChat 中通过 /model 切换${NC}"
fi
;;
14)
echo ""
echo -e " ${BOLD}自定义 OpenAI 兼容 API${NC}"
echo -e " ${YELLOW}支持任何兼容 OpenAI API 格式的服务商${NC}"
@@ -964,7 +1100,7 @@ configure_model() {
0) return ;;
esac
if [ "$choice" != "0" ] && [ "$choice" != "1" ] && [ "$choice" != "8" ]; then
if [ "$choice" != "0" ] && [ "$choice" != "1" ]; then
echo ""
ask_restart
fi
@@ -1642,11 +1778,10 @@ main_menu() {
echo -e " ${CYAN}2)${NC} 🤖 配置 AI 模型提供商"
echo -e " ${CYAN}3)${NC} 🔄 设定当前活跃模型"
echo -e " ${CYAN}4)${NC} 📡 配置消息渠道 (Telegram/Discord/飞书/Slack)"
echo -e " ${CYAN}5)${NC} 🤝 Telegram 配对助手"
echo -e " ${CYAN}6)${NC} 🔍 健康检查 / 诊断"
echo -e " ${CYAN}7)${NC} 🔄 重启 Gateway"
echo -e " ${CYAN}8)${NC} 📝 查看原始配置文件"
echo -e " ${CYAN}9)${NC} ⚠️ 恢复默认配置"
echo -e " ${CYAN}5)${NC} 🔍 健康检查 / 诊断"
echo -e " ${CYAN}6)${NC} 🔄 重启 Gateway"
echo -e " ${CYAN}7)${NC} 📝 查看原始配置文件"
echo -e " ${CYAN}8)${NC} ⚠️ 恢复默认配置"
echo -e " ${CYAN}0)${NC} 退出"
echo ""
prompt_with_default "请选择" "1" menu_choice
@@ -1656,10 +1791,9 @@ main_menu() {
2) configure_model ;;
3) set_active_model ;;
4) configure_channels ;;
5) telegram_pairing ;;
6) health_check ;;
7) restart_gateway ;;
8)
5) health_check ;;
6) restart_gateway ;;
7)
echo ""
echo -e " ${CYAN}配置文件路径: ${CONFIG_FILE}${NC}"
echo ""
@@ -1675,7 +1809,7 @@ main_menu() {
ask_restart
fi
;;
9) reset_to_defaults ;;
8) reset_to_defaults ;;
0)
echo -e " ${GREEN}再见!${NC}"
exit 0

View File

@@ -138,6 +138,7 @@
const MAX_RETRY = Infinity;
let retryTimer = null;
let wasEverConnected = false;
let pingTimer = null;
console.log('[oc-config] protocol:', location.protocol);
console.log('[oc-config] host:', location.host, 'port:', location.port);
@@ -146,6 +147,7 @@
function connect() {
if (retryTimer) { clearTimeout(retryTimer); retryTimer = null; }
if (pingTimer) { clearInterval(pingTimer); pingTimer = null; }
if (ws) { try { ws.close(); } catch(e){} ws = null; }
retryCount++;
@@ -191,30 +193,49 @@
// Connection established: reset retry state, update the status UI, and
// start the application-level keepalive.
ws.onopen = function() {
clearTimeout(connectTimeout);
connected = true;
var isReconnect = wasEverConnected;
wasEverConnected = true;
retryCount = 0;
loadingEl.classList.add('hidden');
reconnectEl.classList.remove('show');
statusEl.textContent = '● 已连接';
statusEl.className = 'status connected';
// On reconnect, clear the screen so stale output is not mixed with the
// freshly restarted menu.
if (isReconnect) {
term.clear();
term.write('\x1b[33m⚡ 已重新连接,配置脚本已重新启动\x1b[0m\r\n\r\n');
}
// Re-fit after connecting to make sure the terminal dimensions are correct.
setTimeout(doFit, 50);
term.focus();
// Tell the server our current terminal size; ignore send failures.
try {
ws.send(JSON.stringify({ type: 'resize', cols: term.cols, rows: term.rows }));
} catch(e) {}
// Client-side heartbeat: send a ping every 20 seconds so the connection is
// not closed by browser/proxy idle timeouts.
if (pingTimer) clearInterval(pingTimer);
pingTimer = setInterval(function() {
if (ws && ws.readyState === WebSocket.OPEN) {
try { ws.send(JSON.stringify({ type: 'ping' })); } catch(e) {}
}
}, 20000);
};
// Incoming data: either PTY output to render, or the server's heartbeat reply.
// The server answers our application-level ping with exactly the text frame
// '{"type":"pong"}' (JSON.stringify of {type:'pong'}), so compare the whole
// message instead of a substring match — a substring check would silently
// swallow legitimate terminal output that happens to contain that text.
ws.onmessage = function(ev) {
var PONG = '{"type":"pong"}';
if (typeof ev.data === 'string') {
if (ev.data === PONG) return;
term.write(ev.data, function() { term.scrollToBottom(); });
} else if (ev.data instanceof Blob) {
// Binary frames arrive as Blobs; decode to text before filtering/writing.
ev.data.text().then(function(text) {
if (text === PONG) return;
term.write(text, function() { term.scrollToBottom(); });
});
}
};
ws.onclose = function(ev) {
clearTimeout(connectTimeout);
if (pingTimer) { clearInterval(pingTimer); pingTimer = null; }
connected = false;
statusEl.textContent = '● 已断开';
statusEl.className = 'status disconnected';

View File

@@ -106,12 +106,29 @@ class PtySession {
this.alive = true;
this._spawnFailCount = 0;
this._MAX_SPAWN_RETRIES = 5;
this._pingTimer = null;
this._pongReceived = true;
activeSessions++;
console.log(`[oc-config] Session created (active: ${activeSessions}/${MAX_SESSIONS})`);
this._setupWSReader();
this._startPing();
this._spawnPty();
}
// WebSocket ping/pong 保活 (每 25 秒发一次 ping)
_startPing() {
this._pingTimer = setInterval(() => {
if (!this.alive) { clearInterval(this._pingTimer); return; }
if (!this._pongReceived) {
console.log('[oc-config] Pong timeout, closing connection');
this._cleanup();
return;
}
this._pongReceived = false;
try { this.socket.write(encodeWSFrame(Buffer.alloc(0), 0x09)); } catch(e) { this._cleanup(); }
}, 25000);
}
_setupWSReader() {
this.socket.on('data', (chunk) => {
this.buffer = Buffer.concat([this.buffer, chunk]);
@@ -123,6 +140,7 @@ class PtySession {
else if (frame.opcode === 0x02 && this.proc && this.proc.stdin.writable) this.proc.stdin.write(frame.data);
else if (frame.opcode === 0x08) { console.log('[oc-config] WS close frame received'); this._cleanup(); }
else if (frame.opcode === 0x09) this.socket.write(encodeWSFrame(frame.data, 0x0a));
else if (frame.opcode === 0x0a) { this._pongReceived = true; }
}
});
this.socket.on('close', (hadError) => { console.log(`[oc-config] Socket closed, hadError=${hadError}`); this._cleanup(); });
@@ -141,6 +159,10 @@ class PtySession {
this.cols = msg.cols || 80; this.rows = msg.rows || 24;
if (this.proc && this.proc.pid) { try { process.kill(-this.proc.pid, 'SIGWINCH'); } catch(e){} }
}
else if (msg.type === 'ping') {
// 应用层心跳: 客户端定期发送 ping,服务端回复 pong 保持连接活跃
this.socket.write(encodeWSFrame(JSON.stringify({ type: 'pong' }), 0x01));
}
} catch(e) { if (this.proc && this.proc.stdin.writable) this.proc.stdin.write(text); }
}
@@ -205,6 +227,7 @@ class PtySession {
_cleanup() {
if (!this.alive) return; this.alive = false;
if (this._pingTimer) { clearInterval(this._pingTimer); this._pingTimer = null; }
activeSessions = Math.max(0, activeSessions - 1);
console.log(`[oc-config] Session ended (active: ${activeSessions}/${MAX_SESSIONS})`);
if (this.proc) { try { process.kill(-this.proc.pid, 'SIGTERM'); } catch(e){} try { this.proc.kill('SIGTERM'); } catch(e){} }