Files
openclaw/src/commands/auth-choice.apply.vllm.ts
2026-02-13 15:48:37 +01:00

108 lines
3.1 KiB
TypeScript

import type { OpenClawConfig } from "../config/config.js";
import type { ApplyAuthChoiceParams, ApplyAuthChoiceResult } from "./auth-choice.apply.js";
import { upsertAuthProfile } from "../agents/auth-profiles.js";
// Default endpoint for a locally running vLLM server; vLLM exposes an
// OpenAI-compatible API under the /v1 path — TODO confirm port matches deployment.
const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
// Fallback context window used when registering the model; the real value
// depends on the served model — NOTE(review): verify against the deployment.
const VLLM_DEFAULT_CONTEXT_WINDOW = 128000;
// Cap on generated tokens per request for the registered model entry.
const VLLM_DEFAULT_MAX_TOKENS = 8192;
// All per-token cost rates default to 0 (no billing figures are collected
// during vLLM setup).
const VLLM_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
/**
 * Returns a copy of the config whose default agent model is `modelRef`.
 *
 * Any `fallbacks` list on the existing default-model object is carried over;
 * a plain-string (or absent) existing model contributes no fallbacks. The
 * input config is not mutated — every touched level is shallow-copied.
 */
function applyVllmDefaultModel(cfg: OpenClawConfig, modelRef: string): OpenClawConfig {
  const currentModel = cfg.agents?.defaults?.model;
  let keptFallbacks: string[] | undefined;
  if (currentModel && typeof currentModel === "object" && "fallbacks" in currentModel) {
    keptFallbacks = (currentModel as { fallbacks?: string[] }).fallbacks;
  }
  const nextModel = keptFallbacks
    ? { fallbacks: keptFallbacks, primary: modelRef }
    : { primary: modelRef };
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        model: nextModel,
      },
    },
  };
}
/**
 * Applies the "vllm" onboarding/auth choice.
 *
 * Prompts for the vLLM base URL, API key, and model id, persists the key as
 * the `vllm:default` auth profile, and returns a config with the vLLM
 * provider (and its single model entry) registered. Returns null when a
 * different auth choice is active so the caller can try other handlers.
 */
export async function applyAuthChoiceVllm(
  params: ApplyAuthChoiceParams,
): Promise<ApplyAuthChoiceResult | null> {
  if (params.authChoice !== "vllm") {
    return null;
  }

  // Prompt in a fixed order: endpoint, credential, model id.
  const baseUrlAnswer = await params.prompter.text({
    message: "vLLM base URL",
    initialValue: VLLM_DEFAULT_BASE_URL,
    placeholder: VLLM_DEFAULT_BASE_URL,
    validate: (value) => (!value || !value.trim() ? "Required" : undefined),
  });
  const apiKeyAnswer = await params.prompter.text({
    message: "vLLM API key",
    placeholder: "sk-... (or any non-empty string)",
    validate: (value) => (!value || !value.trim() ? "Required" : undefined),
  });
  const modelAnswer = await params.prompter.text({
    message: "vLLM model",
    placeholder: "meta-llama/Meta-Llama-3-8B-Instruct",
    validate: (value) => (!value || !value.trim() ? "Required" : undefined),
  });

  // Normalize answers. Trailing slashes on the base URL are stripped; the
  // `?? ""` guards against a non-string cancel value from the prompter.
  const baseUrl = String(baseUrlAnswer ?? "").trim().replace(/\/+$/, "");
  const apiKey = String(apiKeyAnswer ?? "").trim();
  const modelId = String(modelAnswer ?? "").trim();

  // Persist the credential in the auth-profile store, outside the config.
  upsertAuthProfile({
    profileId: "vllm:default",
    credential: { type: "api_key", provider: "vllm", key: apiKey },
    agentDir: params.agentDir,
  });

  const modelRef = `vllm/${modelId}`;
  const nextConfig: OpenClawConfig = {
    ...params.config,
    models: {
      ...params.config.models,
      mode: params.config.models?.mode ?? "merge",
      providers: {
        ...params.config.models?.providers,
        vllm: {
          baseUrl,
          api: "openai-completions",
          // NOTE(review): looks like an env-var reference rather than a
          // literal key — confirm how the provider loader resolves it.
          apiKey: "VLLM_API_KEY",
          models: [
            {
              id: modelId,
              name: modelId,
              reasoning: false,
              input: ["text"],
              cost: VLLM_DEFAULT_COST,
              contextWindow: VLLM_DEFAULT_CONTEXT_WINDOW,
              maxTokens: VLLM_DEFAULT_MAX_TOKENS,
            },
          ],
        },
      },
    },
  };

  if (params.setDefaultModel) {
    await params.prompter.note(`Default model set to ${modelRef}`, "Model configured");
    return { config: applyVllmDefaultModel(nextConfig, modelRef) };
  }
  // Caller did not ask for a global default; hand back the model as a
  // per-agent override instead.
  return { config: nextConfig, agentModelOverride: modelRef };
}