// Files
// openclaw-ollama-toolcall-proxy/test/integration.vllm.test.ts
//
// 63 lines
// 2.1 KiB
// TypeScript
// Executable File

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { buildServer } from '../src/server';
import { FastifyInstance } from 'fastify';
import fs from 'fs';
import path from 'path';
import { config } from '../src/config';
/**
 * Integration test for the vLLM proxy mode: a request POSTed to the local
 * /v1/chat/completions endpoint must be forwarded upstream, and an upstream
 * response containing XML-encoded tool-call markup must be rewritten into
 * structured OpenAI `tool_calls`.
 */
describe('vLLM Proxy Integration Test', () => {
  let server: FastifyInstance;
  let fetchMock: ReturnType<typeof vi.fn>;

  beforeEach(() => {
    // Mutate the loaded config instance so buildServer() wires up vLLM mode.
    config.proxyMode = 'vllm';
    server = buildServer();
    // Use vi.stubGlobal instead of a bare `global.fetch = vi.fn()` assignment:
    // vi.restoreAllMocks() does NOT undo direct global reassignments, so the
    // stub would otherwise leak into other test files in the same worker.
    fetchMock = vi.fn();
    vi.stubGlobal('fetch', fetchMock);
  });

  afterEach(async () => {
    await server.close();
    vi.unstubAllGlobals(); // restore the real global fetch
    vi.restoreAllMocks();
  });

  it('proxies request and rewrites XML response to tool_calls for vLLM', async () => {
    // Fixtures: an OpenAI-style chat request and an upstream response whose
    // assistant message carries XML tool-call markup to be rewritten.
    const requestFixturePath = path.join(__dirname, 'fixtures', 'vllm-like-request.json');
    const responseFixturePath = path.join(__dirname, 'fixtures', 'vllm-xml-response.json');
    const requestJson = JSON.parse(fs.readFileSync(requestFixturePath, 'utf8'));
    const responseJson = JSON.parse(fs.readFileSync(responseFixturePath, 'utf8'));

    fetchMock.mockResolvedValue({
      ok: true,
      json: async () => responseJson
    });

    const response = await server.inject({
      method: 'POST',
      url: '/v1/chat/completions',
      payload: requestJson
    });

    expect(response.statusCode).toBe(200);
    const body = JSON.parse(response.payload);

    // Verify the proxy forwarded exactly one upstream request, preserving
    // the completions path and the requested model.
    expect(fetchMock).toHaveBeenCalledTimes(1);
    const [upstreamUrl, upstreamInit] = fetchMock.mock.calls[0];
    expect(upstreamUrl).toContain('/v1/chat/completions');
    const upstreamBody = JSON.parse(upstreamInit.body);
    expect(upstreamBody.model).toBe('Qwen3.5-27B');

    // Verify the XML tool call was rewritten into a structured tool_calls
    // entry and the textual content was emptied.
    expect(body.choices[0].message.content).toBe("");
    expect(body.choices[0].message.tool_calls).toBeDefined();
    expect(body.choices[0].message.tool_calls).toHaveLength(1);
    expect(body.choices[0].message.tool_calls[0].function.name).toBe('read');
    expect(JSON.parse(body.choices[0].message.tool_calls[0].function.arguments)).toEqual({
      path: "/tmp/test.txt"
    });
  });
});