|
| 1 | +import { NodejsPlatformServices } from '@metamask/kernel-node-runtime'; |
| 2 | +import { makeSQLKernelDatabase } from '@metamask/kernel-store/sqlite/nodejs'; |
| 3 | +import { waitUntilQuiescent } from '@metamask/kernel-utils'; |
| 4 | +import { |
| 5 | + Logger, |
| 6 | + makeArrayTransport, |
| 7 | + makeConsoleTransport, |
| 8 | +} from '@metamask/logger'; |
| 9 | +import type { LogEntry } from '@metamask/logger'; |
| 10 | +import { Kernel } from '@metamask/ocap-kernel'; |
| 11 | +import { |
| 12 | + LANGUAGE_MODEL_SERVICE_NAME, |
| 13 | + makeKernelLanguageModelService, |
| 14 | + makeOpenV1NodejsService, |
| 15 | +} from '@ocap/kernel-language-model-service'; |
| 16 | +import { fetchMock } from '@ocap/repo-tools/test-utils/fetch-mock'; |
| 17 | +import { afterAll, beforeAll, describe, expect, it } from 'vitest'; |
| 18 | + |
| 19 | +import { DEFAULT_MODEL } from '../../src/constants.ts'; |
| 20 | +import { filterTransports } from '../../src/utils.ts'; |
| 21 | + |
| 22 | +const getBundleSpec = (name: string): string => |
| 23 | + new URL(`./vats/${name}.bundle`, import.meta.url).toString(); |
| 24 | + |
// Integration suite: drives a full round trip from a vat, through the
// kernel's language-model service, to a live LLM endpoint (presumably a
// local Ollama server on port 11434 — verify against CI setup).
// `.sequential` prevents other suites from interleaving while the global
// fetch mock is disabled below.
describe.sequential('lms-kernel', () => {
  beforeAll(() => {
    // This suite needs real network access to the model server, so the
    // repo-wide fetch mock is switched off for its duration.
    fetchMock.disableMocks();
  });

  afterAll(() => {
    // Restore mocking so suites that run afterwards are unaffected.
    fetchMock.enableMocks();
  });

  it(
    'sends a chat message through the kernel to Ollama and receives a response',
    // Generous timeout: a cold model load plus inference can take tens of
    // seconds on CI hardware.
    { timeout: 60_000 },
    async () => {
      // Ephemeral kernel persistence — nothing survives the test run.
      const kernelDatabase = await makeSQLKernelDatabase({
        dbFilename: ':memory:',
      });

      // Capture every log entry in `entries` (in addition to console
      // output) so the test can assert on what the vat logged. The
      // assertions below depend on this capture, since the vat's reply is
      // only observable through its log line.
      const entries: LogEntry[] = [];
      const logger = new Logger({
        transports: [
          filterTransports(makeConsoleTransport(), makeArrayTransport(entries)),
        ],
      });

      const platformServices = new NodejsPlatformServices({
        logger: logger.subLogger({ tags: ['vat-worker-manager'] }),
      });

      // Fresh kernel per test; `resetStorage` clears any prior state.
      const kernel = await Kernel.make(platformServices, kernelDatabase, {
        resetStorage: true,
        logger,
      });

      // Build an OpenAI-v1-compatible chat client pointed at the local
      // server, then expose it to vats as a kernel service object.
      // NOTE(review): the API key appears unused by a local Ollama server —
      // confirm whether 'test-api-key' is a required placeholder.
      const { chat } = makeOpenV1NodejsService({
        endowments: { fetch },
        baseUrl: 'http://localhost:11434',
        apiKey: 'test-api-key',
      });
      const { name, service } = makeKernelLanguageModelService(chat);
      kernel.registerKernelServiceObject(name, service);

      // Launch a single-vat subcluster whose bootstrap vat sends the chat
      // message; the vat is granted access to the language-model service.
      await kernel.launchSubcluster({
        bootstrap: 'main',
        services: [LANGUAGE_MODEL_SERVICE_NAME],
        vats: {
          main: {
            bundleSpec: getBundleSpec('lms-chat-vat'),
            parameters: { model: DEFAULT_MODEL },
          },
        },
      });
      // Let the kernel's message queues drain before inspecting the logs.
      await waitUntilQuiescent(100);

      // The vat logs its reply as 'lms-chat response: <text>'; find it in
      // the captured entries.
      const responseEntry = entries.find((entry) =>
        entry.message?.startsWith('lms-chat response:'),
      );
      expect(responseEntry).toBeDefined();
      // Non-empty payload after the prefix…
      expect(responseEntry?.message?.length).toBeGreaterThan(
        'lms-chat response: '.length,
      );
      // …and the model answered with a bare greeting (the vat presumably
      // prompts for exactly 'Hello' — confirm against lms-chat-vat source).
      expect(responseEntry?.message).toMatch(
        /^lms-chat response: [hH]ello[.!]?$/u,
      );
    },
  );
});
0 commit comments