Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,9 @@
"@aws-sdk/s3-request-presigner": "^3.958.0",
"@electric-sql/pglite": "^0.3.14",
"@faker-js/faker": "^10.1.0",
"@langchain/aws": "^1.2.0",
"@langchain/core": "^1.1.15",
"@langchain/openai": "^1.2.2",
"@nestjs/common": "11.1.9",
"@nestjs/config": "4.0.2",
"@nestjs/core": "11.1.9",
Expand Down Expand Up @@ -71,6 +74,7 @@
"json2csv": "^5.0.7",
"jsonwebtoken": "^9.0.3",
"knex": "3.1.0",
"langchain": "^1.2.10",
"lru-cache": "^11.2.4",
"nanoid": "5.1.6",
"nodemailer": "^7.0.11",
Expand Down
21 changes: 21 additions & 0 deletions backend/src/ai-core/ai-core.module.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import { Global, Module } from '@nestjs/common';
import { LangchainOpenAIProvider } from './providers/langchain-openai.provider.js';
import { LangchainBedrockProvider } from './providers/langchain-bedrock.provider.js';
import { AICoreService } from './services/ai-core.service.js';

/**
 * Injection token that resolves to the same instance as AICoreService.
 * Lets consumers inject the service by string token instead of the class.
 */
export const AI_CORE_SERVICE = 'AI_CORE_SERVICE';

/**
 * Global NestJS module wiring the LangChain-backed providers and the
 * AICoreService facade. Marked @Global() so other modules can inject the
 * exported providers without importing this module explicitly.
 */
@Global()
@Module({
providers: [
LangchainOpenAIProvider,
LangchainBedrockProvider,
AICoreService,
{
// Alias: `useExisting` reuses the single AICoreService instance, so
// injecting AI_CORE_SERVICE or AICoreService yields the same object.
provide: AI_CORE_SERVICE,
useExisting: AICoreService,
},
],
exports: [AICoreService, AI_CORE_SERVICE, LangchainOpenAIProvider, LangchainBedrockProvider],
})
export class AICoreModule {}
6 changes: 6 additions & 0 deletions backend/src/ai-core/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
// Barrel file: re-exports the public surface of the ai-core feature
// (module, interfaces, providers, services, tools, utils).
export * from './ai-core.module.js';
export * from './interfaces/index.js';
export * from './providers/index.js';
export * from './services/index.js';
export * from './tools/index.js';
export * from './utils/index.js';
81 changes: 81 additions & 0 deletions backend/src/ai-core/interfaces/ai-provider.interface.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
import { BaseMessage } from '@langchain/core/messages';
import { IterableReadableStream } from '@langchain/core/utils/stream';

/**
 * Per-call configuration accepted by all AI providers.
 * Every field is optional; providers fall back to their own defaults
 * (see IAIProvider.getDefaultModelId for the model fallback).
 */
export interface AIProviderConfig {
modelId?: string; // provider-specific model identifier; defaults per provider
temperature?: number;
maxTokens?: number;
streaming?: boolean;
previousResponseId?: string; // For conversation continuation (OpenAI Responses API)
}

/**
 * Declaration of a tool the model is allowed to call.
 * NOTE(review): `parameters` is presumably a JSON-Schema-style object
 * describing the tool's arguments — confirm against the provider
 * implementations that consume it.
 */
export interface AIToolDefinition {
name: string;
description: string;
parameters: Record<string, unknown>;
}

/**
 * A tool invocation requested by the model. `id` correlates the call with
 * the AIToolResult later fed back via continueWithToolResults.
 */
export interface AIToolCall {
id: string;
name: string;
arguments: Record<string, unknown>;
}

/**
 * Outcome of executing a tool call, keyed by the originating
 * AIToolCall.id so the model can match result to request.
 */
export interface AIToolResult {
toolCallId: string;
result: string;
}

/**
 * One item of a streamed response. `type` tags the chunk kind; the other
 * fields are populated depending on it — presumably `content` for 'text'
 * chunks and `toolCall` for 'tool_call' chunks (confirm against the
 * provider implementations).
 */
export interface AIStreamChunk {
type: 'text' | 'tool_call' | 'tool_result' | 'done';
content?: string;
toolCall?: AIToolCall;
responseId?: string; // NOTE(review): likely the OpenAI Responses API id — confirm
}

/**
 * Final (non-streaming) completion payload: the generated text, any tool
 * calls the model requested, and an optional response id usable as
 * AIProviderConfig.previousResponseId for conversation continuation.
 */
export interface AICompletionResult {
content: string;
toolCalls?: AIToolCall[];
responseId?: string;
}

/**
 * Contract implemented by each concrete AI provider
 * (LangchainOpenAIProvider, LangchainBedrockProvider).
 * Message-based methods take LangChain BaseMessage arrays; every method
 * accepts an optional AIProviderConfig for per-call overrides.
 */
export interface IAIProvider {
/** Single-shot text completion from a plain string prompt. */
generateCompletion(prompt: string, config?: AIProviderConfig): Promise<string>;

/** Non-streaming chat completion over a message history. */
generateChatCompletion(messages: BaseMessage[], config?: AIProviderConfig): Promise<AICompletionResult>;

/** Streaming chat completion; resolves to a stream of AIStreamChunk items. */
generateStreamingCompletion(
messages: BaseMessage[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/** Chat completion where the model may request calls to the given tools. */
generateWithTools(
messages: BaseMessage[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<AICompletionResult>;

/** Streaming variant of generateWithTools. */
generateStreamingWithTools(
messages: BaseMessage[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/**
 * Continue a tool-calling exchange: feed executed tool results back to
 * the model along with the conversation so far (non-streaming).
 */
continueWithToolResults(
messages: BaseMessage[],
toolResults: AIToolResult[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<AICompletionResult>;

/** Streaming variant of continueWithToolResults. */
continueStreamingWithToolResults(
messages: BaseMessage[],
toolResults: AIToolResult[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/** Identifier of this provider — presumably matches an AIProviderType value; confirm. */
getProviderName(): string;

/** Model id used when AIProviderConfig.modelId is not supplied. */
getDefaultModelId(): string;
}
81 changes: 81 additions & 0 deletions backend/src/ai-core/interfaces/ai-service.interface.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
import { BaseMessage } from '@langchain/core/messages';
import {
AICompletionResult,
AIProviderConfig,
AIStreamChunk,
AIToolDefinition,
AIToolResult,
} from './ai-provider.interface.js';
import { IterableReadableStream } from '@langchain/core/utils/stream';

/**
 * Supported AI backends, used to select a provider explicitly in the
 * `*WithProvider` methods of IAIService. String enum so the values are
 * stable and serializable.
 */
export enum AIProviderType {
OPENAI = 'openai',
BEDROCK = 'bedrock',
}

/**
 * High-level AI facade (implemented by AICoreService). Each operation has
 * two forms: one that uses the current default provider, and a
 * `*WithProvider` variant that targets an explicit AIProviderType.
 */
export interface IAIService {

/** Single-shot text completion using the default provider. */
complete(prompt: string, config?: AIProviderConfig): Promise<string>;

/** Single-shot text completion on an explicitly chosen provider. */
completeWithProvider(provider: AIProviderType, prompt: string, config?: AIProviderConfig): Promise<string>;

/** Non-streaming chat completion using the default provider. */
chat(messages: BaseMessage[], config?: AIProviderConfig): Promise<AICompletionResult>;

/** Non-streaming chat completion on an explicitly chosen provider. */
chatWithProvider(
provider: AIProviderType,
messages: BaseMessage[],
config?: AIProviderConfig,
): Promise<AICompletionResult>;

/** Streaming chat completion using the default provider. */
streamChat(messages: BaseMessage[], config?: AIProviderConfig): Promise<IterableReadableStream<AIStreamChunk>>;

/** Streaming chat completion on an explicitly chosen provider. */
streamChatWithProvider(
provider: AIProviderType,
messages: BaseMessage[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/** Chat with tool definitions the model may invoke (default provider). */
chatWithTools(
messages: BaseMessage[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<AICompletionResult>;

/** Chat with tools on an explicitly chosen provider. */
chatWithToolsAndProvider(
provider: AIProviderType,
messages: BaseMessage[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<AICompletionResult>;

/** Streaming variant of chatWithTools (default provider). */
streamChatWithTools(
messages: BaseMessage[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/** Streaming variant of chatWithTools on an explicitly chosen provider. */
streamChatWithToolsAndProvider(
provider: AIProviderType,
messages: BaseMessage[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/**
 * Continue a tool-calling exchange by feeding executed tool results back
 * to the model (non-streaming). NOTE(review): no `*WithProvider` variant
 * exists for the continue methods — presumably they use the default
 * provider; confirm this is intentional.
 */
continueAfterToolCall(
messages: BaseMessage[],
toolResults: AIToolResult[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<AICompletionResult>;

/** Streaming variant of continueAfterToolCall. */
continueStreamingAfterToolCall(
messages: BaseMessage[],
toolResults: AIToolResult[],
tools: AIToolDefinition[],
config?: AIProviderConfig,
): Promise<IterableReadableStream<AIStreamChunk>>;

/** Provider used when no explicit provider is given. */
getDefaultProvider(): AIProviderType;

/** Change the provider used by the non-`*WithProvider` methods. */
setDefaultProvider(provider: AIProviderType): void;
}
2 changes: 2 additions & 0 deletions backend/src/ai-core/interfaces/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
// Barrel file for the ai-core interface definitions.
export * from './ai-provider.interface.js';
export * from './ai-service.interface.js';
2 changes: 2 additions & 0 deletions backend/src/ai-core/providers/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
// Barrel file for the concrete LangChain provider implementations.
export * from './langchain-openai.provider.js';
export * from './langchain-bedrock.provider.js';
Loading
Loading