Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: CI

on:
  pull_request:
  push:
    branches:
      - main

# Cancel superseded runs for the same ref so only the latest commit is verified.
concurrency:
  group: ci-${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # Job name must stay `verify-local`: branch protection references it as the
  # required status check (see README "CI contract").
  verify-local:
    name: verify-local
    runs-on: ubuntu-latest
    timeout-minutes: 30

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: npm

      - name: Install dependencies
        run: npm ci

      - name: Run full verification
        run: npm run verify:local
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@ coverage/
*.log
.env*
!.env.example
.vercel
3 changes: 3 additions & 0 deletions .husky/pre-push
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
#!/usr/bin/env sh

# Husky pre-push gate: run the same verification suite as CI.
# A non-zero exit status here blocks the push.
npm run verify:local
45 changes: 45 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,51 @@ These constraints are enforced in `firestore.rules` and validated by `test/rules
- Local emulator config: `firebase.json`
- Confirm production Firebase Auth domain setup before release (Google provider and authorized domains)

## GitHub-Vercel Sync Workflow

This project uses GitHub as the deployment source of truth.

### Daily flow
1. Create a feature branch from `main`.
2. Commit and push branch changes.
3. Open a pull request.
4. Wait for CI check `verify-local` to pass.
5. Merge PR into `main`.
6. Vercel auto-deploys merged `main` commit to production.

### CI contract
- Workflow file: `.github/workflows/ci.yml`
- Triggers:
- every pull request
- every push to `main`
- Required check name for branch protection: `verify-local`
- CI command chain:
- `npm ci`
- `npm run verify:local`

### Local push gate (Husky)
- Husky hooks are installed by the `npm run prepare` script.
- Pre-push hook path: `.husky/pre-push`
- Pre-push command: `npm run verify:local`
- If checks fail, push is blocked.

### Required GitHub settings (`main` branch protection)
- Require pull request before merging.
- Require status checks to pass before merging.
- Add required status check: `verify-local`.
- Require branches to be up to date before merging.

### Required Vercel settings
- Git repository connected to this GitHub repo.
- Production branch set to `main`.
- Preview deployments enabled for pull requests.
- `GEMINI_API_KEY` configured for Preview and Production environments.

### Emergency rollback
- Open Vercel dashboard.
- Find the last known-good production deployment.
- Redeploy that deployment to production.

## Troubleshooting

### `GEMINI_API_KEY is not configured for the AI parse endpoint`
Expand Down
204 changes: 128 additions & 76 deletions api/ai/parse.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,26 @@
import { GoogleGenAI, Type } from '@google/genai';
import { validateAiParseResult } from '../../src/services/aiValidation';
import { AiParseResult, InventoryItem, Language } from '../../src/types';
import { AiParseResult, InventoryPromptItem, Language, validateAiParseResult } from './validation.js';

// Validated request payload for the AI parse endpoint: the raw user input
// string, the caller's inventory reduced to prompt-relevant fields, and the
// requested language (used when building the prompt).
type ParseCookVoiceInputRequest = {
  input: string;
  inventory: InventoryPromptItem[];
  lang: Language;
};

type InventoryPromptItem = Pick<InventoryItem, 'id' | 'name' | 'nameHi'>;
// Minimal structural type for the Node-style request object the handler
// receives — only the members the handler actually reads.
type NodeApiRequest = {
  method?: string;
  body?: unknown;
};

// Minimal structural type for the Node-style response object.
// status() is chainable (returns the response); json() serializes the body.
type NodeApiResponse = {
  status: (statusCode: number) => NodeApiResponse;
  json: (body: unknown) => void;
};

const AI_MODEL = 'gemini-3-flash-preview';
const AI_ENDPOINT_NAME = 'ai_parse';
const MAX_AI_ATTEMPTS = 3;
const BASE_RETRY_DELAY_MS = 250;
const AI_REQUEST_TIMEOUT_MS = 12000;
const GEMINI_API_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/models';
const EMPTY_AI_RESPONSE_MESSAGE = 'Empty response';

class AiParseRequestError extends Error {
Expand All @@ -37,8 +44,8 @@ class AiParseExecutionError extends Error {
}
}

function createJsonResponse(body: unknown, status: number): Response {
return Response.json(body, { status });
/**
 * Writes `body` as the JSON payload of `response` with the given HTTP status.
 */
function sendJsonResponse(response: NodeApiResponse, body: unknown, status: number): void {
  const withStatus = response.status(status);
  withStatus.json(body);
}

function getEnvApiKey(): string {
Expand All @@ -49,8 +56,8 @@ function getEnvApiKey(): string {
return apiKey;
}

function getAiClient(): GoogleGenAI {
return new GoogleGenAI({ apiKey: getEnvApiKey() });
/**
 * Resolves the Gemini model id: the GEMINI_MODEL environment variable wins
 * when set; otherwise the default model is used.
 */
function getAiModel(): string {
  const override = process.env.GEMINI_MODEL;
  return override ?? 'gemini-2.5-flash';
}

function isLanguage(value: unknown): value is Language {
Expand All @@ -71,11 +78,25 @@ function isInventoryPromptItem(value: unknown): value is InventoryPromptItem {
}

function parseRequestBody(raw: unknown): ParseCookVoiceInputRequest {
if (!raw || typeof raw !== 'object') {
const parsedRaw = (() => {
if (typeof raw !== 'string') {
return raw;
}

try {
return JSON.parse(raw) as unknown;
} catch (error) {
throw new AiParseRequestError('AI parse request body must be valid JSON.', {
cause: error instanceof Error ? error : undefined,
});
}
})();

if (!parsedRaw || typeof parsedRaw !== 'object') {
throw new AiParseRequestError('AI parse request body must be an object.');
}

const candidate = raw as Record<string, unknown>;
const candidate = parsedRaw as Record<string, unknown>;
if (typeof candidate.input !== 'string' || candidate.input.trim().length === 0) {
throw new AiParseRequestError('AI parse request input must be a non-empty string.');
}
Expand Down Expand Up @@ -114,42 +135,6 @@ function buildPrompt(input: string, inventory: InventoryPromptItem[], lang: Lang
Return a JSON object matching this schema.`;
}

function createResponseSchema() {
return {
type: Type.OBJECT,
properties: {
understood: { type: Type.BOOLEAN },
message: { type: Type.STRING },
updates: {
type: Type.ARRAY,
items: {
type: Type.OBJECT,
properties: {
itemId: { type: Type.STRING },
newStatus: { type: Type.STRING },
requestedQuantity: { type: Type.STRING },
},
required: ['itemId', 'newStatus'],
},
},
unlistedItems: {
type: Type.ARRAY,
items: {
type: Type.OBJECT,
properties: {
name: { type: Type.STRING },
status: { type: Type.STRING },
category: { type: Type.STRING },
requestedQuantity: { type: Type.STRING },
},
required: ['name', 'status', 'category'],
},
},
},
required: ['understood', 'updates', 'unlistedItems'],
};
}

function getErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
Expand Down Expand Up @@ -179,26 +164,96 @@ async function waitForRetry(delayMs: number): Promise<void> {
});
}

/** Builds the error thrown when a Gemini call exceeds its deadline. */
function createTimeoutError(timeoutMs: number): Error {
  const message = `AI request timed out after ${timeoutMs}ms.`;
  return new Error(message);
}

/**
 * Builds the generateContent URL for `model`. The API key travels as a
 * `key` query parameter; both segments are URI-encoded.
 */
function buildGeminiEndpoint(model: string, apiKey: string): string {
  const encodedModel = encodeURIComponent(model);
  const encodedKey = encodeURIComponent(apiKey);
  return `${GEMINI_API_BASE_URL}/${encodedModel}:generateContent?key=${encodedKey}`;
}

/**
 * Shapes the generateContent request payload: a single user turn carrying
 * the prompt, with JSON output requested via responseMimeType.
 */
function createGeminiRequestBody(prompt: string): Record<string, unknown> {
  const userTurn = {
    role: 'user',
    parts: [{ text: prompt }],
  };

  return {
    contents: [userTurn],
    generationConfig: { responseMimeType: 'application/json' },
  };
}

/**
 * Extracts the first candidate's first text part from a raw Gemini
 * generateContent response body.
 *
 * Throws when the body is not an object, or when no non-blank text part is
 * present (EMPTY_AI_RESPONSE_MESSAGE, so callers can treat it as retryable).
 */
function parseGeminiText(raw: unknown): string {
  if (typeof raw !== 'object' || raw === null) {
    throw new Error('Gemini response body is not an object.');
  }

  type GeminiEnvelope = {
    candidates?: Array<{ content?: { parts?: Array<{ text?: string }> } }>;
  };

  const envelope = raw as GeminiEnvelope;
  const text = envelope.candidates?.[0]?.content?.parts?.[0]?.text;

  if (typeof text !== 'string' || text.trim().length === 0) {
    throw new Error(EMPTY_AI_RESPONSE_MESSAGE);
  }

  return text;
}

/**
 * Calls the Gemini generateContent REST endpoint and returns the model's
 * JSON payload parsed to an unknown value.
 *
 * Flow: POST the prompt; read the body as text (so failures can report the
 * raw body); reject non-2xx with a truncated body excerpt; parse the
 * envelope; pull the first candidate's text via parseGeminiText; JSON.parse
 * that text. The request is aborted after `timeoutMs` and surfaced as a
 * dedicated timeout error.
 */
async function requestGeminiJson(prompt: string, apiKey: string, model: string, timeoutMs: number): Promise<unknown> {
  const abortController = new AbortController();
  // Abort the in-flight fetch once the deadline expires.
  const timeoutId = setTimeout(() => {
    abortController.abort();
  }, timeoutMs);

  try {
    const response = await fetch(buildGeminiEndpoint(model, apiKey), {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(createGeminiRequestBody(prompt)),
      signal: abortController.signal,
    });

    // Read as text first so the error path can include the raw body.
    const responseBody = await response.text();

    if (!response.ok) {
      // Cap the body excerpt at 1000 chars to keep logs/errors bounded.
      throw new Error(
        `Gemini request failed. status=${response.status} body=${responseBody.slice(0, 1000)}`
      );
    }

    const parsed = JSON.parse(responseBody) as unknown;
    const text = parseGeminiText(parsed);
    // The model was asked for application/json, so the text part is itself JSON.
    return JSON.parse(text) as unknown;
  } catch (error) {
    // An aborted fetch rejects with an AbortError; translate it into a
    // clearer timeout error for callers.
    const candidate = error as { name?: string };
    if (candidate?.name === 'AbortError') {
      throw createTimeoutError(timeoutMs);
    }
    throw error;
  } finally {
    // Always clear the timer so it cannot fire after the request settles.
    clearTimeout(timeoutId);
  }
}

async function generateAiParseResult(input: string, inventory: InventoryPromptItem[], lang: Language): Promise<AiParseResult> {
const aiClient = getAiClient();
const apiKey = getEnvApiKey();
const aiModel = getAiModel();
const prompt = buildPrompt(input, inventory, lang);
let lastError: unknown = null;

for (let attempt = 1; attempt <= MAX_AI_ATTEMPTS; attempt += 1) {
try {
const response = await aiClient.models.generateContent({
model: AI_MODEL,
contents: buildPrompt(input, inventory, lang),
config: {
responseMimeType: 'application/json',
responseSchema: createResponseSchema(),
},
});

if (!response.text) {
throw new Error(EMPTY_AI_RESPONSE_MESSAGE);
}

const parsed = JSON.parse(response.text) as unknown;
const parsed = await requestGeminiJson(prompt, apiKey, aiModel, AI_REQUEST_TIMEOUT_MS);
return validateAiParseResult(parsed);
} catch (error) {
lastError = error;
Expand All @@ -222,25 +277,18 @@ export const config = {
runtime: 'nodejs',
};

export default async function handler(request: Request): Promise<Response> {
export default async function handler(request: NodeApiRequest, response: NodeApiResponse): Promise<void> {
if (request.method !== 'POST') {
return createJsonResponse({ message: 'Method not allowed.' }, 405);
sendJsonResponse(response, { message: 'Method not allowed.' }, 405);
return;
}

try {
let body: unknown;

try {
body = (await request.json()) as unknown;
} catch (error) {
throw new AiParseRequestError('AI parse request body must be valid JSON.', {
cause: error instanceof Error ? error : undefined,
});
}

const body = request.body;
const { input, inventory, lang } = parseRequestBody(body);
const result = await generateAiParseResult(input, inventory, lang);
return createJsonResponse(result, 200);
sendJsonResponse(response, result, 200);
return;
} catch (error) {
const errorMessage = getErrorMessage(error);
const status =
Expand All @@ -254,6 +302,10 @@ export default async function handler(request: Request): Promise<Response> {
errorMessage,
});

return createJsonResponse({ message: status === 400 || status === 503 ? errorMessage : 'Could not process AI response safely. Please retry with clearer input.' }, status);
sendJsonResponse(
response,
{ message: status === 400 || status === 503 ? errorMessage : 'Could not process AI response safely. Please retry with clearer input.' },
status
);
}
}
Loading
Loading