Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .dockerignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Dependencies
node_modules
*/node_modules
**/node_modules
npm-debug.log*
yarn-debug.log*
yarn-error.log*
Expand Down
89 changes: 49 additions & 40 deletions apps/api/Dockerfile.multistage
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# =============================================================================
# STAGE 1: Dependencies - Install workspace dependencies
# STAGE 1: Dependencies - Install only what the API needs
# =============================================================================
FROM oven/bun:1.2.8 AS deps

Expand All @@ -8,7 +8,16 @@ WORKDIR /app
# Copy root workspace config
COPY package.json bun.lock ./

# Copy all workspace package.json files
# Strip root package.json to only keep workspaces config.
# The root has frontend deps (design-system, react-dnd, sharp, semantic-release, etc.)
# that the API doesn't need. Removing them cuts ~800 packages from the install.
RUN cat package.json | bun -e " \
const pkg = JSON.parse(await Bun.stdin.text()); \
delete pkg.dependencies; delete pkg.devDependencies; delete pkg.scripts; \
console.log(JSON.stringify(pkg, null, 2));" > package.min.json \
&& mv package.min.json package.json

# Copy only the workspace package.json files the API depends on
COPY packages/auth/package.json ./packages/auth/
COPY packages/db/package.json ./packages/db/
COPY packages/utils/package.json ./packages/utils/
Expand All @@ -20,16 +29,23 @@ COPY packages/company/package.json ./packages/company/
# Copy API package.json
COPY apps/api/package.json ./apps/api/

# Install all dependencies (including workspace deps)
RUN bun install
# Install dependencies — skip lifecycle scripts (husky, etc. not needed in Docker)
RUN bun install --ignore-scripts

# =============================================================================
# STAGE 2: Builder - Build workspace packages and NestJS app
# =============================================================================
FROM deps AS builder
FROM oven/bun:1.2.8 AS builder

WORKDIR /app

# Copy node_modules first (from deps stage), then source on top.
# This avoids conflicts between workspace symlinks and local node_modules
# that get included from the build context.
COPY --from=deps /app/node_modules ./node_modules
COPY --from=deps /app/package.json ./package.json
COPY --from=deps /app/bun.lock ./bun.lock

# Copy workspace packages source
COPY packages/auth ./packages/auth
COPY packages/db ./packages/db
Expand All @@ -42,66 +58,59 @@ COPY packages/company ./packages/company
# Copy API source
COPY apps/api ./apps/api

# Bring in node_modules from deps stage
COPY --from=deps /app/node_modules ./node_modules

# Build workspace packages
RUN cd packages/auth && bun run build && cd ../..
RUN cd packages/db && bun run build && cd ../..
RUN cd packages/integration-platform && bun run build && cd ../..
RUN cd packages/email && bun run build && cd ../..
RUN cd packages/company && bun run build && cd ../..
# Build db first — generates Prisma client needed by other packages
RUN cd packages/db && bun run build

# Generate Prisma client for API (copy schema and generate)
RUN cd packages/db && node scripts/combine-schemas.js && cd ../..
RUN cp packages/db/dist/schema.prisma apps/api/prisma/schema.prisma
RUN cd apps/api && bunx prisma generate
# Build remaining workspace packages
RUN cd packages/auth && bun run build \
&& cd ../integration-platform && bun run build \
&& cd ../email && bun run build \
&& cd ../company && bun run build

# Build NestJS application (skip prebuild since we already generated Prisma)
RUN cd apps/api && bunx nest build
# Generate Prisma schema for API and build NestJS app
RUN cd packages/db && node scripts/combine-schemas.js \
&& cp /app/packages/db/dist/schema.prisma /app/apps/api/prisma/schema.prisma \
&& cd /app/apps/api && bunx prisma generate && bunx nest build

# =============================================================================
# STAGE 3: Production Runtime
# =============================================================================
FROM node:20-slim AS production

# Create non-root user before copying files so COPY --chown can use it
RUN groupadd --system nestjs && useradd --system --gid nestjs --create-home nestjs

WORKDIR /app
RUN chown nestjs:nestjs /app

# Install runtime dependencies
RUN apt-get update && apt-get install -y --no-install-recommends wget openssl && rm -rf /var/lib/apt/lists/*

# Copy built NestJS app
COPY --from=builder /app/apps/api/dist ./dist
COPY --from=builder --chown=nestjs:nestjs /app/apps/api/dist ./dist

# Copy prisma files
COPY --from=builder /app/apps/api/prisma ./prisma
# Copy prisma schema (for reference only — client is already generated in node_modules)
COPY --from=builder --chown=nestjs:nestjs /app/apps/api/prisma ./prisma

# Copy package.json (for any runtime needs)
COPY --from=builder /app/apps/api/package.json ./package.json
COPY --from=builder --chown=nestjs:nestjs /app/apps/api/package.json ./package.json

# Copy workspace packages that are referenced by node_modules symlinks
COPY --from=builder /app/packages/auth ./packages/auth
COPY --from=builder /app/packages/db ./packages/db
COPY --from=builder /app/packages/utils ./packages/utils
COPY --from=builder /app/packages/integration-platform ./packages/integration-platform
COPY --from=builder /app/packages/tsconfig ./packages/tsconfig
COPY --from=builder /app/packages/email ./packages/email
COPY --from=builder /app/packages/company ./packages/company
COPY --from=builder --chown=nestjs:nestjs /app/packages/auth ./packages/auth
COPY --from=builder --chown=nestjs:nestjs /app/packages/db ./packages/db
COPY --from=builder --chown=nestjs:nestjs /app/packages/utils ./packages/utils
COPY --from=builder --chown=nestjs:nestjs /app/packages/integration-platform ./packages/integration-platform
COPY --from=builder --chown=nestjs:nestjs /app/packages/tsconfig ./packages/tsconfig
COPY --from=builder --chown=nestjs:nestjs /app/packages/email ./packages/email
COPY --from=builder --chown=nestjs:nestjs /app/packages/company ./packages/company

# Copy production node_modules (includes symlinks to workspace packages above)
COPY --from=builder /app/node_modules ./node_modules
# Copy production node_modules (includes Prisma client already generated for linux/amd64)
COPY --from=builder --chown=nestjs:nestjs /app/node_modules ./node_modules

# Set production environment
ENV NODE_ENV=production
ENV PORT=3333

# Regenerate Prisma client for this runtime environment
RUN npx prisma generate --schema=./prisma/schema.prisma

# Create non-root user
RUN groupadd --system nestjs && useradd --system --gid nestjs nestjs \
&& chown -R nestjs:nestjs /app

USER nestjs

EXPOSE 3333
Expand Down
20 changes: 12 additions & 8 deletions apps/api/buildspec.multistage.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
version: 0.2

# Simplified buildspec that uses multi-stage Docker build
# all building happens inside Docker - CodeBuild just orchestrates ECR/ECS
# Simplified buildspec that uses multi-stage Docker build.
# All building happens inside Docker CodeBuild just orchestrates ECR/ECS.

phases:
pre_build:
Expand All @@ -10,12 +10,21 @@ phases:
- aws ecr get-login-password --region $AWS_DEFAULT_REGION | docker login --username AWS --password-stdin $AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com
- COMMIT_HASH=$(echo $CODEBUILD_RESOLVED_SOURCE_VERSION | cut -c 1-7)
- IMAGE_TAG=${COMMIT_HASH:=latest}
# Pull latest image for Docker layer cache (ignore failure on first build)
- docker pull $ECR_REPOSITORY_URI:latest || true

build:
commands:
- echo "Building Docker image with multi-stage build..."
- cd apps/api
- docker build --build-arg BUILDKIT_INLINE_CACHE=1 --target production -f Dockerfile.multistage -t $ECR_REPOSITORY_URI:$IMAGE_TAG ../..
- >-
docker build
--build-arg BUILDKIT_INLINE_CACHE=1
--cache-from $ECR_REPOSITORY_URI:latest
--target production
-f Dockerfile.multistage
-t $ECR_REPOSITORY_URI:$IMAGE_TAG
../..
- docker tag $ECR_REPOSITORY_URI:$IMAGE_TAG $ECR_REPOSITORY_URI:latest

post_build:
Expand All @@ -27,12 +36,7 @@ phases:
- aws ecs update-service --cluster $ECS_CLUSTER_NAME --service $ECS_SERVICE_NAME --force-new-deployment
- 'printf "[{\"name\":\"%s-container\",\"imageUri\":\"%s\"}]" api $ECR_REPOSITORY_URI:$IMAGE_TAG > imagedefinitions.json'

cache:
paths:
- '/root/.docker/buildx/cache/**/*'

artifacts:
files:
- imagedefinitions.json
name: ${APP_NAME}-build

3 changes: 1 addition & 2 deletions apps/api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
"express": "^4.21.2",
"helmet": "^8.1.0",
"jose": "^6.0.12",
"jspdf": "^3.0.3",
"jspdf": "^4.2.0",
"mammoth": "^1.8.0",
"nanoid": "^5.1.6",
"pdf-lib": "^1.17.1",
Expand All @@ -63,7 +63,6 @@
"safe-stable-stringify": "^2.5.0",
"stripe": "^20.4.0",
"swagger-ui-express": "^5.0.1",
"xlsx": "^0.18.5",
"zod": "^4.0.14"
},
"devDependencies": {
Expand Down
14 changes: 7 additions & 7 deletions apps/api/src/organization/organization.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -349,17 +349,17 @@ export class OrganizationService {
},
employee: {
policyNotifications: true,
taskReminders: true,
taskAssignments: true,
taskMentions: true,
weeklyTaskDigest: true,
taskReminders: false,
taskAssignments: false,
taskMentions: false,
weeklyTaskDigest: false,
findingNotifications: false,
},
contractor: {
policyNotifications: true,
taskReminders: true,
taskAssignments: true,
taskMentions: true,
taskReminders: false,
taskAssignments: false,
taskMentions: false,
weeklyTaskDigest: false,
findingNotifications: false,
},
Expand Down
2 changes: 1 addition & 1 deletion apps/api/src/questionnaire/questionnaire.service.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ describe('QuestionnaireService', () => {
mimeType: 'text/csv',
filename: 'test.csv',
};
(generateExportFile as jest.Mock).mockReturnValue(mockExport);
(generateExportFile as jest.Mock).mockResolvedValue(mockExport);

const result = await service.exportById({
questionnaireId: 'q1',
Expand Down
6 changes: 3 additions & 3 deletions apps/api/src/questionnaire/questionnaire.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ export class QuestionnaireService {
const zip = new AdmZip();

for (const format of formats) {
const exportFile = generateExportFile(
const exportFile = await generateExportFile(
answered.map((a) => ({ question: a.question, answer: a.answer })),
format,
vendorName,
Expand Down Expand Up @@ -182,7 +182,7 @@ export class QuestionnaireService {
}

// Single format export (default behavior)
const exportFile = generateExportFile(
const exportFile = await generateExportFile(
answered.map((a) => ({ question: a.question, answer: a.answer })),
dto.format as ExportFormat,
vendorName,
Expand Down Expand Up @@ -433,7 +433,7 @@ export class QuestionnaireService {
format: dto.format,
});

return generateExportFile(
return await generateExportFile(
questionsAndAnswers,
dto.format as ExportFormat,
questionnaire.filename,
Expand Down
100 changes: 100 additions & 0 deletions apps/api/src/questionnaire/utils/content-extractor.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
import { extractContentFromFile } from './content-extractor';
import ExcelJS from 'exceljs';

// Mock AI dependencies: replace the AI SDK provider modules and the `ai`
// package with inert jest.fn stubs so this spec can import the module under
// test without real providers configured. Only the file-parsing paths are
// exercised below — NOTE(review): assumes no test here reaches the AI calls.
jest.mock('@ai-sdk/openai', () => ({ openai: jest.fn() }));
jest.mock('@ai-sdk/anthropic', () => ({ anthropic: jest.fn() }));
// createGroq is a factory that returns a provider, so the stub returns a fn.
jest.mock('@ai-sdk/groq', () => ({ createGroq: jest.fn(() => jest.fn()) }));
jest.mock('ai', () => ({
  generateText: jest.fn(),
  generateObject: jest.fn(),
  jsonSchema: jest.fn((s) => s), // identity passthrough for schema wrapping
}));

/**
 * Build an in-memory .xlsx workbook for test fixtures.
 *
 * Each entry in `sheets` becomes one worksheet: `name` is the tab title and
 * `rows` are appended in order as worksheet rows. Returns the serialized
 * workbook as a Node Buffer, ready to be base64-encoded for the extractor.
 */
async function createTestExcelBuffer(
  sheets: { name: string; rows: (string | number)[][] }[],
): Promise<Buffer> {
  const workbook = new ExcelJS.Workbook();
  sheets.forEach(({ name, rows }) => {
    const worksheet = workbook.addWorksheet(name);
    rows.forEach((row) => worksheet.addRow(row));
  });
  return Buffer.from(await workbook.xlsx.writeBuffer());
}

describe('content-extractor: extractContentFromFile', () => {
const XLSX_MIME =
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';

it('should extract content from an Excel file with headers', async () => {
const buffer = await createTestExcelBuffer([
{
name: 'Survey',
rows: [
['Question', 'Response', 'Comment'],
['Do you agree?', 'Yes', 'Fully agree'],
['Rating?', '5', ''],
],
},
]);

const base64 = buffer.toString('base64');
const result = await extractContentFromFile(base64, XLSX_MIME);

expect(result).toContain('Question');
expect(result).toContain('Do you agree?');
expect(result).toContain('Yes');
expect(result).toContain('Rating?');
});

it('should extract content from multiple sheets', async () => {
const buffer = await createTestExcelBuffer([
{ name: 'General', rows: [['Info', 'Details'], ['Name', 'Acme Corp']] },
{ name: 'Security', rows: [['Control', 'Status'], ['MFA', 'Enabled']] },
]);

const base64 = buffer.toString('base64');
const result = await extractContentFromFile(base64, XLSX_MIME);

expect(result).toContain('Acme Corp');
expect(result).toContain('MFA');
});

it('should handle CSV files', async () => {
const csv = 'question,answer\nWhat is 2+2?,4\n';
const base64 = Buffer.from(csv).toString('base64');

const result = await extractContentFromFile(base64, 'text/csv');

expect(result).toContain('question,answer');
expect(result).toContain('What is 2+2?,4');
});

it('should handle plain text files', async () => {
const text = 'Some compliance document content';
const base64 = Buffer.from(text).toString('base64');

const result = await extractContentFromFile(base64, 'text/plain');

expect(result).toBe(text);
});

it('should throw for Word documents', async () => {
const base64 = Buffer.from('fake').toString('base64');

await expect(
extractContentFromFile(base64, 'application/msword'),
).rejects.toThrow('Word documents');
});

it('should throw for unsupported types', async () => {
const base64 = Buffer.from('data').toString('base64');

await expect(
extractContentFromFile(base64, 'application/octet-stream'),
).rejects.toThrow('Unsupported file type');
});
});
Loading
Loading