Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Add missing schema changes introduced in [#1170](https://github.com/sourcebot-dev/sourcebot/pull/1170). [#1176](https://github.com/sourcebot-dev/sourcebot/pull/1176)
- Fixed blame gutter commit navigation to use the file path as it existed at the attributing commit, so clicking a blame line whose commit predates a rename resolves to the correct historical path. [#1178](https://github.com/sourcebot-dev/sourcebot/pull/1178)

### Changed
- Reduced the log verbosity of the worker by changing various log messages from info to debug. [#1179](https://github.com/sourcebot-dev/sourcebot/pull/1179)

## [4.17.1] - 2026-05-04

### Added
Expand Down
2 changes: 1 addition & 1 deletion packages/backend/src/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ export class Api {
app.post(`/api/experimental/add-github-repo`, this.experimental_addGithubRepo.bind(this));

this.server = app.listen(PORT, () => {
logger.info(`API server is running on port ${PORT}`);
logger.debug(`API server is running on port ${PORT}`);
});
}

Expand Down
4 changes: 2 additions & 2 deletions packages/backend/src/azuredevops.ts
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@ async function getRepos(
const results = await Promise.allSettled(repoList.map(async (repo) => {
try {
const [org, projectName, repoName] = repo.split('/');
logger.info(`Fetching repository info for ${repo}...`);
logger.debug(`Fetching repository info for ${repo}...`);

const { durationMs, data: result } = await measure(async () => {
const fetchFn = async () => {
Expand All @@ -306,7 +306,7 @@ async function getRepos(
return fetchWithRetry(fetchFn, repo, logger);
});

logger.info(`Found info for repository ${repo} in ${durationMs}ms`);
logger.debug(`Found info for repository ${repo} in ${durationMs}ms`);
return {
type: 'valid' as const,
data: [result]
Expand Down
6 changes: 3 additions & 3 deletions packages/backend/src/configManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ export class ConfigManager {
});

this.watcher.on('change', async () => {
logger.info(`Config file ${configPath} changed. Syncing config.`);
logger.debug(`Config file ${configPath} changed. Syncing config.`);
try {
await this.syncConfig(configPath);
} catch (error) {
Expand Down Expand Up @@ -101,7 +101,7 @@ export class ConfigManager {
});

if (connectionNeedsSyncing) {
logger.info(`Change detected for connection '${key}' (id: ${connection.id}). Creating sync job.`);
logger.debug(`Change detected for connection '${key}' (id: ${connection.id}). Creating sync job.`);
await this.connectionManager.createJobs([connection]);
}
}
Expand All @@ -119,7 +119,7 @@ export class ConfigManager {
});

for (const connection of deletedConnections) {
logger.info(`Deleting connection with name '${connection.name}'. Connection ID: ${connection.id}`);
logger.debug(`Deleting connection with name '${connection.name}'. Connection ID: ${connection.id}`);
await this.db.connection.delete({
where: {
id: connection.id,
Expand Down
10 changes: 5 additions & 5 deletions packages/backend/src/connectionManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ export class ConnectionManager {
});

for (const job of jobs) {
logger.info(`Scheduling job ${job.id} for connection ${job.connection.name} (id: ${job.connectionId})`);
logger.debug(`Scheduling job ${job.id} for connection ${job.connection.name} (id: ${job.connectionId})`);
await this.queue.add(
'connection-sync-job',
{
Expand All @@ -158,7 +158,7 @@ export class ConnectionManager {
private async runJob(job: Job<JobPayload>): Promise<JobResult> {
const { jobId, connectionName } = job.data;
const logger = createJobLogger(jobId);
logger.info(`Running connection sync job ${jobId} for connection ${connectionName} (id: ${job.data.connectionId})`);
logger.debug(`Running connection sync job ${jobId} for connection ${connectionName} (id: ${job.data.connectionId})`);

const currentStatus = await this.db.connectionSyncJob.findUniqueOrThrow({
where: {
Expand Down Expand Up @@ -261,7 +261,7 @@ export class ConnectionManager {
}
});
const deleteDuration = performance.now() - deleteStart;
logger.info(`Deleted all RepoToConnection records for connection ${connectionName} (id: ${job.data.connectionId}) in ${deleteDuration}ms`);
logger.debug(`Deleted all RepoToConnection records for connection ${connectionName} (id: ${job.data.connectionId}) in ${deleteDuration}ms`);

const totalUpsertStart = performance.now();
for (const repo of repoData) {
Expand All @@ -281,7 +281,7 @@ export class ConnectionManager {
logger.debug(`Upserted repo ${repo.displayName} (id: ${repo.external_id}) in ${upsertDuration}ms`);
}
const totalUpsertDuration = performance.now() - totalUpsertStart;
logger.info(`Upserted ${repoData.length} repos for connection ${connectionName} (id: ${job.data.connectionId}) in ${totalUpsertDuration}ms`);
logger.debug(`Upserted ${repoData.length} repos for connection ${connectionName} (id: ${job.data.connectionId}) in ${totalUpsertDuration}ms`);
}, { timeout: env.CONNECTION_MANAGER_UPSERT_TIMEOUT_MS });

return {
Expand Down Expand Up @@ -330,7 +330,7 @@ export class ConnectionManager {
}
}

logger.info(`Connection sync job ${job.id} for connection ${job.data.connectionName} (id: ${job.data.connectionId}) completed`);
logger.debug(`Connection sync job ${job.id} for connection ${job.data.connectionName} (id: ${job.data.connectionId}) completed`);

this.promClient.activeConnectionSyncJobs.dec({ connection: connectionName });
this.promClient.connectionSyncJobSuccessTotal.inc({ connection: connectionName });
Expand Down
4 changes: 2 additions & 2 deletions packages/backend/src/ee/accountPermissionSyncer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ export class AccountPermissionSyncer {

const config = await loadConfig(env.CONFIG_PATH);

logger.info(`Syncing permissions for ${account.provider} account (id: ${account.id}) for user ${account.user.email}...`);
logger.debug(`Syncing permissions for ${account.provider} account (id: ${account.id}) for user ${account.user.email}...`);

// Ensure the OAuth token is fresh, refreshing it if it is expired or near expiry.
// Throws and sets Account.tokenRefreshErrorMessage if the refresh fails.
Expand Down Expand Up @@ -370,7 +370,7 @@ export class AccountPermissionSyncer {
}
});

logger.info(`Permissions synced for ${account.provider} account (id: ${account.id}) for user ${account.user.email}`);
logger.debug(`Permissions synced for ${account.provider} account (id: ${account.id}) for user ${account.user.email}`);
}

private async onJobFailed(job: Job<AccountPermissionSyncJob> | undefined, err: Error) {
Expand Down
8 changes: 4 additions & 4 deletions packages/backend/src/ee/auditLogPruner.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ export class AuditLogPruner {
return;
}

logger.info(`Audit log pruner started. Retaining logs for ${env.SOURCEBOT_EE_AUDIT_RETENTION_DAYS} days.`);
logger.debug(`Audit log pruner started. Retaining logs for ${env.SOURCEBOT_EE_AUDIT_RETENTION_DAYS} days.`);

// Run immediately on startup, then every 24 hours
this.pruneOldAuditLogs();
Expand All @@ -41,7 +41,7 @@ export class AuditLogPruner {
const cutoff = new Date(Date.now() - env.SOURCEBOT_EE_AUDIT_RETENTION_DAYS * ONE_DAY_MS);
let totalDeleted = 0;

logger.info(`Pruning audit logs older than ${cutoff.toISOString()}...`);
logger.debug(`Pruning audit logs older than ${cutoff.toISOString()}...`);

// Delete in batches to avoid long-running transactions
while (true) {
Expand All @@ -63,9 +63,9 @@ export class AuditLogPruner {
}

if (totalDeleted > 0) {
logger.info(`Pruned ${totalDeleted} audit log records.`);
logger.debug(`Pruned ${totalDeleted} audit log records.`);
} else {
logger.info('No audit log records to prune.');
logger.debug('No audit log records to prune.');
}
}
}
4 changes: 2 additions & 2 deletions packages/backend/src/ee/repoPermissionSyncer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ export class RepoPermissionSyncer {
throw new Error(`Repo ${id} not found`);
}

logger.info(`Syncing permissions for repo ${repo.displayName}...`);
logger.debug(`Syncing permissions for repo ${repo.displayName}...`);

const credentials = await getAuthCredentialsForRepo(repo, logger);
if (!credentials) {
Expand Down Expand Up @@ -388,7 +388,7 @@ export class RepoPermissionSyncer {
}
});

logger.info(`Permissions synced for repo ${repo.displayName ?? repo.name}`);
logger.debug(`Permissions synced for repo ${repo.displayName ?? repo.name}`);
}

private async onJobFailed(job: Job<RepoPermissionSyncJob> | undefined, err: Error) {
Expand Down
2 changes: 1 addition & 1 deletion packages/backend/src/ee/syncSearchContexts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ export const syncSearchContexts = async (params: SyncSearchContextsParams) => {
});

for (const context of deletedContexts) {
logger.info(`Deleting search context with name '${context.name}'. ID: ${context.id}`);
logger.debug(`Deleting search context with name '${context.name}'. ID: ${context.id}`);
await db.searchContext.delete({
where: {
id: context.id,
Expand Down
2 changes: 1 addition & 1 deletion packages/backend/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ const prisma = new PrismaClient({

try {
await redis.ping();
logger.info('Connected to redis');
logger.debug('Connected to redis');
} catch (err: unknown) {
logger.error('Failed to connect to redis. Error:', err);
process.exit(1);
Expand Down
4 changes: 2 additions & 2 deletions packages/backend/src/repoCompileUtils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -609,7 +609,7 @@ export const compileGenericGitHostConfig_file = async (
};
}

logger.info(`Found ${repoPaths.length} path(s) matching pattern '${configUrl.pathname}'`);
logger.debug(`Found ${repoPaths.length} path(s) matching pattern '${configUrl.pathname}'`);

await Promise.all(repoPaths.map((repoPath) => gitOperationLimit(async () => {
const stat = await fs.stat(repoPath).catch(() => null);
Expand Down Expand Up @@ -691,7 +691,7 @@ export const compileGenericGitHostConfig_file = async (
logger.warn(warning);
warnings.push(warning);
} else {
logger.info(`Successfully found ${repos.length} valid git repository(s) from ${repoPaths.length} matched path(s)`);
logger.debug(`Successfully found ${repos.length} valid git repository(s) from ${repoPaths.length} matched path(s)`);
}

return {
Expand Down
32 changes: 15 additions & 17 deletions packages/backend/src/repoIndexManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,7 @@ export class RepoIndexManager {
private async runJob(job: Job<JobPayload>, signal: AbortSignal) {
const id = job.data.jobId;
const logger = createJobLogger(id);
logger.info(`Running ${job.data.type} job ${id} for repo ${job.data.repoName} (id: ${job.data.repoId})`);
logger.debug(`Running ${job.data.type} job ${id} for repo ${job.data.repoName} (id: ${job.data.repoId})`);

const currentStatus = await this.db.repoIndexingJob.findUniqueOrThrow({
where: {
Expand Down Expand Up @@ -383,7 +383,7 @@ export class RepoIndexManager {
signal,
});

logger.info(`Fetching ${repo.name} (id: ${repo.id})...`);
logger.debug(`Fetching ${repo.name} (id: ${repo.id})...`);
const { durationMs } = await measure(() => fetchRepository({
cloneUrl: cloneUrlMaybeWithToken,
authHeader,
Expand All @@ -395,10 +395,9 @@ export class RepoIndexManager {
}));
const fetchDuration_s = durationMs / 1000;

process.stdout.write('\n');
logger.info(`Fetched ${repo.name} (id: ${repo.id}) in ${fetchDuration_s}s`);
logger.debug(`Fetched ${repo.name} (id: ${repo.id}) in ${fetchDuration_s}s`);
} else if (!isReadOnly) {
logger.info(`Cloning ${repo.name} (id: ${repo.id})...`);
logger.debug(`Cloning ${repo.name} (id: ${repo.id})...`);

const { durationMs } = await measure(() => cloneRepository({
cloneUrl: cloneUrlMaybeWithToken,
Expand All @@ -411,8 +410,7 @@ export class RepoIndexManager {
}));
const cloneDuration_s = durationMs / 1000;

process.stdout.write('\n');
logger.info(`Cloned ${repo.name} (id: ${repo.id}) in ${cloneDuration_s}s`);
logger.debug(`Cloned ${repo.name} (id: ${repo.id}) in ${cloneDuration_s}s`);
}

// Regardless of clone or fetch, always upsert the git config for the repo.
Expand Down Expand Up @@ -478,11 +476,11 @@ export class RepoIndexManager {
revisions = revisions.slice(0, 64);
}

logger.info(`Indexing ${repo.name} (id: ${repo.id})...`);
logger.debug(`Indexing ${repo.name} (id: ${repo.id})...`);
try {
const { durationMs } = await measure(() => indexGitRepository(repo, this.settings, revisions, signal));
const indexDuration_s = durationMs / 1000;
logger.info(`Indexed ${repo.name} (id: ${repo.id}) in ${indexDuration_s}s`);
logger.debug(`Indexed ${repo.name} (id: ${repo.id}) in ${indexDuration_s}s`);
} catch (error) {
// Clean up any temporary shard files left behind by the failed indexing operation.
// Zoekt creates .tmp files during indexing which can accumulate if indexing fails repeatedly.
Expand All @@ -497,15 +495,15 @@ export class RepoIndexManager {
private async cleanupRepository(repo: Repo, logger: Logger) {
const { path: repoPath, isReadOnly } = getRepoPath(repo);
if (existsSync(repoPath) && !isReadOnly) {
logger.info(`Deleting repo directory ${repoPath}`);
logger.debug(`Deleting repo directory ${repoPath}`);
await rm(repoPath, { recursive: true, force: true });
}

const shardPrefix = getShardPrefix(repo.orgId, repo.id);
const files = (await readdir(INDEX_CACHE_DIR)).filter(file => file.startsWith(shardPrefix));
for (const file of files) {
const filePath = `${INDEX_CACHE_DIR}/${file}`;
logger.info(`Deleting shard file ${filePath}`);
logger.debug(`Deleting shard file ${filePath}`);
await rm(filePath, { force: true });
}
}
Expand Down Expand Up @@ -564,14 +562,14 @@ export class RepoIndexManager {
}
});

logger.info(`Completed index job ${job.data.jobId} for repo ${repo.name} (id: ${repo.id})`);
logger.debug(`Completed index job ${job.data.jobId} for repo ${repo.name} (id: ${repo.id})`);
}
else if (jobData.type === RepoIndexingJobType.CLEANUP) {
const repo = await this.db.repo.delete({
where: { id: jobData.repoId },
});

logger.info(`Completed cleanup job ${job.data.jobId} for repo ${repo.name} (id: ${repo.id})`);
logger.debug(`Completed cleanup job ${job.data.jobId} for repo ${repo.name} (id: ${repo.id})`);
}

// Track metrics for successful job
Expand Down Expand Up @@ -604,7 +602,7 @@ export class RepoIndexManager {
// or if it is being retried.
const jobState = await job.getState();
if (jobState !== 'failed') {
jobLogger.warn(`Job ${job.id} for repo ${job.data.repoName} (id: ${job.data.repoId}) failed. Retrying...`);
jobLogger.warn(`Job ${job.id} for repo ${job.data.repoName} (id: ${job.data.repoId}) failed. Retrying... Reason: ${error.message}`);
return;
}

Expand All @@ -626,7 +624,7 @@ export class RepoIndexManager {
this.promClient.activeRepoIndexJobs.dec({ repo: job.data.repoName, type: jobTypeLabel });
this.promClient.repoIndexJobFailTotal.inc({ repo: job.data.repoName, type: jobTypeLabel });

jobLogger.error(`Failed job ${job.data.jobId} for repo ${repo.name} (id: ${repo.id}).`);
jobLogger.error(`Failed job ${job.data.jobId} for repo ${repo.name} (id: ${repo.id}). Reason: ${error.message}`);

captureEvent('backend_repo_index_job_failed', {
repoId: job.data.repoId,
Expand Down Expand Up @@ -664,7 +662,7 @@ export class RepoIndexManager {
const existingIds = new Set(existingRepos.map(r => r.id));
for (const [repoId, repoPath] of repoIdToPath) {
if (!existingIds.has(repoId)) {
logger.info(`Removing orphaned repo directory with no DB record: ${repoPath}`);
logger.debug(`Removing orphaned repo directory with no DB record: ${repoPath}`);
await rm(repoPath, { recursive: true, force: true });
}
}
Expand Down Expand Up @@ -695,7 +693,7 @@ export class RepoIndexManager {
if (!existingIds.has(repoId)) {
for (const entry of shards) {
const shardPath = `${INDEX_CACHE_DIR}/${entry}`;
logger.info(`Removing orphaned index shard with no DB record: ${shardPath}`);
logger.debug(`Removing orphaned index shard with no DB record: ${shardPath}`);
await rm(shardPath, { force: true });
}
}
Expand Down
10 changes: 4 additions & 6 deletions packages/backend/src/zoekt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,14 +37,12 @@ export const indexGitRepository = async (repo: Repo, settings: Settings, revisio

if (stdout) {
stdout.split('\n').filter(line => line.trim()).forEach(line => {
logger.info(line);
logger.debug(line);
});
}
if (stderr) {
stderr.split('\n').filter(line => line.trim()).forEach(line => {
// TODO: logging as regular info here and not error because non error logs are being
// streamed in stderr and incorrectly being logged as errors at a high level
logger.info(line);
logger.debug(line);
});
}
Comment on lines 43 to 47
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor | ⚡ Quick win

stderr from zoekt-git-index should stay at warn (or info) to preserve observability of non-fatal indexing issues.

zoekt-git-index (like most CLI tools) emits non-fatal warnings — e.g., files skipped due to size, permission issues, branch resolution problems — to stderr while still exiting with code 0. The exec callback only rejects on a non-zero exit code (line 33), so the stderr log is the only production signal for these warnings. Downgrading it to debug makes them invisible at default log levels and will make diagnosing partial indexing failures harder.

🔧 Suggested fix: log stderr at `warn` level
         if (stderr) {
             stderr.split('\n').filter(line => line.trim()).forEach(line => {
-                logger.debug(line);
+                logger.warn(line);
             });
         }
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
if (stderr) {
stderr.split('\n').filter(line => line.trim()).forEach(line => {
// TODO: logging as regular info here and not error because non error logs are being
// streamed in stderr and incorrectly being logged as errors at a high level
logger.info(line);
logger.debug(line);
});
}
if (stderr) {
stderr.split('\n').filter(line => line.trim()).forEach(line => {
logger.warn(line);
});
}
🤖 Prompt for AI Agents
Verify each finding against current code. Fix only still-valid issues, skip the
rest with a brief reason, keep changes minimal, and validate.

In `@packages/backend/src/zoekt.ts` around lines 43-47, the stderr output from
the exec callback is being logged at debug via logger.debug, which hides
non-fatal warnings from zoekt-git-index; change the stderr handling in the exec
callback (the block that currently calls stderr.split(...).forEach(line =>
logger.debug(line))) to log each non-empty stderr line at warn (or info) level
instead, include a short contextual prefix like "zoekt-git-index:" or "zoekt
index warning" in the message so it's clear in logs, and otherwise keep the same
splitting/filtering logic.


Expand Down Expand Up @@ -74,12 +72,12 @@ export const cleanupTempShards = async (repo: Repo) => {

for (const file of tempFiles) {
const filePath = `${INDEX_CACHE_DIR}/${file}`;
logger.info(`Cleaning up temp shard file: ${filePath}`);
logger.debug(`Cleaning up temp shard file: ${filePath}`);
await rm(filePath, { force: true });
}

if (tempFiles.length > 0) {
logger.info(`Cleaned up ${tempFiles.length} temp shard file(s) for repo ${repo.id}`);
logger.debug(`Cleaned up ${tempFiles.length} temp shard file(s) for repo ${repo.id}`);
}
} catch (error) {
// Log but don't throw - cleanup is best effort
Expand Down
2 changes: 1 addition & 1 deletion packages/web/src/app/api/(server)/health/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { createLogger } from "@sourcebot/shared";
const logger = createLogger('health-check');

export const GET = apiHandler(async () => {
logger.info('health check');
logger.debug('health check');
return Response.json({ status: 'ok' });
}, { track: false });

Loading