Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions apps/dokploy/drizzle/0148_sftp_destination.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
ALTER TABLE "destination" ADD COLUMN "sftpHost" text;--> statement-breakpoint
ALTER TABLE "destination" ADD COLUMN "sftpPort" integer DEFAULT 22;--> statement-breakpoint
ALTER TABLE "destination" ADD COLUMN "sftpUser" text;--> statement-breakpoint
ALTER TABLE "destination" ADD COLUMN "sftpPassword" text;--> statement-breakpoint
ALTER TABLE "destination" ADD COLUMN "sftpPath" text;
24 changes: 22 additions & 2 deletions packages/server/src/db/schema/destination.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { pgTable, text, timestamp } from "drizzle-orm/pg-core";
import { integer, pgTable, text, timestamp } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
Expand All @@ -18,6 +18,12 @@ export const destinations = pgTable("destination", {
bucket: text("bucket").notNull(),
region: text("region").notNull(),
endpoint: text("endpoint").notNull(),
// SFTP-specific columns
sftpHost: text("sftpHost"),
sftpPort: integer("sftpPort").default(22),
sftpUser: text("sftpUser"),
sftpPassword: text("sftpPassword"),
sftpPath: text("sftpPath"),
organizationId: text("organizationId")
.notNull()
.references(() => organization.id, { onDelete: "cascade" }),
Expand All @@ -44,6 +50,11 @@ const createSchema = createInsertSchema(destinations, {
endpoint: z.string(),
secretAccessKey: z.string(),
region: z.string(),
sftpHost: z.string().optional(),
sftpPort: z.number().int().default(22),
sftpUser: z.string().optional(),
sftpPassword: z.string().optional(),
sftpPath: z.string().optional(),
});

export const apiCreateDestination = createSchema
Expand All @@ -55,8 +66,12 @@ export const apiCreateDestination = createSchema
region: true,
endpoint: true,
secretAccessKey: true,
sftpHost: true,
sftpPort: true,
sftpUser: true,
sftpPassword: true,
sftpPath: true,
})
.required()
.extend({
serverId: z.string().optional(),
});
Expand All @@ -81,6 +96,11 @@ export const apiUpdateDestination = createSchema
secretAccessKey: true,
destinationId: true,
provider: true,
sftpHost: true,
sftpPort: true,
sftpUser: true,
sftpPassword: true,
sftpPath: true,
})
.required()
.extend({
Expand Down
14 changes: 11 additions & 3 deletions packages/server/src/utils/backups/compose.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import { findEnvironmentById } from "@dokploy/server/services/environment";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneCredentials,
getRcloneDestinationPath,
normalizeS3Path,
} from "./utils";

export const runComposeBackup = async (
compose: Compose,
Expand All @@ -28,8 +33,11 @@ export const runComposeBackup = async (
});

try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const { flags: rcloneFlags } = getRcloneCredentials(destination);
const rcloneDestination = getRcloneDestinationPath(
destination,
bucketDestination,
);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

const backupCommand = getBackupCommand(
Expand Down
13 changes: 8 additions & 5 deletions packages/server/src/utils/backups/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import path from "node:path";
import { CLEANUP_CRON_JOB } from "@dokploy/server/constants";
import { member } from "@dokploy/server/db/schema";
import type { BackupSchedule } from "@dokploy/server/services/backup";
Expand All @@ -11,7 +10,11 @@ import { startLogCleanup } from "../access-log/handler";
import { cleanupAll } from "../docker/utils";
import { sendDockerCleanupNotifications } from "../notifications/docker-cleanup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials, scheduleBackup } from "./utils";
import {
getRcloneCredentials,
getRcloneDestinationPath,
scheduleBackup,
} from "./utils";

export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
Expand Down Expand Up @@ -116,9 +119,9 @@ export const keepLatestNBackups = async (
if (!backup.keepLatestCount) return;

try {
const rcloneFlags = getS3Credentials(backup.destination);
const backupFilesPath = path.join(
`:s3:${backup.destination.bucket}`,
const { flags: rcloneFlags } = getRcloneCredentials(backup.destination);
const backupFilesPath = getRcloneDestinationPath(
backup.destination,
backup.prefix,
);

Expand Down
14 changes: 11 additions & 3 deletions packages/server/src/utils/backups/mariadb.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import type { Mariadb } from "@dokploy/server/services/mariadb";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneCredentials,
getRcloneDestinationPath,
normalizeS3Path,
} from "./utils";

export const runMariadbBackup = async (
mariadb: Mariadb,
Expand All @@ -27,8 +32,11 @@ export const runMariadbBackup = async (
description: "MariaDB Backup",
});
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const { flags: rcloneFlags } = getRcloneCredentials(destination);
const rcloneDestination = getRcloneDestinationPath(
destination,
bucketDestination,
);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

const backupCommand = getBackupCommand(
Expand Down
14 changes: 11 additions & 3 deletions packages/server/src/utils/backups/mongo.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import type { Mongo } from "@dokploy/server/services/mongo";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneCredentials,
getRcloneDestinationPath,
normalizeS3Path,
} from "./utils";

export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
const { environmentId, name } = mongo;
Expand All @@ -24,8 +29,11 @@ export const runMongoBackup = async (mongo: Mongo, backup: BackupSchedule) => {
description: "MongoDB Backup",
});
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const { flags: rcloneFlags } = getRcloneCredentials(destination);
const rcloneDestination = getRcloneDestinationPath(
destination,
bucketDestination,
);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

const backupCommand = getBackupCommand(
Expand Down
14 changes: 11 additions & 3 deletions packages/server/src/utils/backups/mysql.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import type { MySql } from "@dokploy/server/services/mysql";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneCredentials,
getRcloneDestinationPath,
normalizeS3Path,
} from "./utils";

export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
const { environmentId, name } = mysql;
Expand All @@ -25,8 +30,11 @@ export const runMySqlBackup = async (mysql: MySql, backup: BackupSchedule) => {
});

try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const { flags: rcloneFlags } = getRcloneCredentials(destination);
const rcloneDestination = getRcloneDestinationPath(
destination,
bucketDestination,
);

const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

Expand Down
14 changes: 11 additions & 3 deletions packages/server/src/utils/backups/postgres.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,12 @@ import type { Postgres } from "@dokploy/server/services/postgres";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import {
getBackupCommand,
getRcloneCredentials,
getRcloneDestinationPath,
normalizeS3Path,
} from "./utils";

export const runPostgresBackup = async (
postgres: Postgres,
Expand All @@ -28,8 +33,11 @@ export const runPostgresBackup = async (
const backupFileName = `${new Date().toISOString()}.sql.gz`;
const bucketDestination = `${normalizeS3Path(prefix)}${backupFileName}`;
try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const { flags: rcloneFlags } = getRcloneCredentials(destination);
const rcloneDestination = getRcloneDestinationPath(
destination,
bucketDestination,
);

const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

Expand Down
35 changes: 35 additions & 0 deletions packages/server/src/utils/backups/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,41 @@ export const getS3Credentials = (destination: Destination) => {
return rcloneFlags;
};

export const getSftpCredentials = (destination: Destination) => {
const { sftpHost, sftpPort, sftpUser, sftpPassword } = destination;
if (!sftpHost || !sftpUser || !sftpPassword) {
throw new Error(
"SFTP destination requires host, user, and password to be configured",
);
}
return [
`--sftp-host="${sftpHost}"`,
`--sftp-port="${sftpPort ?? 22}"`,
`--sftp-user="${sftpUser}"`,
`--sftp-pass=$(rclone obscure "${sftpPassword}")`,
];
Comment on lines +80 to +92
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

--sftp-pass-is-base64=false is not a valid rclone SFTP flag. More critically, rclone's --sftp-pass expects a password that has been pre-obscured using rclone obscure <password>, not plaintext. Passing a raw plaintext password will result in rclone failing to authenticate.

The password must be obscured before being embedded in the flag—either store it already-obscured in the database, or obscure it at runtime via a subprocess call to rclone obscure.

Comment on lines +80 to +92
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

sftpHost, sftpUser, and sftpPassword are all nullable columns (no .notNull() constraint), so when null they are interpolated as the literal strings "null" in template literals. This produces invalid rclone flags like --sftp-host="null" and --sftp-user="null", causing silent rclone failures.

Add explicit null-checks before constructing flags:

if (!sftpHost || !sftpUser || !sftpPassword) {
  throw new Error("SFTP destination is missing required credentials");
}

Alternatively, add .notNull() constraints to these columns in the schema if they are required for SFTP destinations.

};

/**
 * Resolves the rclone flags and backend name for a destination.
 * SFTP providers route to the SFTP flag builder; every other provider
 * is treated as S3.
 */
export const getRcloneCredentials = (
	destination: Destination,
): { flags: string[]; backend: string } => {
	const usesSftp = destination.provider === "sftp";
	return usesSftp
		? { flags: getSftpCredentials(destination), backend: "sftp" }
		: { flags: getS3Credentials(destination), backend: "s3" };
};

/**
 * Builds the rclone remote path for a backup object.
 * S3 destinations use the connection-string form `:s3:<bucket>/<path>`;
 * SFTP destinations prefix the configured base directory (with trailing
 * slashes stripped) as `:sftp:<base>/<path>`.
 */
export const getRcloneDestinationPath = (
	destination: Destination,
	remotePath: string,
): string => {
	if (destination.provider !== "sftp") {
		return `:s3:${destination.bucket}/${remotePath}`;
	}
	const baseDir = (destination.sftpPath ?? "").replace(/\/+$/, "");
	return `:sftp:${baseDir}/${remotePath}`;
};
Comment on lines +95 to +113
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The new getRcloneCredentials and getRcloneDestinationPath dispatcher functions (lines 91–109) must be used by every backup call site for SFTP support to work end-to-end. (Note: this comment appears stale — the diff as shown already updates postgres.ts, mysql.ts, mariadb.ts, mongo.ts, compose.ts, web-server.ts, and index.ts to import and call these dispatchers instead of getS3Credentials and hardcoded :s3: paths.)

For example, in postgres.ts (line 31–32):

const rcloneFlags = getS3Credentials(destination);           // never routes to SFTP
const rcloneDestination = `:s3:${destination.bucket}/...`;  // always S3

All backup execution and cleanup call sites must be updated to use the new dispatcher functions for SFTP support to work end-to-end.


export const getPostgresBackupCommand = (
database: string,
databaseUser: string,
Expand Down
13 changes: 9 additions & 4 deletions packages/server/src/utils/backups/web-server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,11 @@ import {
} from "@dokploy/server/services/deployment";
import { findDestinationById } from "@dokploy/server/services/destination";
import { execAsync } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path } from "./utils";
import {
getRcloneCredentials,
getRcloneDestinationPath,
normalizeS3Path,
} from "./utils";

export const runWebServerBackup = async (backup: BackupSchedule) => {
if (IS_CLOUD) {
Expand All @@ -26,12 +30,13 @@ export const runWebServerBackup = async (backup: BackupSchedule) => {

try {
const destination = await findDestinationById(backup.destinationId);
const rcloneFlags = getS3Credentials(destination);
const { flags: rcloneFlags } = getRcloneCredentials(destination);
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
const { BASE_PATH } = paths();
const tempDir = await mkdtemp(join(tmpdir(), "dokploy-backup-"));
const backupFileName = `webserver-backup-${timestamp}.zip`;
const s3Path = `:s3:${destination.bucket}/${normalizeS3Path(backup.prefix)}${backupFileName}`;
const remotePath = `${normalizeS3Path(backup.prefix)}${backupFileName}`;
const destinationPath = getRcloneDestinationPath(destination, remotePath);

try {
await execAsync(`mkdir -p ${tempDir}/filesystem`);
Expand Down Expand Up @@ -79,7 +84,7 @@ export const runWebServerBackup = async (backup: BackupSchedule) => {

writeStream.write("Zipped database and filesystem\n");

const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${s3Path}"`;
const uploadCommand = `rclone copyto ${rcloneFlags.join(" ")} "${tempDir}/${backupFileName}" "${destinationPath}"`;
writeStream.write("Running command to upload backup to S3\n");
await execAsync(uploadCommand);
writeStream.write("Uploaded backup to S3 ✅\n");
Expand Down