diff --git a/prisma/migrations/20251118091618_add_backups_and_pbs_credentials/migration.sql b/prisma/migrations/20251118091618_add_backups_and_pbs_credentials/migration.sql new file mode 100644 index 0000000..a65f6da --- /dev/null +++ b/prisma/migrations/20251118091618_add_backups_and_pbs_credentials/migration.sql @@ -0,0 +1,41 @@ +-- CreateTable +CREATE TABLE IF NOT EXISTS "backups" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "container_id" TEXT NOT NULL, + "server_id" INTEGER NOT NULL, + "hostname" TEXT NOT NULL, + "backup_name" TEXT NOT NULL, + "backup_path" TEXT NOT NULL, + "size" BIGINT, + "created_at" DATETIME, + "storage_name" TEXT NOT NULL, + "storage_type" TEXT NOT NULL, + "discovered_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT "backups_server_id_fkey" FOREIGN KEY ("server_id") REFERENCES "servers" ("id") ON DELETE CASCADE ON UPDATE CASCADE +); + +-- CreateTable +CREATE TABLE IF NOT EXISTS "pbs_storage_credentials" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "server_id" INTEGER NOT NULL, + "storage_name" TEXT NOT NULL, + "pbs_ip" TEXT NOT NULL, + "pbs_datastore" TEXT NOT NULL, + "pbs_password" TEXT NOT NULL, + "pbs_fingerprint" TEXT NOT NULL, + "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" DATETIME NOT NULL, + CONSTRAINT "pbs_storage_credentials_server_id_fkey" FOREIGN KEY ("server_id") REFERENCES "servers" ("id") ON DELETE CASCADE ON UPDATE CASCADE +); + +-- CreateIndex +CREATE INDEX IF NOT EXISTS "backups_container_id_idx" ON "backups"("container_id"); + +-- CreateIndex +CREATE INDEX IF NOT EXISTS "backups_server_id_idx" ON "backups"("server_id"); + +-- CreateIndex +CREATE INDEX IF NOT EXISTS "pbs_storage_credentials_server_id_idx" ON "pbs_storage_credentials"("server_id"); + +-- CreateIndex +CREATE UNIQUE INDEX IF NOT EXISTS "pbs_storage_credentials_server_id_storage_name_key" ON "pbs_storage_credentials"("server_id", "storage_name"); diff --git a/prisma/schema.prisma 
b/prisma/schema.prisma index e985ad9..535f82b 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -41,6 +41,8 @@ model Server { ssh_key_path String? key_generated Boolean? @default(false) installed_scripts InstalledScript[] + backups Backup[] + pbs_credentials PBSStorageCredential[] @@map("servers") } @@ -96,6 +98,42 @@ model LXCConfig { @@map("lxc_configs") } +model Backup { + id Int @id @default(autoincrement()) + container_id String + server_id Int + hostname String + backup_name String + backup_path String + size BigInt? + created_at DateTime? + storage_name String + storage_type String // 'local', 'storage', or 'pbs' + discovered_at DateTime @default(now()) + server Server @relation(fields: [server_id], references: [id], onDelete: Cascade) + + @@index([container_id]) + @@index([server_id]) + @@map("backups") +} + +model PBSStorageCredential { + id Int @id @default(autoincrement()) + server_id Int + storage_name String + pbs_ip String + pbs_datastore String + pbs_password String + pbs_fingerprint String + created_at DateTime @default(now()) + updated_at DateTime @updatedAt + server Server @relation(fields: [server_id], references: [id], onDelete: Cascade) + + @@unique([server_id, storage_name]) + @@index([server_id]) + @@map("pbs_storage_credentials") +} + model Repository { id Int @id @default(autoincrement()) url String @unique diff --git a/restore.log b/restore.log new file mode 100644 index 0000000..0f654fb --- /dev/null +++ b/restore.log @@ -0,0 +1,10 @@ +Starting restore... +Reading container configuration... +Stopping container... +Destroying container... +Logging into PBS... +Downloading backup from PBS... +Packing backup folder... +Restoring container... +Cleaning up temporary files... 
+Restore completed successfully diff --git a/server.js b/server.js index caf8e9f..0e6371a 100644 --- a/server.js +++ b/server.js @@ -276,13 +276,15 @@ class ScriptExecutionHandler { * @param {WebSocketMessage} message */ async handleMessage(ws, message) { - const { action, scriptPath, executionId, input, mode, server, isUpdate, isShell, containerId } = message; + const { action, scriptPath, executionId, input, mode, server, isUpdate, isShell, isBackup, containerId, storage, backupStorage } = message; switch (action) { case 'start': if (scriptPath && executionId) { - if (isUpdate && containerId) { - await this.startUpdateExecution(ws, containerId, executionId, mode, server); + if (isBackup && containerId && storage) { + await this.startBackupExecution(ws, containerId, executionId, storage, mode, server); + } else if (isUpdate && containerId) { + await this.startUpdateExecution(ws, containerId, executionId, mode, server, backupStorage); } else if (isShell && containerId) { await this.startShellExecution(ws, containerId, executionId, mode, server); } else { @@ -660,6 +662,157 @@ class ScriptExecutionHandler { } } + /** + * Start backup execution + * @param {ExtendedWebSocket} ws + * @param {string} containerId + * @param {string} executionId + * @param {string} storage + * @param {string} mode + * @param {ServerInfo|null} server + */ + async startBackupExecution(ws, containerId, executionId, storage, mode = 'local', server = null) { + try { + // Send start message + this.sendMessage(ws, { + type: 'start', + data: `Starting backup for container ${containerId} to storage ${storage}...`, + timestamp: Date.now() + }); + + if (mode === 'ssh' && server) { + await this.startSSHBackupExecution(ws, containerId, executionId, storage, server); + } else { + this.sendMessage(ws, { + type: 'error', + data: 'Backup is only supported via SSH', + timestamp: Date.now() + }); + } + } catch (error) { + this.sendMessage(ws, { + type: 'error', + data: `Failed to start backup: ${error 
instanceof Error ? error.message : String(error)}`, + timestamp: Date.now() + }); + } + } + + /** + * Start SSH backup execution + * @param {ExtendedWebSocket} ws + * @param {string} containerId + * @param {string} executionId + * @param {string} storage + * @param {ServerInfo} server + * @param {Function} [onComplete] - Optional callback when backup completes + */ + startSSHBackupExecution(ws, containerId, executionId, storage, server, onComplete = null) { + const sshService = getSSHExecutionService(); + + return new Promise((resolve, reject) => { + try { + const backupCommand = `vzdump ${containerId} --storage ${storage} --mode snapshot`; + + // Wrap the onExit callback to resolve our promise + let promiseResolved = false; + + sshService.executeCommand( + server, + backupCommand, + /** @param {string} data */ + (data) => { + this.sendMessage(ws, { + type: 'output', + data: data, + timestamp: Date.now() + }); + }, + /** @param {string} error */ + (error) => { + this.sendMessage(ws, { + type: 'error', + data: error, + timestamp: Date.now() + }); + }, + /** @param {number} code */ + (code) => { + // Don't send 'end' message here if this is part of a backup+update flow + // The update flow will handle completion messages + const success = code === 0; + + if (!success) { + this.sendMessage(ws, { + type: 'error', + data: `Backup failed with exit code: ${code}`, + timestamp: Date.now() + }); + } + + // Send a completion message (but not 'end' type to avoid stopping terminal) + this.sendMessage(ws, { + type: 'output', + data: `\n[Backup ${success ? 
'completed' : 'failed'} with exit code: ${code}]\n`, + timestamp: Date.now() + }); + + if (onComplete) onComplete(success); + + // Resolve the promise when backup completes + // Use setImmediate to ensure resolution happens in the right execution context + if (!promiseResolved) { + promiseResolved = true; + const result = { success, code }; + + // Use setImmediate to ensure promise resolution happens in the next tick + // This ensures the await in startUpdateExecution can properly resume + setImmediate(() => { + try { + resolve(result); + } catch (resolveError) { + console.error('Error resolving backup promise:', resolveError); + reject(resolveError); + } + }); + } + + this.activeExecutions.delete(executionId); + } + ).then((execution) => { + // Store the execution + this.activeExecutions.set(executionId, { + process: /** @type {any} */ (execution).process, + ws + }); + // Note: Don't resolve here - wait for onExit callback + }).catch((error) => { + console.error('Error starting backup execution:', error); + this.sendMessage(ws, { + type: 'error', + data: `SSH backup execution failed: ${error instanceof Error ? error.message : String(error)}`, + timestamp: Date.now() + }); + if (onComplete) onComplete(false); + if (!promiseResolved) { + promiseResolved = true; + reject(error); + } + }); + + } catch (error) { + console.error('Error in startSSHBackupExecution:', error); + this.sendMessage(ws, { + type: 'error', + data: `SSH backup execution failed: ${error instanceof Error ? 
error.message : String(error)}`, + timestamp: Date.now() + }); + if (onComplete) onComplete(false); + reject(error); + } + }); + } + /** * Start update execution (pct enter + update command) * @param {ExtendedWebSocket} ws @@ -667,11 +820,62 @@ class ScriptExecutionHandler { * @param {string} executionId * @param {string} mode * @param {ServerInfo|null} server + * @param {string} [backupStorage] - Optional storage to backup to before update */ - async startUpdateExecution(ws, containerId, executionId, mode = 'local', server = null) { + async startUpdateExecution(ws, containerId, executionId, mode = 'local', server = null, backupStorage = null) { try { + // If backup storage is provided, run backup first + if (backupStorage && mode === 'ssh' && server) { + this.sendMessage(ws, { + type: 'start', + data: `Starting backup before update for container ${containerId}...`, + timestamp: Date.now() + }); + + // Create a separate execution ID for backup + const backupExecutionId = `backup_${executionId}`; + + // Run backup and wait for it to complete + try { + const backupResult = await this.startSSHBackupExecution( + ws, + containerId, + backupExecutionId, + backupStorage, + server + ); + + // Backup completed (successfully or not) + if (!backupResult || !backupResult.success) { + // Backup failed, but we'll still allow update (per requirement 1b) + this.sendMessage(ws, { + type: 'output', + data: '\n⚠️ Backup failed, but proceeding with update as requested...\n', + timestamp: Date.now() + }); + } else { + // Backup succeeded + this.sendMessage(ws, { + type: 'output', + data: '\n✅ Backup completed successfully. Starting update...\n', + timestamp: Date.now() + }); + } + } catch (error) { + console.error('Backup error before update:', error); + // Backup failed to start, but allow update to proceed + this.sendMessage(ws, { + type: 'output', + data: `\n⚠️ Backup error: ${error instanceof Error ? error.message : String(error)}. 
Proceeding with update...\n`, + timestamp: Date.now() + }); + } + + // Small delay before starting update + await new Promise(resolve => setTimeout(resolve, 1000)); + } - // Send start message + // Send start message for update (only if we're actually starting an update) this.sendMessage(ws, { type: 'start', data: `Starting update for container ${containerId}...`, diff --git a/src/app/_components/BackupWarningModal.tsx b/src/app/_components/BackupWarningModal.tsx new file mode 100644 index 0000000..d93f5c9 --- /dev/null +++ b/src/app/_components/BackupWarningModal.tsx @@ -0,0 +1,67 @@ +'use client'; + +import { Button } from './ui/button'; +import { AlertTriangle } from 'lucide-react'; +import { useRegisterModal } from './modal/ModalStackProvider'; + +interface BackupWarningModalProps { + isOpen: boolean; + onClose: () => void; + onProceed: () => void; +} + +export function BackupWarningModal({ + isOpen, + onClose, + onProceed +}: BackupWarningModalProps) { + useRegisterModal(isOpen, { id: 'backup-warning-modal', allowEscape: true, onClose }); + + if (!isOpen) return null; + + return ( +
+
+ {/* Header */} +
+
+ +

Backup Failed

+
+
+ + {/* Content */} +
+

+ The backup failed, but you can still proceed with the update if you wish. +

+ Warning: Proceeding without a backup means you won't be able to restore the container if something goes wrong during the update. +

+ + {/* Action Buttons */} +
+ + +
+
+
+
+ ); +} + + + diff --git a/src/app/_components/BackupsTab.tsx b/src/app/_components/BackupsTab.tsx new file mode 100644 index 0000000..2e24b7b --- /dev/null +++ b/src/app/_components/BackupsTab.tsx @@ -0,0 +1,503 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { api } from '~/trpc/react'; +import { Button } from './ui/button'; +import { Badge } from './ui/badge'; +import { RefreshCw, ChevronDown, ChevronRight, HardDrive, Database, Server, CheckCircle, AlertCircle } from 'lucide-react'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from './ui/dropdown-menu'; +import { ConfirmationModal } from './ConfirmationModal'; +import { LoadingModal } from './LoadingModal'; + +interface Backup { + id: number; + backup_name: string; + backup_path: string; + size: bigint | null; + created_at: Date | null; + storage_name: string; + storage_type: string; + discovered_at: Date; + server_id: number; + server_name: string | null; + server_color: string | null; +} + +interface ContainerBackups { + container_id: string; + hostname: string; + backups: Backup[]; +} + +export function BackupsTab() { + const [expandedContainers, setExpandedContainers] = useState>(new Set()); + const [hasAutoDiscovered, setHasAutoDiscovered] = useState(false); + const [restoreConfirmOpen, setRestoreConfirmOpen] = useState(false); + const [selectedBackup, setSelectedBackup] = useState<{ backup: Backup; containerId: string } | null>(null); + const [restoreProgress, setRestoreProgress] = useState([]); + const [restoreSuccess, setRestoreSuccess] = useState(false); + const [restoreError, setRestoreError] = useState(null); + const [shouldPollRestore, setShouldPollRestore] = useState(false); + + const { data: backupsData, refetch: refetchBackups, isLoading } = api.backups.getAllBackupsGrouped.useQuery(); + const discoverMutation = api.backups.discoverBackups.useMutation({ + onSuccess: () => { + void refetchBackups(); + }, + }); + + // Poll 
for restore progress + const { data: restoreLogsData } = api.backups.getRestoreProgress.useQuery(undefined, { + enabled: shouldPollRestore, + refetchInterval: 1000, // Poll every second + refetchIntervalInBackground: true, + }); + + // Update restore progress when log data changes + useEffect(() => { + if (restoreLogsData?.success && restoreLogsData.logs) { + setRestoreProgress(restoreLogsData.logs); + + // Stop polling when restore is complete + if (restoreLogsData.isComplete) { + setShouldPollRestore(false); + // Check if restore was successful or failed + const lastLog = restoreLogsData.logs[restoreLogsData.logs.length - 1] || ''; + if (lastLog.includes('Restore completed successfully')) { + setRestoreSuccess(true); + setRestoreError(null); + } else if (lastLog.includes('Error:') || lastLog.includes('failed')) { + setRestoreError(lastLog); + setRestoreSuccess(false); + } + } + } + }, [restoreLogsData]); + + const restoreMutation = api.backups.restoreBackup.useMutation({ + onMutate: () => { + // Start polling for progress + setShouldPollRestore(true); + setRestoreProgress(['Starting restore...']); + setRestoreError(null); + setRestoreSuccess(false); + }, + onSuccess: (result) => { + // Stop polling - progress will be updated from logs + setShouldPollRestore(false); + + if (result.success) { + // Update progress with all messages from backend (fallback if polling didn't work) + const progressMessages = restoreProgress.length > 0 ? 
restoreProgress : (result.progress?.map(p => p.message) || ['Restore completed successfully']); + setRestoreProgress(progressMessages); + setRestoreSuccess(true); + setRestoreError(null); + setRestoreConfirmOpen(false); + setSelectedBackup(null); + // Keep success message visible - user can dismiss manually + } else { + setRestoreError(result.error || 'Restore failed'); + setRestoreProgress(result.progress?.map(p => p.message) || restoreProgress); + setRestoreSuccess(false); + setRestoreConfirmOpen(false); + setSelectedBackup(null); + // Keep error message visible - user can dismiss manually + } + }, + onError: (error) => { + // Stop polling on error + setShouldPollRestore(false); + setRestoreError(error.message || 'Restore failed'); + setRestoreConfirmOpen(false); + setSelectedBackup(null); + setRestoreProgress([]); + }, + }); + + // Update progress text in modal based on current progress + const currentProgressText = restoreProgress.length > 0 + ? restoreProgress[restoreProgress.length - 1] + : 'Restoring backup...'; + + // Auto-discover backups when tab is first opened + useEffect(() => { + if (!hasAutoDiscovered && !isLoading && backupsData) { + // Only auto-discover if there are no backups yet + if (!backupsData.backups || backupsData.backups.length === 0) { + handleDiscoverBackups(); + } + setHasAutoDiscovered(true); + } + }, [hasAutoDiscovered, isLoading, backupsData]); + + const handleDiscoverBackups = () => { + discoverMutation.mutate(); + }; + + const handleRestoreClick = (backup: Backup, containerId: string) => { + setSelectedBackup({ backup, containerId }); + setRestoreConfirmOpen(true); + setRestoreError(null); + setRestoreSuccess(false); + setRestoreProgress([]); + }; + + const handleRestoreConfirm = () => { + if (!selectedBackup) return; + + setRestoreConfirmOpen(false); + setRestoreError(null); + setRestoreSuccess(false); + + restoreMutation.mutate({ + backupId: selectedBackup.backup.id, + containerId: selectedBackup.containerId, + serverId: 
selectedBackup.backup.server_id, + }); + }; + + const toggleContainer = (containerId: string) => { + const newExpanded = new Set(expandedContainers); + if (newExpanded.has(containerId)) { + newExpanded.delete(containerId); + } else { + newExpanded.add(containerId); + } + setExpandedContainers(newExpanded); + }; + + const formatFileSize = (bytes: bigint | null): string => { + if (!bytes) return 'Unknown size'; + const b = Number(bytes); + if (b === 0) return '0 B'; + const k = 1024; + const sizes = ['B', 'KB', 'MB', 'GB', 'TB']; + const i = Math.floor(Math.log(b) / Math.log(k)); + return `${(b / Math.pow(k, i)).toFixed(2)} ${sizes[i]}`; + }; + + const formatDate = (date: Date | null): string => { + if (!date) return 'Unknown date'; + return new Date(date).toLocaleString(); + }; + + const getStorageTypeIcon = (type: string) => { + switch (type) { + case 'pbs': + return ; + case 'local': + return ; + default: + return ; + } + }; + + const getStorageTypeBadgeVariant = (type: string): 'default' | 'secondary' | 'outline' => { + switch (type) { + case 'pbs': + return 'default'; + case 'local': + return 'secondary'; + default: + return 'outline'; + } + }; + + const backups = backupsData?.success ? backupsData.backups : []; + const isDiscovering = discoverMutation.isPending; + + return ( +
+ {/* Header with refresh button */} +
+
+

Backups

+

+ Discovered backups grouped by container ID +

+
+ +
+ + {/* Loading state */} + {(isLoading || isDiscovering) && backups.length === 0 && ( +
+ +

+ {isDiscovering ? 'Discovering backups...' : 'Loading backups...'} +

+
+ )} + + {/* Empty state */} + {!isLoading && !isDiscovering && backups.length === 0 && ( +
+ +

No backups found

+

+ Click "Discover Backups" to scan for backups on your servers. +

+ +
+ )} + + {/* Backups list */} + {!isLoading && backups.length > 0 && ( +
+ {backups.map((container: ContainerBackups) => { + const isExpanded = expandedContainers.has(container.container_id); + const backupCount = container.backups.length; + + return ( +
+ {/* Container header - collapsible */} + + + {/* Container content - backups list */} + {isExpanded && ( +
+
+ {container.backups.map((backup) => ( +
+
+
+
+ + {backup.backup_name} + + + {getStorageTypeIcon(backup.storage_type)} + {backup.storage_name} + +
+
+ {backup.size && ( + + + {formatFileSize(backup.size)} + + )} + {backup.created_at && ( + {formatDate(backup.created_at)} + )} + {backup.server_name && ( + + + {backup.server_name} + + )} +
+
+ + {backup.backup_path} + +
+
+
+ + + + + + handleRestoreClick(backup, container.container_id)} + disabled={restoreMutation.isPending} + className="text-muted-foreground hover:text-foreground hover:bg-muted/20 focus:bg-muted/20" + > + Restore + + + Delete + + + +
+
+
+ ))} +
+
+ )} +
+ ); + })} +
+ )} + + {/* Error state */} + {backupsData && !backupsData.success && ( +
+

+ Error loading backups: {backupsData.error || 'Unknown error'} +

+
+ )} + + {/* Restore Confirmation Modal */} + {selectedBackup && ( + { + setRestoreConfirmOpen(false); + setSelectedBackup(null); + }} + onConfirm={handleRestoreConfirm} + title="Restore Backup" + message={`This will destroy the existing container and restore from backup. The container will be stopped during restore. This action cannot be undone and may result in data loss.`} + variant="danger" + confirmText={selectedBackup.containerId} + confirmButtonText="Restore" + cancelButtonText="Cancel" + /> + )} + + {/* Restore Progress Modal */} + {(restoreMutation.isPending || (restoreSuccess && restoreProgress.length > 0)) && ( + { + setRestoreSuccess(false); + setRestoreProgress([]); + }} + /> + )} + + {/* Restore Success */} + {restoreSuccess && ( +
+
+
+ + Restore Completed Successfully +
+ +
+

+ The container has been restored from backup. +

+
+ )} + + {/* Restore Error */} + {restoreError && ( +
+
+
+ + Restore Failed +
+ +
+

+ {restoreError} +

+ {restoreProgress.length > 0 && ( +
+ {restoreProgress.map((message, index) => ( +

+ {message} +

+ ))} +
+ )} + +
+ )} +
+ ); +} + diff --git a/src/app/_components/InstalledScriptsTab.tsx b/src/app/_components/InstalledScriptsTab.tsx index 02155a4..ad155b4 100644 --- a/src/app/_components/InstalledScriptsTab.tsx +++ b/src/app/_components/InstalledScriptsTab.tsx @@ -10,6 +10,9 @@ import { ConfirmationModal } from './ConfirmationModal'; import { ErrorModal } from './ErrorModal'; import { LoadingModal } from './LoadingModal'; import { LXCSettingsModal } from './LXCSettingsModal'; +import { StorageSelectionModal } from './StorageSelectionModal'; +import { BackupWarningModal } from './BackupWarningModal'; +import type { Storage } from '~/server/services/storageService'; import { getContrastColor } from '../../lib/colorUtils'; import { DropdownMenu, @@ -50,8 +53,15 @@ export function InstalledScriptsTab() { const [serverFilter, setServerFilter] = useState('all'); const [sortField, setSortField] = useState<'script_name' | 'container_id' | 'server_name' | 'status' | 'installation_date'>('server_name'); const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>('asc'); - const [updatingScript, setUpdatingScript] = useState<{ id: number; containerId: string; server?: any } | null>(null); + const [updatingScript, setUpdatingScript] = useState<{ id: number; containerId: string; server?: any; backupStorage?: string; isBackupOnly?: boolean } | null>(null); const [openingShell, setOpeningShell] = useState<{ id: number; containerId: string; server?: any } | null>(null); + const [showBackupPrompt, setShowBackupPrompt] = useState(false); + const [showStorageSelection, setShowStorageSelection] = useState(false); + const [pendingUpdateScript, setPendingUpdateScript] = useState(null); + const [backupStorages, setBackupStorages] = useState([]); + const [isLoadingStorages, setIsLoadingStorages] = useState(false); + const [showBackupWarning, setShowBackupWarning] = useState(false); + const [isPreUpdateBackup, setIsPreUpdateBackup] = useState(false); // Track if storage selection is for pre-update 
backup const [editingScriptId, setEditingScriptId] = useState(null); const [editFormData, setEditFormData] = useState<{ script_name: string; container_id: string; web_ui_ip: string; web_ui_port: string }>({ script_name: '', container_id: '', web_ui_ip: '', web_ui_port: '' }); const [showAddForm, setShowAddForm] = useState(false); @@ -244,22 +254,54 @@ export function InstalledScriptsTab() { void refetchScripts(); setAutoDetectStatus({ type: 'success', - message: data.message ?? 'Web UI IP detected successfully!' + message: data.success ? `Detected IP: ${data.ip}` : (data.error ?? 'Failed to detect Web UI') }); - // Clear status after 5 seconds setTimeout(() => setAutoDetectStatus({ type: null, message: '' }), 5000); }, onError: (error) => { - console.error('❌ Auto-detect Web UI error:', error); + console.error('❌ Auto-detect WebUI error:', error); setAutoDetectStatus({ type: 'error', - message: error.message ?? 'Auto-detect failed. Please try again.' + message: error.message ?? 'Failed to detect Web UI' }); - // Clear status after 5 seconds - setTimeout(() => setAutoDetectStatus({ type: null, message: '' }), 5000); + setTimeout(() => setAutoDetectStatus({ type: null, message: '' }), 8000); } }); + // Get backup storages query + const getBackupStoragesQuery = api.installedScripts.getBackupStorages.useQuery( + { serverId: pendingUpdateScript?.server_id ?? 0, forceRefresh: false }, + { enabled: false } // Only fetch when explicitly called + ); + + const fetchStorages = async (serverId: number, forceRefresh = false) => { + setIsLoadingStorages(true); + try { + const result = await getBackupStoragesQuery.refetch({ + queryKey: ['installedScripts.getBackupStorages', { serverId, forceRefresh }] + }); + if (result.data?.success) { + setBackupStorages(result.data.storages); + } else { + setErrorModal({ + isOpen: true, + title: 'Failed to Fetch Storages', + message: result.data?.error ?? 
'Unknown error occurred', + type: 'error' + }); + } + } catch (error) { + setErrorModal({ + isOpen: true, + title: 'Failed to Fetch Storages', + message: error instanceof Error ? error.message : 'Unknown error occurred', + type: 'error' + }); + } finally { + setIsLoadingStorages(false); + } + }; + // Container control mutations // Note: getStatusMutation removed - using direct API calls instead @@ -600,38 +642,154 @@ export function InstalledScriptsTab() { message: `Are you sure you want to update "${script.script_name}"?\n\n⚠️ WARNING: This will update the script and may affect the container. Consider backing up your data beforehand.`, variant: 'danger', confirmText: script.container_id, - confirmButtonText: 'Update Script', + confirmButtonText: 'Continue', onConfirm: () => { - // Get server info if it's SSH mode - let server = null; - if (script.server_id && script.server_user) { - server = { - id: script.server_id, - name: script.server_name, - ip: script.server_ip, - user: script.server_user, - password: script.server_password, - auth_type: script.server_auth_type ?? 'password', - ssh_key: script.server_ssh_key, - ssh_key_passphrase: script.server_ssh_key_passphrase, - ssh_port: script.server_ssh_port ?? 
22 - }; - } - - setUpdatingScript({ - id: script.id, - containerId: script.container_id!, - server: server - }); setConfirmationModal(null); + // Store the script for backup flow + setPendingUpdateScript(script); + // Show backup prompt + setShowBackupPrompt(true); } }); }; + const handleBackupPromptResponse = (wantsBackup: boolean) => { + setShowBackupPrompt(false); + + if (!pendingUpdateScript) return; + + if (wantsBackup) { + // User wants backup - fetch storages and show selection + if (pendingUpdateScript.server_id) { + setIsPreUpdateBackup(true); // Mark that this is for pre-update backup + void fetchStorages(pendingUpdateScript.server_id, false); + setShowStorageSelection(true); + } else { + setErrorModal({ + isOpen: true, + title: 'Backup Not Available', + message: 'Backup is only available for SSH scripts with a configured server.', + type: 'error' + }); + // Proceed without backup + proceedWithUpdate(null); + } + } else { + // User doesn't want backup - proceed directly to update + proceedWithUpdate(null); + } + }; + + const handleStorageSelected = (storage: Storage) => { + setShowStorageSelection(false); + + // Check if this is for a standalone backup or pre-update backup + if (isPreUpdateBackup) { + // Pre-update backup - proceed with update + setIsPreUpdateBackup(false); // Reset flag + proceedWithUpdate(storage.name); + } else if (pendingUpdateScript) { + // Standalone backup - execute backup directly + executeStandaloneBackup(pendingUpdateScript, storage.name); + } + }; + + const executeStandaloneBackup = (script: InstalledScript, storageName: string) => { + // Get server info + let server = null; + if (script.server_id && script.server_user) { + server = { + id: script.server_id, + name: script.server_name, + ip: script.server_ip, + user: script.server_user, + password: script.server_password, + auth_type: script.server_auth_type ?? 
'password', + ssh_key: script.server_ssh_key, + ssh_key_passphrase: script.server_ssh_key_passphrase, + ssh_port: script.server_ssh_port ?? 22 + }; + } + + // Start backup terminal + setUpdatingScript({ + id: script.id, + containerId: script.container_id!, + server: server, + backupStorage: storageName, + isBackupOnly: true + }); + + // Reset state + setIsPreUpdateBackup(false); // Reset flag + setPendingUpdateScript(null); + setBackupStorages([]); + }; + + const proceedWithUpdate = (backupStorage: string | null) => { + if (!pendingUpdateScript) return; + + // Get server info if it's SSH mode + let server = null; + if (pendingUpdateScript.server_id && pendingUpdateScript.server_user) { + server = { + id: pendingUpdateScript.server_id, + name: pendingUpdateScript.server_name, + ip: pendingUpdateScript.server_ip, + user: pendingUpdateScript.server_user, + password: pendingUpdateScript.server_password, + auth_type: pendingUpdateScript.server_auth_type ?? 'password', + ssh_key: pendingUpdateScript.server_ssh_key, + ssh_key_passphrase: pendingUpdateScript.server_ssh_key_passphrase, + ssh_port: pendingUpdateScript.server_ssh_port ?? 22 + }; + } + + setUpdatingScript({ + id: pendingUpdateScript.id, + containerId: pendingUpdateScript.container_id!, + server: server, + backupStorage: backupStorage ?? undefined, + isBackupOnly: false // Explicitly set to false for update operations + }); + + // Reset state + setPendingUpdateScript(null); + setBackupStorages([]); + }; + const handleCloseUpdateTerminal = () => { setUpdatingScript(null); }; + const handleBackupScript = (script: InstalledScript) => { + if (!script.container_id) { + setErrorModal({ + isOpen: true, + title: 'Backup Failed', + message: 'No Container ID available for this script', + details: 'This script does not have a valid container ID and cannot be backed up.' 
+ }); + return; + } + + if (!script.server_id) { + setErrorModal({ + isOpen: true, + title: 'Backup Not Available', + message: 'Backup is only available for SSH scripts with a configured server.', + type: 'error' + }); + return; + } + + // Store the script and fetch storages + setIsPreUpdateBackup(false); // This is a standalone backup, not pre-update + setPendingUpdateScript(script); + void fetchStorages(script.server_id, false); + setShowStorageSelection(true); + }; + const handleOpenShell = (script: InstalledScript) => { if (!script.container_id) { setErrorModal({ @@ -887,12 +1045,15 @@ export function InstalledScriptsTab() { {updatingScript && (
)} @@ -1252,6 +1413,7 @@ export function InstalledScriptsTab() { onSave={handleSaveEdit} onCancel={handleCancelEdit} onUpdate={() => handleUpdateScript(script)} + onBackup={() => handleBackupScript(script)} onShell={() => handleOpenShell(script)} onDelete={() => handleDeleteScript(Number(script.id))} isUpdating={updateScriptMutation.isPending} @@ -1530,6 +1692,15 @@ export function InstalledScriptsTab() { Update )} + {script.container_id && script.execution_mode === 'ssh' && ( + handleBackupScript(script)} + disabled={containerStatuses.get(script.id) === 'stopped'} + className="text-muted-foreground hover:text-foreground hover:bg-muted/20 focus:bg-muted/20" + > + Backup + + )} {script.container_id && script.execution_mode === 'ssh' && ( handleOpenShell(script)} @@ -1656,6 +1827,79 @@ export function InstalledScriptsTab() { /> )} + {/* Backup Prompt Modal */} + {showBackupPrompt && ( +
+
+
+
+ + + +

Backup Before Update?

+
+
+
+

+ Would you like to create a backup before updating the container? +

+
+ + +
+
+
+
+ )} + + {/* Storage Selection Modal */} + { + setShowStorageSelection(false); + setPendingUpdateScript(null); + setBackupStorages([]); + }} + onSelect={handleStorageSelected} + storages={backupStorages} + isLoading={isLoadingStorages} + onRefresh={() => { + if (pendingUpdateScript?.server_id) { + void fetchStorages(pendingUpdateScript.server_id, true); + } + }} + /> + + {/* Backup Warning Modal */} + setShowBackupWarning(false)} + onProceed={() => { + setShowBackupWarning(false); + // Proceed with update even though backup failed + if (pendingUpdateScript) { + proceedWithUpdate(null); + } + }} + /> + {/* LXC Settings Modal */} void; } -export function LoadingModal({ isOpen, action }: LoadingModalProps) { - useRegisterModal(isOpen, { id: 'loading-modal', allowEscape: false, onClose: () => null }); +export function LoadingModal({ isOpen, action, logs = [], isComplete = false, title, onClose }: LoadingModalProps) { + // Allow dismissing with ESC only when complete, prevent during running + useRegisterModal(isOpen, { id: 'loading-modal', allowEscape: isComplete, onClose: onClose || (() => null) }); + const logsEndRef = useRef(null); + + // Auto-scroll to bottom when new logs arrive + useEffect(() => { + logsEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + }, [logs]); + if (!isOpen) return null; return (
-
+
+ {/* Close button - only show when complete */} + {isComplete && onClose && ( + + )} +
- -
+ {isComplete ? ( + + ) : ( + <> + +
+ + )}
-
-

- Processing -

+ + {/* Static title text */} + {title && (

- {action} + {title}

-

- Please wait... -

-
+ )} + + {/* Log output */} + {logs.length > 0 && ( +
+ {logs.map((log, index) => ( +
+ {log} +
+ ))} +
+
+ )} + + {!isComplete && ( +
+
+
+
+
+ )}
diff --git a/src/app/_components/PBSCredentialsModal.tsx b/src/app/_components/PBSCredentialsModal.tsx new file mode 100644 index 0000000..e64e3e9 --- /dev/null +++ b/src/app/_components/PBSCredentialsModal.tsx @@ -0,0 +1,296 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { Button } from './ui/button'; +import { Lock, CheckCircle, AlertCircle } from 'lucide-react'; +import { useRegisterModal } from './modal/ModalStackProvider'; +import { api } from '~/trpc/react'; +import type { Storage } from '~/server/services/storageService'; + +interface PBSCredentialsModalProps { + isOpen: boolean; + onClose: () => void; + serverId: number; + serverName: string; + storage: Storage; +} + +export function PBSCredentialsModal({ + isOpen, + onClose, + serverId, + serverName, + storage +}: PBSCredentialsModalProps) { + const [pbsIp, setPbsIp] = useState(''); + const [pbsDatastore, setPbsDatastore] = useState(''); + const [pbsPassword, setPbsPassword] = useState(''); + const [pbsFingerprint, setPbsFingerprint] = useState(''); + const [isLoading, setIsLoading] = useState(false); + + // Extract PBS info from storage object + const pbsIpFromStorage = (storage as any).server || null; + const pbsDatastoreFromStorage = (storage as any).datastore || null; + + // Fetch existing credentials + const { data: credentialData, refetch } = api.pbsCredentials.getCredentialsForStorage.useQuery( + { serverId, storageName: storage.name }, + { enabled: isOpen } + ); + + // Initialize form with storage config values or existing credentials + useEffect(() => { + if (isOpen) { + if (credentialData?.success && credentialData.credential) { + // Load existing credentials + setPbsIp(credentialData.credential.pbs_ip); + setPbsDatastore(credentialData.credential.pbs_datastore); + setPbsPassword(''); // Don't show password + setPbsFingerprint(credentialData.credential.pbs_fingerprint || ''); + } else { + // Initialize with storage config values + setPbsIp(pbsIpFromStorage || ''); + 
setPbsDatastore(pbsDatastoreFromStorage || ''); + setPbsPassword(''); + setPbsFingerprint(''); + } + } + }, [isOpen, credentialData, pbsIpFromStorage, pbsDatastoreFromStorage]); + + const saveCredentials = api.pbsCredentials.saveCredentials.useMutation({ + onSuccess: () => { + void refetch(); + onClose(); + }, + onError: (error) => { + console.error('Failed to save PBS credentials:', error); + alert(`Failed to save credentials: ${error.message}`); + }, + }); + + const deleteCredentials = api.pbsCredentials.deleteCredentials.useMutation({ + onSuccess: () => { + void refetch(); + onClose(); + }, + onError: (error) => { + console.error('Failed to delete PBS credentials:', error); + alert(`Failed to delete credentials: ${error.message}`); + }, + }); + + useRegisterModal(isOpen, { id: 'pbs-credentials-modal', allowEscape: true, onClose }); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + + if (!pbsIp || !pbsDatastore || !pbsFingerprint) { + alert('Please fill in all required fields (IP, Datastore, Fingerprint)'); + return; + } + + // Password is optional when updating existing credentials + setIsLoading(true); + try { + await saveCredentials.mutateAsync({ + serverId, + storageName: storage.name, + pbs_ip: pbsIp, + pbs_datastore: pbsDatastore, + pbs_password: pbsPassword || undefined, // Undefined means keep existing password + pbs_fingerprint: pbsFingerprint, + }); + } finally { + setIsLoading(false); + } + }; + + const handleDelete = async () => { + if (!confirm('Are you sure you want to delete the PBS credentials for this storage?')) { + return; + } + + setIsLoading(true); + try { + await deleteCredentials.mutateAsync({ + serverId, + storageName: storage.name, + }); + } finally { + setIsLoading(false); + } + }; + + if (!isOpen) return null; + + const hasCredentials = credentialData?.success && credentialData.credential; + + return ( +
+
+ {/* Header */} +
+
+ +

+ PBS Credentials - {storage.name} +

+
+ +
+ + {/* Content */} +
+
+ {/* Storage Name (read-only) */} +
+ + +
+ + {/* PBS IP */} +
+ + setPbsIp(e.target.value)} + required + disabled={isLoading} + className="w-full px-3 py-2 border rounded-md shadow-sm bg-card text-foreground placeholder-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:border-ring border-border" + placeholder="e.g., 10.10.10.226" + /> +

+ IP address of the Proxmox Backup Server +

+
+ + {/* PBS Datastore */} +
+ + setPbsDatastore(e.target.value)} + required + disabled={isLoading} + className="w-full px-3 py-2 border rounded-md shadow-sm bg-card text-foreground placeholder-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:border-ring border-border" + placeholder="e.g., NAS03-ISCSI-BACKUP" + /> +

+ Name of the datastore on the PBS server +

+
+ + {/* PBS Password */} +
+ + setPbsPassword(e.target.value)} + required={!hasCredentials} + disabled={isLoading} + className="w-full px-3 py-2 border rounded-md shadow-sm bg-card text-foreground placeholder-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:border-ring border-border" + placeholder={hasCredentials ? "Enter new password (leave empty to keep existing)" : "Enter PBS password"} + /> +

+ Password for root@pam user on PBS server +

+
+ + {/* PBS Fingerprint */} +
+ + setPbsFingerprint(e.target.value)} + required + disabled={isLoading} + className="w-full px-3 py-2 border rounded-md shadow-sm bg-card text-foreground placeholder-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring focus:border-ring border-border" + placeholder="e.g., 7b:e5:87:38:5e:16:05:d1:12:22:7f:73:d2:e2:d0:cf:8c:cb:28:e2:74:0c:78:91:1a:71:74:2e:79:20:5a:02" + /> +

+ Server fingerprint for auto-acceptance. You can find this on your PBS dashboard by clicking the "Show Fingerprint" button. +

+
+ + {/* Status indicator */} + {hasCredentials && ( +
+ + + Credentials are configured for this storage + +
+ )} + + {/* Action Buttons */} +
+ {hasCredentials && ( + + )} + + +
+
+
+
+
+ ); +} + diff --git a/src/app/_components/ScriptInstallationCard.tsx b/src/app/_components/ScriptInstallationCard.tsx index 5490fee..6221839 100644 --- a/src/app/_components/ScriptInstallationCard.tsx +++ b/src/app/_components/ScriptInstallationCard.tsx @@ -44,6 +44,7 @@ interface ScriptInstallationCardProps { onSave: () => void; onCancel: () => void; onUpdate: () => void; + onBackup?: () => void; onShell: () => void; onDelete: () => void; isUpdating: boolean; @@ -68,6 +69,7 @@ export function ScriptInstallationCard({ onSave, onCancel, onUpdate, + onBackup, onShell, onDelete, isUpdating, @@ -307,6 +309,15 @@ export function ScriptInstallationCard({ Update )} + {script.container_id && script.execution_mode === 'ssh' && onBackup && ( + + Backup + + )} {script.container_id && script.execution_mode === 'ssh' && ( (null); + const [showStoragesModal, setShowStoragesModal] = useState(false); + const [selectedServerForStorages, setSelectedServerForStorages] = useState<{ id: number; name: string } | null>(null); const handleEdit = (server: Server) => { setEditingId(server.id); @@ -251,6 +254,19 @@ export function ServerList({ servers, onUpdate, onDelete }: ServerListProps) { )} +
{/* View Public Key button - only show for generated keys */} {server.key_generated === true && ( @@ -324,6 +340,19 @@ export function ServerList({ servers, onUpdate, onDelete }: ServerListProps) { serverIp={publicKeyData.serverIp} /> )} + + {/* Server Storages Modal */} + {selectedServerForStorages && ( + { + setShowStoragesModal(false); + setSelectedServerForStorages(null); + }} + serverId={selectedServerForStorages.id} + serverName={selectedServerForStorages.name} + /> + )}
); } diff --git a/src/app/_components/ServerStoragesModal.tsx b/src/app/_components/ServerStoragesModal.tsx new file mode 100644 index 0000000..60d51a2 --- /dev/null +++ b/src/app/_components/ServerStoragesModal.tsx @@ -0,0 +1,227 @@ +'use client'; + +import { useState, useEffect } from 'react'; +import { Button } from './ui/button'; +import { Database, RefreshCw, CheckCircle, Lock, AlertCircle } from 'lucide-react'; +import { useRegisterModal } from './modal/ModalStackProvider'; +import { api } from '~/trpc/react'; +import { PBSCredentialsModal } from './PBSCredentialsModal'; +import type { Storage } from '~/server/services/storageService'; + +interface ServerStoragesModalProps { + isOpen: boolean; + onClose: () => void; + serverId: number; + serverName: string; +} + +export function ServerStoragesModal({ + isOpen, + onClose, + serverId, + serverName +}: ServerStoragesModalProps) { + const [forceRefresh, setForceRefresh] = useState(false); + const [selectedPBSStorage, setSelectedPBSStorage] = useState(null); + + const { data, isLoading, refetch } = api.installedScripts.getBackupStorages.useQuery( + { serverId, forceRefresh }, + { enabled: isOpen } + ); + + // Fetch all PBS credentials for this server to show status indicators + const { data: allCredentials } = api.pbsCredentials.getAllCredentialsForServer.useQuery( + { serverId }, + { enabled: isOpen } + ); + + const credentialsMap = new Map(); + if (allCredentials?.success) { + allCredentials.credentials.forEach(c => { + credentialsMap.set(c.storage_name, true); + }); + } + + useRegisterModal(isOpen, { id: 'server-storages-modal', allowEscape: true, onClose }); + + const handleRefresh = () => { + setForceRefresh(true); + void refetch(); + setTimeout(() => setForceRefresh(false), 1000); + }; + + if (!isOpen) return null; + + const storages = data?.success ? data.storages : []; + const backupStorages = storages.filter(s => s.supportsBackup); + + return ( +
+
+ {/* Header */} +
+
+ +

+ Storages for {serverName} +

+
+
+ + +
+
+ + {/* Content */} +
+ {isLoading ? ( +
+
+

Loading storages...

+
+ ) : !data?.success ? ( +
+ +

Failed to load storages

+

+ {data?.error ?? 'Unknown error occurred'} +

+ +
+ ) : storages.length === 0 ? ( +
+ +

No storages found

+

+ Make sure your server has storages configured. +

+
+ ) : ( + <> + {data.cached && ( +
+ Showing cached data. Click Refresh to fetch latest from server. +
+ )} + +
+ {storages.map((storage) => { + const isBackupCapable = storage.supportsBackup; + + return ( +
+
+
+

{storage.name}

+ {isBackupCapable && ( + + + Backup + + )} + + {storage.type} + + {storage.type === 'pbs' && ( + credentialsMap.has(storage.name) ? ( + + + Credentials Configured + + ) : ( + + + Credentials Needed + + ) + )} +
+
+
+ Content: {storage.content.join(', ')} +
+ {storage.nodes && storage.nodes.length > 0 && ( +
+ Nodes: {storage.nodes.join(', ')} +
+ )} + {Object.entries(storage) + .filter(([key]) => !['name', 'type', 'content', 'supportsBackup', 'nodes'].includes(key)) + .map(([key, value]) => ( +
+ {key.replace(/_/g, ' ')}: {String(value)} +
+ ))} +
+ {storage.type === 'pbs' && ( +
+ +
+ )} +
+
+ ); + })} +
+ + {backupStorages.length > 0 && ( +
+

+ {backupStorages.length} storage{backupStorages.length !== 1 ? 's' : ''} available for backups +

+
+ )} + + )} +
+
+ + {/* PBS Credentials Modal */} + {selectedPBSStorage && ( + setSelectedPBSStorage(null)} + serverId={serverId} + serverName={serverName} + storage={selectedPBSStorage} + /> + )} +
+ ); +} + diff --git a/src/app/_components/StorageSelectionModal.tsx b/src/app/_components/StorageSelectionModal.tsx new file mode 100644 index 0000000..1c24671 --- /dev/null +++ b/src/app/_components/StorageSelectionModal.tsx @@ -0,0 +1,168 @@ +'use client'; + +import { useState } from 'react'; +import { Button } from './ui/button'; +import { Database, RefreshCw, CheckCircle } from 'lucide-react'; +import { useRegisterModal } from './modal/ModalStackProvider'; +import type { Storage } from '~/server/services/storageService'; + +interface StorageSelectionModalProps { + isOpen: boolean; + onClose: () => void; + onSelect: (storage: Storage) => void; + storages: Storage[]; + isLoading: boolean; + onRefresh: () => void; +} + +export function StorageSelectionModal({ + isOpen, + onClose, + onSelect, + storages, + isLoading, + onRefresh +}: StorageSelectionModalProps) { + const [selectedStorage, setSelectedStorage] = useState(null); + + useRegisterModal(isOpen, { id: 'storage-selection-modal', allowEscape: true, onClose }); + + if (!isOpen) return null; + + const handleSelect = () => { + if (selectedStorage) { + onSelect(selectedStorage); + setSelectedStorage(null); + } + }; + + const handleClose = () => { + setSelectedStorage(null); + onClose(); + }; + + // Filter to show only backup-capable storages + const backupStorages = storages.filter(s => s.supportsBackup); + + return ( +
+
+ {/* Header */} +
+
+ +

Select Backup Storage

+
+ +
+ + {/* Content */} +
+ {isLoading ? ( +
+
+

Loading storages...

+
+ ) : backupStorages.length === 0 ? ( +
+ +

No backup-capable storages found

+

+ Make sure your server has storages configured with backup content type. +

+ +
+ ) : ( + <> +

+ Select a storage to use for the backup. Only storages that support backups are shown. +

+ + {/* Storage List */} +
+ {backupStorages.map((storage) => ( +
setSelectedStorage(storage)} + className={`p-4 border rounded-lg cursor-pointer transition-all ${ + selectedStorage?.name === storage.name + ? 'border-primary bg-primary/10' + : 'border-border hover:border-primary/50 hover:bg-accent/50' + }`} + > +
+
+
+

{storage.name}

+ + Backup + + + {storage.type} + +
+
+ Content: {storage.content.join(', ')} + {storage.nodes && storage.nodes.length > 0 && ( + • Nodes: {storage.nodes.join(', ')} + )} +
+
+ {selectedStorage?.name === storage.name && ( + + )} +
+
+ ))} +
+ + {/* Refresh Button */} +
+ +
+ + )} + + {/* Action Buttons */} +
+ + +
+
+
+
+ ); +} + + + diff --git a/src/app/_components/Terminal.tsx b/src/app/_components/Terminal.tsx index b18e108..84c0ec1 100644 --- a/src/app/_components/Terminal.tsx +++ b/src/app/_components/Terminal.tsx @@ -12,7 +12,10 @@ interface TerminalProps { server?: any; isUpdate?: boolean; isShell?: boolean; + isBackup?: boolean; containerId?: string; + storage?: string; + backupStorage?: string; } interface TerminalMessage { @@ -21,7 +24,7 @@ interface TerminalMessage { timestamp: number; } -export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate = false, isShell = false, containerId }: TerminalProps) { +export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate = false, isShell = false, isBackup = false, containerId, storage, backupStorage }: TerminalProps) { const [isConnected, setIsConnected] = useState(false); const [isRunning, setIsRunning] = useState(false); const [isClient, setIsClient] = useState(false); @@ -334,7 +337,10 @@ export function Terminal({ scriptPath, onClose, mode = 'local', server, isUpdate server, isUpdate, isShell, - containerId + isBackup, + containerId, + storage, + backupStorage }; ws.send(JSON.stringify(message)); } diff --git a/src/app/page.tsx b/src/app/page.tsx index 8e13279..98182a1 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -5,6 +5,7 @@ import { useState, useRef, useEffect } from 'react'; import { ScriptsGrid } from './_components/ScriptsGrid'; import { DownloadedScriptsTab } from './_components/DownloadedScriptsTab'; import { InstalledScriptsTab } from './_components/InstalledScriptsTab'; +import { BackupsTab } from './_components/BackupsTab'; import { ResyncButton } from './_components/ResyncButton'; import { Terminal } from './_components/Terminal'; import { ServerSettingsButton } from './_components/ServerSettingsButton'; @@ -16,16 +17,16 @@ import { Button } from './_components/ui/button'; import { ContextualHelpIcon } from './_components/ContextualHelpIcon'; import { 
ReleaseNotesModal, getLastSeenVersion } from './_components/ReleaseNotesModal'; import { Footer } from './_components/Footer'; -import { Package, HardDrive, FolderOpen, LogOut } from 'lucide-react'; +import { Package, HardDrive, FolderOpen, LogOut, Archive } from 'lucide-react'; import { api } from '~/trpc/react'; import { useAuth } from './_components/AuthProvider'; export default function Home() { const { isAuthenticated, logout } = useAuth(); const [runningScript, setRunningScript] = useState<{ path: string; name: string; mode?: 'local' | 'ssh'; server?: any } | null>(null); - const [activeTab, setActiveTab] = useState<'scripts' | 'downloaded' | 'installed'>(() => { + const [activeTab, setActiveTab] = useState<'scripts' | 'downloaded' | 'installed' | 'backups'>(() => { if (typeof window !== 'undefined') { - const savedTab = localStorage.getItem('activeTab') as 'scripts' | 'downloaded' | 'installed'; + const savedTab = localStorage.getItem('activeTab') as 'scripts' | 'downloaded' | 'installed' | 'backups'; return savedTab || 'scripts'; } return 'scripts'; @@ -38,6 +39,7 @@ export default function Home() { const { data: scriptCardsData } = api.scripts.getScriptCardsWithCategories.useQuery(); const { data: localScriptsData } = api.scripts.getAllDownloadedScripts.useQuery(); const { data: installedScriptsData } = api.installedScripts.getAllInstalledScripts.useQuery(); + const { data: backupsData } = api.backups.getAllBackupsGrouped.useQuery(); const { data: versionData } = api.version.getCurrentVersion.useQuery(); // Save active tab to localStorage whenever it changes @@ -118,7 +120,8 @@ export default function Home() { }); }).length; })(), - installed: installedScriptsData?.scripts?.length ?? 0 + installed: installedScriptsData?.scripts?.length ?? 0, + backups: backupsData?.success ? backupsData.backups.length : 0 }; const scrollToTerminal = () => { @@ -243,6 +246,22 @@ export default function Home() { +
@@ -273,6 +292,10 @@ export default function Home() { {activeTab === 'installed' && ( )} + + {activeTab === 'backups' && ( + + )} {/* Footer */} diff --git a/src/server/api/root.ts b/src/server/api/root.ts index 6194547..c56b7a5 100644 --- a/src/server/api/root.ts +++ b/src/server/api/root.ts @@ -2,6 +2,8 @@ import { scriptsRouter } from "~/server/api/routers/scripts"; import { installedScriptsRouter } from "~/server/api/routers/installedScripts"; import { serversRouter } from "~/server/api/routers/servers"; import { versionRouter } from "~/server/api/routers/version"; +import { backupsRouter } from "~/server/api/routers/backups"; +import { pbsCredentialsRouter } from "~/server/api/routers/pbsCredentials"; import { repositoriesRouter } from "~/server/api/routers/repositories"; import { createCallerFactory, createTRPCRouter } from "~/server/api/trpc"; @@ -15,6 +17,8 @@ export const appRouter = createTRPCRouter({ installedScripts: installedScriptsRouter, servers: serversRouter, version: versionRouter, + backups: backupsRouter, + pbsCredentials: pbsCredentialsRouter, repositories: repositoriesRouter, }); diff --git a/src/server/api/routers/backups.ts b/src/server/api/routers/backups.ts new file mode 100644 index 0000000..4b2ab08 --- /dev/null +++ b/src/server/api/routers/backups.ts @@ -0,0 +1,170 @@ +import { z } from 'zod'; +import { createTRPCRouter, publicProcedure } from '~/server/api/trpc'; +import { getDatabase } from '~/server/database-prisma'; +import { getBackupService } from '~/server/services/backupService'; +import { getRestoreService } from '~/server/services/restoreService'; +import { readFile } from 'fs/promises'; +import { join } from 'path'; +import { existsSync } from 'fs'; +import stripAnsi from 'strip-ansi'; + +export const backupsRouter = createTRPCRouter({ + // Get all backups grouped by container ID + getAllBackupsGrouped: publicProcedure + .query(async () => { + try { + const db = getDatabase(); + const groupedBackups = await 
db.getBackupsGroupedByContainer(); + + // Convert Map to array format for frontend + const result: Array<{ + container_id: string; + hostname: string; + backups: Array<{ + id: number; + backup_name: string; + backup_path: string; + size: bigint | null; + created_at: Date | null; + storage_name: string; + storage_type: string; + discovered_at: Date; + server_name: string | null; + server_color: string | null; + }>; + }> = []; + + for (const [containerId, backups] of groupedBackups.entries()) { + if (backups.length === 0) continue; + + // Get hostname from first backup (all backups for same container should have same hostname) + const hostname = backups[0]?.hostname || ''; + + result.push({ + container_id: containerId, + hostname, + backups: backups.map(backup => ({ + id: backup.id, + backup_name: backup.backup_name, + backup_path: backup.backup_path, + size: backup.size, + created_at: backup.created_at, + storage_name: backup.storage_name, + storage_type: backup.storage_type, + discovered_at: backup.discovered_at, + server_id: backup.server_id, + server_name: backup.server?.name ?? null, + server_color: backup.server?.color ?? null, + })), + }); + } + + return { + success: true, + backups: result, + }; + } catch (error) { + console.error('Error in getAllBackupsGrouped:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to fetch backups', + backups: [], + }; + } + }), + + // Discover backups for all containers + discoverBackups: publicProcedure + .mutation(async () => { + try { + const backupService = getBackupService(); + await backupService.discoverAllBackups(); + + return { + success: true, + message: 'Backup discovery completed successfully', + }; + } catch (error) { + console.error('Error in discoverBackups:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to discover backups', + }; + } + }), + + // Get restore progress from log file + getRestoreProgress: publicProcedure + .query(async () => { + try { + const logPath = join(process.cwd(), 'restore.log'); + + if (!existsSync(logPath)) { + return { + success: true, + logs: [], + isComplete: false + }; + } + + const logs = await readFile(logPath, 'utf-8'); + const logLines = logs.split('\n') + .filter(line => line.trim()) + .map(line => stripAnsi(line)); // Strip ANSI color codes + + // Check if restore is complete by looking for completion indicators + const isComplete = logLines.some(line => + line.includes('complete: Restore completed successfully') || + line.includes('error: Error:') || + line.includes('Restore completed successfully') || + line.includes('Restore failed') + ); + + return { + success: true, + logs: logLines, + isComplete + }; + } catch (error) { + console.error('Error reading restore logs:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to read restore logs', + logs: [], + isComplete: false + }; + } + }), + + // Restore backup + restoreBackup: publicProcedure + .input(z.object({ + backupId: z.number(), + containerId: z.string(), + serverId: z.number(), + })) + .mutation(async ({ input }) => { + try { + const restoreService = getRestoreService(); + const result = await restoreService.executeRestore( + input.backupId, + input.containerId, + input.serverId + ); + + return { + success: result.success, + error: result.error, + progress: result.progress, + }; + } catch (error) { + console.error('Error in restoreBackup:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to restore backup', + progress: [], + }; + } + }), +}); + diff --git a/src/server/api/routers/installedScripts.ts b/src/server/api/routers/installedScripts.ts index 5bce857..7a2e24e 100644 --- a/src/server/api/routers/installedScripts.ts +++ b/src/server/api/routers/installedScripts.ts @@ -3,6 +3,7 @@ import { createTRPCRouter, publicProcedure } from "~/server/api/trpc"; import { getDatabase } from "~/server/database-prisma"; import { createHash } from "crypto"; import type { Server } from "~/types/server"; +import { getStorageService } from "~/server/services/storageService"; // Helper function to parse raw LXC config into structured data function parseRawConfig(rawConfig: string): any { @@ -2038,5 +2039,163 @@ EOFCONFIG`; .getLXCConfig({ scriptId: input.scriptId, forceSync: true }); return result; + }), + + // Get backup-capable storages for a server + getBackupStorages: publicProcedure + .input(z.object({ + serverId: z.number(), + forceRefresh: z.boolean().optional().default(false) + })) + .query(async ({ input }) => { + try { + const db = getDatabase(); + const server = await db.getServerById(input.serverId); + + if (!server) { + return { + success: false, + error: 'Server not found', + storages: [], + cached: false + }; + } + + const storageService = getStorageService(); + const { default: SSHService } = await import('~/server/ssh-service'); + const { getSSHExecutionService } = await import('~/server/ssh-execution-service'); + const sshService = new SSHService(); + const sshExecutionService = getSSHExecutionService(); + + // Test SSH connection first + const connectionTest = await sshService.testSSHConnection(server as Server); + if (!(connectionTest as any).success) { + return { + success: false, + error: `SSH connection failed: ${(connectionTest as any).error ?? 
'Unknown error'}`, + storages: [], + cached: false + }; + } + + // Get server hostname to filter storages + let serverHostname = ''; + try { + await new Promise((resolve, reject) => { + sshExecutionService.executeCommand( + server as Server, + 'hostname', + (data: string) => { + serverHostname += data; + }, + (error: string) => { + reject(new Error(`Failed to get hostname: ${error}`)); + }, + (exitCode: number) => { + if (exitCode === 0) { + resolve(); + } else { + reject(new Error(`hostname command failed with exit code ${exitCode}`)); + } + } + ); + }); + } catch (error) { + console.error('Error getting server hostname:', error); + // Continue without filtering if hostname can't be retrieved + } + + const normalizedHostname = serverHostname.trim().toLowerCase(); + + // Check if we have cached data + const wasCached = !input.forceRefresh; + + // Fetch storages (will use cache if not forcing refresh) + const allStorages = await storageService.getStorages(server as Server, input.forceRefresh); + + // Filter storages by node hostname matching + const applicableStorages = allStorages.filter(storage => { + // If storage has no nodes specified, it's available on all nodes + if (!storage.nodes || storage.nodes.length === 0) { + return true; + } + + // If we couldn't get hostname, include all storages (fallback) + if (!normalizedHostname) { + return true; + } + + // Check if server hostname is in the nodes array (case-insensitive, trimmed) + const normalizedNodes = storage.nodes.map(node => node.trim().toLowerCase()); + return normalizedNodes.includes(normalizedHostname); + }); + + return { + success: true, + storages: applicableStorages, + cached: wasCached && applicableStorages.length > 0 + }; + } catch (error) { + console.error('Error in getBackupStorages:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to fetch storages', + storages: [], + cached: false + }; + } + }), + + // Execute backup for a container + executeBackup: publicProcedure + .input(z.object({ + containerId: z.string(), + storage: z.string(), + serverId: z.number() + })) + .mutation(async ({ input }) => { + try { + const db = getDatabase(); + const server = await db.getServerById(input.serverId); + + if (!server) { + return { + success: false, + error: 'Server not found', + executionId: null + }; + } + + const { default: SSHService } = await import('~/server/ssh-service'); + const sshService = new SSHService(); + + // Test SSH connection first + const connectionTest = await sshService.testSSHConnection(server as Server); + if (!(connectionTest as any).success) { + return { + success: false, + error: `SSH connection failed: ${(connectionTest as any).error ?? 'Unknown error'}`, + executionId: null + }; + } + + // Generate execution ID for websocket tracking + const executionId = `backup_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + + return { + success: true, + executionId, + containerId: input.containerId, + storage: input.storage, + server: server as Server + }; + } catch (error) { + console.error('Error in executeBackup:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to execute backup', + executionId: null + }; + } }) }); diff --git a/src/server/api/routers/pbsCredentials.ts b/src/server/api/routers/pbsCredentials.ts new file mode 100644 index 0000000..167b489 --- /dev/null +++ b/src/server/api/routers/pbsCredentials.ts @@ -0,0 +1,153 @@ +import { z } from 'zod'; +import { createTRPCRouter, publicProcedure } from '~/server/api/trpc'; +import { getDatabase } from '~/server/database-prisma'; + +export const pbsCredentialsRouter = createTRPCRouter({ + // Get credentials for a specific storage + getCredentialsForStorage: publicProcedure + .input(z.object({ + serverId: z.number(), + storageName: z.string(), + })) + .query(async ({ input }) => { + try { + const db = getDatabase(); + const credential = await db.getPBSCredential(input.serverId, input.storageName); + + if (!credential) { + return { + success: false, + error: 'PBS credentials not found', + credential: null, + }; + } + + return { + success: true, + credential: { + id: credential.id, + server_id: credential.server_id, + storage_name: credential.storage_name, + pbs_ip: credential.pbs_ip, + pbs_datastore: credential.pbs_datastore, + pbs_fingerprint: credential.pbs_fingerprint, + // Don't return password for security + }, + }; + } catch (error) { + console.error('Error in getCredentialsForStorage:', error); + return { + success: false, + error: error instanceof Error ? 
error.message : 'Failed to fetch PBS credentials', + credential: null, + }; + } + }), + + // Get all PBS credentials for a server + getAllCredentialsForServer: publicProcedure + .input(z.object({ + serverId: z.number(), + })) + .query(async ({ input }) => { + try { + const db = getDatabase(); + const credentials = await db.getPBSCredentialsByServer(input.serverId); + + return { + success: true, + credentials: credentials.map(c => ({ + id: c.id, + server_id: c.server_id, + storage_name: c.storage_name, + pbs_ip: c.pbs_ip, + pbs_datastore: c.pbs_datastore, + pbs_fingerprint: c.pbs_fingerprint, + // Don't return password for security + })), + }; + } catch (error) { + console.error('Error in getAllCredentialsForServer:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to fetch PBS credentials', + credentials: [], + }; + } + }), + + // Save/update PBS credentials + saveCredentials: publicProcedure + .input(z.object({ + serverId: z.number(), + storageName: z.string(), + pbs_ip: z.string(), + pbs_datastore: z.string(), + pbs_password: z.string().optional(), // Optional to allow updating without changing password + pbs_fingerprint: z.string(), + })) + .mutation(async ({ input }) => { + try { + const db = getDatabase(); + + // If password is not provided, fetch existing credential to preserve password + let passwordToSave = input.pbs_password; + if (!passwordToSave) { + const existing = await db.getPBSCredential(input.serverId, input.storageName); + if (existing) { + passwordToSave = existing.pbs_password; + } else { + return { + success: false, + error: 'Password is required for new credentials', + }; + } + } + + await db.createOrUpdatePBSCredential({ + server_id: input.serverId, + storage_name: input.storageName, + pbs_ip: input.pbs_ip, + pbs_datastore: input.pbs_datastore, + pbs_password: passwordToSave, + pbs_fingerprint: input.pbs_fingerprint, + }); + + return { + success: true, + message: 'PBS credentials saved 
successfully', + }; + } catch (error) { + console.error('Error in saveCredentials:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to save PBS credentials', + }; + } + }), + + // Delete PBS credentials + deleteCredentials: publicProcedure + .input(z.object({ + serverId: z.number(), + storageName: z.string(), + })) + .mutation(async ({ input }) => { + try { + const db = getDatabase(); + await db.deletePBSCredential(input.serverId, input.storageName); + + return { + success: true, + message: 'PBS credentials deleted successfully', + }; + } catch (error) { + console.error('Error in deleteCredentials:', error); + return { + success: false, + error: error instanceof Error ? error.message : 'Failed to delete PBS credentials', + }; + } + }), +}); + diff --git a/src/server/database-prisma.js b/src/server/database-prisma.js index dc86245..51f6461 100644 --- a/src/server/database-prisma.js +++ b/src/server/database-prisma.js @@ -271,6 +271,161 @@ class DatabaseServicePrisma { }); } + // Backup CRUD operations + async createOrUpdateBackup(backupData) { + // Find existing backup by container_id, server_id, and backup_path + const existing = await prisma.backup.findFirst({ + where: { + container_id: backupData.container_id, + server_id: backupData.server_id, + backup_path: backupData.backup_path, + }, + }); + + if (existing) { + // Update existing backup + return await prisma.backup.update({ + where: { id: existing.id }, + data: { + hostname: backupData.hostname, + backup_name: backupData.backup_name, + size: backupData.size, + created_at: backupData.created_at, + storage_name: backupData.storage_name, + storage_type: backupData.storage_type, + discovered_at: new Date(), + }, + }); + } else { + // Create new backup + return await prisma.backup.create({ + data: { + container_id: backupData.container_id, + server_id: backupData.server_id, + hostname: backupData.hostname, + backup_name: backupData.backup_name, + backup_path: 
backupData.backup_path, + size: backupData.size, + created_at: backupData.created_at, + storage_name: backupData.storage_name, + storage_type: backupData.storage_type, + discovered_at: new Date(), + }, + }); + } + } + + async getAllBackups() { + return await prisma.backup.findMany({ + include: { + server: true, + }, + orderBy: [ + { container_id: 'asc' }, + { created_at: 'desc' }, + ], + }); + } + + async getBackupById(id) { + return await prisma.backup.findUnique({ + where: { id }, + include: { + server: true, + }, + }); + } + + async getBackupsByContainerId(containerId) { + return await prisma.backup.findMany({ + where: { container_id: containerId }, + include: { + server: true, + }, + orderBy: { created_at: 'desc' }, + }); + } + + async deleteBackupsForContainer(containerId, serverId) { + return await prisma.backup.deleteMany({ + where: { + container_id: containerId, + server_id: serverId, + }, + }); + } + + async getBackupsGroupedByContainer() { + const backups = await this.getAllBackups(); + const grouped = new Map(); + + for (const backup of backups) { + const key = backup.container_id; + if (!grouped.has(key)) { + grouped.set(key, []); + } + grouped.get(key).push(backup); + } + + return grouped; + } + + // PBS Credentials CRUD operations + async createOrUpdatePBSCredential(credentialData) { + return await prisma.pBSStorageCredential.upsert({ + where: { + server_id_storage_name: { + server_id: credentialData.server_id, + storage_name: credentialData.storage_name, + }, + }, + update: { + pbs_ip: credentialData.pbs_ip, + pbs_datastore: credentialData.pbs_datastore, + pbs_password: credentialData.pbs_password, + pbs_fingerprint: credentialData.pbs_fingerprint, + updated_at: new Date(), + }, + create: { + server_id: credentialData.server_id, + storage_name: credentialData.storage_name, + pbs_ip: credentialData.pbs_ip, + pbs_datastore: credentialData.pbs_datastore, + pbs_password: credentialData.pbs_password, + pbs_fingerprint: credentialData.pbs_fingerprint, + }, 
+ }); + } + + async getPBSCredential(serverId, storageName) { + return await prisma.pBSStorageCredential.findUnique({ + where: { + server_id_storage_name: { + server_id: serverId, + storage_name: storageName, + }, + }, + }); + } + + async getPBSCredentialsByServer(serverId) { + return await prisma.pBSStorageCredential.findMany({ + where: { server_id: serverId }, + orderBy: { storage_name: 'asc' }, + }); + } + + async deletePBSCredential(serverId, storageName) { + return await prisma.pBSStorageCredential.delete({ + where: { + server_id_storage_name: { + server_id: serverId, + storage_name: storageName, + }, + }, + }); + } + async close() { await prisma.$disconnect(); } diff --git a/src/server/database-prisma.ts b/src/server/database-prisma.ts index d6d83d5..37d9cca 100644 --- a/src/server/database-prisma.ts +++ b/src/server/database-prisma.ts @@ -298,6 +298,197 @@ class DatabaseServicePrisma { }); } + // Backup CRUD operations + async createOrUpdateBackup(backupData: { + container_id: string; + server_id: number; + hostname: string; + backup_name: string; + backup_path: string; + size?: bigint; + created_at?: Date; + storage_name: string; + storage_type: 'local' | 'storage' | 'pbs'; + }) { + // Find existing backup by container_id, server_id, and backup_path + const existing = await prisma.backup.findFirst({ + where: { + container_id: backupData.container_id, + server_id: backupData.server_id, + backup_path: backupData.backup_path, + }, + }); + + if (existing) { + // Update existing backup + return await prisma.backup.update({ + where: { id: existing.id }, + data: { + hostname: backupData.hostname, + backup_name: backupData.backup_name, + size: backupData.size, + created_at: backupData.created_at, + storage_name: backupData.storage_name, + storage_type: backupData.storage_type, + discovered_at: new Date(), + }, + }); + } else { + // Create new backup + return await prisma.backup.create({ + data: { + container_id: backupData.container_id, + server_id: 
backupData.server_id, + hostname: backupData.hostname, + backup_name: backupData.backup_name, + backup_path: backupData.backup_path, + size: backupData.size, + created_at: backupData.created_at, + storage_name: backupData.storage_name, + storage_type: backupData.storage_type, + discovered_at: new Date(), + }, + }); + } + } + + async getAllBackups() { + return await prisma.backup.findMany({ + include: { + server: true, + }, + orderBy: [ + { container_id: 'asc' }, + { created_at: 'desc' }, + ], + }); + } + + async getBackupById(id: number) { + return await prisma.backup.findUnique({ + where: { id }, + include: { + server: true, + }, + }); + } + + async getBackupsByContainerId(containerId: string) { + return await prisma.backup.findMany({ + where: { container_id: containerId }, + include: { + server: true, + }, + orderBy: { created_at: 'desc' }, + }); + } + + async deleteBackupsForContainer(containerId: string, serverId: number) { + return await prisma.backup.deleteMany({ + where: { + container_id: containerId, + server_id: serverId, + }, + }); + } + + async getBackupsGroupedByContainer(): Promise>> { + const backups = await this.getAllBackups(); + const grouped = new Map(); + + for (const backup of backups) { + const key = backup.container_id; + if (!grouped.has(key)) { + grouped.set(key, []); + } + grouped.get(key)!.push(backup); + } + + return grouped; + } + + // PBS Credentials CRUD operations + async createOrUpdatePBSCredential(credentialData: { + server_id: number; + storage_name: string; + pbs_ip: string; + pbs_datastore: string; + pbs_password: string; + pbs_fingerprint: string; + }) { + return await prisma.pBSStorageCredential.upsert({ + where: { + server_id_storage_name: { + server_id: credentialData.server_id, + storage_name: credentialData.storage_name, + }, + }, + update: { + pbs_ip: credentialData.pbs_ip, + pbs_datastore: credentialData.pbs_datastore, + pbs_password: credentialData.pbs_password, + pbs_fingerprint: credentialData.pbs_fingerprint, + 
updated_at: new Date(), + }, + create: { + server_id: credentialData.server_id, + storage_name: credentialData.storage_name, + pbs_ip: credentialData.pbs_ip, + pbs_datastore: credentialData.pbs_datastore, + pbs_password: credentialData.pbs_password, + pbs_fingerprint: credentialData.pbs_fingerprint, + }, + }); + } + + async getPBSCredential(serverId: number, storageName: string) { + return await prisma.pBSStorageCredential.findUnique({ + where: { + server_id_storage_name: { + server_id: serverId, + storage_name: storageName, + }, + }, + }); + } + + async getPBSCredentialsByServer(serverId: number) { + return await prisma.pBSStorageCredential.findMany({ + where: { server_id: serverId }, + orderBy: { storage_name: 'asc' }, + }); + } + + async deletePBSCredential(serverId: number, storageName: string) { + return await prisma.pBSStorageCredential.delete({ + where: { + server_id_storage_name: { + server_id: serverId, + storage_name: storageName, + }, + }, + }); + } + async close() { await prisma.$disconnect(); } diff --git a/src/server/services/backupService.ts b/src/server/services/backupService.ts new file mode 100644 index 0000000..f65e4cd --- /dev/null +++ b/src/server/services/backupService.ts @@ -0,0 +1,690 @@ +import { getSSHExecutionService } from '../ssh-execution-service'; +import { getStorageService } from './storageService'; +import { getDatabase } from '../database-prisma'; +import type { Server } from '~/types/server'; +import type { Storage } from './storageService'; + +export interface BackupData { + container_id: string; + server_id: number; + hostname: string; + backup_name: string; + backup_path: string; + size?: bigint; + created_at?: Date; + storage_name: string; + storage_type: 'local' | 'storage' | 'pbs'; +} + +class BackupService { + /** + * Get server hostname via SSH + */ + async getServerHostname(server: Server): Promise { + const sshService = getSSHExecutionService(); + let hostname = ''; + + await new Promise((resolve, reject) => { + 
sshService.executeCommand( + server, + 'hostname', + (data: string) => { + hostname += data; + }, + (error: string) => { + reject(new Error(`Failed to get hostname: ${error}`)); + }, + (exitCode: number) => { + if (exitCode === 0) { + resolve(); + } else { + reject(new Error(`hostname command failed with exit code ${exitCode}`)); + } + } + ); + }); + + return hostname.trim(); + } + + /** + * Discover local backups in /var/lib/vz/dump/ + */ + async discoverLocalBackups(server: Server, ctId: string, hostname: string): Promise<BackupData[]> { + const sshService = getSSHExecutionService(); + const backups: BackupData[] = []; + + // Find backup files matching pattern (with timeout) + const findCommand = `timeout 10 find /var/lib/vz/dump/ -type f -name "vzdump-lxc-${ctId}-*.tar*" 2>/dev/null`; + let findOutput = ''; + + try { + await Promise.race([ + new Promise<void>((resolve) => { + sshService.executeCommand( + server, + findCommand, + (data: string) => { + findOutput += data; + }, + (error: string) => { + // Ignore errors - directory might not exist + resolve(); + }, + (exitCode: number) => { + resolve(); + } + ); + }), + new Promise<void>((resolve) => { + setTimeout(() => { + resolve(); + }, 15000); // 15 second timeout + }) + ]); + + const backupPaths = findOutput.trim().split('\n').filter(path => path.trim()); + + // Get detailed info for each backup file + for (const backupPath of backupPaths) { + if (!backupPath.trim()) continue; + + try { + // Get file size and modification time + const statCommand = `stat -c "%s|%Y|%n" "${backupPath}" 2>/dev/null || stat -f "%z|%m|%N" "${backupPath}" 2>/dev/null || echo ""`; + let statOutput = ''; + + await Promise.race([ + new Promise<void>((resolve) => { + sshService.executeCommand( + server, + statCommand, + (data: string) => { + statOutput += data; + }, + () => resolve(), + () => resolve() + ); + }), + new Promise<void>((resolve) => { + setTimeout(() => resolve(), 5000); // 5 second timeout for stat + }) + ]); + + const statParts = statOutput.trim().split('|'); 
+ const fileName = backupPath.split('/').pop() || backupPath; + + if (statParts.length >= 2 && statParts[0] && statParts[1]) { + const size = BigInt(statParts[0] || '0'); + const mtime = parseInt(statParts[1] || '0', 10); + + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: fileName, + backup_path: backupPath, + size, + created_at: mtime > 0 ? new Date(mtime * 1000) : undefined, + storage_name: 'local', + storage_type: 'local', + }); + } else { + // If stat fails, still add the backup with minimal info + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: fileName, + backup_path: backupPath, + size: undefined, + created_at: undefined, + storage_name: 'local', + storage_type: 'local', + }); + } + } catch (error) { + // Still try to add the backup even if stat fails + const fileName = backupPath.split('/').pop() || backupPath; + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: fileName, + backup_path: backupPath, + size: undefined, + created_at: undefined, + storage_name: 'local', + storage_type: 'local', + }); + } + } + } catch (error) { + console.error(`Error discovering local backups for CT ${ctId}:`, error); + } + + return backups; + } + + /** + * Discover backups in mounted storage (/mnt/pve/<storage>/dump/) + */ + async discoverStorageBackups(server: Server, storage: Storage, ctId: string, hostname: string): Promise<BackupData[]> { + const sshService = getSSHExecutionService(); + const backups: BackupData[] = []; + + const dumpPath = `/mnt/pve/${storage.name}/dump/`; + const findCommand = `timeout 10 find "${dumpPath}" -type f -name "vzdump-lxc-${ctId}-*.tar*" 2>/dev/null`; + let findOutput = ''; + + console.log(`[BackupService] Discovering storage backups for CT ${ctId} on ${storage.name}`); + + try { + await Promise.race([ + new Promise<void>((resolve) => { + sshService.executeCommand( + server, + findCommand, + (data: string) => { + findOutput += data; + }, + (error: string) 
=> { + // Ignore errors - storage might not be mounted + resolve(); + }, + (exitCode: number) => { + resolve(); + } + ); + }), + new Promise((resolve) => { + setTimeout(() => { + console.log(`[BackupService] Storage backup discovery timeout for ${storage.name}`); + resolve(); + }, 15000); // 15 second timeout + }) + ]); + + const backupPaths = findOutput.trim().split('\n').filter(path => path.trim()); + console.log(`[BackupService] Found ${backupPaths.length} backup files for CT ${ctId} on storage ${storage.name}`); + + // Get detailed info for each backup file + for (const backupPath of backupPaths) { + if (!backupPath.trim()) continue; + + try { + const statCommand = `stat -c "%s|%Y|%n" "${backupPath}" 2>/dev/null || stat -f "%z|%m|%N" "${backupPath}" 2>/dev/null || echo ""`; + let statOutput = ''; + + await Promise.race([ + new Promise((resolve) => { + sshService.executeCommand( + server, + statCommand, + (data: string) => { + statOutput += data; + }, + () => resolve(), + () => resolve() + ); + }), + new Promise((resolve) => { + setTimeout(() => resolve(), 5000); // 5 second timeout for stat + }) + ]); + + const statParts = statOutput.trim().split('|'); + const fileName = backupPath.split('/').pop() || backupPath; + + if (statParts.length >= 2 && statParts[0] && statParts[1]) { + const size = BigInt(statParts[0] || '0'); + const mtime = parseInt(statParts[1] || '0', 10); + + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: fileName, + backup_path: backupPath, + size, + created_at: mtime > 0 ? 
new Date(mtime * 1000) : undefined, + storage_name: storage.name, + storage_type: 'storage', + }); + console.log(`[BackupService] Added storage backup: ${fileName} from ${storage.name}`); + } else { + // If stat fails, still add the backup with minimal info + console.log(`[BackupService] Stat failed for ${fileName}, adding backup without size/date`); + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: fileName, + backup_path: backupPath, + size: undefined, + created_at: undefined, + storage_name: storage.name, + storage_type: 'storage', + }); + } + } catch (error) { + console.error(`Error processing backup ${backupPath}:`, error); + // Still try to add the backup even if stat fails + const fileName = backupPath.split('/').pop() || backupPath; + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: fileName, + backup_path: backupPath, + size: undefined, + created_at: undefined, + storage_name: storage.name, + storage_type: 'storage', + }); + } + } + + console.log(`[BackupService] Total storage backups found for CT ${ctId} on ${storage.name}: ${backups.length}`); + } catch (error) { + console.error(`Error discovering storage backups for CT ${ctId} on ${storage.name}:`, error); + } + + return backups; + } + + /** + * Login to PBS using stored credentials + */ + async loginToPBS(server: Server, storage: Storage): Promise { + const db = getDatabase(); + const credential = await db.getPBSCredential(server.id, storage.name); + + if (!credential) { + console.log(`[BackupService] No PBS credentials found for storage ${storage.name}, skipping PBS discovery`); + return false; + } + + const sshService = getSSHExecutionService(); + const storageService = getStorageService(); + const pbsInfo = storageService.getPBSStorageInfo(storage); + + // Use IP and datastore from credentials (they override config if different) + const pbsIp = credential.pbs_ip || pbsInfo.pbs_ip; + const pbsDatastore = 
credential.pbs_datastore || pbsInfo.pbs_datastore; + + if (!pbsIp || !pbsDatastore) { + console.log(`[BackupService] Missing PBS IP or datastore for storage ${storage.name}`); + return false; + } + + // Build login command + // Format: proxmox-backup-client login --repository root@pam@<pbs_ip>:<pbs_datastore> + // PBS supports PBS_PASSWORD and PBS_REPOSITORY environment variables for non-interactive login + const repository = `root@pam@${pbsIp}:${pbsDatastore}`; + + // Escape password for shell safety (single quotes) + const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''"); + + // Use PBS_PASSWORD environment variable for non-interactive authentication + // Auto-accept fingerprint by piping "y" to stdin + // PBS will use PBS_PASSWORD env var if available, avoiding interactive prompt + const fullCommand = `echo "y" | PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 10 proxmox-backup-client login --repository ${repository} 2>&1`; + + console.log(`[BackupService] Logging into PBS: ${repository}`); + + let loginOutput = ''; + let loginSuccess = false; + + try { + await Promise.race([ + new Promise<void>((resolve) => { + sshService.executeCommand( + server, + fullCommand, + (data: string) => { + loginOutput += data; + }, + (error: string) => { + console.log(`[BackupService] PBS login error: ${error}`); + resolve(); + }, + (exitCode: number) => { + loginSuccess = exitCode === 0; + if (loginSuccess) { + console.log(`[BackupService] Successfully logged into PBS: ${repository}`); + } else { + console.log(`[BackupService] PBS login failed with exit code ${exitCode}`); + console.log(`[BackupService] Login output: ${loginOutput}`); + } + resolve(); + } + ); + }), + new Promise<void>((resolve) => { + setTimeout(() => { + console.log(`[BackupService] PBS login timeout`); + resolve(); + }, 15000); // 15 second timeout + }) + ]); + + // Check if login was successful (look for success indicators in output) + if (loginSuccess || loginOutput.includes('successfully') || 
loginOutput.includes('logged in')) { + return true; + } + + return false; + } catch (error) { + console.error(`[BackupService] Error during PBS login:`, error); + return false; + } + } + + /** + * Discover PBS backups using proxmox-backup-client + */ + async discoverPBSBackups(server: Server, storage: Storage, ctId: string, hostname: string): Promise { + const sshService = getSSHExecutionService(); + const backups: BackupData[] = []; + + // Login to PBS first + const loggedIn = await this.loginToPBS(server, storage); + if (!loggedIn) { + console.log(`[BackupService] Failed to login to PBS for storage ${storage.name}, skipping backup discovery`); + return backups; + } + + // Get PBS credentials to build full repository string + const db = getDatabase(); + const credential = await db.getPBSCredential(server.id, storage.name); + if (!credential) { + console.log(`[BackupService] No PBS credentials found for storage ${storage.name}`); + return backups; + } + + const storageService = getStorageService(); + const pbsInfo = storageService.getPBSStorageInfo(storage); + const pbsIp = credential.pbs_ip || pbsInfo.pbs_ip; + const pbsDatastore = credential.pbs_datastore || pbsInfo.pbs_datastore; + + if (!pbsIp || !pbsDatastore) { + console.log(`[BackupService] Missing PBS IP or datastore for storage ${storage.name}`); + return backups; + } + + // Build full repository string: root@pam@: + const repository = `root@pam@${pbsIp}:${pbsDatastore}`; + + // Use correct command: snapshot list ct/ --repository + const command = `timeout 30 proxmox-backup-client snapshot list ct/${ctId} --repository ${repository} 2>&1 || echo "PBS_ERROR"`; + let output = ''; + + console.log(`[BackupService] Discovering PBS backups for CT ${ctId} on repository ${repository}`); + + try { + // Add timeout to prevent hanging + await Promise.race([ + new Promise((resolve, reject) => { + sshService.executeCommand( + server, + command, + (data: string) => { + output += data; + }, + (error: string) => { + 
console.log(`[BackupService] PBS command error: ${error}`);
            // Resolve (not reject): discovery is best-effort, parsing below
            // tolerates partial/empty output.
            resolve();
          },
          (exitCode: number) => {
            console.log(`[BackupService] PBS command completed with exit code ${exitCode}`);
            resolve();
          }
        );
      }),
      // Safety net only: the remote command already carries a 30s `timeout`,
      // so this 35s race guards against a hung SSH channel.
      new Promise<void>((resolve) => {
        setTimeout(() => {
          console.log(`[BackupService] PBS discovery timeout, continuing...`);
          resolve();
        }, 35000);
      })
    ]);

    // The remote command appends "PBS_ERROR" on failure; the extra substring
    // checks catch proxmox-backup-client error text that still exits 0.
    // NOTE(review): bare 'error'/'Error' matching is broad and would also
    // reject a valid listing whose snapshot paths contain that word — confirm
    // this is acceptable for the expected datastore naming.
    if (output.includes('PBS_ERROR') || output.includes('error') || output.includes('Error')) {
      console.log(`[BackupService] PBS discovery failed or no backups found for CT ${ctId}`);
      return backups;
    }

    // Parse the table printed by `proxmox-backup-client snapshot list`, e.g.:
    // │ ct/148/2025-10-21T19:14:55Z │ 994.944 MiB │ catalog.pcat1 client.log ... │
    const lines = output.trim().split('\n').filter(line => line.trim());

    console.log(`[BackupService] Parsing ${lines.length} lines from PBS output`);

    for (const line of lines) {
      // Skip the header row ("snapshot | size | files").
      if (line.includes('snapshot') && line.includes('size') && line.includes('files')) {
        continue;
      }
      // Skip box-drawing separator rows. Parentheses added for clarity: '&&'
      // binds tighter than '||', so the regex test only ever applied to the
      // final term — made explicit here without changing evaluation.
      if (line.includes('═') || line.includes('─') || (line.includes('│') && line.match(/^[│═─╞╪╡├┼┤└┴┘]+$/))) {
        continue;
      }
      // Skip any residual status or error lines.
      if (line.includes('repository') || line.includes('error') || line.includes('Error') || line.includes('PBS_ERROR')) {
        continue;
      }

      // Split the table row on the box-drawing pipe into trimmed cells.
      const parts = line.split('│').map(p => p.trim()).filter(p => p);

      if (parts.length >= 2) {
        const snapshotPath = parts[0]; // e.g. "ct/148/2025-10-21T19:14:55Z"
        const sizeStr = parts[1];      // e.g. "994.944 MiB"

        if (!snapshotPath) {
          continue; // malformed row — nothing to record
        }

        // Extract snapshot 
name (last part after /) + const snapshotParts = snapshotPath.split('/'); + const snapshotName = snapshotParts[snapshotParts.length - 1] || snapshotPath; + + if (!snapshotName) { + continue; // Skip if no snapshot name + } + + // Parse date from snapshot name (format: 2025-10-21T19:14:55Z) + let createdAt: Date | undefined; + const dateMatch = snapshotName.match(/(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z)/); + if (dateMatch && dateMatch[1]) { + try { + createdAt = new Date(dateMatch[1]); + } catch (e) { + // Invalid date, leave undefined + } + } + + // Parse size (convert MiB/GiB to bytes) + let size: bigint | undefined; + if (sizeStr) { + const sizeMatch = sizeStr.match(/([\d.]+)\s*(MiB|GiB|KiB|B)/i); + if (sizeMatch && sizeMatch[1] && sizeMatch[2]) { + const sizeValue = parseFloat(sizeMatch[1]); + const unit = sizeMatch[2].toUpperCase(); + let bytes = sizeValue; + + if (unit === 'KIB') bytes = sizeValue * 1024; + else if (unit === 'MIB') bytes = sizeValue * 1024 * 1024; + else if (unit === 'GIB') bytes = sizeValue * 1024 * 1024 * 1024; + + size = BigInt(Math.floor(bytes)); + } + } + + backups.push({ + container_id: ctId, + server_id: server.id, + hostname, + backup_name: snapshotName, + backup_path: `pbs://${repository}/${snapshotPath}`, + size, + created_at: createdAt, + storage_name: storage.name, + storage_type: 'pbs', + }); + } + } + + console.log(`[BackupService] Found ${backups.length} PBS backups for CT ${ctId}`); + } catch (error) { + console.error(`Error discovering PBS backups for CT ${ctId}:`, error); + } + + return backups; + } + + /** + * Discover all backups for a container across all backup-capable storages + */ + async discoverAllBackupsForContainer(server: Server, ctId: string, hostname: string): Promise { + const allBackups: BackupData[] = []; + + try { + // Get server hostname to filter storages + const serverHostname = await this.getServerHostname(server); + const normalizedHostname = serverHostname.trim().toLowerCase(); + 
console.log(`[BackupService] Discovering backups for server ${server.name} (hostname: ${serverHostname}, normalized: ${normalizedHostname})`); + + // Get all backup-capable storages (force refresh to get latest node assignments) + const storageService = getStorageService(); + const allStorages = await storageService.getBackupStorages(server, true); // Force refresh + + console.log(`[BackupService] Found ${allStorages.length} backup-capable storages total`); + + // Filter storages by node hostname matching + const applicableStorages = allStorages.filter(storage => { + // If storage has no nodes specified, it's available on all nodes + if (!storage.nodes || storage.nodes.length === 0) { + console.log(`[BackupService] Storage ${storage.name} has no nodes specified, including it`); + return true; + } + + // Normalize all nodes for comparison + const normalizedNodes = storage.nodes.map(node => node.trim().toLowerCase()); + const isApplicable = normalizedNodes.includes(normalizedHostname); + + if (!isApplicable) { + console.log(`[BackupService] EXCLUDING Storage ${storage.name} (nodes: ${storage.nodes.join(', ')}) - not applicable for hostname: ${serverHostname}`); + } else { + console.log(`[BackupService] INCLUDING Storage ${storage.name} (nodes: ${storage.nodes.join(', ')}) - applicable for hostname: ${serverHostname}`); + } + + return isApplicable; + }); + + console.log(`[BackupService] Filtered to ${applicableStorages.length} applicable storages for ${serverHostname}`); + + // Discover local backups + const localBackups = await this.discoverLocalBackups(server, ctId, hostname); + allBackups.push(...localBackups); + + // Discover backups from each applicable storage + for (const storage of applicableStorages) { + try { + if (storage.type === 'pbs') { + // PBS storage + const pbsBackups = await this.discoverPBSBackups(server, storage, ctId, hostname); + allBackups.push(...pbsBackups); + } else { + // Regular storage (dir, nfs, etc.) 
+ const storageBackups = await this.discoverStorageBackups(server, storage, ctId, hostname); + allBackups.push(...storageBackups); + } + } catch (error) { + console.error(`[BackupService] Error discovering backups from storage ${storage.name}:`, error); + // Continue with other storages + } + } + + console.log(`[BackupService] Total backups discovered for CT ${ctId}: ${allBackups.length}`); + } catch (error) { + console.error(`Error discovering backups for container ${ctId}:`, error); + } + + return allBackups; + } + + /** + * Discover backups for all installed scripts with container_id + */ + async discoverAllBackups(): Promise { + const db = getDatabase(); + const scripts = await db.getAllInstalledScripts(); + + // Filter scripts that have container_id and server_id + const scriptsWithContainers = scripts.filter( + (script: any) => script.container_id && script.server_id && script.server + ); + + // Clear all existing backups first to ensure we start fresh + console.log('[BackupService] Clearing all existing backups before rediscovery...'); + const allBackups = await db.getAllBackups(); + for (const backup of allBackups) { + await db.deleteBackupsForContainer(backup.container_id, backup.server_id); + } + console.log('[BackupService] Cleared all existing backups'); + + for (const script of scriptsWithContainers) { + if (!script.container_id || !script.server_id || !script.server) continue; + + const containerId = script.container_id; + const serverId = script.server_id; + const server = script.server as Server; + + try { + // Get hostname from LXC config if available, otherwise use script name + let hostname = script.script_name || `CT-${script.container_id}`; + try { + const lxcConfig = await db.getLXCConfigByScriptId(script.id); + if (lxcConfig?.hostname) { + hostname = lxcConfig.hostname; + } + } catch (error) { + // LXC config might not exist, use script name + console.debug(`No LXC config found for script ${script.id}, using script name as hostname`); + } + + 
console.log(`[BackupService] Discovering backups for script ${script.id}, CT ${containerId} on server ${server.name}`); + + // Discover backups for this container + const backups = await this.discoverAllBackupsForContainer( + server, + containerId, + hostname + ); + + console.log(`[BackupService] Found ${backups.length} backups for CT ${containerId} on server ${server.name}`); + + // Save discovered backups + for (const backup of backups) { + await db.createOrUpdateBackup(backup); + } + } catch (error) { + console.error(`Error discovering backups for script ${script.id} (CT ${script.container_id}):`, error); + } + } + } +} + +// Singleton instance +let backupServiceInstance: BackupService | null = null; + +export function getBackupService(): BackupService { + if (!backupServiceInstance) { + backupServiceInstance = new BackupService(); + } + return backupServiceInstance; +} + diff --git a/src/server/services/restoreService.ts b/src/server/services/restoreService.ts new file mode 100644 index 0000000..935b949 --- /dev/null +++ b/src/server/services/restoreService.ts @@ -0,0 +1,561 @@ +import { getSSHExecutionService } from '../ssh-execution-service'; +import { getBackupService } from './backupService'; +import { getStorageService } from './storageService'; +import { getDatabase } from '../database-prisma'; +import type { Server } from '~/types/server'; +import type { Storage } from './storageService'; +import { writeFile, readFile } from 'fs/promises'; +import { join } from 'path'; +import { existsSync } from 'fs'; + +export interface RestoreProgress { + step: string; + message: string; +} + +export interface RestoreResult { + success: boolean; + error?: string; + progress?: RestoreProgress[]; +} + +class RestoreService { + /** + * Get rootfs storage from LXC config or installed scripts database + */ + async getRootfsStorage(server: Server, ctId: string): Promise { + const sshService = getSSHExecutionService(); + const db = getDatabase(); + const configPath = 
`/etc/pve/lxc/${ctId}.conf`;
    const readCommand = `cat "${configPath}" 2>/dev/null || echo ""`;
    let rawConfig = '';

    // Shared fallback: resolve the storage name from the installed-scripts DB.
    // Used both when the config file lacks a rootfs line and when reading it
    // fails — previously this lookup was duplicated verbatim in both paths.
    const storageFromDb = async (): Promise<string | null> => {
      const installedScripts = await db.getAllInstalledScripts();
      const script = installedScripts.find((s: any) => s.container_id === ctId && s.server_id === server.id);
      if (!script) {
        return null;
      }
      const lxcConfig = await db.getLXCConfigByScriptId(script.id);
      // rootfs_storage format: "STORAGE:vm-148-disk-0" — keep only the storage name.
      const match = lxcConfig?.rootfs_storage?.match(/^([^:]+):/);
      return match?.[1] ? match[1].trim() : null;
    };

    try {
      // Read the container config; tolerate a missing file (the container may
      // already have been destroyed), so neither callback rejects.
      await new Promise<void>((resolve) => {
        sshService.executeCommand(
          server,
          readCommand,
          (data: string) => {
            rawConfig += data;
          },
          () => resolve(), // don't fail on stderr
          () => resolve()  // always resolve on exit
        );
      });

      // Parse "rootfs: PROX2-STORAGE2:vm-148-disk-0,size=4G" → "PROX2-STORAGE2".
      if (rawConfig.trim()) {
        for (const line of rawConfig.split('\n')) {
          const trimmed = line.trim();
          if (trimmed.startsWith('rootfs:')) {
            const match = trimmed.match(/^rootfs:\s*([^:]+):/);
            if (match && match[1]) {
              return match[1].trim();
            }
          }
        }
      }

      // Config absent or without a rootfs line — fall back to the database.
      const fromDb = await storageFromDb();
      if (fromDb) {
        return fromDb;
      }

      return null;
    } catch (error) {
      // SSH or parse failure — try the database before giving up.
      try {
        const fromDb = await storageFromDb();
        if (fromDb) {
          return fromDb;
        }
      } catch (dbError) {
        // Ignore database errors; fall through to the final return below.
      }
+ return null; + } + } + + /** + * Stop container (continue if already stopped) + */ + async stopContainer(server: Server, ctId: string): Promise { + const sshService = getSSHExecutionService(); + const command = `pct stop ${ctId} 2>&1 || true`; // Continue even if already stopped + + await new Promise((resolve) => { + sshService.executeCommand( + server, + command, + () => {}, + () => resolve(), + () => resolve() // Always resolve, don't fail if already stopped + ); + }); + } + + /** + * Destroy container + */ + async destroyContainer(server: Server, ctId: string): Promise { + const sshService = getSSHExecutionService(); + const command = `pct destroy ${ctId} 2>&1`; + let output = ''; + let exitCode = 0; + + await new Promise((resolve, reject) => { + sshService.executeCommand( + server, + command, + (data: string) => { + output += data; + }, + (error: string) => { + // Check if error is about container not existing + if (error.includes('does not exist') || error.includes('not found')) { + resolve(); // Container doesn't exist, that's fine + } else { + reject(new Error(`Destroy failed: ${error}`)); + } + }, + (code: number) => { + exitCode = code; + if (exitCode === 0) { + resolve(); + } else { + // Check if error is about container not existing + if (output.includes('does not exist') || output.includes('not found') || output.includes('No such file')) { + resolve(); // Container doesn't exist, that's fine + } else { + reject(new Error(`Destroy failed with exit code ${exitCode}: ${output}`)); + } + } + } + ); + }); + } + + /** + * Restore from local/storage backup + */ + async restoreLocalBackup( + server: Server, + ctId: string, + backupPath: string, + storage: string + ): Promise { + const sshService = getSSHExecutionService(); + const command = `pct restore ${ctId} "${backupPath}" --storage=${storage}`; + let output = ''; + let exitCode = 0; + + await new Promise((resolve, reject) => { + sshService.executeCommand( + server, + command, + (data: string) => { + 
output += data; + }, + (error: string) => { + reject(new Error(`Restore failed: ${error}`)); + }, + (code: number) => { + exitCode = code; + if (exitCode === 0) { + resolve(); + } else { + reject(new Error(`Restore failed with exit code ${exitCode}: ${output}`)); + } + } + ); + }); + } + + /** + * Restore from PBS backup + */ + async restorePBSBackup( + server: Server, + storage: Storage, + ctId: string, + snapshotPath: string, + storageName: string, + onProgress?: (step: string, message: string) => Promise + ): Promise { + const backupService = getBackupService(); + const sshService = getSSHExecutionService(); + const db = getDatabase(); + + // Get PBS credentials + const credential = await db.getPBSCredential(server.id, storage.name); + if (!credential) { + throw new Error(`No PBS credentials found for storage ${storage.name}`); + } + + const storageService = getStorageService(); + const pbsInfo = storageService.getPBSStorageInfo(storage); + const pbsIp = credential.pbs_ip || pbsInfo.pbs_ip; + const pbsDatastore = credential.pbs_datastore || pbsInfo.pbs_datastore; + + if (!pbsIp || !pbsDatastore) { + throw new Error(`Missing PBS IP or datastore for storage ${storage.name}`); + } + + const repository = `root@pam@${pbsIp}:${pbsDatastore}`; + + // Extract snapshot name from path (e.g., "2025-10-21T19:14:55Z" from "ct/148/2025-10-21T19:14:55Z") + const snapshotParts = snapshotPath.split('/'); + const snapshotName = snapshotParts[snapshotParts.length - 1] || snapshotPath; + // Replace colons with underscores for file paths (tar doesn't like colons in filenames) + const snapshotNameForPath = snapshotName.replace(/:/g, '_'); + + // Determine file extension - try common extensions + const extensions = ['.tar', '.tar.zst', '.pxar']; + let downloadedPath = ''; + let downloadSuccess = false; + + // Login to PBS first + if (onProgress) await onProgress('pbs_login', 'Logging into PBS...'); + const loggedIn = await backupService.loginToPBS(server, storage); + if (!loggedIn) { 
+ throw new Error(`Failed to login to PBS for storage ${storage.name}`); + } + + // Download backup from PBS + // proxmox-backup-client restore outputs a folder, not a file + if (onProgress) await onProgress('pbs_download', 'Downloading backup from PBS...'); + + // Target folder for PBS restore (without extension) + // Use sanitized snapshot name (colons replaced with underscores) for file paths + const targetFolder = `/var/lib/vz/dump/vzdump-lxc-${ctId}-${snapshotNameForPath}`; + const targetTar = `${targetFolder}.tar`; + + // Use PBS_PASSWORD env var and add timeout for long downloads + const escapedPassword = credential.pbs_password.replace(/'/g, "'\\''"); + const restoreCommand = `PBS_PASSWORD='${escapedPassword}' PBS_REPOSITORY='${repository}' timeout 300 proxmox-backup-client restore "${snapshotPath}" root.pxar "${targetFolder}" --repository '${repository}' 2>&1`; + + let output = ''; + let exitCode = 0; + + try { + // Download from PBS (creates a folder) + await Promise.race([ + new Promise((resolve, reject) => { + sshService.executeCommand( + server, + restoreCommand, + (data: string) => { + output += data; + }, + (error: string) => { + reject(new Error(`Download failed: ${error}`)); + }, + (code: number) => { + exitCode = code; + if (exitCode === 0) { + resolve(); + } else { + reject(new Error(`Download failed with exit code ${exitCode}: ${output}`)); + } + } + ); + }), + new Promise((resolve, reject) => { + setTimeout(() => { + reject(new Error('Download timeout after 5 minutes')); + }, 300000); // 5 minute timeout + }) + ]); + + // Check if folder exists + const checkCommand = `test -d "${targetFolder}" && echo "exists" || echo "notfound"`; + let checkOutput = ''; + + await new Promise((resolve) => { + sshService.executeCommand( + server, + checkCommand, + (data: string) => { + checkOutput += data; + }, + () => resolve(), + () => resolve() + ); + }); + + if (!checkOutput.includes('exists')) { + throw new Error(`Downloaded folder ${targetFolder} does not 
exist`); + } + + // Pack the folder into a tar file + if (onProgress) await onProgress('pbs_pack', 'Packing backup folder...'); + + // Use -C to change to the folder directory, then pack all contents (.) into the tar file + const packCommand = `tar -cf "${targetTar}" -C "${targetFolder}" . 2>&1`; + let packOutput = ''; + let packExitCode = 0; + + await Promise.race([ + new Promise((resolve, reject) => { + sshService.executeCommand( + server, + packCommand, + (data: string) => { + packOutput += data; + }, + (error: string) => { + reject(new Error(`Pack failed: ${error}`)); + }, + (code: number) => { + packExitCode = code; + if (packExitCode === 0) { + resolve(); + } else { + reject(new Error(`Pack failed with exit code ${packExitCode}: ${packOutput}`)); + } + } + ); + }), + new Promise((resolve, reject) => { + setTimeout(() => { + reject(new Error('Pack timeout after 2 minutes')); + }, 120000); // 2 minute timeout for packing + }) + ]); + + // Check if tar file exists + const checkTarCommand = `test -f "${targetTar}" && echo "exists" || echo "notfound"`; + let checkTarOutput = ''; + + await new Promise((resolve) => { + sshService.executeCommand( + server, + checkTarCommand, + (data: string) => { + checkTarOutput += data; + }, + () => resolve(), + () => resolve() + ); + }); + + if (!checkTarOutput.includes('exists')) { + throw new Error(`Packed tar file ${targetTar} does not exist`); + } + + downloadedPath = targetTar; + downloadSuccess = true; + } catch (error) { + throw error; + } + + if (!downloadSuccess || !downloadedPath) { + throw new Error(`Failed to download and pack backup from PBS`); + } + + // Restore from packed tar file + if (onProgress) await onProgress('restoring', 'Restoring container...'); + try { + await this.restoreLocalBackup(server, ctId, downloadedPath, storageName); + } finally { + // Cleanup: delete downloaded folder and tar file + if (onProgress) await onProgress('cleanup', 'Cleaning up temporary files...'); + const cleanupCommand = `rm -rf 
"${targetFolder}" "${targetTar}" 2>&1 || true`; + sshService.executeCommand( + server, + cleanupCommand, + () => {}, + () => {}, + () => {} + ); + } + } + + /** + * Execute full restore flow + */ + async executeRestore( + backupId: number, + containerId: string, + serverId: number, + onProgress?: (progress: RestoreProgress) => void + ): Promise { + const progress: RestoreProgress[] = []; + const logPath = join(process.cwd(), 'restore.log'); + + // Clear log file at start of restore + const clearLogFile = async () => { + try { + await writeFile(logPath, '', 'utf-8'); + } catch (error) { + // Ignore log file errors + } + }; + + // Write progress to log file + const writeProgressToLog = async (message: string) => { + try { + const logLine = `${message}\n`; + await writeFile(logPath, logLine, { flag: 'a', encoding: 'utf-8' }); + } catch (error) { + // Ignore log file errors + } + }; + + const addProgress = async (step: string, message: string) => { + const p = { step, message }; + progress.push(p); + + // Write to log file (just the message, without step prefix) + await writeProgressToLog(message); + + // Call callback if provided + if (onProgress) { + onProgress(p); + } + }; + + try { + // Clear log file at start + await clearLogFile(); + + const db = getDatabase(); + const sshService = getSSHExecutionService(); + + await addProgress('starting', 'Starting restore...'); + + // Get backup details + const backup = await db.getBackupById(backupId); + if (!backup) { + throw new Error(`Backup with ID ${backupId} not found`); + } + + // Get server details + const server = await db.getServerById(serverId); + if (!server) { + throw new Error(`Server with ID ${serverId} not found`); + } + + // Get rootfs storage + await addProgress('reading_config', 'Reading container configuration...'); + const rootfsStorage = await this.getRootfsStorage(server, containerId); + + if (!rootfsStorage) { + // Try to check if container exists, if not we can proceed without stopping/destroying + 
const checkCommand = `pct list ${containerId} 2>&1 | grep -q "^${containerId}" && echo "exists" || echo "notfound"`; + let checkOutput = ''; + await new Promise((resolve) => { + sshService.executeCommand( + server, + checkCommand, + (data: string) => { + checkOutput += data; + }, + () => resolve(), + () => resolve() + ); + }); + + if (checkOutput.includes('notfound')) { + // Container doesn't exist, we can't determine storage - need user input or use default + throw new Error(`Container ${containerId} does not exist and storage could not be determined. Please ensure the container exists or specify the storage manually.`); + } + + throw new Error(`Could not determine rootfs storage for container ${containerId}. Please ensure the container exists and has a valid configuration.`); + } + + // Try to stop and destroy container - if it doesn't exist, continue anyway + await addProgress('stopping', 'Stopping container...'); + try { + await this.stopContainer(server, containerId); + } catch (error) { + // Continue even if stop fails + } + + // Try to destroy container - if it doesn't exist, continue anyway + await addProgress('destroying', 'Destroying container...'); + try { + await this.destroyContainer(server, containerId); + } catch (error) { + // Container might not exist, which is fine - continue with restore + await addProgress('skipping', 'Container does not exist or already destroyed, continuing...'); + } + + // Restore based on backup type + if (backup.storage_type === 'pbs') { + // Get storage info for PBS + const storageService = getStorageService(); + const storages = await storageService.getStorages(server, false); + const storage = storages.find(s => s.name === backup.storage_name); + + if (!storage) { + throw new Error(`Storage ${backup.storage_name} not found`); + } + + // Parse snapshot path from backup_path (format: pbs://root@pam@IP:DATASTORE/ct/148/2025-10-21T19:14:55Z) + const snapshotPathMatch = backup.backup_path.match(/pbs:\/\/[^/]+\/(.+)$/); + if 
(!snapshotPathMatch || !snapshotPathMatch[1]) { + throw new Error(`Invalid PBS backup path format: ${backup.backup_path}`); + } + + const snapshotPath = snapshotPathMatch[1]; + + await this.restorePBSBackup(server, storage, containerId, snapshotPath, rootfsStorage, async (step, message) => { + await addProgress(step, message); + }); + } else { + // Local or storage backup + await addProgress('restoring', 'Restoring container...'); + await this.restoreLocalBackup(server, containerId, backup.backup_path, rootfsStorage); + } + + await addProgress('complete', 'Restore completed successfully'); + + return { + success: true, + progress, + }; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'; + await addProgress('error', `Error: ${errorMessage}`); + + return { + success: false, + error: errorMessage, + progress, + }; + } + } +} + +// Singleton instance +let restoreServiceInstance: RestoreService | null = null; + +export function getRestoreService(): RestoreService { + if (!restoreServiceInstance) { + restoreServiceInstance = new RestoreService(); + } + return restoreServiceInstance; +} + diff --git a/src/server/services/storageService.ts b/src/server/services/storageService.ts new file mode 100644 index 0000000..7157c42 --- /dev/null +++ b/src/server/services/storageService.ts @@ -0,0 +1,223 @@ +import { getSSHExecutionService } from '../ssh-execution-service'; +import type { Server } from '~/types/server'; + +export interface Storage { + name: string; + type: string; + content: string[]; + supportsBackup: boolean; + nodes?: string[]; + [key: string]: any; // For additional storage-specific properties +} + +interface CachedStorageData { + storages: Storage[]; + lastFetched: Date; +} + +class StorageService { + private cache: Map = new Map(); + private readonly CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour + + /** + * Parse storage.cfg content and extract storage information + */ + private 
parseStorageConfig(configContent: string): Storage[] { + const storages: Storage[] = []; + const lines = configContent.split('\n'); + + let currentStorage: Partial | null = null; + + for (let i = 0; i < lines.length; i++) { + const rawLine = lines[i]; + if (!rawLine) continue; + + // Check if line is indented (has leading whitespace/tabs) BEFORE trimming + const isIndented = /^[\s\t]/.test(rawLine); + const line = rawLine.trim(); + + // Skip empty lines and comments + if (!line || line.startsWith('#')) { + continue; + } + + // Check if this is a storage definition line (format: "type: name") + // Storage definitions are NOT indented + if (!isIndented) { + const storageMatch = line.match(/^(\w+):\s*(.+)$/); + if (storageMatch && storageMatch[1] && storageMatch[2]) { + // Save previous storage if exists + if (currentStorage && currentStorage.name) { + storages.push(this.finalizeStorage(currentStorage)); + } + + // Start new storage + currentStorage = { + type: storageMatch[1], + name: storageMatch[2], + content: [], + supportsBackup: false, + }; + continue; + } + } + + // Parse storage properties (indented lines - can be tabs or spaces) + if (currentStorage && isIndented) { + // Split on first whitespace (space or tab) to separate key and value + const match = line.match(/^(\S+)\s+(.+)$/); + + if (match && match[1] && match[2]) { + const key = match[1]; + const value = match[2].trim(); + + switch (key) { + case 'content': + // Content can be comma-separated: "images,rootdir" or "backup" + currentStorage.content = value.split(',').map(c => c.trim()); + currentStorage.supportsBackup = currentStorage.content.includes('backup'); + break; + case 'nodes': + // Nodes can be comma-separated: "prox5" or "prox5,prox6" + currentStorage.nodes = value.split(',').map(n => n.trim()); + break; + default: + // Store other properties + if (key) { + (currentStorage as any)[key] = value; + } + } + } + } + } + + // Don't forget the last storage + if (currentStorage && 
currentStorage.name) { + storages.push(this.finalizeStorage(currentStorage)); + } + + return storages; + } + + /** + * Finalize storage object with proper typing + */ + private finalizeStorage(storage: Partial): Storage { + return { + name: storage.name!, + type: storage.type!, + content: storage.content || [], + supportsBackup: storage.supportsBackup || false, + nodes: storage.nodes, + ...Object.fromEntries( + Object.entries(storage).filter(([key]) => + !['name', 'type', 'content', 'supportsBackup', 'nodes'].includes(key) + ) + ), + }; + } + + /** + * Fetch storage configuration from server via SSH + */ + async fetchStoragesFromServer(server: Server, forceRefresh = false): Promise { + const serverId = server.id; + + // Check cache first (unless force refresh) + if (!forceRefresh && this.cache.has(serverId)) { + const cached = this.cache.get(serverId)!; + const age = Date.now() - cached.lastFetched.getTime(); + + if (age < this.CACHE_TTL_MS) { + return cached.storages; + } + } + + // Fetch from server + const sshService = getSSHExecutionService(); + let configContent = ''; + + await new Promise((resolve, reject) => { + sshService.executeCommand( + server, + 'cat /etc/pve/storage.cfg', + (data: string) => { + configContent += data; + }, + (error: string) => { + reject(new Error(`Failed to read storage config: ${error}`)); + }, + (exitCode: number) => { + if (exitCode === 0) { + resolve(); + } else { + reject(new Error(`Command failed with exit code ${exitCode}`)); + } + } + ); + }); + + // Parse and cache + const storages = this.parseStorageConfig(configContent); + this.cache.set(serverId, { + storages, + lastFetched: new Date(), + }); + + return storages; + } + + /** + * Get all storages for a server (cached or fresh) + */ + async getStorages(server: Server, forceRefresh = false): Promise { + return this.fetchStoragesFromServer(server, forceRefresh); + } + + /** + * Get only backup-capable storages + */ + async getBackupStorages(server: Server, forceRefresh = 
false): Promise { + const allStorages = await this.getStorages(server, forceRefresh); + return allStorages.filter(s => s.supportsBackup); + } + + /** + * Get PBS storage information (IP and datastore) from storage config + */ + getPBSStorageInfo(storage: Storage): { pbs_ip: string | null; pbs_datastore: string | null } { + if (storage.type !== 'pbs') { + return { pbs_ip: null, pbs_datastore: null }; + } + + return { + pbs_ip: (storage as any).server || null, + pbs_datastore: (storage as any).datastore || null, + }; + } + + /** + * Clear cache for a specific server + */ + clearCache(serverId: number): void { + this.cache.delete(serverId); + } + + /** + * Clear all caches + */ + clearAllCaches(): void { + this.cache.clear(); + } +} + +// Singleton instance +let storageServiceInstance: StorageService | null = null; + +export function getStorageService(): StorageService { + if (!storageServiceInstance) { + storageServiceInstance = new StorageService(); + } + return storageServiceInstance; +} +