diff --git a/frontend/src/features/images/components/images-table.tsx b/frontend/src/features/images/components/images-table.tsx index 68ad2d6f..6f00c916 100644 --- a/frontend/src/features/images/components/images-table.tsx +++ b/frontend/src/features/images/components/images-table.tsx @@ -1,12 +1,18 @@ import { useMemo, useState } from "react"; import { DownloadIcon, + FileTextIcon, RefreshCcwIcon, SearchIcon, + ShieldAlertIcon, + ShieldCheckIcon, Trash2Icon, } from "lucide-react"; import { toast } from "sonner"; +import { BulkScanDialog } from "@/features/scanner/components/bulk-scan-dialog"; +import { SBOMDialog } from "@/features/scanner/components/sbom-dialog"; +import { ScanDialog } from "@/features/scanner/components/scan-dialog"; import { AlertDialog, AlertDialogAction, @@ -37,8 +43,8 @@ import { } from "@/components/ui/tooltip"; import { useImagesQuery, useRemoveImageMutation } from "../hooks/use-images-query"; -import { ImagePullDialog } from "./image-pull-dialog"; import type { ImageInfo } from "../types"; +import { ImagePullDialog } from "./image-pull-dialog"; function formatBytes(bytes: number): string { if (bytes === 0) return "0 B"; @@ -75,21 +81,18 @@ export function ImagesTable() { image: ImageInfo; host: string; } | null>(null); + const [scanImage, setScanImage] = useState(null); + const [sbomImage, setSbomImage] = useState(null); + const [bulkScanOpen, setBulkScanOpen] = useState(false); - // Images already come as flat array with host field - const allImages = useMemo(() => { - if (!data?.images) return []; - return data.images; - }, [data?.images]); + const allImages = useMemo(() => data?.images ?? 
[], [data?.images]); - // Get unique hosts for pull dialog const hosts = useMemo(() => { if (!data?.images) return []; const uniqueHosts = new Set(data.images.map((img) => img.host)); return Array.from(uniqueHosts); }, [data?.images]); - // Filter images by search const filteredImages = useMemo(() => { if (!searchText) return allImages; const search = searchText.toLowerCase(); @@ -149,7 +152,7 @@ export function ImagesTable() { <> -
+
Docker Images
@@ -168,10 +171,16 @@ export function ImagesTable() { Refresh {!data?.readOnly && ( - + <> + + + )}
@@ -198,9 +207,7 @@ export function ImagesTable() { Host Size Created - {!data?.readOnly && ( - Actions - )} + {!data?.readOnly && Actions} @@ -213,7 +220,7 @@ export function ImagesTable() { - + {image.id.replace("sha256:", "").slice(0, 12)} @@ -230,23 +237,49 @@ export function ImagesTable() { {formatDate(image.created)} {!data?.readOnly && ( - - - - - Remove image - +
+ + + + + Scan image + + + + + + Generate SBOM + + + + + + Remove image + +
)} @@ -265,6 +298,30 @@ export function ImagesTable() { onSelectedHostsChange={setSelectedHosts} /> + + + {scanImage && ( + { + if (!open) setScanImage(null); + }} + imageRef={getImageDisplayName(scanImage)} + host={scanImage.host} + /> + )} + + {sbomImage && ( + { + if (!open) setSbomImage(null); + }} + imageRef={getImageDisplayName(sbomImage)} + host={sbomImage.host} + /> + )} + !open && setImageToDelete(null)} diff --git a/frontend/src/features/scanner/api/generate-sbom.ts b/frontend/src/features/scanner/api/generate-sbom.ts new file mode 100644 index 00000000..b2a1f869 --- /dev/null +++ b/frontend/src/features/scanner/api/generate-sbom.ts @@ -0,0 +1,55 @@ +import { authenticatedFetch } from "@/lib/api-client"; +import { API_BASE_URL } from "@/types/api"; + +import type { SBOMFormat, SBOMJob } from "../types"; + +const SBOM_ENDPOINT = `${API_BASE_URL}/api/v1/scan/sbom`; + +export interface GenerateSBOMParams { + imageRef: string; + host: string; + format?: SBOMFormat; +} + +export async function generateSBOM(params: GenerateSBOMParams): Promise { + const response = await authenticatedFetch(SBOM_ENDPOINT, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(params), + }); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.job as SBOMJob; +} + +export async function getSBOMJob(id: string): Promise { + const response = await authenticatedFetch(`${SBOM_ENDPOINT}/${id}`); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.job as SBOMJob; +} + +export function getSBOMDownloadURL(id: string): string { + return `${SBOM_ENDPOINT}/${id}?download=true`; +} + +export async function downloadSBOM(id: string): Promise { + const response = 
await authenticatedFetch(getSBOMDownloadURL(id)); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + return response.blob(); +} diff --git a/frontend/src/features/scanner/api/get-scan-jobs.ts b/frontend/src/features/scanner/api/get-scan-jobs.ts new file mode 100644 index 00000000..05583335 --- /dev/null +++ b/frontend/src/features/scanner/api/get-scan-jobs.ts @@ -0,0 +1,44 @@ +import { authenticatedFetch } from "@/lib/api-client"; +import { API_BASE_URL } from "@/types/api"; + +import type { BulkScanJob, ScanJob } from "../types"; + +const SCAN_JOBS_ENDPOINT = `${API_BASE_URL}/api/v1/scan/jobs`; + +export interface GetScanJobsResponse { + jobs: ScanJob[]; + bulkJobs: BulkScanJob[]; +} + +export async function getScanJobs(): Promise { + const response = await authenticatedFetch(SCAN_JOBS_ENDPOINT); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + return response.json(); +} + +export async function getScanJob(id: string): Promise<{ job?: ScanJob; bulkJob?: BulkScanJob }> { + const response = await authenticatedFetch(`${SCAN_JOBS_ENDPOINT}/${id}`); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + return response.json(); +} + +export async function cancelScanJob(id: string): Promise { + const response = await authenticatedFetch(`${SCAN_JOBS_ENDPOINT}/${id}`, { + method: "DELETE", + }); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } +} diff --git a/frontend/src/features/scanner/api/get-scan-results.ts b/frontend/src/features/scanner/api/get-scan-results.ts new file mode 100644 index 00000000..453c5163 --- /dev/null +++ 
b/frontend/src/features/scanner/api/get-scan-results.ts @@ -0,0 +1,40 @@ +import { authenticatedFetch } from "@/lib/api-client"; +import { API_BASE_URL } from "@/types/api"; + +import type { ScanResult } from "../types"; + +const SCAN_RESULTS_ENDPOINT = `${API_BASE_URL}/api/v1/scan/results`; + +export async function getScanResults(imageRef: string, host: string): Promise { + const encoded = encodeURIComponent(imageRef); + const response = await authenticatedFetch( + `${SCAN_RESULTS_ENDPOINT}/${encoded}?host=${encodeURIComponent(host)}` + ); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.results as ScanResult[]; +} + +export async function getLatestScanResult(imageRef: string, host: string): Promise { + const encoded = encodeURIComponent(imageRef); + const response = await authenticatedFetch( + `${SCAN_RESULTS_ENDPOINT}/${encoded}/latest?host=${encodeURIComponent(host)}` + ); + + if (response.status === 404) { + return null; + } + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.result as ScanResult; +} diff --git a/frontend/src/features/scanner/api/scanner-config.ts b/frontend/src/features/scanner/api/scanner-config.ts new file mode 100644 index 00000000..7122063c --- /dev/null +++ b/frontend/src/features/scanner/api/scanner-config.ts @@ -0,0 +1,45 @@ +import { authenticatedFetch } from "@/lib/api-client"; +import { API_BASE_URL } from "@/types/api"; + +import type { ScannerConfig } from "../types"; + +const SCANNER_CONFIG_ENDPOINT = `${API_BASE_URL}/api/v1/settings/scan`; + +export async function getScannerConfig(): Promise { + const response = await authenticatedFetch(SCANNER_CONFIG_ENDPOINT); + + if (!response.ok) { + const message = await response.text(); + 
throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.config as ScannerConfig; +} + +export async function updateScannerConfig(config: ScannerConfig): Promise { + const response = await authenticatedFetch(SCANNER_CONFIG_ENDPOINT, { + method: "PUT", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(config), + }); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.config as ScannerConfig; +} + +export async function testScanNotification(): Promise { + const response = await authenticatedFetch(`${SCANNER_CONFIG_ENDPOINT}/test-notification`, { + method: "POST", + }); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } +} diff --git a/frontend/src/features/scanner/api/start-bulk-scan.ts b/frontend/src/features/scanner/api/start-bulk-scan.ts new file mode 100644 index 00000000..77376be5 --- /dev/null +++ b/frontend/src/features/scanner/api/start-bulk-scan.ts @@ -0,0 +1,27 @@ +import { authenticatedFetch } from "@/lib/api-client"; +import { API_BASE_URL } from "@/types/api"; + +import type { BulkScanJob, ScannerType } from "../types"; + +const BULK_SCAN_ENDPOINT = `${API_BASE_URL}/api/v1/scan/bulk`; + +export interface StartBulkScanParams { + scanner?: ScannerType; + hosts?: string[]; +} + +export async function startBulkScan(params: StartBulkScanParams): Promise { + const response = await authenticatedFetch(BULK_SCAN_ENDPOINT, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(params), + }); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + 
return data.job as BulkScanJob; +} diff --git a/frontend/src/features/scanner/api/start-scan.ts b/frontend/src/features/scanner/api/start-scan.ts new file mode 100644 index 00000000..62a47135 --- /dev/null +++ b/frontend/src/features/scanner/api/start-scan.ts @@ -0,0 +1,28 @@ +import { authenticatedFetch } from "@/lib/api-client"; +import { API_BASE_URL } from "@/types/api"; + +import type { ScanJob, ScannerType } from "../types"; + +const SCAN_ENDPOINT = `${API_BASE_URL}/api/v1/scan`; + +export interface StartScanParams { + imageRef: string; + host: string; + scanner?: ScannerType; +} + +export async function startScan(params: StartScanParams): Promise { + const response = await authenticatedFetch(SCAN_ENDPOINT, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(params), + }); + + if (!response.ok) { + const message = await response.text(); + throw new Error(message || `Request failed with status ${response.status}`); + } + + const data = await response.json(); + return data.job as ScanJob; +} diff --git a/frontend/src/features/scanner/components/bulk-scan-dialog.tsx b/frontend/src/features/scanner/components/bulk-scan-dialog.tsx new file mode 100644 index 00000000..aa072c8c --- /dev/null +++ b/frontend/src/features/scanner/components/bulk-scan-dialog.tsx @@ -0,0 +1,255 @@ +import { useState } from "react"; +import { ShieldCheckIcon } from "lucide-react"; + +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Progress } from "@/components/ui/progress"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Spinner } from "@/components/ui/spinner"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from 
"@/components/ui/tabs"; + +import { useCancelScan, useScanJob, useStartBulkScan } from "../hooks/use-scan-query"; +import { ScanResultsExport } from "./scan-results-export"; +import { ScanResultsSummary } from "./scan-results-summary"; +import { ScanResultsTable } from "./scan-results-table"; +import type { ScanResult, ScannerType, SeveritySummary } from "../types"; + +interface BulkScanDialogProps { + isOpen: boolean; + onOpenChange: (open: boolean) => void; +} + +export function BulkScanDialog({ isOpen, onOpenChange }: BulkScanDialogProps) { + const [scanner, setScanner] = useState("grype"); + const [jobId, setJobId] = useState(null); + const [started, setStarted] = useState(false); + const [selectedResult, setSelectedResult] = useState(null); + + const startBulkScanMutation = useStartBulkScan(); + const cancelScanMutation = useCancelScan(); + const { data: jobData } = useScanJob(jobId, started); + + const bulkJob = jobData?.bulkJob; + const isScanning = bulkJob && !["complete", "failed", "cancelled"].includes(bulkJob.status); + const isComplete = bulkJob?.status === "complete"; + + const progress = bulkJob + ? 
((bulkJob.completed + bulkJob.failed) / Math.max(bulkJob.total_images, 1)) * 100 + : 0; + + const handleStart = async () => { + try { + const newJob = await startBulkScanMutation.mutateAsync({ scanner }); + setJobId(newJob.id); + setStarted(true); + setSelectedResult(null); + } catch { + // mutation handles errors + } + }; + + const handleCancel = () => { + if (jobId) { + cancelScanMutation.mutate(jobId); + } + }; + + const handleClose = (open: boolean) => { + if (!open) { + setJobId(null); + setStarted(false); + setSelectedResult(null); + } + onOpenChange(open); + }; + + const aggregateSummary: SeveritySummary = { + critical: 0, + high: 0, + medium: 0, + low: 0, + negligible: 0, + unknown: 0, + total: 0, + }; + + if (bulkJob) { + for (const job of bulkJob.jobs) { + if (job.result) { + aggregateSummary.critical += job.result.summary.critical; + aggregateSummary.high += job.result.summary.high; + aggregateSummary.medium += job.result.summary.medium; + aggregateSummary.low += job.result.summary.low; + aggregateSummary.negligible += job.result.summary.negligible; + aggregateSummary.unknown += job.result.summary.unknown; + aggregateSummary.total += job.result.summary.total; + } + } + } + + return ( + + + + + + Bulk vulnerability scan + + + Scan all Docker images for known vulnerabilities. + + + + {!started ? ( +
+
+ + +
+
+ + +
+
+ ) : ( +
+ {isScanning && bulkJob && ( + <> +
+ +

+ Scanning images... ({bulkJob.completed + bulkJob.failed}/{bulkJob.total_images}) +

+
+ + + )} + + {isComplete && bulkJob && ( +
+
+

+ Scan complete - {bulkJob.total_images} images +

+
+ {bulkJob.completed} succeeded + {bulkJob.failed > 0 && ( + {bulkJob.failed} failed + )} +
+
+ +
+ )} + + {bulkJob && bulkJob.jobs.length > 0 && ( + +
+ {bulkJob.jobs.map((job) => ( +
+
+

{job.image_ref}

+

{job.host}

+
+
+ {job.status === "complete" && job.result ? ( + <> + 0 ? "destructive" : "outline"} + className={job.result.summary.total === 0 ? "border-green-500 text-green-500" : ""} + > + {job.result.summary.total} vulns + + + + ) : job.status === "failed" ? ( + Failed + ) : job.status === "cancelled" ? ( + Cancelled + ) : ( + + )} +
+
+ ))} +
+
+ )} + + {selectedResult && ( +
+
+
+

{selectedResult.image_ref}

+

{selectedResult.host}

+
+ +
+ + + + Results + + + + + +
+ )} + +
+ {isScanning && ( + + )} + {(isComplete || bulkJob?.status === "failed" || bulkJob?.status === "cancelled") && ( + + )} +
+
+ )} +
+
+ ); +} diff --git a/frontend/src/features/scanner/components/sbom-dialog.tsx b/frontend/src/features/scanner/components/sbom-dialog.tsx new file mode 100644 index 00000000..179b011e --- /dev/null +++ b/frontend/src/features/scanner/components/sbom-dialog.tsx @@ -0,0 +1,182 @@ +import { useState } from "react"; +import { DownloadIcon, FileTextIcon } from "lucide-react"; +import { toast } from "sonner"; + +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Spinner } from "@/components/ui/spinner"; + +import { downloadSBOM } from "../api/generate-sbom"; +import { useGenerateSBOM, useSBOMJob } from "../hooks/use-scan-query"; +import type { SBOMFormat } from "../types"; + +interface SBOMDialogProps { + isOpen: boolean; + onOpenChange: (open: boolean) => void; + imageRef: string; + host: string; +} + +export function SBOMDialog({ isOpen, onOpenChange, imageRef, host }: SBOMDialogProps) { + const [format, setFormat] = useState("spdx-json"); + const [jobId, setJobId] = useState(null); + const [started, setStarted] = useState(false); + const [downloading, setDownloading] = useState(false); + + const generateMutation = useGenerateSBOM(); + const { data: sbomJob } = useSBOMJob(jobId, started); + + const isGenerating = sbomJob && !["complete", "failed", "cancelled"].includes(sbomJob.status); + const isComplete = sbomJob?.status === "complete"; + const isFailed = sbomJob?.status === "failed"; + + const handleGenerate = async () => { + try { + const job = await generateMutation.mutateAsync({ imageRef, host, format }); + setJobId(job.id); + setStarted(true); + } catch { + // mutation handles errors + } + }; + + const handleDownload = async () => { + if (!jobId) return; + + try { + 
setDownloading(true); + const blob = await downloadSBOM(jobId); + const url = URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = url; + link.download = `sbom-${imageRef.replace(/[/:]/g, "_")}.json`; + link.click(); + URL.revokeObjectURL(url); + } catch (err) { + toast.error(err instanceof Error ? err.message : "Failed to download SBOM"); + } finally { + setDownloading(false); + } + }; + + const handleClose = (open: boolean) => { + if (!open) { + setJobId(null); + setStarted(false); + setDownloading(false); + } + onOpenChange(open); + }; + + return ( + + + + + + Generate SBOM + + + Generate a Software Bill of Materials for{" "} + + {imageRef} + + + + + {!started ? ( +
+
+ + +
+
+ + +
+
+ ) : isGenerating ? ( +
+
+ +
+

Generating SBOM...

+

+ This may take a minute for large images. +

+
+
+
+ ) : isComplete ? ( +
+
+

SBOM generated successfully

+

+ Format: {format === "spdx-json" ? "SPDX" : "CycloneDX"} JSON +

+
+
+ + +
+
+ ) : isFailed ? ( +
+
+

SBOM generation failed

+

+ {sbomJob?.error || "An unknown error occurred"} +

+
+
+ + +
+
+ ) : null} +
+
+ ); +} diff --git a/frontend/src/features/scanner/components/scan-dialog.tsx b/frontend/src/features/scanner/components/scan-dialog.tsx new file mode 100644 index 00000000..9daac450 --- /dev/null +++ b/frontend/src/features/scanner/components/scan-dialog.tsx @@ -0,0 +1,212 @@ +import { useState } from "react"; +import { ShieldAlertIcon } from "lucide-react"; + +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Spinner } from "@/components/ui/spinner"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; + +import { useStartScan, useScanJob, useCancelScan } from "../hooks/use-scan-query"; +import { ScanResultsSummary } from "./scan-results-summary"; +import { ScanResultsTable } from "./scan-results-table"; +import { ScanResultsExport } from "./scan-results-export"; +import type { ScannerType } from "../types"; + +interface ScanDialogProps { + isOpen: boolean; + onOpenChange: (open: boolean) => void; + imageRef: string; + host: string; +} + +export function ScanDialog({ isOpen, onOpenChange, imageRef, host }: ScanDialogProps) { + const [scanner, setScanner] = useState("grype"); + const [jobId, setJobId] = useState(null); + const [started, setStarted] = useState(false); + + const startScanMutation = useStartScan(); + const cancelScanMutation = useCancelScan(); + const { data: jobData } = useScanJob(jobId, started); + + const job = jobData?.job; + const isScanning = job && !["complete", "failed", "cancelled"].includes(job.status); + const isComplete = job?.status === "complete"; + const isFailed = job?.status === "failed" || job?.status === "cancelled"; + + const handleStartScan = async () => { + try { + const newJob = await 
startScanMutation.mutateAsync({ imageRef, host, scanner }); + setJobId(newJob.id); + setStarted(true); + } catch { + // error handled by mutation + } + }; + + const handleCancel = () => { + if (jobId) { + cancelScanMutation.mutate(jobId); + } + }; + + const handleClose = (open: boolean) => { + if (!open) { + setJobId(null); + setStarted(false); + } + onOpenChange(open); + }; + + return ( + + + + + + Vulnerability scan + + {imageRef} + + + + Scan this image for known vulnerabilities using {scanner === "grype" ? "Grype" : "Trivy"}. + + + + {!started ? ( +
+
+
+ + +
+
+
+ + +
+
+ ) : isScanning ? ( +
+
+ +
+

Scanning for vulnerabilities...

+

+ {job?.progress || `Status: ${job?.status}`} +

+
+
+ +
+

+ {job?.progress || "Initializing scanner..."} +

+
+ +
+ +
+
+ ) : isFailed ? ( +
+
+

Scan failed

+

+ {job?.error || "An unknown error occurred"} +

+
+
+ + +
+
+ ) : isComplete && job?.result ? ( +
+
+
+
+ {job.result.summary.total > 0 ? ( + + {job.result.summary.total} vulnerabilities + + ) : ( + + No vulnerabilities + + )} + + {(job.result.duration_ms / 1000).toFixed(1)}s + +
+ +
+ +
+ + + + + Scan results + {job.result.summary.total > 0 && ( + + {job.result.summary.total} + + )} + + + + + + + +
+ +
+
+ ) : null} +
+
+ ); +} diff --git a/frontend/src/features/scanner/components/scan-results-export.tsx b/frontend/src/features/scanner/components/scan-results-export.tsx new file mode 100644 index 00000000..5b8cea51 --- /dev/null +++ b/frontend/src/features/scanner/components/scan-results-export.tsx @@ -0,0 +1,107 @@ +import { DownloadIcon } from "lucide-react"; + +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; + +import type { ScanResult } from "../types"; + +interface ScanResultsExportProps { + result: ScanResult; +} + +export function ScanResultsExport({ result }: ScanResultsExportProps) { + const downloadFile = (content: string, filename: string, mimeType: string) => { + const blob = new Blob([content], { type: mimeType }); + const url = URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = filename; + a.click(); + URL.revokeObjectURL(url); + }; + + const exportJSON = () => { + downloadFile( + JSON.stringify(result, null, 2), + `scan-${result.image_ref.replace(/[/:]/g, "_")}.json`, + "application/json" + ); + }; + + const exportCSV = () => { + const headers = ["CVE ID", "Severity", "Package", "Installed Version", "Fixed Version"]; + const rows = result.vulnerabilities.map((v) => [ + v.id, + v.severity, + v.package, + v.installed_version, + v.fixed_version || "", + ]); + + const csv = [headers, ...rows].map((row) => row.map((cell) => `"${cell}"`).join(",")).join("\n"); + downloadFile(csv, `scan-${result.image_ref.replace(/[/:]/g, "_")}.csv`, "text/csv"); + }; + + const exportMarkdown = () => { + const lines = [ + `# Vulnerability Scan Report`, + ``, + `**Image:** ${result.image_ref}`, + `**Host:** ${result.host}`, + `**Scanner:** ${result.scanner}`, + `**Duration:** ${(result.duration_ms / 1000).toFixed(1)}s`, + `**Date:** ${new Date(result.completed_at * 1000).toLocaleString()}`, + ``, + `## Summary`, 
+ ``, + `| Severity | Count |`, + `|----------|-------|`, + `| Critical | ${result.summary.critical} |`, + `| High | ${result.summary.high} |`, + `| Medium | ${result.summary.medium} |`, + `| Low | ${result.summary.low} |`, + `| **Total** | **${result.summary.total}** |`, + ``, + `## Vulnerabilities`, + ``, + `| CVE ID | Severity | Package | Installed | Fixed In |`, + `|--------|----------|---------|-----------|----------|`, + ...result.vulnerabilities.map( + (v) => `| ${v.id} | ${v.severity} | ${v.package} | ${v.installed_version} | ${v.fixed_version || "-"} |` + ), + ]; + + downloadFile( + lines.join("\n"), + `scan-${result.image_ref.replace(/[/:]/g, "_")}.md`, + "text/markdown" + ); + }; + + return ( + + + + + + + Markdown report (.md) + + + CSV spreadsheet (.csv) + + + JSON data (.json) + + + + ); +} diff --git a/frontend/src/features/scanner/components/scan-results-summary.tsx b/frontend/src/features/scanner/components/scan-results-summary.tsx new file mode 100644 index 00000000..b4a7408f --- /dev/null +++ b/frontend/src/features/scanner/components/scan-results-summary.tsx @@ -0,0 +1,46 @@ +import { Badge } from "@/components/ui/badge"; + +import type { SeveritySummary } from "../types"; + +interface ScanResultsSummaryProps { + summary: SeveritySummary; +} + +const severityColors: Record = { + critical: "bg-red-600 text-white hover:bg-red-600", + high: "bg-red-500 text-white hover:bg-red-500", + medium: "bg-orange-500 text-white hover:bg-orange-500", + low: "bg-yellow-500 text-white hover:bg-yellow-500", +}; + +export function ScanResultsSummary({ summary }: ScanResultsSummaryProps) { + return ( +
+ {summary.critical > 0 && ( + + {summary.critical} Critical + + )} + {summary.high > 0 && ( + + {summary.high} High + + )} + {summary.medium > 0 && ( + + {summary.medium} Medium + + )} + {summary.low > 0 && ( + + {summary.low} Low + + )} + {summary.total === 0 && ( + + No vulnerabilities + + )} +
+ ); +} diff --git a/frontend/src/features/scanner/components/scan-results-table.tsx b/frontend/src/features/scanner/components/scan-results-table.tsx new file mode 100644 index 00000000..1941b1d3 --- /dev/null +++ b/frontend/src/features/scanner/components/scan-results-table.tsx @@ -0,0 +1,185 @@ +import { useMemo, useState } from "react"; +import { ArrowUpDownIcon, ExternalLinkIcon, SearchIcon } from "lucide-react"; + +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; + +import type { SeverityLevel, Vulnerability } from "../types"; + +interface ScanResultsTableProps { + vulnerabilities: Vulnerability[]; +} + +const severityOrder: Record = { + Critical: 0, + High: 1, + Medium: 2, + Low: 3, + Negligible: 4, + Unknown: 5, +}; + +const severityColors: Record = { + Critical: "bg-red-600 text-white hover:bg-red-600", + High: "bg-red-500 text-white hover:bg-red-500", + Medium: "bg-orange-500 text-white hover:bg-orange-500", + Low: "bg-yellow-500 text-white hover:bg-yellow-500", + Negligible: "bg-gray-400 text-white hover:bg-gray-400", + Unknown: "bg-gray-300 text-gray-700 hover:bg-gray-300", +}; + +type SortField = "severity" | "id" | "package"; +type SortDir = "asc" | "desc"; + +export function ScanResultsTable({ vulnerabilities }: ScanResultsTableProps) { + const [search, setSearch] = useState(""); + const [sortField, setSortField] = useState("severity"); + const [sortDir, setSortDir] = useState("asc"); + + const toggleSort = (field: SortField) => { + if (sortField === field) { + setSortDir(sortDir === "asc" ? 
"desc" : "asc"); + } else { + setSortField(field); + setSortDir("asc"); + } + }; + + const filtered = useMemo(() => { + let items = vulnerabilities; + if (search) { + const s = search.toLowerCase(); + items = items.filter( + (v) => + v.id.toLowerCase().includes(s) || + v.package.toLowerCase().includes(s) || + v.severity.toLowerCase().includes(s) + ); + } + + items = [...items].sort((a, b) => { + let cmp = 0; + switch (sortField) { + case "severity": + cmp = severityOrder[a.severity] - severityOrder[b.severity]; + break; + case "id": + cmp = a.id.localeCompare(b.id); + break; + case "package": + cmp = a.package.localeCompare(b.package); + break; + } + return sortDir === "asc" ? cmp : -cmp; + }); + + return items; + }, [vulnerabilities, search, sortField, sortDir]); + + return ( +
+
+ + setSearch(e.target.value)} + className="pl-8" + /> +
+ +
+ + + + + + + + + + + + + Installed + Fixed in + + + + {filtered.length === 0 ? ( + + + {search ? "No matching vulnerabilities" : "No vulnerabilities found"} + + + ) : ( + filtered.map((vuln, index) => ( + + + + {vuln.id} + + + + + + {vuln.severity.toLowerCase()} + + + {vuln.package} + {vuln.installed_version} + + {vuln.fixed_version ? ( + {vuln.fixed_version} + ) : ( + - + )} + + + )) + )} + +
+
+ +

+ Showing {filtered.length} of {vulnerabilities.length} vulnerabilities +

+
+ ); +} diff --git a/frontend/src/features/scanner/hooks/use-scan-query.ts b/frontend/src/features/scanner/hooks/use-scan-query.ts new file mode 100644 index 00000000..55e1f2e2 --- /dev/null +++ b/frontend/src/features/scanner/hooks/use-scan-query.ts @@ -0,0 +1,107 @@ +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; + +import { startScan, type StartScanParams } from "../api/start-scan"; +import { startBulkScan, type StartBulkScanParams } from "../api/start-bulk-scan"; +import { getScanJob, cancelScanJob } from "../api/get-scan-jobs"; +import { getScanResults } from "../api/get-scan-results"; +import { generateSBOM, getSBOMJob, type GenerateSBOMParams } from "../api/generate-sbom"; +import { + getScannerConfig, + updateScannerConfig, + testScanNotification, +} from "../api/scanner-config"; +import type { ScannerConfig } from "../types"; + +const SCANNER_CONFIG_KEY = ["scannerConfig"] as const; + +export function useScannerConfig() { + return useQuery({ + queryKey: SCANNER_CONFIG_KEY, + queryFn: getScannerConfig, + staleTime: 30_000, + }); +} + +export function useUpdateScannerConfig() { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: (config: ScannerConfig) => updateScannerConfig(config), + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: SCANNER_CONFIG_KEY }); + }, + }); +} + +export function useTestScanNotification() { + return useMutation({ + mutationFn: () => testScanNotification(), + }); +} + +export function useStartScan() { + return useMutation({ + mutationFn: (params: StartScanParams) => startScan(params), + }); +} + +export function useStartBulkScan() { + return useMutation({ + mutationFn: (params: StartBulkScanParams) => startBulkScan(params), + }); +} + +export function useScanJob(id: string | null, enabled = true) { + return useQuery({ + queryKey: ["scanJob", id], + queryFn: () => getScanJob(id!), + enabled: enabled && !!id, + refetchInterval: (query) => { + const data = 
query.state.data; + if (!data) return 2000; + const job = data.job || data.bulkJob; + if (!job) return false; + const status = job.status; + if (status === "complete" || status === "failed" || status === "cancelled") { + return false; + } + return 2000; + }, + }); +} + +export function useCancelScan() { + return useMutation({ + mutationFn: (id: string) => cancelScanJob(id), + }); +} + +export function useScanResults(imageRef: string, host: string, enabled = true) { + return useQuery({ + queryKey: ["scanResults", imageRef, host], + queryFn: () => getScanResults(imageRef, host), + enabled, + staleTime: 30_000, + }); +} + +export function useGenerateSBOM() { + return useMutation({ + mutationFn: (params: GenerateSBOMParams) => generateSBOM(params), + }); +} + +export function useSBOMJob(id: string | null, enabled = true) { + return useQuery({ + queryKey: ["sbomJob", id], + queryFn: () => getSBOMJob(id!), + enabled: enabled && !!id, + refetchInterval: (query) => { + const data = query.state.data; + if (!data) return 2000; + if (data.status === "complete" || data.status === "failed" || data.status === "cancelled") { + return false; + } + return 2000; + }, + }); +} diff --git a/frontend/src/features/scanner/types.ts b/frontend/src/features/scanner/types.ts new file mode 100644 index 00000000..23863748 --- /dev/null +++ b/frontend/src/features/scanner/types.ts @@ -0,0 +1,87 @@ +export type ScannerType = "grype" | "trivy"; +export type SeverityLevel = "Critical" | "High" | "Medium" | "Low" | "Negligible" | "Unknown"; +export type ScanJobStatus = "pending" | "pulling_scanner" | "scanning" | "complete" | "failed" | "cancelled"; +export type SBOMFormat = "spdx-json" | "cyclonedx-json"; + +export interface Vulnerability { + id: string; + severity: SeverityLevel; + package: string; + installed_version: string; + fixed_version?: string; + description?: string; + data_source?: string; +} + +export interface SeveritySummary { + critical: number; + high: number; + medium: number; + 
low: number; + negligible: number; + unknown: number; + total: number; +} + +export interface ScanResult { + id: string; + image_ref: string; + host: string; + scanner: ScannerType; + vulnerabilities: Vulnerability[]; + summary: SeveritySummary; + started_at: number; + completed_at: number; + duration_ms: number; + error?: string; +} + +export interface ScanJob { + id: string; + image_ref: string; + host: string; + scanner: ScannerType; + status: ScanJobStatus; + progress?: string; + result?: ScanResult; + created_at: number; + error?: string; +} + +export interface BulkScanJob { + id: string; + jobs: ScanJob[]; + total_images: number; + completed: number; + failed: number; + status: ScanJobStatus; + created_at: number; +} + +export interface SBOMJob { + id: string; + image_ref: string; + host: string; + format: SBOMFormat; + status: ScanJobStatus; + created_at: number; + error?: string; +} + +export interface NotificationConfig { + discordWebhookURL?: string; + slackWebhookURL?: string; + onScanComplete: boolean; + onBulkComplete: boolean; + minSeverity?: SeverityLevel; +} + +export interface ScannerConfig { + grypeImage: string; + trivyImage: string; + syftImage: string; + defaultScanner: ScannerType; + grypeArgs: string; + trivyArgs: string; + notifications: NotificationConfig; +} diff --git a/frontend/src/features/settings/components/scanner-section.tsx b/frontend/src/features/settings/components/scanner-section.tsx new file mode 100644 index 00000000..a2cbe26c --- /dev/null +++ b/frontend/src/features/settings/components/scanner-section.tsx @@ -0,0 +1,327 @@ +import { useEffect, useMemo, useState } from "react"; +import { toast } from "sonner"; + +import { + useScannerConfig, + useTestScanNotification, + useUpdateScannerConfig, +} from "@/features/scanner/hooks/use-scan-query"; +import type { ScannerConfig, SeverityLevel } from "@/features/scanner/types"; +import { Button } from "@/components/ui/button"; +import { + Card, + CardContent, + CardDescription, + 
CardHeader, + CardTitle, +} from "@/components/ui/card"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { Switch } from "@/components/ui/switch"; + +interface ScannerSectionProps { + disabled?: boolean; +} + +const severityOptions: SeverityLevel[] = [ + "Critical", + "High", + "Medium", + "Low", + "Negligible", + "Unknown", +]; + +function configsMatch(a: ScannerConfig | null, b: ScannerConfig | null) { + return JSON.stringify(a) === JSON.stringify(b); +} + +export function ScannerSection({ disabled = false }: ScannerSectionProps) { + const { data, isLoading, error } = useScannerConfig(); + const updateMutation = useUpdateScannerConfig(); + const testMutation = useTestScanNotification(); + const [draft, setDraft] = useState(null); + + useEffect(() => { + if (data) { + setDraft(data); + } + }, [data]); + + const hasChanges = useMemo(() => { + if (!data || !draft) return false; + return !configsMatch(data, draft); + }, [data, draft]); + + const saveConfig = async () => { + if (!draft) return null; + const updated = await updateMutation.mutateAsync(draft); + toast.success("Scanner configuration saved"); + setDraft(updated); + return updated; + }; + + const handleSave = async () => { + try { + await saveConfig(); + } catch (err) { + toast.error(err instanceof Error ? err.message : "Failed to save scanner configuration"); + } + }; + + const handleTest = async () => { + try { + if (hasChanges) { + await saveConfig(); + } + await testMutation.mutateAsync(); + toast.success("Test notification sent"); + } catch (err) { + toast.error(err instanceof Error ? err.message : "Failed to send test notification"); + } + }; + + if (isLoading) { + return ( + + + Scanner + Loading scanner configuration... 
+ + + ); + } + + if (error || !draft) { + return ( + + + Scanner + + Failed to load scanner settings: {error?.message ?? "Unknown error"} + + + + ); + } + + const busy = disabled || updateMutation.isPending || testMutation.isPending; + + return ( + + + Scanner + + Configure vulnerability scanning, SBOM generation, and completion notifications. + + + +
+
+ + setDraft({ ...draft, grypeImage: e.target.value })} + disabled={busy} + /> +
+
+ + setDraft({ ...draft, trivyImage: e.target.value })} + disabled={busy} + /> +
+
+ + setDraft({ ...draft, syftImage: e.target.value })} + disabled={busy} + /> +
+
+ + +
+
+ + setDraft({ ...draft, grypeArgs: e.target.value })} + disabled={busy} + /> +
+
+ + setDraft({ ...draft, trivyArgs: e.target.value })} + disabled={busy} + /> +
+
+ +
+
+

Notifications

+

+ Send scanner completion updates to Discord and Slack webhooks. +

+
+ +
+
+ + + setDraft({ + ...draft, + notifications: { + ...draft.notifications, + discordWebhookURL: e.target.value, + }, + }) + } + disabled={busy} + /> +
+
+ + + setDraft({ + ...draft, + notifications: { + ...draft.notifications, + slackWebhookURL: e.target.value, + }, + }) + } + disabled={busy} + /> +
+
+ + +
+
+ +
+
+
+

On scan complete

+

+ Send a notification when a single image scan finishes. +

+
+ + setDraft({ + ...draft, + notifications: { + ...draft.notifications, + onScanComplete: value, + }, + }) + } + disabled={busy} + /> +
+
+
+

On bulk complete

+

+ Send a notification when the bulk scan finishes. +

+
+ + setDraft({ + ...draft, + notifications: { + ...draft.notifications, + onBulkComplete: value, + }, + }) + } + disabled={busy} + /> +
+
+
+ + {disabled && ( +

+ Scanner settings are disabled while the server is in read-only mode. +

+ )} + +
+ + +
+
+
+ ); +} diff --git a/frontend/src/features/settings/components/settings-page.tsx b/frontend/src/features/settings/components/settings-page.tsx index 7f206284..c57914e0 100644 --- a/frontend/src/features/settings/components/settings-page.tsx +++ b/frontend/src/features/settings/components/settings-page.tsx @@ -5,6 +5,7 @@ import { AuthSection } from "./auth-section"; import { CoolifyHostsSection } from "./coolify-hosts-section"; import { DockerHostsSection } from "./docker-hosts-section"; import { ReadOnlySection } from "./read-only-section"; +import { ScannerSection } from "./scanner-section"; export function SettingsPage() { const { data, isLoading, error } = useSettings(); @@ -43,6 +44,7 @@ export function SettingsPage() { +
); } diff --git a/home/cmd/server/main.go b/home/cmd/server/main.go index 5e1a00aa..e6a63e0d 100644 --- a/home/cmd/server/main.go +++ b/home/cmd/server/main.go @@ -10,6 +10,8 @@ import ( "github.com/hhftechnology/vps-monitor/internal/config" "github.com/hhftechnology/vps-monitor/internal/coolify" "github.com/hhftechnology/vps-monitor/internal/docker" + "github.com/hhftechnology/vps-monitor/internal/models" + "github.com/hhftechnology/vps-monitor/internal/scanner" "github.com/hhftechnology/vps-monitor/internal/services" "github.com/hhftechnology/vps-monitor/internal/system" ) @@ -30,14 +32,14 @@ func main() { if err != nil { log.Fatalf("Failed to initialize auth service: %v\nPlease ensure ALL auth environment variables are set: JWT_SECRET, ADMIN_USERNAME, and ADMIN_PASSWORD.", err) } - if authService.IsDisabled() { + if authService == nil || authService.IsDisabled() { fc := manager.FileConfigSnapshot() if fc.Auth != nil && fc.Auth.Enabled { authService = auth.NewServiceFromFileConfig(fc.Auth) } } - if authService.IsDisabled() { + if authService == nil || authService.IsDisabled() { log.Println("Authentication is DISABLED - no auth environment variables detected") log.Println(" To enable authentication, set: JWT_SECRET, ADMIN_USERNAME, ADMIN_PASSWORD") } else { @@ -81,6 +83,25 @@ func main() { registry := services.NewRegistry(multiHostClient, coolifyClient, authService, cfg, alertMonitor) + // Scanner service + scannerCfg := &models.ScannerConfig{ + GrypeImage: cfg.Scanner.GrypeImage, + TrivyImage: cfg.Scanner.TrivyImage, + SyftImage: cfg.Scanner.SyftImage, + DefaultScanner: models.ScannerType(cfg.Scanner.DefaultScanner), + GrypeArgs: cfg.Scanner.GrypeArgs, + TrivyArgs: cfg.Scanner.TrivyArgs, + Notifications: models.NotificationConfig{ + DiscordWebhookURL: cfg.Scanner.DiscordWebhookURL, + SlackWebhookURL: cfg.Scanner.SlackWebhookURL, + OnScanComplete: cfg.Scanner.NotifyOnComplete, + OnBulkComplete: cfg.Scanner.NotifyOnBulk, + MinSeverity: 
models.SeverityLevel(cfg.Scanner.NotifyMinSeverity), + }, + } + scannerService := scanner.NewScannerService(registry, scannerCfg) + log.Printf("Vulnerability scanner ready (default: %s)", cfg.Scanner.DefaultScanner) + // Hot-reload callback manager.OnChange(func(newCfg *config.Config) { registry.UpdateConfig(newCfg) @@ -114,7 +135,8 @@ func main() { }) routerOpts := &api.RouterOptions{ - AlertMonitor: alertMonitor, + AlertMonitor: alertMonitor, + ScannerService: scannerService, } apiRouter := api.NewRouter(registry, manager, routerOpts) diff --git a/home/internal/api/handlers.go b/home/internal/api/handlers.go index e36ae1e6..c451973b 100644 --- a/home/internal/api/handlers.go +++ b/home/internal/api/handlers.go @@ -6,6 +6,7 @@ import ( "fmt" "log" "net/http" + "reflect" "regexp" "strconv" "strings" @@ -24,6 +25,19 @@ type coolifyEnvSyncer interface { SyncEnvVars(ctx context.Context, resource *coolify.ResourceInfo, envVars map[string]string) error } +func isNilCoolifySyncer(syncer coolifyEnvSyncer) bool { + if syncer == nil { + return true + } + value := reflect.ValueOf(syncer) + switch value.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Pointer, reflect.Slice: + return value.IsNil() + default: + return false + } +} + func (ar *APIRouter) GetSystemStats(w http.ResponseWriter, r *http.Request) { ctx := r.Context() stats, err := system.GetStats(ctx) @@ -299,11 +313,11 @@ func (ar *APIRouter) GetContainerLogsParsed(w http.ResponseWriter, r *http.Reque func (ar *APIRouter) streamParsedLogs(w http.ResponseWriter, host, id string, options models.LogOptions) { dockerClient, releaseDocker := ar.registry.AcquireDocker() + defer releaseDocker() if dockerClient == nil { http.Error(w, "docker client unavailable", http.StatusServiceUnavailable) return } - defer releaseDocker() stream, err := dockerClient.StreamContainerLogsParsed(host, id, options) if err != nil { @@ -448,15 +462,19 @@ func (ar *APIRouter) UpdateEnvVariables(w 
http.ResponseWriter, r *http.Request) coolifyMulti := ar.registry.Coolify() if coolifyMulti != nil { coolifyClient := coolifyMulti.GetClient(host) - coolifyResource := coolify.ExtractResourceInfo(labels) - applyCoolifyEnvSync(r.Context(), host, coolifyClient, coolifyResource, envVariables.Env, response) + if isNilCoolifySyncer(coolifyClient) { + log.Printf("Warning: Coolify client unavailable for host %s; skipping env sync", host) + } else { + coolifyResource := coolify.ExtractResourceInfo(labels) + applyCoolifyEnvSync(r.Context(), host, coolifyClient, coolifyResource, envVariables.Env, response) + } } WriteJsonResponse(w, http.StatusOK, response) } func applyCoolifyEnvSync(ctx context.Context, host string, syncer coolifyEnvSyncer, resource *coolify.ResourceInfo, env map[string]string, response map[string]any) { - if syncer == nil || resource == nil { + if isNilCoolifySyncer(syncer) || resource == nil { return } if resource.Type == coolify.ResourceTypeDatabase { diff --git a/home/internal/api/router.go b/home/internal/api/router.go index 482b7e87..71f2ae38 100644 --- a/home/internal/api/router.go +++ b/home/internal/api/router.go @@ -14,6 +14,7 @@ import ( "github.com/hhftechnology/vps-monitor/internal/auth" "github.com/hhftechnology/vps-monitor/internal/config" "github.com/hhftechnology/vps-monitor/internal/models" + "github.com/hhftechnology/vps-monitor/internal/scanner" "github.com/hhftechnology/vps-monitor/internal/services" "github.com/hhftechnology/vps-monitor/internal/static" ) @@ -30,11 +31,13 @@ type APIRouter struct { registry *services.Registry manager *config.Manager alertHandlers *AlertHandlers + scanHandlers *ScanHandlers } // RouterOptions contains optional dependencies for the router type RouterOptions struct { - AlertMonitor *alerts.Monitor + AlertMonitor *alerts.Monitor + ScannerService *scanner.ScannerService } func NewRouter(registry *services.Registry, manager *config.Manager, opts *RouterOptions) *chi.Mux { @@ -46,6 +49,13 @@ func 
NewRouter(registry *services.Registry, manager *config.Manager, opts *Route manager: manager, } + // Set up scan handlers + if opts != nil && opts.ScannerService != nil { + r.scanHandlers = NewScanHandlers(opts.ScannerService, manager) + } else { + r.scanHandlers = nil + } + // Set up alert handlers if opts != nil && opts.AlertMonitor != nil { r.alertHandlers = NewAlertHandlers(opts.AlertMonitor, &models.AlertConfigResponse{ @@ -114,6 +124,7 @@ func (ar *APIRouter) Routes() *chi.Mux { ar.registerImageRoutes(protected) ar.registerNetworkRoutes(protected) ar.registerAlertRoutes(protected) + ar.registerScanRoutes(protected) }) }) @@ -190,6 +201,30 @@ func (ar *APIRouter) registerAlertRoutes(r chi.Router) { r.Post("/alerts/acknowledge-all", ar.alertHandlers.AcknowledgeAllAlerts) } +func (ar *APIRouter) registerScanRoutes(r chi.Router) { + if ar.scanHandlers == nil { + return + } + + // Read-only routes + r.Get("/scan/jobs", ar.scanHandlers.GetScanJobs) + r.Get("/scan/jobs/{id}", ar.scanHandlers.GetScanJob) + r.Get("/scan/results/{imageRef}", ar.scanHandlers.GetScanResults) + r.Get("/scan/results/{imageRef}/latest", ar.scanHandlers.GetLatestScanResult) + r.Get("/scan/sbom/{id}", ar.scanHandlers.GetSBOMJob) + + // Mutating routes (blocked in read-only mode) + r.Group(func(mutating chi.Router) { + mutating.Use(middleware.ReadOnly(func() bool { + return ar.registry.Config().ReadOnly + })) + mutating.Post("/scan", ar.scanHandlers.StartScan) + mutating.Post("/scan/bulk", ar.scanHandlers.StartBulkScan) + mutating.Delete("/scan/jobs/{id}", ar.scanHandlers.CancelScanJob) + mutating.Post("/scan/sbom", ar.scanHandlers.StartSBOMGeneration) + }) +} + func (ar *APIRouter) registerSettingsRoutes(r chi.Router) { r.Route("/settings", func(r chi.Router) { r.Use(auth.DynamicMiddleware(ar.registry.Auth)) @@ -201,6 +236,11 @@ func (ar *APIRouter) registerSettingsRoutes(r chi.Router) { r.Put("/auth", ar.UpdateAuth) r.Post("/test/docker-host", ar.TestDockerHost) r.Post("/test/coolify-host", 
ar.TestCoolifyHost) + if ar.scanHandlers != nil { + r.Get("/scan", ar.scanHandlers.GetScannerConfig) + r.Put("/scan", ar.scanHandlers.UpdateScannerConfig) + r.Post("/scan/test-notification", ar.scanHandlers.TestScanNotification) + } }) } diff --git a/home/internal/api/scan_handlers.go b/home/internal/api/scan_handlers.go new file mode 100644 index 00000000..54cebd04 --- /dev/null +++ b/home/internal/api/scan_handlers.go @@ -0,0 +1,331 @@ +package api + +import ( + "encoding/json" + "net/http" + "net/url" + + "github.com/go-chi/chi/v5" + "github.com/hhftechnology/vps-monitor/internal/config" + "github.com/hhftechnology/vps-monitor/internal/models" + "github.com/hhftechnology/vps-monitor/internal/scanner" +) + +// ScanHandlers holds dependencies for scan-related handlers +type ScanHandlers struct { + scanner *scanner.ScannerService + manager *config.Manager +} + +// NewScanHandlers creates new scan handlers +func NewScanHandlers(scannerService *scanner.ScannerService, manager *config.Manager) *ScanHandlers { + return &ScanHandlers{ + scanner: scannerService, + manager: manager, + } +} + +// StartScan handles POST /api/v1/scan +func (h *ScanHandlers) StartScan(w http.ResponseWriter, r *http.Request) { + var req struct { + ImageRef string `json:"imageRef"` + Host string `json:"host"` + Scanner models.ScannerType `json:"scanner"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "invalid request body", http.StatusBadRequest) + return + } + + if req.ImageRef == "" || req.Host == "" { + http.Error(w, "imageRef and host are required", http.StatusBadRequest) + return + } + + job, err := h.scanner.StartScan(req.ImageRef, req.Host, req.Scanner) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + WriteJsonResponse(w, http.StatusAccepted, map[string]any{ + "job": job, + }) +} + +// StartBulkScan handles POST /api/v1/scan/bulk +func (h *ScanHandlers) StartBulkScan(w http.ResponseWriter, r *http.Request) 
{ + var req struct { + Scanner models.ScannerType `json:"scanner"` + Hosts []string `json:"hosts"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "invalid request body", http.StatusBadRequest) + return + } + + bulkJob, err := h.scanner.StartBulkScan(req.Scanner, req.Hosts) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + WriteJsonResponse(w, http.StatusAccepted, map[string]any{ + "job": bulkJob, + }) +} + +// GetScanJobs handles GET /api/v1/scan/jobs +func (h *ScanHandlers) GetScanJobs(w http.ResponseWriter, r *http.Request) { + jobs := h.scanner.GetJobs() + bulkJobs := h.scanner.GetBulkJobs() + + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "jobs": jobs, + "bulkJobs": bulkJobs, + }) +} + +// GetScanJob handles GET /api/v1/scan/jobs/{id} +func (h *ScanHandlers) GetScanJob(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + if id == "" { + http.Error(w, "job id is required", http.StatusBadRequest) + return + } + + // Check if it's a regular job or bulk job + job := h.scanner.GetJob(id) + if job != nil { + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "job": job, + }) + return + } + + bulkJob := h.scanner.GetBulkJob(id) + if bulkJob != nil { + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "bulkJob": bulkJob, + }) + return + } + + http.Error(w, "job not found", http.StatusNotFound) +} + +// CancelScanJob handles DELETE /api/v1/scan/jobs/{id} +func (h *ScanHandlers) CancelScanJob(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + if id == "" { + http.Error(w, "job id is required", http.StatusBadRequest) + return + } + + if h.scanner.CancelJob(id) { + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "message": "Job cancelled", + }) + } else { + http.Error(w, "job not found or already completed", http.StatusNotFound) + } +} + +// GetScanResults handles GET /api/v1/scan/results/{imageRef} +func (h *ScanHandlers) 
GetScanResults(w http.ResponseWriter, r *http.Request) { + imageRef, err := url.PathUnescape(chi.URLParam(r, "imageRef")) + if err != nil { + http.Error(w, "invalid imageRef", http.StatusBadRequest) + return + } + host := r.URL.Query().Get("host") + if host == "" { + http.Error(w, "host query parameter is required", http.StatusBadRequest) + return + } + + results := h.scanner.Store().GetResults(host, imageRef) + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "results": results, + }) +} + +// GetLatestScanResult handles GET /api/v1/scan/results/{imageRef}/latest +func (h *ScanHandlers) GetLatestScanResult(w http.ResponseWriter, r *http.Request) { + imageRef, err := url.PathUnescape(chi.URLParam(r, "imageRef")) + if err != nil { + http.Error(w, "invalid imageRef", http.StatusBadRequest) + return + } + host := r.URL.Query().Get("host") + if host == "" { + http.Error(w, "host query parameter is required", http.StatusBadRequest) + return + } + + result := h.scanner.Store().GetLatest(host, imageRef) + if result == nil { + http.Error(w, "no scan results found", http.StatusNotFound) + return + } + + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "result": result, + }) +} + +// StartSBOMGeneration handles POST /api/v1/scan/sbom +func (h *ScanHandlers) StartSBOMGeneration(w http.ResponseWriter, r *http.Request) { + var req struct { + ImageRef string `json:"imageRef"` + Host string `json:"host"` + Format models.SBOMFormat `json:"format"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "invalid request body", http.StatusBadRequest) + return + } + + if req.ImageRef == "" || req.Host == "" { + http.Error(w, "imageRef and host are required", http.StatusBadRequest) + return + } + + if req.Format == "" { + req.Format = models.SBOMFormatSPDX + } + + job, err := h.scanner.StartSBOMGeneration(req.ImageRef, req.Host, req.Format) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + 
WriteJsonResponse(w, http.StatusAccepted, map[string]any{ + "job": job, + }) +} + +// GetSBOMJob handles GET /api/v1/scan/sbom/{id} +func (h *ScanHandlers) GetSBOMJob(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, "id") + if id == "" { + http.Error(w, "job id is required", http.StatusBadRequest) + return + } + + job := h.scanner.GetSBOMJob(id) + if job == nil { + http.Error(w, "SBOM job not found", http.StatusNotFound) + return + } + + // If complete and download requested, serve the file + if job.Status == models.ScanJobComplete && r.URL.Query().Get("download") == "true" && job.FilePath != "" { + w.Header().Set("Content-Disposition", "attachment; filename=sbom-"+id+".json") + w.Header().Set("Content-Type", "application/json") + http.ServeFile(w, r, job.FilePath) + return + } + + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "job": job, + }) +} + +// GetScannerConfig handles GET /api/v1/settings/scan +func (h *ScanHandlers) GetScannerConfig(w http.ResponseWriter, r *http.Request) { + cfg := h.scanner.Config() + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "config": cfg, + }) +} + +// UpdateScannerConfig handles PUT /api/v1/settings/scan +func (h *ScanHandlers) UpdateScannerConfig(w http.ResponseWriter, r *http.Request) { + var req struct { + GrypeImage string `json:"grypeImage"` + TrivyImage string `json:"trivyImage"` + SyftImage string `json:"syftImage"` + DefaultScanner string `json:"defaultScanner"` + GrypeArgs string `json:"grypeArgs"` + TrivyArgs string `json:"trivyArgs"` + Notifications struct { + DiscordWebhookURL string `json:"discordWebhookURL"` + SlackWebhookURL string `json:"slackWebhookURL"` + OnScanComplete *bool `json:"onScanComplete"` + OnBulkComplete *bool `json:"onBulkComplete"` + MinSeverity string `json:"minSeverity"` + } `json:"notifications"` + } + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "invalid request body", http.StatusBadRequest) + return + } + + // Persist to file 
config + fileCfg := &config.FileScannerConfig{ + GrypeImage: req.GrypeImage, + TrivyImage: req.TrivyImage, + SyftImage: req.SyftImage, + DefaultScanner: req.DefaultScanner, + GrypeArgs: req.GrypeArgs, + TrivyArgs: req.TrivyArgs, + Notifications: &config.FileNotificationConfig{ + DiscordWebhookURL: req.Notifications.DiscordWebhookURL, + SlackWebhookURL: req.Notifications.SlackWebhookURL, + OnScanComplete: req.Notifications.OnScanComplete, + OnBulkComplete: req.Notifications.OnBulkComplete, + MinSeverity: req.Notifications.MinSeverity, + }, + } + + if err := h.manager.UpdateScannerConfig(fileCfg); err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + // Update the scanner service config + mergedCfg := h.manager.Config() + scannerCfg := configToScannerConfig(&mergedCfg.Scanner) + h.scanner.UpdateConfig(scannerCfg) + + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "message": "Scanner configuration updated", + "config": scannerCfg, + }) +} + +// TestScanNotification handles POST /api/v1/settings/scan/test-notification +func (h *ScanHandlers) TestScanNotification(w http.ResponseWriter, r *http.Request) { + cfg := h.scanner.Config() + + notifier := scanner.NewNotifier() + if err := notifier.SendTestNotification(cfg.Notifications.DiscordWebhookURL, cfg.Notifications.SlackWebhookURL); err != nil { + http.Error(w, "notification test failed: "+err.Error(), http.StatusInternalServerError) + return + } + + WriteJsonResponse(w, http.StatusOK, map[string]any{ + "message": "Test notification sent successfully", + }) +} + +// configToScannerConfig converts config.ScannerConfig to models.ScannerConfig +func configToScannerConfig(cfg *config.ScannerConfig) *models.ScannerConfig { + return &models.ScannerConfig{ + GrypeImage: cfg.GrypeImage, + TrivyImage: cfg.TrivyImage, + SyftImage: cfg.SyftImage, + DefaultScanner: models.ScannerType(cfg.DefaultScanner), + GrypeArgs: cfg.GrypeArgs, + TrivyArgs: cfg.TrivyArgs, + Notifications: 
models.NotificationConfig{ + DiscordWebhookURL: cfg.DiscordWebhookURL, + SlackWebhookURL: cfg.SlackWebhookURL, + OnScanComplete: cfg.NotifyOnComplete, + OnBulkComplete: cfg.NotifyOnBulk, + MinSeverity: models.SeverityLevel(cfg.NotifyMinSeverity), + }, + } +} diff --git a/home/internal/auth/service_test.go b/home/internal/auth/service_test.go index 3cad1995..6c01324c 100644 --- a/home/internal/auth/service_test.go +++ b/home/internal/auth/service_test.go @@ -41,30 +41,6 @@ func TestValidateCredentialsRejectsNonBcryptHashes(t *testing.T) { } } -func TestIsBcryptHashFormatValidation(t *testing.T) { - validHash, err := HashPassword("super-secret") - if err != nil { - t.Fatalf("HashPassword failed: %v", err) - } - if !isBcryptHash(validHash) { - t.Fatalf("expected generated hash to be recognized as bcrypt") - } - - cases := []string{ - "", - "$2x$10$abcdefghijklmnopqrstuvwxyzABCDE1234567890abcdefghiJKL", - "$2b$aa$abcdefghijklmnopqrstuvwxyzABCDE1234567890abcdefghiJKL", - "$2b$10:abcdefghijklmnopqrstuvwxyzABCDE1234567890abcdefghiJKL", - "$2b$10$too-short", - } - - for _, tc := range cases { - if isBcryptHash(tc) { - t.Fatalf("expected invalid bcrypt format to be rejected: %q", tc) - } - } -} - func TestDynamicMiddlewareFailsClosedWhenUnavailable(t *testing.T) { handler := DynamicMiddleware(func() *Service { return nil })(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) diff --git a/home/internal/config/config.go b/home/internal/config/config.go index a1a654f8..5c07853d 100644 --- a/home/internal/config/config.go +++ b/home/internal/config/config.go @@ -39,12 +39,28 @@ type AlertConfig struct { CheckInterval time.Duration // How often to check thresholds } +// ScannerConfig holds configuration for vulnerability scanning +type ScannerConfig struct { + GrypeImage string + TrivyImage string + SyftImage string + DefaultScanner string + GrypeArgs string + TrivyArgs string + DiscordWebhookURL string + SlackWebhookURL string + 
NotifyOnComplete bool + NotifyOnBulk bool + NotifyMinSeverity string +} + type Config struct { ReadOnly bool Hostname string // Optional override for displayed hostname DockerHosts []DockerHost CoolifyHosts []CoolifyHostConfig Alerts AlertConfig + Scanner ScannerConfig } func NewConfig() *Config { @@ -71,12 +87,15 @@ func NewConfig() *Config { } } + scannerConfig := parseScannerConfig() + return &Config{ ReadOnly: isReadOnlyMode, Hostname: hostname, DockerHosts: dockerHosts, CoolifyHosts: coolifyHosts, Alerts: alertConfig, + Scanner: scannerConfig, } } @@ -151,6 +170,52 @@ func parseCoolifyHostConfigs() []CoolifyHostConfig { return configs } +func parseScannerConfig() ScannerConfig { + cfg := ScannerConfig{ + GrypeImage: "anchore/grype:v0.110.0", + TrivyImage: "aquasec/trivy:0.69.3", + SyftImage: "anchore/syft:v1.27.1", + DefaultScanner: "grype", + GrypeArgs: "", + TrivyArgs: "", + DiscordWebhookURL: os.Getenv("SCANNER_DISCORD_WEBHOOK_URL"), + SlackWebhookURL: os.Getenv("SCANNER_SLACK_WEBHOOK_URL"), + NotifyOnComplete: true, + NotifyOnBulk: true, + NotifyMinSeverity: "High", + } + + if v := os.Getenv("SCANNER_GRYPE_IMAGE"); v != "" { + cfg.GrypeImage = v + } + if v := os.Getenv("SCANNER_TRIVY_IMAGE"); v != "" { + cfg.TrivyImage = v + } + if v := os.Getenv("SCANNER_SYFT_IMAGE"); v != "" { + cfg.SyftImage = v + } + if v := os.Getenv("SCANNER_DEFAULT"); v != "" { + cfg.DefaultScanner = v + } + if v := os.Getenv("SCANNER_GRYPE_ARGS"); v != "" { + cfg.GrypeArgs = v + } + if v := os.Getenv("SCANNER_TRIVY_ARGS"); v != "" { + cfg.TrivyArgs = v + } + if os.Getenv("SCANNER_NOTIFY_ON_COMPLETE") == "false" { + cfg.NotifyOnComplete = false + } + if os.Getenv("SCANNER_NOTIFY_ON_BULK") == "false" { + cfg.NotifyOnBulk = false + } + if v := os.Getenv("SCANNER_NOTIFY_MIN_SEVERITY"); v != "" { + cfg.NotifyMinSeverity = v + } + + return cfg +} + func parseDockerHosts() []DockerHost { // Format: DOCKER_HOSTS=local=unix:///var/run/docker.sock,remote=ssh://root@X.X.X.X dockerHosts := 
os.Getenv("DOCKER_HOSTS") diff --git a/home/internal/config/manager.go b/home/internal/config/manager.go index 137af1b7..c39c8a34 100644 --- a/home/internal/config/manager.go +++ b/home/internal/config/manager.go @@ -15,12 +15,33 @@ import ( "time" ) +// FileScannerConfig represents scanner settings stored in the config file. +type FileScannerConfig struct { + GrypeImage string `json:"grypeImage,omitempty"` + TrivyImage string `json:"trivyImage,omitempty"` + SyftImage string `json:"syftImage,omitempty"` + DefaultScanner string `json:"defaultScanner,omitempty"` + GrypeArgs string `json:"grypeArgs,omitempty"` + TrivyArgs string `json:"trivyArgs,omitempty"` + Notifications *FileNotificationConfig `json:"notifications,omitempty"` +} + +// FileNotificationConfig represents notification settings stored in the config file. +type FileNotificationConfig struct { + DiscordWebhookURL string `json:"discordWebhookURL,omitempty"` + SlackWebhookURL string `json:"slackWebhookURL,omitempty"` + OnScanComplete *bool `json:"onScanComplete,omitempty"` + OnBulkComplete *bool `json:"onBulkComplete,omitempty"` + MinSeverity string `json:"minSeverity,omitempty"` +} + // FileConfig represents the JSON config file structure. type FileConfig struct { DockerHosts []DockerHost `json:"dockerHosts,omitempty"` CoolifyHosts []CoolifyHostConfig `json:"coolifyHosts,omitempty"` ReadOnly *bool `json:"readOnly,omitempty"` Auth *FileAuthConfig `json:"auth,omitempty"` + Scanner *FileScannerConfig `json:"scanner,omitempty"` } // Source indicates where a config value came from. 
@@ -186,26 +207,6 @@ func (m *Manager) UpdateCoolifyHosts(hosts []CoolifyHostConfig) error { return err } - m.mu.Lock() - - if m.envSnapshot.CoolifySet { - envNames := make(map[string]bool) - for _, h := range m.envConfig.CoolifyHosts { - envNames[h.HostName] = true - } - for _, h := range hosts { - if envNames[h.HostName] { - m.mu.Unlock() - return fmt.Errorf("%w: coolify host %q is defined via environment variable and cannot be managed from the UI", ErrEnvironmentConfigured, h.HostName) - } - } - } - m.mu.Unlock() - - if err := validateCoolifyHosts(hosts); err != nil { - return err - } - m.mu.Lock() if m.envSnapshot.CoolifySet { envNames := make(map[string]bool) @@ -352,6 +353,21 @@ func (m *Manager) UpdateAuth(mutate func(current *FileAuthConfig) (*FileAuthConf return nil } +// UpdateScannerConfig updates the scanner configuration in the file config. +func (m *Manager) UpdateScannerConfig(scanner *FileScannerConfig) error { + m.mu.Lock() + + oldScanner := m.fileConfig.Scanner + m.fileConfig.Scanner = scanner + if err := m.persist(); err != nil { + m.fileConfig.Scanner = oldScanner + m.mu.Unlock() + return err + } + m.remerge() + return nil +} + // merge produces the merged config and source tracking. Must be called with lock held. 
func (m *Manager) merge() (*Config, ConfigSources) { cfg := &Config{} @@ -429,6 +445,46 @@ func (m *Manager) merge() (*Config, ConfigSources) { sources.Auth = SourceDefault } + // Scanner: start with env config, override with file config where set + cfg.Scanner = m.envConfig.Scanner + if fc := m.fileConfig.Scanner; fc != nil { + if fc.GrypeImage != "" { + cfg.Scanner.GrypeImage = fc.GrypeImage + } + if fc.TrivyImage != "" { + cfg.Scanner.TrivyImage = fc.TrivyImage + } + if fc.SyftImage != "" { + cfg.Scanner.SyftImage = fc.SyftImage + } + if fc.DefaultScanner != "" { + cfg.Scanner.DefaultScanner = fc.DefaultScanner + } + if fc.GrypeArgs != "" { + cfg.Scanner.GrypeArgs = fc.GrypeArgs + } + if fc.TrivyArgs != "" { + cfg.Scanner.TrivyArgs = fc.TrivyArgs + } + if fc.Notifications != nil { + if fc.Notifications.DiscordWebhookURL != "" { + cfg.Scanner.DiscordWebhookURL = fc.Notifications.DiscordWebhookURL + } + if fc.Notifications.SlackWebhookURL != "" { + cfg.Scanner.SlackWebhookURL = fc.Notifications.SlackWebhookURL + } + if fc.Notifications.OnScanComplete != nil { + cfg.Scanner.NotifyOnComplete = *fc.Notifications.OnScanComplete + } + if fc.Notifications.OnBulkComplete != nil { + cfg.Scanner.NotifyOnBulk = *fc.Notifications.OnBulkComplete + } + if fc.Notifications.MinSeverity != "" { + cfg.Scanner.NotifyMinSeverity = fc.Notifications.MinSeverity + } + } + } + return cfg, sources } diff --git a/home/internal/models/scan.go b/home/internal/models/scan.go new file mode 100644 index 00000000..76acc751 --- /dev/null +++ b/home/internal/models/scan.go @@ -0,0 +1,133 @@ +package models + +// ScannerType represents the type of vulnerability scanner +type ScannerType string + +const ( + ScannerGrype ScannerType = "grype" + ScannerTrivy ScannerType = "trivy" +) + +// SeverityLevel represents the severity of a vulnerability +type SeverityLevel string + +const ( + SeverityCritical SeverityLevel = "Critical" + SeverityHigh SeverityLevel = "High" + SeverityMedium SeverityLevel = 
"Medium" + SeverityLow SeverityLevel = "Low" + SeverityNegligible SeverityLevel = "Negligible" + SeverityUnknown SeverityLevel = "Unknown" +) + +// Vulnerability represents a single vulnerability finding +type Vulnerability struct { + ID string `json:"id"` + Severity SeverityLevel `json:"severity"` + Package string `json:"package"` + InstalledVersion string `json:"installed_version"` + FixedVersion string `json:"fixed_version,omitempty"` + Description string `json:"description,omitempty"` + DataSource string `json:"data_source,omitempty"` +} + +// SeveritySummary summarizes vulnerability counts by severity +type SeveritySummary struct { + Critical int `json:"critical"` + High int `json:"high"` + Medium int `json:"medium"` + Low int `json:"low"` + Negligible int `json:"negligible"` + Unknown int `json:"unknown"` + Total int `json:"total"` +} + +// ScanResult holds the results of a vulnerability scan +type ScanResult struct { + ID string `json:"id"` + ImageRef string `json:"image_ref"` + Host string `json:"host"` + Scanner ScannerType `json:"scanner"` + Vulnerabilities []Vulnerability `json:"vulnerabilities"` + Summary SeveritySummary `json:"summary"` + StartedAt int64 `json:"started_at"` + CompletedAt int64 `json:"completed_at"` + DurationMs int64 `json:"duration_ms"` + Error string `json:"error,omitempty"` +} + +// ScanJobStatus represents the status of a scan job +type ScanJobStatus string + +const ( + ScanJobPending ScanJobStatus = "pending" + ScanJobPulling ScanJobStatus = "pulling_scanner" + ScanJobScanning ScanJobStatus = "scanning" + ScanJobComplete ScanJobStatus = "complete" + ScanJobFailed ScanJobStatus = "failed" + ScanJobCancelled ScanJobStatus = "cancelled" +) + +// ScanJob represents an individual scan job +type ScanJob struct { + ID string `json:"id"` + ImageRef string `json:"image_ref"` + Host string `json:"host"` + Scanner ScannerType `json:"scanner"` + Status ScanJobStatus `json:"status"` + Progress string `json:"progress,omitempty"` + Result 
*ScanResult `json:"result,omitempty"` + CreatedAt int64 `json:"created_at"` + Error string `json:"error,omitempty"` +} + +// BulkScanJob represents a bulk scan of multiple images +type BulkScanJob struct { + ID string `json:"id"` + Jobs []*ScanJob `json:"jobs"` + TotalImages int `json:"total_images"` + Completed int `json:"completed"` + Failed int `json:"failed"` + Status ScanJobStatus `json:"status"` + CreatedAt int64 `json:"created_at"` +} + +// SBOMFormat represents the output format for SBOM +type SBOMFormat string + +const ( + SBOMFormatSPDX SBOMFormat = "spdx-json" + SBOMFormatCycloneDX SBOMFormat = "cyclonedx-json" +) + +// SBOMJob represents a SBOM generation job +type SBOMJob struct { + ID string `json:"id"` + ImageRef string `json:"image_ref"` + Host string `json:"host"` + Format SBOMFormat `json:"format"` + Status ScanJobStatus `json:"status"` + FilePath string `json:"-"` + CreatedAt int64 `json:"created_at"` + Error string `json:"error,omitempty"` +} + +// ScannerConfig holds scanner configuration +type ScannerConfig struct { + GrypeImage string `json:"grypeImage"` + TrivyImage string `json:"trivyImage"` + SyftImage string `json:"syftImage"` + DefaultScanner ScannerType `json:"defaultScanner"` + GrypeArgs string `json:"grypeArgs"` + TrivyArgs string `json:"trivyArgs"` + Notifications NotificationConfig `json:"notifications"` +} + +// NotificationConfig holds notification webhook configuration +type NotificationConfig struct { + DiscordWebhookURL string `json:"discordWebhookURL,omitempty"` + SlackWebhookURL string `json:"slackWebhookURL,omitempty"` + OnScanComplete bool `json:"onScanComplete"` + OnBulkComplete bool `json:"onBulkComplete"` + MinSeverity SeverityLevel `json:"minSeverity,omitempty"` +} diff --git a/home/internal/scanner/grype.go b/home/internal/scanner/grype.go new file mode 100644 index 00000000..9fb67ccd --- /dev/null +++ b/home/internal/scanner/grype.go @@ -0,0 +1,219 @@ +package scanner + +import ( + "context" + "encoding/json" + "fmt" 
+ "io" + "strings" + + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/image" + "github.com/docker/docker/client" + "github.com/hhftechnology/vps-monitor/internal/models" +) + +// grypeOutput represents the JSON output structure from Grype +type grypeOutput struct { + Matches []grypeMatch `json:"matches"` +} + +type grypeMatch struct { + Vulnerability grypeVulnerability `json:"vulnerability"` + Artifact grypeArtifact `json:"artifact"` +} + +type grypeVulnerability struct { + ID string `json:"id"` + Severity string `json:"severity"` + Description string `json:"description"` + DataSource string `json:"dataSource"` + Fix grypeFixInfo `json:"fix"` +} + +type grypeFixInfo struct { + Versions []string `json:"versions"` + State string `json:"state"` +} + +type grypeArtifact struct { + Name string `json:"name"` + Version string `json:"version"` +} + +// RunGrypeScan runs a Grype vulnerability scan against an image using Docker. +func RunGrypeScan(ctx context.Context, dockerClient *client.Client, scannerImage, imageRef, args string, onProgress func(string)) ([]models.Vulnerability, error) { + // Pull the scanner image + if onProgress != nil { + onProgress("Pulling scanner image " + scannerImage + "...") + } + pullReader, err := dockerClient.ImagePull(ctx, scannerImage, image.PullOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to pull grype image: %w", err) + } + io.Copy(io.Discard, pullReader) + pullReader.Close() + + // Build the command + cmd := buildGrypeCmd(imageRef, args) + + if onProgress != nil { + onProgress("Scanning " + imageRef + " with Grype...") + } + + // Create and start scanner container + resp, err := dockerClient.ContainerCreate(ctx, &container.Config{ + Image: scannerImage, + Cmd: cmd, + }, &container.HostConfig{ + Binds: []string{"/var/run/docker.sock:/var/run/docker.sock"}, + }, nil, nil, "") + if err != nil { + return nil, fmt.Errorf("failed to create grype container: %w", err) + } + containerID := 
resp.ID + defer dockerClient.ContainerRemove(ctx, containerID, container.RemoveOptions{Force: true}) + + if err := dockerClient.ContainerStart(ctx, containerID, container.StartOptions{}); err != nil { + return nil, fmt.Errorf("failed to start grype container: %w", err) + } + + // Wait for completion + statusCh, errCh := dockerClient.ContainerWait(ctx, containerID, container.WaitConditionNotRunning) + select { + case err := <-errCh: + if err != nil { + return nil, fmt.Errorf("error waiting for grype container: %w", err) + } + case status := <-statusCh: + if status.StatusCode != 0 { + // Grype exits with code 1 when vulnerabilities are found - that's expected + if status.StatusCode != 1 { + logs, _ := getContainerLogs(ctx, dockerClient, containerID) + return nil, fmt.Errorf("grype exited with code %d: %s", status.StatusCode, logs) + } + } + case <-ctx.Done(): + return nil, ctx.Err() + } + + // Read stdout for JSON output + logReader, err := dockerClient.ContainerLogs(ctx, containerID, container.LogsOptions{ShowStdout: true}) + if err != nil { + return nil, fmt.Errorf("failed to read grype output: %w", err) + } + defer logReader.Close() + + output, err := demuxDockerLogs(logReader) + if err != nil { + return nil, fmt.Errorf("failed to read grype output: %w", err) + } + + return parseGrypeOutput(output) +} + +// buildGrypeCmd constructs the command for Grype. +func buildGrypeCmd(imageRef, args string) []string { + if args != "" { + // Replace {image} placeholder with actual image ref + resolved := strings.ReplaceAll(args, "{image}", imageRef) + return strings.Fields(resolved) + } + return []string{imageRef, "-o", "json"} +} + +// parseGrypeOutput parses Grype JSON output into vulnerabilities. 
+func parseGrypeOutput(data []byte) ([]models.Vulnerability, error) { + var output grypeOutput + if err := json.Unmarshal(data, &output); err != nil { + return nil, fmt.Errorf("failed to parse grype output: %w", err) + } + + vulns := make([]models.Vulnerability, 0, len(output.Matches)) + for _, match := range output.Matches { + fixedVersion := "" + if len(match.Vulnerability.Fix.Versions) > 0 { + fixedVersion = match.Vulnerability.Fix.Versions[0] + } + + vulns = append(vulns, models.Vulnerability{ + ID: match.Vulnerability.ID, + Severity: normalizeSeverity(match.Vulnerability.Severity), + Package: match.Artifact.Name, + InstalledVersion: match.Artifact.Version, + FixedVersion: fixedVersion, + Description: match.Vulnerability.Description, + DataSource: match.Vulnerability.DataSource, + }) + } + + return vulns, nil +} + +// getContainerLogs reads stderr from a container for error reporting. +func getContainerLogs(ctx context.Context, dockerClient *client.Client, containerID string) (string, error) { + reader, err := dockerClient.ContainerLogs(ctx, containerID, container.LogsOptions{ShowStderr: true}) + if err != nil { + return "", err + } + defer reader.Close() + data, err := demuxDockerLogs(reader) + if err != nil { + return "", err + } + return string(data), nil +} + +// normalizeSeverity normalizes severity strings from scanners. +func normalizeSeverity(severity string) models.SeverityLevel { + switch strings.ToLower(severity) { + case "critical": + return models.SeverityCritical + case "high": + return models.SeverityHigh + case "medium": + return models.SeverityMedium + case "low": + return models.SeverityLow + case "negligible": + return models.SeverityNegligible + default: + return models.SeverityUnknown + } +} + +// demuxDockerLogs reads Docker multiplexed log output and returns the raw content. +func demuxDockerLogs(reader io.Reader) ([]byte, error) { + // Docker container logs use a multiplexed format with an 8-byte header per frame. 
+	// Header: [1 byte stream type][3 bytes padding][4 bytes uint32 big-endian size]
+	var result []byte
+	header := make([]byte, 8)
+
+	for {
+		_, err := io.ReadFull(reader, header)
+		if err != nil {
+			if err == io.EOF {
+				break
+			}
+			return result, err // surface real read errors instead of silently dropping them
+		}
+
+		size := uint32(header[4])<<24 | uint32(header[5])<<16 | uint32(header[6])<<8 | uint32(header[7])
+		if size == 0 {
+			continue
+		}
+
+		frame := make([]byte, size)
+		_, err = io.ReadFull(reader, frame)
+		if err != nil {
+			return result, err // truncated frame: report it rather than returning partial output as success
+		}
+
+		// Only capture stdout (stream type 1)
+		if header[0] == 1 {
+			result = append(result, frame...)
+		}
+	}
+
+	return result, nil
+}
diff --git a/home/internal/scanner/notifier.go b/home/internal/scanner/notifier.go
new file mode 100644
index 00000000..a985e78f
--- /dev/null
+++ b/home/internal/scanner/notifier.go
@@ -0,0 +1,268 @@
+package scanner
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"time"
+
+	"github.com/hhftechnology/vps-monitor/internal/models"
+)
+
+// Notifier sends scan result notifications to Discord and Slack.
+type Notifier struct {
+	client *http.Client
+}
+
+// NewNotifier creates a new notifier.
+func NewNotifier() *Notifier {
+	return &Notifier{
+		client: &http.Client{Timeout: 10 * time.Second},
+	}
+}
+
+// SendDiscord sends a scan result notification to a Discord webhook.
+func (n *Notifier) SendDiscord(webhookURL string, result *models.ScanResult, bulkJob *models.BulkScanJob) error {
+	var payload map[string]interface{}
+
+	if bulkJob != nil {
+		payload = n.buildDiscordBulkPayload(bulkJob)
+	} else if result != nil {
+		payload = n.buildDiscordScanPayload(result)
+	} else {
+		return nil
+	}
+
+	return n.sendWebhook(webhookURL, payload)
+}
+
+// SendSlack sends a scan result notification to a Slack webhook.
+func (n *Notifier) SendSlack(webhookURL string, result *models.ScanResult, bulkJob *models.BulkScanJob) error { + var payload map[string]interface{} + + if bulkJob != nil { + payload = n.buildSlackBulkPayload(bulkJob) + } else if result != nil { + payload = n.buildSlackScanPayload(result) + } else { + return nil + } + + return n.sendWebhook(webhookURL, payload) +} + +// SendTestNotification sends a test notification to verify webhook configuration. +func (n *Notifier) SendTestNotification(discordURL, slackURL string) error { + testResult := &models.ScanResult{ + ImageRef: "test/image:latest", + Host: "test-host", + Scanner: models.ScannerGrype, + Summary: models.SeveritySummary{ + Critical: 1, + High: 3, + Medium: 5, + Low: 2, + Total: 11, + }, + DurationMs: 5000, + } + + if discordURL != "" { + if err := n.SendDiscord(discordURL, testResult, nil); err != nil { + return fmt.Errorf("discord: %w", err) + } + } + if slackURL != "" { + if err := n.SendSlack(slackURL, testResult, nil); err != nil { + return fmt.Errorf("slack: %w", err) + } + } + return nil +} + +func (n *Notifier) buildDiscordScanPayload(result *models.ScanResult) map[string]interface{} { + color := discordColor(result.Summary) + fields := []map[string]interface{}{ + {"name": "Critical", "value": fmt.Sprintf("%d", result.Summary.Critical), "inline": true}, + {"name": "High", "value": fmt.Sprintf("%d", result.Summary.High), "inline": true}, + {"name": "Medium", "value": fmt.Sprintf("%d", result.Summary.Medium), "inline": true}, + {"name": "Low", "value": fmt.Sprintf("%d", result.Summary.Low), "inline": true}, + {"name": "Total", "value": fmt.Sprintf("%d", result.Summary.Total), "inline": true}, + {"name": "Scanner", "value": string(result.Scanner), "inline": true}, + } + if result.DurationMs > 0 { + fields = append(fields, map[string]interface{}{ + "name": "Duration", "value": fmt.Sprintf("%.1fs", float64(result.DurationMs)/1000), "inline": true, + }) + } + + return map[string]interface{}{ + "embeds": 
[]map[string]interface{}{ + { + "title": "Vulnerability Scan Complete", + "description": fmt.Sprintf("**%s** on host **%s**", result.ImageRef, result.Host), + "color": color, + "fields": fields, + "footer": map[string]string{"text": "VPS Monitor"}, + "timestamp": time.Now().UTC().Format(time.RFC3339), + }, + }, + } +} + +func (n *Notifier) buildDiscordBulkPayload(bulkJob *models.BulkScanJob) map[string]interface{} { + description := fmt.Sprintf("Scanned **%d** images\nCompleted: **%d** | Failed: **%d**", + bulkJob.TotalImages, bulkJob.Completed, bulkJob.Failed) + + // Aggregate severity counts across all completed scans + var totalSummary models.SeveritySummary + for _, job := range bulkJob.Jobs { + if job.Result != nil { + totalSummary.Critical += job.Result.Summary.Critical + totalSummary.High += job.Result.Summary.High + totalSummary.Medium += job.Result.Summary.Medium + totalSummary.Low += job.Result.Summary.Low + totalSummary.Total += job.Result.Summary.Total + } + } + + color := discordColor(totalSummary) + fields := []map[string]interface{}{ + {"name": "Critical", "value": fmt.Sprintf("%d", totalSummary.Critical), "inline": true}, + {"name": "High", "value": fmt.Sprintf("%d", totalSummary.High), "inline": true}, + {"name": "Medium", "value": fmt.Sprintf("%d", totalSummary.Medium), "inline": true}, + {"name": "Low", "value": fmt.Sprintf("%d", totalSummary.Low), "inline": true}, + {"name": "Total", "value": fmt.Sprintf("%d", totalSummary.Total), "inline": true}, + } + + return map[string]interface{}{ + "embeds": []map[string]interface{}{ + { + "title": "Bulk Vulnerability Scan Complete", + "description": description, + "color": color, + "fields": fields, + "footer": map[string]string{"text": "VPS Monitor"}, + "timestamp": time.Now().UTC().Format(time.RFC3339), + }, + }, + } +} + +func (n *Notifier) buildSlackScanPayload(result *models.ScanResult) map[string]interface{} { + summaryText := fmt.Sprintf("Critical: %d | High: %d | Medium: %d | Low: %d | Total: %d", 
+ result.Summary.Critical, result.Summary.High, result.Summary.Medium, + result.Summary.Low, result.Summary.Total) + + return map[string]interface{}{ + "blocks": []map[string]interface{}{ + { + "type": "header", + "text": map[string]string{ + "type": "plain_text", + "text": "Vulnerability Scan Complete", + }, + }, + { + "type": "section", + "text": map[string]string{ + "type": "mrkdwn", + "text": fmt.Sprintf("*%s* on host *%s*\n\n%s", result.ImageRef, result.Host, summaryText), + }, + }, + { + "type": "context", + "elements": []map[string]string{ + {"type": "mrkdwn", "text": fmt.Sprintf("Scanner: %s | Duration: %.1fs | VPS Monitor", result.Scanner, float64(result.DurationMs)/1000)}, + }, + }, + }, + } +} + +func (n *Notifier) buildSlackBulkPayload(bulkJob *models.BulkScanJob) map[string]interface{} { + var totalSummary models.SeveritySummary + for _, job := range bulkJob.Jobs { + if job.Result != nil { + totalSummary.Critical += job.Result.Summary.Critical + totalSummary.High += job.Result.Summary.High + totalSummary.Medium += job.Result.Summary.Medium + totalSummary.Low += job.Result.Summary.Low + totalSummary.Total += job.Result.Summary.Total + } + } + + summaryText := fmt.Sprintf("Critical: %d | High: %d | Medium: %d | Low: %d | Total: %d", + totalSummary.Critical, totalSummary.High, totalSummary.Medium, + totalSummary.Low, totalSummary.Total) + + return map[string]interface{}{ + "blocks": []map[string]interface{}{ + { + "type": "header", + "text": map[string]string{ + "type": "plain_text", + "text": "Bulk Vulnerability Scan Complete", + }, + }, + { + "type": "section", + "text": map[string]string{ + "type": "mrkdwn", + "text": fmt.Sprintf("Scanned *%d* images | Completed: *%d* | Failed: *%d*\n\n%s", + bulkJob.TotalImages, bulkJob.Completed, bulkJob.Failed, summaryText), + }, + }, + { + "type": "context", + "elements": []map[string]string{ + {"type": "mrkdwn", "text": "VPS Monitor"}, + }, + }, + }, + } +} + +func (n *Notifier) sendWebhook(url string, payload 
map[string]interface{}) error { + data, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("failed to marshal webhook payload: %w", err) + } + + req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(data)) + if err != nil { + return fmt.Errorf("failed to create webhook request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", "VPS-Monitor/1.0") + + resp, err := n.client.Do(req) + if err != nil { + return fmt.Errorf("failed to send webhook: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode >= 400 { + return fmt.Errorf("webhook returned error status: %d", resp.StatusCode) + } + + return nil +} + +// discordColor returns the embed color based on highest severity. +func discordColor(summary models.SeveritySummary) int { + if summary.Critical > 0 { + return 0xED4245 // Red + } + if summary.High > 0 { + return 0xED4245 // Red + } + if summary.Medium > 0 { + return 0xFFA500 // Orange + } + if summary.Low > 0 { + return 0xFEE75C // Yellow + } + return 0x57F287 // Green - no vulnerabilities +} diff --git a/home/internal/scanner/sbom.go b/home/internal/scanner/sbom.go new file mode 100644 index 00000000..a93c5825 --- /dev/null +++ b/home/internal/scanner/sbom.go @@ -0,0 +1,231 @@ +package scanner + +import ( + "context" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/image" + "github.com/docker/docker/client" + "github.com/google/uuid" + "github.com/hhftechnology/vps-monitor/internal/models" +) + +const sbomDir = "/data/sbom" + +// StartSBOMGeneration starts SBOM generation for an image. 
+func (s *ScannerService) StartSBOMGeneration(imageRef, host string, format models.SBOMFormat) (*models.SBOMJob, error) { + job := &models.SBOMJob{ + ID: uuid.New().String(), + ImageRef: imageRef, + Host: host, + Format: format, + Status: models.ScanJobPending, + CreatedAt: time.Now().Unix(), + } + + s.mu.Lock() + s.sbomJobs[job.ID] = job + s.mu.Unlock() + + go s.runSBOMGeneration(job) + + return job, nil +} + +func (s *ScannerService) runSBOMGeneration(job *models.SBOMJob) { + cfg := s.Config() + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute) + defer cancel() + + dockerClient, release := s.registry.AcquireDocker() + if dockerClient == nil { + release() + s.updateSBOMStatus(job, models.ScanJobFailed, "docker client unavailable") + return + } + defer release() + + apiClient, err := dockerClient.GetClient(job.Host) + if err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, err.Error()) + return + } + + s.updateSBOMStatus(job, models.ScanJobPulling, "") + + // Use Syft for SBOM generation + scannerImage := cfg.SyftImage + cmd := buildSBOMCmd(job.ImageRef, job.Format) + + // Pull scanner image + pullReader, err := apiClient.ImagePull(ctx, scannerImage, image.PullOptions{}) + if err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to pull syft image: %v", err)) + return + } + io.Copy(io.Discard, pullReader) + pullReader.Close() + + s.updateSBOMStatus(job, models.ScanJobScanning, "") + + // Create and run container + resp, err := apiClient.ContainerCreate(ctx, &container.Config{ + Image: scannerImage, + Cmd: cmd, + }, &container.HostConfig{ + Binds: []string{"/var/run/docker.sock:/var/run/docker.sock"}, + }, nil, nil, "") + if err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to create syft container: %v", err)) + return + } + containerID := resp.ID + defer apiClient.ContainerRemove(ctx, containerID, container.RemoveOptions{Force: true}) + + if err := apiClient.ContainerStart(ctx, 
containerID, container.StartOptions{}); err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to start syft container: %v", err)) + return + } + + // Wait for completion + statusCh, errCh := apiClient.ContainerWait(ctx, containerID, container.WaitConditionNotRunning) + select { + case err := <-errCh: + if err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("error waiting for syft: %v", err)) + return + } + case status := <-statusCh: + if status.StatusCode != 0 { + logs, _ := getContainerLogs(ctx, apiClient, containerID) + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("syft exited with code %d: %s", status.StatusCode, logs)) + return + } + case <-ctx.Done(): + s.updateSBOMStatus(job, models.ScanJobCancelled, "cancelled") + return + } + + // Read output and save to file + logReader, err := apiClient.ContainerLogs(ctx, containerID, container.LogsOptions{ShowStdout: true}) + if err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to read syft output: %v", err)) + return + } + defer logReader.Close() + + output, err := demuxDockerLogs(logReader) + if err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to read syft output: %v", err)) + return + } + + // Write SBOM to file + if err := os.MkdirAll(sbomDir, 0750); err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to create sbom directory: %v", err)) + return + } + + filePath := filepath.Join(sbomDir, job.ID+".json") + if err := os.WriteFile(filePath, output, 0600); err != nil { + s.updateSBOMStatus(job, models.ScanJobFailed, fmt.Sprintf("failed to write sbom file: %v", err)) + return + } + + s.mu.Lock() + job.Status = models.ScanJobComplete + job.FilePath = filePath + s.mu.Unlock() + + // Schedule cleanup after 1 hour + go func() { + time.Sleep(1 * time.Hour) + os.Remove(filePath) + }() +} + +func (s *ScannerService) updateSBOMStatus(job *models.SBOMJob, status 
models.ScanJobStatus, errMsg string) { + s.mu.Lock() + defer s.mu.Unlock() + job.Status = status + job.Error = errMsg +} + +func buildSBOMCmd(imageRef string, format models.SBOMFormat) []string { + outputFormat := "spdx-json" + if format == models.SBOMFormatCycloneDX { + outputFormat = "cyclonedx-json" + } + return []string{imageRef, "-o", outputFormat} +} + +// RunSBOMWithTrivy generates an SBOM using Trivy instead of Syft. +func RunSBOMWithTrivy(ctx context.Context, dockerClient *client.Client, trivyImage, imageRef string, format models.SBOMFormat) ([]byte, error) { + outputFormat := "spdx-json" + if format == models.SBOMFormatCycloneDX { + outputFormat = "cyclonedx" + } + + cmd := []string{"image", "--format", outputFormat, imageRef} + + pullReader, err := dockerClient.ImagePull(ctx, trivyImage, image.PullOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to pull trivy image: %w", err) + } + io.Copy(io.Discard, pullReader) + pullReader.Close() + + resp, err := dockerClient.ContainerCreate(ctx, &container.Config{ + Image: trivyImage, + Cmd: cmd, + }, &container.HostConfig{ + Binds: []string{"/var/run/docker.sock:/var/run/docker.sock"}, + }, nil, nil, "") + if err != nil { + return nil, fmt.Errorf("failed to create trivy sbom container: %w", err) + } + containerID := resp.ID + defer dockerClient.ContainerRemove(ctx, containerID, container.RemoveOptions{Force: true}) + + if err := dockerClient.ContainerStart(ctx, containerID, container.StartOptions{}); err != nil { + return nil, fmt.Errorf("failed to start trivy sbom container: %w", err) + } + + statusCh, errCh := dockerClient.ContainerWait(ctx, containerID, container.WaitConditionNotRunning) + select { + case err := <-errCh: + if err != nil { + return nil, fmt.Errorf("error waiting for trivy sbom: %w", err) + } + case status := <-statusCh: + if status.StatusCode != 0 { + logs, _ := getContainerLogs(ctx, dockerClient, containerID) + return nil, fmt.Errorf("trivy sbom exited with code %d: %s", 
status.StatusCode, logs) + } + case <-ctx.Done(): + return nil, ctx.Err() + } + + logReader, err := dockerClient.ContainerLogs(ctx, containerID, container.LogsOptions{ShowStdout: true}) + if err != nil { + return nil, fmt.Errorf("failed to read trivy sbom output: %w", err) + } + defer logReader.Close() + + return demuxDockerLogs(logReader) +} + +// getContainerLogs is defined in grype.go, avoid redeclaration by using the existing one. +// demuxDockerLogs is defined in grype.go, shared across the package. +// normalizeSeverity is defined in grype.go, shared across the package. + +func sanitizeImageRefForFilename(imageRef string) string { + r := strings.NewReplacer("/", "_", ":", "_", ".", "_") + return r.Replace(imageRef) +} diff --git a/home/internal/scanner/scanner.go b/home/internal/scanner/scanner.go new file mode 100644 index 00000000..9ad6504a --- /dev/null +++ b/home/internal/scanner/scanner.go @@ -0,0 +1,462 @@ +package scanner + +import ( + "context" + "fmt" + "log" + "sync" + "sync/atomic" + "time" + + "github.com/google/uuid" + "github.com/hhftechnology/vps-monitor/internal/models" + "github.com/hhftechnology/vps-monitor/internal/services" +) + +const maxConcurrentScansPerHost = 3 + +// ScannerService orchestrates vulnerability scanning across Docker hosts. +type ScannerService struct { + registry *services.Registry + config atomic.Pointer[models.ScannerConfig] + store *ScanResultStore + notifier *Notifier + + mu sync.RWMutex + jobs map[string]*models.ScanJob + bulkJobs map[string]*bulkScanState + sbomJobs map[string]*models.SBOMJob + cancels map[string]context.CancelFunc +} + +type bulkScanState struct { + job *models.BulkScanJob + cancel context.CancelFunc +} + +// NewScannerService creates a new scanner service. 
+func NewScannerService(registry *services.Registry, cfg *models.ScannerConfig) *ScannerService { + s := &ScannerService{ + registry: registry, + store: NewScanResultStore(), + notifier: NewNotifier(), + jobs: make(map[string]*models.ScanJob), + bulkJobs: make(map[string]*bulkScanState), + sbomJobs: make(map[string]*models.SBOMJob), + cancels: make(map[string]context.CancelFunc), + } + s.config.Store(cfg) + return s +} + +// UpdateConfig updates the scanner configuration. +func (s *ScannerService) UpdateConfig(cfg *models.ScannerConfig) { + s.config.Store(cfg) +} + +// Config returns the current scanner configuration. +func (s *ScannerService) Config() *models.ScannerConfig { + return s.config.Load() +} + +// Store returns the scan result store. +func (s *ScannerService) Store() *ScanResultStore { + return s.store +} + +// StartScan starts a single image vulnerability scan. +func (s *ScannerService) StartScan(imageRef, host string, scannerType models.ScannerType) (*models.ScanJob, error) { + cfg := s.Config() + if scannerType == "" { + scannerType = cfg.DefaultScanner + } + + job := &models.ScanJob{ + ID: uuid.New().String(), + ImageRef: imageRef, + Host: host, + Scanner: scannerType, + Status: models.ScanJobPending, + CreatedAt: time.Now().Unix(), + } + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute) + + s.mu.Lock() + s.jobs[job.ID] = job + s.cancels[job.ID] = cancel + s.mu.Unlock() + + go s.runScan(ctx, job, cancel) + + return job, nil +} + +// StartBulkScan starts scanning all images across specified hosts. 
+func (s *ScannerService) StartBulkScan(scannerType models.ScannerType, hosts []string) (*models.BulkScanJob, error) {
+	cfg := s.Config()
+	if scannerType == "" {
+		scannerType = cfg.DefaultScanner
+	}
+
+	// Get all images
+	dockerClient, release := s.registry.AcquireDocker()
+	if dockerClient == nil {
+		release()
+		return nil, fmt.Errorf("docker client unavailable")
+	}
+
+	ctx := context.Background()
+	imagesByHost, _, err := dockerClient.ListImagesAllHosts(ctx)
+	release()
+	if err != nil {
+		return nil, fmt.Errorf("failed to list images: %w", err)
+	}
+
+	bulkJob := &models.BulkScanJob{
+		ID:        uuid.New().String(),
+		Status:    models.ScanJobPending,
+		CreatedAt: time.Now().Unix(),
+	}
+
+	// Create scan jobs for matching hosts
+	for hostName, images := range imagesByHost {
+		if len(hosts) > 0 && !containsHost(hosts, hostName) {
+			continue
+		}
+		for _, img := range images {
+			imageRef := img.ID // fall back to the ID for untagged images
+			if len(img.RepoTags) > 0 {
+				imageRef = img.RepoTags[0]
+			}
+			job := &models.ScanJob{
+				ID:        uuid.New().String(),
+				ImageRef:  imageRef,
+				Host:      hostName,
+				Scanner:   scannerType,
+				Status:    models.ScanJobPending,
+				CreatedAt: time.Now().Unix(),
+			}
+			bulkJob.Jobs = append(bulkJob.Jobs, job)
+
+			s.mu.Lock()
+			s.jobs[job.ID] = job
+			s.mu.Unlock()
+		}
+	}
+
+	bulkJob.TotalImages = len(bulkJob.Jobs)
+	if bulkJob.TotalImages == 0 {
+		bulkJob.Status = models.ScanJobComplete
+		return bulkJob, nil
+	}
+
+	bulkCtx, bulkCancel := context.WithTimeout(context.Background(), 60*time.Minute)
+
+	s.mu.Lock()
+	s.bulkJobs[bulkJob.ID] = &bulkScanState{job: bulkJob, cancel: bulkCancel}
+	s.cancels[bulkJob.ID] = bulkCancel
+	s.mu.Unlock()
+
+	go s.runBulkScan(bulkCtx, bulkJob, bulkCancel)
+
+	return bulkJob, nil
+}
+
+// GetJob returns a scan job by ID.
+func (s *ScannerService) GetJob(id string) *models.ScanJob {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	return s.jobs[id]
+}
+
+// GetBulkJob returns a bulk scan job by ID.
+// GetBulkJob returns a bulk scan job by ID, or nil if unknown.
+func (s *ScannerService) GetBulkJob(id string) *models.BulkScanJob {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	if state, ok := s.bulkJobs[id]; ok {
+		return state.job
+	}
+	return nil
+}
+
+// GetJobs returns all recent scan jobs.
+// The slice is a fresh snapshot; map iteration means the order is unspecified.
+func (s *ScannerService) GetJobs() []*models.ScanJob {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+
+	jobs := make([]*models.ScanJob, 0, len(s.jobs))
+	for _, job := range s.jobs {
+		jobs = append(jobs, job)
+	}
+	return jobs
+}
+
+// GetBulkJobs returns all bulk scan jobs (unordered snapshot).
+func (s *ScannerService) GetBulkJobs() []*models.BulkScanJob {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+
+	jobs := make([]*models.BulkScanJob, 0, len(s.bulkJobs))
+	for _, state := range s.bulkJobs {
+		jobs = append(jobs, state.job)
+	}
+	return jobs
+}
+
+// CancelJob cancels a running scan or bulk-scan job by ID.
+// Returns true if a cancel func was registered for the ID. The id is shared
+// between s.jobs and s.bulkJobs lookups, so the same entry point serves both.
+func (s *ScannerService) CancelJob(id string) bool {
+	// Fetch the cancel func under the lock, but invoke it outside the lock
+	// so we never call into the context machinery while holding s.mu.
+	s.mu.Lock()
+	cancel, ok := s.cancels[id]
+	s.mu.Unlock()
+
+	if ok {
+		cancel()
+		// Update job status
+		s.mu.Lock()
+		if job, exists := s.jobs[id]; exists {
+			job.Status = models.ScanJobCancelled
+		}
+		if state, exists := s.bulkJobs[id]; exists {
+			state.job.Status = models.ScanJobCancelled
+		}
+		s.mu.Unlock()
+		return true
+	}
+	return false
+}
+
+// GetSBOMJob returns an SBOM job by ID.
+func (s *ScannerService) GetSBOMJob(id string) *models.SBOMJob {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	return s.sbomJobs[id]
+}
+
+// runScan executes one scan job to completion: acquires a Docker client for
+// the job's host, runs the configured scanner, stores the result, and updates
+// job status/progress under s.mu throughout. Always releases ctx via cancel.
+func (s *ScannerService) runScan(ctx context.Context, job *models.ScanJob, cancel context.CancelFunc) {
+	defer cancel()
+
+	cfg := s.Config()
+
+	dockerClient, release := s.registry.AcquireDocker()
+	if dockerClient == nil {
+		release()
+		s.updateJobStatus(job, models.ScanJobFailed, "docker client unavailable")
+		return
+	}
+	defer release()
+
+	apiClient, err := dockerClient.GetClient(job.Host)
+	if err != nil {
+		s.updateJobStatus(job, models.ScanJobFailed, err.Error())
+		return
+	}
+
+	s.updateJobProgress(job, models.ScanJobPulling, "Pulling scanner image...")
+
+	startedAt := time.Now()
+	var vulns []models.Vulnerability
+
+	// FIX: the previous closure read job.Status without holding s.mu
+	// (racing with updateJobStatus). Progress callbacks only need to update
+	// the message, so write Progress alone under the lock.
+	onProgress := func(msg string) {
+		s.mu.Lock()
+		job.Progress = msg
+		s.mu.Unlock()
+	}
+
+	s.updateJobProgress(job, models.ScanJobScanning, "Scanning...")
+
+	switch job.Scanner {
+	case models.ScannerGrype:
+		vulns, err = RunGrypeScan(ctx, apiClient, cfg.GrypeImage, job.ImageRef, cfg.GrypeArgs, onProgress)
+	case models.ScannerTrivy:
+		vulns, err = RunTrivyScan(ctx, apiClient, cfg.TrivyImage, job.ImageRef, cfg.TrivyArgs, onProgress)
+	default:
+		err = fmt.Errorf("unknown scanner type: %s", job.Scanner)
+	}
+
+	completedAt := time.Now()
+
+	if err != nil {
+		// Distinguish cancellation/timeout from genuine scanner failure.
+		if ctx.Err() != nil {
+			s.updateJobStatus(job, models.ScanJobCancelled, "scan cancelled")
+		} else {
+			s.updateJobStatus(job, models.ScanJobFailed, err.Error())
+		}
+		return
+	}
+
+	summary := computeSummary(vulns)
+	result := models.ScanResult{
+		ID:              uuid.New().String(),
+		ImageRef:        job.ImageRef,
+		Host:            job.Host,
+		Scanner:         job.Scanner,
+		Vulnerabilities: vulns,
+		Summary:         summary,
+		StartedAt:       startedAt.Unix(),
+		CompletedAt:     completedAt.Unix(),
+		DurationMs:      completedAt.Sub(startedAt).Milliseconds(),
+	}
+
+	s.store.Add(result)
+
+	s.mu.Lock()
+	job.Status = models.ScanJobComplete
+	job.Result = &result
+	s.mu.Unlock()
+
+	// Send notification if configured
+	if cfg.Notifications.OnScanComplete {
+		s.sendNotification(&result)
+	}
+}
+
+// runBulkScan runs every job of a bulk scan, limiting concurrency per host,
+// then marks the bulk job complete and optionally notifies.
+func (s *ScannerService) runBulkScan(ctx context.Context, bulkJob *models.BulkScanJob, cancel context.CancelFunc) {
+	defer cancel()
+
+	s.mu.Lock()
+	bulkJob.Status = models.ScanJobScanning
+	s.mu.Unlock()
+
+	// Group jobs by host for per-host concurrency limiting
+	hostJobs := make(map[string][]*models.ScanJob)
+	for _, job := range bulkJob.Jobs {
+		hostJobs[job.Host] = append(hostJobs[job.Host], job)
+	}
+
+	var wg sync.WaitGroup
+	for _, jobs := range hostJobs {
+		wg.Add(1)
+		go func(jobs []*models.ScanJob) {
+			defer wg.Done()
+			sem := make(chan struct{}, maxConcurrentScansPerHost)
+			var hostWg sync.WaitGroup
+			// FIX: wait for in-flight scans on EVERY exit path. The previous
+			// code returned directly on ctx.Done(), skipping hostWg.Wait(),
+			// so the bulk job could be finalised (and notifications sent)
+			// while scans for this host were still running. Defers run LIFO,
+			// so this wait completes before wg.Done above.
+			defer hostWg.Wait()
+			for _, job := range jobs {
+				select {
+				case <-ctx.Done():
+					return
+				case sem <- struct{}{}:
+				}
+				hostWg.Add(1)
+				go func(j *models.ScanJob) {
+					defer hostWg.Done()
+					defer func() { <-sem }()
+
+					jobCtx, jobCancel := context.WithTimeout(ctx, 10*time.Minute)
+					s.runScan(jobCtx, j, jobCancel)
+
+					// Tally the outcome; cancelled jobs count as neither.
+					s.mu.Lock()
+					if j.Status == models.ScanJobComplete {
+						bulkJob.Completed++
+					} else if j.Status == models.ScanJobFailed {
+						bulkJob.Failed++
+					}
+					s.mu.Unlock()
+				}(job)
+			}
+		}(jobs)
+	}
+
+	wg.Wait()
+
+	s.mu.Lock()
+	if bulkJob.Status != models.ScanJobCancelled {
+		bulkJob.Status = models.ScanJobComplete
+	}
+	s.mu.Unlock()
+
+	// Send bulk notification
+	cfg := s.Config()
+	if cfg.Notifications.OnBulkComplete {
+		s.sendBulkNotification(bulkJob)
+	}
+}
+
+// updateJobStatus sets a job's status and error message under the lock.
+func (s *ScannerService) updateJobStatus(job *models.ScanJob, status models.ScanJobStatus, errMsg string) {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	job.Status = status
+	job.Error = errMsg
+}
+
+// updateJobProgress sets a job's status and progress message under the lock.
+func (s *ScannerService) updateJobProgress(job *models.ScanJob, status models.ScanJobStatus, progress string) {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	job.Status = status
+	job.Progress = progress
+}
+
+// sendNotification sends per-scan webhook notifications, honouring the
+// configured minimum severity. Failures are logged, never propagated.
+func (s *ScannerService) sendNotification(result *models.ScanResult) {
+	cfg := s.Config()
+	if !meetsMinSeverity(result.Summary, cfg.Notifications.MinSeverity) {
+		return
+	}
+	if cfg.Notifications.DiscordWebhookURL != "" {
+		if err := s.notifier.SendDiscord(cfg.Notifications.DiscordWebhookURL, result, nil); err != nil {
+			log.Printf("Failed to send Discord notification: %v", err)
+		}
+	}
+	if cfg.Notifications.SlackWebhookURL != "" {
+		if err := s.notifier.SendSlack(cfg.Notifications.SlackWebhookURL, result, nil); err != nil {
+			log.Printf("Failed to send Slack notification: %v", err)
+		}
+	}
+}
+
+// sendBulkNotification sends bulk-completion webhook notifications.
+func (s *ScannerService) sendBulkNotification(bulkJob *models.BulkScanJob) {
+	cfg := s.Config()
+	if cfg.Notifications.DiscordWebhookURL != "" {
+		if err := s.notifier.SendDiscord(cfg.Notifications.DiscordWebhookURL, nil, bulkJob); err != nil {
+			log.Printf("Failed to send Discord bulk notification: %v", err)
+		}
+	}
+	if cfg.Notifications.SlackWebhookURL != "" {
+		if err := s.notifier.SendSlack(cfg.Notifications.SlackWebhookURL, nil, bulkJob); err != nil {
+			log.Printf("Failed to send Slack bulk notification: %v", err)
+		}
+	}
+}
+
+// computeSummary tallies vulnerabilities into per-severity counts.
+func computeSummary(vulns []models.Vulnerability) models.SeveritySummary {
+	summary := models.SeveritySummary{Total: len(vulns)}
+	for _, v := range vulns {
+		switch v.Severity {
+		case models.SeverityCritical:
+			summary.Critical++
+		case models.SeverityHigh:
+			summary.High++
+		case models.SeverityMedium:
+			summary.Medium++
+		case models.SeverityLow:
+			summary.Low++
+		case models.SeverityNegligible:
+			summary.Negligible++
+		default:
+			summary.Unknown++
+		}
+	}
+	return summary
+}
+
+// meetsMinSeverity reports whether the summary contains at least one finding
+// at or above the given minimum severity. An unrecognised minimum (including
+// empty) matches any finding at all.
+func meetsMinSeverity(summary models.SeveritySummary, minSeverity models.SeverityLevel) bool {
+	switch minSeverity {
+	case models.SeverityCritical:
+		return summary.Critical > 0
+	case models.SeverityHigh:
+		return summary.Critical > 0 || summary.High > 0
+	case models.SeverityMedium:
+		return summary.Critical > 0 || summary.High > 0 || summary.Medium > 0
+	case models.SeverityLow:
+		return summary.Critical > 0 || summary.High > 0 || summary.Medium > 0 || summary.Low > 0
+	default:
+		return summary.Total > 0
+	}
+}
+
+// containsHost reports whether host appears in hosts (exact match).
+func containsHost(hosts []string, host string) bool {
+	for _, h := range hosts {
+		if h == host {
+			return true
+		}
+	}
+	return false
+}
diff --git a/home/internal/scanner/store.go b/home/internal/scanner/store.go
new file mode 100644
index 00000000..3b3a039a
--- /dev/null
+++ b/home/internal/scanner/store.go
@@ -0,0 +1,68 @@
+package scanner
+
+import (
+	"sync"
+
+	"github.com/hhftechnology/vps-monitor/internal/models"
+)
+
+const maxResultsPerImage = 10
+
+// ScanResultStore stores scan results in memory, keyed by host:imageRef.
+type ScanResultStore struct {
+	mu      sync.RWMutex
+	results map[string][]models.ScanResult // key: "host:imageRef"
+}
+
+// NewScanResultStore creates a new in-memory scan result store.
+func NewScanResultStore() *ScanResultStore {
+	return &ScanResultStore{
+		results: make(map[string][]models.ScanResult),
+	}
+}
+
+// resultKey builds the map key for a host/image pair.
+func resultKey(host, imageRef string) string {
+	return host + ":" + imageRef
+}
+
+// Add stores a scan result, keeping at most maxResultsPerImage per image.
+func (s *ScanResultStore) Add(result models.ScanResult) {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	key := resultKey(result.Host, result.ImageRef)
+	// Prepend for newest-first ordering
+	s.results[key] = append([]models.ScanResult{result}, s.results[key]...)
+
+	if len(s.results[key]) > maxResultsPerImage {
+		s.results[key] = s.results[key][:maxResultsPerImage]
+	}
+}
+
+// GetResults returns all scan results for a specific image on a host.
+// Always returns a non-nil copy so callers cannot mutate the store.
+func (s *ScanResultStore) GetResults(host, imageRef string) []models.ScanResult {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+
+	key := resultKey(host, imageRef)
+	results := s.results[key]
+	if results == nil {
+		return []models.ScanResult{}
+	}
+	out := make([]models.ScanResult, len(results))
+	copy(out, results)
+	return out
+}
+
+// GetLatest returns the most recent scan result for an image on a host.
+// GetLatest returns the most recent scan result (results are newest-first,
+// see Add), or nil when no scan has been recorded for the host/image pair.
+// A copy is returned so callers cannot mutate the stored entry.
+func (s *ScanResultStore) GetLatest(host, imageRef string) *models.ScanResult {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+
+	key := resultKey(host, imageRef)
+	if len(s.results[key]) == 0 {
+		return nil
+	}
+	result := s.results[key][0]
+	return &result
+}
diff --git a/home/internal/scanner/trivy.go b/home/internal/scanner/trivy.go
new file mode 100644
index 00000000..501d3765
--- /dev/null
+++ b/home/internal/scanner/trivy.go
@@ -0,0 +1,140 @@
+package scanner
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io"
+	"strings"
+
+	"github.com/docker/docker/api/types/container"
+	"github.com/docker/docker/api/types/image"
+	"github.com/docker/docker/client"
+	"github.com/hhftechnology/vps-monitor/internal/models"
+)
+
+// trivyOutput represents the JSON output structure from Trivy.
+// Field names mirror Trivy's JSON keys; only the fields we consume are listed.
+type trivyOutput struct {
+	Results []trivyResult `json:"Results"`
+}
+
+// trivyResult is one scan target (e.g. an OS package set or a lockfile).
+type trivyResult struct {
+	Target          string               `json:"Target"`
+	Vulnerabilities []trivyVulnerability `json:"Vulnerabilities"`
+}
+
+// trivyVulnerability is a single finding within a target.
+type trivyVulnerability struct {
+	VulnerabilityID  string `json:"VulnerabilityID"`
+	PkgName          string `json:"PkgName"`
+	InstalledVersion string `json:"InstalledVersion"`
+	FixedVersion     string `json:"FixedVersion"`
+	Severity         string `json:"Severity"`
+	Description      string `json:"Description"`
+	PrimaryURL       string `json:"PrimaryURL"`
+}
+
+// RunTrivyScan runs a Trivy vulnerability scan against an image using Docker.
+func RunTrivyScan(ctx context.Context, dockerClient *client.Client, scannerImage, imageRef, args string, onProgress func(string)) ([]models.Vulnerability, error) {
+	// Pull the scanner image
+	if onProgress != nil {
+		onProgress("Pulling scanner image " + scannerImage + "...")
+	}
+	pullReader, err := dockerClient.ImagePull(ctx, scannerImage, image.PullOptions{})
+	if err != nil {
+		return nil, fmt.Errorf("failed to pull trivy image: %w", err)
+	}
+	// FIX: the pull only completes once the progress stream is drained; a
+	// copy error means the pull failed partway, which was previously ignored.
+	_, copyErr := io.Copy(io.Discard, pullReader)
+	pullReader.Close()
+	if copyErr != nil {
+		return nil, fmt.Errorf("failed to pull trivy image: %w", copyErr)
+	}
+
+	// Build the command
+	cmd := buildTrivyCmd(imageRef, args)
+
+	if onProgress != nil {
+		onProgress("Scanning " + imageRef + " with Trivy...")
+	}
+
+	// Create and start scanner container. The docker socket is bind-mounted
+	// so Trivy can inspect local images on the target host.
+	resp, err := dockerClient.ContainerCreate(ctx, &container.Config{
+		Image: scannerImage,
+		Cmd:   cmd,
+	}, &container.HostConfig{
+		Binds: []string{"/var/run/docker.sock:/var/run/docker.sock"},
+	}, nil, nil, "")
+	if err != nil {
+		return nil, fmt.Errorf("failed to create trivy container: %w", err)
+	}
+	containerID := resp.ID
+	// FIX: remove the container with a background context. Using the scan
+	// ctx meant the deferred cleanup itself failed once ctx was cancelled
+	// or timed out, leaking the scanner container.
+	defer dockerClient.ContainerRemove(context.Background(), containerID, container.RemoveOptions{Force: true})
+
+	if err := dockerClient.ContainerStart(ctx, containerID, container.StartOptions{}); err != nil {
+		return nil, fmt.Errorf("failed to start trivy container: %w", err)
+	}
+
+	// Wait for completion
+	statusCh, errCh := dockerClient.ContainerWait(ctx, containerID, container.WaitConditionNotRunning)
+	select {
+	case err := <-errCh:
+		if err != nil {
+			return nil, fmt.Errorf("error waiting for trivy container: %w", err)
+		}
+	case status := <-statusCh:
+		if status.StatusCode != 0 {
+			logs, _ := getContainerLogs(ctx, dockerClient, containerID)
+			return nil, fmt.Errorf("trivy exited with code %d: %s", status.StatusCode, logs)
+		}
+	case <-ctx.Done():
+		return nil, ctx.Err()
+	}
+
+	// Read stdout for JSON output
+	logReader, err := dockerClient.ContainerLogs(ctx, containerID, container.LogsOptions{ShowStdout: true})
+	if err != nil {
+		return nil, fmt.Errorf("failed to read trivy output: %w", err)
+	}
+	defer logReader.Close()
+
+	output, err := demuxDockerLogs(logReader)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read trivy output: %w", err)
+	}
+
+	return parseTrivyOutput(output)
+}
+
+// buildTrivyCmd constructs the command for Trivy. Custom args (with the
+// {image} placeholder substituted) take precedence; otherwise a default
+// JSON-format image scan is issued. Note: strings.Fields means custom args
+// cannot contain quoted values with embedded spaces.
+func buildTrivyCmd(imageRef, args string) []string {
+	if args != "" {
+		resolved := strings.ReplaceAll(args, "{image}", imageRef)
+		return strings.Fields(resolved)
+	}
+	return []string{"image", "--format", "json", imageRef}
+}
+
+// parseTrivyOutput parses Trivy JSON output into vulnerabilities.
+// Always returns a non-nil slice on success so callers/JSON encoders see [].
+func parseTrivyOutput(data []byte) ([]models.Vulnerability, error) {
+	var output trivyOutput
+	if err := json.Unmarshal(data, &output); err != nil {
+		return nil, fmt.Errorf("failed to parse trivy output: %w", err)
+	}
+
+	var vulns []models.Vulnerability
+	for _, result := range output.Results {
+		for _, v := range result.Vulnerabilities {
+			vulns = append(vulns, models.Vulnerability{
+				ID:               v.VulnerabilityID,
+				Severity:         normalizeSeverity(v.Severity),
+				Package:          v.PkgName,
+				InstalledVersion: v.InstalledVersion,
+				FixedVersion:     v.FixedVersion,
+				Description:      v.Description,
+				DataSource:       v.PrimaryURL,
+			})
+		}
+	}
+
+	if vulns == nil {
+		vulns = []models.Vulnerability{}
+	}
+
+	return vulns, nil
+}
diff --git a/home/internal/system/stats.go b/home/internal/system/stats.go
index 8f3d958a..c2609ee3 100644
--- a/home/internal/system/stats.go
+++ b/home/internal/system/stats.go
@@ -4,6 +4,7 @@ import (
 	"context"
 	"os"
 	"runtime"
+	"sync"
 
 	"github.com/shirou/gopsutil/v4/cpu"
 	"github.com/shirou/gopsutil/v4/disk"
@@ -23,8 +24,8 @@ type HostInfo struct {
 	KernelVersion string `json:"kernelVersion"`
 	Arch          string `json:"arch"`
 	Uptime        uint64 `json:"uptime"`
-	CPULogical    int    `json:"cpuLogical"`
-	CPUPhysical   int    `json:"cpuPhysical,omitempty"`
+	CPULogical    *int   `json:"cpuLogical"`
+	CPUPhysical   *int   `json:"cpuPhysical"`
 }
 
 type Usage struct {
@@ -37,6 +38,12 @@ type Usage struct {
 	DiskUsed uint64 `json:"diskUsed"`
 }
 
+var (
+	cpuCountsOnce     sync.Once
+	cachedCPULogical  *int
+	cachedCPUPhysical *int
+)
+
 // Init configures gopsutil to use the host's /proc directory if mounted
 func Init() {
 	// If we are running in a container and have mounted /proc to /host/proc,
@@ -46,6 +53,20 @@ func Init() {
 }
 
+// NOTE(review): counts are latched by sync.Once — if the first caller's ctx
+// is cancelled before the lookups succeed, nil is cached forever. Consider
+// retrying while the cached values are nil.
+func loadCPUCounts(ctx context.Context) {
+	if cpuLogical, err := cpu.CountsWithContext(ctx, true); err == nil {
+		cachedCPULogical = intPtr(cpuLogical)
+	}
+
+	if cpuPhysical, err := cpu.CountsWithContext(ctx, false); err == nil {
+		cachedCPUPhysical = intPtr(cpuPhysical)
+	}
+}
+
+func intPtr(v int) *int {
+	return &v
+}
+
 func GetStats(ctx context.Context) (*SystemStats, error) {
 	hInfo, err := host.InfoWithContext(ctx)
 	if err != nil {
@@ -73,15 +94,9 @@ func GetStats(ctx context.Context) (*SystemStats, error) {
 		cpuPercent = cpuPercents[0]
 	}
 
-	cpuLogical, err := cpu.CountsWithContext(ctx, true)
-	if err != nil {
-		cpuLogical = 0
-	}
-
-	cpuPhysical, err := cpu.CountsWithContext(ctx, false)
-	if err != nil {
-		cpuPhysical = 0
-	}
+	cpuCountsOnce.Do(func() {
+		loadCPUCounts(ctx)
+	})
 
 	// Get Disk Usage for root partition
 	// If running in container with /host mounted, use /host, otherwise use /
@@ -107,8 +122,8 @@ func GetStats(ctx context.Context) (*SystemStats, error) {
 			KernelVersion: hInfo.KernelVersion,
 			Arch:          runtime.GOARCH,
 			Uptime:        hInfo.Uptime,
-			CPULogical:    cpuLogical,
-			CPUPhysical:   cpuPhysical,
+			CPULogical:    cachedCPULogical,
+			CPUPhysical:   cachedCPUPhysical,
 		},
 		Usage: Usage{
 			CPUPercent: cpuPercent,