Fix NVIDIA persistence mode and add benchmark results table
This commit is contained in:
@@ -8,9 +8,12 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"bee/audit/internal/app"
|
||||
"bee/audit/internal/platform"
|
||||
"bee/audit/internal/schema"
|
||||
)
|
||||
|
||||
@@ -161,7 +164,7 @@ func renderPage(page string, opts HandlerOptions) string {
|
||||
case "benchmark":
|
||||
pageID = "benchmark"
|
||||
title = "Benchmark"
|
||||
body = renderBenchmark()
|
||||
body = renderBenchmark(opts)
|
||||
case "tasks":
|
||||
pageID = "tasks"
|
||||
title = "Tasks"
|
||||
@@ -1072,14 +1075,14 @@ func renderValidate(opts HandlerOptions) string {
|
||||
inv.NVIDIA,
|
||||
`Select which NVIDIA GPUs to include in Validate. The same selection is used by both NVIDIA GPU cards below and by Validate one by one.`,
|
||||
`<code>nvidia-smi --query-gpu=index,name,memory.total</code>`,
|
||||
`<div id="sat-gpu-list"><p style="color:var(--muted);font-size:13px">Loading NVIDIA GPUs…</p></div><div style="display:flex;gap:8px;flex-wrap:wrap;margin-top:8px"><button type="button" class="btn btn-sm btn-secondary" onclick="satSelectAllGPUs()">Select all</button><button type="button" class="btn btn-sm btn-secondary" onclick="satSelectNoGPUs()">Clear</button></div><div id="sat-gpu-selection-note" style="font-size:12px;color:var(--muted);margin-top:8px"></div>`,
|
||||
)) +
|
||||
renderSATCard("nvidia", "NVIDIA GPU", "runNvidiaValidateSet('nvidia')", "", renderValidateCardBody(
|
||||
inv.NVIDIA,
|
||||
`Runs NVIDIA diagnostics and board inventory checks.`,
|
||||
`<code>nvidia-smi</code>, <code>dmidecode</code>, <code>dcgmi diag</code>`,
|
||||
`Runs one GPU at a time on the selected NVIDIA GPUs. Diag level is taken from Validate Profile.`,
|
||||
`<div id="sat-gpu-list"><p style="color:var(--muted);font-size:13px">Loading NVIDIA GPUs…</p></div><div style="display:flex;gap:8px;flex-wrap:wrap;margin-top:8px"><button type="button" class="btn btn-sm btn-secondary" onclick="satSelectAllGPUs()">Select all</button><button type="button" class="btn btn-sm btn-secondary" onclick="satSelectNoGPUs()">Clear</button></div><div id="sat-gpu-selection-note" style="font-size:12px;color:var(--muted);margin-top:8px"></div>`,
|
||||
)) +
|
||||
renderSATCard("nvidia", "NVIDIA GPU", "runNvidiaValidateSet('nvidia')", "", renderValidateCardBody(
|
||||
inv.NVIDIA,
|
||||
`Runs NVIDIA diagnostics and board inventory checks.`,
|
||||
`<code>nvidia-smi</code>, <code>dmidecode</code>, <code>dcgmi diag</code>`,
|
||||
`Runs one GPU at a time on the selected NVIDIA GPUs. Diag level is taken from Validate Profile.`,
|
||||
)) +
|
||||
renderSATCard("nvidia-targeted-stress", "NVIDIA GPU Targeted Stress", "runNvidiaValidateSet('nvidia-targeted-stress')", "", renderValidateCardBody(
|
||||
inv.NVIDIA,
|
||||
`Runs a controlled NVIDIA DCGM load in Validate to check stability under moderate stress.`,
|
||||
@@ -1569,7 +1572,25 @@ func renderSATCard(id, label, runAction, headerActions, body string) string {
|
||||
|
||||
// ── Benchmark ─────────────────────────────────────────────────────────────────
|
||||
|
||||
func renderBenchmark() string {
|
||||
// benchmarkHistoryColumn describes one column of the benchmark-history table:
// a single GPU identified by its (trimmed) name and index.
type benchmarkHistoryColumn struct {
	key   string // stable lookup key, produced by benchmarkHistoryColumnKey
	label string // human-readable header, produced by benchmarkHistoryColumnLabel
	name  string // trimmed GPU name (may be empty)
	index int    // GPU index as reported in the result bundle
}

// benchmarkHistoryCell holds the composite score for one (run, column) pair.
type benchmarkHistoryCell struct {
	score   float64 // composite benchmark score for this GPU in this run
	present bool    // true when the run actually produced a score for this column
}

// benchmarkHistoryRun is one saved benchmark run, with its per-GPU scores
// keyed by the column key.
type benchmarkHistoryRun struct {
	generatedAt time.Time                       // timestamp read from the result bundle
	displayTime string                          // generatedAt pre-formatted for table display
	cells       map[string]benchmarkHistoryCell // column key -> score cell
}
|
||||
|
||||
func renderBenchmark(opts HandlerOptions) string {
|
||||
return `<p style="color:var(--muted);font-size:13px;margin-bottom:16px">Benchmark runs generate a human-readable TXT report and machine-readable result bundle. Tasks continue in the background — view progress in <a href="/tasks">Tasks</a>.</p>
|
||||
|
||||
<div class="grid2">
|
||||
@@ -1618,6 +1639,8 @@ func renderBenchmark() string {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
` + renderBenchmarkResultsCard(opts.ExportDir) + `
|
||||
|
||||
<div id="benchmark-output" style="display:none;margin-top:16px" class="card">
|
||||
<div class="card-head">Benchmark Output <span id="benchmark-title"></span></div>
|
||||
<div class="card-body"><div id="benchmark-terminal" class="terminal"></div></div>
|
||||
@@ -1754,6 +1777,115 @@ benchmarkLoadGPUs();
|
||||
</script>`
|
||||
}
|
||||
|
||||
func renderBenchmarkResultsCard(exportDir string) string {
|
||||
columns, runs := loadBenchmarkHistory(exportDir)
|
||||
if len(runs) == 0 {
|
||||
return `<div class="card"><div class="card-head">Benchmark Results</div><div class="card-body"><p style="color:var(--muted);font-size:13px">No saved benchmark runs yet.</p></div></div>`
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.WriteString(`<div class="card"><div class="card-head">Benchmark Results</div><div class="card-body">`)
|
||||
b.WriteString(`<p style="color:var(--muted);font-size:13px;margin-bottom:12px">Composite score by saved benchmark run and GPU.</p>`)
|
||||
b.WriteString(`<div style="overflow-x:auto">`)
|
||||
b.WriteString(`<table><thead><tr><th>Test</th><th>Time</th>`)
|
||||
for _, col := range columns {
|
||||
b.WriteString(`<th>` + html.EscapeString(col.label) + `</th>`)
|
||||
}
|
||||
b.WriteString(`</tr></thead><tbody>`)
|
||||
for i, run := range runs {
|
||||
b.WriteString(`<tr>`)
|
||||
b.WriteString(`<td>#` + strconv.Itoa(i+1) + `</td>`)
|
||||
b.WriteString(`<td>` + html.EscapeString(run.displayTime) + `</td>`)
|
||||
for _, col := range columns {
|
||||
cell, ok := run.cells[col.key]
|
||||
if !ok || !cell.present {
|
||||
b.WriteString(`<td style="color:var(--muted)">-</td>`)
|
||||
continue
|
||||
}
|
||||
b.WriteString(`<td>` + fmt.Sprintf("%.2f", cell.score) + `</td>`)
|
||||
}
|
||||
b.WriteString(`</tr>`)
|
||||
}
|
||||
b.WriteString(`</tbody></table></div></div></div>`)
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func loadBenchmarkHistory(exportDir string) ([]benchmarkHistoryColumn, []benchmarkHistoryRun) {
|
||||
baseDir := app.DefaultBenchmarkBaseDir
|
||||
if strings.TrimSpace(exportDir) != "" {
|
||||
baseDir = filepath.Join(exportDir, "bee-benchmark")
|
||||
}
|
||||
paths, err := filepath.Glob(filepath.Join(baseDir, "gpu-benchmark-*", "result.json"))
|
||||
if err != nil || len(paths) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
sort.Strings(paths)
|
||||
|
||||
columnByKey := make(map[string]benchmarkHistoryColumn)
|
||||
runs := make([]benchmarkHistoryRun, 0, len(paths))
|
||||
for _, path := range paths {
|
||||
raw, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
var result platform.NvidiaBenchmarkResult
|
||||
if err := json.Unmarshal(raw, &result); err != nil {
|
||||
continue
|
||||
}
|
||||
run := benchmarkHistoryRun{
|
||||
generatedAt: result.GeneratedAt,
|
||||
displayTime: result.GeneratedAt.Local().Format("2006-01-02 15:04:05"),
|
||||
cells: make(map[string]benchmarkHistoryCell),
|
||||
}
|
||||
for _, gpu := range result.GPUs {
|
||||
key := benchmarkHistoryColumnKey(gpu.Name, gpu.Index)
|
||||
columnByKey[key] = benchmarkHistoryColumn{
|
||||
key: key,
|
||||
label: benchmarkHistoryColumnLabel(gpu.Name, gpu.Index),
|
||||
name: strings.TrimSpace(gpu.Name),
|
||||
index: gpu.Index,
|
||||
}
|
||||
run.cells[key] = benchmarkHistoryCell{
|
||||
score: gpu.Scores.CompositeScore,
|
||||
present: true,
|
||||
}
|
||||
}
|
||||
runs = append(runs, run)
|
||||
}
|
||||
|
||||
columns := make([]benchmarkHistoryColumn, 0, len(columnByKey))
|
||||
for _, col := range columnByKey {
|
||||
columns = append(columns, col)
|
||||
}
|
||||
sort.Slice(columns, func(i, j int) bool {
|
||||
leftName := strings.ToLower(strings.TrimSpace(columns[i].name))
|
||||
rightName := strings.ToLower(strings.TrimSpace(columns[j].name))
|
||||
if leftName != rightName {
|
||||
return leftName < rightName
|
||||
}
|
||||
if columns[i].index != columns[j].index {
|
||||
return columns[i].index < columns[j].index
|
||||
}
|
||||
return columns[i].key < columns[j].key
|
||||
})
|
||||
sort.Slice(runs, func(i, j int) bool {
|
||||
return runs[i].generatedAt.After(runs[j].generatedAt)
|
||||
})
|
||||
return columns, runs
|
||||
}
|
||||
|
||||
// benchmarkHistoryColumnKey builds the stable map key for a GPU column:
// the trimmed GPU name and the GPU index, joined by "|".
func benchmarkHistoryColumnKey(name string, index int) string {
	trimmed := strings.TrimSpace(name)
	return trimmed + "|" + strconv.Itoa(index)
}
|
||||
|
||||
// benchmarkHistoryColumnLabel builds the human-readable table header for a
// GPU column: "<name> / GPU <index>", or just "GPU <index>" when the trimmed
// name is empty.
func benchmarkHistoryColumnLabel(name string, index int) string {
	trimmed := strings.TrimSpace(name)
	if trimmed == "" {
		return fmt.Sprintf("GPU %d", index)
	}
	return fmt.Sprintf("%s / GPU %d", trimmed, index)
}
|
||||
|
||||
// ── Burn ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
func renderBurn() string {
|
||||
|
||||
Reference in New Issue
Block a user