improve redfish collection progress and robust hardware dedup/serial parsing

This commit is contained in:
2026-02-28 16:07:42 +03:00
parent 8dbbec3610
commit 9a30705c9a
9 changed files with 871 additions and 60 deletions

View File

@@ -436,7 +436,7 @@ func (c *RedfishConnector) collectGPUs(ctx context.Context, client *http.Client,
continue continue
} }
key := gpuDedupKey(gpu) key := gpuDocDedupKey(doc, gpu)
if key == "" { if key == "" {
continue continue
} }
@@ -1281,25 +1281,20 @@ func (c *RedfishConnector) getCollectionMembers(ctx context.Context, client *htt
return nil, err return nil, err
} }
refs, ok := collection["Members"].([]interface{}) memberPaths := redfishCollectionMemberRefs(collection)
if !ok || len(refs) == 0 { if len(memberPaths) == 0 {
return []map[string]interface{}{}, nil return []map[string]interface{}{}, nil
} }
out := make([]map[string]interface{}, 0, len(refs)) out := make([]map[string]interface{}, 0, len(memberPaths))
for _, refAny := range refs { for _, memberPath := range memberPaths {
ref, ok := refAny.(map[string]interface{})
if !ok {
continue
}
memberPath := asString(ref["@odata.id"])
if memberPath == "" {
continue
}
memberDoc, err := c.getJSON(ctx, client, req, baseURL, memberPath) memberDoc, err := c.getJSON(ctx, client, req, baseURL, memberPath)
if err != nil { if err != nil {
continue continue
} }
if strings.TrimSpace(asString(memberDoc["@odata.id"])) == "" {
memberDoc["@odata.id"] = normalizeRedfishPath(memberPath)
}
out = append(out, memberDoc) out = append(out, memberDoc)
} }
return out, nil return out, nil
@@ -1387,20 +1382,12 @@ func (c *RedfishConnector) getJSONWithRetry(ctx context.Context, client *http.Cl
} }
func (c *RedfishConnector) collectCriticalCollectionMembersSequential(ctx context.Context, client *http.Client, req Request, baseURL, collectionPath string, collectionDoc map[string]interface{}) (map[string]interface{}, bool) { func (c *RedfishConnector) collectCriticalCollectionMembersSequential(ctx context.Context, client *http.Client, req Request, baseURL, collectionPath string, collectionDoc map[string]interface{}) (map[string]interface{}, bool) {
refs, ok := collectionDoc["Members"].([]interface{}) memberPaths := redfishCollectionMemberRefs(collectionDoc)
if !ok || len(refs) == 0 { if len(memberPaths) == 0 {
return nil, false return nil, false
} }
out := make(map[string]interface{}) out := make(map[string]interface{})
for _, refAny := range refs { for _, memberPath := range memberPaths {
ref, ok := refAny.(map[string]interface{})
if !ok {
continue
}
memberPath := normalizeRedfishPath(asString(ref["@odata.id"]))
if memberPath == "" {
continue
}
doc, err := c.getJSONWithRetry(ctx, client, req, baseURL, memberPath, redfishCriticalRetryAttempts(), redfishCriticalRetryBackoff()) doc, err := c.getJSONWithRetry(ctx, client, req, baseURL, memberPath, redfishCriticalRetryAttempts(), redfishCriticalRetryBackoff())
if err != nil { if err != nil {
continue continue
@@ -1412,6 +1399,19 @@ func (c *RedfishConnector) collectCriticalCollectionMembersSequential(ctx contex
func (c *RedfishConnector) recoverCriticalRedfishDocsPlanB(ctx context.Context, client *http.Client, req Request, baseURL string, criticalPaths []string, rawTree map[string]interface{}, fetchErrs map[string]string, emit ProgressFn) int { func (c *RedfishConnector) recoverCriticalRedfishDocsPlanB(ctx context.Context, client *http.Client, req Request, baseURL string, criticalPaths []string, rawTree map[string]interface{}, fetchErrs map[string]string, emit ProgressFn) int {
var targets []string var targets []string
seenTargets := make(map[string]struct{})
addTarget := func(path string) {
path = normalizeRedfishPath(path)
if path == "" {
return
}
if _, ok := seenTargets[path]; ok {
return
}
seenTargets[path] = struct{}{}
targets = append(targets, path)
}
for _, p := range criticalPaths { for _, p := range criticalPaths {
p = normalizeRedfishPath(p) p = normalizeRedfishPath(p)
if p == "" { if p == "" {
@@ -1424,7 +1424,35 @@ func (c *RedfishConnector) recoverCriticalRedfishDocsPlanB(ctx context.Context,
if hasErr && !isRetryableRedfishFetchError(fmt.Errorf("%s", errMsg)) { if hasErr && !isRetryableRedfishFetchError(fmt.Errorf("%s", errMsg)) {
continue continue
} }
targets = append(targets, p) addTarget(p)
}
// If a critical collection document was fetched, but some of its members
// failed during the initial crawl (common for /Drives on partially loaded BMCs),
// retry those member resources in plan-B too.
for _, p := range criticalPaths {
p = normalizeRedfishPath(p)
if p == "" {
continue
}
docAny, ok := rawTree[p]
if !ok {
continue
}
doc, ok := docAny.(map[string]interface{})
if !ok {
continue
}
for _, memberPath := range redfishCollectionMemberRefs(doc) {
if _, exists := rawTree[memberPath]; exists {
continue
}
errMsg, hasErr := fetchErrs[memberPath]
if hasErr && !isRetryableRedfishFetchError(fmt.Errorf("%s", errMsg)) {
continue
}
addTarget(memberPath)
}
} }
if len(targets) == 0 { if len(targets) == 0 {
return 0 return 0
@@ -1608,7 +1636,7 @@ func parseCPUs(docs []map[string]interface{}) []models.CPU {
Threads: asInt(doc["TotalThreads"]), Threads: asInt(doc["TotalThreads"]),
FrequencyMHz: asInt(doc["OperatingSpeedMHz"]), FrequencyMHz: asInt(doc["OperatingSpeedMHz"]),
MaxFreqMHz: asInt(doc["MaxSpeedMHz"]), MaxFreqMHz: asInt(doc["MaxSpeedMHz"]),
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
}) })
} }
return cpus return cpus
@@ -1638,7 +1666,7 @@ func parseMemory(docs []map[string]interface{}) []models.MemoryDIMM {
MaxSpeedMHz: asInt(doc["MaxSpeedMHz"]), MaxSpeedMHz: asInt(doc["MaxSpeedMHz"]),
CurrentSpeedMHz: asInt(doc["OperatingSpeedMhz"]), CurrentSpeedMHz: asInt(doc["OperatingSpeedMhz"]),
Manufacturer: asString(doc["Manufacturer"]), Manufacturer: asString(doc["Manufacturer"]),
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
PartNumber: asString(doc["PartNumber"]), PartNumber: asString(doc["PartNumber"]),
Status: mapStatus(doc["Status"]), Status: mapStatus(doc["Status"]),
}) })
@@ -1665,7 +1693,7 @@ func parseDrive(doc map[string]interface{}) models.Storage {
Type: storageType, Type: storageType,
Model: firstNonEmpty(asString(doc["Model"]), asString(doc["Name"])), Model: firstNonEmpty(asString(doc["Model"]), asString(doc["Name"])),
SizeGB: sizeGB, SizeGB: sizeGB,
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
Manufacturer: asString(doc["Manufacturer"]), Manufacturer: asString(doc["Manufacturer"]),
Firmware: asString(doc["Revision"]), Firmware: asString(doc["Revision"]),
Interface: asString(doc["Protocol"]), Interface: asString(doc["Protocol"]),
@@ -1737,7 +1765,7 @@ func parseNIC(doc map[string]interface{}) models.NetworkAdapter {
Vendor: strings.TrimSpace(vendor), Vendor: strings.TrimSpace(vendor),
VendorID: vendorID, VendorID: vendorID,
DeviceID: deviceID, DeviceID: deviceID,
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
PartNumber: asString(doc["PartNumber"]), PartNumber: asString(doc["PartNumber"]),
Firmware: firmware, Firmware: firmware,
PortCount: portCount, PortCount: portCount,
@@ -1828,7 +1856,7 @@ func parsePSU(doc map[string]interface{}, idx int) models.PSU {
Model: firstNonEmpty(asString(doc["Model"]), asString(doc["Name"])), Model: firstNonEmpty(asString(doc["Model"]), asString(doc["Name"])),
Vendor: asString(doc["Manufacturer"]), Vendor: asString(doc["Manufacturer"]),
WattageW: asInt(doc["PowerCapacityWatts"]), WattageW: asInt(doc["PowerCapacityWatts"]),
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
PartNumber: asString(doc["PartNumber"]), PartNumber: asString(doc["PartNumber"]),
Firmware: asString(doc["FirmwareVersion"]), Firmware: asString(doc["FirmwareVersion"]),
Status: status, Status: status,
@@ -1856,7 +1884,7 @@ func parseGPU(doc map[string]interface{}, functionDocs []map[string]interface{},
Location: firstNonEmpty(redfishLocationLabel(doc["Location"]), redfishLocationLabel(doc["PhysicalLocation"])), Location: firstNonEmpty(redfishLocationLabel(doc["Location"]), redfishLocationLabel(doc["PhysicalLocation"])),
Model: firstNonEmpty(asString(doc["Model"]), asString(doc["Name"])), Model: firstNonEmpty(asString(doc["Model"]), asString(doc["Name"])),
Manufacturer: asString(doc["Manufacturer"]), Manufacturer: asString(doc["Manufacturer"]),
SerialNumber: strings.TrimSpace(asString(doc["SerialNumber"])), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
PartNumber: asString(doc["PartNumber"]), PartNumber: asString(doc["PartNumber"]),
Firmware: asString(doc["FirmwareVersion"]), Firmware: asString(doc["FirmwareVersion"]),
Status: mapStatus(doc["Status"]), Status: mapStatus(doc["Status"]),
@@ -1918,7 +1946,7 @@ func parsePCIeDevice(doc map[string]interface{}, functionDocs []map[string]inter
DeviceClass: asString(doc["DeviceType"]), DeviceClass: asString(doc["DeviceType"]),
Manufacturer: asString(doc["Manufacturer"]), Manufacturer: asString(doc["Manufacturer"]),
PartNumber: asString(doc["PartNumber"]), PartNumber: asString(doc["PartNumber"]),
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
VendorID: asHexOrInt(doc["VendorId"]), VendorID: asHexOrInt(doc["VendorId"]),
DeviceID: asHexOrInt(doc["DeviceId"]), DeviceID: asHexOrInt(doc["DeviceId"]),
} }
@@ -1988,7 +2016,7 @@ func parsePCIeFunction(doc map[string]interface{}, idx int) models.PCIeDevice {
DeviceID: asHexOrInt(doc["DeviceId"]), DeviceID: asHexOrInt(doc["DeviceId"]),
DeviceClass: firstNonEmpty(asString(doc["DeviceClass"]), asString(doc["ClassCode"]), "PCIe device"), DeviceClass: firstNonEmpty(asString(doc["DeviceClass"]), asString(doc["ClassCode"]), "PCIe device"),
Manufacturer: asString(doc["Manufacturer"]), Manufacturer: asString(doc["Manufacturer"]),
SerialNumber: asString(doc["SerialNumber"]), SerialNumber: findFirstNormalizedStringByKeys(doc, "SerialNumber"),
LinkWidth: asInt(doc["CurrentLinkWidth"]), LinkWidth: asInt(doc["CurrentLinkWidth"]),
LinkSpeed: firstNonEmpty(asString(doc["CurrentLinkSpeedGTs"]), asString(doc["CurrentLinkSpeed"])), LinkSpeed: firstNonEmpty(asString(doc["CurrentLinkSpeedGTs"]), asString(doc["CurrentLinkSpeed"])),
MaxLinkWidth: asInt(doc["MaxLinkWidth"]), MaxLinkWidth: asInt(doc["MaxLinkWidth"]),
@@ -2097,6 +2125,13 @@ func gpuDedupKey(gpu models.GPU) string {
return firstNonEmpty(strings.TrimSpace(gpu.Slot)+"|"+strings.TrimSpace(gpu.Model), strings.TrimSpace(gpu.Slot)) return firstNonEmpty(strings.TrimSpace(gpu.Slot)+"|"+strings.TrimSpace(gpu.Model), strings.TrimSpace(gpu.Slot))
} }
// gpuDocDedupKey returns a dedup key for a GPU document. The normalized
// Redfish @odata.id path is preferred because it is unique per resource;
// when the document carries no path, it falls back to the heuristic
// slot/model key produced by gpuDedupKey.
func gpuDocDedupKey(doc map[string]interface{}, gpu models.GPU) string {
	path := normalizeRedfishPath(asString(doc["@odata.id"]))
	if path == "" {
		return gpuDedupKey(gpu)
	}
	return "path:" + path
}
func shouldSkipGenericGPUDuplicate(existing []models.GPU, candidate models.GPU) bool { func shouldSkipGenericGPUDuplicate(existing []models.GPU, candidate models.GPU) bool {
if len(existing) == 0 { if len(existing) == 0 {
return false return false
@@ -2137,6 +2172,48 @@ func dropModelOnlyGPUPlaceholders(items []models.GPU) []models.GPU {
return items return items
} }
// Merge serial from generic GraphicsControllers placeholders (slot ~= model)
// into concrete PCIe rows (with BDF) when mapping is unambiguous.
mergedPlaceholder := make(map[int]struct{})
for i := range items {
serial := normalizeRedfishIdentityField(items[i].SerialNumber)
if serial == "" || strings.TrimSpace(items[i].BDF) != "" || !isModelOnlyGPUPlaceholder(items[i]) {
continue
}
candidate := -1
model := strings.TrimSpace(items[i].Model)
mfr := strings.TrimSpace(items[i].Manufacturer)
for j := range items {
if i == j {
continue
}
if !strings.EqualFold(strings.TrimSpace(items[j].Model), model) {
continue
}
otherMfr := strings.TrimSpace(items[j].Manufacturer)
if mfr != "" && otherMfr != "" && !strings.EqualFold(mfr, otherMfr) {
continue
}
if strings.TrimSpace(items[j].BDF) == "" || isModelOnlyGPUPlaceholder(items[j]) {
continue
}
if normalizeRedfishIdentityField(items[j].SerialNumber) != "" {
continue
}
if candidate != -1 {
candidate = -2
break
}
candidate = j
}
if candidate >= 0 {
items[candidate].SerialNumber = serial
mergedPlaceholder[i] = struct{}{}
}
}
concreteByModel := make(map[string]struct{}, len(items)) concreteByModel := make(map[string]struct{}, len(items))
for _, gpu := range items { for _, gpu := range items {
modelKey := strings.ToLower(strings.TrimSpace(gpu.Model)) modelKey := strings.ToLower(strings.TrimSpace(gpu.Model))
@@ -2152,14 +2229,12 @@ func dropModelOnlyGPUPlaceholders(items []models.GPU) []models.GPU {
} }
out := make([]models.GPU, 0, len(items)) out := make([]models.GPU, 0, len(items))
for _, gpu := range items { for i, gpu := range items {
modelKey := strings.ToLower(strings.TrimSpace(gpu.Model)) modelKey := strings.ToLower(strings.TrimSpace(gpu.Model))
slot := strings.TrimSpace(gpu.Slot)
if _, hasConcrete := concreteByModel[modelKey]; hasConcrete && if _, hasConcrete := concreteByModel[modelKey]; hasConcrete &&
normalizeRedfishIdentityField(gpu.SerialNumber) == "" &&
strings.TrimSpace(gpu.BDF) == "" && strings.TrimSpace(gpu.BDF) == "" &&
(strings.EqualFold(slot, strings.TrimSpace(gpu.Model)) || isModelOnlyGPUPlaceholder(gpu) &&
strings.HasPrefix(strings.ToUpper(slot), "GPU")) { (normalizeRedfishIdentityField(gpu.SerialNumber) == "" || hasMergedPlaceholderIndex(mergedPlaceholder, i)) {
continue continue
} }
out = append(out, gpu) out = append(out, gpu)
@@ -2167,6 +2242,20 @@ func dropModelOnlyGPUPlaceholders(items []models.GPU) []models.GPU {
return out return out
} }
// isModelOnlyGPUPlaceholder reports whether a GPU row looks like a generic
// placeholder entry: its slot either equals the model name
// (case-insensitive) or begins with the "GPU" prefix. Rows lacking either a
// slot or a model never qualify.
func isModelOnlyGPUPlaceholder(gpu models.GPU) bool {
	slot, model := strings.TrimSpace(gpu.Slot), strings.TrimSpace(gpu.Model)
	if slot == "" || model == "" {
		return false
	}
	if strings.EqualFold(slot, model) {
		return true
	}
	return strings.HasPrefix(strings.ToUpper(slot), "GPU")
}
// hasMergedPlaceholderIndex reports whether idx was recorded in the set of
// placeholder rows already merged into a concrete GPU entry.
func hasMergedPlaceholderIndex(indexes map[int]struct{}, idx int) bool {
	_, present := indexes[idx]
	return present
}
func looksLikeGPU(doc map[string]interface{}, functionDocs []map[string]interface{}) bool { func looksLikeGPU(doc map[string]interface{}, functionDocs []map[string]interface{}) bool {
deviceType := strings.ToLower(asString(doc["DeviceType"])) deviceType := strings.ToLower(asString(doc["DeviceType"]))
if strings.Contains(deviceType, "gpu") || strings.Contains(deviceType, "graphics") || strings.Contains(deviceType, "accelerator") { if strings.Contains(deviceType, "gpu") || strings.Contains(deviceType, "graphics") || strings.Contains(deviceType, "accelerator") {
@@ -2537,6 +2626,42 @@ func normalizeRedfishPath(raw string) string {
return raw return raw
} }
// redfishCollectionMemberRefs extracts the normalized, de-duplicated member
// paths (@odata.id) of a Redfish collection document. Besides the standard
// top-level Members array it also consults Oem.Public.Members, where some
// BMC implementations list additional members. First-appearance order is
// preserved; a nil/empty collection yields nil.
func redfishCollectionMemberRefs(collection map[string]interface{}) []string {
	if len(collection) == 0 {
		return nil
	}
	var paths []string
	seen := make(map[string]struct{})
	collect := func(raw any) {
		members, ok := raw.([]interface{})
		if !ok || len(members) == 0 {
			return
		}
		for _, entry := range members {
			obj, ok := entry.(map[string]interface{})
			if !ok {
				continue
			}
			p := normalizeRedfishPath(asString(obj["@odata.id"]))
			if p == "" {
				continue
			}
			if _, dup := seen[p]; dup {
				continue
			}
			seen[p] = struct{}{}
			paths = append(paths, p)
		}
	}
	collect(collection["Members"])
	if oem, ok := collection["Oem"].(map[string]interface{}); ok {
		if public, ok := oem["Public"].(map[string]interface{}); ok {
			collect(public["Members"])
		}
	}
	return paths
}
func extractODataIDs(v interface{}) []string { func extractODataIDs(v interface{}) []string {
var refs []string var refs []string
var walk func(any) var walk func(any)

View File

@@ -549,24 +549,19 @@ func (r redfishSnapshotReader) getCollectionMembers(collectionPath string) ([]ma
if err != nil { if err != nil {
return r.fallbackCollectionMembers(collectionPath, err) return r.fallbackCollectionMembers(collectionPath, err)
} }
refs, ok := collection["Members"].([]interface{}) memberPaths := redfishCollectionMemberRefs(collection)
if !ok || len(refs) == 0 { if len(memberPaths) == 0 {
return r.fallbackCollectionMembers(collectionPath, nil) return r.fallbackCollectionMembers(collectionPath, nil)
} }
out := make([]map[string]interface{}, 0, len(refs)) out := make([]map[string]interface{}, 0, len(memberPaths))
for _, refAny := range refs { for _, memberPath := range memberPaths {
ref, ok := refAny.(map[string]interface{})
if !ok {
continue
}
memberPath := asString(ref["@odata.id"])
if memberPath == "" {
continue
}
doc, err := r.getJSON(memberPath) doc, err := r.getJSON(memberPath)
if err != nil { if err != nil {
continue continue
} }
if strings.TrimSpace(asString(doc["@odata.id"])) == "" {
doc["@odata.id"] = normalizeRedfishPath(memberPath)
}
out = append(out, doc) out = append(out, doc)
} }
if len(out) == 0 { if len(out) == 0 {
@@ -608,6 +603,9 @@ func (r redfishSnapshotReader) fallbackCollectionMembers(collectionPath string,
if err != nil { if err != nil {
continue continue
} }
if strings.TrimSpace(asString(doc["@odata.id"])) == "" {
doc["@odata.id"] = normalizeRedfishPath(p)
}
out = append(out, doc) out = append(out, doc)
} }
return out, nil return out, nil
@@ -939,7 +937,7 @@ func (r redfishSnapshotReader) collectGPUs(systemPaths, chassisPaths []string) [
if shouldSkipGenericGPUDuplicate(out, gpu) { if shouldSkipGenericGPUDuplicate(out, gpu) {
continue continue
} }
key := gpuDedupKey(gpu) key := gpuDocDedupKey(doc, gpu)
if key == "" { if key == "" {
continue continue
} }

View File

@@ -369,6 +369,139 @@ func TestParsePCIeDevice_PrefersFunctionClassOverDeviceType(t *testing.T) {
} }
} }
// TestParseComponents_UseNestedSerialNumberFallback verifies that every
// component parser (CPU, DIMM, drive, NIC, PSU, PCIe device and function)
// ignores a placeholder top-level SerialNumber ("N/A") and falls back to
// the serial found in the nested Oem object.
func TestParseComponents_UseNestedSerialNumberFallback(t *testing.T) {
	// One shared document exercises all parsers: top-level serial is a
	// placeholder, the real serial lives under Oem.
	doc := map[string]interface{}{
		"Name":         "dev0",
		"Id":           "dev0",
		"Model":        "model0",
		"Manufacturer": "vendor0",
		"SerialNumber": "N/A",
		"Oem": map[string]interface{}{
			"SerialNumber": "SN-OK-001",
		},
	}
	cpus := parseCPUs([]map[string]interface{}{doc})
	if len(cpus) != 1 || cpus[0].SerialNumber != "SN-OK-001" {
		t.Fatalf("expected CPU serial fallback, got %+v", cpus)
	}
	dimms := parseMemory([]map[string]interface{}{doc})
	if len(dimms) != 1 || dimms[0].SerialNumber != "SN-OK-001" {
		t.Fatalf("expected DIMM serial fallback, got %+v", dimms)
	}
	drive := parseDrive(doc)
	if drive.SerialNumber != "SN-OK-001" {
		t.Fatalf("expected drive serial fallback, got %q", drive.SerialNumber)
	}
	nic := parseNIC(doc)
	if nic.SerialNumber != "SN-OK-001" {
		t.Fatalf("expected NIC serial fallback, got %q", nic.SerialNumber)
	}
	psu := parsePSU(doc, 1)
	if psu.SerialNumber != "SN-OK-001" {
		t.Fatalf("expected PSU serial fallback, got %q", psu.SerialNumber)
	}
	pcie := parsePCIeDevice(doc, nil)
	if pcie.SerialNumber != "SN-OK-001" {
		t.Fatalf("expected PCIe device serial fallback, got %q", pcie.SerialNumber)
	}
	pcieFn := parsePCIeFunction(doc, 1)
	if pcieFn.SerialNumber != "SN-OK-001" {
		t.Fatalf("expected PCIe function serial fallback, got %q", pcieFn.SerialNumber)
	}
}
// TestRedfishCollectionMemberRefs_IncludesOemPublicMembers verifies that
// member extraction merges the standard Members array with the additional
// members some BMCs list under Oem.Public.Members (1 + 2 = 3 refs here).
func TestRedfishCollectionMemberRefs_IncludesOemPublicMembers(t *testing.T) {
	collection := map[string]interface{}{
		"Members": []interface{}{
			map[string]interface{}{"@odata.id": "/redfish/v1/Chassis/1/Drives/OB01"},
		},
		"Oem": map[string]interface{}{
			"Public": map[string]interface{}{
				"Members": []interface{}{
					map[string]interface{}{"@odata.id": "/redfish/v1/Chassis/1/Drives/FP00HDD00"},
					map[string]interface{}{"@odata.id": "/redfish/v1/Chassis/1/Drives/FP00HDD02"},
				},
			},
		},
	}
	got := redfishCollectionMemberRefs(collection)
	if len(got) != 3 {
		t.Fatalf("expected 3 member refs, got %d: %v", len(got), got)
	}
}
// TestRecoverCriticalRedfishDocsPlanB_RetriesMembersFromExistingCollection
// verifies that plan-B recovery also retries members of an already-fetched
// critical collection when their individual fetches failed with a retryable
// error, and that success populates rawTree and clears the recorded error.
func TestRecoverCriticalRedfishDocsPlanB_RetriesMembersFromExistingCollection(t *testing.T) {
	// Zero out timing knobs so the test runs without delays.
	t.Setenv("LOGPILE_REDFISH_CRITICAL_COOLDOWN", "0s")
	t.Setenv("LOGPILE_REDFISH_CRITICAL_SLOW_GAP", "0s")
	t.Setenv("LOGPILE_REDFISH_CRITICAL_PLANB_RETRIES", "1")
	t.Setenv("LOGPILE_REDFISH_CRITICAL_RETRIES", "1")
	t.Setenv("LOGPILE_REDFISH_CRITICAL_BACKOFF", "0s")
	const memberPath = "/redfish/v1/Chassis/1/Drives/FP00HDD00"
	// Serve only the member resource; the collection itself is already in rawTree.
	mux := http.NewServeMux()
	mux.HandleFunc(memberPath, func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(map[string]interface{}{
			"Id":            "FP00HDD00",
			"Name":          "FP00HDD00",
			"Model":         "HDD-TEST",
			"MediaType":     "HDD",
			"Protocol":      "SAS",
			"CapacityBytes": int64(2000398934016),
			"SerialNumber":  "HDD-SN-001",
		})
	})
	ts := httptest.NewServer(mux)
	defer ts.Close()
	// The collection was fetched successfully, and lists the failed member
	// under Oem.Public.Members (not the top-level Members array).
	rawTree := map[string]interface{}{
		"/redfish/v1/Chassis/1/Drives": map[string]interface{}{
			"Members": []interface{}{
				map[string]interface{}{"@odata.id": "/redfish/v1/Chassis/1/Drives/OB01"},
			},
			"Oem": map[string]interface{}{
				"Public": map[string]interface{}{
					"Members": []interface{}{
						map[string]interface{}{"@odata.id": memberPath},
					},
				},
			},
		},
	}
	// A timeout is a retryable fetch error, so plan-B should retry it.
	fetchErrs := map[string]string{
		memberPath: "Get \"https://example/redfish/v1/Chassis/1/Drives/FP00HDD00\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)",
	}
	c := NewRedfishConnector()
	recovered := c.recoverCriticalRedfishDocsPlanB(
		context.Background(),
		ts.Client(),
		Request{},
		ts.URL,
		[]string{"/redfish/v1/Chassis/1/Drives"},
		rawTree,
		fetchErrs,
		nil,
	)
	if recovered == 0 {
		t.Fatalf("expected plan-B to recover at least one document")
	}
	if _, ok := rawTree[memberPath]; !ok {
		t.Fatalf("expected recovered member doc for %s", memberPath)
	}
	if _, ok := fetchErrs[memberPath]; ok {
		t.Fatalf("expected fetch error for %s to be cleared after recovery", memberPath)
	}
}
func TestReplayCollectStorage_ProbesSupermicroNVMeDiskBayWhenCollectionEmpty(t *testing.T) { func TestReplayCollectStorage_ProbesSupermicroNVMeDiskBayWhenCollectionEmpty(t *testing.T) {
r := redfishSnapshotReader{tree: map[string]interface{}{ r := redfishSnapshotReader{tree: map[string]interface{}{
"/redfish/v1/Systems": map[string]interface{}{ "/redfish/v1/Systems": map[string]interface{}{
@@ -551,6 +684,54 @@ func TestReplayCollectGPUs_FromGraphicsControllers(t *testing.T) {
} }
} }
// TestReplayCollectGPUs_DedupUsesRedfishPathBeforeHeuristics verifies that
// two GPUs with identical model/manufacturer and no usable serial are both
// kept, because deduplication keys on the unique Redfish resource path
// before falling back to slot/model heuristics.
func TestReplayCollectGPUs_DedupUsesRedfishPathBeforeHeuristics(t *testing.T) {
	r := redfishSnapshotReader{tree: map[string]interface{}{
		"/redfish/v1/Systems/1/GraphicsControllers": map[string]interface{}{
			"Members": []interface{}{
				map[string]interface{}{"@odata.id": "/redfish/v1/Systems/1/GraphicsControllers/GPU0"},
				map[string]interface{}{"@odata.id": "/redfish/v1/Systems/1/GraphicsControllers/GPU1"},
			},
		},
		"/redfish/v1/Systems/1/GraphicsControllers/GPU0": map[string]interface{}{
			"Id":           "GPU0",
			"Name":         "H100-PCIE-80G",
			"Model":        "H100-PCIE-80G",
			"Manufacturer": "NVIDIA",
			"SerialNumber": "N/A",
		},
		"/redfish/v1/Systems/1/GraphicsControllers/GPU1": map[string]interface{}{
			"Id":           "GPU1",
			"Name":         "H100-PCIE-80G",
			"Model":        "H100-PCIE-80G",
			"Manufacturer": "NVIDIA",
			"SerialNumber": "N/A",
		},
	}}
	got := r.collectGPUs([]string{"/redfish/v1/Systems/1"}, nil)
	if len(got) != 2 {
		t.Fatalf("expected both GPUs to be kept by unique redfish path, got %d", len(got))
	}
}
// TestParseGPU_UsesNestedOemSerialNumber verifies that parseGPU discards a
// placeholder top-level SerialNumber ("N/A") in favor of the serial nested
// under the Oem object.
func TestParseGPU_UsesNestedOemSerialNumber(t *testing.T) {
	doc := map[string]interface{}{
		"Id":           "GPU4",
		"Name":         "H100-PCIE-80G",
		"Model":        "H100-PCIE-80G",
		"Manufacturer": "NVIDIA",
		"SerialNumber": "N/A",
		"Oem": map[string]interface{}{
			"SerialNumber": "1794024010533",
		},
	}
	got := parseGPU(doc, nil, 1)
	if got.SerialNumber != "1794024010533" {
		t.Fatalf("expected nested OEM serial number, got %q", got.SerialNumber)
	}
}
func TestParseBoardInfoWithFallback_UsesFRU(t *testing.T) { func TestParseBoardInfoWithFallback_UsesFRU(t *testing.T) {
system := map[string]interface{}{ system := map[string]interface{}{
"Manufacturer": "NULL", "Manufacturer": "NULL",
@@ -769,6 +950,49 @@ func TestReplayCollectGPUs_DropsModelOnlyPlaceholderWhenConcreteDiscoveredLater(
} }
} }
// TestReplayCollectGPUs_MergesGraphicsSerialIntoConcretePCIeGPU verifies
// that a generic GraphicsControllers placeholder row (slot equals model, no
// BDF) donates its OEM serial to the matching concrete PCIe GPU row (same
// model, has BDF, no serial), and the placeholder itself is dropped.
func TestReplayCollectGPUs_MergesGraphicsSerialIntoConcretePCIeGPU(t *testing.T) {
	r := redfishSnapshotReader{tree: map[string]interface{}{
		// Placeholder GPU row: carries the real serial under Oem but no BDF.
		"/redfish/v1/Systems/1/GraphicsControllers": map[string]interface{}{
			"Members": []interface{}{
				map[string]interface{}{"@odata.id": "/redfish/v1/Systems/1/GraphicsControllers/GPU4"},
			},
		},
		"/redfish/v1/Systems/1/GraphicsControllers/GPU4": map[string]interface{}{
			"Id":           "4",
			"Name":         "H100-PCIE-80G",
			"Model":        "H100-PCIE-80G",
			"Manufacturer": "NVIDIA",
			"Oem": map[string]interface{}{
				"SerialNumber": "1794024010533",
			},
		},
		// Concrete PCIe row: has a BDF but only a placeholder serial.
		"/redfish/v1/Chassis/1/PCIeDevices": map[string]interface{}{
			"Members": []interface{}{
				map[string]interface{}{"@odata.id": "/redfish/v1/Chassis/1/PCIeDevices/8"},
			},
		},
		"/redfish/v1/Chassis/1/PCIeDevices/8": map[string]interface{}{
			"Id":           "8",
			"Name":         "PCIeCard8",
			"Model":        "H100-PCIE-80G",
			"Manufacturer": "NVIDIA",
			"SerialNumber": "N/A",
			"BDF":          "0000:b1:00.0",
		},
	}}
	got := r.collectGPUs([]string{"/redfish/v1/Systems/1"}, []string{"/redfish/v1/Chassis/1"})
	if len(got) != 1 {
		t.Fatalf("expected merged single GPU row, got %d", len(got))
	}
	if got[0].Slot != "PCIeCard8" {
		t.Fatalf("expected concrete PCIe slot, got %q", got[0].Slot)
	}
	if got[0].SerialNumber != "1794024010533" {
		t.Fatalf("expected merged serial from graphics controller, got %q", got[0].SerialNumber)
	}
}
func TestShouldCrawlPath_MemorySubresourcesAreSkipped(t *testing.T) { func TestShouldCrawlPath_MemorySubresourcesAreSkipped(t *testing.T) {
if !shouldCrawlPath("/redfish/v1/Systems/1/Memory/CPU0_C0D0") { if !shouldCrawlPath("/redfish/v1/Systems/1/Memory/CPU0_C0D0") {
t.Fatalf("expected direct DIMM resource to be crawlable") t.Fatalf("expected direct DIMM resource to be crawlable")

View File

@@ -46,10 +46,21 @@ func ParseComponentLogEvents(content []byte) []models.Event {
// Parse RESTful Memory info for Warning/Error status // Parse RESTful Memory info for Warning/Error status
memEvents := parseMemoryEvents(text) memEvents := parseMemoryEvents(text)
events = append(events, memEvents...) events = append(events, memEvents...)
events = append(events, parseFanEvents(text)...)
return events return events
} }
// ParseComponentLogSensors extracts sensor readings from component.log JSON
// sections. It aggregates the fan, disk-backplane, and PSU summary readings
// parsed out of the log's embedded RESTful info blobs.
func ParseComponentLogSensors(content []byte) []models.SensorReading {
	text := string(content)
	readings := parseFanSensors(text)
	readings = append(readings, parseDiskBackplaneSensors(text)...)
	readings = append(readings, parsePSUSummarySensors(text)...)
	return readings
}
// MemoryRESTInfo represents the RESTful Memory info structure // MemoryRESTInfo represents the RESTful Memory info structure
type MemoryRESTInfo struct { type MemoryRESTInfo struct {
MemModules []struct { MemModules []struct {
@@ -224,6 +235,30 @@ func parseHDDInfo(text string, hw *models.HardwareConfig) {
} }
} }
// Merge into existing inventory first (asset/other sections).
for i := range hw.Storage {
slot := strings.TrimSpace(hw.Storage[i].Slot)
if slot == "" {
continue
}
detail, ok := hddMap[slot]
if !ok {
continue
}
if hw.Storage[i].SerialNumber == "" {
hw.Storage[i].SerialNumber = detail.SN
}
if hw.Storage[i].Model == "" {
hw.Storage[i].Model = detail.Model
}
if hw.Storage[i].Firmware == "" {
hw.Storage[i].Firmware = detail.Firmware
}
if hw.Storage[i].Manufacturer == "" {
hw.Storage[i].Manufacturer = detail.Mfr
}
}
// If storage is empty, populate from HDD info // If storage is empty, populate from HDD info
if len(hw.Storage) == 0 { if len(hw.Storage) == 0 {
for _, hdd := range hddInfo { for _, hdd := range hddInfo {
@@ -255,6 +290,22 @@ func parseHDDInfo(text string, hw *models.HardwareConfig) {
} }
} }
// FanRESTInfo represents the RESTful fan info structure.
// It mirrors the JSON blob that follows the "RESTful fan info:" marker in
// component.log.
type FanRESTInfo struct {
	// Fans lists per-fan inventory and telemetry as reported by the BMC.
	Fans []struct {
		ID           int    `json:"id"`            // numeric fan index
		FanName      string `json:"fan_name"`      // display name; may be empty
		Present      string `json:"present"`       // presence indicator string
		Status       string `json:"status"`        // raw status code string
		StatusStr    string `json:"status_str"`    // human-readable status
		SpeedRPM     int    `json:"speed_rpm"`     // current speed in RPM
		SpeedPercent int    `json:"speed_percent"` // current duty cycle percent
		MaxSpeedRPM  int    `json:"max_speed_rpm"` // rated maximum speed in RPM
		FanModel     string `json:"fan_model"`     // fan hardware model
	} `json:"fans"`
	// FansPower is the aggregate fan power draw in watts.
	FansPower int `json:"fans_power"`
}
// NetworkAdapterRESTInfo represents the RESTful Network Adapter info structure // NetworkAdapterRESTInfo represents the RESTful Network Adapter info structure
type NetworkAdapterRESTInfo struct { type NetworkAdapterRESTInfo struct {
SysAdapters []struct { SysAdapters []struct {
@@ -335,6 +386,213 @@ func parseNetworkAdapterInfo(text string, hw *models.HardwareConfig) {
} }
} }
func parseFanSensors(text string) []models.SensorReading {
re := regexp.MustCompile(`RESTful fan info:\s*(\{[\s\S]*?\})\s*RESTful diskbackplane`)
match := re.FindStringSubmatch(text)
if match == nil {
return nil
}
jsonStr := strings.ReplaceAll(match[1], "\n", "")
var fanInfo FanRESTInfo
if err := json.Unmarshal([]byte(jsonStr), &fanInfo); err != nil {
return nil
}
out := make([]models.SensorReading, 0, len(fanInfo.Fans)+1)
for _, fan := range fanInfo.Fans {
name := strings.TrimSpace(fan.FanName)
if name == "" {
name = fmt.Sprintf("FAN%d", fan.ID)
}
status := normalizeComponentStatus(fan.StatusStr, fan.Status, fan.Present)
raw := fmt.Sprintf("rpm=%d pct=%d model=%s max_rpm=%d", fan.SpeedRPM, fan.SpeedPercent, fan.FanModel, fan.MaxSpeedRPM)
out = append(out, models.SensorReading{
Name: name,
Type: "fan_speed",
Value: float64(fan.SpeedRPM),
Unit: "RPM",
RawValue: raw,
Status: status,
})
}
if fanInfo.FansPower > 0 {
out = append(out, models.SensorReading{
Name: "Fans_Power",
Type: "power",
Value: float64(fanInfo.FansPower),
Unit: "W",
RawValue: fmt.Sprintf("%d", fanInfo.FansPower),
Status: "OK",
})
}
return out
}
func parseFanEvents(text string) []models.Event {
re := regexp.MustCompile(`RESTful fan info:\s*(\{[\s\S]*?\})\s*RESTful diskbackplane`)
match := re.FindStringSubmatch(text)
if match == nil {
return nil
}
jsonStr := strings.ReplaceAll(match[1], "\n", "")
var fanInfo FanRESTInfo
if err := json.Unmarshal([]byte(jsonStr), &fanInfo); err != nil {
return nil
}
var events []models.Event
for _, fan := range fanInfo.Fans {
status := normalizeComponentStatus(fan.StatusStr, fan.Status, fan.Present)
if isHealthyComponentStatus(status) {
continue
}
name := strings.TrimSpace(fan.FanName)
if name == "" {
name = fmt.Sprintf("FAN%d", fan.ID)
}
severity := models.SeverityWarning
lowStatus := strings.ToLower(status)
if strings.Contains(lowStatus, "critical") || strings.Contains(lowStatus, "fail") || strings.Contains(lowStatus, "error") {
severity = models.SeverityCritical
}
events = append(events, models.Event{
ID: fmt.Sprintf("fan_%d_status", fan.ID),
Timestamp: time.Now(),
Source: "Fan",
SensorType: "fan",
SensorName: name,
EventType: "Fan Status",
Severity: severity,
Description: fmt.Sprintf("%s reports %s", name, status),
RawData: fmt.Sprintf("rpm=%d pct=%d model=%s", fan.SpeedRPM, fan.SpeedPercent, fan.FanModel),
})
}
return events
}
// diskBackplaneSectionRe captures the JSON array following the
// "RESTful diskbackplane info:" marker, up to the next "BMC" section.
// Compiled once at package scope instead of on every call.
var diskBackplaneSectionRe = regexp.MustCompile(`RESTful diskbackplane info:\s*(\[[\s\S]*?\])\s*BMC`)

// parseDiskBackplaneSensors converts the RESTful diskbackplane info
// section into one temperature reading per present backplane. A
// non-positive temperature is reported with status "unknown" instead of
// "OK". Returns nil when the section is missing or malformed.
func parseDiskBackplaneSensors(text string) []models.SensorReading {
	match := diskBackplaneSectionRe.FindStringSubmatch(text)
	if match == nil {
		return nil
	}
	var backplaneInfo DiskBackplaneRESTInfo
	// The blob may span multiple lines; strip newlines before decoding.
	if err := json.Unmarshal([]byte(strings.ReplaceAll(match[1], "\n", "")), &backplaneInfo); err != nil {
		return nil
	}
	out := make([]models.SensorReading, 0, len(backplaneInfo))
	for _, bp := range backplaneInfo {
		// Skip backplanes the BMC reports as absent.
		if bp.Present != 1 {
			continue
		}
		status := "OK"
		if bp.Temperature <= 0 {
			status = "unknown"
		}
		raw := fmt.Sprintf("front=%d ports=%d drives=%d cpld=%s", bp.Front, bp.PortCount, bp.DriverCount, bp.CPLDVersion)
		out = append(out, models.SensorReading{
			Name:     fmt.Sprintf("Backplane%d_Temp", bp.BackplaneIndex),
			Type:     "temperature",
			Value:    float64(bp.Temperature),
			Unit:     "C",
			RawValue: raw,
			Status:   status,
		})
	}
	return out
}
// psuInfoSectionRe captures the JSON object following the
// "RESTful PSU info:" marker, up to the next "RESTful Network" section.
// Compiled once at package scope instead of on every call.
var psuInfoSectionRe = regexp.MustCompile(`RESTful PSU info:\s*(\{[\s\S]*?\})\s*RESTful Network`)

// parsePSUSummarySensors converts the RESTful PSU info section into sensor
// readings: an overall present-power reading (when positive) plus, for each
// present power supply, its input power, output power, and maximum
// temperature. Returns nil when the section is missing or malformed.
func parsePSUSummarySensors(text string) []models.SensorReading {
	match := psuInfoSectionRe.FindStringSubmatch(text)
	if match == nil {
		return nil
	}
	var psuInfo PSURESTInfo
	// The blob may span multiple lines; strip newlines before decoding.
	if err := json.Unmarshal([]byte(strings.ReplaceAll(match[1], "\n", "")), &psuInfo); err != nil {
		return nil
	}
	// Up to three readings per PSU plus the aggregate power reading.
	out := make([]models.SensorReading, 0, len(psuInfo.PowerSupplies)*3+1)
	if psuInfo.PresentPowerReading > 0 {
		out = append(out, models.SensorReading{
			Name:     "PSU_Present_Power_Reading",
			Type:     "power",
			Value:    float64(psuInfo.PresentPowerReading),
			Unit:     "W",
			RawValue: fmt.Sprintf("%d", psuInfo.PresentPowerReading),
			Status:   "OK",
		})
	}
	for _, psu := range psuInfo.PowerSupplies {
		// Skip power supplies the BMC reports as absent.
		if psu.Present != 1 {
			continue
		}
		status := normalizeComponentStatus(psu.Status)
		out = append(out, models.SensorReading{
			Name:     fmt.Sprintf("PSU%d_InputPower", psu.ID),
			Type:     "power",
			Value:    float64(psu.PSInPower),
			Unit:     "W",
			RawValue: fmt.Sprintf("%d", psu.PSInPower),
			Status:   status,
		})
		out = append(out, models.SensorReading{
			Name:     fmt.Sprintf("PSU%d_OutputPower", psu.ID),
			Type:     "power",
			Value:    float64(psu.PSOutPower),
			Unit:     "W",
			RawValue: fmt.Sprintf("%d", psu.PSOutPower),
			Status:   status,
		})
		out = append(out, models.SensorReading{
			Name:     fmt.Sprintf("PSU%d_Temp", psu.ID),
			Type:     "temperature",
			Value:    float64(psu.PSUMaxTemp),
			Unit:     "C",
			RawValue: fmt.Sprintf("%d", psu.PSUMaxTemp),
			Status:   status,
		})
	}
	return out
}
// normalizeComponentStatus returns the first non-blank candidate status,
// trimmed of surrounding whitespace, or "unknown" when every candidate
// is empty or whitespace-only.
func normalizeComponentStatus(values ...string) string {
	for _, candidate := range values {
		if trimmed := strings.TrimSpace(candidate); trimmed != "" {
			return trimmed
		}
	}
	return "unknown"
}
// isHealthyComponentStatus reports whether the given status string,
// after trimming and lowercasing, denotes a healthy component. An empty
// status is treated as healthy (no problem reported).
func isHealthyComponentStatus(status string) bool {
	normalized := strings.ToLower(strings.TrimSpace(status))
	for _, healthy := range []string{"", "ok", "normal", "present", "enabled"} {
		if normalized == healthy {
			return true
		}
	}
	return false
}
var rawDeviceIDLikeRegex = regexp.MustCompile(`(?i)^(?:0x)?[0-9a-f]{3,4}$`) var rawDeviceIDLikeRegex = regexp.MustCompile(`(?i)^(?:0x)?[0-9a-f]{3,4}$`)
func looksLikeRawDeviceID(v string) bool { func looksLikeRawDeviceID(v string) bool {

View File

@@ -50,3 +50,117 @@ RESTful fan`
t.Fatalf("expected NIC vendor resolved from pci.ids") t.Fatalf("expected NIC vendor resolved from pci.ids")
} }
} }
// Verifies that ParseComponentLogSensors pulls fan speed, backplane
// temperature and PSU summary readings out of a component.log excerpt.
func TestParseComponentLogSensors_ExtractsFanBackplaneAndPSUSummary(t *testing.T) {
	text := `RESTful PSU info:
{
"power_supplies": [
{ "id": 0, "present": 1, "status": "OK", "ps_in_power": 123, "ps_out_power": 110, "psu_max_temperature": 41 }
],
"present_power_reading": 999
}
RESTful Network Adapter info:
{ "sys_adapters": [] }
RESTful fan info:
{
"fans": [
{ "id": 1, "fan_name": "FAN0_F_Speed", "present": "OK", "status": "OK", "status_str": "OK", "speed_rpm": 9200, "speed_percent": 35, "max_speed_rpm": 20000, "fan_model": "6056" }
],
"fans_power": 33
}
RESTful diskbackplane info:
[
{ "port_count": 8, "driver_count": 4, "front": 1, "backplane_index": 0, "present": 1, "cpld_version": "3.1", "temperature": 18 }
]
BMC`
	sensors := ParseComponentLogSensors([]byte(text))
	if len(sensors) == 0 {
		t.Fatalf("expected sensors from component.log, got none")
	}
	// Index parsed sensors by name for constant-time presence checks.
	byName := make(map[string]struct{}, len(sensors))
	for _, reading := range sensors {
		byName[reading.Name] = struct{}{}
	}
	if _, ok := byName["FAN0_F_Speed"]; !ok {
		t.Fatalf("expected FAN0_F_Speed sensor in parsed output")
	}
	if _, ok := byName["Backplane0_Temp"]; !ok {
		t.Fatalf("expected Backplane0_Temp sensor in parsed output")
	}
	if _, ok := byName["PSU_Present_Power_Reading"]; !ok {
		t.Fatalf("expected PSU_Present_Power_Reading sensor in parsed output")
	}
}
// Verifies that a fan reporting a Critical status produces exactly one
// critical "Fan Status" event from ParseComponentLogEvents.
func TestParseComponentLogEvents_FanCriticalStatus(t *testing.T) {
	text := `RESTful fan info:
{
"fans": [
{ "id": 7, "fan_name": "FAN3_R_Speed", "present": "OK", "status": "Critical", "status_str": "Critical", "speed_rpm": 0, "speed_percent": 0, "max_speed_rpm": 20000, "fan_model": "6056" }
],
"fans_power": 0
}
RESTful diskbackplane info:
[]
BMC`
	events := ParseComponentLogEvents([]byte(text))
	if len(events) != 1 {
		t.Fatalf("expected 1 fan event, got %d", len(events))
	}
	event := events[0]
	if event.EventType != "Fan Status" {
		t.Fatalf("expected Fan Status event type, got %q", event.EventType)
	}
	if event.Severity != models.SeverityCritical {
		t.Fatalf("expected critical severity, got %q", event.Severity)
	}
}
// Verifies that parseHDDInfo enriches an already-discovered storage
// entry (matched by slot) with serial, model and firmware data instead
// of appending a duplicate.
func TestParseHDDInfo_MergesIntoExistingStorage(t *testing.T) {
	text := `RESTful HDD info:
[
{
"id": 1,
"present": 1,
"enable": 1,
"SN": "SER123",
"model": "Sample SSD",
"capacity": 1024,
"manufacture": "ACME",
"firmware": "1.0.0",
"locationstring": "OB01",
"capablespeed": 6
}
]
RESTful PSU`
	hw := &models.HardwareConfig{
		Storage: []models.Storage{
			{
				Slot: "OB01",
				Type: "SSD",
			},
		},
	}
	parseHDDInfo(text, hw)
	if len(hw.Storage) != 1 {
		t.Fatalf("expected 1 storage item, got %d", len(hw.Storage))
	}
	disk := hw.Storage[0]
	if disk.SerialNumber != "SER123" {
		t.Fatalf("expected serial from HDD section, got %q", disk.SerialNumber)
	}
	if disk.Model != "Sample SSD" {
		t.Fatalf("expected model from HDD section, got %q", disk.Model)
	}
	if disk.Firmware != "1.0.0" {
		t.Fatalf("expected firmware from HDD section, got %q", disk.Firmware)
	}
}

View File

@@ -15,7 +15,7 @@ import (
// parserVersion - version of this parser module // parserVersion - version of this parser module
// IMPORTANT: Increment this version when making changes to parser logic! // IMPORTANT: Increment this version when making changes to parser logic!
const parserVersion = "1.2.1" const parserVersion = "1.3.0"
func init() { func init() {
parser.Register(&Parser{}) parser.Register(&Parser{})
@@ -123,6 +123,11 @@ func (p *Parser) Parse(files []parser.ExtractedFile) (*models.AnalysisResult, er
// Extract events from component.log (memory errors, etc.) // Extract events from component.log (memory errors, etc.)
componentEvents := ParseComponentLogEvents(f.Content) componentEvents := ParseComponentLogEvents(f.Content)
result.Events = append(result.Events, componentEvents...) result.Events = append(result.Events, componentEvents...)
// Extract additional telemetry sensors from component.log sections
// (fan RPM, backplane temperature, PSU summary power, etc.).
componentSensors := ParseComponentLogSensors(f.Content)
result.Sensors = mergeSensorReadings(result.Sensors, componentSensors)
} }
// Enrich runtime component data from Redis snapshot (serials, FW, telemetry), // Enrich runtime component data from Redis snapshot (serials, FW, telemetry),
@@ -262,3 +267,38 @@ func extractSlotNumberFromGPU(slot string) int {
} }
return 0 return 0
} }
// mergeSensorReadings appends the extra readings onto a copy of base,
// skipping any extra whose merge key (normalized name) already exists.
// Readings with an empty key are always appended. The base slice is
// never mutated; when extra is empty, base is returned unchanged.
func mergeSensorReadings(base, extra []models.SensorReading) []models.SensorReading {
	if len(extra) == 0 {
		return base
	}
	merged := make([]models.SensorReading, len(base), len(base)+len(extra))
	copy(merged, base)
	known := make(map[string]struct{}, len(merged))
	for _, reading := range merged {
		if k := sensorMergeKey(reading); k != "" {
			known[k] = struct{}{}
		}
	}
	for _, reading := range extra {
		k := sensorMergeKey(reading)
		if k == "" {
			// Unnamed readings cannot be deduplicated; keep them all.
			merged = append(merged, reading)
			continue
		}
		if _, dup := known[k]; dup {
			continue
		}
		known[k] = struct{}{}
		merged = append(merged, reading)
	}
	return merged
}
// sensorMergeKey returns the deduplication key for a sensor reading:
// its name, trimmed and lowercased. An empty result means "no key".
func sensorMergeKey(s models.SensorReading) string {
	return strings.ToLower(strings.TrimSpace(s.Name))
}

View File

@@ -242,6 +242,28 @@ main {
font-size: 0.9rem; font-size: 0.9rem;
} }
.job-progress {
height: 22px;
border-radius: 999px;
border: 1px solid #cbd5e1;
background: #e2e8f0;
overflow: hidden;
margin-bottom: 0.8rem;
}
.job-progress-bar {
height: 100%;
min-width: 2.5rem;
background: linear-gradient(90deg, #2563eb, #0ea5e9);
color: #fff;
font-size: 0.78rem;
font-weight: 700;
display: flex;
align-items: center;
justify-content: center;
transition: width 0.25s ease;
}
.meta-label { .meta-label {
color: #64748b; color: #64748b;
font-weight: 600; font-weight: 600;

View File

@@ -334,9 +334,11 @@ function renderCollectionJob() {
const jobIdValue = document.getElementById('job-id-value'); const jobIdValue = document.getElementById('job-id-value');
const statusValue = document.getElementById('job-status-value'); const statusValue = document.getElementById('job-status-value');
const progressValue = document.getElementById('job-progress-value'); const progressValue = document.getElementById('job-progress-value');
const etaValue = document.getElementById('job-eta-value');
const progressBar = document.getElementById('job-progress-bar');
const logsList = document.getElementById('job-logs-list'); const logsList = document.getElementById('job-logs-list');
const cancelButton = document.getElementById('cancel-job-btn'); const cancelButton = document.getElementById('cancel-job-btn');
if (!jobStatusBlock || !jobIdValue || !statusValue || !progressValue || !logsList || !cancelButton) { if (!jobStatusBlock || !jobIdValue || !statusValue || !progressValue || !etaValue || !progressBar || !logsList || !cancelButton) {
return; return;
} }
@@ -356,12 +358,16 @@ function renderCollectionJob() {
failed: 'Сбор завершился ошибкой', failed: 'Сбор завершился ошибкой',
canceled: 'Сбор отменен' canceled: 'Сбор отменен'
}[collectionJob.status]; }[collectionJob.status];
const progressLabel = isTerminal const activity = isTerminal ? terminalMessage : latestCollectionActivityMessage();
? terminalMessage const eta = isTerminal ? '-' : latestCollectionETA();
: latestCollectionActivityMessage(); const progressPercent = Math.max(0, Math.min(100, Number(collectionJob.progress) || 0));
progressValue.textContent = `${collectionJob.progress}% · ${progressLabel}`;
logsList.innerHTML = collectionJob.logs.map((log) => ( progressValue.textContent = activity;
etaValue.textContent = eta;
progressBar.style.width = `${progressPercent}%`;
progressBar.textContent = `${progressPercent}%`;
logsList.innerHTML = [...collectionJob.logs].reverse().map((log) => (
`<li><span class="log-time">${escapeHtml(log.time)}</span><span class="log-message">${escapeHtml(log.message)}</span></li>` `<li><span class="log-time">${escapeHtml(log.time)}</span><span class="log-message">${escapeHtml(log.message)}</span></li>`
)).join(''); )).join('');
@@ -379,7 +385,27 @@ function latestCollectionActivityMessage() {
} }
// Job logs already contain server timestamp prefix. Show concise step text in progress label. // Job logs already contain server timestamp prefix. Show concise step text in progress label.
const cleaned = last.replace(/^\d{4}-\d{2}-\d{2}T[^\s]+\s+/, '').trim(); const cleaned = last.replace(/^\d{4}-\d{2}-\d{2}T[^\s]+\s+/, '').trim();
return cleaned || 'Сбор данных...'; if (!cleaned) {
return 'Сбор данных...';
}
return cleaned.replace(/\s*[,(]?\s*ETA[^,;)]*/i, '').trim() || 'Сбор данных...';
}
function latestCollectionETA() {
if (!collectionJob || !Array.isArray(collectionJob.logs) || collectionJob.logs.length === 0) {
return '-';
}
const last = String(collectionJob.logs[collectionJob.logs.length - 1].message || '').trim();
const cleaned = last.replace(/^\d{4}-\d{2}-\d{2}T[^\s]+\s+/, '').trim();
if (!cleaned) {
return '-';
}
const match = cleaned.match(/ETA[^,;)]*/i);
if (!match) {
return '-';
}
const eta = match[0].replace(/^ETA\s*[:=~≈-]?\s*/i, '').trim();
return eta || '-';
} }
function isCollectionJobTerminal(status) { function isCollectionJobTerminal(status) {

View File

@@ -78,7 +78,11 @@
<span class="meta-label">Статус:</span> <span class="meta-label">Статус:</span>
<span id="job-status-value" class="job-status-badge">Queued</span> <span id="job-status-value" class="job-status-badge">Queued</span>
</div> </div>
<div><span class="meta-label">Прогресс:</span> <span id="job-progress-value">0% · Шаг 0 из 4</span></div> <div><span class="meta-label">Этап:</span> <span id="job-progress-value">Сбор данных...</span></div>
<div><span class="meta-label">ETA:</span> <span id="job-eta-value">-</span></div>
</div>
<div class="job-progress" aria-label="Прогресс задачи">
<div id="job-progress-bar" class="job-progress-bar" style="width: 0%">0%</div>
</div> </div>
<div class="job-status-logs"> <div class="job-status-logs">
<p class="meta-label">Журнал шагов:</p> <p class="meta-label">Журнал шагов:</p>