sync file-type support across upload/convert and fix collected_at timezone handling
internal/server/file_support_test.go (new file, +17 lines)
@@ -0,0 +1,17 @@
+package server
+
+import "testing"
+
+func TestIsSupportedConvertFileName_AcceptsNvidiaBugReportGzip(t *testing.T) {
+	if !isSupportedConvertFileName("nvidia-bug-report-1651124000923.log.gz") {
+		t.Fatalf("expected .log.gz bug-report to be supported")
+	}
+}
+
+func TestAnalyzeUploadedFile_RejectsUnsupportedExtension(t *testing.T) {
+	s := &Server{}
+	_, _, _, err := s.analyzeUploadedFile("unsupported.bin", "application/octet-stream", []byte("abc"))
+	if err == nil {
+		t.Fatalf("expected unsupported archive error")
+	}
+}
internal/server/file_types_test.go (new file, +46 lines)
@@ -0,0 +1,46 @@
+package server
+
+import (
+	"encoding/json"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+)
+
+func TestHandleGetFileTypes(t *testing.T) {
+	s := &Server{}
+	req := httptest.NewRequest(http.MethodGet, "/api/file-types", nil)
+	rec := httptest.NewRecorder()
+
+	s.handleGetFileTypes(rec, req)
+	if rec.Code != http.StatusOK {
+		t.Fatalf("expected 200, got %d", rec.Code)
+	}
+
+	var payload struct {
+		ArchiveExtensions []string `json:"archive_extensions"`
+		UploadExtensions  []string `json:"upload_extensions"`
+		ConvertExtensions []string `json:"convert_extensions"`
+	}
+	if err := json.NewDecoder(rec.Body).Decode(&payload); err != nil {
+		t.Fatalf("decode payload: %v", err)
+	}
+	if len(payload.ArchiveExtensions) == 0 || len(payload.UploadExtensions) == 0 || len(payload.ConvertExtensions) == 0 {
+		t.Fatalf("expected non-empty extensions in payload")
+	}
+	if !containsString(payload.ArchiveExtensions, ".gz") {
+		t.Fatalf("expected .gz in archive extensions")
+	}
+	if !containsString(payload.UploadExtensions, ".json") || !containsString(payload.ConvertExtensions, ".json") {
+		t.Fatalf("expected .json in upload/convert extensions")
+	}
+}
+
+func containsString(items []string, target string) bool {
+	for _, item := range items {
+		if item == target {
+			return true
+		}
+	}
+	return false
+}
@@ -138,6 +138,9 @@ func (s *Server) analyzeUploadedFile(filename, mimeType string, payload []byte)
 		}
 		return snapshotResult, vendor, newRawExportFromUploadedFile(filename, mimeType, payload, snapshotResult), nil
 	}
+	if !parser.IsSupportedArchiveFilename(filename) {
+		return nil, "", nil, fmt.Errorf("unsupported archive format: %s", strings.ToLower(filepath.Ext(filename)))
+	}

 	p := parser.NewBMCParser()
 	if err := p.ParseFromReader(bytes.NewReader(payload), filename); err != nil {
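Aside: the rejection branch above reports the offending extension in lowercase. A self-contained sketch of that error construction (the `unsupportedArchiveErr` wrapper is illustrative, not a repo function):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// unsupportedArchiveErr mirrors the error construction in the hunk above;
// the wrapper itself is hypothetical, added only for illustration.
func unsupportedArchiveErr(filename string) error {
	return fmt.Errorf("unsupported archive format: %s", strings.ToLower(filepath.Ext(filename)))
}

func main() {
	// Prints: unsupported archive format: .bin
	fmt.Println(unsupportedArchiveErr("unsupported.BIN"))
}
```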
@@ -208,9 +211,10 @@ func (s *Server) reanalyzeRawExportPackage(pkg *RawExportPackage) (*models.Analy
 	if strings.TrimSpace(result.TargetHost) == "" {
 		result.TargetHost = strings.TrimSpace(pkg.Source.TargetHost)
 	}
-	if result.CollectedAt.IsZero() {
-		result.CollectedAt = time.Now().UTC()
+	if strings.TrimSpace(result.SourceTimezone) == "" {
+		result.SourceTimezone = strings.TrimSpace(pkg.Source.SourceTimezone)
 	}
+	result.CollectedAt = inferRawExportCollectedAt(result, pkg)
 	if strings.TrimSpace(result.Filename) == "" {
 		target := result.TargetHost
 		if target == "" {
@@ -253,6 +257,39 @@ func (s *Server) handleGetParsers(w http.ResponseWriter, r *http.Request) {
 	})
 }

+func (s *Server) handleGetFileTypes(w http.ResponseWriter, r *http.Request) {
+	archiveExt := parser.SupportedArchiveExtensions()
+	uploadExt := append([]string{}, archiveExt...)
+	uploadExt = append(uploadExt, ".json")
+
+	jsonResponse(w, map[string]any{
+		"archive_extensions": archiveExt,
+		"upload_extensions":  uniqueSortedExtensions(uploadExt),
+		"convert_extensions": uniqueSortedExtensions(uploadExt),
+	})
+}
+
+func uniqueSortedExtensions(exts []string) []string {
+	if len(exts) == 0 {
+		return nil
+	}
+	seen := make(map[string]struct{}, len(exts))
+	out := make([]string, 0, len(exts))
+	for _, e := range exts {
+		e = strings.ToLower(strings.TrimSpace(e))
+		if e == "" {
+			continue
+		}
+		if _, ok := seen[e]; ok {
+			continue
+		}
+		seen[e] = struct{}{}
+		out = append(out, e)
+	}
+	sort.Strings(out)
+	return out
+}
+
 func (s *Server) handleGetEvents(w http.ResponseWriter, r *http.Request) {
 	result := s.GetResult()
 	if result == nil {
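For reference, `uniqueSortedExtensions` normalizes before deduplicating, so mixed-case and padded entries collapse to one sorted list. A runnable sketch (function body copied from the hunk above; only the `main` wrapper is new):

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// uniqueSortedExtensions, as added above: lowercases, trims, drops
// empties and duplicates, and returns the remainder sorted.
func uniqueSortedExtensions(exts []string) []string {
	if len(exts) == 0 {
		return nil
	}
	seen := make(map[string]struct{}, len(exts))
	out := make([]string, 0, len(exts))
	for _, e := range exts {
		e = strings.ToLower(strings.TrimSpace(e))
		if e == "" {
			continue
		}
		if _, ok := seen[e]; ok {
			continue
		}
		seen[e] = struct{}{}
		out = append(out, e)
	}
	sort.Strings(out)
	return out
}

func main() {
	// Prints: [.gz .json .tar .zip]
	fmt.Println(uniqueSortedExtensions([]string{".zip", ".TAR", " .gz", ".json", ".zip", ""}))
}
```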
@@ -324,10 +361,11 @@ func (s *Server) handleGetConfig(w http.ResponseWriter, r *http.Request) {
 	}

 	response := map[string]interface{}{
-		"source_type":  result.SourceType,
-		"protocol":     result.Protocol,
-		"target_host":  result.TargetHost,
-		"collected_at": result.CollectedAt,
+		"source_type":     result.SourceType,
+		"protocol":        result.Protocol,
+		"target_host":     result.TargetHost,
+		"source_timezone": result.SourceTimezone,
+		"collected_at":    result.CollectedAt,
 	}
 	if result.RawPayloads != nil {
 		if fetchErrors, ok := result.RawPayloads["redfish_fetch_errors"]; ok {
@@ -1012,13 +1050,14 @@ func (s *Server) handleGetStatus(w http.ResponseWriter, r *http.Request) {
 	}

 	jsonResponse(w, map[string]interface{}{
-		"loaded":       true,
-		"filename":     result.Filename,
-		"vendor":       s.GetDetectedVendor(),
-		"source_type":  result.SourceType,
-		"protocol":     result.Protocol,
-		"target_host":  result.TargetHost,
-		"collected_at": result.CollectedAt,
+		"loaded":          true,
+		"filename":        result.Filename,
+		"vendor":          s.GetDetectedVendor(),
+		"source_type":     result.SourceType,
+		"protocol":        result.Protocol,
+		"target_host":     result.TargetHost,
+		"source_timezone": result.SourceTimezone,
+		"collected_at":    result.CollectedAt,
 		"stats": map[string]int{
 			"events":  len(result.Events),
 			"sensors": len(result.Sensors),
@@ -1362,17 +1401,14 @@ func (s *Server) handleConvertDownload(w http.ResponseWriter, r *http.Request) {
 }

 func isSupportedConvertFileName(filename string) bool {
-	name := strings.ToLower(strings.TrimSpace(filename))
+	name := strings.TrimSpace(filename)
 	if name == "" {
 		return false
 	}
-	return strings.HasSuffix(name, ".zip") ||
-		strings.HasSuffix(name, ".tar") ||
-		strings.HasSuffix(name, ".tar.gz") ||
-		strings.HasSuffix(name, ".tgz") ||
-		strings.HasSuffix(name, ".json") ||
-		strings.HasSuffix(name, ".txt") ||
-		strings.HasSuffix(name, ".log")
+	if strings.HasSuffix(strings.ToLower(name), ".json") {
+		return true
+	}
+	return parser.IsSupportedArchiveFilename(name)
 }

 func sanitizeZipPath(filename string) string {
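The rewrite above replaces the local suffix list with a delegation to `parser.IsSupportedArchiveFilename`, keeping only the `.json` special case in this file, so upload and convert accept the same archives. A standalone sketch with the parser call stubbed out (the stub's suffix list is an assumption for illustration; the real list lives in the parser package, and the test at the top of this commit only guarantees that `.log.gz` bug-reports pass):

```go
package main

import (
	"fmt"
	"strings"
)

// isSupportedArchiveFilenameStub stands in for parser.IsSupportedArchiveFilename.
// The suffix list below is assumed, not taken from the repo.
func isSupportedArchiveFilenameStub(name string) bool {
	lower := strings.ToLower(name)
	for _, suffix := range []string{".zip", ".tar", ".tar.gz", ".tgz", ".gz", ".txt", ".log"} {
		if strings.HasSuffix(lower, suffix) {
			return true
		}
	}
	return false
}

// isSupportedConvertFileName mirrors the new logic in the hunk above,
// with the parser call swapped for the stub.
func isSupportedConvertFileName(filename string) bool {
	name := strings.TrimSpace(filename)
	if name == "" {
		return false
	}
	if strings.HasSuffix(strings.ToLower(name), ".json") {
		return true
	}
	return isSupportedArchiveFilenameStub(name)
}

func main() {
	// Prints: true true false
	fmt.Println(
		isSupportedConvertFileName("nvidia-bug-report-1651124000923.log.gz"),
		isSupportedConvertFileName("export.JSON"),
		isSupportedConvertFileName("unsupported.bin"),
	)
}
```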
@@ -1599,17 +1635,101 @@ func inferArchiveCollectedAt(result *models.AnalysisResult) time.Time {
 		return time.Now().UTC()
 	}

-	var latest time.Time
+	var latestReliable time.Time
+	var latestAny time.Time
 	for _, event := range result.Events {
 		if event.Timestamp.IsZero() {
 			continue
 		}
-		if latest.IsZero() || event.Timestamp.After(latest) {
-			latest = event.Timestamp
+		// Drop obviously bad epochs from broken RTC logs.
+		if event.Timestamp.Year() < 2000 {
+			continue
+		}
+		if latestAny.IsZero() || event.Timestamp.After(latestAny) {
+			latestAny = event.Timestamp
+		}
+		if !isReliableCollectedAtEvent(event) {
+			continue
+		}
+		if latestReliable.IsZero() || event.Timestamp.After(latestReliable) {
+			latestReliable = event.Timestamp
 		}
 	}
-	if !latest.IsZero() {
-		return latest.UTC()
+	if !latestReliable.IsZero() {
+		return latestReliable.UTC()
+	}
+	if !latestAny.IsZero() {
+		return latestAny.UTC()
+	}
+	if fromFilename, ok := inferCollectedAtFromFilename(result.Filename); ok {
+		return fromFilename.UTC()
 	}
 	return time.Now().UTC()
 }
+
+func isReliableCollectedAtEvent(event models.Event) bool {
+	// component.log-derived synthetic states are created "at parse time"
+	// and must not override real log timestamps.
+	src := strings.ToLower(strings.TrimSpace(event.Source))
+	etype := strings.ToLower(strings.TrimSpace(event.EventType))
+	stype := strings.ToLower(strings.TrimSpace(event.SensorType))
+	if etype == "fan status" && (src == "fan" || stype == "fan") {
+		return false
+	}
+	if etype == "memory status" && (src == "memory" || stype == "memory") {
+		return false
+	}
+	return true
+}
+
+var (
+	filenameDateTimeRegex = regexp.MustCompile(`(?i)(\d{8})[-_](\d{4})(\d{2})?`)
+	filenameDateRegex     = regexp.MustCompile(`(?i)(\d{4})-(\d{2})-(\d{2})`)
+)
+
+func inferCollectedAtFromFilename(name string) (time.Time, bool) {
+	base := strings.TrimSpace(filepath.Base(name))
+	if base == "" {
+		return time.Time{}, false
+	}
+
+	if m := filenameDateTimeRegex.FindStringSubmatch(base); len(m) == 4 {
+		datePart := m[1]
+		timePart := m[2]
+		if strings.TrimSpace(m[3]) != "" {
+			timePart += m[3]
+		} else {
+			timePart += "00"
+		}
+		if ts, err := parser.ParseInDefaultArchiveLocation("20060102 150405", datePart+" "+timePart); err == nil {
+			return ts, true
+		}
+	}
+
+	if m := filenameDateRegex.FindStringSubmatch(base); len(m) == 4 {
+		datePart := m[1] + "-" + m[2] + "-" + m[3]
+		if ts, err := parser.ParseInDefaultArchiveLocation("2006-01-02 15:04:05", datePart+" 00:00:00"); err == nil {
+			return ts, true
+		}
+	}
+
+	return time.Time{}, false
+}
+
+func inferRawExportCollectedAt(result *models.AnalysisResult, pkg *RawExportPackage) time.Time {
+	if result != nil && !result.CollectedAt.IsZero() {
+		return result.CollectedAt.UTC()
+	}
+	if pkg != nil {
+		if !pkg.CollectedAtHint.IsZero() {
+			return pkg.CollectedAtHint.UTC()
+		}
+		if _, finishedAt, ok := collectLogTimeBounds(pkg.Source.CollectLogs); ok {
+			return finishedAt.UTC()
+		}
+		if !pkg.ExportedAt.IsZero() {
+			return pkg.ExportedAt.UTC()
+		}
+	}
+	return time.Now().UTC()
+}
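The filename fallback matters for archives whose logs carry no usable timestamps at all. A standalone approximation of the date-time branch (`parser.ParseInDefaultArchiveLocation` is the repo's helper and is not reproduced here; the sketch assumes the default archive location is Europe/Moscow, which the `InferCollectedAtFromFilename` test below implies):

```go
package main

import (
	"fmt"
	"regexp"
	"time"
)

// Same pattern as filenameDateTimeRegex in the hunk above.
var filenameDateTimeRegex = regexp.MustCompile(`(?i)(\d{8})[-_](\d{4})(\d{2})?`)

func main() {
	// Assumption: the repo's default archive location is Europe/Moscow (UTC+3),
	// inferred from the test comment below, not from the parser package itself.
	loc, err := time.LoadLocation("Europe/Moscow")
	if err != nil {
		panic(err)
	}

	base := "dump_23E100203_20260228-0428.tar.gz"
	m := filenameDateTimeRegex.FindStringSubmatch(base)
	if len(m) != 4 {
		panic("no datetime in filename")
	}
	timePart := m[2]
	if m[3] != "" {
		timePart += m[3]
	} else {
		timePart += "00" // no seconds captured: assume :00
	}
	ts, err := time.ParseInLocation("20060102 150405", m[1]+" "+timePart, loc)
	if err != nil {
		panic(err)
	}
	// Prints: 2026-02-28 01:28:00 +0000 UTC
	fmt.Println(ts.UTC())
}
```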
@@ -1621,7 +1741,14 @@ func applyCollectSourceMetadata(result *models.AnalysisResult, req CollectReques
 	result.SourceType = models.SourceTypeAPI
 	result.Protocol = req.Protocol
 	result.TargetHost = req.Host
-	result.CollectedAt = time.Now().UTC()
+	if strings.TrimSpace(result.SourceTimezone) == "" && result.RawPayloads != nil {
+		if tz, ok := result.RawPayloads["source_timezone"].(string); ok {
+			result.SourceTimezone = strings.TrimSpace(tz)
+		}
+	}
+	if result.CollectedAt.IsZero() {
+		result.CollectedAt = time.Now().UTC()
+	}
 	if strings.TrimSpace(result.Filename) == "" {
 		result.Filename = fmt.Sprintf("%s://%s", req.Protocol, req.Host)
 	}
@@ -26,6 +26,9 @@ type RawExportPackage struct {
 	ExportedAt time.Time              `json:"exported_at"`
 	Source     RawExportSource        `json:"source"`
 	Analysis   *models.AnalysisResult `json:"analysis_result,omitempty"`
+	// CollectedAtHint is extracted from parser_fields.json when importing
+	// a raw-export bundle and represents original collection time.
+	CollectedAtHint time.Time `json:"-"`
 }

 type RawExportSource struct {
@@ -36,6 +39,7 @@ type RawExportSource struct {
 	Data           string              `json:"data,omitempty"`
 	Protocol       string              `json:"protocol,omitempty"`
 	TargetHost     string              `json:"target_host,omitempty"`
+	SourceTimezone string              `json:"source_timezone,omitempty"`
 	RawPayloads    map[string]any      `json:"raw_payloads,omitempty"`
 	CollectLogs    []string            `json:"collect_logs,omitempty"`
 	CollectMeta    *CollectRequestMeta `json:"collect_meta,omitempty"`
@@ -53,6 +57,7 @@ func newRawExportFromUploadedFile(filename, mimeType string, payload []byte, res
 			Data:           base64.StdEncoding.EncodeToString(payload),
 			Protocol:       resultProtocol(result),
 			TargetHost:     resultTargetHost(result),
+			SourceTimezone: resultSourceTimezone(result),
 		},
 	}
 }
@@ -79,6 +84,7 @@ func newRawExportFromLiveCollect(result *models.AnalysisResult, req CollectReque
 			Kind:           "live_redfish",
 			Protocol:       req.Protocol,
 			TargetHost:     req.Host,
+			SourceTimezone: resultSourceTimezone(result),
 			RawPayloads:    rawPayloads,
 			CollectLogs:    append([]string(nil), logs...),
 			CollectMeta:    &meta,
@@ -158,23 +164,62 @@ func parseRawExportBundle(payload []byte) (*RawExportPackage, bool, error) {
 	if err != nil {
 		return nil, false, nil
 	}
+	var pkgBody []byte
+	var parserFieldsBody []byte
+
 	for _, f := range zr.File {
-		if f.Name != rawExportBundlePackageFile {
+		if f.Name != rawExportBundlePackageFile && f.Name != rawExportBundleFieldsFile {
 			continue
 		}
 		rc, err := f.Open()
 		if err != nil {
 			return nil, true, err
 		}
-		defer rc.Close()
 		body, err := io.ReadAll(rc)
+		rc.Close()
 		if err != nil {
 			return nil, true, err
 		}
-		pkg, ok, err := parseRawExportPackage(body)
-		return pkg, ok, err
+		switch f.Name {
+		case rawExportBundlePackageFile:
+			pkgBody = body
+		case rawExportBundleFieldsFile:
+			parserFieldsBody = body
+		}
 	}
-	return nil, false, nil
+
+	if len(pkgBody) == 0 {
+		return nil, false, nil
+	}
+	pkg, ok, err := parseRawExportPackage(pkgBody)
+	if err != nil || !ok {
+		return pkg, ok, err
+	}
+	if ts, ok := parseCollectedAtHint(parserFieldsBody); ok {
+		pkg.CollectedAtHint = ts.UTC()
+	}
+	return pkg, true, nil
 }
+
+func parseCollectedAtHint(parserFieldsBody []byte) (time.Time, bool) {
+	if len(parserFieldsBody) == 0 {
+		return time.Time{}, false
+	}
+	var payload struct {
+		CollectedAt string `json:"collected_at"`
+	}
+	if err := json.Unmarshal(parserFieldsBody, &payload); err != nil {
+		return time.Time{}, false
+	}
+	collectedAt := strings.TrimSpace(payload.CollectedAt)
+	if collectedAt == "" {
+		return time.Time{}, false
+	}
+	ts, err := time.Parse(time.RFC3339Nano, collectedAt)
+	if err != nil {
+		return time.Time{}, false
+	}
+	return ts, true
+}

 func buildHumanReadableCollectionLog(pkg *RawExportPackage, result *models.AnalysisResult, clientVersion string) string {
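`parseCollectedAtHint` only accepts RFC 3339 timestamps; anything else falls through to the later hints. A runnable sketch of the same logic (body mirrored from the hunk above, `main` wrapper added):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"
)

// collectedAtHint mirrors parseCollectedAtHint from the hunk above.
func collectedAtHint(parserFieldsBody []byte) (time.Time, bool) {
	var payload struct {
		CollectedAt string `json:"collected_at"`
	}
	if err := json.Unmarshal(parserFieldsBody, &payload); err != nil {
		return time.Time{}, false
	}
	collectedAt := strings.TrimSpace(payload.CollectedAt)
	if collectedAt == "" {
		return time.Time{}, false
	}
	ts, err := time.Parse(time.RFC3339Nano, collectedAt)
	if err != nil {
		return time.Time{}, false
	}
	return ts, true
}

func main() {
	ts, ok := collectedAtHint([]byte(`{"collected_at":"2026-02-25T09:58:05.9129753Z"}`))
	// Prints: 2026-02-25 09:58:05.9129753 +0000 UTC true
	fmt.Println(ts.UTC(), ok)
}
```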
@@ -333,6 +378,7 @@ func buildParserFieldSummary(result *models.AnalysisResult) map[string]any {
 	out["source_type"] = result.SourceType
 	out["protocol"] = result.Protocol
 	out["target_host"] = result.TargetHost
+	out["source_timezone"] = result.SourceTimezone
 	out["collected_at"] = result.CollectedAt

 	if result.Hardware == nil {
@@ -382,3 +428,10 @@ func resultTargetHost(result *models.AnalysisResult) string {
 	}
 	return result.TargetHost
 }
+
+func resultSourceTimezone(result *models.AnalysisResult) string {
+	if result == nil {
+		return ""
+	}
+	return strings.TrimSpace(result.SourceTimezone)
+}
@@ -1,8 +1,12 @@
 package server

 import (
+	"archive/zip"
+	"bytes"
+	"encoding/json"
 	"strings"
 	"testing"
+	"time"
 )

 func TestCollectLogTimeBounds(t *testing.T) {
@@ -44,3 +48,54 @@ func TestBuildHumanReadableCollectionLog_IncludesDurationHeader(t *testing.T) {
 		}
 	}
 }
+
+func TestParseRawExportBundle_ExtractsCollectedAtHintFromParserFields(t *testing.T) {
+	pkg := &RawExportPackage{
+		Format:     rawExportFormatV1,
+		ExportedAt: time.Date(2026, 2, 25, 9, 59, 41, 479023400, time.UTC),
+		Source: RawExportSource{
+			Kind: "live_redfish",
+		},
+	}
+	pkgJSON, err := json.Marshal(pkg)
+	if err != nil {
+		t.Fatalf("marshal pkg: %v", err)
+	}
+
+	parserFields := []byte(`{"collected_at":"2026-02-25T09:58:05.9129753Z"}`)
+
+	var buf bytes.Buffer
+	zw := zip.NewWriter(&buf)
+
+	jf, err := zw.Create(rawExportBundlePackageFile)
+	if err != nil {
+		t.Fatalf("create package file: %v", err)
+	}
+	if _, err := jf.Write(pkgJSON); err != nil {
+		t.Fatalf("write package file: %v", err)
+	}
+
+	ff, err := zw.Create(rawExportBundleFieldsFile)
+	if err != nil {
+		t.Fatalf("create parser fields file: %v", err)
+	}
+	if _, err := ff.Write(parserFields); err != nil {
+		t.Fatalf("write parser fields file: %v", err)
+	}
+
+	if err := zw.Close(); err != nil {
+		t.Fatalf("close zip writer: %v", err)
+	}
+
+	gotPkg, ok, err := parseRawExportBundle(buf.Bytes())
+	if err != nil {
+		t.Fatalf("parse bundle: %v", err)
+	}
+	if !ok || gotPkg == nil {
+		t.Fatalf("expected valid raw export bundle")
+	}
+	want := time.Date(2026, 2, 25, 9, 58, 5, 912975300, time.UTC)
+	if !gotPkg.CollectedAtHint.Equal(want) {
+		t.Fatalf("expected collected_at hint %s, got %s", want, gotPkg.CollectedAtHint)
+	}
+}
@@ -72,6 +72,7 @@ func (s *Server) setupRoutes() {
 	s.mux.HandleFunc("POST /api/upload", s.handleUpload)
 	s.mux.HandleFunc("GET /api/status", s.handleGetStatus)
 	s.mux.HandleFunc("GET /api/parsers", s.handleGetParsers)
+	s.mux.HandleFunc("GET /api/file-types", s.handleGetFileTypes)
 	s.mux.HandleFunc("GET /api/events", s.handleGetEvents)
 	s.mux.HandleFunc("GET /api/sensors", s.handleGetSensors)
 	s.mux.HandleFunc("GET /api/config", s.handleGetConfig)
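A hedged sketch of how a client might consume the new route (host and port are assumptions; the payload shape matches `handleGetFileTypes` above):

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// fileTypes matches the payload served by handleGetFileTypes above.
type fileTypes struct {
	ArchiveExtensions []string `json:"archive_extensions"`
	UploadExtensions  []string `json:"upload_extensions"`
	ConvertExtensions []string `json:"convert_extensions"`
}

func main() {
	// The base URL is an assumption; point this at a running server.
	resp, err := http.Get("http://localhost:8080/api/file-types")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var ft fileTypes
	if err := json.NewDecoder(resp.Body).Decode(&ft); err != nil {
		panic(err)
	}
	// A UI can use these lists to populate file-picker accept filters,
	// keeping upload and convert dialogs in sync with the server.
	fmt.Println("upload:", ft.UploadExtensions)
	fmt.Println("convert:", ft.ConvertExtensions)
}
```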
@@ -60,6 +60,108 @@ func TestApplyArchiveSourceMetadata_InferCollectedAtFromEvents(t *testing.T) {
 	}
 }

+func TestApplyArchiveSourceMetadata_InferCollectedAtFromFilename(t *testing.T) {
+	result := &models.AnalysisResult{
+		Filename: "dump_23E100203_20260228-0428.tar.gz",
+	}
+
+	applyArchiveSourceMetadata(result)
+
+	// 2026-02-28 04:28 in Europe/Moscow => 2026-02-28 01:28 UTC
+	want := time.Date(2026, 2, 28, 1, 28, 0, 0, time.UTC)
+	if !result.CollectedAt.Equal(want) {
+		t.Fatalf("expected collected_at from filename: got %s want %s", result.CollectedAt, want)
+	}
+}
+
+func TestApplyArchiveSourceMetadata_IgnoresSyntheticComponentNowEvents(t *testing.T) {
+	realTs := time.Date(2026, 2, 28, 4, 18, 18, 217225000, time.FixedZone("UTC+8", 8*3600))
+	syntheticNow := time.Date(2026, 3, 5, 10, 0, 0, 0, time.UTC)
+	result := &models.AnalysisResult{
+		Events: []models.Event{
+			{
+				Timestamp:  realTs,
+				Source:     "spx_restservice_ext",
+				SensorType: "syslog",
+				EventType:  "System Log",
+			},
+			{
+				Timestamp:  syntheticNow,
+				Source:     "Fan",
+				SensorType: "fan",
+				EventType:  "Fan Status",
+			},
+		},
+	}
+
+	applyArchiveSourceMetadata(result)
+
+	if !result.CollectedAt.Equal(realTs.UTC()) {
+		t.Fatalf("expected collected_at from real log timestamp: got %s want %s", result.CollectedAt, realTs.UTC())
+	}
+}
+
+func TestInferRawExportCollectedAt_PrefersResultCollectedAt(t *testing.T) {
+	expected := time.Date(2026, 2, 25, 8, 0, 0, 0, time.UTC)
+	result := &models.AnalysisResult{CollectedAt: expected}
+	pkg := &RawExportPackage{
+		ExportedAt: time.Date(2026, 2, 25, 9, 59, 41, 0, time.UTC),
+		Source: RawExportSource{
+			CollectLogs: []string{
+				"2026-02-25T09:00:00Z step1",
+				"2026-02-25T09:10:00Z step2",
+			},
+		},
+	}
+
+	got := inferRawExportCollectedAt(result, pkg)
+	if !got.Equal(expected) {
+		t.Fatalf("expected collected_at from result: got %s want %s", got, expected)
+	}
+}
+
+func TestInferRawExportCollectedAt_UsesCollectLogsThenExportedAt(t *testing.T) {
+	hintTs := time.Date(2026, 2, 25, 9, 58, 5, 912975300, time.UTC)
+	pkgWithLogs := &RawExportPackage{
+		ExportedAt:      time.Date(2026, 2, 25, 9, 59, 41, 0, time.UTC),
+		CollectedAtHint: hintTs,
+		Source: RawExportSource{
+			CollectLogs: []string{
+				"2026-02-25T09:10:13.7442032Z started",
+				"2026-02-25T09:31:00.5077486Z finished",
+			},
+		},
+	}
+	got := inferRawExportCollectedAt(&models.AnalysisResult{}, pkgWithLogs)
+	if !got.Equal(hintTs) {
+		t.Fatalf("expected collected_at from parser_fields hint: got %s want %s", got, hintTs)
+	}
+
+	pkgFromLogs := &RawExportPackage{
+		ExportedAt: time.Date(2026, 2, 25, 9, 59, 41, 0, time.UTC),
+		Source: RawExportSource{
+			CollectLogs: []string{
+				"2026-02-25T09:10:13.7442032Z started",
+				"2026-02-25T09:31:00.5077486Z finished",
+			},
+		},
+	}
+	got = inferRawExportCollectedAt(&models.AnalysisResult{}, pkgFromLogs)
+	wantFromLogs := time.Date(2026, 2, 25, 9, 31, 0, 507748600, time.UTC)
+	if !got.Equal(wantFromLogs) {
+		t.Fatalf("expected collected_at from collect logs: got %s want %s", got, wantFromLogs)
+	}
+
+	pkgWithoutLogs := &RawExportPackage{
+		ExportedAt: time.Date(2026, 2, 25, 9, 59, 41, 479023400, time.UTC),
+	}
+	got = inferRawExportCollectedAt(&models.AnalysisResult{}, pkgWithoutLogs)
+	wantFromExportedAt := time.Date(2026, 2, 25, 9, 59, 41, 479023400, time.UTC)
+	if !got.Equal(wantFromExportedAt) {
+		t.Fatalf("expected collected_at from exported_at: got %s want %s", got, wantFromExportedAt)
+	}
+}
+
 func TestApplyCollectSourceMetadata(t *testing.T) {
 	req := CollectRequest{
 		Host:     "bmc-api.local",
@@ -106,6 +208,32 @@ func TestApplyCollectSourceMetadata(t *testing.T) {
 	}
 }

+func TestApplyCollectSourceMetadata_PreservesCollectedAtAndTimezone(t *testing.T) {
+	req := CollectRequest{
+		Host:     "bmc-api.local",
+		Protocol: "redfish",
+		Port:     443,
+		Username: "admin",
+		AuthType: "password",
+		Password: "super-secret",
+		TLSMode:  "strict",
+	}
+	collectedAt := time.Date(2026, 2, 28, 4, 18, 18, 0, time.FixedZone("UTC+8", 8*3600))
+	result := &models.AnalysisResult{
+		CollectedAt:    collectedAt,
+		SourceTimezone: "+08:00",
+	}
+
+	applyCollectSourceMetadata(result, req)
+
+	if !result.CollectedAt.Equal(collectedAt) {
+		t.Fatalf("expected collected_at to be preserved: got %s want %s", result.CollectedAt, collectedAt)
+	}
+	if result.SourceTimezone != "+08:00" {
+		t.Fatalf("expected source_timezone to be preserved, got %q", result.SourceTimezone)
+	}
+}
+
 func TestStatusAndConfigExposeSourceMetadata(t *testing.T) {
 	s := &Server{}
 	s.SetDetectedVendor("nvidia")