misc: sds format support, convert limits, dell dedup, supermicro removal, bible updates

Parser / archive:
- Add .sds extension as a tar-format alias (archive.go): .sds archives extract
  through the same code path as .tar
- Add tests for multipart upload size limits (multipart_limits_test.go)
- Remove supermicro crashdump parser (ADL-015)

Dell parser:
- Remove GPU duplicates from PCIeDevices (DCIM_VideoView and DCIM_PCIDeviceView
  both list the same GPU; the VideoView record is authoritative because it carries
  serial, firmware, and temperature data)
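
A minimal, self-contained sketch of the matching rule (record values below are
invented for illustration; the real logic is removePCIeOverlappingWithGPUs in the
Dell parser diff further down):

```go
package main

import (
	"fmt"
	"strings"
)

// dev is a simplified stand-in for models.GPU / models.PCIeDevice.
type dev struct{ Model, Slot, BDF string }

// key normalizes a slot or BDF value the same way the parser does.
func key(s string) string { return strings.ToLower(strings.TrimSpace(s)) }

func main() {
	gpu := dev{"GPU (DCIM_VideoView)", "SLOT.4", "0000:17:00.0"}      // hypothetical values
	pci := dev{"Same GPU (DCIM_PCIDeviceView)", "SLOT.4 ", "0000:17:00.0"}
	// A PCIe entry whose normalized slot or BDF matches a GPU is a duplicate.
	dup := key(pci.Slot) == key(gpu.Slot) || key(pci.BDF) == key(gpu.BDF)
	fmt.Println(dup) // true: the PCIe copy is dropped; the VideoView record stays
}
```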

Server:
- Add LOGPILE_CONVERT_MAX_MB env var so the convert batch size limit is independent
  of the upload limit (default 16384 MB, clamped to 512-65536 MB)
- Include the current limit value in the "file too large" error message

Web:
- Cap convert batches at CONVERT_MAX_FILES_PER_BATCH = 1000 files; larger selections
  are uploaded in sequential passes (e.g. 2,500 files take three passes)
- Minor UI copy and CSS fixes

Bible:
- bible-local/06-parsers.md: add pci.ids enrichment rule (resolve the model name
  via pciids when the name is empty but vendor_id + device_id are present)
- Sync bible submodule and local overview/architecture docs

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
commit 21ea129933 (parent 9c5512d238)
Date: 2026-03-01 22:23:44 +03:00
22 changed files with 268 additions and 446 deletions


Submodule bible updated: 8f28cfeac2...0c829182a1


@@ -19,7 +19,7 @@ through the same API and UI.
 ## Key capabilities
 - Single self-contained binary with embedded HTML/JS/CSS (no static file serving required).
-- Vendor archive parsing: Inspur/Kaytus, Supermicro, NVIDIA HGX Field Diagnostics,
+- Vendor archive parsing: Inspur/Kaytus, Dell TSR, NVIDIA HGX Field Diagnostics,
   NVIDIA Bug Report, Unraid, XigmaNAS, Generic text fallback.
 - Live Redfish collection with async progress tracking.
 - Normalized hardware inventory: CPU / RAM / Storage / GPU / PSU / NIC / PCIe / Firmware.


@@ -29,8 +29,8 @@ internal/
     interface.go          # VendorParser interface
     vendors/              # Vendor-specific parser modules
       vendors.go          # Import-side-effect registrations
+      dell/
       inspur/
-      supermicro/
       nvidia/
       nvidia_bug_report/
       unraid/


@@ -87,6 +87,31 @@ Use `nicMACInModelRE` (defined in the Dell parser) or an equivalent regex:
 This applies to **all** string fields used as device names or model identifiers.
+
+### PCI device name enrichment via pci.ids
+
+If a PCIe device, GPU, NIC, or any hardware component has a `vendor_id` + `device_id`
+but its model/name field is **empty or generic** (e.g. blank, equals the description,
+or is just a raw hex ID), the parser **must** attempt to resolve the human-readable
+model name from the embedded `pci.ids` database before storing the result.
+
+**Rule:** When `Model` (or equivalent name field) is empty and both `VendorID` and
+`DeviceID` are non-zero, call the pciids lookup and use the result as the model name.
+
+```go
+// Example pattern — use in any parser that handles PCIe/GPU/NIC devices:
+if strings.TrimSpace(device.Model) == "" && device.VendorID != 0 && device.DeviceID != 0 {
+	if name := pciids.Lookup(device.VendorID, device.DeviceID); name != "" {
+		device.Model = name
+	}
+}
+```
+
+This rule applies to all vendor parsers. The pciids package is available at
+`internal/parser/vendors/pciids`. See ADL-005 for the rationale.
+
+**Do not hardcode model name strings.** If a device is unknown today, it will be
+resolved automatically once `pci.ids` is updated.
 ---
 ## Vendor parsers


@@ -22,6 +22,7 @@ var supportedArchiveExt = map[string]struct{}{
 	".gz":  {},
 	".tgz": {},
 	".tar": {},
+	".sds": {},
 	".zip": {},
 	".txt": {},
 	".log": {},
@@ -46,7 +47,7 @@ func ExtractArchive(archivePath string) ([]ExtractedFile, error) {
 	switch ext {
 	case ".gz", ".tgz":
 		return extractTarGz(archivePath)
-	case ".tar":
+	case ".tar", ".sds":
 		return extractTar(archivePath)
 	case ".zip":
 		return extractZip(archivePath)
@@ -67,7 +68,7 @@ func ExtractArchiveFromReader(r io.Reader, filename string) ([]ExtractedFile, er
 	switch ext {
 	case ".gz", ".tgz":
 		return extractTarGzFromReader(r, filename)
-	case ".tar":
+	case ".tar", ".sds":
 		return extractTarFromReader(r)
 	case ".zip":
 		return extractZipFromReader(r)


@@ -1,6 +1,7 @@
 package parser

 import (
+	"archive/tar"
 	"bytes"
 	"os"
 	"path/filepath"
@@ -78,6 +79,7 @@ func TestIsSupportedArchiveFilename(t *testing.T) {
 		{name: "dump.tar.gz", want: true},
 		{name: "nvidia-bug-report-1651124000923.log.gz", want: true},
 		{name: "snapshot.zip", want: true},
+		{name: "h3c_20250819.sds", want: true},
 		{name: "report.log", want: true},
 		{name: "xigmanas.txt", want: true},
 		{name: "raw_export.json", want: false},
@@ -91,3 +93,34 @@
 		}
 	}
 }
+
+func TestExtractArchiveFromReaderSDS(t *testing.T) {
+	var buf bytes.Buffer
+	tw := tar.NewWriter(&buf)
+	payload := []byte("STARTTIME:0\nENDTIME:0\n")
+	if err := tw.WriteHeader(&tar.Header{
+		Name: "bmc/pack.info",
+		Mode: 0o600,
+		Size: int64(len(payload)),
+	}); err != nil {
+		t.Fatalf("write tar header: %v", err)
+	}
+	if _, err := tw.Write(payload); err != nil {
+		t.Fatalf("write tar payload: %v", err)
+	}
+	if err := tw.Close(); err != nil {
+		t.Fatalf("close tar writer: %v", err)
+	}
+	files, err := ExtractArchiveFromReader(bytes.NewReader(buf.Bytes()), "sample.sds")
+	if err != nil {
+		t.Fatalf("extract sds from reader: %v", err)
+	}
+	if len(files) != 1 {
+		t.Fatalf("expected 1 extracted file, got %d", len(files))
+	}
+	if files[0].Path != "bmc/pack.info" {
+		t.Fatalf("expected bmc/pack.info, got %q", files[0].Path)
+	}
+}


@@ -9,7 +9,7 @@ type VendorParser interface {
 	// Name returns human-readable parser name
 	Name() string

-	// Vendor returns vendor identifier (e.g., "inspur", "supermicro", "dell")
+	// Vendor returns vendor identifier (e.g., "inspur", "dell", "h3c_g6")
 	Vendor() string

 	// Version returns parser version string


@@ -99,8 +99,8 @@ func (p *Parser) Parse(files []parser.ExtractedFile) (*models.AnalysisResult, er
 	result.Hardware.PowerSupply = dedupePSU(result.Hardware.PowerSupply)
 	result.Hardware.NetworkAdapters = dedupeNetworkAdapters(result.Hardware.NetworkAdapters)
 	result.Hardware.NetworkCards = nicCardsFromAdapters(result.Hardware.NetworkAdapters)
-	result.Hardware.PCIeDevices = dedupePCIe(result.Hardware.PCIeDevices)
 	result.Hardware.GPUs = dedupeGPU(result.Hardware.GPUs)
+	result.Hardware.PCIeDevices = removePCIeOverlappingWithGPUs(dedupePCIe(result.Hardware.PCIeDevices), result.Hardware.GPUs)
 	result.Hardware.CPUs = dedupeCPU(result.Hardware.CPUs)
 	result.Hardware.Memory = dedupeDIMM(result.Hardware.Memory)
 	result.Hardware.Firmware = dedupeFirmware(result.Hardware.Firmware)
@@ -1248,6 +1248,39 @@ func nicCardsFromAdapters(items []models.NetworkAdapter) []models.NIC {
 	return out
 }

+// removePCIeOverlappingWithGPUs drops PCIe entries that duplicate a GPU already
+// captured from DCIM_VideoView. Dell TSR lists GPUs in both DCIM_VideoView and
+// DCIM_PCIDeviceView; the VideoView record is authoritative (has serial, firmware,
+// temperature) so the PCIe duplicate must be removed.
+func removePCIeOverlappingWithGPUs(pcie []models.PCIeDevice, gpus []models.GPU) []models.PCIeDevice {
+	if len(gpus) == 0 {
+		return pcie
+	}
+	gpuSlots := make(map[string]struct{}, len(gpus))
+	gpuBDFs := make(map[string]struct{}, len(gpus))
+	for _, g := range gpus {
+		if s := strings.ToLower(strings.TrimSpace(g.Slot)); s != "" {
+			gpuSlots[s] = struct{}{}
+		}
+		if b := strings.ToLower(strings.TrimSpace(g.BDF)); b != "" {
+			gpuBDFs[b] = struct{}{}
+		}
+	}
+	out := make([]models.PCIeDevice, 0, len(pcie))
+	for _, p := range pcie {
+		slot := strings.ToLower(strings.TrimSpace(p.Slot))
+		bdf := strings.ToLower(strings.TrimSpace(p.BDF))
+		if _, ok := gpuSlots[slot]; ok && slot != "" {
+			continue
+		}
+		if _, ok := gpuBDFs[bdf]; ok && bdf != "" {
+			continue
+		}
+		out = append(out, p)
+	}
+	return out
+}
+
 func dedupePCIe(items []models.PCIeDevice) []models.PCIeDevice {
 	out := make([]models.PCIeDevice, 0, len(items))
 	seen := make(map[string]int)


@@ -10,7 +10,7 @@ import (
 )

 // parserVersion - version of this parser module
-const parserVersion = "1.0.0"
+const parserVersion = "1.1"

 func init() {
 	parser.Register(&Parser{})


@@ -20,8 +20,8 @@ import (
 )

 const (
-	parserVersionG5 = "1.0.0"
-	parserVersionG6 = "1.0.0"
+	parserVersionG5 = "2.1"
+	parserVersionG6 = "2.1"
 )

 func init() {


@@ -16,7 +16,7 @@ import (
 // parserVersion - version of this parser module
 // IMPORTANT: Increment this version when making changes to parser logic!
-const parserVersion = "1.4.0"
+const parserVersion = "1.5"

 func init() {
 	parser.Register(&Parser{})


@@ -14,7 +14,7 @@ import (
 // parserVersion - version of this parser module
 // IMPORTANT: Increment this version when making changes to parser logic!
-const parserVersion = "1.3.0"
+const parserVersion = "1.4"

 func init() {
 	parser.Register(&Parser{})


@@ -13,7 +13,7 @@ import (
 )

 // parserVersion - version of this parser module
-const parserVersion = "1.1.0"
+const parserVersion = "1.2"

 var bugReportDateLineRegex = regexp.MustCompile(`(?m)^Date:\s+(.+?)\s*$`)
 var dateWithTZAbbrevRegex = regexp.MustCompile(`^([A-Za-z]{3}\s+[A-Za-z]{3}\s+\d{1,2}\s+\d{2}:\d{2}:\d{2})\s+([A-Za-z]{2,5})\s+(\d{4})$`)


@@ -1,261 +0,0 @@
-package supermicro
-
-import (
-	"encoding/json"
-	"fmt"
-	"strconv"
-	"strings"
-	"time"
-
-	"git.mchus.pro/mchus/logpile/internal/models"
-)
-
-// CrashDumpData represents the structure of CDump.txt
-type CrashDumpData struct {
-	CrashData struct {
-		METADATA   Metadata       `json:"METADATA"`
-		PROCESSORS ProcessorsData `json:"PROCESSORS"`
-	} `json:"crash_data"`
-}
-
-// ProcessorsData contains processor crash data
-type ProcessorsData struct {
-	Version string     `json:"_version"`
-	CPU0    Processors `json:"cpu0"`
-	CPU1    Processors `json:"cpu1"`
-}
-
-// Metadata contains crashdump metadata
-type Metadata struct {
-	CPU0          CPUMetadata `json:"cpu0"`
-	CPU1          CPUMetadata `json:"cpu1"`
-	BMCFWVer      string      `json:"bmc_fw_ver"`
-	BIOSId        string      `json:"bios_id"`
-	MEFWVer       string      `json:"me_fw_ver"`
-	Timestamp     string      `json:"timestamp"`
-	TriggerType   string      `json:"trigger_type"`
-	PlatformName  string      `json:"platform_name"`
-	CrashdumpVer  string      `json:"crashdump_ver"`
-	ResetDetected string      `json:"_reset_detected"`
-}
-
-// CPUMetadata contains CPU metadata
-type CPUMetadata struct {
-	CPUID         string `json:"cpuid"`
-	CoreMask      string `json:"core_mask"`
-	CHACount      string `json:"cha_count"`
-	CoreCount     string `json:"core_count"`
-	PPIN          string `json:"ppin"`
-	UcodePatchVer string `json:"ucode_patch_ver"`
-}
-
-// Processors contains processor crash data
-type Processors struct {
-	MCA MCAData `json:"MCA"`
-}
-
-// MCAData contains Machine Check Architecture data
-type MCAData struct {
-	Uncore map[string]interface{} `json:"uncore"`
-}
-
-// ParseCrashDump parses CDump.txt file
-func ParseCrashDump(content []byte, result *models.AnalysisResult) error {
-	var data CrashDumpData
-	if err := json.Unmarshal(content, &data); err != nil {
-		return fmt.Errorf("failed to parse CDump.txt: %w", err)
-	}
-	// Initialize Hardware.Firmware slice if nil
-	if result.Hardware.Firmware == nil {
-		result.Hardware.Firmware = make([]models.FirmwareInfo, 0)
-	}
-	// Parse metadata
-	parseMetadata(&data.CrashData.METADATA, result)
-	// Parse CPU information
-	parseCPUInfo(&data.CrashData.METADATA, result)
-	// Parse MCA errors
-	parseMCAErrors(&data.CrashData, result)
-	return nil
-}
-
-// parseMetadata extracts metadata information
-func parseMetadata(metadata *Metadata, result *models.AnalysisResult) {
-	// Store firmware versions in HardwareConfig.Firmware
-	if metadata.BMCFWVer != "" {
-		result.Hardware.Firmware = append(result.Hardware.Firmware, models.FirmwareInfo{
-			DeviceName: "BMC",
-			Version:    metadata.BMCFWVer,
-		})
-	}
-	if metadata.BIOSId != "" {
-		result.Hardware.Firmware = append(result.Hardware.Firmware, models.FirmwareInfo{
-			DeviceName: "BIOS",
-			Version:    metadata.BIOSId,
-		})
-	}
-	if metadata.MEFWVer != "" {
-		result.Hardware.Firmware = append(result.Hardware.Firmware, models.FirmwareInfo{
-			DeviceName: "ME",
-			Version:    metadata.MEFWVer,
-		})
-	}
-	// Create event for crashdump trigger
-	timestamp := time.Now()
-	if metadata.Timestamp != "" {
-		if t, err := time.Parse(time.RFC3339, metadata.Timestamp); err == nil {
-			timestamp = t
-		}
-	}
-	triggerType := metadata.TriggerType
-	if triggerType == "" {
-		triggerType = "Unknown"
-	}
-	severity := models.SeverityInfo
-	if metadata.ResetDetected != "" && metadata.ResetDetected != "NONE" {
-		severity = models.SeverityWarning
-	}
-	result.Events = append(result.Events, models.Event{
-		Timestamp:   timestamp,
-		Source:      "Crashdump",
-		EventType:   "System Crashdump",
-		Description: fmt.Sprintf("Crashdump collected (%s)", triggerType),
-		Severity:    severity,
-		RawData:     fmt.Sprintf("Version: %s, Reset: %s", metadata.CrashdumpVer, metadata.ResetDetected),
-	})
-}
-
-// parseCPUInfo extracts CPU information
-func parseCPUInfo(metadata *Metadata, result *models.AnalysisResult) {
-	cpus := []struct {
-		socket int
-		data   CPUMetadata
-	}{
-		{0, metadata.CPU0},
-		{1, metadata.CPU1},
-	}
-	for _, cpu := range cpus {
-		if cpu.data.CPUID == "" {
-			continue
-		}
-		// Parse core count
-		coreCount := 0
-		if cpu.data.CoreCount != "" {
-			if count, err := strconv.ParseInt(strings.TrimPrefix(cpu.data.CoreCount, "0x"), 16, 64); err == nil {
-				coreCount = int(count)
-			}
-		}
-		cpuModel := models.CPU{
-			Socket: cpu.socket,
-			Model:  fmt.Sprintf("Intel CPU (CPUID: %s)", cpu.data.CPUID),
-			Cores:  coreCount,
-		}
-		// Add PPIN
-		if cpu.data.PPIN != "" && cpu.data.PPIN != "0x0" {
-			cpuModel.PPIN = cpu.data.PPIN
-		}
-		result.Hardware.CPUs = append(result.Hardware.CPUs, cpuModel)
-		// Add microcode version to firmware list
-		if cpu.data.UcodePatchVer != "" {
-			result.Hardware.Firmware = append(result.Hardware.Firmware, models.FirmwareInfo{
-				DeviceName: fmt.Sprintf("CPU%d Microcode", cpu.socket),
-				Version:    cpu.data.UcodePatchVer,
-			})
-		}
-	}
-}
-
-// parseMCAErrors extracts Machine Check Architecture errors
-func parseMCAErrors(crashData *struct {
-	METADATA   Metadata       `json:"METADATA"`
-	PROCESSORS ProcessorsData `json:"PROCESSORS"`
-}, result *models.AnalysisResult) {
-	timestamp := time.Now()
-	if crashData.METADATA.Timestamp != "" {
-		if t, err := time.Parse(time.RFC3339, crashData.METADATA.Timestamp); err == nil {
-			timestamp = t
-		}
-	}
-	// Parse each CPU's MCA data
-	cpuProcs := []struct {
-		name string
-		data Processors
-	}{
-		{"cpu0", crashData.PROCESSORS.CPU0},
-		{"cpu1", crashData.PROCESSORS.CPU1},
-	}
-	for _, cpu := range cpuProcs {
-		if cpu.data.MCA.Uncore == nil {
-			continue
-		}
-		// Check each MCA bank for errors
-		for bankName, bankDataRaw := range cpu.data.MCA.Uncore {
-			bankData, ok := bankDataRaw.(map[string]interface{})
-			if !ok {
-				continue
-			}
-			// Look for status register
-			statusKey := strings.ToLower(bankName) + "_status"
-			statusRaw, ok := bankData[statusKey]
-			if !ok {
-				continue
-			}
-			statusStr, ok := statusRaw.(string)
-			if !ok {
-				continue
-			}
-			// Parse status value
-			status, err := strconv.ParseUint(strings.TrimPrefix(statusStr, "0x"), 16, 64)
-			if err != nil {
-				continue
-			}
-			// Check if MCA error is valid (bit 63 = Valid)
-			if status&(1<<63) != 0 {
-				// MCA error detected
-				severity := models.SeverityWarning
-				if status&(1<<61) != 0 { // UC bit = uncorrected error
-					severity = models.SeverityCritical
-				}
-				description := fmt.Sprintf("MCA Error in %s bank %s", cpu.name, bankName)
-				if status&(1<<61) != 0 {
-					description += " (Uncorrected)"
-				} else {
-					description += " (Corrected)"
-				}
-				result.Events = append(result.Events, models.Event{
-					Timestamp:   timestamp,
-					Source:      "MCA",
-					EventType:   "Machine Check",
-					Description: description,
-					Severity:    severity,
-					RawData:     fmt.Sprintf("Status: %s, CPU: %s, Bank: %s", statusStr, cpu.name, bankName),
-				})
-			}
-		}
-	}
-}


@@ -1,98 +0,0 @@
-// Package supermicro provides parser for Supermicro BMC crashdump archives
-// Tested with: Supermicro SYS-821GE-TNHR (Crashdump format)
-//
-// IMPORTANT: Increment parserVersion when modifying parser logic!
-// This helps track which version was used to parse specific logs.
-package supermicro
-
-import (
-	"strings"
-
-	"git.mchus.pro/mchus/logpile/internal/models"
-	"git.mchus.pro/mchus/logpile/internal/parser"
-)
-
-// parserVersion - version of this parser module
-// IMPORTANT: Increment this version when making changes to parser logic!
-const parserVersion = "1.0.0"
-
-func init() {
-	parser.Register(&Parser{})
-}
-
-// Parser implements VendorParser for Supermicro servers
-type Parser struct{}
-
-// Name returns human-readable parser name
-func (p *Parser) Name() string {
-	return "SMC Crash Dump Parser"
-}
-
-// Vendor returns vendor identifier
-func (p *Parser) Vendor() string {
-	return "supermicro"
-}
-
-// Version returns parser version
-// IMPORTANT: Update parserVersion constant when modifying parser logic!
-func (p *Parser) Version() string {
-	return parserVersion
-}
-
-// Detect checks if archive matches Supermicro crashdump format
-// Returns confidence 0-100
-func (p *Parser) Detect(files []parser.ExtractedFile) int {
-	confidence := 0
-	for _, f := range files {
-		path := strings.ToLower(f.Path)
-		// Strong indicator for Supermicro Crashdump format
-		if strings.HasSuffix(path, "cdump.txt") {
-			// Check if it's really Supermicro crashdump format
-			if containsCrashdumpMarkers(f.Content) {
-				confidence += 80
-			}
-		}
-		// Cap at 100
-		if confidence >= 100 {
-			return 100
-		}
-	}
-	return confidence
-}
-
-// containsCrashdumpMarkers checks if content has Supermicro crashdump markers
-func containsCrashdumpMarkers(content []byte) bool {
-	s := string(content)
-	// Check for typical Supermicro Crashdump structure
-	return strings.Contains(s, "crash_data") &&
-		strings.Contains(s, "METADATA") &&
-		(strings.Contains(s, "bmc_fw_ver") || strings.Contains(s, "crashdump_ver"))
-}
-
-// Parse parses Supermicro crashdump archive
-func (p *Parser) Parse(files []parser.ExtractedFile) (*models.AnalysisResult, error) {
-	result := &models.AnalysisResult{
-		Events:  make([]models.Event, 0),
-		FRU:     make([]models.FRUInfo, 0),
-		Sensors: make([]models.SensorReading, 0),
-	}
-	// Initialize hardware config
-	result.Hardware = &models.HardwareConfig{
-		CPUs: make([]models.CPU, 0),
-	}
-	// Parse CDump.txt (JSON crashdump)
-	if f := parser.FindFileByName(files, "CDump.txt"); f != nil {
-		if err := ParseCrashDump(f.Content, result); err != nil {
-			// Log error but continue parsing other files
-			_ = err // Ignore error for now
-		}
-	}
-	return result, nil
-}


@@ -4,18 +4,17 @@ package vendors
 import (
 	// Import vendor modules to trigger their init() registration
+	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/dell"
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/h3c"
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/inspur"
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/nvidia"
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/nvidia_bug_report"
-	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/supermicro"
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/unraid"
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/xigmanas"

 	// Generic fallback parser (must be last for lowest priority)
 	_ "git.mchus.pro/mchus/logpile/internal/parser/vendors/generic"

 	// Future vendors:
-	// _ "git.mchus.pro/mchus/logpile/internal/parser/vendors/dell"
 	// _ "git.mchus.pro/mchus/logpile/internal/parser/vendors/hpe"
 	// _ "git.mchus.pro/mchus/logpile/internal/parser/vendors/lenovo"
 )


@@ -12,7 +12,7 @@ import (
 )

 // parserVersion - increment when parsing logic changes.
-const parserVersion = "2.1.0"
+const parserVersion = "2.2"

 func init() {
 	parser.Register(&Parser{})


@@ -173,6 +173,29 @@ func uploadMultipartMaxBytes() int64 {
 	return int64(mb) << 20
 }
+
+func convertMultipartMaxBytes() int64 {
+	// Convert mode typically uploads a folder with many files,
+	// so it has a larger independent limit.
+	const (
+		defMB = 16384
+		minMB = 512
+		maxMB = 65536
+	)
+	mb := defMB
+	if v := strings.TrimSpace(os.Getenv("LOGPILE_CONVERT_MAX_MB")); v != "" {
+		if n, err := strconv.Atoi(v); err == nil {
+			if n < minMB {
+				n = minMB
+			}
+			if n > maxMB {
+				n = maxMB
+			}
+			mb = n
+		}
+	}
+	return int64(mb) << 20
+}
+
 func uploadMultipartFormMemoryBytes() int64 {
 	// Keep a small in-memory threshold; file parts spill to temp files.
 	const formMemoryMB = 32
@@ -1126,9 +1149,17 @@ func (s *Server) handleExportReanimator(w http.ResponseWriter, r *http.Request)
 }

 func (s *Server) handleConvertReanimatorBatch(w http.ResponseWriter, r *http.Request) {
-	r.Body = http.MaxBytesReader(w, r.Body, uploadMultipartMaxBytes())
+	r.Body = http.MaxBytesReader(w, r.Body, convertMultipartMaxBytes())
 	if err := r.ParseMultipartForm(uploadMultipartFormMemoryBytes()); err != nil {
-		jsonError(w, "File too large", http.StatusBadRequest)
+		if strings.Contains(strings.ToLower(err.Error()), "too large") {
+			msg := fmt.Sprintf(
+				"File too large. Increase LOGPILE_CONVERT_MAX_MB (current limit: %d MB)",
+				convertMultipartMaxBytes()>>20,
+			)
+			jsonError(w, msg, http.StatusBadRequest)
+			return
+		}
+		jsonError(w, "Failed to parse multipart form", http.StatusBadRequest)
 		return
 	}


@@ -0,0 +1,29 @@
+package server
+
+import "testing"
+
+func TestConvertMultipartMaxBytes_Default(t *testing.T) {
+	t.Setenv("LOGPILE_CONVERT_MAX_MB", "")
+	got := convertMultipartMaxBytes()
+	want := int64(16384) << 20
+	if got != want {
+		t.Fatalf("convertMultipartMaxBytes()=%d, want %d", got, want)
+	}
+}
+
+func TestConvertMultipartMaxBytes_EnvClamp(t *testing.T) {
+	t.Setenv("LOGPILE_CONVERT_MAX_MB", "42")
+	if got := convertMultipartMaxBytes(); got != (int64(512) << 20) {
+		t.Fatalf("expected min clamp 512MB, got %d", got)
+	}
+
+	t.Setenv("LOGPILE_CONVERT_MAX_MB", "999999")
+	if got := convertMultipartMaxBytes(); got != (int64(65536) << 20) {
+		t.Fatalf("expected max clamp 65536MB, got %d", got)
+	}
+
+	t.Setenv("LOGPILE_CONVERT_MAX_MB", "12288")
+	if got := convertMultipartMaxBytes(); got != (int64(12288) << 20) {
+		t.Fatalf("expected exact env value 12288MB, got %d", got)
+	}
+}


@@ -384,38 +384,43 @@ main {
 .parsers-title {
 	font-size: 0.85rem;
-	color: #666;
-	margin-bottom: 0.5rem;
+	color: #4b5563;
+	margin-bottom: 0.6rem;
+	font-weight: 600;
 }

 .parsers-list {
 	display: flex;
 	flex-wrap: wrap;
-	gap: 0.5rem;
+	gap: 0.6rem;
 	justify-content: center;
 }

-.parser-item {
+.parser-chip {
 	display: inline-flex;
 	align-items: center;
-	gap: 0.5rem;
-	background: #f8f9fa;
-	padding: 0.4rem 0.8rem;
-	border-radius: 4px;
-	border: 1px solid #e0e0e0;
+	gap: 0.45rem;
+	background: #eef6ff;
+	padding: 0.38rem 0.72rem;
+	border-radius: 999px;
+	border: 1px solid #bfdcff;
+	line-height: 1;
 }

-.parser-name {
+.parser-chip-name {
 	font-size: 0.85rem;
-	color: #2c3e50;
+	color: #1f2937;
+	font-weight: 500;
 }

-.parser-version {
-	font-size: 0.75rem;
-	color: #888;
-	background: #e8e8e8;
-	padding: 0.1rem 0.4rem;
-	border-radius: 3px;
+.parser-chip-version {
+	font-size: 0.72rem;
+	color: #1d4ed8;
+	background: #dbeafe;
+	padding: 0.12rem 0.42rem;
+	border-radius: 999px;
+	border: 1px solid #bfdbfe;
+	font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, monospace;
 }

 /* File Info */


@@ -14,6 +14,7 @@ document.addEventListener('DOMContentLoaded', () => {
 let sourceType = 'archive';
 let convertFiles = [];
 let isConvertRunning = false;
+const CONVERT_MAX_FILES_PER_BATCH = 1000;
 let supportedUploadExtensions = null;
 let supportedConvertExtensions = null;
 let apiConnectPayload = null;
@@ -539,12 +540,12 @@ async function loadParsersInfo() {
 	const container = document.getElementById('parsers-info');
 	if (data.parsers && data.parsers.length > 0) {
-		let html = '<p class="parsers-title">Supported platforms:</p><div class="parsers-list">';
+		let html = '<p class="parsers-title">Connected parsers:</p><div class="parsers-list">';
 		data.parsers.forEach(p => {
-			html += `<div class="parser-item">
-				<span class="parser-name">${escapeHtml(p.name)}</span>
-				<span class="parser-version">v${escapeHtml(p.version)}</span>
-			</div>`;
+			html += `<span class="parser-chip">
+				<span class="parser-chip-name">${escapeHtml(p.name)}</span>
+				<span class="parser-chip-version">v${escapeHtml(p.version)}</span>
+			</span>`;
 		});
 		html += '</div>';
 		container.innerHTML = html;
@@ -653,8 +654,10 @@ function renderConvertSummary() {
 	const remaining = supportedFiles.length - previewFiles.length;
 	const previewText = previewFiles.length > 0 ? `Examples: ${previewFiles.join(', ')}` : '';
 	const skippedText = skippedCount > 0 ? ` Unsupported files skipped: ${skippedCount}.` : '';
+	const batchCount = Math.ceil(supportedFiles.length / CONVERT_MAX_FILES_PER_BATCH);
+	const batchesText = batchCount > 1 ? ` Will take ${batchCount} pass(es) of ${CONVERT_MAX_FILES_PER_BATCH} files each.` : '';
-	summary.innerHTML = `<strong>${supportedFiles.length}</strong> files are ready for conversion.${previewText ? ` ${previewText}` : ''}${remaining > 0 ? ` and ${remaining} more` : ''}.${skippedText}`;
+	summary.innerHTML = `<strong>${supportedFiles.length}</strong> files are ready for conversion.${previewText ? ` ${previewText}` : ''}${remaining > 0 ? ` and ${remaining} more` : ''}.${skippedText}${batchesText}`;
 	summary.className = 'api-connect-status';
 }
@@ -674,58 +677,71 @@ async function runConvertBatch() {
 		renderConvertStatus('No files of a supported type in the selected folder', 'error');
 		return;
 	}
+	const batches = chunkFiles(supportedFiles, CONVERT_MAX_FILES_PER_BATCH);
 	isConvertRunning = true;
 	runButton.disabled = true;
 	renderConvertProgress(0, 'Preparing upload...');
-	renderConvertStatus('Running batch conversion...', 'info');
+	renderConvertStatus(`Running batch conversion (${batches.length} passes)...`, 'info');
-	const formData = new FormData();
-	supportedFiles.forEach(file => {
-		const relativePath = file.webkitRelativePath || file.name || 'file';
-		formData.append('files[]', file, relativePath);
-	});
 	try {
-		const startResponse = await uploadConvertBatch(formData, (percent) => {
-			const uploadPercent = Math.round(percent * 0.3);
-			renderConvertProgress(uploadPercent, `Uploading files: ${percent}%`);
-		});
-		if (!startResponse.ok) {
-			const errorPayload = parseConvertErrorPayload(startResponse.bodyText);
-			hideConvertProgress();
-			renderConvertStatus(errorPayload.error || 'Batch conversion failed', 'error');
-			return;
-		}
-		if (!startResponse.jobId) {
-			hideConvertProgress();
-			renderConvertStatus('Server did not return a job ID', 'error');
-			return;
-		}
-		await waitForConvertJob(startResponse.jobId, (statusPayload) => {
-			const serverProgress = Number(statusPayload.progress || 0);
-			const combined = 30 + Math.round(Math.max(0, Math.min(100, serverProgress)) * 0.7);
-			renderConvertProgress(combined, `Converting: ${serverProgress}%`);
-		});
-		renderConvertProgress(100, 'Preparing download...');
-		const downloadResponse = await downloadConvertArchive(startResponse.jobId);
-		if (!downloadResponse.ok) {
-			const errorPayload = parseConvertErrorPayload(downloadResponse.bodyText);
-			hideConvertProgress();
-			renderConvertStatus(errorPayload.error || 'Failed to download the result', 'error');
-			return;
-		}
-		const blob = downloadResponse.blob;
 		const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
-		downloadBlob(blob, `logpile-convert-${timestamp}.zip`);
-		const summary = downloadResponse.summaryHeader || 'Conversion finished';
+		const passSummaries = [];
+		for (let batchIdx = 0; batchIdx < batches.length; batchIdx++) {
+			const batchFiles = batches[batchIdx];
+			const pass = batchIdx + 1;
+			const passLabel = `Pass ${pass}/${batches.length}`;
+			const passStart = Math.round((batchIdx / batches.length) * 100);
+			const passEnd = Math.round(((batchIdx + 1) / batches.length) * 100);
+			const formData = new FormData();
+			batchFiles.forEach(file => {
+				const relativePath = file.webkitRelativePath || file.name || 'file';
+				formData.append('files[]', file, relativePath);
+			});
+			const startResponse = await uploadConvertBatch(formData, (percent) => {
+				const clamped = Math.max(0, Math.min(100, Number(percent) || 0));
+				const uploadPhase = passStart + Math.round((passEnd - passStart) * 0.3 * (clamped / 100));
+				renderConvertProgress(uploadPhase, `${passLabel}: uploading ${clamped}%`);
+			});
+			if (!startResponse.ok) {
+				const errorPayload = parseConvertErrorPayload(startResponse.bodyText);
+				hideConvertProgress();
+				renderConvertStatus(`${passLabel}: ${errorPayload.error || 'batch conversion failed'}`, 'error');
+				return;
+			}
+			if (!startResponse.jobId) {
+				hideConvertProgress();
+				renderConvertStatus(`${passLabel}: server did not return a job ID`, 'error');
+				return;
+			}
+			await waitForConvertJob(startResponse.jobId, (statusPayload) => {
+				const serverProgress = Math.max(0, Math.min(100, Number(statusPayload.progress || 0)));
+				const phase = 0.3 + 0.7 * (serverProgress / 100);
+				const combined = passStart + Math.round((passEnd - passStart) * phase);
+				renderConvertProgress(combined, `${passLabel}: converting ${serverProgress}%`);
+			});
+			const downloadResponse = await downloadConvertArchive(startResponse.jobId);
+			if (!downloadResponse.ok) {
+				const errorPayload = parseConvertErrorPayload(downloadResponse.bodyText);
+				hideConvertProgress();
+				renderConvertStatus(`${passLabel}: ${errorPayload.error || 'failed to download the result'}`, 'error');
+				return;
+			}
+			const suffix = batches.length > 1 ? `-part${pass}` : '';
+			downloadBlob(downloadResponse.blob, `logpile-convert-${timestamp}${suffix}.zip`);
+			passSummaries.push(downloadResponse.summaryHeader || `${passLabel}: finished`);
+		}
 		hideConvertProgress();
-		renderConvertStatus(summary, 'success');
+		renderConvertStatus(passSummaries.join(' | '), 'success');
 	} catch (err) {
 		hideConvertProgress();
 		renderConvertStatus('Connection error during conversion', 'error');
@@ -735,6 +751,15 @@ async function runConvertBatch() {
 	}
 }
+
+function chunkFiles(files, chunkSize) {
+	const safeChunkSize = Math.max(1, Number(chunkSize) || 1);
+	const chunks = [];
+	for (let i = 0; i < files.length; i += safeChunkSize) {
+		chunks.push(files.slice(i, i + safeChunkSize));
+	}
+	return chunks;
+}
+
 function uploadConvertBatch(formData, onUploadPercent) {
 	return new Promise((resolve, reject) => {
 		const xhr = new XMLHttpRequest();


@@ -23,9 +23,9 @@
 	<div id="archive-source-content">
 		<div class="upload-area" id="drop-zone">
 			<p>Drag an archive, TXT/LOG, or JSON snapshot here</p>
-			<input type="file" id="file-input" accept="application/gzip,application/x-gzip,application/x-tar,application/zip,application/json,text/plain,.json,.tar,.tar.gz,.tgz,.zip,.txt,.log" hidden>
+			<input type="file" id="file-input" accept="application/gzip,application/x-gzip,application/x-tar,application/zip,application/json,text/plain,.json,.tar,.tar.gz,.tgz,.sds,.zip,.txt,.log" hidden>
 			<button type="button" onclick="document.getElementById('file-input').click()">Choose file</button>
-			<p class="hint">Supported formats: tar.gz, zip, json, txt, log</p>
+			<p class="hint">Supported formats: tar.gz, tar, tgz, sds, zip, json, txt, log</p>
 		</div>
 		<div id="upload-status"></div>
 		<div id="parsers-info" class="parsers-info"></div>