misc: sds format support, convert limits, dell dedup, supermicro removal, bible updates
Parser / archive: - Add .sds extension as tar-format alias (archive.go) - Add tests for multipart upload size limits (multipart_limits_test.go) - Remove supermicro crashdump parser (ADL-015) Dell parser: - Remove GPU duplicates from PCIeDevices (DCIM_VideoView vs DCIM_PCIDeviceView both list the same GPU; VideoView record is authoritative) Server: - Add LOGPILE_CONVERT_MAX_MB env var for independent convert batch size limit - Improve "file too large" error message with current limit value Web: - Add CONVERT_MAX_FILES_PER_BATCH = 1000 cap - Minor UI copy and CSS fixes Bible: - bible-local/06-parsers.md: add pci.ids enrichment rule (enrich model from pciids when name is empty but vendor_id+device_id are present) - Sync bible submodule and local overview/architecture docs Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -173,6 +173,29 @@ func uploadMultipartMaxBytes() int64 {
|
||||
return int64(mb) << 20
|
||||
}
|
||||
|
||||
// convertMultipartMaxBytes returns the request-body size cap, in bytes,
// for convert-mode multipart uploads. Convert mode typically uploads a
// folder with many files, so it has a larger independent limit than the
// regular upload path.
//
// The limit defaults to 16384 MB and can be tuned with the
// LOGPILE_CONVERT_MAX_MB environment variable. Parsed values are clamped
// to the [512, 65536] MB range; empty or unparsable values leave the
// default in effect.
func convertMultipartMaxBytes() int64 {
	const (
		defaultMB = 16384
		floorMB   = 512
		ceilMB    = 65536
	)
	megabytes := defaultMB
	raw := strings.TrimSpace(os.Getenv("LOGPILE_CONVERT_MAX_MB"))
	if raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil {
			switch {
			case parsed < floorMB:
				megabytes = floorMB
			case parsed > ceilMB:
				megabytes = ceilMB
			default:
				megabytes = parsed
			}
		}
	}
	// Shift converts MB to bytes (1 MB == 1 << 20 bytes).
	return int64(megabytes) << 20
}
|
||||
|
||||
func uploadMultipartFormMemoryBytes() int64 {
|
||||
// Keep a small in-memory threshold; file parts spill to temp files.
|
||||
const formMemoryMB = 32
|
||||
@@ -1126,9 +1149,17 @@ func (s *Server) handleExportReanimator(w http.ResponseWriter, r *http.Request)
|
||||
}
|
||||
|
||||
func (s *Server) handleConvertReanimatorBatch(w http.ResponseWriter, r *http.Request) {
|
||||
r.Body = http.MaxBytesReader(w, r.Body, uploadMultipartMaxBytes())
|
||||
r.Body = http.MaxBytesReader(w, r.Body, convertMultipartMaxBytes())
|
||||
if err := r.ParseMultipartForm(uploadMultipartFormMemoryBytes()); err != nil {
|
||||
jsonError(w, "File too large", http.StatusBadRequest)
|
||||
if strings.Contains(strings.ToLower(err.Error()), "too large") {
|
||||
msg := fmt.Sprintf(
|
||||
"File too large. Increase LOGPILE_CONVERT_MAX_MB (current limit: %d MB)",
|
||||
convertMultipartMaxBytes()>>20,
|
||||
)
|
||||
jsonError(w, msg, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
jsonError(w, "Failed to parse multipart form", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
29
internal/server/multipart_limits_test.go
Normal file
29
internal/server/multipart_limits_test.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package server
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestConvertMultipartMaxBytes_Default(t *testing.T) {
|
||||
t.Setenv("LOGPILE_CONVERT_MAX_MB", "")
|
||||
got := convertMultipartMaxBytes()
|
||||
want := int64(16384) << 20
|
||||
if got != want {
|
||||
t.Fatalf("convertMultipartMaxBytes()=%d, want %d", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertMultipartMaxBytes_EnvClamp(t *testing.T) {
|
||||
t.Setenv("LOGPILE_CONVERT_MAX_MB", "42")
|
||||
if got := convertMultipartMaxBytes(); got != (int64(512) << 20) {
|
||||
t.Fatalf("expected min clamp 512MB, got %d", got)
|
||||
}
|
||||
|
||||
t.Setenv("LOGPILE_CONVERT_MAX_MB", "999999")
|
||||
if got := convertMultipartMaxBytes(); got != (int64(65536) << 20) {
|
||||
t.Fatalf("expected max clamp 65536MB, got %d", got)
|
||||
}
|
||||
|
||||
t.Setenv("LOGPILE_CONVERT_MAX_MB", "12288")
|
||||
if got := convertMultipartMaxBytes(); got != (int64(12288) << 20) {
|
||||
t.Fatalf("expected exact env value 12288MB, got %d", got)
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user