// Source file: QuoteForge/internal/services/stock_import.go (Go).

package services
import (
"archive/zip"
"bytes"
"encoding/xml"
"fmt"
"io"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
"git.mchus.pro/mchus/quoteforge/internal/lotmatch"
"git.mchus.pro/mchus/quoteforge/internal/models"
pricelistsvc "git.mchus.pro/mchus/quoteforge/internal/services/pricelist"
"git.mchus.pro/mchus/quoteforge/internal/warehouse"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
// StockImportProgress is one progress event emitted through Import's
// onProgress callback. Current/Total express percent completion (Total is
// always 100 in this file); the counter fields mirror StockImportResult and
// are filled in incrementally as stages complete.
type StockImportProgress struct {
	Status             string                   `json:"status"` // stage id: starting, parsing, mapping, writing, recalculating_warehouse, completed
	Message            string                   `json:"message,omitempty"` // human-readable status text (Russian)
	Current            int                      `json:"current,omitempty"`
	Total              int                      `json:"total,omitempty"`
	RowsTotal          int                      `json:"rows_total,omitempty"`
	ValidRows          int                      `json:"valid_rows,omitempty"`
	Inserted           int                      `json:"inserted,omitempty"`
	Deleted            int64                    `json:"deleted,omitempty"`
	Unmapped           int                      `json:"unmapped,omitempty"`
	Conflicts          int                      `json:"conflicts,omitempty"`
	FallbackMatches    int                      `json:"fallback_matches,omitempty"`
	ParseErrors        int                      `json:"parse_errors,omitempty"`
	QtyParseErrors     int                      `json:"qty_parse_errors,omitempty"`
	Ignored            int                      `json:"ignored,omitempty"`
	MappingSuggestions []StockMappingSuggestion `json:"mapping_suggestions,omitempty"`
	ImportDate         string                   `json:"import_date,omitempty"` // formatted as YYYY-MM-DD
	PricelistID        uint                     `json:"warehouse_pricelist_id,omitempty"`
	PricelistVer       string                   `json:"warehouse_pricelist_version,omitempty"`
}
// StockImportResult summarizes a completed Import run.
type StockImportResult struct {
	RowsTotal          int   // rows parsed from the input file
	ValidRows          int   // rows that survived validation and ignore filtering
	Inserted           int   // rows written to stock_log
	Deleted            int64 // previous stock_log rows removed before insert
	Unmapped           int   // rows whose partnumber matched no lot
	Conflicts          int   // rows whose partnumber matched more than one lot
	FallbackMatches    int   // NOTE(review): never incremented by Import in this file; always 0
	ParseErrors        int   // rows dropped for empty article or invalid qty
	QtyParseErrors     int   // subset of ParseErrors caused by unparseable quantities
	Ignored            int   // rows skipped by ignore rules
	MappingSuggestions []StockMappingSuggestion
	ImportDate         time.Time
	WarehousePLID      uint   // id of the freshly created warehouse pricelist
	WarehousePLVer     string // version string of that pricelist
}
// StockMappingSuggestion flags a partnumber for operator review because its
// lot mapping is missing ("unmapped") or ambiguous ("conflict").
type StockMappingSuggestion struct {
	Partnumber  string `json:"partnumber"`
	Description string `json:"description,omitempty"`
	Reason      string `json:"reason,omitempty"` // "unmapped" or "conflict"
}
// stockIgnoreRule is the in-memory, normalized form of models.StockIgnoreRule
// used while filtering import rows (see loadIgnoreRules / shouldIgnoreStockRow).
type stockIgnoreRule struct {
	Target    string // "partnumber" or "description"
	MatchType string // "exact", "prefix" or "suffix"
	Pattern   string // normalizeKey'd pattern to compare against
}
// StockImportService imports warehouse stock files into stock_log and manages
// partnumber mappings and ignore rules. A nil db puts the service in offline
// mode: every operation returns an error.
type StockImportService struct {
	db           *gorm.DB
	pricelistSvc *pricelistsvc.Service // used to rebuild the warehouse pricelist after import
}
// NewStockImportService wires a stock import service to its database handle
// and pricelist service. Either dependency may be nil; the nil cases are
// detected and reported by the individual methods.
func NewStockImportService(db *gorm.DB, pricelistSvc *pricelistsvc.Service) *StockImportService {
	svc := &StockImportService{db: db, pricelistSvc: pricelistSvc}
	return svc
}
// stockImportRow is one parsed line of a stock export file, prior to
// validation and lot mapping.
type stockImportRow struct {
	Folder      string
	Article     string // partnumber; rows with an empty article are rejected
	Description string
	Vendor      string
	Price       float64
	Qty         float64 // parsed "free" quantity; 0 when QtyInvalid
	QtyRaw      string  // original quantity cell text, for diagnostics
	QtyInvalid  bool    // quantity cell missing or unparseable
}
// weightedPricePoint pairs a price with its weight (presumably a quantity —
// no caller is visible in this chunk) for weightedMedian.
type weightedPricePoint struct {
	price  float64
	weight float64
}
// Import parses a stock export file (.mxl or .xlsx), replaces the stock_log
// table with its rows and rebuilds the "warehouse" pricelist from the new
// data. Progress is streamed through onProgress (which may be nil); the
// returned result aggregates the same counters as the final progress event.
//
// createdBy is recorded as the pricelist author ("unknown" when empty);
// fileModTime is a fallback source for the import date.
func (s *StockImportService) Import(
	filename string,
	content []byte,
	fileModTime time.Time,
	createdBy string,
	onProgress func(StockImportProgress),
) (*StockImportResult, error) {
	if s.db == nil {
		return nil, fmt.Errorf("offline mode: stock import unavailable")
	}
	if len(content) == 0 {
		return nil, fmt.Errorf("empty file")
	}
	// report forwards a progress event only when a callback was supplied.
	report := func(p StockImportProgress) {
		if onProgress != nil {
			onProgress(p)
		}
	}
	report(StockImportProgress{Status: "starting", Message: "Запуск импорта", Current: 0, Total: 100})
	rows, err := parseStockRows(filename, content)
	if err != nil {
		return nil, err
	}
	if len(rows) == 0 {
		return nil, fmt.Errorf("no rows parsed")
	}
	report(StockImportProgress{Status: "parsing", Message: "Файл распарсен", RowsTotal: len(rows), Current: 10, Total: 100})
	// Import date precedence: date in content > date in filename > mtime > today.
	importDate := detectImportDate(content, filename, fileModTime)
	report(StockImportProgress{
		Status:     "parsing",
		Message:    "Дата импорта определена",
		ImportDate: importDate.Format("2006-01-02"),
		Current:    15,
		Total:      100,
	})
	partnumberMatcher, err := lotmatch.NewMappingMatcherFromDB(s.db)
	if err != nil {
		return nil, err
	}
	var (
		records         []models.StockLog
		unmapped        int
		conflicts       int
		fallbackMatches int // NOTE(review): never incremented below; always reported as 0
		parseErrors     int
		qtyParseErrors  int
		ignored         int
		suggestionsByPN = make(map[string]StockMappingSuggestion)
	)
	ignoreRules, err := s.loadIgnoreRules()
	if err != nil {
		return nil, err
	}
	// Validate, filter and classify each parsed row.
	for _, row := range rows {
		if strings.TrimSpace(row.Article) == "" {
			parseErrors++
			continue
		}
		if row.QtyInvalid {
			// Invalid quantities count both as qty errors and general parse errors.
			qtyParseErrors++
			parseErrors++
			continue
		}
		if shouldIgnoreStockRow(row, ignoreRules) {
			ignored++
			continue
		}
		partnumber := strings.TrimSpace(row.Article)
		key := normalizeKey(partnumber)
		mappedLots := partnumberMatcher.MatchLots(partnumber)
		if len(mappedLots) == 0 {
			// No lot mapping: the row is still imported, but surfaced as a suggestion.
			unmapped++
			suggestionsByPN[key] = upsertSuggestion(suggestionsByPN[key], StockMappingSuggestion{
				Partnumber:  partnumber,
				Description: strings.TrimSpace(row.Description),
				Reason:      "unmapped",
			})
		} else if len(mappedLots) > 1 {
			// Ambiguous mapping: more than one lot claims this partnumber.
			conflicts++
			suggestionsByPN[key] = upsertSuggestion(suggestionsByPN[key], StockMappingSuggestion{
				Partnumber:  partnumber,
				Description: strings.TrimSpace(row.Description),
				Reason:      "conflict",
			})
		}
		var comments *string
		if trimmed := strings.TrimSpace(row.Description); trimmed != "" {
			comments = &trimmed
		}
		var vendor *string
		if trimmed := strings.TrimSpace(row.Vendor); trimmed != "" {
			vendor = &trimmed
		}
		qty := row.Qty // per-iteration copy so each record gets its own pointer
		records = append(records, models.StockLog{
			Partnumber: partnumber,
			Date:       importDate,
			Price:      row.Price,
			Comments:   comments,
			Vendor:     vendor,
			Qty:        &qty,
		})
	}
	// Cap suggestions at 200 entries, sorted by partnumber.
	suggestions := collectSortedSuggestions(suggestionsByPN, 200)
	if len(records) == 0 {
		return nil, fmt.Errorf("no valid rows after filtering")
	}
	report(StockImportProgress{
		Status:          "mapping",
		Message:         "Валидация строк завершена",
		RowsTotal:       len(rows),
		ValidRows:       len(records),
		Unmapped:        unmapped,
		Conflicts:       conflicts,
		FallbackMatches: fallbackMatches,
		ParseErrors:     parseErrors,
		QtyParseErrors:  qtyParseErrors,
		Current:         40,
		Total:           100,
	})
	// Full replace of stock_log (delete all + batched insert, one transaction).
	deleted, inserted, err := s.replaceStockLogs(records)
	if err != nil {
		return nil, err
	}
	report(StockImportProgress{
		Status:     "writing",
		Message:    "Данные stock_log обновлены",
		Inserted:   inserted,
		Deleted:    deleted,
		Current:    60,
		Total:      100,
		ImportDate: importDate.Format("2006-01-02"),
	})
	items, err := s.buildWarehousePricelistItems()
	if err != nil {
		return nil, err
	}
	if len(items) == 0 {
		return nil, fmt.Errorf("stock_log does not contain positive prices for warehouse pricelist")
	}
	if createdBy == "" {
		createdBy = "unknown"
	}
	report(StockImportProgress{Status: "recalculating_warehouse", Message: "Создание warehouse прайслиста", Current: 70, Total: 100})
	var warehousePLID uint
	var warehousePLVer string
	if s.pricelistSvc == nil {
		return nil, fmt.Errorf("pricelist service unavailable")
	}
	// The pricelist creation owns the 70-100% progress band; hold at 99 until
	// the sub-operation itself reports completion.
	pl, err := s.pricelistSvc.CreateForSourceWithProgress(createdBy, string(models.PricelistSourceWarehouse), items, func(p pricelistsvc.CreateProgress) {
		current := 70 + int(float64(p.Current)*0.3)
		if p.Status != "completed" && current >= 100 {
			current = 99
		}
		report(StockImportProgress{
			Status:  "recalculating_warehouse",
			Message: p.Message,
			Current: current,
			Total:   100,
		})
	})
	if err != nil {
		return nil, err
	}
	warehousePLID = pl.ID
	warehousePLVer = pl.Version
	result := &StockImportResult{
		RowsTotal:          len(rows),
		ValidRows:          len(records),
		Inserted:           inserted,
		Deleted:            deleted,
		Unmapped:           unmapped,
		Conflicts:          conflicts,
		FallbackMatches:    fallbackMatches,
		ParseErrors:        parseErrors,
		QtyParseErrors:     qtyParseErrors,
		Ignored:            ignored,
		MappingSuggestions: suggestions,
		ImportDate:         importDate,
		WarehousePLID:      warehousePLID,
		WarehousePLVer:     warehousePLVer,
	}
	report(StockImportProgress{
		Status:             "completed",
		Message:            "Импорт завершен",
		RowsTotal:          result.RowsTotal,
		ValidRows:          result.ValidRows,
		Inserted:           result.Inserted,
		Deleted:            result.Deleted,
		Unmapped:           result.Unmapped,
		Conflicts:          result.Conflicts,
		FallbackMatches:    result.FallbackMatches,
		ParseErrors:        result.ParseErrors,
		QtyParseErrors:     result.QtyParseErrors,
		Ignored:            result.Ignored,
		MappingSuggestions: result.MappingSuggestions,
		ImportDate:         result.ImportDate.Format("2006-01-02"),
		PricelistID:        result.WarehousePLID,
		PricelistVer:       result.WarehousePLVer,
		Current:            100,
		Total:              100,
	})
	return result, nil
}
// replaceStockLogs atomically replaces the full contents of stock_log with
// records, inserting in batches of 500 inside a single transaction. It returns
// (rows deleted, rows inserted, error).
func (s *StockImportService) replaceStockLogs(records []models.StockLog) (int64, int, error) {
	var removed int64
	txErr := s.db.Transaction(func(tx *gorm.DB) error {
		res := tx.Exec("DELETE FROM stock_log")
		if res.Error != nil {
			return res.Error
		}
		removed = res.RowsAffected
		return tx.CreateInBatches(records, 500).Error
	})
	if txErr != nil {
		return 0, 0, txErr
	}
	return removed, len(records), nil
}
// buildWarehousePricelistItems recomputes warehouse prices from the current
// stock_log contents and converts them into pricelist-service create inputs.
func (s *StockImportService) buildWarehousePricelistItems() ([]pricelistsvc.CreateItemInput, error) {
	computed, err := warehouse.ComputePricelistItemsFromStockLog(s.db)
	if err != nil {
		return nil, err
	}
	result := make([]pricelistsvc.CreateItemInput, 0, len(computed))
	for _, it := range computed {
		result = append(result, pricelistsvc.CreateItemInput{
			LotName:     it.LotName,
			Price:       it.Price,
			PriceMethod: it.PriceMethod,
		})
	}
	return result, nil
}
// upsertSuggestion merges candidate into an existing suggestion: the first
// suggestion seen for a key wins, but a later one may fill in a missing
// description, and an "unmapped" reason is upgraded to "conflict".
func upsertSuggestion(prev StockMappingSuggestion, candidate StockMappingSuggestion) StockMappingSuggestion {
	if strings.TrimSpace(prev.Partnumber) == "" {
		// No prior suggestion for this key — take the candidate wholesale.
		return candidate
	}
	if candidate.Reason == "conflict" && prev.Reason != "conflict" {
		prev.Reason = "conflict"
	}
	if strings.TrimSpace(candidate.Description) != "" && strings.TrimSpace(prev.Description) == "" {
		prev.Description = candidate.Description
	}
	return prev
}
// ListMappings returns one page of partnumber→lot mappings plus the total row
// count, optionally filtered by a substring search over partnumber, lot_name
// and description. Rows with an empty lot_name sort first so unmapped entries
// surface at the top. Page defaults to 1; page size is clamped to [1, 500]
// (default 50).
func (s *StockImportService) ListMappings(page, perPage int, search string) ([]models.LotPartnumber, int64, error) {
	if s.db == nil {
		return nil, 0, fmt.Errorf("offline mode: mappings unavailable")
	}
	if page < 1 {
		page = 1
	}
	if perPage < 1 {
		perPage = 50
	}
	if perPage > 500 {
		perPage = 500
	}
	offset := (page - 1) * perPage
	query := s.db.Model(&models.LotPartnumber{})
	if search = strings.TrimSpace(search); search != "" {
		// Escape LIKE wildcards so a literal search for "100%" or "a_b" does
		// not act as a pattern match.
		escaped := strings.NewReplacer(`\`, `\\`, `%`, `\%`, `_`, `\_`).Replace(search)
		like := "%" + escaped + "%"
		query = query.Where(
			`partnumber LIKE ? ESCAPE '\' OR lot_name LIKE ? ESCAPE '\' OR description LIKE ? ESCAPE '\'`,
			like, like, like,
		)
	}
	var total int64
	if err := query.Count(&total).Error; err != nil {
		return nil, 0, err
	}
	var rows []models.LotPartnumber
	if err := query.Order("CASE WHEN TRIM(lot_name) = '' THEN 0 ELSE 1 END, partnumber ASC").Offset(offset).Limit(perPage).Find(&rows).Error; err != nil {
		return nil, 0, err
	}
	return rows, total, nil
}
// UpsertMapping replaces the partnumber→lot mapping for partnumber (matched
// case-insensitively, whitespace-trimmed) with a single new row.
//
// lotName may be empty, which records the partnumber as explicitly unmapped;
// a non-empty lotName must exist in the lots table. When description is
// empty, the first non-empty description among the rows being replaced is
// carried over so re-mapping does not lose it.
func (s *StockImportService) UpsertMapping(partnumber, lotName, description string) error {
	if s.db == nil {
		return fmt.Errorf("offline mode: mappings unavailable")
	}
	partnumber = strings.TrimSpace(partnumber)
	lotName = strings.TrimSpace(lotName)
	description = strings.TrimSpace(description)
	if partnumber == "" {
		return fmt.Errorf("partnumber is required")
	}
	if lotName != "" {
		// Reject mappings that point at a lot that does not exist.
		var lotCount int64
		if err := s.db.Model(&models.Lot{}).Where("lot_name = ?", lotName).Count(&lotCount).Error; err != nil {
			return err
		}
		if lotCount == 0 {
			return fmt.Errorf("lot not found: %s", lotName)
		}
	}
	// Replace-in-transaction: read existing rows (to salvage a description),
	// delete them all, then insert exactly one fresh row.
	return s.db.Transaction(func(tx *gorm.DB) error {
		var existing []models.LotPartnumber
		if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Find(&existing).Error; err != nil {
			return err
		}
		if description == "" {
			for _, row := range existing {
				if row.Description != nil && strings.TrimSpace(*row.Description) != "" {
					description = strings.TrimSpace(*row.Description)
					break
				}
			}
		}
		if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Delete(&models.LotPartnumber{}).Error; err != nil {
			return err
		}
		var descPtr *string
		if description != "" {
			descPtr = &description
		}
		// OnConflict DoNothing guards against a racing insert of the same key.
		return tx.Clauses(clause.OnConflict{DoNothing: true}).Create(&models.LotPartnumber{
			Partnumber:  partnumber,
			LotName:     lotName,
			Description: descPtr,
		}).Error
	})
}
// DeleteMapping removes every mapping whose partnumber equals the given one
// case-insensitively (ignoring surrounding whitespace) and reports how many
// rows were removed.
func (s *StockImportService) DeleteMapping(partnumber string) (int64, error) {
	if s.db == nil {
		return 0, fmt.Errorf("offline mode: mappings unavailable")
	}
	trimmed := strings.TrimSpace(partnumber)
	if trimmed == "" {
		return 0, fmt.Errorf("partnumber is required")
	}
	result := s.db.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", trimmed).Delete(&models.LotPartnumber{})
	return result.RowsAffected, result.Error
}
// ListIgnoreRules returns one page of stock-import ignore rules, newest first,
// together with the total rule count. Page defaults to 1; page size is clamped
// to [1, 500] (default 50).
func (s *StockImportService) ListIgnoreRules(page, perPage int) ([]models.StockIgnoreRule, int64, error) {
	if s.db == nil {
		return nil, 0, fmt.Errorf("offline mode: ignore rules unavailable")
	}
	if page < 1 {
		page = 1
	}
	switch {
	case perPage < 1:
		perPage = 50
	case perPage > 500:
		perPage = 500
	}
	query := s.db.Model(&models.StockIgnoreRule{})
	var total int64
	if err := query.Count(&total).Error; err != nil {
		return nil, 0, err
	}
	offset := (page - 1) * perPage
	var rules []models.StockIgnoreRule
	if err := query.Order("id DESC").Offset(offset).Limit(perPage).Find(&rules).Error; err != nil {
		return nil, 0, err
	}
	return rules, total, nil
}
// UpsertIgnoreRule inserts a new ignore rule after normalizing the target
// ("partnumber"/"description") and match type ("exact"/"prefix"/"suffix").
// All three fields are required; a duplicate rule is rejected with an error
// (insert uses ON CONFLICT DO NOTHING, so 0 affected rows means "exists").
func (s *StockImportService) UpsertIgnoreRule(target, matchType, pattern string) error {
	if s.db == nil {
		return fmt.Errorf("offline mode: ignore rules unavailable")
	}
	rule := models.StockIgnoreRule{
		Target:    normalizeIgnoreTarget(target),
		MatchType: normalizeIgnoreMatchType(matchType),
		Pattern:   strings.TrimSpace(pattern),
	}
	if rule.Target == "" || rule.MatchType == "" || rule.Pattern == "" {
		return fmt.Errorf("target, match_type and pattern are required")
	}
	res := s.db.Clauses(clause.OnConflict{DoNothing: true}).Create(&rule)
	if res.Error != nil {
		return res.Error
	}
	if res.RowsAffected == 0 {
		return fmt.Errorf("rule already exists")
	}
	return nil
}
// DeleteIgnoreRule removes the ignore rule with the given primary key and
// reports how many rows were affected (0 when the id does not exist).
func (s *StockImportService) DeleteIgnoreRule(id uint) (int64, error) {
	if s.db == nil {
		return 0, fmt.Errorf("offline mode: ignore rules unavailable")
	}
	result := s.db.Delete(&models.StockIgnoreRule{}, id)
	return result.RowsAffected, result.Error
}
// loadIgnoreRules reads every ignore rule from the database and normalizes it
// for matching: target and match type are canonicalized, the pattern is put
// through normalizeKey. Rows with any field normalizing to "" are dropped.
func (s *StockImportService) loadIgnoreRules() ([]stockIgnoreRule, error) {
	var stored []models.StockIgnoreRule
	if err := s.db.Find(&stored).Error; err != nil {
		return nil, err
	}
	normalized := make([]stockIgnoreRule, 0, len(stored))
	for _, raw := range stored {
		rule := stockIgnoreRule{
			Target:    normalizeIgnoreTarget(raw.Target),
			MatchType: normalizeIgnoreMatchType(raw.MatchType),
			Pattern:   normalizeKey(raw.Pattern),
		}
		if rule.Target == "" || rule.MatchType == "" || rule.Pattern == "" {
			continue
		}
		normalized = append(normalized, rule)
	}
	return normalized, nil
}
// collectSortedSuggestions flattens the suggestion map into a slice ordered
// case-insensitively by partnumber and truncated to limit entries (limit <= 0
// means unlimited). Returns nil for an empty map.
func collectSortedSuggestions(src map[string]StockMappingSuggestion, limit int) []StockMappingSuggestion {
	if len(src) == 0 {
		return nil
	}
	out := make([]StockMappingSuggestion, 0, len(src))
	for _, suggestion := range src {
		out = append(out, suggestion)
	}
	sort.Slice(out, func(a, b int) bool {
		return strings.ToLower(out[a].Partnumber) < strings.ToLower(out[b].Partnumber)
	})
	if limit > 0 && len(out) > limit {
		out = out[:limit]
	}
	return out
}
// shouldIgnoreStockRow reports whether row matches any ignore rule. Row fields
// are compared in normalizeKey form against the (already normalized) rule
// patterns; empty candidates or patterns never match. Any target other than
// "partnumber" is treated as "description".
func shouldIgnoreStockRow(row stockImportRow, rules []stockIgnoreRule) bool {
	if len(rules) == 0 {
		return false
	}
	pn := normalizeKey(row.Article)
	desc := normalizeKey(row.Description)
	for _, rule := range rules {
		candidate := desc
		if rule.Target == "partnumber" {
			candidate = pn
		}
		if candidate == "" || rule.Pattern == "" {
			continue
		}
		matched := false
		switch rule.MatchType {
		case "exact":
			matched = candidate == rule.Pattern
		case "prefix":
			matched = strings.HasPrefix(candidate, rule.Pattern)
		case "suffix":
			matched = strings.HasSuffix(candidate, rule.Pattern)
		}
		if matched {
			return true
		}
	}
	return false
}
// normalizeIgnoreTarget canonicalizes a user-supplied ignore-rule target to
// "partnumber" or "description"; anything else yields "".
func normalizeIgnoreTarget(v string) string {
	normalized := strings.ToLower(strings.TrimSpace(v))
	if normalized == "partnumber" || normalized == "description" {
		return normalized
	}
	return ""
}
// normalizeIgnoreMatchType canonicalizes a user-supplied match type to
// "exact", "prefix" or "suffix"; anything else yields "".
func normalizeIgnoreMatchType(v string) string {
	normalized := strings.ToLower(strings.TrimSpace(v))
	switch normalized {
	case "exact", "prefix", "suffix":
		return normalized
	}
	return ""
}
var (
	// reISODate matches ISO dates like "2024-05-01" (years 2000-2099 only).
	reISODate = regexp.MustCompile(`\b(20\d{2})-(\d{2})-(\d{2})\b`)
	// reRuDate matches Russian-format dates like "01.05.2024".
	reRuDate = regexp.MustCompile(`\b([0-3]\d)\.([01]\d)\.(20\d{2})\b`)
	// mxlCellRe extracts ("ru" cell text, column index) pairs from the 1C MXL
	// text format; `""` inside the captured text is an escaped double quote.
	mxlCellRe = regexp.MustCompile(`\{16,\d+,\s*\{1,1,\s*\{"ru","(.*?)"\}\s*\},0\},(\d+),`)
)
// parseStockRows dispatches parsing by file extension (.mxl or .xlsx,
// case-insensitive); any other extension is rejected with an error.
func parseStockRows(filename string, content []byte) ([]stockImportRow, error) {
	ext := filepath.Ext(filename)
	switch strings.ToLower(ext) {
	case ".mxl":
		return parseMXLRows(content)
	case ".xlsx":
		return parseXLSXRows(content)
	}
	return nil, fmt.Errorf("unsupported file format: %s", ext)
}
// parseMXLRows parses the 1C MXL text format. mxlCellRe yields (text, column)
// pairs; a cell arriving in column 1 starts a new logical row. Column layout:
// 1=folder, 2=article, 3=description, 4=vendor, 5=price, 6=free quantity.
func parseMXLRows(content []byte) ([]stockImportRow, error) {
	text := string(content)
	matches := mxlCellRe.FindAllStringSubmatch(text, -1)
	if len(matches) == 0 {
		return nil, fmt.Errorf("mxl parsing failed: no cells found")
	}
	rows := make([]map[int]string, 0, 128)
	current := map[int]string{}
	for _, m := range matches {
		// `""` is MXL's escaped double quote.
		val := strings.ReplaceAll(m[1], `""`, `"`)
		col, err := strconv.Atoi(m[2])
		if err != nil {
			continue
		}
		// A column-1 cell marks the start of the next row; flush the previous one.
		if col == 1 && len(current) > 0 {
			rows = append(rows, current)
			current = map[int]string{}
		}
		current[col] = strings.TrimSpace(val)
	}
	if len(current) > 0 {
		rows = append(rows, current)
	}
	result := make([]stockImportRow, 0, len(rows))
	for _, r := range rows {
		article := strings.TrimSpace(r[2])
		// Skip empty rows and the header row ("Артикул" = article header).
		if article == "" || strings.EqualFold(article, "Артикул") {
			continue
		}
		price, err := parseLocalizedFloat(r[5])
		if err != nil {
			// Rows with an unparseable price are dropped silently (the caller
			// does not see them as parse errors).
			continue
		}
		qtyRaw := strings.TrimSpace(r[6])
		qty, err := parseLocalizedQty(qtyRaw)
		if err != nil {
			qty = 0
		}
		result = append(result, stockImportRow{
			Folder:      strings.TrimSpace(r[1]),
			Article:     article,
			Description: strings.TrimSpace(r[3]),
			Vendor:      strings.TrimSpace(r[4]),
			Price:       price,
			Qty:         qty,
			QtyRaw:      qtyRaw,
			QtyInvalid:  err != nil, // err still holds the qty parse result here
		})
	}
	return result, nil
}
// parseXLSXRows parses the first worksheet of an .xlsx workbook. The header
// row is located by Russian column titles; "Артикул" (article) and
// "Стоимость" (price) are required to detect it, and "Свободно" (free qty)
// must be present. Rows before the header, rows with an empty article, and
// rows with an unparseable price are skipped.
func parseXLSXRows(content []byte) ([]stockImportRow, error) {
	zr, err := zip.NewReader(bytes.NewReader(content), int64(len(content)))
	if err != nil {
		return nil, fmt.Errorf("opening xlsx: %w", err)
	}
	// Shared strings are optional; a missing part just yields an empty table.
	sharedStrings, _ := readSharedStrings(zr)
	sheetPath := firstWorksheetPath(zr)
	if sheetPath == "" {
		return nil, fmt.Errorf("xlsx parsing failed: worksheet not found")
	}
	sheetData, err := readZipFile(zr, sheetPath)
	if err != nil {
		return nil, err
	}
	// Minimal worksheet model: rows of cells with reference (r), type (t),
	// value (v) and optional inline string (is>t).
	type xlsxInline struct {
		T string `xml:"t"`
	}
	type xlsxCell struct {
		R  string      `xml:"r,attr"`
		T  string      `xml:"t,attr"`
		V  string      `xml:"v"`
		IS *xlsxInline `xml:"is"`
	}
	type xlsxRow struct {
		C []xlsxCell `xml:"c"`
	}
	type xlsxSheet struct {
		Rows []xlsxRow `xml:"sheetData>row"`
	}
	var ws xlsxSheet
	if err := xml.Unmarshal(sheetData, &ws); err != nil {
		return nil, fmt.Errorf("decode worksheet: %w", err)
	}
	// Flatten into a grid of column-index -> decoded cell text.
	grid := make([]map[int]string, 0, len(ws.Rows))
	for _, r := range ws.Rows {
		rowMap := make(map[int]string, len(r.C))
		for _, c := range r.C {
			colIdx := excelRefColumn(c.R)
			if colIdx < 0 {
				continue
			}
			inlineText := ""
			if c.IS != nil {
				inlineText = c.IS.T
			}
			rowMap[colIdx] = decodeXLSXCell(c.T, c.V, inlineText, sharedStrings)
		}
		grid = append(grid, rowMap)
	}
	// Locate the header row by its Russian column titles; header indexes
	// accumulate across scanned rows until both required columns are seen.
	headerRow := -1
	headers := map[string]int{}
	for i, row := range grid {
		for idx, val := range row {
			norm := normalizeHeader(val)
			switch norm {
			case "папка", "артикул", "описание", "вендор", "стоимость", "свободно":
				headers[norm] = idx
			}
		}
		_, hasArticle := headers["артикул"]
		_, hasPrice := headers["стоимость"]
		if hasArticle && hasPrice {
			headerRow = i
			break
		}
	}
	if headerRow < 0 {
		return nil, fmt.Errorf("xlsx parsing failed: header row not found")
	}
	result := make([]stockImportRow, 0, len(grid)-headerRow-1)
	idxFolder, hasFolder := headers["папка"]
	idxArticle := headers["артикул"]
	idxDesc, hasDesc := headers["описание"]
	idxVendor, hasVendor := headers["вендор"]
	idxPrice := headers["стоимость"]
	idxQty, hasQty := headers["свободно"]
	if !hasQty {
		return nil, fmt.Errorf("xlsx parsing failed: qty column 'Свободно' not found")
	}
	for i := headerRow + 1; i < len(grid); i++ {
		row := grid[i]
		article := strings.TrimSpace(row[idxArticle])
		if article == "" {
			continue
		}
		price, err := parseLocalizedFloat(row[idxPrice])
		if err != nil {
			continue
		}
		qty := 0.0
		qtyRaw := ""
		qtyInvalid := false
		if hasQty { // always true here (guarded above); kept for symmetry
			qtyRaw = strings.TrimSpace(row[idxQty])
			qty, err = parseLocalizedQty(qtyRaw)
			if err != nil {
				qty = 0
				qtyInvalid = true
			}
		}
		folder := ""
		if hasFolder {
			folder = strings.TrimSpace(row[idxFolder])
		}
		description := ""
		if hasDesc {
			description = strings.TrimSpace(row[idxDesc])
		}
		vendor := ""
		if hasVendor {
			vendor = strings.TrimSpace(row[idxVendor])
		}
		result = append(result, stockImportRow{
			Folder:      folder,
			Article:     article,
			Description: description,
			Vendor:      vendor,
			Price:       price,
			Qty:         qty,
			QtyRaw:      qtyRaw,
			QtyInvalid:  qtyInvalid,
		})
	}
	return result, nil
}
func parseLocalizedFloat(value string) (float64, error) {
clean := strings.TrimSpace(value)
clean = strings.ReplaceAll(clean, "\u00a0", "")
clean = strings.ReplaceAll(clean, " ", "")
clean = strings.ReplaceAll(clean, ",", ".")
if clean == "" {
return 0, fmt.Errorf("empty number")
}
return strconv.ParseFloat(clean, 64)
}
func parseLocalizedQty(value string) (float64, error) {
clean := strings.TrimSpace(value)
if clean == "" {
return 0, fmt.Errorf("empty qty")
}
if v, err := parseLocalizedFloat(clean); err == nil {
return v, nil
}
// Tolerate strings like "1 200 шт" by extracting the first numeric token.
re := regexp.MustCompile(`[-+]?\d[\d\s\u00a0]*(?:[.,]\d+)?`)
match := re.FindString(clean)
if strings.TrimSpace(match) == "" {
return 0, fmt.Errorf("invalid qty: %s", value)
}
return parseLocalizedFloat(match)
}
// detectImportDate picks the import date with the following precedence: a
// date found inside the file content, then one in the filename, then the
// file's modification time, and finally today. The result is local midnight.
func detectImportDate(content []byte, filename string, fileModTime time.Time) time.Time {
	if date, ok := extractDateFromText(string(content)); ok {
		return date
	}
	if date, ok := extractDateFromFilename(filename); ok {
		return date
	}
	if fileModTime.IsZero() {
		return normalizeDate(time.Now())
	}
	return normalizeDate(fileModTime)
}
// extractDateFromRaw finds the first ISO ("2006-01-02") or Russian
// ("02.01.2006") date in s and returns it normalized to local midnight.
// The two exported-style wrappers below previously duplicated this logic.
func extractDateFromRaw(s string) (time.Time, bool) {
	if m := reISODate.FindStringSubmatch(s); len(m) == 4 {
		if d, err := time.Parse("2006-01-02", m[0]); err == nil {
			return normalizeDate(d), true
		}
	}
	if m := reRuDate.FindStringSubmatch(s); len(m) == 4 {
		if d, err := time.Parse("02.01.2006", m[0]); err == nil {
			return normalizeDate(d), true
		}
	}
	return time.Time{}, false
}

// extractDateFromText scans arbitrary text (typically raw file content) for a date.
func extractDateFromText(text string) (time.Time, bool) {
	return extractDateFromRaw(text)
}

// extractDateFromFilename scans only the base name of filename for a date.
func extractDateFromFilename(filename string) (time.Time, bool) {
	return extractDateFromRaw(filepath.Base(filename))
}
func normalizeDate(t time.Time) time.Time {
y, m, d := t.Date()
return time.Date(y, m, d, 0, 0, 0, 0, time.Local)
}
// median returns the middle value of values (the mean of the two middle values
// for an even count), or 0 for an empty slice. The input is not modified.
func median(values []float64) float64 {
	n := len(values)
	if n == 0 {
		return 0
	}
	sorted := make([]float64, n)
	copy(sorted, values)
	sort.Float64s(sorted)
	mid := n / 2
	if n%2 == 1 {
		return sorted[mid]
	}
	return (sorted[mid-1] + sorted[mid]) / 2
}
// weightedMedian returns the weighted median price of values. Points with a
// non-positive price are skipped entirely. Points with a non-positive weight
// still contribute to an unweighted-median fallback, which is used when no
// positive weight exists at all.
func weightedMedian(values []weightedPricePoint) float64 {
	if len(values) == 0 {
		return 0
	}
	var (
		weighted    []weightedPricePoint
		totalWeight float64
		prices      []float64
	)
	for _, pt := range values {
		if pt.price <= 0 {
			continue
		}
		prices = append(prices, pt.price)
		if pt.weight > 0 {
			weighted = append(weighted, pt)
			totalWeight += pt.weight
		}
	}
	// No positive weights: fall back to the plain median of valid prices.
	if totalWeight <= 0 {
		return median(prices)
	}
	// Sort by price, breaking ties by weight, then walk until the cumulative
	// weight crosses half of the total.
	sort.Slice(weighted, func(i, j int) bool {
		if weighted[i].price == weighted[j].price {
			return weighted[i].weight < weighted[j].weight
		}
		return weighted[i].price < weighted[j].price
	})
	half := totalWeight / 2.0
	acc := 0.0
	for _, pt := range weighted {
		acc += pt.weight
		if acc >= half {
			return pt.price
		}
	}
	return weighted[len(weighted)-1].price
}
// normalizeKey canonicalizes a partnumber/description for matching; it
// delegates to lotmatch.NormalizeKey so every lookup in this file shares the
// same normalization as the lot matcher.
func normalizeKey(v string) string {
	return lotmatch.NormalizeKey(v)
}
func readZipFile(zr *zip.Reader, name string) ([]byte, error) {
for _, f := range zr.File {
if f.Name != name {
continue
}
rc, err := f.Open()
if err != nil {
return nil, err
}
defer rc.Close()
return io.ReadAll(rc)
}
return nil, fmt.Errorf("zip entry not found: %s", name)
}
func firstWorksheetPath(zr *zip.Reader) string {
candidates := make([]string, 0, 4)
for _, f := range zr.File {
if strings.HasPrefix(f.Name, "xl/worksheets/") && strings.HasSuffix(f.Name, ".xml") {
candidates = append(candidates, f.Name)
}
}
if len(candidates) == 0 {
return ""
}
sort.Strings(candidates)
for _, c := range candidates {
if strings.HasSuffix(c, "sheet1.xml") {
return c
}
}
return candidates[0]
}
func readSharedStrings(zr *zip.Reader) ([]string, error) {
data, err := readZipFile(zr, "xl/sharedStrings.xml")
if err != nil {
return nil, err
}
type richRun struct {
Text string `xml:"t"`
}
type si struct {
Text string `xml:"t"`
Runs []richRun `xml:"r"`
}
type sst struct {
Items []si `xml:"si"`
}
var parsed sst
if err := xml.Unmarshal(data, &parsed); err != nil {
return nil, err
}
values := make([]string, 0, len(parsed.Items))
for _, item := range parsed.Items {
if item.Text != "" {
values = append(values, item.Text)
continue
}
var b strings.Builder
for _, run := range item.Runs {
b.WriteString(run.Text)
}
values = append(values, b.String())
}
return values, nil
}
// decodeXLSXCell resolves a worksheet cell to its display string. Type "s"
// looks the value up in the shared-strings table (falling back to the raw
// value on a bad index), "inlineStr" uses the inline text, and any other type
// returns the raw value. All results are whitespace-trimmed.
func decodeXLSXCell(cellType, value, inlineText string, sharedStrings []string) string {
	if cellType == "inlineStr" {
		return strings.TrimSpace(inlineText)
	}
	if cellType == "s" {
		if idx, err := strconv.Atoi(strings.TrimSpace(value)); err == nil && idx >= 0 && idx < len(sharedStrings) {
			return strings.TrimSpace(sharedStrings[idx])
		}
	}
	return strings.TrimSpace(value)
}
// excelRefColumn converts the leading column letters of an A1-style cell
// reference ("B7" -> 1, "AB3" -> 27, case-insensitive) to a zero-based column
// index. It returns -1 for an empty reference or one with no leading letters.
func excelRefColumn(ref string) int {
	col := 0
	seen := false
	for _, r := range ref {
		switch {
		case r >= 'A' && r <= 'Z':
			col = col*26 + int(r-'A'+1)
			seen = true
		case r >= 'a' && r <= 'z':
			col = col*26 + int(r-'a'+1)
			seen = true
		default:
			// First non-letter ends the column part (the row number).
			if !seen {
				return -1
			}
			return col - 1
		}
	}
	if !seen {
		return -1
	}
	return col - 1
}
// normalizeHeader canonicalizes a header cell for comparison: NBSPs become
// regular spaces, surrounding whitespace is trimmed, result is lower-cased.
func normalizeHeader(v string) string {
	despaced := strings.ReplaceAll(v, "\u00a0", " ")
	return strings.ToLower(strings.TrimSpace(despaced))
}