990 lines
24 KiB
Go
990 lines
24 KiB
Go
package services
|
|
|
|
import (
|
|
"archive/zip"
|
|
"bytes"
|
|
"encoding/xml"
|
|
"errors"
|
|
"fmt"
|
|
"io"
|
|
"path/filepath"
|
|
"regexp"
|
|
"sort"
|
|
"strconv"
|
|
"strings"
|
|
"time"
|
|
|
|
"git.mchus.pro/mchus/quoteforge/internal/models"
|
|
pricelistsvc "git.mchus.pro/mchus/quoteforge/internal/services/pricelist"
|
|
"gorm.io/gorm"
|
|
"gorm.io/gorm/clause"
|
|
)
|
|
|
|
// StockImportProgress is a single progress update emitted while a stock
// import runs; it is JSON-serialized for delivery to the client.
type StockImportProgress struct {
	Status          string `json:"status"`                       // phase: starting/parsing/mapping/writing/recalculating_warehouse/completed
	Message         string `json:"message,omitempty"`            // human-readable status text (Russian)
	Current         int    `json:"current,omitempty"`            // position on the progress scale
	Total           int    `json:"total,omitempty"`              // progress scale (Import always uses 100)
	RowsTotal       int    `json:"rows_total,omitempty"`         // rows parsed from the input file
	ValidRows       int    `json:"valid_rows,omitempty"`         // rows successfully mapped to a lot
	Inserted        int    `json:"inserted,omitempty"`           // stock_log rows inserted
	Deleted         int64  `json:"deleted,omitempty"`            // stock_log rows deleted before insert
	Unmapped        int    `json:"unmapped,omitempty"`           // articles with no lot match
	Conflicts       int    `json:"conflicts,omitempty"`          // articles with ambiguous lot matches
	FallbackMatches int    `json:"fallback_matches,omitempty"`   // matches found via exact/prefix fallback, not the mapping table
	ParseErrors     int    `json:"parse_errors,omitempty"`       // rows dropped for having an empty article
	ImportDate      string `json:"import_date,omitempty"`        // detected import date, "2006-01-02" format
	PricelistID     uint   `json:"warehouse_pricelist_id,omitempty"`      // ID of the generated warehouse pricelist
	PricelistVer    string `json:"warehouse_pricelist_version,omitempty"` // version of the generated warehouse pricelist
}
|
|
|
|
// StockImportResult is the final summary returned by Import; its counters
// mirror the fields of the terminal StockImportProgress report.
type StockImportResult struct {
	RowsTotal       int       // rows parsed from the input file
	ValidRows       int       // rows successfully mapped to a lot
	Inserted        int       // stock_log rows inserted
	Deleted         int64     // stock_log rows deleted before insert
	Unmapped        int       // articles with no lot match
	Conflicts       int       // articles with ambiguous lot matches
	FallbackMatches int       // matches via exact/prefix fallback, not the mapping table
	ParseErrors     int       // rows dropped for having an empty article
	ImportDate      time.Time // detected import date (local midnight)
	WarehousePLID   uint      // ID of the generated warehouse pricelist
	WarehousePLVer  string    // version of the generated warehouse pricelist
}
|
|
|
|
// pendingMapping is an article from the import file that could not be
// resolved to a lot; it is persisted (with an empty lot_name) so an operator
// can map it manually later.
type pendingMapping struct {
	Partnumber  string // trimmed article from the import file
	Description string // trimmed description; may be empty
}
|
|
|
|
// StockImportService imports warehouse stock files into stock_log, maintains
// the article->lot mapping table, and triggers warehouse pricelist rebuilds.
type StockImportService struct {
	db           *gorm.DB              // nil in offline mode; all entry points guard against it
	pricelistSvc *pricelistsvc.Service // creates the warehouse pricelist after import; checked for nil in Import
}
|
|
|
|
// NewStockImportService constructs a StockImportService from its database
// handle and pricelist-service dependency. Either may be nil (offline mode);
// methods check before use.
func NewStockImportService(db *gorm.DB, pricelistSvc *pricelistsvc.Service) *StockImportService {
	return &StockImportService{
		db:           db,
		pricelistSvc: pricelistSvc,
	}
}
|
|
|
|
// stockImportRow is one parsed data row from a stock file, normalized across
// the .mxl and .xlsx parsers.
type stockImportRow struct {
	Folder      string  // grouping/folder column ("Папка")
	Article     string  // partnumber/article ("Артикул"); required, rows without it are skipped
	Description string  // free-text description ("Описание")
	Vendor      string  // vendor name ("Вендор")
	Price       float64 // unit price ("Стоимость"); rows with an unparsable price are dropped
	Qty         float64 // available quantity ("Свободно"); defaults to 0 when unparsable
}
|
|
|
|
// Import runs the full stock-import pipeline: parse the file (.mxl or .xlsx),
// detect the import date, resolve each article to a lot, replace the
// stock_log table with the resolved rows, and rebuild the warehouse
// pricelist. Progress is reported through onProgress (if non-nil) as coarse
// percentage steps on a 0-100 scale.
//
// Parameters:
//   filename    - used to select the parser by extension and as a date source.
//   content     - raw file bytes; must be non-empty.
//   fileModTime - fallback import date when none is found in content/filename.
//   createdBy   - author recorded on the generated pricelist ("unknown" if empty).
//   onProgress  - optional progress callback; may be nil.
//
// Returns a summary of the import or an error; on error no result is returned.
func (s *StockImportService) Import(
	filename string,
	content []byte,
	fileModTime time.Time,
	createdBy string,
	onProgress func(StockImportProgress),
) (*StockImportResult, error) {
	if s.db == nil {
		return nil, fmt.Errorf("offline mode: stock import unavailable")
	}
	if len(content) == 0 {
		return nil, fmt.Errorf("empty file")
	}
	// report wraps onProgress so callers may pass nil.
	report := func(p StockImportProgress) {
		if onProgress != nil {
			onProgress(p)
		}
	}

	report(StockImportProgress{Status: "starting", Message: "Запуск импорта", Current: 0, Total: 100})

	rows, err := parseStockRows(filename, content)
	if err != nil {
		return nil, err
	}
	if len(rows) == 0 {
		return nil, fmt.Errorf("no rows parsed")
	}
	report(StockImportProgress{Status: "parsing", Message: "Файл распарсен", RowsTotal: len(rows), Current: 10, Total: 100})

	importDate := detectImportDate(content, filename, fileModTime)
	report(StockImportProgress{
		Status:     "parsing",
		Message:    "Дата импорта определена",
		ImportDate: importDate.Format("2006-01-02"),
		Current:    15,
		Total:      100,
	})

	// Snapshot mapping table and lot names once, before the per-row loop.
	resolver, err := s.newLotResolver()
	if err != nil {
		return nil, err
	}

	var (
		records         []models.StockLog
		unmapped        int
		conflicts       int
		fallbackMatches int
		parseErrors     int
		// pendingByPN collects unresolved articles keyed by normalized
		// partnumber so they can be stored for manual mapping.
		pendingByPN = make(map[string]pendingMapping)
	)

	for _, row := range rows {
		if strings.TrimSpace(row.Article) == "" {
			parseErrors++
			continue
		}
		lot, matchType, resolveErr := resolver.resolve(row.Article)
		if resolveErr != nil {
			// Remember the unresolved article; a candidate carrying a
			// non-empty description replaces one without.
			trimmedPN := strings.TrimSpace(row.Article)
			if trimmedPN != "" {
				key := normalizeKey(trimmedPN)
				if key != "" {
					candidate := pendingMapping{
						Partnumber:  trimmedPN,
						Description: strings.TrimSpace(row.Description),
					}
					if prev, ok := pendingByPN[key]; !ok || (strings.TrimSpace(prev.Description) == "" && candidate.Description != "") {
						pendingByPN[key] = candidate
					}
				}
			}
			if errors.Is(resolveErr, errResolveConflict) {
				conflicts++
			} else {
				unmapped++
			}
			continue
		}
		// Matches not coming from the explicit mapping table count as
		// fallback matches for reporting purposes.
		if matchType == "article_exact" || matchType == "prefix" {
			fallbackMatches++
		}

		var comments *string
		if trimmed := strings.TrimSpace(row.Description); trimmed != "" {
			comments = &trimmed
		}
		var vendor *string
		if trimmed := strings.TrimSpace(row.Vendor); trimmed != "" {
			vendor = &trimmed
		}
		// Copy qty into its own variable so the pointer below does not alias
		// the loop variable.
		qty := row.Qty
		records = append(records, models.StockLog{
			Lot:      lot,
			Date:     importDate,
			Price:    row.Price,
			Comments: comments,
			Vendor:   vendor,
			Qty:      &qty,
		})
	}

	// Persist unresolved articles so operators can map them later.
	if len(pendingByPN) > 0 {
		pending := make([]pendingMapping, 0, len(pendingByPN))
		for _, m := range pendingByPN {
			pending = append(pending, m)
		}
		if err := s.upsertPendingMappings(pending); err != nil {
			return nil, err
		}
	}

	if len(records) == 0 {
		return nil, fmt.Errorf("no valid rows after mapping")
	}

	report(StockImportProgress{
		Status:          "mapping",
		Message:         "Сопоставление article -> lot завершено",
		RowsTotal:       len(rows),
		ValidRows:       len(records),
		Unmapped:        unmapped,
		Conflicts:       conflicts,
		FallbackMatches: fallbackMatches,
		ParseErrors:     parseErrors,
		Current:         40,
		Total:           100,
	})

	// Full-snapshot semantics: wipe stock_log and insert the new rows.
	deleted, inserted, err := s.replaceStockLogs(records)
	if err != nil {
		return nil, err
	}

	report(StockImportProgress{
		Status:     "writing",
		Message:    "Данные stock_log обновлены",
		Inserted:   inserted,
		Deleted:    deleted,
		Current:    60,
		Total:      100,
		ImportDate: importDate.Format("2006-01-02"),
	})

	// Aggregate the freshly written rows into warehouse pricelist items
	// (median positive price per lot).
	items, err := s.buildWarehousePricelistItems()
	if err != nil {
		return nil, err
	}
	if len(items) == 0 {
		return nil, fmt.Errorf("stock_log does not contain positive prices for warehouse pricelist")
	}

	if createdBy == "" {
		createdBy = "unknown"
	}

	report(StockImportProgress{Status: "recalculating_warehouse", Message: "Создание warehouse прайслиста", Current: 70, Total: 100})
	var warehousePLID uint
	var warehousePLVer string
	if s.pricelistSvc == nil {
		return nil, fmt.Errorf("pricelist service unavailable")
	}
	// Map the pricelist service's 0-100 progress into our 70-100 band.
	pl, err := s.pricelistSvc.CreateForSourceWithProgress(createdBy, string(models.PricelistSourceWarehouse), items, func(p pricelistsvc.CreateProgress) {
		report(StockImportProgress{
			Status:  "recalculating_warehouse",
			Message: p.Message,
			Current: 70 + int(float64(p.Current)*0.3),
			Total:   100,
		})
	})
	if err != nil {
		return nil, err
	}
	warehousePLID = pl.ID
	warehousePLVer = pl.Version

	result := &StockImportResult{
		RowsTotal:       len(rows),
		ValidRows:       len(records),
		Inserted:        inserted,
		Deleted:         deleted,
		Unmapped:        unmapped,
		Conflicts:       conflicts,
		FallbackMatches: fallbackMatches,
		ParseErrors:     parseErrors,
		ImportDate:      importDate,
		WarehousePLID:   warehousePLID,
		WarehousePLVer:  warehousePLVer,
	}

	report(StockImportProgress{
		Status:          "completed",
		Message:         "Импорт завершен",
		RowsTotal:       result.RowsTotal,
		ValidRows:       result.ValidRows,
		Inserted:        result.Inserted,
		Deleted:         result.Deleted,
		Unmapped:        result.Unmapped,
		Conflicts:       result.Conflicts,
		FallbackMatches: result.FallbackMatches,
		ParseErrors:     result.ParseErrors,
		ImportDate:      result.ImportDate.Format("2006-01-02"),
		PricelistID:     result.WarehousePLID,
		PricelistVer:    result.WarehousePLVer,
		Current:         100,
		Total:           100,
	})

	return result, nil
}
|
|
|
|
// replaceStockLogs atomically replaces the entire stock_log table with
// records: a full DELETE followed by batched inserts inside one transaction.
// It returns the number of rows deleted and the number of records inserted.
func (s *StockImportService) replaceStockLogs(records []models.StockLog) (int64, int, error) {
	var deleted int64
	err := s.db.Transaction(func(tx *gorm.DB) error {
		// The import is a full snapshot, not a delta — wipe everything first.
		res := tx.Exec("DELETE FROM stock_log")
		if res.Error != nil {
			return res.Error
		}
		deleted = res.RowsAffected

		// Insert in batches of 500 to keep statement size bounded.
		if err := tx.CreateInBatches(records, 500).Error; err != nil {
			return err
		}
		return nil
	})
	if err != nil {
		return 0, 0, err
	}
	return deleted, len(records), nil
}
|
|
|
|
// buildWarehousePricelistItems aggregates stock_log rows into warehouse
// pricelist inputs: one item per lot, priced at the median of that lot's
// positive prices, sorted by lot name for deterministic output.
func (s *StockImportService) buildWarehousePricelistItems() ([]pricelistsvc.CreateItemInput, error) {
	var logs []models.StockLog
	if err := s.db.Select("lot, price").Where("price > 0").Find(&logs).Error; err != nil {
		return nil, err
	}

	// Group positive prices by trimmed lot name.
	grouped := make(map[string][]float64)
	for _, l := range logs {
		lot := strings.TrimSpace(l.Lot)
		// Defensive re-check of the SQL filter plus a blank-lot guard.
		if lot == "" || l.Price <= 0 {
			continue
		}
		grouped[lot] = append(grouped[lot], l.Price)
	}

	items := make([]pricelistsvc.CreateItemInput, 0, len(grouped))
	for lot, prices := range grouped {
		price := median(prices)
		if price <= 0 {
			continue
		}
		items = append(items, pricelistsvc.CreateItemInput{
			LotName: lot,
			Price:   price,
		})
	}
	// Map iteration order is random; sort for stable results.
	sort.Slice(items, func(i, j int) bool {
		return items[i].LotName < items[j].LotName
	})
	return items, nil
}
|
|
|
|
// ListMappings returns a page of article->lot mappings plus the total count
// matching the filter. search (optional) does a substring match over
// partnumber, lot_name, and description. page defaults to 1; perPage defaults
// to 50 and is capped at 500. Unmapped rows (blank lot_name) sort first so
// they surface for review, then by partnumber.
func (s *StockImportService) ListMappings(page, perPage int, search string) ([]models.LotPartnumber, int64, error) {
	if s.db == nil {
		return nil, 0, fmt.Errorf("offline mode: mappings unavailable")
	}
	if page < 1 {
		page = 1
	}
	if perPage < 1 {
		perPage = 50
	}
	if perPage > 500 {
		perPage = 500
	}

	offset := (page - 1) * perPage
	query := s.db.Model(&models.LotPartnumber{})
	if search = strings.TrimSpace(search); search != "" {
		like := "%" + search + "%"
		query = query.Where("partnumber LIKE ? OR lot_name LIKE ? OR description LIKE ?", like, like, like)
	}

	// Count before applying pagination.
	var total int64
	if err := query.Count(&total).Error; err != nil {
		return nil, 0, err
	}

	var rows []models.LotPartnumber
	if err := query.Order("CASE WHEN TRIM(lot_name) = '' THEN 0 ELSE 1 END, partnumber ASC").Offset(offset).Limit(perPage).Find(&rows).Error; err != nil {
		return nil, 0, err
	}
	return rows, total, nil
}
|
|
|
|
// UpsertMapping replaces all mapping rows for partnumber with a single row
// pointing at lotName. The lot must already exist. If description is empty,
// the first non-empty description among the replaced rows is carried over.
// Partnumber matching is case- and whitespace-insensitive.
func (s *StockImportService) UpsertMapping(partnumber, lotName, description string) error {
	if s.db == nil {
		return fmt.Errorf("offline mode: mappings unavailable")
	}
	partnumber = strings.TrimSpace(partnumber)
	lotName = strings.TrimSpace(lotName)
	description = strings.TrimSpace(description)
	if partnumber == "" || lotName == "" {
		return fmt.Errorf("partnumber and lot_name are required")
	}

	// Reject mappings to lots that do not exist.
	var lotCount int64
	if err := s.db.Model(&models.Lot{}).Where("lot_name = ?", lotName).Count(&lotCount).Error; err != nil {
		return err
	}
	if lotCount == 0 {
		return fmt.Errorf("lot not found: %s", lotName)
	}

	return s.db.Transaction(func(tx *gorm.DB) error {
		var existing []models.LotPartnumber
		if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Find(&existing).Error; err != nil {
			return err
		}
		// Preserve a description from the rows being replaced when the
		// caller did not supply one.
		if description == "" {
			for _, row := range existing {
				if row.Description != nil && strings.TrimSpace(*row.Description) != "" {
					description = strings.TrimSpace(*row.Description)
					break
				}
			}
		}
		// Delete-then-insert guarantees exactly one row per partnumber.
		if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Delete(&models.LotPartnumber{}).Error; err != nil {
			return err
		}
		var descPtr *string
		if description != "" {
			descPtr = &description
		}
		return tx.Clauses(clause.OnConflict{DoNothing: true}).Create(&models.LotPartnumber{
			Partnumber:  partnumber,
			LotName:     lotName,
			Description: descPtr,
		}).Error
	})
}
|
|
|
|
// DeleteMapping removes every mapping row for partnumber (case- and
// whitespace-insensitive match) and returns the number of rows deleted.
func (s *StockImportService) DeleteMapping(partnumber string) (int64, error) {
	if s.db == nil {
		return 0, fmt.Errorf("offline mode: mappings unavailable")
	}
	partnumber = strings.TrimSpace(partnumber)
	if partnumber == "" {
		return 0, fmt.Errorf("partnumber is required")
	}
	res := s.db.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Delete(&models.LotPartnumber{})
	return res.RowsAffected, res.Error
}
|
|
|
|
// upsertPendingMappings records articles that failed to resolve during import
// so they can be mapped manually later. For an unknown partnumber it creates
// a row with an empty lot_name (marking it unmapped); for existing rows it
// only backfills a missing description — it never overwrites one.
func (s *StockImportService) upsertPendingMappings(rows []pendingMapping) error {
	if s.db == nil || len(rows) == 0 {
		return nil
	}
	return s.db.Transaction(func(tx *gorm.DB) error {
		for _, row := range rows {
			pn := strings.TrimSpace(row.Partnumber)
			if pn == "" {
				continue
			}
			desc := strings.TrimSpace(row.Description)
			var existing []models.LotPartnumber
			if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", pn).Find(&existing).Error; err != nil {
				return err
			}
			if len(existing) == 0 {
				// New pending entry: empty lot_name marks it as unmapped.
				var descPtr *string
				if desc != "" {
					descPtr = &desc
				}
				if err := tx.Create(&models.LotPartnumber{
					Partnumber:  pn,
					LotName:     "",
					Description: descPtr,
				}).Error; err != nil {
					return err
				}
				continue
			}
			if desc == "" {
				continue
			}
			// Backfill the description only when no existing row has one.
			needsDescription := true
			for _, item := range existing {
				if item.Description != nil && strings.TrimSpace(*item.Description) != "" {
					needsDescription = false
					break
				}
			}
			if needsDescription {
				if err := tx.Model(&models.LotPartnumber{}).
					Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", pn).
					Update("description", desc).Error; err != nil {
					return err
				}
			}
		}
		return nil
	})
}
|
|
|
|
var (
	// reISODate matches ISO dates like 2024-05-17 (years 2000-2099).
	reISODate = regexp.MustCompile(`\b(20\d{2})-(\d{2})-(\d{2})\b`)
	// reRuDate matches Russian-style dates like 17.05.2024.
	reRuDate = regexp.MustCompile(`\b([0-3]\d)\.([01]\d)\.(20\d{2})\b`)
	// mxlCellRe extracts (cell text, column number) pairs from raw MXL text;
	// doubled quotes inside the captured text are unescaped by the parser.
	// NOTE(review): pattern appears tied to the 1C MXL dump layout — verify
	// against a sample file if the format version changes.
	mxlCellRe = regexp.MustCompile(`\{16,\d+,\s*\{1,1,\s*\{"ru","(.*?)"\}\s*\},0\},(\d+),`)

	// errResolveConflict: an article matches more than one distinct lot.
	errResolveConflict = errors.New("multiple lot matches")
	// errResolveNotFound: an article matches no lot at all.
	errResolveNotFound = errors.New("lot not found")
)
|
|
|
|
func parseStockRows(filename string, content []byte) ([]stockImportRow, error) {
|
|
switch strings.ToLower(filepath.Ext(filename)) {
|
|
case ".mxl":
|
|
return parseMXLRows(content)
|
|
case ".xlsx":
|
|
return parseXLSXRows(content)
|
|
default:
|
|
return nil, fmt.Errorf("unsupported file format: %s", filepath.Ext(filename))
|
|
}
|
|
}
|
|
|
|
// parseMXLRows parses the 1C MXL spreadsheet dump by scanning the raw text
// for cell definitions with mxlCellRe, grouping cells into logical rows:
// seeing column 1 again starts the next row. Column layout (as used below):
// 1=folder, 2=article, 3=description, 4=vendor, 5=price, 6=qty.
func parseMXLRows(content []byte) ([]stockImportRow, error) {
	text := string(content)
	matches := mxlCellRe.FindAllStringSubmatch(text, -1)
	if len(matches) == 0 {
		return nil, fmt.Errorf("mxl parsing failed: no cells found")
	}

	rows := make([]map[int]string, 0, 128)
	current := map[int]string{}
	for _, m := range matches {
		// MXL doubles quotes inside cell text; unescape them.
		val := strings.ReplaceAll(m[1], `""`, `"`)
		col, err := strconv.Atoi(m[2])
		if err != nil {
			continue
		}
		// A column-1 cell marks the start of a new row; flush the previous one.
		if col == 1 && len(current) > 0 {
			rows = append(rows, current)
			current = map[int]string{}
		}
		current[col] = strings.TrimSpace(val)
	}
	// Flush the trailing row.
	if len(current) > 0 {
		rows = append(rows, current)
	}

	result := make([]stockImportRow, 0, len(rows))
	for _, r := range rows {
		article := strings.TrimSpace(r[2])
		// Skip blank rows and the header row ("Артикул").
		if article == "" || strings.EqualFold(article, "Артикул") {
			continue
		}
		// Rows without a parsable price are dropped entirely.
		price, err := parseLocalizedFloat(r[5])
		if err != nil {
			continue
		}
		// An unparsable quantity degrades to 0 instead of dropping the row.
		qty, err := parseLocalizedFloat(r[6])
		if err != nil {
			qty = 0
		}
		result = append(result, stockImportRow{
			Folder:      strings.TrimSpace(r[1]),
			Article:     article,
			Description: strings.TrimSpace(r[3]),
			Vendor:      strings.TrimSpace(r[4]),
			Price:       price,
			Qty:         qty,
		})
	}
	return result, nil
}
|
|
|
|
func parseXLSXRows(content []byte) ([]stockImportRow, error) {
|
|
zr, err := zip.NewReader(bytes.NewReader(content), int64(len(content)))
|
|
if err != nil {
|
|
return nil, fmt.Errorf("opening xlsx: %w", err)
|
|
}
|
|
|
|
sharedStrings, _ := readSharedStrings(zr)
|
|
sheetPath := firstWorksheetPath(zr)
|
|
if sheetPath == "" {
|
|
return nil, fmt.Errorf("xlsx parsing failed: worksheet not found")
|
|
}
|
|
sheetData, err := readZipFile(zr, sheetPath)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
type xlsxInline struct {
|
|
T string `xml:"t"`
|
|
}
|
|
type xlsxCell struct {
|
|
R string `xml:"r,attr"`
|
|
T string `xml:"t,attr"`
|
|
V string `xml:"v"`
|
|
IS *xlsxInline `xml:"is"`
|
|
}
|
|
type xlsxRow struct {
|
|
C []xlsxCell `xml:"c"`
|
|
}
|
|
type xlsxSheet struct {
|
|
Rows []xlsxRow `xml:"sheetData>row"`
|
|
}
|
|
|
|
var ws xlsxSheet
|
|
if err := xml.Unmarshal(sheetData, &ws); err != nil {
|
|
return nil, fmt.Errorf("decode worksheet: %w", err)
|
|
}
|
|
|
|
grid := make([]map[int]string, 0, len(ws.Rows))
|
|
for _, r := range ws.Rows {
|
|
rowMap := make(map[int]string, len(r.C))
|
|
for _, c := range r.C {
|
|
colIdx := excelRefColumn(c.R)
|
|
if colIdx < 0 {
|
|
continue
|
|
}
|
|
inlineText := ""
|
|
if c.IS != nil {
|
|
inlineText = c.IS.T
|
|
}
|
|
rowMap[colIdx] = decodeXLSXCell(c.T, c.V, inlineText, sharedStrings)
|
|
}
|
|
grid = append(grid, rowMap)
|
|
}
|
|
|
|
headerRow := -1
|
|
headers := map[string]int{}
|
|
for i, row := range grid {
|
|
for idx, val := range row {
|
|
norm := normalizeHeader(val)
|
|
switch norm {
|
|
case "папка", "артикул", "описание", "вендор", "стоимость", "свободно":
|
|
headers[norm] = idx
|
|
}
|
|
}
|
|
_, hasArticle := headers["артикул"]
|
|
_, hasPrice := headers["стоимость"]
|
|
if hasArticle && hasPrice {
|
|
headerRow = i
|
|
break
|
|
}
|
|
}
|
|
if headerRow < 0 {
|
|
return nil, fmt.Errorf("xlsx parsing failed: header row not found")
|
|
}
|
|
|
|
result := make([]stockImportRow, 0, len(grid)-headerRow-1)
|
|
idxFolder, hasFolder := headers["папка"]
|
|
idxArticle := headers["артикул"]
|
|
idxDesc, hasDesc := headers["описание"]
|
|
idxVendor, hasVendor := headers["вендор"]
|
|
idxPrice := headers["стоимость"]
|
|
idxQty, hasQty := headers["свободно"]
|
|
for i := headerRow + 1; i < len(grid); i++ {
|
|
row := grid[i]
|
|
article := strings.TrimSpace(row[idxArticle])
|
|
if article == "" {
|
|
continue
|
|
}
|
|
price, err := parseLocalizedFloat(row[idxPrice])
|
|
if err != nil {
|
|
continue
|
|
}
|
|
qty := 0.0
|
|
if hasQty {
|
|
qty, err = parseLocalizedFloat(row[idxQty])
|
|
if err != nil {
|
|
qty = 0
|
|
}
|
|
}
|
|
|
|
folder := ""
|
|
if hasFolder {
|
|
folder = strings.TrimSpace(row[idxFolder])
|
|
}
|
|
description := ""
|
|
if hasDesc {
|
|
description = strings.TrimSpace(row[idxDesc])
|
|
}
|
|
vendor := ""
|
|
if hasVendor {
|
|
vendor = strings.TrimSpace(row[idxVendor])
|
|
}
|
|
|
|
result = append(result, stockImportRow{
|
|
Folder: folder,
|
|
Article: article,
|
|
Description: description,
|
|
Vendor: vendor,
|
|
Price: price,
|
|
Qty: qty,
|
|
})
|
|
}
|
|
return result, nil
|
|
}
|
|
|
|
// parseLocalizedFloat converts a human-entered number to float64: it strips
// regular and non-breaking spaces (thousand separators) and accepts a comma
// as the decimal mark. A string that is empty after cleanup is an error.
func parseLocalizedFloat(value string) (float64, error) {
	normalized := strings.NewReplacer("\u00a0", "", " ", "", ",", ".").
		Replace(strings.TrimSpace(value))
	if normalized == "" {
		return 0, fmt.Errorf("empty number")
	}
	return strconv.ParseFloat(normalized, 64)
}
|
|
|
|
// detectImportDate chooses the effective import date by precedence: a date
// found in the file content, then one in the filename, then the file's
// modification time, and finally today. The result is always truncated to
// local midnight by normalizeDate.
func detectImportDate(content []byte, filename string, fileModTime time.Time) time.Time {
	if d, ok := extractDateFromText(string(content)); ok {
		return d
	}
	if d, ok := extractDateFromFilename(filename); ok {
		return d
	}
	if !fileModTime.IsZero() {
		return normalizeDate(fileModTime)
	}
	return normalizeDate(time.Now())
}
|
|
|
|
func extractDateFromText(text string) (time.Time, bool) {
|
|
if m := reISODate.FindStringSubmatch(text); len(m) == 4 {
|
|
d, err := time.Parse("2006-01-02", m[0])
|
|
if err == nil {
|
|
return normalizeDate(d), true
|
|
}
|
|
}
|
|
if m := reRuDate.FindStringSubmatch(text); len(m) == 4 {
|
|
d, err := time.Parse("02.01.2006", m[0])
|
|
if err == nil {
|
|
return normalizeDate(d), true
|
|
}
|
|
}
|
|
return time.Time{}, false
|
|
}
|
|
|
|
func extractDateFromFilename(filename string) (time.Time, bool) {
|
|
base := filepath.Base(filename)
|
|
if m := reISODate.FindStringSubmatch(base); len(m) == 4 {
|
|
d, err := time.Parse("2006-01-02", m[0])
|
|
if err == nil {
|
|
return normalizeDate(d), true
|
|
}
|
|
}
|
|
if m := reRuDate.FindStringSubmatch(base); len(m) == 4 {
|
|
d, err := time.Parse("02.01.2006", m[0])
|
|
if err == nil {
|
|
return normalizeDate(d), true
|
|
}
|
|
}
|
|
return time.Time{}, false
|
|
}
|
|
|
|
func normalizeDate(t time.Time) time.Time {
|
|
y, m, d := t.Date()
|
|
return time.Date(y, m, d, 0, 0, 0, 0, time.Local)
|
|
}
|
|
|
|
// median returns the statistical median of values without mutating the input
// slice. An empty slice yields 0; an even-length slice yields the mean of the
// two middle elements.
func median(values []float64) float64 {
	n := len(values)
	if n == 0 {
		return 0
	}
	sorted := make([]float64, n)
	copy(sorted, values)
	sort.Float64s(sorted)
	mid := n / 2
	if n%2 == 1 {
		return sorted[mid]
	}
	return (sorted[mid-1] + sorted[mid]) / 2
}
|
|
|
|
// lotResolver resolves import articles to lot names using, in priority
// order: the explicit mapping table, an exact lot-name match, then a
// longest-prefix match over all lot names (see resolve).
type lotResolver struct {
	partnumberToLots map[string][]string // normalized partnumber -> lot names from the mapping table
	exactLots        map[string]string   // normalized lot name -> original lot name
	allLots          []string            // all lot names, longest first, for prefix matching
}
|
|
|
|
// newLotResolver snapshots the mapping table and all lot names into an
// in-memory lotResolver so per-row resolution during import needs no
// further database queries.
func (s *StockImportService) newLotResolver() (*lotResolver, error) {
	var mappings []models.LotPartnumber
	if err := s.db.Find(&mappings).Error; err != nil {
		return nil, err
	}
	partnumberToLots := make(map[string][]string, len(mappings))
	for _, m := range mappings {
		p := normalizeKey(m.Partnumber)
		// Skip pending rows (empty lot_name) and blank partnumbers.
		if p == "" || strings.TrimSpace(m.LotName) == "" {
			continue
		}
		partnumberToLots[p] = append(partnumberToLots[p], m.LotName)
	}

	var lots []models.Lot
	if err := s.db.Select("lot_name").Find(&lots).Error; err != nil {
		return nil, err
	}
	exactLots := make(map[string]string, len(lots))
	allLots := make([]string, 0, len(lots))
	for _, l := range lots {
		name := strings.TrimSpace(l.LotName)
		if name == "" {
			continue
		}
		k := normalizeKey(name)
		exactLots[k] = name
		allLots = append(allLots, name)
	}
	// Longest names first so prefix matching prefers the most specific lot;
	// equal lengths break ties alphabetically for determinism.
	sort.Slice(allLots, func(i, j int) bool {
		li := len([]rune(allLots[i]))
		lj := len([]rune(allLots[j]))
		if li == lj {
			return allLots[i] < allLots[j]
		}
		return li > lj
	})

	return &lotResolver{
		partnumberToLots: partnumberToLots,
		exactLots:        exactLots,
		allLots:          allLots,
	}, nil
}
|
|
|
|
// resolve maps an article to a lot name. Match order and returned match type:
//  1. "mapping_table"  — explicit mapping-table entry (must be unambiguous);
//  2. "article_exact"  — article equals a lot name (case/space-insensitive);
//  3. "prefix"         — the longest lot name that is a prefix of the article.
// It returns errResolveNotFound when nothing matches and errResolveConflict
// when a match is ambiguous.
func (r *lotResolver) resolve(article string) (string, string, error) {
	key := normalizeKey(article)
	if key == "" {
		return "", "", errResolveNotFound
	}

	if mapped := r.partnumberToLots[key]; len(mapped) > 0 {
		uniq := uniqueStrings(mapped)
		if len(uniq) == 1 {
			return uniq[0], "mapping_table", nil
		}
		// Multiple distinct lots mapped to the same partnumber.
		return "", "", errResolveConflict
	}

	if lot, ok := r.exactLots[key]; ok {
		return lot, "article_exact", nil
	}

	// Prefix fallback: pick the longest lot name (in runes) that prefixes
	// the article. Two different lots at the winning length is a conflict.
	best := ""
	bestLen := -1
	tie := false
	for _, lot := range r.allLots {
		lotKey := normalizeKey(lot)
		if lotKey == "" {
			continue
		}
		if strings.HasPrefix(key, lotKey) {
			l := len([]rune(lotKey))
			if l > bestLen {
				best = lot
				bestLen = l
				tie = false
			} else if l == bestLen && !strings.EqualFold(best, lot) {
				tie = true
			}
		}
	}
	if best == "" {
		return "", "", errResolveNotFound
	}
	if tie {
		return "", "", errResolveConflict
	}
	return best, "prefix", nil
}
|
|
|
|
// normalizeKey produces the canonical key used for case-insensitive
// article/lot comparisons: surrounding whitespace removed, then lowercased.
func normalizeKey(v string) string {
	trimmed := strings.TrimSpace(v)
	return strings.ToLower(trimmed)
}
|
|
|
|
// uniqueStrings trims each value, drops empties, deduplicates
// case-insensitively (the first spelling encountered wins), and returns the
// survivors sorted ascending.
func uniqueStrings(values []string) []string {
	out := make([]string, 0, len(values))
	seen := make(map[string]bool, len(values))
	for _, raw := range values {
		trimmed := strings.TrimSpace(raw)
		if trimmed == "" {
			continue
		}
		key := strings.ToLower(trimmed)
		if !seen[key] {
			seen[key] = true
			out = append(out, trimmed)
		}
	}
	sort.Strings(out)
	return out
}
|
|
|
|
func readZipFile(zr *zip.Reader, name string) ([]byte, error) {
|
|
for _, f := range zr.File {
|
|
if f.Name != name {
|
|
continue
|
|
}
|
|
rc, err := f.Open()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rc.Close()
|
|
return io.ReadAll(rc)
|
|
}
|
|
return nil, fmt.Errorf("zip entry not found: %s", name)
|
|
}
|
|
|
|
func firstWorksheetPath(zr *zip.Reader) string {
|
|
candidates := make([]string, 0, 4)
|
|
for _, f := range zr.File {
|
|
if strings.HasPrefix(f.Name, "xl/worksheets/") && strings.HasSuffix(f.Name, ".xml") {
|
|
candidates = append(candidates, f.Name)
|
|
}
|
|
}
|
|
if len(candidates) == 0 {
|
|
return ""
|
|
}
|
|
sort.Strings(candidates)
|
|
for _, c := range candidates {
|
|
if strings.HasSuffix(c, "sheet1.xml") {
|
|
return c
|
|
}
|
|
}
|
|
return candidates[0]
|
|
}
|
|
|
|
// readSharedStrings loads xl/sharedStrings.xml and returns one string per
// <si> item. Plain items use <t> directly; rich-text items concatenate the
// text of their <r> runs. Index positions are preserved because worksheet
// cells of type "s" reference shared strings by index.
func readSharedStrings(zr *zip.Reader) ([]string, error) {
	data, err := readZipFile(zr, "xl/sharedStrings.xml")
	if err != nil {
		return nil, err
	}
	// Minimal SpreadsheetML subset for the shared-strings table.
	type richRun struct {
		Text string `xml:"t"`
	}
	type si struct {
		Text string    `xml:"t"`
		Runs []richRun `xml:"r"`
	}
	type sst struct {
		Items []si `xml:"si"`
	}

	var parsed sst
	if err := xml.Unmarshal(data, &parsed); err != nil {
		return nil, err
	}

	values := make([]string, 0, len(parsed.Items))
	for _, item := range parsed.Items {
		if item.Text != "" {
			values = append(values, item.Text)
			continue
		}
		// Rich-text entry: join all runs into one string.
		var b strings.Builder
		for _, run := range item.Runs {
			b.WriteString(run.Text)
		}
		values = append(values, b.String())
	}
	return values, nil
}
|
|
|
|
// decodeXLSXCell resolves a worksheet cell to its trimmed string value.
// Type "s" indexes the shared-strings table (out-of-range or malformed
// indexes fall back to the raw value), "inlineStr" uses the inline text, and
// any other type returns the raw value.
func decodeXLSXCell(cellType, value, inlineText string, sharedStrings []string) string {
	if cellType == "inlineStr" {
		return strings.TrimSpace(inlineText)
	}
	if cellType == "s" {
		if idx, err := strconv.Atoi(strings.TrimSpace(value)); err == nil && idx >= 0 && idx < len(sharedStrings) {
			return strings.TrimSpace(sharedStrings[idx])
		}
	}
	return strings.TrimSpace(value)
}
|
|
|
|
// excelRefColumn converts the letter prefix of an A1-style cell reference to
// a zero-based column index ("A1" -> 0, "AA10" -> 26). Lowercase letters are
// accepted; scanning stops at the first non-letter. It returns -1 when the
// reference has no leading letters.
func excelRefColumn(ref string) int {
	col := 0
	sawLetter := false
	for _, r := range ref {
		switch {
		case r >= 'A' && r <= 'Z':
			col = col*26 + int(r-'A'+1)
		case r >= 'a' && r <= 'z':
			col = col*26 + int(r-'a'+1)
		default:
			if !sawLetter {
				return -1
			}
			return col - 1
		}
		sawLetter = true
	}
	if !sawLetter {
		return -1
	}
	return col - 1
}
|
|
|
|
// normalizeHeader canonicalizes a header cell for matching: non-breaking
// spaces become regular spaces, surrounding whitespace is trimmed, and the
// result is lowercased.
func normalizeHeader(v string) string {
	s := strings.ReplaceAll(v, "\u00a0", " ")
	s = strings.TrimSpace(s)
	return strings.ToLower(s)
}
|