Add stock pricelist admin flow with mapping placeholders and warehouse details
This commit is contained in:
@@ -1,17 +1,20 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"git.mchus.pro/mchus/quoteforge/internal/models"
|
||||
"git.mchus.pro/mchus/quoteforge/internal/repository"
|
||||
"git.mchus.pro/mchus/quoteforge/internal/services"
|
||||
"git.mchus.pro/mchus/quoteforge/internal/services/alerts"
|
||||
"git.mchus.pro/mchus/quoteforge/internal/services/pricing"
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
@@ -41,12 +44,14 @@ func calculateAverage(prices []float64) float64 {
|
||||
}
|
||||
|
||||
type PricingHandler struct {
|
||||
db *gorm.DB
|
||||
pricingService *pricing.Service
|
||||
alertService *alerts.Service
|
||||
componentRepo *repository.ComponentRepository
|
||||
priceRepo *repository.PriceRepository
|
||||
statsRepo *repository.StatsRepository
|
||||
db *gorm.DB
|
||||
pricingService *pricing.Service
|
||||
alertService *alerts.Service
|
||||
componentRepo *repository.ComponentRepository
|
||||
priceRepo *repository.PriceRepository
|
||||
statsRepo *repository.StatsRepository
|
||||
stockImportService *services.StockImportService
|
||||
dbUsername string
|
||||
}
|
||||
|
||||
func NewPricingHandler(
|
||||
@@ -56,14 +61,18 @@ func NewPricingHandler(
|
||||
componentRepo *repository.ComponentRepository,
|
||||
priceRepo *repository.PriceRepository,
|
||||
statsRepo *repository.StatsRepository,
|
||||
stockImportService *services.StockImportService,
|
||||
dbUsername string,
|
||||
) *PricingHandler {
|
||||
return &PricingHandler{
|
||||
db: db,
|
||||
pricingService: pricingService,
|
||||
alertService: alertService,
|
||||
componentRepo: componentRepo,
|
||||
priceRepo: priceRepo,
|
||||
statsRepo: statsRepo,
|
||||
db: db,
|
||||
pricingService: pricingService,
|
||||
alertService: alertService,
|
||||
componentRepo: componentRepo,
|
||||
priceRepo: priceRepo,
|
||||
statsRepo: statsRepo,
|
||||
stockImportService: stockImportService,
|
||||
dbUsername: dbUsername,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -936,3 +945,167 @@ func expandMetaPricesWithCache(metaPrices, excludeLot string, allLotNames []stri
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (h *PricingHandler) ImportStockLog(c *gin.Context) {
|
||||
if h.stockImportService == nil {
|
||||
c.JSON(http.StatusServiceUnavailable, gin.H{
|
||||
"error": "Импорт склада доступен только в онлайн режиме",
|
||||
"offline": true,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
fileHeader, err := c.FormFile("file")
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "file is required"})
|
||||
return
|
||||
}
|
||||
|
||||
file, err := fileHeader.Open()
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to open uploaded file"})
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
content, err := io.ReadAll(file)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "failed to read uploaded file"})
|
||||
return
|
||||
}
|
||||
modTime := time.Now()
|
||||
if statter, ok := file.(interface{ Stat() (os.FileInfo, error) }); ok {
|
||||
if st, statErr := statter.Stat(); statErr == nil {
|
||||
modTime = st.ModTime()
|
||||
}
|
||||
}
|
||||
|
||||
flusher, ok := c.Writer.(http.Flusher)
|
||||
if !ok {
|
||||
result, impErr := h.stockImportService.Import(fileHeader.Filename, content, modTime, h.dbUsername, nil)
|
||||
if impErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": impErr.Error()})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"status": "completed",
|
||||
"rows_total": result.RowsTotal,
|
||||
"valid_rows": result.ValidRows,
|
||||
"inserted": result.Inserted,
|
||||
"deleted": result.Deleted,
|
||||
"unmapped": result.Unmapped,
|
||||
"conflicts": result.Conflicts,
|
||||
"fallback_matches": result.FallbackMatches,
|
||||
"parse_errors": result.ParseErrors,
|
||||
"import_date": result.ImportDate.Format("2006-01-02"),
|
||||
"warehouse_pricelist_id": result.WarehousePLID,
|
||||
"warehouse_pricelist_version": result.WarehousePLVer,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
c.Header("Content-Type", "text/event-stream")
|
||||
c.Header("Cache-Control", "no-cache")
|
||||
c.Header("Connection", "keep-alive")
|
||||
c.Header("X-Accel-Buffering", "no")
|
||||
|
||||
send := func(p gin.H) {
|
||||
c.SSEvent("progress", p)
|
||||
flusher.Flush()
|
||||
}
|
||||
|
||||
send(gin.H{"status": "starting", "message": "Запуск импорта"})
|
||||
_, impErr := h.stockImportService.Import(fileHeader.Filename, content, modTime, h.dbUsername, func(p services.StockImportProgress) {
|
||||
send(gin.H{
|
||||
"status": p.Status,
|
||||
"message": p.Message,
|
||||
"current": p.Current,
|
||||
"total": p.Total,
|
||||
"rows_total": p.RowsTotal,
|
||||
"valid_rows": p.ValidRows,
|
||||
"inserted": p.Inserted,
|
||||
"deleted": p.Deleted,
|
||||
"unmapped": p.Unmapped,
|
||||
"conflicts": p.Conflicts,
|
||||
"fallback_matches": p.FallbackMatches,
|
||||
"parse_errors": p.ParseErrors,
|
||||
"import_date": p.ImportDate,
|
||||
"warehouse_pricelist_id": p.PricelistID,
|
||||
"warehouse_pricelist_version": p.PricelistVer,
|
||||
})
|
||||
})
|
||||
if impErr != nil {
|
||||
send(gin.H{"status": "error", "message": impErr.Error()})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func (h *PricingHandler) ListStockMappings(c *gin.Context) {
|
||||
if h.stockImportService == nil {
|
||||
c.JSON(http.StatusServiceUnavailable, gin.H{
|
||||
"error": "Сопоставления доступны только в онлайн режиме",
|
||||
"offline": true,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
|
||||
perPage, _ := strconv.Atoi(c.DefaultQuery("per_page", "50"))
|
||||
search := c.Query("search")
|
||||
|
||||
rows, total, err := h.stockImportService.ListMappings(page, perPage, search)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"items": rows,
|
||||
"total": total,
|
||||
"page": page,
|
||||
"per_page": perPage,
|
||||
})
|
||||
}
|
||||
|
||||
func (h *PricingHandler) UpsertStockMapping(c *gin.Context) {
|
||||
if h.stockImportService == nil {
|
||||
c.JSON(http.StatusServiceUnavailable, gin.H{
|
||||
"error": "Сопоставления доступны только в онлайн режиме",
|
||||
"offline": true,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
var req struct {
|
||||
Partnumber string `json:"partnumber" binding:"required"`
|
||||
LotName string `json:"lot_name" binding:"required"`
|
||||
Description string `json:"description"`
|
||||
}
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
if err := h.stockImportService.UpsertMapping(req.Partnumber, req.LotName, req.Description); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"message": "mapping saved"})
|
||||
}
|
||||
|
||||
func (h *PricingHandler) DeleteStockMapping(c *gin.Context) {
|
||||
if h.stockImportService == nil {
|
||||
c.JSON(http.StatusServiceUnavailable, gin.H{
|
||||
"error": "Сопоставления доступны только в онлайн режиме",
|
||||
"offline": true,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
partnumber := c.Param("partnumber")
|
||||
deleted, err := h.stockImportService.DeleteMapping(partnumber)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"deleted": deleted})
|
||||
}
|
||||
|
||||
@@ -37,3 +37,31 @@ type Supplier struct {
|
||||
func (Supplier) TableName() string {
|
||||
return "supplier"
|
||||
}
|
||||
|
||||
// StockLog stores warehouse stock snapshots imported from external files.
// Rows are fully replaced on each import (see StockImportService).
type StockLog struct {
	StockLogID uint      `gorm:"column:stock_log_id;primaryKey;autoIncrement"`
	Lot        string    `gorm:"column:lot;size:255;not null"` // internal lot name the imported row was mapped to
	Supplier   *string   `gorm:"column:supplier;size:255"`
	Date       time.Time `gorm:"column:date;type:date;not null"` // snapshot (import) date, date precision only
	Price      float64   `gorm:"column:price;not null"`
	Quality    *string   `gorm:"column:quality;size:255"`
	Comments   *string   `gorm:"column:comments;size:15000"`
	Vendor     *string   `gorm:"column:vendor;size:255"`
	Qty        *float64  `gorm:"column:qty"` // nil when quantity is unknown
}

// TableName names the database table backing StockLog.
func (StockLog) TableName() string {
	return "stock_log"
}

// LotPartnumber maps external part numbers to internal lots.
// The composite primary key (partnumber, lot_name) allows one partnumber
// to reference several lots.
type LotPartnumber struct {
	Partnumber  string  `gorm:"column:partnumber;size:255;primaryKey"`
	LotName     string  `gorm:"column:lot_name;size:255;primaryKey"` // empty string marks an unresolved placeholder row
	Description *string `gorm:"column:description;size:10000"`
}

// TableName names the database table backing LotPartnumber.
func (LotPartnumber) TableName() string {
	return "lot_partnumbers"
}
|
||||
|
||||
@@ -65,8 +65,10 @@ type PricelistItem struct {
|
||||
MetaPrices string `gorm:"size:1000" json:"meta_prices,omitempty"`
|
||||
|
||||
// Virtual fields for display
|
||||
LotDescription string `gorm:"-" json:"lot_description,omitempty"`
|
||||
Category string `gorm:"-" json:"category,omitempty"`
|
||||
LotDescription string `gorm:"-" json:"lot_description,omitempty"`
|
||||
Category string `gorm:"-" json:"category,omitempty"`
|
||||
AvailableQty *float64 `gorm:"-" json:"available_qty,omitempty"`
|
||||
Partnumbers []string `gorm:"-" json:"partnumbers,omitempty"`
|
||||
}
|
||||
|
||||
func (PricelistItem) TableName() string {
|
||||
|
||||
@@ -240,9 +240,86 @@ func (r *PricelistRepository) GetItems(pricelistID uint, offset, limit int, sear
|
||||
}
|
||||
}
|
||||
|
||||
var pl models.Pricelist
|
||||
if err := r.db.Select("source").Where("id = ?", pricelistID).First(&pl).Error; err == nil && pl.Source == string(models.PricelistSourceWarehouse) {
|
||||
if err := r.enrichWarehouseItems(items); err != nil {
|
||||
return nil, 0, fmt.Errorf("enriching warehouse items: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return items, total, nil
|
||||
}
|
||||
|
||||
// enrichWarehouseItems fills the display-only AvailableQty and Partnumbers
// fields of warehouse pricelist items: quantities are summed per lot from
// stock_log, and part numbers are collected from lot_partnumbers.
// Items are mutated in place; it is a no-op for an empty slice.
func (r *PricelistRepository) enrichWarehouseItems(items []models.PricelistItem) error {
	if len(items) == 0 {
		return nil
	}
	// Collect the distinct, trimmed, non-empty lot names to query for.
	lots := make([]string, 0, len(items))
	seen := make(map[string]struct{}, len(items))
	for _, item := range items {
		lot := strings.TrimSpace(item.LotName)
		if lot == "" {
			continue
		}
		if _, ok := seen[lot]; ok {
			continue
		}
		seen[lot] = struct{}{}
		lots = append(lots, lot)
	}
	if len(lots) == 0 {
		return nil
	}

	// Aggregate available quantity per lot from stock_log.
	type lotQty struct {
		Lot string
		Qty float64
	}
	var qtyRows []lotQty
	if err := r.db.Model(&models.StockLog{}).
		Select("lot, COALESCE(SUM(qty), 0) AS qty").
		Where("lot IN ?", lots).
		Group("lot").
		Scan(&qtyRows).Error; err != nil {
		return err
	}
	qtyByLot := make(map[string]float64, len(qtyRows))
	for _, row := range qtyRows {
		qtyByLot[row.Lot] = row.Qty
	}

	// Collect part numbers per lot, skipping placeholder rows with a blank
	// lot_name and deduplicating case-insensitively per (lot, partnumber).
	var mappings []models.LotPartnumber
	if err := r.db.Where("lot_name IN ? AND TRIM(lot_name) <> ''", lots).
		Order("partnumber ASC").
		Find(&mappings).Error; err != nil {
		return err
	}
	partnumbersByLot := make(map[string][]string, len(lots))
	seenPair := make(map[string]struct{}, len(mappings))
	for _, m := range mappings {
		lot := strings.TrimSpace(m.LotName)
		pn := strings.TrimSpace(m.Partnumber)
		if lot == "" || pn == "" {
			continue
		}
		// NUL byte cannot occur in either part, so it is a safe pair separator.
		key := lot + "\x00" + strings.ToLower(pn)
		if _, ok := seenPair[key]; ok {
			continue
		}
		seenPair[key] = struct{}{}
		partnumbersByLot[lot] = append(partnumbersByLot[lot], pn)
	}

	// NOTE(review): lookups below use the raw items[i].LotName while the maps
	// were keyed on trimmed values — items whose LotName carries surrounding
	// whitespace would miss their data. Confirm LotName is stored trimmed.
	for i := range items {
		if qty, ok := qtyByLot[items[i].LotName]; ok {
			q := qty // copy so the pointer does not alias the loop variable
			items[i].AvailableQty = &q
		}
		items[i].Partnumbers = partnumbersByLot[items[i].LotName]
	}
	return nil
}
|
||||
|
||||
// GetPriceForLot returns item price for a lot within a pricelist.
|
||||
func (r *PricelistRepository) GetPriceForLot(pricelistID uint, lotName string) (float64, error) {
|
||||
var item models.PricelistItem
|
||||
@@ -265,17 +342,18 @@ func (r *PricelistRepository) GenerateVersion() (string, error) {
|
||||
// GenerateVersionBySource generates a new version string in format YYYY-MM-DD-NNN scoped by source.
|
||||
func (r *PricelistRepository) GenerateVersionBySource(source string) (string, error) {
|
||||
today := time.Now().Format("2006-01-02")
|
||||
prefix := versionPrefixBySource(source)
|
||||
|
||||
var last models.Pricelist
|
||||
err := r.db.Model(&models.Pricelist{}).
|
||||
Select("version").
|
||||
Where("source = ? AND version LIKE ?", source, today+"-%").
|
||||
Where("source = ? AND version LIKE ?", source, prefix+"-"+today+"-%").
|
||||
Order("version DESC").
|
||||
Limit(1).
|
||||
Take(&last).Error
|
||||
if err != nil {
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
return fmt.Sprintf("%s-001", today), nil
|
||||
return fmt.Sprintf("%s-%s-001", prefix, today), nil
|
||||
}
|
||||
return "", fmt.Errorf("loading latest today's pricelist version: %w", err)
|
||||
}
|
||||
@@ -290,7 +368,18 @@ func (r *PricelistRepository) GenerateVersionBySource(source string) (string, er
|
||||
return "", fmt.Errorf("parsing pricelist sequence %q: %w", parts[len(parts)-1], err)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s-%03d", today, n+1), nil
|
||||
return fmt.Sprintf("%s-%s-%03d", prefix, today, n+1), nil
|
||||
}
|
||||
|
||||
func versionPrefixBySource(source string) string {
|
||||
switch models.NormalizePricelistSource(source) {
|
||||
case models.PricelistSourceWarehouse:
|
||||
return "S"
|
||||
case models.PricelistSourceCompetitor:
|
||||
return "B"
|
||||
default:
|
||||
return "E"
|
||||
}
|
||||
}
|
||||
|
||||
// GetPriceForLotBySource returns item price for a lot from latest active pricelist of source.
|
||||
|
||||
@@ -19,7 +19,7 @@ func TestGenerateVersion_FirstOfDay(t *testing.T) {
|
||||
}
|
||||
|
||||
today := time.Now().Format("2006-01-02")
|
||||
want := fmt.Sprintf("%s-001", today)
|
||||
want := fmt.Sprintf("E-%s-001", today)
|
||||
if version != want {
|
||||
t.Fatalf("expected %s, got %s", want, version)
|
||||
}
|
||||
@@ -30,8 +30,8 @@ func TestGenerateVersion_UsesMaxSuffixNotCount(t *testing.T) {
|
||||
today := time.Now().Format("2006-01-02")
|
||||
|
||||
seed := []models.Pricelist{
|
||||
{Source: string(models.PricelistSourceEstimate), Version: fmt.Sprintf("%s-001", today), CreatedBy: "test", IsActive: true},
|
||||
{Source: string(models.PricelistSourceEstimate), Version: fmt.Sprintf("%s-003", today), CreatedBy: "test", IsActive: true},
|
||||
{Source: string(models.PricelistSourceEstimate), Version: fmt.Sprintf("E-%s-001", today), CreatedBy: "test", IsActive: true},
|
||||
{Source: string(models.PricelistSourceEstimate), Version: fmt.Sprintf("E-%s-003", today), CreatedBy: "test", IsActive: true},
|
||||
}
|
||||
for _, pl := range seed {
|
||||
if err := repo.Create(&pl); err != nil {
|
||||
@@ -44,7 +44,7 @@ func TestGenerateVersion_UsesMaxSuffixNotCount(t *testing.T) {
|
||||
t.Fatalf("GenerateVersionBySource returned error: %v", err)
|
||||
}
|
||||
|
||||
want := fmt.Sprintf("%s-004", today)
|
||||
want := fmt.Sprintf("E-%s-004", today)
|
||||
if version != want {
|
||||
t.Fatalf("expected %s, got %s", want, version)
|
||||
}
|
||||
@@ -55,8 +55,8 @@ func TestGenerateVersion_IsolatedBySource(t *testing.T) {
|
||||
today := time.Now().Format("2006-01-02")
|
||||
|
||||
seed := []models.Pricelist{
|
||||
{Source: string(models.PricelistSourceEstimate), Version: fmt.Sprintf("%s-009", today), CreatedBy: "test", IsActive: true},
|
||||
{Source: string(models.PricelistSourceWarehouse), Version: fmt.Sprintf("%s-002", today), CreatedBy: "test", IsActive: true},
|
||||
{Source: string(models.PricelistSourceEstimate), Version: fmt.Sprintf("E-%s-009", today), CreatedBy: "test", IsActive: true},
|
||||
{Source: string(models.PricelistSourceWarehouse), Version: fmt.Sprintf("S-%s-002", today), CreatedBy: "test", IsActive: true},
|
||||
}
|
||||
for _, pl := range seed {
|
||||
if err := repo.Create(&pl); err != nil {
|
||||
@@ -69,7 +69,7 @@ func TestGenerateVersion_IsolatedBySource(t *testing.T) {
|
||||
t.Fatalf("GenerateVersionBySource returned error: %v", err)
|
||||
}
|
||||
|
||||
want := fmt.Sprintf("%s-003", today)
|
||||
want := fmt.Sprintf("S-%s-003", today)
|
||||
if version != want {
|
||||
t.Fatalf("expected %s, got %s", want, version)
|
||||
}
|
||||
|
||||
989
internal/services/stock_import.go
Normal file
989
internal/services/stock_import.go
Normal file
@@ -0,0 +1,989 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"encoding/xml"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.mchus.pro/mchus/quoteforge/internal/models"
|
||||
pricelistsvc "git.mchus.pro/mchus/quoteforge/internal/services/pricelist"
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
)
|
||||
|
||||
// StockImportProgress is a single progress update emitted while a stock
// import runs; handlers forward it to clients (e.g. as SSE events), so the
// JSON tags define the wire format.
type StockImportProgress struct {
	Status          string `json:"status"`            // e.g. "starting", "parsing", "mapping", "writing", "completed"
	Message         string `json:"message,omitempty"` // human-readable description of the current stage
	Current         int    `json:"current,omitempty"` // overall progress out of Total
	Total           int    `json:"total,omitempty"`
	RowsTotal       int    `json:"rows_total,omitempty"`
	ValidRows       int    `json:"valid_rows,omitempty"`
	Inserted        int    `json:"inserted,omitempty"`
	Deleted         int64  `json:"deleted,omitempty"`
	Unmapped        int    `json:"unmapped,omitempty"`
	Conflicts       int    `json:"conflicts,omitempty"`
	FallbackMatches int    `json:"fallback_matches,omitempty"`
	ParseErrors     int    `json:"parse_errors,omitempty"`
	ImportDate      string `json:"import_date,omitempty"` // formatted as 2006-01-02
	PricelistID     uint   `json:"warehouse_pricelist_id,omitempty"`
	PricelistVer    string `json:"warehouse_pricelist_version,omitempty"`
}

// StockImportResult summarizes a finished stock import.
type StockImportResult struct {
	RowsTotal       int       // rows parsed from the uploaded file
	ValidRows       int       // rows successfully mapped to a lot
	Inserted        int       // stock_log rows written
	Deleted         int64     // previous stock_log rows removed
	Unmapped        int       // articles with no matching lot
	Conflicts       int       // articles matching more than one lot
	FallbackMatches int       // rows matched via fallback strategies
	ParseErrors     int       // rows skipped due to parse problems
	ImportDate      time.Time // snapshot date used for all inserted rows
	WarehousePLID   uint      // id of the warehouse pricelist created from this import
	WarehousePLVer  string    // version of that pricelist
}

// pendingMapping is an article that could not be resolved to a lot during an
// import; it is persisted to lot_partnumbers (with an empty lot_name) so an
// operator can map it later.
type pendingMapping struct {
	Partnumber  string
	Description string
}
|
||||
|
||||
// StockImportService imports warehouse stock files into stock_log and
// rebuilds the warehouse pricelist from the imported prices.
type StockImportService struct {
	db           *gorm.DB              // nil in offline mode; every entry point guards against it
	pricelistSvc *pricelistsvc.Service // used to create the warehouse pricelist after an import
}

// NewStockImportService constructs a StockImportService over the given
// database handle and pricelist service.
func NewStockImportService(db *gorm.DB, pricelistSvc *pricelistsvc.Service) *StockImportService {
	return &StockImportService{
		db:           db,
		pricelistSvc: pricelistSvc,
	}
}

// stockImportRow is one parsed line of an uploaded stock file.
type stockImportRow struct {
	Folder      string
	Article     string // external part number; the key used for lot resolution
	Description string
	Vendor      string
	Price       float64
	Qty         float64
}
|
||||
|
||||
// Import runs a full warehouse stock import from an uploaded file:
//  1. parse rows from the file (MXL/XLSX by extension),
//  2. detect the snapshot date from content/filename/mod-time,
//  3. resolve each article to a lot, collecting unresolved articles as
//     pending mappings,
//  4. replace the stock_log table with the mapped rows,
//  5. rebuild the warehouse pricelist from the new stock data.
//
// onProgress (optional, may be nil) receives stage updates throughout.
// createdBy defaults to "unknown" when empty. Returns a summary of the
// import, or an error on the first failed stage.
func (s *StockImportService) Import(
	filename string,
	content []byte,
	fileModTime time.Time,
	createdBy string,
	onProgress func(StockImportProgress),
) (*StockImportResult, error) {
	if s.db == nil {
		return nil, fmt.Errorf("offline mode: stock import unavailable")
	}
	if len(content) == 0 {
		return nil, fmt.Errorf("empty file")
	}
	// report forwards progress only when a callback was supplied.
	report := func(p StockImportProgress) {
		if onProgress != nil {
			onProgress(p)
		}
	}

	report(StockImportProgress{Status: "starting", Message: "Запуск импорта", Current: 0, Total: 100})

	// Stage 1: parse the uploaded file into raw rows.
	rows, err := parseStockRows(filename, content)
	if err != nil {
		return nil, err
	}
	if len(rows) == 0 {
		return nil, fmt.Errorf("no rows parsed")
	}
	report(StockImportProgress{Status: "parsing", Message: "Файл распарсен", RowsTotal: len(rows), Current: 10, Total: 100})

	// Stage 2: determine the snapshot date for all inserted rows.
	importDate := detectImportDate(content, filename, fileModTime)
	report(StockImportProgress{
		Status:     "parsing",
		Message:    "Дата импорта определена",
		ImportDate: importDate.Format("2006-01-02"),
		Current:    15,
		Total:      100,
	})

	resolver, err := s.newLotResolver()
	if err != nil {
		return nil, err
	}

	var (
		records         []models.StockLog
		unmapped        int
		conflicts       int
		fallbackMatches int
		parseErrors     int
		pendingByPN     = make(map[string]pendingMapping)
	)

	// Stage 3: map each parsed row's article to an internal lot.
	for _, row := range rows {
		if strings.TrimSpace(row.Article) == "" {
			parseErrors++
			continue
		}
		lot, matchType, resolveErr := resolver.resolve(row.Article)
		if resolveErr != nil {
			// Remember the unresolved article so it can be mapped later;
			// prefer a candidate that carries a non-empty description.
			trimmedPN := strings.TrimSpace(row.Article)
			if trimmedPN != "" {
				key := normalizeKey(trimmedPN)
				if key != "" {
					candidate := pendingMapping{
						Partnumber:  trimmedPN,
						Description: strings.TrimSpace(row.Description),
					}
					if prev, ok := pendingByPN[key]; !ok || (strings.TrimSpace(prev.Description) == "" && candidate.Description != "") {
						pendingByPN[key] = candidate
					}
				}
			}
			if errors.Is(resolveErr, errResolveConflict) {
				conflicts++
			} else {
				unmapped++
			}
			continue
		}
		// Count matches found via fallback strategies for the summary.
		if matchType == "article_exact" || matchType == "prefix" {
			fallbackMatches++
		}

		var comments *string
		if trimmed := strings.TrimSpace(row.Description); trimmed != "" {
			comments = &trimmed
		}
		var vendor *string
		if trimmed := strings.TrimSpace(row.Vendor); trimmed != "" {
			vendor = &trimmed
		}
		qty := row.Qty
		// Supplier and Quality are not provided by the file format and stay nil.
		records = append(records, models.StockLog{
			Lot:      lot,
			Date:     importDate,
			Price:    row.Price,
			Comments: comments,
			Vendor:   vendor,
			Qty:      &qty,
		})
	}

	// Persist unresolved articles as placeholder mappings before bailing out,
	// so operators can map them even when this import yields no valid rows.
	if len(pendingByPN) > 0 {
		pending := make([]pendingMapping, 0, len(pendingByPN))
		for _, m := range pendingByPN {
			pending = append(pending, m)
		}
		if err := s.upsertPendingMappings(pending); err != nil {
			return nil, err
		}
	}

	if len(records) == 0 {
		return nil, fmt.Errorf("no valid rows after mapping")
	}

	report(StockImportProgress{
		Status:          "mapping",
		Message:         "Сопоставление article -> lot завершено",
		RowsTotal:       len(rows),
		ValidRows:       len(records),
		Unmapped:        unmapped,
		Conflicts:       conflicts,
		FallbackMatches: fallbackMatches,
		ParseErrors:     parseErrors,
		Current:         40,
		Total:           100,
	})

	// Stage 4: replace the stock_log contents atomically.
	deleted, inserted, err := s.replaceStockLogs(records)
	if err != nil {
		return nil, err
	}

	report(StockImportProgress{
		Status:     "writing",
		Message:    "Данные stock_log обновлены",
		Inserted:   inserted,
		Deleted:    deleted,
		Current:    60,
		Total:      100,
		ImportDate: importDate.Format("2006-01-02"),
	})

	// Stage 5: build warehouse pricelist items from the fresh stock data.
	items, err := s.buildWarehousePricelistItems()
	if err != nil {
		return nil, err
	}
	if len(items) == 0 {
		return nil, fmt.Errorf("stock_log does not contain positive prices for warehouse pricelist")
	}

	if createdBy == "" {
		createdBy = "unknown"
	}

	report(StockImportProgress{Status: "recalculating_warehouse", Message: "Создание warehouse прайслиста", Current: 70, Total: 100})
	var warehousePLID uint
	var warehousePLVer string
	if s.pricelistSvc == nil {
		return nil, fmt.Errorf("pricelist service unavailable")
	}
	// Pricelist creation progress is mapped into the 70..100 range of the
	// overall import progress.
	pl, err := s.pricelistSvc.CreateForSourceWithProgress(createdBy, string(models.PricelistSourceWarehouse), items, func(p pricelistsvc.CreateProgress) {
		report(StockImportProgress{
			Status:  "recalculating_warehouse",
			Message: p.Message,
			Current: 70 + int(float64(p.Current)*0.3),
			Total:   100,
		})
	})
	if err != nil {
		return nil, err
	}
	warehousePLID = pl.ID
	warehousePLVer = pl.Version

	result := &StockImportResult{
		RowsTotal:       len(rows),
		ValidRows:       len(records),
		Inserted:        inserted,
		Deleted:         deleted,
		Unmapped:        unmapped,
		Conflicts:       conflicts,
		FallbackMatches: fallbackMatches,
		ParseErrors:     parseErrors,
		ImportDate:      importDate,
		WarehousePLID:   warehousePLID,
		WarehousePLVer:  warehousePLVer,
	}

	report(StockImportProgress{
		Status:          "completed",
		Message:         "Импорт завершен",
		RowsTotal:       result.RowsTotal,
		ValidRows:       result.ValidRows,
		Inserted:        result.Inserted,
		Deleted:         result.Deleted,
		Unmapped:        result.Unmapped,
		Conflicts:       result.Conflicts,
		FallbackMatches: result.FallbackMatches,
		ParseErrors:     result.ParseErrors,
		ImportDate:      result.ImportDate.Format("2006-01-02"),
		PricelistID:     result.WarehousePLID,
		PricelistVer:    result.WarehousePLVer,
		Current:         100,
		Total:           100,
	})

	return result, nil
}
|
||||
|
||||
func (s *StockImportService) replaceStockLogs(records []models.StockLog) (int64, int, error) {
|
||||
var deleted int64
|
||||
err := s.db.Transaction(func(tx *gorm.DB) error {
|
||||
res := tx.Exec("DELETE FROM stock_log")
|
||||
if res.Error != nil {
|
||||
return res.Error
|
||||
}
|
||||
deleted = res.RowsAffected
|
||||
|
||||
if err := tx.CreateInBatches(records, 500).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
return deleted, len(records), nil
|
||||
}
|
||||
|
||||
func (s *StockImportService) buildWarehousePricelistItems() ([]pricelistsvc.CreateItemInput, error) {
|
||||
var logs []models.StockLog
|
||||
if err := s.db.Select("lot, price").Where("price > 0").Find(&logs).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
grouped := make(map[string][]float64)
|
||||
for _, l := range logs {
|
||||
lot := strings.TrimSpace(l.Lot)
|
||||
if lot == "" || l.Price <= 0 {
|
||||
continue
|
||||
}
|
||||
grouped[lot] = append(grouped[lot], l.Price)
|
||||
}
|
||||
|
||||
items := make([]pricelistsvc.CreateItemInput, 0, len(grouped))
|
||||
for lot, prices := range grouped {
|
||||
price := median(prices)
|
||||
if price <= 0 {
|
||||
continue
|
||||
}
|
||||
items = append(items, pricelistsvc.CreateItemInput{
|
||||
LotName: lot,
|
||||
Price: price,
|
||||
})
|
||||
}
|
||||
sort.Slice(items, func(i, j int) bool {
|
||||
return items[i].LotName < items[j].LotName
|
||||
})
|
||||
return items, nil
|
||||
}
|
||||
|
||||
func (s *StockImportService) ListMappings(page, perPage int, search string) ([]models.LotPartnumber, int64, error) {
|
||||
if s.db == nil {
|
||||
return nil, 0, fmt.Errorf("offline mode: mappings unavailable")
|
||||
}
|
||||
if page < 1 {
|
||||
page = 1
|
||||
}
|
||||
if perPage < 1 {
|
||||
perPage = 50
|
||||
}
|
||||
if perPage > 500 {
|
||||
perPage = 500
|
||||
}
|
||||
|
||||
offset := (page - 1) * perPage
|
||||
query := s.db.Model(&models.LotPartnumber{})
|
||||
if search = strings.TrimSpace(search); search != "" {
|
||||
like := "%" + search + "%"
|
||||
query = query.Where("partnumber LIKE ? OR lot_name LIKE ? OR description LIKE ?", like, like, like)
|
||||
}
|
||||
|
||||
var total int64
|
||||
if err := query.Count(&total).Error; err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
var rows []models.LotPartnumber
|
||||
if err := query.Order("CASE WHEN TRIM(lot_name) = '' THEN 0 ELSE 1 END, partnumber ASC").Offset(offset).Limit(perPage).Find(&rows).Error; err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
return rows, total, nil
|
||||
}
|
||||
|
||||
// UpsertMapping re-points a partnumber to a single lot: inside one
// transaction, every existing row for the partnumber (matched
// case-insensitively on the trimmed value) is deleted and replaced with one
// (partnumber, lotName) row. When description is blank, the first non-empty
// description among the replaced rows is carried over. The lot must exist.
func (s *StockImportService) UpsertMapping(partnumber, lotName, description string) error {
	if s.db == nil {
		return fmt.Errorf("offline mode: mappings unavailable")
	}
	partnumber = strings.TrimSpace(partnumber)
	lotName = strings.TrimSpace(lotName)
	description = strings.TrimSpace(description)
	if partnumber == "" || lotName == "" {
		return fmt.Errorf("partnumber and lot_name are required")
	}

	// Reject mappings to lots that do not exist.
	var lotCount int64
	if err := s.db.Model(&models.Lot{}).Where("lot_name = ?", lotName).Count(&lotCount).Error; err != nil {
		return err
	}
	if lotCount == 0 {
		return fmt.Errorf("lot not found: %s", lotName)
	}

	return s.db.Transaction(func(tx *gorm.DB) error {
		var existing []models.LotPartnumber
		if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Find(&existing).Error; err != nil {
			return err
		}
		// Preserve a description from the rows we are about to delete when
		// the caller did not supply one.
		if description == "" {
			for _, row := range existing {
				if row.Description != nil && strings.TrimSpace(*row.Description) != "" {
					description = strings.TrimSpace(*row.Description)
					break
				}
			}
		}
		// Delete all case-insensitive variants, then insert the canonical row.
		if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Delete(&models.LotPartnumber{}).Error; err != nil {
			return err
		}
		var descPtr *string
		if description != "" {
			descPtr = &description
		}
		// OnConflict DoNothing guards against a pre-existing identical key.
		return tx.Clauses(clause.OnConflict{DoNothing: true}).Create(&models.LotPartnumber{
			Partnumber:  partnumber,
			LotName:     lotName,
			Description: descPtr,
		}).Error
	})
}
|
||||
|
||||
func (s *StockImportService) DeleteMapping(partnumber string) (int64, error) {
|
||||
if s.db == nil {
|
||||
return 0, fmt.Errorf("offline mode: mappings unavailable")
|
||||
}
|
||||
partnumber = strings.TrimSpace(partnumber)
|
||||
if partnumber == "" {
|
||||
return 0, fmt.Errorf("partnumber is required")
|
||||
}
|
||||
res := s.db.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", partnumber).Delete(&models.LotPartnumber{})
|
||||
return res.RowsAffected, res.Error
|
||||
}
|
||||
|
||||
// upsertPendingMappings records partnumbers that could not be resolved during
// an import. In one transaction: unknown partnumbers get a placeholder row
// with an empty lot_name (so operators can map them later); for partnumbers
// that already have rows, only a missing description is backfilled — existing
// descriptions are never overwritten. A no-op on nil db or empty input.
func (s *StockImportService) upsertPendingMappings(rows []pendingMapping) error {
	if s.db == nil || len(rows) == 0 {
		return nil
	}
	return s.db.Transaction(func(tx *gorm.DB) error {
		for _, row := range rows {
			pn := strings.TrimSpace(row.Partnumber)
			if pn == "" {
				continue
			}
			desc := strings.TrimSpace(row.Description)
			// Match existing rows case-insensitively on the trimmed value.
			var existing []models.LotPartnumber
			if err := tx.Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", pn).Find(&existing).Error; err != nil {
				return err
			}
			if len(existing) == 0 {
				// Unknown partnumber: create a placeholder awaiting mapping.
				var descPtr *string
				if desc != "" {
					descPtr = &desc
				}
				if err := tx.Create(&models.LotPartnumber{
					Partnumber:  pn,
					LotName:     "",
					Description: descPtr,
				}).Error; err != nil {
					return err
				}
				continue
			}
			if desc == "" {
				continue
			}
			// Backfill the description only when every existing row lacks one.
			needsDescription := true
			for _, item := range existing {
				if item.Description != nil && strings.TrimSpace(*item.Description) != "" {
					needsDescription = false
					break
				}
			}
			if needsDescription {
				if err := tx.Model(&models.LotPartnumber{}).
					Where("LOWER(TRIM(partnumber)) = LOWER(TRIM(?))", pn).
					Update("description", desc).Error; err != nil {
					return err
				}
			}
		}
		return nil
	})
}
|
||||
|
||||
var (
	// reISODate matches ISO dates like 2024-05-31 (years 2000-2099).
	reISODate = regexp.MustCompile(`\b(20\d{2})-(\d{2})-(\d{2})\b`)
	// reRuDate matches Russian-style dates like 31.05.2024.
	reRuDate = regexp.MustCompile(`\b([0-3]\d)\.([01]\d)\.(20\d{2})\b`)
	// mxlCellRe extracts a cell's "ru" text (group 1) and its column number
	// (group 2) from the 1C MXL spreadsheet format.
	mxlCellRe = regexp.MustCompile(`\{16,\d+,\s*\{1,1,\s*\{"ru","(.*?)"\}\s*\},0\},(\d+),`)
	// errResolveConflict reports that an article matched more than one lot.
	errResolveConflict = errors.New("multiple lot matches")
	// errResolveNotFound reports that no lot matched an article.
	errResolveNotFound = errors.New("lot not found")
)
|
||||
|
||||
func parseStockRows(filename string, content []byte) ([]stockImportRow, error) {
|
||||
switch strings.ToLower(filepath.Ext(filename)) {
|
||||
case ".mxl":
|
||||
return parseMXLRows(content)
|
||||
case ".xlsx":
|
||||
return parseXLSXRows(content)
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported file format: %s", filepath.Ext(filename))
|
||||
}
|
||||
}
|
||||
|
||||
func parseMXLRows(content []byte) ([]stockImportRow, error) {
|
||||
text := string(content)
|
||||
matches := mxlCellRe.FindAllStringSubmatch(text, -1)
|
||||
if len(matches) == 0 {
|
||||
return nil, fmt.Errorf("mxl parsing failed: no cells found")
|
||||
}
|
||||
|
||||
rows := make([]map[int]string, 0, 128)
|
||||
current := map[int]string{}
|
||||
for _, m := range matches {
|
||||
val := strings.ReplaceAll(m[1], `""`, `"`)
|
||||
col, err := strconv.Atoi(m[2])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if col == 1 && len(current) > 0 {
|
||||
rows = append(rows, current)
|
||||
current = map[int]string{}
|
||||
}
|
||||
current[col] = strings.TrimSpace(val)
|
||||
}
|
||||
if len(current) > 0 {
|
||||
rows = append(rows, current)
|
||||
}
|
||||
|
||||
result := make([]stockImportRow, 0, len(rows))
|
||||
for _, r := range rows {
|
||||
article := strings.TrimSpace(r[2])
|
||||
if article == "" || strings.EqualFold(article, "Артикул") {
|
||||
continue
|
||||
}
|
||||
price, err := parseLocalizedFloat(r[5])
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
qty, err := parseLocalizedFloat(r[6])
|
||||
if err != nil {
|
||||
qty = 0
|
||||
}
|
||||
result = append(result, stockImportRow{
|
||||
Folder: strings.TrimSpace(r[1]),
|
||||
Article: article,
|
||||
Description: strings.TrimSpace(r[3]),
|
||||
Vendor: strings.TrimSpace(r[4]),
|
||||
Price: price,
|
||||
Qty: qty,
|
||||
})
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseXLSXRows parses an XLSX pricelist: it unzips the workbook, decodes the
// first worksheet, locates the header row by its Russian column titles
// (requiring at least "Артикул" and "Стоимость"), and converts every
// following data row into a stockImportRow. Rows with an empty article or an
// unparseable price are skipped; quantity defaults to 0 when missing or
// malformed.
func parseXLSXRows(content []byte) ([]stockImportRow, error) {
	zr, err := zip.NewReader(bytes.NewReader(content), int64(len(content)))
	if err != nil {
		return nil, fmt.Errorf("opening xlsx: %w", err)
	}

	// Shared strings are optional; a failed read just yields an empty table.
	sharedStrings, _ := readSharedStrings(zr)
	sheetPath := firstWorksheetPath(zr)
	if sheetPath == "" {
		return nil, fmt.Errorf("xlsx parsing failed: worksheet not found")
	}
	sheetData, err := readZipFile(zr, sheetPath)
	if err != nil {
		return nil, err
	}

	// Minimal XML shapes covering only the worksheet parts we need.
	type xlsxInline struct {
		T string `xml:"t"`
	}
	type xlsxCell struct {
		R  string      `xml:"r,attr"`
		T  string      `xml:"t,attr"`
		V  string      `xml:"v"`
		IS *xlsxInline `xml:"is"`
	}
	type xlsxRow struct {
		C []xlsxCell `xml:"c"`
	}
	type xlsxSheet struct {
		Rows []xlsxRow `xml:"sheetData>row"`
	}

	var ws xlsxSheet
	if err := xml.Unmarshal(sheetData, &ws); err != nil {
		return nil, fmt.Errorf("decode worksheet: %w", err)
	}

	// Flatten the sheet into a grid of column index -> decoded cell text.
	grid := make([]map[int]string, 0, len(ws.Rows))
	for _, r := range ws.Rows {
		rowMap := make(map[int]string, len(r.C))
		for _, c := range r.C {
			colIdx := excelRefColumn(c.R)
			if colIdx < 0 {
				continue
			}
			inlineText := ""
			if c.IS != nil {
				inlineText = c.IS.T
			}
			rowMap[colIdx] = decodeXLSXCell(c.T, c.V, inlineText, sharedStrings)
		}
		grid = append(grid, rowMap)
	}

	// Find the header row: the first row after which both the article and
	// price columns are known.
	headerRow := -1
	headers := map[string]int{}
	for i, row := range grid {
		for idx, val := range row {
			norm := normalizeHeader(val)
			switch norm {
			case "папка", "артикул", "описание", "вендор", "стоимость", "свободно":
				headers[norm] = idx
			}
		}
		_, hasArticle := headers["артикул"]
		_, hasPrice := headers["стоимость"]
		if hasArticle && hasPrice {
			headerRow = i
			break
		}
	}
	if headerRow < 0 {
		return nil, fmt.Errorf("xlsx parsing failed: header row not found")
	}

	result := make([]stockImportRow, 0, len(grid)-headerRow-1)
	// Folder/description/vendor/quantity columns are optional.
	idxFolder, hasFolder := headers["папка"]
	idxArticle := headers["артикул"]
	idxDesc, hasDesc := headers["описание"]
	idxVendor, hasVendor := headers["вендор"]
	idxPrice := headers["стоимость"]
	idxQty, hasQty := headers["свободно"]
	for i := headerRow + 1; i < len(grid); i++ {
		row := grid[i]
		article := strings.TrimSpace(row[idxArticle])
		if article == "" {
			continue
		}
		price, err := parseLocalizedFloat(row[idxPrice])
		if err != nil {
			continue
		}
		qty := 0.0
		if hasQty {
			qty, err = parseLocalizedFloat(row[idxQty])
			if err != nil {
				qty = 0
			}
		}

		folder := ""
		if hasFolder {
			folder = strings.TrimSpace(row[idxFolder])
		}
		description := ""
		if hasDesc {
			description = strings.TrimSpace(row[idxDesc])
		}
		vendor := ""
		if hasVendor {
			vendor = strings.TrimSpace(row[idxVendor])
		}

		result = append(result, stockImportRow{
			Folder:      folder,
			Article:     article,
			Description: description,
			Vendor:      vendor,
			Price:       price,
			Qty:         qty,
		})
	}
	return result, nil
}
|
||||
|
||||
// parseLocalizedFloat parses a number that may use Russian formatting:
// non-breaking or regular spaces as thousands separators and a comma as the
// decimal mark. An empty (post-cleanup) value is an error.
func parseLocalizedFloat(value string) (float64, error) {
	normalized := strings.NewReplacer("\u00a0", "", " ", "", ",", ".").
		Replace(strings.TrimSpace(value))
	if normalized == "" {
		return 0, fmt.Errorf("empty number")
	}
	return strconv.ParseFloat(normalized, 64)
}
|
||||
|
||||
func detectImportDate(content []byte, filename string, fileModTime time.Time) time.Time {
|
||||
if d, ok := extractDateFromText(string(content)); ok {
|
||||
return d
|
||||
}
|
||||
if d, ok := extractDateFromFilename(filename); ok {
|
||||
return d
|
||||
}
|
||||
if !fileModTime.IsZero() {
|
||||
return normalizeDate(fileModTime)
|
||||
}
|
||||
return normalizeDate(time.Now())
|
||||
}
|
||||
|
||||
func extractDateFromText(text string) (time.Time, bool) {
|
||||
if m := reISODate.FindStringSubmatch(text); len(m) == 4 {
|
||||
d, err := time.Parse("2006-01-02", m[0])
|
||||
if err == nil {
|
||||
return normalizeDate(d), true
|
||||
}
|
||||
}
|
||||
if m := reRuDate.FindStringSubmatch(text); len(m) == 4 {
|
||||
d, err := time.Parse("02.01.2006", m[0])
|
||||
if err == nil {
|
||||
return normalizeDate(d), true
|
||||
}
|
||||
}
|
||||
return time.Time{}, false
|
||||
}
|
||||
|
||||
func extractDateFromFilename(filename string) (time.Time, bool) {
|
||||
base := filepath.Base(filename)
|
||||
if m := reISODate.FindStringSubmatch(base); len(m) == 4 {
|
||||
d, err := time.Parse("2006-01-02", m[0])
|
||||
if err == nil {
|
||||
return normalizeDate(d), true
|
||||
}
|
||||
}
|
||||
if m := reRuDate.FindStringSubmatch(base); len(m) == 4 {
|
||||
d, err := time.Parse("02.01.2006", m[0])
|
||||
if err == nil {
|
||||
return normalizeDate(d), true
|
||||
}
|
||||
}
|
||||
return time.Time{}, false
|
||||
}
|
||||
|
||||
func normalizeDate(t time.Time) time.Time {
|
||||
y, m, d := t.Date()
|
||||
return time.Date(y, m, d, 0, 0, 0, 0, time.Local)
|
||||
}
|
||||
|
||||
// median returns the middle value of values (the mean of the two middle
// values for an even count), or 0 for an empty slice. The input slice is not
// modified.
func median(values []float64) float64 {
	count := len(values)
	if count == 0 {
		return 0
	}
	sorted := make([]float64, count)
	copy(sorted, values)
	sort.Float64s(sorted)
	mid := count / 2
	if count%2 == 1 {
		return sorted[mid]
	}
	return (sorted[mid-1] + sorted[mid]) / 2
}
|
||||
|
||||
// lotResolver resolves an imported article to a lot name using, in order:
// the explicit mapping table, an exact lot-name match, and finally the
// longest unambiguous lot-name prefix of the article.
type lotResolver struct {
	// partnumberToLots maps a normalized partnumber to every lot name the
	// mapping table associates with it.
	partnumberToLots map[string][]string
	// exactLots maps a normalized lot name to its original spelling.
	exactLots map[string]string
	// allLots holds original lot names, sorted longest-first (ties broken
	// alphabetically) so prefix matching prefers the most specific lot.
	allLots []string
}
|
||||
|
||||
// newLotResolver loads the lot/partnumber mapping table and the list of lot
// names from the database and builds an in-memory resolver. Mapping rows
// without a partnumber or lot name are skipped; lot names are indexed both
// for exact lookup and for longest-prefix matching.
func (s *StockImportService) newLotResolver() (*lotResolver, error) {
	var mappings []models.LotPartnumber
	if err := s.db.Find(&mappings).Error; err != nil {
		return nil, err
	}
	partnumberToLots := make(map[string][]string, len(mappings))
	for _, m := range mappings {
		p := normalizeKey(m.Partnumber)
		// Ignore incomplete mapping rows (placeholders with no lot yet).
		if p == "" || strings.TrimSpace(m.LotName) == "" {
			continue
		}
		partnumberToLots[p] = append(partnumberToLots[p], m.LotName)
	}

	var lots []models.Lot
	if err := s.db.Select("lot_name").Find(&lots).Error; err != nil {
		return nil, err
	}
	exactLots := make(map[string]string, len(lots))
	allLots := make([]string, 0, len(lots))
	for _, l := range lots {
		name := strings.TrimSpace(l.LotName)
		if name == "" {
			continue
		}
		k := normalizeKey(name)
		exactLots[k] = name
		allLots = append(allLots, name)
	}
	// Longest names first (ties broken alphabetically) so that prefix
	// resolution deterministically prefers the most specific lot.
	sort.Slice(allLots, func(i, j int) bool {
		li := len([]rune(allLots[i]))
		lj := len([]rune(allLots[j]))
		if li == lj {
			return allLots[i] < allLots[j]
		}
		return li > lj
	})

	return &lotResolver{
		partnumberToLots: partnumberToLots,
		exactLots:        exactLots,
		allLots:          allLots,
	}, nil
}
|
||||
|
||||
// resolve maps an article to a lot name and reports which strategy matched.
// Resolution order:
//  1. mapping table ("mapping_table") — errResolveConflict when the table
//     maps the partnumber to more than one distinct lot;
//  2. exact lot-name match ("article_exact");
//  3. longest lot-name prefix of the article ("prefix") — errResolveConflict
//     when two different lots tie at the best prefix length.
// errResolveNotFound is returned for a blank article or when nothing matches.
func (r *lotResolver) resolve(article string) (string, string, error) {
	key := normalizeKey(article)
	if key == "" {
		return "", "", errResolveNotFound
	}

	if mapped := r.partnumberToLots[key]; len(mapped) > 0 {
		uniq := uniqueStrings(mapped)
		if len(uniq) == 1 {
			return uniq[0], "mapping_table", nil
		}
		return "", "", errResolveConflict
	}

	if lot, ok := r.exactLots[key]; ok {
		return lot, "article_exact", nil
	}

	// Prefix search: track the longest matching lot name and whether a
	// different lot ties at that length (ambiguous -> conflict).
	best := ""
	bestLen := -1
	tie := false
	for _, lot := range r.allLots {
		lotKey := normalizeKey(lot)
		if lotKey == "" {
			continue
		}
		if strings.HasPrefix(key, lotKey) {
			// Length in runes so non-ASCII lot names compare fairly.
			l := len([]rune(lotKey))
			if l > bestLen {
				best = lot
				bestLen = l
				tie = false
			} else if l == bestLen && !strings.EqualFold(best, lot) {
				tie = true
			}
		}
	}
	if best == "" {
		return "", "", errResolveNotFound
	}
	if tie {
		return "", "", errResolveConflict
	}
	return best, "prefix", nil
}
|
||||
|
||||
// normalizeKey canonicalizes a lookup key: trimmed, then lower-cased.
func normalizeKey(v string) string {
	trimmed := strings.TrimSpace(v)
	return strings.ToLower(trimmed)
}
|
||||
|
||||
// uniqueStrings trims the inputs, drops blanks and case-insensitive
// duplicates (keeping the first spelling seen), and returns the survivors
// sorted ascending.
func uniqueStrings(values []string) []string {
	seen := make(map[string]bool, len(values))
	result := make([]string, 0, len(values))
	for _, raw := range values {
		trimmed := strings.TrimSpace(raw)
		if trimmed == "" {
			continue
		}
		key := strings.ToLower(trimmed)
		if !seen[key] {
			seen[key] = true
			result = append(result, trimmed)
		}
	}
	sort.Strings(result)
	return result
}
|
||||
|
||||
func readZipFile(zr *zip.Reader, name string) ([]byte, error) {
|
||||
for _, f := range zr.File {
|
||||
if f.Name != name {
|
||||
continue
|
||||
}
|
||||
rc, err := f.Open()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rc.Close()
|
||||
return io.ReadAll(rc)
|
||||
}
|
||||
return nil, fmt.Errorf("zip entry not found: %s", name)
|
||||
}
|
||||
|
||||
func firstWorksheetPath(zr *zip.Reader) string {
|
||||
candidates := make([]string, 0, 4)
|
||||
for _, f := range zr.File {
|
||||
if strings.HasPrefix(f.Name, "xl/worksheets/") && strings.HasSuffix(f.Name, ".xml") {
|
||||
candidates = append(candidates, f.Name)
|
||||
}
|
||||
}
|
||||
if len(candidates) == 0 {
|
||||
return ""
|
||||
}
|
||||
sort.Strings(candidates)
|
||||
for _, c := range candidates {
|
||||
if strings.HasSuffix(c, "sheet1.xml") {
|
||||
return c
|
||||
}
|
||||
}
|
||||
return candidates[0]
|
||||
}
|
||||
|
||||
func readSharedStrings(zr *zip.Reader) ([]string, error) {
|
||||
data, err := readZipFile(zr, "xl/sharedStrings.xml")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
type richRun struct {
|
||||
Text string `xml:"t"`
|
||||
}
|
||||
type si struct {
|
||||
Text string `xml:"t"`
|
||||
Runs []richRun `xml:"r"`
|
||||
}
|
||||
type sst struct {
|
||||
Items []si `xml:"si"`
|
||||
}
|
||||
|
||||
var parsed sst
|
||||
if err := xml.Unmarshal(data, &parsed); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
values := make([]string, 0, len(parsed.Items))
|
||||
for _, item := range parsed.Items {
|
||||
if item.Text != "" {
|
||||
values = append(values, item.Text)
|
||||
continue
|
||||
}
|
||||
var b strings.Builder
|
||||
for _, run := range item.Runs {
|
||||
b.WriteString(run.Text)
|
||||
}
|
||||
values = append(values, b.String())
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
// decodeXLSXCell resolves a cell's displayed text from its storage type:
// "s" indexes the shared-string table, "inlineStr" carries its text inline,
// and anything else stores the value directly. An out-of-range shared-string
// index degrades to the raw value. All results are whitespace-trimmed.
func decodeXLSXCell(cellType, value, inlineText string, sharedStrings []string) string {
	if cellType == "inlineStr" {
		return strings.TrimSpace(inlineText)
	}
	if cellType == "s" {
		idx, err := strconv.Atoi(strings.TrimSpace(value))
		if err == nil && idx >= 0 && idx < len(sharedStrings) {
			return strings.TrimSpace(sharedStrings[idx])
		}
	}
	return strings.TrimSpace(value)
}
|
||||
|
||||
// excelRefColumn converts the letter prefix of a cell reference (e.g. "B7",
// "aa12") to a zero-based column index, or -1 when there is no letter prefix.
func excelRefColumn(ref string) int {
	col := 0
	letters := 0
	for _, r := range ref {
		switch {
		case r >= 'A' && r <= 'Z':
			col = col*26 + int(r-'A'+1)
		case r >= 'a' && r <= 'z':
			col = col*26 + int(r-'a'+1)
		default:
			// First non-letter ends the column prefix.
			if letters == 0 {
				return -1
			}
			return col - 1
		}
		letters++
	}
	if letters == 0 {
		return -1
	}
	return col - 1
}
|
||||
|
||||
// normalizeHeader canonicalizes a header cell: non-breaking spaces become
// regular spaces, then the text is trimmed and lower-cased.
func normalizeHeader(v string) string {
	plain := strings.ReplaceAll(v, "\u00a0", " ")
	return strings.ToLower(strings.TrimSpace(plain))
}
|
||||
256
internal/services/stock_import_test.go
Normal file
256
internal/services/stock_import_test.go
Normal file
@@ -0,0 +1,256 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"git.mchus.pro/mchus/quoteforge/internal/models"
|
||||
"github.com/glebarez/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestParseMXLRows feeds a minimal MXL fragment (one header row plus one data
// row) through parseMXLRows and checks that the header is skipped while the
// data row's article, comma-decimal price, and quantity are parsed.
func TestParseMXLRows(t *testing.T) {
	content := strings.Join([]string{
		`MOXCEL`,
		`{16,2,{1,1,{"ru","Папка"}},0},1,`,
		`{16,2,{1,1,{"ru","Артикул"}},0},2,`,
		`{16,2,{1,1,{"ru","Описание"}},0},3,`,
		`{16,2,{1,1,{"ru","Вендор"}},0},4,`,
		`{16,2,{1,1,{"ru","Стоимость"}},0},5,`,
		`{16,2,{1,1,{"ru","Свободно"}},0},6,`,
		`{16,2,{1,1,{"ru","Серверы"}},0},1,`,
		`{16,2,{1,1,{"ru","CPU_X"}},0},2,`,
		`{16,2,{1,1,{"ru","Процессор"}},0},3,`,
		`{16,2,{1,1,{"ru","AMD"}},0},4,`,
		`{16,2,{1,1,{"ru","125,50"}},0},5,`,
		`{16,2,{1,1,{"ru","10"}},0},6,`,
	}, "\n")

	rows, err := parseMXLRows([]byte(content))
	if err != nil {
		t.Fatalf("parseMXLRows: %v", err)
	}
	if len(rows) != 1 {
		t.Fatalf("expected 1 row, got %d", len(rows))
	}
	if rows[0].Article != "CPU_X" {
		t.Fatalf("unexpected article: %s", rows[0].Article)
	}
	if rows[0].Price != 125.50 {
		t.Fatalf("unexpected price: %v", rows[0].Price)
	}
	if rows[0].Qty != 10 {
		t.Fatalf("unexpected qty: %v", rows[0].Qty)
	}
}
|
||||
|
||||
// TestParseXLSXRows builds a minimal in-memory XLSX workbook with the
// expected Russian headers and one data row, then checks that the article
// and comma-decimal price are extracted.
func TestParseXLSXRows(t *testing.T) {
	xlsx := buildMinimalXLSX(t, []string{
		"Папка", "Артикул", "Описание", "Вендор", "Стоимость", "Свободно",
	}, []string{
		"Серверы", "CPU_A", "Процессор", "AMD", "99,25", "7",
	})

	rows, err := parseXLSXRows(xlsx)
	if err != nil {
		t.Fatalf("parseXLSXRows: %v", err)
	}
	if len(rows) != 1 {
		t.Fatalf("expected 1 row, got %d", len(rows))
	}
	if rows[0].Article != "CPU_A" {
		t.Fatalf("unexpected article: %s", rows[0].Article)
	}
	if rows[0].Price != 99.25 {
		t.Fatalf("unexpected price: %v", rows[0].Price)
	}
}
|
||||
|
||||
// TestLotResolverPrecedenceAndConflicts exercises lotResolver.resolve:
// mapping-table hits win over everything, exact lot-name matches come next,
// then longest-prefix matching; ambiguous mappings and unmatched articles
// return errors.
func TestLotResolverPrecedenceAndConflicts(t *testing.T) {
	r := &lotResolver{
		partnumberToLots: map[string][]string{
			"pn-1":        {"LOT_MAPPED"},
			"pn-conflict": {"LOT_A", "LOT_B"},
		},
		exactLots: map[string]string{
			"cpu_a": "CPU_A",
		},
		allLots: []string{"CPU_A_LONG", "CPU_A", "ABC ", "ABC\t"},
	}

	// Mapping table takes precedence.
	lot, typ, err := r.resolve("pn-1")
	if err != nil || lot != "LOT_MAPPED" || typ != "mapping_table" {
		t.Fatalf("mapping_table mismatch: lot=%s typ=%s err=%v", lot, typ, err)
	}

	// Exact lot-name match comes next.
	lot, typ, err = r.resolve("cpu_a")
	if err != nil || lot != "CPU_A" || typ != "article_exact" {
		t.Fatalf("article_exact mismatch: lot=%s typ=%s err=%v", lot, typ, err)
	}

	// The longest matching lot-name prefix must win over shorter ones.
	lot, typ, err = r.resolve("cpu_a_long_suffix")
	if err != nil || lot != "CPU_A_LONG" || typ != "prefix" {
		t.Fatalf("prefix mismatch: lot=%s typ=%s err=%v", lot, typ, err)
	}

	// No mapping, no exact match, no prefix -> not found.
	_, _, err = r.resolve("abx")
	if err == nil {
		t.Fatalf("expected not found error")
	}

	// Two distinct lots in the mapping table -> conflict.
	_, _, err = r.resolve("pn-conflict")
	if err == nil || err != errResolveConflict {
		t.Fatalf("expected conflict, got %v", err)
	}
}
|
||||
|
||||
// TestImportNoValidRowsKeepsStockLog verifies that an import whose file
// yields no usable data rows (header only) fails AND leaves the existing
// stock_log contents untouched rather than wiping them.
func TestImportNoValidRowsKeepsStockLog(t *testing.T) {
	db := openTestDB(t)
	if err := db.AutoMigrate(&models.StockLog{}); err != nil {
		t.Fatalf("automigrate stock_log: %v", err)
	}

	// Seed one pre-existing row that must survive the failed import.
	existing := models.StockLog{
		Lot:   "CPU_A",
		Date:  time.Now(),
		Price: 10,
	}
	if err := db.Create(&existing).Error; err != nil {
		t.Fatalf("seed stock_log: %v", err)
	}

	svc := NewStockImportService(db, nil)
	// An MXL payload containing only the header row: nothing parses as data.
	headerOnly := []byte(strings.Join([]string{
		`MOXCEL`,
		`{16,2,{1,1,{"ru","Папка"}},0},1,`,
		`{16,2,{1,1,{"ru","Артикул"}},0},2,`,
		`{16,2,{1,1,{"ru","Описание"}},0},3,`,
		`{16,2,{1,1,{"ru","Вендор"}},0},4,`,
		`{16,2,{1,1,{"ru","Стоимость"}},0},5,`,
		`{16,2,{1,1,{"ru","Свободно"}},0},6,`,
	}, "\n"))

	if _, err := svc.Import("test.mxl", headerOnly, time.Now(), "tester", nil); err == nil {
		t.Fatalf("expected import error")
	}

	// The table must still contain exactly the seeded row.
	var count int64
	if err := db.Model(&models.StockLog{}).Count(&count).Error; err != nil {
		t.Fatalf("count stock_log: %v", err)
	}
	if count != 1 {
		t.Fatalf("expected stock_log unchanged, got %d rows", count)
	}
}
|
||||
|
||||
// TestReplaceStockLogs verifies that replaceStockLogs swaps the whole
// stock_log table: pre-existing rows are deleted, the new batch is inserted,
// and the reported delete/insert counts match.
func TestReplaceStockLogs(t *testing.T) {
	db := openTestDB(t)
	if err := db.AutoMigrate(&models.StockLog{}); err != nil {
		t.Fatalf("automigrate stock_log: %v", err)
	}

	// Seed one old row that the replacement must remove.
	if err := db.Create(&models.StockLog{Lot: "OLD", Date: time.Now(), Price: 1}).Error; err != nil {
		t.Fatalf("seed old row: %v", err)
	}

	svc := NewStockImportService(db, nil)
	records := []models.StockLog{
		{Lot: "NEW_1", Date: time.Now(), Price: 2},
		{Lot: "NEW_2", Date: time.Now(), Price: 3},
	}

	deleted, inserted, err := svc.replaceStockLogs(records)
	if err != nil {
		t.Fatalf("replaceStockLogs: %v", err)
	}
	if deleted != 1 || inserted != 2 {
		t.Fatalf("unexpected replace stats deleted=%d inserted=%d", deleted, inserted)
	}

	// Only the new batch must remain.
	var rows []models.StockLog
	if err := db.Order("lot").Find(&rows).Error; err != nil {
		t.Fatalf("read rows: %v", err)
	}
	if len(rows) != 2 || rows[0].Lot != "NEW_1" || rows[1].Lot != "NEW_2" {
		t.Fatalf("unexpected rows after replace: %#v", rows)
	}
}
|
||||
|
||||
func openTestDB(t *testing.T) *gorm.DB {
|
||||
t.Helper()
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
if err != nil {
|
||||
t.Fatalf("open sqlite: %v", err)
|
||||
}
|
||||
return db
|
||||
}
|
||||
|
||||
func buildMinimalXLSX(t *testing.T, headers, values []string) []byte {
|
||||
t.Helper()
|
||||
var buf bytes.Buffer
|
||||
zw := zip.NewWriter(&buf)
|
||||
|
||||
write := func(name, body string) {
|
||||
w, err := zw.Create(name)
|
||||
if err != nil {
|
||||
t.Fatalf("create zip entry %s: %v", name, err)
|
||||
}
|
||||
if _, err := w.Write([]byte(body)); err != nil {
|
||||
t.Fatalf("write zip entry %s: %v", name, err)
|
||||
}
|
||||
}
|
||||
|
||||
write("[Content_Types].xml", `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Types xmlns="http://schemas.openxmlformats.org/package/2006/content-types">
|
||||
<Default Extension="rels" ContentType="application/vnd.openxmlformats-package.relationships+xml"/>
|
||||
<Default Extension="xml" ContentType="application/xml"/>
|
||||
<Override PartName="/xl/workbook.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml"/>
|
||||
<Override PartName="/xl/worksheets/sheet1.xml" ContentType="application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml"/>
|
||||
</Types>`)
|
||||
write("_rels/.rels", `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships">
|
||||
<Relationship Id="rId1" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument" Target="xl/workbook.xml"/>
|
||||
</Relationships>`)
|
||||
write("xl/workbook.xml", `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<workbook xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
|
||||
<sheets>
|
||||
<sheet name="Sheet1" sheetId="1" r:id="rId1" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships"/>
|
||||
</sheets>
|
||||
</workbook>`)
|
||||
write("xl/_rels/workbook.xml.rels", `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships">
|
||||
<Relationship Id="rId1" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet" Target="worksheets/sheet1.xml"/>
|
||||
</Relationships>`)
|
||||
|
||||
makeCell := func(ref, value string) string {
|
||||
escaped := strings.ReplaceAll(value, "&", "&")
|
||||
escaped = strings.ReplaceAll(escaped, "<", "<")
|
||||
escaped = strings.ReplaceAll(escaped, ">", ">")
|
||||
return `<c r="` + ref + `" t="inlineStr"><is><t>` + escaped + `</t></is></c>`
|
||||
}
|
||||
|
||||
cols := []string{"A", "B", "C", "D", "E", "F"}
|
||||
var headerCells, valueCells strings.Builder
|
||||
for i := 0; i < len(cols) && i < len(headers); i++ {
|
||||
headerCells.WriteString(makeCell(cols[i]+"1", headers[i]))
|
||||
}
|
||||
for i := 0; i < len(cols) && i < len(values); i++ {
|
||||
valueCells.WriteString(makeCell(cols[i]+"2", values[i]))
|
||||
}
|
||||
|
||||
write("xl/worksheets/sheet1.xml", `<?xml version="1.0" encoding="UTF-8"?>
|
||||
<worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
|
||||
<sheetData>
|
||||
<row r="1">`+headerCells.String()+`</row>
|
||||
<row r="2">`+valueCells.String()+`</row>
|
||||
</sheetData>
|
||||
</worksheet>`)
|
||||
|
||||
if err := zw.Close(); err != nil {
|
||||
t.Fatalf("close zip: %v", err)
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
||||
Reference in New Issue
Block a user