- New unified append-only quote log table parts_log replaces three separate log tables (stock_log, partnumber_log_competitors, lot_log) - Migrations 042-049: extend supplier, create parts_log/import_formats/ignore_rules, rework qt_lot_metadata composite PK, add lead_time_weeks to pricelist_items, backfill data, migrate ignore rules - New services: PartsLogBackfillService, ImportFormatService, UnifiedImportService; new world pricelist type (all supplier types) - qt_lot_metadata PK changed to (lot_name, pricelist_type); all queries now filter WHERE pricelist_type='estimate' - Fix pre-existing bug: qt_component_usage_stats column names quotes_last30d/quotes_last7d (no underscore) — added explicit gorm tags - Bible: full table inventory, baseline schema snapshot, updated pricelist/data-rules/api/history/architecture docs Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
439 lines
12 KiB
Go
439 lines
12 KiB
Go
package services
|
|
|
|
import (
|
|
"fmt"
|
|
"strings"
|
|
"time"
|
|
|
|
"git.mchus.pro/mchus/priceforge/internal/models"
|
|
"git.mchus.pro/mchus/priceforge/internal/repository"
|
|
"gorm.io/gorm"
|
|
"gorm.io/gorm/clause"
|
|
)
|
|
|
|
// UnifiedImportParams contains all parameters for a single import run.
type UnifiedImportParams struct {
	SupplierCode string    // code of the supplier; Import fails if it does not exist
	FormatCode   string    // optional; supplier.default_import_format_code used if empty
	OfferType    string    // "public" or "private"; anything else defaults to "public"
	QuoteDate    time.Time // zero means detect from file or use today (UTC midnight)
	ImportAll    bool      // true = import unmapped p/ns into parts_log with lot_name=NULL
	Filename     string    // original file name, passed through to the parser
	Content      []byte    // raw file bytes; must be non-empty
	FileModTime  time.Time // modification time of the uploaded file
	CreatedBy    string    // audit identity recorded on every inserted parts_log row
}
|
|
|
|
// UnifiedImportResult summarises the outcome of an import run.
// Inserted + Skipped + Ignored + Unmapped accounts for every parsed row:
// Skipped covers both empty partnumbers and duplicates rejected by the
// parts_log unique key on insert.
type UnifiedImportResult struct {
	RowsTotal   int // rows successfully parsed from the file
	Inserted    int // new rows written to parts_log
	Skipped     int // empty partnumber, or deduplicated on insert
	Ignored     int // dropped by ignore rules
	Unmapped    int // unknown p/n, recorded in the seen-registry instead of parts_log
	ParseErrors int // rows the parser rejected (e.g. non-positive price)
}
|
|
|
|
// UnifiedImportProgress is sent to the progress callback.
// Status takes one of: "starting", "parsing", "parsed", "inserting",
// "completed". Current/Total express percent-style progress; Total is
// always 100 in the current implementation.
type UnifiedImportProgress struct {
	Status  string `json:"status"`
	Message string `json:"message,omitempty"` // human-readable status text (Russian)
	Current int    `json:"current,omitempty"`
	Total   int    `json:"total,omitempty"`
	// Counters — cumulative row statistics with the same semantics as the
	// identically named fields of UnifiedImportResult.
	RowsTotal   int `json:"rows_total,omitempty"`
	Inserted    int `json:"inserted,omitempty"`
	Skipped     int `json:"skipped,omitempty"`
	Ignored     int `json:"ignored,omitempty"`
	Unmapped    int `json:"unmapped,omitempty"`
	ParseErrors int `json:"parse_errors,omitempty"`
}
|
|
|
|
// UnifiedImportService is the single entry point for all import sources.
// It delegates file parsing to the existing per-format parsers and writes
// normalised rows to parts_log.
type UnifiedImportService struct {
	db              *gorm.DB                       // nil when running offline; Import refuses to run then
	partsLogRepo    *repository.PartsLogRepository // batch insert into parts_log
	importFormatSvc *ImportFormatService           // format lookup and column_mapping parsing
	backfillSvc     *PartsLogBackfillService       // injected dependency; not referenced in this file's visible code
}
|
|
|
|
func NewUnifiedImportService(
|
|
db *gorm.DB,
|
|
importFormatSvc *ImportFormatService,
|
|
backfillSvc *PartsLogBackfillService,
|
|
) *UnifiedImportService {
|
|
return &UnifiedImportService{
|
|
db: db,
|
|
partsLogRepo: repository.NewPartsLogRepository(db),
|
|
importFormatSvc: importFormatSvc,
|
|
backfillSvc: backfillSvc,
|
|
}
|
|
}
|
|
|
|
// Import executes the unified import algorithm for the given parameters.
//
// High-level flow:
//  1. load supplier by code;
//  2. resolve the import format (explicit param, else supplier default);
//  3-4. normalise offer_type and quote_date;
//  5. parse the file via the format's column_mapping;
//  6. load ignore rules;
//  7. per row: apply ignore rules, mapped/unmapped routing, price and
//     lead-time normalisation; then flush unmapped p/ns to the
//     seen-registry and batch-insert the rest into parts_log.
//
// onProgress, if non-nil, receives status updates throughout the run.
// Rows deduplicated by the parts_log unique key are reported as Skipped.
func (s *UnifiedImportService) Import(
	params UnifiedImportParams,
	onProgress func(UnifiedImportProgress),
) (*UnifiedImportResult, error) {
	// report is a nil-safe wrapper around the progress callback.
	report := func(p UnifiedImportProgress) {
		if onProgress != nil {
			onProgress(p)
		}
	}
	report(UnifiedImportProgress{Status: "starting", Message: "Запуск импорта", Current: 0, Total: 100})

	// A nil DB means the application runs without a database connection.
	if s.db == nil {
		return nil, fmt.Errorf("offline mode: import unavailable")
	}
	if len(params.Content) == 0 {
		return nil, fmt.Errorf("empty file")
	}

	// 1. Load supplier
	var supplier models.Supplier
	if err := s.db.Where("supplier_code = ?", params.SupplierCode).First(&supplier).Error; err != nil {
		return nil, fmt.Errorf("supplier %q not found: %w", params.SupplierCode, err)
	}

	// 2. Resolve format code: the explicit parameter wins, else fall back
	// to the supplier's configured default.
	formatCode := params.FormatCode
	if formatCode == "" && supplier.DefaultImportFormatCode != nil {
		formatCode = *supplier.DefaultImportFormatCode
	}
	if formatCode == "" {
		return nil, fmt.Errorf("no import format specified and supplier has no default_import_format_code")
	}
	format, err := s.importFormatSvc.GetByCode(formatCode)
	if err != nil {
		return nil, fmt.Errorf("load import format: %w", err)
	}

	// 3. Determine offer_type; anything other than "private" means public.
	offerType := models.OfferTypePublic
	if strings.ToLower(params.OfferType) == "private" {
		offerType = models.OfferTypePrivate
	}

	// 4. Determine quote_date; default is today at UTC midnight.
	quoteDate := params.QuoteDate
	if quoteDate.IsZero() {
		quoteDate = time.Now().UTC().Truncate(24 * time.Hour)
	}

	report(UnifiedImportProgress{Status: "parsing", Message: "Парсинг файла", Current: 10, Total: 100})

	// 5. Parse file using existing competitor column_mapping logic
	columnMapping, err := s.importFormatSvc.ParseColumnMapping(format)
	if err != nil {
		return nil, err
	}

	rawRows, parseErrors, err := parseUnifiedRows(params.Filename, params.Content, format, columnMapping)
	if err != nil {
		return nil, fmt.Errorf("parse file: %w", err)
	}

	report(UnifiedImportProgress{
		Status:      "parsed",
		Message:     "Файл распарсен",
		Current:     20,
		Total:       100,
		RowsTotal:   len(rawRows),
		ParseErrors: parseErrors,
	})

	// 6. Load ignore rules
	ignoreRules, err := s.loadIgnoreRules()
	if err != nil {
		return nil, err
	}

	// 7. Seen-index for unmapped tracking, keyed by lowercased p/n so each
	// unmapped partnumber is recorded at most once per run.
	seenMap := make(map[string]models.VendorPartnumberSeen)

	var toInsert []models.PartsLog
	ignored, unmapped, skipped := 0, 0, 0

	for _, row := range rawRows {
		pn := strings.TrimSpace(row.Partnumber)
		if pn == "" {
			skipped++
			continue
		}

		// 7a. Check ignore rules
		if matchesIgnoreRules(ignoreRules, pn, row.Vendor, row.Description) {
			ignored++
			continue
		}

		// 7b. Check whether the p/n is known. NOTE(review): despite the
		// original comment naming qt_vendor_partnumber_seen as well,
		// isKnownPartnumber consults qt_partnumber_book_items only —
		// confirm intent.
		isMapped := s.isKnownPartnumber(pn)
		if !isMapped {
			if !params.ImportAll {
				// Track in seen-registry, do not insert
				unmapped++
				key := strings.ToLower(pn)
				if _, exists := seenMap[key]; !exists {
					seenEntry := models.VendorPartnumberSeen{
						SourceType: "unified:" + supplier.SupplierCode,
						Vendor:     row.Vendor,
						Partnumber: pn,
						LastSeenAt: time.Now(),
					}
					if row.Description != "" {
						// NOTE(review): &row.Description is safe only with
						// per-iteration loop variables (go.mod >= 1.22);
						// verify the module's Go version.
						seenEntry.Description = &row.Description
					}
					seenMap[key] = seenEntry
				}
				continue
			}
			// ImportAll: insert with lot_name=NULL
		}

		// 7c. Normalise price by dividing out the supplier uplift.
		// NOTE(review): assumes supplier.PriceUplift != 0 — a zero uplift
		// would yield +Inf here; confirm a DB constraint guarantees it.
		price := row.Price / supplier.PriceUplift

		// 7d. Resolve lead_time: per-row value wins, else supplier default.
		var leadTime *int
		if row.LeadTimeWeeks != nil {
			leadTime = row.LeadTimeWeeks
		} else if supplier.DefaultLeadTimeWeeks != nil {
			leadTime = supplier.DefaultLeadTimeWeeks
		}

		pl := models.PartsLog{
			SupplierCode:  supplier.SupplierCode,
			Partnumber:    pn,
			Vendor:        row.Vendor,
			Price:         price,
			OfferType:     offerType,
			QuoteDate:     quoteDate,
			CreatedBy:     params.CreatedBy,
			LeadTimeWeeks: leadTime,
		}
		if row.Qty != 0 {
			qty := row.Qty
			pl.Qty = &qty
		}
		if row.Description != "" {
			// NOTE(review): same pre-1.22 loop-variable aliasing caveat as
			// seenEntry.Description above.
			pl.Description = &row.Description
		}
		toInsert = append(toInsert, pl)
	}

	// Flush seen rows for unmapped p/ns. Best-effort by design: the insert
	// error is deliberately discarded so a seen-registry failure cannot
	// abort the import itself.
	if len(seenMap) > 0 {
		seenRows := make([]models.VendorPartnumberSeen, 0, len(seenMap))
		for _, v := range seenMap {
			seenRows = append(seenRows, v)
		}
		_ = s.db.Clauses(clause.OnConflict{DoNothing: true}).CreateInBatches(seenRows, 200).Error
	}

	report(UnifiedImportProgress{
		Status:    "inserting",
		Message:   "Запись в parts_log",
		Current:   60,
		Total:     100,
		RowsTotal: len(rawRows),
		Ignored:   ignored,
		Unmapped:  unmapped,
	})

	// 7e. INSERT IGNORE into parts_log; inserted < len(toInsert) means the
	// unique key silently dropped duplicates, reported below as Skipped.
	inserted, err := s.partsLogRepo.InsertBatch(toInsert)
	if err != nil {
		return nil, fmt.Errorf("insert parts_log: %w", err)
	}
	deduped := len(toInsert) - inserted

	report(UnifiedImportProgress{
		Status:      "completed",
		Message:     "Импорт завершён",
		Current:     100,
		Total:       100,
		RowsTotal:   len(rawRows),
		Inserted:    inserted,
		Skipped:     skipped + deduped,
		Ignored:     ignored,
		Unmapped:    unmapped,
		ParseErrors: parseErrors,
	})

	return &UnifiedImportResult{
		RowsTotal:   len(rawRows),
		Inserted:    inserted,
		Skipped:     skipped + deduped,
		Ignored:     ignored,
		Unmapped:    unmapped,
		ParseErrors: parseErrors,
	}, nil
}
|
|
|
|
// unifiedRawRow is an intermediate parsed row before normalisation.
type unifiedRawRow struct {
	Partnumber    string
	Vendor        string
	Description   string
	Price         float64 // raw file price, before dividing by supplier.PriceUplift
	Qty           float64 // 0 means "not specified" (stored as NULL downstream)
	LeadTimeWeeks *int    // nil when the file carries no lead-time value
}
|
|
|
|
// parseUnifiedRows parses file content using the format's column_mapping.
// Returns parsed rows and the number of parse errors.
// Currently delegates to the existing Excel/MXL parsing utilities.
// NOTE(review): an earlier comment mentioned a CSV path (parseCsvUnified),
// but no "csv" case exists below — CSV files currently hit the error branch.
func parseUnifiedRows(
	filename string,
	content []byte,
	format *models.ImportFormat,
	mapping *models.CompetitorColumnMapping,
) ([]unifiedRawRow, int, error) {
	// Delegate to existing per-format parsers based on file_type.
	switch format.FileType {
	case "xlsx":
		return parseExcelUnifiedRows(filename, content, mapping)
	case "mxl":
		return parseMXLUnifiedRows(filename, content, mapping)
	default:
		return nil, 0, fmt.Errorf("unsupported file_type %q for unified import", format.FileType)
	}
}
|
|
|
|
// parseExcelUnifiedRows wraps the existing competitor Excel row parsing logic.
|
|
func parseExcelUnifiedRows(filename string, content []byte, mapping *models.CompetitorColumnMapping) ([]unifiedRawRow, int, error) {
|
|
rows, err := parseCompetitorExcel(filename, content, *mapping)
|
|
if err != nil {
|
|
return nil, 0, err
|
|
}
|
|
parseErrors := 0
|
|
result := make([]unifiedRawRow, 0, len(rows))
|
|
for _, r := range rows {
|
|
price := r.PriceUSD
|
|
if price == 0 {
|
|
price = r.PriceLocCur
|
|
}
|
|
if price <= 0 {
|
|
parseErrors++
|
|
continue
|
|
}
|
|
result = append(result, unifiedRawRow{
|
|
Partnumber: r.Partnumber,
|
|
Vendor: r.Vendor,
|
|
Description: r.Description,
|
|
Price: price,
|
|
Qty: r.Qty,
|
|
})
|
|
}
|
|
return result, parseErrors, nil
|
|
}
|
|
|
|
// parseMXLUnifiedRows wraps the existing MXL parsing logic.
|
|
func parseMXLUnifiedRows(filename string, content []byte, mapping *models.CompetitorColumnMapping) ([]unifiedRawRow, int, error) {
|
|
stockRows, err := parseStockRows(filename, content)
|
|
if err != nil {
|
|
return nil, 0, err
|
|
}
|
|
result := make([]unifiedRawRow, 0, len(stockRows))
|
|
for _, r := range stockRows {
|
|
if r.Price <= 0 {
|
|
continue
|
|
}
|
|
result = append(result, unifiedRawRow{
|
|
Partnumber: r.Article,
|
|
Vendor: r.Vendor,
|
|
Description: r.Description,
|
|
Price: r.Price,
|
|
Qty: r.Qty,
|
|
})
|
|
}
|
|
return result, 0, nil
|
|
}
|
|
|
|
// loadIgnoreRules loads all active ignore rules from qt_ignore_rules.
|
|
func (s *UnifiedImportService) loadIgnoreRules() ([]models.IgnoreRule, error) {
|
|
var rules []models.IgnoreRule
|
|
if err := s.db.Find(&rules).Error; err != nil {
|
|
return nil, fmt.Errorf("load ignore rules: %w", err)
|
|
}
|
|
return rules, nil
|
|
}
|
|
|
|
// matchesIgnoreRules returns true if the row should be dropped.
|
|
func matchesIgnoreRules(rules []models.IgnoreRule, partnumber, vendor, description string) bool {
|
|
for _, rule := range rules {
|
|
var target string
|
|
switch rule.Field {
|
|
case "partnumber":
|
|
target = partnumber
|
|
case "vendor":
|
|
target = vendor
|
|
case "description":
|
|
target = description
|
|
default:
|
|
continue
|
|
}
|
|
if matchIgnorePattern(rule.MatchType, rule.Pattern, target) {
|
|
return true
|
|
}
|
|
}
|
|
return false
|
|
}
|
|
|
|
func matchIgnorePattern(matchType, pattern, value string) bool {
|
|
switch matchType {
|
|
case "exact":
|
|
return strings.EqualFold(pattern, value)
|
|
case "contains":
|
|
return strings.Contains(strings.ToLower(value), strings.ToLower(pattern))
|
|
case "glob":
|
|
return matchGlobPattern(pattern, value)
|
|
}
|
|
return false
|
|
}
|
|
|
|
// matchGlobPattern supports * (any chars) and ? (single char) wildcards.
|
|
func matchGlobPattern(pattern, value string) bool {
|
|
p := strings.ToLower(pattern)
|
|
v := strings.ToLower(value)
|
|
return globMatch(p, v)
|
|
}
|
|
|
|
// globMatch reports whether str matches pattern, where '*' matches any
// (possibly empty) run of bytes and '?' matches exactly one byte.
// Matching is byte-wise; matchGlobPattern lowercases both sides first.
//
// Implemented with iterative single-pass backtracking — O(len(pattern) *
// len(str)) worst case — replacing the previous naive recursion, whose
// worst case was exponential for patterns containing many '*'s. That
// matters here because patterns come from the user-editable
// qt_ignore_rules table and are applied to every imported row.
func globMatch(pattern, str string) bool {
	pi, si := 0, 0          // cursors into pattern and str
	starPi, starSi := -1, 0 // last '*' seen, and how far it has consumed str
	for si < len(str) {
		switch {
		case pi < len(pattern) && (pattern[pi] == '?' || pattern[pi] == str[si]):
			// Literal byte or single-char wildcard: advance both cursors.
			pi++
			si++
		case pi < len(pattern) && pattern[pi] == '*':
			// Record the star; tentatively match it against nothing.
			starPi = pi
			starSi = si
			pi++
		case starPi >= 0:
			// Mismatch after a star: let the star absorb one more byte.
			starSi++
			si = starSi
			pi = starPi + 1
		default:
			return false
		}
	}
	// Any trailing stars may match the empty suffix.
	for pi < len(pattern) && pattern[pi] == '*' {
		pi++
	}
	return pi == len(pattern)
}
|
|
|
|
// isKnownPartnumber returns true if the partnumber exists in qt_partnumber_book_items.
|
|
func (s *UnifiedImportService) isKnownPartnumber(pn string) bool {
|
|
var count int64
|
|
s.db.Model(&models.PartnumberBookItem{}).
|
|
Where("partnumber = ?", pn).
|
|
Count(&count)
|
|
return count > 0
|
|
}
|