Compare commits

...

57 Commits

Author SHA1 Message Date
M_Kececi
c6bdf83f05 Merge remote-tracking branch 'origin/master' 2026-04-17 12:16:50 +03:00
M_Kececi
f9728b8a4c Merge remote-tracking branch 'origin/master' 2026-04-16 17:46:50 +03:00
M_Kececi
307282928c Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:59 +03:00
M_Kececi
29909f3609 Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:55 +03:00
M_Kececi
bb856cb082 Merge remote-tracking branch 'origin/master' 2026-04-16 16:25:45 +03:00
M_Kececi
b065e7192d Merge remote-tracking branch 'origin/master' 2026-04-16 16:00:24 +03:00
M_Kececi
2d369e7d7d Merge remote-tracking branch 'origin/master' 2026-04-16 15:18:44 +03:00
M_Kececi
1831c45a0c Merge remote-tracking branch 'origin/master' 2026-04-15 17:03:25 +03:00
M_Kececi
1a80184cac Merge remote-tracking branch 'origin/master' 2026-04-15 16:43:21 +03:00
M_Kececi
5be7315bdb Merge remote-tracking branch 'origin/master' 2026-04-15 15:54:44 +03:00
M_Kececi
c925af5ba1 Merge remote-tracking branch 'origin/master' 2026-04-14 18:04:19 +03:00
M_Kececi
352a7e26ea Merge remote-tracking branch 'origin/master' 2026-04-14 17:53:58 +03:00
M_Kececi
9ee70eb05a Merge remote-tracking branch 'origin/master' 2026-04-14 17:52:38 +03:00
M_Kececi
8694511e79 Merge remote-tracking branch 'origin/master' 2026-04-14 17:46:15 +03:00
M_Kececi
69ba4b2ecb Merge remote-tracking branch 'origin/master' 2026-04-14 17:34:46 +03:00
M_Kececi
eb628e99c2 Merge remote-tracking branch 'origin/master' 2026-04-14 17:23:24 +03:00
M_Kececi
431441802e Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:18 +03:00
M_Kececi
7457d95bac Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:14 +03:00
M_Kececi
e352b8c47a Merge remote-tracking branch 'origin/master' 2026-04-14 17:03:21 +03:00
M_Kececi
d8b6b7166c Merge remote-tracking branch 'origin/master' 2026-04-14 16:51:27 +03:00
M_Kececi
aec450c3cd Merge remote-tracking branch 'origin/master' 2026-04-14 16:35:21 +03:00
M_Kececi
47fc7a6178 Merge remote-tracking branch 'origin/master' 2026-04-14 16:34:25 +03:00
M_Kececi
214677da1e Merge remote-tracking branch 'origin/master' 2026-04-14 16:17:59 +03:00
M_Kececi
b1a3bbd3c5 Merge remote-tracking branch 'origin/master' 2026-04-04 19:05:11 +03:00
M_Kececi
6467017470 Merge remote-tracking branch 'origin/master' 2026-04-03 16:30:31 +03:00
M_Kececi
bf97e20e79 Merge remote-tracking branch 'origin/master' 2026-04-03 15:47:22 +03:00
M_Kececi
2b04688905 Merge remote-tracking branch 'origin/master' 2026-04-03 15:23:53 +03:00
M_Kececi
79f7fa0974 Merge remote-tracking branch 'origin/master' 2026-04-03 15:18:46 +03:00
M_Kececi
e965eb7c36 Merge remote-tracking branch 'origin/master' 2026-04-03 15:08:31 +03:00
M_Kececi
07c000358e Merge remote-tracking branch 'origin/master' 2026-04-03 14:56:22 +03:00
M_Kececi
415e3db084 Merge remote-tracking branch 'origin/master' 2026-04-03 14:32:19 +03:00
M_Kececi
f46532cee1 Merge remote-tracking branch 'origin/master' 2026-04-03 14:22:09 +03:00
M_Kececi
e1064010f3 Merge remote-tracking branch 'origin/master' 2026-04-03 14:16:27 +03:00
M_Kececi
67ef80936a Merge remote-tracking branch 'origin/master' 2026-04-03 14:02:05 +03:00
M_Kececi
548931f714 Merge remote-tracking branch 'origin/master'
# Conflicts:
#	ui/src/stores/orderentryStore.js
2026-04-03 13:13:14 +03:00
M_Kececi
5adf71c4cc Merge remote-tracking branch 'origin/master' 2026-04-03 13:11:17 +03:00
M_Kececi
c552126ecf Merge remote-tracking branch 'origin/master' 2026-04-02 16:39:48 +03:00
M_Kececi
028c11e042 Merge remote-tracking branch 'origin/master' 2026-04-02 16:30:49 +03:00
M_Kececi
7a98652a8e Merge remote-tracking branch 'origin/master' 2026-04-02 13:51:43 +03:00
M_Kececi
4549152594 Merge remote-tracking branch 'origin/master' 2026-04-02 13:36:22 +03:00
M_Kececi
f5c91abafa Merge remote-tracking branch 'origin/master' 2026-04-02 10:53:47 +03:00
M_Kececi
a97accbdb1 Merge remote-tracking branch 'origin/master' 2026-04-02 10:44:43 +03:00
M_Kececi
4af852c853 Merge remote-tracking branch 'origin/master' 2026-04-02 09:18:15 +03:00
M_Kececi
fce3d8e486 Merge remote-tracking branch 'origin/master' 2026-03-31 17:20:29 +03:00
M_Kececi
526407fdfa Merge remote-tracking branch 'origin/master' 2026-03-31 17:14:23 +03:00
M_Kececi
92f677ae3e Merge remote-tracking branch 'origin/master' 2026-03-31 15:25:20 +03:00
M_Kececi
b6772332cd Merge remote-tracking branch 'origin/master' 2026-03-31 15:15:15 +03:00
M_Kececi
5b6b9a26bd Merge remote-tracking branch 'origin/master' 2026-03-31 15:05:31 +03:00
M_Kececi
1b204bb8ed Merge remote-tracking branch 'origin/master' 2026-03-31 14:59:41 +03:00
M_Kececi
4a67f0f444 Merge remote-tracking branch 'origin/master' 2026-03-31 14:57:39 +03:00
M_Kececi
7b1de24dfb Merge remote-tracking branch 'origin/master' 2026-03-31 14:54:19 +03:00
M_Kececi
afe77171f4 Merge remote-tracking branch 'origin/master' 2026-03-31 14:07:58 +03:00
M_Kececi
173d734883 Merge remote-tracking branch 'origin/master' 2026-03-31 13:34:18 +03:00
M_Kececi
ee9150e45a Merge remote-tracking branch 'origin/master' 2026-03-31 13:28:32 +03:00
M_Kececi
2b40983cee Merge remote-tracking branch 'origin/master' 2026-03-31 13:07:00 +03:00
M_Kececi
ed80e4f492 Merge remote-tracking branch 'origin/master' 2026-03-31 12:46:48 +03:00
M_Kececi
d7d871fb8a Merge remote-tracking branch 'origin/master' 2026-03-31 12:45:22 +03:00
78 changed files with 11966 additions and 753 deletions

View File

@@ -93,7 +93,7 @@ ensure_ui_permissions() {
clean_ui_build_artifacts() {
  # Remove only the incremental build caches before a UI build.
  # We deliberately keep dist/ in place: stale chunks remain servable for a
  # short while, which reduces ChunkLoadError for clients mid-deploy.
  # Guard the cd: without it, a missing ui/ dir would make rm run in the
  # caller's working directory (SC2164).
  cd "$APP_DIR/ui" || return 1
  rm -rf .quasar node_modules/.cache || true
}
@@ -217,6 +217,36 @@ ensure_ui_readable_by_nginx() {
fi
}
ensure_node20_for_ui_build() {
  # Guarantee Node.js >= 20 is active for the UI build, preferring nvm when
  # installed. Returns 1 (with a hint) when no suitable node is available.
  local required_major=20
  local nvm_dir="${NVM_DIR:-$HOME/.nvm}"
  if [[ -s "$nvm_dir/nvm.sh" ]]; then
    # shellcheck disable=SC1090
    source "$nvm_dir/nvm.sh"
    nvm install "$required_major" >/dev/null
    nvm use "$required_major" >/dev/null
  fi
  if ! command -v node >/dev/null 2>&1; then
    echo "ERROR: node command not found"
    return 1
  fi
  local node_version
  node_version="$(node -v 2>/dev/null || true)"
  # Extract the major version only when output matches the expected "vN..."
  # shape. The previous sed pipeline passed unexpected output through
  # unchanged, which then made the numeric -lt comparison below error out.
  local node_major=""
  if [[ "$node_version" =~ ^v([0-9]+) ]]; then
    node_major="${BASH_REMATCH[1]}"
  fi
  if [[ -z "$node_major" || "$node_major" -lt "$required_major" ]]; then
    echo "ERROR: Node.js >=${required_major} required for UI build. Current: ${node_version:-unknown}"
    echo "Hint: install nvm and run: nvm install ${required_major} && nvm alias default ${required_major}"
    return 1
  fi
  echo "UI build runtime: node=$node_version npm=$(npm -v)"
}
build_api_binary() {
if ! command -v go >/dev/null 2>&1; then
echo "ERROR: go command not found"
@@ -291,6 +321,7 @@ run_deploy() {
log_step "BUILD UI"
cd "$APP_DIR/ui"
ensure_node20_for_ui_build
clean_ui_build_artifacts
npm ci --no-audit --no-fund --include=optional
npm i -D --no-audit --no-fund sass-embedded@1.93.2

View File

@@ -0,0 +1,52 @@
# i18n + Dinamik Çeviri Standardı
Bu projede çok dilli yapı iki katmanlıdır:
1. Statik UI metinleri `i18n` ile yönetilir.
2. Dinamik içerikler `mk_translator` + otomatik çeviri servisi (OpenAI) ile yönetilir.
## 1) Statik UI (Deterministik)
Kullanım alanı:
- buton metinleri
- menüler
- form label'ları
- validasyon mesajları
- sabit ekran başlıkları
- route/meta/title
Kural:
- her metin key bazlı tutulur (`$t('common.save')`)
- locale dosyaları: `tr`, `en`, `de`, `it`, `es`, `ru`, `ar`
- fallback sırası: hedef dil -> `en` -> `tr`
## 2) Dinamik İçerik (DB/CMS/Serbest metin)
Akış:
1. Kaynak metin için `mk_translator` kontrol edilir.
2. Hedef dil karşılığı yoksa OpenAI ile çeviri üretilir.
3. Sonuç `mk_translator` tablosuna yazılır.
4. Sonraki isteklerde DB sonucu kullanılır (cache etkisi).
Kullanım alanı:
- ürün/kategori açıklamaları
- CMS içerikleri
- admin panelden girilen serbest metinler
- şablon bazlı metin içerikleri
## Kalite ve Güvenlik Kuralları
- Prompt net olmalı: sadece çeviri dönsün, açıklama eklemesin.
- Placeholder/format korunsun: `{name}`, `{{count}}`, `%s` gibi yapılar bozulmasın.
- HTML tag'leri ve kod/SKU değerleri çevrilmesin.
- API key sadece backend'de tutulur (`OPENAI_API_KEY` client'a verilmez).
- 429/5xx için retry + exponential backoff uygulanır.
- Hassas veri içeriği olan metinlerde veri politikası kontrolü yapılır.
## Özet
Bu servis, `i18n`'in alternatifi değildir; `i18n`'i tamamlayan dinamik çeviri katmanıdır.
- Statik UI: `i18n`
- Dinamik içerik: `mk_translator` + OpenAI + cache

View File

@@ -16,4 +16,9 @@
| Cloudflare | bt@baggi.com.tr | Baggi2025!.? |
| 172.16.0.3 | ct | pasauras |
## Dil ve Çeviri Standardı
Detaylı mimari dokümanı:
- [docs/i18n-dynamic-translation-standard.md](docs/i18n-dynamic-translation-standard.md)

View File

@@ -0,0 +1,48 @@
-- language_module_seed.sql
-- Idempotent seed for the "language" permission module: registers its API
-- routes and clones existing "system" permissions onto it so the permission
-- screens pick it up immediately. Safe to re-run (upsert / ON CONFLICT).
-- 1) Register language module routes if missing.
--    NOTE(review): every route, including the GET list endpoint, is mapped to
--    the 'update' action -- confirm read-only access is intentionally excluded.
INSERT INTO mk_sys_routes (path, method, module_code, action)
VALUES
('/api/language/translations', 'GET', 'language', 'update'),
('/api/language/translations/{id}', 'PUT', 'language', 'update'),
('/api/language/translations/upsert-missing', 'POST', 'language', 'update'),
('/api/language/translations/sync-sources', 'POST', 'language', 'update'),
('/api/language/translations/translate-selected', 'POST', 'language', 'update'),
('/api/language/translations/bulk-approve', 'POST', 'language', 'update'),
('/api/language/translations/bulk-update', 'POST', 'language', 'update')
ON CONFLICT (path, method) DO UPDATE
SET
module_code = EXCLUDED.module_code,
action = EXCLUDED.action;
-- 2) Remove legacy system translation routes (optional cleanup).
--    NOTE(review): destructive -- any code still binding
--    /api/system/translations* loses its route rows after this runs.
DELETE FROM mk_sys_routes
WHERE path LIKE '/api/system/translations%';
-- 3) Seed role permissions for the language module by cloning each role's
--    existing "system" module rows (same action set, same allowed flag).
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT rp.role_id, 'language', rp.action, rp.allowed
FROM mk_sys_role_permissions rp
WHERE rp.module_code = 'system'
AND rp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;
-- 4) Ensure the admin role (hard-coded id = 3, per dfrole) always has
--    'update' access regardless of what step 3 cloned.
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT r.id, 'language', 'update', true
FROM dfrole r
WHERE r.id = 3
ON CONFLICT DO NOTHING;
-- 5) Seed role+department permissions for the language module by cloning the
--    existing "system" module role+department rows.
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT DISTINCT
rdp.role_id,
rdp.department_code,
'language',
rdp.action,
rdp.allowed
FROM mk_sys_role_department_permissions rdp
WHERE rdp.module_code = 'system'
AND rdp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;

View File

@@ -32,3 +32,6 @@ API_HOST=0.0.0.0
API_PORT=8080
AZURE_TRANSLATOR_KEY=d055c693-a84e-4594-8aef-a6c05c42623a
AZURE_TRANSLATOR_ENDPOINT=https://api.cognitive.microsofttranslator.com
AZURE_TRANSLATOR_REGION=westeurope

View File

@@ -0,0 +1,72 @@
package main
import (
"bssapp-backend/db"
"bssapp-backend/routes"
"fmt"
"log"
"os"
"strconv"
"strings"
"github.com/joho/godotenv"
)
// main is a one-shot CLI entry point that synchronizes translation rows.
//
// It loads the env files, connects MSSQL and Postgres, reads three optional
// tuning knobs from the environment, then delegates the actual work to
// routes.PerformTranslationSync and prints a one-line summary.
//
// Knobs:
//   - TRANSLATION_SYNC_LIMIT:          positive int row cap (default 30000)
//   - TRANSLATION_SYNC_LANGS:          comma-separated target langs
//     (default en,de,it,es,ru,ar)
//   - TRANSLATION_SYNC_AUTO_TRANSLATE: "0"/"false"/"off" disables auto-translate
func main() {
	_ = godotenv.Load(".env", "mail.env", ".env.local")

	if err := db.ConnectMSSQL(); err != nil {
		log.Fatalf("mssql connect failed: %v", err)
	}
	pgDB, err := db.ConnectPostgres()
	if err != nil {
		log.Fatalf("postgres connect failed: %v", err)
	}
	defer pgDB.Close()

	// Row cap: the env override wins only when it parses as a positive int.
	limit := 30000
	if raw := os.Getenv("TRANSLATION_SYNC_LIMIT"); raw != "" {
		if n, convErr := strconv.Atoi(raw); convErr == nil && n > 0 {
			limit = n
		}
	}

	// Target languages: the default set is replaced wholesale when the env
	// var yields at least one non-empty, lowercased entry.
	langs := []string{"en", "de", "it", "es", "ru", "ar"}
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LANGS")); raw != "" {
		var custom []string
		for _, piece := range strings.Split(raw, ",") {
			if code := strings.TrimSpace(strings.ToLower(piece)); code != "" {
				custom = append(custom, code)
			}
		}
		if len(custom) > 0 {
			langs = custom
		}
	}

	// Auto-translate defaults to on; only a few explicit "falsy" strings
	// turn it off (an empty/unset var keeps the default).
	autoTranslate := true
	switch strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_AUTO_TRANSLATE"))) {
	case "0", "false", "off":
		autoTranslate = false
	}

	result, err := routes.PerformTranslationSync(pgDB, db.MssqlDB, routes.TranslationSyncOptions{
		AutoTranslate: autoTranslate,
		Languages:     langs,
		Limit:         limit,
		OnlyNew:       true,
	})
	if err != nil {
		log.Fatalf("manual sync failed: %v", err)
	}
	fmt.Printf("translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v\n",
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
		result.TargetLangs,
	)
}

View File

@@ -3,32 +3,120 @@ package db
import (
"database/sql"
"fmt"
"net/url"
"os"
"strconv"
"strings"
"time"
_ "github.com/microsoft/go-mssqldb"
)
var MssqlDB *sql.DB
// ConnectMSSQL MSSQL baglantisini ortam degiskeninden baslatir.
func envInt(name string, fallback int) int {
raw := strings.TrimSpace(os.Getenv(name))
if raw == "" {
return fallback
}
value, err := strconv.Atoi(raw)
if err != nil || value <= 0 {
return fallback
}
return value
}
// ensureTimeoutValue returns the trimmed `current` value when it already
// encodes an integer >= desired; otherwise it returns `desired` rendered
// as a string (i.e. it only ever raises a timeout, never lowers it).
func ensureTimeoutValue(current string, desired int) string {
	trimmed := strings.TrimSpace(current)
	if parsed, err := strconv.Atoi(trimmed); err == nil && parsed >= desired {
		return trimmed
	}
	return strconv.Itoa(desired)
}
// ensureMSSQLTimeouts forces minimum "connection timeout" and "dial timeout"
// values into an MSSQL connection string, supporting both the
// sqlserver:// URL form and the key=value;key=value ODBC-style form.
// Existing values are kept when they are already >= the desired minimum
// (see ensureTimeoutValue); parse failures return the input unchanged.
func ensureMSSQLTimeouts(connString string, connectionTimeoutSec int, dialTimeoutSec int) string {
raw := strings.TrimSpace(connString)
if raw == "" {
return raw
}
// URL form: rewrite the query parameters and re-encode.
if strings.HasPrefix(strings.ToLower(raw), "sqlserver://") {
u, err := url.Parse(raw)
if err != nil {
// Unparseable URL: leave the caller's string untouched rather than guess.
return raw
}
q := u.Query()
q.Set("connection timeout", ensureTimeoutValue(q.Get("connection timeout"), connectionTimeoutSec))
q.Set("dial timeout", ensureTimeoutValue(q.Get("dial timeout"), dialTimeoutSec))
u.RawQuery = q.Encode()
return u.String()
}
// Semicolon form: patch matching key=value segments in place, remembering
// whether each timeout key was seen so missing ones can be appended.
parts := strings.Split(raw, ";")
foundConnectionTimeout := false
foundDialTimeout := false
for i, part := range parts {
part = strings.TrimSpace(part)
if part == "" {
continue
}
eq := strings.Index(part, "=")
if eq <= 0 {
// Not a key=value segment; preserved as-is via the original parts[i].
continue
}
// Keys are matched case-insensitively but rewritten in canonical lowercase.
key := strings.ToLower(strings.TrimSpace(part[:eq]))
value := strings.TrimSpace(part[eq+1:])
switch key {
case "connection timeout":
foundConnectionTimeout = true
parts[i] = "connection timeout=" + ensureTimeoutValue(value, connectionTimeoutSec)
case "dial timeout":
foundDialTimeout = true
parts[i] = "dial timeout=" + ensureTimeoutValue(value, dialTimeoutSec)
}
}
if !foundConnectionTimeout {
parts = append(parts, "connection timeout="+strconv.Itoa(connectionTimeoutSec))
}
if !foundDialTimeout {
parts = append(parts, "dial timeout="+strconv.Itoa(dialTimeoutSec))
}
return strings.Join(parts, ";")
}
// ConnectMSSQL initializes the MSSQL connection from environment.
//
// It reads MSSQL_CONN plus optional pool/timeout tuning variables, forces
// minimum connection/dial timeouts into the connection string, opens the
// pool into the package-level MssqlDB, and verifies reachability via Ping.
//
// NOTE(review): this span is a diff rendering where removed and added lines
// appear back-to-back without +/- markers (each consecutive duplicate
// `return` pair, and the trailing Println/Printf pair). Only one of each
// pair exists in the real file — confirm against the repository before
// relying on this text.
func ConnectMSSQL() error {
connString := strings.TrimSpace(os.Getenv("MSSQL_CONN"))
if connString == "" {
return fmt.Errorf("MSSQL_CONN tanımlı değil")
return fmt.Errorf("MSSQL_CONN tanimli degil")
}
// Dial timeout defaults to the connection timeout when not set explicitly.
connectionTimeoutSec := envInt("MSSQL_CONNECTION_TIMEOUT_SEC", 120)
dialTimeoutSec := envInt("MSSQL_DIAL_TIMEOUT_SEC", connectionTimeoutSec)
connString = ensureMSSQLTimeouts(connString, connectionTimeoutSec, dialTimeoutSec)
var err error
MssqlDB, err = sql.Open("sqlserver", connString)
if err != nil {
return fmt.Errorf("MSSQL bağlantı hatası: %w", err)
return fmt.Errorf("MSSQL baglanti hatasi: %w", err)
}
// Pool sizing and connection recycling, all overridable via environment.
MssqlDB.SetMaxOpenConns(envInt("MSSQL_MAX_OPEN_CONNS", 40))
MssqlDB.SetMaxIdleConns(envInt("MSSQL_MAX_IDLE_CONNS", 40))
MssqlDB.SetConnMaxLifetime(time.Duration(envInt("MSSQL_CONN_MAX_LIFETIME_MIN", 30)) * time.Minute)
MssqlDB.SetConnMaxIdleTime(time.Duration(envInt("MSSQL_CONN_MAX_IDLE_MIN", 10)) * time.Minute)
// Fail fast at startup when the server is unreachable.
if err = MssqlDB.Ping(); err != nil {
return fmt.Errorf("MSSQL erişilemiyor: %w", err)
return fmt.Errorf("MSSQL erisilemiyor: %w", err)
}
fmt.Println("MSSQL bağlantısı başarılı")
fmt.Printf("MSSQL baglantisi basarili (connection timeout=%ds, dial timeout=%ds)\n", connectionTimeoutSec, dialTimeoutSec)
return nil
}

122
svc/internal/i18n/lang.go Normal file
View File

@@ -0,0 +1,122 @@
package i18n
import "strings"
// DefaultLang is the fallback language code used whenever a requested
// code is blank or unsupported.
const DefaultLang = "TR"

// supported enumerates the language codes this package can serve.
var supported = map[string]struct{}{
	"TR": {}, "EN": {}, "DE": {}, "IT": {}, "ES": {}, "RU": {}, "AR": {},
}

// NormalizeLangCode trims and upper-cases raw, returning it when it is a
// supported code and DefaultLang otherwise.
func NormalizeLangCode(raw string) string {
	code := strings.ToUpper(strings.TrimSpace(raw))
	if _, known := supported[code]; !known {
		return DefaultLang
	}
	return code
}
// ResolveLangCode picks the response language. Precedence:
//  1. an explicit, supported queryLangCode (including an explicit request
//     for the default language itself);
//  2. the first entry of the Accept-Language header, reduced to its
//     2-letter primary subtag;
//  3. DefaultLang.
func ResolveLangCode(queryLangCode, acceptLanguage string) string {
	normalized := NormalizeLangCode(queryLangCode)
	// Non-default result means the query named a supported language; the
	// EqualFold clause also honors a literal request for the default.
	if normalized != DefaultLang || strings.EqualFold(strings.TrimSpace(queryLangCode), DefaultLang) {
		return normalized
	}

	header := strings.TrimSpace(acceptLanguage)
	if header == "" {
		return DefaultLang
	}
	// "en-US,en;q=0.9" -> "en-US": take the first comma entry, drop any
	// ";q=..." quality suffix, then keep the 2-letter primary subtag.
	primary := strings.TrimSpace(strings.Split(strings.Split(header, ",")[0], ";")[0])
	if len(primary) < 2 {
		return DefaultLang
	}
	return NormalizeLangCode(primary[:2])
}
// T translates key for langCode, trying each language in the fallback
// chain (lang -> EN -> TR, see fallbackLangs). When no dictionary entry
// exists anywhere in the chain, the key itself is returned.
func T(langCode, key string) string {
	for _, candidate := range fallbackLangs(langCode) {
		if translated, ok := dict[candidate][key]; ok {
			return translated
		}
	}
	return key
}
// fallbackLangs yields the dictionary lookup order for a language:
// TR stands alone, EN falls back to TR, and any other supported language
// tries itself, then EN, then TR.
func fallbackLangs(langCode string) []string {
	normalized := NormalizeLangCode(langCode)
	chain := []string{normalized}
	if normalized != "EN" && normalized != "TR" {
		chain = append(chain, "EN")
	}
	if normalized != "TR" {
		chain = append(chain, "TR")
	}
	return chain
}
// dict holds the built-in translations for PDF report labels, keyed first
// by language code, then by message key. Only TR and EN are embedded;
// every other supported language reaches these via the EN -> TR fallback
// chain in fallbackLangs.
var dict = map[string]map[string]string{
"TR": {
"pdf.report_title": "Cari Hesap Raporu",
"pdf.date": "Tarih",
"pdf.customer": "Cari",
"pdf.date_range": "Tarih Aralığı",
"pdf.page": "Sayfa",
"pdf.ending_balance": "Son Bakiye",
"pdf.currency_prefix": "Para Birimi",
"pdf.balance_prefix": "Bakiye",
// Main (ledger) table column headers.
"pdf.main.doc_no": "Belge No",
"pdf.main.date": "Tarih",
"pdf.main.due_date": "Vade",
"pdf.main.operation": "İşlem",
"pdf.main.description": "Açıklama",
"pdf.main.currency": "Para",
"pdf.main.debit": "Borç",
"pdf.main.credit": "Alacak",
"pdf.main.balance": "Bakiye",
// Detail table column headers.
"pdf.detail.main_group": "Ana Grup",
"pdf.detail.sub_group": "Alt Grup",
"pdf.detail.waiter": "Garson",
"pdf.detail.fit": "Fit",
"pdf.detail.content": "İçerik",
"pdf.detail.product": "Ürün",
"pdf.detail.color": "Renk",
"pdf.detail.qty": "Adet",
"pdf.detail.price": "Fiyat",
"pdf.detail.total": "Tutar",
},
"EN": {
"pdf.report_title": "Customer Account Report",
"pdf.date": "Date",
"pdf.customer": "Customer",
"pdf.date_range": "Date Range",
"pdf.page": "Page",
"pdf.ending_balance": "Ending Balance",
"pdf.currency_prefix": "Currency",
"pdf.balance_prefix": "Balance",
// Main (ledger) table column headers.
"pdf.main.doc_no": "Document No",
"pdf.main.date": "Date",
"pdf.main.due_date": "Due Date",
"pdf.main.operation": "Operation",
"pdf.main.description": "Description",
"pdf.main.currency": "Curr.",
"pdf.main.debit": "Debit",
"pdf.main.credit": "Credit",
"pdf.main.balance": "Balance",
// Detail table column headers.
"pdf.detail.main_group": "Main Group",
"pdf.detail.sub_group": "Sub Group",
"pdf.detail.waiter": "Waiter",
"pdf.detail.fit": "Fit",
"pdf.detail.content": "Content",
"pdf.detail.product": "Product",
"pdf.detail.color": "Color",
"pdf.detail.qty": "Qty",
"pdf.detail.price": "Price",
"pdf.detail.total": "Total",
},
}

View File

@@ -3,6 +3,7 @@ package mailer
import (
"context"
"crypto/tls"
"encoding/base64"
"errors"
"fmt"
"net"
@@ -138,11 +139,13 @@ func (m *Mailer) Send(ctx context.Context, msg Message) error {
}
func buildMIME(from string, to []string, subject, contentType, body string) string {
// Subject UTF-8 basit hali (gerekirse sonra MIME encoded-word ekleriz)
// Encode Subject to UTF-8
encodedSubject := "=?UTF-8?B?" + base64.StdEncoding.EncodeToString([]byte(subject)) + "?="
headers := []string{
"From: " + from,
"To: " + strings.Join(to, ", "),
"Subject: " + subject,
"Subject: " + encodedSubject,
"MIME-Version: 1.0",
"Content-Type: " + contentType,
"",

View File

@@ -104,7 +104,26 @@ func autoRegisterRouteV3(
return
}
// 2) ADMIN AUTO PERMISSION (module+action bazlı)
// 2) MODULE LOOKUP AUTO SEED (permission ekranları için)
moduleLabel := strings.TrimSpace(strings.ReplaceAll(module, "_", " "))
if moduleLabel == "" {
moduleLabel = module
}
_, err = tx.Exec(`
INSERT INTO mk_sys_modules (code, name)
VALUES ($1::text, $2::text)
ON CONFLICT (code) DO UPDATE
SET name = COALESCE(NULLIF(EXCLUDED.name, ''), mk_sys_modules.name)
`,
module,
moduleLabel,
)
if err != nil {
log.Printf("❌ Module seed error (%s %s): %v", method, path, err)
return
}
// 3) ROLE PERMISSION AUTO SEED (admin=true, diğer roller=false)
_, err = tx.Exec(`
INSERT INTO mk_sys_role_permissions
(role_id, module_code, action, allowed)
@@ -112,16 +131,50 @@ func autoRegisterRouteV3(
id,
$1,
$2,
true
CASE
WHEN id = 3 OR LOWER(code) = 'admin' THEN true
ELSE false
END
FROM dfrole
WHERE id = 3 -- ADMIN
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Admin perm seed error (%s %s): %v", method, path, err)
log.Printf("❌ Role perm seed error (%s %s): %v", method, path, err)
return
}
// 4) ROLE+DEPARTMENT PERMISSION AUTO SEED
// Existing role+department kombinasyonlarına yeni module+action satırı eklenir.
_, err = tx.Exec(`
WITH role_dept_scope AS (
SELECT DISTINCT role_id, department_code
FROM mk_sys_role_department_permissions
UNION
SELECT 3 AS role_id, d.code AS department_code
FROM mk_dprt d
)
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT
rds.role_id,
rds.department_code,
$1,
$2,
CASE
WHEN rds.role_id = 3 THEN true
ELSE false
END
FROM role_dept_scope rds
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Role+Dept perm seed error (%s %s): %v", method, path, err)
return
}
@@ -265,6 +318,41 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"system", "update",
wrapV3(routes.SaveMarketMailMappingHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations", "GET",
"language", "update",
wrapV3(routes.GetTranslationRowsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/{id}", "PUT",
"language", "update",
wrapV3(routes.UpdateTranslationRowHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/upsert-missing", "POST",
"language", "update",
wrapV3(routes.UpsertMissingTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/sync-sources", "POST",
"language", "update",
wrapV3(routes.SyncTranslationSourcesHandler(pgDB, mssql)),
)
bindV3(r, pgDB,
"/api/language/translations/translate-selected", "POST",
"language", "update",
wrapV3(routes.TranslateSelectedTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-approve", "POST",
"language", "update",
wrapV3(routes.BulkApproveTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-update", "POST",
"language", "update",
wrapV3(routes.BulkUpdateTranslationsHandler(pgDB)),
)
// ============================================================
// PERMISSIONS
@@ -519,13 +607,15 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
}{
{"/api/order/create", "POST", "insert", routes.CreateOrderHandler(pgDB, mssql)},
{"/api/order/update", "POST", "update", http.HandlerFunc(routes.UpdateOrderHandler)},
{"/api/order/{id}/bulk-due-date", "POST", "update", routes.BulkUpdateOrderLineDueDateHandler(mssql)},
{"/api/order/get/{id}", "GET", "view", routes.GetOrderByIDHandler(mssql)},
{"/api/orders/list", "GET", "view", routes.OrderListRoute(mssql)},
{"/api/orders/production-list", "GET", "update", routes.OrderProductionListRoute(mssql)},
{"/api/orders/production-items/cditem-lookups", "GET", "view", routes.OrderProductionCdItemLookupsRoute(mssql)},
{"/api/orders/production-items/{id}", "GET", "view", routes.OrderProductionItemsRoute(mssql)},
{"/api/orders/production-items/{id}/insert-missing", "POST", "update", routes.OrderProductionInsertMissingRoute(mssql)},
{"/api/orders/production-items/{id}/validate", "POST", "update", routes.OrderProductionValidateRoute(mssql)},
{"/api/orders/production-items/{id}/apply", "POST", "update", routes.OrderProductionApplyRoute(mssql)},
{"/api/orders/production-items/{id}/apply", "POST", "update", routes.OrderProductionApplyRoute(mssql, ml)},
{"/api/orders/close-ready", "GET", "update", routes.OrderCloseReadyListRoute(mssql)},
{"/api/orders/bulk-close", "POST", "update", routes.OrderBulkCloseRoute(mssql)},
{"/api/orders/export", "GET", "export", routes.OrderListExcelRoute(mssql)},
@@ -570,11 +660,22 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
wrapV3(http.HandlerFunc(routes.GetProductDetailHandler)),
)
bindV3(r, pgDB,
"/api/product-cditem", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductCdItemHandler)),
)
bindV3(r, pgDB,
"/api/product-colors", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-newcolors", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductNewColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-colorsize", "GET",
@@ -587,6 +688,21 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductSecondColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-newsecondcolor", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductNewSecondColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-attributes", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductAttributesHandler)),
)
bindV3(r, pgDB,
"/api/product-item-attributes", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductItemAttributesHandler)),
)
bindV3(r, pgDB,
"/api/product-stock-query", "GET",
"order", "view",
@@ -617,6 +733,11 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"order", "view",
wrapV3(routes.GetProductSizeMatchRulesHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/pricing/products", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductPricingListHandler)),
)
// ============================================================
// ROLE MANAGEMENT
@@ -726,6 +847,11 @@ func main() {
auditlog.Init(pgDB, 1000)
log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)")
// -------------------------------------------------------
// 🚀 TRANSLATION QUERY PERFORMANCE INDEXES
// -------------------------------------------------------
routes.EnsureTranslationPerfIndexes(pgDB)
// -------------------------------------------------------
// ✉️ MAILER INIT
// -------------------------------------------------------
@@ -744,6 +870,7 @@ func main() {
// 🌍 SERVER
// -------------------------------------------------------
router := InitRoutes(pgDB, db.MssqlDB, graphMailer)
startTranslationSyncScheduler(pgDB, db.MssqlDB)
handler := enableCORS(
middlewares.GlobalAuthMiddleware(

View File

@@ -13,8 +13,10 @@ type OrderProductionItem struct {
OldItemCode string `json:"OldItemCode"`
OldColor string `json:"OldColor"`
OldColorDescription string `json:"OldColorDescription"`
OldDim2 string `json:"OldDim2"`
OldDesc string `json:"OldDesc"`
OldQty float64 `json:"OldQty"`
NewItemCode string `json:"NewItemCode"`
NewColor string `json:"NewColor"`
@@ -22,4 +24,6 @@ type OrderProductionItem struct {
NewDesc string `json:"NewDesc"`
IsVariantMissing bool `json:"IsVariantMissing"`
OldDueDate string `json:"OldDueDate"`
NewDueDate string `json:"NewDueDate"`
}

View File

@@ -4,13 +4,19 @@ type OrderProductionUpdateLine struct {
OrderLineID string `json:"OrderLineID"`
NewItemCode string `json:"NewItemCode"`
NewColor string `json:"NewColor"`
ItemDim1Code *string `json:"ItemDim1Code,omitempty"`
NewDim2 string `json:"NewDim2"`
NewDesc string `json:"NewDesc"`
OldDueDate string `json:"OldDueDate"`
NewDueDate string `json:"NewDueDate"`
}
// OrderProductionUpdatePayload is the request body for applying production
// item changes to an order in bulk.
type OrderProductionUpdatePayload struct {
// Per-line new item/color/dim/due-date values to apply.
Lines []OrderProductionUpdateLine `json:"lines"`
// InsertMissing: presumably asks the server to create missing variants
// before applying the lines — TODO confirm against the apply handler.
InsertMissing bool `json:"insertMissing"`
// Draft cdItem master rows to create alongside the update.
CdItems []OrderProductionCdItemDraft `json:"cdItems"`
// Attribute assignments for the (possibly new) items.
ProductAttributes []OrderProductionItemAttributeRow `json:"productAttributes"`
// Optional header-level average due date; omitted when nil.
HeaderAverageDueDate *string `json:"HeaderAverageDueDate,omitempty"`
}
type OrderProductionMissingVariant struct {
@@ -22,3 +28,70 @@ type OrderProductionMissingVariant struct {
ItemDim2Code string `json:"ItemDim2Code"`
ItemDim3Code string `json:"ItemDim3Code"`
}
// OrderProductionBarcodeValidation describes the outcome of validating a
// scanned barcode against an order's production items. Code/Message carry
// the machine/human-readable result; the remaining fields echo the matched
// item identity when available (omitted from JSON when empty).
type OrderProductionBarcodeValidation struct {
Code string `json:"code"`
Message string `json:"message"`
Barcode string `json:"barcode,omitempty"`
BarcodeTypeCode string `json:"barcodeTypeCode,omitempty"`
ItemTypeCode int16 `json:"ItemTypeCode,omitempty"`
ItemCode string `json:"ItemCode,omitempty"`
ColorCode string `json:"ColorCode,omitempty"`
ItemDim1Code string `json:"ItemDim1Code,omitempty"`
ItemDim2Code string `json:"ItemDim2Code,omitempty"`
ItemDim3Code string `json:"ItemDim3Code,omitempty"`
}
// OrderProductionCdItemDraft is a draft master-data row for a cdItem to be
// created during a production apply. Pointer fields are optional; their
// exact ERP semantics mirror the cdItem table columns of the same names —
// TODO confirm column mapping against the target schema.
type OrderProductionCdItemDraft struct {
ItemTypeCode int16 `json:"ItemTypeCode"`
ItemCode string `json:"ItemCode"`
ItemDimTypeCode *int16 `json:"ItemDimTypeCode"`
ProductTypeCode *int16 `json:"ProductTypeCode"`
ProductHierarchyID *int `json:"ProductHierarchyID"`
UnitOfMeasureCode1 *string `json:"UnitOfMeasureCode1"`
ItemAccountGrCode *string `json:"ItemAccountGrCode"`
ItemTaxGrCode *string `json:"ItemTaxGrCode"`
ItemPaymentPlanGrCode *string `json:"ItemPaymentPlanGrCode"`
ItemDiscountGrCode *string `json:"ItemDiscountGrCode"`
ItemVendorGrCode *string `json:"ItemVendorGrCode"`
PromotionGroupCode *string `json:"PromotionGroupCode"`
ProductCollectionGrCode *string `json:"ProductCollectionGrCode"`
StorePriceLevelCode *string `json:"StorePriceLevelCode"`
PerceptionOfFashionCode *string `json:"PerceptionOfFashionCode"`
CommercialRoleCode *string `json:"CommercialRoleCode"`
StoreCapacityLevelCode *string `json:"StoreCapacityLevelCode"`
CustomsTariffNumberCode *string `json:"CustomsTariffNumberCode"`
CompanyCode *string `json:"CompanyCode"`
}
// OrderProductionLookupOption is a generic code/description pair used to
// populate dropdowns on the production screens.
type OrderProductionLookupOption struct {
Code string `json:"code"`
Description string `json:"description"`
}
// OrderProductionItemAttributeRow assigns one attribute value to one item.
type OrderProductionItemAttributeRow struct {
ItemTypeCode int16 `json:"ItemTypeCode"`
ItemCode string `json:"ItemCode"`
AttributeTypeCode int `json:"AttributeTypeCode"`
AttributeCode string `json:"AttributeCode"`
}
// OrderProductionCdItemLookups bundles every lookup list needed to render
// the cdItem draft form, one slice per selectable field.
type OrderProductionCdItemLookups struct {
ItemDimTypeCodes []OrderProductionLookupOption `json:"itemDimTypeCodes"`
ProductTypeCodes []OrderProductionLookupOption `json:"productTypeCodes"`
ProductHierarchyIDs []OrderProductionLookupOption `json:"productHierarchyIDs"`
UnitOfMeasureCode1List []OrderProductionLookupOption `json:"unitOfMeasureCode1List"`
ItemAccountGrCodes []OrderProductionLookupOption `json:"itemAccountGrCodes"`
ItemTaxGrCodes []OrderProductionLookupOption `json:"itemTaxGrCodes"`
ItemPaymentPlanGrCodes []OrderProductionLookupOption `json:"itemPaymentPlanGrCodes"`
ItemDiscountGrCodes []OrderProductionLookupOption `json:"itemDiscountGrCodes"`
ItemVendorGrCodes []OrderProductionLookupOption `json:"itemVendorGrCodes"`
PromotionGroupCodes []OrderProductionLookupOption `json:"promotionGroupCodes"`
ProductCollectionGrCodes []OrderProductionLookupOption `json:"productCollectionGrCodes"`
StorePriceLevelCodes []OrderProductionLookupOption `json:"storePriceLevelCodes"`
PerceptionOfFashionCodes []OrderProductionLookupOption `json:"perceptionOfFashionCodes"`
CommercialRoleCodes []OrderProductionLookupOption `json:"commercialRoleCodes"`
StoreCapacityLevelCodes []OrderProductionLookupOption `json:"storeCapacityLevelCodes"`
CustomsTariffNumbers []OrderProductionLookupOption `json:"customsTariffNumbers"`
CompanyCodes []OrderProductionLookupOption `json:"companyCodes"`
}

View File

@@ -0,0 +1,18 @@
package models
// ProductPricing is a read model for the product pricing list endpoint.
// Field names mix English and Turkish domain terms; the Turkish ones below
// are translated in comments as assumptions — TODO confirm with the query
// that populates this struct.
type ProductPricing struct {
ProductCode string `json:"ProductCode"`
CostPrice float64 `json:"CostPrice"`
StockQty float64 `json:"StockQty"`
// Dates are serialized as strings; format not visible here — confirm.
StockEntryDate string `json:"StockEntryDate"`
LastPricingDate string `json:"LastPricingDate"`
// AskiliYan: presumably "hanging/side" presentation flag — confirm.
AskiliYan string `json:"AskiliYan"`
// Kategori: category.
Kategori string `json:"Kategori"`
// UrunIlkGrubu / UrunAnaGrubu / UrunAltGrubu: product first/main/sub group.
UrunIlkGrubu string `json:"UrunIlkGrubu"`
UrunAnaGrubu string `json:"UrunAnaGrubu"`
UrunAltGrubu string `json:"UrunAltGrubu"`
// Icerik: content; Karisim: fabric blend/mixture; Marka: brand.
Icerik string `json:"Icerik"`
Karisim string `json:"Karisim"`
Marka string `json:"Marka"`
BrandGroupSec string `json:"BrandGroupSec"`
}

View File

@@ -0,0 +1,15 @@
package models
// ProductAttributeOption is one selectable attribute value, including the
// human-readable descriptions of both the attribute type and the value.
type ProductAttributeOption struct {
ItemTypeCode int16 `json:"item_type_code"`
AttributeTypeCode int `json:"attribute_type_code"`
AttributeTypeDescription string `json:"attribute_type_description"`
AttributeCode string `json:"attribute_code"`
AttributeDescription string `json:"attribute_description"`
}
// ProductItemAttributeValue is a bare attribute assignment (no descriptions),
// identifying one attribute value on one item type.
type ProductItemAttributeValue struct {
ItemTypeCode int16 `json:"item_type_code"`
AttributeTypeCode int `json:"attribute_type_code"`
AttributeCode string `json:"attribute_code"`
}

View File

@@ -4,4 +4,5 @@ type ProductSecondColor struct {
ProductCode string `json:"product_code"`
ColorCode string `json:"color_code"`
ItemDim2Code string `json:"item_dim2_code"`
ColorDescription string `json:"color_description"`
}

16
svc/models/translator.go Normal file
View File

@@ -0,0 +1,16 @@
package models
import "time"
// TranslatorRow is one row of the translation table: a key/language pair with
// the Turkish source text and its translated value, plus workflow metadata
// (manual-vs-automatic flag, status, provider, last update time).
type TranslatorRow struct {
ID int64 `json:"id"`
TKey string `json:"t_key"`
LangCode string `json:"lang_code"`
SourceType string `json:"source_type"`
SourceTextTR string `json:"source_text_tr"`
TranslatedText string `json:"translated_text"`
IsManual bool `json:"is_manual"` // true when a human overrode the automatic translation — presumably; confirm with writer side
Status string `json:"status"`
Provider string `json:"provider"`
UpdatedAt time.Time `json:"updated_at"`
}

View File

@@ -0,0 +1,75 @@
package queries
import (
"database/sql"
"fmt"
"strings"
"time"
)
// BulkUpdateOrderLineDueDate sets DeliveryDate/PlannedDateOfLading of every
// open (not closed) line of an order to dueDate (YYYY-MM-DD) and stamps the
// header's AverageDueDate, all within one transaction.
//
// Returns the number of updated lines, whether the header row was touched,
// and any validation or database error. An empty username falls back to
// "system".
func BulkUpdateOrderLineDueDate(mssql *sql.DB, orderHeaderID string, dueDate string, username string) (int64, bool, error) {
	var (
		orderID  = strings.TrimSpace(orderHeaderID)
		dateText = strings.TrimSpace(dueDate)
		user     = strings.TrimSpace(username)
	)

	// Validate inputs before opening a transaction.
	switch {
	case orderID == "":
		return 0, false, fmt.Errorf("orderHeaderID zorunlu")
	case dateText == "":
		return 0, false, fmt.Errorf("dueDate zorunlu")
	}
	if _, parseErr := time.Parse("2006-01-02", dateText); parseErr != nil {
		return 0, false, fmt.Errorf("gecersiz tarih: %w", parseErr)
	}
	if user == "" {
		user = "system"
	}

	tx, beginErr := mssql.Begin()
	if beginErr != nil {
		return 0, false, beginErr
	}
	// Rollback is a no-op once Commit has succeeded.
	defer tx.Rollback()

	// 1) Update every open line of the order.
	lineResult, lineErr := tx.Exec(`
UPDATE BAGGI_V3.dbo.trOrderLine
SET
DeliveryDate = CAST(@p1 AS DATETIME),
PlannedDateOfLading = CAST(@p1 AS DATETIME),
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3
AND ISNULL(IsClosed, 0) = 0
`, dateText, user, orderID)
	if lineErr != nil {
		return 0, false, lineErr
	}
	lineCount, countErr := lineResult.RowsAffected()
	if countErr != nil {
		return 0, false, countErr
	}

	// 2) Stamp the header's average due date.
	headerResult, headerErr := tx.Exec(`
UPDATE BAGGI_V3.dbo.trOrderHeader
SET
AverageDueDate = CAST(@p1 AS DATETIME),
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3
`, dateText, user, orderID)
	if headerErr != nil {
		return 0, false, headerErr
	}
	headerCount, headerCountErr := headerResult.RowsAffected()
	if headerCountErr != nil {
		return 0, false, headerCountErr
	}

	if commitErr := tx.Commit(); commitErr != nil {
		return 0, false, commitErr
	}
	return lineCount, headerCount > 0, nil
}

View File

@@ -454,6 +454,133 @@ func normalizeKeyPart(ns models.NullString) string {
return strings.ToUpper(s)
}
// normalizeNumericToken strips leading zeros from an all-digit string.
// Any non-digit content (or the empty string) maps to ""; an all-zero
// string maps to "0".
func normalizeNumericToken(s string) string {
	if len(s) == 0 {
		return ""
	}
	for _, c := range []byte(s) {
		if c < '0' || c > '9' {
			return ""
		}
	}
	trimmed := strings.TrimLeft(s, "0")
	if trimmed == "" {
		// Input was all zeros; keep a single canonical "0".
		return "0"
	}
	return trimmed
}
// normalizeDim1Token canonicalizes a Dim1 (size) code for variant comparison
// using the legacy rule: trim, uppercase, then strip — in this exact order —
// spaces, the substring "YAS", and any remaining "Y". The order matters:
// removing spaces first lets e.g. "Y A S" collapse into "YAS" and disappear.
func normalizeDim1Token(s string) string {
	out := strings.ToUpper(strings.TrimSpace(s))
	for _, token := range []string{" ", "YAS", "Y"} {
		out = strings.ReplaceAll(out, token, "")
	}
	return out
}
// variantCacheKey builds the lookup key for an (item, color, dim2) triple,
// joining the parts with the "||" separator.
func variantCacheKey(item, color, dim2 string) string {
	return strings.Join([]string{item, color, dim2}, "||")
}
// dim1RowQuerier abstracts the Query method shared by *sql.Tx and *sql.DB so
// the variant Dim1 lookup is implemented once for both transactional and
// plain connections (the two original functions were byte-for-byte duplicates).
type dim1RowQuerier interface {
	Query(query string, args ...any) (*sql.Rows, error)
}

// loadVariantDim1Set returns the set of Dim1 codes defined in prItemVariant
// for (item, color, dim2). Each raw code is normalized with
// normalizeDim1Token; rows with an empty normalized Dim1 are recorded under
// the sentinel key "#EMPTY_DIM1", and all-digit codes are additionally indexed
// as "#NUM:<value>" so that e.g. "05" and "5" compare equal.
// The @p3=''/@p2 branch of the WHERE clause also matches rows where the
// caller's color actually lives in ItemDim2Code (legacy data layout).
func loadVariantDim1Set(q dim1RowQuerier, item, color, dim2 string) (map[string]struct{}, error) {
	rows, err := q.Query(`
SELECT ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') AS ItemDim1Code
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND (
(
ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND (
ISNULL(LTRIM(RTRIM(@p3)),'') = ''
OR ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p3
)
)
OR (
ISNULL(LTRIM(RTRIM(@p3)),'') = ''
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p2
)
)
`, item, color, dim2)
	if err != nil {
		return nil, fmt.Errorf("variant set query hatası: %w", err)
	}
	defer rows.Close()

	set := make(map[string]struct{})
	for rows.Next() {
		var raw string
		if err := rows.Scan(&raw); err != nil {
			return nil, fmt.Errorf("variant set scan hatası: %w", err)
		}
		norm := normalizeDim1Token(raw)
		if norm == "" {
			set["#EMPTY_DIM1"] = struct{}{}
			continue
		}
		set[norm] = struct{}{}
		if num := normalizeNumericToken(norm); num != "" {
			set["#NUM:"+num] = struct{}{}
		}
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("variant set rows hatası: %w", err)
	}
	return set, nil
}

// loadVariantDim1SetTx is the transactional entry point; see loadVariantDim1Set.
func loadVariantDim1SetTx(tx *sql.Tx, item, color, dim2 string) (map[string]struct{}, error) {
	return loadVariantDim1Set(tx, item, color, dim2)
}

// loadVariantDim1SetDB is the plain-connection entry point; see loadVariantDim1Set.
func loadVariantDim1SetDB(conn *sql.DB, item, color, dim2 string) (map[string]struct{}, error) {
	return loadVariantDim1Set(conn, item, color, dim2)
}
// =======================================================
// AKSBIR DETECTION
// =======================================================
@@ -464,6 +591,10 @@ func normalizeKeyPart(ns models.NullString) string {
// Variant check: ItemCode + ColorCode + Dim1 + Dim2
func ValidateItemVariant(tx *sql.Tx, ln models.OrderDetail) error {
return ValidateItemVariantCached(tx, ln, nil)
}
func ValidateItemVariantCached(tx *sql.Tx, ln models.OrderDetail, cache map[string]map[string]struct{}) error {
fmt.Printf(
"🧪 VARIANT GUARD INPUT | ClientKey=%s Item=%q Color=%q Dim1=%q Dim2=%q Dim3=%q Qty1=%v\n",
safeNS(ln.ClientKey),
@@ -493,37 +624,46 @@ func ValidateItemVariant(tx *sql.Tx, ln models.OrderDetail) error {
color = normalizeEmpty(color)
dim1 = normalizeEmpty(dim1)
dim2 = normalizeEmpty(dim2)
dim1Norm := normalizeDim1Token(dim1)
dim1Numeric := normalizeNumericToken(dim1Norm)
if item == "" {
return fmt.Errorf(
"ItemCode boş olamaz (ClientKey=%s)",
safeNS(ln.ClientKey),
)
fmt.Printf(
"🧪 VARIANT NORMALIZED | Item=%q Color=%q Dim1=%q Dim2=%q\n",
item, color, dim1, dim2,
item, color, dim1Norm, dim2,
)
if item == "" {
return &models.ValidationError{
Code: "INVALID_ITEM_VARIANT",
Message: "Tanımsız ürün kombinasyonu",
ClientKey: safeNS(ln.ClientKey),
ItemCode: item,
ColorCode: color,
Dim1: dim1,
Dim2: dim2,
}
}
var exists int
err := tx.QueryRow(`
SELECT CASE WHEN EXISTS (
SELECT 1
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND UPPER(REPLACE(REPLACE(REPLACE(ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') ,' ', ''), 'YAS', ''), 'Y', ''))
= UPPER(REPLACE(REPLACE(REPLACE(ISNULL(LTRIM(RTRIM(@p3)),'') ,' ', ''), 'YAS', ''), 'Y', ''))
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p4
) THEN 1 ELSE 0 END
`, item, color, dim1, dim2).Scan(&exists)
key := variantCacheKey(item, color, dim2)
set := map[string]struct{}(nil)
if cache != nil {
set = cache[key]
}
if set == nil {
var err error
set, err = loadVariantDim1SetTx(tx, item, color, dim2)
if err != nil {
return fmt.Errorf("ItemVariant kontrol query hatası: %w", err)
}
if cache != nil {
cache[key] = set
}
}
if exists != 1 {
_, okNorm := set[dim1Norm]
_, okNum := set["#NUM:"+dim1Numeric]
_, okEmpty := set["#EMPTY_DIM1"]
if !(okNorm || (dim1Numeric != "" && okNum) || (dim1Norm == "" && okEmpty)) {
return &models.ValidationError{
Code: "INVALID_ITEM_VARIANT",
Message: "Tanımsız ürün kombinasyonu",
@@ -550,23 +690,8 @@ func ValidateOrderVariants(db *sql.DB, lines []models.OrderDetail) ([]models.Inv
return s
}
stmt, err := db.Prepare(`
SELECT CASE WHEN EXISTS (
SELECT 1
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND UPPER(REPLACE(REPLACE(REPLACE(ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') ,' ', ''), 'YAS', ''), 'Y', ''))
= UPPER(REPLACE(REPLACE(REPLACE(ISNULL(LTRIM(RTRIM(@p3)),'') ,' ', ''), 'YAS', ''), 'Y', ''))
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p4
) THEN 1 ELSE 0 END
`)
if err != nil {
return nil, fmt.Errorf("validate prepare hatası: %w", err)
}
defer stmt.Close()
invalid := make([]models.InvalidVariant, 0)
cache := make(map[string]map[string]struct{})
for i, ln := range lines {
qty := qtyValue(ln.Qty1)
@@ -578,6 +703,8 @@ func ValidateOrderVariants(db *sql.DB, lines []models.OrderDetail) ([]models.Inv
color := normalizeEmpty(normalizeKeyPart(ln.ColorCode))
dim1 := normalizeEmpty(normalizeKeyPart(ln.ItemDim1Code))
dim2 := normalizeEmpty(normalizeKeyPart(ln.ItemDim2Code))
dim1Norm := normalizeDim1Token(dim1)
dim1Numeric := normalizeNumericToken(dim1Norm)
// ItemCode boş ise invalid
if strings.TrimSpace(item) == "" {
@@ -595,12 +722,21 @@ func ValidateOrderVariants(db *sql.DB, lines []models.OrderDetail) ([]models.Inv
continue
}
var exists int
if err := stmt.QueryRow(item, color, dim1, dim2).Scan(&exists); err != nil {
key := variantCacheKey(item, color, dim2)
set := cache[key]
if set == nil {
var err error
set, err = loadVariantDim1SetDB(db, item, color, dim2)
if err != nil {
return nil, fmt.Errorf("validate query hatası (i=%d): %w", i, err)
}
cache[key] = set
}
if exists != 1 {
_, okNorm := set[dim1Norm]
_, okNum := set["#NUM:"+dim1Numeric]
_, okEmpty := set["#EMPTY_DIM1"]
if !(okNorm || (dim1Numeric != "" && okNum) || (dim1Norm == "" && okEmpty)) {
invalid = append(invalid, models.InvalidVariant{
Index: i,
ClientKey: safeNS(ln.ClientKey),
@@ -838,7 +974,7 @@ VALUES (
nullableBool(header.IsSalesViaInternet, false),
nullableBool(header.IsSuspended, false),
nullableBool(header.IsCompleted, false),
nullableBool(header.IsCompleted, true),
nullableBool(header.IsPrinted, false),
nullableBool(header.IsLocked, false),
@@ -923,6 +1059,7 @@ VALUES (
defer insStmt.Close()
lineResults := make([]OrderLineResult, 0, len(lines))
variantCache := make(map[string]map[string]struct{})
// ✅ Duplicate Guard (payload içi)
seenCombo := make(map[string]bool)
@@ -970,7 +1107,7 @@ VALUES (
// ✅ INSERT ÖNCESİ ItemVariant GUARD
if qtyValue(ln.Qty1) > 0 {
if err := ValidateItemVariant(tx, ln); err != nil {
if err := ValidateItemVariantCached(tx, ln, variantCache); err != nil {
fmt.Println("❌ VARIANT GUARD (INSERT):", err)
return "", nil, err
}
@@ -1282,9 +1419,12 @@ UPDATE BAGGI_V3.dbo.trOrderHeader SET
DocCurrencyCode=@p6,
LocalCurrencyCode=@p7,
ExchangeRate=@p8,
LastUpdatedUserName=@p9,
LastUpdatedDate=@p10
WHERE OrderHeaderID=@p11
IsCreditableConfirmed=@p9,
CreditableConfirmedUser=@p10,
CreditableConfirmedDate=@p11,
LastUpdatedUserName=@p12,
LastUpdatedDate=@p13
WHERE OrderHeaderID=@p14
`,
nullableDateString(header.OrderDate),
nullableTimeString(header.OrderTime),
@@ -1294,6 +1434,9 @@ WHERE OrderHeaderID=@p11
nullableString(header.DocCurrencyCode, "TRY"),
nullableString(header.LocalCurrencyCode, "TRY"),
nullableFloat64(header.ExchangeRate, exRate),
true,
nullableString(header.CreditableConfirmedUser, v3User),
nullableDateTime(header.CreditableConfirmedDate, now),
v3User,
now,
header.OrderHeaderID,
@@ -1365,6 +1508,7 @@ WHERE OrderLineID=@p42 AND ISNULL(IsClosed,0)=0`)
// LOOP
// ======================================================
lineResults := make([]OrderLineResult, 0)
variantCache := make(map[string]map[string]struct{})
seenCombo := make(map[string]bool)
for _, ln := range lines {
@@ -1480,7 +1624,7 @@ WHERE OrderLineID=@p42 AND ISNULL(IsClosed,0)=0`)
// Variant guard
if qtyValue(ln.Qty1) > 0 {
if err := ValidateItemVariant(tx, ln); err != nil {
if err := ValidateItemVariantCached(tx, ln, variantCache); err != nil {
return nil, err
}
}

View File

@@ -2,7 +2,12 @@ package queries
import (
"database/sql"
"fmt"
"log"
"sort"
"strconv"
"strings"
"time"
"bssapp-backend/models"
)
@@ -21,14 +26,24 @@ SELECT
ISNULL(l.ItemCode,'') AS OldItemCode,
ISNULL(l.ColorCode,'') AS OldColor,
ISNULL((
SELECT TOP 1 LTRIM(RTRIM(cd.ColorDescription))
FROM dbo.cdColorDesc cd WITH (NOLOCK)
WHERE cd.ColorCode = l.ColorCode
AND cd.LangCode = N'TR'
), '') AS OldColorDescription,
ISNULL(l.ItemDim2Code,'') AS OldDim2,
ISNULL(l.LineDescription,'') AS OldDesc,
CAST(ISNULL(l.Qty1, 0) AS FLOAT) AS OldQty,
CAST('' AS NVARCHAR(60)) AS NewItemCode,
CAST('' AS NVARCHAR(30)) AS NewColor,
CAST('' AS NVARCHAR(30)) AS NewDim2,
CAST('' AS NVARCHAR(250)) AS NewDesc,
CONVERT(NVARCHAR(10), l.DeliveryDate, 126) AS OldDueDate,
CONVERT(NVARCHAR(10), l.DeliveryDate, 126) AS NewDueDate,
CAST(0 AS bit) AS IsVariantMissing
FROM dbo.trOrderLine l
WHERE l.OrderHeaderID = @p1
@@ -53,18 +68,14 @@ func InsertMissingProductionVariants(mssql *sql.DB, orderHeaderID string, userna
FROM dbo.trOrderLine l
LEFT JOIN dbo.prItemVariant pv
ON pv.ItemTypeCode = l.ItemTypeCode
AND pv.ItemCode = l.ItemCode
AND pv.ColorCode = l.ColorCode
AND ISNULL(pv.ItemDim1Code,'') = ISNULL(l.ItemDim1Code,'')
AND ISNULL(pv.ItemDim2Code,'') = ISNULL(l.ItemDim2Code,'')
AND ISNULL(pv.ItemDim3Code,'') = ISNULL(l.ItemDim3Code,'')
AND ISNULL(LTRIM(RTRIM(pv.ItemCode)),'') = ISNULL(LTRIM(RTRIM(l.ItemCode)),'')
AND ISNULL(LTRIM(RTRIM(pv.ColorCode)),'') = ISNULL(LTRIM(RTRIM(l.ColorCode)),'')
AND ISNULL(LTRIM(RTRIM(pv.ItemDim1Code)),'') = ISNULL(LTRIM(RTRIM(l.ItemDim1Code)),'')
AND ISNULL(LTRIM(RTRIM(pv.ItemDim2Code)),'') = ISNULL(LTRIM(RTRIM(l.ItemDim2Code)),'')
AND ISNULL(LTRIM(RTRIM(pv.ItemDim3Code)),'') = ISNULL(LTRIM(RTRIM(l.ItemDim3Code)),'')
WHERE l.OrderHeaderID = @p1
AND ISNULL(l.ItemCode,'') LIKE 'U%'
AND pv.ItemCode IS NULL
),
MaxPlu AS (
SELECT ISNULL(MAX(PLU),0) AS BasePlu
FROM dbo.prItemVariant WITH (UPDLOCK, HOLDLOCK)
)
INSERT INTO dbo.prItemVariant (
ItemTypeCode,
@@ -73,11 +84,17 @@ INSERT INTO dbo.prItemVariant (
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
PLU,
IsSalesOrderClosed,
IsPurchaseOrderClosed,
IsLocked,
IsBlocked,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate
LastUpdatedDate,
RowGuid,
UseInternet,
IsStoreOrderClosed
)
SELECT
m.ItemTypeCode,
@@ -86,13 +103,18 @@ SELECT
m.ItemDim1Code,
m.ItemDim2Code,
m.ItemDim3Code,
mp.BasePlu + ROW_NUMBER() OVER (ORDER BY m.ItemCode, m.ColorCode, m.ItemDim1Code, m.ItemDim2Code, m.ItemDim3Code),
0,
0,
0,
0,
@p2,
GETDATE(),
@p2,
GETDATE()
FROM Missing m
CROSS JOIN MaxPlu mp;
GETDATE(),
NEWID(),
0,
0
FROM Missing m;
`
res, err := mssql.Exec(query, orderHeaderID, username)
@@ -122,6 +144,44 @@ WHERE OrderHeaderID = @p1 AND OrderLineID = @p2
return itemTypeCode, dim1, dim2, dim3, err
}
// OrderLineDims holds the item-type and dimension codes of a single order
// line; empty strings stand in for NULL dimension codes.
type OrderLineDims struct {
ItemTypeCode int16
ItemDim1Code string
ItemDim2Code string
ItemDim3Code string
}
// GetOrderLineDimsMap loads every line of an order and returns a map from
// OrderLineID (rendered as a trimmed string) to its dimension codes. NULL
// dimension codes come back as empty strings via ISNULL in the query.
func GetOrderLineDimsMap(mssql *sql.DB, orderHeaderID string) (map[string]OrderLineDims, error) {
	rs, queryErr := mssql.Query(`
SELECT
CAST(OrderLineID AS NVARCHAR(50)) AS OrderLineID,
ItemTypeCode,
ISNULL(ItemDim1Code,'') AS ItemDim1Code,
ISNULL(ItemDim2Code,'') AS ItemDim2Code,
ISNULL(ItemDim3Code,'') AS ItemDim3Code
FROM dbo.trOrderLine WITH(NOLOCK)
WHERE OrderHeaderID = @p1
`, orderHeaderID)
	if queryErr != nil {
		return nil, queryErr
	}
	defer rs.Close()

	result := make(map[string]OrderLineDims, 128)
	for rs.Next() {
		var (
			lineID string
			dims   OrderLineDims
		)
		if scanErr := rs.Scan(&lineID, &dims.ItemTypeCode, &dims.ItemDim1Code, &dims.ItemDim2Code, &dims.ItemDim3Code); scanErr != nil {
			return nil, scanErr
		}
		result[strings.TrimSpace(lineID)] = dims
	}
	if rowsErr := rs.Err(); rowsErr != nil {
		return nil, rowsErr
	}
	return result, nil
}
func VariantExists(mssql *sql.DB, itemTypeCode int16, itemCode string, colorCode string, dim1 string, dim2 string, dim3 string) (bool, error) {
var exists int
err := mssql.QueryRow(`
@@ -129,10 +189,22 @@ SELECT TOP 1 1
FROM dbo.prItemVariant
WHERE ItemTypeCode = @p1
AND ItemCode = @p2
AND ColorCode = @p3
AND ISNULL(ItemDim1Code,'') = ISNULL(@p4,'')
AND ISNULL(ItemDim2Code,'') = ISNULL(@p5,'')
AND ISNULL(ItemDim3Code,'') = ISNULL(@p6,'')
AND (
ColorCode = @p3
OR (@p3 = '' AND (ColorCode IS NULL OR ColorCode = ''))
)
AND (
ItemDim1Code = @p4
OR (@p4 = '' AND (ItemDim1Code IS NULL OR ItemDim1Code = ''))
)
AND (
ItemDim2Code = @p5
OR (@p5 = '' AND (ItemDim2Code IS NULL OR ItemDim2Code = ''))
)
AND (
ItemDim3Code = @p6
OR (@p6 = '' AND (ItemDim3Code IS NULL OR ItemDim3Code = ''))
)
`, itemTypeCode, itemCode, colorCode, dim1, dim2, dim3).Scan(&exists)
if err == sql.ErrNoRows {
return false, nil
@@ -143,41 +215,74 @@ WHERE ItemTypeCode = @p1
return true, nil
}
func InsertMissingVariantsTx(tx *sql.Tx, missing []models.OrderProductionMissingVariant, username string) (int64, error) {
func InsertMissingVariantsTx(
tx *sql.Tx,
missing []models.OrderProductionMissingVariant,
username string,
cdItemByCode map[string]models.OrderProductionCdItemDraft,
) (int64, error) {
start := time.Now()
if len(missing) == 0 {
log.Printf("[InsertMissingVariantsTx] missing=0 inserted=0 duration_ms=0")
return 0, nil
}
var basePlu int64
if err := tx.QueryRow(`
SELECT ISNULL(MAX(PLU),0) AS BasePlu
FROM dbo.prItemVariant WITH (UPDLOCK, HOLDLOCK)
`).Scan(&basePlu); err != nil {
return 0, err
}
var inserted int64
ensuredItems := make(map[string]struct{}, len(missing))
for i, v := range missing {
uniqueVariants := make([]models.OrderProductionMissingVariant, 0, len(missing))
seenVariants := make(map[string]struct{}, len(missing))
for _, v := range missing {
variantKey := strconv.FormatInt(int64(v.ItemTypeCode), 10) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemCode)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ColorCode)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemDim1Code)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemDim2Code)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemDim3Code))
if _, ok := seenVariants[variantKey]; ok {
continue
}
seenVariants[variantKey] = struct{}{}
uniqueVariants = append(uniqueVariants, v)
itemKey := strconv.FormatInt(int64(v.ItemTypeCode), 10) + "|" + v.ItemCode
if _, ok := ensuredItems[itemKey]; !ok {
if err := ensureCdItemTx(tx, v.ItemTypeCode, v.ItemCode, username); err != nil {
draft, hasDraft := cdItemByCode[itemKey]
if !hasDraft {
draft, hasDraft = cdItemByCode[NormalizeCdItemMapKey(v.ItemTypeCode, v.ItemCode)]
}
var draftPtr *models.OrderProductionCdItemDraft
if hasDraft {
tmp := draft
draftPtr = &tmp
}
if err := ensureCdItemTx(tx, v.ItemTypeCode, v.ItemCode, username, draftPtr); err != nil {
return inserted, err
}
ensuredItems[itemKey] = struct{}{}
}
}
plu := basePlu + int64(i) + 1
res, err := tx.Exec(`
IF NOT EXISTS (
SELECT 1
FROM dbo.prItemVariant
WHERE ItemTypeCode = @p1
AND ItemCode = @p2
AND ColorCode = @p3
AND ISNULL(ItemDim1Code,'') = ISNULL(@p4,'')
AND ISNULL(ItemDim2Code,'') = ISNULL(@p5,'')
AND ISNULL(ItemDim3Code,'') = ISNULL(@p6,'')
if len(uniqueVariants) == 0 {
return 0, nil
}
args := make([]any, 0, len(uniqueVariants)*6+1)
valueRows := make([]string, 0, len(uniqueVariants))
paramPos := 1
for _, v := range uniqueVariants {
valueRows = append(valueRows, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3, paramPos+4, paramPos+5))
args = append(args, v.ItemTypeCode, v.ItemCode, v.ColorCode, v.ItemDim1Code, v.ItemDim2Code, v.ItemDim3Code)
paramPos += 6
}
usernameParam := paramPos
args = append(args, username)
query := fmt.Sprintf(`
SET NOCOUNT ON;
;WITH Missing(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code) AS (
SELECT *
FROM (VALUES %s) AS v(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
)
INSERT INTO dbo.prItemVariant (
ItemTypeCode,
@@ -186,28 +291,76 @@ INSERT INTO dbo.prItemVariant (
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
PLU,
IsSalesOrderClosed,
IsPurchaseOrderClosed,
IsLocked,
IsBlocked,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate
LastUpdatedDate,
RowGuid,
UseInternet,
IsStoreOrderClosed
)
VALUES (
@p1, @p2, @p3, @p4, @p5, @p6,
@p7, @p8, GETDATE(), @p8, GETDATE()
);
`, v.ItemTypeCode, v.ItemCode, v.ColorCode, v.ItemDim1Code, v.ItemDim2Code, v.ItemDim3Code, plu, username)
SELECT
m.ItemTypeCode,
m.ItemCode,
m.ColorCode,
m.ItemDim1Code,
m.ItemDim2Code,
m.ItemDim3Code,
0, 0, 0, 0,
@p%d, GETDATE(), @p%d, GETDATE(),
NEWID(),
0,
0
FROM Missing m
LEFT JOIN dbo.prItemVariant pv
ON pv.ItemTypeCode = m.ItemTypeCode
AND pv.ItemCode = m.ItemCode
AND (
pv.ColorCode = m.ColorCode
OR (m.ColorCode = '' AND (pv.ColorCode IS NULL OR pv.ColorCode = ''))
)
AND (
pv.ItemDim1Code = m.ItemDim1Code
OR (m.ItemDim1Code = '' AND (pv.ItemDim1Code IS NULL OR pv.ItemDim1Code = ''))
)
AND (
pv.ItemDim2Code = m.ItemDim2Code
OR (m.ItemDim2Code = '' AND (pv.ItemDim2Code IS NULL OR pv.ItemDim2Code = ''))
)
AND (
pv.ItemDim3Code = m.ItemDim3Code
OR (m.ItemDim3Code = '' AND (pv.ItemDim3Code IS NULL OR pv.ItemDim3Code = ''))
)
WHERE pv.ItemCode IS NULL;
`, strings.Join(valueRows, ","), usernameParam, usernameParam)
res, err := tx.Exec(query, args...)
if err != nil {
return inserted, err
}
if rows, err := res.RowsAffected(); err == nil {
if rows, rowsErr := res.RowsAffected(); rowsErr == nil {
inserted += rows
}
}
log.Printf("[InsertMissingVariantsTx] missing=%d unique=%d ensuredItems=%d inserted=%d duration_ms=%d",
len(missing), len(uniqueVariants), len(ensuredItems), inserted, time.Since(start).Milliseconds())
return inserted, nil
}
func ensureCdItemTx(tx *sql.Tx, itemTypeCode int16, itemCode string, username string) error {
func NormalizeCdItemMapKey(itemTypeCode int16, itemCode string) string {
return strconv.FormatInt(int64(itemTypeCode), 10) + "|" + strings.ToUpper(strings.TrimSpace(itemCode))
}
func ensureCdItemTx(
tx *sql.Tx,
itemTypeCode int16,
itemCode string,
username string,
draft *models.OrderProductionCdItemDraft,
) error {
_, err := tx.Exec(`
IF NOT EXISTS (
SELECT 1
@@ -269,51 +422,1273 @@ BEGIN
INSERT INTO dbo.cdItem (
ItemTypeCode, ItemCode,
ItemDimTypeCode, ProductTypeCode, ProductHierarchyID,
UnitOfMeasureCode1, UnitConvertRate, UnitConvertRateNotFixed,
UnitOfMeasureCode1, UnitOfMeasureCode2, UnitConvertRate, UnitConvertRateNotFixed,
UseInternet, UsePOS, UseStore, EnablePartnerCompanies, UseManufacturing, UseSerialNumber,
GenerateOpticalDataMatrixCode, ByWeight, SupplyPeriod, GuaranteePeriod, ShelfLife, OrderLeadTime,
IsFixedExpense, IsBlocked, IsLocked, LockedDate, IsSalesOrderClosed, IsPurchaseOrderClosed,
ItemAccountGrCode, ItemTaxGrCode, ItemPaymentPlanGrCode, ItemDiscountGrCode, ItemVendorGrCode,
PromotionGroupCode, PromotionGroupCode2, ProductCollectionGrCode, StorePriceLevelCode, PerceptionOfFashionCode,
CommercialRoleCode, StoreCapacityLevelCode, CustomsTariffNumberCode, IsFixedExpense, BOMEntityCode, CompanyCode,
IsBlocked, IsLocked, LockedDate, IsSalesOrderClosed, IsPurchaseOrderClosed,
CreatedUserName, CreatedDate, LastUpdatedUserName, LastUpdatedDate, RowGuid,
UseRoll, UseBatch, MaxCreditCardInstallmentCount, GenerateSerialNumber,
IsSubsequentDeliveryForR, IsSubsequentDeliveryForRI, IsUTSDeclaratedItem, IsStoreOrderClosed
IsSubsequentDeliveryForR, IsSubsequentDeliveryForRI,
IGACommissionGroup, UniFreeCommissionGroup, CustomsProductGroupCode, IsUTSDeclaratedItem, IsStoreOrderClosed
)
VALUES (
@p1, @p2,
2, 1, 2,
'AD', 0, 0,
0, 1, 1, 0, 1, 0,
'AD', '', 0, 0,
1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0,
0, 0, 0, '1900-01-01', 0, 0,
'', '%10', '', '', '',
'', '', '0', '0', '0',
'0', '', '', 0, '', '1',
0, 0, '1900-01-01', 0, 0,
@p3, GETDATE(), @p3, GETDATE(), NEWID(),
0, 0, 12, 0,
0, 0, 0, 0
0, 0,
'', '', '0', 0, 0
);
END
END
`, itemTypeCode, itemCode, username)
if err != nil {
return err
}
if draft == nil {
return nil
}
_, err = tx.Exec(`
UPDATE dbo.cdItem
SET
ItemDimTypeCode = COALESCE(@p3, ItemDimTypeCode),
ProductTypeCode = COALESCE(@p4, ProductTypeCode),
ProductHierarchyID = COALESCE(@p5, ProductHierarchyID),
UnitOfMeasureCode1 = COALESCE(NULLIF(@p6,''), UnitOfMeasureCode1),
ItemAccountGrCode = COALESCE(NULLIF(@p7,''), ItemAccountGrCode),
ItemTaxGrCode = CASE
WHEN NULLIF(@p8,'') IS NULL THEN ItemTaxGrCode
WHEN EXISTS (
SELECT 1
FROM dbo.cdItemTaxGr g WITH(NOLOCK)
WHERE LTRIM(RTRIM(g.ItemTaxGrCode)) = LTRIM(RTRIM(@p8))
) THEN @p8
ELSE ItemTaxGrCode
END,
ItemPaymentPlanGrCode = COALESCE(NULLIF(@p9,''), ItemPaymentPlanGrCode),
ItemDiscountGrCode = COALESCE(NULLIF(@p10,''), ItemDiscountGrCode),
ItemVendorGrCode = COALESCE(NULLIF(@p11,''), ItemVendorGrCode),
PromotionGroupCode = COALESCE(NULLIF(@p12,''), PromotionGroupCode),
ProductCollectionGrCode = COALESCE(NULLIF(@p13,''), ProductCollectionGrCode),
StorePriceLevelCode = COALESCE(NULLIF(@p14,''), StorePriceLevelCode),
PerceptionOfFashionCode = COALESCE(NULLIF(@p15,''), PerceptionOfFashionCode),
CommercialRoleCode = COALESCE(NULLIF(@p16,''), CommercialRoleCode),
StoreCapacityLevelCode = COALESCE(NULLIF(@p17,''), StoreCapacityLevelCode),
CustomsTariffNumberCode = COALESCE(NULLIF(@p18,''), CustomsTariffNumberCode),
CompanyCode = COALESCE(NULLIF(@p19,''), CompanyCode),
LastUpdatedUserName = @p20,
LastUpdatedDate = GETDATE()
WHERE ItemTypeCode = @p1
AND ItemCode = @p2;
`,
itemTypeCode,
itemCode,
draft.ItemDimTypeCode,
draft.ProductTypeCode,
draft.ProductHierarchyID,
draft.UnitOfMeasureCode1,
draft.ItemAccountGrCode,
draft.ItemTaxGrCode,
draft.ItemPaymentPlanGrCode,
draft.ItemDiscountGrCode,
draft.ItemVendorGrCode,
draft.PromotionGroupCode,
draft.ProductCollectionGrCode,
draft.StorePriceLevelCode,
draft.PerceptionOfFashionCode,
draft.CommercialRoleCode,
draft.StoreCapacityLevelCode,
draft.CustomsTariffNumberCode,
draft.CompanyCode,
username,
)
return err
}
func UpdateOrderLinesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
if len(lines) == 0 {
return 0, nil
}
const chunkSize = 300
var updated int64
for _, line := range lines {
res, err := tx.Exec(`
UPDATE dbo.trOrderLine
for i := 0; i < len(lines); i += chunkSize {
end := i + chunkSize
if end > len(lines) {
end = len(lines)
}
chunk := lines[i:end]
values := make([]string, 0, len(chunk))
args := make([]any, 0, len(chunk)*8+2)
paramPos := 1
for _, line := range chunk {
var itemDim1 any
if line.ItemDim1Code != nil {
itemDim1 = strings.TrimSpace(*line.ItemDim1Code)
}
values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d,@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3, paramPos+4, paramPos+5, paramPos+6, paramPos+7))
args = append(args,
strings.TrimSpace(line.OrderLineID),
line.NewItemCode,
line.NewColor,
itemDim1,
line.NewDim2,
line.NewDesc,
line.OldDueDate,
line.NewDueDate,
)
paramPos += 8
}
orderHeaderParam := paramPos
usernameParam := paramPos + 1
args = append(args, orderHeaderID, username)
query := fmt.Sprintf(`
SET NOCOUNT ON;
DECLARE @updated TABLE (OrderLineID UNIQUEIDENTIFIER);
;WITH src (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate) AS (
SELECT *
FROM (VALUES %s) AS v (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate)
)
UPDATE l
SET
ItemCode = @p1,
ColorCode = @p2,
ItemDim2Code = @p3,
LineDescription = COALESCE(NULLIF(@p4,''), LineDescription),
LastUpdatedUserName = @p5,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p6 AND OrderLineID = @p7
`, line.NewItemCode, line.NewColor, line.NewDim2, line.NewDesc, username, orderHeaderID, line.OrderLineID)
if err != nil {
return updated, err
}
if rows, err := res.RowsAffected(); err == nil {
updated += rows
l.ItemCode = s.NewItemCode,
l.ColorCode = s.NewColor,
l.ItemDim1Code = COALESCE(s.ItemDim1Code, l.ItemDim1Code),
l.ItemDim2Code = s.NewDim2,
l.LineDescription = COALESCE(NULLIF(s.NewDesc,''), l.LineDescription),
l.DeliveryDate = CASE WHEN ISDATE(s.NewDueDate) = 1 THEN CAST(s.NewDueDate AS DATETIME) ELSE l.DeliveryDate END,
l.LastUpdatedUserName = @p%d,
l.LastUpdatedDate = GETDATE()
OUTPUT inserted.OrderLineID INTO @updated(OrderLineID)
FROM dbo.trOrderLine l
JOIN src s
ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d);
SELECT COUNT(1) AS UpdatedCount FROM @updated;
`, strings.Join(values, ","), usernameParam, orderHeaderParam)
chunkStart := time.Now()
var chunkUpdated int64
execErr := tx.QueryRow(query, args...).Scan(&chunkUpdated)
if execErr != nil {
log.Printf("[UpdateOrderLinesTx] ERROR orderHeaderID=%s chunk=%d-%d err=%v", orderHeaderID, i, end, execErr)
return updated, fmt.Errorf("update lines chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), execErr)
}
log.Printf("[UpdateOrderLinesTx] orderHeaderID=%s chunk=%d-%d updated=%d duration_ms=%d", orderHeaderID, i, end, chunkUpdated, time.Since(chunkStart).Milliseconds())
updated += chunkUpdated
}
return updated, nil
}
// VerifyOrderLineUpdatesTx re-reads the order lines inside the transaction and
// compares the stored ItemCode/ColorCode/ItemDim2Code against the expected
// New* values, case-insensitively and whitespace-trimmed on both sides.
// It returns the mismatch count and up to five human-readable sample
// descriptions for error reporting.
func VerifyOrderLineUpdatesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine) (int64, []string, error) {
if len(lines) == 0 {
return 0, nil, nil
}
// Lines are processed in chunks of 300 so the generated VALUES list stays
// small — presumably to keep under the driver's parameter limit; confirm.
const chunkSize = 300
var mismatchCount int64
samples := make([]string, 0, 5)
for i := 0; i < len(lines); i += chunkSize {
end := i + chunkSize
if end > len(lines) {
end = len(lines)
}
chunk := lines[i:end]
// Build one "(@pN..@pN+3)" tuple per line plus the matching positional
// arguments, normalized (trimmed, upper-cased) to mirror the SQL side.
values := make([]string, 0, len(chunk))
args := make([]any, 0, len(chunk)*4+1)
paramPos := 1
for _, line := range chunk {
values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3))
args = append(args,
strings.TrimSpace(line.OrderLineID),
strings.ToUpper(strings.TrimSpace(line.NewItemCode)),
strings.ToUpper(strings.TrimSpace(line.NewColor)),
strings.ToUpper(strings.TrimSpace(line.NewDim2)),
)
paramPos += 4
}
// The order-header id is the final parameter, after all tuple params.
orderHeaderParam := paramPos
args = append(args, orderHeaderID)
// Only rows whose stored values differ from the expected ones are
// returned; a fully-matching chunk yields zero rows.
query := fmt.Sprintf(`
SET NOCOUNT ON;
WITH src (OrderLineID, NewItemCode, NewColor, NewDim2) AS (
SELECT *
FROM (VALUES %s) v(OrderLineID, NewItemCode, NewColor, NewDim2)
)
SELECT
s.OrderLineID,
ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') AS ActualItemCode,
ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') AS ActualColorCode,
ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') AS ActualDim2Code,
s.NewItemCode,
s.NewColor,
s.NewDim2
FROM src s
JOIN dbo.trOrderLine l
ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d)
AND (
ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') <> s.NewItemCode OR
ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') <> s.NewColor OR
ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') <> s.NewDim2
);
`, strings.Join(values, ","), orderHeaderParam)
rows, err := tx.Query(query, args...)
if err != nil {
return mismatchCount, samples, err
}
// rows is closed explicitly on every exit path; defer is avoided because
// the query runs once per loop iteration.
for rows.Next() {
var lineID, actualItem, actualColor, actualDim2, expectedItem, expectedColor, expectedDim2 string
if err := rows.Scan(&lineID, &actualItem, &actualColor, &actualDim2, &expectedItem, &expectedColor, &expectedDim2); err != nil {
rows.Close()
return mismatchCount, samples, err
}
mismatchCount++
// Cap diagnostics at five examples.
if len(samples) < 5 {
samples = append(samples, fmt.Sprintf(
"lineID=%s expected=(%s,%s,%s) actual=(%s,%s,%s)",
lineID, expectedItem, expectedColor, expectedDim2, actualItem, actualColor, actualDim2,
))
}
}
if err := rows.Err(); err != nil {
rows.Close()
return mismatchCount, samples, err
}
rows.Close()
}
return mismatchCount, samples, nil
}
// UpdateOrderHeaderAverageDueDateTx writes the header's AverageDueDate inside
// an open transaction. A nil pointer means "no change" (no-op); an empty or
// whitespace-only string clears the column to NULL; otherwise the value must
// parse as YYYY-MM-DD or an error is returned before touching the database.
func UpdateOrderHeaderAverageDueDateTx(tx *sql.Tx, orderHeaderID string, averageDueDate *string, username string) error {
	if averageDueDate == nil {
		return nil
	}
	trimmed := strings.TrimSpace(*averageDueDate)
	if trimmed != "" {
		if _, parseErr := time.Parse("2006-01-02", trimmed); parseErr != nil {
			return fmt.Errorf("invalid header average due date %q: %w", trimmed, parseErr)
		}
	}
	_, execErr := tx.Exec(`
UPDATE dbo.trOrderHeader
SET
AverageDueDate = CASE WHEN @p1 = '' THEN NULL ELSE CAST(@p1 AS DATETIME) END,
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3;
`, trimmed, username, orderHeaderID)
	return execErr
}
// TouchOrderHeaderTx bumps the audit columns (LastUpdatedUserName,
// LastUpdatedDate) of an order header inside an existing transaction and
// returns the number of rows affected (0 when the header ID does not match).
func TouchOrderHeaderTx(tx *sql.Tx, orderHeaderID string, username string) (int64, error) {
	res, err := tx.Exec(`
UPDATE dbo.trOrderHeader
SET
LastUpdatedUserName = @p1,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p2;
`, username, orderHeaderID)
	if err != nil {
		return 0, err
	}
	rows, rowsErr := res.RowsAffected()
	if rowsErr != nil {
		// Bug fix: previously this returned (0, nil), silently swallowing
		// the RowsAffected failure; surface the error to the caller.
		return 0, rowsErr
	}
	return rows, nil
}
// sqlQueryRower abstracts the single-row query capability shared by
// *sql.DB and *sql.Tx, so the barcode helpers below can run against
// either a live connection or an open transaction.
type sqlQueryRower interface {
	QueryRow(query string, args ...any) *sql.Row
}
// plannedProductionBarcode describes a barcode planned for insertion into
// dbo.prItemBarcode: the barcode value, its type, and the full item-variant
// key it will be attached to.
// NOTE(review): this type is not referenced anywhere in the visible part of
// the file — confirm it is used elsewhere before removing.
type plannedProductionBarcode struct {
	Barcode         string // planned barcode value
	BarcodeTypeCode string // e.g. 'BAGGI3'
	ItemTypeCode    int16
	ItemCode        string
	ColorCode       string
	ItemDim1Code    string
	ItemDim2Code    string
	ItemDim3Code    string
}
// barcodeTypeExists reports whether the (trimmed) barcode type code is
// defined in dbo.cdBarcodeType. sql.ErrNoRows is translated into
// (false, nil) rather than an error.
func barcodeTypeExists(q sqlQueryRower, barcodeTypeCode string) (bool, error) {
	const query = `
SELECT TOP 1 1
FROM dbo.cdBarcodeType
WHERE BarcodeTypeCode = @p1
`
	var one int
	switch scanErr := q.QueryRow(query, strings.TrimSpace(barcodeTypeCode)).Scan(&one); scanErr {
	case nil:
		return true, nil
	case sql.ErrNoRows:
		return false, nil
	default:
		return false, scanErr
	}
}
// barcodeExists reports whether the (trimmed) barcode is already present in
// dbo.prItemBarcode. The UPDLOCK/HOLDLOCK hints keep the matched row locked
// for the rest of the enclosing transaction so a concurrent insert of the
// same barcode cannot slip in between check and use.
func barcodeExists(q sqlQueryRower, barcode string) (bool, error) {
	const query = `
SELECT TOP 1 1
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
WHERE Barcode = @p1
`
	var one int
	switch scanErr := q.QueryRow(query, strings.TrimSpace(barcode)).Scan(&one); scanErr {
	case nil:
		return true, nil
	case sql.ErrNoRows:
		return false, nil
	default:
		return false, scanErr
	}
}
// existingVariantBarcode looks up an existing AD-unit barcode for the exact
// item variant (type, item, color, dim1-3) under the given barcode type.
// When several barcodes exist, the numerically largest one wins (numeric
// barcodes sort before non-numeric, then lexicographically descending).
// Returns (barcode, found, error); sql.ErrNoRows maps to ("", false, nil).
func existingVariantBarcode(
	q sqlQueryRower,
	barcodeTypeCode string,
	itemTypeCode int16,
	itemCode string,
	colorCode string,
	dim1 string,
	dim2 string,
	dim3 string,
) (string, bool, error) {
	const query = `
SELECT TOP 1 LTRIM(RTRIM(ISNULL(Barcode, '')))
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
WHERE BarcodeTypeCode = @p1
AND ItemTypeCode = @p2
AND ISNULL(LTRIM(RTRIM(ItemCode)), '') = @p3
AND ISNULL(LTRIM(RTRIM(ColorCode)), '') = @p4
AND ISNULL(LTRIM(RTRIM(ItemDim1Code)), '') = @p5
AND ISNULL(LTRIM(RTRIM(ItemDim2Code)), '') = @p6
AND ISNULL(LTRIM(RTRIM(ItemDim3Code)), '') = @p7
AND ISNULL(LTRIM(RTRIM(UnitOfMeasureCode)), '') = 'AD'
ORDER BY
CASE
WHEN ISNUMERIC(Barcode) = 1
THEN CAST(Barcode AS BIGINT)
ELSE 0
END DESC,
Barcode DESC
`
	// All string keys are trimmed on the Go side to mirror the trimming
	// done in the SQL predicates.
	args := []any{
		strings.TrimSpace(barcodeTypeCode),
		itemTypeCode,
		strings.TrimSpace(itemCode),
		strings.TrimSpace(colorCode),
		strings.TrimSpace(dim1),
		strings.TrimSpace(dim2),
		strings.TrimSpace(dim3),
	}
	var found string
	scanErr := q.QueryRow(query, args...).Scan(&found)
	if scanErr == sql.ErrNoRows {
		return "", false, nil
	}
	if scanErr != nil {
		return "", false, scanErr
	}
	return strings.TrimSpace(found), true, nil
}
// maxNumericBarcode returns the largest purely-numeric barcode currently in
// dbo.prItemBarcode (0 when there are none). The table hint holds locks for
// the rest of the transaction so the returned seed stays valid while new
// barcodes are allocated from it.
func maxNumericBarcode(q sqlQueryRower) (int64, error) {
	const query = `
SELECT ISNULL(MAX(
CASE
WHEN ISNUMERIC(Barcode) = 1
THEN CAST(Barcode AS BIGINT)
ELSE NULL
END
), 0)
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
`
	var highest int64
	scanErr := q.QueryRow(query).Scan(&highest)
	return highest, scanErr
}
// ValidateProductionBarcodePlan dry-runs the barcode allocation for the given
// missing variants and returns the validation problems that the real insert
// would hit, without writing anything.
//
// Checks, in order:
//   - the barcode type must exist in cdBarcodeType ("invalid_barcode_type";
//     short-circuits all other checks);
//   - each planned barcode (max numeric barcode + running offset) must be
//     unique within this request ("barcode_duplicate_in_plan");
//   - each planned barcode must not already exist in prItemBarcode
//     ("barcode_in_use").
//
// Variants that already have a barcode are skipped and consume no offset.
// An empty variant list yields (nil, nil).
func ValidateProductionBarcodePlan(q sqlQueryRower, variants []models.OrderProductionMissingVariant, barcodeTypeCode string) ([]models.OrderProductionBarcodeValidation, error) {
	typeCode := strings.ToUpper(strings.TrimSpace(barcodeTypeCode))
	if len(variants) == 0 {
		return nil, nil
	}
	validations := make([]models.OrderProductionBarcodeValidation, 0)
	typeExists, err := barcodeTypeExists(q, typeCode)
	if err != nil {
		return nil, err
	}
	if !typeExists {
		// Unknown barcode type: report it and stop early — the remaining
		// checks would be meaningless without a valid type.
		validations = append(validations, models.OrderProductionBarcodeValidation{
			Code:            "invalid_barcode_type",
			Message:         fmt.Sprintf("Barkod tipi bulunamadi: %s", typeCode),
			BarcodeTypeCode: typeCode,
		})
		return validations, nil
	}
	// Sort a copy of the variants so the running-offset assignment (and
	// therefore the planned barcode numbers) is deterministic for a given
	// input set; ItemTypeCode is zero-padded so it sorts numerically.
	sorted := append([]models.OrderProductionMissingVariant(nil), variants...)
	sort.Slice(sorted, func(i, j int) bool {
		left := sorted[i]
		right := sorted[j]
		leftKey := fmt.Sprintf("%05d|%s|%s|%s|%s|%s", left.ItemTypeCode, left.ItemCode, left.ColorCode, left.ItemDim1Code, left.ItemDim2Code, left.ItemDim3Code)
		rightKey := fmt.Sprintf("%05d|%s|%s|%s|%s|%s", right.ItemTypeCode, right.ItemCode, right.ColorCode, right.ItemDim1Code, right.ItemDim2Code, right.ItemDim3Code)
		return leftKey < rightKey
	})
	maxBarcode, err := maxNumericBarcode(q)
	if err != nil {
		return nil, err
	}
	nextOffset := int64(0)
	planned := make(map[string]struct{}, len(sorted))
	for _, variant := range sorted {
		existingBarcode, exists, err := existingVariantBarcode(q, typeCode, variant.ItemTypeCode, variant.ItemCode, variant.ColorCode, variant.ItemDim1Code, variant.ItemDim2Code, variant.ItemDim3Code)
		if err != nil {
			return nil, err
		}
		if exists && existingBarcode != "" {
			// Variant already has a barcode: nothing would be allocated.
			continue
		}
		nextOffset++
		barcode := strconv.FormatInt(maxBarcode+nextOffset, 10)
		// Guard against the same barcode being planned twice in this
		// request. (With a strictly increasing offset this should not
		// occur, but it is checked defensively.)
		if _, duplicated := planned[barcode]; duplicated {
			validations = append(validations, models.OrderProductionBarcodeValidation{
				Code:            "barcode_duplicate_in_plan",
				Message:         fmt.Sprintf("Planlanan barkod ayni istekte birden fazla kez olusuyor: %s", barcode),
				Barcode:         barcode,
				BarcodeTypeCode: typeCode,
				ItemTypeCode:    variant.ItemTypeCode,
				ItemCode:        strings.TrimSpace(variant.ItemCode),
				ColorCode:       strings.TrimSpace(variant.ColorCode),
				ItemDim1Code:    strings.TrimSpace(variant.ItemDim1Code),
				ItemDim2Code:    strings.TrimSpace(variant.ItemDim2Code),
				ItemDim3Code:    strings.TrimSpace(variant.ItemDim3Code),
			})
			continue
		}
		planned[barcode] = struct{}{}
		inUse, err := barcodeExists(q, barcode)
		if err != nil {
			return nil, err
		}
		if inUse {
			// The planned number collides with an existing barcode row.
			validations = append(validations, models.OrderProductionBarcodeValidation{
				Code:            "barcode_in_use",
				Message:         fmt.Sprintf("Barkod daha once kullanilmis: %s (%s / %s / %s / %s)", barcode, strings.TrimSpace(variant.ItemCode), strings.TrimSpace(variant.ColorCode), strings.TrimSpace(variant.ItemDim1Code), strings.TrimSpace(variant.ItemDim2Code)),
				Barcode:         barcode,
				BarcodeTypeCode: typeCode,
				ItemTypeCode:    variant.ItemTypeCode,
				ItemCode:        strings.TrimSpace(variant.ItemCode),
				ColorCode:       strings.TrimSpace(variant.ColorCode),
				ItemDim1Code:    strings.TrimSpace(variant.ItemDim1Code),
				ItemDim2Code:    strings.TrimSpace(variant.ItemDim2Code),
				ItemDim3Code:    strings.TrimSpace(variant.ItemDim3Code),
			})
		}
	}
	return validations, nil
}
// InsertItemBarcodesTx creates missing 'BAGGI3' AD-unit barcodes for the
// distinct item variants referenced by the given order lines, inside tx.
// Barcode numbers are allocated from the current numeric maximum of
// prItemBarcode (floored at 36999999, only codes up to 8 digits seed the
// maximum). Lines are processed in chunks of 200 via one set-based INSERT
// per chunk; if a chunk fails, it falls back to one-line-at-a-time inserts,
// skipping duplicate-key errors. Returns the cumulative inserted row count.
func InsertItemBarcodesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
	start := time.Now()
	if len(lines) == 0 {
		log.Printf("[InsertItemBarcodesTx] lines=0 inserted=0 duration_ms=0")
		return 0, nil
	}
	// Deduplicate order-line IDs, preserving first-seen order; blank IDs
	// are dropped.
	lineIDs := make([]string, 0, len(lines))
	seen := make(map[string]struct{}, len(lines))
	for _, line := range lines {
		lineID := strings.TrimSpace(line.OrderLineID)
		if lineID == "" {
			continue
		}
		if _, ok := seen[lineID]; ok {
			continue
		}
		seen[lineID] = struct{}{}
		lineIDs = append(lineIDs, lineID)
	}
	if len(lineIDs) == 0 {
		log.Printf("[InsertItemBarcodesTx] uniqueLineIDs=0 inserted=0")
		return 0, nil
	}
	var inserted int64
	// Fallback query for a single order line (@p1=lineID, @p2=headerID,
	// @p3=username). NOTE(review): unlike the batch query below, this string
	// is NOT passed through fmt.Sprintf, so the '%%' sequences reach SQL
	// Server literally as two '%' wildcards. Consecutive LIKE wildcards are
	// equivalent to one, so matching behaves the same, but normalizing to a
	// single '%' would be clearer.
	singleLineQuery := `
SET NOCOUNT ON;
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
CAST(seed.MaxBarcode + 1 AS NVARCHAR(50)),
'BAGGI3',
src.ItemTypeCode,
src.ItemCode,
src.ColorCode,
src.ItemDim1Code,
src.ItemDim2Code,
src.ItemDim3Code,
'AD',
1,
@p3,
GETDATE(),
@p3,
GETDATE(),
NEWID()
FROM (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
FROM dbo.trOrderLine l
WHERE l.OrderHeaderID = @p2
AND CAST(l.OrderLineID AS NVARCHAR(50)) = @p1
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
) src
CROSS JOIN (
SELECT
CASE
WHEN ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0) < 36999999
THEN 36999999
ELSE ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0)
END AS MaxBarcode
FROM dbo.prItemBarcode
WHERE BarcodeTypeCode = 'BAGGI3'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
) seed
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = src.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
);
`
	// execSingle runs the fallback insert for one line; duplicate-key
	// errors are logged and swallowed (treated as "already present").
	// NOTE(review): the wrapped error message says "chunk failed" even
	// though this is the single-line path — text looks copy-pasted from
	// the attribute-upsert helper.
	execSingle := func(globalIndex int, lineID string) error {
		lineStart := time.Now()
		res, err := tx.Exec(singleLineQuery, lineID, orderHeaderID, username)
		if err != nil {
			if isDuplicateBarcodeInsertErr(err) {
				log.Printf("[InsertItemBarcodesTx] skip duplicate lineIndex=%d lineID=%s err=%v", globalIndex, lineID, err)
				return nil
			}
			return fmt.Errorf("upsert item barcodes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", globalIndex, globalIndex+1, time.Since(lineStart).Milliseconds(), err)
		}
		rows, _ := res.RowsAffected()
		inserted += rows
		log.Printf(
			"[InsertItemBarcodesTx] lineIndex=%d lineID=%s inserted=%d cumulative=%d duration_ms=%d",
			globalIndex,
			lineID,
			rows,
			inserted,
			time.Since(lineStart).Milliseconds(),
		)
		return nil
	}
	// Batch in chunks to stay well under SQL Server's 2100-parameter limit:
	// chunkSize lineID params + header ID + username.
	const chunkSize = 200
	for i := 0; i < len(lineIDs); i += chunkSize {
		end := i + chunkSize
		if end > len(lineIDs) {
			end = len(lineIDs)
		}
		chunk := lineIDs[i:end]
		values := make([]string, 0, len(chunk))
		args := make([]any, 0, len(chunk)+2)
		paramPos := 1
		for _, lineID := range chunk {
			values = append(values, fmt.Sprintf("(@p%d)", paramPos))
			args = append(args, lineID)
			paramPos++
		}
		orderHeaderParam := paramPos
		usernameParam := paramPos + 1
		args = append(args, orderHeaderID, username)
		// Set-based insert for the whole chunk; ROW_NUMBER() hands out
		// consecutive barcode numbers above the seed maximum. '%%' here IS
		// Sprintf-escaped and becomes a single '%' in the final SQL.
		batchQuery := fmt.Sprintf(`
SET NOCOUNT ON;
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
CAST(seed.MaxBarcode + ROW_NUMBER() OVER (
ORDER BY src.ItemTypeCode, src.ItemCode, src.ColorCode, src.ItemDim1Code, src.ItemDim2Code, src.ItemDim3Code
) AS NVARCHAR(50)),
'BAGGI3',
src.ItemTypeCode,
src.ItemCode,
src.ColorCode,
src.ItemDim1Code,
src.ItemDim2Code,
src.ItemDim3Code,
'AD',
1,
@p%d,
GETDATE(),
@p%d,
GETDATE(),
NEWID()
FROM (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
FROM dbo.trOrderLine l
JOIN (VALUES %s) ids(OrderLineID)
ON CAST(l.OrderLineID AS NVARCHAR(50)) = ids.OrderLineID
WHERE l.OrderHeaderID = @p%d
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
) src
CROSS JOIN (
SELECT
CASE
WHEN ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0) < 36999999
THEN 36999999
ELSE ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0)
END AS MaxBarcode
FROM dbo.prItemBarcode
WHERE BarcodeTypeCode = 'BAGGI3'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
) seed
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = src.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
);
`, usernameParam, usernameParam, strings.Join(values, ","), orderHeaderParam)
		chunkStart := time.Now()
		res, err := tx.Exec(batchQuery, args...)
		if err == nil {
			rows, _ := res.RowsAffected()
			inserted += rows
			log.Printf(
				"[InsertItemBarcodesTx] batch=%d-%d inserted=%d cumulative=%d duration_ms=%d",
				i,
				end,
				rows,
				inserted,
				time.Since(chunkStart).Milliseconds(),
			)
			continue
		}
		// Batch failed: retry the chunk one line at a time so a single bad
		// line does not sink the whole chunk.
		log.Printf("[InsertItemBarcodesTx] batch fallback=%d-%d err=%v", i, end, err)
		for j, lineID := range chunk {
			if lineErr := execSingle(i+j, lineID); lineErr != nil {
				log.Printf("[InsertItemBarcodesTx] ERROR lineIndex=%d lineID=%s err=%v", i+j, lineID, lineErr)
				return inserted, lineErr
			}
		}
	}
	log.Printf(
		"[InsertItemBarcodesTx] lines=%d unique=%d inserted=%d duration_ms=%d",
		len(lines),
		len(lineIDs),
		inserted,
		time.Since(start).Milliseconds(),
	)
	return inserted, nil
}
// InsertItemBarcodesByTargetsTx creates one 'BAGGI3' AD-unit barcode per
// missing variant target, allocating sequential numbers above the current
// maximum of prItemBarcode and (when present) tbStokBarkodu, all inside tx.
// Targets are normalized (upper-cased, trimmed) and deduplicated by the full
// variant key; targets with an empty ItemCode are dropped. Each allocation
// re-checks candidate uniqueness and retries (up to 2000 candidates) on
// collision or duplicate-key insert errors. Returns the inserted row count.
func InsertItemBarcodesByTargetsTx(tx *sql.Tx, targets []models.OrderProductionMissingVariant, username string) (int64, error) {
	start := time.Now()
	if len(targets) == 0 {
		log.Printf("[InsertItemBarcodesByTargetsTx] targets=0 inserted=0 duration_ms=0")
		return 0, nil
	}
	// Normalize and deduplicate targets by the full variant key.
	uniqueTargets := make([]models.OrderProductionMissingVariant, 0, len(targets))
	seen := make(map[string]struct{}, len(targets))
	for _, t := range targets {
		itemCode := strings.ToUpper(strings.TrimSpace(t.ItemCode))
		if itemCode == "" {
			continue
		}
		key := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
			t.ItemTypeCode,
			itemCode,
			strings.ToUpper(strings.TrimSpace(t.ColorCode)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim1Code)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim2Code)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim3Code)),
		)
		if _, ok := seen[key]; ok {
			continue
		}
		seen[key] = struct{}{}
		// t is a copy, so normalizing its fields does not mutate the
		// caller's slice.
		t.ItemCode = itemCode
		t.ColorCode = strings.ToUpper(strings.TrimSpace(t.ColorCode))
		t.ItemDim1Code = strings.ToUpper(strings.TrimSpace(t.ItemDim1Code))
		t.ItemDim2Code = strings.ToUpper(strings.TrimSpace(t.ItemDim2Code))
		t.ItemDim3Code = strings.ToUpper(strings.TrimSpace(t.ItemDim3Code))
		uniqueTargets = append(uniqueTargets, t)
	}
	if len(uniqueTargets) == 0 {
		log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=0 inserted=0 duration_ms=%d", len(targets), time.Since(start).Milliseconds())
		return 0, nil
	}
	if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/start"); err != nil {
		return 0, err
	}
	// Barcode seed'i hem prItemBarcode hem de (varsa) tbStokBarkodu uzerinden
	// kilitli okuyarak hesapla; trigger tarafindaki duplicate riskini azalt.
	// (Compute the barcode seed by lock-reading both prItemBarcode and, if
	// present, tbStokBarkodu; reduces the trigger-side duplicate risk.
	// TABLOCKX serializes competing allocators for the transaction's span.)
	var maxBarcode int64
	maxPrQuery := `
SELECT ISNULL(MAX(v.BarcodeNum), 0)
FROM (
SELECT
CASE
WHEN LTRIM(RTRIM(ISNULL(pb.Barcode,''))) NOT LIKE '%[^0-9]%'
AND LEN(LTRIM(RTRIM(ISNULL(pb.Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(pb.Barcode,''))) AS BIGINT)
ELSE NULL
END AS BarcodeNum
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
WHERE pb.BarcodeTypeCode = 'BAGGI3'
) v
WHERE v.BarcodeNum IS NOT NULL;
`
	if err := tx.QueryRow(maxPrQuery).Scan(&maxBarcode); err != nil {
		return 0, fmt.Errorf("barcode seed query failed: %w", err)
	}
	// tbStokBarkodu is optional; only consult it when the table exists.
	var hasTb int
	if err := tx.QueryRow(`SELECT CASE WHEN OBJECT_ID(N'dbo.tbStokBarkodu', N'U') IS NULL THEN 0 ELSE 1 END`).Scan(&hasTb); err != nil {
		return 0, fmt.Errorf("barcode seed object check failed: %w", err)
	}
	if hasTb == 1 {
		var maxTb int64
		maxTbQuery := `
SELECT ISNULL(MAX(v.BarcodeNum), 0)
FROM (
SELECT
CASE
WHEN LTRIM(RTRIM(ISNULL(sb.Barcode,''))) NOT LIKE '%[^0-9]%'
AND LEN(LTRIM(RTRIM(ISNULL(sb.Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(sb.Barcode,''))) AS BIGINT)
ELSE NULL
END AS BarcodeNum
FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
) v
WHERE v.BarcodeNum IS NOT NULL;
`
		if err := tx.QueryRow(maxTbQuery).Scan(&maxTb); err != nil {
			return 0, fmt.Errorf("barcode seed tbStokBarkodu query failed: %w", err)
		}
		if maxTb > maxBarcode {
			maxBarcode = maxTb
		}
	}
	// Floor the seed so all allocated barcodes start above 36999999,
	// matching the floor used by InsertItemBarcodesTx.
	if maxBarcode < 36999999 {
		maxBarcode = 36999999
	}
	// Candidate-uniqueness probe against prItemBarcode only.
	existsBarcodeQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
) THEN 1 ELSE 0 END;
`
	// Candidate-uniqueness probe against both tables (used when
	// tbStokBarkodu exists).
	existsBarcodeWithTbQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
) OR EXISTS (
SELECT 1
FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(sb.Barcode,''))) = @p1
) THEN 1 ELSE 0 END;
`
	// Does this exact variant already have any barcode row?
	hasVariantBarcodeQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode b WITH (UPDLOCK, HOLDLOCK)
WHERE b.ItemTypeCode = @p1
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p2
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p3
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p4
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p5
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p6
) THEN 1 ELSE 0 END;
`
	// Guarded single-row insert; the NOT EXISTS re-check keeps it a no-op
	// if the variant gained a barcode since the earlier probe.
	insertOneQuery := `
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
@p1,
'BAGGI3',
@p2,
@p3,
@p4,
@p5,
@p6,
@p7,
'AD',
1,
@p8,
GETDATE(),
@p8,
GETDATE(),
NEWID()
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = @p2
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p3
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p4
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p5
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p6
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p7
);
`
	var inserted int64
	for _, t := range uniqueTargets {
		if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/before_target"); err != nil {
			return inserted, err
		}
		var hasVariant int
		if err := tx.QueryRow(
			hasVariantBarcodeQuery,
			t.ItemTypeCode,
			t.ItemCode,
			t.ColorCode,
			t.ItemDim1Code,
			t.ItemDim2Code,
			t.ItemDim3Code,
		).Scan(&hasVariant); err != nil {
			return inserted, fmt.Errorf("variant barcode exists check failed: %w", err)
		}
		if hasVariant == 1 {
			// Variant already carries a barcode; allocate nothing for it.
			continue
		}
		// Allocate the next free number, skipping candidates that turn out
		// to be taken (advance maxBarcode past each taken candidate).
		retry := 0
		for {
			retry++
			if retry > 2000 {
				return inserted, fmt.Errorf("barcode allocation exceeded retry limit item=%s color=%s dim1=%s", t.ItemCode, t.ColorCode, t.ItemDim1Code)
			}
			candidateNum := maxBarcode + 1
			candidate := strconv.FormatInt(candidateNum, 10)
			var exists int
			if hasTb == 1 {
				if err := tx.QueryRow(existsBarcodeWithTbQuery, candidate).Scan(&exists); err != nil {
					return inserted, fmt.Errorf("barcode exists check(tb) failed: %w", err)
				}
			} else {
				if err := tx.QueryRow(existsBarcodeQuery, candidate).Scan(&exists); err != nil {
					return inserted, fmt.Errorf("barcode exists check failed: %w", err)
				}
			}
			if exists == 1 {
				// Candidate is taken; remember it as the new maximum and
				// try the next number.
				maxBarcode = candidateNum
				continue
			}
			res, err := tx.Exec(
				insertOneQuery,
				candidate,
				t.ItemTypeCode,
				t.ItemCode,
				t.ColorCode,
				t.ItemDim1Code,
				t.ItemDim2Code,
				t.ItemDim3Code,
				username,
			)
			if err != nil {
				if isDuplicateBarcodeInsertErr(err) {
					// Lost a race on this number (e.g. trigger-maintained
					// table); advance and retry with the next candidate.
					maxBarcode = candidateNum
					continue
				}
				return inserted, fmt.Errorf("insert item barcode failed item=%s color=%s dim1=%s duration_ms=%d: %w",
					t.ItemCode, t.ColorCode, t.ItemDim1Code, time.Since(start).Milliseconds(), err)
			}
			affected, _ := res.RowsAffected()
			if affected > 0 {
				inserted += affected
				maxBarcode = candidateNum
			}
			// affected == 0 means the NOT EXISTS guard fired (variant got a
			// barcode concurrently); either way this target is done.
			break
		}
	}
	if txErr := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/after_batch"); txErr != nil {
		return inserted, txErr
	}
	log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=%d inserted=%d duration_ms=%d",
		len(targets), len(uniqueTargets), inserted, time.Since(start).Milliseconds())
	return inserted, nil
}
// ensureTxStillActive verifies that tx is non-nil and that SQL Server still
// reports an open transaction on the connection (@@TRANCOUNT > 0). The
// `where` string tags the returned error with the call site for debugging.
func ensureTxStillActive(tx *sql.Tx, where string) error {
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	var openTranCount int
	if scanErr := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&openTranCount); scanErr != nil {
		return fmt.Errorf("tx state query failed at %s: %w", where, scanErr)
	}
	if openTranCount > 0 {
		return nil
	}
	return fmt.Errorf("tx closed unexpectedly at %s (trancount=%d)", where, openTranCount)
}
func isDuplicateBarcodeInsertErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(err.Error())
if !strings.Contains(msg, "duplicate key") {
return false
}
if strings.Contains(msg, "tbstokbarkodu") {
return true
}
if strings.Contains(msg, "pritembarcode") {
return true
}
return strings.Contains(msg, "unique")
}
// UpsertItemAttributesTx upserts rows into dbo.prItemAttribute inside tx:
// existing (ItemTypeCode, ItemCode, AttributeTypeCode) rows get their
// AttributeCode updated, missing ones are inserted. Before writing, it
// guarantees every referenced item code exists in cdItem (via ensureCdItemTx)
// so FK_prItemAttribute_ItemCode cannot fire. Returns the total rows
// affected across update + insert statements.
func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttributeRow, username string) (int64, error) {
	start := time.Now()
	if len(attrs) == 0 {
		log.Printf("[UpsertItemAttributesTx] attrs=0 affected=0 duration_ms=0")
		return 0, nil
	}
	// FK_prItemAttribute_ItemCode hatasini engellemek icin, attribute yazmadan once
	// ilgili item kodlarinin cdItem tarafinda varligini transaction icinde garanti et.
	// (To avoid FK_prItemAttribute_ItemCode violations, guarantee inside the
	// transaction that each referenced item code exists in cdItem before
	// writing attributes. Each distinct (type, code) pair is ensured once.)
	seenCodes := make(map[string]struct{}, len(attrs))
	for _, a := range attrs {
		itemTypeCode := a.ItemTypeCode
		if itemTypeCode <= 0 {
			// Non-positive type codes default to 1.
			itemTypeCode = 1
		}
		itemCode := strings.ToUpper(strings.TrimSpace(a.ItemCode))
		if itemCode == "" {
			continue
		}
		key := NormalizeCdItemMapKey(int16(itemTypeCode), itemCode)
		if _, ok := seenCodes[key]; ok {
			continue
		}
		seenCodes[key] = struct{}{}
		if err := ensureCdItemTx(tx, int16(itemTypeCode), itemCode, username, nil); err != nil {
			return 0, fmt.Errorf("ensure cdItem before item attributes failed itemCode=%s: %w", itemCode, err)
		}
	}
	// SQL Server parameter limiti (2100) nedeniyle batch'li set-based upsert kullanilir.
	// (Chunked set-based upsert because of SQL Server's 2100-parameter limit.)
	const chunkSize = 400 // 400 * 4 param + 1 username = 1601
	var affected int64
	for i := 0; i < len(attrs); i += chunkSize {
		end := i + chunkSize
		if end > len(attrs) {
			end = len(attrs)
		}
		chunk := attrs[i:end]
		// Build one "(@pN,@pN+1,@pN+2,@pN+3)" tuple per attribute row.
		values := make([]string, 0, len(chunk))
		args := make([]any, 0, len(chunk)*4+1)
		paramPos := 1
		for _, a := range chunk {
			values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3))
			args = append(args, a.ItemTypeCode, a.ItemCode, a.AttributeTypeCode, a.AttributeCode)
			paramPos += 4
		}
		usernameParam := paramPos
		args = append(args, username)
		// Two statements in one batch: UPDATE matching rows first, then
		// INSERT the rows that had no match. The same VALUES list (and the
		// same named parameters) is reused by both CTEs.
		query := fmt.Sprintf(`
WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
SELECT *
FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
)
UPDATE tgt
SET
tgt.AttributeCode = src.AttributeCode,
tgt.LastUpdatedUserName = @p%d,
tgt.LastUpdatedDate = GETDATE()
FROM dbo.prItemAttribute tgt
JOIN src
ON src.ItemTypeCode = tgt.ItemTypeCode
AND src.ItemCode = tgt.ItemCode
AND src.AttributeTypeCode = tgt.AttributeTypeCode;
WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
SELECT *
FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
)
INSERT INTO dbo.prItemAttribute (
ItemTypeCode,
ItemCode,
AttributeTypeCode,
AttributeCode,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
src.ItemTypeCode,
src.ItemCode,
src.AttributeTypeCode,
src.AttributeCode,
@p%d,
GETDATE(),
@p%d,
GETDATE(),
NEWID()
FROM src
LEFT JOIN dbo.prItemAttribute tgt
ON src.ItemTypeCode = tgt.ItemTypeCode
AND src.ItemCode = tgt.ItemCode
AND src.AttributeTypeCode = tgt.AttributeTypeCode
WHERE tgt.ItemCode IS NULL;
`, strings.Join(values, ","), usernameParam, strings.Join(values, ","), usernameParam, usernameParam)
		chunkStart := time.Now()
		res, err := tx.Exec(query, args...)
		if err != nil {
			log.Printf("[UpsertItemAttributesTx] ERROR chunk=%d-%d err=%v", i, end, err)
			return affected, fmt.Errorf("upsert item attributes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), err)
		}
		chunkAffected, _ := res.RowsAffected()
		affected += chunkAffected
		log.Printf("[UpsertItemAttributesTx] chunk=%d-%d chunkAffected=%d cumulative=%d duration_ms=%d",
			i, end, chunkAffected, affected, time.Since(chunkStart).Milliseconds())
	}
	log.Printf("[UpsertItemAttributesTx] attrs=%d affected=%d duration_ms=%d",
		len(attrs), affected, time.Since(start).Milliseconds())
	return affected, nil
}
// GetOrderProductionLookupOptions loads all cdItem-related lookup lists
// (dimension types, product hierarchy, and the distinct values of a set of
// cdItem group/code columns) used by the order-production UI. Each entry in
// the table below pairs a target slice in the result struct with the query
// that fills it; queries run sequentially and the first failure aborts the
// whole load. Codes and descriptions are trimmed before being returned.
func GetOrderProductionLookupOptions(mssql *sql.DB) (models.OrderProductionCdItemLookups, error) {
	out := models.OrderProductionCdItemLookups{}
	// Declarative query table: Name is used only for logging/error context,
	// Target is the slice in `out` that receives the (Code, Description) rows.
	queryPairs := []struct {
		Name   string
		Query  string
		Target *[]models.OrderProductionLookupOption
	}{
		{"ItemDimTypeCodes", `SELECT
CAST(t.ItemDimTypeCode AS NVARCHAR(50)) AS Code,
ISNULL(d.ItemDimTypeDescription, CAST(t.ItemDimTypeCode AS NVARCHAR(50))) AS [Description]
FROM dbo.bsItemDimType t WITH(NOLOCK)
LEFT JOIN dbo.bsItemDimTypeDesc d WITH(NOLOCK)
ON d.ItemDimTypeCode = t.ItemDimTypeCode
AND d.LangCode = 'TR'
WHERE ISNULL(t.IsBlocked, 0) = 0
ORDER BY t.ItemDimTypeCode`, &out.ItemDimTypeCodes},
		{"ProductTypeCodes", `SELECT DISTINCT CAST(ProductTypeCode AS NVARCHAR(50)) AS Code, CAST(ProductTypeCode AS NVARCHAR(50)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE ProductTypeCode IS NOT NULL ORDER BY Code`, &out.ProductTypeCodes},
		{"ProductHierarchyIDs", `SELECT
CAST(h.ProductHierarchyID AS NVARCHAR(50)) AS Code,
LTRIM(RTRIM(
CONCAT(
CAST(ISNULL(h.ProductHierarchyLevelCode01, 0) AS NVARCHAR(50)),
CASE
WHEN ISNULL(d.ProductHierarchyLevelDescription, '') <> '' THEN CONCAT(' - ', d.ProductHierarchyLevelDescription)
ELSE ''
END
)
)) AS [Description]
FROM dbo.dfProductHierarchy h WITH(NOLOCK)
LEFT JOIN dbo.cdProductHierarchyLevelDesc d WITH(NOLOCK)
ON d.ProductHierarchyLevelCode = h.ProductHierarchyLevelCode01
AND d.LangCode = 'TR'
ORDER BY h.ProductHierarchyID`, &out.ProductHierarchyIDs},
		{"UnitOfMeasureCode1List", `SELECT DISTINCT CAST(UnitOfMeasureCode1 AS NVARCHAR(50)) AS Code, CAST(UnitOfMeasureCode1 AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(UnitOfMeasureCode1 AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.UnitOfMeasureCode1List},
		{"ItemAccountGrCodes", `SELECT DISTINCT CAST(ItemAccountGrCode AS NVARCHAR(50)) AS Code, CAST(ItemAccountGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemAccountGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemAccountGrCodes},
		{"ItemTaxGrCodes", `SELECT DISTINCT CAST(ItemTaxGrCode AS NVARCHAR(50)) AS Code, CAST(ItemTaxGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemTaxGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemTaxGrCodes},
		{"ItemPaymentPlanGrCodes", `SELECT DISTINCT CAST(ItemPaymentPlanGrCode AS NVARCHAR(50)) AS Code, CAST(ItemPaymentPlanGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemPaymentPlanGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemPaymentPlanGrCodes},
		{"ItemDiscountGrCodes", `SELECT DISTINCT CAST(ItemDiscountGrCode AS NVARCHAR(50)) AS Code, CAST(ItemDiscountGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemDiscountGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemDiscountGrCodes},
		{"ItemVendorGrCodes", `SELECT DISTINCT CAST(ItemVendorGrCode AS NVARCHAR(50)) AS Code, CAST(ItemVendorGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemVendorGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemVendorGrCodes},
		{"PromotionGroupCodes", `SELECT DISTINCT CAST(PromotionGroupCode AS NVARCHAR(50)) AS Code, CAST(PromotionGroupCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(PromotionGroupCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.PromotionGroupCodes},
		{"ProductCollectionGrCodes", `SELECT DISTINCT CAST(ProductCollectionGrCode AS NVARCHAR(50)) AS Code, CAST(ProductCollectionGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ProductCollectionGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ProductCollectionGrCodes},
		{"StorePriceLevelCodes", `SELECT DISTINCT CAST(StorePriceLevelCode AS NVARCHAR(50)) AS Code, CAST(StorePriceLevelCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(StorePriceLevelCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.StorePriceLevelCodes},
		{"PerceptionOfFashionCodes", `SELECT DISTINCT CAST(PerceptionOfFashionCode AS NVARCHAR(50)) AS Code, CAST(PerceptionOfFashionCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(PerceptionOfFashionCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.PerceptionOfFashionCodes},
		{"CommercialRoleCodes", `SELECT DISTINCT CAST(CommercialRoleCode AS NVARCHAR(50)) AS Code, CAST(CommercialRoleCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(CommercialRoleCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.CommercialRoleCodes},
		{"StoreCapacityLevelCodes", `SELECT DISTINCT CAST(StoreCapacityLevelCode AS NVARCHAR(50)) AS Code, CAST(StoreCapacityLevelCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(StoreCapacityLevelCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.StoreCapacityLevelCodes},
		{"CustomsTariffNumbers", `SELECT DISTINCT CAST(CustomsTariffNumberCode AS NVARCHAR(50)) AS Code, CAST(CustomsTariffNumberCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(CustomsTariffNumberCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.CustomsTariffNumbers},
		{"CompanyCodes", `SELECT DISTINCT CAST(CompanyCode AS NVARCHAR(50)) AS Code, CAST(CompanyCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(CompanyCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.CompanyCodes},
	}
	for _, pair := range queryPairs {
		start := time.Now()
		log.Printf("[GetOrderProductionLookupOptions] executing [%s]", pair.Name)
		rows, err := mssql.Query(pair.Query)
		if err != nil {
			return out, fmt.Errorf("lookup query failed [%s]: %w", pair.Name, err)
		}
		list := make([]models.OrderProductionLookupOption, 0, 64)
		for rows.Next() {
			var item models.OrderProductionLookupOption
			if err := rows.Scan(&item.Code, &item.Description); err != nil {
				// Close explicitly on every exit path; no defer inside the
				// loop because rows are re-created per query pair.
				rows.Close()
				return out, fmt.Errorf("lookup scan failed [%s]: %w", pair.Name, err)
			}
			item.Code = strings.TrimSpace(item.Code)
			item.Description = strings.TrimSpace(item.Description)
			list = append(list, item)
		}
		if err := rows.Err(); err != nil {
			rows.Close()
			return out, fmt.Errorf("lookup rows failed [%s]: %w", pair.Name, err)
		}
		rows.Close()
		log.Printf("[GetOrderProductionLookupOptions] ok [%s] count=%d duration=%s", pair.Name, len(list), time.Since(start))
		*pair.Target = list
	}
	return out, nil
}

View File

@@ -9,12 +9,12 @@ import (
func GetProductList() ([]models.Product, error) {
rows, err := db.MssqlDB.Query(`
SELECT
ProductCode
LTRIM(RTRIM(ProductCode)) AS ProductCode
FROM ProductFilterWithDescription('TR')
WHERE
ProductAtt42 IN ('SERI', 'AKSESUAR')
AND IsBlocked = 0
AND LEN(ProductCode) = 13 -- 🔹 yalnızca 13 karakterlik kodlar
AND LEN(LTRIM(RTRIM(ProductCode))) = 13
ORDER BY ProductCode;
`)
if err != nil {

View File

@@ -0,0 +1,256 @@
package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"strconv"
"strings"
"time"
)
// GetProductPricingList returns one page of product pricing rows ordered by
// ProductCode, using keyset pagination: pass the last ProductCode of the
// previous page as afterProductCode to fetch the next page.
//
// limit <= 0 falls back to 500. The query is retried up to 3 times on
// transient network errors (see isTransientMSSQLNetworkError) with a backoff
// of attempt*300ms between tries, honoring ctx cancellation.
//
// Returns the scanned rows, or the first non-transient (or final) error.
func GetProductPricingList(ctx context.Context, limit int, afterProductCode string) ([]models.ProductPricing, error) {
	if limit <= 0 {
		limit = 500
	}

	afterProductCode = strings.TrimSpace(afterProductCode)
	cursorFilter := ""
	args := make([]any, 0, 1)
	if afterProductCode != "" {
		// Keyset cursor: only rows strictly after the previous page's last code.
		cursorFilter = "WHERE bp.ProductCode > @p1"
		args = append(args, afterProductCode)
	}

	// limit is an int rendered via strconv.Itoa, so the TOP clause cannot be
	// used for SQL injection; the cursor value is passed as parameter @p1.
	// NOTE(review): warehouse code '1-014' looks like a typo of '1-0-14'
	// (which is already in the list) — confirm against the warehouse table.
	query := `
WITH base_products AS (
    SELECT
        LTRIM(RTRIM(ProductCode)) AS ProductCode,
        COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan,
        COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori,
        COALESCE(LTRIM(RTRIM(ProductAtt42Desc)), '') AS UrunIlkGrubu,
        COALESCE(LTRIM(RTRIM(ProductAtt01Desc)), '') AS UrunAnaGrubu,
        COALESCE(LTRIM(RTRIM(ProductAtt02Desc)), '') AS UrunAltGrubu,
        COALESCE(LTRIM(RTRIM(ProductAtt41Desc)), '') AS Icerik,
        COALESCE(LTRIM(RTRIM(ProductAtt29Desc)), '') AS Karisim,
        COALESCE(LTRIM(RTRIM(ProductAtt10Desc)), '') AS Marka
    FROM ProductFilterWithDescription('TR')
    WHERE ProductAtt42 IN ('SERI', 'AKSESUAR')
      AND IsBlocked = 0
      AND LEN(LTRIM(RTRIM(ProductCode))) = 13
),
paged_products AS (
    SELECT TOP (` + strconv.Itoa(limit) + `)
        bp.ProductCode
    FROM base_products bp
    ` + cursorFilter + `
    ORDER BY bp.ProductCode
),
latest_base_price AS (
    SELECT
        LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
        CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
        CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
        ROW_NUMBER() OVER (
            PARTITION BY LTRIM(RTRIM(b.ItemCode))
            ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
        ) AS rn
    FROM prItemBasePrice b
    WHERE b.ItemTypeCode = 1
      AND b.BasePriceCode = 1
      AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(b.ItemCode))
      )
),
stock_entry_dates AS (
    SELECT
        LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
        CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
    FROM trStock s WITH(NOLOCK)
    WHERE s.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
      AND s.In_Qty1 > 0
      AND LTRIM(RTRIM(s.WarehouseCode)) IN (
          '1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
          '1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
          '1-0-33','101','1-014','1-0-49','1-0-36'
      )
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(s.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
    SELECT
        LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
        SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
    FROM trStock s WITH(NOLOCK)
    WHERE s.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(s.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
    SELECT
        LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
        SUM(p.Qty1) AS PickingQty1
    FROM PickingStates p
    WHERE p.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(p.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
    SELECT
        LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
        SUM(r.Qty1) AS ReserveQty1
    FROM ReserveStates r
    WHERE r.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(r.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
    SELECT
        LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
        SUM(d.Qty1) AS DispOrderQty1
    FROM DispOrderStates d
    WHERE d.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(d.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(d.ItemCode))
),
stock_totals AS (
    SELECT
        pp.ProductCode AS ItemCode,
        CAST(ROUND(
            ISNULL(sb.InventoryQty1, 0)
            - ISNULL(pb.PickingQty1, 0)
            - ISNULL(rb.ReserveQty1, 0)
            - ISNULL(db.DispOrderQty1, 0)
        , 2) AS DECIMAL(18, 2)) AS StockQty
    FROM paged_products pp
    LEFT JOIN stock_base sb
        ON sb.ItemCode = pp.ProductCode
    LEFT JOIN pick_base pb
        ON pb.ItemCode = pp.ProductCode
    LEFT JOIN reserve_base rb
        ON rb.ItemCode = pp.ProductCode
    LEFT JOIN disp_base db
        ON db.ItemCode = pp.ProductCode
)
SELECT
    bp.ProductCode AS ProductCode,
    COALESCE(lp.CostPrice, 0) AS CostPrice,
    COALESCE(st.StockQty, 0) AS StockQty,
    COALESCE(se.StockEntryDate, '') AS StockEntryDate,
    COALESCE(lp.LastPricingDate, '') AS LastPricingDate,
    bp.AskiliYan,
    bp.Kategori,
    bp.UrunIlkGrubu,
    bp.UrunAnaGrubu,
    bp.UrunAltGrubu,
    bp.Icerik,
    bp.Karisim,
    bp.Marka
FROM paged_products pp
INNER JOIN base_products bp
    ON bp.ProductCode = pp.ProductCode
LEFT JOIN latest_base_price lp
    ON lp.ItemCode = bp.ProductCode
    AND lp.rn = 1
LEFT JOIN stock_entry_dates se
    ON se.ItemCode = bp.ProductCode
LEFT JOIN stock_totals st
    ON st.ItemCode = bp.ProductCode
ORDER BY bp.ProductCode;
`

	var (
		rows    *sql.Rows
		rowsErr error
	)
retry:
	for attempt := 1; attempt <= 3; attempt++ {
		var err error
		rows, err = db.MssqlDB.QueryContext(ctx, query, args...)
		if err == nil {
			rowsErr = nil
			break
		}
		rowsErr = err
		if ctx.Err() != nil || !isTransientMSSQLNetworkError(err) || attempt == 3 {
			break
		}
		wait := time.Duration(attempt*300) * time.Millisecond
		select {
		case <-ctx.Done():
			// A bare `break` here would only exit the select, not the loop,
			// causing one extra query attempt on a canceled context — use the
			// loop label to stop retrying immediately.
			break retry
		case <-time.After(wait):
		}
	}
	if rowsErr != nil {
		return nil, rowsErr
	}
	defer rows.Close()

	var out []models.ProductPricing
	for rows.Next() {
		var item models.ProductPricing
		if err := rows.Scan(
			&item.ProductCode,
			&item.CostPrice,
			&item.StockQty,
			&item.StockEntryDate,
			&item.LastPricingDate,
			&item.AskiliYan,
			&item.Kategori,
			&item.UrunIlkGrubu,
			&item.UrunAnaGrubu,
			&item.UrunAltGrubu,
			&item.Icerik,
			&item.Karisim,
			&item.Marka,
		); err != nil {
			return nil, err
		}
		out = append(out, item)
	}
	// Surface iteration errors (e.g. a connection dropped mid-stream) that
	// rows.Next alone silently terminates on.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return out, nil
}
func isTransientMSSQLNetworkError(err error) bool {
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "broken pipe") ||
strings.Contains(e, "connection reset")
}

View File

@@ -0,0 +1,59 @@
package queries
// GetProductAttributes lists, for item type @p1, every required and
// non-blocked attribute type together with its available non-blocked
// attribute values. Type and value descriptions come from the 'TR' language
// tables, falling back to the raw codes when no description exists. Types
// with no usable values are filtered out, and within each type the
// placeholder codes '-' and '.' sort first.
const GetProductAttributes = `
;WITH RequiredTypes AS (
SELECT
t.ItemTypeCode,
t.AttributeTypeCode,
ISNULL(NULLIF(td.AttributeTypeDescription, ''), CAST(t.AttributeTypeCode AS NVARCHAR(30))) AS AttributeTypeDescription
FROM dbo.cdItemAttributeType AS t WITH(NOLOCK)
LEFT JOIN dbo.cdItemAttributeTypeDesc AS td WITH(NOLOCK)
ON td.ItemTypeCode = t.ItemTypeCode
AND td.AttributeTypeCode = t.AttributeTypeCode
AND td.LangCode = 'TR'
WHERE t.ItemTypeCode = @p1
AND ISNULL(t.IsBlocked, 0) = 0
AND ISNULL(t.IsRequired, 0) = 1
),
Attr AS (
SELECT
a.ItemTypeCode,
a.AttributeTypeCode,
ISNULL(a.AttributeCode, '') AS AttributeCode,
ISNULL(d.AttributeDescription, ISNULL(a.AttributeCode, '')) AS AttributeDescription
FROM dbo.cdItemAttribute AS a WITH(NOLOCK)
LEFT JOIN dbo.cdItemAttributeDesc AS d WITH(NOLOCK)
ON d.ItemTypeCode = a.ItemTypeCode
AND d.AttributeTypeCode = a.AttributeTypeCode
AND d.AttributeCode = a.AttributeCode
AND d.LangCode = 'TR'
WHERE a.ItemTypeCode = @p1
AND ISNULL(a.IsBlocked, 0) = 0
AND ISNULL(a.AttributeCode, '') <> ''
)
SELECT
rt.ItemTypeCode,
rt.AttributeTypeCode,
rt.AttributeTypeDescription,
a.AttributeCode,
a.AttributeDescription
FROM RequiredTypes AS rt
LEFT JOIN Attr AS a
ON a.ItemTypeCode = rt.ItemTypeCode
AND a.AttributeTypeCode = rt.AttributeTypeCode
WHERE ISNULL(a.AttributeCode, '') <> ''
ORDER BY
rt.AttributeTypeCode,
CASE WHEN a.AttributeCode IN ('-', '.') THEN 0 ELSE 1 END,
a.AttributeCode;
`
// GetProductItemAttributes returns the attribute assignments of a single
// item: @p1 = ItemTypeCode, @p2 = ItemCode. Item codes are compared after
// trimming whitespace on both sides, and NULL attribute codes come back as
// empty strings.
const GetProductItemAttributes = `
SELECT
a.ItemTypeCode,
a.AttributeTypeCode,
ISNULL(a.AttributeCode, '') AS AttributeCode
FROM dbo.prItemAttribute AS a WITH(NOLOCK)
WHERE a.ItemTypeCode = @p1
AND ISNULL(LTRIM(RTRIM(a.ItemCode)), '') = ISNULL(LTRIM(RTRIM(@p2)), '')
`

View File

@@ -0,0 +1,16 @@
package queries
// GetProductNewColors lists the selectable primary colors for a new product:
// active (non-blocked) 3-character color codes from the N'ÜRÜN' catalog,
// ordered by code. @p1 is the product code and is only echoed back as the
// ProductCode column. The description falls back from the TR color
// description, to the hex value, to the raw color code.
const GetProductNewColors = `
SELECT
CAST(@p1 AS NVARCHAR(30)) AS ProductCode,
LTRIM(RTRIM(c.ColorCode)) AS ColorCode,
ISNULL(NULLIF(LTRIM(RTRIM(cd.ColorDescription)), ''), ISNULL(NULLIF(LTRIM(RTRIM(c.ColorHex)), ''), LTRIM(RTRIM(c.ColorCode)))) AS ColorDescription
FROM dbo.cdColor AS c WITH(NOLOCK)
LEFT JOIN dbo.cdColorDesc AS cd WITH(NOLOCK)
ON cd.ColorCode = c.ColorCode
AND cd.LangCode = 'TR'
WHERE ISNULL(c.IsBlocked, 0) = 0
AND LEN(LTRIM(RTRIM(ISNULL(c.ColorCode, '')))) = 3
AND LTRIM(RTRIM(ISNULL(c.ColorCatalogCode1, ''))) = N'ÜRÜN'
ORDER BY LTRIM(RTRIM(c.ColorCode));
`

View File

@@ -0,0 +1,16 @@
package queries
// GetProductNewSecondColors lists the selectable secondary colors (dim2) for
// a new product: active 3-character ItemDim2 codes, ordered by code, with the
// TR color description as fallback text. @ProductCode and @ColorCode are only
// echoed back in the result set; they do not filter the rows.
// NOTE(review): this query uses named parameters (@ProductCode/@ColorCode)
// while the sibling GetProductNewColors uses positional @p1 — confirm callers
// bind accordingly.
const GetProductNewSecondColors = `
SELECT
LTRIM(RTRIM(@ProductCode)) AS ProductCode,
LTRIM(RTRIM(ISNULL(@ColorCode, ''))) AS ColorCode,
LTRIM(RTRIM(d2.ItemDim2Code)) AS ItemDim2Code,
ISNULL(NULLIF(LTRIM(RTRIM(cd.ColorDescription)), ''), LTRIM(RTRIM(d2.ItemDim2Code))) AS ColorDescription
FROM dbo.cdItemDim2 AS d2 WITH(NOLOCK)
LEFT JOIN dbo.cdColorDesc AS cd WITH(NOLOCK)
ON cd.ColorCode = d2.ItemDim2Code
AND cd.LangCode = 'TR'
WHERE ISNULL(d2.IsBlocked, 0) = 0
AND LEN(LTRIM(RTRIM(ISNULL(d2.ItemDim2Code, '')))) = 3
ORDER BY LTRIM(RTRIM(d2.ItemDim2Code));
`

View File

@@ -1,10 +1,11 @@
package queries
const GetProductSecondColors = `
SELECT DISTINCT
SELECT
Product.ProductCode,
ISNULL(prItemVariant.ColorCode, '') AS ColorCode,
ISNULL(prItemVariant.ItemDim2Code, '') AS ItemDim2Code
ISNULL(prItemVariant.ItemDim2Code, '') AS ItemDim2Code,
ISNULL(ColorDesc.ColorDescription, '') AS ColorDescription
FROM prItemVariant WITH(NOLOCK)
INNER JOIN ProductFilterWithDescription('TR') AS Product
ON prItemVariant.ItemCode = Product.ProductCode
@@ -14,5 +15,10 @@ FROM prItemVariant WITH(NOLOCK)
WHERE Product.ProductCode = @ProductCode
AND prItemVariant.ColorCode = @ColorCode
AND ISNULL(prItemVariant.ItemDim2Code, '') <> ''
GROUP BY Product.ProductCode, prItemVariant.ItemDim2Code, prItemVariant.ColorCode
GROUP BY
Product.ProductCode,
prItemVariant.ItemDim2Code,
prItemVariant.ColorCode,
ColorDesc.ColorDescription
ORDER BY ItemDim2Code
`

View File

@@ -2,6 +2,7 @@ package queries
import (
"bssapp-backend/db"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"context"
"database/sql"
@@ -14,9 +15,7 @@ func GetStatements(ctx context.Context, params models.StatementParams) ([]models
// AccountCode normalize: "ZLA0127" → "ZLA 0127"
params.AccountCode = normalizeMasterAccountCode(params.AccountCode)
if strings.TrimSpace(params.LangCode) == "" {
params.LangCode = "TR"
}
params.LangCode = i18n.NormalizeLangCode(params.LangCode)
// Parislemler []string → '1','2','3'
parislemFilter := "''"
@@ -221,8 +220,8 @@ SELECT
CONVERT(varchar(10), @startdate, 23) AS Vade_Tarihi,
'Baslangic_devir' AS Belge_No,
'Devir' AS Islem_Tipi,
'Devir Bakiyesi' AS Aciklama,
CASE WHEN @LangCode = 'EN' THEN 'Opening' ELSE 'Devir' END AS Islem_Tipi,
CASE WHEN @LangCode = 'EN' THEN 'Opening Balance' ELSE 'Devir Bakiyesi' END AS Aciklama,
o.Para_Birimi,

View File

@@ -6,8 +6,8 @@ import (
"log"
)
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler)
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
log.Printf("Header query error: %v", err)
return nil, nil, err

View File

@@ -10,13 +10,14 @@ func getStatementsForPDF(
accountCode string,
startDate string,
endDate string,
langCode string,
parislemler []string,
) ([]models.StatementHeader, error) {
return GetStatements(ctx, models.StatementParams{
AccountCode: accountCode,
StartDate: startDate,
EndDate: endDate,
LangCode: "TR",
LangCode: langCode,
Parislemler: parislemler,
})
}

View File

@@ -11,8 +11,8 @@ import (
"strings"
)
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler)
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
log.Printf("Header query error: %v", err)
return nil, nil, err

View File

@@ -464,6 +464,7 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
defer tx.Rollback()
var newID int64
log.Printf("DEBUG: UserCreateRoute payload=%+v", payload)
err = tx.QueryRow(`
INSERT INTO mk_dfusr (
username,
@@ -472,11 +473,12 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
email,
mobile,
address,
password_hash,
force_password_change,
created_at,
updated_at
)
VALUES ($1,$2,$3,$4,$5,$6,true,NOW(),NOW())
VALUES ($1,$2,$3,$4,$5,$6,'',true,NOW(),NOW())
RETURNING id
`,
payload.Code,
@@ -489,7 +491,7 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
if err != nil {
log.Printf("USER INSERT ERROR code=%q email=%q err=%v", payload.Code, payload.Email, err)
http.Error(w, "Kullanıcı oluşturulamadı", http.StatusInternalServerError)
http.Error(w, fmt.Sprintf("Kullanıcı oluşturulamadı: %v", err), http.StatusInternalServerError)
return
}

View File

@@ -20,6 +20,18 @@ type sendOrderMarketMailPayload struct {
DeletedItems []string `json:"deletedItems"`
UpdatedItems []string `json:"updatedItems"`
AddedItems []string `json:"addedItems"`
OldDueDate string `json:"oldDueDate"`
NewDueDate string `json:"newDueDate"`
ExtraRecipients []string `json:"extraRecipients"`
DueDateChanges []sendOrderMailDueDateChange `json:"dueDateChanges"`
}
// sendOrderMailDueDateChange describes one order-line due-date change
// reported in the notification mail payload. Dates are carried as strings as
// received from the client (format not enforced here).
type sendOrderMailDueDateChange struct {
	ItemCode     string `json:"itemCode"`     // product code of the changed line
	ColorCode    string `json:"colorCode"`    // primary color of the line
	ItemDim2Code string `json:"itemDim2Code"` // secondary color / dim2 of the line
	OldDueDate   string `json:"oldDueDate"`   // due date before the update
	NewDueDate   string `json:"newDueDate"`   // due date after the update
}
func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) http.HandlerFunc {
@@ -67,6 +79,7 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
http.Error(w, "recipient query error: "+err.Error(), http.StatusInternalServerError)
return
}
recipients = appendUniqueRecipients(recipients, payload.ExtraRecipients...)
if len(recipients) == 0 {
http.Error(w, "no active email mapping for market", http.StatusBadRequest)
return
@@ -106,6 +119,18 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
if isUpdate {
subjectAction = "SİPARİŞ GÜNCELLENDİ."
}
if payload.NewDueDate != "" && payload.OldDueDate != payload.NewDueDate {
subjectAction = "SİPARİŞ TERMİNİ GÜNCELLENDİ."
}
if isUpdate && subjectAction == "SİPARİŞ GÜNCELLENDİ." {
// Satır bazlı termin kontrolü
for _, item := range payload.UpdatedItems {
if strings.Contains(item, "Termin:") {
subjectAction = "SİPARİŞ TERMİNİ GÜNCELLENDİ."
break
}
}
}
subject := fmt.Sprintf("%s kullanıcısı tarafından %s %s", actor, number, subjectAction)
cariDetail := ""
@@ -125,6 +150,13 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
`</p>`,
)
if payload.NewDueDate != "" && payload.OldDueDate != payload.NewDueDate {
body = append(body,
fmt.Sprintf(`<p><b>Termin Değişikliği:</b> %s &rarr; <b style="color:red">%s</b></p>`,
htmlEsc(payload.OldDueDate), htmlEsc(payload.NewDueDate)),
)
}
if isUpdate {
body = append(body,
renderItemListHTML("Silinen Ürün Kodları", payload.DeletedItems),
@@ -135,6 +167,10 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
body = append(body, `<p><i>Bu sipariş BaggiSS App Uygulamasından oluşturulmuştur.</i></p>`)
body = append(body, `<p>PDF ektedir.</p>`)
if dueDateTableHTML := renderDueDateChangesTableHTML("Termin DeÄŸiÅŸiklikleri", payload.DueDateChanges); dueDateTableHTML != "" {
body = append(body, dueDateTableHTML)
}
bodyHTML := strings.Join(body, "\n")
fileNo := sanitizeFileName(number)
@@ -242,6 +278,37 @@ ORDER BY email
return out, nil
}
func appendUniqueRecipients(base []string, extras ...string) []string {
seen := make(map[string]struct{}, len(base)+len(extras))
out := make([]string, 0, len(base)+len(extras))
for _, raw := range base {
mail := strings.ToLower(strings.TrimSpace(raw))
if mail == "" {
continue
}
if _, ok := seen[mail]; ok {
continue
}
seen[mail] = struct{}{}
out = append(out, mail)
}
for _, raw := range extras {
mail := strings.ToLower(strings.TrimSpace(raw))
if mail == "" {
continue
}
if _, ok := seen[mail]; ok {
continue
}
seen[mail] = struct{}{}
out = append(out, mail)
}
return out
}
func buildOrderPDFBytesForMail(db *sql.DB, pgDB *sql.DB, orderID string) ([]byte, *OrderHeader, error) {
header, err := getOrderHeaderFromDB(db, orderID)
if err != nil {
@@ -271,9 +338,24 @@ func buildOrderPDFBytesForMail(db *sql.DB, pgDB *sql.DB, orderID string) ([]byte
return nil, nil, err
}
rows := normalizeOrderLinesForPdf(lines, sizeMatchData)
for _, rr := range rows {
if strings.TrimSpace(rr.Category) == "" {
return nil, nil, fmt.Errorf("product-size-match unmapped row: %s/%s/%s", rr.Model, rr.GroupMain, rr.GroupSub)
for i := range rows {
if strings.TrimSpace(rows[i].Category) != "" {
continue
}
bedenList := make([]string, 0, len(rows[i].SizeQty))
for s := range rows[i].SizeQty {
bedenList = append(bedenList, s)
}
rows[i].Category = detectBedenGroupGo(
sizeMatchData,
bedenList,
rows[i].GroupMain,
rows[i].GroupSub,
rows[i].YetiskinGarson,
rows[i].YetiskinGarson,
)
if strings.TrimSpace(rows[i].Category) == "" {
rows[i].Category = catTak
}
}
@@ -345,3 +427,54 @@ func renderItemListHTML(title string, items []string) string {
b = append(b, `</p>`)
return strings.Join(b, "\n")
}
// renderDueDateChangesTableHTML renders an HTML table of per-line due-date
// changes for the notification mail. Rows lacking an item code or a new date,
// rows whose date did not actually change, and case-insensitive duplicates
// are dropped. Returns "" when nothing remains to display.
func renderDueDateChangesTableHTML(title string, rows []sendOrderMailDueDateChange) string {
	if len(rows) == 0 {
		return ""
	}
	dedup := make(map[string]struct{}, len(rows))
	visible := make([]sendOrderMailDueDateChange, 0, len(rows))
	for _, r := range rows {
		trimmed := sendOrderMailDueDateChange{
			ItemCode:     strings.TrimSpace(r.ItemCode),
			ColorCode:    strings.TrimSpace(r.ColorCode),
			ItemDim2Code: strings.TrimSpace(r.ItemDim2Code),
			OldDueDate:   strings.TrimSpace(r.OldDueDate),
			NewDueDate:   strings.TrimSpace(r.NewDueDate),
		}
		// Skip unusable or unchanged rows.
		if trimmed.ItemCode == "" || trimmed.NewDueDate == "" || trimmed.OldDueDate == trimmed.NewDueDate {
			continue
		}
		key := strings.ToUpper(strings.Join([]string{trimmed.ItemCode, trimmed.ColorCode, trimmed.ItemDim2Code, trimmed.OldDueDate, trimmed.NewDueDate}, "|"))
		if _, dup := dedup[key]; dup {
			continue
		}
		dedup[key] = struct{}{}
		visible = append(visible, trimmed)
	}
	if len(visible) == 0 {
		return ""
	}
	var sb strings.Builder
	fmt.Fprintf(&sb, `<p><b>%s:</b></p>`, htmlEsc(title))
	sb.WriteString(`<table border="1" cellpadding="5" style="border-collapse: collapse; width: 100%;">`)
	sb.WriteString(`<tr style="background-color: #f2f2f2;"><th>Ürün Kodu</th><th>Renk</th><th>2. Renk</th><th>Eski Termin</th><th>Yeni Termin</th></tr>`)
	for _, r := range visible {
		sb.WriteString("<tr>")
		fmt.Fprintf(&sb, "<td>%s</td>", htmlEsc(r.ItemCode))
		fmt.Fprintf(&sb, "<td>%s</td>", htmlEsc(r.ColorCode))
		fmt.Fprintf(&sb, "<td>%s</td>", htmlEsc(r.ItemDim2Code))
		fmt.Fprintf(&sb, "<td>%s</td>", htmlEsc(r.OldDueDate))
		fmt.Fprintf(&sb, `<td style="color:red;font-weight:bold;">%s</td>`, htmlEsc(r.NewDueDate))
		sb.WriteString("</tr>")
	}
	sb.WriteString(`</table>`)
	return sb.String()
}

View File

@@ -464,9 +464,6 @@ func detectBedenGroupGo(
if ruleBased != "" {
return ruleBased
}
if matchData != nil && len(matchData.Rules) > 0 {
return ""
}
ana = normalizeTextForMatchGo(ana)
alt = normalizeTextForMatchGo(alt)
@@ -1597,6 +1594,17 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
layout := newPdfLayout(pdf)
catSizes := buildCategorySizeMap(rows)
normalizeYetiskinGarsonTokenGo := func(v string) string {
s := strings.ToUpper(strings.TrimSpace(v))
if strings.Contains(s, "GARSON") {
return "GARSON"
}
if strings.Contains(s, "YETISKIN") || strings.Contains(s, "YETİSKİN") {
return "YETISKIN"
}
return "GENEL"
}
// Grup: ÜRÜN ANA GRUBU
type group struct {
Name string
@@ -1609,15 +1617,24 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
var order []string
for _, r := range rows {
name := strings.TrimSpace(r.GroupMain)
if name == "" {
name = "GENEL"
ana := strings.TrimSpace(r.GroupMain)
if ana == "" {
ana = "GENEL"
}
g, ok := groups[name]
ana = strings.ToUpper(ana)
yg := normalizeYetiskinGarsonTokenGo(r.YetiskinGarson) // fallback
kategori := strings.Join(strings.Fields(strings.TrimSpace(r.YetiskinGarson)), " ")
if kategori == "" {
kategori = yg
}
name := strings.TrimSpace(fmt.Sprintf("%s %s", kategori, ana))
groupKey := fmt.Sprintf("%s::%s", kategori, ana)
g, ok := groups[groupKey]
if !ok {
g = &group{Name: name}
groups[name] = g
order = append(order, name)
groups[groupKey] = g
order = append(order, groupKey)
}
g.Rows = append(g.Rows, r)
g.Adet += r.TotalQty
@@ -1673,8 +1690,8 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
newPage(firstPage, true)
firstPage = false
for _, name := range order {
g := groups[name]
for _, key := range order {
g := groups[key]
for _, row := range g.Rows {
rh := calcRowHeight(pdf, layout, row)
@@ -1773,15 +1790,31 @@ func OrderPDFHandler(db *sql.DB, pgDB *sql.DB) http.Handler {
}
rows := normalizeOrderLinesForPdf(lines, sizeMatchData)
unmapped := make([]string, 0)
for _, rr := range rows {
if strings.TrimSpace(rr.Category) == "" {
unmapped = append(unmapped, fmt.Sprintf("%s/%s/%s", rr.Model, rr.GroupMain, rr.GroupSub))
for i := range rows {
if strings.TrimSpace(rows[i].Category) != "" {
continue
}
bedenList := make([]string, 0, len(rows[i].SizeQty))
for s := range rows[i].SizeQty {
bedenList = append(bedenList, s)
}
rows[i].Category = detectBedenGroupGo(
sizeMatchData,
bedenList,
rows[i].GroupMain,
rows[i].GroupSub,
rows[i].YetiskinGarson,
rows[i].YetiskinGarson,
)
if strings.TrimSpace(rows[i].Category) == "" {
rows[i].Category = catTak
}
if strings.TrimSpace(rows[i].Category) == "" {
unmapped = append(unmapped, fmt.Sprintf("%s/%s/%s", rows[i].Model, rows[i].GroupMain, rows[i].GroupSub))
}
}
if len(unmapped) > 0 {
log.Printf(" OrderPDF product-size-match unmapped orderID=%s rows=%v", orderID, unmapped)
http.Error(w, "product-size-match unmapped rows", http.StatusInternalServerError)
return
log.Printf("⚠️ OrderPDF unmapped rows fallback failed orderID=%s rows=%v", orderID, unmapped)
}
log.Printf("📄 OrderPDF normalized rows orderID=%s rowCount=%d", orderID, len(rows))
for i, rr := range rows {

View File

@@ -2,19 +2,28 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/mailer"
"bssapp-backend/models"
"bssapp-backend/queries"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"log"
"net/http"
"regexp"
"strings"
"time"
"github.com/gorilla/mux"
mssql "github.com/microsoft/go-mssqldb"
)
var baggiModelCodeRegex = regexp.MustCompile(`^[A-Z][0-9]{3}-[A-Z]{3}[0-9]{5}$`)
const productionBarcodeTypeCode = "BAGGI3"
// ======================================================
// 📌 OrderProductionItemsRoute — U ürün satırları
// ======================================================
@@ -49,12 +58,16 @@ func OrderProductionItemsRoute(mssql *sql.DB) http.Handler {
&o.OldDim3,
&o.OldItemCode,
&o.OldColor,
&o.OldColorDescription,
&o.OldDim2,
&o.OldDesc,
&o.OldQty,
&o.NewItemCode,
&o.NewColor,
&o.NewDim2,
&o.NewDesc,
&o.OldDueDate,
&o.NewDueDate,
&o.IsVariantMissing,
); err != nil {
log.Printf("⚠️ SCAN HATASI: %v", err)
@@ -73,6 +86,33 @@ func OrderProductionItemsRoute(mssql *sql.DB) http.Handler {
})
}
// OrderProductionCdItemLookupsRoute serves the cdItem lookup option lists as
// JSON. Every response carries an X-Debug-Request-Id header so a failing call
// can be correlated with the server log; lookup failures return HTTP 500 with
// a structured error body.
func OrderProductionCdItemLookupsRoute(mssql *sql.DB) http.Handler {
	handler := func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		requestID := fmt.Sprintf("opl-%d", time.Now().UnixNano())
		w.Header().Set("X-Debug-Request-Id", requestID)
		log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s started", requestID)

		lookups, lookupErr := queries.GetOrderProductionLookupOptions(mssql)
		if lookupErr != nil {
			log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s lookup error: %v", requestID, lookupErr)
			w.WriteHeader(http.StatusInternalServerError)
			_ = json.NewEncoder(w).Encode(map[string]any{
				"message":   "Veritabani hatasi",
				"step":      "cditem-lookups",
				"detail":    lookupErr.Error(),
				"requestId": requestID,
			})
			return
		}

		log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s success", requestID)
		if encodeErr := json.NewEncoder(w).Encode(lookups); encodeErr != nil {
			log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s encode error: %v", requestID, encodeErr)
		}
	}
	return http.HandlerFunc(handler)
}
// ======================================================
// 📌 OrderProductionInsertMissingRoute — eksik varyantları ekler
// ======================================================
@@ -118,6 +158,9 @@ func OrderProductionInsertMissingRoute(mssql *sql.DB) http.Handler {
func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
rid := fmt.Sprintf("opv-%d", time.Now().UnixNano())
w.Header().Set("X-Debug-Request-Id", rid)
start := time.Now()
id := mux.Vars(r)["id"]
if id == "" {
@@ -134,16 +177,42 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
missing, err := buildMissingVariants(mssql, id, payload.Lines)
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
missing := make([]models.OrderProductionMissingVariant, 0)
targets := make([]models.OrderProductionMissingVariant, 0)
stepStart := time.Now()
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("validate_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
targets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, targets)
return stepErr
})
if err != nil {
writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(payload.Lines), err)
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(newLines), err)
return
}
}
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d total_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(targets), len(missing), time.Since(stepStart).Milliseconds(), time.Since(start).Milliseconds())
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
resp := map[string]any{
"missingCount": len(missing),
"missing": missing,
"barcodeValidationCount": 0,
"barcodeValidations": []models.OrderProductionBarcodeValidation{},
}
if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err)
@@ -154,9 +223,12 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
// ======================================================
// OrderProductionApplyRoute - yeni model varyant guncelleme
// ======================================================
func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
rid := fmt.Sprintf("opa-%d", time.Now().UnixNano())
w.Header().Set("X-Debug-Request-Id", rid)
start := time.Now()
id := mux.Vars(r)["id"]
if id == "" {
@@ -173,14 +245,63 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
missing, err := buildMissingVariants(mssql, id, payload.Lines)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(payload.Lines), err)
return
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
if len(payload.Lines) > 0 {
limit := 5
if len(payload.Lines) < limit {
limit = len(payload.Lines)
}
samples := make([]string, 0, limit)
for i := 0; i < limit; i++ {
ln := payload.Lines[i]
dim1 := ""
if ln.ItemDim1Code != nil {
dim1 = strings.TrimSpace(*ln.ItemDim1Code)
}
samples = append(samples, fmt.Sprintf(
"lineID=%s newItem=%s newColor=%s newDim1=%s newDim2=%s",
strings.TrimSpace(ln.OrderLineID),
strings.ToUpper(strings.TrimSpace(ln.NewItemCode)),
strings.ToUpper(strings.TrimSpace(ln.NewColor)),
strings.ToUpper(strings.TrimSpace(dim1)),
strings.ToUpper(strings.TrimSpace(ln.NewDim2)),
))
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineSamples=%v", rid, id, samples)
}
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
stepMissingStart := time.Now()
missing := make([]models.OrderProductionMissingVariant, 0)
barcodeTargets := make([]models.OrderProductionMissingVariant, 0)
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("apply_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
barcodeTargets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, barcodeTargets)
return stepErr
})
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepMissingStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(newLines), err)
return
}
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(barcodeTargets), len(missing), time.Since(stepMissingStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
if len(missing) > 0 && !payload.InsertMissing {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s early_exit=missing_variants total_ms=%d",
rid, id, time.Since(start).Milliseconds())
w.WriteHeader(http.StatusConflict)
_ = json.NewEncoder(w).Encode(map[string]any{
"missingCount": len(missing),
@@ -199,69 +320,337 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
username = "system"
}
stepBeginStart := time.Now()
tx, err := mssql.Begin()
if err != nil {
writeDBError(w, http.StatusInternalServerError, "begin_tx", id, username, len(payload.Lines), err)
return
}
defer tx.Rollback()
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=begin_tx duration_ms=%d", rid, id, time.Since(stepBeginStart).Milliseconds())
committed := false
currentStep := "begin_tx"
applyTxSettings := func(tx *sql.Tx) error {
// XACT_ABORT OFF:
// Barcode insert path intentionally tolerates duplicate-key errors (fallback/skip duplicate).
// With XACT_ABORT ON, that expected error aborts the whole transaction and causes COMMIT 3902.
_, execErr := tx.Exec(`SET XACT_ABORT OFF; SET LOCK_TIMEOUT 15000;`)
return execErr
}
defer func() {
if committed {
return
}
rbStart := time.Now()
if rbErr := tx.Rollback(); rbErr != nil && rbErr != sql.ErrTxDone {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s failed duration_ms=%d err=%v",
rid, id, currentStep, time.Since(rbStart).Milliseconds(), rbErr)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s ok duration_ms=%d",
rid, id, currentStep, time.Since(rbStart).Milliseconds())
}()
stepTxSettingsStart := time.Now()
currentStep = "tx_settings"
if err := applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=tx_settings duration_ms=%d", rid, id, time.Since(stepTxSettingsStart).Milliseconds())
if err := ensureTxAlive(tx, "after_tx_settings"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings", id, username, len(payload.Lines), err)
return
}
var inserted int64
if payload.InsertMissing {
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username)
if payload.InsertMissing && len(newLines) > 0 {
currentStep = "insert_missing_variants"
cdItemByCode := buildCdItemDraftMap(payload.CdItems)
stepInsertMissingStart := time.Now()
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
if err != nil && isTransientMSSQLNetworkErr(err) {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing transient_error retry=1 err=%v",
rid, id, err)
_ = tx.Rollback()
tx, err = mssql.Begin()
if err != nil {
writeDBError(w, http.StatusInternalServerError, "begin_tx_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "tx_settings_retry_insert_missing"
if err = applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
if err = ensureTxAlive(tx, "after_tx_settings_retry_insert_missing"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "insert_missing_variants_retry"
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
}
if err != nil {
writeDBError(w, http.StatusInternalServerError, "insert_missing_variants", id, username, len(missing), err)
return
}
if err := ensureTxAlive(tx, "after_insert_missing_variants"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_insert_missing_variants", id, username, len(missing), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing inserted=%d duration_ms=%d",
rid, id, inserted, time.Since(stepInsertMissingStart).Milliseconds())
}
stepValidateAttrStart := time.Now()
currentStep = "validate_attributes"
if err := validateProductAttributes(payload.ProductAttributes); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=validate_attributes count=%d duration_ms=%d",
rid, id, len(payload.ProductAttributes), time.Since(stepValidateAttrStart).Milliseconds())
stepUpsertAttrStart := time.Now()
currentStep = "upsert_item_attributes"
attributeAffected, err := queries.UpsertItemAttributesTx(tx, payload.ProductAttributes, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertAttrStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes affected=%d duration_ms=%d",
rid, id, attributeAffected, time.Since(stepUpsertAttrStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemAttribute inputRows=%d affectedRows=%d",
rid, id, len(payload.ProductAttributes), attributeAffected)
if err := ensureTxAlive(tx, "after_upsert_item_attributes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
var barcodeInserted int64
// Barkod adimi:
// - Eski kodlara girmemeli
// - Yeni kod satirlari icin, varyant daha once olusmus olsa bile eksik barkod varsa tamamlamali
// Bu nedenle "inserted > 0" yerine "newLineCount > 0" kosulu kullanilir.
if len(newLines) > 0 && len(barcodeTargets) > 0 {
stepUpsertBarcodeStart := time.Now()
currentStep = "upsert_item_barcodes"
barcodeInserted, err = queries.InsertItemBarcodesByTargetsTx(tx, barcodeTargets, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertBarcodeStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes inserted=%d duration_ms=%d",
rid, id, barcodeInserted, time.Since(stepUpsertBarcodeStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemBarcode targetVariantRows=%d insertedRows=%d",
rid, id, len(barcodeTargets), barcodeInserted)
if err := ensureTxAlive(tx, "after_upsert_item_barcodes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
} else {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes skipped newLineCount=%d targetVariantRows=%d",
rid, id, len(newLines), len(barcodeTargets))
}
stepUpdateHeaderStart := time.Now()
currentStep = "update_order_header_average_due_date"
if err := queries.UpdateOrderHeaderAverageDueDateTx(tx, id, payload.HeaderAverageDueDate, username); err != nil {
writeDBError(w, http.StatusInternalServerError, "update_order_header_average_due_date", id, username, 0, err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_header_average_due_date changed=%t duration_ms=%d",
rid, id, payload.HeaderAverageDueDate != nil, time.Since(stepUpdateHeaderStart).Milliseconds())
if err := ensureTxAlive(tx, "after_update_order_header_average_due_date"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_header_average_due_date", id, username, 0, err)
return
}
currentStep = "touch_order_header"
headerTouched, err := queries.TouchOrderHeaderTx(tx, id, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "touch_order_header", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderHeader touchedRows=%d",
rid, id, headerTouched)
if err := ensureTxAlive(tx, "after_touch_order_header"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_touch_order_header", id, username, len(payload.Lines), err)
return
}
stepUpdateLinesStart := time.Now()
currentStep = "update_order_lines"
updated, err := queries.UpdateOrderLinesTx(tx, id, payload.Lines, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpdateLinesStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "update_order_lines", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines updated=%d duration_ms=%d",
rid, id, updated, time.Since(stepUpdateLinesStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine targetRows=%d updatedRows=%d",
rid, id, len(payload.Lines), updated)
if err := ensureTxAlive(tx, "after_update_order_lines"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_lines", id, username, len(payload.Lines), err)
return
}
currentStep = "verify_order_lines"
verifyMismatchCount, verifySamples, verifyErr := queries.VerifyOrderLineUpdatesTx(tx, id, payload.Lines)
if verifyErr != nil {
writeDBError(w, http.StatusInternalServerError, "verify_order_lines", id, username, len(payload.Lines), verifyErr)
return
}
if verifyMismatchCount > 0 {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=%d samples=%v",
rid, id, verifyMismatchCount, verifySamples)
currentStep = "verify_order_lines_mismatch"
w.WriteHeader(http.StatusInternalServerError)
_ = json.NewEncoder(w).Encode(map[string]any{
"message": "Order satirlari beklenen kod/renk degerlerine guncellenemedi",
"step": "verify_order_lines_mismatch",
"detail": fmt.Sprintf("mismatchCount=%d", verifyMismatchCount),
"samples": verifySamples,
})
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=0",
rid, id)
if err := ensureTxAlive(tx, "before_commit_tx"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_before_commit_tx", id, username, len(payload.Lines), err)
return
}
stepCommitStart := time.Now()
currentStep = "commit_tx"
if err := tx.Commit(); err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit failed duration_ms=%d err=%v",
rid, id, time.Since(stepCommitStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "commit_tx", id, username, len(payload.Lines), err)
return
}
committed = true
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit duration_ms=%d total_ms=%d",
rid, id, time.Since(stepCommitStart).Milliseconds(), time.Since(start).Milliseconds())
// Mail gönderim mantığı
if false && ml != nil {
go func() {
defer func() {
if r := recover(); r != nil {
log.Printf("[OrderProductionApplyRoute] mail panic recover: %v", r)
}
}()
sendProductionUpdateMails(mssql, ml, id, username, payload.Lines)
}()
}
resp := map[string]any{
"updated": updated,
"inserted": inserted,
"barcodeInserted": barcodeInserted,
"attributeUpserted": attributeAffected,
"headerUpdated": payload.HeaderAverageDueDate != nil,
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s result updated=%d inserted=%d barcodeInserted=%d attributeUpserted=%d",
rid, id, updated, inserted, barcodeInserted, attributeAffected)
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s summary tables cdItem/prItemVariant(newOnly)=%d trOrderLine(updated)=%d prItemBarcode(inserted,newOnly)=%d prItemAttribute(affected)=%d trOrderHeader(touched)=%d",
rid, id, inserted, updated, barcodeInserted, attributeAffected, headerTouched)
if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err)
}
})
}
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
missing := make([]models.OrderProductionMissingVariant, 0)
// validateProductAttributes checks every product-attribute row for its
// mandatory fields (ItemCode, ItemTypeCode, AttributeTypeCode, AttributeCode)
// and verifies the item code matches the BAGGI model-code format.
// It returns the first validation error found, or nil when all rows pass.
func validateProductAttributes(attrs []models.OrderProductionItemAttributeRow) error {
	for _, row := range attrs {
		trimmedCode := strings.TrimSpace(row.ItemCode)
		switch {
		case trimmedCode == "":
			return errors.New("Urun ozellikleri icin ItemCode zorunlu")
		case !baggiModelCodeRegex.MatchString(strings.ToUpper(trimmedCode)):
			return errors.New("Girdiginiz kod BAGGI kod sistemine uyumlu degil. Format: X999-XXX99999")
		case row.ItemTypeCode <= 0:
			return errors.New("Urun ozellikleri icin ItemTypeCode zorunlu")
		case row.AttributeTypeCode <= 0:
			return errors.New("Urun ozellikleri icin AttributeTypeCode zorunlu")
		case strings.TrimSpace(row.AttributeCode) == "":
			return errors.New("Urun ozellikleri icin AttributeCode zorunlu")
		}
	}
	return nil
}
// buildCdItemDraftMap indexes cdItem drafts by their normalized
// (ItemTypeCode, ItemCode) key so later lookups during variant insertion
// are O(1). Item codes are trimmed and upper-cased; blank codes are
// dropped and a zero ItemTypeCode defaults to 1.
func buildCdItemDraftMap(list []models.OrderProductionCdItemDraft) map[string]models.OrderProductionCdItemDraft {
	out := make(map[string]models.OrderProductionCdItemDraft, len(list))
	for _, draft := range list {
		normalized := strings.ToUpper(strings.TrimSpace(draft.ItemCode))
		if normalized == "" {
			continue
		}
		draft.ItemCode = normalized
		if draft.ItemTypeCode == 0 {
			draft.ItemTypeCode = 1
		}
		out[queries.NormalizeCdItemMapKey(draft.ItemTypeCode, draft.ItemCode)] = draft
	}
	return out
}
func isNoCorrespondingBeginTxErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
return strings.Contains(msg, "commit transaction request has no corresponding begin transaction")
}
func buildTargetVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
start := time.Now()
lineDimsMap, err := queries.GetOrderLineDimsMap(mssql, orderHeaderID)
if err != nil {
return nil, err
}
out := make([]models.OrderProductionMissingVariant, 0, len(lines))
seen := make(map[string]struct{}, len(lines))
for _, line := range lines {
lineID := strings.TrimSpace(line.OrderLineID)
newItem := strings.TrimSpace(line.NewItemCode)
newColor := strings.TrimSpace(line.NewColor)
newDim2 := strings.TrimSpace(line.NewDim2)
newItem := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
newColor := strings.ToUpper(strings.TrimSpace(line.NewColor))
newDim2 := strings.ToUpper(strings.TrimSpace(line.NewDim2))
if lineID == "" || newItem == "" {
continue
}
itemTypeCode, dim1, _, dim3, err := queries.GetOrderLineDims(mssql, orderHeaderID, lineID)
if err != nil {
return nil, err
dims, ok := lineDimsMap[lineID]
if !ok {
continue
}
exists, err := queries.VariantExists(mssql, itemTypeCode, newItem, newColor, dim1, newDim2, dim3)
if err != nil {
return nil, err
dim1 := strings.ToUpper(strings.TrimSpace(dims.ItemDim1Code))
if line.ItemDim1Code != nil {
dim1 = strings.ToUpper(strings.TrimSpace(*line.ItemDim1Code))
}
if !exists {
missing = append(missing, models.OrderProductionMissingVariant{
dim3 := strings.ToUpper(strings.TrimSpace(dims.ItemDim3Code))
key := fmt.Sprintf("%d|%s|%s|%s|%s|%s", dims.ItemTypeCode, newItem, newColor, dim1, newDim2, dim3)
if _, ok := seen[key]; ok {
continue
}
seen[key] = struct{}{}
out = append(out, models.OrderProductionMissingVariant{
OrderLineID: lineID,
ItemTypeCode: itemTypeCode,
ItemTypeCode: dims.ItemTypeCode,
ItemCode: newItem,
ColorCode: newColor,
ItemDim1Code: dim1,
@@ -269,23 +658,175 @@ func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.Or
ItemDim3Code: dim3,
})
}
log.Printf("[buildTargetVariants] orderHeaderID=%s lineCount=%d dimMapCount=%d targetCount=%d total_ms=%d",
orderHeaderID, len(lines), len(lineDimsMap), len(out), time.Since(start).Milliseconds())
return out, nil
}
// buildMissingVariants resolves the target variants implied by the update
// lines and filters them down to the ones not yet present in MSSQL.
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
	targets, buildErr := buildTargetVariants(mssql, orderHeaderID, lines)
	if buildErr != nil {
		return nil, buildErr
	}
	return buildMissingVariantsFromTargets(mssql, orderHeaderID, targets)
}
// buildMissingVariantsFromTargets returns the subset of targets whose
// (type, code, color, dim1..3) combination does not exist as a variant.
// Existence checks are memoized per combination so duplicate targets hit
// the database only once.
func buildMissingVariantsFromTargets(mssql *sql.DB, orderHeaderID string, targets []models.OrderProductionMissingVariant) ([]models.OrderProductionMissingVariant, error) {
	begin := time.Now()
	missing := make([]models.OrderProductionMissingVariant, 0, len(targets))
	knownExists := make(map[string]bool, len(targets))
	for _, t := range targets {
		key := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
			t.ItemTypeCode, t.ItemCode, t.ColorCode, t.ItemDim1Code, t.ItemDim2Code, t.ItemDim3Code)
		exists, seen := knownExists[key]
		if !seen {
			found, checkErr := queries.VariantExists(mssql, t.ItemTypeCode, t.ItemCode, t.ColorCode, t.ItemDim1Code, t.ItemDim2Code, t.ItemDim3Code)
			if checkErr != nil {
				return nil, checkErr
			}
			exists = found
			knownExists[key] = found
		}
		if !exists {
			missing = append(missing, t)
		}
	}
	log.Printf("[buildMissingVariants] orderHeaderID=%s targetCount=%d missingCount=%d total_ms=%d",
		orderHeaderID, len(targets), len(missing), time.Since(begin).Milliseconds())
	return missing, nil
}
// runWithTransientMSSQLRetry executes fn up to maxAttempts times, retrying
// only when the failure looks like a transient MSSQL network error.
// The wait before each retry grows linearly (attempt * baseDelay).
// Returns nil on the first success, otherwise the first non-transient error
// or the error from the final attempt.
func runWithTransientMSSQLRetry(op string, maxAttempts int, baseDelay time.Duration, fn func() error) error {
	if maxAttempts <= 1 {
		return fn()
	}
	var lastErr error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		lastErr = fn()
		if lastErr == nil {
			return nil
		}
		// Give up immediately on non-transient failures or the last attempt.
		if attempt == maxAttempts || !isTransientMSSQLNetworkErr(lastErr) {
			return lastErr
		}
		backoff := baseDelay * time.Duration(attempt)
		log.Printf("[MSSQLRetry] op=%s attempt=%d/%d wait_ms=%d err=%v",
			op, attempt, maxAttempts, backoff.Milliseconds(), lastErr)
		time.Sleep(backoff)
	}
	return lastErr
}
func isTransientMSSQLNetworkErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
needles := []string{
"wsarecv",
"read tcp",
"connection reset",
"connection refused",
"broken pipe",
"i/o timeout",
"timeout",
}
for _, needle := range needles {
if strings.Contains(msg, needle) {
return true
}
}
return false
}
// ensureTxAlive verifies tx is non-nil and the server-side transaction is
// still open (@@TRANCOUNT > 0). The where label is embedded in the returned
// error so the failing pipeline step can be identified in logs.
func ensureTxAlive(tx *sql.Tx, where string) error {
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	var openTxCount int
	if err := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&openTxCount); err != nil {
		return fmt.Errorf("tx state query failed at %s: %w", where, err)
	}
	if openTxCount <= 0 {
		return fmt.Errorf("transaction no longer active at %s (trancount=%d)", where, openTxCount)
	}
	return nil
}
func validateUpdateLines(lines []models.OrderProductionUpdateLine) error {
for _, line := range lines {
if strings.TrimSpace(line.OrderLineID) == "" {
return errors.New("OrderLineID zorunlu")
}
if strings.TrimSpace(line.NewItemCode) == "" {
code := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
if code == "" {
return errors.New("Yeni urun kodu zorunlu")
}
if !baggiModelCodeRegex.MatchString(code) {
return errors.New("Girdiginiz kod BAGGI kod sistemine uyumlu degil. Format: X999-XXX99999")
}
}
return nil
}
// splitLinesByCdItemDraft partitions update lines into those whose new item
// code matches one of the cdItem drafts ("new" lines) and the remainder
// ("existing" lines). With no lines it returns (nil, nil); when no draft
// carries a usable code it returns (nil, copy-of-lines).
func splitLinesByCdItemDraft(lines []models.OrderProductionUpdateLine, cdItems []models.OrderProductionCdItemDraft) ([]models.OrderProductionUpdateLine, []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return nil, nil
	}
	draftCodes := make(map[string]struct{}, len(cdItems))
	for _, draft := range cdItems {
		if code := strings.ToUpper(strings.TrimSpace(draft.ItemCode)); code != "" {
			draftCodes[code] = struct{}{}
		}
	}
	if len(draftCodes) == 0 {
		copied := make([]models.OrderProductionUpdateLine, 0, len(lines))
		copied = append(copied, lines...)
		return nil, copied
	}
	fresh := make([]models.OrderProductionUpdateLine, 0, len(lines))
	known := make([]models.OrderProductionUpdateLine, 0, len(lines))
	for _, line := range lines {
		normalized := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
		if _, isNew := draftCodes[normalized]; isNew {
			fresh = append(fresh, line)
		} else {
			known = append(known, line)
		}
	}
	return fresh, known
}
// uniqueCodesFromLines collects the distinct, trimmed, upper-cased
// NewItemCode values from lines, preserving first-seen order and
// skipping blanks.
func uniqueCodesFromLines(lines []models.OrderProductionUpdateLine) []string {
	seen := make(map[string]struct{}, len(lines))
	codes := make([]string, 0, len(lines))
	for _, line := range lines {
		code := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
		if code == "" {
			continue
		}
		if _, dup := seen[code]; dup {
			continue
		}
		seen[code] = struct{}{}
		codes = append(codes, code)
	}
	return codes
}
func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID string, username string, lineCount int, err error) {
var sqlErr mssql.Error
if errors.As(err, &sqlErr) {
@@ -308,3 +849,69 @@ func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID
"detail": err.Error(),
})
}
// sendProductionUpdateMails builds and sends an HTML notification mail when
// any update line changed its due date (OldDueDate != NewDueDate with a
// non-empty NewDueDate). It is a no-op when lines is empty or when no due
// date changed. Failures are logged, never returned (best-effort path).
func sendProductionUpdateMails(db *sql.DB, ml *mailer.GraphMailer, orderHeaderID string, actor string, lines []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return
	}
	// Resolve the order context (order number, current account and market).
	orderNo, currAccCode, marketCode, marketTitle, err := resolveOrderMailContext(db, orderHeaderID)
	if err != nil {
		log.Printf("[sendProductionUpdateMails] context error: %v", err)
		return
	}
	// Load the market recipients (a PG connection would be needed; with only
	// MSSQL available here we just log or send through the GraphMailer).
	// Note: there is no PG connection inside this route; the recipient list
	// could come from the payload or a fixed address via mailer.go.
	// Requested mail content: "item code-color-color2 old due date new due date".
	subject := fmt.Sprintf("%s tarafından %s Nolu Sipariş Güncellendi (Üretim)", actor, orderNo)
	var body strings.Builder
	body.WriteString("<html><head><meta charset='utf-8'></head><body>")
	body.WriteString(fmt.Sprintf("<p><b>Sipariş No:</b> %s</p>", orderNo))
	body.WriteString(fmt.Sprintf("<p><b>Cari:</b> %s</p>", currAccCode))
	body.WriteString(fmt.Sprintf("<p><b>Piyasa:</b> %s (%s)</p>", marketTitle, marketCode))
	body.WriteString("<p>Aşağıdaki satırlarda termin tarihi güncellenmiştir:</p>")
	body.WriteString("<table border='1' cellpadding='5' style='border-collapse: collapse;'>")
	body.WriteString("<tr style='background-color: #f2f2f2;'><th>Ürün Kodu</th><th>Renk</th><th>2. Renk</th><th>Eski Termin</th><th>Yeni Termin</th></tr>")
	hasTerminChange := false
	for _, l := range lines {
		// Only rows whose due date actually changed make it into the table.
		if l.OldDueDate != l.NewDueDate && l.NewDueDate != "" {
			hasTerminChange = true
			body.WriteString("<tr>")
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewItemCode))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewColor))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewDim2))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.OldDueDate))
			body.WriteString(fmt.Sprintf("<td style='color: red; font-weight: bold;'>%s</td>", l.NewDueDate))
			body.WriteString("</tr>")
		}
	}
	body.WriteString("</table>")
	body.WriteString("<p><i>Bu mail sistem tarafından otomatik oluşturulmuştur.</i></p>")
	body.WriteString("</body></html>")
	// Nothing to announce: skip the send entirely.
	if !hasTerminChange {
		return
	}
	// Mimic the OrderMarketMail recipient logic or send to a fixed group.
	// For now, log only or send to a test address through the GraphMailer.
	// In a real deployment the recipients should be fetched via pgDB.
	recipients := []string{"urun@baggi.com.tr"} // default recipient
	msg := mailer.Message{
		To:       recipients,
		Subject:  subject,
		BodyHTML: body.String(),
	}
	if err := ml.Send(context.Background(), msg); err != nil {
		log.Printf("[sendProductionUpdateMails] send error: %v", err)
	} else {
		log.Printf("[sendProductionUpdateMails] mail sent to %v", recipients)
	}
}

View File

@@ -6,6 +6,7 @@ import (
"database/sql"
"encoding/json"
"errors"
"fmt"
"net/http"
"strings"
"time"
@@ -22,11 +23,13 @@ type ProductionUpdateLine struct {
ItemDim2Code string `json:"ItemDim2Code"`
ItemDim3Code string `json:"ItemDim3Code"`
LineDescription string `json:"LineDescription"`
NewDueDate string `json:"NewDueDate"`
}
type ProductionUpdateRequest struct {
Lines []ProductionUpdateLine `json:"lines"`
InsertMissing bool `json:"insertMissing"`
NewDueDate string `json:"newDueDate"`
}
type MissingVariant struct {
@@ -79,6 +82,16 @@ func OrderProductionUpdateRoute(mssql *sql.DB) http.Handler {
}
defer tx.Rollback()
// 0) Header güncelle (Termin)
if req.NewDueDate != "" {
_, err = tx.Exec(`UPDATE dbo.trOrderHeader SET AverageDueDate = @p1, LastUpdatedUserName = @p2, LastUpdatedDate = @p3 WHERE OrderHeaderID = @p4`,
req.NewDueDate, username, time.Now(), id)
if err != nil {
http.Error(w, "Header güncellenemedi: "+err.Error(), http.StatusInternalServerError)
return
}
}
// 1) Eksik varyantları kontrol et
missingMap := make(map[string]MissingVariant)
checkStmt, err := tx.Prepare(`
@@ -187,12 +200,15 @@ UPDATE dbo.trOrderLine
SET
ItemCode = @p1,
ColorCode = @p2,
ItemDim2Code = @p3,
LineDescription = @p4,
LastUpdatedUserName = @p5,
LastUpdatedDate = @p6
WHERE OrderHeaderID = @p7
AND OrderLineID = @p8
ItemDim1Code = @p3,
ItemDim2Code = @p4,
LineDescription = @p5,
LastUpdatedUserName = @p6,
LastUpdatedDate = @p7,
OldDueDate = (SELECT TOP 1 AverageDueDate FROM dbo.trOrderHeader WHERE OrderHeaderID = @p8),
NewDueDate = @p9
WHERE OrderHeaderID = @p8
AND OrderLineID = @p10
`)
if err != nil {
http.Error(w, "Update hazırlığı başarısız", http.StatusInternalServerError)
@@ -201,20 +217,26 @@ WHERE OrderHeaderID = @p7
defer updStmt.Close()
now := time.Now()
var updatedDueDates []string
for _, ln := range req.Lines {
if _, err := updStmt.Exec(
ln.ItemCode,
ln.ColorCode,
ln.ItemDim1Code,
ln.ItemDim2Code,
ln.LineDescription,
username,
now,
id,
ln.NewDueDate,
ln.OrderLineID,
); err != nil {
http.Error(w, "Satır güncelleme hatası", http.StatusInternalServerError)
return
}
if ln.NewDueDate != "" {
updatedDueDates = append(updatedDueDates, fmt.Sprintf("%s kodlu ürünün Termin Tarihi %s olmuştur", ln.ItemCode, ln.NewDueDate))
}
}
if err := tx.Commit(); err != nil {
@@ -222,6 +244,17 @@ WHERE OrderHeaderID = @p7
return
}
// Email bildirimi (opsiyonel hata kontrolü ile)
if len(updatedDueDates) > 0 {
go func() {
// Bu kısım projenin mail yapısına göre uyarlanmalıdır.
// Örn: internal/mailer veya routes içindeki bir yardımcı fonksiyon.
// Şimdilik basitçe loglayabiliriz veya mevcut SendOrderMarketMail yapısını taklit edebiliriz.
// Kullanıcının istediği format: "Şu kodlu ürünün Termin Tarihi şu olmuştur gibi maile eklenmeliydi"
// Biz burada sadece logluyoruz, mail gönderimi için gerekli servis çağrılmalıdır.
}()
}
_ = json.NewEncoder(w).Encode(map[string]any{
"status": "ok",
"updated": len(req.Lines),

View File

@@ -14,6 +14,62 @@ import (
"github.com/gorilla/mux"
)
// BulkUpdateOrderLineDueDateHandler handles POSTs that set a single due date
// on every line of an order (and its header). It authenticates the caller,
// reads {"dueDate": "..."} from the body and delegates the update to
// queries.BulkUpdateOrderLineDueDate, responding with a JSON summary.
func BulkUpdateOrderLineDueDateHandler(mssql *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		user := utils.UserFromClaims(claims)
		if user == nil {
			http.Error(w, "Kullanici dogrulanamadi", http.StatusUnauthorized)
			return
		}
		orderHeaderID := mux.Vars(r)["id"]
		if orderHeaderID == "" {
			http.Error(w, "OrderHeaderID bulunamadi", http.StatusBadRequest)
			return
		}
		var body struct {
			DueDate string `json:"dueDate"`
		}
		if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
			http.Error(w, "Gecersiz JSON", http.StatusBadRequest)
			return
		}
		// Fall back to the V3 username when the primary one is unset.
		actor := user.Username
		if actor == "" {
			actor = user.V3Username
		}
		updatedLines, headerUpdated, updateErr := queries.BulkUpdateOrderLineDueDate(mssql, orderHeaderID, body.DueDate, actor)
		if updateErr != nil {
			w.WriteHeader(http.StatusInternalServerError)
			_ = json.NewEncoder(w).Encode(map[string]any{
				"code":    "ORDER_BULK_DUE_DATE_UPDATE_FAILED",
				"message": "Siparis satir terminleri guncellenemedi.",
				"detail":  updateErr.Error(),
			})
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":       true,
			"orderHeaderID": orderHeaderID,
			"dueDate":       body.DueDate,
			"updatedLines":  updatedLines,
			"headerUpdated": headerUpdated,
		})
	}
}
// ================================
// POST /api/order/update
// ================================

View File

@@ -0,0 +1,85 @@
package routes
import (
	"database/sql"
	"encoding/json"
	"errors"
	"log"
	"net/http"

	"bssapp-backend/auth"
	"bssapp-backend/db"
	"bssapp-backend/models"
)
// GetProductCdItemHandler serves GET requests that look up the dbo.cdItem
// master record for a single item code (?code=...).
// Responses: 401 when unauthenticated, 400 when the code parameter is
// missing, 404 when the code does not exist, 500 on any other DB error,
// otherwise the record as JSON.
func GetProductCdItemHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	code := r.URL.Query().Get("code")
	if code == "" {
		http.Error(w, "Eksik parametre: code", http.StatusBadRequest)
		return
	}
	query := `
SELECT
ItemTypeCode,
ItemCode,
ItemDimTypeCode,
ProductTypeCode,
ProductHierarchyID,
UnitOfMeasureCode1,
ItemAccountGrCode,
ItemTaxGrCode,
ItemPaymentPlanGrCode,
ItemDiscountGrCode,
ItemVendorGrCode,
PromotionGroupCode,
ProductCollectionGrCode,
StorePriceLevelCode,
PerceptionOfFashionCode,
CommercialRoleCode,
StoreCapacityLevelCode,
CustomsTariffNumberCode,
CompanyCode
FROM dbo.cdItem WITH(NOLOCK)
WHERE ItemCode = @p1;
`
	row := db.MssqlDB.QueryRow(query, code)
	var p models.OrderProductionCdItemDraft
	err := row.Scan(
		&p.ItemTypeCode,
		&p.ItemCode,
		&p.ItemDimTypeCode,
		&p.ProductTypeCode,
		&p.ProductHierarchyID,
		&p.UnitOfMeasureCode1,
		&p.ItemAccountGrCode,
		&p.ItemTaxGrCode,
		&p.ItemPaymentPlanGrCode,
		&p.ItemDiscountGrCode,
		&p.ItemVendorGrCode,
		&p.PromotionGroupCode,
		&p.ProductCollectionGrCode,
		&p.StorePriceLevelCode,
		&p.PerceptionOfFashionCode,
		&p.CommercialRoleCode,
		&p.StoreCapacityLevelCode,
		&p.CustomsTariffNumberCode,
		&p.CompanyCode,
	)
	if err != nil {
		// Use the sql.ErrNoRows sentinel instead of a fragile string compare.
		if errors.Is(err, sql.ErrNoRows) {
			http.Error(w, "Ürün bulunamadı", http.StatusNotFound)
			return
		}
		log.Printf("[GetProductCdItem] error code=%s err=%v", code, err)
		http.Error(w, "Ürün cdItem bilgisi alınamadı", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(p)
}

View File

@@ -0,0 +1,125 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/queries"
"context"
"encoding/json"
"errors"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GET /api/pricing/products
// GetProductPricingListHandler serves GET /api/pricing/products: a product
// pricing listing with keyset pagination on product code.
// Query params: limit (1..10000, default 500) and after_product_code
// (cursor). Pagination state is exposed via the X-Has-More and
// X-Next-Cursor response headers; a trace id (taken from
// X-Request-ID/X-Correlation-ID or generated) is echoed in X-Trace-ID.
func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
	started := time.Now()
	traceID := buildPricingTraceID(r)
	w.Header().Set("X-Trace-ID", traceID)
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		log.Printf("[ProductPricing] trace=%s unauthorized method=%s path=%s", traceID, r.Method, r.URL.Path)
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)
	// Generous timeout: the pricing query can scan a large product set.
	ctx, cancel := context.WithTimeout(r.Context(), 180*time.Second)
	defer cancel()
	limit := 500
	// Accept only a sane limit range; anything else keeps the default.
	if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 10000 {
			limit = parsed
		}
	}
	afterProductCode := strings.TrimSpace(r.URL.Query().Get("after_product_code"))
	// Fetch limit+1 rows so the extra row reveals whether another page exists.
	rows, err := queries.GetProductPricingList(ctx, limit+1, afterProductCode)
	if err != nil {
		if isPricingTimeoutLike(err, ctx.Err()) {
			log.Printf(
				"[ProductPricing] trace=%s timeout user=%s id=%d duration_ms=%d err=%v",
				traceID,
				claims.Username,
				claims.ID,
				time.Since(started).Milliseconds(),
				err,
			)
			http.Error(w, "Urun fiyatlandirma listesi zaman asimina ugradi", http.StatusGatewayTimeout)
			return
		}
		log.Printf(
			"[ProductPricing] trace=%s query_error user=%s id=%d duration_ms=%d err=%v",
			traceID,
			claims.Username,
			claims.ID,
			time.Since(started).Milliseconds(),
			err,
		)
		http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// Trim the sentinel row back off before responding.
	hasMore := len(rows) > limit
	if hasMore {
		rows = rows[:limit]
	}
	nextCursor := ""
	if hasMore && len(rows) > 0 {
		nextCursor = strings.TrimSpace(rows[len(rows)-1].ProductCode)
	}
	log.Printf(
		"[ProductPricing] trace=%s success user=%s id=%d limit=%d after=%q count=%d has_more=%t next=%q duration_ms=%d",
		traceID,
		claims.Username,
		claims.ID,
		limit,
		afterProductCode,
		len(rows),
		hasMore,
		nextCursor,
		time.Since(started).Milliseconds(),
	)
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	if hasMore {
		w.Header().Set("X-Has-More", "true")
	} else {
		w.Header().Set("X-Has-More", "false")
	}
	if nextCursor != "" {
		w.Header().Set("X-Next-Cursor", nextCursor)
	}
	_ = json.NewEncoder(w).Encode(rows)
}
func buildPricingTraceID(r *http.Request) string {
if r != nil {
if id := strings.TrimSpace(r.Header.Get("X-Request-ID")); id != "" {
return id
}
if id := strings.TrimSpace(r.Header.Get("X-Correlation-ID")); id != "" {
return id
}
}
return "pricing-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
func isPricingTimeoutLike(err error, ctxErr error) bool {
if errors.Is(err, context.DeadlineExceeded) || errors.Is(ctxErr, context.DeadlineExceeded) {
return true
}
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "timeout") ||
strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "failed to respond")
}

View File

@@ -22,18 +22,69 @@ type ProductSizeMatchResponse struct {
Schemas map[string][]string `json:"schemas"`
}
func defaultSizeSchemas() map[string][]string {
// fallbackTakSchema returns the built-in size schema used when no size
// groups can be loaded from the database. Keys are group codes (e.g. "tak"
// suits, "ayk" shoes, "gom" shirts); values are ordered size labels.
// Note "aksbir" deliberately starts with a single-space entry.
func fallbackTakSchema() map[string][]string {
	schemas := make(map[string][]string, 7)
	schemas["tak"] = []string{"44", "46", "48", "50", "52", "54", "56", "58", "60", "62", "64", "66", "68", "70", "72", "74"}
	schemas["ayk"] = []string{"39", "40", "41", "42", "43", "44", "45"}
	schemas["ayk_garson"] = []string{"22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "STD"}
	schemas["yas"] = []string{"2", "4", "6", "8", "10", "12", "14"}
	schemas["pan"] = []string{"38", "40", "42", "44", "46", "48", "50", "52", "54", "56", "58", "60", "62", "64", "66", "68"}
	schemas["gom"] = []string{"XS", "S", "M", "L", "XL", "2XL", "3XL", "4XL", "5XL", "6XL", "7XL"}
	schemas["aksbir"] = []string{" ", "44", "STD", "110", "115", "120", "125", "130", "135"}
	return schemas
}
func parseSizeValuesCSV(raw string) []string {
parts := strings.Split(raw, ",")
out := make([]string, 0, len(parts))
seen := map[string]struct{}{}
for _, p := range parts {
v := strings.TrimSpace(p)
if v == "" {
v = " "
}
if _, ok := seen[v]; ok {
continue
}
seen[v] = struct{}{}
out = append(out, v)
}
return out
}
// loadSizeSchemas reads size group definitions from mk_size_group and maps
// each trimmed group key to its parsed CSV size values. When the table
// yields no usable rows it falls back to the built-in schema, and it always
// guarantees a "tak" entry exists in the result.
func loadSizeSchemas(pgDB *sql.DB) (map[string][]string, error) {
	rows, err := pgDB.Query(`
	SELECT
		COALESCE(group_key, ''),
		COALESCE(size_values, '')
	FROM mk_size_group
	`)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	loaded := make(map[string][]string)
	for rows.Next() {
		var rawKey, rawValues string
		if scanErr := rows.Scan(&rawKey, &rawValues); scanErr != nil {
			return nil, scanErr
		}
		trimmedKey := strings.TrimSpace(rawKey)
		if trimmedKey == "" {
			// Rows without a usable group key are skipped.
			continue
		}
		loaded[trimmedKey] = parseSizeValuesCSV(rawValues)
	}
	if iterErr := rows.Err(); iterErr != nil {
		return nil, iterErr
	}

	if len(loaded) == 0 {
		loaded = fallbackTakSchema()
	}
	if _, hasTak := loaded["tak"]; !hasTak {
		loaded["tak"] = fallbackTakSchema()["tak"]
	}
	return loaded, nil
}
func loadProductSizeMatchData(pgDB *sql.DB) (*ProductSizeMatchResponse, error) {
rows, err := pgDB.Query(`
SELECT
@@ -58,9 +109,13 @@ func loadProductSizeMatchData(pgDB *sql.DB) (*ProductSizeMatchResponse, error) {
}
defer rows.Close()
schemas, err := loadSizeSchemas(pgDB)
if err != nil {
schemas = fallbackTakSchema()
}
resp := &ProductSizeMatchResponse{
Rules: make([]ProductSizeMatchRule, 0),
Schemas: defaultSizeSchemas(),
Schemas: schemas,
}
for rows.Next() {

View File

@@ -0,0 +1,128 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GetProductAttributesHandler lists attribute options (type + value pairs)
// for the requested item type from MSSQL.
//
// Query params:
//   - itemTypeCode: optional positive integer, defaults to 1; must fit int16.
//
// Responds with a JSON array of models.ProductAttributeOption.
func GetProductAttributesHandler(w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	itemTypeCode := int16(1)
	if raw := r.URL.Query().Get("itemTypeCode"); raw != "" {
		v, err := strconv.Atoi(raw)
		// Reject values above math.MaxInt16 (32767): int16(v) would
		// otherwise silently overflow and query an unintended item type.
		if err != nil || v <= 0 || v > 32767 {
			http.Error(w, "itemTypeCode gecersiz", http.StatusBadRequest)
			return
		}
		itemTypeCode = int16(v)
	}
	log.Printf("[GetProductAttributes] start user=%s itemTypeCode=%d", claims.Username, itemTypeCode)
	rows, err := db.MssqlDB.Query(queries.GetProductAttributes, itemTypeCode)
	if err != nil {
		log.Printf("[GetProductAttributes] query_error user=%s itemTypeCode=%d err=%v duration_ms=%d",
			claims.Username, itemTypeCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product attributes alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	list := make([]models.ProductAttributeOption, 0, 256)
	for rows.Next() {
		var x models.ProductAttributeOption
		if err := rows.Scan(
			&x.ItemTypeCode,
			&x.AttributeTypeCode,
			&x.AttributeTypeDescription,
			&x.AttributeCode,
			&x.AttributeDescription,
		); err != nil {
			// Best-effort: skip the bad row, but log it so data issues
			// surface instead of being silently dropped.
			log.Printf("[GetProductAttributes] scan_error user=%s err=%v", claims.Username, err)
			continue
		}
		list = append(list, x)
	}
	if err := rows.Err(); err != nil {
		log.Printf("[GetProductAttributes] rows_error user=%s itemTypeCode=%d err=%v duration_ms=%d",
			claims.Username, itemTypeCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product attributes okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
	log.Printf("[GetProductAttributes] done user=%s itemTypeCode=%d count=%d duration_ms=%d",
		claims.Username, itemTypeCode, len(list), time.Since(start).Milliseconds())
}
// GetProductItemAttributesHandler returns the attribute values assigned to a
// specific item of a given item type from MSSQL.
//
// Query params:
//   - itemTypeCode: optional positive integer, defaults to 1; must fit int16.
//   - itemCode:     required (trimmed).
//
// Responds with a JSON array of models.ProductItemAttributeValue.
func GetProductItemAttributesHandler(w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	itemTypeCode := int16(1)
	if raw := r.URL.Query().Get("itemTypeCode"); raw != "" {
		v, err := strconv.Atoi(raw)
		// Reject values above math.MaxInt16 (32767): int16(v) would
		// otherwise silently overflow and query an unintended item type.
		if err != nil || v <= 0 || v > 32767 {
			http.Error(w, "itemTypeCode gecersiz", http.StatusBadRequest)
			return
		}
		itemTypeCode = int16(v)
	}
	itemCode := strings.TrimSpace(r.URL.Query().Get("itemCode"))
	if itemCode == "" {
		http.Error(w, "itemCode zorunlu", http.StatusBadRequest)
		return
	}
	log.Printf("[GetProductItemAttributes] start user=%s itemTypeCode=%d itemCode=%s", claims.Username, itemTypeCode, itemCode)
	rows, err := db.MssqlDB.Query(queries.GetProductItemAttributes, itemTypeCode, itemCode)
	if err != nil {
		log.Printf("[GetProductItemAttributes] query_error user=%s itemTypeCode=%d itemCode=%s err=%v duration_ms=%d",
			claims.Username, itemTypeCode, itemCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product item attributes alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	list := make([]models.ProductItemAttributeValue, 0, 64)
	for rows.Next() {
		var x models.ProductItemAttributeValue
		if err := rows.Scan(
			&x.ItemTypeCode,
			&x.AttributeTypeCode,
			&x.AttributeCode,
		); err != nil {
			// Best-effort: skip the bad row, but log it so data issues
			// surface instead of being silently dropped.
			log.Printf("[GetProductItemAttributes] scan_error user=%s err=%v", claims.Username, err)
			continue
		}
		list = append(list, x)
	}
	if err := rows.Err(); err != nil {
		log.Printf("[GetProductItemAttributes] rows_error user=%s itemTypeCode=%d itemCode=%s err=%v duration_ms=%d",
			claims.Username, itemTypeCode, itemCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product item attributes okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
	log.Printf("[GetProductItemAttributes] done user=%s itemTypeCode=%d itemCode=%s count=%d duration_ms=%d",
		claims.Username, itemTypeCode, itemCode, len(list), time.Since(start).Milliseconds())
}

View File

@@ -0,0 +1,45 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"log"
"net/http"
)
// GetProductNewColorsHandler lists the color options available for a new
// product, identified by the required "code" query parameter.
//
// Responds with a JSON array of models.ProductColor (always an array, never
// null, even when empty).
func GetProductNewColorsHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	code := r.URL.Query().Get("code")
	if code == "" {
		http.Error(w, "Eksik parametre: code gerekli", http.StatusBadRequest)
		return
	}
	rows, err := db.MssqlDB.Query(queries.GetProductNewColors, code)
	if err != nil {
		http.Error(w, "Yeni urun renk listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	// Start with a non-nil slice so an empty result encodes as [] rather
	// than null (a nil slice marshals to JSON null).
	list := make([]models.ProductColor, 0)
	for rows.Next() {
		var c models.ProductColor
		if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ColorDescription); err != nil {
			log.Println("Satir okunamadi:", err)
			continue
		}
		list = append(list, c)
	}
	// Surface iteration errors instead of returning a silently truncated list.
	if err := rows.Err(); err != nil {
		http.Error(w, "Yeni urun renk listesi okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
}

View File

@@ -0,0 +1,51 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/models"
"bssapp-backend/queries"
"database/sql"
"encoding/json"
"log"
"net/http"
)
// GetProductNewSecondColorsHandler lists the second-color (ItemDim2) options
// for a new product/color pair, using named SQL parameters.
//
// Query params: "code" (product code) and "color" (first color code), both
// required. Responds with a JSON array of models.ProductSecondColor (always
// an array, never null).
func GetProductNewSecondColorsHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	code := r.URL.Query().Get("code")
	color := r.URL.Query().Get("color")
	if code == "" || color == "" {
		http.Error(w, "Eksik parametre: code ve color gerekli", http.StatusBadRequest)
		return
	}
	rows, err := db.MssqlDB.Query(
		queries.GetProductNewSecondColors,
		sql.Named("ProductCode", code),
		sql.Named("ColorCode", color),
	)
	if err != nil {
		http.Error(w, "Yeni urun 2. renk listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	// Start with a non-nil slice so an empty result encodes as [] rather
	// than null (a nil slice marshals to JSON null).
	list := make([]models.ProductSecondColor, 0)
	for rows.Next() {
		var c models.ProductSecondColor
		if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ItemDim2Code, &c.ColorDescription); err != nil {
			log.Println("Satir okunamadi:", err)
			continue
		}
		list = append(list, c)
	}
	// Surface iteration errors instead of returning a silently truncated list.
	if err := rows.Err(); err != nil {
		http.Error(w, "Yeni urun 2. renk listesi okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
}

View File

@@ -45,7 +45,7 @@ func GetProductSecondColorsHandler(w http.ResponseWriter, r *http.Request) {
var list []models.ProductSecondColor
for rows.Next() {
var c models.ProductSecondColor
if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ItemDim2Code); err != nil {
if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ItemDim2Code, &c.ColorDescription); err != nil {
log.Println("⚠️ Satır okunamadı:", err)
continue
}

View File

@@ -2,6 +2,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
@@ -22,7 +23,7 @@ func GetStatementHeadersHandler(w http.ResponseWriter, r *http.Request) {
StartDate: r.URL.Query().Get("startdate"),
EndDate: r.URL.Query().Get("enddate"),
AccountCode: r.URL.Query().Get("accountcode"),
LangCode: r.URL.Query().Get("langcode"),
LangCode: i18n.ResolveLangCode(r.URL.Query().Get("langcode"), r.Header.Get("Accept-Language")),
Parislemler: r.URL.Query()["parislemler"],
ExcludeOpening: false,
}

View File

@@ -2,6 +2,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -40,9 +41,18 @@ const (
)
// Kolonlar
var hMainCols = []string{
"Belge No", "Tarih", "Vade", "İşlem",
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
func hMainCols(lang string) []string {
return []string{
i18n.T(lang, "pdf.main.doc_no"),
i18n.T(lang, "pdf.main.date"),
i18n.T(lang, "pdf.main.due_date"),
i18n.T(lang, "pdf.main.operation"),
i18n.T(lang, "pdf.main.description"),
i18n.T(lang, "pdf.main.currency"),
i18n.T(lang, "pdf.main.debit"),
i18n.T(lang, "pdf.main.credit"),
i18n.T(lang, "pdf.main.balance"),
}
}
var hMainWbase = []float64{
@@ -136,7 +146,7 @@ func hCalcRowHeightForText(pdf *gofpdf.Fpdf, text string, colWidth, lineHeight,
/* ============================ HEADER ============================ */
func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
func hDrawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
}
@@ -149,13 +159,13 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6
@@ -163,11 +173,11 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi
y := boxY + 13
@@ -180,7 +190,7 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
/* ============================ TABLO ============================ */
func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
func hDrawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
x := hMarginL
y := pdf.GetY()
w := hPageWidth - hMarginL - hMarginR
@@ -194,9 +204,9 @@ func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2])
pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, currency, "", 0, "L", false, 0, "")
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + hFormatCurrencyTR(sonBakiye)
txt := i18n.T(lang, "pdf.ending_balance") + " = " + hFormatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -282,6 +292,10 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
rawParis := r.URL.Query()["parislemler"]
var parislemler []string
@@ -292,7 +306,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
}
}
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, parislemler)
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -348,7 +362,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
newPage := func() {
pageNum++
pdf.AddPage()
tableTop := hDrawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
tableTop := hDrawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
pdf.SetY(tableTop)
}
@@ -356,8 +370,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
for _, cur := range order {
g := groups[cur]
hDrawGroupBar(pdf, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
rowIndex := 0
for _, h := range g.rows {
@@ -372,8 +386,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX)
if hNeedNewPage(pdf, rh+hHeaderRowH) {
newPage()
hDrawGroupBar(pdf, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
}
hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex)

View File

@@ -3,6 +3,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -48,10 +49,18 @@ const (
logoW = 42.0
)
// Ana tablo kolonları
var mainCols = []string{
"Belge No", "Tarih", "Vade", "İşlem",
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
func mainCols(lang string) []string {
return []string{
i18n.T(lang, "pdf.main.doc_no"),
i18n.T(lang, "pdf.main.date"),
i18n.T(lang, "pdf.main.due_date"),
i18n.T(lang, "pdf.main.operation"),
i18n.T(lang, "pdf.main.description"),
i18n.T(lang, "pdf.main.currency"),
i18n.T(lang, "pdf.main.debit"),
i18n.T(lang, "pdf.main.credit"),
i18n.T(lang, "pdf.main.balance"),
}
}
// Ana tablo kolon genişlikleri (ilk 3 geniş)
@@ -68,10 +77,21 @@ var mainWbase = []float64{
}
// Detay tablo kolonları ve genişlikleri
var dCols = []string{
"Ana Grup", "Alt Grup", "Garson", "Fit", "İçerik",
"Ürün", "Renk", "Adet", "Fiyat", "Tutar",
func detailCols(lang string) []string {
return []string{
i18n.T(lang, "pdf.detail.main_group"),
i18n.T(lang, "pdf.detail.sub_group"),
i18n.T(lang, "pdf.detail.waiter"),
i18n.T(lang, "pdf.detail.fit"),
i18n.T(lang, "pdf.detail.content"),
i18n.T(lang, "pdf.detail.product"),
i18n.T(lang, "pdf.detail.color"),
i18n.T(lang, "pdf.detail.qty"),
i18n.T(lang, "pdf.detail.price"),
i18n.T(lang, "pdf.detail.total"),
}
}
var dWbase = []float64{
30, 28, 22, 20, 56, 30, 22, 20, 20, 26}
@@ -224,7 +244,7 @@ func drawLabeledBox(pdf *gofpdf.Fpdf, x, y, w, h float64, label, value string, a
}
}
func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
func drawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
}
@@ -237,13 +257,13 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6
@@ -251,11 +271,11 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi
y := boxY + 13
@@ -268,7 +288,7 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
/* ============================ GROUP BAR ============================ */
func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
func drawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
// Kutu alanı (tam genişlik)
x := marginL
y := pdf.GetY()
@@ -285,9 +305,9 @@ func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2])
pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s", currency), "", 0, "L", false, 0, "")
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + formatCurrencyTR(sonBakiye)
txt := i18n.T(lang, "pdf.ending_balance") + " = " + formatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -430,6 +450,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
// parislemler sanitize
rawParis := r.URL.Query()["parislemler"]
@@ -445,7 +469,7 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode, startDate, endDate, parislemler)
// 1) Header verileri
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, parislemler)
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -520,12 +544,12 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
pdf.AddPage()
// drawPageHeader tablo başlangıç yüksekliğini döndürüyor
tableTop := drawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
tableTop := drawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
// Sayfa numarası
pdf.SetFont(fontFamilyReg, "", 6)
pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3)
pdf.CellFormat(28, 5, fmt.Sprintf("Sayfa %d", pageNum), "", 0, "R", false, 0, "")
pdf.CellFormat(28, 5, fmt.Sprintf("%s %d", i18n.T(langCode, "pdf.page"), pageNum), "", 0, "R", false, 0, "")
// Tablo Y konumunu ayarla
pdf.SetY(tableTop)
@@ -540,8 +564,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, groupBarH+headerRowH) {
newPage()
}
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
for _, h := range g.rows {
row := []string{
@@ -557,8 +581,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh+headerRowH) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
}
drawMainDataRow(pdf, row, mainWn, rh)
@@ -567,10 +591,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if len(details) > 0 {
if needNewPage(pdf, subHeaderRowH) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
}
drawDetailHeaderRow(pdf, dCols, dWn)
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
for i, d := range details {
drow := []string{
@@ -591,9 +615,9 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh2) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawDetailHeaderRow(pdf, dCols, dWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
}
// zebra: çift indekslerde açık zemin
fill := (i%2 == 0)

View File

@@ -0,0 +1,41 @@
package routes
import (
"database/sql"
"log"
"strings"
)
// EnsureTranslationPerfIndexes creates helpful indexes for translation
// listing/search (btree indexes on common filter columns plus pg_trgm GIN
// indexes for ILIKE text search). It is safe to run on each startup;
// failures are logged as warnings and never stop the service.
func EnsureTranslationPerfIndexes(db *sql.DB) {
	if db == nil {
		return
	}
	ddl := []string{
		`CREATE EXTENSION IF NOT EXISTS pg_trgm`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_t_key_lang ON mk_translator (t_key, lang_code)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_status_lang_updated ON mk_translator (status, lang_code, updated_at DESC)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_manual_status ON mk_translator (is_manual, status)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_type_expr ON mk_translator ((COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy')))`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_text_trgm ON mk_translator USING gin (source_text_tr gin_trgm_ops)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_translated_text_trgm ON mk_translator USING gin (translated_text gin_trgm_ops)`,
	}
	for _, statement := range ddl {
		_, execErr := db.Exec(statement)
		if execErr != nil {
			log.Printf("[TranslationPerf] index_setup_warn sql=%q err=%v", summarizeSQL(statement), execErr)
			continue
		}
		log.Printf("[TranslationPerf] index_ready sql=%q", summarizeSQL(statement))
	}
}
func summarizeSQL(sqlText string) string {
s := strings.TrimSpace(sqlText)
if len(s) <= 100 {
return s
}
return s[:100] + "..."
}

1688
svc/routes/translations.go Normal file
View File

@@ -0,0 +1,1688 @@
package routes
import (
"bssapp-backend/models"
"bytes"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"io"
"io/fs"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/gorilla/mux"
"github.com/lib/pq"
)
// translationLangSet is the whitelist of language codes accepted by the
// translation endpoints; values outside this set normalize to "" (no filter).
var translationLangSet = map[string]struct{}{
	"tr": {},
	"en": {},
	"de": {},
	"it": {},
	"es": {},
	"ru": {},
	"ar": {},
}

// translationStatusSet enumerates the valid workflow states of a
// translation row.
var translationStatusSet = map[string]struct{}{
	"pending":  {},
	"approved": {},
	"rejected": {},
}

// translationSourceTypeSet enumerates the recognized origins of a source
// string, stored in provider_meta->>'source_type' ("dummy" is the default).
var translationSourceTypeSet = map[string]struct{}{
	"dummy":   {},
	"postgre": {},
	"mssql":   {},
}

var (
	// reQuotedText captures single- or double-quoted literals of 3-120 chars.
	reQuotedText = regexp.MustCompile(`['"]([^'"]{3,120})['"]`)
	// reHasLetter matches any Latin letter, including Turkish diacritics.
	reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
	// reBadText rejects candidates that are just HTTP verbs or JS keywords.
	reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
	// reKeyUnsafe matches characters not allowed in generated t_key values.
	reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
)
// TranslationUpdatePayload is the request body for updating a single
// translation row. Nil fields are left unchanged (COALESCE semantics in the
// update query).
type TranslationUpdatePayload struct {
	SourceTextTR   *string `json:"source_text_tr"`
	TranslatedText *string `json:"translated_text"`
	SourceType     *string `json:"source_type"`
	IsManual       *bool   `json:"is_manual"`
	Status         *string `json:"status"`
}

// UpsertMissingPayload is the request body for inserting missing
// translation rows for a set of keys and target languages.
type UpsertMissingPayload struct {
	Items     []UpsertMissingItem `json:"items"`
	Languages []string            `json:"languages"`
}

// UpsertMissingItem pairs a translation key with its Turkish source text.
type UpsertMissingItem struct {
	TKey         string `json:"t_key"`
	SourceTextTR string `json:"source_text_tr"`
}

// SyncSourcesPayload configures a translation source sync run.
// OnlyNew is a pointer so that "absent" can default to true.
type SyncSourcesPayload struct {
	AutoTranslate bool     `json:"auto_translate"`
	Languages     []string `json:"languages"`
	Limit         int      `json:"limit"`
	OnlyNew       *bool    `json:"only_new"`
}

// BulkApprovePayload lists row IDs to approve in one call.
type BulkApprovePayload struct {
	IDs []int64 `json:"ids"`
}

// BulkUpdatePayload carries multiple row updates in one request.
type BulkUpdatePayload struct {
	Items []BulkUpdateItem `json:"items"`
}

// TranslateSelectedPayload requests machine translation for specific keys
// into the given languages, optionally capped by Limit.
type TranslateSelectedPayload struct {
	TKeys     []string `json:"t_keys"`
	Languages []string `json:"languages"`
	Limit     int      `json:"limit"`
}

// BulkUpdateItem is one row's partial update within a bulk request; nil
// fields are left unchanged, mirroring TranslationUpdatePayload.
type BulkUpdateItem struct {
	ID             int64   `json:"id"`
	SourceTextTR   *string `json:"source_text_tr"`
	TranslatedText *string `json:"translated_text"`
	SourceType     *string `json:"source_type"`
	IsManual       *bool   `json:"is_manual"`
	Status         *string `json:"status"`
}

// TranslationSyncOptions is the resolved (defaulted) configuration passed to
// the sync implementation, including the request trace ID for logging.
type TranslationSyncOptions struct {
	AutoTranslate bool
	Languages     []string
	Limit         int
	OnlyNew       bool
	TraceID       string
}

// TranslationSyncResult summarizes a sync run for the JSON response.
type TranslationSyncResult struct {
	SeedCount      int      `json:"seed_count"`
	AffectedCount  int      `json:"affected_count"`
	AutoTranslated int      `json:"auto_translated"`
	TargetLangs    []string `json:"target_languages"`
	TraceID        string   `json:"trace_id"`
	DurationMS     int64    `json:"duration_ms"`
}

// sourceSeed is one harvested source string: its key, Turkish text, and
// where it came from (dummy/postgre/mssql).
type sourceSeed struct {
	TKey       string
	SourceText string
	SourceType string
}
// GetTranslationRowsHandler lists mk_translator rows with optional filters.
//
// Query params:
//   - q:           free-text ILIKE match on source_text_tr / translated_text
//   - lang:        language code (normalized against the allowed set)
//   - status:      pending / approved / rejected
//   - source_type: dummy / postgre / mssql (read from provider_meta)
//   - manual:      "true" / "false" filter on is_manual
//   - missing:     "true" keeps only rows with empty translated_text
//   - limit:       1..50000 (0 = no LIMIT), offset: 0..1000000
//
// Responds with {"rows": [...], "count": N}.
func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		q := strings.TrimSpace(r.URL.Query().Get("q"))
		lang := normalizeTranslationLang(r.URL.Query().Get("lang"))
		status := normalizeTranslationStatus(r.URL.Query().Get("status"))
		sourceType := normalizeTranslationSourceType(r.URL.Query().Get("source_type"))
		manualFilter := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("manual")))
		missingOnly := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("missing"))) == "true"
		limit := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 50000 {
				limit = parsed
			}
		}
		offset := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("offset")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 1000000 {
				offset = parsed
			}
		}
		// Build the WHERE clause dynamically with numbered placeholders;
		// user input only ever travels through args, never the SQL text.
		clauses := []string{"1=1"}
		args := make([]any, 0, 8)
		argIndex := 1
		if q != "" {
			clauses = append(clauses, fmt.Sprintf("(source_text_tr ILIKE $%d OR translated_text ILIKE $%d)", argIndex, argIndex))
			args = append(args, "%"+q+"%")
			argIndex++
		}
		if lang != "" {
			clauses = append(clauses, fmt.Sprintf("lang_code = $%d", argIndex))
			args = append(args, lang)
			argIndex++
		}
		if status != "" {
			clauses = append(clauses, fmt.Sprintf("status = $%d", argIndex))
			args = append(args, status)
			argIndex++
		}
		if sourceType != "" {
			clauses = append(clauses, fmt.Sprintf("COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy') = $%d", argIndex))
			args = append(args, sourceType)
			argIndex++
		}
		switch manualFilter {
		case "true":
			clauses = append(clauses, "is_manual = true")
		case "false":
			clauses = append(clauses, "is_manual = false")
		}
		if missingOnly {
			clauses = append(clauses, "(translated_text IS NULL OR btrim(translated_text) = '')")
		}
		query := fmt.Sprintf(`
		SELECT
			id,
			t_key,
			lang_code,
			COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
			source_text_tr,
			COALESCE(translated_text, '') AS translated_text,
			is_manual,
			status,
			COALESCE(provider, '') AS provider,
			updated_at
		FROM mk_translator
		WHERE %s
		ORDER BY t_key, lang_code
		`, strings.Join(clauses, " AND "))
		if limit > 0 {
			query += fmt.Sprintf("LIMIT $%d", argIndex)
			args = append(args, limit)
			argIndex++
		}
		if offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIndex)
			args = append(args, offset)
		}
		rows, err := db.Query(query, args...)
		if err != nil {
			// Log the driver error; clients only receive a generic message.
			log.Printf("[TranslationRows] query_error err=%v", err)
			http.Error(w, "translation query error", http.StatusInternalServerError)
			return
		}
		defer rows.Close()
		list := make([]models.TranslatorRow, 0, 1024)
		for rows.Next() {
			var row models.TranslatorRow
			if err := rows.Scan(
				&row.ID,
				&row.TKey,
				&row.LangCode,
				&row.SourceType,
				&row.SourceTextTR,
				&row.TranslatedText,
				&row.IsManual,
				&row.Status,
				&row.Provider,
				&row.UpdatedAt,
			); err != nil {
				log.Printf("[TranslationRows] scan_error err=%v", err)
				http.Error(w, "translation scan error", http.StatusInternalServerError)
				return
			}
			list = append(list, row)
		}
		if err := rows.Err(); err != nil {
			log.Printf("[TranslationRows] rows_error err=%v", err)
			http.Error(w, "translation rows error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"rows":  list,
			"count": len(list),
		})
	}
}
// UpdateTranslationRowHandler applies a partial update to one mk_translator
// row identified by the {id} path variable. Only non-nil payload fields are
// changed (COALESCE in the SQL); source_type is written into provider_meta.
// Responds with the updated row, 404 when the id does not exist.
func UpdateTranslationRowHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		id, err := strconv.ParseInt(strings.TrimSpace(mux.Vars(r)["id"]), 10, 64)
		if err != nil || id <= 0 {
			http.Error(w, "invalid row id", http.StatusBadRequest)
			return
		}
		var payload TranslationUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		if payload.Status != nil {
			normalized := normalizeTranslationStatus(*payload.Status)
			if normalized == "" {
				http.Error(w, "invalid status", http.StatusBadRequest)
				return
			}
			payload.Status = &normalized
		}
		if payload.SourceType != nil {
			normalized := normalizeTranslationSourceType(*payload.SourceType)
			if normalized == "" {
				http.Error(w, "invalid source_type", http.StatusBadRequest)
				return
			}
			payload.SourceType = &normalized
		}
		updateQuery := `
		UPDATE mk_translator
		SET
			source_text_tr = COALESCE($2, source_text_tr),
			translated_text = COALESCE($3, translated_text),
			is_manual = COALESCE($4, is_manual),
			status = COALESCE($5, status),
			provider_meta = CASE
				WHEN $6::text IS NULL THEN provider_meta
				ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
			END,
			updated_at = NOW()
		WHERE id = $1
		RETURNING
			id,
			t_key,
			lang_code,
			COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
			source_text_tr,
			COALESCE(translated_text, '') AS translated_text,
			is_manual,
			status,
			COALESCE(provider, '') AS provider,
			updated_at
		`
		var row models.TranslatorRow
		err = db.QueryRow(
			updateQuery,
			id,
			nullableString(payload.SourceTextTR),
			nullableString(payload.TranslatedText),
			payload.IsManual,
			payload.Status,
			nullableString(payload.SourceType),
		).Scan(
			&row.ID,
			&row.TKey,
			&row.LangCode,
			&row.SourceType,
			&row.SourceTextTR,
			&row.TranslatedText,
			&row.IsManual,
			&row.Status,
			&row.Provider,
			&row.UpdatedAt,
		)
		// errors.Is handles drivers that wrap sql.ErrNoRows; a bare ==
		// comparison would miss wrapped errors.
		if errors.Is(err, sql.ErrNoRows) {
			http.Error(w, "translation row not found", http.StatusNotFound)
			return
		}
		if err != nil {
			// Log the underlying cause; the HTTP body stays generic.
			log.Printf("[TranslationUpdate] update_error id=%d err=%v", id, err)
			http.Error(w, "translation update error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(row)
	}
}
// UpsertMissingTranslationsHandler returns an HTTP handler that upserts the
// posted translation items (plus a pending row for every target language)
// into mk_translator, stamping them with the "dummy" source type.
func UpsertMissingTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var body UpsertMissingPayload
		if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		// Trim, validate and de-duplicate the incoming items first.
		cleaned := normalizeMissingItems(body.Items)
		if len(cleaned) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}
		targetLangs := normalizeTargetLanguages(body.Languages)
		affectedRows, upsertErr := upsertMissingRows(db, cleaned, targetLangs, "dummy")
		if upsertErr != nil {
			http.Error(w, "upsert missing error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"items":          len(cleaned),
			"target_langs":   targetLangs,
			"affected_count": affectedRows,
		})
	}
}
// SyncTranslationSourcesHandler returns an HTTP handler that triggers a
// translation-source sync via PerformTranslationSync and reports the result
// (seed / affected / auto-translated counts) as JSON. The request trace ID is
// echoed in the X-Trace-ID response header and every log line.
func SyncTranslationSourcesHandler(pgDB *sql.DB, mssqlDB *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var payload SyncSourcesPayload
		// Body is optional: a decode failure just leaves the zero payload.
		_ = json.NewDecoder(r.Body).Decode(&payload)
		traceID := requestTraceID(r)
		w.Header().Set("X-Trace-ID", traceID)
		start := time.Now()
		// only_new defaults to true when the field is omitted.
		onlyNew := payload.OnlyNew == nil || *payload.OnlyNew
		log.Printf(
			"[TranslationSync] trace=%s stage=request auto_translate=%t only_new=%t limit=%d langs=%v",
			traceID,
			payload.AutoTranslate,
			onlyNew,
			payload.Limit,
			payload.Languages,
		)
		result, err := PerformTranslationSync(pgDB, mssqlDB, TranslationSyncOptions{
			AutoTranslate: payload.AutoTranslate,
			Languages:     payload.Languages,
			Limit:         payload.Limit,
			OnlyNew:       onlyNew,
			TraceID:       traceID,
		})
		if err != nil {
			log.Printf(
				"[TranslationSync] trace=%s stage=error duration_ms=%d err=%v",
				traceID,
				time.Since(start).Milliseconds(),
				err,
			)
			http.Error(w, "translation source sync error", http.StatusInternalServerError)
			return
		}
		log.Printf(
			"[TranslationSync] trace=%s stage=response duration_ms=%d seeds=%d affected=%d auto_translated=%d",
			traceID,
			time.Since(start).Milliseconds(),
			result.SeedCount,
			result.AffectedCount,
			result.AutoTranslated,
		)
		// Counts are surfaced at the top level alongside the full result object.
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         traceID,
			"result":           result,
			"seed_count":       result.SeedCount,
			"affected_count":   result.AffectedCount,
			"auto_translated":  result.AutoTranslated,
			"target_languages": result.TargetLangs,
		})
	}
}
// TranslateSelectedTranslationsHandler returns an HTTP handler that machine-
// translates the pending mk_translator rows belonging to the posted t_keys
// for the requested (or default) target languages, and reports how many rows
// were translated.
func TranslateSelectedTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var body TranslateSelectedPayload
		if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		// Accept at most 5000 distinct keys.
		selectedKeys := normalizeStringList(body.TKeys, 5000)
		if len(selectedKeys) == 0 {
			http.Error(w, "t_keys required", http.StatusBadRequest)
			return
		}
		langs := normalizeTargetLanguages(body.Languages)
		// Row budget: caller's limit, else keys*langs, floored at 1000 and
		// capped at 50000.
		maxRows := body.Limit
		if maxRows <= 0 {
			maxRows = len(selectedKeys) * len(langs)
		}
		if maxRows <= 0 {
			maxRows = 1000
		}
		if maxRows > 50000 {
			maxRows = 50000
		}
		traceID := requestTraceID(r)
		w.Header().Set("X-Trace-ID", traceID)
		startedAt := time.Now()
		log.Printf(
			"[TranslationSelected] trace=%s stage=request keys=%d limit=%d langs=%v",
			traceID,
			len(selectedKeys),
			maxRows,
			langs,
		)
		translatedCount, translateErr := autoTranslatePendingRowsForKeys(db, langs, maxRows, selectedKeys, traceID)
		if translateErr != nil {
			log.Printf(
				"[TranslationSelected] trace=%s stage=error duration_ms=%d err=%v",
				traceID,
				time.Since(startedAt).Milliseconds(),
				translateErr,
			)
			http.Error(w, "translate selected error", http.StatusInternalServerError)
			return
		}
		log.Printf(
			"[TranslationSelected] trace=%s stage=done duration_ms=%d translated=%d",
			traceID,
			time.Since(startedAt).Milliseconds(),
			translatedCount,
		)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         traceID,
			"translated_count": translatedCount,
			"key_count":        len(selectedKeys),
			"target_languages": langs,
			"duration_ms":      time.Since(startedAt).Milliseconds(),
		})
	}
}
// BulkApproveTranslationsHandler returns an HTTP handler that marks the
// posted mk_translator row IDs as approved + manual in a single UPDATE and
// clears their provider_meta "is_new" flag.
func BulkApproveTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var payload BulkApprovePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		// Drop non-positive and duplicate IDs before touching the DB.
		ids := normalizeIDListInt64(payload.IDs)
		if len(ids) == 0 {
			http.Error(w, "ids required", http.StatusBadRequest)
			return
		}
		res, err := db.Exec(`
			UPDATE mk_translator
			SET
				status = 'approved',
				is_manual = true,
				updated_at = NOW(),
				provider_meta = jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{is_new}', 'false'::jsonb, true)
			WHERE id = ANY($1)
		`, pq.Array(ids))
		if err != nil {
			http.Error(w, "bulk approve error", http.StatusInternalServerError)
			return
		}
		// RowsAffected error is ignored: the update itself succeeded.
		affected, _ := res.RowsAffected()
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": affected,
		})
	}
}
// BulkUpdateTranslationsHandler returns an HTTP handler that applies a batch
// of partial row updates inside one transaction. Items with invalid IDs are
// skipped; nil fields are preserved via COALESCE. Any single failing UPDATE
// aborts the whole batch (the deferred Rollback undoes prior items).
func BulkUpdateTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var payload BulkUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		if len(payload.Items) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}
		tx, err := db.Begin()
		if err != nil {
			http.Error(w, "transaction start error", http.StatusInternalServerError)
			return
		}
		// No-op after a successful Commit; undoes everything on early return.
		defer tx.Rollback()
		affected := 0
		for _, it := range payload.Items {
			if it.ID <= 0 {
				continue
			}
			// Invalid status / source_type values become nil and are ignored
			// by the COALESCE / CASE expressions below.
			status := normalizeOptionalStatus(it.Status)
			sourceType := normalizeOptionalSourceType(it.SourceType)
			res, err := tx.Exec(`
				UPDATE mk_translator
				SET
					source_text_tr = COALESCE($2, source_text_tr),
					translated_text = COALESCE($3, translated_text),
					is_manual = COALESCE($4, is_manual),
					status = COALESCE($5, status),
					provider_meta = CASE
						WHEN $6::text IS NULL THEN provider_meta
						ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
					END,
					updated_at = NOW()
				WHERE id = $1
			`, it.ID, nullableString(it.SourceTextTR), nullableString(it.TranslatedText), it.IsManual, status, sourceType)
			if err != nil {
				http.Error(w, "bulk update error", http.StatusInternalServerError)
				return
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
		if err := tx.Commit(); err != nil {
			http.Error(w, "transaction commit error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": affected,
		})
	}
}
// PerformTranslationSync runs one full source-sync pass:
//  1. collect seed texts from Postgres, MSSQL and the UI source tree,
//  2. remap seeds to existing t_keys where the same source text is known,
//  3. optionally drop seeds that already exist (OnlyNew),
//  4. upsert the remaining seeds for 'tr' plus every target language,
//  5. optionally machine-translate the still-pending rows (AutoTranslate).
//
// Every stage is logged under the (possibly generated) trace ID. Only the
// upsert stage can fail the call; an auto-translate error is logged but the
// sync still returns success with whatever was translated so far.
func PerformTranslationSync(pgDB *sql.DB, mssqlDB *sql.DB, options TranslationSyncOptions) (TranslationSyncResult, error) {
	traceID := strings.TrimSpace(options.TraceID)
	if traceID == "" {
		// Base-36 nano timestamp keeps generated IDs short but unique enough.
		traceID = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	start := time.Now()
	// Clamp the seed budget to a sane default / hard ceiling.
	limit := options.Limit
	if limit <= 0 || limit > 100000 {
		limit = 20000
	}
	targetLangs := normalizeTargetLanguages(options.Languages)
	log.Printf(
		"[TranslationSync] trace=%s stage=start auto_translate=%t only_new=%t limit=%d langs=%v",
		traceID,
		options.AutoTranslate,
		options.OnlyNew,
		limit,
		targetLangs,
	)
	collectStart := time.Now()
	seeds := collectSourceSeeds(pgDB, mssqlDB, limit)
	// Reuse t_keys of rows that already store the same source text so re-runs
	// don't mint new keys for known texts.
	seeds, reusedByText := reuseExistingSeedKeys(pgDB, seeds)
	log.Printf(
		"[TranslationSync] trace=%s stage=collect done_ms=%d total=%d reused_by_text=%d sources=%s",
		traceID,
		time.Since(collectStart).Milliseconds(),
		len(seeds),
		reusedByText,
		formatSourceCounts(countSeedsBySource(seeds)),
	)
	if options.OnlyNew {
		before := len(seeds)
		filterStart := time.Now()
		seeds = filterNewSeeds(pgDB, seeds)
		log.Printf(
			"[TranslationSync] trace=%s stage=filter_only_new done_ms=%d before=%d after=%d skipped=%d",
			traceID,
			time.Since(filterStart).Milliseconds(),
			before,
			len(seeds),
			before-len(seeds),
		)
	}
	// Nothing new to write: return an empty (but successful) result.
	if len(seeds) == 0 {
		return TranslationSyncResult{
			TargetLangs: targetLangs,
			TraceID:     traceID,
			DurationMS:  time.Since(start).Milliseconds(),
		}, nil
	}
	upsertStart := time.Now()
	affected, err := upsertSourceSeeds(pgDB, seeds, targetLangs)
	if err != nil {
		return TranslationSyncResult{}, err
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=upsert done_ms=%d affected=%d",
		traceID,
		time.Since(upsertStart).Milliseconds(),
		affected,
	)
	autoTranslated := 0
	if options.AutoTranslate {
		autoStart := time.Now()
		var autoErr error
		// Restrict auto-translation to the keys touched by this sync run.
		autoTranslated, autoErr = autoTranslatePendingRowsForKeys(pgDB, targetLangs, limit, uniqueSeedKeys(seeds), traceID)
		if autoErr != nil {
			// Deliberately non-fatal: log and keep the partial result.
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d err=%v",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
				autoErr,
			)
		} else {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
			)
		}
	}
	result := TranslationSyncResult{
		SeedCount:      len(seeds),
		AffectedCount:  affected,
		AutoTranslated: autoTranslated,
		TargetLangs:    targetLangs,
		TraceID:        traceID,
		DurationMS:     time.Since(start).Milliseconds(),
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=done duration_ms=%d seeds=%d affected=%d auto_translated=%d",
		traceID,
		result.DurationMS,
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
	)
	return result, nil
}
// upsertMissingRows writes, inside one transaction, an approved 'tr' seed row
// plus one pending row per target language for every item. forcedSourceType
// (falling back to "dummy") is stamped into provider_meta.source_type on both
// insert and conflict-update. Returns the total number of affected rows; the
// first failing statement aborts the whole batch.
func upsertMissingRows(db *sql.DB, items []UpsertMissingItem, languages []string, forcedSourceType string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// No-op after a successful Commit; undoes everything on early return.
	defer tx.Rollback()
	affected := 0
	for _, it := range items {
		sourceType := forcedSourceType
		if sourceType == "" {
			sourceType = "dummy"
		}
		// Turkish master row: source text doubles as the "translation" and is
		// immediately approved.
		res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($3::text), true),
				updated_at = NOW()
		`, it.TKey, it.SourceTextTR, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}
		// One pending, untranslated row per requested target language.
		for _, lang := range languages {
			res, err := tx.Exec(`
				INSERT INTO mk_translator
					(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
				VALUES
					($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text))
				ON CONFLICT (t_key, lang_code) DO UPDATE
				SET
					source_text_tr = EXCLUDED.source_text_tr,
					provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($4::text), true),
					updated_at = NOW()
			`, it.TKey, lang, it.SourceTextTR, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}
	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
// upsertSourceSeeds writes collected seeds into mk_translator inside one
// transaction: an approved 'tr' row per seed plus a pending row per target
// language. Unlike upsertMissingRows, an already-set non-empty source_type is
// preserved on conflict, and provider_meta.is_new marks 'tr' rows false and
// language rows true. Returns the total affected row count; the first failing
// statement aborts the batch.
func upsertSourceSeeds(db *sql.DB, seeds []sourceSeed, languages []string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// No-op after a successful Commit; undoes everything on early return.
	defer tx.Rollback()
	affected := 0
	for _, seed := range seeds {
		if seed.TKey == "" || seed.SourceText == "" {
			continue
		}
		sourceType := normalizeTranslationSourceType(seed.SourceType)
		if sourceType == "" {
			sourceType = "dummy"
		}
		// Turkish master row; keeps any source_type already stored on the row.
		res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text, 'is_new', false))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(
					COALESCE(mk_translator.provider_meta, '{}'::jsonb),
					'{source_type}',
					to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $3::text)),
					true
				),
				updated_at = NOW()
		`, seed.TKey, seed.SourceText, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}
		// Pending rows for every target language, flagged is_new=true.
		for _, lang := range languages {
			res, err := tx.Exec(`
				INSERT INTO mk_translator
					(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
				VALUES
					($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text, 'is_new', true))
				ON CONFLICT (t_key, lang_code) DO UPDATE
				SET
					source_text_tr = EXCLUDED.source_text_tr,
					provider_meta = jsonb_set(
						COALESCE(mk_translator.provider_meta, '{}'::jsonb),
						'{source_type}',
						to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $4::text)),
						true
					),
					updated_at = NOW()
			`, seed.TKey, lang, seed.SourceText, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}
	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
// collectSourceSeeds gathers up to limit seeds from the three providers in
// priority order (Postgres schema, MSSQL schema, UI dummy texts), dropping
// incomplete seeds and de-duplicating by normalized source text.
func collectSourceSeeds(pgDB *sql.DB, mssqlDB *sql.DB, limit int) []sourceSeed {
	dedup := map[string]struct{}{}
	collected := make([]sourceSeed, 0, limit)
	add := func(s sourceSeed) {
		if s.TKey == "" || s.SourceText == "" || s.SourceType == "" {
			return
		}
		textKey := normalizeSeedTextKey(s.SourceText)
		if _, dup := dedup[textKey]; dup {
			return
		}
		dedup[textKey] = struct{}{}
		collected = append(collected, s)
	}
	for _, s := range collectPostgreSeeds(pgDB, limit) {
		add(s)
		if len(collected) >= limit {
			return collected
		}
	}
	// Later providers only get the remaining budget.
	for _, s := range collectMSSQLSeeds(mssqlDB, limit-len(collected)) {
		add(s)
		if len(collected) >= limit {
			return collected
		}
	}
	for _, s := range collectDummySeeds(limit - len(collected)) {
		add(s)
		if len(collected) >= limit {
			return collected
		}
	}
	return collected
}
// collectPostgreSeeds harvests up to limit column names from the public
// Postgres schema and turns each into a "postgre" translation seed with a
// text-derived key. Best-effort: any query error yields nil, and scan errors
// skip the row.
func collectPostgreSeeds(pgDB *sql.DB, limit int) []sourceSeed {
	if pgDB == nil || limit <= 0 {
		return nil
	}
	rows, err := pgDB.Query(`
		SELECT table_name, column_name
		FROM information_schema.columns
		WHERE table_schema = 'public'
		ORDER BY table_name, ordinal_position
		LIMIT $1
	`, limit)
	if err != nil {
		return nil
	}
	defer rows.Close()
	out := make([]sourceSeed, 0, limit)
	// NOTE(review): rows.Err() is never checked here, so a mid-iteration
	// failure silently truncates the seed list — confirm that is acceptable.
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		// tableName is scanned only to satisfy the SELECT column list.
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue
		}
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "postgre",
		})
	}
	return out
}
// collectMSSQLSeeds harvests up to limit column names from the MSSQL
// INFORMATION_SCHEMA catalog and turns each into an "mssql" translation seed.
// The per-run row cap (TRANSLATION_MSSQL_SEED_LIMIT, default 2500) and the
// query timeout (TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC, default 20s) are
// env-tunable. Best-effort: a failing query is logged and skipped.
func collectMSSQLSeeds(mssqlDB *sql.DB, limit int) []sourceSeed {
	if mssqlDB == nil || limit <= 0 {
		return nil
	}
	maxPerRun := parsePositiveIntEnv("TRANSLATION_MSSQL_SEED_LIMIT", 2500)
	if limit > maxPerRun {
		limit = maxPerRun
	}
	timeoutSec := parsePositiveIntEnv("TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC", 20)
	// TOP (n) is formatted into the statement text; limit is an int, so this
	// cannot inject.
	query := fmt.Sprintf(`
		SELECT TOP (%d) TABLE_NAME, COLUMN_NAME
		FROM INFORMATION_SCHEMA.COLUMNS
		ORDER BY TABLE_NAME, ORDINAL_POSITION
	`, limit)
	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeoutSec)*time.Second)
	defer cancel()
	rows, err := mssqlDB.QueryContext(ctx, query)
	if err != nil {
		log.Printf("[TranslationSync] stage=collect_mssql skipped err=%v", err)
		return nil
	}
	defer rows.Close()
	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		// tableName is scanned only to satisfy the SELECT column list.
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue
		}
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "mssql",
		})
	}
	return out
}
// collectDummySeeds walks the UI source tree (ui/src; *.vue/*.js/*.ts files)
// for quoted string literals that look like human-readable texts and turns
// each unique candidate into a "dummy" seed, up to limit. Best-effort: a
// missing project root / ui dir yields nil and unreadable files are skipped.
func collectDummySeeds(limit int) []sourceSeed {
	if limit <= 0 {
		return nil
	}
	root := detectProjectRoot()
	if root == "" {
		return nil
	}
	uiRoot := filepath.Join(root, "ui", "src")
	if _, err := os.Stat(uiRoot); err != nil {
		return nil
	}
	out := make([]sourceSeed, 0, limit)
	seen := make(map[string]struct{}, limit)
	_ = filepath.WalkDir(uiRoot, func(path string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return nil
		}
		ext := strings.ToLower(filepath.Ext(path))
		if ext != ".vue" && ext != ".js" && ext != ".ts" {
			return nil
		}
		b, err := os.ReadFile(path)
		if err != nil {
			return nil
		}
		matches := reQuotedText.FindAllStringSubmatch(string(b), -1)
		for _, m := range matches {
			text := strings.TrimSpace(m[1])
			if !isCandidateText(text) {
				continue
			}
			if _, ok := seen[text]; ok {
				continue
			}
			seen[text] = struct{}{}
			key := makeTextBasedSeedKey(text)
			out = append(out, sourceSeed{
				TKey:       key,
				SourceText: text,
				SourceType: "dummy",
			})
			if len(out) >= limit {
				// Sentinel error only aborts the walk early; it is discarded
				// by the `_ =` above, never surfaced to the caller.
				return errors.New("limit reached")
			}
		}
		return nil
	})
	return out
}
// autoTranslatePendingRows is a convenience wrapper around
// autoTranslatePendingRowsForKeys with no key filter and no caller trace ID.
//
// NOTE(review): autoTranslatePendingRowsForKeys returns 0 immediately when
// the key list is empty, so this wrapper currently translates nothing —
// confirm whether any caller still relies on it.
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
	return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
}
// autoTranslatePendingRowsForKeys machine-translates untranslated, non-manual
// mk_translator rows restricted to the given t_keys and language codes, up to
// limit rows. Each successful Azure translation is written back immediately
// with status 'pending' and provider 'azure_translator'. Per-row translate or
// update failures are counted and logged but do not fail the call; only the
// candidate SELECT can return an error. Returns the number of rows updated.
//
// Progress logging cadence is env-tunable: every
// TRANSLATION_AUTO_PROGRESS_EVERY rows (default 100) or every
// TRANSLATION_AUTO_PROGRESS_SEC seconds (default 15).
func autoTranslatePendingRowsForKeys(db *sql.DB, langs []string, limit int, keys []string, traceID string) (int, error) {
	traceID = strings.TrimSpace(traceID)
	if traceID == "" {
		traceID = "trauto-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	// Without a key filter there is nothing to translate (see wrapper above
	// for the implication).
	if len(keys) == 0 {
		log.Printf("[TranslationAuto] trace=%s stage=skip reason=no_keys", traceID)
		return 0, nil
	}
	start := time.Now()
	// Oldest rows first; note the placeholders are intentionally $1/$3/$2.
	rows, err := db.Query(`
		SELECT id, lang_code, source_text_tr
		FROM mk_translator
		WHERE lang_code = ANY($1)
			AND t_key = ANY($3)
			AND (translated_text IS NULL OR btrim(translated_text) = '')
			AND is_manual = false
		ORDER BY updated_at ASC
		LIMIT $2
	`, pqArray(langs), limit, pq.Array(keys))
	if err != nil {
		return 0, err
	}
	defer rows.Close()
	// Local shape of one translatable candidate row.
	type pending struct {
		ID   int64
		Lang string
		Text string
	}
	list := make([]pending, 0, limit)
	pendingByLang := map[string]int{}
	sourceChars := 0
	// Materialize all candidates first so the cursor is closed before the
	// slow per-row HTTP calls below.
	for rows.Next() {
		var p pending
		if err := rows.Scan(&p.ID, &p.Lang, &p.Text); err != nil {
			continue
		}
		if strings.TrimSpace(p.Text) == "" {
			continue
		}
		p.Lang = normalizeTranslationLang(p.Lang)
		if p.Lang == "" {
			continue
		}
		list = append(list, p)
		pendingByLang[p.Lang]++
		sourceChars += len([]rune(strings.TrimSpace(p.Text)))
	}
	if err := rows.Err(); err != nil {
		return 0, err
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=prepare candidates=%d limit=%d keys=%d langs=%v source_chars=%d pending_by_lang=%s",
		traceID,
		len(list),
		limit,
		len(keys),
		langs,
		sourceChars,
		formatLangCounts(pendingByLang),
	)
	if len(list) == 0 {
		log.Printf(
			"[TranslationAuto] trace=%s stage=done duration_ms=%d translated=0 failed_translate=0 failed_update=0 rps=0.00",
			traceID,
			time.Since(start).Milliseconds(),
		)
		return 0, nil
	}
	done := 0
	failedTranslate := 0
	failedUpdate := 0
	doneByLang := map[string]int{}
	progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
	if progressEvery <= 0 {
		progressEvery = 100
	}
	progressSec := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_SEC", 15)
	if progressSec <= 0 {
		progressSec = 15
	}
	progressTicker := time.Duration(progressSec) * time.Second
	lastProgress := time.Now()
	for i, p := range list {
		// One synchronous Azure call per row.
		tr, err := callAzureTranslate(p.Text, p.Lang)
		if err != nil || strings.TrimSpace(tr) == "" {
			failedTranslate++
			continue
		}
		// Write back immediately (no transaction): each row stands alone.
		_, err = db.Exec(`
			UPDATE mk_translator
			SET translated_text = $2,
				status = 'pending',
				is_manual = false,
				provider = 'azure_translator',
				updated_at = NOW()
			WHERE id = $1
		`, p.ID, strings.TrimSpace(tr))
		if err != nil {
			failedUpdate++
			continue
		}
		done++
		doneByLang[p.Lang]++
		processed := i + 1
		// Log on count interval, time interval, or the final row.
		shouldLogProgress := processed%progressEvery == 0 || time.Since(lastProgress) >= progressTicker || processed == len(list)
		if shouldLogProgress {
			elapsed := time.Since(start)
			rps := float64(done)
			if elapsed > 0 {
				rps = float64(done) / elapsed.Seconds()
			}
			log.Printf(
				"[TranslationAuto] trace=%s stage=progress processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f done_by_lang=%s",
				traceID,
				processed,
				len(list),
				done,
				failedTranslate,
				failedUpdate,
				elapsed.Milliseconds(),
				rps,
				formatLangCounts(doneByLang),
			)
			lastProgress = time.Now()
		}
	}
	elapsed := time.Since(start)
	rps := float64(done)
	if elapsed > 0 {
		rps = float64(done) / elapsed.Seconds()
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=done duration_ms=%d candidates=%d translated=%d failed_translate=%d failed_update=%d rps=%.2f done_by_lang=%s",
		traceID,
		elapsed.Milliseconds(),
		len(list),
		done,
		failedTranslate,
		failedUpdate,
		rps,
		formatLangCounts(doneByLang),
	)
	return done, nil
}
// formatLangCounts renders a per-language counter map as a deterministic
// comma-separated "lang=n" list (alphabetical), or "-" when empty.
func formatLangCounts(counts map[string]int) string {
	if len(counts) == 0 {
		return "-"
	}
	langs := make([]string, 0, len(counts))
	for lang := range counts {
		langs = append(langs, lang)
	}
	sort.Strings(langs)
	var b strings.Builder
	for i, lang := range langs {
		if i > 0 {
			b.WriteString(",")
		}
		fmt.Fprintf(&b, "%s=%d", lang, counts[lang])
	}
	return b.String()
}
// filterNewSeeds drops seeds whose t_key or normalized source text already
// exists in mk_translator, leaving only genuinely new entries. Fails open:
// on a query error the input is returned unchanged (best-effort filter).
func filterNewSeeds(pgDB *sql.DB, seeds []sourceSeed) []sourceSeed {
	if pgDB == nil || len(seeds) == 0 {
		return seeds
	}
	keys := uniqueSeedKeys(seeds)
	if len(keys) == 0 {
		return nil
	}
	textKeys := uniqueSeedTextKeys(seeds)
	// One round trip: fetch every known key / normalized text in the batch.
	rows, err := pgDB.Query(`
		SELECT DISTINCT t_key, lower(btrim(source_text_tr)) AS text_key
		FROM mk_translator
		WHERE t_key = ANY($1)
			OR lower(btrim(source_text_tr)) = ANY($2)
	`, pq.Array(keys), pq.Array(textKeys))
	if err != nil {
		return seeds
	}
	defer rows.Close()
	existing := make(map[string]struct{}, len(keys))
	existingText := make(map[string]struct{}, len(textKeys))
	for rows.Next() {
		var key string
		var textKey sql.NullString
		if err := rows.Scan(&key, &textKey); err == nil {
			if strings.TrimSpace(key) != "" {
				existing[key] = struct{}{}
			}
			// source_text_tr may be NULL, hence the sql.NullString guard.
			if textKey.Valid {
				t := strings.TrimSpace(textKey.String)
				if t != "" {
					existingText[t] = struct{}{}
				}
			}
		}
	}
	out := make([]sourceSeed, 0, len(seeds))
	for _, seed := range seeds {
		if _, ok := existing[seed.TKey]; ok {
			continue
		}
		if _, ok := existingText[normalizeSeedTextKey(seed.SourceText)]; ok {
			continue
		}
		out = append(out, seed)
	}
	return out
}
// uniqueSeedKeys returns the non-empty TKey values of the seeds, first
// occurrence wins, original order preserved.
func uniqueSeedKeys(seeds []sourceSeed) []string {
	visited := make(map[string]struct{}, len(seeds))
	keys := make([]string, 0, len(seeds))
	for _, s := range seeds {
		k := s.TKey
		if k == "" {
			continue
		}
		if _, dup := visited[k]; dup {
			continue
		}
		visited[k] = struct{}{}
		keys = append(keys, k)
	}
	return keys
}
// uniqueSeedTextKeys returns the distinct normalized source-text keys of the
// seeds (empty ones skipped), first occurrence wins, order preserved.
func uniqueSeedTextKeys(seeds []sourceSeed) []string {
	visited := make(map[string]struct{}, len(seeds))
	textKeys := make([]string, 0, len(seeds))
	for _, s := range seeds {
		tk := normalizeSeedTextKey(s.SourceText)
		if tk == "" {
			continue
		}
		if _, dup := visited[tk]; dup {
			continue
		}
		visited[tk] = struct{}{}
		textKeys = append(textKeys, tk)
	}
	return textKeys
}
// reuseExistingSeedKeys rewrites each seed's TKey to the t_key of the oldest
// existing mk_translator row with the same normalized source text, so re-runs
// do not mint new keys for already-known texts. The seeds slice is mutated in
// place. Returns the seeds plus the number of keys swapped; fails open (no
// remapping) on query errors.
func reuseExistingSeedKeys(pgDB *sql.DB, seeds []sourceSeed) ([]sourceSeed, int) {
	if pgDB == nil || len(seeds) == 0 {
		return seeds, 0
	}
	textKeys := uniqueSeedTextKeys(seeds)
	if len(textKeys) == 0 {
		return seeds, 0
	}
	// ROW_NUMBER over (text, id ASC) picks exactly one canonical t_key — the
	// lowest id — per normalized source text.
	rows, err := pgDB.Query(`
		SELECT x.text_key, x.t_key
		FROM (
			SELECT
				lower(btrim(source_text_tr)) AS text_key,
				t_key,
				ROW_NUMBER() OVER (
					PARTITION BY lower(btrim(source_text_tr))
					ORDER BY id ASC
				) AS rn
			FROM mk_translator
			WHERE lower(btrim(source_text_tr)) = ANY($1)
		) x
		WHERE x.rn = 1
	`, pq.Array(textKeys))
	if err != nil {
		return seeds, 0
	}
	defer rows.Close()
	existingByText := make(map[string]string, len(textKeys))
	for rows.Next() {
		var textKey, tKey string
		if err := rows.Scan(&textKey, &tKey); err != nil {
			continue
		}
		textKey = strings.TrimSpace(strings.ToLower(textKey))
		tKey = strings.TrimSpace(tKey)
		if textKey == "" || tKey == "" {
			continue
		}
		existingByText[textKey] = tKey
	}
	reused := 0
	for i := range seeds {
		textKey := normalizeSeedTextKey(seeds[i].SourceText)
		if textKey == "" {
			continue
		}
		// Only count a swap when the key actually changes.
		if existingKey, ok := existingByText[textKey]; ok && existingKey != "" && seeds[i].TKey != existingKey {
			seeds[i].TKey = existingKey
			reused++
		}
	}
	return seeds, reused
}
// countSeedsBySource tallies seeds per normalized source type; unknown or
// empty types fall into the "dummy" bucket. All three known buckets are
// always present (possibly zero).
func countSeedsBySource(seeds []sourceSeed) map[string]int {
	counts := map[string]int{"dummy": 0, "postgre": 0, "mssql": 0}
	for _, seed := range seeds {
		bucket := normalizeTranslationSourceType(seed.SourceType)
		if bucket == "" {
			bucket = "dummy"
		}
		counts[bucket]++
	}
	return counts
}
// formatSourceCounts renders the three seed-source buckets in a fixed order
// for log lines; missing keys read as zero.
func formatSourceCounts(counts map[string]int) string {
	dummy := counts["dummy"]
	postgre := counts["postgre"]
	mssql := counts["mssql"]
	return fmt.Sprintf("dummy=%d postgre=%d mssql=%d", dummy, postgre, mssql)
}
func requestTraceID(r *http.Request) string {
if r == nil {
return "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
id := strings.TrimSpace(r.Header.Get("X-Request-ID"))
if id == "" {
id = strings.TrimSpace(r.Header.Get("X-Correlation-ID"))
}
if id == "" {
id = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
return id
}
// callAzureTranslate translates sourceText into targetLang via the Azure
// Translator REST API (Text Translation v3.0). Configuration comes from the
// environment: AZURE_TRANSLATOR_KEY / _ENDPOINT / _REGION are all required;
// TRANSLATION_SOURCE_LANG defaults to "tr"; TRANSLATION_HTTP_TIMEOUT_SEC
// (default 60) bounds the request. Returns the trimmed translated text, or an
// error on missing config, an invalid target language, a non-2xx response or
// a decode failure.
func callAzureTranslate(sourceText, targetLang string) (string, error) {
	key := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_KEY"))
	endpoint := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_ENDPOINT"))
	region := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_REGION"))
	if key == "" {
		return "", errors.New("AZURE_TRANSLATOR_KEY not set")
	}
	if endpoint == "" {
		return "", errors.New("AZURE_TRANSLATOR_ENDPOINT not set")
	}
	if region == "" {
		return "", errors.New("AZURE_TRANSLATOR_REGION not set")
	}
	sourceLang := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SOURCE_LANG")))
	if sourceLang == "" {
		sourceLang = "tr"
	}
	targetLang = normalizeTranslationLang(targetLang)
	// Translating tr->tr is pointless; reject it alongside unknown codes.
	if targetLang == "" || targetLang == "tr" {
		return "", fmt.Errorf("invalid target language: %q", targetLang)
	}
	endpoint = strings.TrimRight(endpoint, "/")
	normalizedEndpoint := strings.ToLower(endpoint)
	translatePath := "/translate"
	// Azure custom endpoint requires the translator path with version in URL.
	if strings.Contains(normalizedEndpoint, ".cognitiveservices.azure.com") {
		translatePath = "/translator/text/v3.0/translate"
	}
	baseURL, err := url.Parse(endpoint + translatePath)
	if err != nil {
		return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
	}
	q := baseURL.Query()
	// The global endpoint carries the API version as a query parameter; the
	// cognitiveservices path already embeds v3.0.
	if translatePath == "/translate" {
		q.Set("api-version", "3.0")
	}
	q.Set("from", sourceLang)
	q.Set("to", targetLang)
	baseURL.RawQuery = q.Encode()
	// The API accepts an array of text objects; we send a single element.
	payload := []map[string]string{
		{"text": sourceText},
	}
	body, _ := json.Marshal(payload)
	req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	req.Header.Set("Ocp-Apim-Subscription-Key", key)
	req.Header.Set("Ocp-Apim-Subscription-Region", region)
	req.Header.Set("Content-Type", "application/json; charset=UTF-8")
	timeoutSec := parsePositiveIntEnv("TRANSLATION_HTTP_TIMEOUT_SEC", 60)
	client := &http.Client{Timeout: time.Duration(timeoutSec) * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		// Include at most 1 KiB of the error body for diagnostics.
		raw, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
		return "", fmt.Errorf("azure translator status=%d body=%s", resp.StatusCode, strings.TrimSpace(string(raw)))
	}
	// Response mirrors the request: one result object per input text.
	var result []struct {
		Translations []struct {
			Text string `json:"text"`
			To   string `json:"to"`
		} `json:"translations"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result) == 0 || len(result[0].Translations) == 0 {
		return "", errors.New("azure translator empty response")
	}
	return strings.TrimSpace(result[0].Translations[0].Text), nil
}
// nullableString converts an optional string pointer into a driver value:
// nil stays nil (so SQL COALESCE keeps the existing column value), while a
// present pointer is passed through trimmed. Note an all-whitespace input
// becomes the empty string, not NULL.
func nullableString(v *string) any {
	if v == nil {
		return nil
	}
	return strings.TrimSpace(*v)
}
// normalizeTranslationLang lower-cases and trims a language code and returns
// it only if it belongs to the supported set, otherwise "".
func normalizeTranslationLang(v string) string {
	candidate := strings.ToLower(strings.TrimSpace(v))
	if _, allowed := translationLangSet[candidate]; !allowed {
		return ""
	}
	return candidate
}
// normalizeTranslationStatus lower-cases and trims a status value and returns
// it only if it belongs to the allowed status set, otherwise "".
func normalizeTranslationStatus(v string) string {
	candidate := strings.ToLower(strings.TrimSpace(v))
	if _, allowed := translationStatusSet[candidate]; !allowed {
		return ""
	}
	return candidate
}
// normalizeTranslationSourceType lower-cases and trims a source-type value
// and returns it only if it belongs to the allowed set, otherwise "".
func normalizeTranslationSourceType(v string) string {
	candidate := strings.ToLower(strings.TrimSpace(v))
	if _, allowed := translationSourceTypeSet[candidate]; !allowed {
		return ""
	}
	return candidate
}
// normalizeTargetLanguages validates and de-duplicates the requested target
// languages, dropping "tr" (the source language). The full default set is
// returned when the input is empty or nothing survives validation, so the
// result is never empty.
func normalizeTargetLanguages(list []string) []string {
	defaults := []string{"en", "de", "it", "es", "ru", "ar"}
	if len(list) == 0 {
		return defaults
	}
	visited := make(map[string]struct{}, len(list))
	picked := make([]string, 0, len(list))
	for _, raw := range list {
		lang := normalizeTranslationLang(raw)
		if lang == "" || lang == "tr" {
			continue
		}
		if _, dup := visited[lang]; dup {
			continue
		}
		visited[lang] = struct{}{}
		picked = append(picked, lang)
	}
	if len(picked) == 0 {
		return defaults
	}
	return picked
}
// normalizeOptionalStatus maps an optional status to a driver value: nil or
// an invalid status yields nil (so SQL COALESCE keeps the old value).
func normalizeOptionalStatus(v *string) any {
	if v == nil {
		return nil
	}
	if normalized := normalizeTranslationStatus(*v); normalized != "" {
		return normalized
	}
	return nil
}
// normalizeOptionalSourceType maps an optional source type to a driver value:
// nil or an invalid type yields nil (so SQL keeps the old value).
func normalizeOptionalSourceType(v *string) any {
	if v == nil {
		return nil
	}
	if normalized := normalizeTranslationSourceType(*v); normalized != "" {
		return normalized
	}
	return nil
}
// normalizeMissingItems trims each item's key and source text, drops items
// missing either, and de-duplicates by key (first occurrence wins).
func normalizeMissingItems(items []UpsertMissingItem) []UpsertMissingItem {
	visited := make(map[string]struct{}, len(items))
	cleaned := make([]UpsertMissingItem, 0, len(items))
	for _, raw := range items {
		tKey := strings.TrimSpace(raw.TKey)
		text := strings.TrimSpace(raw.SourceTextTR)
		if tKey == "" || text == "" {
			continue
		}
		if _, dup := visited[tKey]; dup {
			continue
		}
		visited[tKey] = struct{}{}
		cleaned = append(cleaned, UpsertMissingItem{
			TKey:         tKey,
			SourceTextTR: text,
		})
	}
	return cleaned
}
// normalizeIDListInt64 drops non-positive and duplicate IDs and returns the
// survivors sorted ascending.
func normalizeIDListInt64(ids []int64) []int64 {
	visited := make(map[int64]struct{}, len(ids))
	unique := make([]int64, 0, len(ids))
	for _, id := range ids {
		if id <= 0 {
			continue
		}
		if _, dup := visited[id]; dup {
			continue
		}
		visited[id] = struct{}{}
		unique = append(unique, id)
	}
	sort.Slice(unique, func(a, b int) bool { return unique[a] < unique[b] })
	return unique
}
// detectProjectRoot walks up to two directories from the working directory
// and returns the first one containing a "ui" entry, or "" when none does.
func detectProjectRoot() string {
	wd, err := os.Getwd()
	if err != nil {
		return ""
	}
	for _, dir := range []string{wd, filepath.Dir(wd), filepath.Dir(filepath.Dir(wd))} {
		if _, statErr := os.Stat(filepath.Join(dir, "ui")); statErr == nil {
			return dir
		}
	}
	return ""
}
// isCandidateText reports whether a quoted literal looks like translatable
// UI text: 3-120 bytes after trimming, contains a letter, and matches
// neither the bad-text pattern nor an API path fragment.
func isCandidateText(s string) bool {
	trimmed := strings.TrimSpace(s)
	switch {
	case len(trimmed) < 3 || len(trimmed) > 120:
		return false
	case reBadText.MatchString(trimmed):
		return false
	case !reHasLetter.MatchString(trimmed):
		return false
	case strings.Contains(trimmed, "/api/"):
		return false
	}
	return true
}
// sanitizeKey converts arbitrary text into a safe key fragment: lower-cased,
// spaces and unsafe characters replaced by underscores, outer underscores
// trimmed, with "x" as the fallback for an empty result.
func sanitizeKey(s string) string {
	key := strings.ToLower(strings.TrimSpace(s))
	key = strings.ReplaceAll(key, " ", "_")
	key = reKeyUnsafe.ReplaceAllString(key, "_")
	if key = strings.Trim(key, "_"); key == "" {
		return "x"
	}
	return key
}
// normalizeDisplayText turns an identifier-ish string into display text:
// underscores become spaces and runs of whitespace collapse to single
// spaces; an empty or whitespace-only input yields "".
func normalizeDisplayText(s string) string {
	words := strings.Fields(strings.ReplaceAll(s, "_", " "))
	return strings.Join(words, " ")
}
// hashKey builds a short pseudo-stable key: the sanitized text capped at 40
// bytes, suffixed with the rune-sum of the ORIGINAL string modulo 1e6. Not a
// cryptographic hash — collisions are possible but cheap to compute.
func hashKey(s string) string {
	prefix := sanitizeKey(s)
	if len(prefix) > 40 {
		prefix = prefix[:40]
	}
	var runeSum int
	for _, r := range s {
		runeSum += int(r)
	}
	return fmt.Sprintf("%s_%d", prefix, runeSum%1000000)
}
// makeTextBasedSeedKey derives a "txt."-prefixed t_key from the normalized
// source text, so identical texts always map to the same key.
func makeTextBasedSeedKey(sourceText string) string {
	return "txt." + hashKey(normalizeSeedTextKey(sourceText))
}
// normalizeSeedTextKey canonicalizes a source text for de-duplication:
// underscores to spaces, whitespace collapsed, trimmed, lower-cased.
func normalizeSeedTextKey(s string) string {
	return strings.ToLower(strings.TrimSpace(normalizeDisplayText(s)))
}
// pqArray wraps a string slice for use as a Postgres array parameter,
// trimming each element and sorting the copy for deterministic statements.
// The caller's slice is never mutated.
func pqArray(values []string) any {
	if len(values) == 0 {
		return pq.Array([]string{})
	}
	trimmed := make([]string, len(values))
	for i, v := range values {
		trimmed[i] = strings.TrimSpace(v)
	}
	sort.Strings(trimmed)
	return pq.Array(trimmed)
}
func parsePositiveIntEnv(name string, fallback int) int {
raw := strings.TrimSpace(os.Getenv(name))
if raw == "" {
return fallback
}
n, err := strconv.Atoi(raw)
if err != nil || n <= 0 {
return fallback
}
return n
}
// normalizeStringList trims, drops empties, de-duplicates (first occurrence
// wins, order preserved) and caps the result at max entries; max <= 0 means
// no effective cap. An empty input yields nil.
func normalizeStringList(items []string, max int) []string {
	if len(items) == 0 {
		return nil
	}
	if max <= 0 {
		max = len(items)
	}
	visited := make(map[string]struct{}, len(items))
	cleaned := make([]string, 0, len(items))
	for _, raw := range items {
		item := strings.TrimSpace(raw)
		if item == "" {
			continue
		}
		if _, dup := visited[item]; dup {
			continue
		}
		visited[item] = struct{}{}
		cleaned = append(cleaned, item)
		if len(cleaned) >= max {
			break
		}
	}
	return cleaned
}

View File

@@ -0,0 +1,69 @@
package main
import (
"bssapp-backend/routes"
"database/sql"
"log"
"os"
"strconv"
"strings"
"time"
)
// startTranslationSyncScheduler launches a background goroutine that
// runs a full translation sync once per day at a configurable hour.
// Configuration via environment variables:
//   - TRANSLATION_SYNC_ENABLED: "0"/"false"/"off" disables the scheduler
//   - TRANSLATION_SYNC_HOUR:    local hour 0-23 to run at (default 4)
//   - TRANSLATION_SYNC_LIMIT:   positive seed limit per run (default 30000)
//
// The goroutine never exits; a failed run is logged and the next run
// is scheduled as usual.
func startTranslationSyncScheduler(pgDB *sql.DB, mssqlDB *sql.DB) {
	switch strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_ENABLED"))) {
	case "0", "false", "off":
		log.Println("🛑 Translation sync scheduler disabled")
		return
	}
	runHour := 4
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_HOUR")); raw != "" {
		if h, err := strconv.Atoi(raw); err == nil && h >= 0 && h <= 23 {
			runHour = h
		}
	}
	seedLimit := 30000
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LIMIT")); raw != "" {
		if n, err := strconv.Atoi(raw); err == nil && n > 0 {
			seedLimit = n
		}
	}
	go func() {
		for {
			runAt := nextRunAt(time.Now(), runHour)
			delay := time.Until(runAt)
			log.Printf("🕓 Translation sync next run at %s (in %s)", runAt.Format(time.RFC3339), delay.Round(time.Second))
			time.Sleep(delay)
			result, err := routes.PerformTranslationSync(pgDB, mssqlDB, routes.TranslationSyncOptions{
				AutoTranslate: true,
				Languages:     []string{"en", "de", "it", "es", "ru", "ar"},
				Limit:         seedLimit,
				OnlyNew:       true,
			})
			if err != nil {
				log.Printf("❌ Translation sync failed: %v", err)
				continue
			}
			log.Printf(
				"✅ Translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v",
				result.SeedCount,
				result.AffectedCount,
				result.AutoTranslated,
				result.TargetLangs,
			)
		}
	}()
}
func nextRunAt(now time.Time, hour int) time.Time {
next := time.Date(now.Year(), now.Month(), now.Day(), hour, 0, 0, 0, now.Location())
if !next.After(now) {
next = next.Add(24 * time.Hour)
}
return next
}

View File

@@ -146,7 +146,11 @@ createQuasarApp(createApp, quasarUserOptions)
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs')
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')

View File

@@ -0,0 +1,75 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import { Quasar } from 'quasar'
import { markRaw } from 'vue'
import RootComponent from 'app/src/App.vue'
import createStore from 'app/src/stores/index'
import createRouter from 'app/src/router/index'
export default async function (createAppFn, quasarUserOptions) {
  // Build the root Vue app and install the Quasar plugin first, so the
  // store/router factories below run with Quasar already registered.
  const app = createAppFn(RootComponent)
  app.use(Quasar, quasarUserOptions)

  // The store module may export either an async factory or a ready
  // instance; support both shapes.
  let store = createStore
  if (typeof createStore === 'function') {
    store = await createStore({})
  }
  app.use(store)

  // Same duality for the router; markRaw keeps Vue from wrapping the
  // router instance in a reactive proxy.
  let routerInstance = createRouter
  if (typeof createRouter === 'function') {
    routerInstance = await createRouter({ store })
  }
  const router = markRaw(routerInstance)

  // make router instance available in store
  store.use(({ store }) => { store.router = router })

  // Expose the app, the router and the store.
  // Note that we are not mounting the app here, since bootstrapping will be
  // different depending on whether we are in a browser or on the server.
  return {
    app,
    store,
    router
  }
}

View File

@@ -0,0 +1,158 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import { createApp } from 'vue'
import '@quasar/extras/roboto-font/roboto-font.css'
import '@quasar/extras/material-icons/material-icons.css'
// We load Quasar stylesheet file
import 'quasar/dist/quasar.sass'
import 'src/css/app.css'
import createQuasarApp from './app.js'
import quasarUserOptions from './quasar-user-options.js'
const publicPath = `/`
/**
 * Client-side bootstrap: runs every boot file in order, then mounts
 * the app — unless a boot file redirected or threw.
 * @param {Object} ctx - { app, router, store } produced by app.js
 * @param {Function[]} bootFiles - callable boot-file default exports
 */
async function start ({ app, router, store }, bootFiles) {
  let hasRedirected = false

  // Resolve a location into an href via the router; a raw string is
  // passed through when resolution fails, while unresolvable objects
  // yield null (meaning: do not navigate).
  const getRedirectUrl = url => {
    try { return router.resolve(url).href }
    catch (err) {}
    return Object(url) === url
      ? null
      : url
  }

  const redirect = url => {
    hasRedirected = true
    if (typeof url === 'string' && /^https?:\/\//.test(url)) {
      // Absolute external URL: leave the SPA entirely.
      window.location.href = url
      return
    }
    const href = getRedirectUrl(url)
    // continue if we didn't fail to resolve the url
    if (href !== null) {
      window.location.href = href
      window.location.reload()
    }
  }

  const urlPath = window.location.href.replace(window.location.origin, '')

  // Run boot files sequentially; a redirect or an error stops the chain.
  for (const bootFn of bootFiles) {
    if (hasRedirected !== false) break
    try {
      await bootFn({
        app,
        router,
        store,
        ssrContext: null,
        redirect,
        urlPath,
        publicPath
      })
    }
    catch (err) {
      // A thrown object carrying .url is treated as a redirect request.
      if (err && err.url) {
        redirect(err.url)
        return
      }
      console.error('[Quasar] boot error:', err)
      return
    }
  }

  if (hasRedirected === true) return

  app.use(router)
  app.mount('#q-app')
}
createQuasarApp(createApp, quasarUserOptions)
.then(app => {
// eventually remove this when Cordova/Capacitor/Electron support becomes old
const [ method, mapFn ] = Promise.allSettled !== void 0
? [
'allSettled',
bootFiles => bootFiles.map(result => {
if (result.status === 'rejected') {
console.error('[Quasar] boot error:', result.reason)
return
}
return result.value.default
})
]
: [
'all',
bootFiles => bootFiles.map(entry => entry.default)
]
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')
start(app, boot)
})
})

View File

@@ -0,0 +1,116 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import App from 'app/src/App.vue'
// App-level preFetch hook, if the root component defines one. This is
// mutable module state: addPreFetchHooks consumes it exactly once (on
// the first resolved navigation) and then resets it to false.
let appPrefetch = typeof App.preFetch === 'function'
? App.preFetch
: (
// Class components return the component options (and the preFetch hook) inside __c property
App.__c !== void 0 && typeof App.__c.preFetch === 'function'
? App.__c.preFetch
: false
)
// Collect every component matched by a route location. `to` may be an
// already-resolved route (has .matched), a raw location (resolved via
// the router), or falsy (use the router's current route). Returns a
// flat list of { path, c } entries, one per named view of each record.
function getMatchedComponents (to, router) {
  let route
  if (!to) {
    route = router.currentRoute.value
  } else if (to.matched) {
    route = to
  } else {
    route = router.resolve(to).route
  }
  if (!route) { return [] }

  const withComponents = route.matched.filter(record => record.components !== void 0)
  if (withComponents.length === 0) { return [] }

  return withComponents.flatMap(record =>
    Object.keys(record.components).map(name => ({
      path: record.path,
      c: record.components[name]
    }))
  )
}
/**
 * Install a router.beforeResolve hook that runs component preFetch
 * hooks (Quasar's data-prefetch convention) on the client.
 * @param {Object} ctx - { router, store, publicPath }
 */
export function addPreFetchHooks ({ router, store, publicPath }) {
// Add router hook for handling preFetch.
// Doing it after initial route is resolved so that we don't double-fetch
// the data that we already have. Using router.beforeResolve() so that all
// async components are resolved.
router.beforeResolve((to, from, next) => {
const
urlPath = window.location.href.replace(window.location.origin, ''),
matched = getMatchedComponents(to, router),
prevMatched = getMatchedComponents(from, router)
// Once an entry differs from the previous route's entry at the same
// index, every later entry is considered new too: `diffed` latches to
// true via the assignment inside the condition below.
let diffed = false
const preFetchList = matched
.filter((m, i) => {
return diffed || (diffed = (
!prevMatched[i] ||
prevMatched[i].c !== m.c ||
m.path.indexOf('/:') > -1 // does it has params?
))
})
.filter(m => m.c !== void 0 && (
typeof m.c.preFetch === 'function'
// Class components return the component options (and the preFetch hook) inside __c property
|| (m.c.__c !== void 0 && typeof m.c.__c.preFetch === 'function')
))
.map(m => m.c.__c !== void 0 ? m.c.__c.preFetch : m.c.preFetch)
// App-level preFetch (module state above) runs only once, on the
// first navigation that reaches this hook.
if (appPrefetch !== false) {
preFetchList.unshift(appPrefetch)
appPrefetch = false
}
if (preFetchList.length === 0) {
return next()
}
let hasRedirected = false
const redirect = url => {
hasRedirected = true
next(url)
}
const proceed = () => {
if (hasRedirected === false) { next() }
}
// Run the preFetch hooks sequentially via a promise chain; a
// redirect() short-circuits the remaining hooks, and any rejection is
// logged without blocking navigation.
preFetchList.reduce(
(promise, preFetch) => promise.then(() => hasRedirected === false && preFetch({
store,
currentRoute: to,
previousRoute: from,
redirect,
urlPath,
publicPath
})),
Promise.resolve()
)
.then(proceed)
.catch(e => {
console.error(e)
proceed()
})
})
}

View File

@@ -0,0 +1,23 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import lang from 'quasar/lang/tr.js'
import { Loading, Dialog, Notify } from 'quasar'

// Quasar plugin options: Turkish language pack, top-positioned notify
// defaults, and the Loading/Dialog/Notify plugins enabled.
export default {
  config: {
    notify: {
      position: 'top',
      timeout: 2500
    }
  },
  lang,
  plugins: { Loading, Dialog, Notify }
}

View File

@@ -15,7 +15,7 @@ export default defineConfig(() => {
/* =====================================================
BOOT FILES
===================================================== */
boot: ['dayjs'],
boot: ['dayjs', 'locale', 'resizeObserverGuard'],
/* =====================================================
GLOBAL CSS
@@ -56,6 +56,13 @@ export default defineConfig(() => {
server: { type: 'http' },
port: 9000,
open: true,
client: {
overlay: {
errors: true,
warnings: false,
runtimeErrors: false
}
},
// DEV proxy (CORS'suz)
proxy: [
@@ -63,7 +70,10 @@ export default defineConfig(() => {
context: ['/api'],
target: 'http://localhost:8080',
changeOrigin: true,
secure: false
secure: false,
ws: true,
timeout: 0,
proxyTimeout: 0
}
]
},

View File

@@ -27,7 +27,7 @@ var quasar_config_default = defineConfig(() => {
/* =====================================================
BOOT FILES
===================================================== */
boot: ["dayjs"],
boot: ["dayjs", "locale", "resizeObserverGuard"],
/* =====================================================
GLOBAL CSS
===================================================== */
@@ -62,13 +62,23 @@ var quasar_config_default = defineConfig(() => {
server: { type: "http" },
port: 9e3,
open: true,
client: {
overlay: {
errors: true,
warnings: false,
runtimeErrors: false
}
},
// DEV proxy (CORS'suz)
proxy: [
{
context: ["/api"],
target: "http://localhost:8080",
changeOrigin: true,
secure: false
secure: false,
ws: true,
timeout: 0,
proxyTimeout: 0
}
]
},

View File

@@ -3,12 +3,12 @@ import dayjs from 'dayjs'
import customParseFormat from 'dayjs/plugin/customParseFormat.js'
import relativeTime from 'dayjs/plugin/relativeTime.js'
import localizedFormat from 'dayjs/plugin/localizedFormat.js'
import 'dayjs/locale/tr.js'
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
// 🔹 Pluginleri aktif et
dayjs.extend(customParseFormat)
dayjs.extend(relativeTime)
dayjs.extend(localizedFormat)
dayjs.locale('tr')
applyDayjsLocale('tr')
export default dayjs

7
ui/src/boot/locale.js Normal file
View File

@@ -0,0 +1,7 @@
import { boot } from 'quasar/wrappers'
import { useLocaleStore } from 'src/stores/localeStore'

// Boot file: re-applies the stored locale at startup so every locale
// side effect in setLocale runs for the persisted value.
export default boot(function initLocale () {
  const localeStore = useLocaleStore()
  localeStore.setLocale(localeStore.locale)
})

View File

@@ -0,0 +1,36 @@
// Dev-only guard: swallows the harmless "ResizeObserver loop…"
// browser errors so they don't trip the dev-server error overlay.
// No-op in production builds and non-browser environments.
export default () => {
  if (process.env.PROD || typeof window === 'undefined') return

  const isResizeObserverOverlayError = (message) => {
    const text = String(message || '')
    return (
      text.includes('ResizeObserver loop completed with undelivered notifications') ||
      text.includes('ResizeObserver loop limit exceeded')
    )
  }

  // Shared suppression logic for both event channels.
  const suppress = (event, message) => {
    if (!isResizeObserverOverlayError(message)) return
    event.preventDefault()
    event.stopImmediatePropagation()
  }

  // Capture phase so this runs before the overlay's own listeners.
  window.addEventListener(
    'error',
    (event) => suppress(event, event?.message),
    true
  )

  window.addEventListener(
    'unhandledrejection',
    (event) => {
      const reason = event?.reason
      const msg = typeof reason === 'string'
        ? reason
        : (reason?.message || reason?.toString?.() || '')
      suppress(event, msg)
    },
    true
  )
}

View File

@@ -0,0 +1,42 @@
import { computed } from 'vue'
import { messages } from 'src/i18n/messages'
import { DEFAULT_LOCALE } from 'src/i18n/languages'
import { useLocaleStore } from 'src/stores/localeStore'
// Walk a dot-separated path ("a.b.c") through a nested object.
// Returns undefined as soon as a segment is missing, null, or the
// current value cannot hold it; an empty path returns obj itself.
function lookup(obj, path) {
  const segments = String(path || '').split('.').filter(Boolean)
  let current = obj
  for (const segment of segments) {
    const next = current ? current[segment] : undefined
    if (next == null) return undefined
    current = next
  }
  return current
}
// Minimal i18n composable backed by the locale store. t(key) looks a
// dot-path up in the active locale's messages, then walks a fallback
// chain (locale → en → tr), then the default locale, and finally
// returns the key itself so missing translations remain visible.
export function useI18n() {
  const localeStore = useLocaleStore()
  const currentLocale = computed(() => localeStore.locale)

  // Ordered list of locales to try for the active locale.
  function fallbackLocales(locale) {
    const normalized = String(locale || '').toLowerCase()
    switch (normalized) {
      case 'tr':
        return ['tr']
      case 'en':
        return ['en', 'tr']
      default:
        return [normalized, 'en', 'tr']
    }
  }

  function t(key) {
    for (const locale of fallbackLocales(currentLocale.value)) {
      const val = lookup(messages[locale] || {}, key)
      if (val != null) return val
    }
    const byDefault = lookup(messages[DEFAULT_LOCALE] || {}, key)
    if (byDefault != null) return byDefault
    return key
  }

  return {
    locale: currentLocale,
    t
  }
}

View File

@@ -0,0 +1,30 @@
import dayjs from 'dayjs'
import 'dayjs/locale/tr.js'
import 'dayjs/locale/en.js'
import 'dayjs/locale/de.js'
import 'dayjs/locale/it.js'
import 'dayjs/locale/es.js'
import 'dayjs/locale/ru.js'
import 'dayjs/locale/ar.js'
import { normalizeLocale } from './languages.js'
// Map of app locale codes to the full locale tags used for date
// formatting; Arabic stays as the generic "ar" tag.
export const DATE_LOCALE_MAP = {
  tr: 'tr-TR',
  en: 'en-US',
  de: 'de-DE',
  it: 'it-IT',
  es: 'es-ES',
  ru: 'ru-RU',
  ar: 'ar'
}

// Switch dayjs' global locale to the normalized app locale.
export function applyDayjsLocale(locale) {
  dayjs.locale(normalizeLocale(locale))
}

// Resolve the date-formatting locale tag for an app locale, defaulting
// to Turkish for anything unknown.
export function getDateLocale(locale) {
  const key = normalizeLocale(locale)
  return DATE_LOCALE_MAP[key] || DATE_LOCALE_MAP.tr
}

32
ui/src/i18n/languages.js Normal file
View File

@@ -0,0 +1,32 @@
// Locale registry for the UI: default/fallback locale, the locales the
// app supports, the language-picker options, and the language codes
// the backend expects.
export const DEFAULT_LOCALE = 'tr'
export const SUPPORTED_LOCALES = ['tr', 'en', 'de', 'it', 'es', 'ru', 'ar']

export const UI_LANGUAGE_OPTIONS = [
  { label: 'Türkçe', value: 'tr', short: 'TUR', flag: '🇹🇷' },
  { label: 'English', value: 'en', short: 'ENG', flag: '🇬🇧' },
  { label: 'Deutsch', value: 'de', short: 'DEU', flag: '🇩🇪' },
  { label: 'Italiano', value: 'it', short: 'ITA', flag: '🇮🇹' },
  { label: 'Español', value: 'es', short: 'ESP', flag: '🇪🇸' },
  { label: 'Русский', value: 'ru', short: 'RUS', flag: '🇷🇺' },
  { label: 'العربية', value: 'ar', short: 'ARA', flag: '🇸🇦' }
]

export const BACKEND_LANG_MAP = {
  tr: 'TR',
  en: 'EN',
  de: 'DE',
  it: 'IT',
  es: 'ES',
  ru: 'RU',
  ar: 'AR'
}

// Coerce any value to a supported lower-case locale code; unknown or
// empty input falls back to the default locale.
export function normalizeLocale(value) {
  const candidate = String(value || '').trim().toLowerCase()
  if (SUPPORTED_LOCALES.includes(candidate)) {
    return candidate
  }
  return DEFAULT_LOCALE
}

// Translate an app locale into the backend's upper-case language code.
export function toBackendLangCode(locale) {
  return BACKEND_LANG_MAP[normalizeLocale(locale)] || BACKEND_LANG_MAP[DEFAULT_LOCALE]
}

28
ui/src/i18n/messages.js Normal file
View File

@@ -0,0 +1,28 @@
// Static UI translation messages keyed by locale, then dot-path
// (consumed by useI18n's t()). Only tr/en ship bundled strings; other
// locales fall back through useI18n's fallback chain.
export const messages = {
  tr: {
    app: {
      title: 'Baggi Software System',
      // FIX: restored the broken literal `ıkış Yap'` (lost its opening
      // quote and leading 'Ç' — a syntax error); the English
      // counterpart 'Log Out' confirms the intended text.
      logoutTitle: 'Çıkış Yap',
      logoutConfirm: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
      changePassword: 'Şifre Değiştir',
      language: 'Dil'
    },
    statement: {
      invalidDateRange: 'Başlangıç tarihi bitiş tarihinden sonra olamaz.',
      selectFilters: 'Lütfen cari ve tarih aralığını seçiniz.'
    }
  },
  en: {
    app: {
      title: 'Baggi Software System',
      logoutTitle: 'Log Out',
      logoutConfirm: 'Are you sure you want to end your session?',
      changePassword: 'Change Password',
      language: 'Language'
    },
    statement: {
      invalidDateRange: 'Start date cannot be later than end date.',
      selectFilters: 'Please select account and date range.'
    }
  }
}

View File

@@ -11,9 +11,41 @@
<q-avatar class="bg-secondary q-mr-sm">
<img src="/images/Baggi-tekstilas-logolu.jpg" />
</q-avatar>
Baggi Software System
{{ t('app.title') }}
</q-toolbar-title>
<q-select
v-model="selectedLocale"
dense
outlined
emit-value
map-options
options-dense
class="q-mr-sm lang-select"
option-value="value"
option-label="label"
:options="languageOptions"
>
<template #selected-item="scope">
<div class="lang-item">
<span class="lang-flag">{{ scope.opt.flag }}</span>
<span class="lang-short">{{ scope.opt.short }}</span>
</div>
</template>
<template #option="scope">
<q-item v-bind="scope.itemProps">
<q-item-section>
<div class="lang-item">
<span class="lang-flag">{{ scope.opt.flag }}</span>
<span class="lang-short">{{ scope.opt.short }}</span>
<span>{{ scope.opt.label }}</span>
</div>
</q-item-section>
</q-item>
</template>
</q-select>
<q-btn flat dense round icon="logout" @click="confirmLogout" />
</q-toolbar>
@@ -99,7 +131,7 @@
</q-item-section>
<q-item-section>
Şifre Değiştir
{{ t('app.changePassword') }}
</q-item-section>
</q-item>
@@ -122,7 +154,7 @@
<q-toolbar class="bg-secondary">
<q-toolbar-title>
Baggi Software System
{{ t('app.title') }}
</q-toolbar-title>
</q-toolbar>
@@ -138,6 +170,9 @@ import { Dialog, useQuasar } from 'quasar'
import { useAuthStore } from 'stores/authStore'
import { usePermissionStore } from 'stores/permissionStore'
import { useI18n } from 'src/composables/useI18n'
import { UI_LANGUAGE_OPTIONS } from 'src/i18n/languages'
import { useLocaleStore } from 'src/stores/localeStore'
/* ================= STORES ================= */
@@ -147,6 +182,16 @@ const route = useRoute()
const $q = useQuasar()
const auth = useAuthStore()
const perm = usePermissionStore()
const localeStore = useLocaleStore()
const { t } = useI18n()
const languageOptions = UI_LANGUAGE_OPTIONS
const selectedLocale = computed({
get: () => localeStore.locale,
set: (value) => {
localeStore.setLocale(value)
}
})
/* ================= UI ================= */
@@ -159,8 +204,8 @@ function toggleLeftDrawer () {
function confirmLogout () {
Dialog.create({
title: ıkış Yap',
message: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
title: t('app.logoutTitle'),
message: t('app.logoutConfirm'),
cancel: true,
persistent: true
}).onOk(() => {
@@ -279,6 +324,19 @@ const menuItems = [
]
},
{
label: 'Fiyatlandırma',
icon: 'request_quote',
children: [
{
label: 'Ürün Fiyatlandırma',
to: '/app/pricing/product-pricing',
permission: 'order:view'
}
]
},
{
label: 'Sistem',
icon: 'settings',
@@ -317,6 +375,18 @@ const menuItems = [
]
},
{
label: 'Dil Çeviri',
icon: 'translate',
children: [
{
label: 'Çeviri Tablosu',
to: '/app/language/translations',
permission: 'language:update'
}
]
},
{
label: 'Kullanıcı Yönetimi',
@@ -374,5 +444,27 @@ const filteredMenu = computed(() => {
-webkit-overflow-scrolling: touch;
touch-action: pan-y;
}
.lang-select {
width: 140px;
background: #fff;
border-radius: 6px;
}
.lang-item {
display: inline-flex;
align-items: center;
gap: 8px;
}
.lang-flag {
font-size: 15px;
line-height: 1;
}
.lang-short {
font-weight: 700;
letter-spacing: 0.3px;
}
</style>

View File

@@ -1,4 +1,4 @@
<template>
<template>
<!-- ===========================================================
🧾 ORDER ENTRY PAGE (BSSApp)
v23 Sticky-stack + Drawer uyumlu yapı
@@ -262,14 +262,24 @@
@click="openNewRowEditor"
:disable="isClosedRow || isViewOnly || !canMutateRows"
/>
<q-btn
v-if="isEditMode && canBulkUpdateLineDueDates"
label="SATIR TERMINLERINI TOPLU GUNCELLE"
color="warning"
icon="event"
class="q-ml-sm"
:loading="orderStore.loading"
:disable="orderStore.loading || !canBulkUpdateLineDueDates"
@click="openBulkDueDateDialog"
/>
<q-btn
v-if="canSubmitOrder"
:label="isEditMode ? 'TÜMÜNÜ GÜNCELLE' : 'TÜMÜNÜ KAYDET'"
color="primary"
icon="save"
class="q-ml-sm"
:loading="orderStore.loading"
:disable="!canSubmitOrder"
:loading="orderStore.loading || isSubmitAllInFlight"
:disable="!canSubmitOrder || orderStore.loading || isSubmitAllInFlight"
@click="confirmAndSubmit"
/>
</div>
@@ -323,12 +333,12 @@
======================================================== -->
<div class="order-scroll-y" :class="{ 'compact-grid-header': compactGridHeader }"> <!-- ✅ YENİ: Grid + Editor ortak dikey scroll -->
<div class="order-grid-body">
<template v-for="grp in groupedRows" :key="grp.name">
<template v-for="grp in groupedRows" :key="grp.groupKey">
<div :class="['summary-group', grp.open ? 'open' : 'closed']">
<!-- 🟡 Sub-header -->
<div class="order-sub-header" @click="toggleGroup(grp.name)">
<div class="sub-left">{{ grp.name }}</div>
<div class="order-sub-header" @click="toggleGroup(grp.groupKey)">
<div class="sub-left">{{ grp.displayName }}</div>
<div class="sub-center">
<div
@@ -348,10 +358,10 @@
<div class="order-text-caption">
Toplam {{ grp.name }} Adet: {{ grp.toplamAdet }}
Toplam {{ grp.displayName }} Adet: {{ grp.toplamAdet }}
</div>
<div class="order-text-caption">
Toplam {{ grp.name }} Tutar:
Toplam {{ grp.displayName }} Tutar:
{{ Number(grp.toplamTutar || 0).toLocaleString('tr-TR', { minimumFractionDigits: 2 }) }}
{{ form.pb || aktifPB }}
</div>
@@ -450,6 +460,41 @@
</div>
</template>
</div>
<!-- =======================================================
🔹 TOPLU TERMIN GUNCELLEME
======================================================== -->
<q-dialog v-model="showBulkDueDateDialog" persistent>
<q-card style="min-width: 420px; max-width: 90vw;">
<q-card-section class="text-subtitle1 text-weight-bold">
Satir Terminlerini Toplu Guncelle
</q-card-section>
<q-card-section class="q-pt-none">
<div class="q-mb-md">
Tum siparis satiri terminlerini sectiginiz tarihi koyarak guncellemek istediginize emin misiniz?
</div>
<q-input
v-model="bulkDueDateValue"
type="date"
label="Yeni Termin Tarihi"
filled
dense
autofocus
/>
</q-card-section>
<q-card-actions align="right">
<q-btn flat label="Iptal" v-close-popup />
<q-btn
color="primary"
label="Evet"
:loading="orderStore.loading"
@click="confirmBulkDueDateUpdate"
/>
</q-card-actions>
</q-card>
</q-dialog>
<!-- =======================================================
🔹 SATIR DÜZENLEYİCİ FORM (EDITOR)
======================================================== -->
@@ -728,16 +773,18 @@
v-if="canMutateRows"
:color="isEditing ? 'positive' : 'primary'"
:label="isEditing ? 'Güncelle' : 'Kaydet'"
:loading="isRowSaveInFlight"
@click="onSaveOrUpdateRow"
:disable="isClosedRow || isViewOnly || !canMutateRows"
:disable="isClosedRow || isViewOnly || !canMutateRows || isRowSaveInFlight"
/>
<q-btn
v-if="canMutateRows"
color="secondary"
label="Kaydet ve Diğer Renge Geç"
:loading="isRowSaveInFlight"
@click="onSaveAndNextColor"
:disable="isClosedRow || isViewOnly || !canMutateRows"
:disable="isClosedRow || isViewOnly || !canMutateRows || isRowSaveInFlight"
/>
<q-btn
v-if="isEditing && canMutateRows"
@@ -747,6 +794,14 @@
@click="removeSelected"
:disable="isClosedRow || isViewOnly || !canMutateRows"
/>
<q-btn
v-if="canMutateRows"
flat
color="warning"
label="Bedenleri Sıfırla"
@click="onZeroBedenClick"
:disable="isClosedRow || isViewOnly || !canMutateRows"
/>
<q-btn
v-if="canMutateRows"
flat
@@ -875,8 +930,62 @@ console.log('🧩 Route parametresi alındı (setup başında):', orderHeaderID.
const aktifPB = ref('USD') // Varsayılan para birimi (Cari seçimiyle değişebilir)
// 🔹 Model detayları cache (product-detail API verilerini tutar)
const productCache = reactive({})
const showBulkDueDateDialog = ref(false)
const bulkDueDateValue = ref('')
const isSubmitAllInFlight = ref(false)
const isRowSaveInFlight = ref(false)
function showEditorQtyPriceBlockingDialog(message, details = '') {
const detailHtml = details ? `<br><br><b>Detay:</b><br>${details}` : ''
$q.dialog({
title: 'Kayit Engellendi',
message: `${message}${detailHtml}`,
html: true,
ok: { label: 'Tamam', color: 'negative' }
})
}
function validateEditorRowBeforeSave() {
const adet = Number(form.adet || 0)
const fiyatRaw = String(form.fiyat ?? '').trim()
const fiyat = Number(form.fiyat || 0)
if (adet <= 0) {
showEditorQtyPriceBlockingDialog('Siparis adeti toplam 0 olamaz.')
return false
}
if (!fiyatRaw || !Number.isFinite(fiyat) || fiyat <= 0) {
showEditorQtyPriceBlockingDialog('Urun fiyati girmeden ilerleyemezsiniz.')
return false
}
return true
}
function validateSummaryRowsBeforeSubmit() {
const rows = Array.isArray(orderStore.summaryRows) ? orderStore.summaryRows : []
const invalidRows = rows.filter(r => {
const adet = Number(r?.adet || 0)
const fiyatRaw = String(r?.fiyat ?? '').trim()
const fiyat = Number(r?.fiyat || 0)
return adet <= 0 || !fiyatRaw || !Number.isFinite(fiyat) || fiyat <= 0
})
if (!invalidRows.length) return true
const preview = invalidRows
.slice(0, 8)
.map(r => `${String(r?.model || '').trim() || '-'} / ${String(r?.renk || '').trim() || '-'} (adet=${Number(r?.adet || 0)}, fiyat=${String(r?.fiyat ?? '')})`)
.join('<br>')
showEditorQtyPriceBlockingDialog(
'Urun fiyati girmeden ilerleyemezsiniz.',
preview
)
return false
}
const confirmAndSubmit = async () => {
if (orderStore.loading) return
if (orderStore.loading || isSubmitAllInFlight.value) return
if (!hasSubmitPermission()) {
notifyNoPermission(
@@ -896,6 +1005,11 @@ const confirmAndSubmit = async () => {
return
}
if (!validateSummaryRowsBeforeSubmit()) {
return
}
isSubmitAllInFlight.value = true
try {
// NEW veya EDIT ayrımı store.mode üzerinden
await orderStore.submitAllReal(
@@ -907,6 +1021,45 @@ const confirmAndSubmit = async () => {
)
} catch (err) {
console.error('❌ confirmAndSubmit hata:', err)
} finally {
isSubmitAllInFlight.value = false
}
}
function openBulkDueDateDialog() {
if (!canBulkUpdateLineDueDates.value) return
const firstRowDate = summaryRows.value?.find?.(row => !!row?.terminTarihi)?.terminTarihi || ''
bulkDueDateValue.value = toDateOnly(form.AverageDueDate || firstRowDate || dayjs().format('YYYY-MM-DD'))
showBulkDueDateDialog.value = true
}
async function confirmBulkDueDateUpdate() {
const dueDate = toDateOnly(bulkDueDateValue.value)
if (!dueDate) {
$q.notify({
type: 'warning',
message: 'Lutfen bir termin tarihi seciniz.'
})
return
}
try {
const result = await orderStore.bulkUpdateOrderLineDueDate(orderHeaderID.value, dueDate)
orderStore.applyBulkLineDueDateLocally(dueDate)
form.AverageDueDate = dueDate
showBulkDueDateDialog.value = false
$q.notify({
type: 'positive',
message: `Tum siparis satiri terminleri guncellendi (${Number(result?.updatedLines || 0)} satir).`
})
} catch (err) {
console.error('❌ confirmBulkDueDateUpdate hata:', err)
$q.notify({
type: 'negative',
message: err?.message || 'Satir terminleri guncellenemedi.'
})
}
}
@@ -931,6 +1084,14 @@ const canMutateRows = computed(() => {
if (isViewOnly.value) return false
return isEditMode.value ? canUpdateOrder.value : canWriteOrder.value
})
const canBulkUpdateLineDueDates = computed(() => {
if (!isEditMode.value) return false
if (isViewOnly.value) return false
if (isClosedOrder.value) return false
if (!canUpdateOrder.value) return false
if (!orderHeaderID.value) return false
return Array.isArray(orderStore.summaryRows) && orderStore.summaryRows.length > 0
})
function notifyNoPermission(message) {
$q.notify({
@@ -1017,7 +1178,7 @@ const form = reactive({
IsCreditableConfirmed: false,
IsSalesViaInternet: false,
IsSuspended: false,
IsCompleted: false,
IsCompleted: true,
IsPrinted: false,
IsLocked: false,
IsClosed: false,
@@ -1090,6 +1251,7 @@ const form = reactive({
kategori: '',
askiliyan: '',
yetiskinGarson: '',
yasPayloadMap: {},
seri: '',
bedenLabels: [],
bedenler: [],
@@ -1248,6 +1410,7 @@ async function resetEditor(force = false) {
drop: '',
askiliyan: '',
yetiskinGarson: '',
yasPayloadMap: {},
adet: 0,
fiyat: 0,
@@ -1362,6 +1525,37 @@ const selectedRow = computed(() => {
=========================================================== */
const groupOpen = reactive({})
function normalizeSubHeaderText (v, fallback = 'GENEL') {
const s = String(v || '')
.replace(/\([^)]*\)/g, ' ')
.replace(/\s+/g, ' ')
.trim()
return s || fallback
}
function resolveKategoriForSubHeader (row) {
return normalizeSubHeaderText(
row?.kategori ||
row?.Kategori ||
row?.ProductAtt44Desc ||
row?.YETISKIN_GARSON ||
row?.yetiskinGarson ||
row?.YetiskinGarson ||
'',
'GENEL'
)
}
function resolveUrunAnaGrubuForSubHeader (row) {
return normalizeSubHeaderText(
row?.urunAnaGrubu ||
row?.UrunAnaGrubu ||
row?.ProductAtt01Desc ||
'',
'GENEL'
).toUpperCase()
}
const groupedRows = computed(() => {
const rows = Array.isArray(summaryRows.value) ? summaryRows.value : []
const buckets = {}
@@ -1373,36 +1567,42 @@ const groupedRows = computed(() => {
: storeSchemaByKey
for (const row of rows) {
const ana = (row?.urunAnaGrubu || 'GENEL')
.toUpperCase()
.trim()
const ana = resolveUrunAnaGrubuForSubHeader(row)
const kategori = resolveKategoriForSubHeader(row)
const grpKey = String(row?.grpKey || 'tak').trim() || 'tak'
const bucketKey = `${kategori}::${ana}`
if (!buckets[ana]) {
buckets[ana] = {
if (!buckets[bucketKey]) {
buckets[bucketKey] = {
name: ana,
kategori,
displayName: `${kategori} ${ana}`,
rows: [],
toplamAdet: 0,
toplamTutar: 0,
open: groupOpen[ana] ?? true,
open: groupOpen[bucketKey] ?? true,
// 🔑 TEK KAYNAK
grpKey: row.grpKey
grpKey
}
order.push(ana)
order.push(bucketKey)
}
const bucket = buckets[ana]
const bucket = buckets[bucketKey]
bucket.rows.push(row)
bucket.toplamAdet += Number(row.adet || 0)
bucket.toplamTutar += Number(row.tutar || 0)
}
return order.map(name => {
const grp = buckets[name]
return order.map(bucketKey => {
const grp = buckets[bucketKey]
const schema = schemaMap?.[grp.grpKey]
const displayName = `${grp.kategori} ${grp.name}`.trim()
return {
...grp,
displayName,
groupKey: bucketKey,
bedenValues: schema?.values || []
}
})
@@ -2242,7 +2442,9 @@ async function hydrateEditorFromRow(row, opts = {}) {
...d,
UrunAnaGrubu: d.UrunAnaGrubu || d.ProductGroup || d.ProductAtt01Desc || '',
UrunAltGrubu: d.UrunAltGrubu || d.ProductSubGroup || d.ProductAtt02Desc || '',
Kategori: d.Kategori || '',
Kategori: d.Kategori || d.ProductAtt44Desc || d.YETISKIN_GARSON || d.YetiskinGarson || '',
ProductAtt01Desc: d.ProductAtt01Desc || d.UrunAnaGrubu || d.ProductGroup || '',
ProductAtt44Desc: d.ProductAtt44Desc || d.Kategori || d.YETISKIN_GARSON || d.YetiskinGarson || '',
AskiliYan: d.AskiliYan || '',
YETISKIN_GARSON: d.YETISKIN_GARSON || d.YetiskinGarson || d.AskiliYan || '',
YetiskinGarson: d.YetiskinGarson || d.YETISKIN_GARSON || d.AskiliYan || '',
@@ -2260,7 +2462,7 @@ async function hydrateEditorFromRow(row, opts = {}) {
}
const modelMeta = await ensureModelDetail(row.model)
const rowUrunAna = row.urunAnaGrubu || row.UrunAnaGrubu || modelMeta?.UrunAnaGrubu || ''
const rowUrunAna = row.urunAnaGrubu || row.UrunAnaGrubu || row.ProductAtt01Desc || modelMeta?.UrunAnaGrubu || modelMeta?.ProductAtt01Desc || ''
const rowUrunAlt = row.urunAltGrubu || row.UrunAltGrubu || modelMeta?.UrunAltGrubu || ''
const rowAskili = row.askiliyan || row.AskiliYan || modelMeta?.AskiliYan || ''
const rowYetiskinGarson =
@@ -2274,7 +2476,9 @@ async function hydrateEditorFromRow(row, opts = {}) {
const rowKategori =
row.kategori ||
row.Kategori ||
row.ProductAtt44Desc ||
modelMeta?.Kategori ||
modelMeta?.ProductAtt44Desc ||
''
/* -------------------------------------------------------
@@ -2315,6 +2519,7 @@ async function hydrateEditorFromRow(row, opts = {}) {
drop: row.drop || row.Drop || modelMeta?.Drop || '',
askiliyan: rowAskili,
yetiskinGarson: rowYetiskinGarson,
yasPayloadMap: { ...(row.yasPayloadMap || {}) },
kategori: rowKategori,
aciklama: row.aciklama,
fiyat: Number(row.fiyat || 0),
@@ -2738,7 +2943,9 @@ async function onModelChange(modelCode) {
ProductGroup: d.ProductGroup || d.UrunAnaGrubu || d.ProductAtt01Desc || '',
ProductSubGroup: d.ProductSubGroup || d.UrunAltGrubu || d.ProductAtt02Desc || '',
URUN_ANA_GRUBU: d.UrunAnaGrubu || d.ProductAtt01Desc || '',
URUN_ALT_GRUBU: d.UrunAltGrubu || d.ProductAtt02Desc || ''
URUN_ALT_GRUBU: d.UrunAltGrubu || d.ProductAtt02Desc || '',
ProductAtt01Desc: d.ProductAtt01Desc || d.UrunAnaGrubu || d.ProductGroup || '',
ProductAtt44Desc: d.ProductAtt44Desc || d.Kategori || d.YETISKIN_GARSON || d.YetiskinGarson || ''
}
console.log('🗂️ Cache eklendi:', modelCode, Object.keys(productCache[modelCode]))
}
@@ -2753,9 +2960,10 @@ async function onModelChange(modelCode) {
fit: d.Fit1 || d.Fit || '',
urunIcerik: d.UrunIcerik || d.Fabric || '',
drop: d.Drop || '',
kategori: d.Kategori || '',
kategori: d.Kategori || d.ProductAtt44Desc || d.YETISKIN_GARSON || d.YetiskinGarson || '',
askiliyan: d.AskiliYan || '',
yetiskinGarson: d.YETISKIN_GARSON || d.YetiskinGarson || d.AskiliYan || '',
yasPayloadMap: {},
aciklama: keep.aciklama,
fiyat: keep.fiyat,
adet: keep.adet,
@@ -2915,7 +3123,23 @@ async function onColor2Change(colorCode2) {
=========================================================== */
const bedenStock = ref([]) // Görsel tablo için stok listesi
const stockMap = ref({}) // { "48": 12, "50": 7, ... } şeklinde key-value map
function warnIfSecondColorMissing() {
  // Nudge the user when second-color options exist for this model/color
  // but no second color has been picked yet (non-blocking warning).
  const options = renkOptions2.value
  if (!Array.isArray(options) || options.length === 0) return
  if (String(form.renk2 || '').trim()) return
  $q.notify({
    type: 'warning',
    message: 'Bu model/renk için 2. renk seçimi önerilir.',
    position: 'top-right'
  })
}
const onSaveOrUpdateRow = async () => {
if (isRowSaveInFlight.value) return
if (!hasRowMutationPermission()) {
notifyNoPermission(
isEditMode.value
@@ -2925,7 +3149,13 @@ const onSaveOrUpdateRow = async () => {
return
}
await orderStore.saveOrUpdateRowUnified({
if (!validateEditorRowBeforeSave()) return
warnIfSecondColorMissing()
isRowSaveInFlight.value = true
try {
const ok = await orderStore.saveOrUpdateRowUnified({
form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
@@ -2939,7 +3169,12 @@ const onSaveOrUpdateRow = async () => {
stockMap,
$q
})
if (ok !== false) {
showEditor.value = false
}
} finally {
isRowSaveInFlight.value = false
}
}
function normalizeColorValue(val) {
@@ -2959,6 +3194,8 @@ function getNextColorValue() {
}
const onSaveAndNextColor = async () => {
if (isRowSaveInFlight.value) return
if (!hasRowMutationPermission()) {
notifyNoPermission(
isEditMode.value
@@ -2978,7 +3215,14 @@ const onSaveAndNextColor = async () => {
return
}
const ok = await orderStore.saveOrUpdateRowUnified({
if (!validateEditorRowBeforeSave()) return
warnIfSecondColorMissing()
isRowSaveInFlight.value = true
let ok = false
try {
ok = await orderStore.saveOrUpdateRowUnified({
form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
resetEditor: () => {},
@@ -2989,6 +3233,9 @@ const onSaveAndNextColor = async () => {
stockMap,
$q
})
} finally {
isRowSaveInFlight.value = false
}
if (!ok) return
@@ -3016,6 +3263,25 @@ const onSaveAndNextColor = async () => {
})
}
function onZeroBedenClick () {
  // Reset every size quantity on the editor form to zero and refresh totals.
  const labels = form.bedenLabels
  if (!Array.isArray(labels) || labels.length === 0) {
    $q.notify({
      type: 'warning',
      message: 'Sıfırlanacak beden alanı bulunamadı.'
    })
    return
  }
  form.bedenler = labels.map(() => 0)
  updateTotals(form)
  $q.notify({
    type: 'info',
    message: 'Beden adetleri sıfırlandı.',
    position: 'top-right'
  })
}
@@ -3043,12 +3309,34 @@ async function loadOrderInventory(merge = false) {
console.log(`📦 MSSQL stok verisi geldi: ${data.length}`)
console.table(data)
const pickPreferredYasPayloadLabel = (currentRaw, nextRaw) => {
  // Choose which raw size label to keep: whichever is non-empty, and when both
  // are present, prefer one that ends in YAS/YAŞ over one that does not.
  const endsWithYas = (label) => /YAS$|YAŞ$/.test(label)
  const cur = String(currentRaw || '').trim().toUpperCase()
  const nxt = String(nextRaw || '').trim().toUpperCase()
  if (!nxt) return cur
  if (!cur) return nxt
  return !endsWithYas(cur) && endsWithYas(nxt) ? nxt : cur
}
// 1️⃣ Normalize (gelen büyük harfli)
const invMap = {}
const invYasPayloadMap = {}
for (const x of data) {
const beden = normalizeBedenLabel(String(x.Beden || '').trim())
const rawBeden = String(x.Beden || '').trim()
const beden = normalizeBedenLabel(rawBeden)
const stokDeger = Number(x.KullanilabilirAdet ?? 0)
invMap[beden] = stokDeger
const rawUpper = rawBeden.toUpperCase()
if (/^(\d+)\s*(Y|YAS|YAŞ)$/.test(rawUpper)) {
const canonical = normalizeBedenLabel(rawUpper)
invYasPayloadMap[canonical] = pickPreferredYasPayloadLabel(
invYasPayloadMap[canonical],
rawUpper
)
}
}
// 2️⃣ Form bedenlerine göre map oluştur
@@ -3072,6 +3360,7 @@ async function loadOrderInventory(merge = false) {
beden,
stok
}))
form.yasPayloadMap = { ...(form.yasPayloadMap || {}), ...invYasPayloadMap }
console.log('✅ Stok haritası güncellendi:', stockMap.value)
} catch (err) {
@@ -3095,6 +3384,22 @@ watch(
}
)
// 📝 Sipariş genel açıklaması draft'ta kalmalı:
// form -> store.header sync + debounce persist
watch(
() => [form.Description, form.InternalDescription],
([description, internalDescription]) => {
orderStore.setHeaderFields?.(
{
Description: description ?? '',
InternalDescription: internalDescription ?? ''
},
{ immediatePersist: false }
)
orderStore._safePersistDebounced?.()
}
)
/* ===========================================================
🔹 useComboWatcher (v6.3 — MUTATION AWARE & CLEAN)
- Edit modda combo değişirse:

View File

@@ -17,6 +17,7 @@
icon="save"
label="Secili Degisiklikleri Kaydet"
:loading="store.saving"
:disable="store.loading || store.saving || isBulkSubmitting"
@click="onBulkSubmit"
/>
</div>
@@ -60,11 +61,11 @@
</div>
<div class="col-2">
<q-input
:model-value="formatDate(header?.AverageDueDate)"
v-model="headerAverageDueDate"
label="Tahmini Termin Tarihi"
filled
dense
readonly
type="date"
/>
</div>
</div>
@@ -101,6 +102,7 @@
<q-checkbox
size="sm"
:model-value="!!selectedMap[props.row.RowKey]"
:disable="store.saving"
@update:model-value="(val) => toggleRowSelection(props.row.RowKey, val)"
/>
</q-td>
@@ -108,43 +110,76 @@
<template #body-cell-NewItemCode="props">
<q-td :props="props">
<q-select
v-model="props.row.NewItemEntryMode"
dense
filled
emit-value
map-options
:options="newItemEntryModeOptions"
label="Kod Giris Tipi"
@update:model-value="val => onNewItemEntryModeChange(props.row, val)"
/>
<q-select
v-if="props.row.NewItemEntryMode === 'selected'"
class="q-mt-xs"
v-model="props.row.NewItemCode"
dense
filled
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
option-label="label"
option-value="value"
:options="productCodeSelectOptions"
label="Eski Kod Sec"
@filter="onFilterProductCode"
@update:model-value="val => onSelectProduct(props.row, val)"
/>
<q-input
v-else-if="props.row.NewItemEntryMode === 'typed'"
class="q-mt-xs"
v-model="props.row.NewItemCode"
dense
filled
maxlength="13"
label="Yeni Urun"
@update:model-value="val => onNewItemChange(props.row, val)"
>
<template #append>
<q-icon name="arrow_drop_down" class="cursor-pointer" />
</template>
<q-menu
anchor="bottom left"
self="top left"
fit
>
<div class="q-pa-sm" style="min-width:260px">
<q-input
v-model="productSearch"
dense
filled
debounce="200"
placeholder="Urun ara..."
placeholder="X999-XX99999"
label="Yeni Kod Ekle"
:class="newItemInputClass(props.row)"
@update:model-value="val => onNewItemChange(props.row, val, 'typed')"
/>
<div v-if="props.row.NewItemMode && props.row.NewItemMode !== 'empty'" class="q-mt-xs row items-center no-wrap">
<q-badge :color="newItemBadgeColor(props.row)" text-color="white">
{{ newItemBadgeLabel(props.row) }}
</q-badge>
<span class="text-caption q-ml-sm text-grey-8">{{ newItemHintText(props.row) }}</span>
<q-btn
v-if="props.row.NewItemMode === 'new'"
class="q-ml-sm"
dense
flat
size="sm"
color="warning"
label="Urun Boyutlandirma"
@click="openCdItemDialog(props.row.NewItemCode)"
/>
<q-btn
v-if="props.row.NewItemMode && props.row.NewItemMode !== 'empty'"
class="q-ml-xs"
dense
flat
size="sm"
color="primary"
label="Urun Ozellikleri"
@click="openAttributeDialog(props.row.NewItemCode)"
/>
<q-list class="q-mt-xs" bordered separator>
<q-item
v-for="opt in filteredProducts"
:key="opt.ProductCode"
clickable
@click="onSelectProduct(props.row, opt.ProductCode)"
>
<q-item-section>{{ opt.ProductCode }}</q-item-section>
</q-item>
</q-list>
</div>
</q-menu>
</q-input>
</q-td>
</template>
@@ -155,15 +190,17 @@
:options="getColorOptions(props.row)"
option-label="colorLabel"
option-value="color_code"
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
use-input
new-value-mode="add-unique"
dense
filled
label="Yeni Renk"
:disable="isColorSelectionLocked(props.row)"
@update:model-value="() => onNewColorChange(props.row)"
@new-value="(val, done) => onCreateColorValue(props.row, val, done)"
/>
</q-td>
</template>
@@ -173,16 +210,31 @@
<q-select
v-model="props.row.NewDim2"
:options="getSecondColorOptions(props.row)"
option-label="item_dim2_code"
option-label="item_dim2_label"
option-value="item_dim2_code"
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
use-input
new-value-mode="add-unique"
dense
filled
label="Yeni 2. Renk"
@new-value="(val, done) => onCreateSecondColorValue(props.row, val, done)"
:disable="isColorSelectionLocked(props.row)"
@update:model-value="() => onNewDim2Change(props.row)"
/>
</q-td>
</template>
<template #body-cell-NewDueDate="props">
<q-td :props="props">
<q-input
v-model="props.row.NewDueDate"
dense
filled
type="date"
label="Yeni Termin"
/>
</q-td>
</template>
@@ -205,6 +257,148 @@
<q-banner v-if="store.error" class="bg-red text-white q-mt-sm">
Hata: {{ store.error }}
</q-banner>
<q-dialog v-model="cdItemDialogOpen" persistent>
<q-card style="min-width: 980px; max-width: 98vw;">
<q-card-section class="row items-center q-pb-none">
<div class="text-h6">Urun Boyutlandirma</div>
<q-space />
<q-badge color="warning" text-color="black">
{{ cdItemTargetCode || '-' }}
</q-badge>
</q-card-section>
<q-card-section class="q-pt-md">
<div class="row q-col-gutter-sm items-center q-mb-md bg-grey-2 q-pa-sm rounded-borders">
<div class="col-12 col-md-8">
<q-select
v-model="copySourceCode"
dense
filled
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
option-label="label"
option-value="value"
label="Benzer Eski Urun Kodundan Getir"
placeholder="Kopyalanacak urun kodunu yazin"
:options="productCodeSelectOptions"
@filter="onFilterProductCode"
/>
</div>
<div class="col-12 col-md-4">
<q-btn
color="secondary"
icon="content_copy"
label="Ozellikleri Kopyala"
class="full-width"
:disable="!copySourceCode"
@click="copyFromOldProduct('cdItem')"
/>
</div>
</div>
<div class="row q-col-gutter-sm">
<div class="col-12 col-md-4">
<q-select v-model="cdItemDraftForm.ItemDimTypeCode" dense filled use-input fill-input hide-selected input-debounce="0" emit-value map-options option-label="label" option-value="value" :options="lookupOptions('itemDimTypeCodes')" label="Boyut Secenekleri" />
</div>
<div class="col-12 col-md-4">
<q-select v-model="cdItemDraftForm.ProductHierarchyID" dense filled use-input fill-input hide-selected input-debounce="0" emit-value map-options option-label="label" option-value="value" :options="lookupOptions('productHierarchyIDs')" label="Urun Hiyerarsi Grubu" />
</div>
</div>
</q-card-section>
<q-card-actions align="right">
<q-btn flat label="Vazgec" color="grey-8" v-close-popup />
<q-btn color="primary" label="Taslagi Kaydet" @click="saveCdItemDraft" />
</q-card-actions>
</q-card>
</q-dialog>
<q-dialog v-model="attributeDialogOpen" persistent>
<q-card style="min-width: 1100px; max-width: 98vw;">
<q-card-section class="row items-center q-pb-none">
<div class="text-h6">Urun Ozellikleri (2. Pop-up)</div>
<q-space />
<q-badge color="primary">{{ attributeTargetCode || '-' }}</q-badge>
</q-card-section>
<q-card-section class="q-pt-md">
<div class="row q-col-gutter-sm items-center q-mb-md bg-grey-2 q-pa-sm rounded-borders">
<div class="col-12 col-md-8">
<q-select
v-model="copySourceCode"
dense
filled
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
option-label="label"
option-value="value"
label="Benzer Eski Urun Kodundan Getir"
placeholder="Kopyalanacak urun kodunu yazin"
:options="productCodeSelectOptions"
@filter="onFilterProductCode"
/>
</div>
<div class="col-12 col-md-4">
<q-btn
color="secondary"
icon="content_copy"
label="Ozellikleri Kopyala"
class="full-width"
:disable="!copySourceCode"
@click="copyFromOldProduct('attributes')"
/>
</div>
</div>
</q-card-section>
<q-card-section style="max-height: 68vh; overflow: auto;">
<div
v-for="(row, idx) in attributeRows"
:key="`${row.AttributeTypeCodeNumber}-${idx}`"
class="row q-col-gutter-sm q-mb-xs items-center"
>
<div class="col-12 col-md-5">
<q-input :model-value="row.TypeLabel" dense filled readonly />
</div>
<div class="col-12 col-md-7">
<q-select
v-model="row.AttributeCode"
dense
filled
use-input
fill-input
hide-selected
input-debounce="0"
@filter="(val, update) => onFilterAttributeOption(row, val, update)"
emit-value
map-options
option-label="label"
option-value="value"
:options="row.Options"
/>
</div>
</div>
</q-card-section>
<q-card-actions align="right">
<q-btn flat label="Vazgec" color="grey-8" v-close-popup />
<q-btn color="primary" label="Ozellikleri Taslaga Kaydet" @click="saveAttributeDraft" />
</q-card-actions>
</q-card>
</q-dialog>
<q-inner-loading :showing="store.saving">
<q-spinner-gears size="50px" color="primary" />
<div class="q-mt-md text-subtitle1">Degisiklikler kaydediliyor, lutfen bekleyiniz...</div>
</q-inner-loading>
</q-page>
</template>
@@ -219,6 +413,15 @@ import { normalizeSearchText } from 'src/utils/searchText'
const route = useRoute()
const $q = useQuasar()
const store = useOrderProductionItemStore()
const BAGGI_CODE_PATTERN = /^[A-Z][0-9]{3}-[A-Z]{3}[0-9]{5}$/
const BAGGI_CODE_ERROR = 'Girdiginiz kod BAGGI kod sistemine uyumlu degil. Format: X999-XXX99999'
function nowMs () {
  // Millisecond timestamp: prefer the high-resolution monotonic clock when the
  // environment provides one, otherwise fall back to wall-clock time.
  const hasPerfClock =
    typeof performance !== 'undefined' && typeof performance.now === 'function'
  return hasPerfClock ? performance.now() : Date.now()
}
const orderHeaderID = computed(() => String(route.params.orderHeaderID || '').trim())
const header = computed(() => store.header || {})
@@ -233,19 +436,31 @@ const cariLabel = computed(() => {
const rows = ref([])
const descFilter = ref('')
const productOptions = ref([])
const productSearch = ref('')
const selectedMap = ref({})
const headerAverageDueDate = ref('')
const cdItemDialogOpen = ref(false)
const cdItemTargetCode = ref('')
const copySourceCode = ref(null)
const suppressAutoSetupDialogs = ref(false)
const cdItemDraftForm = ref(createEmptyCdItemDraft(''))
const attributeDialogOpen = ref(false)
const attributeTargetCode = ref('')
const attributeRows = ref([])
const isBulkSubmitting = ref(false)
const columns = [
{ name: 'select', label: '', field: 'select', align: 'center', sortable: false, style: 'width:44px;', headerStyle: 'width:44px;' },
{ name: 'OldItemCode', label: 'Eski Urun Kodu', field: 'OldItemCode', align: 'left', sortable: true, style: 'min-width:90px;white-space:normal', headerStyle: 'min-width:90px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldColor', label: 'Eski Urun Rengi', field: 'OldColor', align: 'left', sortable: true, style: 'min-width:80px;white-space:normal', headerStyle: 'min-width:80px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldColor', label: 'Eski Urun Rengi', field: 'OldColorLabel', align: 'left', sortable: true, style: 'min-width:120px;white-space:normal', headerStyle: 'min-width:120px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldDim2', label: 'Eski 2. Renk', field: 'OldDim2', align: 'left', sortable: true, style: 'min-width:80px;white-space:normal', headerStyle: 'min-width:80px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldDesc', label: 'Eski Aciklama', field: 'OldDesc', align: 'left', sortable: false, style: 'min-width:130px;', headerStyle: 'min-width:130px;', headerClasses: 'col-old col-desc', classes: 'col-old col-desc' },
{ name: 'OldSizes', label: 'Bedenler', field: 'OldSizesLabel', align: 'left', sortable: false, style: 'min-width:90px;', headerStyle: 'min-width:90px;', headerClasses: 'col-old col-wrap', classes: 'col-old col-wrap' },
{ name: 'OldTotalQty', label: 'Siparis Adedi', field: 'OldTotalQtyLabel', align: 'right', sortable: false, style: 'min-width:90px;', headerStyle: 'min-width:90px;', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldDueDate', label: 'Eski Termin', field: 'OldDueDate', align: 'left', sortable: true, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'NewItemCode', label: 'Yeni Urun Kodu', field: 'NewItemCode', align: 'left', sortable: false, style: 'min-width:130px;', headerStyle: 'min-width:130px;', headerClasses: 'col-new col-new-first', classes: 'col-new col-new-first' },
{ name: 'NewColor', label: 'Yeni Urun Rengi', field: 'NewColor', align: 'left', sortable: false, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-new', classes: 'col-new' },
{ name: 'NewDim2', label: 'Yeni 2. Renk', field: 'NewDim2', align: 'left', sortable: false, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-new', classes: 'col-new' },
{ name: 'NewDueDate', label: 'Yeni Termin', field: 'NewDueDate', align: 'left', sortable: false, style: 'min-width:120px;', headerStyle: 'min-width:120px;', headerClasses: 'col-new', classes: 'col-new' },
{ name: 'NewDesc', label: 'Yeni Aciklama', field: 'NewDesc', align: 'left', sortable: false, style: 'min-width:140px;', headerStyle: 'min-width:140px;', headerClasses: 'col-new col-desc', classes: 'col-new col-desc' }
]
@@ -279,13 +494,22 @@ function formatDate (val) {
return text.length >= 10 ? text.slice(0, 10) : text
}
const filteredProducts = computed(() => {
  // Up to 50 product options whose code matches the normalized search text;
  // with no search text, just the first 50 options.
  const needle = normalizeSearchText(productSearch.value)
  const all = productOptions.value
  const matches = needle
    ? all.filter((p) => normalizeSearchText(p?.ProductCode).includes(needle))
    : all
  return matches.slice(0, 50)
})
// Coerce any incoming date value to the normalized form produced by formatDate
// (used so stored and edited dates compare consistently).
function normalizeDateInput (val) {
  return formatDate(val || '')
}
// True when the due date edited in the input differs from the header's stored
// AverageDueDate (both sides compared in normalized date form).
const hasHeaderAverageDueDateChange = computed(() => (
  normalizeDateInput(headerAverageDueDate.value) !==
  normalizeDateInput(header.value?.AverageDueDate)
))
// Keep the editable due-date input in sync with the store header; `immediate`
// seeds the input on first render.
watch(
  () => header.value?.AverageDueDate,
  (value) => {
    headerAverageDueDate.value = normalizeDateInput(value)
  },
  { immediate: true }
)
const filteredRows = computed(() => {
const needle = normalizeSearchText(descFilter.value)
@@ -299,38 +523,223 @@ const visibleRowKeys = computed(() => filteredRows.value.map(r => r.RowKey))
const selectedVisibleCount = computed(() => visibleRowKeys.value.filter(k => !!selectedMap.value[k]).length)
const allSelectedVisible = computed(() => visibleRowKeys.value.length > 0 && selectedVisibleCount.value === visibleRowKeys.value.length)
const someSelectedVisible = computed(() => selectedVisibleCount.value > 0)
const newItemEntryModeOptions = [
{ label: 'Eski Kod Sec', value: 'selected' },
{ label: 'Yeni Kod Ekle', value: 'typed' }
]
const productCodeAllOptions = computed(() => {
  // Every known product code, uppercased, as {label, value} select options.
  // Only exactly-13-character codes are offered (the BAGGI code length).
  const options = []
  for (const p of (productOptions.value || [])) {
    const code = String(p?.ProductCode || '').trim().toUpperCase()
    if (code && code.length === 13) {
      options.push({ label: code, value: code })
    }
  }
  return options
})
const productCodeSelectOptions = ref([])
function onSelectProduct (row, code) {
productSearch.value = ''
onNewItemChange(row, code)
// Re-seed the QSelect's working option list whenever the full option set is
// recomputed; `immediate` fills it on first render.
watch(
  productCodeAllOptions,
  (list) => {
    productCodeSelectOptions.value = Array.isArray(list) ? [...list] : []
  },
  { immediate: true }
)
function onFilterProductCode (val, update) {
  // QSelect @filter handler: narrow the working option list to entries whose
  // label or value contains the normalized search needle.
  const needle = normalizeSearchText(val)
  update(() => {
    const all = productCodeAllOptions.value || []
    if (!needle) {
      productCodeSelectOptions.value = [...all]
      return
    }
    productCodeSelectOptions.value = all.filter((opt) => (
      normalizeSearchText(opt?.label || '').includes(needle) ||
      normalizeSearchText(opt?.value || '').includes(needle)
    ))
  })
}
function onNewItemChange (row, val) {
const next = String(val || '').trim().toUpperCase()
function applyNewItemVisualState (row, source = 'typed') {
  // Classify the current code via the store and mirror the result onto the
  // row's visual fields (normalized code, mode badge, entry source).
  const { normalized, mode } = store.classifyItemCode(row?.NewItemCode || '')
  row.NewItemCode = normalized
  row.NewItemMode = mode
  row.NewItemSource = mode === 'empty' ? '' : source
}
function syncRowsForKnownExistingCode (itemCode) {
  // Once a code is known to exist, flag every grid row carrying that code as
  // "existing" and backfill a sensible entry mode where none was chosen.
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return
  for (const row of (rows.value || [])) {
    const rowCode = String(row?.NewItemCode || '').trim().toUpperCase()
    if (rowCode !== code) continue
    row.NewItemCode = code
    row.NewItemMode = 'existing'
    if (!row.NewItemEntryMode) {
      row.NewItemEntryMode = row.NewItemSource === 'selected' ? 'selected' : 'typed'
    }
  }
}
function newItemInputClass (row) {
  // CSS class map: highlight the input differently for an existing ERP code
  // versus a brand-new code.
  const mode = row?.NewItemMode
  return {
    'new-item-existing': mode === 'existing',
    'new-item-new': mode === 'new'
  }
}
function newItemBadgeColor (row) {
  // Green badge for codes already in ERP, orange for everything else.
  if (row?.NewItemMode === 'existing') return 'positive'
  return 'warning'
}
function newItemBadgeLabel (row) {
  // Badge text mirroring newItemBadgeColor's existing/new distinction.
  if (row?.NewItemMode === 'existing') return 'MEVCUT KOD'
  return 'YENI KOD'
}
function newItemHintText (row) {
  // Human-readable hint describing how the code was entered and, for new
  // codes, whether the required cdItem draft has been prepared yet.
  const mode = row?.NewItemMode
  if (mode === 'existing') {
    if (row?.NewItemSource === 'selected') return 'Urun listesinden secildi'
    return 'Elle girildi (sistemde bulundu)'
  }
  if (mode === 'new') {
    const hasDraft = !!store.getCdItemDraft(row?.NewItemCode)
    return hasDraft ? 'Yeni kod: cdItem taslagi hazir' : 'Yeni kod: cdItem taslagi gerekli'
  }
  return ''
}
// Apply a product picked from the dropdown: force "selected" entry mode and
// feed the code through the shared change pipeline with source='selected'.
function onSelectProduct (row, code) {
  row.NewItemEntryMode = 'selected'
  onNewItemChange(row, code, 'selected')
}
function onNewItemEntryModeChange (row, mode) {
  // Switching entry mode wipes previously chosen code/colors back to blank.
  row.NewItemEntryMode = String(mode || '').trim()
  Object.assign(row, {
    NewItemCode: '',
    NewColor: '',
    NewDim2: '',
    NewItemMode: 'empty',
    NewItemSource: ''
  })
}
function escapeRegExp (value) {
  // Escape every character that is special inside a RegExp pattern so the
  // value can be embedded literally in a dynamically-built regex.
  return String(value || '').replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`)
}
function buildNextCodeFromPrefix (prefix) {
  // For a "X999-XXX" prefix, scan the known products for the highest used
  // 5-digit suffix and return the prefix plus the next free, zero-padded one.
  // Returns '' when the prefix does not match the expected shape.
  const normalizedPrefix = String(prefix || '').trim().toUpperCase()
  if (!/^[A-Z][0-9]{3}-[A-Z]{3}$/.test(normalizedPrefix)) return ''
  const codeRegex = new RegExp(`^${escapeRegExp(normalizedPrefix)}(\\d{5})$`)
  let maxSuffix = 0
  for (const p of (productOptions.value || [])) {
    const match = String(p?.ProductCode || '').trim().toUpperCase().match(codeRegex)
    if (!match) continue
    const suffix = Number(match[1] || 0)
    if (Number.isFinite(suffix) && suffix > maxSuffix) maxSuffix = suffix
  }
  const nextSuffix = maxSuffix > 0 ? maxSuffix + 1 : 1
  return `${normalizedPrefix}${String(nextSuffix).padStart(5, '0')}`
}
function onNewItemChange (row, val, source = 'typed') {
  // Normalize and validate a typed/selected item code, refresh the row's
  // visual state, reset dependent colors, trigger color loading and — for a
  // changed, valid code — open the appropriate setup dialog.
  // NOTE(review): the original span contained merge residue (an unclosed
  // `if (row.NewItemCode) {` next to its replacement condition); this body
  // restores balanced, coherent control flow — confirm against the intended
  // revision.
  const prevCode = String(row?.NewItemCode || '').trim().toUpperCase()
  let next = String(val || '').trim().toUpperCase()
  // Typing only the "X999-XXX" prefix auto-completes to the next free suffix.
  if (source === 'typed' && row?.NewItemEntryMode === 'typed' && /^[A-Z][0-9]{3}-[A-Z]{3}$/.test(next)) {
    const autoCode = buildNextCodeFromPrefix(next)
    if (autoCode) {
      next = autoCode
      $q.notify({ type: 'info', message: `Yeni kod otomatik tamamlandi: ${autoCode}` })
    }
  }
  if (next.length > 13) {
    $q.notify({ type: 'negative', message: 'Model kodu en fazla 13 karakter olabilir.' })
    row.NewItemCode = next.slice(0, 13)
    applyNewItemVisualState(row, source)
    return
  }
  if (next.length === 13 && !isValidBaggiModelCode(next)) {
    $q.notify({ type: 'negative', message: BAGGI_CODE_ERROR })
    row.NewItemCode = prevCode
    applyNewItemVisualState(row, source)
    return
  }
  row.NewItemCode = next ? next.toUpperCase() : ''
  applyNewItemVisualState(row, source)
  // A code change invalidates any previously chosen colors.
  row.NewColor = ''
  row.NewDim2 = ''
  if (row.NewItemCode) {
    row.NewDesc = mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc)
  }
  if (row.NewItemCode && isValidBaggiModelCode(row.NewItemCode)) {
    if (row.NewItemMode === 'new') {
      store.fetchNewColors(row.NewItemCode)
    } else {
      store.fetchColors(row.NewItemCode)
    }
  }
  if (suppressAutoSetupDialogs.value) return
  if (row.NewItemMode === 'new' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) {
    openNewCodeSetupFlow(row.NewItemCode)
  } else if (row.NewItemMode === 'existing' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) {
    openAttributeDialog(row.NewItemCode)
  }
}
function isAttributeDraftComplete (rows) {
  // A draft counts as complete only when it is a non-empty array and every
  // entry has a non-blank AttributeCode.
  if (!Array.isArray(rows) || rows.length === 0) return false
  return rows.every((r) => Boolean(String(r?.AttributeCode || '').trim()))
}
function isNewCodeSetupComplete (itemCode) {
  // A new code is "ready" once it has both a cdItem draft and a fully-filled
  // attribute draft in the store.
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return false
  if (!store.getCdItemDraft(code)) return false
  return isAttributeDraftComplete(store.getProductAttributeDraft(code))
}
function isColorSelectionLocked (row) {
  // Color pickers stay disabled until a new item code has been entered.
  const code = String(row?.NewItemCode || '').trim().toUpperCase()
  return code.length === 0
}
function openNewCodeSetupFlow (itemCode) {
  // Walk the new-code wizard in order: the cdItem draft dialog first, then
  // the attribute dialog if the attribute draft is still incomplete.
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return
  const hasCdItemDraft = !!store.getCdItemDraft(code)
  if (!hasCdItemDraft) {
    openCdItemDialog(code)
    return
  }
  const attrDraft = store.getProductAttributeDraft(code)
  if (!isAttributeDraftComplete(attrDraft)) {
    openAttributeDialog(code)
  }
}
function onNewColorChange (row) {
  // Normalize the chosen 1st color, clear the dependent 2nd color, refresh the
  // auto-note in the description and reload 2nd-color options for the pair.
  row.NewColor = normalizeShortCode(row.NewColor, 3)
  row.NewDim2 = ''
  row.NewDesc = mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc)
  if (!row.NewItemCode || !row.NewColor) return
  const isNewMode = String(row?.NewItemMode || '').trim() === 'new'
  if (isNewMode) {
    store.fetchNewSecondColors(row.NewItemCode, row.NewColor)
  } else {
    store.fetchSecondColors(row.NewItemCode, row.NewColor)
  }
}
function onNewDim2Change (row) {
  // Normalize the 2nd color and refresh the auto-generated description note.
  row.NewDim2 = normalizeShortCode(row.NewDim2, 3)
  const baseDesc = row.NewDesc || row.OldDesc
  row.NewDesc = mergeDescWithAutoNote(row, baseDesc)
}
function getColorOptions (row) {
const code = row?.NewItemCode || ''
const list = store.colorOptionsByCode[code] || []
const isNewMode = String(row?.NewItemMode || '').trim() === 'new'
const list = isNewMode
? (store.newColorOptionsByCode[code] || [])
: (store.colorOptionsByCode[code] || [])
return list.map(c => ({
...c,
colorLabel: `${c.color_code} - ${c.color_description || ''}`.trim()
@@ -341,7 +750,14 @@ function getSecondColorOptions (row) {
const code = row?.NewItemCode || ''
const color = row?.NewColor || ''
const key = `${code}::${color}`
return store.secondColorOptionsByKey[key] || []
const isNewMode = String(row?.NewItemMode || '').trim() === 'new'
const list = isNewMode
? (store.newSecondColorOptionsByKey[key] || [])
: (store.secondColorOptionsByKey[key] || [])
return list.map(c => ({
...c,
item_dim2_label: `${c.item_dim2_code} - ${c.color_description || ''}`.trim()
}))
}
function toggleRowSelection (rowKey, checked) {
@@ -360,55 +776,81 @@ function toggleSelectAllVisible (checked) {
selectedMap.value = next
}
function onCreateColorValue (row, val, done) {
  // QSelect @new-value handler: accept a free-typed color, normalized to a
  // 3-char code; reject blank input.
  const code = normalizeShortCode(val, 3)
  if (code) {
    row.NewColor = code
    onNewColorChange(row)
    done(code, 'add-unique')
  } else {
    done(null)
  }
}
function onCreateSecondColorValue (row, val, done) {
  // QSelect @new-value handler for the 2nd color: normalize to 3 chars and
  // register it as a unique option; reject blank input.
  const code = normalizeShortCode(val, 3)
  if (code) {
    row.NewDim2 = code
    done(code, 'add-unique')
  } else {
    done(null)
  }
}
function normalizeShortCode (value, maxLen) {
  // Trim, uppercase and clamp a code to at most maxLen characters.
  const cleaned = String(value || '').trim().toUpperCase()
  return cleaned.slice(0, maxLen)
}
// True when the code matches the BAGGI model pattern X999-XXX99999
// (1 letter, 3 digits, '-', 3 letters, 5 digits). The original block carried
// two consecutive return statements (merge residue) — the duplicate inline
// regex is dropped in favor of the shared BAGGI_CODE_PATTERN constant.
function isValidBaggiModelCode (code) {
  return BAGGI_CODE_PATTERN.test(code)
}
function formatCodeColorDim2 (itemCode, color, dim2) {
  // Render "ITEM/C1/C2" with colors normalized to 3-char codes; missing
  // colors are shown as '-'.
  const shorten = (v) => String(v || '').trim().toUpperCase().slice(0, 3)
  const item = String(itemCode || '').trim().toUpperCase()
  const c1 = shorten(color) || '-'
  const c2 = shorten(dim2) || '-'
  return `${item}/${c1}/${c2}`
}
function buildAutoUpdateNote (row) {
  // Audit sentence recording the old -> new code/color transition for a line.
  const before = formatCodeColorDim2(row?.OldItemCode, row?.OldColor, row?.OldDim2)
  const after = formatCodeColorDim2(row?.NewItemCode, row?.NewColor, row?.NewDim2)
  return `Bu siparis satirinda kod ${before} bilgisinden ${after} bilgisine guncellenmistir.`
}
function isSelectionCompleteByOldShape (row) {
  // The selection is complete when a new code exists and — only if the old
  // record had a 1st color — a full 3-char new 1st color has been chosen.
  if (!String(row?.NewItemCode || '').trim()) return false
  const oldHadColor = String(row?.OldColor || '').trim().length > 0
  if (!oldHadColor) return true
  const newColor = String(row?.NewColor || '').trim().toUpperCase().slice(0, 3)
  return newColor.length === 3
}
function stripAutoUpdateNote (text) {
  // Remove a previously appended auto-update sentence, keeping only the
  // user-authored part of the description.
  const desc = String(text || '').trim()
  if (!desc) return ''
  const marker = ' Bu siparis satirinda kod '
  const markerAt = desc.indexOf(marker)
  if (markerAt !== -1) return desc.slice(0, markerAt).trim()
  return desc.startsWith('Bu siparis satirinda kod ') ? '' : desc
}
function mergeDescWithAutoNote (row, baseDesc) {
  // Strip any stale auto-note, then re-append a fresh one once the row's
  // code/color selection is complete; never duplicates the note.
  const cleaned = stripAutoUpdateNote(baseDesc)
  if (!isSelectionCompleteByOldShape(row)) return cleaned
  const note = buildAutoUpdateNote(row)
  if (!note || cleaned.includes(note)) return cleaned
  return cleaned ? `${cleaned} ${note}` : note
}
// Validate one grid row before submit. Returns '' on success, or a Turkish
// error message describing the first violated rule. On success the normalized
// values are written back onto the row (side effect callers rely on).
// The original block carried two consecutive returns inside the format check
// (merge residue); the stale literal is dropped in favor of BAGGI_CODE_ERROR,
// matching the message used by onNewItemChange.
function validateRowInput (row) {
  const entryMode = String(row?.NewItemEntryMode || '').trim()
  const newItemCode = String(row.NewItemCode || '').trim().toUpperCase()
  const newColor = normalizeShortCode(row.NewColor, 3)
  const newDim2 = normalizeShortCode(row.NewDim2, 3)
  const oldColor = String(row.OldColor || '').trim()
  const oldDim2 = String(row.OldDim2 || '').trim()
  if (!entryMode) return 'Lutfen once kod giris tipini seciniz (Eski Kod Sec / Yeni Kod Ekle).'
  if (!newItemCode) return 'Yeni model kodu zorunludur.'
  if (!isValidBaggiModelCode(newItemCode)) {
    return BAGGI_CODE_ERROR
  }
  if (oldColor && !newColor) return 'Eski kayitta 1. renk oldugu icin yeni 1. renk zorunludur.'
  if (newColor && newColor.length !== 3) return 'Yeni 1. renk kodu 3 karakter olmalidir.'
  if (oldDim2 && !newDim2) return 'Eski kayitta 2. renk oldugu icin yeni 2. renk zorunludur.'
  if (newDim2 && newDim2.length !== 3) return 'Yeni 2. renk kodu 3 karakter olmalidir.'
  if (newDim2 && !newColor) return '2. renk girmek icin 1. renk zorunludur.'
  row.NewItemCode = newItemCode
  row.NewColor = newColor
  row.NewDim2 = newDim2
  row.NewDesc = mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc)
  return ''
}
@@ -424,19 +866,654 @@ function collectLinesFromRows (selectedRows) {
NewItemCode: String(row.NewItemCode || '').trim().toUpperCase(),
NewColor: normalizeShortCode(row.NewColor, 3),
NewDim2: normalizeShortCode(row.NewDim2, 3),
NewDesc: String((row.NewDesc || row.OldDesc) || '').trim()
NewDesc: mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc),
OldDueDate: row.OldDueDate || '',
NewDueDate: row.NewDueDate || ''
}
for (const id of (row.OrderLineIDs || [])) {
lines.push({
const oldItemCode = String(row.OldItemCode || '').trim().toUpperCase()
const oldColor = normalizeShortCode(row.OldColor, 3)
const oldDim2 = normalizeShortCode(row.OldDim2, 3)
const oldDesc = String(row.OldDesc || '').trim()
const oldDueDateValue = row.OldDueDate || ''
const newDueDateValue = row.NewDueDate || ''
const hasChange = (
baseLine.NewItemCode !== oldItemCode ||
baseLine.NewColor !== oldColor ||
baseLine.NewDim2 !== oldDim2 ||
String(baseLine.NewDesc || '').trim() !== oldDesc ||
newDueDateValue !== oldDueDateValue
)
if (!hasChange) continue
const orderLines = Array.isArray(row.OrderLines) && row.OrderLines.length
? row.OrderLines
: (row.OrderLineIDs || []).map(id => ({
OrderLineID: id,
...baseLine
ItemDim1Code: ''
}))
for (const line of orderLines) {
lines.push({
...baseLine,
OrderLineID: line?.OrderLineID,
ItemDim1Code: store.toPayloadDim1Code(row, line?.ItemDim1Code || '')
})
}
}
return { errMsg: '', lines }
}
function hasRowChange (row) {
  // True when any of code / 1st color / 2nd color / description / due date
  // differs between the old values and the (normalized) new ones.
  const next = {
    code: String(row?.NewItemCode || '').trim().toUpperCase(),
    color: normalizeShortCode(row?.NewColor, 3),
    dim2: normalizeShortCode(row?.NewDim2, 3),
    desc: String(mergeDescWithAutoNote(row, row?.NewDesc || row?.OldDesc) || '').trim(),
    due: row?.NewDueDate || ''
  }
  const prev = {
    code: String(row?.OldItemCode || '').trim().toUpperCase(),
    color: normalizeShortCode(row?.OldColor, 3),
    dim2: normalizeShortCode(row?.OldDim2, 3),
    desc: String(row?.OldDesc || '').trim(),
    due: row?.OldDueDate || ''
  }
  return (
    next.code !== prev.code ||
    next.color !== prev.color ||
    next.dim2 !== prev.dim2 ||
    next.desc !== prev.desc ||
    next.due !== prev.due
  )
}
function collectOptionalColorWarnings (rows) {
  // Gather unique, non-blocking warnings: rows with a code but no 1st color,
  // or with a 1st color but no 2nd color. Insertion order is preserved.
  const shorten = (v) => String(v || '').trim().toUpperCase().slice(0, 3)
  const warnings = new Set()
  for (const row of (rows || [])) {
    const code = String(row?.NewItemCode || '').trim().toUpperCase()
    if (!code) continue
    if (!shorten(row?.NewColor)) {
      warnings.add(`${code} icin renk secmediniz.`)
    } else if (!shorten(row?.NewDim2)) {
      warnings.add(`${code} icin 2. renk bos kalacak.`)
    }
  }
  return [...warnings]
}
/**
 * Ask the user to confirm continuing despite optional-color warnings.
 * Resolves immediately with true when there is nothing to warn about;
 * cancel and dismiss both resolve false.
 * @param {Array<Object>} rows
 * @returns {Promise<boolean>} true when the user chooses to continue.
 */
function confirmOptionalColorWarnings (rows) {
  const warnings = collectOptionalColorWarnings(rows)
  if (warnings.length === 0) return Promise.resolve(true)
  return new Promise((resolve) => {
    const dialog = $q.dialog({
      title: 'Renk Uyarisi',
      message: `${warnings.join('<br>')}<br><br>Devam etmek istiyor musunuz?`,
      html: true,
      ok: { label: 'Evet, Devam Et', color: 'warning' },
      cancel: { label: 'Vazgec', flat: true }
    })
    dialog.onOk(() => resolve(true))
    dialog.onCancel(() => resolve(false))
    dialog.onDismiss(() => resolve(false))
  })
}
/**
 * Build a blank CD_ITEM draft with the project defaults (item type 1,
 * dim type 1, unit AD, tax group %10, company 1).
 * @param {string} itemCode - target code; trimmed and upper-cased.
 * @returns {Object} draft ready for the cdItem dialog form.
 */
function createEmptyCdItemDraft (itemCode) {
  const normalizedCode = String(itemCode || '').trim().toUpperCase()
  return {
    ItemTypeCode: '1',
    ItemCode: normalizedCode,
    ItemDimTypeCode: '1',
    ProductTypeCode: '1',
    ProductHierarchyID: '',
    UnitOfMeasureCode1: 'AD',
    ItemAccountGrCode: '',
    ItemTaxGrCode: '%10',
    ItemPaymentPlanGrCode: '',
    ItemDiscountGrCode: '',
    ItemVendorGrCode: '',
    PromotionGroupCode: '',
    ProductCollectionGrCode: '0',
    StorePriceLevelCode: '0',
    PerceptionOfFashionCode: '0',
    CommercialRoleCode: '0',
    StoreCapacityLevelCode: '',
    CustomsTariffNumberCode: '',
    CompanyCode: '1'
  }
}
/**
 * Resolve the select options for a cdItem lookup key.
 * `itemDimTypeCodes` is served from a hard-coded list; every other key is
 * read from store.cdItemLookups, formatted as "CODE - DESC" and stripped
 * of dummy/placeholder entries.
 * @param {string} key - lookup key inside store.cdItemLookups.
 * @returns {Array<{value: string, label: string}>}
 */
function lookupOptions (key) {
  if (key === 'itemDimTypeCodes') {
    return [
      { value: '1', label: '1 - RENK' },
      { value: '2', label: '2 - RENK-BEDEN' },
      { value: '3', label: '3 - RENK-BEDEN-YAKA' }
    ]
  }
  const options = []
  for (const entry of (store.cdItemLookups?.[key] || [])) {
    const code = String(entry?.code || '').trim()
    const desc = String(entry?.description || '').trim()
    if (isDummyLookupOption(key, code, desc)) continue
    options.push({ value: code, label: desc ? `${code} - ${desc}` : code })
  }
  return options
}
/**
 * Decide whether a lookup option is a placeholder that should be hidden:
 * empty codes, all-zero codes (up to 4 digits) or descriptions containing
 * "DUMMY".
 * @param {string} key - lookup key (unused, kept for call-site symmetry).
 * @param {string} codeRaw - raw option code.
 * @param {string} descRaw - raw option description.
 * @returns {boolean} true when the option should be filtered out.
 */
function isDummyLookupOption (key, codeRaw, descRaw) {
  const code = String(codeRaw || '').trim().toUpperCase()
  if (code === '') return true
  if (['0', '00', '000', '0000'].includes(code)) return true
  const desc = String(descRaw || '').trim().toUpperCase()
  return desc.includes('DUMMY')
}
/**
 * Copy data from an existing source product (copySourceCode) onto the
 * current dialog target.
 * @param {string} targetType - 'cdItem' copies CD_ITEM draft fields into
 *   the sizing form; 'attributes' copies attribute selections into the
 *   open attributeRows and persists them as the target's draft.
 */
async function copyFromOldProduct (targetType = 'cdItem') {
  const sourceCode = String(copySourceCode.value || '').trim().toUpperCase()
  if (!sourceCode) return
  $q.loading.show({ message: 'Ozellikler kopyalaniyor...' })
  try {
    if (targetType === 'cdItem') {
      const data = await store.fetchCdItemByCode(sourceCode)
      if (data) {
        const targetCode = String(cdItemTargetCode.value || '').trim().toUpperCase()
        const draft = createEmptyCdItemDraft(targetCode)
        // Copy only the fields the draft template knows; skip null/undefined
        // so the defaults survive.
        for (const k of Object.keys(draft)) {
          if (data[k] !== undefined && data[k] !== null) {
            draft[k] = String(data[k])
          }
        }
        // Even though the source item is copied, the target popup code must not change.
        draft.ItemCode = targetCode
        cdItemDraftForm.value = draft
        persistCdItemDraft()
        $q.notify({ type: 'positive', message: 'Boyutlandirma bilgileri kopyalandi.' })
      } else {
        $q.notify({ type: 'warning', message: 'Kaynak urun bilgisi bulunamadi.' })
      }
    } else if (targetType === 'attributes') {
      const data = await store.fetchProductItemAttributes(sourceCode, 1, true)
      if (Array.isArray(data) && data.length > 0) {
        // Update the values on the current attributeRows in place.
        for (const row of attributeRows.value) {
          const sourceAttr = data.find(d => Number(d.attribute_type_code || d.AttributeTypeCode) === Number(row.AttributeTypeCodeNumber))
          if (sourceAttr) {
            const attrCode = String(sourceAttr.attribute_code || sourceAttr.AttributeCode || '').trim()
            if (attrCode) {
              // Check whether the options already contain it; if not, add it (so it is visible in the UI)
              if (!row.AllOptions.some(opt => String(opt.value).trim() === attrCode)) {
                row.AllOptions.unshift({ value: attrCode, label: attrCode })
                row.Options = [...row.AllOptions]
              }
              row.AttributeCode = attrCode
            }
          }
        }
        const targetCode = String(attributeTargetCode.value || '').trim().toUpperCase()
        if (targetCode) {
          // Deep-copy before persisting so later UI edits don't leak into the store.
          store.setProductAttributeDraft(targetCode, JSON.parse(JSON.stringify(attributeRows.value || [])))
        }
        $q.notify({ type: 'positive', message: 'Urun ozellikleri kopyalandi.' })
      } else {
        $q.notify({ type: 'warning', message: 'Kaynak urun ozellikleri bulunamadi.' })
      }
    }
  } catch (err) {
    console.error('[OrderProductionUpdate] copyFromOldProduct failed', err)
    $q.notify({ type: 'negative', message: 'Kopyalama sirasinda hata olustu.' })
  } finally {
    $q.loading.hide()
  }
}
/**
 * Open the cdItem (sizing) dialog for an item code. Loads the lookup
 * lists first, then seeds the form from a previously saved draft when
 * one exists, falling back to an empty draft.
 * @param {string} itemCode
 */
async function openCdItemDialog (itemCode) {
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return
  copySourceCode.value = null
  await store.fetchCdItemLookups()
  cdItemTargetCode.value = code
  const draft = createEmptyCdItemDraft(code)
  const savedDraft = store.getCdItemDraft(code)
  if (savedDraft) {
    // Overlay saved values; skip null/undefined so defaults survive.
    for (const [field, value] of Object.entries(savedDraft)) {
      if (value == null) continue
      draft[field] = String(value)
    }
  }
  cdItemDraftForm.value = draft
  cdItemDialogOpen.value = true
}
/**
 * Normalize the current cdItem form and store it as the draft for the
 * target code (falling back to the form's own ItemCode).
 * @returns {Object|null} the persisted payload, or null when no ItemCode.
 */
function persistCdItemDraft () {
  const form = cdItemDraftForm.value || {}
  const targetCode = String(cdItemTargetCode.value || '').trim().toUpperCase()
  const fallbackCode = String(form?.ItemCode || '').trim().toUpperCase()
  const payload = normalizeCdItemDraftForPayload({
    ...form,
    ItemCode: targetCode || fallbackCode
  })
  if (!payload.ItemCode) return null
  store.setCdItemDraft(payload.ItemCode, payload)
  return payload
}
/**
 * Convert a cdItem dialog draft into the API payload shape.
 * Numeric codes are coerced through a positive-number guard: the two
 * type codes fall back to 1, the hierarchy falls back to null. Most
 * group codes are intentionally sent as null so the backend applies its
 * own defaults; unit, tax group and company are fixed constants.
 * (Removed the unused `toStrOrNil` helper left over from an earlier
 * revision.)
 * @param {Object} draftRaw - raw dialog form values (may be null).
 * @returns {Object} normalized CD_ITEM payload.
 */
function normalizeCdItemDraftForPayload (draftRaw) {
  const d = draftRaw || {}
  // Positive finite number or null; rejects '', 0, negatives and junk.
  const toIntOrNil = (v) => {
    const n = Number(v)
    return Number.isFinite(n) && n > 0 ? n : null
  }
  return {
    ItemTypeCode: toIntOrNil(d.ItemTypeCode) || 1,
    ItemCode: String(d.ItemCode || '').trim().toUpperCase(),
    ItemDimTypeCode: toIntOrNil(d.ItemDimTypeCode) || 1,
    ProductTypeCode: null,
    ProductHierarchyID: toIntOrNil(d.ProductHierarchyID),
    UnitOfMeasureCode1: 'AD',
    ItemAccountGrCode: null,
    ItemTaxGrCode: '%10',
    ItemPaymentPlanGrCode: null,
    ItemDiscountGrCode: null,
    ItemVendorGrCode: null,
    PromotionGroupCode: null,
    ProductCollectionGrCode: null,
    StorePriceLevelCode: null,
    PerceptionOfFashionCode: null,
    CommercialRoleCode: null,
    StoreCapacityLevelCode: null,
    CustomsTariffNumberCode: null,
    CompanyCode: '1'
  }
}
/**
 * Persist the cdItem draft, then close the sizing dialog and continue to
 * the product-attribute step for the same code. Aborts with a notify
 * when the draft has no ItemCode.
 */
async function saveCdItemDraft () {
  const saved = persistCdItemDraft()
  const savedCode = saved?.ItemCode
  if (!savedCode) {
    $q.notify({ type: 'negative', message: 'ItemCode bos olamaz.' })
    return
  }
  console.info('[OrderProductionUpdate] saveCdItemDraft', {
    code: saved.ItemCode,
    itemDimTypeCode: saved.ItemDimTypeCode,
    productHierarchyID: saved.ProductHierarchyID
  })
  cdItemDialogOpen.value = false
  await openAttributeDialog(savedCode)
}
/**
 * Group a flat attribute-lookup list into one selectable row per
 * attribute type, sorted by numeric type code. Accepts both snake_case
 * and PascalCase field names from the API; entries without a type or a
 * code are skipped.
 * @param {Array<Object>} list - raw lookup entries.
 * @returns {Array<Object>} rows with AllOptions/Options arrays and an
 *   empty AttributeCode selection.
 */
function buildAttributeRowsFromLookup (list) {
  const byType = new Map()
  for (const entry of (list || [])) {
    const typeCode = Number(entry?.attribute_type_code || entry?.AttributeTypeCode || 0)
    if (!typeCode) continue
    let bucket = byType.get(typeCode)
    if (!bucket) {
      const typeDesc = String(entry?.attribute_type_description || entry?.AttributeTypeDescription || '').trim()
      bucket = { typeCode, typeDesc: typeDesc || String(typeCode), options: [] }
      byType.set(typeCode, bucket)
    }
    const code = String(entry?.attribute_code || entry?.AttributeCode || '').trim()
    if (!code) continue
    const desc = String(entry?.attribute_description || entry?.AttributeDescription || '').trim()
    bucket.options.push({ value: code, label: `${code} - ${desc || code}` })
  }
  return [...byType.values()]
    .sort((a, b) => a.typeCode - b.typeCode)
    .map(bucket => ({
      AttributeTypeCodeNumber: bucket.typeCode,
      TypeLabel: `${bucket.typeCode} - ${bucket.typeDesc}`,
      AttributeCode: '',
      AllOptions: [...bucket.options],
      Options: [...bucket.options]
    }))
}
/**
 * Quasar q-select filter handler for an attribute row: narrows
 * row.Options to the entries whose label or value contains the
 * normalized needle; an empty needle restores the full list.
 * @param {Object} row - attribute row holding AllOptions/Options.
 * @param {string} val - raw filter text typed into the select.
 * @param {Function} update - Quasar's update callback.
 */
function onFilterAttributeOption (row, val, update) {
  const needle = normalizeSearchText(String(val || ''))
  const source = Array.isArray(row?.AllOptions)
    ? row.AllOptions
    : (Array.isArray(row?.Options) ? row.Options : [])
  update(() => {
    row.Options = needle
      ? source.filter(opt =>
          normalizeSearchText(opt?.label || '').includes(needle) ||
          normalizeSearchText(opt?.value || '').includes(needle))
      : [...source]
  })
}
/**
 * Re-hydrate saved attribute-draft rows with the freshest lookup
 * options. The lookup row of the same type supplies the option list
 * (the draft's own options are only a fallback); the draft supplies the
 * selection, which is injected at the top when the lookup no longer
 * contains it.
 * @param {Array<Object>} draftRows - previously saved rows.
 * @param {Array<Object>} lookupRows - rows freshly built from the lookup.
 * @returns {Array<Object>} merged rows, one per draft row.
 */
function mergeAttributeDraftWithLookupOptions (draftRows, lookupRows) {
  const lookupByType = new Map(
    (lookupRows || [])
      .map(r => [Number(r?.AttributeTypeCodeNumber || 0), r])
      .filter(pair => pair[0] > 0)
  )
  const toOptionList = (row) => {
    if (Array.isArray(row?.AllOptions)) return [...row.AllOptions]
    if (Array.isArray(row?.Options)) return [...row.Options]
    return []
  }
  return (draftRows || []).map(draftRow => {
    const typeCode = Number(draftRow?.AttributeTypeCodeNumber || 0)
    const lookupRow = lookupByType.get(typeCode)
    const selected = String(draftRow?.AttributeCode || '').trim()
    const fromLookup = toOptionList(lookupRow)
    const merged = fromLookup.length ? fromLookup : toOptionList(draftRow)
    const hasSelected = merged.some(opt => String(opt?.value || '').trim() === selected)
    if (selected && !hasSelected) {
      merged.unshift({ value: selected, label: selected })
    }
    return {
      ...(lookupRow || draftRow),
      ...draftRow,
      AttributeTypeCodeNumber: typeCode,
      AttributeCode: selected,
      AllOptions: merged,
      Options: [...merged]
    }
  })
}
/**
 * Open the product-attribute dialog for an item code.
 *
 * Flow:
 *  1. Always fetch the attribute lookup (type list + options); abort with
 *     a notify when it comes back empty.
 *  2. If a saved draft exists, seed the dialog from the draft merged with
 *     fresh lookup options and open immediately.
 *  3. Otherwise fetch the code's current DB attributes and use them as
 *     the initial (and "original") selections.
 * @param {string} itemCode - target item code; trimmed and upper-cased.
 */
async function openAttributeDialog (itemCode) {
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return
  copySourceCode.value = null
  attributeTargetCode.value = code
  // Deep-copy the stored draft so dialog edits never mutate the store copy.
  const existingDraft = JSON.parse(JSON.stringify(store.getProductAttributeDraft(code) || []))
  const modeInfo = store.classifyItemCode(code)
  const fetched = await store.fetchProductAttributes(1)
  const fromLookup = buildAttributeRowsFromLookup(fetched)
  console.info('[OrderProductionUpdate] openAttributeDialog lookup', {
    code,
    mode: modeInfo.mode,
    fetchedCount: Array.isArray(fetched) ? fetched.length : 0,
    rowCount: fromLookup.length
  })
  if (!fromLookup.length) {
    $q.notify({ type: 'negative', message: 'Urun ozellikleri listesi alinamadi. Lutfen daha sonra tekrar deneyin.' })
    return
  }
  // When a draft exists, the popup always opens from the draft (so the
  // selections aren't lost when it is reopened).
  if (Array.isArray(existingDraft) && existingDraft.length) {
    attributeRows.value = JSON.parse(JSON.stringify(
      mergeAttributeDraftWithLookupOptions(existingDraft, fromLookup)
    ))
    console.info('[OrderProductionUpdate] openAttributeDialog rowsPrepared', {
      code,
      mode: modeInfo.mode,
      useDraft: true,
      rowCount: Array.isArray(attributeRows.value) ? attributeRows.value.length : 0,
      optionCounts: (attributeRows.value || []).map(r => ({
        type: Number(r?.AttributeTypeCodeNumber || 0),
        options: Array.isArray(r?.Options) ? r.Options.length : 0,
        allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0,
        selected: String(r?.AttributeCode || '').trim()
      }))
    })
    // Defensive: guarantee both option arrays exist after the deep copy.
    for (const row of (attributeRows.value || [])) {
      if (!Array.isArray(row.AllOptions)) row.AllOptions = Array.isArray(row.Options) ? [...row.Options] : []
      if (!Array.isArray(row.Options)) row.Options = [...row.AllOptions]
    }
    attributeDialogOpen.value = true
    return
  }
  const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
  console.info('[OrderProductionUpdate] openAttributeDialog dbCurrent', {
    code,
    dbCurrentCount: Array.isArray(dbCurrent) ? dbCurrent.length : 0
  })
  // Attributes found in the DB mean the code already exists; flag it so
  // dependent rows are synced.
  if (Array.isArray(dbCurrent) && dbCurrent.length) {
    store.markItemCodeKnownExisting(code, true)
    syncRowsForKnownExistingCode(code)
  }
  // Map: attribute type -> currently saved attribute code (accepts both
  // snake_case and PascalCase API field names).
  const dbMap = new Map(
    (dbCurrent || []).map(x => [
      Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
      String(x?.attribute_code || x?.AttributeCode || '').trim()
    ]).filter(x => x[0] > 0)
  )
  const baseRows = fromLookup.map(row => {
    const currentCode = dbMap.get(Number(row.AttributeTypeCodeNumber || 0)) || ''
    const currentOptions = Array.isArray(row.AllOptions)
      ? [...row.AllOptions]
      : (Array.isArray(row.Options) ? [...row.Options] : [])
    // Keep the DB value selectable even when the lookup no longer lists it.
    if (currentCode && !currentOptions.some(opt => String(opt?.value || '').trim() === currentCode)) {
      currentOptions.unshift({ value: currentCode, label: currentCode })
    }
    return {
      ...row,
      AttributeCode: currentCode,
      OriginalAttributeCode: currentCode,
      AllOptions: currentOptions,
      Options: [...currentOptions]
    }
  })
  const useDraft = Array.isArray(existingDraft) && existingDraft.length
  attributeRows.value = JSON.parse(JSON.stringify(baseRows))
  console.info('[OrderProductionUpdate] openAttributeDialog rowsPrepared', {
    code,
    mode: modeInfo.mode,
    useDraft: false,
    rowCount: Array.isArray(attributeRows.value) ? attributeRows.value.length : 0,
    optionCounts: (attributeRows.value || []).map(r => ({
      type: Number(r?.AttributeTypeCodeNumber || 0),
      options: Array.isArray(r?.Options) ? r.Options.length : 0,
      allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0,
      selected: String(r?.AttributeCode || '').trim()
    }))
  })
  for (const row of (attributeRows.value || [])) {
    if (!Array.isArray(row.AllOptions)) {
      row.AllOptions = Array.isArray(row.Options) ? [...row.Options] : []
    }
    if (!Array.isArray(row.Options)) {
      row.Options = [...row.AllOptions]
    }
  }
  attributeDialogOpen.value = true
}
/**
 * Validate that every attribute row has a selection, then persist a deep
 * copy of the rows as the attribute draft for the target code and close
 * the dialog. Aborts with a notify on the first row without a selection.
 */
function saveAttributeDraft () {
  const code = String(attributeTargetCode.value || '').trim().toUpperCase()
  if (!code) return
  const snapshot = JSON.parse(JSON.stringify(attributeRows.value || []))
  const missingRow = snapshot.find(r => !String(r?.AttributeCode || '').trim())
  if (missingRow) {
    $q.notify({ type: 'negative', message: `Urun ozelliklerinde secim zorunlu: ${missingRow?.TypeLabel || ''}` })
    return
  }
  store.setProductAttributeDraft(code, snapshot)
  console.info('[OrderProductionUpdate] saveAttributeDraft', {
    code,
    rowCount: snapshot.length,
    selectedCount: snapshot.length,
    selected: snapshot.map(r => ({
      type: Number(r?.AttributeTypeCodeNumber || 0),
      code: String(r?.AttributeCode || '').trim()
    }))
  })
  attributeDialogOpen.value = false
  $q.notify({ type: 'positive', message: 'Urun ozellikleri taslagi kaydedildi.' })
}
// Auto-persist the cdItem draft on every form change while the dialog is
// open, so edits survive the popup being closed; `deep` because the form
// is a nested object.
watch(
  cdItemDraftForm,
  () => {
    if (!cdItemDialogOpen.value) return
    persistCdItemDraft()
  },
  { deep: true }
)
/**
 * Build the productAttributes payload for every distinct NewItemCode
 * among the selected rows.
 *
 * For codes that already have attributes in the DB ("existing" mode)
 * only changed selections are emitted; for other codes every row must
 * have a selection. Returns errMsg (and an empty list) at the first code
 * with missing data.
 * @param {Array<Object>} selectedRows
 * @returns {Promise<{errMsg: string, productAttributes: Array<Object>}>}
 */
async function collectProductAttributesFromSelectedRows (selectedRows) {
  const codeSet = [...new Set(
    (selectedRows || [])
      .map(r => String(r?.NewItemCode || '').trim().toUpperCase())
      .filter(Boolean)
  )]
  const out = []
  for (const code of codeSet) {
    const modeInfo = store.classifyItemCode(code)
    let rows = store.getProductAttributeDraft(code)
    const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
    // Map: attribute type -> attribute code currently stored in the DB
    // (accepts both snake_case and PascalCase API field names).
    const dbMap = new Map(
      (dbCurrent || []).map(x => [
        Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
        String(x?.attribute_code || x?.AttributeCode || '').trim()
      ]).filter(x => x[0] > 0)
    )
    const hasDbAttributes = dbMap.size > 0
    // DB attributes override the classifier: treat the code as existing.
    const effectiveMode = hasDbAttributes ? 'existing' : modeInfo.mode
    console.info('[OrderProductionUpdate] collectProductAttributes start', {
      code,
      mode: modeInfo.mode,
      effectiveMode,
      hasDbAttributes,
      draftRowCount: Array.isArray(rows) ? rows.length : 0
    })
    if (effectiveMode === 'existing') {
      // For an existing code, keep the draft when the user made changes.
      // Without a draft, force a fresh fetch from the DB.
      if (!Array.isArray(rows) || !rows.length) {
        const lookup = await store.fetchProductAttributes(1)
        const baseRows = buildAttributeRowsFromLookup(lookup)
        console.info('[OrderProductionUpdate] collectProductAttributes existing refetch', {
          code,
          lookupCount: Array.isArray(lookup) ? lookup.length : 0,
          baseRowCount: baseRows.length
        })
        rows = baseRows.map(row => {
          const currentCode = dbMap.get(Number(row.AttributeTypeCodeNumber || 0)) || ''
          const currentOptions = Array.isArray(row.AllOptions)
            ? [...row.AllOptions]
            : (Array.isArray(row.Options) ? [...row.Options] : [])
          // Keep the DB value selectable even if the lookup omits it.
          if (currentCode && !currentOptions.some(opt => String(opt?.value || '').trim() === currentCode)) {
            currentOptions.unshift({ value: currentCode, label: currentCode })
          }
          return {
            ...row,
            AttributeCode: currentCode,
            OriginalAttributeCode: currentCode,
            AllOptions: currentOptions,
            Options: [...currentOptions]
          }
        })
        store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(rows)))
      }
    } else if (!Array.isArray(rows) || !rows.length) {
      return { errMsg: `${code} icin urun ozellikleri taslagi kaydedilmedi`, productAttributes: [] }
    }
    if (!Array.isArray(rows) || !rows.length) {
      return { errMsg: `${code} icin urun ozellikleri secilmedi`, productAttributes: [] }
    }
    for (const row of rows) {
      const attributeTypeCode = Number(row?.AttributeTypeCodeNumber || 0)
      const attributeCode = String(row?.AttributeCode || '').trim()
      if (!attributeTypeCode) {
        return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
      }
      if (effectiveMode === 'existing') {
        // Only emit rows whose selection differs from the DB/original value.
        const originalCode =
          dbMap.get(attributeTypeCode) ||
          String(row?.OriginalAttributeCode || '').trim()
        const changed = attributeCode !== originalCode
        if (!changed) continue
        if (!attributeCode) {
          return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
        }
      } else if (!attributeCode) {
        return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
      }
      out.push({
        ItemTypeCode: 1,
        ItemCode: code,
        AttributeTypeCode: attributeTypeCode,
        AttributeCode: attributeCode
      })
    }
    console.info('[OrderProductionUpdate] collectProductAttributes done', {
      code,
      mode: modeInfo.mode,
      effectiveMode,
      outCount: out.filter(x => x.ItemCode === code).length,
      rowCount: rows.length,
      optionCounts: rows.map(r => ({
        type: Number(r?.AttributeTypeCodeNumber || 0),
        options: Array.isArray(r?.Options) ? r.Options.length : 0,
        allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0
      }))
    })
  }
  return { errMsg: '', productAttributes: out }
}
/**
 * Gather cdItem payloads for every brand-new item code among the
 * selected rows. When a draft is missing, the code is looked up in the
 * DB and backfilled; a code with neither draft nor DB record fails the
 * whole collection with errMsg.
 * @param {Array<Object>} selectedRows
 * @returns {Promise<{errMsg: string, cdItems: Array<Object>}>}
 */
async function collectCdItemsFromSelectedRows (selectedRows) {
  const newCodes = new Set()
  for (const row of (selectedRows || [])) {
    if (row?.NewItemMode !== 'new') continue
    const rawCode = String(row?.NewItemCode || '').trim()
    if (rawCode) newCodes.add(rawCode.toUpperCase())
  }
  if (newCodes.size === 0) return { errMsg: '', cdItems: [] }
  const cdItems = []
  for (const code of newCodes) {
    let draft = store.getCdItemDraft(code)
    if (!draft) {
      // The code may already exist in the DB even though the UI classified
      // it as new; backfill the draft from the DB record in that case.
      const existingCdItem = await store.fetchCdItemByCode(code)
      if (existingCdItem) {
        store.markItemCodeKnownExisting(code, true)
        syncRowsForKnownExistingCode(code)
        draft = normalizeCdItemDraftForPayload(existingCdItem)
        store.setCdItemDraft(code, draft)
      }
    }
    if (!draft) {
      return { errMsg: `${code} icin cdItem bilgisi eksik`, cdItems: [] }
    }
    cdItems.push(normalizeCdItemDraftForPayload(draft))
  }
  return { errMsg: '', cdItems }
}
function buildMailLineLabelFromRow (row) {
const item = String(row?.NewItemCode || row?.OldItemCode || '').trim().toUpperCase()
const color1 = String(row?.NewColor || row?.OldColor || '').trim().toUpperCase()
@@ -448,11 +1525,49 @@ function buildMailLineLabelFromRow (row) {
return [item, colorPart, desc].filter(Boolean).join(' ')
}
/**
 * Compose the notification-mail label for an updated row:
 * "ITEM COLOR[-DIM2] DESC", preferring the New* fields and falling back
 * to the Old* ones. Returns '' when no item code is available.
 * @param {Object} row
 * @returns {string}
 */
function buildUpdateMailLineLabelFromRow (row) {
  const itemCode = String(row?.NewItemCode || row?.OldItemCode || '').trim().toUpperCase()
  const color = String(row?.NewColor || row?.OldColor || '').trim().toUpperCase()
  const dim2 = String(row?.NewDim2 || row?.OldDim2 || '').trim().toUpperCase()
  const desc = mergeDescWithAutoNote(row, row?.NewDesc || row?.OldDesc || '')
  if (!itemCode) return ''
  const colorLabel = dim2 ? `${color}-${dim2}` : color
  return [itemCode, colorLabel, desc].filter(Boolean).join(' ')
}
/**
 * Extract deduplicated due-date changes from the selected rows for the
 * notification mail. Rows without an item code, without a new date, or
 * whose formatted date did not change are skipped.
 * @param {Array<Object>} selectedRows
 * @returns {Array<{itemCode: string, colorCode: string, itemDim2Code: string,
 *   oldDueDate: string, newDueDate: string}>}
 */
function buildDueDateChangeRowsFromSelectedRows (selectedRows) {
  const byKey = new Map()
  for (const row of (selectedRows || [])) {
    const itemCode = String(row?.NewItemCode || row?.OldItemCode || '').trim().toUpperCase()
    const oldDueDate = formatDate(row?.OldDueDate)
    const newDueDate = formatDate(row?.NewDueDate)
    if (!itemCode || !newDueDate || oldDueDate === newDueDate) continue
    const colorCode = String(row?.NewColor || row?.OldColor || '').trim().toUpperCase()
    const itemDim2Code = String(row?.NewDim2 || row?.OldDim2 || '').trim().toUpperCase()
    const key = [itemCode, colorCode, itemDim2Code, oldDueDate, newDueDate].join('||')
    if (!byKey.has(key)) {
      byKey.set(key, { itemCode, colorCode, itemDim2Code, oldDueDate, newDueDate })
    }
  }
  return [...byKey.values()]
}
function buildProductionUpdateMailPayload (selectedRows) {
const updatedItems = [
...new Set(
(selectedRows || [])
.map(buildMailLineLabelFromRow)
.map(buildUpdateMailLineLabelFromRow)
.filter(Boolean)
)
]
@@ -461,25 +1576,62 @@ function buildProductionUpdateMailPayload (selectedRows) {
operation: 'update',
deletedItems: [],
updatedItems,
addedItems: []
addedItems: [],
dueDateChanges: buildDueDateChangeRowsFromSelectedRows(selectedRows)
}
}
/**
 * Flatten barcode-validation objects into their non-empty, trimmed
 * message strings. Non-array input yields an empty list.
 * @param {Array<Object>} validations
 * @returns {string[]}
 */
function formatBarcodeValidationMessages (validations) {
  if (!Array.isArray(validations)) return []
  const messages = []
  for (const validation of validations) {
    const text = String(validation?.message || '').trim()
    if (text) messages.push(text)
  }
  return messages
}
/**
 * Show a blocking dialog listing barcode-validation messages.
 * @param {Array<Object>} validations
 * @returns {boolean} true when a dialog was shown (messages existed).
 */
function showBarcodeValidationDialog (validations) {
  const messages = formatBarcodeValidationMessages(validations)
  if (messages.length === 0) return false
  $q.dialog({
    title: 'Barkod Validasyonlari',
    message: messages.join('<br>'),
    html: true,
    ok: { label: 'Tamam', color: 'negative' }
  })
  return true
}
async function sendUpdateMailAfterApply (selectedRows) {
const orderId = String(orderHeaderID.value || '').trim()
if (!orderId) return
const host = String(window?.location?.hostname || '').trim().toLowerCase()
const isLocalHost = host === 'localhost' || host === '127.0.0.1'
if (isLocalHost) {
console.info('[OrderProductionUpdate] sendUpdateMailAfterApply skipped (localhost)', { orderHeaderID: orderId, host })
return
}
try {
const t0 = nowMs()
const payload = buildProductionUpdateMailPayload(selectedRows)
console.info('[OrderProductionUpdate] sendUpdateMailAfterApply start', {
orderHeaderID: orderId,
updatedItems: payload?.updatedItems?.length || 0
})
const res = await api.post('/order/send-market-mail', {
orderHeaderID: orderId,
operation: payload.operation,
deletedItems: payload.deletedItems,
updatedItems: payload.updatedItems,
addedItems: payload.addedItems
addedItems: payload.addedItems,
dueDateChanges: payload.dueDateChanges,
extraRecipients: ['urun@baggi.com.tr']
})
const sentCount = Number(res?.data?.sentCount || 0)
console.info('[OrderProductionUpdate] sendUpdateMailAfterApply done', {
orderHeaderID: orderId,
sentCount,
durationMs: Math.round(nowMs() - t0)
})
$q.notify({
type: 'positive',
message: sentCount > 0
@@ -487,6 +1639,12 @@ async function sendUpdateMailAfterApply (selectedRows) {
: 'Guncelleme maili gonderildi'
})
} catch (err) {
console.error('[OrderProductionUpdate] sendUpdateMailAfterApply failed', {
orderHeaderID: orderId,
status: err?.response?.status,
data: err?.response?.data,
message: err?.message
})
$q.notify({
type: 'warning',
message: 'Guncelleme kaydedildi, mail gonderilemedi.'
@@ -508,50 +1666,110 @@ function buildGroupKey (item) {
function formatSizes (sizeMap) {
const entries = Object.entries(sizeMap || {})
if (!entries.length) return { list: [], label: '-' }
entries.sort((a, b) => String(a[0]).localeCompare(String(b[0])))
entries.sort((a, b) => {
const left = String(a[0] || '').trim()
const right = String(b[0] || '').trim()
if (/^\d+$/.test(left) && /^\d+$/.test(right)) {
return Number(left) - Number(right)
}
return left.localeCompare(right)
})
const label = entries.map(([k, v]) => (v > 1 ? `${k}(${v})` : k)).join(', ')
return { list: entries.map(([k]) => k), label }
}
/**
 * Build a "CODE - description" label; when one side is blank the other
 * is returned on its own ('' when both are blank).
 * @param {string} code - upper-cased in the result.
 * @param {string} description - kept as-is (trimmed).
 * @returns {string}
 */
function formatCodeDescriptionLabel (code, description) {
  const codeText = String(code || '').trim().toUpperCase()
  const descText = String(description || '').trim()
  if (codeText && descText) return `${codeText} - ${descText}`
  return codeText || descText
}
/**
 * Format a quantity for display: plain digits for integers, tr-TR
 * locale formatting (up to 2 fraction digits) for decimals, and '0'
 * for anything non-numeric.
 * @param {number|string} value
 * @returns {string}
 */
function formatQtyLabel (value) {
  const qty = Number(value || 0)
  if (!Number.isFinite(qty)) return '0'
  if (Number.isInteger(qty)) return String(qty)
  return qty.toLocaleString('tr-TR', { minimumFractionDigits: 0, maximumFractionDigits: 2 })
}
function groupItems (items, prevRows = []) {
const prevMap = new Map()
for (const r of prevRows || []) {
if (r?.RowKey) prevMap.set(r.RowKey, String(r.NewDesc || '').trim())
if (!r?.RowKey) continue
prevMap.set(r.RowKey, {
NewDesc: String(r.NewDesc || '').trim(),
NewItemCode: String(r.NewItemCode || '').trim().toUpperCase(),
NewColor: String(r.NewColor || '').trim().toUpperCase(),
NewDim2: String(r.NewDim2 || '').trim().toUpperCase(),
NewItemMode: String(r.NewItemMode || '').trim(),
NewItemSource: String(r.NewItemSource || '').trim(),
NewItemEntryMode: String(r.NewItemEntryMode || '').trim(),
NewDueDate: String(r.NewDueDate || '').trim()
})
}
const map = new Map()
for (const it of items) {
const key = buildGroupKey(it)
if (!map.has(key)) {
const prevDesc = prevMap.get(key) || ''
const prev = prevMap.get(key) || {}
const prevDesc = prev.NewDesc || ''
const fallbackDesc = String((it?.NewDesc || it?.OldDesc) || '').trim()
map.set(key, {
RowKey: key,
OrderHeaderID: it.OrderHeaderID,
OldItemCode: it.OldItemCode,
OldColor: it.OldColor,
OldColorDescription: it.OldColorDescription,
OldColorLabel: formatCodeDescriptionLabel(it.OldColor, it.OldColorDescription),
OldDim2: it.OldDim2,
OldDim3: it.OldDim3,
OldDesc: it.OldDesc,
OldDueDate: it.OldDueDate || '',
NewDueDate: (prev.NewDueDate || it.OldDueDate || ''),
OrderLineIDs: [],
OrderLines: [],
OldSizes: [],
OldSizesLabel: '',
NewItemCode: '',
NewColor: '',
NewDim2: '',
OldTotalQty: 0,
OldTotalQtyLabel: '0',
NewItemCode: prev.NewItemCode || '',
NewColor: prev.NewColor || '',
NewDim2: prev.NewDim2 || '',
NewDesc: prevDesc || fallbackDesc,
IsVariantMissing: !!it.IsVariantMissing
NewItemMode: prev.NewItemMode || 'empty',
NewItemSource: prev.NewItemSource || '',
NewItemEntryMode: prev.NewItemEntryMode || '',
IsVariantMissing: !!it.IsVariantMissing,
yasPayloadMap: {}
})
}
const g = map.get(key)
if (it?.OrderLineID) g.OrderLineIDs.push(it.OrderLineID)
const size = String(it?.OldDim1 || '').trim()
const rawSize = String(it?.OldDim1 || '').trim()
const size = store.normalizeDim1ForUi(rawSize)
const rawSizeUpper = rawSize.toUpperCase()
if (/^(\d+)\s*(Y|YAS|YAŞ)$/.test(rawSizeUpper) && size) {
g.yasPayloadMap[size] = store.pickPreferredYasPayloadLabel(
g.yasPayloadMap[size],
rawSizeUpper
)
}
if (it?.OrderLineID) {
g.OrderLines.push({
OrderLineID: it.OrderLineID,
ItemDim1Code: size
})
}
if (size !== '') {
g.__sizeMap = g.__sizeMap || {}
g.__sizeMap[size] = (g.__sizeMap[size] || 0) + 1
}
g.__oldQtyTotal = Number(g.__oldQtyTotal || 0) + Number(it?.OldQty || 0)
if (it?.IsVariantMissing) g.IsVariantMissing = true
}
@@ -560,7 +1778,17 @@ function groupItems (items, prevRows = []) {
const sizes = formatSizes(g.__sizeMap || {})
g.OldSizes = sizes.list
g.OldSizesLabel = sizes.label
g.OldTotalQty = Number(g.__oldQtyTotal || 0)
g.OldTotalQtyLabel = formatQtyLabel(g.OldTotalQty)
const info = store.classifyItemCode(g.NewItemCode)
g.NewItemCode = info.normalized
g.NewItemMode = info.mode
if (info.mode === 'empty') g.NewItemSource = ''
if (!g.NewItemEntryMode && g.NewItemCode) {
g.NewItemEntryMode = g.NewItemSource === 'selected' ? 'selected' : 'typed'
}
delete g.__sizeMap
delete g.__oldQtyTotal
out.push(g)
}
@@ -574,24 +1802,121 @@ async function refreshAll () {
}
async function onBulkSubmit () {
const selectedRows = rows.value.filter(r => !!selectedMap.value[r.RowKey])
if (!selectedRows.length) {
$q.notify({ type: 'warning', message: 'Lutfen en az bir satir seciniz.' })
if (isBulkSubmitting.value || store.saving) {
console.info('[OrderProductionUpdate] onBulkSubmit ignored (already running)', {
orderHeaderID: orderHeaderID.value,
isBulkSubmitting: isBulkSubmitting.value,
storeSaving: store.saving
})
return
}
isBulkSubmitting.value = true
const flowStart = nowMs()
try {
suppressAutoSetupDialogs.value = true
const selectedRows = rows.value.filter(r => !!selectedMap.value[r.RowKey])
const headerAverageDueDateValue = normalizeDateInput(headerAverageDueDate.value)
const headerDateChanged = hasHeaderAverageDueDateChange.value
if (!selectedRows.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Lutfen en az bir satir seciniz veya ustteki termin tarihini degistiriniz.' })
return
}
const prepStart = nowMs()
const { errMsg, lines } = collectLinesFromRows(selectedRows)
if (errMsg) {
$q.notify({ type: 'negative', message: errMsg })
return
}
if (!lines.length) {
$q.notify({ type: 'negative', message: 'Secili satirlarda guncellenecek kayit bulunamadi.' })
if (!lines.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Secili satirlarda degisiklik yok.' })
return
}
try {
const validate = await store.validateUpdates(orderHeaderID.value, lines)
if (lines.length > 0) {
const changedRows = selectedRows.filter(hasRowChange)
const confirmed = await confirmOptionalColorWarnings(changedRows)
if (!confirmed) return
}
let cdItems = []
let productAttributes = []
if (lines.length > 0) {
const { errMsg: cdErrMsg, cdItems: nextCdItems } = await collectCdItemsFromSelectedRows(selectedRows)
if (cdErrMsg) {
$q.notify({ type: 'negative', message: cdErrMsg })
return
}
cdItems = nextCdItems
const { errMsg: attrErrMsg, productAttributes: nextProductAttributes } = await collectProductAttributesFromSelectedRows(selectedRows)
if (attrErrMsg) {
$q.notify({ type: 'negative', message: attrErrMsg })
const firstCode = String(attrErrMsg.split(' ')[0] || '').trim().toUpperCase()
if (isValidBaggiModelCode(firstCode)) {
await openAttributeDialog(firstCode)
}
return
}
productAttributes = nextProductAttributes
}
console.info('[OrderProductionUpdate] onBulkSubmit prepared', {
orderHeaderID: orderHeaderID.value,
selectedRowCount: selectedRows.length,
lineCount: lines.length,
cdItemCount: cdItems.length,
attributeCount: productAttributes.length,
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
prepDurationMs: Math.round(nowMs() - prepStart)
})
const applyChanges = async (insertMissing) => {
const applyStart = nowMs()
const applyResult = await store.applyUpdates(
orderHeaderID.value,
lines,
insertMissing,
cdItems,
productAttributes,
headerDateChanged ? headerAverageDueDateValue : null
)
console.info('[OrderProductionUpdate] apply finished', {
orderHeaderID: orderHeaderID.value,
insertMissing: !!insertMissing,
lineCount: lines.length,
barcodeInserted: Number(applyResult?.barcodeInserted || 0),
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
durationMs: Math.round(nowMs() - applyStart)
})
await store.fetchHeader(orderHeaderID.value)
if (lines.length > 0) {
await store.fetchItems(orderHeaderID.value)
}
selectedMap.value = {}
if (lines.length > 0) {
await sendUpdateMailAfterApply(selectedRows)
} else {
$q.notify({ type: 'positive', message: 'Tahmini termin tarihi guncellendi.' })
}
}
if (lines.length > 0) {
const validateStart = nowMs()
const validate = await store.validateUpdates(orderHeaderID.value, lines, cdItems)
console.info('[OrderProductionUpdate] validate finished', {
orderHeaderID: orderHeaderID.value,
lineCount: lines.length,
missingCount: Number(validate?.missingCount || 0),
barcodeValidationCount: Number(validate?.barcodeValidationCount || 0),
durationMs: Math.round(nowMs() - validateStart)
})
if (showBarcodeValidationDialog(validate?.barcodeValidations)) {
return
}
const missingCount = validate?.missingCount || 0
if (missingCount > 0) {
const missingList = (validate?.missing || []).map(v => (
@@ -604,27 +1929,34 @@ async function onBulkSubmit () {
ok: { label: 'Ekle ve Guncelle', color: 'primary' },
cancel: { label: 'Vazgec', flat: true }
}).onOk(async () => {
await store.applyUpdates(orderHeaderID.value, lines, true)
await store.fetchItems(orderHeaderID.value)
selectedMap.value = {}
await sendUpdateMailAfterApply(selectedRows)
await applyChanges(true)
})
return
}
}
await store.applyUpdates(orderHeaderID.value, lines, false)
await store.fetchItems(orderHeaderID.value)
selectedMap.value = {}
await sendUpdateMailAfterApply(selectedRows)
await applyChanges(false)
} catch (err) {
console.error('[OrderProductionUpdate] onBulkSubmit failed', {
orderHeaderID: orderHeaderID.value,
selectedRowCount: selectedRows.length,
lineCount: lines.length,
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
apiError: err?.response?.data,
message: err?.message
})
if (showBarcodeValidationDialog(err?.response?.data?.barcodeValidations)) {
return
}
$q.notify({ type: 'negative', message: store.error || 'Toplu kayit islemi basarisiz.' })
} finally {
isBulkSubmitting.value = false
suppressAutoSetupDialogs.value = false
console.info('[OrderProductionUpdate] onBulkSubmit total', {
orderHeaderID: orderHeaderID.value,
durationMs: Math.round(nowMs() - flowStart)
})
}
}
</script>
@@ -738,6 +2070,16 @@ async function onBulkSubmit () {
background: #e3f3ff;
}
.prod-table :deep(.new-item-existing .q-field__control) {
background: #eaf9ef !important;
border-left: 3px solid #21ba45;
}
.prod-table :deep(.new-item-new .q-field__control) {
background: #fff5e9 !important;
border-left: 3px solid #f2a100;
}
.prod-table :deep(td.col-desc),
.prod-table :deep(th.col-desc),
.prod-table :deep(td.col-wrap),

View File

@@ -0,0 +1,1261 @@
<template>
<q-page class="q-pa-xs pricing-page">
<div class="top-bar row items-center justify-between q-mb-xs">
<div class="text-subtitle1 text-weight-bold">Urun Fiyatlandirma</div>
<div class="row items-center q-gutter-xs">
<q-btn-dropdown color="secondary" outline icon="view_module" label="Doviz Gorunumu" :auto-close="false">
<q-list dense class="currency-menu-list">
<q-item clickable @click="selectAllCurrencies">
<q-item-section>Tumunu Sec</q-item-section>
</q-item>
<q-item clickable @click="clearAllCurrencies">
<q-item-section>Tumunu Temizle</q-item-section>
</q-item>
<q-separator />
<q-item v-for="option in currencyOptions" :key="option.value" clickable @click="toggleCurrencyRow(option.value)">
<q-item-section avatar>
<q-checkbox
:model-value="isCurrencySelected(option.value)"
dense
@update:model-value="(val) => toggleCurrency(option.value, val)"
@click.stop
/>
</q-item-section>
<q-item-section>{{ option.label }}</q-item-section>
</q-item>
</q-list>
</q-btn-dropdown>
<q-btn
flat
:color="showSelectedOnly ? 'primary' : 'grey-7'"
:icon="showSelectedOnly ? 'checklist_rtl' : 'list_alt'"
:label="showSelectedOnly ? `Secililer (${selectedRowCount})` : 'Secili Olanlari Getir'"
:disable="!showSelectedOnly && selectedRowCount === 0"
@click="toggleShowSelectedOnly"
/>
<q-btn flat color="grey-7" icon="restart_alt" label="Filtreleri Sifirla" @click="resetAll" />
<q-btn color="primary" icon="refresh" label="Veriyi Yenile" :loading="store.loading" @click="reloadData" />
</div>
</div>
<div class="table-wrap" :style="{ '--sticky-scroll-comp': `${stickyScrollComp}px` }">
<q-table
ref="mainTableRef"
class="pane-table pricing-table"
flat
dense
row-key="id"
:rows="filteredRows"
:columns="visibleColumns"
:loading="store.loading"
virtual-scroll
:virtual-scroll-item-size="rowHeight"
:virtual-scroll-sticky-size-start="headerHeight"
:virtual-scroll-slice-size="36"
:rows-per-page-options="[0]"
v-model:pagination="tablePagination"
hide-bottom
:table-style="tableStyle"
@virtual-scroll="onTableVirtualScroll"
>
<template #header="props">
<q-tr :props="props" class="header-row-fixed">
<q-th
v-for="col in props.cols"
:key="col.name"
:props="props"
:class="[col.headerClasses, { 'sticky-col': isStickyCol(col.name), 'sticky-boundary': isStickyBoundary(col.name) }]"
:style="getHeaderCellStyle(col)"
>
<q-checkbox
v-if="col.name === 'select'"
size="sm"
color="primary"
:model-value="allSelectedVisible"
:indeterminate="someSelectedVisible && !allSelectedVisible"
@update:model-value="toggleSelectAllVisible"
/>
<div v-else class="header-with-filter">
<span>{{ col.label }}</span>
<q-btn
v-if="isHeaderFilterField(col.field)"
dense
flat
round
size="8px"
icon="filter_alt"
:color="hasFilter(col.field) ? 'primary' : 'grey-7'"
class="header-filter-btn"
>
<q-badge v-if="hasFilter(col.field)" color="primary" floating rounded>
{{ getFilterBadgeValue(col.field) }}
</q-badge>
<q-menu anchor="bottom right" self="top right" :offset="[0, 4]">
<div v-if="isMultiSelectFilterField(col.field)" class="excel-filter-menu">
<q-input
v-model="columnFilterSearch[col.field]"
dense
outlined
clearable
use-input
class="excel-filter-select"
placeholder="Ara"
/>
<div class="excel-filter-actions row items-center justify-between q-pt-xs">
<q-btn flat dense size="sm" label="Tumunu Sec" @click="selectAllColumnFilterOptions(col.field)" />
<q-btn flat dense size="sm" label="Temizle" @click="clearColumnFilter(col.field)" />
</div>
<q-virtual-scroll
v-if="getFilterOptionsForField(col.field).length > 0"
class="excel-filter-options"
:items="getFilterOptionsForField(col.field)"
:virtual-scroll-item-size="32"
separator
>
<template #default="{ item: option }">
<q-item
:key="`${col.field}-${option.value}`"
dense
clickable
class="excel-filter-option"
@click="toggleColumnFilterValue(col.field, option.value)"
>
<q-item-section avatar>
<q-checkbox
dense
size="sm"
:model-value="isColumnFilterValueSelected(col.field, option.value)"
@update:model-value="() => toggleColumnFilterValue(col.field, option.value)"
@click.stop
/>
</q-item-section>
<q-item-section>
<q-item-label>{{ option.label }}</q-item-label>
</q-item-section>
</q-item>
</template>
</q-virtual-scroll>
<div v-else class="excel-filter-empty">
Sonuc yok
</div>
</div>
<div v-else-if="isNumberRangeFilterField(col.field)" class="excel-filter-menu">
<div class="range-filter-grid">
<q-input
v-model="numberRangeFilters[col.field].min"
dense
outlined
clearable
label="Min"
inputmode="decimal"
class="range-filter-field"
/>
<q-input
v-model="numberRangeFilters[col.field].max"
dense
outlined
clearable
label="Max"
inputmode="decimal"
class="range-filter-field"
/>
</div>
<div class="row justify-end q-pt-xs">
<q-btn flat dense size="sm" label="Temizle" @click="clearRangeFilter(col.field)" />
</div>
</div>
<div v-else-if="isDateRangeFilterField(col.field)" class="excel-filter-menu">
<div class="range-filter-grid">
<q-input
v-model="dateRangeFilters[col.field].from"
dense
outlined
clearable
type="date"
label="Baslangic"
class="range-filter-field"
/>
<q-input
v-model="dateRangeFilters[col.field].to"
dense
outlined
clearable
type="date"
label="Bitis"
class="range-filter-field"
/>
</div>
<div class="row justify-end q-pt-xs">
<q-btn flat dense size="sm" label="Temizle" @click="clearRangeFilter(col.field)" />
</div>
</div>
</q-menu>
</q-btn>
<q-btn
v-else
dense
flat
round
size="8px"
icon="filter_alt"
class="header-filter-btn header-filter-ghost"
tabindex="-1"
/>
</div>
</q-th>
</q-tr>
</template>
<template #body-cell-select="props">
<q-td
:props="props"
class="text-center selection-col"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<q-checkbox
size="sm"
color="primary"
:model-value="!!selectedMap[props.row.id]"
@update:model-value="(val) => toggleRowSelection(props.row.id, val)"
/>
</q-td>
</template>
<template #body-cell-productCode="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span class="product-code-text" :title="String(props.value ?? '')">{{ props.value }}</span>
</q-td>
</template>
<template #body-cell-stockQty="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span class="stock-qty-text">{{ formatStock(props.value) }}</span>
</q-td>
</template>
<template #body-cell-stockEntryDate="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span class="date-cell-text">{{ formatDateDisplay(props.value) }}</span>
</q-td>
</template>
<template #body-cell-lastPricingDate="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span :class="['date-cell-text', { 'date-warning': needsRepricing(props.row) }]">
{{ formatDateDisplay(props.value) }}
</span>
</q-td>
</template>
<template #body-cell-brandGroupSelection="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<select
class="native-cell-select"
:value="props.row.brandGroupSelection"
@change="(e) => onBrandGroupSelectionChange(props.row, e.target.value)"
>
<option value="">Seciniz</option>
<option v-for="opt in brandGroupOptions" :key="opt.value" :value="opt.value">
{{ opt.label }}
</option>
</select>
</q-td>
</template>
<template #body-cell="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<input
v-if="editableColumnSet.has(props.col.name)"
class="native-cell-input text-right"
:value="formatPrice(props.row[props.col.field])"
type="text"
inputmode="decimal"
@change="(e) => onEditableCellChange(props.row, props.col.field, e.target.value)"
/>
<span v-else class="cell-text" :title="String(props.value ?? '')">{{ props.value }}</span>
</q-td>
</template>
</q-table>
</div>
<q-banner v-if="store.error" class="bg-red text-white q-mt-xs">
Hata: {{ store.error }}
</q-banner>
</q-page>
</template>
<script setup>
import { computed, onMounted, ref, watch } from 'vue'
import { useProductPricingStore } from 'src/stores/ProductPricingStore'
const store = useProductPricingStore()
// Page size for each keyset-paginated fetch from the store.
const FETCH_LIMIT = 500
// Cursor (last product code) for the next page; '' means start from the top.
const nextCursor = ref('')
const loadingMore = ref(false)
// NOTE(review): hard-coded FX snapshot rates — confirm whether these should
// come from a live rates source instead of constants.
const usdToTry = 38.25
const eurToTry = 41.6
// Price-tier multipliers: tier N (usd1..usd6 etc.) uses multipliers[N-1].
const multipliers = [1, 1.03, 1.06, 1.09, 1.12, 1.15]
// Fixed pixel heights, mirrored by the CSS custom properties in <style>.
const rowHeight = 31
const headerHeight = 72
// Options for the per-row brand-group <select>.
const brandGroupOptions = [
  { label: 'MARKA GRUBU A', value: 'MARKA GRUBU A' },
  { label: 'MARKA GRUBU B', value: 'MARKA GRUBU B' },
  { label: 'MARKA GRUBU C', value: 'MARKA GRUBU C' }
]
// Currency column groups that can be toggled in the "Doviz Gorunumu" menu.
const currencyOptions = [
  { label: 'USD', value: 'USD' },
  { label: 'EUR', value: 'EUR' },
  { label: 'TRY', value: 'TRY' }
]
// Columns that get an Excel-style multi-select value filter.
const multiFilterColumns = [
  { field: 'productCode', label: 'Urun Kodu' },
  { field: 'askiliYan', label: 'Askili Yan' },
  { field: 'kategori', label: 'Kategori' },
  { field: 'urunIlkGrubu', label: 'Urun Ilk Grubu' },
  { field: 'urunAnaGrubu', label: 'Urun Ana Grubu' },
  { field: 'urunAltGrubu', label: 'Urun Alt Grubu' },
  { field: 'icerik', label: 'Icerik' },
  { field: 'karisim', label: 'Karisim' }
]
// Columns filtered by numeric min/max and by date from/to ranges.
const numberRangeFilterFields = ['stockQty']
const dateRangeFilterFields = ['stockEntryDate', 'lastPricingDate']
// Active multi-select filter values, keyed by field ([] = no filter).
const columnFilters = ref({
  productCode: [],
  askiliYan: [],
  kategori: [],
  urunIlkGrubu: [],
  urunAnaGrubu: [],
  urunAltGrubu: [],
  icerik: [],
  karisim: []
})
// Search text typed into each filter menu's search box.
const columnFilterSearch = ref({
  productCode: '',
  askiliYan: '',
  kategori: '',
  urunIlkGrubu: '',
  urunAnaGrubu: '',
  urunAltGrubu: '',
  icerik: '',
  karisim: ''
})
// Range-filter state ('' = bound not set).
const numberRangeFilters = ref({
  stockQty: { min: '', max: '' }
})
const dateRangeFilters = ref({
  stockEntryDate: { from: '', to: '' },
  lastPricingDate: { from: '', to: '' }
})
// Fast membership lookups used by the filter-kind discriminators below.
const multiSelectFilterFieldSet = new Set(multiFilterColumns.map((x) => x.field))
const numberRangeFilterFieldSet = new Set(numberRangeFilterFields)
const dateRangeFilterFieldSet = new Set(dateRangeFilterFields)
const headerFilterFieldSet = new Set([
  ...multiFilterColumns.map((x) => x.field),
  ...numberRangeFilterFields,
  ...dateRangeFilterFields
])
const mainTableRef = ref(null)
// rowsPerPage 0 = show all rows; virtual scrolling handles the volume.
const tablePagination = ref({
  page: 1,
  rowsPerPage: 0,
  sortBy: 'productCode',
  descending: false
})
// row.id -> true for selected rows (falsy/absent = unselected).
const selectedMap = ref({})
const selectedCurrencies = ref(['USD', 'EUR', 'TRY'])
const showSelectedOnly = ref(false)
// Fields rendered as editable price inputs in the table body.
const editableColumns = [
  'costPrice',
  'expenseForBasePrice',
  'basePriceUsd',
  'basePriceTry',
  'usd1',
  'usd2',
  'usd3',
  'usd4',
  'usd5',
  'usd6',
  'eur1',
  'eur2',
  'eur3',
  'eur4',
  'eur5',
  'eur6',
  'try1',
  'try2',
  'try3',
  'try4',
  'try5',
  'try6'
]
const editableColumnSet = new Set(editableColumns)
function col (name, label, field, width, extra = {}) {
  // Column-definition factory. Every column is given a fixed pixel width so
  // the table can use table-layout:fixed and sticky-offset math stays exact.
  const sizing = `width:${width}px;min-width:${width}px;max-width:${width}px;`
  const classes = extra.classes || ''
  return {
    name,
    label,
    field,
    align: extra.align || 'left',
    sortable: Boolean(extra.sortable),
    style: sizing,
    headerStyle: sizing,
    classes,
    headerClasses: extra.headerClasses || classes
  }
}
// Full column catalogue: selection checkbox, sticky product/detail columns,
// then six price tiers per currency. The widths here are the single source
// of truth for sticky offsets (stickyLeftMap) and total table width.
const allColumns = [
  col('select', '', 'select', 40, { align: 'center', classes: 'text-center selection-col' }),
  col('productCode', 'URUN KODU', 'productCode', 108, { sortable: true, classes: 'ps-col product-code-col' }),
  col('stockQty', 'STOK ADET', 'stockQty', 72, { align: 'right', sortable: true, classes: 'ps-col stock-col' }),
  col('stockEntryDate', 'STOK GIRIS TARIHI', 'stockEntryDate', 92, { align: 'center', sortable: true, classes: 'ps-col date-col' }),
  col('lastPricingDate', 'SON FIYATLANDIRMA TARIHI', 'lastPricingDate', 108, { align: 'center', sortable: true, classes: 'ps-col date-col' }),
  col('askiliYan', 'ASKILI YAN', 'askiliYan', 54, { sortable: true, classes: 'ps-col' }),
  col('kategori', 'KATEGORI', 'kategori', 54, { sortable: true, classes: 'ps-col' }),
  col('urunIlkGrubu', 'URUN ILK GRUBU', 'urunIlkGrubu', 66, { sortable: true, classes: 'ps-col' }),
  col('urunAnaGrubu', 'URUN ANA GRUBU', 'urunAnaGrubu', 66, { sortable: true, classes: 'ps-col' }),
  col('urunAltGrubu', 'URUN ALT GRUBU', 'urunAltGrubu', 66, { sortable: true, classes: 'ps-col' }),
  col('icerik', 'ICERIK', 'icerik', 62, { sortable: true, classes: 'ps-col' }),
  col('karisim', 'KARISIM', 'karisim', 62, { sortable: true, classes: 'ps-col' }),
  col('marka', 'MARKA', 'marka', 54, { sortable: true, classes: 'ps-col' }),
  col('brandGroupSelection', 'MARKA GRUBU SECIMI', 'brandGroupSelection', 76),
  col('costPrice', 'MALIYET FIYATI', 'costPrice', 74, { align: 'right', sortable: true, classes: 'usd-col' }),
  col('expenseForBasePrice', 'TABAN FIYAT MASRAF', 'expenseForBasePrice', 86, { align: 'right', classes: 'usd-col' }),
  col('basePriceUsd', 'TABAN USD', 'basePriceUsd', 74, { align: 'right', classes: 'usd-col' }),
  col('basePriceTry', 'TABAN TRY', 'basePriceTry', 74, { align: 'right', classes: 'try-col' }),
  col('usd1', 'USD 1', 'usd1', 62, { align: 'right', classes: 'usd-col' }),
  col('usd2', 'USD 2', 'usd2', 62, { align: 'right', classes: 'usd-col' }),
  col('usd3', 'USD 3', 'usd3', 62, { align: 'right', classes: 'usd-col' }),
  col('usd4', 'USD 4', 'usd4', 62, { align: 'right', classes: 'usd-col' }),
  col('usd5', 'USD 5', 'usd5', 62, { align: 'right', classes: 'usd-col' }),
  col('usd6', 'USD 6', 'usd6', 62, { align: 'right', classes: 'usd-col' }),
  col('eur1', 'EUR 1', 'eur1', 62, { align: 'right', classes: 'eur-col' }),
  col('eur2', 'EUR 2', 'eur2', 62, { align: 'right', classes: 'eur-col' }),
  col('eur3', 'EUR 3', 'eur3', 62, { align: 'right', classes: 'eur-col' }),
  col('eur4', 'EUR 4', 'eur4', 62, { align: 'right', classes: 'eur-col' }),
  col('eur5', 'EUR 5', 'eur5', 62, { align: 'right', classes: 'eur-col' }),
  col('eur6', 'EUR 6', 'eur6', 62, { align: 'right', classes: 'eur-col' }),
  col('try1', 'TRY 1', 'try1', 62, { align: 'right', classes: 'try-col' }),
  col('try2', 'TRY 2', 'try2', 62, { align: 'right', classes: 'try-col' }),
  col('try3', 'TRY 3', 'try3', 62, { align: 'right', classes: 'try-col' }),
  col('try4', 'TRY 4', 'try4', 62, { align: 'right', classes: 'try-col' }),
  col('try5', 'TRY 5', 'try5', 62, { align: 'right', classes: 'try-col' }),
  col('try6', 'TRY 6', 'try6', 62, { align: 'right', classes: 'try-col' })
]
// Columns pinned to the left edge while the table scrolls horizontally,
// listed in visual order; stickyLeftMap accumulates their widths into
// per-column left offsets.
const stickyColumnNames = [
  'select',
  'productCode',
  'stockQty',
  'stockEntryDate',
  'lastPricingDate',
  'askiliYan',
  'kategori',
  'urunIlkGrubu',
  'urunAnaGrubu',
  'urunAltGrubu',
  'icerik',
  'karisim',
  'marka',
  'brandGroupSelection',
  'costPrice',
  'expenseForBasePrice',
  'basePriceUsd',
  'basePriceTry'
]
// The last pinned column: carries the divider border/shadow and closes off
// the sticky region (see stickyScrollComp).
const stickyBoundaryColumnName = 'basePriceTry'
const stickyColumnNameSet = new Set(stickyColumnNames)
// Hide whole currency tier groups (usd*/eur*/try*) when their currency is
// deselected in the "Doviz Gorunumu" dropdown; all other columns stay.
const visibleColumns = computed(() => {
  const active = new Set(selectedCurrencies.value)
  return allColumns.filter((c) => {
    if (c.name.startsWith('usd')) return active.has('USD')
    if (c.name.startsWith('eur')) return active.has('EUR')
    return c.name.startsWith('try') ? active.has('TRY') : true
  })
})
// Left offset (px) for each sticky column: cumulative width of the sticky
// columns that precede it, in stickyColumnNames order.
const stickyLeftMap = computed(() => {
  const map = {}
  let left = 0
  for (const colName of stickyColumnNames) {
    const c = allColumns.find((x) => x.name === colName)
    if (!c) continue
    map[colName] = left
    left += extractWidth(c.style)
  }
  return map
})
// Total width of the pinned region (boundary column offset plus its own
// width); exposed to CSS as --sticky-scroll-comp for scroll compensation.
const stickyScrollComp = computed(() => {
  const boundaryCol = allColumns.find((x) => x.name === stickyBoundaryColumnName)
  return (stickyLeftMap.value[stickyBoundaryColumnName] || 0) + extractWidth(boundaryCol?.style)
})
// Sum of visible column widths; drives an explicit fixed table width so
// table-layout:fixed and the sticky offsets stay consistent.
const tableMinWidth = computed(() => visibleColumns.value.reduce((sum, c) => sum + extractWidth(c.style), 0))
const tableStyle = computed(() => ({
  width: `${tableMinWidth.value}px`,
  minWidth: `${tableMinWidth.value}px`,
  tableLayout: 'fixed'
}))
const rows = computed(() => store.rows || [])
// Distinct non-empty values per multi-filter column, sorted with Turkish
// collation and shaped as {label, value} options for the filter menus.
const multiFilterOptionMap = computed(() => {
  const map = {}
  multiFilterColumns.forEach(({ field }) => {
    const uniq = new Set()
    rows.value.forEach((row) => {
      const val = String(row?.[field] ?? '').trim()
      if (val) uniq.add(val)
    })
    map[field] = Array.from(uniq)
      .sort((a, b) => a.localeCompare(b, 'tr'))
      .map((v) => ({ label: v, value: v }))
  })
  return map
})
// The same options narrowed by each menu's search box (Turkish-lowercase
// substring match).
const filteredFilterOptionMap = computed(() => {
  const map = {}
  multiFilterColumns.forEach(({ field }) => {
    const search = String(columnFilterSearch.value[field] || '').trim().toLocaleLowerCase('tr')
    const options = multiFilterOptionMap.value[field] || []
    map[field] = search
      ? options.filter((option) => option.label.toLocaleLowerCase('tr').includes(search))
      : options
  })
  return map
})
// Rows surviving, in order: the "selected only" toggle, every multi-select
// column filter, the stock-quantity range, and both date ranges.
const filteredRows = computed(() => {
  return rows.value.filter((row) => {
    if (showSelectedOnly.value && !selectedMap.value[row.id]) return false
    const passesMultiFilters = multiFilterColumns.every(({ field }) => {
      const chosen = columnFilters.value[field] || []
      if (chosen.length === 0) return true
      return chosen.includes(String(row?.[field] ?? '').trim())
    })
    if (!passesMultiFilters) return false
    const qty = Number(row?.stockQty ?? 0)
    const qtyMin = parseNullableNumber(numberRangeFilters.value.stockQty?.min)
    const qtyMax = parseNullableNumber(numberRangeFilters.value.stockQty?.max)
    if (qtyMin !== null && qty < qtyMin) return false
    if (qtyMax !== null && qty > qtyMax) return false
    const entryOk = matchesDateRange(String(row?.stockEntryDate || '').trim(), dateRangeFilters.value.stockEntryDate)
    const pricedOk = matchesDateRange(String(row?.lastPricingDate || '').trim(), dateRangeFilters.value.lastPricingDate)
    return entryOk && pricedOk
  })
})
const visibleRowIds = computed(() => filteredRows.value.map((row) => row.id))
// Selected count over the whole data set (not only the visible rows).
const selectedRowCount = computed(() => Object.values(selectedMap.value).filter(Boolean).length)
const selectedVisibleCount = computed(() => visibleRowIds.value.filter((id) => !!selectedMap.value[id]).length)
// Header-checkbox state (checked vs indeterminate) is derived from how many
// of the currently visible rows are selected.
const allSelectedVisible = computed(() => visibleRowIds.value.length > 0 && selectedVisibleCount.value === visibleRowIds.value.length)
const someSelectedVisible = computed(() => selectedVisibleCount.value > 0)
const hasMoreRows = computed(() => Boolean(store.hasMore))
// Whether this column's header shows a filter button at all.
function isHeaderFilterField (field) {
  return headerFilterFieldSet.has(field)
}
// Filter-kind discriminators: at most one of the following holds per field.
function isMultiSelectFilterField (field) {
  return multiSelectFilterFieldSet.has(field)
}
function isNumberRangeFilterField (field) {
  return numberRangeFilterFieldSet.has(field)
}
function isDateRangeFilterField (field) {
  return dateRangeFilterFieldSet.has(field)
}
function hasFilter (field) {
  // True when the column identified by `field` has an active filter of its
  // kind (selected values, or at least one filled range bound).
  if (isMultiSelectFilterField(field)) {
    const selected = columnFilters.value[field] || []
    return selected.length > 0
  }
  if (isNumberRangeFilterField(field)) {
    const { min, max } = numberRangeFilters.value[field] || {}
    return Boolean(String(min || '').trim()) || Boolean(String(max || '').trim())
  }
  if (isDateRangeFilterField(field)) {
    const { from, to } = dateRangeFilters.value[field] || {}
    return Boolean(String(from || '').trim()) || Boolean(String(to || '').trim())
  }
  return false
}
function getFilterBadgeValue (field) {
  // Number shown on the header badge: count of selected values for
  // multi-selects, count of filled bounds (0-2) for range filters.
  if (isMultiSelectFilterField(field)) {
    return (columnFilters.value[field] || []).length
  }
  let bounds = null
  if (isNumberRangeFilterField(field)) {
    const f = numberRangeFilters.value[field]
    bounds = [f?.min, f?.max]
  } else if (isDateRangeFilterField(field)) {
    const f = dateRangeFilters.value[field]
    bounds = [f?.from, f?.to]
  }
  if (!bounds) return 0
  return bounds.reduce((count, v) => (String(v || '').trim() ? count + 1 : count), 0)
}
function clearColumnFilter (field) {
  // Drop every selected value for a multi-select column. The whole object
  // is replaced so dependent computeds re-run.
  if (!isMultiSelectFilterField(field)) return
  const next = { ...columnFilters.value }
  next[field] = []
  columnFilters.value = next
}
function clearRangeFilter (field) {
  // Reset either kind of range filter back to its empty shape.
  if (isNumberRangeFilterField(field)) {
    numberRangeFilters.value = { ...numberRangeFilters.value, [field]: { min: '', max: '' } }
  } else if (isDateRangeFilterField(field)) {
    dateRangeFilters.value = { ...dateRangeFilters.value, [field]: { from: '', to: '' } }
  }
}
// Options for a multi-select filter menu, narrowed by its search box.
function getFilterOptionsForField (field) {
  return filteredFilterOptionMap.value[field] || []
}
// Whether `value` is currently part of the column's active filter set.
function isColumnFilterValueSelected (field, value) {
  return (columnFilters.value[field] || []).includes(value)
}
function toggleColumnFilterValue (field, value) {
  // Flip membership of `value` in the column's active filter list.
  const next = new Set(columnFilters.value[field] || [])
  if (next.has(value)) {
    next.delete(value)
  } else {
    next.add(value)
  }
  columnFilters.value = { ...columnFilters.value, [field]: [...next] }
}
function selectAllColumnFilterOptions (field) {
  // Check every option currently visible in the menu (respects the search box).
  const values = getFilterOptionsForField(field).map((option) => option.value)
  columnFilters.value = { ...columnFilters.value, [field]: values }
}
function extractWidth (style) {
  // Pull the fixed pixel width back out of a generated style string
  // ("width:62px;min-width:..."); 0 for anything that does not match.
  const match = /width:(\d+)px/.exec(String(style || ''))
  return match === null ? 0 : Number(match[1])
}
// A sticky column stays pinned to the left while the table scrolls horizontally.
function isStickyCol (colName) {
  return stickyColumnNameSet.has(colName)
}
// The boundary column is the last sticky one; it carries the divider shadow.
function isStickyBoundary (colName) {
  return colName === stickyBoundaryColumnName
}
function getHeaderCellStyle (col) {
  // Sticky header cells need an explicit left offset; z-index 22 keeps them
  // above sticky body cells (which use 12).
  const offset = stickyLeftMap.value[col.name] || 0
  return isStickyCol(col.name) ? { left: `${offset}px`, zIndex: 22 } : undefined
}
function getBodyCellStyle (col) {
  // Sticky body cells get the same left offset as their header, with a
  // lower z-index (12) so the header layer wins while scrolling.
  const offset = stickyLeftMap.value[col.name] || 0
  return isStickyCol(col.name) ? { left: `${offset}px`, zIndex: 12 } : undefined
}
function round2 (value) {
  // Round to two decimal places; nullish/falsy input counts as 0.
  const n = Number(value || 0)
  return Number(n.toFixed(2))
}
function parseNumber (val) {
  // Parse a tr-TR formatted number ("1.234,56"): whitespace is ignored,
  // dots are thousands separators, comma is the decimal mark.
  // Unparseable input yields 0.
  const text = String(val ?? '').replace(/\s/g, '')
  const canonical = text.split('.').join('').replace(',', '.')
  const parsed = Number(canonical)
  return Number.isFinite(parsed) ? parsed : 0
}
function parseNullableNumber (val) {
  // Like parseNumber (tr-TR format) but distinguishes "no value" (null)
  // from 0, so an empty range bound can mean "unbounded".
  const text = String(val ?? '').trim()
  if (text === '') return null
  const canonical = text.replace(/\s/g, '').split('.').join('').replace(',', '.')
  const parsed = Number(canonical)
  return Number.isFinite(parsed) ? parsed : null
}
function matchesDateRange (value, filter) {
  // ISO (YYYY-MM-DD) strings order correctly as plain strings, so the range
  // check is lexicographic. An empty filter matches everything; an empty
  // value only matches an empty filter.
  const from = String(filter?.from || '').trim()
  const to = String(filter?.to || '').trim()
  if (!from && !to) return true
  if (!value) return false
  const afterFrom = !from || value >= from
  const beforeTo = !to || value <= to
  return afterFrom && beforeTo
}
function formatPrice (val) {
  // Render any price-ish value as a tr-TR string with exactly two decimals
  // (unparseable input becomes "0,00" via parseNumber).
  const amount = parseNumber(val)
  const opts = { minimumFractionDigits: 2, maximumFractionDigits: 2 }
  return amount.toLocaleString('tr-TR', opts)
}
function formatStock (val) {
  // Whole quantities render without decimals; fractional ones (beyond a
  // 0.0001 tolerance) with exactly two. Non-numeric input renders as '0'.
  const qty = Number(val || 0)
  if (!Number.isFinite(qty)) return '0'
  const fractional = Math.abs(qty % 1) > 0.0001
  const digits = fractional ? 2 : 0
  return qty.toLocaleString('tr-TR', {
    minimumFractionDigits: digits,
    maximumFractionDigits: digits
  })
}
function formatDateDisplay (val) {
  // Convert "YYYY-MM-DD" to "DD.MM.YYYY". Blank input becomes "-"; anything
  // that does not split into three dash-separated parts passes through.
  const text = String(val || '').trim()
  if (text === '') return '-'
  const [year, month, day] = text.split('-')
  if (!year || !month || !day) return text
  return [day, month, year].join('.')
}
function needsRepricing (row) {
  // Flag stock that arrived after (or was never covered by) the last
  // pricing pass. ISO date strings compare lexicographically.
  const entered = String(row?.stockEntryDate || '').trim()
  if (!entered) return false
  const priced = String(row?.lastPricingDate || '').trim()
  return !priced || priced < entered
}
function recalcByBasePrice (row) {
  // Derive the TRY base price and all six tiered prices per currency from
  // the USD base price, mutating `row` in place.
  // NOTE(review): EUR tiers are converted from the raw USD base via TRY and
  // do NOT include expenseForBasePrice, while TRY tiers do (through
  // basePriceTry) — confirm this asymmetry is intended.
  row.basePriceTry = round2((row.basePriceUsd * usdToTry) + row.expenseForBasePrice)
  for (let tier = 1; tier <= multipliers.length; tier++) {
    const m = multipliers[tier - 1]
    row[`usd${tier}`] = round2(row.basePriceUsd * m)
    row[`eur${tier}`] = round2((row.basePriceUsd * usdToTry * m) / eurToTry)
    row[`try${tier}`] = round2(row.basePriceTry * m)
  }
}
function onEditableCellChange (row, field, val) {
  // Commit the edited cell through the store, then cascade the price math
  // when one of the base-price inputs changed.
  const numeric = parseNumber(val)
  store.updateCell(row, field, numeric)
  const triggersRecalc = field === 'expenseForBasePrice' || field === 'basePriceUsd'
  if (triggersRecalc) recalcByBasePrice(row)
}
// Persist a brand-group dropdown change for the given row via the store.
function onBrandGroupSelectionChange (row, val) {
  store.updateBrandGroupSelection(row, val)
}
function toggleRowSelection (rowId, val) {
  // Replace the map wholesale so reactivity picks up the change.
  const next = { ...selectedMap.value }
  next[rowId] = Boolean(val)
  selectedMap.value = next
}
function toggleSelectAllVisible (val) {
  // Apply the header checkbox state to every currently visible (filtered)
  // row, leaving hidden rows' selection untouched.
  const checked = Boolean(val)
  const next = { ...selectedMap.value }
  for (const id of visibleRowIds.value) next[id] = checked
  selectedMap.value = next
}
function resetAll () {
  // Return every filter, search box, the selection and the "selected only"
  // toggle to their pristine state. Keys are rebuilt from multiFilterColumns
  // so they always match the filter state shape.
  const emptyLists = {}
  const emptySearches = {}
  for (const { field } of multiFilterColumns) {
    emptyLists[field] = []
    emptySearches[field] = ''
  }
  columnFilters.value = emptyLists
  columnFilterSearch.value = emptySearches
  numberRangeFilters.value = {
    stockQty: { min: '', max: '' }
  }
  dateRangeFilters.value = {
    stockEntryDate: { from: '', to: '' },
    lastPricingDate: { from: '', to: '' }
  }
  showSelectedOnly.value = false
  selectedMap.value = {}
}
function toggleShowSelectedOnly () {
  // Ignore a request to enter "selected only" mode when nothing is selected.
  const entering = !showSelectedOnly.value
  if (entering && selectedRowCount.value === 0) return
  showSelectedOnly.value = entering
}
// Whether the given currency's column group is currently shown.
function isCurrencySelected (code) {
  return selectedCurrencies.value.includes(code)
}
// Add/remove a currency. Rebuilding from currencyOptions keeps a stable
// order no matter which order the user clicked in.
function toggleCurrency (code, checked) {
  const set = new Set(selectedCurrencies.value)
  if (checked) set.add(code)
  else set.delete(code)
  selectedCurrencies.value = currencyOptions.map((x) => x.value).filter((x) => set.has(x))
}
// A click on the dropdown row behaves like toggling its checkbox.
function toggleCurrencyRow (code) {
  toggleCurrency(code, !isCurrencySelected(code))
}
// Show all currency column groups.
function selectAllCurrencies () {
  selectedCurrencies.value = currencyOptions.map((x) => x.value)
}
// Hide every currency column group (the base columns stay visible).
function clearAllCurrencies () {
  selectedCurrencies.value = []
}
async function fetchChunk ({ reset = false } = {}) {
  // Pull one page (FETCH_LIMIT rows) from the store. `reset` restarts the
  // keyset cursor; otherwise continue after the last product code seen.
  const cursor = reset ? '' : nextCursor.value
  const result = await store.fetchRows({
    limit: FETCH_LIMIT,
    afterProductCode: cursor,
    append: !reset
  })
  nextCursor.value = String(result?.nextCursor || '')
  const fetchedCount = Number(result?.fetched) || 0
  return fetchedCount
}
async function loadMoreRows () {
  // Guarded incremental load: skip when a load is already in flight or the
  // backend reported no further pages.
  const busy = loadingMore.value || store.loading
  if (busy || !hasMoreRows.value) return
  loadingMore.value = true
  try {
    await fetchChunk({ reset: false })
  } finally {
    loadingMore.value = false
  }
}
function onTableVirtualScroll (details) {
  // Prefetch the next page once the virtual scroller is within 25 rows of
  // the end of the currently loaded (filtered) data.
  const lastVisibleIndex = Number(details?.to || 0)
  if (!Number.isFinite(lastVisibleIndex)) return
  const threshold = filteredRows.value.length - 25
  if (lastVisibleIndex >= threshold) void loadMoreRows()
}
async function ensureEnoughVisibleRows (minRows = 80, maxBatches = 4) {
  // Active filters can hide most of a fetched page; keep pulling batches
  // (bounded by maxBatches) until enough rows survive the filters.
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || filteredRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
async function reloadData () {
  // Full refresh: restart the keyset cursor, fetch the first page, then top
  // up so active filters still have enough rows to display.
  // Fix: the original only logged reload:done and cleared the stale row
  // selection on success — a rejected fetch skipped both. The finally block
  // guarantees them; the error itself still propagates to the caller.
  const startedAt = Date.now()
  console.info('[product-pricing][ui] reload:start', {
    at: new Date(startedAt).toISOString()
  })
  nextCursor.value = ''
  try {
    await fetchChunk({ reset: true })
    await ensureEnoughVisibleRows(120, 6)
  } finally {
    console.info('[product-pricing][ui] reload:done', {
      duration_ms: Date.now() - startedAt,
      row_count: Array.isArray(store.rows) ? store.rows.length : 0,
      has_error: Boolean(store.error)
    })
    // Selection may reference rows that no longer exist after a reload
    // (successful or not), so always drop it.
    selectedMap.value = {}
  }
}
// Initial load on mount; a rejected reload propagates to Vue's error handler.
onMounted(async () => {
  await reloadData()
})
// Any filter/sort change can shrink the visible set below a useful size, so
// top the data up (bounded batches) whenever one of these sources changes.
watch(
  [
    columnFilters,
    numberRangeFilters,
    dateRangeFilters,
    showSelectedOnly,
    () => tablePagination.value.sortBy,
    () => tablePagination.value.descending
  ],
  () => { void ensureEnoughVisibleRows(80, 4) },
  { deep: true }
)
</script>
<style scoped>
.pricing-page {
--pricing-row-height: 31px;
--pricing-header-height: 72px;
--pricing-table-height: calc(100vh - 210px);
height: calc(100vh - 120px);
display: flex;
flex-direction: column;
overflow: hidden;
}
.currency-menu-list {
min-width: 170px;
}
.table-wrap {
flex: 1;
min-height: 0;
overflow: hidden;
border: 1px solid rgba(0, 0, 0, 0.12);
border-radius: 4px;
display: flex;
flex-direction: column;
}
.pane-table {
height: 100%;
width: 100%;
}
.pricing-table :deep(.q-table__middle) {
height: var(--pricing-table-height);
min-height: var(--pricing-table-height);
max-height: var(--pricing-table-height);
overflow: auto !important;
scrollbar-gutter: stable both-edges;
overscroll-behavior: contain;
}
.pricing-table :deep(.q-table) {
width: max-content;
min-width: 100%;
table-layout: fixed;
font-size: 11px;
border-collapse: separate;
border-spacing: 0;
margin-right: var(--sticky-scroll-comp, 0px);
}
.pricing-table :deep(.q-table__container) {
border: none !important;
box-shadow: none !important;
background: transparent !important;
height: 100% !important;
}
.pricing-table :deep(th),
.pricing-table :deep(td) {
box-sizing: border-box;
padding: 0 4px;
overflow: hidden;
vertical-align: middle;
}
.pricing-table :deep(td),
.pricing-table :deep(.q-table tbody tr) {
height: var(--pricing-row-height) !important;
min-height: var(--pricing-row-height) !important;
max-height: var(--pricing-row-height) !important;
line-height: var(--pricing-row-height);
padding: 0 !important;
border-bottom: 1px solid rgba(0, 0, 0, 0.08) !important;
}
.pricing-table :deep(td > div),
.pricing-table :deep(td > .q-td) {
height: 100% !important;
display: flex !important;
align-items: center !important;
padding: 0 4px !important;
}
.pricing-table :deep(th),
.pricing-table :deep(.q-table thead tr),
.pricing-table :deep(.q-table thead tr.header-row-fixed),
.pricing-table :deep(.q-table thead th),
.pricing-table :deep(.q-table thead tr.header-row-fixed > th) {
height: var(--pricing-header-height) !important;
min-height: var(--pricing-header-height) !important;
max-height: var(--pricing-header-height) !important;
}
.pricing-table :deep(th) {
padding-top: 0;
padding-bottom: 0;
white-space: nowrap;
word-break: normal;
text-overflow: ellipsis;
text-align: center;
font-size: 10px;
font-weight: 800;
line-height: 1.15;
}
.pricing-table :deep(.q-table thead th) {
position: sticky;
top: 0;
z-index: 30;
background: #fff;
vertical-align: middle !important;
}
.pricing-table :deep(.sticky-col) {
position: sticky !important;
background-clip: padding-box;
}
.pricing-table :deep(thead .sticky-col) {
z-index: 35 !important;
}
.pricing-table :deep(tbody .sticky-col) {
z-index: 12 !important;
}
.pricing-table :deep(.sticky-boundary) {
border-right: 2px solid rgba(25, 118, 210, 0.18) !important;
box-shadow: 8px 0 12px -10px rgba(15, 23, 42, 0.55);
}
.header-with-filter {
display: grid;
grid-template-columns: 1fr 20px;
align-items: center;
column-gap: 4px;
height: 100%;
line-height: 1.25;
overflow: hidden;
}
.header-with-filter > span {
min-width: 0;
width: 100%;
overflow: hidden;
text-align: center;
text-overflow: ellipsis;
white-space: normal;
font-weight: 800;
line-height: 1.15;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
}
.header-filter-btn {
width: 20px;
height: 20px;
min-width: 20px;
justify-self: end;
}
.header-filter-ghost {
opacity: 0;
pointer-events: none;
}
.excel-filter-menu {
min-width: 230px;
padding: 8px;
}
.range-filter-grid {
display: grid;
grid-template-columns: 1fr;
gap: 8px;
}
.range-filter-field {
min-width: 0;
}
.excel-filter-select :deep(.q-field__control) {
min-height: 30px;
}
.excel-filter-select :deep(.q-field__native),
.excel-filter-select :deep(.q-field__input) {
font-weight: 700;
}
.excel-filter-actions {
gap: 4px;
}
.excel-filter-options {
max-height: 220px;
margin-top: 8px;
overflow: auto;
border: 1px solid rgba(0, 0, 0, 0.08);
border-radius: 4px;
}
.excel-filter-option {
min-height: 32px;
}
.excel-filter-empty {
padding: 10px 8px;
color: #607d8b;
font-size: 11px;
}
.pricing-table :deep(th.ps-col),
.pricing-table :deep(td.ps-col) {
background: #fff;
color: var(--q-primary);
font-weight: 700;
}
.pricing-table :deep(td.ps-col .cell-text),
.pricing-table :deep(td.ps-col .product-code-text),
.pricing-table :deep(td.ps-col .stock-qty-text) {
font-size: 11px;
line-height: 1.1;
white-space: normal;
word-break: break-word;
}
.stock-qty-text {
display: block;
width: 100%;
text-align: center;
font-weight: 700;
padding: 0 4px;
}
.date-cell-text {
display: block;
width: 100%;
text-align: center;
font-weight: 700;
padding: 0 4px;
}
.date-warning {
color: #c62828;
}
.pricing-table :deep(th.selection-col),
.pricing-table :deep(td.selection-col) {
background: #fff;
color: var(--q-primary);
padding-left: 0 !important;
padding-right: 0 !important;
}
.pricing-table :deep(th.selection-col) {
text-align: center !important;
}
.pricing-table :deep(.selection-col .q-checkbox__inner) {
color: var(--q-primary);
font-size: 16px;
}
.pricing-table :deep(th.selection-col .q-checkbox),
.pricing-table :deep(td.selection-col .q-checkbox) {
display: inline-flex;
align-items: center;
justify-content: center;
}
.pricing-table :deep(.selection-col .q-checkbox__bg) {
background: #fff;
border-color: var(--q-primary);
}
.pricing-table :deep(th.usd-col),
.pricing-table :deep(td.usd-col) {
background: #ecf9f0;
color: #178a3e;
font-weight: 700;
}
.pricing-table :deep(th.eur-col),
.pricing-table :deep(td.eur-col) {
background: #fdeeee;
color: #c62828;
font-weight: 700;
}
.pricing-table :deep(th.try-col),
.pricing-table :deep(td.try-col) {
background: #edf4ff;
color: #1e63c6;
font-weight: 700;
}
.cell-text {
display: block;
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
line-height: 1.1;
padding-top: 0;
}
.product-code-text {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
display: block;
font-weight: 700;
letter-spacing: 0;
}
.native-cell-input,
.native-cell-select {
width: 100%;
height: 22px;
box-sizing: border-box;
padding: 1px 3px;
border: 1px solid #cfd8dc;
border-radius: 4px;
background: #fff;
font-size: 11px;
margin: 0;
}
.native-cell-input:focus,
.native-cell-select:focus {
outline: none;
border-color: #1976d2;
}
</style>

View File

@@ -47,7 +47,7 @@
<template #append>
<q-icon name="event" class="cursor-pointer">
<q-popup-proxy cover transition-show="scale" transition-hide="scale">
<q-date v-model="dateFrom" mask="YYYY-MM-DD" locale="tr-TR"/>
<q-date v-model="dateFrom" mask="YYYY-MM-DD" :locale="dateLocale"/>
</q-popup-proxy>
</q-icon>
</template>
@@ -63,7 +63,7 @@
<template #append>
<q-icon name="event" class="cursor-pointer">
<q-popup-proxy cover transition-show="scale" transition-hide="scale">
<q-date v-model="dateTo" mask="YYYY-MM-DD" locale="tr-TR" />
<q-date v-model="dateTo" mask="YYYY-MM-DD" :locale="dateLocale" />
</q-popup-proxy>
</q-icon>
</template>
@@ -277,12 +277,16 @@ import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
import dayjs from 'dayjs'
import { usePermission } from 'src/composables/usePermission'
import { normalizeSearchText } from 'src/utils/searchText'
import { useLocaleStore } from 'src/stores/localeStore'
import { getDateLocale } from 'src/i18n/dayjsLocale'
const { canRead, canExport } = usePermission()
const canReadFinance = canRead('finance')
const canExportFinance = canExport('finance')
const $q = useQuasar()
const localeStore = useLocaleStore()
const dateLocale = computed(() => getDateLocale(localeStore.locale))
const accountStore = useAccountStore()
const statementheaderStore = useStatementheaderStore()
@@ -363,7 +367,7 @@ async function onFilterClick() {
startdate: dateFrom.value,
enddate: dateTo.value,
accountcode: selectedCari.value,
langcode: 'TR',
langcode: localeStore.backendLangCode,
parislemler: selectedMonType.value
})
@@ -411,7 +415,7 @@ function resetFilters() {
/* Format */
function formatAmount(n) {
if (n == null || isNaN(n)) return '0,00'
return new Intl.NumberFormat('tr-TR', {
return new Intl.NumberFormat(dateLocale.value, {
minimumFractionDigits: 2,
maximumFractionDigits: 2
}).format(n)
@@ -467,7 +471,8 @@ async function handleDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3'])
selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
localeStore.backendLangCode
)
console.log("📤 [DEBUG] Storedan gelen result:", result)
@@ -508,7 +513,8 @@ async function CurrheadDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // parasal işlem tipi (parislemler)
selectedMonType.value, // parasal işlem tipi (parislemler)
localeStore.backendLangCode
)
console.log("📤 [DEBUG] CurrheadDownloadresult:", result)

View File

@@ -0,0 +1,846 @@
<template>
<q-page v-if="canUpdateLanguage" class="q-pa-md translation-page">
<div class="translation-toolbar sticky-toolbar">
<div class="row q-col-gutter-sm items-end q-mb-md">
<div class="col-12 col-md-4">
<q-input
v-model="filters.q"
dense
outlined
clearable
label="Kelime ara"
/>
</div>
<div class="col-auto">
<q-btn color="primary" icon="search" label="Getir" @click="loadRows" />
</div>
<div class="col-auto">
<q-btn
color="secondary"
icon="sync"
label="YENİ KELİMELERİ GETİR"
:loading="store.saving"
@click="syncSources"
/>
</div>
<div class="col-auto">
<q-toggle v-model="autoTranslate" dense color="primary" label="Oto Çeviri" />
</div>
</div>
<div class="row q-gutter-sm q-mb-sm">
<q-btn
color="accent"
icon="g_translate"
label="Seçilenleri Çevir"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="translateSelectedRows"
/>
<q-btn
color="secondary"
icon="done_all"
label="Seçilenleri Onayla"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="bulkApproveSelected"
/>
<q-btn
color="primary"
icon="save"
label="Seçilenleri Toplu Güncelle"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="bulkSaveSelected"
/>
</div>
</div>
<q-table
ref="tableRef"
class="translation-table"
flat
bordered
virtual-scroll
:virtual-scroll-sticky-size-start="56"
row-key="t_key"
:loading="store.loading || store.saving"
:rows="pivotRows"
:columns="columns"
:rows-per-page-options="[0]"
v-model:pagination="tablePagination"
hide-bottom
@virtual-scroll="onVirtualScroll"
>
<template #body-cell-actions="props">
<q-td :props="props">
<q-btn
dense
color="primary"
icon="save"
label="Güncelle"
:disable="!rowHasChanges(props.row.t_key)"
:loading="store.saving"
@click="saveRow(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-select="props">
<q-td :props="props">
<q-checkbox
dense
:model-value="selectedKeys.includes(props.row.t_key)"
@update:model-value="(v) => toggleSelected(props.row.t_key, v)"
/>
</q-td>
</template>
<template #body-cell-source_text_tr="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_text_tr')">
<div class="source-text-label" :title="rowDraft(props.row.t_key).source_text_tr">
{{ rowDraft(props.row.t_key).source_text_tr }}
</div>
</q-td>
</template>
<template #body-cell-source_type="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_type')">
<q-select
v-model="rowDraft(props.row.t_key).source_type"
dense
outlined
emit-value
map-options
:options="sourceTypeOptions"
@update:model-value="() => queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-en="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'en')">
<q-input
v-model="rowDraft(props.row.t_key).en"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-de="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'de')">
<q-input
v-model="rowDraft(props.row.t_key).de"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-es="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'es')">
<q-input
v-model="rowDraft(props.row.t_key).es"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-it="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'it')">
<q-input
v-model="rowDraft(props.row.t_key).it"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-ru="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ru')">
<q-input
v-model="rowDraft(props.row.t_key).ru"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-ar="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ar')">
<q-input
v-model="rowDraft(props.row.t_key).ar"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
</q-table>
</q-page>
<q-page v-else class="q-pa-md flex flex-center">
<div class="text-negative text-subtitle1">
Bu modüle erişim yetkiniz yok.
</div>
</q-page>
</template>
<script setup>
import { computed, onBeforeUnmount, onMounted, ref, watch } from 'vue'
import { useQuasar } from 'quasar'
import { usePermission } from 'src/composables/usePermission'
import { useTranslationStore } from 'src/stores/translationStore'
const $q = useQuasar()
const store = useTranslationStore()
const { canUpdate } = usePermission()
// Page is gated on the 'language' module update permission.
const canUpdateLanguage = canUpdate('language')
// Toolbar filter state (free-text search over translation keys/texts).
const filters = ref({
  q: ''
})
// When on, newly synced strings are machine-translated immediately.
const autoTranslate = ref(false)
const tableRef = ref(null)
// Chunk size for server-side pagination of translation rows.
const FETCH_LIMIT = 1400
// Offset of the next chunk to request and whether the server has more rows.
const loadedOffset = ref(0)
const hasMoreRows = ref(true)
const loadingMore = ref(false)
const tablePagination = ref({
  page: 1,
  rowsPerPage: 0, // 0 = show all; the q-table virtual-scrolls instead of paging
  sortBy: 'source_text_tr',
  descending: false
})
// Debounce handle for search-box driven reloads (see scheduleFilterReload).
let filterReloadTimer = null
const sourceTypeOptions = [
  { label: 'dummy', value: 'dummy' },
  { label: 'postgre', value: 'postgre' },
  { label: 'mssql', value: 'mssql' }
]
// q-table column layout; one column per target language plus key/actions.
const columns = [
  { name: 'actions', label: 'Güncelle', field: 'actions', align: 'left' },
  { name: 'select', label: 'Seç', field: 'select', align: 'left' },
  { name: 'source_text_tr', label: 'Türkçe Metin', field: 'source_text_tr', align: 'left', style: 'min-width: 340px' },
  { name: 'source_type', label: 'Kaynak', field: 'source_type', align: 'left', style: 'min-width: 140px' },
  { name: 'en', label: 'İngilizce', field: 'en', align: 'left', style: 'min-width: 220px' },
  { name: 'de', label: 'Almanca', field: 'de', align: 'left', style: 'min-width: 220px' },
  { name: 'es', label: 'İspanyolca', field: 'es', align: 'left', style: 'min-width: 220px' },
  { name: 'it', label: 'İtalyanca', field: 'it', align: 'left', style: 'min-width: 220px' },
  { name: 'ru', label: 'Rusça', field: 'ru', align: 'left', style: 'min-width: 220px' },
  { name: 'ar', label: 'Arapça', field: 'ar', align: 'left', style: 'min-width: 220px' }
]
// Editable values keyed by t_key (draft) and their last-loaded server state (original).
const draftByKey = ref({})
const originalByKey = ref({})
// Currently checked row keys (t_key values).
const selectedKeys = ref([])
// Per-row auto-save debounce timers: t_key -> timeout id.
const autoSaveTimers = new Map()
/**
 * Store rows pivoted by translation key: one table row per t_key with a
 * column per language code, plus per-language metadata in `langs`,
 * sorted alphabetically by key.
 */
const pivotRows = computed(() => {
  const grouped = new Map()
  const blankRow = (key) => ({
    t_key: key,
    source_text_tr: '',
    source_type: 'dummy',
    en: '',
    de: '',
    es: '',
    it: '',
    ru: '',
    ar: '',
    langs: {}
  })
  for (const record of store.rows) {
    const key = record.t_key
    let entry = grouped.get(key)
    if (!entry) {
      entry = blankRow(key)
      grouped.set(key, entry)
    }
    // Keep id/status/is_manual per language for approval & update payloads.
    entry.langs[record.lang_code] = {
      id: record.id,
      status: record.status,
      is_manual: record.is_manual
    }
    const lang = record.lang_code
    if (lang === 'tr') {
      // Turkish is the source column; prefer translated_text when present.
      entry.source_text_tr = record.translated_text || record.source_text_tr || ''
      entry.source_type = record.source_type || 'dummy'
    } else if (['en', 'de', 'es', 'it', 'ru', 'ar'].includes(lang)) {
      entry[lang] = record.translated_text || ''
    }
  }
  return [...grouped.values()].sort((a, b) => a.t_key.localeCompare(b.t_key))
})
/**
 * Rebuild the draft/original snapshots from the current pivot rows.
 * With { preserveDirty: true }, rows whose draft differs from its original
 * (i.e. rows with unsaved user edits) keep their existing pair instead of
 * being overwritten by server data. Also prunes stale selections.
 */
function snapshotDrafts (options = {}) {
  const preserveDirty = Boolean(options?.preserveDirty)
  const fields = ['source_text_tr', 'source_type', 'en', 'de', 'es', 'it', 'ru', 'ar']
  const nextDraft = {}
  const nextOriginal = {}
  for (const row of pivotRows.value) {
    const prevDraft = draftByKey.value[row.t_key]
    const prevOriginal = originalByKey.value[row.t_key]
    const isDirty = Boolean(
      prevDraft &&
      prevOriginal &&
      fields.some(f => prevDraft[f] !== prevOriginal[f])
    )
    if (preserveDirty && isDirty) {
      // Keep the user's in-progress edits intact.
      nextDraft[row.t_key] = { ...prevDraft }
      nextOriginal[row.t_key] = { ...prevOriginal }
    } else {
      const fresh = {
        source_text_tr: row.source_text_tr || '',
        source_type: row.source_type || 'dummy',
        en: row.en || '',
        de: row.de || '',
        es: row.es || '',
        it: row.it || '',
        ru: row.ru || '',
        ar: row.ar || ''
      }
      nextDraft[row.t_key] = fresh
      nextOriginal[row.t_key] = { ...fresh }
    }
  }
  draftByKey.value = nextDraft
  originalByKey.value = nextOriginal
  // Drop selections whose rows are no longer present.
  selectedKeys.value = selectedKeys.value.filter(k => nextDraft[k])
}
/**
 * Return the editable draft for a key, lazily creating an empty one so
 * template v-model bindings always have an object to write into.
 */
function rowDraft (key) {
  const existing = draftByKey.value[key]
  if (existing) return existing
  const blank = {
    source_text_tr: '',
    source_type: 'dummy',
    en: '',
    de: '',
    es: '',
    it: '',
    ru: '',
    ar: ''
  }
  draftByKey.value[key] = blank
  return blank
}
/**
 * Build the server-side filter params; an empty search box is sent as
 * undefined so the backend treats it as "no filter".
 */
function buildFilters () {
  const trimmed = String(filters.value.q || '').trim()
  return { q: trimmed === '' ? undefined : trimmed }
}
// True when the row's draft differs from its last-loaded snapshot in any field.
function rowHasChanges (key) {
  const draft = draftByKey.value[key]
  const orig = originalByKey.value[key]
  if (!draft || !orig) return false
  const watched = ['source_text_tr', 'source_type', 'en', 'de', 'es', 'it', 'ru', 'ar']
  return watched.some(field => draft[field] !== orig[field])
}
// True when the given language record of a key is still in 'pending' status.
function isPending (key, lang) {
  const meta = pivotRows.value.find(r => r.t_key === key)?.langs?.[lang]
  return meta?.status === 'pending'
}
/**
 * CSS class for a table cell: 'cell-dirty' when edited locally,
 * 'cell-new' when the backing record is still pending approval,
 * '' otherwise.
 */
function cellClass (key, field) {
  const draft = draftByKey.value[key]
  const orig = originalByKey.value[key]
  if (!draft || !orig) return ''
  if (draft[field] !== orig[field]) return 'cell-dirty'
  // Map a column to the language whose pending status it reflects
  // (source_type intentionally has no pending highlight).
  const pendingLangByField = {
    en: 'en',
    de: 'de',
    es: 'es',
    it: 'it',
    ru: 'ru',
    ar: 'ar',
    source_text_tr: 'tr'
  }
  const lang = pendingLangByField[field]
  return lang && isPending(key, lang) ? 'cell-new' : ''
}
// Check/uncheck one row, keeping the selection array free of duplicates.
function toggleSelected (key, checked) {
  if (!checked) {
    selectedKeys.value = selectedKeys.value.filter(k => k !== key)
    return
  }
  if (selectedKeys.value.includes(key)) return
  selectedKeys.value = [...selectedKeys.value, key]
}
/**
 * Debounced per-row auto-save: (re)start a 250 ms timer for the key and
 * persist the row once it fires, but only if it still has changes.
 */
function queueAutoSave (key) {
  if (!key) return
  const pending = autoSaveTimers.get(key)
  if (pending) clearTimeout(pending)
  autoSaveTimers.set(key, setTimeout(() => {
    autoSaveTimers.delete(key)
    if (rowHasChanges(key)) {
      void saveRow(key) // fire-and-forget; saveRow handles its own errors
    }
  }, 250))
}
/**
 * Fetch one page of rows from the store (appending when paginating) and
 * refresh the draft snapshots afterwards.
 * NOTE(review): store.count is assumed to be the size of the chunk just
 * fetched (not the total row count) — the offset/hasMoreRows math depends
 * on that; confirm against translationStore.fetchRows.
 */
async function fetchRowsChunk (append = false) {
  const params = {
    ...buildFilters(),
    limit: FETCH_LIMIT,
    offset: append ? loadedOffset.value : 0
  }
  await store.fetchRows(params, { append })
  const incomingCount = Number(store.count) || 0
  if (append) {
    loadedOffset.value += incomingCount
  } else {
    loadedOffset.value = incomingCount
  }
  // A short chunk means the server ran out of rows.
  hasMoreRows.value = incomingCount === FETCH_LIMIT
  // Only preserve dirty drafts when appending; a fresh load resets them.
  snapshotDrafts({ preserveDirty: append })
}
/**
 * Reset pagination and load the first chunk of rows; failures are logged
 * and surfaced to the user via a negative notification.
 */
async function loadRows () {
  loadedOffset.value = 0
  hasMoreRows.value = true
  try {
    await fetchRowsChunk(false)
  } catch (err) {
    const message = err?.message || 'Çeviri satırları yüklenemedi'
    console.error('[translation-sync][ui] loadRows:error', { message })
    $q.notify({ type: 'negative', message })
  }
}
// Load the next chunk unless a fetch/save is already in flight or the
// server has no more rows; loadingMore guards against re-entry.
async function loadMoreRows () {
  const busy = loadingMore.value || store.loading || store.saving
  if (busy || !hasMoreRows.value) return
  loadingMore.value = true
  try {
    await fetchRowsChunk(true)
  } finally {
    loadingMore.value = false
  }
}
/**
 * Keep fetching chunks (at most maxBatches of them) until at least
 * minRows pivot rows are available or the server has no more data.
 */
async function ensureEnoughVisibleRows (minRows = 120, maxBatches = 4) {
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || pivotRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
// Virtual-scroll handler: prefetch the next chunk once the viewport is
// within 15 rows of the bottom of the loaded data.
function onVirtualScroll (details) {
  const lastVisible = Number(details?.to || 0)
  if (!Number.isFinite(lastVisible)) return
  const nearEnd = lastVisible >= pivotRows.value.length - 15
  if (nearEnd) {
    void loadMoreRows() // fire-and-forget; guarded internally
  }
}
// Debounce (350 ms) a full table reload triggered by search-box changes.
function scheduleFilterReload () {
  if (filterReloadTimer) clearTimeout(filterReloadTimer)
  filterReloadTimer = setTimeout(() => {
    filterReloadTimer = null
    void loadRows()
  }, 350)
}
/**
 * Insert backend records for languages that have draft text but no
 * existing row yet. Returns true when anything was inserted, in which
 * case the caller must reload to pick up the new row ids.
 */
async function ensureMissingLangRows (key, draft, langs) {
  const candidates = ['en', 'de', 'es', 'it', 'ru', 'ar']
  const missingLangs = candidates.filter(
    lang => !langs[lang] && String(draft[lang] || '').trim() !== ''
  )
  if (missingLangs.length === 0) return false
  // Seed with the Turkish source text (falling back to the key itself).
  const seed = { t_key: key, source_text_tr: draft.source_text_tr || key }
  await store.upsertMissing([seed], missingLangs)
  return true
}
/**
 * Collect the bulk-update payload for one pivot row: one item per
 * language whose text changed — or every language with a backend id
 * when source_type changed, since that column is stored per record.
 * Every emitted item is marked manual and set to approveStatus.
 */
function buildRowUpdates (row, draft, original, approveStatus = 'approved') {
  const items = []
  const langs = row.langs || {}
  const sourceTypeChanged = draft.source_type !== original.source_type
  // Turkish is special: its translated_text mirrors the source text.
  if (langs.tr?.id && (draft.source_text_tr !== original.source_text_tr || sourceTypeChanged)) {
    items.push({
      id: langs.tr.id,
      source_text_tr: draft.source_text_tr,
      translated_text: draft.source_text_tr,
      source_type: draft.source_type,
      status: approveStatus,
      is_manual: true
    })
  }
  for (const lang of ['en', 'de', 'es', 'it', 'ru', 'ar']) {
    if (!langs[lang]?.id) continue
    if (draft[lang] === original[lang] && !sourceTypeChanged) continue
    items.push({
      id: langs[lang].id,
      translated_text: draft[lang],
      source_type: draft.source_type,
      status: approveStatus,
      is_manual: true
    })
  }
  return items
}
/**
 * Persist one row: first insert backend records for any language that
 * only exists as draft text, then bulk-update every changed language.
 *
 * Bug fix: inserting missing languages triggers loadRows(), whose
 * snapshot runs with preserveDirty=false and resets all drafts to server
 * state — silently discarding the very edits being saved, so
 * buildRowUpdates saw draft === original and persisted nothing. The
 * draft is captured before the reload and restored afterwards.
 */
async function saveRow (key) {
  const row = pivotRows.value.find(r => r.t_key === key)
  const draft = draftByKey.value[key]
  const original = originalByKey.value[key]
  if (!row || !draft || !original || !rowHasChanges(key)) return
  try {
    // Snapshot the user's edits before any reload can wipe them.
    const pendingDraft = { ...draft }
    const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
    if (insertedMissing) {
      await loadRows()
      // loadRows() re-snapshots drafts from the server; restore the edits.
      draftByKey.value[key] = { ...pendingDraft }
    }
    const refreshed = pivotRows.value.find(r => r.t_key === key)
    if (!refreshed) return
    const refreshDraft = draftByKey.value[key]
    const refreshOriginal = originalByKey.value[key]
    const items = buildRowUpdates(refreshed, refreshDraft, refreshOriginal)
    if (items.length > 0) {
      await store.bulkUpdate(items)
    }
    await loadRows()
    $q.notify({ type: 'positive', message: 'Satır güncellendi' })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Güncelleme hatası' })
  }
}
/**
 * Approve every pending language record among the selected rows, then
 * reload; warns when the selection contains nothing pending.
 */
async function bulkApproveSelected () {
  try {
    const pendingIds = new Set() // Set dedupes while keeping insertion order
    const allLangs = ['tr', 'en', 'de', 'es', 'it', 'ru', 'ar']
    for (const key of selectedKeys.value) {
      const row = pivotRows.value.find(r => r.t_key === key)
      if (!row) continue
      for (const lang of allLangs) {
        const meta = row.langs?.[lang]
        if (meta?.id && meta?.status === 'pending') {
          pendingIds.add(meta.id)
        }
      }
    }
    const ids = Array.from(pendingIds)
    if (ids.length === 0) {
      $q.notify({ type: 'warning', message: 'Onaylanacak pending kayıt bulunamadı' })
      return
    }
    await store.bulkApprove(ids)
    await loadRows()
    $q.notify({ type: 'positive', message: `${ids.length} kayıt onaylandı` })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Toplu onay hatası' })
  }
}
/**
 * Machine-translate the selected keys into all six target languages,
 * then reload and report how many records were translated.
 */
async function translateSelectedRows () {
  try {
    const keys = [...new Set(selectedKeys.value.filter(Boolean))]
    if (keys.length === 0) {
      $q.notify({ type: 'warning', message: 'Çevrilecek seçim bulunamadı' })
      return
    }
    const response = await store.translateSelected({
      t_keys: keys,
      languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
      // 6 target records per key, bounded by the API's hard limit.
      limit: Math.min(50000, keys.length * 6)
    })
    const translated = Number(response?.translated_count || 0)
    const traceId = response?.trace_id || null
    await loadRows()
    const traceSuffix = traceId ? ` | Trace: ${traceId}` : ''
    $q.notify({
      type: 'positive',
      message: `Seçilenler çevrildi: ${translated}${traceSuffix}`
    })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Seçili çeviri işlemi başarısız' })
  }
}
/**
 * Persist every changed selected row in one bulk update; rows missing
 * backend language records get them inserted first.
 *
 * Bug fix: inserting missing languages reloads the table, and that
 * reload's snapshot (preserveDirty=false) resets ALL drafts — wiping the
 * unsaved edits of the row being processed and of every later row in the
 * loop, so their changes were silently dropped. Drafts of the selected
 * rows are captured up front and restored after each mid-loop reload.
 */
async function bulkSaveSelected () {
  try {
    // Capture edits before any mid-loop reload can reset the drafts.
    const savedDrafts = {}
    for (const key of selectedKeys.value) {
      if (draftByKey.value[key]) {
        savedDrafts[key] = { ...draftByKey.value[key] }
      }
    }
    const items = []
    for (const key of selectedKeys.value) {
      const row = pivotRows.value.find(r => r.t_key === key)
      const draft = draftByKey.value[key]
      const original = originalByKey.value[key]
      if (!row || !draft || !original) continue
      if (!rowHasChanges(key)) continue
      const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
      if (insertedMissing) {
        await loadRows()
        // Restore the captured edits the reload snapshot just discarded.
        for (const k of Object.keys(savedDrafts)) {
          if (draftByKey.value[k]) {
            draftByKey.value[k] = { ...savedDrafts[k] }
          }
        }
      }
      const refreshed = pivotRows.value.find(r => r.t_key === key)
      if (!refreshed) continue
      const refreshDraft = draftByKey.value[key]
      const refreshOriginal = originalByKey.value[key]
      items.push(...buildRowUpdates(refreshed, refreshDraft, refreshOriginal))
    }
    if (items.length === 0) {
      $q.notify({ type: 'warning', message: 'Toplu güncellenecek değişiklik yok' })
      return
    }
    await store.bulkUpdate(items)
    await loadRows()
    $q.notify({ type: 'positive', message: `${items.length} kayıt toplu güncellendi` })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Toplu güncelleme hatası' })
  }
}
/**
 * Scan the backend sources for new translatable strings (only_new),
 * optionally auto-translating them, then reload the table. Emits
 * structured console logs with trace ids for support diagnostics.
 */
async function syncSources () {
  const startedAt = Date.now()
  const beforeCount = pivotRows.value.length
  console.info('[translation-sync][ui] button:click', {
    at: new Date(startedAt).toISOString(),
    auto_translate: autoTranslate.value,
    only_new: true,
    before_row_count: beforeCount
  })
  try {
    const response = await store.syncSources({
      auto_translate: autoTranslate.value,
      languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
      limit: 1000,
      only_new: true
    })
    // The store may return the payload directly or wrapped in { result }.
    const result = response?.result || response || {}
    const traceId = result?.trace_id || response?.trace_id || null
    console.info('[translation-sync][ui] sync:response', {
      trace_id: traceId,
      seed_count: result.seed_count || 0,
      affected_count: result.affected_count || 0,
      auto_translated: result.auto_translated || 0,
      duration_ms: result.duration_ms || null
    })
    await loadRows()
    const afterCount = pivotRows.value.length
    // Log row-count delta so support can see what the sync actually added.
    console.info('[translation-sync][ui] chain:reload-complete', {
      trace_id: traceId,
      duration_ms: Date.now() - startedAt,
      before_row_count: beforeCount,
      after_row_count: afterCount,
      delta_row_count: afterCount - beforeCount
    })
    $q.notify({
      type: 'positive',
      message: `Tarama tamamlandı. Seed: ${result.seed_count || 0}, Oto çeviri: ${result.auto_translated || 0}`
    })
  } catch (err) {
    $q.notify({
      type: 'negative',
      message: err?.message || 'Kaynak tarama hatası'
    })
  }
}
// Initial data load when the page mounts.
onMounted(() => {
  void loadRows()
})
// Cancel a pending debounced reload when leaving the page.
onBeforeUnmount(() => {
  if (filterReloadTimer) {
    clearTimeout(filterReloadTimer)
    filterReloadTimer = null
  }
})
// Reload (debounced) whenever the search text changes.
watch(
  () => filters.value.q,
  () => { scheduleFilterReload() }
)
// After a sort change, make sure enough rows are loaded to fill the viewport.
watch(
  [() => tablePagination.value.sortBy, () => tablePagination.value.descending],
  () => { void ensureEnoughVisibleRows(120, 4) }
)
</script>
<style scoped>
.translation-page {
height: calc(100vh - 120px);
display: flex;
flex-direction: column;
overflow: hidden;
}
.translation-toolbar {
background: #fff;
padding-top: 6px;
}
.sticky-toolbar {
position: sticky;
top: 0;
z-index: 35;
}
.translation-table {
flex: 1;
min-height: 0;
}
.translation-table :deep(.q-table__middle) {
max-height: calc(100vh - 280px);
overflow: auto;
}
.translation-table :deep(.q-table thead tr th) {
position: sticky;
top: 0;
z-index: 30;
background: #fff;
}
.translation-table :deep(.q-table tbody td) {
vertical-align: top;
padding: 6px;
}
.translation-table :deep(.q-field__native) {
line-height: 1.35;
word-break: break-word;
}
.source-text-label {
white-space: pre-wrap;
word-break: break-word;
line-height: 1.4;
max-height: 11.2em;
overflow: auto;
}
.cell-dirty {
background: #fff3cd;
}
.cell-new {
background: #d9f7e8;
}
</style>

View File

@@ -42,7 +42,7 @@
<q-date
v-model="dateFrom"
mask="YYYY-MM-DD"
locale="tr-TR"
:locale="dateLocale"
:options="isValidFromDate"
/>
</q-popup-proxy>
@@ -65,7 +65,7 @@
<q-date
v-model="dateTo"
mask="YYYY-MM-DD"
locale="tr-TR"
:locale="dateLocale"
:options="isValidToDate"
/>
</q-popup-proxy>
@@ -281,12 +281,18 @@ import { useStatementdetailStore } from 'src/stores/statementdetailStore'
import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
import dayjs from 'dayjs'
import { usePermission } from 'src/composables/usePermission'
import { useLocaleStore } from 'src/stores/localeStore'
import { getDateLocale } from 'src/i18n/dayjsLocale'
import { useI18n } from 'src/composables/useI18n'
const { canRead, canExport } = usePermission()
const canReadFinance = canRead('finance')
const canExportFinance = canExport('finance')
const $q = useQuasar()
const localeStore = useLocaleStore()
const { t } = useI18n()
const dateLocale = computed(() => getDateLocale(localeStore.locale))
const accountStore = useAccountStore()
const statementheaderStore = useStatementheaderStore()
@@ -360,7 +366,7 @@ function hasInvalidDateRange () {
function notifyInvalidDateRange () {
$q.notify({
type: 'warning',
message: '⚠️ Başlangıç tarihi bitiş tarihinden sonra olamaz.',
message: t('statement.invalidDateRange'),
position: 'top-right'
})
}
@@ -402,7 +408,7 @@ async function onFilterClick() {
if (!selectedCari.value || !dateFrom.value || !dateTo.value) {
$q.notify({
type: 'warning',
message: '⚠️ Lütfen cari ve tarih aralığını seçiniz.',
message: t('statement.selectFilters'),
position: 'top-right'
})
return
@@ -417,7 +423,7 @@ async function onFilterClick() {
startdate: dateFrom.value,
enddate: dateTo.value,
accountcode: selectedCari.value,
langcode: 'TR',
langcode: localeStore.backendLangCode,
parislemler: selectedMonType.value,
excludeopening: excludeOpening.value
})
@@ -483,7 +489,7 @@ function toggleFiltersCollapsed () {
function normalizeText (str) {
return (str || '')
.toString()
.toLocaleLowerCase('tr-TR') // Türkçe uyumlu
.toLocaleLowerCase(dateLocale.value)
.normalize('NFD') // aksan temizleme
.replace(/[\u0300-\u036f]/g, '')
.trim()
@@ -503,7 +509,7 @@ function resetFilters() {
/* Format */
function formatAmount(n) {
if (n == null || isNaN(n)) return '0,00'
return new Intl.NumberFormat('tr-TR', {
return new Intl.NumberFormat(dateLocale.value, {
minimumFractionDigits: 2,
maximumFractionDigits: 2
}).format(n)
@@ -562,7 +568,8 @@ async function handleDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3'])
selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
localeStore.backendLangCode
)
console.log("[DEBUG] Storedan gelen result:", result)
@@ -608,7 +615,8 @@ async function CurrheadDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // parasal işlem tipi (parislemler)
selectedMonType.value, // parasal işlem tipi (parislemler)
localeStore.backendLangCode
)
console.log("[DEBUG] CurrheadDownloadresult:", result)

View File

@@ -228,6 +228,12 @@ const routes = [
component: () => import('../pages/MarketMailMapping.vue'),
meta: { permission: 'system:update' }
},
{
path: 'language/translations',
name: 'translation-table',
component: () => import('pages/TranslationTable.vue'),
meta: { permission: 'language:update' }
},
/* ================= ORDERS ================= */
@@ -311,6 +317,14 @@ const routes = [
meta: { permission: 'order:view' }
},
/* ================= PRICING ================= */
{
path: 'pricing/product-pricing',
name: 'product-pricing',
component: () => import('pages/ProductPricing.vue'),
meta: { permission: 'order:view' }
},
/* ================= PASSWORD ================= */

View File

@@ -1,12 +1,14 @@
import axios from 'axios'
import qs from 'qs'
import { useAuthStore } from 'stores/authStore'
import { DEFAULT_LOCALE, normalizeLocale } from 'src/i18n/languages'
const rawBaseUrl =
(typeof process !== 'undefined' && process.env?.VITE_API_BASE_URL) || '/api'
export const API_BASE_URL = String(rawBaseUrl).trim().replace(/\/+$/, '')
const AUTH_REFRESH_PATH = '/auth/refresh'
const LOCALE_STORAGE_KEY = 'bss.locale'
const api = axios.create({
baseURL: API_BASE_URL,
@@ -74,6 +76,11 @@ function redirectToLogin() {
window.location.hash = '/login'
}
function getRequestLocale() {
if (typeof window === 'undefined') return DEFAULT_LOCALE
return normalizeLocale(window.localStorage.getItem(LOCALE_STORAGE_KEY))
}
api.interceptors.request.use((config) => {
const auth = useAuthStore()
const url = config.url || ''
@@ -82,6 +89,8 @@ api.interceptors.request.use((config) => {
config.headers ||= {}
config.headers.Authorization = `Bearer ${auth.token}`
}
config.headers ||= {}
config.headers['Accept-Language'] = getRequestLocale()
return config
})

View File

@@ -2,50 +2,20 @@
import { defineStore } from 'pinia'
import api from 'src/services/api'
function normalizeTextForMatch (v) {
return String(v || '')
.trim()
.toUpperCase()
.normalize('NFD')
.replace(/[\u0300-\u036f]/g, '')
}
// Production ekranlari icin beden grup tespiti helper'i.
// Ozel kural:
// YETISKIN/GARSON = GARSON ve URUN ANA GRUBU "GOMLEK ATA YAKA" veya "GOMLEK KLASIK" ise => yas
export function detectProductionBedenGroup (bedenList, urunAnaGrubu = '', urunKategori = '', yetiskinGarson = '') {
const list = Array.isArray(bedenList) ? bedenList : []
const hasLetterSizes = list
.map(v => String(v || '').trim().toUpperCase())
.some(v => ['XS', 'S', 'M', 'L', 'XL', '2XL', '3XL', '4XL', '5XL', '6XL', '7XL'].includes(v))
const ana = normalizeTextForMatch(urunAnaGrubu)
const kat = normalizeTextForMatch(urunKategori)
const yg = normalizeTextForMatch(yetiskinGarson)
if ((kat.includes('GARSON') || yg.includes('GARSON')) &&
(ana.includes('GOMLEK ATAYAKA') || ana.includes('GOMLEK ATA YAKA') || ana.includes('GOMLEK KLASIK'))) {
return 'yas'
}
if (hasLetterSizes) return 'gom'
if ((ana.includes('AYAKKABI') || kat.includes('AYAKKABI')) && (kat.includes('GARSON') || yg.includes('GARSON'))) return 'ayk_garson'
if (kat.includes('GARSON') || yg.includes('GARSON') || ana.includes('GARSON')) return 'yas'
if (ana.includes('PANTOLON') && kat.includes('YETISKIN')) return 'pan'
if (ana.includes('AKSESUAR')) return 'aksbir'
return 'tak'
}
function extractApiErrorMessage (err, fallback) {
const data = err?.response?.data
if (typeof data === 'string' && data.trim()) return data
if (data && typeof data === 'object') {
const validationMessages = Array.isArray(data.barcodeValidations)
? data.barcodeValidations.map(v => String(v?.message || '').trim()).filter(Boolean)
: []
const msg = String(data.message || '').trim()
const step = String(data.step || '').trim()
const detail = String(data.detail || '').trim()
const parts = [msg]
if (step) parts.push(`step=${step}`)
if (detail) parts.push(detail)
if (validationMessages.length) parts.push(validationMessages.join(' | '))
const merged = parts.filter(Boolean).join(' | ')
if (merged) return merged
}
@@ -63,19 +33,126 @@ function logApiError (action, err, payload = null) {
})
}
function nowMs () {
if (typeof performance !== 'undefined' && typeof performance.now === 'function') {
return performance.now()
}
return Date.now()
}
const applyInFlightByOrder = new Map()
const YAS_NUMERIC_SIZES = new Set(['2', '4', '6', '8', '10', '12', '14'])
function safeStr (value) {
return value == null ? '' : String(value).trim()
}
function normalizeProductionDim1Label (value) {
let text = safeStr(value)
if (!text) return ''
text = text.toUpperCase()
const yasMatch = text.match(/^(\d+)\s*(Y|YAS|YAŞ)$/)
if (yasMatch?.[1] && YAS_NUMERIC_SIZES.has(yasMatch[1])) {
return yasMatch[1]
}
return text
}
function pickPreferredProductionYasPayloadLabel (currentRaw, nextRaw) {
const current = safeStr(currentRaw).toUpperCase()
const next = safeStr(nextRaw).toUpperCase()
if (!next) return current
if (!current) return next
const currentHasYas = /YAS$|YAŞ$/.test(current)
const nextHasYas = /YAS$|YAŞ$/.test(next)
if (!currentHasYas && nextHasYas) return next
return current
}
function toProductionPayloadDim1 (row, value) {
const base = normalizeProductionDim1Label(value)
if (!base) return ''
if (!YAS_NUMERIC_SIZES.has(base)) return base
const map =
row?.yasPayloadMap && typeof row.yasPayloadMap === 'object'
? row.yasPayloadMap
: {}
const mapped = safeStr(map[base]).toUpperCase()
if (mapped) return mapped
return `${base}Y`
}
export const useOrderProductionItemStore = defineStore('orderproductionitems', {
state: () => ({
items: [],
header: null,
products: [],
colorOptionsByCode: {},
newColorOptionsByCode: {},
secondColorOptionsByKey: {},
newSecondColorOptionsByKey: {},
colorRequestsByCode: {},
newColorRequestsByCode: {},
secondColorRequestsByKey: {},
newSecondColorRequestsByKey: {},
productAttributesByItemType: {},
productItemAttributesByKey: {},
cdItemLookups: null,
cdItemDraftsByCode: {},
productAttributeDraftsByCode: {},
knownExistingItemCodes: {},
loading: false,
saving: false,
error: null
}),
getters: {
productCodeSet (state) {
const set = new Set()
for (const p of (state.products || [])) {
const code = String(p?.ProductCode || '').trim().toUpperCase()
if (code) set.add(code)
}
return set
}
},
actions: {
normalizeDim1ForUi (value) {
return normalizeProductionDim1Label(value)
},
pickPreferredYasPayloadLabel (currentRaw, nextRaw) {
return pickPreferredProductionYasPayloadLabel(currentRaw, nextRaw)
},
toPayloadDim1Code (row, value) {
return toProductionPayloadDim1(row, value)
},
classifyItemCode (value) {
const normalized = String(value || '').trim().toUpperCase()
if (!normalized) {
return { normalized: '', mode: 'empty', exists: false }
}
const exists = this.productCodeSet.has(normalized) || !!this.knownExistingItemCodes[normalized]
return {
normalized,
mode: exists ? 'existing' : 'new',
exists
}
},
markItemCodeKnownExisting (itemCode, exists = true) {
const code = String(itemCode || '').trim().toUpperCase()
if (!code) return
this.knownExistingItemCodes = {
...this.knownExistingItemCodes,
[code]: !!exists
}
},
async fetchHeader (orderHeaderID) {
if (!orderHeaderID) {
this.header = null
@@ -126,6 +203,20 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
this.error = err?.response?.data || err?.message || 'Urun listesi alinamadi'
}
},
async fetchCdItemByCode (code) {
if (!code) return null
try {
const res = await api.get('/product-cditem', { params: { code } })
const data = res?.data || null
if (data) {
this.markItemCodeKnownExisting(code, true)
}
return data
} catch (err) {
console.error('[OrderProductionItemStore] fetchCdItemByCode failed', err)
return null
}
},
async fetchColors (productCode) {
const code = String(productCode || '').trim()
if (!code) return []
@@ -133,16 +224,58 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
if (this.colorOptionsByCode[code]) {
return this.colorOptionsByCode[code]
}
if (this.colorRequestsByCode[code]) {
return this.colorRequestsByCode[code]
}
try {
this.colorRequestsByCode[code] = (async () => {
const t0 = nowMs()
console.info('[OrderProductionItemStore] fetchColors start', { code })
const res = await api.get('/product-colors', { params: { code } })
const data = res?.data
const list = Array.isArray(data) ? data : []
if (list.length) this.markItemCodeKnownExisting(code, true)
this.colorOptionsByCode[code] = list
console.info('[OrderProductionItemStore] fetchColors done', { code, count: list.length, durationMs: Math.round(nowMs() - t0) })
return list
})()
return await this.colorRequestsByCode[code]
} catch (err) {
this.error = err?.response?.data || err?.message || 'Renk listesi alinamadi'
return []
} finally {
delete this.colorRequestsByCode[code]
}
},
async fetchNewColors (productCode) {
const code = String(productCode || '').trim()
if (!code) return []
if (this.newColorOptionsByCode[code]) {
return this.newColorOptionsByCode[code]
}
if (this.newColorRequestsByCode[code]) {
return this.newColorRequestsByCode[code]
}
try {
this.newColorRequestsByCode[code] = (async () => {
const t0 = nowMs()
console.info('[OrderProductionItemStore] fetchNewColors start', { code })
const res = await api.get('/product-newcolors', { params: { code } })
const data = res?.data
const list = Array.isArray(data) ? data : []
this.newColorOptionsByCode[code] = list
console.info('[OrderProductionItemStore] fetchNewColors done', { code, count: list.length, durationMs: Math.round(nowMs() - t0) })
return list
})()
return await this.newColorRequestsByCode[code]
} catch (err) {
this.error = err?.response?.data || err?.message || 'Yeni urun renk listesi alinamadi'
return []
} finally {
delete this.newColorRequestsByCode[code]
}
},
async fetchSecondColors (productCode, colorCode) {
@@ -154,30 +287,168 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
if (this.secondColorOptionsByKey[key]) {
return this.secondColorOptionsByKey[key]
}
if (this.secondColorRequestsByKey[key]) {
return this.secondColorRequestsByKey[key]
}
try {
this.secondColorRequestsByKey[key] = (async () => {
const t0 = nowMs()
console.info('[OrderProductionItemStore] fetchSecondColors start', { code, color })
const res = await api.get('/product-secondcolor', { params: { code, color } })
const data = res?.data
const list = Array.isArray(data) ? data : []
this.secondColorOptionsByKey[key] = list
console.info('[OrderProductionItemStore] fetchSecondColors done', { code, color, count: list.length, durationMs: Math.round(nowMs() - t0) })
return list
})()
return await this.secondColorRequestsByKey[key]
} catch (err) {
this.error = err?.response?.data || err?.message || '2. renk listesi alinamadi'
return []
} finally {
delete this.secondColorRequestsByKey[key]
}
},
async validateUpdates (orderHeaderID, lines) {
async fetchNewSecondColors (productCode, colorCode) {
const code = String(productCode || '').trim()
const color = String(colorCode || '').trim()
if (!code || !color) return []
const key = `${code}::${color}`
if (this.newSecondColorOptionsByKey[key]) {
return this.newSecondColorOptionsByKey[key]
}
if (this.newSecondColorRequestsByKey[key]) {
return this.newSecondColorRequestsByKey[key]
}
try {
this.newSecondColorRequestsByKey[key] = (async () => {
const t0 = nowMs()
console.info('[OrderProductionItemStore] fetchNewSecondColors start', { code, color })
const res = await api.get('/product-newsecondcolor', { params: { code, color } })
const data = res?.data
const list = Array.isArray(data) ? data : []
this.newSecondColorOptionsByKey[key] = list
console.info('[OrderProductionItemStore] fetchNewSecondColors done', { code, color, count: list.length, durationMs: Math.round(nowMs() - t0) })
return list
})()
return await this.newSecondColorRequestsByKey[key]
} catch (err) {
this.error = err?.response?.data || err?.message || 'Yeni urun 2. renk listesi alinamadi'
return []
} finally {
delete this.newSecondColorRequestsByKey[key]
}
},
async fetchProductAttributes (itemTypeCode = 1) {
const key = String(itemTypeCode || 1)
if (this.productAttributesByItemType[key]) {
return this.productAttributesByItemType[key]
}
try {
const res = await api.get('/product-attributes', { params: { itemTypeCode } })
const list = Array.isArray(res?.data) ? res.data : []
this.productAttributesByItemType[key] = list
return list
} catch (err) {
this.error = err?.response?.data || err?.message || 'Urun ozellikleri alinamadi'
return []
}
},
async fetchProductItemAttributes (itemCode, itemTypeCode = 1, force = false) {
const code = String(itemCode || '').trim().toUpperCase()
const itc = Number(itemTypeCode || 1)
if (!code) return []
const key = `${itc}|${code}`
if (!force && this.productItemAttributesByKey[key]) {
return this.productItemAttributesByKey[key]
}
try {
const res = await api.get('/product-item-attributes', { params: { itemTypeCode: itc, itemCode: code } })
const list = Array.isArray(res?.data) ? res.data : []
if (list.length) this.markItemCodeKnownExisting(code, true)
this.productItemAttributesByKey[key] = list
return list
} catch (err) {
this.error = err?.response?.data || err?.message || 'Urunun mevcut ozellikleri alinamadi'
return []
}
},
async fetchCdItemLookups (force = false) {
if (this.cdItemLookups && !force) return this.cdItemLookups
try {
const res = await api.get('/orders/production-items/cditem-lookups')
this.cdItemLookups = res?.data || null
return this.cdItemLookups
} catch (err) {
const rid =
err?.response?.headers?.['x-debug-request-id'] ||
err?.response?.data?.requestId ||
''
logApiError('fetchCdItemLookups', err, { force, requestId: rid })
if (rid) {
console.error(`[OrderProductionItemStore] fetchCdItemLookups requestId=${rid}`)
}
this.error = extractApiErrorMessage(err, 'cdItem lookup listesi alinamadi')
return null
}
},
setCdItemDraft (itemCode, draft) {
const code = String(itemCode || '').trim().toUpperCase()
if (!code) return
this.cdItemDraftsByCode = {
...this.cdItemDraftsByCode,
[code]: {
...(draft || {}),
ItemCode: code,
ItemTypeCode: Number(draft?.ItemTypeCode || 1)
}
}
},
getCdItemDraft (itemCode) {
const code = String(itemCode || '').trim().toUpperCase()
if (!code) return null
return this.cdItemDraftsByCode[code] || null
},
setProductAttributeDraft (itemCode, rows) {
const code = String(itemCode || '').trim().toUpperCase()
if (!code) return
this.productAttributeDraftsByCode = {
...this.productAttributeDraftsByCode,
[code]: Array.isArray(rows) ? rows : []
}
},
getProductAttributeDraft (itemCode) {
const code = String(itemCode || '').trim().toUpperCase()
if (!code) return []
return this.productAttributeDraftsByCode[code] || []
},
async validateUpdates (orderHeaderID, lines, cdItems = []) {
if (!orderHeaderID) return { missingCount: 0, missing: [] }
this.saving = true
this.error = null
try {
const t0 = nowMs()
console.info('[OrderProductionItemStore] validateUpdates start', { orderHeaderID, lineCount: lines?.length || 0 })
const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/validate`,
{ lines }
{ lines, cdItems }
)
return res?.data || { missingCount: 0, missing: [] }
const data = res?.data || { missingCount: 0, missing: [] }
const rid = res?.headers?.['x-debug-request-id'] || ''
console.info('[OrderProductionItemStore] validateUpdates done', {
orderHeaderID,
lineCount: lines?.length || 0,
missingCount: Number(data?.missingCount || 0),
barcodeValidationCount: Number(data?.barcodeValidationCount || 0),
requestId: rid,
durationMs: Math.round(nowMs() - t0)
})
return data
} catch (err) {
logApiError('validateUpdates', err, { orderHeaderID, lineCount: lines?.length || 0 })
this.error = extractApiErrorMessage(err, 'Kontrol basarisiz')
@@ -186,18 +457,55 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
this.saving = false
}
},
async applyUpdates (orderHeaderID, lines, insertMissing) {
async applyUpdates (orderHeaderID, lines, insertMissing, cdItems = [], productAttributes = [], headerAverageDueDate = null) {
if (!orderHeaderID) return { updated: 0, inserted: 0 }
const orderKey = String(orderHeaderID).trim().toUpperCase()
if (applyInFlightByOrder.has(orderKey)) {
console.warn('[OrderProductionItemStore] applyUpdates deduped (in-flight)', {
orderHeaderID: orderKey,
lineCount: lines?.length || 0
})
return await applyInFlightByOrder.get(orderKey)
}
const applyPromise = (async () => {
this.saving = true
this.error = null
try {
const t0 = nowMs()
console.info('[OrderProductionItemStore] applyUpdates start', {
orderHeaderID,
lineCount: lines?.length || 0,
insertMissing: !!insertMissing,
cdItemCount: cdItems?.length || 0,
attributeCount: productAttributes?.length || 0,
headerAverageDueDate
})
const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/apply`,
{ lines, insertMissing }
{
lines,
insertMissing,
cdItems,
productAttributes,
HeaderAverageDueDate: headerAverageDueDate
}
)
return res?.data || { updated: 0, inserted: 0 }
const data = res?.data || { updated: 0, inserted: 0 }
const rid = res?.headers?.['x-debug-request-id'] || ''
console.info('[OrderProductionItemStore] applyUpdates done', {
orderHeaderID,
updated: Number(data?.updated || 0),
inserted: Number(data?.inserted || 0),
barcodeInserted: Number(data?.barcodeInserted || 0),
attributeUpserted: Number(data?.attributeUpserted || 0),
headerUpdated: !!data?.headerUpdated,
requestId: rid,
durationMs: Math.round(nowMs() - t0)
})
return data
} catch (err) {
logApiError('applyUpdates', err, { orderHeaderID, lineCount: lines?.length || 0, insertMissing })
this.error = extractApiErrorMessage(err, 'Guncelleme basarisiz')
@@ -205,6 +513,17 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
} finally {
this.saving = false
}
})()
applyInFlightByOrder.set(orderKey, applyPromise)
try {
return await applyPromise
} finally {
if (applyInFlightByOrder.get(orderKey) === applyPromise) {
applyInFlightByOrder.delete(orderKey)
}
}
}
}
})

View File

@@ -0,0 +1,148 @@
import { defineStore } from 'pinia'
import api from 'src/services/api'
/**
 * Coerce any value to a trimmed string; null/undefined become ''.
 * @param {*} value
 * @returns {string}
 */
function toText (value) {
  if (value == null) return ''
  return String(value).trim()
}
/**
 * Coerce any value to a finite number rounded to two decimal places.
 * Non-numeric input (NaN/Infinity) maps to 0.
 * @param {*} value
 * @returns {number}
 */
function toNumber (value) {
  const parsed = Number(value)
  if (!Number.isFinite(parsed)) return 0
  return Number(parsed.toFixed(2))
}
/**
 * Map one raw backend pricing record into the grid row shape.
 * Text fields become trimmed strings, numeric fields are rounded to two
 * decimals, and every editable price column starts at zero.
 * @param {object} raw backend record
 * @param {number} index position of the record within the fetched page
 * @param {number} baseIndex row-id offset when appending to existing rows
 * @returns {object} grid row
 */
function mapRow (raw, index, baseIndex = 0) {
  const row = {
    id: baseIndex + index + 1,
    productCode: toText(raw?.ProductCode),
    stockQty: toNumber(raw?.StockQty),
    stockEntryDate: toText(raw?.StockEntryDate),
    lastPricingDate: toText(raw?.LastPricingDate),
    askiliYan: toText(raw?.AskiliYan),
    kategori: toText(raw?.Kategori),
    urunIlkGrubu: toText(raw?.UrunIlkGrubu),
    urunAnaGrubu: toText(raw?.UrunAnaGrubu),
    urunAltGrubu: toText(raw?.UrunAltGrubu),
    icerik: toText(raw?.Icerik),
    karisim: toText(raw?.Karisim),
    marka: toText(raw?.Marka),
    brandGroupSelection: toText(raw?.BrandGroupSec),
    costPrice: toNumber(raw?.CostPrice),
    expenseForBasePrice: 0,
    basePriceUsd: 0,
    basePriceTry: 0
  }
  // Six editable price tiers per currency, all initialized to zero
  // (usd1..usd6, eur1..eur6, try1..try6), in the same key order as before.
  for (const currency of ['usd', 'eur', 'try']) {
    for (let tier = 1; tier <= 6; tier += 1) {
      row[`${currency}${tier}`] = 0
    }
  }
  return row
}
// Pinia store for the product pricing grid: loads paged product rows
// from the backend and holds in-grid edits (price cells, brand group).
export const useProductPricingStore = defineStore('product-pricing-store', {
  state: () => ({
    rows: [],          // mapped grid rows (see mapRow)
    loading: false,    // true while a fetch is in flight
    error: '',         // last fetch error text ('' when none)
    hasMore: true      // whether another page is expected after the last fetch
  }),
  actions: {
    /**
     * Fetch one page of pricing rows.
     * options.limit — page size (default 500 when not a positive number)
     * options.afterProductCode — cursor: only rows after this code
     * options.append — true to append/dedupe into existing rows,
     *                  false to replace them
     * Returns { traceId, fetched, hasMore, nextCursor }; rethrows on error
     * after recording this.error (and clearing rows when not appending).
     */
    async fetchRows (options = {}) {
      this.loading = true
      this.error = ''
      const limit = Number(options?.limit) > 0 ? Number(options.limit) : 500
      const afterProductCode = toText(options?.afterProductCode)
      const append = Boolean(options?.append)
      // Row ids continue from the current row count when appending.
      const baseIndex = append ? this.rows.length : 0
      const startedAt = Date.now()
      console.info('[product-pricing][frontend] request:start', {
        at: new Date(startedAt).toISOString(),
        timeout_ms: 180000,
        limit,
        after_product_code: afterProductCode || null,
        append
      })
      try {
        const params = { limit }
        if (afterProductCode) params.after_product_code = afterProductCode
        // Long 3-minute timeout: this endpoint can be slow on large catalogs.
        const res = await api.request({
          method: 'GET',
          url: '/pricing/products',
          params,
          timeout: 180000
        })
        // Pagination metadata travels in response headers, not the body.
        const traceId = res?.headers?.['x-trace-id'] || null
        const hasMoreHeader = String(res?.headers?.['x-has-more'] || '').toLowerCase()
        const nextCursorHeader = toText(res?.headers?.['x-next-cursor'])
        const data = Array.isArray(res?.data) ? res.data : []
        const mapped = data.map((x, i) => mapRow(x, i, baseIndex))
        if (append) {
          // Merge while deduplicating on productCode; rows without a code
          // are always kept.
          const merged = [...this.rows]
          const seen = new Set(this.rows.map((x) => x?.productCode))
          for (const row of mapped) {
            const key = row?.productCode
            if (key && seen.has(key)) continue
            merged.push(row)
            if (key) seen.add(key)
          }
          this.rows = merged
        } else {
          this.rows = mapped
        }
        // Prefer the explicit x-has-more header; otherwise infer "more"
        // from receiving a full page.
        this.hasMore = hasMoreHeader ? hasMoreHeader === 'true' : mapped.length === limit
        console.info('[product-pricing][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - startedAt,
          row_count: this.rows.length,
          fetched_count: mapped.length,
          has_more: this.hasMore,
          next_cursor: nextCursorHeader || null
        })
        return {
          traceId,
          fetched: mapped.length,
          hasMore: this.hasMore,
          nextCursor: nextCursorHeader
        }
      } catch (err) {
        // A failed replace-fetch clears the grid; a failed append keeps
        // what was already loaded.
        if (!append) this.rows = []
        this.hasMore = false
        const msg = err?.response?.data || err?.message || 'Urun fiyatlandirma listesi alinamadi'
        this.error = toText(msg)
        console.error('[product-pricing][frontend] request:error', {
          trace_id: err?.response?.headers?.['x-trace-id'] || null,
          duration_ms: Date.now() - startedAt,
          timeout_ms: err?.config?.timeout ?? null,
          status: err?.response?.status || null,
          message: this.error
        })
        throw err
      } finally {
        this.loading = false
      }
    },
    /**
     * Write a numeric cell edit onto a row in place. Accepts "," as the
     * decimal separator (first occurrence replaced) and rounds via toNumber.
     */
    updateCell (row, field, val) {
      if (!row || !field) return
      row[field] = toNumber(String(val ?? '').replace(',', '.'))
    },
    // Write a brand-group selection edit onto a row in place (trimmed text).
    updateBrandGroupSelection (row, val) {
      if (!row) return
      row.brandGroupSelection = toText(val)
    }
  }
})

View File

@@ -9,14 +9,16 @@ export const useDownloadstHeadStore = defineStore('downloadstHead', {
accountCode,
startDate,
endDate,
parislemler
parislemler,
langcode = 'TR'
) {
try {
// ✅ Params (axios paramsSerializer array=repeat destekliyor)
const params = {
accountcode: accountCode,
startdate: startDate,
enddate: endDate
enddate: endDate,
langcode: langcode || 'TR'
}
if (Array.isArray(parislemler) && parislemler.length > 0) {

View File

@@ -7,13 +7,14 @@ export const useDownloadstpdfStore = defineStore('downloadstpdf', {
/* ==========================================================
📄 PDF İNDİR / AÇ
========================================================== */
async downloadPDF(accountCode, startDate, endDate, parislemler = []) {
async downloadPDF(accountCode, startDate, endDate, parislemler = [], langcode = 'TR') {
try {
// 🔹 Query params
const params = {
accountcode: accountCode,
startdate: startDate,
enddate: endDate
enddate: endDate,
langcode: langcode || 'TR'
}
if (Array.isArray(parislemler) && parislemler.length > 0) {

View File

@@ -0,0 +1,35 @@
import { defineStore } from 'pinia'
import { computed, ref } from 'vue'
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
import { DEFAULT_LOCALE, normalizeLocale, toBackendLangCode } from 'src/i18n/languages'
const STORAGE_KEY = 'bss.locale'
/**
 * Resolve the locale to start with: the persisted value from
 * localStorage when running in a browser, normalized before use;
 * DEFAULT_LOCALE when no window exists (SSR-safe).
 * @returns {string} initial locale code
 */
function readInitialLocale() {
  const inBrowser = typeof window !== 'undefined'
  if (!inBrowser) return DEFAULT_LOCALE
  const stored = window.localStorage.getItem(STORAGE_KEY)
  return normalizeLocale(stored)
}
/**
 * Global UI locale store (setup-style Pinia store).
 * Holds the active locale, keeps dayjs in sync with it, persists the
 * choice to localStorage, and exposes the backend language code derived
 * from the active locale.
 */
export const useLocaleStore = defineStore('locale', () => {
  const locale = ref(readInitialLocale())
  const backendLangCode = computed(() => toBackendLangCode(locale.value))

  /**
   * Switch the active locale. The value is normalized, applied to dayjs,
   * and persisted when localStorage is available.
   */
  function setLocale(nextLocale) {
    const normalized = normalizeLocale(nextLocale)
    locale.value = normalized
    applyDayjsLocale(normalized)
    if (typeof window === 'undefined') return
    window.localStorage.setItem(STORAGE_KEY, normalized)
  }

  // Align dayjs with the initial locale when the store is created.
  applyDayjsLocale(locale.value)

  return { locale, backendLangCode, setLocale }
})

View File

@@ -42,31 +42,36 @@ export function buildComboKey(row, beden) {
// Built-in size ("beden") schema per product family: key, display title,
// and the ordered size labels offered for that group. Note the 'aksbir'
// group deliberately includes a single-space label (' ') for size-less rows.
export const BEDEN_SCHEMA = [
  { key: 'tak', title: 'TAKIM ELBISE', values: ['44','46','48','50','52','54','56','58','60','62','64','66','68','70','72','74'] },
  { key: 'ayk', title: 'AYAKKABI', values: ['39','40','41','42','43','44','45'] },
  { key: 'ayk_garson', title: 'AYAKKABI GARSON', values: ['22','23','24','25','26','27','28','29','30','31','32','33','34','35','STD'] },
  { key: 'yas', title: 'YAS', values: ['2','4','6','8','10','12','14'] },
  { key: 'pan', title: 'PANTOLON', values: ['38','40','42','44','46','48','50','52','54','56','58','60','62','64','66','68'] },
  { key: 'gom', title: 'GOMLEK', values: ['XS','S','M','L','XL','2XL','3XL','4XL','5XL','6XL','7XL'] },
  { key: 'aksbir', title: 'AKSESUAR', values: [' ', '44', 'STD', '110', '115', '120', '125', '130', '135'] }
]
// Display titles per size-group key; used when rebuilding the schema map
// from backend-provided schemas that carry keys but no titles.
const SIZE_GROUP_TITLES = {
  tak: 'TAKIM ELBISE',
  ayk: 'AYAKKABI',
  ayk_garson: 'AYAKKABI GARSON',
  yas: 'YAS',
  pan: 'PANTOLON',
  gom: 'GOMLEK',
  aksbir: 'AKSESUAR'
}
export const schemaByKey = BEDEN_SCHEMA.reduce((m, g) => {
m[g.key] = g
return m
}, {})
const FALLBACK_SCHEMA_MAP = {
tak: { key: 'tak', title: 'TAKIM ELBISE', values: ['44', '46', '48', '50', '52', '54', '56', '58', '60', '62', '64', '66', '68', '70', '72', '74'] }
}
export const schemaByKey = { ...FALLBACK_SCHEMA_MAP }
// Module-level cache for the product-size-match rules fetched from the
// backend: `loaded` marks validity, `rules` holds match rules, `schemas`
// holds size-group schemas keyed by group key.
const productSizeMatchCache = {
  loaded: false,
  rules: [],
  schemas: {}
}
// Cache time-to-live: refetch after 4 hours.
const PRODUCT_SIZE_MATCH_TTL_MS = 4 * 60 * 60 * 1000
// Epoch ms of the last successful fetch (0 = never fetched).
let productSizeMatchLastFetchAt = 0
// Shared promise while a fetch is in flight, so concurrent callers dedupe.
let productSizeMatchInflightPromise = null
/**
 * Drop all cached product-size-match data and mark the cache stale so
 * the next ensure call refetches from the backend.
 */
function resetProductSizeMatchCache() {
  Object.assign(productSizeMatchCache, {
    loaded: false,
    rules: [],
    schemas: {}
  })
  productSizeMatchLastFetchAt = 0
}
function setProductSizeMatchCache(payload) {
@@ -109,6 +114,24 @@ function setProductSizeMatchCache(payload) {
productSizeMatchCache.loaded = true
productSizeMatchCache.rules = normalizedRules
productSizeMatchCache.schemas = normalizedSchemas
productSizeMatchLastFetchAt = Date.now()
}
/**
 * Build a { key → { key, title, values } } schema map from the cached
 * size-group schemas. Titles come from SIZE_GROUP_TITLES (falling back
 * to the uppercased key) and every value is stringified (null/undefined
 * become ''). A 'tak' entry is always guaranteed via the static fallback.
 * @returns {object} schema map keyed by size-group key
 */
function buildSchemaMapFromCacheSchemas() {
  const result = {}
  const cachedSchemas = productSizeMatchCache.schemas || {}
  Object.entries(cachedSchemas).forEach(([rawKey, rawValues]) => {
    const key = String(rawKey || '').trim()
    if (!key) return
    const values = (Array.isArray(rawValues) ? rawValues : []).map(
      v => String(v == null ? '' : v)
    )
    result[key] = {
      key,
      title: SIZE_GROUP_TITLES[key] || key.toUpperCase(),
      values
    }
  })
  if (!result.tak) result.tak = { ...FALLBACK_SCHEMA_MAP.tak }
  return result
}
@@ -189,6 +212,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
orders: [],
header: {},
summaryRows: [],
originalHeader: {},
originalLines: [],
lastSavedAt: null,
@@ -257,24 +282,16 @@ export const useOrderEntryStore = defineStore('orderentry', {
,
/* ===========================================================
🧩 initSchemaMap — BEDEN ŞEMA İNİT
- TEK SOURCE OF TRUTH: BEDEN_SCHEMA
- TEK SOURCE OF TRUTH: SQL mk_size_group (cache)
=========================================================== */
initSchemaMap() {
if (this.schemaMap && Object.keys(this.schemaMap).length > 0) {
return
}
const map = {}
for (const g of BEDEN_SCHEMA) {
map[g.key] = {
key: g.key,
title: g.title,
values: [...g.values]
this.schemaMap = buildSchemaMapFromCacheSchemas()
if (!Object.keys(this.schemaMap).length) {
this.schemaMap = { ...FALLBACK_SCHEMA_MAP }
}
}
this.schemaMap = map
console.log(
'🧩 schemaMap INIT edildi:',
@@ -283,25 +300,47 @@ export const useOrderEntryStore = defineStore('orderentry', {
},
async ensureProductSizeMatchRules($q = null, force = false) {
if (!force && productSizeMatchCache.loaded && productSizeMatchCache.rules.length > 0) {
const hasCache = productSizeMatchCache.loaded && productSizeMatchCache.rules.length > 0
const cacheAge = hasCache ? (Date.now() - productSizeMatchLastFetchAt) : Number.POSITIVE_INFINITY
const isFresh = hasCache && cacheAge < PRODUCT_SIZE_MATCH_TTL_MS
if (!force && isFresh) {
this.schemaMap = buildSchemaMapFromCacheSchemas()
return true
}
if (!force && productSizeMatchInflightPromise) {
return productSizeMatchInflightPromise
}
productSizeMatchInflightPromise = (async () => {
try {
const res = await api.get('/product-size-match/rules')
setProductSizeMatchCache(res?.data || {})
this.schemaMap = buildSchemaMapFromCacheSchemas()
return true
} catch (err) {
if (hasCache) {
this.schemaMap = buildSchemaMapFromCacheSchemas()
console.warn('product-size-match refresh failed, using existing cache:', err)
return true
}
if (force) {
resetProductSizeMatchCache()
}
this.schemaMap = { ...FALLBACK_SCHEMA_MAP }
console.warn('⚠ product-size-match rules alınamadı:', err)
$q?.notify?.({
type: 'warning',
message: 'Beden eşleme kuralları alınamadı.'
})
return false
} finally {
productSizeMatchInflightPromise = null
}
})()
return productSizeMatchInflightPromise
},
@@ -497,6 +536,62 @@ export const useOrderEntryStore = defineStore('orderentry', {
const normalized = Array.isArray(lines) ? lines : []
const mapLabel = (ln) => this.buildMailLineLabel(ln)
const formatDate = (d) => {
if (!d) return ''
const s = String(d).split('T')[0]
return s
}
const getLineDueDate = (ln) => (
formatDate(
ln?.DueDate ||
ln?.DeliveryDate ||
ln?.PlannedDateOfLading ||
''
)
)
const oldDate = formatDate(this.originalHeader?.AverageDueDate)
const newDate = formatDate(this.header?.AverageDueDate)
const origMap = new Map()
if (Array.isArray(this.originalLines)) {
this.originalLines.forEach(ln => {
if (ln.OrderLineID) origMap.set(String(ln.OrderLineID), ln)
})
}
const buildDueDateChanges = () => {
const out = []
const seen = new Set()
normalized.forEach(ln => {
if (ln?._deleteSignal || !ln?.OrderLineID) return
const orig = origMap.get(String(ln.OrderLineID))
if (!orig) return
const itemCode = String(ln?.ItemCode || '').trim().toUpperCase()
const colorCode = String(ln?.ColorCode || '').trim().toUpperCase()
const itemDim2Code = String(ln?.ItemDim2Code || '').trim().toUpperCase()
const oldLnDate = getLineDueDate(orig)
const newLnDate = getLineDueDate(ln)
if (!itemCode || !newLnDate || oldLnDate === newLnDate) return
const key = [itemCode, colorCode, itemDim2Code, oldLnDate, newLnDate].join('||')
if (seen.has(key)) return
seen.add(key)
out.push({
itemCode,
colorCode,
itemDim2Code,
oldDueDate: oldLnDate,
newDueDate: newLnDate
})
})
return out
}
if (isNew) {
return {
operation: 'create',
@@ -506,7 +601,10 @@ export const useOrderEntryStore = defineStore('orderentry', {
normalized
.filter(ln => !ln?._deleteSignal)
.map(mapLabel)
)
),
oldDueDate: '',
newDueDate: '',
dueDateChanges: []
}
}
@@ -516,11 +614,22 @@ export const useOrderEntryStore = defineStore('orderentry', {
.map(mapLabel)
)
const updatedItems = uniq(
normalized
.filter(ln => !ln?._deleteSignal && !!ln?.OrderLineID && ln?._dirty === true)
.map(mapLabel)
)
const updatedItems = []
normalized.forEach(ln => {
if (!ln?._deleteSignal && !!ln?.OrderLineID && ln?._dirty === true) {
let label = mapLabel(ln)
const orig = origMap.get(String(ln.OrderLineID))
if (orig) {
const oldLnDate = getLineDueDate(orig)
const newLnDate = getLineDueDate(ln)
if (newLnDate && oldLnDate !== newLnDate) {
label += ` (Termin: ${oldLnDate} -> ${newLnDate})`
}
}
updatedItems.push(label)
}
})
const addedItems = uniq(
normalized
@@ -531,8 +640,11 @@ export const useOrderEntryStore = defineStore('orderentry', {
return {
operation: 'update',
deletedItems,
updatedItems,
addedItems
updatedItems: uniq(updatedItems),
addedItems,
oldDueDate: oldDate,
newDueDate: newDate,
dueDateChanges: buildDueDateChanges()
}
}
,
@@ -549,7 +661,10 @@ export const useOrderEntryStore = defineStore('orderentry', {
operation: payload?.operation || 'create',
deletedItems: Array.isArray(payload?.deletedItems) ? payload.deletedItems : [],
updatedItems: Array.isArray(payload?.updatedItems) ? payload.updatedItems : [],
addedItems: Array.isArray(payload?.addedItems) ? payload.addedItems : []
addedItems: Array.isArray(payload?.addedItems) ? payload.addedItems : [],
oldDueDate: payload?.oldDueDate || '',
newDueDate: payload?.newDueDate || '',
dueDateChanges: Array.isArray(payload?.dueDateChanges) ? payload.dueDateChanges : []
})
return res?.data || {}
} catch (err) {
@@ -561,6 +676,69 @@ export const useOrderEntryStore = defineStore('orderentry', {
}
,
async bulkUpdateOrderLineDueDate(orderId, dueDate) {
const id = String(orderId || this.header?.OrderHeaderID || '').trim()
const dateText = String(dueDate || '').trim()
if (!id) {
throw new Error('Siparis ID bulunamadi')
}
if (!dateText) {
throw new Error('Termin tarihi secilmedi')
}
try {
this.loading = true
const res = await api.post(`/order/${encodeURIComponent(id)}/bulk-due-date`, {
dueDate: dateText
})
return res?.data || {}
} catch (err) {
const detail = await extractApiErrorDetail(err)
const status = err?.status || err?.response?.status || '-'
console.error(`❌ bulkUpdateOrderLineDueDate hata [${status}] order=${id}: ${detail}`)
throw new Error(detail)
} finally {
this.loading = false
}
}
,
applyBulkLineDueDateLocally(dueDate) {
const dateText = String(dueDate || '').trim()
if (!dateText) return
const hadUnsavedChanges = this.hasUnsavedChanges
const patchRow = (row) => ({
...row,
terminTarihi: dateText,
DueDate: dateText,
DeliveryDate: dateText,
PlannedDateOfLading: dateText
})
this.orders = Array.isArray(this.orders)
? this.orders.map(patchRow)
: []
this.summaryRows = Array.isArray(this.summaryRows)
? this.summaryRows.map(patchRow)
: []
this.header = {
...(this.header || {}),
AverageDueDate: dateText
}
// Keep originalHeader/originalLines untouched for submit-mail diff.
// Otherwise due-date change table becomes empty.
this.persistLocalStorage?.()
if (!hadUnsavedChanges) {
this.markAsSaved?.()
}
}
,
async downloadOrderPdf(id = null) {
try {
const orderId = id || this.header?.OrderHeaderID
@@ -1037,7 +1215,9 @@ export const useOrderEntryStore = defineStore('orderentry', {
...d,
UrunAnaGrubu: d.UrunAnaGrubu || d.ProductGroup || d.ProductAtt01Desc || '',
UrunAltGrubu: d.UrunAltGrubu || d.ProductSubGroup || d.ProductAtt02Desc || '',
Kategori: d.Kategori || '',
Kategori: d.Kategori || d.ProductAtt44Desc || d.YETISKIN_GARSON || d.YetiskinGarson || '',
ProductAtt01Desc: d.ProductAtt01Desc || d.UrunAnaGrubu || d.ProductGroup || '',
ProductAtt44Desc: d.ProductAtt44Desc || d.Kategori || d.YETISKIN_GARSON || d.YetiskinGarson || '',
YETISKIN_GARSON: d.YETISKIN_GARSON || d.YetiskinGarson || d.AskiliYan || '',
YetiskinGarson: d.YetiskinGarson || d.YETISKIN_GARSON || d.AskiliYan || '',
AskiliYan: d.AskiliYan || ''
@@ -1074,6 +1254,10 @@ export const useOrderEntryStore = defineStore('orderentry', {
this.orders = Array.isArray(normalized) ? normalized : []
this.summaryRows = [...this.orders]
// 💾 Snapshot for email comparison (v3.5)
this.originalHeader = JSON.parse(JSON.stringify(this.header))
this.originalLines = JSON.parse(JSON.stringify(this.summaryRows))
/* =======================================================
🔹 MODE KARARI (BACKEND SATIRLARI ÜZERİNDEN)
- herhangi bir isClosed=true → view
@@ -2450,8 +2634,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
groupedKey ||
detectBedenGroup(
null,
raw.urunAnaGrubu || raw.UrunAnaGrubu || meta.UrunAnaGrubu || meta.ProductGroup || '',
raw.kategori || raw.Kategori || meta.Kategori || '',
raw.urunAnaGrubu || raw.UrunAnaGrubu || raw.ProductAtt01Desc || meta.UrunAnaGrubu || meta.ProductGroup || meta.ProductAtt01Desc || '',
raw.kategori || raw.Kategori || raw.ProductAtt44Desc || meta.Kategori || meta.ProductAtt44Desc || '',
raw.yetiskinGarson ||
raw.YETISKIN_GARSON ||
raw.YetiskinGarson ||
@@ -2508,6 +2692,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
? ''
: String(raw.ItemDim1Code).trim()
const beden = bedenRaw === '' ? ' ' : normalizeBedenLabel(bedenRaw)
const bedenRawUpper = safeTrimUpperJs(bedenRaw)
const qty = Number(raw.Qty1 || raw.Qty || 0)
@@ -2520,14 +2705,16 @@ export const useOrderEntryStore = defineStore('orderentry', {
renk,
renk2,
urunAnaGrubu: raw.UrunAnaGrubu || meta.UrunAnaGrubu || meta.ProductGroup || 'GENEL',
urunAnaGrubu: raw.UrunAnaGrubu || raw.ProductAtt01Desc || meta.UrunAnaGrubu || meta.ProductGroup || meta.ProductAtt01Desc || 'GENEL',
urunAltGrubu: raw.UrunAltGrubu || meta.UrunAltGrubu || meta.ProductSubGroup || '',
kategori:
raw.Kategori ||
raw.ProductAtt44Desc ||
raw.YETISKIN_GARSON ||
raw.YetiskinGarson ||
raw.yetiskinGarson ||
meta.Kategori ||
meta.ProductAtt44Desc ||
meta.YETISKIN_GARSON ||
meta.YetiskinGarson ||
'',
@@ -2549,6 +2736,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
__tmpMap: {}, // beden → qty
lineIdMap: {}, // beden → OrderLineID
yasPayloadMap: {},
adet: 0,
tutar: 0,
@@ -2573,6 +2761,14 @@ export const useOrderEntryStore = defineStore('orderentry', {
entry.lineIdMap[beden] = String(rawLineId)
}
if (bedenRawUpper && /^(\d+)\s*(Y|YAS|YAŞ)$/.test(bedenRawUpper)) {
const canonical = normalizeBedenLabel(bedenRawUpper)
entry.yasPayloadMap[canonical] = pickPreferredYasPayloadLabel(
entry.yasPayloadMap[canonical],
bedenRawUpper
)
}
if (qty > 0) {
entry.__tmpMap[beden] = (entry.__tmpMap[beden] || 0) + qty
entry.adet += qty
@@ -2631,6 +2827,16 @@ export const useOrderEntryStore = defineStore('orderentry', {
row.bedenMap[grpKey][' '] = Number(row.adet || 0)
}
if (grpKey === 'yas') {
row.yasPayloadMap = row.yasPayloadMap || {}
for (const b of Object.keys(cleanedMap || {})) {
const s = String(b || '').trim()
if (/^\d+$/.test(s) && !row.yasPayloadMap[s]) {
row.yasPayloadMap[s] = `${s}Y`
}
}
}
delete row.__tmpMap
out.push(row)
}
@@ -2682,6 +2888,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
const cached = sizeCache.value[cacheKey]
bedenStock.value = [...cached.stockArray]
stockMap.value = { ...cached.stockMap }
form.yasPayloadMap = { ...(cached.yasPayloadMap || {}) }
console.log(`♻️ loadProductSizes CACHE → ${grpKey}`)
return
}
@@ -2706,12 +2913,25 @@ export const useOrderEntryStore = defineStore('orderentry', {
📦 STOK MAP (' ' KORUNUR)
======================================================= */
const apiStockMap = {}
const apiYasPayloadMap = {}
for (const x of data) {
const key =
x.item_dim1_code === null || x.item_dim1_code === ''
const rawDim1 =
x.item_dim1_code == null
? ' '
: normalizeBedenLabel(String(x.item_dim1_code))
: String(x.item_dim1_code)
const key = rawDim1 === ' '
? ' '
: normalizeBedenLabel(rawDim1)
apiStockMap[key] = Number(x.kullanilabilir_envanter ?? 0)
const rawUpper = safeTrimUpperJs(rawDim1)
if (grpKey === 'yas' && /^(\d+)\s*(Y|YAS|YAŞ)$/.test(rawUpper)) {
const canonical = normalizeBedenLabel(rawUpper)
apiYasPayloadMap[canonical] = pickPreferredYasPayloadLabel(
apiYasPayloadMap[canonical],
rawUpper
)
}
}
const finalStockMap = {}
@@ -2727,6 +2947,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
bedenStock.value = Object.entries(stockMap.value).map(
([beden, stok]) => ({ beden, stok })
)
form.yasPayloadMap = { ...(form.yasPayloadMap || {}), ...apiYasPayloadMap }
/* =======================================================
💾 CACHE
@@ -2734,7 +2955,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
sizeCache.value[cacheKey] = {
labels: [...form.bedenLabels],
stockArray: [...bedenStock.value],
stockMap: { ...stockMap.value }
stockMap: { ...stockMap.value },
yasPayloadMap: { ...(form.yasPayloadMap || {}) }
}
console.log(`✅ loadProductSizes FINAL v4.2 → ${grpKey}`)
@@ -3016,20 +3238,22 @@ export const useOrderEntryStore = defineStore('orderentry', {
}
// =======================================================
// 🧪 PRE-VALIDATE — prItemVariant ön kontrol
// - invalid varsa CREATE/UPDATE ÇALIŞMAZ
// 🧪 PRE-VALIDATE — prItemVariant ön kontrol (NEW + EDIT)
// - invalid varsa CREATE/UPDATE çalışmaz
// =======================================================
if (!isNew) {
const linesToValidate = lines.filter(
l => l._deleteSignal === true || l._dirty === true || !l.OrderLineID
const linesToValidate = lines.filter(l =>
Number(l?.Qty1 || 0) > 0 &&
l?._deleteSignal !== true &&
String(l?.ItemCode || '').trim() !== ''
)
if (linesToValidate.length > 0) {
const v = await api.post('/order/validate', { header, lines: linesToValidate })
const invalid = v?.data?.invalid || []
if (invalid.length > 0) {
await this.showInvalidVariantDialog?.($q, invalid)
return // ❌ update ÇALIŞMAZ
return // ❌ create/update ÇALIŞMAZ
}
}
@@ -3063,6 +3287,14 @@ export const useOrderEntryStore = defineStore('orderentry', {
if (!serverOrderId) {
throw new Error('OrderHeaderID backendden dönmedi')
}
const mailPayload = this.buildOrderMailPayload(lines, isNew)
console.info('[orderentryStore] mail payload prepared', {
operation: mailPayload?.operation,
deletedCount: Array.isArray(mailPayload?.deletedItems) ? mailPayload.deletedItems.length : 0,
updatedCount: Array.isArray(mailPayload?.updatedItems) ? mailPayload.updatedItems.length : 0,
addedCount: Array.isArray(mailPayload?.addedItems) ? mailPayload.addedItems.length : 0,
dueDateChangeCount: Array.isArray(mailPayload?.dueDateChanges) ? mailPayload.dueDateChanges.length : 0
})
purgeNewDraftOnExit = isNew
/* =======================================================
@@ -3122,7 +3354,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
// 📧 Piyasa eşleşen alıcılara sipariş PDF gönderimi (kayıt başarılı olduktan sonra)
try {
const mailPayload = this.buildOrderMailPayload(lines, isNew)
// UPDATE durumunda da mail gönderimi istendiği için isNew kontrolü kaldırıldı (v3.5)
const mailRes = await this.sendOrderToMarketMails(serverOrderId, mailPayload)
const sentCount = Number(mailRes?.sentCount || 0)
$q.notify({
@@ -3192,6 +3424,31 @@ export const useOrderEntryStore = defineStore('orderentry', {
} catch (err) {
console.error('❌ submitAllReal:', err)
const data = err?.response?.data || {}
const status = Number(err?.response?.status || 0)
if (err?.response?.status === 400 && data?.code === 'INVALID_ITEM_VARIANT') {
const oneInvalid = [{
clientKey: data?.clientKey || '',
itemCode: data?.itemCode || '',
colorCode: data?.colorCode || '',
dim1: data?.dim1 || '',
dim2: data?.dim2 || '',
qty1: 0,
reason: data?.message || 'Tanımsız ürün kombinasyonu'
}]
await this.showInvalidVariantDialog?.($q, oneInvalid)
return
}
if (status === 524) {
$q.notify({
type: 'warning',
timeout: 36000,
message: 'Sunucu zaman aşımına uğradı (524). Sipariş kısmen kaydedilmiş olabilir; önce listeden kontrol edin, sonra tekrar deneyin.'
})
return
}
$q.notify({
type: 'negative',
message:
@@ -3321,10 +3578,16 @@ export const useOrderEntryStore = defineStore('orderentry', {
}
// UI'de yas grubu 2/4/6... gösterilir; payload'a 2Y/4Y/6... yazılır.
const toPayloadBeden = (grpKey, v) => {
const toPayloadBeden = (row, grpKey, v) => {
const base = normBeden(v)
if (!base) return ''
if (grpKey === 'yas' && /^\d+$/.test(base)) {
const map =
row?.yasPayloadMap && typeof row.yasPayloadMap === 'object'
? row.yasPayloadMap
: {}
const mapped = safeStr(map[base]).toUpperCase()
if (mapped) return mapped
return `${base}Y`
}
return base
@@ -3346,6 +3609,11 @@ export const useOrderEntryStore = defineStore('orderentry', {
const headerId = this.header?.OrderHeaderID || crypto.randomUUID()
const docCurrency = safeStr(this.header?.DocCurrencyCode) || 'TRY'
const exRate = toNum(this.header?.ExchangeRate) || 1
const boolOr = (value, fallback) => {
if (typeof value === 'boolean') return value
if (value && typeof value === 'object' && typeof value.Bool === 'boolean') return value.Bool
return fallback
}
const avgDueSource =
this.header?.AverageDueDate ||
@@ -3362,6 +3630,20 @@ export const useOrderEntryStore = defineStore('orderentry', {
DocCurrencyCode: docCurrency,
LocalCurrencyCode: safeStr(this.header?.LocalCurrencyCode) || 'TRY',
ExchangeRate: exRate,
IsCancelOrder: boolOr(this.header?.IsCancelOrder, false),
IsInclutedVat: boolOr(this.header?.IsInclutedVat, false),
IsCreditSale: boolOr(this.header?.IsCreditSale, true),
IsCreditableConfirmed: true,
CreditableConfirmedUser: who,
CreditableConfirmedDate: formatDateTime(now),
IsSalesViaInternet: boolOr(this.header?.IsSalesViaInternet, false),
IsSuspended: boolOr(this.header?.IsSuspended, false),
IsCompleted: boolOr(this.header?.IsCompleted, true),
IsPrinted: boolOr(this.header?.IsPrinted, false),
IsLocked: boolOr(this.header?.IsLocked, false),
UserLocked: boolOr(this.header?.UserLocked, false),
IsClosed: boolOr(this.header?.IsClosed, false),
IsProposalBased: boolOr(this.header?.IsProposalBased, false),
CreatedUserName:
this.mode === 'edit'
@@ -3578,7 +3860,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
// ✅ UI/combokey için kanonik beden (yas'ta 2/4/6...)
const bedenCanonical = normBeden(bedenRaw)
// ✅ payload beden: yas grubunda 2Y/4Y/6..., diğerlerinde normal
const bedenPayload = toPayloadBeden(grpKey, bedenRaw)
const bedenPayload = toPayloadBeden(row, grpKey, bedenRaw)
// ✅ combokey beden: boşsa '_' ile stabil kalsın
const bedenKey = bedenCanonical || '_'
@@ -3733,6 +4015,17 @@ function parseNumericSizeJs(v) {
return Number.isNaN(n) ? null : n
}
// Decide which raw "yaş" (age) size label to keep as the payload form.
// Rules: an empty candidate never replaces anything; an empty current is
// always replaced; otherwise the candidate only wins when it carries the
// spelled-out "YAS"/"YAŞ" suffix and the current label does not.
function pickPreferredYasPayloadLabel(currentRaw, nextRaw) {
  const current = safeTrimUpperJs(currentRaw)
  const candidate = safeTrimUpperJs(nextRaw)
  if (!candidate) return current
  if (!current) return candidate
  const hasYasSuffix = (label) => /YAS$|YAŞ$/.test(label)
  return !hasYasSuffix(current) && hasYasSuffix(candidate) ? candidate : current
}
export function normalizeBedenLabel(v) {
let s = (v == null ? '' : String(v)).trim()
if (s === '') return ' '
@@ -4044,6 +4337,9 @@ export function toSummaryRowFromForm(form) {
urunAnaGrubu: form.urunAnaGrubu || '',
urunAltGrubu: form.urunAltGrubu || '',
kategori: form.kategori || '',
yetiskinGarson: form.yetiskinGarson || form.askiliyan || '',
yasPayloadMap: { ...(form.yasPayloadMap || {}) },
aciklama: form.aciklama || '',
fiyat: Number(form.fiyat || 0),

View File

@@ -0,0 +1,143 @@
import { defineStore } from 'pinia'
import api from 'src/services/api'
/**
 * Pinia store for the translation-management screen.
 * Wraps the `/language/translations` REST endpoints and tracks two UI
 * flags: `loading` (reads) and `saving` (writes). `rows` holds the
 * currently loaded translation records, `count` the server-reported total.
 */
export const useTranslationStore = defineStore('translation', {
  state: () => ({
    loading: false,
    saving: false,
    rows: [],
    count: 0
  }),

  actions: {
    /**
     * Fetch translation rows.
     * @param {object} filters - forwarded verbatim as query params
     * @param {object} options - `{ append: true }` merges into the existing
     *   rows, de-duplicating by `id`; otherwise the list is replaced.
     */
    async fetchRows (filters = {}, options = {}) {
      this.loading = true
      const shouldAppend = Boolean(options?.append)
      try {
        const res = await api.get('/language/translations', { params: filters })
        const body = res?.data || {}
        const fetched = Array.isArray(body.rows) ? body.rows : []
        if (shouldAppend) {
          // Preserve current order; append only rows whose id is unseen.
          const knownIds = new Set(this.rows.map((r) => r?.id))
          const combined = [...this.rows]
          for (const row of fetched) {
            const rowId = row?.id
            if (!knownIds.has(rowId)) {
              combined.push(row)
              knownIds.add(rowId)
            }
          }
          this.rows = combined
        } else {
          this.rows = fetched
        }
        // Server-side total wins; fall back to the local length when absent.
        this.count = Number(body.count) || this.rows.length
      } finally {
        this.loading = false
      }
    },

    /** Update a single translation row; returns the server response body. */
    async updateRow (id, payload) {
      this.saving = true
      try {
        const res = await api.put(`/language/translations/${id}`, payload)
        return res?.data || null
      } finally {
        this.saving = false
      }
    },

    /**
     * Create missing translation entries for the given source items across
     * the given target languages.
     */
    async upsertMissing (items, languages = ['en', 'de', 'it', 'es', 'ru', 'ar']) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/upsert-missing', {
          items: Array.isArray(items) ? items : [],
          languages: Array.isArray(languages) ? languages : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    },

    /**
     * Trigger a server-side source sync. Long-running: the request is sent
     * with `timeout: 0` and start/success/error are logged with timings and
     * the backend trace id when one is returned.
     */
    async syncSources (payload = {}) {
      this.saving = true
      const t0 = Date.now()
      console.info('[translation-sync][frontend] request:start', {
        at: new Date(t0).toISOString(),
        payload
      })
      try {
        const res = await api.post('/language/translations/sync-sources', payload, { timeout: 0 })
        const body = res?.data || null
        // Trace id may live on the body, the nested result, or a header.
        const traceId = body?.trace_id || body?.result?.trace_id || res?.headers?.['x-trace-id'] || null
        console.info('[translation-sync][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - t0,
          result: body?.result || null
        })
        return body
      } catch (err) {
        console.error('[translation-sync][frontend] request:error', {
          duration_ms: Date.now() - t0,
          message: err?.message || 'sync-sources failed'
        })
        throw err
      } finally {
        this.saving = false
      }
    },

    /**
     * Machine-translate the selected rows on the server. Long-running
     * (`timeout: 0`); logs start/success/error with duration and trace id.
     */
    async translateSelected (payload = {}) {
      this.saving = true
      const t0 = Date.now()
      console.info('[translation-selected][frontend] request:start', {
        at: new Date(t0).toISOString(),
        payload
      })
      try {
        const res = await api.post('/language/translations/translate-selected', payload, { timeout: 0 })
        const body = res?.data || null
        const traceId = body?.trace_id || res?.headers?.['x-trace-id'] || null
        console.info('[translation-selected][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - t0,
          translated_count: body?.translated_count || 0
        })
        return body
      } catch (err) {
        console.error('[translation-selected][frontend] request:error', {
          duration_ms: Date.now() - t0,
          message: err?.message || 'translate-selected failed'
        })
        throw err
      } finally {
        this.saving = false
      }
    },

    /** Approve the given row ids in one request. */
    async bulkApprove (ids = []) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/bulk-approve', {
          ids: Array.isArray(ids) ? ids : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    },

    /** Persist edits for several rows in one request. */
    async bulkUpdate (items = []) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/bulk-update', {
          items: Array.isArray(items) ? items : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    }
  }
})