Compare commits

...

36 Commits

Author SHA1 Message Date
M_Kececi
c6bdf83f05 Merge remote-tracking branch 'origin/master' 2026-04-17 12:16:50 +03:00
M_Kececi
f9728b8a4c Merge remote-tracking branch 'origin/master' 2026-04-16 17:46:50 +03:00
M_Kececi
307282928c Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:59 +03:00
M_Kececi
29909f3609 Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:55 +03:00
M_Kececi
bb856cb082 Merge remote-tracking branch 'origin/master' 2026-04-16 16:25:45 +03:00
M_Kececi
b065e7192d Merge remote-tracking branch 'origin/master' 2026-04-16 16:00:24 +03:00
M_Kececi
2d369e7d7d Merge remote-tracking branch 'origin/master' 2026-04-16 15:18:44 +03:00
M_Kececi
1831c45a0c Merge remote-tracking branch 'origin/master' 2026-04-15 17:03:25 +03:00
M_Kececi
1a80184cac Merge remote-tracking branch 'origin/master' 2026-04-15 16:43:21 +03:00
M_Kececi
5be7315bdb Merge remote-tracking branch 'origin/master' 2026-04-15 15:54:44 +03:00
M_Kececi
c925af5ba1 Merge remote-tracking branch 'origin/master' 2026-04-14 18:04:19 +03:00
M_Kececi
352a7e26ea Merge remote-tracking branch 'origin/master' 2026-04-14 17:53:58 +03:00
M_Kececi
9ee70eb05a Merge remote-tracking branch 'origin/master' 2026-04-14 17:52:38 +03:00
M_Kececi
8694511e79 Merge remote-tracking branch 'origin/master' 2026-04-14 17:46:15 +03:00
M_Kececi
69ba4b2ecb Merge remote-tracking branch 'origin/master' 2026-04-14 17:34:46 +03:00
M_Kececi
eb628e99c2 Merge remote-tracking branch 'origin/master' 2026-04-14 17:23:24 +03:00
M_Kececi
431441802e Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:18 +03:00
M_Kececi
7457d95bac Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:14 +03:00
M_Kececi
e352b8c47a Merge remote-tracking branch 'origin/master' 2026-04-14 17:03:21 +03:00
M_Kececi
d8b6b7166c Merge remote-tracking branch 'origin/master' 2026-04-14 16:51:27 +03:00
M_Kececi
aec450c3cd Merge remote-tracking branch 'origin/master' 2026-04-14 16:35:21 +03:00
M_Kececi
47fc7a6178 Merge remote-tracking branch 'origin/master' 2026-04-14 16:34:25 +03:00
M_Kececi
214677da1e Merge remote-tracking branch 'origin/master' 2026-04-14 16:17:59 +03:00
M_Kececi
b1a3bbd3c5 Merge remote-tracking branch 'origin/master' 2026-04-04 19:05:11 +03:00
M_Kececi
6467017470 Merge remote-tracking branch 'origin/master' 2026-04-03 16:30:31 +03:00
M_Kececi
bf97e20e79 Merge remote-tracking branch 'origin/master' 2026-04-03 15:47:22 +03:00
M_Kececi
2b04688905 Merge remote-tracking branch 'origin/master' 2026-04-03 15:23:53 +03:00
M_Kececi
79f7fa0974 Merge remote-tracking branch 'origin/master' 2026-04-03 15:18:46 +03:00
M_Kececi
e965eb7c36 Merge remote-tracking branch 'origin/master' 2026-04-03 15:08:31 +03:00
M_Kececi
07c000358e Merge remote-tracking branch 'origin/master' 2026-04-03 14:56:22 +03:00
M_Kececi
415e3db084 Merge remote-tracking branch 'origin/master' 2026-04-03 14:32:19 +03:00
M_Kececi
f46532cee1 Merge remote-tracking branch 'origin/master' 2026-04-03 14:22:09 +03:00
M_Kececi
e1064010f3 Merge remote-tracking branch 'origin/master' 2026-04-03 14:16:27 +03:00
M_Kececi
67ef80936a Merge remote-tracking branch 'origin/master' 2026-04-03 14:02:05 +03:00
M_Kececi
548931f714 Merge remote-tracking branch 'origin/master'
# Conflicts:
#	ui/src/stores/orderentryStore.js
2026-04-03 13:13:14 +03:00
M_Kececi
5adf71c4cc Merge remote-tracking branch 'origin/master' 2026-04-03 13:11:17 +03:00
68 changed files with 9568 additions and 591 deletions

View File

@@ -93,7 +93,7 @@ ensure_ui_permissions() {
clean_ui_build_artifacts() { clean_ui_build_artifacts() {
cd "$APP_DIR/ui" cd "$APP_DIR/ui"
# dist'i silmiyoruz -> eski chunklar k<EFBFBD>sa s<EFBFBD>re kalabilir, ChunkLoadError azal<EFBFBD>r # dist'i silmiyoruz -> eski chunklar kısa süre kalabilir, ChunkLoadError azalır
rm -rf .quasar node_modules/.cache || true rm -rf .quasar node_modules/.cache || true
} }
@@ -217,6 +217,36 @@ ensure_ui_readable_by_nginx() {
fi fi
} }
ensure_node20_for_ui_build() {
  # Ensure the UI build runs on Node.js >= 20.
  # Prefers nvm when present; otherwise validates whatever `node` is on PATH.
  local required_major=20
  local nvm_dir="${NVM_DIR:-$HOME/.nvm}"
  if [[ -s "$nvm_dir/nvm.sh" ]]; then
    # shellcheck disable=SC1090
    source "$nvm_dir/nvm.sh"
    nvm install "$required_major" >/dev/null
    nvm use "$required_major" >/dev/null
  fi
  if ! command -v node >/dev/null 2>&1; then
    echo "ERROR: node command not found"
    return 1
  fi
  local node_version
  node_version="$(node -v 2>/dev/null || true)"
  local node_major
  # `sed -n ... p` prints nothing when the version string does not match
  # (previously a no-match passed the whole string through, and the numeric
  # comparison below aborted with a bash error instead of our message).
  node_major="$(echo "$node_version" | sed -nE 's/^v([0-9]+).*/\1/p')"
  if [[ -z "$node_major" || ! "$node_major" =~ ^[0-9]+$ || "$node_major" -lt "$required_major" ]]; then
    echo "ERROR: Node.js >=${required_major} required for UI build. Current: ${node_version:-unknown}"
    echo "Hint: install nvm and run: nvm install ${required_major} && nvm alias default ${required_major}"
    return 1
  fi
  # npm may legitimately be missing right after a bare node install; don't die here.
  echo "UI build runtime: node=$node_version npm=$(npm -v 2>/dev/null || echo unknown)"
}
build_api_binary() { build_api_binary() {
if ! command -v go >/dev/null 2>&1; then if ! command -v go >/dev/null 2>&1; then
echo "ERROR: go command not found" echo "ERROR: go command not found"
@@ -291,6 +321,7 @@ run_deploy() {
log_step "BUILD UI" log_step "BUILD UI"
cd "$APP_DIR/ui" cd "$APP_DIR/ui"
ensure_node20_for_ui_build
clean_ui_build_artifacts clean_ui_build_artifacts
npm ci --no-audit --no-fund --include=optional npm ci --no-audit --no-fund --include=optional
npm i -D --no-audit --no-fund sass-embedded@1.93.2 npm i -D --no-audit --no-fund sass-embedded@1.93.2

View File

@@ -0,0 +1,52 @@
# i18n + Dinamik Çeviri Standardı
Bu projede çok dilli yapı iki katmanlıdır:
1. Statik UI metinleri `i18n` ile yönetilir.
2. Dinamik içerikler `mk_translator` + otomatik çeviri servisi (OpenAI) ile yönetilir.
## 1) Statik UI (Deterministik)
Kullanım alanı:
- buton metinleri
- menüler
- form label'ları
- validasyon mesajları
- sabit ekran başlıkları
- route/meta/title
Kural:
- her metin key bazlı tutulur (`$t('common.save')`)
- locale dosyaları: `tr`, `en`, `de`, `it`, `es`, `ru`, `ar`
- fallback sırası: hedef dil -> `en` -> `tr`
## 2) Dinamik İçerik (DB/CMS/Serbest metin)
Akış:
1. Kaynak metin için `mk_translator` kontrol edilir.
2. Hedef dil karşılığı yoksa OpenAI ile çeviri üretilir.
3. Sonuç `mk_translator` tablosuna yazılır.
4. Sonraki isteklerde DB sonucu kullanılır (cache etkisi).
Kullanım alanı:
- ürün/kategori açıklamaları
- CMS içerikleri
- admin panelden girilen serbest metinler
- şablon bazlı metin içerikleri
## Kalite ve Güvenlik Kuralları
- Prompt net olmalı: sadece çeviri dönsün, açıklama eklemesin.
- Placeholder/format korunsun: `{name}`, `{{count}}`, `%s` gibi yapılar bozulmasın.
- HTML tag'leri ve kod/SKU değerleri çevrilmesin.
- API key sadece backend'de tutulur (`OPENAI_API_KEY` client'a verilmez).
- 429/5xx için retry + exponential backoff uygulanır.
- Hassas veri içeriği olan metinlerde veri politikası kontrolü yapılır.
## Özet
Bu servis, `i18n`'in alternatifi değildir; `i18n`'i tamamlayan dinamik çeviri katmanıdır.
- Statik UI: `i18n`
- Dinamik içerik: `mk_translator` + OpenAI + cache

View File

@@ -16,4 +16,9 @@
| Cloudflare | bt@baggi.com.tr | Baggi2025!.? | | Cloudflare | bt@baggi.com.tr | Baggi2025!.? |
| 172.16.0.3 | ct | pasauras | | 172.16.0.3 | ct | pasauras |
## Dil ve Çeviri Standardı
Detaylı mimari dokümanı:
- [docs/i18n-dynamic-translation-standard.md](docs/i18n-dynamic-translation-standard.md)

View File

@@ -0,0 +1,48 @@
-- language_module_seed.sql
-- Seeds routes and permissions for the "language" module.
-- Safe to re-run: every INSERT is an upsert (ON CONFLICT) and the DELETE is idempotent.
-- 1) Register language module routes if missing
-- NOTE(review): every route below (including the GET) is registered with
-- action 'update' — confirm this is intentional before granting roles.
INSERT INTO mk_sys_routes (path, method, module_code, action)
VALUES
('/api/language/translations', 'GET', 'language', 'update'),
('/api/language/translations/{id}', 'PUT', 'language', 'update'),
('/api/language/translations/upsert-missing', 'POST', 'language', 'update'),
('/api/language/translations/sync-sources', 'POST', 'language', 'update'),
('/api/language/translations/translate-selected', 'POST', 'language', 'update'),
('/api/language/translations/bulk-approve', 'POST', 'language', 'update'),
('/api/language/translations/bulk-update', 'POST', 'language', 'update')
ON CONFLICT (path, method) DO UPDATE
SET
module_code = EXCLUDED.module_code,
action = EXCLUDED.action;
-- 2) Remove legacy system translation routes (optional cleanup)
-- Destructive: drops the old /api/system/translations* registrations; run
-- only after the new language routes above are live.
DELETE FROM mk_sys_routes
WHERE path LIKE '/api/system/translations%';
-- 3) Seed role permissions for language module by cloning system perms
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT rp.role_id, 'language', rp.action, rp.allowed
FROM mk_sys_role_permissions rp
WHERE rp.module_code = 'system'
AND rp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;
-- 4) Ensure admin update access
-- role id 3 is the admin role elsewhere in this codebase (see route auto-seed).
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT r.id, 'language', 'update', true
FROM dfrole r
WHERE r.id = 3
ON CONFLICT DO NOTHING;
-- 5) Seed role+department permissions for language module by cloning system perms
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT DISTINCT
rdp.role_id,
rdp.department_code,
'language',
rdp.action,
rdp.allowed
FROM mk_sys_role_department_permissions rdp
WHERE rdp.module_code = 'system'
AND rdp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;

View File

@@ -32,3 +32,6 @@ API_HOST=0.0.0.0
API_PORT=8080 API_PORT=8080
# NOTE(review): a live-looking Azure Translator key is committed here — rotate
# this key and load it from a secrets store / untracked env file instead.
AZURE_TRANSLATOR_KEY=d055c693-a84e-4594-8aef-a6c05c42623a
AZURE_TRANSLATOR_ENDPOINT=https://api.cognitive.microsofttranslator.com
AZURE_TRANSLATOR_REGION=westeurope

View File

@@ -0,0 +1,72 @@
package main

import (
	"bssapp-backend/db"
	"bssapp-backend/routes"
	"fmt"
	"log"
	"os"
	"strconv"
	"strings"

	"github.com/joho/godotenv"
)

// resolveLimit returns the sync row limit: TRANSLATION_SYNC_LIMIT when it is
// a positive integer, otherwise the 30000 default.
func resolveLimit() int {
	const fallback = 30000
	raw := os.Getenv("TRANSLATION_SYNC_LIMIT")
	if raw == "" {
		return fallback
	}
	if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
		return parsed
	}
	return fallback
}

// resolveLangs returns the target languages: TRANSLATION_SYNC_LANGS as a
// comma-separated, case-insensitive list, or the built-in default set.
func resolveLangs() []string {
	defaults := []string{"en", "de", "it", "es", "ru", "ar"}
	raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LANGS"))
	if raw == "" {
		return defaults
	}
	custom := make([]string, 0)
	for _, part := range strings.Split(raw, ",") {
		if code := strings.TrimSpace(strings.ToLower(part)); code != "" {
			custom = append(custom, code)
		}
	}
	if len(custom) == 0 {
		return defaults
	}
	return custom
}

// resolveAutoTranslate is true unless TRANSLATION_SYNC_AUTO_TRANSLATE is
// explicitly set to "0", "false", or "off" (case-insensitive).
func resolveAutoTranslate() bool {
	switch strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_AUTO_TRANSLATE"))) {
	case "0", "false", "off":
		return false
	}
	return true
}

// main is a one-shot CLI that connects to both databases and runs a manual
// translation sync, printing a summary line on success.
func main() {
	// Best-effort env loading; missing files are not fatal.
	_ = godotenv.Load(".env", "mail.env", ".env.local")

	if err := db.ConnectMSSQL(); err != nil {
		log.Fatalf("mssql connect failed: %v", err)
	}
	pgDB, err := db.ConnectPostgres()
	if err != nil {
		log.Fatalf("postgres connect failed: %v", err)
	}
	defer pgDB.Close()

	result, err := routes.PerformTranslationSync(pgDB, db.MssqlDB, routes.TranslationSyncOptions{
		AutoTranslate: resolveAutoTranslate(),
		Languages:     resolveLangs(),
		Limit:         resolveLimit(),
		OnlyNew:       true,
	})
	if err != nil {
		log.Fatalf("manual sync failed: %v", err)
	}
	fmt.Printf("translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v\n",
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
		result.TargetLangs,
	)
}

View File

@@ -3,32 +3,120 @@ package db
import ( import (
"database/sql" "database/sql"
"fmt" "fmt"
"net/url"
"os" "os"
"strconv"
"strings" "strings"
"time"
_ "github.com/microsoft/go-mssqldb" _ "github.com/microsoft/go-mssqldb"
) )
var MssqlDB *sql.DB var MssqlDB *sql.DB
// ConnectMSSQL MSSQL baglantisini ortam degiskeninden baslatir. func envInt(name string, fallback int) int {
raw := strings.TrimSpace(os.Getenv(name))
if raw == "" {
return fallback
}
value, err := strconv.Atoi(raw)
if err != nil || value <= 0 {
return fallback
}
return value
}
// ensureTimeoutValue keeps current when it already encodes a timeout of at
// least desired seconds; otherwise it returns desired rendered as a string.
func ensureTimeoutValue(current string, desired int) string {
	trimmed := strings.TrimSpace(current)
	if existing, err := strconv.Atoi(trimmed); err == nil && existing >= desired {
		return trimmed
	}
	return strconv.Itoa(desired)
}
// ensureMSSQLTimeouts guarantees that the connection string carries
// "connection timeout" and "dial timeout" values of at least the requested
// seconds. Both go-mssqldb formats are handled: URL style
// ("sqlserver://host?...") and semicolon key=value style. Existing values
// that already meet or exceed the minimum are preserved (via
// ensureTimeoutValue); unparsable input is returned unchanged.
func ensureMSSQLTimeouts(connString string, connectionTimeoutSec int, dialTimeoutSec int) string {
	raw := strings.TrimSpace(connString)
	if raw == "" {
		return raw
	}
	// URL form: rewrite the query parameters and re-encode.
	if strings.HasPrefix(strings.ToLower(raw), "sqlserver://") {
		u, err := url.Parse(raw)
		if err != nil {
			// Don't guess at a broken URL; let the driver report it.
			return raw
		}
		q := u.Query()
		q.Set("connection timeout", ensureTimeoutValue(q.Get("connection timeout"), connectionTimeoutSec))
		q.Set("dial timeout", ensureTimeoutValue(q.Get("dial timeout"), dialTimeoutSec))
		u.RawQuery = q.Encode()
		return u.String()
	}
	// Semicolon form: patch matching keys in place, remember what we saw.
	parts := strings.Split(raw, ";")
	foundConnectionTimeout := false
	foundDialTimeout := false
	for i, part := range parts {
		part = strings.TrimSpace(part)
		if part == "" {
			continue
		}
		eq := strings.Index(part, "=")
		if eq <= 0 {
			// Not a key=value segment; leave it untouched.
			continue
		}
		key := strings.ToLower(strings.TrimSpace(part[:eq]))
		value := strings.TrimSpace(part[eq+1:])
		switch key {
		case "connection timeout":
			foundConnectionTimeout = true
			parts[i] = "connection timeout=" + ensureTimeoutValue(value, connectionTimeoutSec)
		case "dial timeout":
			foundDialTimeout = true
			parts[i] = "dial timeout=" + ensureTimeoutValue(value, dialTimeoutSec)
		}
	}
	// Append any timeout that was absent from the original string.
	if !foundConnectionTimeout {
		parts = append(parts, "connection timeout="+strconv.Itoa(connectionTimeoutSec))
	}
	if !foundDialTimeout {
		parts = append(parts, "dial timeout="+strconv.Itoa(dialTimeoutSec))
	}
	return strings.Join(parts, ";")
}
// ConnectMSSQL initializes the MSSQL connection from environment.
func ConnectMSSQL() error { func ConnectMSSQL() error {
connString := strings.TrimSpace(os.Getenv("MSSQL_CONN")) connString := strings.TrimSpace(os.Getenv("MSSQL_CONN"))
if connString == "" { if connString == "" {
return fmt.Errorf("MSSQL_CONN tanımlı değil") return fmt.Errorf("MSSQL_CONN tanimli degil")
} }
connectionTimeoutSec := envInt("MSSQL_CONNECTION_TIMEOUT_SEC", 120)
dialTimeoutSec := envInt("MSSQL_DIAL_TIMEOUT_SEC", connectionTimeoutSec)
connString = ensureMSSQLTimeouts(connString, connectionTimeoutSec, dialTimeoutSec)
var err error var err error
MssqlDB, err = sql.Open("sqlserver", connString) MssqlDB, err = sql.Open("sqlserver", connString)
if err != nil { if err != nil {
return fmt.Errorf("MSSQL bağlantı hatası: %w", err) return fmt.Errorf("MSSQL baglanti hatasi: %w", err)
} }
MssqlDB.SetMaxOpenConns(envInt("MSSQL_MAX_OPEN_CONNS", 40))
MssqlDB.SetMaxIdleConns(envInt("MSSQL_MAX_IDLE_CONNS", 40))
MssqlDB.SetConnMaxLifetime(time.Duration(envInt("MSSQL_CONN_MAX_LIFETIME_MIN", 30)) * time.Minute)
MssqlDB.SetConnMaxIdleTime(time.Duration(envInt("MSSQL_CONN_MAX_IDLE_MIN", 10)) * time.Minute)
if err = MssqlDB.Ping(); err != nil { if err = MssqlDB.Ping(); err != nil {
return fmt.Errorf("MSSQL erişilemiyor: %w", err) return fmt.Errorf("MSSQL erisilemiyor: %w", err)
} }
fmt.Println("MSSQL bağlantısı başarılı") fmt.Printf("MSSQL baglantisi basarili (connection timeout=%ds, dial timeout=%ds)\n", connectionTimeoutSec, dialTimeoutSec)
return nil return nil
} }

122
svc/internal/i18n/lang.go Normal file
View File

@@ -0,0 +1,122 @@
package i18n

import "strings"

// DefaultLang is the code returned whenever a requested language is missing
// or unsupported.
const DefaultLang = "TR"

// supported enumerates the language codes this package understands.
var supported = map[string]struct{}{
	"TR": {},
	"EN": {},
	"DE": {},
	"IT": {},
	"ES": {},
	"RU": {},
	"AR": {},
}

// NormalizeLangCode upper-cases and trims raw, returning it when supported
// and DefaultLang otherwise.
func NormalizeLangCode(raw string) string {
	code := strings.ToUpper(strings.TrimSpace(raw))
	if _, known := supported[code]; !known {
		return DefaultLang
	}
	return code
}

// ResolveLangCode picks a language with this precedence:
//  1. queryLangCode, when it names a supported language (including an
//     explicit "TR");
//  2. the first entry of the Accept-Language header, matched on its first
//     two letters;
//  3. DefaultLang.
func ResolveLangCode(queryLangCode, acceptLanguage string) string {
	query := strings.TrimSpace(queryLangCode)
	if lang := NormalizeLangCode(query); lang != DefaultLang {
		return lang
	}
	if strings.EqualFold(query, DefaultLang) {
		// The caller explicitly asked for the default language.
		return DefaultLang
	}

	header := strings.TrimSpace(acceptLanguage)
	if header == "" {
		return DefaultLang
	}
	// Take the first comma-separated entry and strip any ";q=..." weight.
	primary := strings.Split(header, ",")[0]
	primary = strings.TrimSpace(strings.Split(primary, ";")[0])
	if len(primary) < 2 {
		return DefaultLang
	}
	return NormalizeLangCode(primary[:2])
}

// T looks up key for langCode, walking the fallback chain; the key itself is
// returned when no language in the chain has a translation.
func T(langCode, key string) string {
	for _, candidate := range fallbackLangs(langCode) {
		if translated, ok := dict[candidate][key]; ok {
			return translated
		}
	}
	return key
}

// fallbackLangs builds the lookup chain: the language itself, then EN, then
// TR (each appearing once).
func fallbackLangs(langCode string) []string {
	lang := NormalizeLangCode(langCode)
	chain := []string{lang}
	if lang != "EN" && lang != "TR" {
		chain = append(chain, "EN")
	}
	if lang != "TR" {
		chain = append(chain, "TR")
	}
	return chain
}

// dict holds the static PDF-report strings per language. Languages without
// an entry here resolve through the fallback chain above.
var dict = map[string]map[string]string{
	"TR": {
		"pdf.report_title":      "Cari Hesap Raporu",
		"pdf.date":              "Tarih",
		"pdf.customer":          "Cari",
		"pdf.date_range":        "Tarih Aralığı",
		"pdf.page":              "Sayfa",
		"pdf.ending_balance":    "Son Bakiye",
		"pdf.currency_prefix":   "Para Birimi",
		"pdf.balance_prefix":    "Bakiye",
		"pdf.main.doc_no":       "Belge No",
		"pdf.main.date":         "Tarih",
		"pdf.main.due_date":     "Vade",
		"pdf.main.operation":    "İşlem",
		"pdf.main.description":  "Açıklama",
		"pdf.main.currency":     "Para",
		"pdf.main.debit":        "Borç",
		"pdf.main.credit":       "Alacak",
		"pdf.main.balance":      "Bakiye",
		"pdf.detail.main_group": "Ana Grup",
		"pdf.detail.sub_group":  "Alt Grup",
		"pdf.detail.waiter":     "Garson",
		"pdf.detail.fit":        "Fit",
		"pdf.detail.content":    "İçerik",
		"pdf.detail.product":    "Ürün",
		"pdf.detail.color":      "Renk",
		"pdf.detail.qty":        "Adet",
		"pdf.detail.price":      "Fiyat",
		"pdf.detail.total":      "Tutar",
	},
	"EN": {
		"pdf.report_title":      "Customer Account Report",
		"pdf.date":              "Date",
		"pdf.customer":          "Customer",
		"pdf.date_range":        "Date Range",
		"pdf.page":              "Page",
		"pdf.ending_balance":    "Ending Balance",
		"pdf.currency_prefix":   "Currency",
		"pdf.balance_prefix":    "Balance",
		"pdf.main.doc_no":       "Document No",
		"pdf.main.date":         "Date",
		"pdf.main.due_date":     "Due Date",
		"pdf.main.operation":    "Operation",
		"pdf.main.description":  "Description",
		"pdf.main.currency":     "Curr.",
		"pdf.main.debit":        "Debit",
		"pdf.main.credit":       "Credit",
		"pdf.main.balance":      "Balance",
		"pdf.detail.main_group": "Main Group",
		"pdf.detail.sub_group":  "Sub Group",
		"pdf.detail.waiter":     "Waiter",
		"pdf.detail.fit":        "Fit",
		"pdf.detail.content":    "Content",
		"pdf.detail.product":    "Product",
		"pdf.detail.color":      "Color",
		"pdf.detail.qty":        "Qty",
		"pdf.detail.price":      "Price",
		"pdf.detail.total":      "Total",
	},
}

View File

@@ -3,6 +3,7 @@ package mailer
import ( import (
"context" "context"
"crypto/tls" "crypto/tls"
"encoding/base64"
"errors" "errors"
"fmt" "fmt"
"net" "net"
@@ -138,11 +139,13 @@ func (m *Mailer) Send(ctx context.Context, msg Message) error {
} }
func buildMIME(from string, to []string, subject, contentType, body string) string { func buildMIME(from string, to []string, subject, contentType, body string) string {
// Subject UTF-8 basit hali (gerekirse sonra MIME encoded-word ekleriz) // Encode Subject to UTF-8
encodedSubject := "=?UTF-8?B?" + base64.StdEncoding.EncodeToString([]byte(subject)) + "?="
headers := []string{ headers := []string{
"From: " + from, "From: " + from,
"To: " + strings.Join(to, ", "), "To: " + strings.Join(to, ", "),
"Subject: " + subject, "Subject: " + encodedSubject,
"MIME-Version: 1.0", "MIME-Version: 1.0",
"Content-Type: " + contentType, "Content-Type: " + contentType,
"", "",

View File

@@ -104,7 +104,26 @@ func autoRegisterRouteV3(
return return
} }
// 2) ADMIN AUTO PERMISSION (module+action bazlı) // 2) MODULE LOOKUP AUTO SEED (permission ekranları için)
moduleLabel := strings.TrimSpace(strings.ReplaceAll(module, "_", " "))
if moduleLabel == "" {
moduleLabel = module
}
_, err = tx.Exec(`
INSERT INTO mk_sys_modules (code, name)
VALUES ($1::text, $2::text)
ON CONFLICT (code) DO UPDATE
SET name = COALESCE(NULLIF(EXCLUDED.name, ''), mk_sys_modules.name)
`,
module,
moduleLabel,
)
if err != nil {
log.Printf("❌ Module seed error (%s %s): %v", method, path, err)
return
}
// 3) ROLE PERMISSION AUTO SEED (admin=true, diğer roller=false)
_, err = tx.Exec(` _, err = tx.Exec(`
INSERT INTO mk_sys_role_permissions INSERT INTO mk_sys_role_permissions
(role_id, module_code, action, allowed) (role_id, module_code, action, allowed)
@@ -112,16 +131,50 @@ func autoRegisterRouteV3(
id, id,
$1, $1,
$2, $2,
true CASE
WHEN id = 3 OR LOWER(code) = 'admin' THEN true
ELSE false
END
FROM dfrole FROM dfrole
WHERE id = 3 -- ADMIN
ON CONFLICT DO NOTHING ON CONFLICT DO NOTHING
`, `,
module, module,
action, action,
) )
if err != nil { if err != nil {
log.Printf("❌ Admin perm seed error (%s %s): %v", method, path, err) log.Printf("❌ Role perm seed error (%s %s): %v", method, path, err)
return
}
// 4) ROLE+DEPARTMENT PERMISSION AUTO SEED
// Existing role+department kombinasyonlarına yeni module+action satırı eklenir.
_, err = tx.Exec(`
WITH role_dept_scope AS (
SELECT DISTINCT role_id, department_code
FROM mk_sys_role_department_permissions
UNION
SELECT 3 AS role_id, d.code AS department_code
FROM mk_dprt d
)
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT
rds.role_id,
rds.department_code,
$1,
$2,
CASE
WHEN rds.role_id = 3 THEN true
ELSE false
END
FROM role_dept_scope rds
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Role+Dept perm seed error (%s %s): %v", method, path, err)
return return
} }
@@ -265,6 +318,41 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"system", "update", "system", "update",
wrapV3(routes.SaveMarketMailMappingHandler(pgDB)), wrapV3(routes.SaveMarketMailMappingHandler(pgDB)),
) )
bindV3(r, pgDB,
"/api/language/translations", "GET",
"language", "update",
wrapV3(routes.GetTranslationRowsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/{id}", "PUT",
"language", "update",
wrapV3(routes.UpdateTranslationRowHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/upsert-missing", "POST",
"language", "update",
wrapV3(routes.UpsertMissingTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/sync-sources", "POST",
"language", "update",
wrapV3(routes.SyncTranslationSourcesHandler(pgDB, mssql)),
)
bindV3(r, pgDB,
"/api/language/translations/translate-selected", "POST",
"language", "update",
wrapV3(routes.TranslateSelectedTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-approve", "POST",
"language", "update",
wrapV3(routes.BulkApproveTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-update", "POST",
"language", "update",
wrapV3(routes.BulkUpdateTranslationsHandler(pgDB)),
)
// ============================================================ // ============================================================
// PERMISSIONS // PERMISSIONS
@@ -519,6 +607,7 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
}{ }{
{"/api/order/create", "POST", "insert", routes.CreateOrderHandler(pgDB, mssql)}, {"/api/order/create", "POST", "insert", routes.CreateOrderHandler(pgDB, mssql)},
{"/api/order/update", "POST", "update", http.HandlerFunc(routes.UpdateOrderHandler)}, {"/api/order/update", "POST", "update", http.HandlerFunc(routes.UpdateOrderHandler)},
{"/api/order/{id}/bulk-due-date", "POST", "update", routes.BulkUpdateOrderLineDueDateHandler(mssql)},
{"/api/order/get/{id}", "GET", "view", routes.GetOrderByIDHandler(mssql)}, {"/api/order/get/{id}", "GET", "view", routes.GetOrderByIDHandler(mssql)},
{"/api/orders/list", "GET", "view", routes.OrderListRoute(mssql)}, {"/api/orders/list", "GET", "view", routes.OrderListRoute(mssql)},
{"/api/orders/production-list", "GET", "update", routes.OrderProductionListRoute(mssql)}, {"/api/orders/production-list", "GET", "update", routes.OrderProductionListRoute(mssql)},
@@ -526,7 +615,7 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
{"/api/orders/production-items/{id}", "GET", "view", routes.OrderProductionItemsRoute(mssql)}, {"/api/orders/production-items/{id}", "GET", "view", routes.OrderProductionItemsRoute(mssql)},
{"/api/orders/production-items/{id}/insert-missing", "POST", "update", routes.OrderProductionInsertMissingRoute(mssql)}, {"/api/orders/production-items/{id}/insert-missing", "POST", "update", routes.OrderProductionInsertMissingRoute(mssql)},
{"/api/orders/production-items/{id}/validate", "POST", "update", routes.OrderProductionValidateRoute(mssql)}, {"/api/orders/production-items/{id}/validate", "POST", "update", routes.OrderProductionValidateRoute(mssql)},
{"/api/orders/production-items/{id}/apply", "POST", "update", routes.OrderProductionApplyRoute(mssql)}, {"/api/orders/production-items/{id}/apply", "POST", "update", routes.OrderProductionApplyRoute(mssql, ml)},
{"/api/orders/close-ready", "GET", "update", routes.OrderCloseReadyListRoute(mssql)}, {"/api/orders/close-ready", "GET", "update", routes.OrderCloseReadyListRoute(mssql)},
{"/api/orders/bulk-close", "POST", "update", routes.OrderBulkCloseRoute(mssql)}, {"/api/orders/bulk-close", "POST", "update", routes.OrderBulkCloseRoute(mssql)},
{"/api/orders/export", "GET", "export", routes.OrderListExcelRoute(mssql)}, {"/api/orders/export", "GET", "export", routes.OrderListExcelRoute(mssql)},
@@ -571,6 +660,12 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
wrapV3(http.HandlerFunc(routes.GetProductDetailHandler)), wrapV3(http.HandlerFunc(routes.GetProductDetailHandler)),
) )
bindV3(r, pgDB,
"/api/product-cditem", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductCdItemHandler)),
)
bindV3(r, pgDB, bindV3(r, pgDB,
"/api/product-colors", "GET", "/api/product-colors", "GET",
"order", "view", "order", "view",
@@ -603,6 +698,11 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"order", "view", "order", "view",
wrapV3(http.HandlerFunc(routes.GetProductAttributesHandler)), wrapV3(http.HandlerFunc(routes.GetProductAttributesHandler)),
) )
bindV3(r, pgDB,
"/api/product-item-attributes", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductItemAttributesHandler)),
)
bindV3(r, pgDB, bindV3(r, pgDB,
"/api/product-stock-query", "GET", "/api/product-stock-query", "GET",
"order", "view", "order", "view",
@@ -633,6 +733,11 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"order", "view", "order", "view",
wrapV3(routes.GetProductSizeMatchRulesHandler(pgDB)), wrapV3(routes.GetProductSizeMatchRulesHandler(pgDB)),
) )
bindV3(r, pgDB,
"/api/pricing/products", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductPricingListHandler)),
)
// ============================================================ // ============================================================
// ROLE MANAGEMENT // ROLE MANAGEMENT
@@ -742,6 +847,11 @@ func main() {
auditlog.Init(pgDB, 1000) auditlog.Init(pgDB, 1000)
log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)") log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)")
// -------------------------------------------------------
// 🚀 TRANSLATION QUERY PERFORMANCE INDEXES
// -------------------------------------------------------
routes.EnsureTranslationPerfIndexes(pgDB)
// ------------------------------------------------------- // -------------------------------------------------------
// ✉️ MAILER INIT // ✉️ MAILER INIT
// ------------------------------------------------------- // -------------------------------------------------------
@@ -760,6 +870,7 @@ func main() {
// 🌍 SERVER // 🌍 SERVER
// ------------------------------------------------------- // -------------------------------------------------------
router := InitRoutes(pgDB, db.MssqlDB, graphMailer) router := InitRoutes(pgDB, db.MssqlDB, graphMailer)
startTranslationSyncScheduler(pgDB, db.MssqlDB)
handler := enableCORS( handler := enableCORS(
middlewares.GlobalAuthMiddleware( middlewares.GlobalAuthMiddleware(

View File

@@ -13,8 +13,10 @@ type OrderProductionItem struct {
OldItemCode string `json:"OldItemCode"` OldItemCode string `json:"OldItemCode"`
OldColor string `json:"OldColor"` OldColor string `json:"OldColor"`
OldColorDescription string `json:"OldColorDescription"`
OldDim2 string `json:"OldDim2"` OldDim2 string `json:"OldDim2"`
OldDesc string `json:"OldDesc"` OldDesc string `json:"OldDesc"`
OldQty float64 `json:"OldQty"`
NewItemCode string `json:"NewItemCode"` NewItemCode string `json:"NewItemCode"`
NewColor string `json:"NewColor"` NewColor string `json:"NewColor"`
@@ -22,4 +24,6 @@ type OrderProductionItem struct {
NewDesc string `json:"NewDesc"` NewDesc string `json:"NewDesc"`
IsVariantMissing bool `json:"IsVariantMissing"` IsVariantMissing bool `json:"IsVariantMissing"`
OldDueDate string `json:"OldDueDate"`
NewDueDate string `json:"NewDueDate"`
} }

View File

@@ -4,8 +4,11 @@ type OrderProductionUpdateLine struct {
OrderLineID string `json:"OrderLineID"` OrderLineID string `json:"OrderLineID"`
NewItemCode string `json:"NewItemCode"` NewItemCode string `json:"NewItemCode"`
NewColor string `json:"NewColor"` NewColor string `json:"NewColor"`
ItemDim1Code *string `json:"ItemDim1Code,omitempty"`
NewDim2 string `json:"NewDim2"` NewDim2 string `json:"NewDim2"`
NewDesc string `json:"NewDesc"` NewDesc string `json:"NewDesc"`
OldDueDate string `json:"OldDueDate"`
NewDueDate string `json:"NewDueDate"`
} }
type OrderProductionUpdatePayload struct { type OrderProductionUpdatePayload struct {
@@ -13,6 +16,7 @@ type OrderProductionUpdatePayload struct {
InsertMissing bool `json:"insertMissing"` InsertMissing bool `json:"insertMissing"`
CdItems []OrderProductionCdItemDraft `json:"cdItems"` CdItems []OrderProductionCdItemDraft `json:"cdItems"`
ProductAttributes []OrderProductionItemAttributeRow `json:"productAttributes"` ProductAttributes []OrderProductionItemAttributeRow `json:"productAttributes"`
HeaderAverageDueDate *string `json:"HeaderAverageDueDate,omitempty"`
} }
type OrderProductionMissingVariant struct { type OrderProductionMissingVariant struct {
@@ -25,6 +29,19 @@ type OrderProductionMissingVariant struct {
ItemDim3Code string `json:"ItemDim3Code"` ItemDim3Code string `json:"ItemDim3Code"`
} }
// OrderProductionBarcodeValidation describes one barcode validation finding
// for a production-order item: a machine-readable code plus the barcode and
// item-variant fields the finding refers to.
//
// NOTE(review): JSON tag casing is mixed — lowercase "code"/"message"/
// "barcode" vs PascalCase item fields; confirm the UI expects exactly this
// shape before unifying.
type OrderProductionBarcodeValidation struct {
	Code            string `json:"code"`    // machine-readable result code
	Message         string `json:"message"` // human-readable explanation
	Barcode         string `json:"barcode,omitempty"`
	BarcodeTypeCode string `json:"barcodeTypeCode,omitempty"`
	ItemTypeCode    int16  `json:"ItemTypeCode,omitempty"`
	ItemCode        string `json:"ItemCode,omitempty"`
	ColorCode       string `json:"ColorCode,omitempty"`
	ItemDim1Code    string `json:"ItemDim1Code,omitempty"`
	ItemDim2Code    string `json:"ItemDim2Code,omitempty"`
	ItemDim3Code    string `json:"ItemDim3Code,omitempty"`
}
type OrderProductionCdItemDraft struct { type OrderProductionCdItemDraft struct {
ItemTypeCode int16 `json:"ItemTypeCode"` ItemTypeCode int16 `json:"ItemTypeCode"`
ItemCode string `json:"ItemCode"` ItemCode string `json:"ItemCode"`

View File

@@ -0,0 +1,18 @@
package models
// ProductPricing is a read model for product pricing data (appears to back
// the /api/pricing/products endpoint). Dates are carried as strings in
// whatever format the source query produces.
//
// NOTE(review): the Turkish field names (AskiliYan, Kategori, UrunIlkGrubu,
// UrunAnaGrubu, UrunAltGrubu, Icerik, Karisim, Marka) mirror upstream ERP
// column labels; their exact semantics come from the source query — confirm
// there before documenting further.
type ProductPricing struct {
	ProductCode     string  `json:"ProductCode"`
	CostPrice       float64 `json:"CostPrice"`
	StockQty        float64 `json:"StockQty"`
	StockEntryDate  string  `json:"StockEntryDate"`  // date as string; format set upstream
	LastPricingDate string  `json:"LastPricingDate"` // date as string; format set upstream
	AskiliYan       string  `json:"AskiliYan"`
	Kategori        string  `json:"Kategori"`
	UrunIlkGrubu    string  `json:"UrunIlkGrubu"`
	UrunAnaGrubu    string  `json:"UrunAnaGrubu"`
	UrunAltGrubu    string  `json:"UrunAltGrubu"`
	Icerik          string  `json:"Icerik"`
	Karisim         string  `json:"Karisim"`
	Marka           string  `json:"Marka"`
	BrandGroupSec   string  `json:"BrandGroupSec"`
}

View File

@@ -7,3 +7,9 @@ type ProductAttributeOption struct {
AttributeCode string `json:"attribute_code"` AttributeCode string `json:"attribute_code"`
AttributeDescription string `json:"attribute_description"` AttributeDescription string `json:"attribute_description"`
} }
type ProductItemAttributeValue struct {
ItemTypeCode int16 `json:"item_type_code"`
AttributeTypeCode int `json:"attribute_type_code"`
AttributeCode string `json:"attribute_code"`
}

16
svc/models/translator.go Normal file
View File

@@ -0,0 +1,16 @@
package models
import "time"
// TranslatorRow mirrors one row of the mk_translator table: the translation
// of a Turkish source text into LangCode, addressed by TKey.
type TranslatorRow struct {
	ID             int64     `json:"id"`
	TKey           string    `json:"t_key"`          // lookup key for the source text
	LangCode       string    `json:"lang_code"`      // target language of TranslatedText
	SourceType     string    `json:"source_type"`    // origin category of the source text
	SourceTextTR   string    `json:"source_text_tr"` // original Turkish text
	TranslatedText string    `json:"translated_text"`
	IsManual       bool      `json:"is_manual"` // presumably true for human-entered translations — confirm in write path
	Status         string    `json:"status"`
	Provider       string    `json:"provider"` // presumably the translation backend used — confirm in write path
	UpdatedAt      time.Time `json:"updated_at"`
}

View File

@@ -0,0 +1,75 @@
package queries
import (
"database/sql"
"fmt"
"strings"
"time"
)
// BulkUpdateOrderLineDueDate sets a single due date (YYYY-MM-DD) on every
// open line of an order and mirrors it onto the header's AverageDueDate,
// in one transaction. It returns (lines updated, header row touched,
// error). Validation error messages are Turkish, matching the API's
// user-facing strings.
func BulkUpdateOrderLineDueDate(mssql *sql.DB, orderHeaderID string, dueDate string, username string) (int64, bool, error) {
	orderID := strings.TrimSpace(orderHeaderID)
	dateText := strings.TrimSpace(dueDate)
	user := strings.TrimSpace(username)
	if orderID == "" {
		return 0, false, fmt.Errorf("orderHeaderID zorunlu")
	}
	if dateText == "" {
		return 0, false, fmt.Errorf("dueDate zorunlu")
	}
	// Require a strict ISO date before handing the text to CAST(... AS DATETIME).
	if _, err := time.Parse("2006-01-02", dateText); err != nil {
		return 0, false, fmt.Errorf("gecersiz tarih: %w", err)
	}
	if user == "" {
		user = "system" // audit fallback when no username is supplied
	}
	tx, err := mssql.Begin()
	if err != nil {
		return 0, false, err
	}
	// Safe even after Commit: Rollback then returns ErrTxDone, which is ignored.
	defer tx.Rollback()
	// Only lines that are not closed (ISNULL(IsClosed,0)=0) are updated.
	lineRes, err := tx.Exec(`
UPDATE BAGGI_V3.dbo.trOrderLine
SET
DeliveryDate = CAST(@p1 AS DATETIME),
PlannedDateOfLading = CAST(@p1 AS DATETIME),
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3
AND ISNULL(IsClosed, 0) = 0
`, dateText, user, orderID)
	if err != nil {
		return 0, false, err
	}
	lineCount, err := lineRes.RowsAffected()
	if err != nil {
		return 0, false, err
	}
	// Mirror the same date onto the header so list views stay consistent.
	headerRes, err := tx.Exec(`
UPDATE BAGGI_V3.dbo.trOrderHeader
SET
AverageDueDate = CAST(@p1 AS DATETIME),
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3
`, dateText, user, orderID)
	if err != nil {
		return 0, false, err
	}
	headerCount, err := headerRes.RowsAffected()
	if err != nil {
		return 0, false, err
	}
	if err := tx.Commit(); err != nil {
		return 0, false, err
	}
	return lineCount, headerCount > 0, nil
}

View File

@@ -1419,9 +1419,12 @@ UPDATE BAGGI_V3.dbo.trOrderHeader SET
DocCurrencyCode=@p6, DocCurrencyCode=@p6,
LocalCurrencyCode=@p7, LocalCurrencyCode=@p7,
ExchangeRate=@p8, ExchangeRate=@p8,
LastUpdatedUserName=@p9, IsCreditableConfirmed=@p9,
LastUpdatedDate=@p10 CreditableConfirmedUser=@p10,
WHERE OrderHeaderID=@p11 CreditableConfirmedDate=@p11,
LastUpdatedUserName=@p12,
LastUpdatedDate=@p13
WHERE OrderHeaderID=@p14
`, `,
nullableDateString(header.OrderDate), nullableDateString(header.OrderDate),
nullableTimeString(header.OrderTime), nullableTimeString(header.OrderTime),
@@ -1431,6 +1434,9 @@ WHERE OrderHeaderID=@p11
nullableString(header.DocCurrencyCode, "TRY"), nullableString(header.DocCurrencyCode, "TRY"),
nullableString(header.LocalCurrencyCode, "TRY"), nullableString(header.LocalCurrencyCode, "TRY"),
nullableFloat64(header.ExchangeRate, exRate), nullableFloat64(header.ExchangeRate, exRate),
true,
nullableString(header.CreditableConfirmedUser, v3User),
nullableDateTime(header.CreditableConfirmedDate, now),
v3User, v3User,
now, now,
header.OrderHeaderID, header.OrderHeaderID,

View File

@@ -4,6 +4,7 @@ import (
"database/sql" "database/sql"
"fmt" "fmt"
"log" "log"
"sort"
"strconv" "strconv"
"strings" "strings"
"time" "time"
@@ -25,14 +26,24 @@ SELECT
ISNULL(l.ItemCode,'') AS OldItemCode, ISNULL(l.ItemCode,'') AS OldItemCode,
ISNULL(l.ColorCode,'') AS OldColor, ISNULL(l.ColorCode,'') AS OldColor,
ISNULL((
SELECT TOP 1 LTRIM(RTRIM(cd.ColorDescription))
FROM dbo.cdColorDesc cd WITH (NOLOCK)
WHERE cd.ColorCode = l.ColorCode
AND cd.LangCode = N'TR'
), '') AS OldColorDescription,
ISNULL(l.ItemDim2Code,'') AS OldDim2, ISNULL(l.ItemDim2Code,'') AS OldDim2,
ISNULL(l.LineDescription,'') AS OldDesc, ISNULL(l.LineDescription,'') AS OldDesc,
CAST(ISNULL(l.Qty1, 0) AS FLOAT) AS OldQty,
CAST('' AS NVARCHAR(60)) AS NewItemCode, CAST('' AS NVARCHAR(60)) AS NewItemCode,
CAST('' AS NVARCHAR(30)) AS NewColor, CAST('' AS NVARCHAR(30)) AS NewColor,
CAST('' AS NVARCHAR(30)) AS NewDim2, CAST('' AS NVARCHAR(30)) AS NewDim2,
CAST('' AS NVARCHAR(250)) AS NewDesc, CAST('' AS NVARCHAR(250)) AS NewDesc,
CONVERT(NVARCHAR(10), l.DeliveryDate, 126) AS OldDueDate,
CONVERT(NVARCHAR(10), l.DeliveryDate, 126) AS NewDueDate,
CAST(0 AS bit) AS IsVariantMissing CAST(0 AS bit) AS IsVariantMissing
FROM dbo.trOrderLine l FROM dbo.trOrderLine l
WHERE l.OrderHeaderID = @p1 WHERE l.OrderHeaderID = @p1
@@ -210,7 +221,9 @@ func InsertMissingVariantsTx(
username string, username string,
cdItemByCode map[string]models.OrderProductionCdItemDraft, cdItemByCode map[string]models.OrderProductionCdItemDraft,
) (int64, error) { ) (int64, error) {
start := time.Now()
if len(missing) == 0 { if len(missing) == 0 {
log.Printf("[InsertMissingVariantsTx] missing=0 inserted=0 duration_ms=0")
return 0, nil return 0, nil
} }
@@ -267,7 +280,7 @@ func InsertMissingVariantsTx(
query := fmt.Sprintf(` query := fmt.Sprintf(`
SET NOCOUNT ON; SET NOCOUNT ON;
WITH Missing(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code) AS ( ;WITH Missing(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code) AS (
SELECT * SELECT *
FROM (VALUES %s) AS v(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code) FROM (VALUES %s) AS v(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
) )
@@ -332,6 +345,8 @@ WHERE pv.ItemCode IS NULL;
if rows, rowsErr := res.RowsAffected(); rowsErr == nil { if rows, rowsErr := res.RowsAffected(); rowsErr == nil {
inserted += rows inserted += rows
} }
log.Printf("[InsertMissingVariantsTx] missing=%d unique=%d ensuredItems=%d inserted=%d duration_ms=%d",
len(missing), len(uniqueVariants), len(ensuredItems), inserted, time.Since(start).Milliseconds())
return inserted, nil return inserted, nil
} }
@@ -518,18 +533,25 @@ func UpdateOrderLinesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderPr
chunk := lines[i:end] chunk := lines[i:end]
values := make([]string, 0, len(chunk)) values := make([]string, 0, len(chunk))
args := make([]any, 0, len(chunk)*5+2) args := make([]any, 0, len(chunk)*8+2)
paramPos := 1 paramPos := 1
for _, line := range chunk { for _, line := range chunk {
values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3, paramPos+4)) var itemDim1 any
if line.ItemDim1Code != nil {
itemDim1 = strings.TrimSpace(*line.ItemDim1Code)
}
values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d,@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3, paramPos+4, paramPos+5, paramPos+6, paramPos+7))
args = append(args, args = append(args,
strings.TrimSpace(line.OrderLineID), strings.TrimSpace(line.OrderLineID),
line.NewItemCode, line.NewItemCode,
line.NewColor, line.NewColor,
itemDim1,
line.NewDim2, line.NewDim2,
line.NewDesc, line.NewDesc,
line.OldDueDate,
line.NewDueDate,
) )
paramPos += 5 paramPos += 8
} }
orderHeaderParam := paramPos orderHeaderParam := paramPos
@@ -538,42 +560,971 @@ func UpdateOrderLinesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderPr
query := fmt.Sprintf(` query := fmt.Sprintf(`
SET NOCOUNT ON; SET NOCOUNT ON;
WITH src (OrderLineID, NewItemCode, NewColor, NewDim2, NewDesc) AS ( DECLARE @updated TABLE (OrderLineID UNIQUEIDENTIFIER);
;WITH src (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate) AS (
SELECT * SELECT *
FROM (VALUES %s) AS v (OrderLineID, NewItemCode, NewColor, NewDim2, NewDesc) FROM (VALUES %s) AS v (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate)
) )
UPDATE l UPDATE l
SET SET
l.ItemCode = s.NewItemCode, l.ItemCode = s.NewItemCode,
l.ColorCode = s.NewColor, l.ColorCode = s.NewColor,
l.ItemDim1Code = COALESCE(s.ItemDim1Code, l.ItemDim1Code),
l.ItemDim2Code = s.NewDim2, l.ItemDim2Code = s.NewDim2,
l.LineDescription = COALESCE(NULLIF(s.NewDesc,''), l.LineDescription), l.LineDescription = COALESCE(NULLIF(s.NewDesc,''), l.LineDescription),
l.DeliveryDate = CASE WHEN ISDATE(s.NewDueDate) = 1 THEN CAST(s.NewDueDate AS DATETIME) ELSE l.DeliveryDate END,
l.LastUpdatedUserName = @p%d, l.LastUpdatedUserName = @p%d,
l.LastUpdatedDate = GETDATE() l.LastUpdatedDate = GETDATE()
OUTPUT inserted.OrderLineID INTO @updated(OrderLineID)
FROM dbo.trOrderLine l FROM dbo.trOrderLine l
JOIN src s JOIN src s
ON CAST(l.OrderLineID AS NVARCHAR(50)) = s.OrderLineID ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
WHERE l.OrderHeaderID = @p%d; WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d);
SELECT COUNT(1) AS UpdatedCount FROM @updated;
`, strings.Join(values, ","), usernameParam, orderHeaderParam) `, strings.Join(values, ","), usernameParam, orderHeaderParam)
chunkStart := time.Now() chunkStart := time.Now()
res, execErr := tx.Exec(query, args...) var chunkUpdated int64
execErr := tx.QueryRow(query, args...).Scan(&chunkUpdated)
if execErr != nil { if execErr != nil {
log.Printf("[UpdateOrderLinesTx] ERROR orderHeaderID=%s chunk=%d-%d err=%v", orderHeaderID, i, end, execErr)
return updated, fmt.Errorf("update lines chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), execErr) return updated, fmt.Errorf("update lines chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), execErr)
} }
log.Printf("[UpdateOrderLinesTx] orderHeaderID=%s chunk=%d-%d duration_ms=%d", orderHeaderID, i, end, time.Since(chunkStart).Milliseconds()) log.Printf("[UpdateOrderLinesTx] orderHeaderID=%s chunk=%d-%d updated=%d duration_ms=%d", orderHeaderID, i, end, chunkUpdated, time.Since(chunkStart).Milliseconds())
updated += chunkUpdated
if rows, rowsErr := res.RowsAffected(); rowsErr == nil {
updated += rows
}
} }
return updated, nil return updated, nil
} }
func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttributeRow, username string) (int64, error) { func VerifyOrderLineUpdatesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine) (int64, []string, error) {
if len(attrs) == 0 { if len(lines) == 0 {
return 0, nil, nil
}
const chunkSize = 300
var mismatchCount int64
samples := make([]string, 0, 5)
for i := 0; i < len(lines); i += chunkSize {
end := i + chunkSize
if end > len(lines) {
end = len(lines)
}
chunk := lines[i:end]
values := make([]string, 0, len(chunk))
args := make([]any, 0, len(chunk)*4+1)
paramPos := 1
for _, line := range chunk {
values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3))
args = append(args,
strings.TrimSpace(line.OrderLineID),
strings.ToUpper(strings.TrimSpace(line.NewItemCode)),
strings.ToUpper(strings.TrimSpace(line.NewColor)),
strings.ToUpper(strings.TrimSpace(line.NewDim2)),
)
paramPos += 4
}
orderHeaderParam := paramPos
args = append(args, orderHeaderID)
query := fmt.Sprintf(`
SET NOCOUNT ON;
WITH src (OrderLineID, NewItemCode, NewColor, NewDim2) AS (
SELECT *
FROM (VALUES %s) v(OrderLineID, NewItemCode, NewColor, NewDim2)
)
SELECT
s.OrderLineID,
ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') AS ActualItemCode,
ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') AS ActualColorCode,
ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') AS ActualDim2Code,
s.NewItemCode,
s.NewColor,
s.NewDim2
FROM src s
JOIN dbo.trOrderLine l
ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d)
AND (
ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') <> s.NewItemCode OR
ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') <> s.NewColor OR
ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') <> s.NewDim2
);
`, strings.Join(values, ","), orderHeaderParam)
rows, err := tx.Query(query, args...)
if err != nil {
return mismatchCount, samples, err
}
for rows.Next() {
var lineID, actualItem, actualColor, actualDim2, expectedItem, expectedColor, expectedDim2 string
if err := rows.Scan(&lineID, &actualItem, &actualColor, &actualDim2, &expectedItem, &expectedColor, &expectedDim2); err != nil {
rows.Close()
return mismatchCount, samples, err
}
mismatchCount++
if len(samples) < 5 {
samples = append(samples, fmt.Sprintf(
"lineID=%s expected=(%s,%s,%s) actual=(%s,%s,%s)",
lineID, expectedItem, expectedColor, expectedDim2, actualItem, actualColor, actualDim2,
))
}
}
if err := rows.Err(); err != nil {
rows.Close()
return mismatchCount, samples, err
}
rows.Close()
}
return mismatchCount, samples, nil
}
// UpdateOrderHeaderAverageDueDateTx writes the header's AverageDueDate
// inside the given transaction. A nil pointer means "leave unchanged";
// an empty (post-trim) value clears the column to NULL; any other value
// must parse as YYYY-MM-DD or an error is returned before touching the
// row. The header's audit columns are stamped on every update.
func UpdateOrderHeaderAverageDueDateTx(tx *sql.Tx, orderHeaderID string, averageDueDate *string, username string) error {
	if averageDueDate == nil {
		return nil
	}
	trimmed := strings.TrimSpace(*averageDueDate)
	if trimmed != "" {
		if _, parseErr := time.Parse("2006-01-02", trimmed); parseErr != nil {
			return fmt.Errorf("invalid header average due date %q: %w", trimmed, parseErr)
		}
	}
	_, execErr := tx.Exec(`
UPDATE dbo.trOrderHeader
SET
AverageDueDate = CASE WHEN @p1 = '' THEN NULL ELSE CAST(@p1 AS DATETIME) END,
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3;
`, trimmed, username, orderHeaderID)
	return execErr
}
// TouchOrderHeaderTx stamps LastUpdatedUserName/LastUpdatedDate on the
// order header inside the transaction and returns the number of rows
// affected (0 when the header does not exist).
func TouchOrderHeaderTx(tx *sql.Tx, orderHeaderID string, username string) (int64, error) {
	res, err := tx.Exec(`
UPDATE dbo.trOrderHeader
SET
LastUpdatedUserName = @p1,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p2;
`, username, orderHeaderID)
	if err != nil {
		return 0, err
	}
	rows, rowsErr := res.RowsAffected()
	if rowsErr != nil {
		// Fix: the previous version returned (0, nil) here, silently
		// swallowing the driver error; propagate it so callers can tell
		// "header missing" apart from "driver failed".
		return 0, rowsErr
	}
	return rows, nil
}
// sqlQueryRower is the minimal query interface shared by *sql.DB and
// *sql.Tx, letting the barcode helpers below run either inside or outside
// a transaction.
type sqlQueryRower interface {
QueryRow(query string, args ...any) *sql.Row
}
// plannedProductionBarcode pairs a barcode (and its type) with the full
// item-variant key it would be assigned to.
// NOTE(review): not referenced by any code visible in this file region —
// confirm it is used elsewhere before removing.
type plannedProductionBarcode struct {
Barcode string
BarcodeTypeCode string
ItemTypeCode int16
ItemCode string
ColorCode string
ItemDim1Code string
ItemDim2Code string
ItemDim3Code string
}
// barcodeTypeExists reports whether a cdBarcodeType row exists for the
// trimmed barcodeTypeCode. sql.ErrNoRows is translated to (false, nil);
// any other query error is returned as-is.
func barcodeTypeExists(q sqlQueryRower, barcodeTypeCode string) (bool, error) {
	var one int
	scanErr := q.QueryRow(`
SELECT TOP 1 1
FROM dbo.cdBarcodeType
WHERE BarcodeTypeCode = @p1
`, strings.TrimSpace(barcodeTypeCode)).Scan(&one)
	switch {
	case scanErr == sql.ErrNoRows:
		return false, nil
	case scanErr != nil:
		return false, scanErr
	default:
		return true, nil
	}
}
// barcodeExists reports whether any prItemBarcode row carries the trimmed
// barcode. The row is read under UPDLOCK/HOLDLOCK so a concurrent
// allocator cannot slip the same barcode in before the caller acts.
// sql.ErrNoRows maps to (false, nil).
func barcodeExists(q sqlQueryRower, barcode string) (bool, error) {
	var one int
	scanErr := q.QueryRow(`
SELECT TOP 1 1
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
WHERE Barcode = @p1
`, strings.TrimSpace(barcode)).Scan(&one)
	switch {
	case scanErr == sql.ErrNoRows:
		return false, nil
	case scanErr != nil:
		return false, scanErr
	default:
		return true, nil
	}
}
// existingVariantBarcode looks up an already-assigned barcode for the
// exact variant (type/item/color/dim1/dim2/dim3, unit 'AD') under the
// given barcode type, preferring the numerically largest barcode, then
// the lexicographically largest. Returns ("", false, nil) when no row
// exists. Rows are read under UPDLOCK/HOLDLOCK so a concurrent allocator
// cannot race the caller inside a transaction.
func existingVariantBarcode(
q sqlQueryRower,
barcodeTypeCode string,
itemTypeCode int16,
itemCode string,
colorCode string,
dim1 string,
dim2 string,
dim3 string,
) (string, bool, error) {
	var barcode string
	err := q.QueryRow(`
SELECT TOP 1 LTRIM(RTRIM(ISNULL(Barcode, '')))
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
WHERE BarcodeTypeCode = @p1
AND ItemTypeCode = @p2
AND ISNULL(LTRIM(RTRIM(ItemCode)), '') = @p3
AND ISNULL(LTRIM(RTRIM(ColorCode)), '') = @p4
AND ISNULL(LTRIM(RTRIM(ItemDim1Code)), '') = @p5
AND ISNULL(LTRIM(RTRIM(ItemDim2Code)), '') = @p6
AND ISNULL(LTRIM(RTRIM(ItemDim3Code)), '') = @p7
AND ISNULL(LTRIM(RTRIM(UnitOfMeasureCode)), '') = 'AD'
ORDER BY
CASE
WHEN ISNUMERIC(Barcode) = 1
THEN CAST(Barcode AS BIGINT)
ELSE 0
END DESC,
Barcode DESC
`,
		strings.TrimSpace(barcodeTypeCode),
		itemTypeCode,
		strings.TrimSpace(itemCode),
		strings.TrimSpace(colorCode),
		strings.TrimSpace(dim1),
		strings.TrimSpace(dim2),
		strings.TrimSpace(dim3),
	).Scan(&barcode)
	if err == sql.ErrNoRows {
		return "", false, nil
	}
	if err != nil {
		return "", false, err
	}
	return strings.TrimSpace(barcode), true, nil
}
// maxNumericBarcode returns the largest Barcode that ISNUMERIC accepts
// (0 when none exist), reading prItemBarcode under UPDLOCK/HOLDLOCK so
// the seed cannot move while the caller allocates the next numbers.
// NOTE(review): ISNUMERIC also accepts forms like '1e5' or '$1', which
// would make the CAST to BIGINT fail — assumed absent from the data.
func maxNumericBarcode(q sqlQueryRower) (int64, error) {
	var maxBarcode int64
	err := q.QueryRow(`
SELECT ISNULL(MAX(
CASE
WHEN ISNUMERIC(Barcode) = 1
THEN CAST(Barcode AS BIGINT)
ELSE NULL
END
), 0)
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
`).Scan(&maxBarcode)
	return maxBarcode, err
}
// ValidateProductionBarcodePlan dry-runs barcode allocation for the given
// missing variants without writing anything: it verifies the barcode type
// exists, orders variants deterministically, and for each variant that
// has no existing barcode plans the next sequential number above the
// current numeric max, reporting planned barcodes that are already in use
// (or duplicated within the plan) as validation entries. A non-empty
// result means the real allocation should not proceed as-is.
func ValidateProductionBarcodePlan(q sqlQueryRower, variants []models.OrderProductionMissingVariant, barcodeTypeCode string) ([]models.OrderProductionBarcodeValidation, error) {
	typeCode := strings.ToUpper(strings.TrimSpace(barcodeTypeCode))
	if len(variants) == 0 {
		return nil, nil
	}
	validations := make([]models.OrderProductionBarcodeValidation, 0)
	typeExists, err := barcodeTypeExists(q, typeCode)
	if err != nil {
		return nil, err
	}
	if !typeExists {
		// Unknown barcode type invalidates the whole plan; report once and stop.
		validations = append(validations, models.OrderProductionBarcodeValidation{
			Code: "invalid_barcode_type",
			Message: fmt.Sprintf("Barkod tipi bulunamadi: %s", typeCode),
			BarcodeTypeCode: typeCode,
		})
		return validations, nil
	}
	// Sort a copy so planned numbers are assigned in a deterministic order
	// regardless of the caller's slice order.
	sorted := append([]models.OrderProductionMissingVariant(nil), variants...)
	sort.Slice(sorted, func(i, j int) bool {
		left := sorted[i]
		right := sorted[j]
		leftKey := fmt.Sprintf("%05d|%s|%s|%s|%s|%s", left.ItemTypeCode, left.ItemCode, left.ColorCode, left.ItemDim1Code, left.ItemDim2Code, left.ItemDim3Code)
		rightKey := fmt.Sprintf("%05d|%s|%s|%s|%s|%s", right.ItemTypeCode, right.ItemCode, right.ColorCode, right.ItemDim1Code, right.ItemDim2Code, right.ItemDim3Code)
		return leftKey < rightKey
	})
	maxBarcode, err := maxNumericBarcode(q)
	if err != nil {
		return nil, err
	}
	nextOffset := int64(0)
	planned := make(map[string]struct{}, len(sorted))
	for _, variant := range sorted {
		existingBarcode, exists, err := existingVariantBarcode(q, typeCode, variant.ItemTypeCode, variant.ItemCode, variant.ColorCode, variant.ItemDim1Code, variant.ItemDim2Code, variant.ItemDim3Code)
		if err != nil {
			return nil, err
		}
		if exists && existingBarcode != "" {
			// Variant already has a barcode; nothing to plan for it.
			continue
		}
		nextOffset++
		barcode := strconv.FormatInt(maxBarcode+nextOffset, 10)
		// NOTE(review): offsets strictly increase, so this duplicate-in-plan
		// branch looks unreachable; kept as a safety net.
		if _, duplicated := planned[barcode]; duplicated {
			validations = append(validations, models.OrderProductionBarcodeValidation{
				Code: "barcode_duplicate_in_plan",
				Message: fmt.Sprintf("Planlanan barkod ayni istekte birden fazla kez olusuyor: %s", barcode),
				Barcode: barcode,
				BarcodeTypeCode: typeCode,
				ItemTypeCode: variant.ItemTypeCode,
				ItemCode: strings.TrimSpace(variant.ItemCode),
				ColorCode: strings.TrimSpace(variant.ColorCode),
				ItemDim1Code: strings.TrimSpace(variant.ItemDim1Code),
				ItemDim2Code: strings.TrimSpace(variant.ItemDim2Code),
				ItemDim3Code: strings.TrimSpace(variant.ItemDim3Code),
			})
			continue
		}
		planned[barcode] = struct{}{}
		inUse, err := barcodeExists(q, barcode)
		if err != nil {
			return nil, err
		}
		if inUse {
			validations = append(validations, models.OrderProductionBarcodeValidation{
				Code: "barcode_in_use",
				Message: fmt.Sprintf("Barkod daha once kullanilmis: %s (%s / %s / %s / %s)", barcode, strings.TrimSpace(variant.ItemCode), strings.TrimSpace(variant.ColorCode), strings.TrimSpace(variant.ItemDim1Code), strings.TrimSpace(variant.ItemDim2Code)),
				Barcode: barcode,
				BarcodeTypeCode: typeCode,
				ItemTypeCode: variant.ItemTypeCode,
				ItemCode: strings.TrimSpace(variant.ItemCode),
				ColorCode: strings.TrimSpace(variant.ColorCode),
				ItemDim1Code: strings.TrimSpace(variant.ItemDim1Code),
				ItemDim2Code: strings.TrimSpace(variant.ItemDim2Code),
				ItemDim3Code: strings.TrimSpace(variant.ItemDim3Code),
			})
		}
	}
	return validations, nil
}
// InsertItemBarcodesTx allocates 'BAGGI3' barcodes for every distinct
// item variant referenced by the given order lines, inside the caller's
// transaction. Per 200-ID chunk it first attempts one set-based INSERT
// (sequential numbers via ROW_NUMBER over the current numeric max,
// floored at 36999999); when the batch fails it falls back to per-line
// inserts, skipping duplicate-key errors. Variants that already have any
// barcode row are excluded by a NOT EXISTS guard. Returns rows inserted.
func InsertItemBarcodesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
	start := time.Now()
	if len(lines) == 0 {
		log.Printf("[InsertItemBarcodesTx] lines=0 inserted=0 duration_ms=0")
		return 0, nil
	}
	// De-duplicate order-line IDs while preserving first-seen order.
	lineIDs := make([]string, 0, len(lines))
	seen := make(map[string]struct{}, len(lines))
	for _, line := range lines {
		lineID := strings.TrimSpace(line.OrderLineID)
		if lineID == "" {
			continue
		}
		if _, ok := seen[lineID]; ok {
			continue
		}
		seen[lineID] = struct{}{}
		lineIDs = append(lineIDs, lineID)
	}
	if len(lineIDs) == 0 {
		log.Printf("[InsertItemBarcodesTx] uniqueLineIDs=0 inserted=0")
		return 0, nil
	}
	var inserted int64
	// Fallback query for a single order line.
	// NOTE(review): this string is NOT passed through fmt.Sprintf, so the
	// doubled %% in the LIKE patterns reaches SQL Server literally as
	// '%%[^0-9]%%'. That happens to be equivalent to '%[^0-9]%' in T-SQL
	// LIKE (an extra % matches the empty string), but it looks like a
	// leftover from the Sprintf-built batch query below — confirm.
	singleLineQuery := `
SET NOCOUNT ON;
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
CAST(seed.MaxBarcode + 1 AS NVARCHAR(50)),
'BAGGI3',
src.ItemTypeCode,
src.ItemCode,
src.ColorCode,
src.ItemDim1Code,
src.ItemDim2Code,
src.ItemDim3Code,
'AD',
1,
@p3,
GETDATE(),
@p3,
GETDATE(),
NEWID()
FROM (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
FROM dbo.trOrderLine l
WHERE l.OrderHeaderID = @p2
AND CAST(l.OrderLineID AS NVARCHAR(50)) = @p1
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
) src
CROSS JOIN (
SELECT
CASE
WHEN ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0) < 36999999
THEN 36999999
ELSE ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0)
END AS MaxBarcode
FROM dbo.prItemBarcode
WHERE BarcodeTypeCode = 'BAGGI3'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
) seed
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = src.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
);
`
	// execSingle inserts barcodes for one order line; duplicate-key errors
	// are logged and skipped so the fallback pass can continue.
	execSingle := func(globalIndex int, lineID string) error {
		lineStart := time.Now()
		res, err := tx.Exec(singleLineQuery, lineID, orderHeaderID, username)
		if err != nil {
			if isDuplicateBarcodeInsertErr(err) {
				log.Printf("[InsertItemBarcodesTx] skip duplicate lineIndex=%d lineID=%s err=%v", globalIndex, lineID, err)
				return nil
			}
			return fmt.Errorf("upsert item barcodes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", globalIndex, globalIndex+1, time.Since(lineStart).Milliseconds(), err)
		}
		rows, _ := res.RowsAffected()
		inserted += rows
		log.Printf(
			"[InsertItemBarcodesTx] lineIndex=%d lineID=%s inserted=%d cumulative=%d duration_ms=%d",
			globalIndex,
			lineID,
			rows,
			inserted,
			time.Since(lineStart).Milliseconds(),
		)
		return nil
	}
	// Batch pass: one set-based insert per 200 line IDs.
	const chunkSize = 200
	for i := 0; i < len(lineIDs); i += chunkSize {
		end := i + chunkSize
		if end > len(lineIDs) {
			end = len(lineIDs)
		}
		chunk := lineIDs[i:end]
		values := make([]string, 0, len(chunk))
		args := make([]any, 0, len(chunk)+2)
		paramPos := 1
		for _, lineID := range chunk {
			values = append(values, fmt.Sprintf("(@p%d)", paramPos))
			args = append(args, lineID)
			paramPos++
		}
		orderHeaderParam := paramPos
		usernameParam := paramPos + 1
		args = append(args, orderHeaderID, username)
		batchQuery := fmt.Sprintf(`
SET NOCOUNT ON;
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
CAST(seed.MaxBarcode + ROW_NUMBER() OVER (
ORDER BY src.ItemTypeCode, src.ItemCode, src.ColorCode, src.ItemDim1Code, src.ItemDim2Code, src.ItemDim3Code
) AS NVARCHAR(50)),
'BAGGI3',
src.ItemTypeCode,
src.ItemCode,
src.ColorCode,
src.ItemDim1Code,
src.ItemDim2Code,
src.ItemDim3Code,
'AD',
1,
@p%d,
GETDATE(),
@p%d,
GETDATE(),
NEWID()
FROM (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
FROM dbo.trOrderLine l
JOIN (VALUES %s) ids(OrderLineID)
ON CAST(l.OrderLineID AS NVARCHAR(50)) = ids.OrderLineID
WHERE l.OrderHeaderID = @p%d
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
) src
CROSS JOIN (
SELECT
CASE
WHEN ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0) < 36999999
THEN 36999999
ELSE ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0)
END AS MaxBarcode
FROM dbo.prItemBarcode
WHERE BarcodeTypeCode = 'BAGGI3'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
) seed
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = src.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
);
`, usernameParam, usernameParam, strings.Join(values, ","), orderHeaderParam)
		chunkStart := time.Now()
		res, err := tx.Exec(batchQuery, args...)
		if err == nil {
			rows, _ := res.RowsAffected()
			inserted += rows
			log.Printf(
				"[InsertItemBarcodesTx] batch=%d-%d inserted=%d cumulative=%d duration_ms=%d",
				i,
				end,
				rows,
				inserted,
				time.Since(chunkStart).Milliseconds(),
			)
			continue
		}
		// Batch failed: retry this chunk line-by-line so one bad line does
		// not abort the whole chunk.
		log.Printf("[InsertItemBarcodesTx] batch fallback=%d-%d err=%v", i, end, err)
		for j, lineID := range chunk {
			if lineErr := execSingle(i+j, lineID); lineErr != nil {
				log.Printf("[InsertItemBarcodesTx] ERROR lineIndex=%d lineID=%s err=%v", i+j, lineID, lineErr)
				return inserted, lineErr
			}
		}
	}
	log.Printf(
		"[InsertItemBarcodesTx] lines=%d unique=%d inserted=%d duration_ms=%d",
		len(lines),
		len(lineIDs),
		inserted,
		time.Since(start).Milliseconds(),
	)
	return inserted, nil
}
// InsertItemBarcodesByTargetsTx inserts one 'BAGGI3' barcode per unique
// target variant (key: itemType|item|color|dim1|dim2|dim3, upper-cased
// and trimmed). It seeds from the table-locked numeric max of
// prItemBarcode and, when the table exists, tbStokBarkodu (floored at
// 36999999), then allocates sequential candidates, re-checking each for
// collisions and retrying on duplicate-key inserts (up to 2000 attempts
// per variant). Variants that already have any barcode are skipped.
// Returns the number of rows inserted.
func InsertItemBarcodesByTargetsTx(tx *sql.Tx, targets []models.OrderProductionMissingVariant, username string) (int64, error) {
	start := time.Now()
	if len(targets) == 0 {
		log.Printf("[InsertItemBarcodesByTargetsTx] targets=0 inserted=0 duration_ms=0")
		return 0, nil
	}
	// Normalize and de-duplicate targets; empty item codes are dropped.
	uniqueTargets := make([]models.OrderProductionMissingVariant, 0, len(targets))
	seen := make(map[string]struct{}, len(targets))
	for _, t := range targets {
		itemCode := strings.ToUpper(strings.TrimSpace(t.ItemCode))
		if itemCode == "" {
			continue
		}
		key := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
			t.ItemTypeCode,
			itemCode,
			strings.ToUpper(strings.TrimSpace(t.ColorCode)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim1Code)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim2Code)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim3Code)),
		)
		if _, ok := seen[key]; ok {
			continue
		}
		seen[key] = struct{}{}
		t.ItemCode = itemCode
		t.ColorCode = strings.ToUpper(strings.TrimSpace(t.ColorCode))
		t.ItemDim1Code = strings.ToUpper(strings.TrimSpace(t.ItemDim1Code))
		t.ItemDim2Code = strings.ToUpper(strings.TrimSpace(t.ItemDim2Code))
		t.ItemDim3Code = strings.ToUpper(strings.TrimSpace(t.ItemDim3Code))
		uniqueTargets = append(uniqueTargets, t)
	}
	if len(uniqueTargets) == 0 {
		log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=0 inserted=0 duration_ms=%d", len(targets), time.Since(start).Milliseconds())
		return 0, nil
	}
	if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/start"); err != nil {
		return 0, err
	}
	// Compute the barcode seed from prItemBarcode and (if present)
	// tbStokBarkodu under locks, to reduce the duplicate risk from the
	// trigger side.
	var maxBarcode int64
	maxPrQuery := `
SELECT ISNULL(MAX(v.BarcodeNum), 0)
FROM (
SELECT
CASE
WHEN LTRIM(RTRIM(ISNULL(pb.Barcode,''))) NOT LIKE '%[^0-9]%'
AND LEN(LTRIM(RTRIM(ISNULL(pb.Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(pb.Barcode,''))) AS BIGINT)
ELSE NULL
END AS BarcodeNum
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
WHERE pb.BarcodeTypeCode = 'BAGGI3'
) v
WHERE v.BarcodeNum IS NOT NULL;
`
	if err := tx.QueryRow(maxPrQuery).Scan(&maxBarcode); err != nil {
		return 0, fmt.Errorf("barcode seed query failed: %w", err)
	}
	// Legacy table may not exist on every database; detect it first.
	var hasTb int
	if err := tx.QueryRow(`SELECT CASE WHEN OBJECT_ID(N'dbo.tbStokBarkodu', N'U') IS NULL THEN 0 ELSE 1 END`).Scan(&hasTb); err != nil {
		return 0, fmt.Errorf("barcode seed object check failed: %w", err)
	}
	if hasTb == 1 {
		var maxTb int64
		maxTbQuery := `
SELECT ISNULL(MAX(v.BarcodeNum), 0)
FROM (
SELECT
CASE
WHEN LTRIM(RTRIM(ISNULL(sb.Barcode,''))) NOT LIKE '%[^0-9]%'
AND LEN(LTRIM(RTRIM(ISNULL(sb.Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(sb.Barcode,''))) AS BIGINT)
ELSE NULL
END AS BarcodeNum
FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
) v
WHERE v.BarcodeNum IS NOT NULL;
`
		if err := tx.QueryRow(maxTbQuery).Scan(&maxTb); err != nil {
			return 0, fmt.Errorf("barcode seed tbStokBarkodu query failed: %w", err)
		}
		if maxTb > maxBarcode {
			maxBarcode = maxTb
		}
	}
	// Never allocate below the historical floor.
	if maxBarcode < 36999999 {
		maxBarcode = 36999999
	}
	existsBarcodeQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
) THEN 1 ELSE 0 END;
`
	existsBarcodeWithTbQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
) OR EXISTS (
SELECT 1
FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(sb.Barcode,''))) = @p1
) THEN 1 ELSE 0 END;
`
	hasVariantBarcodeQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode b WITH (UPDLOCK, HOLDLOCK)
WHERE b.ItemTypeCode = @p1
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p2
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p3
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p4
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p5
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p6
) THEN 1 ELSE 0 END;
`
	// Insert is guarded by NOT EXISTS on the variant key, so a concurrent
	// insert of the same variant results in 0 rows affected, not an error.
	insertOneQuery := `
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
@p1,
'BAGGI3',
@p2,
@p3,
@p4,
@p5,
@p6,
@p7,
'AD',
1,
@p8,
GETDATE(),
@p8,
GETDATE(),
NEWID()
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = @p2
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p3
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p4
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p5
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p6
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p7
);
`
	var inserted int64
	for _, t := range uniqueTargets {
		if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/before_target"); err != nil {
			return inserted, err
		}
		// Skip variants that already carry any barcode row.
		var hasVariant int
		if err := tx.QueryRow(
			hasVariantBarcodeQuery,
			t.ItemTypeCode,
			t.ItemCode,
			t.ColorCode,
			t.ItemDim1Code,
			t.ItemDim2Code,
			t.ItemDim3Code,
		).Scan(&hasVariant); err != nil {
			return inserted, fmt.Errorf("variant barcode exists check failed: %w", err)
		}
		if hasVariant == 1 {
			continue
		}
		// Allocation loop: bump the candidate number until an unused one
		// inserts cleanly; bounded so a pathological state cannot spin forever.
		retry := 0
		for {
			retry++
			if retry > 2000 {
				return inserted, fmt.Errorf("barcode allocation exceeded retry limit item=%s color=%s dim1=%s", t.ItemCode, t.ColorCode, t.ItemDim1Code)
			}
			candidateNum := maxBarcode + 1
			candidate := strconv.FormatInt(candidateNum, 10)
			var exists int
			if hasTb == 1 {
				if err := tx.QueryRow(existsBarcodeWithTbQuery, candidate).Scan(&exists); err != nil {
					return inserted, fmt.Errorf("barcode exists check(tb) failed: %w", err)
				}
			} else {
				if err := tx.QueryRow(existsBarcodeQuery, candidate).Scan(&exists); err != nil {
					return inserted, fmt.Errorf("barcode exists check failed: %w", err)
				}
			}
			if exists == 1 {
				// Candidate taken: advance the seed and try the next number.
				maxBarcode = candidateNum
				continue
			}
			res, err := tx.Exec(
				insertOneQuery,
				candidate,
				t.ItemTypeCode,
				t.ItemCode,
				t.ColorCode,
				t.ItemDim1Code,
				t.ItemDim2Code,
				t.ItemDim3Code,
				username,
			)
			if err != nil {
				if isDuplicateBarcodeInsertErr(err) {
					// Lost a race on this number; advance and retry.
					maxBarcode = candidateNum
					continue
				}
				return inserted, fmt.Errorf("insert item barcode failed item=%s color=%s dim1=%s duration_ms=%d: %w",
					t.ItemCode, t.ColorCode, t.ItemDim1Code, time.Since(start).Milliseconds(), err)
			}
			affected, _ := res.RowsAffected()
			if affected > 0 {
				inserted += affected
				maxBarcode = candidateNum
			}
			break
		}
	}
	if txErr := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/after_batch"); txErr != nil {
		return inserted, txErr
	}
	log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=%d inserted=%d duration_ms=%d",
		len(targets), len(uniqueTargets), inserted, time.Since(start).Milliseconds())
	return inserted, nil
}
// ensureTxStillActive verifies that tx is non-nil and that SQL Server still
// has an open transaction on the session. "where" tags the error message with
// the call site so a dead transaction can be traced to the step that noticed it.
func ensureTxStillActive(tx *sql.Tx, where string) error {
	// Guard against a nil handle before touching the driver.
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	// @@TRANCOUNT reports how many transactions are open on this session;
	// zero means the server already rolled back (or committed) behind our back.
	var openTxCount int
	if scanErr := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&openTxCount); scanErr != nil {
		return fmt.Errorf("tx state query failed at %s: %w", where, scanErr)
	}
	if openTxCount > 0 {
		return nil
	}
	return fmt.Errorf("tx closed unexpectedly at %s (trancount=%d)", where, openTxCount)
}
func isDuplicateBarcodeInsertErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(err.Error())
if !strings.Contains(msg, "duplicate key") {
return false
}
if strings.Contains(msg, "tbstokbarkodu") {
return true
}
if strings.Contains(msg, "pritembarcode") {
return true
}
return strings.Contains(msg, "unique")
}
func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttributeRow, username string) (int64, error) {
start := time.Now()
if len(attrs) == 0 {
log.Printf("[UpsertItemAttributesTx] attrs=0 affected=0 duration_ms=0")
return 0, nil
}
// FK_prItemAttribute_ItemCode hatasini engellemek icin, attribute yazmadan once
// ilgili item kodlarinin cdItem tarafinda varligini transaction icinde garanti et.
seenCodes := make(map[string]struct{}, len(attrs))
for _, a := range attrs {
itemTypeCode := a.ItemTypeCode
if itemTypeCode <= 0 {
itemTypeCode = 1
}
itemCode := strings.ToUpper(strings.TrimSpace(a.ItemCode))
if itemCode == "" {
continue
}
key := NormalizeCdItemMapKey(int16(itemTypeCode), itemCode)
if _, ok := seenCodes[key]; ok {
continue
}
seenCodes[key] = struct{}{}
if err := ensureCdItemTx(tx, int16(itemTypeCode), itemCode, username, nil); err != nil {
return 0, fmt.Errorf("ensure cdItem before item attributes failed itemCode=%s: %w", itemCode, err)
}
}
// SQL Server parameter limiti (2100) nedeniyle batch'li set-based upsert kullanilir. // SQL Server parameter limiti (2100) nedeniyle batch'li set-based upsert kullanilir.
const chunkSize = 400 // 400 * 4 param + 1 username = 1601 const chunkSize = 400 // 400 * 4 param + 1 username = 1601
@@ -597,10 +1548,6 @@ func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttrib
args = append(args, username) args = append(args, username)
query := fmt.Sprintf(` query := fmt.Sprintf(`
SET NOCOUNT ON;
DECLARE @updated INT = 0;
DECLARE @inserted INT = 0;
WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS ( WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
SELECT * SELECT *
FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
@@ -615,7 +1562,6 @@ JOIN src
ON src.ItemTypeCode = tgt.ItemTypeCode ON src.ItemTypeCode = tgt.ItemTypeCode
AND src.ItemCode = tgt.ItemCode AND src.ItemCode = tgt.ItemCode
AND src.AttributeTypeCode = tgt.AttributeTypeCode; AND src.AttributeTypeCode = tgt.AttributeTypeCode;
SET @updated = @@ROWCOUNT;
WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS ( WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
SELECT * SELECT *
@@ -648,17 +1594,21 @@ LEFT JOIN dbo.prItemAttribute tgt
AND src.ItemCode = tgt.ItemCode AND src.ItemCode = tgt.ItemCode
AND src.AttributeTypeCode = tgt.AttributeTypeCode AND src.AttributeTypeCode = tgt.AttributeTypeCode
WHERE tgt.ItemCode IS NULL; WHERE tgt.ItemCode IS NULL;
SET @inserted = @@ROWCOUNT;
SELECT (@updated + @inserted) AS Affected;
`, strings.Join(values, ","), usernameParam, strings.Join(values, ","), usernameParam, usernameParam) `, strings.Join(values, ","), usernameParam, strings.Join(values, ","), usernameParam, usernameParam)
var chunkAffected int64 chunkStart := time.Now()
if err := tx.QueryRow(query, args...).Scan(&chunkAffected); err != nil { res, err := tx.Exec(query, args...)
return affected, err if err != nil {
log.Printf("[UpsertItemAttributesTx] ERROR chunk=%d-%d err=%v", i, end, err)
return affected, fmt.Errorf("upsert item attributes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), err)
} }
chunkAffected, _ := res.RowsAffected()
affected += chunkAffected affected += chunkAffected
log.Printf("[UpsertItemAttributesTx] chunk=%d-%d chunkAffected=%d cumulative=%d duration_ms=%d",
i, end, chunkAffected, affected, time.Since(chunkStart).Milliseconds())
} }
log.Printf("[UpsertItemAttributesTx] attrs=%d affected=%d duration_ms=%d",
len(attrs), affected, time.Since(start).Milliseconds())
return affected, nil return affected, nil
} }

View File

@@ -0,0 +1,256 @@
package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"strconv"
"strings"
"time"
)
// GetProductPricingList returns pricing and stock rows for 13-character SERI
// and AKSESUAR products, paged by product code.
//
// limit caps the page size (non-positive values default to 500) and
// afterProductCode is an exclusive keyset cursor: only products whose code is
// strictly greater are returned. Results are ordered by ProductCode, so the
// last code of one page is the cursor for the next page.
//
// The query is retried up to 3 times on transient network errors (see
// isTransientMSSQLNetworkError) with linear backoff, honoring ctx
// cancellation between attempts.
func GetProductPricingList(ctx context.Context, limit int, afterProductCode string) ([]models.ProductPricing, error) {
	if limit <= 0 {
		limit = 500
	}
	afterProductCode = strings.TrimSpace(afterProductCode)
	cursorFilter := ""
	args := make([]any, 0, 1)
	if afterProductCode != "" {
		cursorFilter = "WHERE bp.ProductCode > @p1"
		args = append(args, afterProductCode)
	}
	// limit is an int, so embedding it via strconv.Itoa cannot inject SQL.
	query := `
WITH base_products AS (
    SELECT
        LTRIM(RTRIM(ProductCode)) AS ProductCode,
        COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan,
        COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori,
        COALESCE(LTRIM(RTRIM(ProductAtt42Desc)), '') AS UrunIlkGrubu,
        COALESCE(LTRIM(RTRIM(ProductAtt01Desc)), '') AS UrunAnaGrubu,
        COALESCE(LTRIM(RTRIM(ProductAtt02Desc)), '') AS UrunAltGrubu,
        COALESCE(LTRIM(RTRIM(ProductAtt41Desc)), '') AS Icerik,
        COALESCE(LTRIM(RTRIM(ProductAtt29Desc)), '') AS Karisim,
        COALESCE(LTRIM(RTRIM(ProductAtt10Desc)), '') AS Marka
    FROM ProductFilterWithDescription('TR')
    WHERE ProductAtt42 IN ('SERI', 'AKSESUAR')
      AND IsBlocked = 0
      AND LEN(LTRIM(RTRIM(ProductCode))) = 13
),
paged_products AS (
    SELECT TOP (` + strconv.Itoa(limit) + `)
        bp.ProductCode
    FROM base_products bp
    ` + cursorFilter + `
    ORDER BY bp.ProductCode
),
latest_base_price AS (
    SELECT
        LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
        CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
        CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
        ROW_NUMBER() OVER (
            PARTITION BY LTRIM(RTRIM(b.ItemCode))
            ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
        ) AS rn
    FROM prItemBasePrice b
    WHERE b.ItemTypeCode = 1
      AND b.BasePriceCode = 1
      AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(b.ItemCode))
      )
),
stock_entry_dates AS (
    SELECT
        LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
        CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
    FROM trStock s WITH(NOLOCK)
    WHERE s.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
      AND s.In_Qty1 > 0
      AND LTRIM(RTRIM(s.WarehouseCode)) IN (
          '1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
          '1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
          '1-0-33','101','1-014','1-0-49','1-0-36'
      )
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(s.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
    SELECT
        LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
        SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
    FROM trStock s WITH(NOLOCK)
    WHERE s.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(s.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
    SELECT
        LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
        SUM(p.Qty1) AS PickingQty1
    FROM PickingStates p
    WHERE p.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(p.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
    SELECT
        LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
        SUM(r.Qty1) AS ReserveQty1
    FROM ReserveStates r
    WHERE r.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(r.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
    SELECT
        LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
        SUM(d.Qty1) AS DispOrderQty1
    FROM DispOrderStates d
    WHERE d.ItemTypeCode = 1
      AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
      AND EXISTS (
          SELECT 1
          FROM paged_products pp
          WHERE pp.ProductCode = LTRIM(RTRIM(d.ItemCode))
      )
    GROUP BY LTRIM(RTRIM(d.ItemCode))
),
stock_totals AS (
    SELECT
        pp.ProductCode AS ItemCode,
        CAST(ROUND(
            ISNULL(sb.InventoryQty1, 0)
            - ISNULL(pb.PickingQty1, 0)
            - ISNULL(rb.ReserveQty1, 0)
            - ISNULL(db.DispOrderQty1, 0)
        , 2) AS DECIMAL(18, 2)) AS StockQty
    FROM paged_products pp
    LEFT JOIN stock_base sb
        ON sb.ItemCode = pp.ProductCode
    LEFT JOIN pick_base pb
        ON pb.ItemCode = pp.ProductCode
    LEFT JOIN reserve_base rb
        ON rb.ItemCode = pp.ProductCode
    LEFT JOIN disp_base db
        ON db.ItemCode = pp.ProductCode
)
SELECT
    bp.ProductCode AS ProductCode,
    COALESCE(lp.CostPrice, 0) AS CostPrice,
    COALESCE(st.StockQty, 0) AS StockQty,
    COALESCE(se.StockEntryDate, '') AS StockEntryDate,
    COALESCE(lp.LastPricingDate, '') AS LastPricingDate,
    bp.AskiliYan,
    bp.Kategori,
    bp.UrunIlkGrubu,
    bp.UrunAnaGrubu,
    bp.UrunAltGrubu,
    bp.Icerik,
    bp.Karisim,
    bp.Marka
FROM paged_products pp
INNER JOIN base_products bp
    ON bp.ProductCode = pp.ProductCode
LEFT JOIN latest_base_price lp
    ON lp.ItemCode = bp.ProductCode
    AND lp.rn = 1
LEFT JOIN stock_entry_dates se
    ON se.ItemCode = bp.ProductCode
LEFT JOIN stock_totals st
    ON st.ItemCode = bp.ProductCode
ORDER BY bp.ProductCode;
`
	var (
		rows    *sql.Rows
		rowsErr error
	)
retry:
	for attempt := 1; attempt <= 3; attempt++ {
		var err error
		rows, err = db.MssqlDB.QueryContext(ctx, query, args...)
		if err == nil {
			rowsErr = nil
			break
		}
		rowsErr = err
		if ctx.Err() != nil || !isTransientMSSQLNetworkError(err) || attempt == 3 {
			break
		}
		// Linear backoff: 300ms, then 600ms.
		wait := time.Duration(attempt*300) * time.Millisecond
		select {
		case <-ctx.Done():
			// BUGFIX: a bare `break` here only exits the select statement and
			// the loop would spin into another doomed attempt; the labeled
			// break abandons the retry loop immediately on cancellation.
			break retry
		case <-time.After(wait):
		}
	}
	if rowsErr != nil {
		return nil, rowsErr
	}
	defer rows.Close()
	var out []models.ProductPricing
	for rows.Next() {
		var item models.ProductPricing
		if err := rows.Scan(
			&item.ProductCode,
			&item.CostPrice,
			&item.StockQty,
			&item.StockEntryDate,
			&item.LastPricingDate,
			&item.AskiliYan,
			&item.Kategori,
			&item.UrunIlkGrubu,
			&item.UrunAnaGrubu,
			&item.UrunAltGrubu,
			&item.Icerik,
			&item.Karisim,
			&item.Marka,
		); err != nil {
			return nil, err
		}
		out = append(out, item)
	}
	// BUGFIX: rows.Next() returns false both at EOF and on error (e.g. a
	// dropped connection mid-stream); surface the latter instead of silently
	// returning a truncated page.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return out, nil
}
func isTransientMSSQLNetworkError(err error) bool {
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "broken pipe") ||
strings.Contains(e, "connection reset")
}

View File

@@ -47,3 +47,13 @@ ORDER BY
CASE WHEN a.AttributeCode IN ('-', '.') THEN 0 ELSE 1 END, CASE WHEN a.AttributeCode IN ('-', '.') THEN 0 ELSE 1 END,
a.AttributeCode; a.AttributeCode;
` `
// GetProductItemAttributes selects every attribute row for one item from
// dbo.prItemAttribute.
// Parameters: @p1 = ItemTypeCode, @p2 = ItemCode (both sides compared
// trimmed, with NULL normalized to the empty string).
// NOTE(review): WITH(NOLOCK) permits dirty reads — presumably acceptable for
// a read-only attribute lookup; confirm with the team.
const GetProductItemAttributes = `
SELECT
a.ItemTypeCode,
a.AttributeTypeCode,
ISNULL(a.AttributeCode, '') AS AttributeCode
FROM dbo.prItemAttribute AS a WITH(NOLOCK)
WHERE a.ItemTypeCode = @p1
AND ISNULL(LTRIM(RTRIM(a.ItemCode)), '') = ISNULL(LTRIM(RTRIM(@p2)), '')
`

View File

@@ -2,6 +2,7 @@ package queries
import ( import (
"bssapp-backend/db" "bssapp-backend/db"
"bssapp-backend/internal/i18n"
"bssapp-backend/models" "bssapp-backend/models"
"context" "context"
"database/sql" "database/sql"
@@ -14,9 +15,7 @@ func GetStatements(ctx context.Context, params models.StatementParams) ([]models
// AccountCode normalize: "ZLA0127" → "ZLA 0127" // AccountCode normalize: "ZLA0127" → "ZLA 0127"
params.AccountCode = normalizeMasterAccountCode(params.AccountCode) params.AccountCode = normalizeMasterAccountCode(params.AccountCode)
if strings.TrimSpace(params.LangCode) == "" { params.LangCode = i18n.NormalizeLangCode(params.LangCode)
params.LangCode = "TR"
}
// Parislemler []string → '1','2','3' // Parislemler []string → '1','2','3'
parislemFilter := "''" parislemFilter := "''"
@@ -221,8 +220,8 @@ SELECT
CONVERT(varchar(10), @startdate, 23) AS Vade_Tarihi, CONVERT(varchar(10), @startdate, 23) AS Vade_Tarihi,
'Baslangic_devir' AS Belge_No, 'Baslangic_devir' AS Belge_No,
'Devir' AS Islem_Tipi, CASE WHEN @LangCode = 'EN' THEN 'Opening' ELSE 'Devir' END AS Islem_Tipi,
'Devir Bakiyesi' AS Aciklama, CASE WHEN @LangCode = 'EN' THEN 'Opening Balance' ELSE 'Devir Bakiyesi' END AS Aciklama,
o.Para_Birimi, o.Para_Birimi,

View File

@@ -6,8 +6,8 @@ import (
"log" "log"
) )
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) { func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler) headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil { if err != nil {
log.Printf("Header query error: %v", err) log.Printf("Header query error: %v", err)
return nil, nil, err return nil, nil, err

View File

@@ -10,13 +10,14 @@ func getStatementsForPDF(
accountCode string, accountCode string,
startDate string, startDate string,
endDate string, endDate string,
langCode string,
parislemler []string, parislemler []string,
) ([]models.StatementHeader, error) { ) ([]models.StatementHeader, error) {
return GetStatements(ctx, models.StatementParams{ return GetStatements(ctx, models.StatementParams{
AccountCode: accountCode, AccountCode: accountCode,
StartDate: startDate, StartDate: startDate,
EndDate: endDate, EndDate: endDate,
LangCode: "TR", LangCode: langCode,
Parislemler: parislemler, Parislemler: parislemler,
}) })
} }

View File

@@ -11,8 +11,8 @@ import (
"strings" "strings"
) )
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) { func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler) headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil { if err != nil {
log.Printf("Header query error: %v", err) log.Printf("Header query error: %v", err)
return nil, nil, err return nil, nil, err

View File

@@ -464,6 +464,7 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
defer tx.Rollback() defer tx.Rollback()
var newID int64 var newID int64
log.Printf("DEBUG: UserCreateRoute payload=%+v", payload)
err = tx.QueryRow(` err = tx.QueryRow(`
INSERT INTO mk_dfusr ( INSERT INTO mk_dfusr (
username, username,
@@ -472,11 +473,12 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
email, email,
mobile, mobile,
address, address,
password_hash,
force_password_change, force_password_change,
created_at, created_at,
updated_at updated_at
) )
VALUES ($1,$2,$3,$4,$5,$6,true,NOW(),NOW()) VALUES ($1,$2,$3,$4,$5,$6,'',true,NOW(),NOW())
RETURNING id RETURNING id
`, `,
payload.Code, payload.Code,
@@ -489,7 +491,7 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
if err != nil { if err != nil {
log.Printf("USER INSERT ERROR code=%q email=%q err=%v", payload.Code, payload.Email, err) log.Printf("USER INSERT ERROR code=%q email=%q err=%v", payload.Code, payload.Email, err)
http.Error(w, "Kullanıcı oluşturulamadı", http.StatusInternalServerError) http.Error(w, fmt.Sprintf("Kullanıcı oluşturulamadı: %v", err), http.StatusInternalServerError)
return return
} }

View File

@@ -20,6 +20,18 @@ type sendOrderMarketMailPayload struct {
DeletedItems []string `json:"deletedItems"` DeletedItems []string `json:"deletedItems"`
UpdatedItems []string `json:"updatedItems"` UpdatedItems []string `json:"updatedItems"`
AddedItems []string `json:"addedItems"` AddedItems []string `json:"addedItems"`
OldDueDate string `json:"oldDueDate"`
NewDueDate string `json:"newDueDate"`
ExtraRecipients []string `json:"extraRecipients"`
DueDateChanges []sendOrderMailDueDateChange `json:"dueDateChanges"`
}
type sendOrderMailDueDateChange struct {
ItemCode string `json:"itemCode"`
ColorCode string `json:"colorCode"`
ItemDim2Code string `json:"itemDim2Code"`
OldDueDate string `json:"oldDueDate"`
NewDueDate string `json:"newDueDate"`
} }
func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) http.HandlerFunc { func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) http.HandlerFunc {
@@ -67,6 +79,7 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
http.Error(w, "recipient query error: "+err.Error(), http.StatusInternalServerError) http.Error(w, "recipient query error: "+err.Error(), http.StatusInternalServerError)
return return
} }
recipients = appendUniqueRecipients(recipients, payload.ExtraRecipients...)
if len(recipients) == 0 { if len(recipients) == 0 {
http.Error(w, "no active email mapping for market", http.StatusBadRequest) http.Error(w, "no active email mapping for market", http.StatusBadRequest)
return return
@@ -106,6 +119,18 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
if isUpdate { if isUpdate {
subjectAction = "SİPARİŞ GÜNCELLENDİ." subjectAction = "SİPARİŞ GÜNCELLENDİ."
} }
if payload.NewDueDate != "" && payload.OldDueDate != payload.NewDueDate {
subjectAction = "SİPARİŞ TERMİNİ GÜNCELLENDİ."
}
if isUpdate && subjectAction == "SİPARİŞ GÜNCELLENDİ." {
// Satır bazlı termin kontrolü
for _, item := range payload.UpdatedItems {
if strings.Contains(item, "Termin:") {
subjectAction = "SİPARİŞ TERMİNİ GÜNCELLENDİ."
break
}
}
}
subject := fmt.Sprintf("%s kullanıcısı tarafından %s %s", actor, number, subjectAction) subject := fmt.Sprintf("%s kullanıcısı tarafından %s %s", actor, number, subjectAction)
cariDetail := "" cariDetail := ""
@@ -125,6 +150,13 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
`</p>`, `</p>`,
) )
if payload.NewDueDate != "" && payload.OldDueDate != payload.NewDueDate {
body = append(body,
fmt.Sprintf(`<p><b>Termin Değişikliği:</b> %s &rarr; <b style="color:red">%s</b></p>`,
htmlEsc(payload.OldDueDate), htmlEsc(payload.NewDueDate)),
)
}
if isUpdate { if isUpdate {
body = append(body, body = append(body,
renderItemListHTML("Silinen Ürün Kodları", payload.DeletedItems), renderItemListHTML("Silinen Ürün Kodları", payload.DeletedItems),
@@ -135,6 +167,10 @@ func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMaile
body = append(body, `<p><i>Bu sipariş BaggiSS App Uygulamasından oluşturulmuştur.</i></p>`) body = append(body, `<p><i>Bu sipariş BaggiSS App Uygulamasından oluşturulmuştur.</i></p>`)
body = append(body, `<p>PDF ektedir.</p>`) body = append(body, `<p>PDF ektedir.</p>`)
if dueDateTableHTML := renderDueDateChangesTableHTML("Termin Değişiklikleri", payload.DueDateChanges); dueDateTableHTML != "" {
body = append(body, dueDateTableHTML)
}
bodyHTML := strings.Join(body, "\n") bodyHTML := strings.Join(body, "\n")
fileNo := sanitizeFileName(number) fileNo := sanitizeFileName(number)
@@ -242,6 +278,37 @@ ORDER BY email
return out, nil return out, nil
} }
func appendUniqueRecipients(base []string, extras ...string) []string {
seen := make(map[string]struct{}, len(base)+len(extras))
out := make([]string, 0, len(base)+len(extras))
for _, raw := range base {
mail := strings.ToLower(strings.TrimSpace(raw))
if mail == "" {
continue
}
if _, ok := seen[mail]; ok {
continue
}
seen[mail] = struct{}{}
out = append(out, mail)
}
for _, raw := range extras {
mail := strings.ToLower(strings.TrimSpace(raw))
if mail == "" {
continue
}
if _, ok := seen[mail]; ok {
continue
}
seen[mail] = struct{}{}
out = append(out, mail)
}
return out
}
func buildOrderPDFBytesForMail(db *sql.DB, pgDB *sql.DB, orderID string) ([]byte, *OrderHeader, error) { func buildOrderPDFBytesForMail(db *sql.DB, pgDB *sql.DB, orderID string) ([]byte, *OrderHeader, error) {
header, err := getOrderHeaderFromDB(db, orderID) header, err := getOrderHeaderFromDB(db, orderID)
if err != nil { if err != nil {
@@ -360,3 +427,54 @@ func renderItemListHTML(title string, items []string) string {
b = append(b, `</p>`) b = append(b, `</p>`)
return strings.Join(b, "\n") return strings.Join(b, "\n")
} }
// renderDueDateChangesTableHTML builds an HTML table listing per-line due-date
// changes for the order mail. Rows are trimmed, de-duplicated, and dropped
// when they have no item code, no new date, or no actual change. Returns the
// empty string when nothing remains to render.
func renderDueDateChangesTableHTML(title string, rows []sendOrderMailDueDateChange) string {
	if len(rows) == 0 {
		return ""
	}
	dedup := make(map[string]struct{}, len(rows))
	filtered := make([]sendOrderMailDueDateChange, 0, len(rows))
	for _, r := range rows {
		entry := sendOrderMailDueDateChange{
			ItemCode:     strings.TrimSpace(r.ItemCode),
			ColorCode:    strings.TrimSpace(r.ColorCode),
			ItemDim2Code: strings.TrimSpace(r.ItemDim2Code),
			OldDueDate:   strings.TrimSpace(r.OldDueDate),
			NewDueDate:   strings.TrimSpace(r.NewDueDate),
		}
		if entry.ItemCode == "" || entry.NewDueDate == "" || entry.OldDueDate == entry.NewDueDate {
			continue
		}
		key := strings.ToUpper(strings.Join([]string{entry.ItemCode, entry.ColorCode, entry.ItemDim2Code, entry.OldDueDate, entry.NewDueDate}, "|"))
		if _, dup := dedup[key]; dup {
			continue
		}
		dedup[key] = struct{}{}
		filtered = append(filtered, entry)
	}
	if len(filtered) == 0 {
		return ""
	}
	var sb strings.Builder
	sb.WriteString(fmt.Sprintf(`<p><b>%s:</b></p>`, htmlEsc(title)))
	sb.WriteString(`<table border="1" cellpadding="5" style="border-collapse: collapse; width: 100%;">`)
	sb.WriteString(`<tr style="background-color: #f2f2f2;"><th>Ürün Kodu</th><th>Renk</th><th>2. Renk</th><th>Eski Termin</th><th>Yeni Termin</th></tr>`)
	for _, entry := range filtered {
		sb.WriteString("<tr>")
		sb.WriteString(fmt.Sprintf("<td>%s</td>", htmlEsc(entry.ItemCode)))
		sb.WriteString(fmt.Sprintf("<td>%s</td>", htmlEsc(entry.ColorCode)))
		sb.WriteString(fmt.Sprintf("<td>%s</td>", htmlEsc(entry.ItemDim2Code)))
		sb.WriteString(fmt.Sprintf("<td>%s</td>", htmlEsc(entry.OldDueDate)))
		sb.WriteString(fmt.Sprintf(`<td style="color:red;font-weight:bold;">%s</td>`, htmlEsc(entry.NewDueDate)))
		sb.WriteString("</tr>")
	}
	sb.WriteString(`</table>`)
	return sb.String()
}

View File

@@ -2,8 +2,10 @@ package routes
import ( import (
"bssapp-backend/auth" "bssapp-backend/auth"
"bssapp-backend/internal/mailer"
"bssapp-backend/models" "bssapp-backend/models"
"bssapp-backend/queries" "bssapp-backend/queries"
"context"
"database/sql" "database/sql"
"encoding/json" "encoding/json"
"errors" "errors"
@@ -20,6 +22,8 @@ import (
var baggiModelCodeRegex = regexp.MustCompile(`^[A-Z][0-9]{3}-[A-Z]{3}[0-9]{5}$`) var baggiModelCodeRegex = regexp.MustCompile(`^[A-Z][0-9]{3}-[A-Z]{3}[0-9]{5}$`)
const productionBarcodeTypeCode = "BAGGI3"
// ====================================================== // ======================================================
// 📌 OrderProductionItemsRoute — U ürün satırları // 📌 OrderProductionItemsRoute — U ürün satırları
// ====================================================== // ======================================================
@@ -54,12 +58,16 @@ func OrderProductionItemsRoute(mssql *sql.DB) http.Handler {
&o.OldDim3, &o.OldDim3,
&o.OldItemCode, &o.OldItemCode,
&o.OldColor, &o.OldColor,
&o.OldColorDescription,
&o.OldDim2, &o.OldDim2,
&o.OldDesc, &o.OldDesc,
&o.OldQty,
&o.NewItemCode, &o.NewItemCode,
&o.NewColor, &o.NewColor,
&o.NewDim2, &o.NewDim2,
&o.NewDesc, &o.NewDesc,
&o.OldDueDate,
&o.NewDueDate,
&o.IsVariantMissing, &o.IsVariantMissing,
); err != nil { ); err != nil {
log.Printf("⚠️ SCAN HATASI: %v", err) log.Printf("⚠️ SCAN HATASI: %v", err)
@@ -169,21 +177,42 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
http.Error(w, err.Error(), http.StatusBadRequest) http.Error(w, err.Error(), http.StatusBadRequest)
return return
} }
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
missing := make([]models.OrderProductionMissingVariant, 0)
targets := make([]models.OrderProductionMissingVariant, 0)
stepStart := time.Now() stepStart := time.Now()
missing, err := buildMissingVariants(mssql, id, payload.Lines) if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("validate_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
targets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, targets)
return stepErr
})
if err != nil { if err != nil {
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v", log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepStart).Milliseconds(), err) rid, id, time.Since(stepStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(payload.Lines), err) writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(newLines), err)
return return
} }
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s lineCount=%d missingCount=%d build_missing_ms=%d total_ms=%d", }
rid, id, len(payload.Lines), len(missing), time.Since(stepStart).Milliseconds(), time.Since(start).Milliseconds()) log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d total_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(targets), len(missing), time.Since(stepStart).Milliseconds(), time.Since(start).Milliseconds())
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
resp := map[string]any{ resp := map[string]any{
"missingCount": len(missing), "missingCount": len(missing),
"missing": missing, "missing": missing,
"barcodeValidationCount": 0,
"barcodeValidations": []models.OrderProductionBarcodeValidation{},
} }
if err := json.NewEncoder(w).Encode(resp); err != nil { if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err) log.Printf("❌ encode error: %v", err)
@@ -194,7 +223,7 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
// ====================================================== // ======================================================
// OrderProductionApplyRoute - yeni model varyant guncelleme // OrderProductionApplyRoute - yeni model varyant guncelleme
// ====================================================== // ======================================================
func OrderProductionApplyRoute(mssql *sql.DB) http.Handler { func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8") w.Header().Set("Content-Type", "application/json; charset=utf-8")
rid := fmt.Sprintf("opa-%d", time.Now().UnixNano()) rid := fmt.Sprintf("opa-%d", time.Now().UnixNano())
@@ -216,17 +245,59 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
http.Error(w, err.Error(), http.StatusBadRequest) http.Error(w, err.Error(), http.StatusBadRequest)
return return
} }
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
if len(payload.Lines) > 0 {
limit := 5
if len(payload.Lines) < limit {
limit = len(payload.Lines)
}
samples := make([]string, 0, limit)
for i := 0; i < limit; i++ {
ln := payload.Lines[i]
dim1 := ""
if ln.ItemDim1Code != nil {
dim1 = strings.TrimSpace(*ln.ItemDim1Code)
}
samples = append(samples, fmt.Sprintf(
"lineID=%s newItem=%s newColor=%s newDim1=%s newDim2=%s",
strings.TrimSpace(ln.OrderLineID),
strings.ToUpper(strings.TrimSpace(ln.NewItemCode)),
strings.ToUpper(strings.TrimSpace(ln.NewColor)),
strings.ToUpper(strings.TrimSpace(dim1)),
strings.ToUpper(strings.TrimSpace(ln.NewDim2)),
))
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineSamples=%v", rid, id, samples)
}
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
stepMissingStart := time.Now() stepMissingStart := time.Now()
missing, err := buildMissingVariants(mssql, id, payload.Lines) missing := make([]models.OrderProductionMissingVariant, 0)
barcodeTargets := make([]models.OrderProductionMissingVariant, 0)
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("apply_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
barcodeTargets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, barcodeTargets)
return stepErr
})
if err != nil { if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v", log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepMissingStart).Milliseconds(), err) rid, id, time.Since(stepMissingStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(payload.Lines), err) writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(newLines), err)
return return
} }
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s lineCount=%d missingCount=%d build_missing_ms=%d", }
rid, id, len(payload.Lines), len(missing), time.Since(stepMissingStart).Milliseconds()) log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(barcodeTargets), len(missing), time.Since(stepMissingStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
if len(missing) > 0 && !payload.InsertMissing { if len(missing) > 0 && !payload.InsertMissing {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s early_exit=missing_variants total_ms=%d", log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s early_exit=missing_variants total_ms=%d",
@@ -255,30 +326,83 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
writeDBError(w, http.StatusInternalServerError, "begin_tx", id, username, len(payload.Lines), err) writeDBError(w, http.StatusInternalServerError, "begin_tx", id, username, len(payload.Lines), err)
return return
} }
defer tx.Rollback()
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=begin_tx duration_ms=%d", rid, id, time.Since(stepBeginStart).Milliseconds()) log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=begin_tx duration_ms=%d", rid, id, time.Since(stepBeginStart).Milliseconds())
committed := false
currentStep := "begin_tx"
applyTxSettings := func(tx *sql.Tx) error {
// XACT_ABORT OFF:
// Barcode insert path intentionally tolerates duplicate-key errors (fallback/skip duplicate).
// With XACT_ABORT ON, that expected error aborts the whole transaction and causes COMMIT 3902.
_, execErr := tx.Exec(`SET XACT_ABORT OFF; SET LOCK_TIMEOUT 15000;`)
return execErr
}
defer func() {
if committed {
return
}
rbStart := time.Now()
if rbErr := tx.Rollback(); rbErr != nil && rbErr != sql.ErrTxDone {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s failed duration_ms=%d err=%v",
rid, id, currentStep, time.Since(rbStart).Milliseconds(), rbErr)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s ok duration_ms=%d",
rid, id, currentStep, time.Since(rbStart).Milliseconds())
}()
stepTxSettingsStart := time.Now() stepTxSettingsStart := time.Now()
if _, err := tx.Exec(`SET XACT_ABORT ON; SET LOCK_TIMEOUT 15000;`); err != nil { currentStep = "tx_settings"
if err := applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings", id, username, len(payload.Lines), err) writeDBError(w, http.StatusInternalServerError, "tx_settings", id, username, len(payload.Lines), err)
return return
} }
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=tx_settings duration_ms=%d", rid, id, time.Since(stepTxSettingsStart).Milliseconds()) log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=tx_settings duration_ms=%d", rid, id, time.Since(stepTxSettingsStart).Milliseconds())
if err := ensureTxAlive(tx, "after_tx_settings"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings", id, username, len(payload.Lines), err)
return
}
var inserted int64 var inserted int64
if payload.InsertMissing { if payload.InsertMissing && len(newLines) > 0 {
currentStep = "insert_missing_variants"
cdItemByCode := buildCdItemDraftMap(payload.CdItems) cdItemByCode := buildCdItemDraftMap(payload.CdItems)
stepInsertMissingStart := time.Now() stepInsertMissingStart := time.Now()
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode) inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
if err != nil && isTransientMSSQLNetworkErr(err) {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing transient_error retry=1 err=%v",
rid, id, err)
_ = tx.Rollback()
tx, err = mssql.Begin()
if err != nil {
writeDBError(w, http.StatusInternalServerError, "begin_tx_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "tx_settings_retry_insert_missing"
if err = applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
if err = ensureTxAlive(tx, "after_tx_settings_retry_insert_missing"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "insert_missing_variants_retry"
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
}
if err != nil { if err != nil {
writeDBError(w, http.StatusInternalServerError, "insert_missing_variants", id, username, len(missing), err) writeDBError(w, http.StatusInternalServerError, "insert_missing_variants", id, username, len(missing), err)
return return
} }
if err := ensureTxAlive(tx, "after_insert_missing_variants"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_insert_missing_variants", id, username, len(missing), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing inserted=%d duration_ms=%d", log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing inserted=%d duration_ms=%d",
rid, id, inserted, time.Since(stepInsertMissingStart).Milliseconds()) rid, id, inserted, time.Since(stepInsertMissingStart).Milliseconds())
} }
stepValidateAttrStart := time.Now() stepValidateAttrStart := time.Now()
currentStep = "validate_attributes"
if err := validateProductAttributes(payload.ProductAttributes); err != nil { if err := validateProductAttributes(payload.ProductAttributes); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest) http.Error(w, err.Error(), http.StatusBadRequest)
return return
@@ -286,37 +410,157 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=validate_attributes count=%d duration_ms=%d", log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=validate_attributes count=%d duration_ms=%d",
rid, id, len(payload.ProductAttributes), time.Since(stepValidateAttrStart).Milliseconds()) rid, id, len(payload.ProductAttributes), time.Since(stepValidateAttrStart).Milliseconds())
stepUpdateLinesStart := time.Now()
updated, err := queries.UpdateOrderLinesTx(tx, id, payload.Lines, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "update_order_lines", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines updated=%d duration_ms=%d",
rid, id, updated, time.Since(stepUpdateLinesStart).Milliseconds())
stepUpsertAttrStart := time.Now() stepUpsertAttrStart := time.Now()
currentStep = "upsert_item_attributes"
attributeAffected, err := queries.UpsertItemAttributesTx(tx, payload.ProductAttributes, username) attributeAffected, err := queries.UpsertItemAttributesTx(tx, payload.ProductAttributes, username)
if err != nil { if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertAttrStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_attributes", id, username, len(payload.ProductAttributes), err) writeDBError(w, http.StatusInternalServerError, "upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return return
} }
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes affected=%d duration_ms=%d", log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes affected=%d duration_ms=%d",
rid, id, attributeAffected, time.Since(stepUpsertAttrStart).Milliseconds()) rid, id, attributeAffected, time.Since(stepUpsertAttrStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemAttribute inputRows=%d affectedRows=%d",
rid, id, len(payload.ProductAttributes), attributeAffected)
if err := ensureTxAlive(tx, "after_upsert_item_attributes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
var barcodeInserted int64
// Barkod adimi:
// - Eski kodlara girmemeli
// - Yeni kod satirlari icin, varyant daha once olusmus olsa bile eksik barkod varsa tamamlamali
// Bu nedenle "inserted > 0" yerine "newLineCount > 0" kosulu kullanilir.
if len(newLines) > 0 && len(barcodeTargets) > 0 {
stepUpsertBarcodeStart := time.Now()
currentStep = "upsert_item_barcodes"
barcodeInserted, err = queries.InsertItemBarcodesByTargetsTx(tx, barcodeTargets, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertBarcodeStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes inserted=%d duration_ms=%d",
rid, id, barcodeInserted, time.Since(stepUpsertBarcodeStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemBarcode targetVariantRows=%d insertedRows=%d",
rid, id, len(barcodeTargets), barcodeInserted)
if err := ensureTxAlive(tx, "after_upsert_item_barcodes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
} else {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes skipped newLineCount=%d targetVariantRows=%d",
rid, id, len(newLines), len(barcodeTargets))
}
stepUpdateHeaderStart := time.Now()
currentStep = "update_order_header_average_due_date"
if err := queries.UpdateOrderHeaderAverageDueDateTx(tx, id, payload.HeaderAverageDueDate, username); err != nil {
writeDBError(w, http.StatusInternalServerError, "update_order_header_average_due_date", id, username, 0, err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_header_average_due_date changed=%t duration_ms=%d",
rid, id, payload.HeaderAverageDueDate != nil, time.Since(stepUpdateHeaderStart).Milliseconds())
if err := ensureTxAlive(tx, "after_update_order_header_average_due_date"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_header_average_due_date", id, username, 0, err)
return
}
currentStep = "touch_order_header"
headerTouched, err := queries.TouchOrderHeaderTx(tx, id, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "touch_order_header", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderHeader touchedRows=%d",
rid, id, headerTouched)
if err := ensureTxAlive(tx, "after_touch_order_header"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_touch_order_header", id, username, len(payload.Lines), err)
return
}
stepUpdateLinesStart := time.Now()
currentStep = "update_order_lines"
updated, err := queries.UpdateOrderLinesTx(tx, id, payload.Lines, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpdateLinesStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "update_order_lines", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines updated=%d duration_ms=%d",
rid, id, updated, time.Since(stepUpdateLinesStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine targetRows=%d updatedRows=%d",
rid, id, len(payload.Lines), updated)
if err := ensureTxAlive(tx, "after_update_order_lines"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_lines", id, username, len(payload.Lines), err)
return
}
currentStep = "verify_order_lines"
verifyMismatchCount, verifySamples, verifyErr := queries.VerifyOrderLineUpdatesTx(tx, id, payload.Lines)
if verifyErr != nil {
writeDBError(w, http.StatusInternalServerError, "verify_order_lines", id, username, len(payload.Lines), verifyErr)
return
}
if verifyMismatchCount > 0 {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=%d samples=%v",
rid, id, verifyMismatchCount, verifySamples)
currentStep = "verify_order_lines_mismatch"
w.WriteHeader(http.StatusInternalServerError)
_ = json.NewEncoder(w).Encode(map[string]any{
"message": "Order satirlari beklenen kod/renk degerlerine guncellenemedi",
"step": "verify_order_lines_mismatch",
"detail": fmt.Sprintf("mismatchCount=%d", verifyMismatchCount),
"samples": verifySamples,
})
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=0",
rid, id)
if err := ensureTxAlive(tx, "before_commit_tx"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_before_commit_tx", id, username, len(payload.Lines), err)
return
}
stepCommitStart := time.Now() stepCommitStart := time.Now()
currentStep = "commit_tx"
if err := tx.Commit(); err != nil { if err := tx.Commit(); err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit failed duration_ms=%d err=%v",
rid, id, time.Since(stepCommitStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "commit_tx", id, username, len(payload.Lines), err) writeDBError(w, http.StatusInternalServerError, "commit_tx", id, username, len(payload.Lines), err)
return return
} }
committed = true
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit duration_ms=%d total_ms=%d", log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit duration_ms=%d total_ms=%d",
rid, id, time.Since(stepCommitStart).Milliseconds(), time.Since(start).Milliseconds()) rid, id, time.Since(stepCommitStart).Milliseconds(), time.Since(start).Milliseconds())
// Mail gönderim mantığı
if false && ml != nil {
go func() {
defer func() {
if r := recover(); r != nil {
log.Printf("[OrderProductionApplyRoute] mail panic recover: %v", r)
}
}()
sendProductionUpdateMails(mssql, ml, id, username, payload.Lines)
}()
}
resp := map[string]any{ resp := map[string]any{
"updated": updated, "updated": updated,
"inserted": inserted, "inserted": inserted,
"barcodeInserted": barcodeInserted,
"attributeUpserted": attributeAffected, "attributeUpserted": attributeAffected,
"headerUpdated": payload.HeaderAverageDueDate != nil,
} }
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s result updated=%d inserted=%d barcodeInserted=%d attributeUpserted=%d",
rid, id, updated, inserted, barcodeInserted, attributeAffected)
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s summary tables cdItem/prItemVariant(newOnly)=%d trOrderLine(updated)=%d prItemBarcode(inserted,newOnly)=%d prItemAttribute(affected)=%d trOrderHeader(touched)=%d",
rid, id, inserted, updated, barcodeInserted, attributeAffected, headerTouched)
if err := json.NewEncoder(w).Encode(resp); err != nil { if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err) log.Printf("❌ encode error: %v", err)
} }
@@ -361,21 +605,28 @@ func buildCdItemDraftMap(list []models.OrderProductionCdItemDraft) map[string]mo
return out return out
} }
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) { func isNoCorrespondingBeginTxErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
return strings.Contains(msg, "commit transaction request has no corresponding begin transaction")
}
func buildTargetVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
start := time.Now() start := time.Now()
missing := make([]models.OrderProductionMissingVariant, 0)
lineDimsMap, err := queries.GetOrderLineDimsMap(mssql, orderHeaderID) lineDimsMap, err := queries.GetOrderLineDimsMap(mssql, orderHeaderID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
existsCache := make(map[string]bool, len(lines))
out := make([]models.OrderProductionMissingVariant, 0, len(lines))
seen := make(map[string]struct{}, len(lines))
for _, line := range lines { for _, line := range lines {
lineID := strings.TrimSpace(line.OrderLineID) lineID := strings.TrimSpace(line.OrderLineID)
newItem := strings.TrimSpace(line.NewItemCode) newItem := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
newColor := strings.TrimSpace(line.NewColor) newColor := strings.ToUpper(strings.TrimSpace(line.NewColor))
newDim2 := strings.TrimSpace(line.NewDim2) newDim2 := strings.ToUpper(strings.TrimSpace(line.NewDim2))
if lineID == "" || newItem == "" { if lineID == "" || newItem == "" {
continue continue
} }
@@ -385,41 +636,133 @@ func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.Or
continue continue
} }
dim1 := strings.ToUpper(strings.TrimSpace(dims.ItemDim1Code))
if line.ItemDim1Code != nil {
dim1 = strings.ToUpper(strings.TrimSpace(*line.ItemDim1Code))
}
dim3 := strings.ToUpper(strings.TrimSpace(dims.ItemDim3Code))
key := fmt.Sprintf("%d|%s|%s|%s|%s|%s", dims.ItemTypeCode, newItem, newColor, dim1, newDim2, dim3)
if _, ok := seen[key]; ok {
continue
}
seen[key] = struct{}{}
out = append(out, models.OrderProductionMissingVariant{
OrderLineID: lineID,
ItemTypeCode: dims.ItemTypeCode,
ItemCode: newItem,
ColorCode: newColor,
ItemDim1Code: dim1,
ItemDim2Code: newDim2,
ItemDim3Code: dim3,
})
}
log.Printf("[buildTargetVariants] orderHeaderID=%s lineCount=%d dimMapCount=%d targetCount=%d total_ms=%d",
orderHeaderID, len(lines), len(lineDimsMap), len(out), time.Since(start).Milliseconds())
return out, nil
}
// buildMissingVariants resolves the target variants implied by the given
// order lines and keeps only those that do not yet exist in the database.
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
	targetVariants, buildErr := buildTargetVariants(mssql, orderHeaderID, lines)
	if buildErr != nil {
		return nil, buildErr
	}
	return buildMissingVariantsFromTargets(mssql, orderHeaderID, targetVariants)
}
func buildMissingVariantsFromTargets(mssql *sql.DB, orderHeaderID string, targets []models.OrderProductionMissingVariant) ([]models.OrderProductionMissingVariant, error) {
start := time.Now()
missing := make([]models.OrderProductionMissingVariant, 0, len(targets))
existsCache := make(map[string]bool, len(targets))
for _, target := range targets {
cacheKey := fmt.Sprintf("%d|%s|%s|%s|%s|%s", cacheKey := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
dims.ItemTypeCode, target.ItemTypeCode,
strings.ToUpper(strings.TrimSpace(newItem)), target.ItemCode,
strings.ToUpper(strings.TrimSpace(newColor)), target.ColorCode,
strings.ToUpper(strings.TrimSpace(dims.ItemDim1Code)), target.ItemDim1Code,
strings.ToUpper(strings.TrimSpace(newDim2)), target.ItemDim2Code,
strings.ToUpper(strings.TrimSpace(dims.ItemDim3Code)), target.ItemDim3Code,
) )
exists, cached := existsCache[cacheKey] exists, cached := existsCache[cacheKey]
if !cached { if !cached {
var checkErr error var checkErr error
exists, checkErr = queries.VariantExists(mssql, dims.ItemTypeCode, newItem, newColor, dims.ItemDim1Code, newDim2, dims.ItemDim3Code) exists, checkErr = queries.VariantExists(mssql, target.ItemTypeCode, target.ItemCode, target.ColorCode, target.ItemDim1Code, target.ItemDim2Code, target.ItemDim3Code)
if checkErr != nil { if checkErr != nil {
return nil, checkErr return nil, checkErr
} }
existsCache[cacheKey] = exists existsCache[cacheKey] = exists
} }
if !exists { if !exists {
missing = append(missing, models.OrderProductionMissingVariant{ missing = append(missing, target)
OrderLineID: lineID,
ItemTypeCode: dims.ItemTypeCode,
ItemCode: newItem,
ColorCode: newColor,
ItemDim1Code: dims.ItemDim1Code,
ItemDim2Code: newDim2,
ItemDim3Code: dims.ItemDim3Code,
})
} }
} }
log.Printf("[buildMissingVariants] orderHeaderID=%s lineCount=%d dimMapCount=%d missingCount=%d total_ms=%d", log.Printf("[buildMissingVariants] orderHeaderID=%s targetCount=%d missingCount=%d total_ms=%d",
orderHeaderID, len(lines), len(lineDimsMap), len(missing), time.Since(start).Milliseconds()) orderHeaderID, len(targets), len(missing), time.Since(start).Milliseconds())
return missing, nil return missing, nil
} }
// runWithTransientMSSQLRetry runs fn up to maxAttempts times, retrying only
// when the returned error looks like a transient MSSQL network failure
// (see isTransientMSSQLNetworkErr). It waits attempt*baseDelay between
// attempts (linear backoff) and returns the last error unchanged; a nil
// result from fn stops retrying immediately.
func runWithTransientMSSQLRetry(op string, maxAttempts int, baseDelay time.Duration, fn func() error) error {
	if maxAttempts <= 1 {
		// Single-shot: no retry bookkeeping needed.
		return fn()
	}
	var lastErr error
	attempt := 1
	for attempt <= maxAttempts {
		lastErr = fn()
		if lastErr == nil {
			return nil
		}
		// Give up on non-transient errors or once the attempt budget is spent.
		if attempt == maxAttempts || !isTransientMSSQLNetworkErr(lastErr) {
			return lastErr
		}
		backoff := time.Duration(attempt) * baseDelay
		log.Printf("[MSSQLRetry] op=%s attempt=%d/%d wait_ms=%d err=%v",
			op, attempt, maxAttempts, backoff.Milliseconds(), lastErr)
		time.Sleep(backoff)
		attempt++
	}
	return lastErr
}
func isTransientMSSQLNetworkErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
needles := []string{
"wsarecv",
"read tcp",
"connection reset",
"connection refused",
"broken pipe",
"i/o timeout",
"timeout",
}
for _, needle := range needles {
if strings.Contains(msg, needle) {
return true
}
}
return false
}
// ensureTxAlive verifies that tx is non-nil and that the server-side
// transaction is still open (SELECT @@TRANCOUNT returns > 0). The where tag
// is embedded in the returned error so logs pinpoint which step detected the
// dead transaction. Returns nil when the transaction is alive.
func ensureTxAlive(tx *sql.Tx, where string) error {
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	var openTxCount int
	if scanErr := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&openTxCount); scanErr != nil {
		return fmt.Errorf("tx state query failed at %s: %w", where, scanErr)
	}
	if openTxCount > 0 {
		return nil
	}
	return fmt.Errorf("transaction no longer active at %s (trancount=%d)", where, openTxCount)
}
func validateUpdateLines(lines []models.OrderProductionUpdateLine) error { func validateUpdateLines(lines []models.OrderProductionUpdateLine) error {
for _, line := range lines { for _, line := range lines {
if strings.TrimSpace(line.OrderLineID) == "" { if strings.TrimSpace(line.OrderLineID) == "" {
@@ -436,6 +779,54 @@ func validateUpdateLines(lines []models.OrderProductionUpdateLine) error {
return nil return nil
} }
// splitLinesByCdItemDraft partitions the update lines into those whose
// NewItemCode matches a draft cdItem code (lines creating new products) and
// the remainder (lines targeting existing products). Codes are compared
// case-insensitively after trimming. With no lines it returns (nil, nil);
// with no usable draft codes every line is classified as existing.
func splitLinesByCdItemDraft(lines []models.OrderProductionUpdateLine, cdItems []models.OrderProductionCdItemDraft) ([]models.OrderProductionUpdateLine, []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return nil, nil
	}
	draftCodes := make(map[string]struct{}, len(cdItems))
	for _, draft := range cdItems {
		if code := strings.ToUpper(strings.TrimSpace(draft.ItemCode)); code != "" {
			draftCodes[code] = struct{}{}
		}
	}
	if len(draftCodes) == 0 {
		// No draft codes at all: copy every line into the "existing" bucket.
		existing := make([]models.OrderProductionUpdateLine, 0, len(lines))
		existing = append(existing, lines...)
		return nil, existing
	}
	newOnes := make([]models.OrderProductionUpdateLine, 0, len(lines))
	existing := make([]models.OrderProductionUpdateLine, 0, len(lines))
	for _, line := range lines {
		if _, isNew := draftCodes[strings.ToUpper(strings.TrimSpace(line.NewItemCode))]; isNew {
			newOnes = append(newOnes, line)
		} else {
			existing = append(existing, line)
		}
	}
	return newOnes, existing
}
// uniqueCodesFromLines collects the distinct, non-empty NewItemCode values
// from the lines in first-seen order, normalized by trimming and
// upper-casing.
func uniqueCodesFromLines(lines []models.OrderProductionUpdateLine) []string {
	seen := make(map[string]struct{}, len(lines))
	codes := make([]string, 0, len(lines))
	for _, line := range lines {
		normalized := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
		if normalized == "" {
			continue
		}
		if _, duplicate := seen[normalized]; duplicate {
			continue
		}
		seen[normalized] = struct{}{}
		codes = append(codes, normalized)
	}
	return codes
}
func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID string, username string, lineCount int, err error) { func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID string, username string, lineCount int, err error) {
var sqlErr mssql.Error var sqlErr mssql.Error
if errors.As(err, &sqlErr) { if errors.As(err, &sqlErr) {
@@ -458,3 +849,69 @@ func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID
"detail": err.Error(), "detail": err.Error(),
}) })
} }
// sendProductionUpdateMails builds and sends a notification e-mail listing
// the order lines whose due date ("termin") changed during a production
// update. It is best-effort: every failure is logged and swallowed so the
// caller's request handling is never affected. No mail is sent when no line
// actually changed its due date.
//
// NOTE(review): the recipient list is currently hard-coded (see below);
// real recipients should be loaded from the market mail configuration.
func sendProductionUpdateMails(db *sql.DB, ml *mailer.GraphMailer, orderHeaderID string, actor string, lines []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return
	}
	// Resolve the order context (order no, account, market) for the mail body.
	orderNo, currAccCode, marketCode, marketTitle, err := resolveOrderMailContext(db, orderHeaderID)
	if err != nil {
		log.Printf("[sendProductionUpdateMails] context error: %v", err)
		return
	}
	// Loading market recipients would require the PG database, which is not
	// available in this route; for now we only log here or send via the
	// GraphMailer directly.
	// Note: the PG connection does not exist inside this route, so recipients
	// could come from the payload or a fixed address instead.
	// The requested mail content is "item code - color - color2, old due
	// date, new due date" per changed line.
	subject := fmt.Sprintf("%s tarafından %s Nolu Sipariş Güncellendi (Üretim)", actor, orderNo)
	var body strings.Builder
	body.WriteString("<html><head><meta charset='utf-8'></head><body>")
	body.WriteString(fmt.Sprintf("<p><b>Sipariş No:</b> %s</p>", orderNo))
	body.WriteString(fmt.Sprintf("<p><b>Cari:</b> %s</p>", currAccCode))
	body.WriteString(fmt.Sprintf("<p><b>Piyasa:</b> %s (%s)</p>", marketTitle, marketCode))
	body.WriteString("<p>Aşağıdaki satırlarda termin tarihi güncellenmiştir:</p>")
	body.WriteString("<table border='1' cellpadding='5' style='border-collapse: collapse;'>")
	body.WriteString("<tr style='background-color: #f2f2f2;'><th>Ürün Kodu</th><th>Renk</th><th>2. Renk</th><th>Eski Termin</th><th>Yeni Termin</th></tr>")
	hasTerminChange := false
	// Only lines where the due date actually changed to a non-empty value
	// are listed in the table (and only then is the mail sent at all).
	for _, l := range lines {
		if l.OldDueDate != l.NewDueDate && l.NewDueDate != "" {
			hasTerminChange = true
			body.WriteString("<tr>")
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewItemCode))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewColor))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewDim2))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.OldDueDate))
			body.WriteString(fmt.Sprintf("<td style='color: red; font-weight: bold;'>%s</td>", l.NewDueDate))
			body.WriteString("</tr>")
		}
	}
	body.WriteString("</table>")
	body.WriteString("<p><i>Bu mail sistem tarafından otomatik oluşturulmuştur.</i></p>")
	body.WriteString("</body></html>")
	if !hasTerminChange {
		return
	}
	// Mimic the recipient logic of OrderMarketMail here, or fall back to a
	// fixed group; in a real deployment the recipients must be fetched from
	// the PG database.
	recipients := []string{"urun@baggi.com.tr"} // default recipient
	msg := mailer.Message{
		To:       recipients,
		Subject:  subject,
		BodyHTML: body.String(),
	}
	if err := ml.Send(context.Background(), msg); err != nil {
		log.Printf("[sendProductionUpdateMails] send error: %v", err)
	} else {
		log.Printf("[sendProductionUpdateMails] mail sent to %v", recipients)
	}
}

View File

@@ -6,6 +6,7 @@ import (
"database/sql" "database/sql"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt"
"net/http" "net/http"
"strings" "strings"
"time" "time"
@@ -22,11 +23,13 @@ type ProductionUpdateLine struct {
ItemDim2Code string `json:"ItemDim2Code"` ItemDim2Code string `json:"ItemDim2Code"`
ItemDim3Code string `json:"ItemDim3Code"` ItemDim3Code string `json:"ItemDim3Code"`
LineDescription string `json:"LineDescription"` LineDescription string `json:"LineDescription"`
NewDueDate string `json:"NewDueDate"`
} }
type ProductionUpdateRequest struct { type ProductionUpdateRequest struct {
Lines []ProductionUpdateLine `json:"lines"` Lines []ProductionUpdateLine `json:"lines"`
InsertMissing bool `json:"insertMissing"` InsertMissing bool `json:"insertMissing"`
NewDueDate string `json:"newDueDate"`
} }
type MissingVariant struct { type MissingVariant struct {
@@ -79,6 +82,16 @@ func OrderProductionUpdateRoute(mssql *sql.DB) http.Handler {
} }
defer tx.Rollback() defer tx.Rollback()
// 0) Header güncelle (Termin)
if req.NewDueDate != "" {
_, err = tx.Exec(`UPDATE dbo.trOrderHeader SET AverageDueDate = @p1, LastUpdatedUserName = @p2, LastUpdatedDate = @p3 WHERE OrderHeaderID = @p4`,
req.NewDueDate, username, time.Now(), id)
if err != nil {
http.Error(w, "Header güncellenemedi: "+err.Error(), http.StatusInternalServerError)
return
}
}
// 1) Eksik varyantları kontrol et // 1) Eksik varyantları kontrol et
missingMap := make(map[string]MissingVariant) missingMap := make(map[string]MissingVariant)
checkStmt, err := tx.Prepare(` checkStmt, err := tx.Prepare(`
@@ -187,12 +200,15 @@ UPDATE dbo.trOrderLine
SET SET
ItemCode = @p1, ItemCode = @p1,
ColorCode = @p2, ColorCode = @p2,
ItemDim2Code = @p3, ItemDim1Code = @p3,
LineDescription = @p4, ItemDim2Code = @p4,
LastUpdatedUserName = @p5, LineDescription = @p5,
LastUpdatedDate = @p6 LastUpdatedUserName = @p6,
WHERE OrderHeaderID = @p7 LastUpdatedDate = @p7,
AND OrderLineID = @p8 OldDueDate = (SELECT TOP 1 AverageDueDate FROM dbo.trOrderHeader WHERE OrderHeaderID = @p8),
NewDueDate = @p9
WHERE OrderHeaderID = @p8
AND OrderLineID = @p10
`) `)
if err != nil { if err != nil {
http.Error(w, "Update hazırlığı başarısız", http.StatusInternalServerError) http.Error(w, "Update hazırlığı başarısız", http.StatusInternalServerError)
@@ -201,20 +217,26 @@ WHERE OrderHeaderID = @p7
defer updStmt.Close() defer updStmt.Close()
now := time.Now() now := time.Now()
var updatedDueDates []string
for _, ln := range req.Lines { for _, ln := range req.Lines {
if _, err := updStmt.Exec( if _, err := updStmt.Exec(
ln.ItemCode, ln.ItemCode,
ln.ColorCode, ln.ColorCode,
ln.ItemDim1Code,
ln.ItemDim2Code, ln.ItemDim2Code,
ln.LineDescription, ln.LineDescription,
username, username,
now, now,
id, id,
ln.NewDueDate,
ln.OrderLineID, ln.OrderLineID,
); err != nil { ); err != nil {
http.Error(w, "Satır güncelleme hatası", http.StatusInternalServerError) http.Error(w, "Satır güncelleme hatası", http.StatusInternalServerError)
return return
} }
if ln.NewDueDate != "" {
updatedDueDates = append(updatedDueDates, fmt.Sprintf("%s kodlu ürünün Termin Tarihi %s olmuştur", ln.ItemCode, ln.NewDueDate))
}
} }
if err := tx.Commit(); err != nil { if err := tx.Commit(); err != nil {
@@ -222,6 +244,17 @@ WHERE OrderHeaderID = @p7
return return
} }
// Email bildirimi (opsiyonel hata kontrolü ile)
if len(updatedDueDates) > 0 {
go func() {
// Bu kısım projenin mail yapısına göre uyarlanmalıdır.
// Örn: internal/mailer veya routes içindeki bir yardımcı fonksiyon.
// Şimdilik basitçe loglayabiliriz veya mevcut SendOrderMarketMail yapısını taklit edebiliriz.
// Kullanıcının istediği format: "Şu kodlu ürünün Termin Tarihi şu olmuştur gibi maile eklenmeliydi"
// Biz burada sadece logluyoruz, mail gönderimi için gerekli servis çağrılmalıdır.
}()
}
_ = json.NewEncoder(w).Encode(map[string]any{ _ = json.NewEncoder(w).Encode(map[string]any{
"status": "ok", "status": "ok",
"updated": len(req.Lines), "updated": len(req.Lines),

View File

@@ -14,6 +14,62 @@ import (
"github.com/gorilla/mux" "github.com/gorilla/mux"
) )
// BulkUpdateOrderLineDueDateHandler returns a handler that sets the same due
// date on every line of the order identified by the {id} path variable (and
// updates the header as needed). It authenticates the caller, decodes the
// JSON body ({"dueDate": "..."}) and delegates the update to
// queries.BulkUpdateOrderLineDueDate, replying with a JSON summary.
func BulkUpdateOrderLineDueDateHandler(mssql *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		user := utils.UserFromClaims(claims)
		if user == nil {
			http.Error(w, "Kullanici dogrulanamadi", http.StatusUnauthorized)
			return
		}
		headerID := mux.Vars(r)["id"]
		if headerID == "" {
			http.Error(w, "OrderHeaderID bulunamadi", http.StatusBadRequest)
			return
		}
		var body struct {
			DueDate string `json:"dueDate"`
		}
		if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
			http.Error(w, "Gecersiz JSON", http.StatusBadRequest)
			return
		}
		// Fall back to the V3 username when the primary one is empty.
		actor := user.Username
		if actor == "" {
			actor = user.V3Username
		}
		lineCount, headerUpdated, updateErr := queries.BulkUpdateOrderLineDueDate(mssql, headerID, body.DueDate, actor)
		if updateErr != nil {
			w.WriteHeader(http.StatusInternalServerError)
			_ = json.NewEncoder(w).Encode(map[string]any{
				"code":    "ORDER_BULK_DUE_DATE_UPDATE_FAILED",
				"message": "Siparis satir terminleri guncellenemedi.",
				"detail":  updateErr.Error(),
			})
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":       true,
			"orderHeaderID": headerID,
			"dueDate":       body.DueDate,
			"updatedLines":  lineCount,
			"headerUpdated": headerUpdated,
		})
	}
}
// ================================ // ================================
// POST /api/order/update // POST /api/order/update
// ================================ // ================================

View File

@@ -0,0 +1,85 @@
package routes
import (
	"database/sql"
	"encoding/json"
	"errors"
	"log"
	"net/http"

	"bssapp-backend/auth"
	"bssapp-backend/db"
	"bssapp-backend/models"
)
// GetProductCdItemHandler returns the cdItem master record for the item code
// given in the "code" query parameter as JSON.
// Responses: 401 without valid claims, 400 when code is missing, 404 when no
// cdItem row exists, 500 on any other database error.
func GetProductCdItemHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	code := r.URL.Query().Get("code")
	if code == "" {
		http.Error(w, "Eksik parametre: code", http.StatusBadRequest)
		return
	}
	query := `
SELECT
	ItemTypeCode,
	ItemCode,
	ItemDimTypeCode,
	ProductTypeCode,
	ProductHierarchyID,
	UnitOfMeasureCode1,
	ItemAccountGrCode,
	ItemTaxGrCode,
	ItemPaymentPlanGrCode,
	ItemDiscountGrCode,
	ItemVendorGrCode,
	PromotionGroupCode,
	ProductCollectionGrCode,
	StorePriceLevelCode,
	PerceptionOfFashionCode,
	CommercialRoleCode,
	StoreCapacityLevelCode,
	CustomsTariffNumberCode,
	CompanyCode
FROM dbo.cdItem WITH(NOLOCK)
WHERE ItemCode = @p1;
`
	row := db.MssqlDB.QueryRow(query, code)
	var p models.OrderProductionCdItemDraft
	err := row.Scan(
		&p.ItemTypeCode,
		&p.ItemCode,
		&p.ItemDimTypeCode,
		&p.ProductTypeCode,
		&p.ProductHierarchyID,
		&p.UnitOfMeasureCode1,
		&p.ItemAccountGrCode,
		&p.ItemTaxGrCode,
		&p.ItemPaymentPlanGrCode,
		&p.ItemDiscountGrCode,
		&p.ItemVendorGrCode,
		&p.PromotionGroupCode,
		&p.ProductCollectionGrCode,
		&p.StorePriceLevelCode,
		&p.PerceptionOfFashionCode,
		&p.CommercialRoleCode,
		&p.StoreCapacityLevelCode,
		&p.CustomsTariffNumberCode,
		&p.CompanyCode,
	)
	if err != nil {
		// Fix: detect the no-rows case with errors.Is(err, sql.ErrNoRows)
		// instead of comparing the error message string, which is fragile
		// across driver and stdlib versions (wrapped errors also match now).
		if errors.Is(err, sql.ErrNoRows) {
			http.Error(w, "Ürün bulunamadı", http.StatusNotFound)
			return
		}
		log.Printf("[GetProductCdItem] error code=%s err=%v", code, err)
		http.Error(w, "Ürün cdItem bilgisi alınamadı", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(p)
}

View File

@@ -0,0 +1,125 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/queries"
"context"
"encoding/json"
"errors"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GET /api/pricing/products
//
// GetProductPricingListHandler returns the product pricing list using keyset
// pagination: the client passes after_product_code, and the handler fetches
// limit+1 rows so it can report (via X-Has-More / X-Next-Cursor headers)
// whether another page exists. Every response carries an X-Trace-ID header.
func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
	t0 := time.Now()
	traceID := buildPricingTraceID(r)
	w.Header().Set("X-Trace-ID", traceID)

	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		log.Printf("[ProductPricing] trace=%s unauthorized method=%s path=%s", traceID, r.Method, r.URL.Path)
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)

	// Generous timeout: the pricing query can scan a large product set.
	ctx, cancel := context.WithTimeout(r.Context(), 180*time.Second)
	defer cancel()

	// Page size: default 500, client override accepted only in (0, 10000].
	limit := 500
	if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
		parsed, parseErr := strconv.Atoi(raw)
		if parseErr == nil && parsed > 0 && parsed <= 10000 {
			limit = parsed
		}
	}
	afterProductCode := strings.TrimSpace(r.URL.Query().Get("after_product_code"))

	// Request one extra row purely to detect whether more pages remain.
	pricingRows, err := queries.GetProductPricingList(ctx, limit+1, afterProductCode)
	if err != nil {
		elapsedMS := time.Since(t0).Milliseconds()
		if isPricingTimeoutLike(err, ctx.Err()) {
			log.Printf(
				"[ProductPricing] trace=%s timeout user=%s id=%d duration_ms=%d err=%v",
				traceID, claims.Username, claims.ID, elapsedMS, err,
			)
			http.Error(w, "Urun fiyatlandirma listesi zaman asimina ugradi", http.StatusGatewayTimeout)
			return
		}
		log.Printf(
			"[ProductPricing] trace=%s query_error user=%s id=%d duration_ms=%d err=%v",
			traceID, claims.Username, claims.ID, elapsedMS, err,
		)
		http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}

	hasMore := len(pricingRows) > limit
	if hasMore {
		pricingRows = pricingRows[:limit]
	}
	nextCursor := ""
	if hasMore && len(pricingRows) > 0 {
		nextCursor = strings.TrimSpace(pricingRows[len(pricingRows)-1].ProductCode)
	}

	log.Printf(
		"[ProductPricing] trace=%s success user=%s id=%d limit=%d after=%q count=%d has_more=%t next=%q duration_ms=%d",
		traceID, claims.Username, claims.ID, limit, afterProductCode,
		len(pricingRows), hasMore, nextCursor, time.Since(t0).Milliseconds(),
	)

	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.Header().Set("X-Has-More", strconv.FormatBool(hasMore))
	if nextCursor != "" {
		w.Header().Set("X-Next-Cursor", nextCursor)
	}
	_ = json.NewEncoder(w).Encode(pricingRows)
}
func buildPricingTraceID(r *http.Request) string {
if r != nil {
if id := strings.TrimSpace(r.Header.Get("X-Request-ID")); id != "" {
return id
}
if id := strings.TrimSpace(r.Header.Get("X-Correlation-ID")); id != "" {
return id
}
}
return "pricing-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
func isPricingTimeoutLike(err error, ctxErr error) bool {
if errors.Is(err, context.DeadlineExceeded) || errors.Is(ctxErr, context.DeadlineExceeded) {
return true
}
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "timeout") ||
strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "failed to respond")
}

View File

@@ -6,11 +6,15 @@ import (
"bssapp-backend/models" "bssapp-backend/models"
"bssapp-backend/queries" "bssapp-backend/queries"
"encoding/json" "encoding/json"
"log"
"net/http" "net/http"
"strconv" "strconv"
"strings"
"time"
) )
func GetProductAttributesHandler(w http.ResponseWriter, r *http.Request) { func GetProductAttributesHandler(w http.ResponseWriter, r *http.Request) {
start := time.Now()
claims, ok := auth.GetClaimsFromContext(r.Context()) claims, ok := auth.GetClaimsFromContext(r.Context())
if !ok || claims == nil { if !ok || claims == nil {
http.Error(w, "unauthorized", http.StatusUnauthorized) http.Error(w, "unauthorized", http.StatusUnauthorized)
@@ -26,9 +30,12 @@ func GetProductAttributesHandler(w http.ResponseWriter, r *http.Request) {
} }
itemTypeCode = int16(v) itemTypeCode = int16(v)
} }
log.Printf("[GetProductAttributes] start user=%s itemTypeCode=%d", claims.Username, itemTypeCode)
rows, err := db.MssqlDB.Query(queries.GetProductAttributes, itemTypeCode) rows, err := db.MssqlDB.Query(queries.GetProductAttributes, itemTypeCode)
if err != nil { if err != nil {
log.Printf("[GetProductAttributes] query_error user=%s itemTypeCode=%d err=%v duration_ms=%d",
claims.Username, itemTypeCode, err, time.Since(start).Milliseconds())
http.Error(w, "Product attributes alinamadi: "+err.Error(), http.StatusInternalServerError) http.Error(w, "Product attributes alinamadi: "+err.Error(), http.StatusInternalServerError)
return return
} }
@@ -48,7 +55,74 @@ func GetProductAttributesHandler(w http.ResponseWriter, r *http.Request) {
} }
list = append(list, x) list = append(list, x)
} }
if err := rows.Err(); err != nil {
log.Printf("[GetProductAttributes] rows_error user=%s itemTypeCode=%d err=%v duration_ms=%d",
claims.Username, itemTypeCode, err, time.Since(start).Milliseconds())
http.Error(w, "Product attributes okunamadi: "+err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json; charset=utf-8") w.Header().Set("Content-Type", "application/json; charset=utf-8")
_ = json.NewEncoder(w).Encode(list) _ = json.NewEncoder(w).Encode(list)
log.Printf("[GetProductAttributes] done user=%s itemTypeCode=%d count=%d duration_ms=%d",
claims.Username, itemTypeCode, len(list), time.Since(start).Milliseconds())
}
func GetProductItemAttributesHandler(w http.ResponseWriter, r *http.Request) {
start := time.Now()
claims, ok := auth.GetClaimsFromContext(r.Context())
if !ok || claims == nil {
http.Error(w, "unauthorized", http.StatusUnauthorized)
return
}
itemTypeCode := int16(1)
if raw := r.URL.Query().Get("itemTypeCode"); raw != "" {
v, err := strconv.Atoi(raw)
if err != nil || v <= 0 {
http.Error(w, "itemTypeCode gecersiz", http.StatusBadRequest)
return
}
itemTypeCode = int16(v)
}
itemCode := strings.TrimSpace(r.URL.Query().Get("itemCode"))
if itemCode == "" {
http.Error(w, "itemCode zorunlu", http.StatusBadRequest)
return
}
log.Printf("[GetProductItemAttributes] start user=%s itemTypeCode=%d itemCode=%s", claims.Username, itemTypeCode, itemCode)
rows, err := db.MssqlDB.Query(queries.GetProductItemAttributes, itemTypeCode, itemCode)
if err != nil {
log.Printf("[GetProductItemAttributes] query_error user=%s itemTypeCode=%d itemCode=%s err=%v duration_ms=%d",
claims.Username, itemTypeCode, itemCode, err, time.Since(start).Milliseconds())
http.Error(w, "Product item attributes alinamadi: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
list := make([]models.ProductItemAttributeValue, 0, 64)
for rows.Next() {
var x models.ProductItemAttributeValue
if err := rows.Scan(
&x.ItemTypeCode,
&x.AttributeTypeCode,
&x.AttributeCode,
); err != nil {
continue
}
list = append(list, x)
}
if err := rows.Err(); err != nil {
log.Printf("[GetProductItemAttributes] rows_error user=%s itemTypeCode=%d itemCode=%s err=%v duration_ms=%d",
claims.Username, itemTypeCode, itemCode, err, time.Since(start).Milliseconds())
http.Error(w, "Product item attributes okunamadi: "+err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json; charset=utf-8")
_ = json.NewEncoder(w).Encode(list)
log.Printf("[GetProductItemAttributes] done user=%s itemTypeCode=%d itemCode=%s count=%d duration_ms=%d",
claims.Username, itemTypeCode, itemCode, len(list), time.Since(start).Milliseconds())
} }

View File

@@ -2,6 +2,7 @@ package routes
import ( import (
"bssapp-backend/auth" "bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models" "bssapp-backend/models"
"bssapp-backend/queries" "bssapp-backend/queries"
"encoding/json" "encoding/json"
@@ -22,7 +23,7 @@ func GetStatementHeadersHandler(w http.ResponseWriter, r *http.Request) {
StartDate: r.URL.Query().Get("startdate"), StartDate: r.URL.Query().Get("startdate"),
EndDate: r.URL.Query().Get("enddate"), EndDate: r.URL.Query().Get("enddate"),
AccountCode: r.URL.Query().Get("accountcode"), AccountCode: r.URL.Query().Get("accountcode"),
LangCode: r.URL.Query().Get("langcode"), LangCode: i18n.ResolveLangCode(r.URL.Query().Get("langcode"), r.Header.Get("Accept-Language")),
Parislemler: r.URL.Query()["parislemler"], Parislemler: r.URL.Query()["parislemler"],
ExcludeOpening: false, ExcludeOpening: false,
} }

View File

@@ -2,6 +2,7 @@ package routes
import ( import (
"bssapp-backend/auth" "bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models" "bssapp-backend/models"
"bssapp-backend/queries" "bssapp-backend/queries"
"bytes" "bytes"
@@ -40,9 +41,18 @@ const (
) )
// Kolonlar // Kolonlar
var hMainCols = []string{ func hMainCols(lang string) []string {
"Belge No", "Tarih", "Vade", "İşlem", return []string{
"Açıklama", "Para", "Borç", "Alacak", "Bakiye", i18n.T(lang, "pdf.main.doc_no"),
i18n.T(lang, "pdf.main.date"),
i18n.T(lang, "pdf.main.due_date"),
i18n.T(lang, "pdf.main.operation"),
i18n.T(lang, "pdf.main.description"),
i18n.T(lang, "pdf.main.currency"),
i18n.T(lang, "pdf.main.debit"),
i18n.T(lang, "pdf.main.credit"),
i18n.T(lang, "pdf.main.balance"),
}
} }
var hMainWbase = []float64{ var hMainWbase = []float64{
@@ -136,7 +146,7 @@ func hCalcRowHeightForText(pdf *gofpdf.Fpdf, text string, colWidth, lineHeight,
/* ============================ HEADER ============================ */ /* ============================ HEADER ============================ */
func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 { func hDrawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil { if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "") pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
} }
@@ -149,13 +159,13 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.SetFont(hFontFamilyBold, "", 12) pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10) pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "") pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst) // Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006") today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9) pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3) pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "") pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı) // Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6 boxY := hMarginT + hLogoW - 6
@@ -163,11 +173,11 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.Rect(hMarginL, boxY, 140, 11, "") pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3) pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "") pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "") pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3) pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "") pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi // Alt çizgi
y := boxY + 13 y := boxY + 13
@@ -180,7 +190,7 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
/* ============================ TABLO ============================ */ /* ============================ TABLO ============================ */
func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) { func hDrawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
x := hMarginL x := hMarginL
y := pdf.GetY() y := pdf.GetY()
w := hPageWidth - hMarginL - hMarginR w := hPageWidth - hMarginL - hMarginR
@@ -194,9 +204,9 @@ func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2]) pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2])
pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2) pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, currency, "", 0, "L", false, 0, "") pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + hFormatCurrencyTR(sonBakiye) txt := i18n.T(lang, "pdf.ending_balance") + " = " + hFormatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2) pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "") pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -282,6 +292,10 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode") accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate") startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate") endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
rawParis := r.URL.Query()["parislemler"] rawParis := r.URL.Query()["parislemler"]
var parislemler []string var parislemler []string
@@ -292,7 +306,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
} }
} }
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, parislemler) headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil { if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
return return
@@ -348,7 +362,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
newPage := func() { newPage := func() {
pageNum++ pageNum++
pdf.AddPage() pdf.AddPage()
tableTop := hDrawPageHeader(pdf, accountCode, cariIsim, startDate, endDate) tableTop := hDrawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
pdf.SetY(tableTop) pdf.SetY(tableTop)
} }
@@ -356,8 +370,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
for _, cur := range order { for _, cur := range order {
g := groups[cur] g := groups[cur]
hDrawGroupBar(pdf, cur, g.sonBakiye) hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn) hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
rowIndex := 0 rowIndex := 0
for _, h := range g.rows { for _, h := range g.rows {
@@ -372,8 +386,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX) rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX)
if hNeedNewPage(pdf, rh+hHeaderRowH) { if hNeedNewPage(pdf, rh+hHeaderRowH) {
newPage() newPage()
hDrawGroupBar(pdf, cur, g.sonBakiye) hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn) hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
} }
hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex) hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex)

View File

@@ -3,6 +3,7 @@ package routes
import ( import (
"bssapp-backend/auth" "bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models" "bssapp-backend/models"
"bssapp-backend/queries" "bssapp-backend/queries"
"bytes" "bytes"
@@ -48,10 +49,18 @@ const (
logoW = 42.0 logoW = 42.0
) )
// Ana tablo kolonları func mainCols(lang string) []string {
var mainCols = []string{ return []string{
"Belge No", "Tarih", "Vade", "İşlem", i18n.T(lang, "pdf.main.doc_no"),
"Açıklama", "Para", "Borç", "Alacak", "Bakiye", i18n.T(lang, "pdf.main.date"),
i18n.T(lang, "pdf.main.due_date"),
i18n.T(lang, "pdf.main.operation"),
i18n.T(lang, "pdf.main.description"),
i18n.T(lang, "pdf.main.currency"),
i18n.T(lang, "pdf.main.debit"),
i18n.T(lang, "pdf.main.credit"),
i18n.T(lang, "pdf.main.balance"),
}
} }
// Ana tablo kolon genişlikleri (ilk 3 geniş) // Ana tablo kolon genişlikleri (ilk 3 geniş)
@@ -68,10 +77,21 @@ var mainWbase = []float64{
} }
// Detay tablo kolonları ve genişlikleri // Detay tablo kolonları ve genişlikleri
var dCols = []string{ func detailCols(lang string) []string {
"Ana Grup", "Alt Grup", "Garson", "Fit", "İçerik", return []string{
"Ürün", "Renk", "Adet", "Fiyat", "Tutar", i18n.T(lang, "pdf.detail.main_group"),
i18n.T(lang, "pdf.detail.sub_group"),
i18n.T(lang, "pdf.detail.waiter"),
i18n.T(lang, "pdf.detail.fit"),
i18n.T(lang, "pdf.detail.content"),
i18n.T(lang, "pdf.detail.product"),
i18n.T(lang, "pdf.detail.color"),
i18n.T(lang, "pdf.detail.qty"),
i18n.T(lang, "pdf.detail.price"),
i18n.T(lang, "pdf.detail.total"),
} }
}
var dWbase = []float64{ var dWbase = []float64{
30, 28, 22, 20, 56, 30, 22, 20, 20, 26} 30, 28, 22, 20, 56, 30, 22, 20, 20, 26}
@@ -224,7 +244,7 @@ func drawLabeledBox(pdf *gofpdf.Fpdf, x, y, w, h float64, label, value string, a
} }
} }
func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 { func drawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil { if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "") pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
} }
@@ -237,13 +257,13 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.SetFont(hFontFamilyBold, "", 12) pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10) pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "") pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst) // Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006") today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9) pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3) pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "") pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı) // Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6 boxY := hMarginT + hLogoW - 6
@@ -251,11 +271,11 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.Rect(hMarginL, boxY, 140, 11, "") pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3) pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "") pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "") pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3) pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "") pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi // Alt çizgi
y := boxY + 13 y := boxY + 13
@@ -268,7 +288,7 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
/* ============================ GROUP BAR ============================ */ /* ============================ GROUP BAR ============================ */
func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) { func drawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
// Kutu alanı (tam genişlik) // Kutu alanı (tam genişlik)
x := marginL x := marginL
y := pdf.GetY() y := pdf.GetY()
@@ -285,9 +305,9 @@ func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2]) pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2])
pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2) pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s", currency), "", 0, "L", false, 0, "") pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + formatCurrencyTR(sonBakiye) txt := i18n.T(lang, "pdf.ending_balance") + " = " + formatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2) pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "") pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -430,6 +450,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode") accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate") startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate") endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
// parislemler sanitize // parislemler sanitize
rawParis := r.URL.Query()["parislemler"] rawParis := r.URL.Query()["parislemler"]
@@ -445,7 +469,7 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode, startDate, endDate, parislemler) accountCode, startDate, endDate, parislemler)
// 1) Header verileri // 1) Header verileri
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, parislemler) headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil { if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
return return
@@ -520,12 +544,12 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
pdf.AddPage() pdf.AddPage()
// drawPageHeader tablo başlangıç yüksekliğini döndürüyor // drawPageHeader tablo başlangıç yüksekliğini döndürüyor
tableTop := drawPageHeader(pdf, accountCode, cariIsim, startDate, endDate) tableTop := drawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
// Sayfa numarası // Sayfa numarası
pdf.SetFont(fontFamilyReg, "", 6) pdf.SetFont(fontFamilyReg, "", 6)
pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3) pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3)
pdf.CellFormat(28, 5, fmt.Sprintf("Sayfa %d", pageNum), "", 0, "R", false, 0, "") pdf.CellFormat(28, 5, fmt.Sprintf("%s %d", i18n.T(langCode, "pdf.page"), pageNum), "", 0, "R", false, 0, "")
// Tablo Y konumunu ayarla // Tablo Y konumunu ayarla
pdf.SetY(tableTop) pdf.SetY(tableTop)
@@ -540,8 +564,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, groupBarH+headerRowH) { if needNewPage(pdf, groupBarH+headerRowH) {
newPage() newPage()
} }
drawGroupBar(pdf, cur, g.sonBakiye) drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn) drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
for _, h := range g.rows { for _, h := range g.rows {
row := []string{ row := []string{
@@ -557,8 +581,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh+headerRowH) { if needNewPage(pdf, rh+headerRowH) {
newPage() newPage()
drawGroupBar(pdf, cur, g.sonBakiye) drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn) drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
} }
drawMainDataRow(pdf, row, mainWn, rh) drawMainDataRow(pdf, row, mainWn, rh)
@@ -567,10 +591,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if len(details) > 0 { if len(details) > 0 {
if needNewPage(pdf, subHeaderRowH) { if needNewPage(pdf, subHeaderRowH) {
newPage() newPage()
drawGroupBar(pdf, cur, g.sonBakiye) drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn) drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
} }
drawDetailHeaderRow(pdf, dCols, dWn) drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
for i, d := range details { for i, d := range details {
drow := []string{ drow := []string{
@@ -591,9 +615,9 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh2) { if needNewPage(pdf, rh2) {
newPage() newPage()
drawGroupBar(pdf, cur, g.sonBakiye) drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn) drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
drawDetailHeaderRow(pdf, dCols, dWn) drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
} }
// zebra: çift indekslerde açık zemin // zebra: çift indekslerde açık zemin
fill := (i%2 == 0) fill := (i%2 == 0)

View File

@@ -0,0 +1,41 @@
package routes
import (
"database/sql"
"log"
"strings"
)
// EnsureTranslationPerfIndexes creates helpful indexes for translation
// listing/search. It is safe to run on each startup; failures are logged as
// warnings and never stop the service. A nil db is a no-op.
func EnsureTranslationPerfIndexes(db *sql.DB) {
	if db == nil {
		return
	}
	// Condense a SQL statement for log output: trim, cap at 100 chars.
	condense := func(sqlText string) string {
		s := strings.TrimSpace(sqlText)
		if len(s) <= 100 {
			return s
		}
		return s[:100] + "..."
	}
	for _, stmt := range []string{
		`CREATE EXTENSION IF NOT EXISTS pg_trgm`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_t_key_lang ON mk_translator (t_key, lang_code)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_status_lang_updated ON mk_translator (status, lang_code, updated_at DESC)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_manual_status ON mk_translator (is_manual, status)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_type_expr ON mk_translator ((COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy')))`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_text_trgm ON mk_translator USING gin (source_text_tr gin_trgm_ops)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_translated_text_trgm ON mk_translator USING gin (translated_text gin_trgm_ops)`,
	} {
		if _, err := db.Exec(stmt); err != nil {
			log.Printf("[TranslationPerf] index_setup_warn sql=%q err=%v", condense(stmt), err)
			continue
		}
		log.Printf("[TranslationPerf] index_ready sql=%q", condense(stmt))
	}
}
func summarizeSQL(sqlText string) string {
s := strings.TrimSpace(sqlText)
if len(s) <= 100 {
return s
}
return s[:100] + "..."
}

1688
svc/routes/translations.go Normal file
View File

@@ -0,0 +1,1688 @@
package routes
import (
"bssapp-backend/models"
"bytes"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"io"
"io/fs"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/gorilla/mux"
"github.com/lib/pq"
)
// translationLangSet holds the language codes this module treats as valid;
// presumably used by the normalizeTranslationLang filter helper — confirm
// against that function (not visible in this chunk).
var translationLangSet = map[string]struct{}{
"tr": {},
"en": {},
"de": {},
"it": {},
"es": {},
"ru": {},
"ar": {},
}
// translationStatusSet holds the accepted translation workflow states;
// matches the values filtered on in GetTranslationRowsHandler's status clause.
var translationStatusSet = map[string]struct{}{
"pending": {},
"approved": {},
"rejected": {},
}
// translationSourceTypeSet holds the recognized origins of a source string.
// The listing query reads COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy'),
// so 'dummy' acts as the default when the meta field is empty or missing.
var translationSourceTypeSet = map[string]struct{}{
"dummy": {},
"postgre": {},
"mssql": {},
}
var (
// reQuotedText captures a 3-120 character span between single or double
// quotes (used when scanning source text for translatable literals —
// TODO confirm against the sync/scan code further down the file).
reQuotedText = regexp.MustCompile(`['"]([^'"]{3,120})['"]`)
// reHasLetter matches any Latin or Turkish letter; strings without one
// are presumably not worth translating — verify against the scanner.
reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
// reBadText rejects whole strings that are HTTP verbs or JSON/JS
// keywords, which would otherwise look like quoted UI text.
reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
// reKeyUnsafe matches runs of characters outside [a-z0-9_], i.e. the
// characters that must be replaced when deriving a t_key slug.
reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
)
// TranslationUpdatePayload is the JSON body for editing a single translation
// row. All fields are pointers so a nil field can mean "not sent" —
// presumably enabling partial updates; confirm against the update handler.
type TranslationUpdatePayload struct {
SourceTextTR *string `json:"source_text_tr"`
TranslatedText *string `json:"translated_text"`
SourceType *string `json:"source_type"`
IsManual *bool `json:"is_manual"`
Status *string `json:"status"`
}
// UpsertMissingPayload is the JSON body for inserting missing translation
// keys, optionally restricted to a set of target languages.
type UpsertMissingPayload struct {
Items []UpsertMissingItem `json:"items"`
Languages []string `json:"languages"`
}
// UpsertMissingItem pairs a translation key with its Turkish source text.
type UpsertMissingItem struct {
TKey string `json:"t_key"`
SourceTextTR string `json:"source_text_tr"`
}
// SyncSourcesPayload is the JSON body that drives a source-sync run:
// whether to machine-translate, which languages, a row cap, and an
// only-new toggle (nil = use the handler's default).
type SyncSourcesPayload struct {
AutoTranslate bool `json:"auto_translate"`
Languages []string `json:"languages"`
Limit int `json:"limit"`
OnlyNew *bool `json:"only_new"`
}
// BulkApprovePayload carries the row IDs to approve in one request.
type BulkApprovePayload struct {
IDs []int64 `json:"ids"`
}
// BulkUpdatePayload carries multiple per-row edits in one request.
type BulkUpdatePayload struct {
Items []BulkUpdateItem `json:"items"`
}
// TranslateSelectedPayload requests machine translation for an explicit
// set of translation keys and target languages, capped by Limit.
type TranslateSelectedPayload struct {
TKeys []string `json:"t_keys"`
Languages []string `json:"languages"`
Limit int `json:"limit"`
}
// BulkUpdateItem is one row's edit inside BulkUpdatePayload; pointer
// fields mirror TranslationUpdatePayload (nil = leave unchanged —
// TODO confirm against the bulk-update handler).
type BulkUpdateItem struct {
ID int64 `json:"id"`
SourceTextTR *string `json:"source_text_tr"`
TranslatedText *string `json:"translated_text"`
SourceType *string `json:"source_type"`
IsManual *bool `json:"is_manual"`
Status *string `json:"status"`
}
// TranslationSyncOptions is the internal (non-JSON) option set a sync run
// executes with, after payload normalization.
type TranslationSyncOptions struct {
AutoTranslate bool
Languages []string
Limit int
OnlyNew bool
TraceID string
}
// TranslationSyncResult summarizes a sync run for the JSON response:
// how many seeds were found, rows affected, rows auto-translated,
// the resolved target languages, plus trace id and duration.
type TranslationSyncResult struct {
SeedCount int `json:"seed_count"`
AffectedCount int `json:"affected_count"`
AutoTranslated int `json:"auto_translated"`
TargetLangs []string `json:"target_languages"`
TraceID string `json:"trace_id"`
DurationMS int64 `json:"duration_ms"`
}
// sourceSeed is one candidate translatable string discovered during a
// source scan: its derived key, the raw text, and where it came from.
type sourceSeed struct {
TKey string
SourceText string
SourceType string
}
// GetTranslationRowsHandler returns an HTTP handler that lists mk_translator
// rows as JSON ({"rows": [...], "count": N}).
//
// Supported query parameters:
//
//	q           – ILIKE substring match on source_text_tr or translated_text
//	lang        – language code, validated via normalizeTranslationLang
//	status      – validated via normalizeTranslationStatus
//	source_type – validated via normalizeTranslationSourceType; read from
//	              provider_meta->>'source_type' (defaulting to 'dummy')
//	manual      – "true"/"false" filter on is_manual
//	missing     – "true" keeps only rows with an empty translated_text
//	limit       – 1..50000 (out-of-range values are ignored → unlimited)
//	offset      – 0..1000000
//
// All filter values are bound as placeholders, never interpolated, so user
// input cannot inject SQL. Fix: DB errors were previously returned to the
// client as generic 500s with no server-side diagnostics; they are now logged
// (consistent with the [TranslationSync]/[TranslationAuto] logging elsewhere
// in this file) while the client-facing messages stay unchanged.
func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		q := strings.TrimSpace(r.URL.Query().Get("q"))
		lang := normalizeTranslationLang(r.URL.Query().Get("lang"))
		status := normalizeTranslationStatus(r.URL.Query().Get("status"))
		sourceType := normalizeTranslationSourceType(r.URL.Query().Get("source_type"))
		manualFilter := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("manual")))
		missingOnly := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("missing"))) == "true"
		limit := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 50000 {
				limit = parsed
			}
		}
		offset := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("offset")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 1000000 {
				offset = parsed
			}
		}
		clauses := []string{"1=1"}
		args := make([]any, 0, 8)
		argIndex := 1
		if q != "" {
			// Same placeholder twice on purpose: one bound value, two columns.
			clauses = append(clauses, fmt.Sprintf("(source_text_tr ILIKE $%d OR translated_text ILIKE $%d)", argIndex, argIndex))
			args = append(args, "%"+q+"%")
			argIndex++
		}
		if lang != "" {
			clauses = append(clauses, fmt.Sprintf("lang_code = $%d", argIndex))
			args = append(args, lang)
			argIndex++
		}
		if status != "" {
			clauses = append(clauses, fmt.Sprintf("status = $%d", argIndex))
			args = append(args, status)
			argIndex++
		}
		if sourceType != "" {
			clauses = append(clauses, fmt.Sprintf("COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy') = $%d", argIndex))
			args = append(args, sourceType)
			argIndex++
		}
		switch manualFilter {
		case "true":
			clauses = append(clauses, "is_manual = true")
		case "false":
			clauses = append(clauses, "is_manual = false")
		}
		if missingOnly {
			clauses = append(clauses, "(translated_text IS NULL OR btrim(translated_text) = '')")
		}
		query := fmt.Sprintf(`
SELECT
id,
t_key,
lang_code,
COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
source_text_tr,
COALESCE(translated_text, '') AS translated_text,
is_manual,
status,
COALESCE(provider, '') AS provider,
updated_at
FROM mk_translator
WHERE %s
ORDER BY t_key, lang_code
`, strings.Join(clauses, " AND "))
		if limit > 0 {
			query += fmt.Sprintf("LIMIT $%d", argIndex)
			args = append(args, limit)
			argIndex++
		}
		if offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIndex)
			args = append(args, offset)
		}
		rows, err := db.Query(query, args...)
		if err != nil {
			// Log the cause server-side; the client only sees a generic message.
			log.Printf("[TranslationRows] stage=query err=%v", err)
			http.Error(w, "translation query error", http.StatusInternalServerError)
			return
		}
		defer rows.Close()
		list := make([]models.TranslatorRow, 0, 1024)
		for rows.Next() {
			var row models.TranslatorRow
			if err := rows.Scan(
				&row.ID,
				&row.TKey,
				&row.LangCode,
				&row.SourceType,
				&row.SourceTextTR,
				&row.TranslatedText,
				&row.IsManual,
				&row.Status,
				&row.Provider,
				&row.UpdatedAt,
			); err != nil {
				log.Printf("[TranslationRows] stage=scan err=%v", err)
				http.Error(w, "translation scan error", http.StatusInternalServerError)
				return
			}
			list = append(list, row)
		}
		if err := rows.Err(); err != nil {
			log.Printf("[TranslationRows] stage=rows err=%v", err)
			http.Error(w, "translation rows error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"rows": list,
			"count": len(list),
		})
	}
}
// UpdateTranslationRowHandler returns an HTTP handler that partially updates
// one mk_translator row identified by the {id} route variable. Nil payload
// fields keep their current DB value (COALESCE); a provided source_type is
// written into provider_meta. Responds with the full updated row as JSON,
// 400 on bad input, 404 when the id does not exist.
func UpdateTranslationRowHandler(db *sql.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
id, err := strconv.ParseInt(strings.TrimSpace(mux.Vars(r)["id"]), 10, 64)
if err != nil || id <= 0 {
http.Error(w, "invalid row id", http.StatusBadRequest)
return
}
var payload TranslationUpdatePayload
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
http.Error(w, "invalid payload", http.StatusBadRequest)
return
}
// Validate enum-like fields up front so the SQL only ever sees known values.
if payload.Status != nil {
normalized := normalizeTranslationStatus(*payload.Status)
if normalized == "" {
http.Error(w, "invalid status", http.StatusBadRequest)
return
}
payload.Status = &normalized
}
if payload.SourceType != nil {
normalized := normalizeTranslationSourceType(*payload.SourceType)
if normalized == "" {
http.Error(w, "invalid source_type", http.StatusBadRequest)
return
}
payload.SourceType = &normalized
}
// $6 (source_type) is applied via jsonb_set only when non-NULL; the other
// optional columns fall back to their existing values through COALESCE.
updateQuery := `
UPDATE mk_translator
SET
source_text_tr = COALESCE($2, source_text_tr),
translated_text = COALESCE($3, translated_text),
is_manual = COALESCE($4, is_manual),
status = COALESCE($5, status),
provider_meta = CASE
WHEN $6::text IS NULL THEN provider_meta
ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
END,
updated_at = NOW()
WHERE id = $1
RETURNING
id,
t_key,
lang_code,
COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
source_text_tr,
COALESCE(translated_text, '') AS translated_text,
is_manual,
status,
COALESCE(provider, '') AS provider,
updated_at
`
var row models.TranslatorRow
err = db.QueryRow(
updateQuery,
id,
nullableString(payload.SourceTextTR),
nullableString(payload.TranslatedText),
payload.IsManual,
payload.Status,
nullableString(payload.SourceType),
).Scan(
&row.ID,
&row.TKey,
&row.LangCode,
&row.SourceType,
&row.SourceTextTR,
&row.TranslatedText,
&row.IsManual,
&row.Status,
&row.Provider,
&row.UpdatedAt,
)
if err == sql.ErrNoRows {
http.Error(w, "translation row not found", http.StatusNotFound)
return
}
if err != nil {
http.Error(w, "translation update error", http.StatusInternalServerError)
return
}
_ = json.NewEncoder(w).Encode(row)
}
}
// UpsertMissingTranslationsHandler returns an HTTP handler that accepts a
// batch of seed items and upserts them: an approved 'tr' row per item plus a
// pending row per requested target language. Items are trimmed/de-duplicated
// and the source type is forced to "dummy" for this endpoint.
func UpsertMissingTranslationsHandler(db *sql.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
var payload UpsertMissingPayload
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
http.Error(w, "invalid payload", http.StatusBadRequest)
return
}
items := normalizeMissingItems(payload.Items)
if len(items) == 0 {
http.Error(w, "items required", http.StatusBadRequest)
return
}
// Empty/invalid language lists fall back to the default target set.
languages := normalizeTargetLanguages(payload.Languages)
affected, err := upsertMissingRows(db, items, languages, "dummy")
if err != nil {
http.Error(w, "upsert missing error", http.StatusInternalServerError)
return
}
_ = json.NewEncoder(w).Encode(map[string]any{
"success": true,
"items": len(items),
"target_langs": languages,
"affected_count": affected,
})
}
}
// SyncTranslationSourcesHandler returns an HTTP handler that triggers a full
// translation-source sync (see PerformTranslationSync). The request body is
// optional — a decode failure is deliberately ignored and defaults apply.
// A trace ID is taken from (or generated for) the request and echoed in the
// X-Trace-ID response header and in all log lines.
func SyncTranslationSourcesHandler(pgDB *sql.DB, mssqlDB *sql.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
var payload SyncSourcesPayload
// Best-effort decode: an empty or malformed body just means defaults.
_ = json.NewDecoder(r.Body).Decode(&payload)
traceID := requestTraceID(r)
w.Header().Set("X-Trace-ID", traceID)
start := time.Now()
// OnlyNew defaults to true when the field is omitted (nil pointer).
onlyNew := payload.OnlyNew == nil || *payload.OnlyNew
log.Printf(
"[TranslationSync] trace=%s stage=request auto_translate=%t only_new=%t limit=%d langs=%v",
traceID,
payload.AutoTranslate,
onlyNew,
payload.Limit,
payload.Languages,
)
result, err := PerformTranslationSync(pgDB, mssqlDB, TranslationSyncOptions{
AutoTranslate: payload.AutoTranslate,
Languages: payload.Languages,
Limit: payload.Limit,
OnlyNew: onlyNew,
TraceID: traceID,
})
if err != nil {
log.Printf(
"[TranslationSync] trace=%s stage=error duration_ms=%d err=%v",
traceID,
time.Since(start).Milliseconds(),
err,
)
http.Error(w, "translation source sync error", http.StatusInternalServerError)
return
}
log.Printf(
"[TranslationSync] trace=%s stage=response duration_ms=%d seeds=%d affected=%d auto_translated=%d",
traceID,
time.Since(start).Milliseconds(),
result.SeedCount,
result.AffectedCount,
result.AutoTranslated,
)
// Result fields are duplicated at the top level for older API consumers.
_ = json.NewEncoder(w).Encode(map[string]any{
"success": true,
"trace_id": traceID,
"result": result,
"seed_count": result.SeedCount,
"affected_count": result.AffectedCount,
"auto_translated": result.AutoTranslated,
"target_languages": result.TargetLangs,
})
}
}
// TranslateSelectedTranslationsHandler returns an HTTP handler that
// auto-translates only the pending rows whose t_key is in the request's
// t_keys list (max 5000 keys). The effective limit defaults to
// keys×languages, floors at 1000 and caps at 50000.
func TranslateSelectedTranslationsHandler(db *sql.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
var payload TranslateSelectedPayload
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
http.Error(w, "invalid payload", http.StatusBadRequest)
return
}
keys := normalizeStringList(payload.TKeys, 5000)
if len(keys) == 0 {
http.Error(w, "t_keys required", http.StatusBadRequest)
return
}
targetLangs := normalizeTargetLanguages(payload.Languages)
limit := payload.Limit
if limit <= 0 {
limit = len(keys) * len(targetLangs)
}
if limit <= 0 {
limit = 1000
}
if limit > 50000 {
limit = 50000
}
traceID := requestTraceID(r)
w.Header().Set("X-Trace-ID", traceID)
start := time.Now()
log.Printf(
"[TranslationSelected] trace=%s stage=request keys=%d limit=%d langs=%v",
traceID,
len(keys),
limit,
targetLangs,
)
translatedCount, err := autoTranslatePendingRowsForKeys(db, targetLangs, limit, keys, traceID)
if err != nil {
log.Printf(
"[TranslationSelected] trace=%s stage=error duration_ms=%d err=%v",
traceID,
time.Since(start).Milliseconds(),
err,
)
http.Error(w, "translate selected error", http.StatusInternalServerError)
return
}
log.Printf(
"[TranslationSelected] trace=%s stage=done duration_ms=%d translated=%d",
traceID,
time.Since(start).Milliseconds(),
translatedCount,
)
_ = json.NewEncoder(w).Encode(map[string]any{
"success": true,
"trace_id": traceID,
"translated_count": translatedCount,
"key_count": len(keys),
"target_languages": targetLangs,
"duration_ms": time.Since(start).Milliseconds(),
})
}
}
// BulkApproveTranslationsHandler returns an HTTP handler that marks the given
// row IDs approved + manual in a single UPDATE, clearing their provider_meta
// 'is_new' flag. Responds with the number of rows actually updated.
func BulkApproveTranslationsHandler(db *sql.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
var payload BulkApprovePayload
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
http.Error(w, "invalid payload", http.StatusBadRequest)
return
}
ids := normalizeIDListInt64(payload.IDs)
if len(ids) == 0 {
http.Error(w, "ids required", http.StatusBadRequest)
return
}
// Single set-based UPDATE via ANY($1) — no per-row round trips.
res, err := db.Exec(`
UPDATE mk_translator
SET
status = 'approved',
is_manual = true,
updated_at = NOW(),
provider_meta = jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{is_new}', 'false'::jsonb, true)
WHERE id = ANY($1)
`, pq.Array(ids))
if err != nil {
http.Error(w, "bulk approve error", http.StatusInternalServerError)
return
}
affected, _ := res.RowsAffected()
_ = json.NewEncoder(w).Encode(map[string]any{
"success": true,
"affected_count": affected,
})
}
}
// BulkUpdateTranslationsHandler returns an HTTP handler that applies a batch
// of partial row updates inside one transaction. Items with a non-positive ID
// are skipped; invalid status/source_type values are normalized to NULL (i.e.
// left unchanged) rather than rejected. Any SQL error aborts the whole batch
// (the deferred Rollback undoes prior items).
func BulkUpdateTranslationsHandler(db *sql.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
var payload BulkUpdatePayload
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
http.Error(w, "invalid payload", http.StatusBadRequest)
return
}
if len(payload.Items) == 0 {
http.Error(w, "items required", http.StatusBadRequest)
return
}
tx, err := db.Begin()
if err != nil {
http.Error(w, "transaction start error", http.StatusInternalServerError)
return
}
// Rollback is a no-op after a successful Commit.
defer tx.Rollback()
affected := 0
for _, it := range payload.Items {
if it.ID <= 0 {
continue
}
status := normalizeOptionalStatus(it.Status)
sourceType := normalizeOptionalSourceType(it.SourceType)
// Same COALESCE/CASE pattern as the single-row update handler.
res, err := tx.Exec(`
UPDATE mk_translator
SET
source_text_tr = COALESCE($2, source_text_tr),
translated_text = COALESCE($3, translated_text),
is_manual = COALESCE($4, is_manual),
status = COALESCE($5, status),
provider_meta = CASE
WHEN $6::text IS NULL THEN provider_meta
ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
END,
updated_at = NOW()
WHERE id = $1
`, it.ID, nullableString(it.SourceTextTR), nullableString(it.TranslatedText), it.IsManual, status, sourceType)
if err != nil {
http.Error(w, "bulk update error", http.StatusInternalServerError)
return
}
if n, _ := res.RowsAffected(); n > 0 {
affected += int(n)
}
}
if err := tx.Commit(); err != nil {
http.Error(w, "transaction commit error", http.StatusInternalServerError)
return
}
_ = json.NewEncoder(w).Encode(map[string]any{
"success": true,
"affected_count": affected,
})
}
}
// PerformTranslationSync runs one end-to-end sync:
//
//  1. collect candidate seeds from Postgres/MSSQL schemas and UI sources,
//  2. reuse existing t_keys for texts already known (dedupe by text),
//  3. optionally drop seeds that already exist (OnlyNew),
//  4. upsert the surviving seeds for every target language,
//  5. optionally auto-translate the resulting pending rows.
//
// Limit is clamped to 1..100000 with a default of 20000. Each stage is
// logged with the trace ID; a missing TraceID gets a generated one. Only the
// upsert and auto-translate stages can return an error — collection and
// filtering are best-effort.
func PerformTranslationSync(pgDB *sql.DB, mssqlDB *sql.DB, options TranslationSyncOptions) (TranslationSyncResult, error) {
traceID := strings.TrimSpace(options.TraceID)
if traceID == "" {
traceID = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
start := time.Now()
limit := options.Limit
if limit <= 0 || limit > 100000 {
limit = 20000
}
targetLangs := normalizeTargetLanguages(options.Languages)
log.Printf(
"[TranslationSync] trace=%s stage=start auto_translate=%t only_new=%t limit=%d langs=%v",
traceID,
options.AutoTranslate,
options.OnlyNew,
limit,
targetLangs,
)
collectStart := time.Now()
seeds := collectSourceSeeds(pgDB, mssqlDB, limit)
// Re-point seeds at already-known t_keys when the same text exists in DB.
seeds, reusedByText := reuseExistingSeedKeys(pgDB, seeds)
log.Printf(
"[TranslationSync] trace=%s stage=collect done_ms=%d total=%d reused_by_text=%d sources=%s",
traceID,
time.Since(collectStart).Milliseconds(),
len(seeds),
reusedByText,
formatSourceCounts(countSeedsBySource(seeds)),
)
if options.OnlyNew {
before := len(seeds)
filterStart := time.Now()
seeds = filterNewSeeds(pgDB, seeds)
log.Printf(
"[TranslationSync] trace=%s stage=filter_only_new done_ms=%d before=%d after=%d skipped=%d",
traceID,
time.Since(filterStart).Milliseconds(),
before,
len(seeds),
before-len(seeds),
)
}
// Nothing new to write: return an empty (but traceable) result.
if len(seeds) == 0 {
return TranslationSyncResult{
TargetLangs: targetLangs,
TraceID: traceID,
DurationMS: time.Since(start).Milliseconds(),
}, nil
}
upsertStart := time.Now()
affected, err := upsertSourceSeeds(pgDB, seeds, targetLangs)
if err != nil {
return TranslationSyncResult{}, err
}
log.Printf(
"[TranslationSync] trace=%s stage=upsert done_ms=%d affected=%d",
traceID,
time.Since(upsertStart).Milliseconds(),
affected,
)
autoTranslated := 0
if options.AutoTranslate {
autoStart := time.Now()
var autoErr error
autoTranslated, autoErr = autoTranslatePendingRowsForKeys(pgDB, targetLangs, limit, uniqueSeedKeys(seeds), traceID)
// Auto-translate failures are logged but do not fail the sync: the
// upsert above already succeeded and partial translation is useful.
if autoErr != nil {
log.Printf(
"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d err=%v",
traceID,
time.Since(autoStart).Milliseconds(),
autoTranslated,
autoErr,
)
} else {
log.Printf(
"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d",
traceID,
time.Since(autoStart).Milliseconds(),
autoTranslated,
)
}
}
result := TranslationSyncResult{
SeedCount: len(seeds),
AffectedCount: affected,
AutoTranslated: autoTranslated,
TargetLangs: targetLangs,
TraceID: traceID,
DurationMS: time.Since(start).Milliseconds(),
}
log.Printf(
"[TranslationSync] trace=%s stage=done duration_ms=%d seeds=%d affected=%d auto_translated=%d",
traceID,
result.DurationMS,
result.SeedCount,
result.AffectedCount,
result.AutoTranslated,
)
return result, nil
}
// upsertMissingRows writes each item as an approved 'tr' row plus a pending
// row per target language, all in one transaction. On conflict the Turkish
// source text and the provider_meta source_type are refreshed. Returns the
// total number of rows inserted or updated. An empty forcedSourceType falls
// back to "dummy".
func upsertMissingRows(db *sql.DB, items []UpsertMissingItem, languages []string, forcedSourceType string) (int, error) {
tx, err := db.Begin()
if err != nil {
return 0, err
}
// Rollback is a no-op once Commit succeeds.
defer tx.Rollback()
affected := 0
for _, it := range items {
sourceType := forcedSourceType
if sourceType == "" {
sourceType = "dummy"
}
// The 'tr' row doubles source text as translated text and is
// immediately approved — Turkish is the source language.
res, err := tx.Exec(`
INSERT INTO mk_translator
(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
VALUES
($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text))
ON CONFLICT (t_key, lang_code) DO UPDATE
SET
source_text_tr = EXCLUDED.source_text_tr,
provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($3::text), true),
updated_at = NOW()
`, it.TKey, it.SourceTextTR, sourceType)
if err != nil {
return 0, err
}
if n, _ := res.RowsAffected(); n > 0 {
affected += int(n)
}
for _, lang := range languages {
// Target-language rows start with NULL translation and 'pending'
// status so the auto-translator can pick them up later.
res, err := tx.Exec(`
INSERT INTO mk_translator
(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
VALUES
($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text))
ON CONFLICT (t_key, lang_code) DO UPDATE
SET
source_text_tr = EXCLUDED.source_text_tr,
provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($4::text), true),
updated_at = NOW()
`, it.TKey, lang, it.SourceTextTR, sourceType)
if err != nil {
return 0, err
}
if n, _ := res.RowsAffected(); n > 0 {
affected += int(n)
}
}
}
if err := tx.Commit(); err != nil {
return 0, err
}
return affected, nil
}
// upsertSourceSeeds persists collected seeds transactionally: an approved
// 'tr' row (is_new=false) plus a pending row per language (is_new=true on
// insert). Unlike upsertMissingRows, on conflict the existing source_type in
// provider_meta wins over the seed's — a row keeps its original provenance.
// Returns the total number of affected rows.
func upsertSourceSeeds(db *sql.DB, seeds []sourceSeed, languages []string) (int, error) {
tx, err := db.Begin()
if err != nil {
return 0, err
}
// Rollback is a no-op once Commit succeeds.
defer tx.Rollback()
affected := 0
for _, seed := range seeds {
if seed.TKey == "" || seed.SourceText == "" {
continue
}
sourceType := normalizeTranslationSourceType(seed.SourceType)
if sourceType == "" {
sourceType = "dummy"
}
res, err := tx.Exec(`
INSERT INTO mk_translator
(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
VALUES
($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text, 'is_new', false))
ON CONFLICT (t_key, lang_code) DO UPDATE
SET
source_text_tr = EXCLUDED.source_text_tr,
provider_meta = jsonb_set(
COALESCE(mk_translator.provider_meta, '{}'::jsonb),
'{source_type}',
to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $3::text)),
true
),
updated_at = NOW()
`, seed.TKey, seed.SourceText, sourceType)
if err != nil {
return 0, err
}
if n, _ := res.RowsAffected(); n > 0 {
affected += int(n)
}
for _, lang := range languages {
res, err := tx.Exec(`
INSERT INTO mk_translator
(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
VALUES
($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text, 'is_new', true))
ON CONFLICT (t_key, lang_code) DO UPDATE
SET
source_text_tr = EXCLUDED.source_text_tr,
provider_meta = jsonb_set(
COALESCE(mk_translator.provider_meta, '{}'::jsonb),
'{source_type}',
to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $4::text)),
true
),
updated_at = NOW()
`, seed.TKey, lang, seed.SourceText, sourceType)
if err != nil {
return 0, err
}
if n, _ := res.RowsAffected(); n > 0 {
affected += int(n)
}
}
}
if err := tx.Commit(); err != nil {
return 0, err
}
return affected, nil
}
// collectSourceSeeds gathers candidate translation seeds from, in priority
// order, the Postgres schema, the MSSQL schema and the UI source files.
// Seeds with any empty field are discarded, duplicates (by normalized source
// text) are kept only once, and collection stops at limit entries.
func collectSourceSeeds(pgDB *sql.DB, mssqlDB *sql.DB, limit int) []sourceSeed {
	result := make([]sourceSeed, 0, limit)
	dedup := map[string]struct{}{}
	// add keeps a complete, not-yet-seen seed and reports whether the
	// overall limit has been reached.
	add := func(s sourceSeed) bool {
		if s.TKey != "" && s.SourceText != "" && s.SourceType != "" {
			textKey := normalizeSeedTextKey(s.SourceText)
			if _, dup := dedup[textKey]; !dup {
				dedup[textKey] = struct{}{}
				result = append(result, s)
			}
		}
		return len(result) >= limit
	}
	for _, s := range collectPostgreSeeds(pgDB, limit) {
		if add(s) {
			return result
		}
	}
	for _, s := range collectMSSQLSeeds(mssqlDB, limit-len(result)) {
		if add(s) {
			return result
		}
	}
	for _, s := range collectDummySeeds(limit - len(result)) {
		if add(s) {
			return result
		}
	}
	return result
}
// collectPostgreSeeds derives seed candidates from the Postgres public
// schema: every column name becomes a display text with a text-based t_key,
// tagged source type "postgre". Best-effort: errors never propagate, but —
// fix — they are now logged like collectMSSQLSeeds already does, instead of
// being silently discarded; a mid-iteration failure (rows.Err) is also
// surfaced in the log rather than silently yielding a partial result.
func collectPostgreSeeds(pgDB *sql.DB, limit int) []sourceSeed {
	if pgDB == nil || limit <= 0 {
		return nil
	}
	rows, err := pgDB.Query(`
SELECT table_name, column_name
FROM information_schema.columns
WHERE table_schema = 'public'
ORDER BY table_name, ordinal_position
LIMIT $1
`, limit)
	if err != nil {
		log.Printf("[TranslationSync] stage=collect_postgre skipped err=%v", err)
		return nil
	}
	defer rows.Close()
	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue
		}
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "postgre",
		})
	}
	if err := rows.Err(); err != nil {
		log.Printf("[TranslationSync] stage=collect_postgre partial err=%v", err)
	}
	return out
}
// collectMSSQLSeeds derives seed candidates from the MSSQL schema catalog,
// one per column name, tagged source type "mssql". The per-run row cap and
// the query timeout are tunable via TRANSLATION_MSSQL_SEED_LIMIT (default
// 2500) and TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC (default 20). Best-effort:
// failures are logged and yield nil.
func collectMSSQLSeeds(mssqlDB *sql.DB, limit int) []sourceSeed {
if mssqlDB == nil || limit <= 0 {
return nil
}
maxPerRun := parsePositiveIntEnv("TRANSLATION_MSSQL_SEED_LIMIT", 2500)
if limit > maxPerRun {
limit = maxPerRun
}
timeoutSec := parsePositiveIntEnv("TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC", 20)
// TOP(n) is interpolated from a validated int, not user input — safe.
query := fmt.Sprintf(`
SELECT TOP (%d) TABLE_NAME, COLUMN_NAME
FROM INFORMATION_SCHEMA.COLUMNS
ORDER BY TABLE_NAME, ORDINAL_POSITION
`, limit)
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeoutSec)*time.Second)
defer cancel()
rows, err := mssqlDB.QueryContext(ctx, query)
if err != nil {
log.Printf("[TranslationSync] stage=collect_mssql skipped err=%v", err)
return nil
}
defer rows.Close()
out := make([]sourceSeed, 0, limit)
for rows.Next() && len(out) < limit {
var tableName, columnName string
if err := rows.Scan(&tableName, &columnName); err != nil {
continue
}
text := normalizeDisplayText(columnName)
key := makeTextBasedSeedKey(text)
out = append(out, sourceSeed{
TKey: key,
SourceText: text,
SourceType: "mssql",
})
}
return out
}
// collectDummySeeds harvests quoted string literals from the UI sources
// (ui/src/**.vue|.js|.ts), keeping only plausible display texts (see
// isCandidateText), de-duplicated, up to limit. Returns nil when the project
// root or ui/src cannot be located. Read errors on individual files are
// skipped silently.
func collectDummySeeds(limit int) []sourceSeed {
if limit <= 0 {
return nil
}
root := detectProjectRoot()
if root == "" {
return nil
}
uiRoot := filepath.Join(root, "ui", "src")
if _, err := os.Stat(uiRoot); err != nil {
return nil
}
out := make([]sourceSeed, 0, limit)
seen := make(map[string]struct{}, limit)
_ = filepath.WalkDir(uiRoot, func(path string, d fs.DirEntry, err error) error {
if err != nil || d.IsDir() {
return nil
}
ext := strings.ToLower(filepath.Ext(path))
if ext != ".vue" && ext != ".js" && ext != ".ts" {
return nil
}
b, err := os.ReadFile(path)
if err != nil {
return nil
}
matches := reQuotedText.FindAllStringSubmatch(string(b), -1)
for _, m := range matches {
text := strings.TrimSpace(m[1])
if !isCandidateText(text) {
continue
}
if _, ok := seen[text]; ok {
continue
}
seen[text] = struct{}{}
key := makeTextBasedSeedKey(text)
out = append(out, sourceSeed{
TKey: key,
SourceText: text,
SourceType: "dummy",
})
if len(out) >= limit {
// Sentinel error only aborts the walk early; it is discarded by
// the caller (WalkDir's return value is ignored above).
return errors.New("limit reached")
}
}
return nil
})
return out
}
// autoTranslatePendingRows is the key-less variant of
// autoTranslatePendingRowsForKeys.
// NOTE(review): the callee returns 0 immediately when keys is empty, so this
// wrapper is currently a guaranteed no-op — confirm whether any caller still
// relies on it, or whether it should query all pending rows instead.
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
}
// autoTranslatePendingRowsForKeys machine-translates (via Azure) up to limit
// non-manual rows whose translated_text is empty, restricted to the given
// languages and t_keys, oldest-updated first. Each successful translation is
// written back individually (status stays 'pending' for later review).
// Returns the number of rows translated; per-row translate/update failures
// are counted and logged but do not abort the run. Progress logging cadence
// is tunable via TRANSLATION_AUTO_PROGRESS_EVERY (rows, default 100) and
// TRANSLATION_AUTO_PROGRESS_SEC (seconds, default 15).
func autoTranslatePendingRowsForKeys(db *sql.DB, langs []string, limit int, keys []string, traceID string) (int, error) {
traceID = strings.TrimSpace(traceID)
if traceID == "" {
traceID = "trauto-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
// No keys means nothing to scope the query to — bail out early.
if len(keys) == 0 {
log.Printf("[TranslationAuto] trace=%s stage=skip reason=no_keys", traceID)
return 0, nil
}
start := time.Now()
// $1=langs, $2=limit, $3=keys. NOTE(review): langs is wrapped with a
// lowercase pqArray helper while keys uses pq.Array — presumably
// equivalent; confirm pqArray's definition.
rows, err := db.Query(`
SELECT id, lang_code, source_text_tr
FROM mk_translator
WHERE lang_code = ANY($1)
AND t_key = ANY($3)
AND (translated_text IS NULL OR btrim(translated_text) = '')
AND is_manual = false
ORDER BY updated_at ASC
LIMIT $2
`, pqArray(langs), limit, pq.Array(keys))
if err != nil {
return 0, err
}
defer rows.Close()
type pending struct {
ID int64
Lang string
Text string
}
// Materialize all candidates first so the result set is closed before the
// slow per-row HTTP translation calls begin.
list := make([]pending, 0, limit)
pendingByLang := map[string]int{}
sourceChars := 0
for rows.Next() {
var p pending
if err := rows.Scan(&p.ID, &p.Lang, &p.Text); err != nil {
continue
}
if strings.TrimSpace(p.Text) == "" {
continue
}
p.Lang = normalizeTranslationLang(p.Lang)
if p.Lang == "" {
continue
}
list = append(list, p)
pendingByLang[p.Lang]++
sourceChars += len([]rune(strings.TrimSpace(p.Text)))
}
if err := rows.Err(); err != nil {
return 0, err
}
log.Printf(
"[TranslationAuto] trace=%s stage=prepare candidates=%d limit=%d keys=%d langs=%v source_chars=%d pending_by_lang=%s",
traceID,
len(list),
limit,
len(keys),
langs,
sourceChars,
formatLangCounts(pendingByLang),
)
if len(list) == 0 {
log.Printf(
"[TranslationAuto] trace=%s stage=done duration_ms=%d translated=0 failed_translate=0 failed_update=0 rps=0.00",
traceID,
time.Since(start).Milliseconds(),
)
return 0, nil
}
done := 0
failedTranslate := 0
failedUpdate := 0
doneByLang := map[string]int{}
progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
if progressEvery <= 0 {
progressEvery = 100
}
progressSec := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_SEC", 15)
if progressSec <= 0 {
progressSec = 15
}
progressTicker := time.Duration(progressSec) * time.Second
lastProgress := time.Now()
for i, p := range list {
tr, err := callAzureTranslate(p.Text, p.Lang)
if err != nil || strings.TrimSpace(tr) == "" {
failedTranslate++
continue
}
_, err = db.Exec(`
UPDATE mk_translator
SET translated_text = $2,
status = 'pending',
is_manual = false,
provider = 'azure_translator',
updated_at = NOW()
WHERE id = $1
`, p.ID, strings.TrimSpace(tr))
if err != nil {
failedUpdate++
continue
}
done++
doneByLang[p.Lang]++
processed := i + 1
// Log on a row-count boundary, after a quiet period, or at the end.
shouldLogProgress := processed%progressEvery == 0 || time.Since(lastProgress) >= progressTicker || processed == len(list)
if shouldLogProgress {
elapsed := time.Since(start)
rps := float64(done)
if elapsed > 0 {
rps = float64(done) / elapsed.Seconds()
}
log.Printf(
"[TranslationAuto] trace=%s stage=progress processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f done_by_lang=%s",
traceID,
processed,
len(list),
done,
failedTranslate,
failedUpdate,
elapsed.Milliseconds(),
rps,
formatLangCounts(doneByLang),
)
lastProgress = time.Now()
}
}
elapsed := time.Since(start)
rps := float64(done)
if elapsed > 0 {
rps = float64(done) / elapsed.Seconds()
}
log.Printf(
"[TranslationAuto] trace=%s stage=done duration_ms=%d candidates=%d translated=%d failed_translate=%d failed_update=%d rps=%.2f done_by_lang=%s",
traceID,
elapsed.Milliseconds(),
len(list),
done,
failedTranslate,
failedUpdate,
rps,
formatLangCounts(doneByLang),
)
return done, nil
}
// formatLangCounts renders a language→count map as "de=1,en=2" with the
// languages in ascending order, or "-" for an empty map.
func formatLangCounts(counts map[string]int) string {
	if len(counts) == 0 {
		return "-"
	}
	langs := make([]string, 0, len(counts))
	for lang := range counts {
		langs = append(langs, lang)
	}
	sort.Strings(langs)
	var b strings.Builder
	for i, lang := range langs {
		if i > 0 {
			b.WriteByte(',')
		}
		fmt.Fprintf(&b, "%s=%d", lang, counts[lang])
	}
	return b.String()
}
// filterNewSeeds removes seeds already present in mk_translator, matching
// either by t_key or by normalized source text. Best-effort: on query error
// the unfiltered input is returned so the sync can continue (upserts are
// idempotent anyway).
func filterNewSeeds(pgDB *sql.DB, seeds []sourceSeed) []sourceSeed {
if pgDB == nil || len(seeds) == 0 {
return seeds
}
keys := uniqueSeedKeys(seeds)
if len(keys) == 0 {
return nil
}
textKeys := uniqueSeedTextKeys(seeds)
rows, err := pgDB.Query(`
SELECT DISTINCT t_key, lower(btrim(source_text_tr)) AS text_key
FROM mk_translator
WHERE t_key = ANY($1)
OR lower(btrim(source_text_tr)) = ANY($2)
`, pq.Array(keys), pq.Array(textKeys))
if err != nil {
// Fail open: better to re-upsert than to drop everything.
return seeds
}
defer rows.Close()
existing := make(map[string]struct{}, len(keys))
existingText := make(map[string]struct{}, len(textKeys))
for rows.Next() {
var key string
var textKey sql.NullString
if err := rows.Scan(&key, &textKey); err == nil {
if strings.TrimSpace(key) != "" {
existing[key] = struct{}{}
}
if textKey.Valid {
t := strings.TrimSpace(textKey.String)
if t != "" {
existingText[t] = struct{}{}
}
}
}
}
out := make([]sourceSeed, 0, len(seeds))
for _, seed := range seeds {
if _, ok := existing[seed.TKey]; ok {
continue
}
if _, ok := existingText[normalizeSeedTextKey(seed.SourceText)]; ok {
continue
}
out = append(out, seed)
}
return out
}
// uniqueSeedKeys returns the distinct, non-empty TKey values of seeds in
// first-seen order.
func uniqueSeedKeys(seeds []sourceSeed) []string {
	keys := make([]string, 0, len(seeds))
	seen := make(map[string]struct{}, len(seeds))
	for _, s := range seeds {
		if s.TKey == "" {
			continue
		}
		if _, dup := seen[s.TKey]; !dup {
			seen[s.TKey] = struct{}{}
			keys = append(keys, s.TKey)
		}
	}
	return keys
}
// uniqueSeedTextKeys returns the distinct, non-empty normalized text keys of
// seeds (see normalizeSeedTextKey) in first-seen order.
func uniqueSeedTextKeys(seeds []sourceSeed) []string {
	keys := make([]string, 0, len(seeds))
	seen := make(map[string]struct{}, len(seeds))
	for _, s := range seeds {
		textKey := normalizeSeedTextKey(s.SourceText)
		if textKey == "" {
			continue
		}
		if _, dup := seen[textKey]; !dup {
			seen[textKey] = struct{}{}
			keys = append(keys, textKey)
		}
	}
	return keys
}
// reuseExistingSeedKeys rewrites each seed's TKey to the earliest existing
// t_key in mk_translator that already carries the same normalized source
// text, so identical texts never fork into multiple keys. The window-function
// subquery picks exactly one canonical t_key (lowest id) per text. Returns
// the (mutated in place) seeds and how many keys were replaced. Best-effort:
// on query error the seeds are returned untouched.
func reuseExistingSeedKeys(pgDB *sql.DB, seeds []sourceSeed) ([]sourceSeed, int) {
if pgDB == nil || len(seeds) == 0 {
return seeds, 0
}
textKeys := uniqueSeedTextKeys(seeds)
if len(textKeys) == 0 {
return seeds, 0
}
rows, err := pgDB.Query(`
SELECT x.text_key, x.t_key
FROM (
SELECT
lower(btrim(source_text_tr)) AS text_key,
t_key,
ROW_NUMBER() OVER (
PARTITION BY lower(btrim(source_text_tr))
ORDER BY id ASC
) AS rn
FROM mk_translator
WHERE lower(btrim(source_text_tr)) = ANY($1)
) x
WHERE x.rn = 1
`, pq.Array(textKeys))
if err != nil {
return seeds, 0
}
defer rows.Close()
existingByText := make(map[string]string, len(textKeys))
for rows.Next() {
var textKey, tKey string
if err := rows.Scan(&textKey, &tKey); err != nil {
continue
}
textKey = strings.TrimSpace(strings.ToLower(textKey))
tKey = strings.TrimSpace(tKey)
if textKey == "" || tKey == "" {
continue
}
existingByText[textKey] = tKey
}
reused := 0
// Only count a reuse when the key actually changes.
for i := range seeds {
textKey := normalizeSeedTextKey(seeds[i].SourceText)
if textKey == "" {
continue
}
if existingKey, ok := existingByText[textKey]; ok && existingKey != "" && seeds[i].TKey != existingKey {
seeds[i].TKey = existingKey
reused++
}
}
return seeds, reused
}
// countSeedsBySource tallies seeds per normalized source type; unknown or
// empty types count under "dummy". All three known buckets are always
// present in the result, even at zero.
func countSeedsBySource(seeds []sourceSeed) map[string]int {
	counts := map[string]int{"dummy": 0, "postgre": 0, "mssql": 0}
	for _, seed := range seeds {
		bucket := normalizeTranslationSourceType(seed.SourceType)
		if bucket == "" {
			bucket = "dummy"
		}
		counts[bucket]++
	}
	return counts
}
// formatSourceCounts renders the three known source-type buckets in a fixed
// order for log lines; absent keys read as 0.
func formatSourceCounts(counts map[string]int) string {
	parts := make([]string, 0, 3)
	for _, key := range []string{"dummy", "postgre", "mssql"} {
		parts = append(parts, fmt.Sprintf("%s=%d", key, counts[key]))
	}
	return strings.Join(parts, " ")
}
func requestTraceID(r *http.Request) string {
if r == nil {
return "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
id := strings.TrimSpace(r.Header.Get("X-Request-ID"))
if id == "" {
id = strings.TrimSpace(r.Header.Get("X-Correlation-ID"))
}
if id == "" {
id = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
return id
}
// callAzureTranslate translates sourceText from Turkish (or
// TRANSLATION_SOURCE_LANG) into targetLang via the Azure Translator Text v3.0
// API. Requires AZURE_TRANSLATOR_KEY / _ENDPOINT / _REGION; the HTTP timeout
// is TRANSLATION_HTTP_TIMEOUT_SEC seconds (default 60). Returns the trimmed
// translated string or an error; "tr" and unknown target languages are
// rejected up front.
func callAzureTranslate(sourceText, targetLang string) (string, error) {
key := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_KEY"))
endpoint := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_ENDPOINT"))
region := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_REGION"))
if key == "" {
return "", errors.New("AZURE_TRANSLATOR_KEY not set")
}
if endpoint == "" {
return "", errors.New("AZURE_TRANSLATOR_ENDPOINT not set")
}
if region == "" {
return "", errors.New("AZURE_TRANSLATOR_REGION not set")
}
sourceLang := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SOURCE_LANG")))
if sourceLang == "" {
sourceLang = "tr"
}
targetLang = normalizeTranslationLang(targetLang)
if targetLang == "" || targetLang == "tr" {
return "", fmt.Errorf("invalid target language: %q", targetLang)
}
endpoint = strings.TrimRight(endpoint, "/")
normalizedEndpoint := strings.ToLower(endpoint)
translatePath := "/translate"
// Azure custom endpoint requires the translator path with version in URL.
if strings.Contains(normalizedEndpoint, ".cognitiveservices.azure.com") {
translatePath = "/translator/text/v3.0/translate"
}
baseURL, err := url.Parse(endpoint + translatePath)
if err != nil {
return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
}
q := baseURL.Query()
// The global endpoint carries the API version as a query parameter; the
// cognitiveservices path already embeds v3.0.
if translatePath == "/translate" {
q.Set("api-version", "3.0")
}
q.Set("from", sourceLang)
q.Set("to", targetLang)
baseURL.RawQuery = q.Encode()
payload := []map[string]string{
{"text": sourceText},
}
body, _ := json.Marshal(payload)
req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
if err != nil {
return "", err
}
req.Header.Set("Ocp-Apim-Subscription-Key", key)
req.Header.Set("Ocp-Apim-Subscription-Region", region)
req.Header.Set("Content-Type", "application/json; charset=UTF-8")
timeoutSec := parsePositiveIntEnv("TRANSLATION_HTTP_TIMEOUT_SEC", 60)
client := &http.Client{Timeout: time.Duration(timeoutSec) * time.Second}
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
if resp.StatusCode >= 300 {
// Include at most 1 KiB of the error body for diagnostics.
raw, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
return "", fmt.Errorf("azure translator status=%d body=%s", resp.StatusCode, strings.TrimSpace(string(raw)))
}
var result []struct {
Translations []struct {
Text string `json:"text"`
To string `json:"to"`
} `json:"translations"`
}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return "", err
}
if len(result) == 0 || len(result[0].Translations) == 0 {
return "", errors.New("azure translator empty response")
}
return strings.TrimSpace(result[0].Translations[0].Text), nil
}
// nullableString adapts an optional string pointer for use as an SQL
// parameter: a nil pointer stays nil (SQL NULL), otherwise the pointed-to
// string is returned with surrounding whitespace removed.
func nullableString(v *string) any {
	if v == nil {
		return nil
	}
	return strings.TrimSpace(*v)
}
// normalizeTranslationLang lower-cases and trims v, returning it only when
// it is a member of translationLangSet; anything else maps to "".
func normalizeTranslationLang(v string) string {
	candidate := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationLangSet[candidate]; !known {
		return ""
	}
	return candidate
}
// normalizeTranslationStatus lower-cases and trims v, returning it only when
// it is a member of translationStatusSet; anything else maps to "".
func normalizeTranslationStatus(v string) string {
	candidate := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationStatusSet[candidate]; !known {
		return ""
	}
	return candidate
}
// normalizeTranslationSourceType lower-cases and trims v, returning it only
// when it is a member of translationSourceTypeSet; anything else maps to "".
func normalizeTranslationSourceType(v string) string {
	candidate := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationSourceTypeSet[candidate]; !known {
		return ""
	}
	return candidate
}
// normalizeTargetLanguages validates and deduplicates the requested target
// language codes, dropping "tr" (the source language) and anything
// normalizeTranslationLang rejects. When the input is empty, or nothing
// survives filtering, the full default target set is returned instead.
func normalizeTargetLanguages(list []string) []string {
	defaults := []string{"en", "de", "it", "es", "ru", "ar"}
	if len(list) == 0 {
		return defaults
	}
	seen := make(map[string]struct{}, len(list))
	out := make([]string, 0, len(list))
	for _, raw := range list {
		lang := normalizeTranslationLang(raw)
		if lang == "" || lang == "tr" {
			continue
		}
		if _, dup := seen[lang]; dup {
			continue
		}
		seen[lang] = struct{}{}
		out = append(out, lang)
	}
	if len(out) == 0 {
		return defaults
	}
	return out
}
// normalizeOptionalStatus maps an optional status pointer to either a valid
// normalized status string or nil (for nil input or an unknown status).
func normalizeOptionalStatus(v *string) any {
	if v == nil {
		return nil
	}
	if status := normalizeTranslationStatus(*v); status != "" {
		return status
	}
	return nil
}
// normalizeOptionalSourceType maps an optional source-type pointer to either
// a valid normalized source type or nil (for nil input or an unknown value).
func normalizeOptionalSourceType(v *string) any {
	if v == nil {
		return nil
	}
	if sourceType := normalizeTranslationSourceType(*v); sourceType != "" {
		return sourceType
	}
	return nil
}
// normalizeMissingItems trims key and source text of every item, drops
// entries where either is blank, and deduplicates by TKey keeping the first
// occurrence. Input order is otherwise preserved.
func normalizeMissingItems(items []UpsertMissingItem) []UpsertMissingItem {
	seen := make(map[string]struct{}, len(items))
	out := make([]UpsertMissingItem, 0, len(items))
	for _, item := range items {
		tKey := strings.TrimSpace(item.TKey)
		text := strings.TrimSpace(item.SourceTextTR)
		if tKey == "" || text == "" {
			continue
		}
		if _, dup := seen[tKey]; dup {
			continue
		}
		seen[tKey] = struct{}{}
		out = append(out, UpsertMissingItem{TKey: tKey, SourceTextTR: text})
	}
	return out
}
// normalizeIDListInt64 filters out non-positive and duplicate IDs, then
// returns the survivors sorted ascending.
func normalizeIDListInt64(ids []int64) []int64 {
	seen := make(map[int64]struct{}, len(ids))
	out := make([]int64, 0, len(ids))
	for _, id := range ids {
		if id <= 0 {
			continue
		}
		if _, dup := seen[id]; dup {
			continue
		}
		seen[id] = struct{}{}
		out = append(out, id)
	}
	sort.Slice(out, func(a, b int) bool { return out[a] < out[b] })
	return out
}
// detectProjectRoot looks for the project root by checking the working
// directory and up to two of its ancestors for a "ui" entry. Returns ""
// when the working directory is unavailable or no candidate matches.
func detectProjectRoot() string {
	wd, err := os.Getwd()
	if err != nil {
		return ""
	}
	dir := wd
	for i := 0; i < 3; i++ {
		if _, statErr := os.Stat(filepath.Join(dir, "ui")); statErr == nil {
			return dir
		}
		dir = filepath.Dir(dir)
	}
	return ""
}
// isCandidateText reports whether s looks like a translatable UI string:
// trimmed length 3..120 bytes, not matching the reBadText blacklist,
// containing at least one letter (reHasLetter), and not an API path.
func isCandidateText(s string) bool {
	trimmed := strings.TrimSpace(s)
	switch {
	case len(trimmed) < 3, len(trimmed) > 120:
		return false
	case reBadText.MatchString(trimmed):
		return false
	case !reHasLetter.MatchString(trimmed):
		return false
	case strings.Contains(trimmed, "/api/"):
		return false
	}
	return true
}
// sanitizeKey lowers and trims s, converts spaces and reKeyUnsafe-matched
// characters to underscores, and strips leading/trailing underscores.
// A result that collapses to nothing becomes the placeholder "x".
func sanitizeKey(s string) string {
	key := strings.ToLower(strings.TrimSpace(s))
	key = strings.ReplaceAll(key, " ", "_")
	key = strings.Trim(reKeyUnsafe.ReplaceAllString(key, "_"), "_")
	if key == "" {
		return "x"
	}
	return key
}
// normalizeDisplayText converts underscores to spaces and collapses every
// run of whitespace into a single space, trimming the ends. An empty or
// all-whitespace/underscore input yields "".
//
// strings.Fields already ignores leading/trailing whitespace, so the former
// TrimSpace pass and the explicit empty-string check were dead code; the
// empty case falls out of Join over zero fields naturally.
func normalizeDisplayText(s string) string {
	return strings.Join(strings.Fields(strings.ReplaceAll(s, "_", " ")), " ")
}
// hashKey derives a short, stable key from s: the sanitized form truncated
// to 40 bytes plus a rune-sum checksum (mod 1e6) of the ORIGINAL string.
// NOTE(review): the 40-byte cut could split a multi-byte rune if sanitizeKey
// ever emits non-ASCII — confirm reKeyUnsafe strips those.
func hashKey(s string) string {
	checksum := 0
	for _, r := range s {
		checksum += int(r)
	}
	base := sanitizeKey(s)
	if len(base) > 40 {
		base = base[:40]
	}
	return fmt.Sprintf("%s_%d", base, checksum%1000000)
}
func makeTextBasedSeedKey(sourceText string) string {
return "txt." + hashKey(normalizeSeedTextKey(sourceText))
}
// normalizeSeedTextKey canonicalizes s for seed-key hashing: display
// normalization first, then trim and lower-case.
func normalizeSeedTextKey(s string) string {
	display := normalizeDisplayText(s)
	return strings.ToLower(strings.TrimSpace(display))
}
// pqArray trims each value, sorts the result, and wraps it as a PostgreSQL
// array parameter. An empty input produces an empty (non-NULL) array.
func pqArray(values []string) any {
	trimmed := make([]string, 0, len(values))
	for _, v := range values {
		trimmed = append(trimmed, strings.TrimSpace(v))
	}
	sort.Strings(trimmed)
	return pq.Array(trimmed)
}
// parsePositiveIntEnv reads the environment variable name as a strictly
// positive integer, returning fallback when it is unset, blank,
// non-numeric, or <= 0.
func parsePositiveIntEnv(name string, fallback int) int {
	value := strings.TrimSpace(os.Getenv(name))
	if value == "" {
		return fallback
	}
	if parsed, err := strconv.Atoi(value); err == nil && parsed > 0 {
		return parsed
	}
	return fallback
}
// normalizeStringList trims entries, drops blanks, deduplicates keeping the
// first occurrence, and caps the result at max entries (a non-positive max
// means "no cap"). An empty input yields nil.
func normalizeStringList(items []string, max int) []string {
	if len(items) == 0 {
		return nil
	}
	limit := max
	if limit <= 0 {
		limit = len(items)
	}
	seen := make(map[string]struct{}, len(items))
	out := make([]string, 0, len(items))
	for _, raw := range items {
		value := strings.TrimSpace(raw)
		if value == "" {
			continue
		}
		if _, dup := seen[value]; dup {
			continue
		}
		seen[value] = struct{}{}
		out = append(out, value)
		if len(out) >= limit {
			break
		}
	}
	return out
}

View File

@@ -0,0 +1,69 @@
package main
import (
"bssapp-backend/routes"
"database/sql"
"log"
"os"
"strconv"
"strings"
"time"
)
// startTranslationSyncScheduler launches a background goroutine that runs a
// full translation sync once per day at a configured hour.
//
// Environment variables:
//   - TRANSLATION_SYNC_ENABLED: "0"/"false"/"off" disables the scheduler entirely.
//   - TRANSLATION_SYNC_HOUR:    hour of day (0-23) to run at; default 4.
//   - TRANSLATION_SYNC_LIMIT:   max rows per sync run; default 30000.
func startTranslationSyncScheduler(pgDB *sql.DB, mssqlDB *sql.DB) {
	enabled := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_ENABLED")))
	if enabled == "0" || enabled == "false" || enabled == "off" {
		log.Println("🛑 Translation sync scheduler disabled")
		return
	}
	// Default run hour; only overridden by a valid value in the 0-23 range.
	hour := 4
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_HOUR")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 23 {
			hour = parsed
		}
	}
	// Default per-run row limit; only overridden by a positive integer.
	limit := 30000
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LIMIT")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
			limit = parsed
		}
	}
	go func() {
		// Endless daily loop: sleep until the next scheduled slot, then sync.
		for {
			next := nextRunAt(time.Now(), hour)
			wait := time.Until(next)
			log.Printf("🕓 Translation sync next run at %s (in %s)", next.Format(time.RFC3339), wait.Round(time.Second))
			time.Sleep(wait)
			result, err := routes.PerformTranslationSync(pgDB, mssqlDB, routes.TranslationSyncOptions{
				AutoTranslate: true,
				Languages:     []string{"en", "de", "it", "es", "ru", "ar"},
				Limit:         limit,
				OnlyNew:       true,
			})
			if err != nil {
				// A failed run is only logged; the loop retries at the next slot.
				log.Printf("❌ Translation sync failed: %v", err)
				continue
			}
			log.Printf(
				"✅ Translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v",
				result.SeedCount,
				result.AffectedCount,
				result.AutoTranslated,
				result.TargetLangs,
			)
		}
	}()
}
func nextRunAt(now time.Time, hour int) time.Time {
next := time.Date(now.Year(), now.Month(), now.Day(), hour, 0, 0, 0, now.Location())
if !next.After(now) {
next = next.Add(24 * time.Hour)
}
return next
}

View File

@@ -146,7 +146,11 @@ createQuasarApp(createApp, quasarUserOptions)
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')

View File

@@ -0,0 +1,75 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import { Quasar } from 'quasar'
import { markRaw } from 'vue'
import RootComponent from 'app/src/App.vue'
import createStore from 'app/src/stores/index'
import createRouter from 'app/src/router/index'
/**
 * Quasar boot entry: creates the root Vue app and wires in the Quasar UI,
 * the store, and the router. The app is NOT mounted here — bootstrapping
 * differs between browser and server, so the caller decides when to mount.
 *
 * @param {Function} createAppFn - Vue's createApp, injected by the entry file.
 * @param {object} quasarUserOptions - generated Quasar plugin/config options.
 * @returns {Promise<{app: object, store: object, router: object}>}
 */
export default async function (createAppFn, quasarUserOptions) {
  const app = createAppFn(RootComponent)
  app.use(Quasar, quasarUserOptions)

  // Store/router factories may be plain instances or (possibly async) factories.
  const store = typeof createStore === 'function'
    ? await createStore({})
    : createStore
  app.use(store)

  const routerInstance = typeof createRouter === 'function'
    ? await createRouter({ store })
    : createRouter
  const router = markRaw(routerInstance)

  // Make the router instance reachable from every store as store.router.
  store.use(({ store }) => { store.router = router })

  return { app, store, router }
}

View File

@@ -0,0 +1,158 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import { createApp } from 'vue'
import '@quasar/extras/roboto-font/roboto-font.css'
import '@quasar/extras/material-icons/material-icons.css'
// We load Quasar stylesheet file
import 'quasar/dist/quasar.sass'
import 'src/css/app.css'
import createQuasarApp from './app.js'
import quasarUserOptions from './quasar-user-options.js'
// Base public path the app is served from (no sub-path deployment).
const publicPath = `/`

/**
 * Client bootstrap: runs each boot file in sequence (stopping on redirect or
 * error), then installs the router and mounts the app on #q-app.
 *
 * @param {{app, router, store}} ctx - instances created by boot/app.js.
 * @param {Function[]} bootFiles - callable default exports of the boot modules.
 */
async function start ({
  app,
  router
  , store
}, bootFiles) {
  // Set once a boot file requests a redirect; stops the boot loop below.
  let hasRedirected = false
  // Resolve a router location into an href. An unresolvable location OBJECT
  // yields null (redirect is skipped); an unresolvable plain string passes
  // through unchanged.
  const getRedirectUrl = url => {
    try { return router.resolve(url).href }
    catch (err) {}
    return Object(url) === url
      ? null
      : url
  }
  const redirect = url => {
    hasRedirected = true
    // Absolute http(s) URLs leave the SPA via a full page navigation.
    if (typeof url === 'string' && /^https?:\/\//.test(url)) {
      window.location.href = url
      return
    }
    const href = getRedirectUrl(url)
    // continue if we didn't fail to resolve the url
    if (href !== null) {
      window.location.href = href
      window.location.reload()
    }
  }
  // Current path (path + query + hash) handed to every boot file.
  const urlPath = window.location.href.replace(window.location.origin, '')
  // Run boot files sequentially; a redirect or a thrown error aborts the loop.
  for (let i = 0; hasRedirected === false && i < bootFiles.length; i++) {
    try {
      await bootFiles[i]({
        app,
        router,
        store,
        ssrContext: null,
        redirect,
        urlPath,
        publicPath
      })
    }
    catch (err) {
      // Boot files may throw an object carrying { url } to request a redirect.
      if (err && err.url) {
        redirect(err.url)
        return
      }
      console.error('[Quasar] boot error:', err)
      return
    }
  }
  if (hasRedirected === true) return
  app.use(router)
  app.mount('#q-app')
}
// Create the app, eagerly import all configured boot files, then start().
createQuasarApp(createApp, quasarUserOptions)
  .then(app => {
    // eventually remove this when Cordova/Capacitor/Electron support becomes old
    // Prefer Promise.allSettled when available so one failed boot import does
    // not discard the others; rejected entries are logged and dropped below.
    const [ method, mapFn ] = Promise.allSettled !== void 0
      ? [
        'allSettled',
        bootFiles => bootFiles.map(result => {
          if (result.status === 'rejected') {
            console.error('[Quasar] boot error:', result.reason)
            return
          }
          return result.value.default
        })
      ]
      : [
        'all',
        bootFiles => bootFiles.map(entry => entry.default)
      ]
    return Promise[ method ]([
      import(/* webpackMode: "eager" */ 'boot/dayjs'),
      import(/* webpackMode: "eager" */ 'boot/locale'),
      import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
    ]).then(bootFiles => {
      // Keep only callable default exports (drops failed/undefined entries).
      const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')
      start(app, boot)
    })
  })

View File

@@ -0,0 +1,116 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import App from 'app/src/App.vue'
// App-level preFetch hook, if App.vue declares one; set to false otherwise.
// Module-level state: addPreFetchHooks clears it after the first navigation
// so the app hook only ever runs once.
let appPrefetch = typeof App.preFetch === 'function'
  ? App.preFetch
  : (
    // Class components return the component options (and the preFetch hook) inside __c property
    App.__c !== void 0 && typeof App.__c.preFetch === 'function'
      ? App.__c.preFetch
      : false
  )
/**
 * Collect the route components matched by `to` (or the current route when
 * `to` is falsy), each tagged with the route record path that produced it.
 *
 * @param {object|undefined} to - target route: either an already-resolved
 *   record (has .matched) or a location to resolve via the router.
 * @param {object} router - vue-router instance (only used when resolution
 *   is needed).
 * @returns {Array<{path: string, c: object}>}
 */
function getMatchedComponents (to, router) {
  const route = to
    ? (to.matched ? to : router.resolve(to).route)
    : router.currentRoute.value
  if (!route) { return [] }
  const withComponents = route.matched.filter(record => record.components !== void 0)
  return withComponents.flatMap(record =>
    Object.keys(record.components).map(key => ({
      path: record.path,
      c: record.components[key]
    }))
  )
}
/**
 * Install router-level preFetch handling: before each resolved navigation,
 * runs the preFetch hooks of the newly-activated components (plus the
 * App-level hook on the very first navigation), strictly in sequence, and
 * honors redirects issued by a hook.
 *
 * @param {{router, store, publicPath}} ctx - app context from boot.
 */
export function addPreFetchHooks ({ router, store, publicPath }) {
  // Add router hook for handling preFetch.
  // Doing it after initial route is resolved so that we don't double-fetch
  // the data that we already have. Using router.beforeResolve() so that all
  // async components are resolved.
  router.beforeResolve((to, from, next) => {
    const
      urlPath = window.location.href.replace(window.location.origin, ''),
      matched = getMatchedComponents(to, router),
      prevMatched = getMatchedComponents(from, router)
    // Once one matched entry differs from the previous route, every entry
    // after it is considered new and its preFetch is eligible to run.
    let diffed = false
    const preFetchList = matched
      .filter((m, i) => {
        return diffed || (diffed = (
          !prevMatched[i] ||
          prevMatched[i].c !== m.c ||
          m.path.indexOf('/:') > -1 // does it has params?
        ))
      })
      .filter(m => m.c !== void 0 && (
        typeof m.c.preFetch === 'function'
        // Class components return the component options (and the preFetch hook) inside __c property
        || (m.c.__c !== void 0 && typeof m.c.__c.preFetch === 'function')
      ))
      .map(m => m.c.__c !== void 0 ? m.c.__c.preFetch : m.c.preFetch)
    // The App-level hook runs once, ahead of all component hooks.
    if (appPrefetch !== false) {
      preFetchList.unshift(appPrefetch)
      appPrefetch = false
    }
    if (preFetchList.length === 0) {
      return next()
    }
    // A hook may redirect via redirect(url); remaining hooks are then skipped.
    let hasRedirected = false
    const redirect = url => {
      hasRedirected = true
      next(url)
    }
    const proceed = () => {
      if (hasRedirected === false) { next() }
    }
    // Chain the hooks one after another; a rejection is logged but does not
    // block navigation (proceed() still runs in .catch).
    preFetchList.reduce(
      (promise, preFetch) => promise.then(() => hasRedirected === false && preFetch({
        store,
        currentRoute: to,
        previousRoute: from,
        redirect,
        urlPath,
        publicPath
      })),
      Promise.resolve()
    )
      .then(proceed)
      .catch(e => {
        console.error(e)
        proceed()
      })
  })
}

View File

@@ -0,0 +1,23 @@
/* eslint-disable */
/**
 * THIS FILE IS GENERATED AUTOMATICALLY.
 * DO NOT EDIT.
 *
 * You are probably looking on adding startup/initialization code.
 * Use "quasar new boot <name>" and add it there.
 * One boot file per concern. Then reference the file(s) in quasar.config file > boot:
 * boot: ['file', ...] // do not add ".js" extension to it.
 *
 * Boot files are your "main.js"
 **/
// Quasar language pack used as the build-time default UI language.
import lang from 'quasar/lang/tr.js'
// Quasar plugins enabled for this app.
import {Loading,Dialog,Notify} from 'quasar'
// Generated Quasar install options: global Notify defaults, the language
// pack above, and the enabled plugin set. Consumed when installing Quasar.
export default { config: {"notify":{"position":"top","timeout":2500}},lang,plugins: {Loading,Dialog,Notify} }

View File

@@ -15,7 +15,7 @@ export default defineConfig(() => {
/* ===================================================== /* =====================================================
BOOT FILES BOOT FILES
===================================================== */ ===================================================== */
boot: ['dayjs'], boot: ['dayjs', 'locale', 'resizeObserverGuard'],
/* ===================================================== /* =====================================================
GLOBAL CSS GLOBAL CSS
@@ -56,6 +56,13 @@ export default defineConfig(() => {
server: { type: 'http' }, server: { type: 'http' },
port: 9000, port: 9000,
open: true, open: true,
client: {
overlay: {
errors: true,
warnings: false,
runtimeErrors: false
}
},
// DEV proxy (CORS'suz) // DEV proxy (CORS'suz)
proxy: [ proxy: [
@@ -63,7 +70,10 @@ export default defineConfig(() => {
context: ['/api'], context: ['/api'],
target: 'http://localhost:8080', target: 'http://localhost:8080',
changeOrigin: true, changeOrigin: true,
secure: false secure: false,
ws: true,
timeout: 0,
proxyTimeout: 0
} }
] ]
}, },

View File

@@ -27,7 +27,7 @@ var quasar_config_default = defineConfig(() => {
/* ===================================================== /* =====================================================
BOOT FILES BOOT FILES
===================================================== */ ===================================================== */
boot: ["dayjs"], boot: ["dayjs", "locale", "resizeObserverGuard"],
/* ===================================================== /* =====================================================
GLOBAL CSS GLOBAL CSS
===================================================== */ ===================================================== */
@@ -62,13 +62,23 @@ var quasar_config_default = defineConfig(() => {
server: { type: "http" }, server: { type: "http" },
port: 9e3, port: 9e3,
open: true, open: true,
client: {
overlay: {
errors: true,
warnings: false,
runtimeErrors: false
}
},
// DEV proxy (CORS'suz) // DEV proxy (CORS'suz)
proxy: [ proxy: [
{ {
context: ["/api"], context: ["/api"],
target: "http://localhost:8080", target: "http://localhost:8080",
changeOrigin: true, changeOrigin: true,
secure: false secure: false,
ws: true,
timeout: 0,
proxyTimeout: 0
} }
] ]
}, },

View File

@@ -3,12 +3,12 @@ import dayjs from 'dayjs'
import customParseFormat from 'dayjs/plugin/customParseFormat.js' import customParseFormat from 'dayjs/plugin/customParseFormat.js'
import relativeTime from 'dayjs/plugin/relativeTime.js' import relativeTime from 'dayjs/plugin/relativeTime.js'
import localizedFormat from 'dayjs/plugin/localizedFormat.js' import localizedFormat from 'dayjs/plugin/localizedFormat.js'
import 'dayjs/locale/tr.js' import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
// 🔹 Pluginleri aktif et // 🔹 Pluginleri aktif et
dayjs.extend(customParseFormat) dayjs.extend(customParseFormat)
dayjs.extend(relativeTime) dayjs.extend(relativeTime)
dayjs.extend(localizedFormat) dayjs.extend(localizedFormat)
dayjs.locale('tr') applyDayjsLocale('tr')
export default dayjs export default dayjs

7
ui/src/boot/locale.js Normal file
View File

@@ -0,0 +1,7 @@
import { boot } from 'quasar/wrappers'
import { useLocaleStore } from 'src/stores/localeStore'

/**
 * Boot file: re-applies the persisted locale on startup so every dependent
 * (Quasar language pack, dayjs, etc.) is synchronized before the UI renders.
 */
export default boot(() => {
  const store = useLocaleStore()
  store.setLocale(store.locale)
})

View File

@@ -0,0 +1,36 @@
/**
 * Dev-only guard that swallows the benign "ResizeObserver loop…" browser
 * errors so they never reach the webpack-dev-server error overlay.
 * No-op in production builds and in non-browser (SSR/test) environments.
 */
const resizeObserverGuard = () => {
  if (process.env.PROD || typeof window === 'undefined') return

  const overlayMessages = [
    'ResizeObserver loop completed with undelivered notifications',
    'ResizeObserver loop limit exceeded'
  ]
  const isResizeObserverOverlayError = (message) =>
    overlayMessages.some((needle) => String(message || '').includes(needle))

  // Capture-phase listener so we run before the dev-server overlay handler.
  window.addEventListener(
    'error',
    (event) => {
      if (!isResizeObserverOverlayError(event?.message)) return
      event.preventDefault()
      event.stopImmediatePropagation()
    },
    true
  )

  // Same treatment for promise rejections carrying the ResizeObserver text.
  window.addEventListener(
    'unhandledrejection',
    (event) => {
      const reason = event?.reason
      const msg =
        typeof reason === 'string'
          ? reason
          : (reason?.message || reason?.toString?.() || '')
      if (!isResizeObserverOverlayError(msg)) return
      event.preventDefault()
      event.stopImmediatePropagation()
    },
    true
  )
}

export default resizeObserverGuard

View File

@@ -0,0 +1,42 @@
import { computed } from 'vue'
import { messages } from 'src/i18n/messages'
import { DEFAULT_LOCALE } from 'src/i18n/languages'
import { useLocaleStore } from 'src/stores/localeStore'
/**
 * Resolve a dot-separated path (e.g. "app.title") inside a nested object.
 * Returns undefined as soon as any segment is missing, null, or reached via
 * a falsy container; an empty path returns the object itself.
 */
function lookup(obj, path) {
  const segments = String(path || '').split('.').filter(Boolean)
  let current = obj
  for (const segment of segments) {
    if (!current || current[segment] == null) return undefined
    current = current[segment]
  }
  return current
}
/**
 * Minimal i18n composable backed by the locale store.
 * `t(key)` searches a per-locale fallback chain (locale → en → tr), then
 * the DEFAULT_LOCALE catalog, and finally falls back to the key itself.
 */
export function useI18n() {
  const localeStore = useLocaleStore()
  const currentLocale = computed(() => localeStore.locale)

  // Ordered list of catalogs to consult for a given locale code.
  const fallbackLocales = (locale) => {
    const code = String(locale || '').toLowerCase()
    if (code === 'tr') return ['tr']
    if (code === 'en') return ['en', 'tr']
    return [code, 'en', 'tr']
  }

  const t = (key) => {
    for (const code of fallbackLocales(currentLocale.value)) {
      const hit = lookup(messages[code] || {}, key)
      if (hit != null) return hit
    }
    const fallback = lookup(messages[DEFAULT_LOCALE] || {}, key)
    return fallback != null ? fallback : key
  }

  return {
    locale: currentLocale,
    t
  }
}

View File

@@ -0,0 +1,30 @@
import dayjs from 'dayjs'
import 'dayjs/locale/tr.js'
import 'dayjs/locale/en.js'
import 'dayjs/locale/de.js'
import 'dayjs/locale/it.js'
import 'dayjs/locale/es.js'
import 'dayjs/locale/ru.js'
import 'dayjs/locale/ar.js'
import { normalizeLocale } from './languages.js'
// App locale code → BCP-47 date locale used for date formatting.
export const DATE_LOCALE_MAP = {
  tr: 'tr-TR',
  en: 'en-US',
  de: 'de-DE',
  it: 'it-IT',
  es: 'es-ES',
  ru: 'ru-RU',
  ar: 'ar'
}

/** Switch dayjs' global locale to the normalized app locale. */
export function applyDayjsLocale(locale) {
  dayjs.locale(normalizeLocale(locale))
}

/** BCP-47 date locale for an app locale; unknown codes fall back to Turkish. */
export function getDateLocale(locale) {
  return DATE_LOCALE_MAP[normalizeLocale(locale)] || DATE_LOCALE_MAP.tr
}

32
ui/src/i18n/languages.js Normal file
View File

@@ -0,0 +1,32 @@
/** Default application locale. */
export const DEFAULT_LOCALE = 'tr'

/** Locale codes the UI ships translations and language packs for. */
export const SUPPORTED_LOCALES = ['tr', 'en', 'de', 'it', 'es', 'ru', 'ar']

/** Options rendered by the language picker (label, short code, flag). */
export const UI_LANGUAGE_OPTIONS = [
  { label: 'Türkçe', value: 'tr', short: 'TUR', flag: '🇹🇷' },
  { label: 'English', value: 'en', short: 'ENG', flag: '🇬🇧' },
  { label: 'Deutsch', value: 'de', short: 'DEU', flag: '🇩🇪' },
  { label: 'Italiano', value: 'it', short: 'ITA', flag: '🇮🇹' },
  { label: 'Español', value: 'es', short: 'ESP', flag: '🇪🇸' },
  { label: 'Русский', value: 'ru', short: 'RUS', flag: '🇷🇺' },
  { label: 'العربية', value: 'ar', short: 'ARA', flag: '🇸🇦' }
]

/** App locale code → upper-case language code expected by the backend. */
export const BACKEND_LANG_MAP = {
  tr: 'TR',
  en: 'EN',
  de: 'DE',
  it: 'IT',
  es: 'ES',
  ru: 'RU',
  ar: 'AR'
}

/**
 * Normalize arbitrary input to a supported locale code.
 * Unknown, empty, or non-string values fall back to DEFAULT_LOCALE.
 */
export function normalizeLocale(value) {
  const candidate = String(value || '').trim().toLowerCase()
  if (SUPPORTED_LOCALES.includes(candidate)) return candidate
  return DEFAULT_LOCALE
}

/** Map an app locale to the backend language code (defaults to 'TR'). */
export function toBackendLangCode(locale) {
  const normalized = normalizeLocale(locale)
  return BACKEND_LANG_MAP[normalized] || BACKEND_LANG_MAP[DEFAULT_LOCALE]
}

28
ui/src/i18n/messages.js Normal file
View File

@@ -0,0 +1,28 @@
/**
 * Static UI message catalogs, keyed by locale code then dot path
 * (e.g. t('app.title') — see src/composables/useI18n.js). Locales missing a
 * key fall back to English, then Turkish.
 */
export const messages = {
  tr: {
    app: {
      title: 'Baggi Software System',
      // Fixed: the opening quote and 'Ç' of this literal were garbled
      // ("ıkış Yap'"), which is a syntax error; restored to 'Çıkış Yap'.
      logoutTitle: 'Çıkış Yap',
      logoutConfirm: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
      changePassword: 'Şifre Değiştir',
      language: 'Dil'
    },
    statement: {
      invalidDateRange: 'Başlangıç tarihi bitiş tarihinden sonra olamaz.',
      selectFilters: 'Lütfen cari ve tarih aralığını seçiniz.'
    }
  },
  en: {
    app: {
      title: 'Baggi Software System',
      logoutTitle: 'Log Out',
      logoutConfirm: 'Are you sure you want to end your session?',
      changePassword: 'Change Password',
      language: 'Language'
    },
    statement: {
      invalidDateRange: 'Start date cannot be later than end date.',
      selectFilters: 'Please select account and date range.'
    }
  }
}

View File

@@ -11,9 +11,41 @@
<q-avatar class="bg-secondary q-mr-sm"> <q-avatar class="bg-secondary q-mr-sm">
<img src="/images/Baggi-tekstilas-logolu.jpg" /> <img src="/images/Baggi-tekstilas-logolu.jpg" />
</q-avatar> </q-avatar>
Baggi Software System {{ t('app.title') }}
</q-toolbar-title> </q-toolbar-title>
<q-select
v-model="selectedLocale"
dense
outlined
emit-value
map-options
options-dense
class="q-mr-sm lang-select"
option-value="value"
option-label="label"
:options="languageOptions"
>
<template #selected-item="scope">
<div class="lang-item">
<span class="lang-flag">{{ scope.opt.flag }}</span>
<span class="lang-short">{{ scope.opt.short }}</span>
</div>
</template>
<template #option="scope">
<q-item v-bind="scope.itemProps">
<q-item-section>
<div class="lang-item">
<span class="lang-flag">{{ scope.opt.flag }}</span>
<span class="lang-short">{{ scope.opt.short }}</span>
<span>{{ scope.opt.label }}</span>
</div>
</q-item-section>
</q-item>
</template>
</q-select>
<q-btn flat dense round icon="logout" @click="confirmLogout" /> <q-btn flat dense round icon="logout" @click="confirmLogout" />
</q-toolbar> </q-toolbar>
@@ -99,7 +131,7 @@
</q-item-section> </q-item-section>
<q-item-section> <q-item-section>
Şifre Değiştir {{ t('app.changePassword') }}
</q-item-section> </q-item-section>
</q-item> </q-item>
@@ -122,7 +154,7 @@
<q-toolbar class="bg-secondary"> <q-toolbar class="bg-secondary">
<q-toolbar-title> <q-toolbar-title>
Baggi Software System {{ t('app.title') }}
</q-toolbar-title> </q-toolbar-title>
</q-toolbar> </q-toolbar>
@@ -138,6 +170,9 @@ import { Dialog, useQuasar } from 'quasar'
import { useAuthStore } from 'stores/authStore' import { useAuthStore } from 'stores/authStore'
import { usePermissionStore } from 'stores/permissionStore' import { usePermissionStore } from 'stores/permissionStore'
import { useI18n } from 'src/composables/useI18n'
import { UI_LANGUAGE_OPTIONS } from 'src/i18n/languages'
import { useLocaleStore } from 'src/stores/localeStore'
/* ================= STORES ================= */ /* ================= STORES ================= */
@@ -147,6 +182,16 @@ const route = useRoute()
const $q = useQuasar() const $q = useQuasar()
const auth = useAuthStore() const auth = useAuthStore()
const perm = usePermissionStore() const perm = usePermissionStore()
const localeStore = useLocaleStore()
const { t } = useI18n()
const languageOptions = UI_LANGUAGE_OPTIONS
const selectedLocale = computed({
get: () => localeStore.locale,
set: (value) => {
localeStore.setLocale(value)
}
})
/* ================= UI ================= */ /* ================= UI ================= */
@@ -159,8 +204,8 @@ function toggleLeftDrawer () {
function confirmLogout () { function confirmLogout () {
Dialog.create({ Dialog.create({
title: 'Çıkış Yap', title: t('app.logoutTitle'),
message: 'Oturumunuzu kapatmak istediğinize emin misiniz?', message: t('app.logoutConfirm'),
cancel: true, cancel: true,
persistent: true persistent: true
}).onOk(() => { }).onOk(() => {
@@ -279,6 +324,19 @@ const menuItems = [
] ]
}, },
{
label: 'Fiyatlandırma',
icon: 'request_quote',
children: [
{
label: 'Ürün Fiyatlandırma',
to: '/app/pricing/product-pricing',
permission: 'order:view'
}
]
},
{ {
label: 'Sistem', label: 'Sistem',
icon: 'settings', icon: 'settings',
@@ -317,6 +375,18 @@ const menuItems = [
] ]
}, },
{
label: 'Dil Çeviri',
icon: 'translate',
children: [
{
label: 'Çeviri Tablosu',
to: '/app/language/translations',
permission: 'language:update'
}
]
},
{ {
label: 'Kullanıcı Yönetimi', label: 'Kullanıcı Yönetimi',
@@ -374,5 +444,27 @@ const filteredMenu = computed(() => {
-webkit-overflow-scrolling: touch; -webkit-overflow-scrolling: touch;
touch-action: pan-y; touch-action: pan-y;
} }
.lang-select {
width: 140px;
background: #fff;
border-radius: 6px;
}
.lang-item {
display: inline-flex;
align-items: center;
gap: 8px;
}
.lang-flag {
font-size: 15px;
line-height: 1;
}
.lang-short {
font-weight: 700;
letter-spacing: 0.3px;
}
</style> </style>

View File

@@ -262,14 +262,24 @@
@click="openNewRowEditor" @click="openNewRowEditor"
:disable="isClosedRow || isViewOnly || !canMutateRows" :disable="isClosedRow || isViewOnly || !canMutateRows"
/> />
<q-btn
v-if="isEditMode && canBulkUpdateLineDueDates"
label="SATIR TERMINLERINI TOPLU GUNCELLE"
color="warning"
icon="event"
class="q-ml-sm"
:loading="orderStore.loading"
:disable="orderStore.loading || !canBulkUpdateLineDueDates"
@click="openBulkDueDateDialog"
/>
<q-btn <q-btn
v-if="canSubmitOrder" v-if="canSubmitOrder"
:label="isEditMode ? 'TÜMÜNÜ GÜNCELLE' : 'TÜMÜNÜ KAYDET'" :label="isEditMode ? 'TÜMÜNÜ GÜNCELLE' : 'TÜMÜNÜ KAYDET'"
color="primary" color="primary"
icon="save" icon="save"
class="q-ml-sm" class="q-ml-sm"
:loading="orderStore.loading" :loading="orderStore.loading || isSubmitAllInFlight"
:disable="!canSubmitOrder" :disable="!canSubmitOrder || orderStore.loading || isSubmitAllInFlight"
@click="confirmAndSubmit" @click="confirmAndSubmit"
/> />
</div> </div>
@@ -450,6 +460,41 @@
</div> </div>
</template> </template>
</div> </div>
<!-- =======================================================
🔹 TOPLU TERMIN GUNCELLEME
======================================================== -->
<q-dialog v-model="showBulkDueDateDialog" persistent>
<q-card style="min-width: 420px; max-width: 90vw;">
<q-card-section class="text-subtitle1 text-weight-bold">
Satir Terminlerini Toplu Guncelle
</q-card-section>
<q-card-section class="q-pt-none">
<div class="q-mb-md">
Tum siparis satiri terminlerini sectiginiz tarihi koyarak guncellemek istediginize emin misiniz?
</div>
<q-input
v-model="bulkDueDateValue"
type="date"
label="Yeni Termin Tarihi"
filled
dense
autofocus
/>
</q-card-section>
<q-card-actions align="right">
<q-btn flat label="Iptal" v-close-popup />
<q-btn
color="primary"
label="Evet"
:loading="orderStore.loading"
@click="confirmBulkDueDateUpdate"
/>
</q-card-actions>
</q-card>
</q-dialog>
<!-- ======================================================= <!-- =======================================================
🔹 SATIR DÜZENLEYİCİ FORM (EDITOR) 🔹 SATIR DÜZENLEYİCİ FORM (EDITOR)
======================================================== --> ======================================================== -->
@@ -728,16 +773,18 @@
v-if="canMutateRows" v-if="canMutateRows"
:color="isEditing ? 'positive' : 'primary'" :color="isEditing ? 'positive' : 'primary'"
:label="isEditing ? 'Güncelle' : 'Kaydet'" :label="isEditing ? 'Güncelle' : 'Kaydet'"
:loading="isRowSaveInFlight"
@click="onSaveOrUpdateRow" @click="onSaveOrUpdateRow"
:disable="isClosedRow || isViewOnly || !canMutateRows" :disable="isClosedRow || isViewOnly || !canMutateRows || isRowSaveInFlight"
/> />
<q-btn <q-btn
v-if="canMutateRows" v-if="canMutateRows"
color="secondary" color="secondary"
label="Kaydet ve Diğer Renge Geç" label="Kaydet ve Diğer Renge Geç"
:loading="isRowSaveInFlight"
@click="onSaveAndNextColor" @click="onSaveAndNextColor"
:disable="isClosedRow || isViewOnly || !canMutateRows" :disable="isClosedRow || isViewOnly || !canMutateRows || isRowSaveInFlight"
/> />
<q-btn <q-btn
v-if="isEditing && canMutateRows" v-if="isEditing && canMutateRows"
@@ -883,8 +930,62 @@ console.log('🧩 Route parametresi alındı (setup başında):', orderHeaderID.
const aktifPB = ref('USD') // Varsayılan para birimi (Cari seçimiyle değişebilir) const aktifPB = ref('USD') // Varsayılan para birimi (Cari seçimiyle değişebilir)
// 🔹 Model detayları cache (product-detail API verilerini tutar) // 🔹 Model detayları cache (product-detail API verilerini tutar)
const productCache = reactive({}) const productCache = reactive({})
const showBulkDueDateDialog = ref(false)
const bulkDueDateValue = ref('')
const isSubmitAllInFlight = ref(false)
const isRowSaveInFlight = ref(false)
function showEditorQtyPriceBlockingDialog(message, details = '') {
const detailHtml = details ? `<br><br><b>Detay:</b><br>${details}` : ''
$q.dialog({
title: 'Kayit Engellendi',
message: `${message}${detailHtml}`,
html: true,
ok: { label: 'Tamam', color: 'negative' }
})
}
function validateEditorRowBeforeSave() {
const adet = Number(form.adet || 0)
const fiyatRaw = String(form.fiyat ?? '').trim()
const fiyat = Number(form.fiyat || 0)
if (adet <= 0) {
showEditorQtyPriceBlockingDialog('Siparis adeti toplam 0 olamaz.')
return false
}
if (!fiyatRaw || !Number.isFinite(fiyat) || fiyat <= 0) {
showEditorQtyPriceBlockingDialog('Urun fiyati girmeden ilerleyemezsiniz.')
return false
}
return true
}
function validateSummaryRowsBeforeSubmit() {
const rows = Array.isArray(orderStore.summaryRows) ? orderStore.summaryRows : []
const invalidRows = rows.filter(r => {
const adet = Number(r?.adet || 0)
const fiyatRaw = String(r?.fiyat ?? '').trim()
const fiyat = Number(r?.fiyat || 0)
return adet <= 0 || !fiyatRaw || !Number.isFinite(fiyat) || fiyat <= 0
})
if (!invalidRows.length) return true
const preview = invalidRows
.slice(0, 8)
.map(r => `${String(r?.model || '').trim() || '-'} / ${String(r?.renk || '').trim() || '-'} (adet=${Number(r?.adet || 0)}, fiyat=${String(r?.fiyat ?? '')})`)
.join('<br>')
showEditorQtyPriceBlockingDialog(
'Urun fiyati girmeden ilerleyemezsiniz.',
preview
)
return false
}
const confirmAndSubmit = async () => { const confirmAndSubmit = async () => {
if (orderStore.loading) return if (orderStore.loading || isSubmitAllInFlight.value) return
if (!hasSubmitPermission()) { if (!hasSubmitPermission()) {
notifyNoPermission( notifyNoPermission(
@@ -904,6 +1005,11 @@ const confirmAndSubmit = async () => {
return return
} }
if (!validateSummaryRowsBeforeSubmit()) {
return
}
isSubmitAllInFlight.value = true
try { try {
// NEW veya EDIT ayrımı store.mode üzerinden // NEW veya EDIT ayrımı store.mode üzerinden
await orderStore.submitAllReal( await orderStore.submitAllReal(
@@ -915,6 +1021,45 @@ const confirmAndSubmit = async () => {
) )
} catch (err) { } catch (err) {
console.error('❌ confirmAndSubmit hata:', err) console.error('❌ confirmAndSubmit hata:', err)
} finally {
isSubmitAllInFlight.value = false
}
}
function openBulkDueDateDialog() {
if (!canBulkUpdateLineDueDates.value) return
const firstRowDate = summaryRows.value?.find?.(row => !!row?.terminTarihi)?.terminTarihi || ''
bulkDueDateValue.value = toDateOnly(form.AverageDueDate || firstRowDate || dayjs().format('YYYY-MM-DD'))
showBulkDueDateDialog.value = true
}
/**
 * Applies the selected bulk due date to every order line of the current header.
 * Validates the picked date, calls the store API, mirrors the change on the
 * local rows and header form, then closes the dialog and notifies the user.
 * Failures are logged and surfaced via $q.notify.
 *
 * Fix: the closing braces of the catch block and the function were duplicated
 * (`} }` twice), which is a syntax error; the function is now properly closed.
 */
async function confirmBulkDueDateUpdate() {
  const dueDate = toDateOnly(bulkDueDateValue.value)
  if (!dueDate) {
    $q.notify({
      type: 'warning',
      message: 'Lutfen bir termin tarihi seciniz.'
    })
    return
  }
  try {
    const result = await orderStore.bulkUpdateOrderLineDueDate(orderHeaderID.value, dueDate)
    // Keep the in-memory rows and the header form in sync with the server result.
    orderStore.applyBulkLineDueDateLocally(dueDate)
    form.AverageDueDate = dueDate
    showBulkDueDateDialog.value = false
    $q.notify({
      type: 'positive',
      message: `Tum siparis satiri terminleri guncellendi (${Number(result?.updatedLines || 0)} satir).`
    })
  } catch (err) {
    console.error('❌ confirmBulkDueDateUpdate hata:', err)
    $q.notify({
      type: 'negative',
      message: err?.message || 'Satir terminleri guncellenemedi.'
    })
  }
}
@@ -939,6 +1084,14 @@ const canMutateRows = computed(() => {
if (isViewOnly.value) return false if (isViewOnly.value) return false
return isEditMode.value ? canUpdateOrder.value : canWriteOrder.value return isEditMode.value ? canUpdateOrder.value : canWriteOrder.value
}) })
// Bulk line due-date editing is only offered while editing an open, mutable
// order (not view-only, not closed, with update permission, a known header id
// and at least one summary row loaded).
const canBulkUpdateLineDueDates = computed(() => {
  const rows = orderStore.summaryRows
  const hasRows = Array.isArray(rows) && rows.length > 0
  return (
    isEditMode.value &&
    !isViewOnly.value &&
    !isClosedOrder.value &&
    canUpdateOrder.value &&
    !!orderHeaderID.value &&
    hasRows
  )
})
function notifyNoPermission(message) { function notifyNoPermission(message) {
$q.notify({ $q.notify({
@@ -2985,6 +3138,8 @@ function warnIfSecondColorMissing() {
} }
const onSaveOrUpdateRow = async () => { const onSaveOrUpdateRow = async () => {
if (isRowSaveInFlight.value) return
if (!hasRowMutationPermission()) { if (!hasRowMutationPermission()) {
notifyNoPermission( notifyNoPermission(
isEditMode.value isEditMode.value
@@ -2994,9 +3149,13 @@ const onSaveOrUpdateRow = async () => {
return return
} }
if (!validateEditorRowBeforeSave()) return
warnIfSecondColorMissing() warnIfSecondColorMissing()
await orderStore.saveOrUpdateRowUnified({ isRowSaveInFlight.value = true
try {
const ok = await orderStore.saveOrUpdateRowUnified({
form, form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null, recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
@@ -3010,8 +3169,13 @@ const onSaveOrUpdateRow = async () => {
stockMap, stockMap,
$q $q
}) })
if (ok !== false) {
showEditor.value = false showEditor.value = false
} }
} finally {
isRowSaveInFlight.value = false
}
}
function normalizeColorValue(val) { function normalizeColorValue(val) {
return String(val || '').trim().toUpperCase() return String(val || '').trim().toUpperCase()
@@ -3030,6 +3194,8 @@ function getNextColorValue() {
} }
const onSaveAndNextColor = async () => { const onSaveAndNextColor = async () => {
if (isRowSaveInFlight.value) return
if (!hasRowMutationPermission()) { if (!hasRowMutationPermission()) {
notifyNoPermission( notifyNoPermission(
isEditMode.value isEditMode.value
@@ -3049,9 +3215,14 @@ const onSaveAndNextColor = async () => {
return return
} }
if (!validateEditorRowBeforeSave()) return
warnIfSecondColorMissing() warnIfSecondColorMissing()
const ok = await orderStore.saveOrUpdateRowUnified({ isRowSaveInFlight.value = true
let ok = false
try {
ok = await orderStore.saveOrUpdateRowUnified({
form, form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null, recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
resetEditor: () => {}, resetEditor: () => {},
@@ -3062,6 +3233,9 @@ const onSaveAndNextColor = async () => {
stockMap, stockMap,
$q $q
}) })
} finally {
isRowSaveInFlight.value = false
}
if (!ok) return if (!ok) return

View File

@@ -17,6 +17,7 @@
icon="save" icon="save"
label="Secili Degisiklikleri Kaydet" label="Secili Degisiklikleri Kaydet"
:loading="store.saving" :loading="store.saving"
:disable="store.loading || store.saving || isBulkSubmitting"
@click="onBulkSubmit" @click="onBulkSubmit"
/> />
</div> </div>
@@ -60,11 +61,11 @@
</div> </div>
<div class="col-2"> <div class="col-2">
<q-input <q-input
:model-value="formatDate(header?.AverageDueDate)" v-model="headerAverageDueDate"
label="Tahmini Termin Tarihi" label="Tahmini Termin Tarihi"
filled filled
dense dense
readonly type="date"
/> />
</div> </div>
</div> </div>
@@ -101,6 +102,7 @@
<q-checkbox <q-checkbox
size="sm" size="sm"
:model-value="!!selectedMap[props.row.RowKey]" :model-value="!!selectedMap[props.row.RowKey]"
:disable="store.saving"
@update:model-value="(val) => toggleRowSelection(props.row.RowKey, val)" @update:model-value="(val) => toggleRowSelection(props.row.RowKey, val)"
/> />
</q-td> </q-td>
@@ -126,12 +128,16 @@
dense dense
filled filled
use-input use-input
fill-input
hide-selected
input-debounce="0"
emit-value emit-value
map-options map-options
option-label="label" option-label="label"
option-value="value" option-value="value"
:options="productCodeSelectOptions" :options="productCodeSelectOptions"
label="Eski Kod Sec" label="Eski Kod Sec"
@filter="onFilterProductCode"
@update:model-value="val => onSelectProduct(props.row, val)" @update:model-value="val => onSelectProduct(props.row, val)"
/> />
@@ -164,7 +170,7 @@
@click="openCdItemDialog(props.row.NewItemCode)" @click="openCdItemDialog(props.row.NewItemCode)"
/> />
<q-btn <q-btn
v-if="props.row.NewItemMode === 'new'" v-if="props.row.NewItemMode && props.row.NewItemMode !== 'empty'"
class="q-ml-xs" class="q-ml-xs"
dense dense
flat flat
@@ -221,6 +227,18 @@
</q-td> </q-td>
</template> </template>
<template #body-cell-NewDueDate="props">
<q-td :props="props">
<q-input
v-model="props.row.NewDueDate"
dense
filled
type="date"
label="Yeni Termin"
/>
</q-td>
</template>
<template #body-cell-NewDesc="props"> <template #body-cell-NewDesc="props">
<q-td :props="props" class="cell-new"> <q-td :props="props" class="cell-new">
<q-input <q-input
@@ -251,6 +269,38 @@
</q-card-section> </q-card-section>
<q-card-section class="q-pt-md"> <q-card-section class="q-pt-md">
<div class="row q-col-gutter-sm items-center q-mb-md bg-grey-2 q-pa-sm rounded-borders">
<div class="col-12 col-md-8">
<q-select
v-model="copySourceCode"
dense
filled
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
option-label="label"
option-value="value"
label="Benzer Eski Urun Kodundan Getir"
placeholder="Kopyalanacak urun kodunu yazin"
:options="productCodeSelectOptions"
@filter="onFilterProductCode"
/>
</div>
<div class="col-12 col-md-4">
<q-btn
color="secondary"
icon="content_copy"
label="Ozellikleri Kopyala"
class="full-width"
:disable="!copySourceCode"
@click="copyFromOldProduct('cdItem')"
/>
</div>
</div>
<div class="row q-col-gutter-sm"> <div class="row q-col-gutter-sm">
<div class="col-12 col-md-4"> <div class="col-12 col-md-4">
<q-select v-model="cdItemDraftForm.ItemDimTypeCode" dense filled use-input fill-input hide-selected input-debounce="0" emit-value map-options option-label="label" option-value="value" :options="lookupOptions('itemDimTypeCodes')" label="Boyut Secenekleri" /> <q-select v-model="cdItemDraftForm.ItemDimTypeCode" dense filled use-input fill-input hide-selected input-debounce="0" emit-value map-options option-label="label" option-value="value" :options="lookupOptions('itemDimTypeCodes')" label="Boyut Secenekleri" />
@@ -276,10 +326,41 @@
<q-badge color="primary">{{ attributeTargetCode || '-' }}</q-badge> <q-badge color="primary">{{ attributeTargetCode || '-' }}</q-badge>
</q-card-section> </q-card-section>
<q-card-section style="max-height: 68vh; overflow: auto;"> <q-card-section class="q-pt-md">
<div class="text-caption text-grey-7 q-mb-sm"> <div class="row q-col-gutter-sm items-center q-mb-md bg-grey-2 q-pa-sm rounded-borders">
Ilk etap dummy: isBlocked=0 kabul edilmis satirlar gibi listelenir. <div class="col-12 col-md-8">
<q-select
v-model="copySourceCode"
dense
filled
use-input
fill-input
hide-selected
input-debounce="0"
emit-value
map-options
option-label="label"
option-value="value"
label="Benzer Eski Urun Kodundan Getir"
placeholder="Kopyalanacak urun kodunu yazin"
:options="productCodeSelectOptions"
@filter="onFilterProductCode"
/>
</div> </div>
<div class="col-12 col-md-4">
<q-btn
color="secondary"
icon="content_copy"
label="Ozellikleri Kopyala"
class="full-width"
:disable="!copySourceCode"
@click="copyFromOldProduct('attributes')"
/>
</div>
</div>
</q-card-section>
<q-card-section style="max-height: 68vh; overflow: auto;">
<div <div
v-for="(row, idx) in attributeRows" v-for="(row, idx) in attributeRows"
:key="`${row.AttributeTypeCodeNumber}-${idx}`" :key="`${row.AttributeTypeCodeNumber}-${idx}`"
@@ -303,7 +384,6 @@
option-label="label" option-label="label"
option-value="value" option-value="value"
:options="row.Options" :options="row.Options"
label="AttributeCode - AttributeDescription"
/> />
</div> </div>
</div> </div>
@@ -311,10 +391,14 @@
<q-card-actions align="right"> <q-card-actions align="right">
<q-btn flat label="Vazgec" color="grey-8" v-close-popup /> <q-btn flat label="Vazgec" color="grey-8" v-close-popup />
<q-btn color="primary" label="Ozellikleri Kaydet" @click="saveAttributeDraft" /> <q-btn color="primary" label="Ozellikleri Taslaga Kaydet" @click="saveAttributeDraft" />
</q-card-actions> </q-card-actions>
</q-card> </q-card>
</q-dialog> </q-dialog>
<q-inner-loading :showing="store.saving">
<q-spinner-gears size="50px" color="primary" />
<div class="q-mt-md text-subtitle1">Degisiklikler kaydediliyor, lutfen bekleyiniz...</div>
</q-inner-loading>
</q-page> </q-page>
</template> </template>
@@ -353,23 +437,30 @@ const rows = ref([])
const descFilter = ref('') const descFilter = ref('')
const productOptions = ref([]) const productOptions = ref([])
const selectedMap = ref({}) const selectedMap = ref({})
const headerAverageDueDate = ref('')
const cdItemDialogOpen = ref(false) const cdItemDialogOpen = ref(false)
const cdItemTargetCode = ref('') const cdItemTargetCode = ref('')
const copySourceCode = ref(null)
const suppressAutoSetupDialogs = ref(false)
const cdItemDraftForm = ref(createEmptyCdItemDraft('')) const cdItemDraftForm = ref(createEmptyCdItemDraft(''))
const attributeDialogOpen = ref(false) const attributeDialogOpen = ref(false)
const attributeTargetCode = ref('') const attributeTargetCode = ref('')
const attributeRows = ref([]) const attributeRows = ref([])
const isBulkSubmitting = ref(false)
const columns = [ const columns = [
{ name: 'select', label: '', field: 'select', align: 'center', sortable: false, style: 'width:44px;', headerStyle: 'width:44px;' }, { name: 'select', label: '', field: 'select', align: 'center', sortable: false, style: 'width:44px;', headerStyle: 'width:44px;' },
{ name: 'OldItemCode', label: 'Eski Urun Kodu', field: 'OldItemCode', align: 'left', sortable: true, style: 'min-width:90px;white-space:normal', headerStyle: 'min-width:90px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' }, { name: 'OldItemCode', label: 'Eski Urun Kodu', field: 'OldItemCode', align: 'left', sortable: true, style: 'min-width:90px;white-space:normal', headerStyle: 'min-width:90px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldColor', label: 'Eski Urun Rengi', field: 'OldColor', align: 'left', sortable: true, style: 'min-width:80px;white-space:normal', headerStyle: 'min-width:80px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' }, { name: 'OldColor', label: 'Eski Urun Rengi', field: 'OldColorLabel', align: 'left', sortable: true, style: 'min-width:120px;white-space:normal', headerStyle: 'min-width:120px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldDim2', label: 'Eski 2. Renk', field: 'OldDim2', align: 'left', sortable: true, style: 'min-width:80px;white-space:normal', headerStyle: 'min-width:80px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' }, { name: 'OldDim2', label: 'Eski 2. Renk', field: 'OldDim2', align: 'left', sortable: true, style: 'min-width:80px;white-space:normal', headerStyle: 'min-width:80px;white-space:normal', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldDesc', label: 'Eski Aciklama', field: 'OldDesc', align: 'left', sortable: false, style: 'min-width:130px;', headerStyle: 'min-width:130px;', headerClasses: 'col-old col-desc', classes: 'col-old col-desc' }, { name: 'OldDesc', label: 'Eski Aciklama', field: 'OldDesc', align: 'left', sortable: false, style: 'min-width:130px;', headerStyle: 'min-width:130px;', headerClasses: 'col-old col-desc', classes: 'col-old col-desc' },
{ name: 'OldSizes', label: 'Bedenler', field: 'OldSizesLabel', align: 'left', sortable: false, style: 'min-width:90px;', headerStyle: 'min-width:90px;', headerClasses: 'col-old col-wrap', classes: 'col-old col-wrap' }, { name: 'OldSizes', label: 'Bedenler', field: 'OldSizesLabel', align: 'left', sortable: false, style: 'min-width:90px;', headerStyle: 'min-width:90px;', headerClasses: 'col-old col-wrap', classes: 'col-old col-wrap' },
{ name: 'OldTotalQty', label: 'Siparis Adedi', field: 'OldTotalQtyLabel', align: 'right', sortable: false, style: 'min-width:90px;', headerStyle: 'min-width:90px;', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'OldDueDate', label: 'Eski Termin', field: 'OldDueDate', align: 'left', sortable: true, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-old', classes: 'col-old' },
{ name: 'NewItemCode', label: 'Yeni Urun Kodu', field: 'NewItemCode', align: 'left', sortable: false, style: 'min-width:130px;', headerStyle: 'min-width:130px;', headerClasses: 'col-new col-new-first', classes: 'col-new col-new-first' }, { name: 'NewItemCode', label: 'Yeni Urun Kodu', field: 'NewItemCode', align: 'left', sortable: false, style: 'min-width:130px;', headerStyle: 'min-width:130px;', headerClasses: 'col-new col-new-first', classes: 'col-new col-new-first' },
{ name: 'NewColor', label: 'Yeni Urun Rengi', field: 'NewColor', align: 'left', sortable: false, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-new', classes: 'col-new' }, { name: 'NewColor', label: 'Yeni Urun Rengi', field: 'NewColor', align: 'left', sortable: false, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-new', classes: 'col-new' },
{ name: 'NewDim2', label: 'Yeni 2. Renk', field: 'NewDim2', align: 'left', sortable: false, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-new', classes: 'col-new' }, { name: 'NewDim2', label: 'Yeni 2. Renk', field: 'NewDim2', align: 'left', sortable: false, style: 'min-width:100px;', headerStyle: 'min-width:100px;', headerClasses: 'col-new', classes: 'col-new' },
{ name: 'NewDueDate', label: 'Yeni Termin', field: 'NewDueDate', align: 'left', sortable: false, style: 'min-width:120px;', headerStyle: 'min-width:120px;', headerClasses: 'col-new', classes: 'col-new' },
{ name: 'NewDesc', label: 'Yeni Aciklama', field: 'NewDesc', align: 'left', sortable: false, style: 'min-width:140px;', headerStyle: 'min-width:140px;', headerClasses: 'col-new col-desc', classes: 'col-new col-desc' } { name: 'NewDesc', label: 'Yeni Aciklama', field: 'NewDesc', align: 'left', sortable: false, style: 'min-width:140px;', headerStyle: 'min-width:140px;', headerClasses: 'col-new col-desc', classes: 'col-new col-desc' }
] ]
@@ -403,6 +494,23 @@ function formatDate (val) {
return text.length >= 10 ? text.slice(0, 10) : text return text.length >= 10 ? text.slice(0, 10) : text
} }
// Normalizes any incoming date value to the same shape formatDate yields.
function normalizeDateInput (val) {
  return formatDate(val || '')
}

// True when the editable header due date differs from the stored header value.
const hasHeaderAverageDueDateChange = computed(() => {
  const edited = normalizeDateInput(headerAverageDueDate.value)
  const stored = normalizeDateInput(header.value?.AverageDueDate)
  return edited !== stored
})

// Keep the local editable field in sync whenever the header (re)loads.
watch(
  () => header.value?.AverageDueDate,
  (value) => { headerAverageDueDate.value = normalizeDateInput(value) },
  { immediate: true }
)
const filteredRows = computed(() => { const filteredRows = computed(() => {
const needle = normalizeSearchText(descFilter.value) const needle = normalizeSearchText(descFilter.value)
if (!needle) return rows.value if (!needle) return rows.value
@@ -419,12 +527,36 @@ const newItemEntryModeOptions = [
{ label: 'Eski Kod Sec', value: 'selected' }, { label: 'Eski Kod Sec', value: 'selected' },
{ label: 'Yeni Kod Ekle', value: 'typed' } { label: 'Yeni Kod Ekle', value: 'typed' }
] ]
const productCodeSelectOptions = computed(() => const productCodeAllOptions = computed(() =>
(productOptions.value || []).map(p => { (productOptions.value || []).map(p => {
const code = String(p?.ProductCode || '').trim().toUpperCase() const code = String(p?.ProductCode || '').trim().toUpperCase()
return { label: code, value: code } return { label: code, value: code }
}).filter(x => !!x.value && x.value.length === 13) }).filter(x => !!x.value && x.value.length === 13)
) )
// Options currently visible in the product-code selects (narrowed by the
// filter handler below).
const productCodeSelectOptions = ref([])

// Reset the visible options whenever the full option list changes.
watch(
  productCodeAllOptions,
  (list) => {
    productCodeSelectOptions.value = Array.isArray(list) ? list.slice() : []
  },
  { immediate: true }
)

// QSelect @filter handler: narrows productCodeSelectOptions to entries whose
// normalized label or value contains the typed needle.
function onFilterProductCode (val, update) {
  const needle = normalizeSearchText(val)
  update(() => {
    const all = productCodeAllOptions.value || []
    if (!needle) {
      productCodeSelectOptions.value = all.slice()
      return
    }
    productCodeSelectOptions.value = all.filter((opt) => {
      const byLabel = normalizeSearchText(opt?.label || '').includes(needle)
      const byValue = normalizeSearchText(opt?.value || '').includes(needle)
      return byLabel || byValue
    })
  })
}
function applyNewItemVisualState (row, source = 'typed') { function applyNewItemVisualState (row, source = 'typed') {
const info = store.classifyItemCode(row?.NewItemCode || '') const info = store.classifyItemCode(row?.NewItemCode || '')
@@ -433,6 +565,19 @@ function applyNewItemVisualState (row, source = 'typed') {
row.NewItemSource = info.mode === 'empty' ? '' : source row.NewItemSource = info.mode === 'empty' ? '' : source
} }
// Once an item code is known to exist, mark every row pointing at that code
// as 'existing' and backfill an entry mode if the user never chose one.
function syncRowsForKnownExistingCode (itemCode) {
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return
  const allRows = rows.value || []
  for (const row of allRows) {
    const rowCode = String(row?.NewItemCode || '').trim().toUpperCase()
    if (rowCode !== code) continue
    row.NewItemCode = code
    row.NewItemMode = 'existing'
    if (!row.NewItemEntryMode) {
      row.NewItemEntryMode = row.NewItemSource === 'selected' ? 'selected' : 'typed'
    }
  }
}
function newItemInputClass (row) { function newItemInputClass (row) {
return { return {
'new-item-existing': row?.NewItemMode === 'existing', 'new-item-existing': row?.NewItemMode === 'existing',
@@ -526,15 +671,18 @@ function onNewItemChange (row, val, source = 'typed') {
row.NewColor = '' row.NewColor = ''
row.NewDim2 = '' row.NewDim2 = ''
row.NewDesc = mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc) row.NewDesc = mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc)
if (row.NewItemCode) { if (row.NewItemCode && isValidBaggiModelCode(row.NewItemCode)) {
if (row.NewItemMode === 'new') { if (row.NewItemMode === 'new') {
store.fetchNewColors(row.NewItemCode) store.fetchNewColors(row.NewItemCode)
} else { } else {
store.fetchColors(row.NewItemCode) store.fetchColors(row.NewItemCode)
} }
} }
if (suppressAutoSetupDialogs.value) return
if (row.NewItemMode === 'new' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) { if (row.NewItemMode === 'new' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) {
openNewCodeSetupFlow(row.NewItemCode) openNewCodeSetupFlow(row.NewItemCode)
} else if (row.NewItemMode === 'existing' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) {
openAttributeDialog(row.NewItemCode)
} }
} }
@@ -553,9 +701,7 @@ function isNewCodeSetupComplete (itemCode) {
function isColorSelectionLocked (row) { function isColorSelectionLocked (row) {
const code = String(row?.NewItemCode || '').trim().toUpperCase() const code = String(row?.NewItemCode || '').trim().toUpperCase()
if (!code) return true return !code
if (row?.NewItemMode !== 'new') return false
return !isNewCodeSetupComplete(code)
} }
function openNewCodeSetupFlow (itemCode) { function openNewCodeSetupFlow (itemCode) {
@@ -658,11 +804,8 @@ function isSelectionCompleteByOldShape (row) {
if (!hasModel) return false if (!hasModel) return false
const oldHasColor = String(row?.OldColor || '').trim().length > 0 const oldHasColor = String(row?.OldColor || '').trim().length > 0
const oldHasDim2 = String(row?.OldDim2 || '').trim().length > 0
const hasNewColor = normalizeShortCode(row?.NewColor, 3).length === 3 const hasNewColor = normalizeShortCode(row?.NewColor, 3).length === 3
const hasNewDim2 = normalizeShortCode(row?.NewDim2, 3).length === 3
if (oldHasDim2) return hasNewColor && hasNewDim2
if (oldHasColor) return hasNewColor if (oldHasColor) return hasNewColor
return true return true
} }
@@ -693,7 +836,6 @@ function validateRowInput (row) {
const newColor = normalizeShortCode(row.NewColor, 3) const newColor = normalizeShortCode(row.NewColor, 3)
const newDim2 = normalizeShortCode(row.NewDim2, 3) const newDim2 = normalizeShortCode(row.NewDim2, 3)
const oldColor = String(row.OldColor || '').trim() const oldColor = String(row.OldColor || '').trim()
const oldDim2 = String(row.OldDim2 || '').trim()
if (!entryMode) return 'Lutfen once kod giris tipini seciniz (Eski Kod Sec / Yeni Kod Ekle).' if (!entryMode) return 'Lutfen once kod giris tipini seciniz (Eski Kod Sec / Yeni Kod Ekle).'
if (!newItemCode) return 'Yeni model kodu zorunludur.' if (!newItemCode) return 'Yeni model kodu zorunludur.'
@@ -702,7 +844,6 @@ function validateRowInput (row) {
} }
if (oldColor && !newColor) return 'Eski kayitta 1. renk oldugu icin yeni 1. renk zorunludur.' if (oldColor && !newColor) return 'Eski kayitta 1. renk oldugu icin yeni 1. renk zorunludur.'
if (newColor && newColor.length !== 3) return 'Yeni 1. renk kodu 3 karakter olmalidir.' if (newColor && newColor.length !== 3) return 'Yeni 1. renk kodu 3 karakter olmalidir.'
if (oldDim2 && !newDim2) return 'Eski kayitta 2. renk oldugu icin yeni 2. renk zorunludur.'
if (newDim2 && newDim2.length !== 3) return 'Yeni 2. renk kodu 3 karakter olmalidir.' if (newDim2 && newDim2.length !== 3) return 'Yeni 2. renk kodu 3 karakter olmalidir.'
if (newDim2 && !newColor) return '2. renk girmek icin 1. renk zorunludur.' if (newDim2 && !newColor) return '2. renk girmek icin 1. renk zorunludur.'
@@ -725,19 +866,98 @@ function collectLinesFromRows (selectedRows) {
NewItemCode: String(row.NewItemCode || '').trim().toUpperCase(), NewItemCode: String(row.NewItemCode || '').trim().toUpperCase(),
NewColor: normalizeShortCode(row.NewColor, 3), NewColor: normalizeShortCode(row.NewColor, 3),
NewDim2: normalizeShortCode(row.NewDim2, 3), NewDim2: normalizeShortCode(row.NewDim2, 3),
NewDesc: mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc) NewDesc: mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc),
OldDueDate: row.OldDueDate || '',
NewDueDate: row.NewDueDate || ''
} }
for (const id of (row.OrderLineIDs || [])) { const oldItemCode = String(row.OldItemCode || '').trim().toUpperCase()
lines.push({ const oldColor = normalizeShortCode(row.OldColor, 3)
const oldDim2 = normalizeShortCode(row.OldDim2, 3)
const oldDesc = String(row.OldDesc || '').trim()
const oldDueDateValue = row.OldDueDate || ''
const newDueDateValue = row.NewDueDate || ''
const hasChange = (
baseLine.NewItemCode !== oldItemCode ||
baseLine.NewColor !== oldColor ||
baseLine.NewDim2 !== oldDim2 ||
String(baseLine.NewDesc || '').trim() !== oldDesc ||
newDueDateValue !== oldDueDateValue
)
if (!hasChange) continue
const orderLines = Array.isArray(row.OrderLines) && row.OrderLines.length
? row.OrderLines
: (row.OrderLineIDs || []).map(id => ({
OrderLineID: id, OrderLineID: id,
...baseLine ItemDim1Code: ''
}))
for (const line of orderLines) {
lines.push({
...baseLine,
OrderLineID: line?.OrderLineID,
ItemDim1Code: store.toPayloadDim1Code(row, line?.ItemDim1Code || '')
}) })
} }
} }
return { errMsg: '', lines } return { errMsg: '', lines }
} }
// Reports whether the user changed anything on this row relative to its
// original ("Old*") values: item code, first/second color, merged
// description or due date.
function hasRowChange (row) {
  const next = {
    code: String(row?.NewItemCode || '').trim().toUpperCase(),
    color: normalizeShortCode(row?.NewColor, 3),
    dim2: normalizeShortCode(row?.NewDim2, 3),
    desc: String(mergeDescWithAutoNote(row, row?.NewDesc || row?.OldDesc) || '').trim(),
    due: row?.NewDueDate || ''
  }
  const prev = {
    code: String(row?.OldItemCode || '').trim().toUpperCase(),
    color: normalizeShortCode(row?.OldColor, 3),
    dim2: normalizeShortCode(row?.OldDim2, 3),
    desc: String(row?.OldDesc || '').trim(),
    due: row?.OldDueDate || ''
  }
  return (
    next.code !== prev.code ||
    next.color !== prev.color ||
    next.dim2 !== prev.dim2 ||
    next.desc !== prev.desc ||
    next.due !== prev.due
  )
}
// Builds de-duplicated warning messages for rows whose new item code is set
// but whose first color (or second color) was left empty. Rows without a new
// item code are skipped entirely.
function collectOptionalColorWarnings (rows) {
  const seen = new Set()
  for (const row of (rows || [])) {
    const code = String(row?.NewItemCode || '').trim().toUpperCase()
    if (!code) continue
    const color = normalizeShortCode(row?.NewColor, 3)
    if (!color) {
      // Missing first color supersedes the second-color warning for this row.
      seen.add(`${code} icin renk secmediniz.`)
      continue
    }
    const dim2 = normalizeShortCode(row?.NewDim2, 3)
    if (!dim2) {
      seen.add(`${code} icin 2. renk bos kalacak.`)
    }
  }
  return [...seen]
}
// Shows a confirmation dialog listing any optional-color warnings for the
// given rows. Resolves true immediately when there is nothing to warn about,
// true when the user explicitly confirms, and false on cancel or dismiss.
function confirmOptionalColorWarnings (rows) {
  const warnings = collectOptionalColorWarnings(rows)
  if (warnings.length === 0) return Promise.resolve(true)
  return new Promise((resolve) => {
    const dialog = $q.dialog({
      title: 'Renk Uyarisi',
      message: `${warnings.join('<br>')}<br><br>Devam etmek istiyor musunuz?`,
      html: true,
      ok: { label: 'Evet, Devam Et', color: 'warning' },
      cancel: { label: 'Vazgec', flat: true }
    })
    dialog.onOk(() => resolve(true))
    dialog.onCancel(() => resolve(false))
    dialog.onDismiss(() => resolve(false))
  })
}
function createEmptyCdItemDraft (itemCode) { function createEmptyCdItemDraft (itemCode) {
return { return {
ItemTypeCode: '1', ItemTypeCode: '1',
@@ -797,9 +1017,69 @@ function isDummyLookupOption (key, codeRaw, descRaw) {
return false return false
} }
/**
 * Copies product setup data from the select's source product into the
 * currently open dialog.
 *
 * @param {('cdItem'|'attributes')} targetType - 'cdItem' copies the sizing /
 *   dimension draft form; 'attributes' copies attribute selections into the
 *   open attribute dialog rows.
 * Side effects: toggles the global $q.loading overlay and notifies the user
 * about success, missing source data, or failure.
 */
async function copyFromOldProduct (targetType = 'cdItem') {
  const sourceCode = String(copySourceCode.value || '').trim().toUpperCase()
  if (!sourceCode) return
  $q.loading.show({ message: 'Ozellikler kopyalaniyor...' })
  try {
    if (targetType === 'cdItem') {
      const data = await store.fetchCdItemByCode(sourceCode)
      if (data) {
        const targetCode = String(cdItemTargetCode.value || '').trim().toUpperCase()
        const draft = createEmptyCdItemDraft(targetCode)
        // Copy only the keys the draft form knows about, skipping null/undefined.
        for (const k of Object.keys(draft)) {
          if (data[k] !== undefined && data[k] !== null) {
            draft[k] = String(data[k])
          }
        }
        // Even though a source item was copied, the target popup's code must not change.
        draft.ItemCode = targetCode
        cdItemDraftForm.value = draft
        persistCdItemDraft()
        $q.notify({ type: 'positive', message: 'Boyutlandirma bilgileri kopyalandi.' })
      } else {
        $q.notify({ type: 'warning', message: 'Kaynak urun bilgisi bulunamadi.' })
      }
    } else if (targetType === 'attributes') {
      const data = await store.fetchProductItemAttributes(sourceCode, 1, true)
      if (Array.isArray(data) && data.length > 0) {
        // Update the selections on the existing attributeRows in place.
        for (const row of attributeRows.value) {
          // Source rows may use snake_case or PascalCase field names.
          const sourceAttr = data.find(d => Number(d.attribute_type_code || d.AttributeTypeCode) === Number(row.AttributeTypeCodeNumber))
          if (sourceAttr) {
            const attrCode = String(sourceAttr.attribute_code || sourceAttr.AttributeCode || '').trim()
            if (attrCode) {
              // Check whether the option list already contains it; if not, add it (so it shows in the UI).
              if (!row.AllOptions.some(opt => String(opt.value).trim() === attrCode)) {
                row.AllOptions.unshift({ value: attrCode, label: attrCode })
                row.Options = [...row.AllOptions]
              }
              row.AttributeCode = attrCode
            }
          }
        }
        const targetCode = String(attributeTargetCode.value || '').trim().toUpperCase()
        if (targetCode) {
          // Persist a deep copy so later dialog edits don't mutate the stored draft.
          store.setProductAttributeDraft(targetCode, JSON.parse(JSON.stringify(attributeRows.value || [])))
        }
        $q.notify({ type: 'positive', message: 'Urun ozellikleri kopyalandi.' })
      } else {
        $q.notify({ type: 'warning', message: 'Kaynak urun ozellikleri bulunamadi.' })
      }
    }
  } catch (err) {
    console.error('[OrderProductionUpdate] copyFromOldProduct failed', err)
    $q.notify({ type: 'negative', message: 'Kopyalama sirasinda hata olustu.' })
  } finally {
    $q.loading.hide()
  }
}
async function openCdItemDialog (itemCode) { async function openCdItemDialog (itemCode) {
const code = String(itemCode || '').trim().toUpperCase() const code = String(itemCode || '').trim().toUpperCase()
if (!code) return if (!code) return
copySourceCode.value = null
await store.fetchCdItemLookups() await store.fetchCdItemLookups()
cdItemTargetCode.value = code cdItemTargetCode.value = code
@@ -815,6 +1095,17 @@ async function openCdItemDialog (itemCode) {
cdItemDialogOpen.value = true cdItemDialogOpen.value = true
} }
// Normalizes the current cd-item draft form, stamps it with the dialog's
// target code (falling back to the form's own code) and saves it into the
// store. Returns the stored payload, or null when no item code resolves.
function persistCdItemDraft () {
  const targetCode = String(cdItemTargetCode.value || '').trim().toUpperCase()
  const draft = cdItemDraftForm.value || {}
  const fallbackCode = String(draft.ItemCode || '').trim().toUpperCase()
  const payload = normalizeCdItemDraftForPayload({
    ...draft,
    ItemCode: targetCode || fallbackCode
  })
  if (!payload.ItemCode) return null
  store.setCdItemDraft(payload.ItemCode, payload)
  return payload
}
function normalizeCdItemDraftForPayload (draftRaw) { function normalizeCdItemDraftForPayload (draftRaw) {
const d = draftRaw || {} const d = draftRaw || {}
const toIntOrNil = (v) => { const toIntOrNil = (v) => {
@@ -849,12 +1140,16 @@ function normalizeCdItemDraftForPayload (draftRaw) {
} }
async function saveCdItemDraft () { async function saveCdItemDraft () {
const payload = normalizeCdItemDraftForPayload(cdItemDraftForm.value) const payload = persistCdItemDraft()
if (!payload.ItemCode) { if (!payload?.ItemCode) {
$q.notify({ type: 'negative', message: 'ItemCode bos olamaz.' }) $q.notify({ type: 'negative', message: 'ItemCode bos olamaz.' })
return return
} }
store.setCdItemDraft(payload.ItemCode, payload) console.info('[OrderProductionUpdate] saveCdItemDraft', {
code: payload.ItemCode,
itemDimTypeCode: payload.ItemDimTypeCode,
productHierarchyID: payload.ProductHierarchyID
})
cdItemDialogOpen.value = false cdItemDialogOpen.value = false
await openAttributeDialog(payload.ItemCode) await openAttributeDialog(payload.ItemCode)
} }
@@ -862,18 +1157,19 @@ async function saveCdItemDraft () {
function buildAttributeRowsFromLookup (list) { function buildAttributeRowsFromLookup (list) {
const grouped = new Map() const grouped = new Map()
for (const it of (list || [])) { for (const it of (list || [])) {
const typeCode = Number(it?.attribute_type_code || 0) const typeCode = Number(it?.attribute_type_code || it?.AttributeTypeCode || 0)
if (!typeCode) continue if (!typeCode) continue
if (!grouped.has(typeCode)) { if (!grouped.has(typeCode)) {
grouped.set(typeCode, { grouped.set(typeCode, {
typeCode, typeCode,
typeDesc: String(it?.attribute_type_description || '').trim() || String(typeCode), typeDesc: String(it?.attribute_type_description || it?.AttributeTypeDescription || '').trim() || String(typeCode),
options: [] options: []
}) })
} }
const g = grouped.get(typeCode) const g = grouped.get(typeCode)
const code = String(it?.attribute_code || '').trim() const code = String(it?.attribute_code || it?.AttributeCode || '').trim()
const desc = String(it?.attribute_description || '').trim() const desc = String(it?.attribute_description || it?.AttributeDescription || '').trim()
if (!code) continue
g.options.push({ g.options.push({
value: code, value: code,
label: `${code} - ${desc || code}` label: `${code} - ${desc || code}`
@@ -909,21 +1205,134 @@ function onFilterAttributeOption (row, val, update) {
}) })
} }
// Merges previously saved attribute draft rows with freshly fetched lookup
// rows: the lookup supplies the option lists, the draft supplies the user's
// selection. A selected code missing from the options is prepended so the
// selection stays visible in the select.
function mergeAttributeDraftWithLookupOptions (draftRows, lookupRows) {
  const lookupByType = new Map()
  for (const lookupRow of (lookupRows || [])) {
    const typeCode = Number(lookupRow?.AttributeTypeCodeNumber || 0)
    if (typeCode > 0) lookupByType.set(typeCode, lookupRow)
  }
  // Returns a shallow copy of the row's AllOptions (preferred) or Options.
  const pickOptions = (row) => {
    if (Array.isArray(row?.AllOptions)) return [...row.AllOptions]
    return Array.isArray(row?.Options) ? [...row.Options] : []
  }
  return (draftRows || []).map((draft) => {
    const typeCode = Number(draft?.AttributeTypeCodeNumber || 0)
    const base = lookupByType.get(typeCode)
    const selectedCode = String(draft?.AttributeCode || '').trim()
    const baseAllOptions = pickOptions(base)
    const allOptions = baseAllOptions.length ? baseAllOptions : pickOptions(draft)
    const alreadyListed = allOptions.some(
      (opt) => String(opt?.value || '').trim() === selectedCode
    )
    if (selectedCode && !alreadyListed) {
      allOptions.unshift({ value: selectedCode, label: selectedCode })
    }
    return {
      ...(base || draft),
      ...draft,
      AttributeTypeCodeNumber: typeCode,
      AttributeCode: selectedCode,
      AllOptions: allOptions,
      Options: [...allOptions]
    }
  })
}
async function openAttributeDialog (itemCode) { async function openAttributeDialog (itemCode) {
const code = String(itemCode || '').trim().toUpperCase() const code = String(itemCode || '').trim().toUpperCase()
if (!code) return if (!code) return
copySourceCode.value = null
attributeTargetCode.value = code attributeTargetCode.value = code
const existing = store.getProductAttributeDraft(code) const existingDraft = JSON.parse(JSON.stringify(store.getProductAttributeDraft(code) || []))
const modeInfo = store.classifyItemCode(code)
const fetched = await store.fetchProductAttributes(1) const fetched = await store.fetchProductAttributes(1)
const fromLookup = buildAttributeRowsFromLookup(fetched) const fromLookup = buildAttributeRowsFromLookup(fetched)
console.info('[OrderProductionUpdate] openAttributeDialog lookup', {
code,
mode: modeInfo.mode,
fetchedCount: Array.isArray(fetched) ? fetched.length : 0,
rowCount: fromLookup.length
})
if (!fromLookup.length) { if (!fromLookup.length) {
$q.notify({ type: 'negative', message: 'Urun ozellikleri listesi alinamadi. Lutfen daha sonra tekrar deneyin.' }) $q.notify({ type: 'negative', message: 'Urun ozellikleri listesi alinamadi. Lutfen daha sonra tekrar deneyin.' })
return return
} }
const baseRows = fromLookup
attributeRows.value = Array.isArray(existing) && existing.length // Draft varsa popup her zaman draft'tan acilir (yeniden acinca secimler kaybolmasin).
? JSON.parse(JSON.stringify(existing)) if (Array.isArray(existingDraft) && existingDraft.length) {
: baseRows attributeRows.value = JSON.parse(JSON.stringify(
mergeAttributeDraftWithLookupOptions(existingDraft, fromLookup)
))
console.info('[OrderProductionUpdate] openAttributeDialog rowsPrepared', {
code,
mode: modeInfo.mode,
useDraft: true,
rowCount: Array.isArray(attributeRows.value) ? attributeRows.value.length : 0,
optionCounts: (attributeRows.value || []).map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
options: Array.isArray(r?.Options) ? r.Options.length : 0,
allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0,
selected: String(r?.AttributeCode || '').trim()
}))
})
for (const row of (attributeRows.value || [])) {
if (!Array.isArray(row.AllOptions)) row.AllOptions = Array.isArray(row.Options) ? [...row.Options] : []
if (!Array.isArray(row.Options)) row.Options = [...row.AllOptions]
}
attributeDialogOpen.value = true
return
}
const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
console.info('[OrderProductionUpdate] openAttributeDialog dbCurrent', {
code,
dbCurrentCount: Array.isArray(dbCurrent) ? dbCurrent.length : 0
})
if (Array.isArray(dbCurrent) && dbCurrent.length) {
store.markItemCodeKnownExisting(code, true)
syncRowsForKnownExistingCode(code)
}
const dbMap = new Map(
(dbCurrent || []).map(x => [
Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
String(x?.attribute_code || x?.AttributeCode || '').trim()
]).filter(x => x[0] > 0)
)
const baseRows = fromLookup.map(row => {
const currentCode = dbMap.get(Number(row.AttributeTypeCodeNumber || 0)) || ''
const currentOptions = Array.isArray(row.AllOptions)
? [...row.AllOptions]
: (Array.isArray(row.Options) ? [...row.Options] : [])
if (currentCode && !currentOptions.some(opt => String(opt?.value || '').trim() === currentCode)) {
currentOptions.unshift({ value: currentCode, label: currentCode })
}
return {
...row,
AttributeCode: currentCode,
OriginalAttributeCode: currentCode,
AllOptions: currentOptions,
Options: [...currentOptions]
}
})
const useDraft = Array.isArray(existingDraft) && existingDraft.length
attributeRows.value = JSON.parse(JSON.stringify(baseRows))
console.info('[OrderProductionUpdate] openAttributeDialog rowsPrepared', {
code,
mode: modeInfo.mode,
useDraft: false,
rowCount: Array.isArray(attributeRows.value) ? attributeRows.value.length : 0,
optionCounts: (attributeRows.value || []).map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
options: Array.isArray(r?.Options) ? r.Options.length : 0,
allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0,
selected: String(r?.AttributeCode || '').trim()
}))
})
for (const row of (attributeRows.value || [])) { for (const row of (attributeRows.value || [])) {
if (!Array.isArray(row.AllOptions)) { if (!Array.isArray(row.AllOptions)) {
row.AllOptions = Array.isArray(row.Options) ? [...row.Options] : [] row.AllOptions = Array.isArray(row.Options) ? [...row.Options] : []
@@ -938,19 +1347,38 @@ async function openAttributeDialog (itemCode) {
function saveAttributeDraft () { function saveAttributeDraft () {
const code = String(attributeTargetCode.value || '').trim().toUpperCase() const code = String(attributeTargetCode.value || '').trim().toUpperCase()
if (!code) return if (!code) return
for (const row of (attributeRows.value || [])) { const rows = JSON.parse(JSON.stringify(attributeRows.value || []))
for (const row of rows) {
const selected = String(row?.AttributeCode || '').trim() const selected = String(row?.AttributeCode || '').trim()
if (!selected) { if (!selected) {
$q.notify({ type: 'negative', message: `Urun ozelliklerinde secim zorunlu: ${row?.TypeLabel || ''}` }) $q.notify({ type: 'negative', message: `Urun ozelliklerinde secim zorunlu: ${row?.TypeLabel || ''}` })
return return
} }
} }
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(attributeRows.value || []))) store.setProductAttributeDraft(code, rows)
console.info('[OrderProductionUpdate] saveAttributeDraft', {
code,
rowCount: rows.length,
selectedCount: rows.length,
selected: rows.map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
code: String(r?.AttributeCode || '').trim()
}))
})
attributeDialogOpen.value = false attributeDialogOpen.value = false
$q.notify({ type: 'positive', message: 'Urun ozellikleri taslagi kaydedildi.' }) $q.notify({ type: 'positive', message: 'Urun ozellikleri taslagi kaydedildi.' })
} }
function collectProductAttributesFromSelectedRows (selectedRows) { watch(
cdItemDraftForm,
() => {
if (!cdItemDialogOpen.value) return
persistCdItemDraft()
},
{ deep: true }
)
async function collectProductAttributesFromSelectedRows (selectedRows) {
const codeSet = [...new Set( const codeSet = [...new Set(
(selectedRows || []) (selectedRows || [])
.map(r => String(r?.NewItemCode || '').trim().toUpperCase()) .map(r => String(r?.NewItemCode || '').trim().toUpperCase())
@@ -959,16 +1387,82 @@ function collectProductAttributesFromSelectedRows (selectedRows) {
const out = [] const out = []
for (const code of codeSet) { for (const code of codeSet) {
const rows = store.getProductAttributeDraft(code) const modeInfo = store.classifyItemCode(code)
let rows = store.getProductAttributeDraft(code)
const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
const dbMap = new Map(
(dbCurrent || []).map(x => [
Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
String(x?.attribute_code || x?.AttributeCode || '').trim()
]).filter(x => x[0] > 0)
)
const hasDbAttributes = dbMap.size > 0
const effectiveMode = hasDbAttributes ? 'existing' : modeInfo.mode
console.info('[OrderProductionUpdate] collectProductAttributes start', {
code,
mode: modeInfo.mode,
effectiveMode,
hasDbAttributes,
draftRowCount: Array.isArray(rows) ? rows.length : 0
})
if (effectiveMode === 'existing') {
// Existing kodda kullanıcı değişiklik yaptıysa draftı koru.
// Draft yoksa DB'den zorunlu/fresh çek.
if (!Array.isArray(rows) || !rows.length) {
const lookup = await store.fetchProductAttributes(1)
const baseRows = buildAttributeRowsFromLookup(lookup)
console.info('[OrderProductionUpdate] collectProductAttributes existing refetch', {
code,
lookupCount: Array.isArray(lookup) ? lookup.length : 0,
baseRowCount: baseRows.length
})
rows = baseRows.map(row => {
const currentCode = dbMap.get(Number(row.AttributeTypeCodeNumber || 0)) || ''
const currentOptions = Array.isArray(row.AllOptions)
? [...row.AllOptions]
: (Array.isArray(row.Options) ? [...row.Options] : [])
if (currentCode && !currentOptions.some(opt => String(opt?.value || '').trim() === currentCode)) {
currentOptions.unshift({ value: currentCode, label: currentCode })
}
return {
...row,
AttributeCode: currentCode,
OriginalAttributeCode: currentCode,
AllOptions: currentOptions,
Options: [...currentOptions]
}
})
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(rows)))
}
} else if (!Array.isArray(rows) || !rows.length) {
return { errMsg: `${code} icin urun ozellikleri taslagi kaydedilmedi`, productAttributes: [] }
}
if (!Array.isArray(rows) || !rows.length) { if (!Array.isArray(rows) || !rows.length) {
return { errMsg: `${code} icin urun ozellikleri secilmedi`, productAttributes: [] } return { errMsg: `${code} icin urun ozellikleri secilmedi`, productAttributes: [] }
} }
for (const row of rows) { for (const row of rows) {
const attributeTypeCode = Number(row?.AttributeTypeCodeNumber || 0) const attributeTypeCode = Number(row?.AttributeTypeCodeNumber || 0)
const attributeCode = String(row?.AttributeCode || '').trim() const attributeCode = String(row?.AttributeCode || '').trim()
if (!attributeTypeCode || !attributeCode) {
if (!attributeTypeCode) {
return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] } return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
} }
if (effectiveMode === 'existing') {
const originalCode =
dbMap.get(attributeTypeCode) ||
String(row?.OriginalAttributeCode || '').trim()
const changed = attributeCode !== originalCode
if (!changed) continue
if (!attributeCode) {
return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
}
} else if (!attributeCode) {
return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
}
out.push({ out.push({
ItemTypeCode: 1, ItemTypeCode: 1,
ItemCode: code, ItemCode: code,
@@ -976,11 +1470,23 @@ function collectProductAttributesFromSelectedRows (selectedRows) {
AttributeCode: attributeCode AttributeCode: attributeCode
}) })
} }
console.info('[OrderProductionUpdate] collectProductAttributes done', {
code,
mode: modeInfo.mode,
effectiveMode,
outCount: out.filter(x => x.ItemCode === code).length,
rowCount: rows.length,
optionCounts: rows.map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
options: Array.isArray(r?.Options) ? r.Options.length : 0,
allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0
}))
})
} }
return { errMsg: '', productAttributes: out } return { errMsg: '', productAttributes: out }
} }
function collectCdItemsFromSelectedRows (selectedRows) { async function collectCdItemsFromSelectedRows (selectedRows) {
const codes = [...new Set( const codes = [...new Set(
(selectedRows || []) (selectedRows || [])
.filter(r => r?.NewItemMode === 'new' && String(r?.NewItemCode || '').trim()) .filter(r => r?.NewItemMode === 'new' && String(r?.NewItemCode || '').trim())
@@ -990,7 +1496,16 @@ function collectCdItemsFromSelectedRows (selectedRows) {
const out = [] const out = []
for (const code of codes) { for (const code of codes) {
const draft = store.getCdItemDraft(code) let draft = store.getCdItemDraft(code)
if (!draft) {
const existingCdItem = await store.fetchCdItemByCode(code)
if (existingCdItem) {
store.markItemCodeKnownExisting(code, true)
syncRowsForKnownExistingCode(code)
draft = normalizeCdItemDraftForPayload(existingCdItem)
store.setCdItemDraft(code, draft)
}
}
if (!draft) { if (!draft) {
return { errMsg: `${code} icin cdItem bilgisi eksik`, cdItems: [] } return { errMsg: `${code} icin cdItem bilgisi eksik`, cdItems: [] }
} }
@@ -1010,11 +1525,49 @@ function buildMailLineLabelFromRow (row) {
return [item, colorPart, desc].filter(Boolean).join(' ') return [item, colorPart, desc].filter(Boolean).join(' ')
} }
function buildUpdateMailLineLabelFromRow (row) {
const newItem = String(row?.NewItemCode || row?.OldItemCode || '').trim().toUpperCase()
const newColor = String(row?.NewColor || row?.OldColor || '').trim().toUpperCase()
const newDim2 = String(row?.NewDim2 || row?.OldDim2 || '').trim().toUpperCase()
const desc = mergeDescWithAutoNote(row, row?.NewDesc || row?.OldDesc || '')
if (!newItem) return ''
const colorPart = newDim2 ? `${newColor}-${newDim2}` : newColor
return [newItem, colorPart, desc].filter(Boolean).join(' ')
}
/**
 * Collect due-date changes from the selected rows for the update mail.
 * A row contributes only when it has an item code, a new due date, and the
 * formatted new date differs from the old one. Duplicate
 * item/color/dim2/old/new combinations are emitted once.
 *
 * @param {Array<Object>} selectedRows selected grid rows
 * @returns {Array<{itemCode:string,colorCode:string,itemDim2Code:string,oldDueDate:string,newDueDate:string}>}
 */
function buildDueDateChangeRowsFromSelectedRows (selectedRows) {
  const emittedKeys = new Set()
  const changes = []
  for (const row of (selectedRows || [])) {
    const itemCode = String(row?.NewItemCode || row?.OldItemCode || '').trim().toUpperCase()
    const colorCode = String(row?.NewColor || row?.OldColor || '').trim().toUpperCase()
    const itemDim2Code = String(row?.NewDim2 || row?.OldDim2 || '').trim().toUpperCase()
    const oldDueDate = formatDate(row?.OldDueDate)
    const newDueDate = formatDate(row?.NewDueDate)
    const hasChange = itemCode && newDueDate && oldDueDate !== newDueDate
    if (!hasChange) continue
    const dedupeKey = [itemCode, colorCode, itemDim2Code, oldDueDate, newDueDate].join('||')
    if (emittedKeys.has(dedupeKey)) continue
    emittedKeys.add(dedupeKey)
    changes.push({
      itemCode,
      colorCode,
      itemDim2Code,
      oldDueDate,
      newDueDate
    })
  }
  return changes
}
function buildProductionUpdateMailPayload (selectedRows) { function buildProductionUpdateMailPayload (selectedRows) {
const updatedItems = [ const updatedItems = [
...new Set( ...new Set(
(selectedRows || []) (selectedRows || [])
.map(buildMailLineLabelFromRow) .map(buildUpdateMailLineLabelFromRow)
.filter(Boolean) .filter(Boolean)
) )
] ]
@@ -1023,30 +1576,62 @@ function buildProductionUpdateMailPayload (selectedRows) {
operation: 'update', operation: 'update',
deletedItems: [], deletedItems: [],
updatedItems, updatedItems,
addedItems: [] addedItems: [],
dueDateChanges: buildDueDateChangeRowsFromSelectedRows(selectedRows)
} }
} }
/**
 * Extract the non-empty, trimmed `message` strings from a list of barcode
 * validation entries. Non-array input yields an empty list.
 *
 * @param {Array<Object>|*} validations validation entries from the API
 * @returns {string[]} trimmed, non-empty messages
 */
function formatBarcodeValidationMessages (validations) {
  const entries = Array.isArray(validations) ? validations : []
  const messages = []
  for (const entry of entries) {
    const text = String(entry?.message || '').trim()
    if (text) messages.push(text)
  }
  return messages
}
/**
 * Show the barcode validation messages in a blocking dialog.
 *
 * @param {Array<Object>|*} validations validation entries from the API
 * @returns {boolean} true when a dialog was shown, false when there were no messages
 */
function showBarcodeValidationDialog (validations) {
  const messages = formatBarcodeValidationMessages(validations)
  if (!messages.length) return false
  // The dialog renders with `html: true` so `<br>` works as a line break.
  // Messages come from the server response, so escape them first — otherwise
  // any markup in a message would be injected into the DOM (XSS risk).
  const escapeHtml = (text) => String(text)
    .replaceAll('&', '&amp;')
    .replaceAll('<', '&lt;')
    .replaceAll('>', '&gt;')
  $q.dialog({
    title: 'Barkod Validasyonlari',
    message: messages.map(escapeHtml).join('<br>'),
    html: true,
    ok: { label: 'Tamam', color: 'negative' }
  })
  return true
}
async function sendUpdateMailAfterApply (selectedRows) { async function sendUpdateMailAfterApply (selectedRows) {
const orderId = String(orderHeaderID.value || '').trim() const orderId = String(orderHeaderID.value || '').trim()
if (!orderId) return if (!orderId) return
const host = String(window?.location?.hostname || '').trim().toLowerCase() const host = String(window?.location?.hostname || '').trim().toLowerCase()
const isLocalHost = host === 'localhost' || host === '127.0.0.1' const isLocalHost = host === 'localhost' || host === '127.0.0.1'
if (isLocalHost) { if (isLocalHost) {
console.info('[OrderProductionUpdate] sendUpdateMailAfterApply skipped (localhost)', { orderHeaderID: orderId, host })
return return
} }
try { try {
const t0 = nowMs()
const payload = buildProductionUpdateMailPayload(selectedRows) const payload = buildProductionUpdateMailPayload(selectedRows)
console.info('[OrderProductionUpdate] sendUpdateMailAfterApply start', {
orderHeaderID: orderId,
updatedItems: payload?.updatedItems?.length || 0
})
const res = await api.post('/order/send-market-mail', { const res = await api.post('/order/send-market-mail', {
orderHeaderID: orderId, orderHeaderID: orderId,
operation: payload.operation, operation: payload.operation,
deletedItems: payload.deletedItems, deletedItems: payload.deletedItems,
updatedItems: payload.updatedItems, updatedItems: payload.updatedItems,
addedItems: payload.addedItems addedItems: payload.addedItems,
dueDateChanges: payload.dueDateChanges,
extraRecipients: ['urun@baggi.com.tr']
}) })
const sentCount = Number(res?.data?.sentCount || 0) const sentCount = Number(res?.data?.sentCount || 0)
console.info('[OrderProductionUpdate] sendUpdateMailAfterApply done', {
orderHeaderID: orderId,
sentCount,
durationMs: Math.round(nowMs() - t0)
})
$q.notify({ $q.notify({
type: 'positive', type: 'positive',
message: sentCount > 0 message: sentCount > 0
@@ -1054,6 +1639,12 @@ async function sendUpdateMailAfterApply (selectedRows) {
: 'Guncelleme maili gonderildi' : 'Guncelleme maili gonderildi'
}) })
} catch (err) { } catch (err) {
console.error('[OrderProductionUpdate] sendUpdateMailAfterApply failed', {
orderHeaderID: orderId,
status: err?.response?.status,
data: err?.response?.data,
message: err?.message
})
$q.notify({ $q.notify({
type: 'warning', type: 'warning',
message: 'Guncelleme kaydedildi, mail gonderilemedi.' message: 'Guncelleme kaydedildi, mail gonderilemedi.'
@@ -1075,11 +1666,34 @@ function buildGroupKey (item) {
function formatSizes (sizeMap) { function formatSizes (sizeMap) {
const entries = Object.entries(sizeMap || {}) const entries = Object.entries(sizeMap || {})
if (!entries.length) return { list: [], label: '-' } if (!entries.length) return { list: [], label: '-' }
entries.sort((a, b) => String(a[0]).localeCompare(String(b[0]))) entries.sort((a, b) => {
const left = String(a[0] || '').trim()
const right = String(b[0] || '').trim()
if (/^\d+$/.test(left) && /^\d+$/.test(right)) {
return Number(left) - Number(right)
}
return left.localeCompare(right)
})
const label = entries.map(([k, v]) => (v > 1 ? `${k}(${v})` : k)).join(', ') const label = entries.map(([k, v]) => (v > 1 ? `${k}(${v})` : k)).join(', ')
return { list: entries.map(([k]) => k), label } return { list: entries.map(([k]) => k), label }
} }
/**
 * Build a "CODE - Description" display label.
 * The code is trimmed and upper-cased, the description only trimmed.
 * When either part is empty the other is returned alone; both empty yields ''.
 *
 * @param {string|*} code        item/color code
 * @param {string|*} description human-readable description
 * @returns {string} combined label
 */
function formatCodeDescriptionLabel (code, description) {
  const normalizedCode = String(code || '').trim().toUpperCase()
  const normalizedDesc = String(description || '').trim()
  if (normalizedCode && normalizedDesc) {
    return `${normalizedCode} - ${normalizedDesc}`
  }
  return normalizedCode || normalizedDesc
}
/**
 * Format a quantity for display.
 * Non-numeric / non-finite input renders as '0'; integers render without
 * decimals; fractional values use Turkish locale formatting with at most
 * two fraction digits.
 *
 * @param {number|string|*} value raw quantity
 * @returns {string} display label
 */
function formatQtyLabel (value) {
  const quantity = Number(value || 0)
  if (!Number.isFinite(quantity)) {
    return '0'
  }
  if (Number.isInteger(quantity)) {
    return String(quantity)
  }
  return quantity.toLocaleString('tr-TR', {
    minimumFractionDigits: 0,
    maximumFractionDigits: 2
  })
}
function groupItems (items, prevRows = []) { function groupItems (items, prevRows = []) {
const prevMap = new Map() const prevMap = new Map()
for (const r of prevRows || []) { for (const r of prevRows || []) {
@@ -1091,7 +1705,8 @@ function groupItems (items, prevRows = []) {
NewDim2: String(r.NewDim2 || '').trim().toUpperCase(), NewDim2: String(r.NewDim2 || '').trim().toUpperCase(),
NewItemMode: String(r.NewItemMode || '').trim(), NewItemMode: String(r.NewItemMode || '').trim(),
NewItemSource: String(r.NewItemSource || '').trim(), NewItemSource: String(r.NewItemSource || '').trim(),
NewItemEntryMode: String(r.NewItemEntryMode || '').trim() NewItemEntryMode: String(r.NewItemEntryMode || '').trim(),
NewDueDate: String(r.NewDueDate || '').trim()
}) })
} }
const map = new Map() const map = new Map()
@@ -1107,12 +1722,19 @@ function groupItems (items, prevRows = []) {
OrderHeaderID: it.OrderHeaderID, OrderHeaderID: it.OrderHeaderID,
OldItemCode: it.OldItemCode, OldItemCode: it.OldItemCode,
OldColor: it.OldColor, OldColor: it.OldColor,
OldColorDescription: it.OldColorDescription,
OldColorLabel: formatCodeDescriptionLabel(it.OldColor, it.OldColorDescription),
OldDim2: it.OldDim2, OldDim2: it.OldDim2,
OldDim3: it.OldDim3, OldDim3: it.OldDim3,
OldDesc: it.OldDesc, OldDesc: it.OldDesc,
OldDueDate: it.OldDueDate || '',
NewDueDate: (prev.NewDueDate || it.OldDueDate || ''),
OrderLineIDs: [], OrderLineIDs: [],
OrderLines: [],
OldSizes: [], OldSizes: [],
OldSizesLabel: '', OldSizesLabel: '',
OldTotalQty: 0,
OldTotalQtyLabel: '0',
NewItemCode: prev.NewItemCode || '', NewItemCode: prev.NewItemCode || '',
NewColor: prev.NewColor || '', NewColor: prev.NewColor || '',
NewDim2: prev.NewDim2 || '', NewDim2: prev.NewDim2 || '',
@@ -1120,18 +1742,34 @@ function groupItems (items, prevRows = []) {
NewItemMode: prev.NewItemMode || 'empty', NewItemMode: prev.NewItemMode || 'empty',
NewItemSource: prev.NewItemSource || '', NewItemSource: prev.NewItemSource || '',
NewItemEntryMode: prev.NewItemEntryMode || '', NewItemEntryMode: prev.NewItemEntryMode || '',
IsVariantMissing: !!it.IsVariantMissing IsVariantMissing: !!it.IsVariantMissing,
yasPayloadMap: {}
}) })
} }
const g = map.get(key) const g = map.get(key)
if (it?.OrderLineID) g.OrderLineIDs.push(it.OrderLineID) if (it?.OrderLineID) g.OrderLineIDs.push(it.OrderLineID)
const size = String(it?.OldDim1 || '').trim() const rawSize = String(it?.OldDim1 || '').trim()
const size = store.normalizeDim1ForUi(rawSize)
const rawSizeUpper = rawSize.toUpperCase()
if (/^(\d+)\s*(Y|YAS|YAŞ)$/.test(rawSizeUpper) && size) {
g.yasPayloadMap[size] = store.pickPreferredYasPayloadLabel(
g.yasPayloadMap[size],
rawSizeUpper
)
}
if (it?.OrderLineID) {
g.OrderLines.push({
OrderLineID: it.OrderLineID,
ItemDim1Code: size
})
}
if (size !== '') { if (size !== '') {
g.__sizeMap = g.__sizeMap || {} g.__sizeMap = g.__sizeMap || {}
g.__sizeMap[size] = (g.__sizeMap[size] || 0) + 1 g.__sizeMap[size] = (g.__sizeMap[size] || 0) + 1
} }
g.__oldQtyTotal = Number(g.__oldQtyTotal || 0) + Number(it?.OldQty || 0)
if (it?.IsVariantMissing) g.IsVariantMissing = true if (it?.IsVariantMissing) g.IsVariantMissing = true
} }
@@ -1140,6 +1778,8 @@ function groupItems (items, prevRows = []) {
const sizes = formatSizes(g.__sizeMap || {}) const sizes = formatSizes(g.__sizeMap || {})
g.OldSizes = sizes.list g.OldSizes = sizes.list
g.OldSizesLabel = sizes.label g.OldSizesLabel = sizes.label
g.OldTotalQty = Number(g.__oldQtyTotal || 0)
g.OldTotalQtyLabel = formatQtyLabel(g.OldTotalQty)
const info = store.classifyItemCode(g.NewItemCode) const info = store.classifyItemCode(g.NewItemCode)
g.NewItemCode = info.normalized g.NewItemCode = info.normalized
g.NewItemMode = info.mode g.NewItemMode = info.mode
@@ -1148,6 +1788,7 @@ function groupItems (items, prevRows = []) {
g.NewItemEntryMode = g.NewItemSource === 'selected' ? 'selected' : 'typed' g.NewItemEntryMode = g.NewItemSource === 'selected' ? 'selected' : 'typed'
} }
delete g.__sizeMap delete g.__sizeMap
delete g.__oldQtyTotal
out.push(g) out.push(g)
} }
@@ -1161,10 +1802,24 @@ async function refreshAll () {
} }
async function onBulkSubmit () { async function onBulkSubmit () {
if (isBulkSubmitting.value || store.saving) {
console.info('[OrderProductionUpdate] onBulkSubmit ignored (already running)', {
orderHeaderID: orderHeaderID.value,
isBulkSubmitting: isBulkSubmitting.value,
storeSaving: store.saving
})
return
}
isBulkSubmitting.value = true
const flowStart = nowMs() const flowStart = nowMs()
try {
suppressAutoSetupDialogs.value = true
const selectedRows = rows.value.filter(r => !!selectedMap.value[r.RowKey]) const selectedRows = rows.value.filter(r => !!selectedMap.value[r.RowKey])
if (!selectedRows.length) { const headerAverageDueDateValue = normalizeDateInput(headerAverageDueDate.value)
$q.notify({ type: 'warning', message: 'Lutfen en az bir satir seciniz.' }) const headerDateChanged = hasHeaderAverageDueDateChange.value
if (!selectedRows.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Lutfen en az bir satir seciniz veya ustteki termin tarihini degistiriniz.' })
return return
} }
@@ -1174,24 +1829,38 @@ async function onBulkSubmit () {
$q.notify({ type: 'negative', message: errMsg }) $q.notify({ type: 'negative', message: errMsg })
return return
} }
if (!lines.length) { if (!lines.length && !headerDateChanged) {
$q.notify({ type: 'negative', message: 'Secili satirlarda guncellenecek kayit bulunamadi.' }) $q.notify({ type: 'warning', message: 'Secili satirlarda degisiklik yok.' })
return return
} }
const { errMsg: cdErrMsg, cdItems } = collectCdItemsFromSelectedRows(selectedRows)
if (lines.length > 0) {
const changedRows = selectedRows.filter(hasRowChange)
const confirmed = await confirmOptionalColorWarnings(changedRows)
if (!confirmed) return
}
let cdItems = []
let productAttributes = []
if (lines.length > 0) {
const { errMsg: cdErrMsg, cdItems: nextCdItems } = await collectCdItemsFromSelectedRows(selectedRows)
if (cdErrMsg) { if (cdErrMsg) {
$q.notify({ type: 'negative', message: cdErrMsg }) $q.notify({ type: 'negative', message: cdErrMsg })
const firstCode = String(cdErrMsg.split(' ')[0] || '').trim()
if (firstCode) openCdItemDialog(firstCode)
return return
} }
const { errMsg: attrErrMsg, productAttributes } = collectProductAttributesFromSelectedRows(selectedRows) cdItems = nextCdItems
const { errMsg: attrErrMsg, productAttributes: nextProductAttributes } = await collectProductAttributesFromSelectedRows(selectedRows)
if (attrErrMsg) { if (attrErrMsg) {
$q.notify({ type: 'negative', message: attrErrMsg }) $q.notify({ type: 'negative', message: attrErrMsg })
const firstCode = String(attrErrMsg.split(' ')[0] || '').trim() const firstCode = String(attrErrMsg.split(' ')[0] || '').trim().toUpperCase()
if (firstCode) openAttributeDialog(firstCode) if (isValidBaggiModelCode(firstCode)) {
await openAttributeDialog(firstCode)
}
return return
} }
productAttributes = nextProductAttributes
}
console.info('[OrderProductionUpdate] onBulkSubmit prepared', { console.info('[OrderProductionUpdate] onBulkSubmit prepared', {
orderHeaderID: orderHeaderID.value, orderHeaderID: orderHeaderID.value,
@@ -1199,18 +1868,55 @@ async function onBulkSubmit () {
lineCount: lines.length, lineCount: lines.length,
cdItemCount: cdItems.length, cdItemCount: cdItems.length,
attributeCount: productAttributes.length, attributeCount: productAttributes.length,
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
prepDurationMs: Math.round(nowMs() - prepStart) prepDurationMs: Math.round(nowMs() - prepStart)
}) })
try { const applyChanges = async (insertMissing) => {
const applyStart = nowMs()
const applyResult = await store.applyUpdates(
orderHeaderID.value,
lines,
insertMissing,
cdItems,
productAttributes,
headerDateChanged ? headerAverageDueDateValue : null
)
console.info('[OrderProductionUpdate] apply finished', {
orderHeaderID: orderHeaderID.value,
insertMissing: !!insertMissing,
lineCount: lines.length,
barcodeInserted: Number(applyResult?.barcodeInserted || 0),
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
durationMs: Math.round(nowMs() - applyStart)
})
await store.fetchHeader(orderHeaderID.value)
if (lines.length > 0) {
await store.fetchItems(orderHeaderID.value)
}
selectedMap.value = {}
if (lines.length > 0) {
await sendUpdateMailAfterApply(selectedRows)
} else {
$q.notify({ type: 'positive', message: 'Tahmini termin tarihi guncellendi.' })
}
}
if (lines.length > 0) {
const validateStart = nowMs() const validateStart = nowMs()
const validate = await store.validateUpdates(orderHeaderID.value, lines) const validate = await store.validateUpdates(orderHeaderID.value, lines, cdItems)
console.info('[OrderProductionUpdate] validate finished', { console.info('[OrderProductionUpdate] validate finished', {
orderHeaderID: orderHeaderID.value, orderHeaderID: orderHeaderID.value,
lineCount: lines.length, lineCount: lines.length,
missingCount: Number(validate?.missingCount || 0), missingCount: Number(validate?.missingCount || 0),
barcodeValidationCount: Number(validate?.barcodeValidationCount || 0),
durationMs: Math.round(nowMs() - validateStart) durationMs: Math.round(nowMs() - validateStart)
}) })
if (showBarcodeValidationDialog(validate?.barcodeValidations)) {
return
}
const missingCount = validate?.missingCount || 0 const missingCount = validate?.missingCount || 0
if (missingCount > 0) { if (missingCount > 0) {
const missingList = (validate?.missing || []).map(v => ( const missingList = (validate?.missing || []).map(v => (
@@ -1223,45 +1929,36 @@ async function onBulkSubmit () {
ok: { label: 'Ekle ve Guncelle', color: 'primary' }, ok: { label: 'Ekle ve Guncelle', color: 'primary' },
cancel: { label: 'Vazgec', flat: true } cancel: { label: 'Vazgec', flat: true }
}).onOk(async () => { }).onOk(async () => {
const applyStart = nowMs() await applyChanges(true)
await store.applyUpdates(orderHeaderID.value, lines, true, cdItems, productAttributes)
console.info('[OrderProductionUpdate] apply finished', {
orderHeaderID: orderHeaderID.value,
insertMissing: true,
durationMs: Math.round(nowMs() - applyStart)
})
await store.fetchItems(orderHeaderID.value)
selectedMap.value = {}
await sendUpdateMailAfterApply(selectedRows)
}) })
return return
} }
}
const applyStart = nowMs() await applyChanges(false)
await store.applyUpdates(orderHeaderID.value, lines, false, cdItems, productAttributes)
console.info('[OrderProductionUpdate] apply finished', {
orderHeaderID: orderHeaderID.value,
insertMissing: false,
durationMs: Math.round(nowMs() - applyStart)
})
await store.fetchItems(orderHeaderID.value)
selectedMap.value = {}
await sendUpdateMailAfterApply(selectedRows)
} catch (err) { } catch (err) {
console.error('[OrderProductionUpdate] onBulkSubmit failed', { console.error('[OrderProductionUpdate] onBulkSubmit failed', {
orderHeaderID: orderHeaderID.value, orderHeaderID: orderHeaderID.value,
selectedRowCount: selectedRows.length, selectedRowCount: selectedRows.length,
lineCount: lines.length, lineCount: lines.length,
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
apiError: err?.response?.data, apiError: err?.response?.data,
message: err?.message message: err?.message
}) })
$q.notify({ type: 'negative', message: store.error || 'Toplu kayit islemi basarisiz.' }) if (showBarcodeValidationDialog(err?.response?.data?.barcodeValidations)) {
return
} }
$q.notify({ type: 'negative', message: store.error || 'Toplu kayit islemi basarisiz.' })
} finally {
isBulkSubmitting.value = false
suppressAutoSetupDialogs.value = false
console.info('[OrderProductionUpdate] onBulkSubmit total', { console.info('[OrderProductionUpdate] onBulkSubmit total', {
orderHeaderID: orderHeaderID.value, orderHeaderID: orderHeaderID.value,
durationMs: Math.round(nowMs() - flowStart) durationMs: Math.round(nowMs() - flowStart)
}) })
} }
}
</script> </script>
<style scoped> <style scoped>

View File

@@ -0,0 +1,1261 @@
<template>
<!-- Product pricing screen: virtual-scrolled table with a sticky identity-column
     group, Excel-style per-column filters and inline-editable tiered prices. -->
<q-page class="q-pa-xs pricing-page">
<!-- Toolbar: currency column visibility menu, selected-only view toggle, filter reset, data reload -->
<div class="top-bar row items-center justify-between q-mb-xs">
<div class="text-subtitle1 text-weight-bold">Urun Fiyatlandirma</div>
<div class="row items-center q-gutter-xs">
<q-btn-dropdown color="secondary" outline icon="view_module" label="Doviz Gorunumu" :auto-close="false">
<q-list dense class="currency-menu-list">
<q-item clickable @click="selectAllCurrencies">
<q-item-section>Tumunu Sec</q-item-section>
</q-item>
<q-item clickable @click="clearAllCurrencies">
<q-item-section>Tumunu Temizle</q-item-section>
</q-item>
<q-separator />
<!-- One row per currency; clicking the row or the checkbox toggles that currency's columns -->
<q-item v-for="option in currencyOptions" :key="option.value" clickable @click="toggleCurrencyRow(option.value)">
<q-item-section avatar>
<q-checkbox
:model-value="isCurrencySelected(option.value)"
dense
@update:model-value="(val) => toggleCurrency(option.value, val)"
@click.stop
/>
</q-item-section>
<q-item-section>{{ option.label }}</q-item-section>
</q-item>
</q-list>
</q-btn-dropdown>
<q-btn
flat
:color="showSelectedOnly ? 'primary' : 'grey-7'"
:icon="showSelectedOnly ? 'checklist_rtl' : 'list_alt'"
:label="showSelectedOnly ? `Secililer (${selectedRowCount})` : 'Secili Olanlari Getir'"
:disable="!showSelectedOnly && selectedRowCount === 0"
@click="toggleShowSelectedOnly"
/>
<q-btn flat color="grey-7" icon="restart_alt" label="Filtreleri Sifirla" @click="resetAll" />
<q-btn color="primary" icon="refresh" label="Veriyi Yenile" :loading="store.loading" @click="reloadData" />
</div>
</div>
<!-- Table wrapper; --sticky-scroll-comp reserves horizontal room for the sticky column group -->
<div class="table-wrap" :style="{ '--sticky-scroll-comp': `${stickyScrollComp}px` }">
<q-table
ref="mainTableRef"
class="pane-table pricing-table"
flat
dense
row-key="id"
:rows="filteredRows"
:columns="visibleColumns"
:loading="store.loading"
virtual-scroll
:virtual-scroll-item-size="rowHeight"
:virtual-scroll-sticky-size-start="headerHeight"
:virtual-scroll-slice-size="36"
:rows-per-page-options="[0]"
v-model:pagination="tablePagination"
hide-bottom
:table-style="tableStyle"
@virtual-scroll="onTableVirtualScroll"
>
<!-- Custom header: select-all checkbox plus a filter button/menu per filterable column -->
<template #header="props">
<q-tr :props="props" class="header-row-fixed">
<q-th
v-for="col in props.cols"
:key="col.name"
:props="props"
:class="[col.headerClasses, { 'sticky-col': isStickyCol(col.name), 'sticky-boundary': isStickyBoundary(col.name) }]"
:style="getHeaderCellStyle(col)"
>
<q-checkbox
v-if="col.name === 'select'"
size="sm"
color="primary"
:model-value="allSelectedVisible"
:indeterminate="someSelectedVisible && !allSelectedVisible"
@update:model-value="toggleSelectAllVisible"
/>
<div v-else class="header-with-filter">
<span>{{ col.label }}</span>
<q-btn
v-if="isHeaderFilterField(col.field)"
dense
flat
round
size="8px"
icon="filter_alt"
:color="hasFilter(col.field) ? 'primary' : 'grey-7'"
class="header-filter-btn"
>
<q-badge v-if="hasFilter(col.field)" color="primary" floating rounded>
{{ getFilterBadgeValue(col.field) }}
</q-badge>
<q-menu anchor="bottom right" self="top right" :offset="[0, 4]">
<!-- Multi-select (Excel-style) value filter with search box and virtualized option list -->
<div v-if="isMultiSelectFilterField(col.field)" class="excel-filter-menu">
<q-input
v-model="columnFilterSearch[col.field]"
dense
outlined
clearable
use-input
class="excel-filter-select"
placeholder="Ara"
/>
<div class="excel-filter-actions row items-center justify-between q-pt-xs">
<q-btn flat dense size="sm" label="Tumunu Sec" @click="selectAllColumnFilterOptions(col.field)" />
<q-btn flat dense size="sm" label="Temizle" @click="clearColumnFilter(col.field)" />
</div>
<q-virtual-scroll
v-if="getFilterOptionsForField(col.field).length > 0"
class="excel-filter-options"
:items="getFilterOptionsForField(col.field)"
:virtual-scroll-item-size="32"
separator
>
<template #default="{ item: option }">
<q-item
:key="`${col.field}-${option.value}`"
dense
clickable
class="excel-filter-option"
@click="toggleColumnFilterValue(col.field, option.value)"
>
<q-item-section avatar>
<q-checkbox
dense
size="sm"
:model-value="isColumnFilterValueSelected(col.field, option.value)"
@update:model-value="() => toggleColumnFilterValue(col.field, option.value)"
@click.stop
/>
</q-item-section>
<q-item-section>
<q-item-label>{{ option.label }}</q-item-label>
</q-item-section>
</q-item>
</template>
</q-virtual-scroll>
<div v-else class="excel-filter-empty">
Sonuc yok
</div>
</div>
<!-- Numeric min/max range filter (stock quantity) -->
<div v-else-if="isNumberRangeFilterField(col.field)" class="excel-filter-menu">
<div class="range-filter-grid">
<q-input
v-model="numberRangeFilters[col.field].min"
dense
outlined
clearable
label="Min"
inputmode="decimal"
class="range-filter-field"
/>
<q-input
v-model="numberRangeFilters[col.field].max"
dense
outlined
clearable
label="Max"
inputmode="decimal"
class="range-filter-field"
/>
</div>
<div class="row justify-end q-pt-xs">
<q-btn flat dense size="sm" label="Temizle" @click="clearRangeFilter(col.field)" />
</div>
</div>
<!-- Date from/to range filter (stock entry / last pricing dates) -->
<div v-else-if="isDateRangeFilterField(col.field)" class="excel-filter-menu">
<div class="range-filter-grid">
<q-input
v-model="dateRangeFilters[col.field].from"
dense
outlined
clearable
type="date"
label="Baslangic"
class="range-filter-field"
/>
<q-input
v-model="dateRangeFilters[col.field].to"
dense
outlined
clearable
type="date"
label="Bitis"
class="range-filter-field"
/>
</div>
<div class="row justify-end q-pt-xs">
<q-btn flat dense size="sm" label="Temizle" @click="clearRangeFilter(col.field)" />
</div>
</div>
</q-menu>
</q-btn>
<!-- Invisible placeholder keeps all headers the same width whether or not they have a filter -->
<q-btn
v-else
dense
flat
round
size="8px"
icon="filter_alt"
class="header-filter-btn header-filter-ghost"
tabindex="-1"
/>
</div>
</q-th>
</q-tr>
</template>
<!-- Row selection checkbox (sticky column) -->
<template #body-cell-select="props">
<q-td
:props="props"
class="text-center selection-col"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<q-checkbox
size="sm"
color="primary"
:model-value="!!selectedMap[props.row.id]"
@update:model-value="(val) => toggleRowSelection(props.row.id, val)"
/>
</q-td>
</template>
<template #body-cell-productCode="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span class="product-code-text" :title="String(props.value ?? '')">{{ props.value }}</span>
</q-td>
</template>
<template #body-cell-stockQty="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span class="stock-qty-text">{{ formatStock(props.value) }}</span>
</q-td>
</template>
<template #body-cell-stockEntryDate="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span class="date-cell-text">{{ formatDateDisplay(props.value) }}</span>
</q-td>
</template>
<!-- Last pricing date is highlighted when the row needs repricing (older than stock entry) -->
<template #body-cell-lastPricingDate="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<span :class="['date-cell-text', { 'date-warning': needsRepricing(props.row) }]">
{{ formatDateDisplay(props.value) }}
</span>
</q-td>
</template>
<!-- Native <select> is used instead of q-select for render performance in the virtual list -->
<template #body-cell-brandGroupSelection="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<select
class="native-cell-select"
:value="props.row.brandGroupSelection"
@change="(e) => onBrandGroupSelectionChange(props.row, e.target.value)"
>
<option value="">Seciniz</option>
<option v-for="opt in brandGroupOptions" :key="opt.value" :value="opt.value">
{{ opt.label }}
</option>
</select>
</q-td>
</template>
<!-- Default cell: native text input for editable price columns, plain text otherwise -->
<template #body-cell="props">
<q-td
:props="props"
:class="{ 'sticky-col': isStickyCol(props.col.name), 'sticky-boundary': isStickyBoundary(props.col.name) }"
:style="getBodyCellStyle(props.col)"
>
<input
v-if="editableColumnSet.has(props.col.name)"
class="native-cell-input text-right"
:value="formatPrice(props.row[props.col.field])"
type="text"
inputmode="decimal"
@change="(e) => onEditableCellChange(props.row, props.col.field, e.target.value)"
/>
<span v-else class="cell-text" :title="String(props.value ?? '')">{{ props.value }}</span>
</q-td>
</template>
</q-table>
</div>
<!-- Store-level fetch error -->
<q-banner v-if="store.error" class="bg-red text-white q-mt-xs">
Hata: {{ store.error }}
</q-banner>
</q-page>
</template>
<script setup>
import { computed, onMounted, ref, watch } from 'vue'
import { useProductPricingStore } from 'src/stores/ProductPricingStore'
const store = useProductPricingStore()
// Server-side paging: rows arrive in chunks keyed by a product-code cursor.
const FETCH_LIMIT = 500
const nextCursor = ref('')
const loadingMore = ref(false)
// Hard-coded FX rates used by recalcByBasePrice.
// NOTE(review): presumably placeholders — confirm whether these should come
// from a live rate source instead of being baked into the component.
const usdToTry = 38.25
const eurToTry = 41.6
// Price tier multipliers: tier N price = base price * multipliers[N - 1].
const multipliers = [1, 1.03, 1.06, 1.09, 1.12, 1.15]
// Fixed pixel heights; must stay in sync with the CSS custom properties
// --pricing-row-height / --pricing-header-height in the <style> block.
const rowHeight = 31
const headerHeight = 72
// Options for the per-row brand-group <select>.
const brandGroupOptions = [
{ label: 'MARKA GRUBU A', value: 'MARKA GRUBU A' },
{ label: 'MARKA GRUBU B', value: 'MARKA GRUBU B' },
{ label: 'MARKA GRUBU C', value: 'MARKA GRUBU C' }
]
// Currencies whose tier columns can be shown/hidden from the toolbar dropdown.
const currencyOptions = [
{ label: 'USD', value: 'USD' },
{ label: 'EUR', value: 'EUR' },
{ label: 'TRY', value: 'TRY' }
]
// Columns that get an Excel-style multi-select value filter.
const multiFilterColumns = [
{ field: 'productCode', label: 'Urun Kodu' },
{ field: 'askiliYan', label: 'Askili Yan' },
{ field: 'kategori', label: 'Kategori' },
{ field: 'urunIlkGrubu', label: 'Urun Ilk Grubu' },
{ field: 'urunAnaGrubu', label: 'Urun Ana Grubu' },
{ field: 'urunAltGrubu', label: 'Urun Alt Grubu' },
{ field: 'icerik', label: 'Icerik' },
{ field: 'karisim', label: 'Karisim' }
]
// Columns that get min/max numeric and from/to date range filters respectively.
const numberRangeFilterFields = ['stockQty']
const dateRangeFilterFields = ['stockEntryDate', 'lastPricingDate']
// Active filter state per column (multi-select values, search text, ranges).
const columnFilters = ref({
productCode: [],
askiliYan: [],
kategori: [],
urunIlkGrubu: [],
urunAnaGrubu: [],
urunAltGrubu: [],
icerik: [],
karisim: []
})
const columnFilterSearch = ref({
productCode: '',
askiliYan: '',
kategori: '',
urunIlkGrubu: '',
urunAnaGrubu: '',
urunAltGrubu: '',
icerik: '',
karisim: ''
})
const numberRangeFilters = ref({
stockQty: { min: '', max: '' }
})
const dateRangeFilters = ref({
stockEntryDate: { from: '', to: '' },
lastPricingDate: { from: '', to: '' }
})
// Fast membership lookups used by the header-filter predicates.
const multiSelectFilterFieldSet = new Set(multiFilterColumns.map((x) => x.field))
const numberRangeFilterFieldSet = new Set(numberRangeFilterFields)
const dateRangeFilterFieldSet = new Set(dateRangeFilterFields)
const headerFilterFieldSet = new Set([
...multiFilterColumns.map((x) => x.field),
...numberRangeFilterFields,
...dateRangeFilterFields
])
const mainTableRef = ref(null)
// rowsPerPage: 0 = "all rows" — required for q-table virtual scrolling.
const tablePagination = ref({
page: 1,
rowsPerPage: 0,
sortBy: 'productCode',
descending: false
})
// Row selection: id -> boolean. Kept as a map so toggles are O(1).
const selectedMap = ref({})
const selectedCurrencies = ref(['USD', 'EUR', 'TRY'])
const showSelectedOnly = ref(false)
// Columns rendered as native inputs whose edits flow through onEditableCellChange.
const editableColumns = [
'costPrice',
'expenseForBasePrice',
'basePriceUsd',
'basePriceTry',
'usd1',
'usd2',
'usd3',
'usd4',
'usd5',
'usd6',
'eur1',
'eur2',
'eur3',
'eur4',
'eur5',
'eur6',
'try1',
'try2',
'try3',
'try4',
'try5',
'try6'
]
const editableColumnSet = new Set(editableColumns)
/**
 * Builds a q-table column definition with a fixed pixel width.
 *
 * @param {string} name  - Column key (also used as the slot suffix).
 * @param {string} label - Header label.
 * @param {string} field - Row property the column reads.
 * @param {number} width - Fixed width in pixels (applied as width/min/max).
 * @param {object} [extra] - Optional overrides: align, sortable, classes, headerClasses.
 * @returns {object} Column descriptor consumed by q-table.
 */
function col (name, label, field, width, extra = {}) {
  const sizing = `width:${width}px;min-width:${width}px;max-width:${width}px;`
  const bodyClasses = extra.classes || ''
  return {
    name,
    label,
    field,
    align: extra.align || 'left',
    sortable: Boolean(extra.sortable),
    style: sizing,
    headerStyle: sizing,
    classes: bodyClasses,
    // Header falls back to the body classes when no header-specific ones are given.
    headerClasses: extra.headerClasses || bodyClasses || ''
  }
}
// Full column set; currency tier columns are filtered out by visibleColumns.
const allColumns = [
col('select', '', 'select', 40, { align: 'center', classes: 'text-center selection-col' }),
col('productCode', 'URUN KODU', 'productCode', 108, { sortable: true, classes: 'ps-col product-code-col' }),
col('stockQty', 'STOK ADET', 'stockQty', 72, { align: 'right', sortable: true, classes: 'ps-col stock-col' }),
col('stockEntryDate', 'STOK GIRIS TARIHI', 'stockEntryDate', 92, { align: 'center', sortable: true, classes: 'ps-col date-col' }),
col('lastPricingDate', 'SON FIYATLANDIRMA TARIHI', 'lastPricingDate', 108, { align: 'center', sortable: true, classes: 'ps-col date-col' }),
col('askiliYan', 'ASKILI YAN', 'askiliYan', 54, { sortable: true, classes: 'ps-col' }),
col('kategori', 'KATEGORI', 'kategori', 54, { sortable: true, classes: 'ps-col' }),
col('urunIlkGrubu', 'URUN ILK GRUBU', 'urunIlkGrubu', 66, { sortable: true, classes: 'ps-col' }),
col('urunAnaGrubu', 'URUN ANA GRUBU', 'urunAnaGrubu', 66, { sortable: true, classes: 'ps-col' }),
col('urunAltGrubu', 'URUN ALT GRUBU', 'urunAltGrubu', 66, { sortable: true, classes: 'ps-col' }),
col('icerik', 'ICERIK', 'icerik', 62, { sortable: true, classes: 'ps-col' }),
col('karisim', 'KARISIM', 'karisim', 62, { sortable: true, classes: 'ps-col' }),
col('marka', 'MARKA', 'marka', 54, { sortable: true, classes: 'ps-col' }),
col('brandGroupSelection', 'MARKA GRUBU SECIMI', 'brandGroupSelection', 76),
col('costPrice', 'MALIYET FIYATI', 'costPrice', 74, { align: 'right', sortable: true, classes: 'usd-col' }),
col('expenseForBasePrice', 'TABAN FIYAT MASRAF', 'expenseForBasePrice', 86, { align: 'right', classes: 'usd-col' }),
col('basePriceUsd', 'TABAN USD', 'basePriceUsd', 74, { align: 'right', classes: 'usd-col' }),
col('basePriceTry', 'TABAN TRY', 'basePriceTry', 74, { align: 'right', classes: 'try-col' }),
col('usd1', 'USD 1', 'usd1', 62, { align: 'right', classes: 'usd-col' }),
col('usd2', 'USD 2', 'usd2', 62, { align: 'right', classes: 'usd-col' }),
col('usd3', 'USD 3', 'usd3', 62, { align: 'right', classes: 'usd-col' }),
col('usd4', 'USD 4', 'usd4', 62, { align: 'right', classes: 'usd-col' }),
col('usd5', 'USD 5', 'usd5', 62, { align: 'right', classes: 'usd-col' }),
col('usd6', 'USD 6', 'usd6', 62, { align: 'right', classes: 'usd-col' }),
col('eur1', 'EUR 1', 'eur1', 62, { align: 'right', classes: 'eur-col' }),
col('eur2', 'EUR 2', 'eur2', 62, { align: 'right', classes: 'eur-col' }),
col('eur3', 'EUR 3', 'eur3', 62, { align: 'right', classes: 'eur-col' }),
col('eur4', 'EUR 4', 'eur4', 62, { align: 'right', classes: 'eur-col' }),
col('eur5', 'EUR 5', 'eur5', 62, { align: 'right', classes: 'eur-col' }),
col('eur6', 'EUR 6', 'eur6', 62, { align: 'right', classes: 'eur-col' }),
col('try1', 'TRY 1', 'try1', 62, { align: 'right', classes: 'try-col' }),
col('try2', 'TRY 2', 'try2', 62, { align: 'right', classes: 'try-col' }),
col('try3', 'TRY 3', 'try3', 62, { align: 'right', classes: 'try-col' }),
col('try4', 'TRY 4', 'try4', 62, { align: 'right', classes: 'try-col' }),
col('try5', 'TRY 5', 'try5', 62, { align: 'right', classes: 'try-col' }),
col('try6', 'TRY 6', 'try6', 62, { align: 'right', classes: 'try-col' })
]
// Columns pinned to the left while scrolling horizontally, in visual order;
// their cumulative widths produce the `left` offsets in stickyLeftMap.
const stickyColumnNames = [
'select',
'productCode',
'stockQty',
'stockEntryDate',
'lastPricingDate',
'askiliYan',
'kategori',
'urunIlkGrubu',
'urunAnaGrubu',
'urunAltGrubu',
'icerik',
'karisim',
'marka',
'brandGroupSelection',
'costPrice',
'expenseForBasePrice',
'basePriceUsd',
'basePriceTry'
]
// Last sticky column — gets the divider border/shadow in CSS.
const stickyBoundaryColumnName = 'basePriceTry'
const stickyColumnNameSet = new Set(stickyColumnNames)
// Columns currently shown: currency tier columns are dropped when their
// currency is deselected in the toolbar dropdown.
const visibleColumns = computed(() => {
const selected = new Set(selectedCurrencies.value)
return allColumns.filter((c) => {
if (c.name.startsWith('usd')) return selected.has('USD')
if (c.name.startsWith('eur')) return selected.has('EUR')
if (c.name.startsWith('try')) return selected.has('TRY')
return true
})
})
// Cumulative `left` pixel offset for each sticky column.
const stickyLeftMap = computed(() => {
const map = {}
let left = 0
for (const colName of stickyColumnNames) {
const c = allColumns.find((x) => x.name === colName)
if (!c) continue
map[colName] = left
left += extractWidth(c.style)
}
return map
})
// Total pixel width of the sticky group (offset of the boundary column plus
// its own width); exposed to CSS via --sticky-scroll-comp.
const stickyScrollComp = computed(() => {
const boundaryCol = allColumns.find((x) => x.name === stickyBoundaryColumnName)
return (stickyLeftMap.value[stickyBoundaryColumnName] || 0) + extractWidth(boundaryCol?.style)
})
// Sum of visible column widths; drives the fixed table layout width.
const tableMinWidth = computed(() => visibleColumns.value.reduce((sum, c) => sum + extractWidth(c.style), 0))
const tableStyle = computed(() => ({
width: `${tableMinWidth.value}px`,
minWidth: `${tableMinWidth.value}px`,
tableLayout: 'fixed'
}))
const rows = computed(() => store.rows || [])
// Distinct values per multi-filter column, sorted with Turkish collation,
// shaped as { label, value } options for the filter menus.
const multiFilterOptionMap = computed(() => {
const map = {}
multiFilterColumns.forEach(({ field }) => {
const uniq = new Set()
rows.value.forEach((row) => {
const val = String(row?.[field] ?? '').trim()
if (val) uniq.add(val)
})
map[field] = Array.from(uniq)
.sort((a, b) => a.localeCompare(b, 'tr'))
.map((v) => ({ label: v, value: v }))
})
return map
})
// The option lists narrowed by each filter menu's search box (tr lowercasing
// so dotted/dotless i compare correctly).
const filteredFilterOptionMap = computed(() => {
const map = {}
multiFilterColumns.forEach(({ field }) => {
const search = String(columnFilterSearch.value[field] || '').trim().toLocaleLowerCase('tr')
const options = multiFilterOptionMap.value[field] || []
map[field] = search
? options.filter((option) => option.label.toLocaleLowerCase('tr').includes(search))
: options
})
return map
})
// Rows that pass every active filter: the selected-only toggle, each
// multi-select column filter, the stock-quantity range and both date ranges.
const filteredRows = computed(() => {
// Parse/resolve filter state once per recompute instead of once per row —
// the original re-parsed the min/max strings inside the filter callback.
const stockQtyMin = parseNullableNumber(numberRangeFilters.value.stockQty?.min)
const stockQtyMax = parseNullableNumber(numberRangeFilters.value.stockQty?.max)
const stockEntryRange = dateRangeFilters.value.stockEntryDate
const lastPricingRange = dateRangeFilters.value.lastPricingDate
return rows.value.filter((row) => {
if (showSelectedOnly.value && !selectedMap.value[row.id]) return false
for (const mf of multiFilterColumns) {
const selected = columnFilters.value[mf.field] || []
if (selected.length > 0 && !selected.includes(String(row?.[mf.field] ?? '').trim())) return false
}
const stockQty = Number(row?.stockQty ?? 0)
if (stockQtyMin !== null && stockQty < stockQtyMin) return false
if (stockQtyMax !== null && stockQty > stockQtyMax) return false
if (!matchesDateRange(String(row?.stockEntryDate || '').trim(), stockEntryRange)) return false
if (!matchesDateRange(String(row?.lastPricingDate || '').trim(), lastPricingRange)) return false
return true
})
})
// Ids of rows passing the filters — basis of the header "select all" state.
const visibleRowIds = computed(() => filteredRows.value.map((row) => row.id))
// Total selected rows (visible or not) vs. selected among visible rows.
const selectedRowCount = computed(() => Object.values(selectedMap.value).filter(Boolean).length)
const selectedVisibleCount = computed(() => visibleRowIds.value.filter((id) => !!selectedMap.value[id]).length)
const allSelectedVisible = computed(() => visibleRowIds.value.length > 0 && selectedVisibleCount.value === visibleRowIds.value.length)
const someSelectedVisible = computed(() => selectedVisibleCount.value > 0)
// Whether the server has more pages to fetch (set by the store).
const hasMoreRows = computed(() => Boolean(store.hasMore))
// True when the column has any kind of header filter (multi-select or range).
function isHeaderFilterField (field) {
return headerFilterFieldSet.has(field)
}
// Filter-kind predicates used by the header template to pick the menu body.
function isMultiSelectFilterField (field) {
return multiSelectFilterFieldSet.has(field)
}
function isNumberRangeFilterField (field) {
return numberRangeFilterFieldSet.has(field)
}
function isDateRangeFilterField (field) {
return dateRangeFilterFieldSet.has(field)
}
// True when the column currently has an active (non-empty) filter;
// drives the filter icon color in the header.
function hasFilter (field) {
if (isMultiSelectFilterField(field)) return (columnFilters.value[field] || []).length > 0
if (isNumberRangeFilterField(field)) {
const filter = numberRangeFilters.value[field]
return !!String(filter?.min || '').trim() || !!String(filter?.max || '').trim()
}
if (isDateRangeFilterField(field)) {
const filter = dateRangeFilters.value[field]
return !!String(filter?.from || '').trim() || !!String(filter?.to || '').trim()
}
return false
}
// Badge number shown on the filter button: selected value count for
// multi-select filters, count of filled bounds (0-2) for range filters.
function getFilterBadgeValue (field) {
if (isMultiSelectFilterField(field)) return (columnFilters.value[field] || []).length
if (isNumberRangeFilterField(field)) {
const filter = numberRangeFilters.value[field]
return [filter?.min, filter?.max].filter((x) => String(x || '').trim()).length
}
if (isDateRangeFilterField(field)) {
const filter = dateRangeFilters.value[field]
return [filter?.from, filter?.to].filter((x) => String(x || '').trim()).length
}
return 0
}
// Clears one multi-select column filter. Filters are replaced (not mutated)
// so the computed pipeline reliably re-runs.
function clearColumnFilter (field) {
if (!isMultiSelectFilterField(field)) return
columnFilters.value = {
...columnFilters.value,
[field]: []
}
}
// Clears a numeric or date range filter back to empty bounds.
function clearRangeFilter (field) {
if (isNumberRangeFilterField(field)) {
numberRangeFilters.value = {
...numberRangeFilters.value,
[field]: { min: '', max: '' }
}
return
}
if (isDateRangeFilterField(field)) {
dateRangeFilters.value = {
...dateRangeFilters.value,
[field]: { from: '', to: '' }
}
}
}
// Options for a column's filter menu, already narrowed by its search box.
function getFilterOptionsForField (field) {
return filteredFilterOptionMap.value[field] || []
}
function isColumnFilterValueSelected (field, value) {
return (columnFilters.value[field] || []).includes(value)
}
// Adds or removes one value from a multi-select column filter.
function toggleColumnFilterValue (field, value) {
const current = new Set(columnFilters.value[field] || [])
if (current.has(value)) current.delete(value)
else current.add(value)
columnFilters.value = {
...columnFilters.value,
[field]: Array.from(current)
}
}
// Selects every option currently visible in the menu (i.e. after search
// narrowing) — matches Excel's "select all filtered" behavior.
function selectAllColumnFilterOptions (field) {
const options = getFilterOptionsForField(field)
columnFilters.value = {
...columnFilters.value,
[field]: options.map((option) => option.value)
}
}
/**
 * Reads the pixel width out of a `width:NNpx;...` inline-style string
 * produced by col(). Returns 0 when no width is present.
 *
 * @param {string|undefined} style - Inline style text.
 * @returns {number} Width in pixels, or 0.
 */
function extractWidth (style) {
  const match = /width:(\d+)px/.exec(String(style || ''))
  return match === null ? 0 : Number(match[1])
}
// True when the column is pinned to the left edge.
function isStickyCol (colName) {
return stickyColumnNameSet.has(colName)
}
// True only for the last pinned column (gets the divider styling).
function isStickyBoundary (colName) {
return colName === stickyBoundaryColumnName
}
// Inline `left` offset for sticky cells; header cells sit above body cells
// (z-index 22 vs 12), and both below the thead z-index set in CSS.
function getHeaderCellStyle (col) {
if (!isStickyCol(col.name)) return undefined
return { left: `${stickyLeftMap.value[col.name] || 0}px`, zIndex: 22 }
}
function getBodyCellStyle (col) {
if (!isStickyCol(col.name)) return undefined
return { left: `${stickyLeftMap.value[col.name] || 0}px`, zIndex: 12 }
}
/**
 * Rounds a value to 2 decimal places, treating null/undefined/NaN-ish
 * falsy inputs as 0. Uses toFixed, so ties follow its string rounding.
 *
 * @param {number|string|null|undefined} value
 * @returns {number}
 */
function round2 (value) {
  const amount = Number(value || 0)
  return Number(amount.toFixed(2))
}
/**
 * Parses a Turkish-formatted ("1.234,56") string into a number; returns 0
 * for unparseable input.
 *
 * Bug fix: numeric inputs are now returned as-is. Previously a number like
 * 12.5 was stringified to "12.5" and the tr-TR normalization below stripped
 * the dot (thousands separator), yielding 125 — which corrupted every
 * formatted price cell fed with a plain number.
 *
 * @param {number|string|null|undefined} val
 * @returns {number} Parsed value, or 0.
 */
function parseNumber (val) {
  if (typeof val === 'number') return Number.isFinite(val) ? val : 0
  const normalized = String(val ?? '')
    .replace(/\s/g, '')   // drop whitespace
    .replace(/\./g, '')   // drop tr thousands separators
    .replace(',', '.')    // tr decimal comma -> dot
  const n = Number(normalized)
  return Number.isFinite(n) ? n : 0
}
/**
 * Like parseNumber but distinguishes "no value": returns null for empty or
 * unparseable input instead of 0, so range filters can tell "unset" from 0.
 *
 * Bug fix: finite numeric inputs are returned directly — previously 7.5
 * would be stringified and the dot stripped, producing 75.
 *
 * @param {number|string|null|undefined} val
 * @returns {number|null}
 */
function parseNullableNumber (val) {
  if (typeof val === 'number') return Number.isFinite(val) ? val : null
  const text = String(val ?? '').trim()
  if (!text) return null
  const normalized = text
    .replace(/\s/g, '')   // drop whitespace
    .replace(/\./g, '')   // drop tr thousands separators
    .replace(',', '.')    // tr decimal comma -> dot
  const n = Number(normalized)
  return Number.isFinite(n) ? n : null
}
/**
 * Checks an ISO-style (YYYY-MM-DD) date string against a { from, to } range.
 * Lexicographic comparison is valid for this format. An empty range matches
 * everything; a non-empty range rejects rows with no date.
 *
 * @param {string} value - Row date, '' when absent.
 * @param {{from?: string, to?: string}|null|undefined} filter
 * @returns {boolean}
 */
function matchesDateRange (value, filter) {
  const lower = String(filter?.from || '').trim()
  const upper = String(filter?.to || '').trim()
  if (lower === '' && upper === '') return true
  if (!value) return false
  const aboveLower = lower === '' || value >= lower
  const belowUpper = upper === '' || value <= upper
  return aboveLower && belowUpper
}
/**
 * Formats a price for display in tr-TR style with exactly 2 decimals
 * (e.g. 1234.5 -> "1.234,50").
 *
 * Bug fix: finite numbers bypass parseNumber. Previously a numeric 12.5 was
 * round-tripped through the tr-TR string normalizer, which stripped the
 * decimal point and rendered "125,00".
 *
 * @param {number|string|null|undefined} val
 * @returns {string}
 */
function formatPrice (val) {
  const n = typeof val === 'number' && Number.isFinite(val) ? val : parseNumber(val)
  return n.toLocaleString('tr-TR', { minimumFractionDigits: 2, maximumFractionDigits: 2 })
}
/**
 * Formats a stock quantity in tr-TR style: whole numbers without decimals
 * ("1.234"), fractional quantities with exactly two ("1,50"). Non-numeric
 * input renders as "0".
 *
 * @param {number|string|null|undefined} val
 * @returns {string}
 */
function formatStock (val) {
  const quantity = Number(val || 0)
  if (!Number.isFinite(quantity)) return '0'
  // Tolerance avoids float noise flipping a whole number to fractional.
  const fractional = Math.abs(quantity % 1) > 0.0001
  const digits = fractional ? 2 : 0
  return quantity.toLocaleString('tr-TR', {
    minimumFractionDigits: digits,
    maximumFractionDigits: digits
  })
}
/**
 * Renders a YYYY-MM-DD date as DD.MM.YYYY for display.
 * Empty input renders as "-"; anything not matching the expected shape is
 * returned unchanged.
 *
 * @param {string|null|undefined} val
 * @returns {string}
 */
function formatDateDisplay (val) {
  const raw = String(val || '').trim()
  if (raw === '') return '-'
  const [year, month, day] = raw.split('-')
  const wellFormed = Boolean(year) && Boolean(month) && Boolean(day)
  return wellFormed ? `${day}.${month}.${year}` : raw
}
/**
 * True when a row's last pricing predates its stock entry (or it was never
 * priced at all) — i.e. its price is stale. Rows without a stock-entry date
 * are never flagged. Dates compare lexicographically (YYYY-MM-DD).
 *
 * @param {object|null|undefined} row
 * @returns {boolean}
 */
function needsRepricing (row) {
  const enteredOn = String(row?.stockEntryDate || '').trim()
  if (enteredOn === '') return false
  const pricedOn = String(row?.lastPricingDate || '').trim()
  return pricedOn === '' || pricedOn < enteredOn
}
// Recomputes all derived price columns on `row` from its USD base price.
// Mutates the row in place (rows are store-owned objects).
// Order matters: basePriceTry must be set first because the TRY tiers derive
// from it — so TRY tiers include the base-price expense while the USD tiers
// are multiples of the raw USD base. NOTE(review): that asymmetry looks
// intentional but is worth confirming with the pricing rules.
function recalcByBasePrice (row) {
row.basePriceTry = round2((row.basePriceUsd * usdToTry) + row.expenseForBasePrice)
multipliers.forEach((multiplier, index) => {
row[`usd${index + 1}`] = round2(row.basePriceUsd * multiplier)
// EUR tier = USD base converted to TRY, tiered, then converted to EUR.
row[`eur${index + 1}`] = round2((row.basePriceUsd * usdToTry * multiplier) / eurToTry)
row[`try${index + 1}`] = round2(row.basePriceTry * multiplier)
})
}
// Commits an edited price cell to the store; editing either base-price input
// cascades a recalculation of every derived tier column.
function onEditableCellChange (row, field, val) {
const parsed = parseNumber(val)
store.updateCell(row, field, parsed)
if (field === 'expenseForBasePrice' || field === 'basePriceUsd') recalcByBasePrice(row)
}
function onBrandGroupSelectionChange (row, val) {
store.updateBrandGroupSelection(row, val)
}
// Selection map is replaced (not mutated) so dependent computeds re-run.
function toggleRowSelection (rowId, val) {
selectedMap.value = { ...selectedMap.value, [rowId]: !!val }
}
// Header checkbox: (de)selects every row currently passing the filters,
// leaving selections on filtered-out rows untouched.
function toggleSelectAllVisible (val) {
const next = { ...selectedMap.value }
visibleRowIds.value.forEach((id) => { next[id] = !!val })
selectedMap.value = next
}
// "Filtreleri Sifirla": clears all filters, the selected-only view and the
// row selection. Currency visibility and sorting are deliberately kept.
function resetAll () {
columnFilters.value = {
productCode: [],
askiliYan: [],
kategori: [],
urunIlkGrubu: [],
urunAnaGrubu: [],
urunAltGrubu: [],
icerik: [],
karisim: []
}
columnFilterSearch.value = {
productCode: '',
askiliYan: '',
kategori: '',
urunIlkGrubu: '',
urunAnaGrubu: '',
urunAltGrubu: '',
icerik: '',
karisim: ''
}
numberRangeFilters.value = {
stockQty: { min: '', max: '' }
}
dateRangeFilters.value = {
stockEntryDate: { from: '', to: '' },
lastPricingDate: { from: '', to: '' }
}
showSelectedOnly.value = false
selectedMap.value = {}
}
// Guard mirrors the button's :disable so a stale click can't enable an
// empty "selected only" view.
function toggleShowSelectedOnly () {
if (!showSelectedOnly.value && selectedRowCount.value === 0) return
showSelectedOnly.value = !showSelectedOnly.value
}
function isCurrencySelected (code) {
return selectedCurrencies.value.includes(code)
}
// Rebuilt from currencyOptions so the selection keeps a stable USD/EUR/TRY order.
function toggleCurrency (code, checked) {
const set = new Set(selectedCurrencies.value)
if (checked) set.add(code)
else set.delete(code)
selectedCurrencies.value = currencyOptions.map((x) => x.value).filter((x) => set.has(x))
}
function toggleCurrencyRow (code) {
toggleCurrency(code, !isCurrencySelected(code))
}
function selectAllCurrencies () {
selectedCurrencies.value = currencyOptions.map((x) => x.value)
}
function clearAllCurrencies () {
selectedCurrencies.value = []
}
// Fetches one page from the store. `reset` restarts from the beginning and
// replaces the row set; otherwise rows are appended after the stored cursor.
// Returns the number of rows fetched.
async function fetchChunk ({ reset = false } = {}) {
const afterProductCode = reset ? '' : nextCursor.value
const result = await store.fetchRows({
limit: FETCH_LIMIT,
afterProductCode,
append: !reset
})
const fetched = Number(result?.fetched) || 0
nextCursor.value = String(result?.nextCursor || '')
return fetched
}
// Appends the next page; loadingMore acts as a re-entrancy guard since the
// virtual-scroll handler can fire repeatedly while a fetch is in flight.
async function loadMoreRows () {
if (loadingMore.value || store.loading || !hasMoreRows.value) return
loadingMore.value = true
try {
await fetchChunk({ reset: false })
} finally {
loadingMore.value = false
}
}
// Infinite scroll: fetch the next page once the viewport is within 25 rows
// of the end of the filtered list.
function onTableVirtualScroll (details) {
const to = Number(details?.to || 0)
if (!Number.isFinite(to)) return
if (to >= filteredRows.value.length - 25) {
void loadMoreRows()
}
}
// Keeps fetching pages until at least `minRows` rows pass the active filters
// (or `maxBatches` pages were pulled) so heavy filters still fill the screen.
async function ensureEnoughVisibleRows (minRows = 80, maxBatches = 4) {
let guard = 0
while (hasMoreRows.value && filteredRows.value.length < minRows && guard < maxBatches) {
await loadMoreRows()
guard++
}
}
// Full reload: resets the cursor, fetches the first page(s) and clears the
// row selection (ids may no longer exist after a reload).
async function reloadData () {
const startedAt = Date.now()
console.info('[product-pricing][ui] reload:start', {
at: new Date(startedAt).toISOString()
})
nextCursor.value = ''
await fetchChunk({ reset: true })
await ensureEnoughVisibleRows(120, 6)
console.info('[product-pricing][ui] reload:done', {
duration_ms: Date.now() - startedAt,
row_count: Array.isArray(store.rows) ? store.rows.length : 0,
has_error: Boolean(store.error)
})
selectedMap.value = {}
}
onMounted(async () => {
await reloadData()
})
// Whenever filters or sorting change, top up the filtered result so the
// viewport doesn't end up nearly empty after a restrictive filter.
watch(
[
columnFilters,
numberRangeFilters,
dateRangeFilters,
showSelectedOnly,
() => tablePagination.value.sortBy,
() => tablePagination.value.descending
],
() => { void ensureEnoughVisibleRows(80, 4) },
{ deep: true }
)
</script>
<style scoped>
/* Page fills the viewport below the app chrome; the table scrolls internally. */
.pricing-page {
--pricing-row-height: 31px;
--pricing-header-height: 72px;
--pricing-table-height: calc(100vh - 210px);
height: calc(100vh - 120px);
display: flex;
flex-direction: column;
overflow: hidden;
}
.currency-menu-list {
min-width: 170px;
}
.table-wrap {
flex: 1;
min-height: 0;
overflow: hidden;
border: 1px solid rgba(0, 0, 0, 0.12);
border-radius: 4px;
display: flex;
flex-direction: column;
}
.pane-table {
height: 100%;
width: 100%;
}
/* The q-table middle section is the single scroll container (both axes). */
.pricing-table :deep(.q-table__middle) {
height: var(--pricing-table-height);
min-height: var(--pricing-table-height);
max-height: var(--pricing-table-height);
overflow: auto !important;
scrollbar-gutter: stable both-edges;
overscroll-behavior: contain;
}
/* Fixed layout + right margin equal to the sticky group width so the last
   scrollable columns can clear the pinned columns. */
.pricing-table :deep(.q-table) {
width: max-content;
min-width: 100%;
table-layout: fixed;
font-size: 11px;
border-collapse: separate;
border-spacing: 0;
margin-right: var(--sticky-scroll-comp, 0px);
}
.pricing-table :deep(.q-table__container) {
border: none !important;
box-shadow: none !important;
background: transparent !important;
height: 100% !important;
}
.pricing-table :deep(th),
.pricing-table :deep(td) {
box-sizing: border-box;
padding: 0 4px;
overflow: hidden;
vertical-align: middle;
}
/* Uniform row height is required for accurate virtual-scroll math
   (matches the rowHeight constant in the script). */
.pricing-table :deep(td),
.pricing-table :deep(.q-table tbody tr) {
height: var(--pricing-row-height) !important;
min-height: var(--pricing-row-height) !important;
max-height: var(--pricing-row-height) !important;
line-height: var(--pricing-row-height);
padding: 0 !important;
border-bottom: 1px solid rgba(0, 0, 0, 0.08) !important;
}
.pricing-table :deep(td > div),
.pricing-table :deep(td > .q-td) {
height: 100% !important;
display: flex !important;
align-items: center !important;
padding: 0 4px !important;
}
/* Fixed header height (matches the headerHeight constant in the script). */
.pricing-table :deep(th),
.pricing-table :deep(.q-table thead tr),
.pricing-table :deep(.q-table thead tr.header-row-fixed),
.pricing-table :deep(.q-table thead th),
.pricing-table :deep(.q-table thead tr.header-row-fixed > th) {
height: var(--pricing-header-height) !important;
min-height: var(--pricing-header-height) !important;
max-height: var(--pricing-header-height) !important;
}
.pricing-table :deep(th) {
padding-top: 0;
padding-bottom: 0;
white-space: nowrap;
word-break: normal;
text-overflow: ellipsis;
text-align: center;
font-size: 10px;
font-weight: 800;
line-height: 1.15;
}
/* Header sticks to the top while scrolling vertically. */
.pricing-table :deep(.q-table thead th) {
position: sticky;
top: 0;
z-index: 30;
background: #fff;
vertical-align: middle !important;
}
/* Left-pinned columns; `left` offsets are set inline from stickyLeftMap.
   z-index layering: sticky header cells (35) > plain header (30) >
   sticky body cells (12) > plain body cells. */
.pricing-table :deep(.sticky-col) {
position: sticky !important;
background-clip: padding-box;
}
.pricing-table :deep(thead .sticky-col) {
z-index: 35 !important;
}
.pricing-table :deep(tbody .sticky-col) {
z-index: 12 !important;
}
/* Divider after the last pinned column. */
.pricing-table :deep(.sticky-boundary) {
border-right: 2px solid rgba(25, 118, 210, 0.18) !important;
box-shadow: 8px 0 12px -10px rgba(15, 23, 42, 0.55);
}
/* Header layout: label area plus a fixed 20px slot for the filter button. */
.header-with-filter {
display: grid;
grid-template-columns: 1fr 20px;
align-items: center;
column-gap: 4px;
height: 100%;
line-height: 1.25;
overflow: hidden;
}
/* Labels clamp to two lines with ellipsis. */
.header-with-filter > span {
min-width: 0;
width: 100%;
overflow: hidden;
text-align: center;
text-overflow: ellipsis;
white-space: normal;
font-weight: 800;
line-height: 1.15;
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
}
.header-filter-btn {
width: 20px;
height: 20px;
min-width: 20px;
justify-self: end;
}
/* Invisible placeholder keeping filterless headers the same width. */
.header-filter-ghost {
opacity: 0;
pointer-events: none;
}
/* Excel-style filter popup. */
.excel-filter-menu {
min-width: 230px;
padding: 8px;
}
.range-filter-grid {
display: grid;
grid-template-columns: 1fr;
gap: 8px;
}
.range-filter-field {
min-width: 0;
}
.excel-filter-select :deep(.q-field__control) {
min-height: 30px;
}
.excel-filter-select :deep(.q-field__native),
.excel-filter-select :deep(.q-field__input) {
font-weight: 700;
}
.excel-filter-actions {
gap: 4px;
}
.excel-filter-options {
max-height: 220px;
margin-top: 8px;
overflow: auto;
border: 1px solid rgba(0, 0, 0, 0.08);
border-radius: 4px;
}
.excel-filter-option {
min-height: 32px;
}
.excel-filter-empty {
padding: 10px 8px;
color: #607d8b;
font-size: 11px;
}
/* Identity (product/stock/date) columns — white background so sticky cells
   fully cover the columns scrolling beneath them. */
.pricing-table :deep(th.ps-col),
.pricing-table :deep(td.ps-col) {
background: #fff;
color: var(--q-primary);
font-weight: 700;
}
.pricing-table :deep(td.ps-col .cell-text),
.pricing-table :deep(td.ps-col .product-code-text),
.pricing-table :deep(td.ps-col .stock-qty-text) {
font-size: 11px;
line-height: 1.1;
white-space: normal;
word-break: break-word;
}
.stock-qty-text {
display: block;
width: 100%;
text-align: center;
font-weight: 700;
padding: 0 4px;
}
.date-cell-text {
display: block;
width: 100%;
text-align: center;
font-weight: 700;
padding: 0 4px;
}
/* Applied when needsRepricing(row) is true. */
.date-warning {
color: #c62828;
}
.pricing-table :deep(th.selection-col),
.pricing-table :deep(td.selection-col) {
background: #fff;
color: var(--q-primary);
padding-left: 0 !important;
padding-right: 0 !important;
}
.pricing-table :deep(th.selection-col) {
text-align: center !important;
}
.pricing-table :deep(.selection-col .q-checkbox__inner) {
color: var(--q-primary);
font-size: 16px;
}
.pricing-table :deep(th.selection-col .q-checkbox),
.pricing-table :deep(td.selection-col .q-checkbox) {
display: inline-flex;
align-items: center;
justify-content: center;
}
.pricing-table :deep(.selection-col .q-checkbox__bg) {
background: #fff;
border-color: var(--q-primary);
}
/* Currency color coding: USD green, EUR red, TRY blue. */
.pricing-table :deep(th.usd-col),
.pricing-table :deep(td.usd-col) {
background: #ecf9f0;
color: #178a3e;
font-weight: 700;
}
.pricing-table :deep(th.eur-col),
.pricing-table :deep(td.eur-col) {
background: #fdeeee;
color: #c62828;
font-weight: 700;
}
.pricing-table :deep(th.try-col),
.pricing-table :deep(td.try-col) {
background: #edf4ff;
color: #1e63c6;
font-weight: 700;
}
.cell-text {
display: block;
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
line-height: 1.1;
padding-top: 0;
}
.product-code-text {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
display: block;
font-weight: 700;
letter-spacing: 0;
}
/* Native input/select styling for the editable cells. */
.native-cell-input,
.native-cell-select {
width: 100%;
height: 22px;
box-sizing: border-box;
padding: 1px 3px;
border: 1px solid #cfd8dc;
border-radius: 4px;
background: #fff;
font-size: 11px;
margin: 0;
}
.native-cell-input:focus,
.native-cell-select:focus {
outline: none;
border-color: #1976d2;
}
</style>

View File

@@ -47,7 +47,7 @@
<template #append> <template #append>
<q-icon name="event" class="cursor-pointer"> <q-icon name="event" class="cursor-pointer">
<q-popup-proxy cover transition-show="scale" transition-hide="scale"> <q-popup-proxy cover transition-show="scale" transition-hide="scale">
<q-date v-model="dateFrom" mask="YYYY-MM-DD" locale="tr-TR"/> <q-date v-model="dateFrom" mask="YYYY-MM-DD" :locale="dateLocale"/>
</q-popup-proxy> </q-popup-proxy>
</q-icon> </q-icon>
</template> </template>
@@ -63,7 +63,7 @@
<template #append> <template #append>
<q-icon name="event" class="cursor-pointer"> <q-icon name="event" class="cursor-pointer">
<q-popup-proxy cover transition-show="scale" transition-hide="scale"> <q-popup-proxy cover transition-show="scale" transition-hide="scale">
<q-date v-model="dateTo" mask="YYYY-MM-DD" locale="tr-TR" /> <q-date v-model="dateTo" mask="YYYY-MM-DD" :locale="dateLocale" />
</q-popup-proxy> </q-popup-proxy>
</q-icon> </q-icon>
</template> </template>
@@ -277,12 +277,16 @@ import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
import dayjs from 'dayjs' import dayjs from 'dayjs'
import { usePermission } from 'src/composables/usePermission' import { usePermission } from 'src/composables/usePermission'
import { normalizeSearchText } from 'src/utils/searchText' import { normalizeSearchText } from 'src/utils/searchText'
import { useLocaleStore } from 'src/stores/localeStore'
import { getDateLocale } from 'src/i18n/dayjsLocale'
const { canRead, canExport } = usePermission() const { canRead, canExport } = usePermission()
const canReadFinance = canRead('finance') const canReadFinance = canRead('finance')
const canExportFinance = canExport('finance') const canExportFinance = canExport('finance')
const $q = useQuasar() const $q = useQuasar()
const localeStore = useLocaleStore()
const dateLocale = computed(() => getDateLocale(localeStore.locale))
const accountStore = useAccountStore() const accountStore = useAccountStore()
const statementheaderStore = useStatementheaderStore() const statementheaderStore = useStatementheaderStore()
@@ -363,7 +367,7 @@ async function onFilterClick() {
startdate: dateFrom.value, startdate: dateFrom.value,
enddate: dateTo.value, enddate: dateTo.value,
accountcode: selectedCari.value, accountcode: selectedCari.value,
langcode: 'TR', langcode: localeStore.backendLangCode,
parislemler: selectedMonType.value parislemler: selectedMonType.value
}) })
@@ -411,7 +415,7 @@ function resetFilters() {
/* Format */ /* Format */
function formatAmount(n) { function formatAmount(n) {
if (n == null || isNaN(n)) return '0,00' if (n == null || isNaN(n)) return '0,00'
return new Intl.NumberFormat('tr-TR', { return new Intl.NumberFormat(dateLocale.value, {
minimumFractionDigits: 2, minimumFractionDigits: 2,
maximumFractionDigits: 2 maximumFractionDigits: 2
}).format(n) }).format(n)
@@ -467,7 +471,8 @@ async function handleDownload() {
selectedCari.value, // accountCode selectedCari.value, // accountCode
dateFrom.value, // startDate dateFrom.value, // startDate
dateTo.value, // endDate dateTo.value, // endDate
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3']) selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
localeStore.backendLangCode
) )
console.log("📤 [DEBUG] Storedan gelen result:", result) console.log("📤 [DEBUG] Storedan gelen result:", result)
@@ -508,7 +513,8 @@ async function CurrheadDownload() {
selectedCari.value, // accountCode selectedCari.value, // accountCode
dateFrom.value, // startDate dateFrom.value, // startDate
dateTo.value, // endDate dateTo.value, // endDate
selectedMonType.value // parasal işlem tipi (parislemler) selectedMonType.value, // parasal işlem tipi (parislemler)
localeStore.backendLangCode
) )
console.log("📤 [DEBUG] CurrheadDownloadresult:", result) console.log("📤 [DEBUG] CurrheadDownloadresult:", result)

View File

@@ -0,0 +1,846 @@
<template>
<q-page v-if="canUpdateLanguage" class="q-pa-md translation-page">
<div class="translation-toolbar sticky-toolbar">
<div class="row q-col-gutter-sm items-end q-mb-md">
<div class="col-12 col-md-4">
<q-input
v-model="filters.q"
dense
outlined
clearable
label="Kelime ara"
/>
</div>
<div class="col-auto">
<q-btn color="primary" icon="search" label="Getir" @click="loadRows" />
</div>
<div class="col-auto">
<q-btn
color="secondary"
icon="sync"
label="YENİ KELİMELERİ GETİR"
:loading="store.saving"
@click="syncSources"
/>
</div>
<div class="col-auto">
<q-toggle v-model="autoTranslate" dense color="primary" label="Oto Çeviri" />
</div>
</div>
<div class="row q-gutter-sm q-mb-sm">
<q-btn
color="accent"
icon="g_translate"
label="Seçilenleri Çevir"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="translateSelectedRows"
/>
<q-btn
color="secondary"
icon="done_all"
label="Seçilenleri Onayla"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="bulkApproveSelected"
/>
<q-btn
color="primary"
icon="save"
label="Seçilenleri Toplu Güncelle"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="bulkSaveSelected"
/>
</div>
</div>
<q-table
ref="tableRef"
class="translation-table"
flat
bordered
virtual-scroll
:virtual-scroll-sticky-size-start="56"
row-key="t_key"
:loading="store.loading || store.saving"
:rows="pivotRows"
:columns="columns"
:rows-per-page-options="[0]"
v-model:pagination="tablePagination"
hide-bottom
@virtual-scroll="onVirtualScroll"
>
<template #body-cell-actions="props">
<q-td :props="props">
<q-btn
dense
color="primary"
icon="save"
label="Güncelle"
:disable="!rowHasChanges(props.row.t_key)"
:loading="store.saving"
@click="saveRow(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-select="props">
<q-td :props="props">
<q-checkbox
dense
:model-value="selectedKeys.includes(props.row.t_key)"
@update:model-value="(v) => toggleSelected(props.row.t_key, v)"
/>
</q-td>
</template>
<template #body-cell-source_text_tr="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_text_tr')">
<div class="source-text-label" :title="rowDraft(props.row.t_key).source_text_tr">
{{ rowDraft(props.row.t_key).source_text_tr }}
</div>
</q-td>
</template>
<template #body-cell-source_type="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_type')">
<q-select
v-model="rowDraft(props.row.t_key).source_type"
dense
outlined
emit-value
map-options
:options="sourceTypeOptions"
@update:model-value="() => queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-en="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'en')">
<q-input
v-model="rowDraft(props.row.t_key).en"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-de="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'de')">
<q-input
v-model="rowDraft(props.row.t_key).de"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-es="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'es')">
<q-input
v-model="rowDraft(props.row.t_key).es"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-it="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'it')">
<q-input
v-model="rowDraft(props.row.t_key).it"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-ru="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ru')">
<q-input
v-model="rowDraft(props.row.t_key).ru"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-ar="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ar')">
<q-input
v-model="rowDraft(props.row.t_key).ar"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
</q-table>
</q-page>
<q-page v-else class="q-pa-md flex flex-center">
<div class="text-negative text-subtitle1">
Bu modüle erişim yetkiniz yok.
</div>
</q-page>
</template>
<script setup>
import { computed, onBeforeUnmount, onMounted, ref, watch } from 'vue'
import { useQuasar } from 'quasar'
import { usePermission } from 'src/composables/usePermission'
import { useTranslationStore } from 'src/stores/translationStore'
const $q = useQuasar()
const store = useTranslationStore()
const { canUpdate } = usePermission()
const canUpdateLanguage = canUpdate('language')
// Search filter state bound to the toolbar input.
const filters = ref({
  q: ''
})
// When true, syncSources asks the backend to machine-translate new strings.
const autoTranslate = ref(false)
const tableRef = ref(null)
// Chunk size for paged row fetching; virtual scroll loads more on demand.
const FETCH_LIMIT = 1400
// Offset of the next chunk to request from the backend.
const loadedOffset = ref(0)
const hasMoreRows = ref(true)
const loadingMore = ref(false)
// rowsPerPage: 0 disables q-table's own pagination (all rows, virtual scroll).
const tablePagination = ref({
  page: 1,
  rowsPerPage: 0,
  sortBy: 'source_text_tr',
  descending: false
})
// Debounce handle for filter-triggered reloads (see scheduleFilterReload).
let filterReloadTimer = null
// Allowed values for the per-row "source_type" select.
const sourceTypeOptions = [
  { label: 'dummy', value: 'dummy' },
  { label: 'postgre', value: 'postgre' },
  { label: 'mssql', value: 'mssql' }
]
// q-table column definitions; field names match the pivoted row shape.
const columns = [
  { name: 'actions', label: 'Güncelle', field: 'actions', align: 'left' },
  { name: 'select', label: 'Seç', field: 'select', align: 'left' },
  { name: 'source_text_tr', label: 'Türkçe Metin', field: 'source_text_tr', align: 'left', style: 'min-width: 340px' },
  { name: 'source_type', label: 'Kaynak', field: 'source_type', align: 'left', style: 'min-width: 140px' },
  { name: 'en', label: 'İngilizce', field: 'en', align: 'left', style: 'min-width: 220px' },
  { name: 'de', label: 'Almanca', field: 'de', align: 'left', style: 'min-width: 220px' },
  { name: 'es', label: 'İspanyolca', field: 'es', align: 'left', style: 'min-width: 220px' },
  { name: 'it', label: 'İtalyanca', field: 'it', align: 'left', style: 'min-width: 220px' },
  { name: 'ru', label: 'Rusça', field: 'ru', align: 'left', style: 'min-width: 220px' },
  { name: 'ar', label: 'Arapça', field: 'ar', align: 'left', style: 'min-width: 220px' }
]
// Editable drafts and their pristine snapshots, both keyed by t_key.
const draftByKey = ref({})
const originalByKey = ref({})
// t_keys currently ticked in the table's checkbox column.
const selectedKeys = ref([])
// Per-row debounce timers for auto-save (t_key -> timeout id).
const autoSaveTimers = new Map()
// Pivot flat store rows (one row per t_key + lang_code) into one table row
// per translation key, with a column per supported language. Rows are sorted
// by t_key for a stable display order.
const pivotRows = computed(() => {
  // Language codes that map 1:1 onto a column field of the same name.
  const directLangs = new Set(['en', 'de', 'es', 'it', 'ru', 'ar'])
  const byKey = new Map()
  for (const row of store.rows) {
    const key = row.t_key
    if (!byKey.has(key)) {
      byKey.set(key, {
        t_key: key,
        source_text_tr: '',
        source_type: 'dummy',
        en: '',
        de: '',
        es: '',
        it: '',
        ru: '',
        ar: '',
        // Per-language metadata (id/status/is_manual), keyed by lang_code.
        langs: {}
      })
    }
    const target = byKey.get(key)
    target.langs[row.lang_code] = {
      id: row.id,
      status: row.status,
      is_manual: row.is_manual
    }
    if (row.lang_code === 'tr') {
      // Turkish carries the source text; prefer translated_text when present.
      target.source_text_tr = row.translated_text || row.source_text_tr || ''
      target.source_type = row.source_type || 'dummy'
    } else if (directLangs.has(row.lang_code)) {
      target[row.lang_code] = row.translated_text || ''
    }
  }
  return Array.from(byKey.values()).sort((a, b) => a.t_key.localeCompare(b.t_key))
})
/**
 * Rebuild the per-row draft/original maps from the current pivotRows.
 * When `options.preserveDirty` is true, rows whose draft already differs from
 * its snapshot keep their in-progress edits instead of being overwritten.
 * Also drops any selected keys that no longer correspond to a visible row.
 */
function snapshotDrafts (options = {}) {
  const preserveDirty = Boolean(options?.preserveDirty)
  // Fields compared when deciding whether a row draft is dirty.
  const FIELDS = ['source_text_tr', 'source_type', 'en', 'de', 'es', 'it', 'ru', 'ar']
  const draft = {}
  const original = {}
  for (const row of pivotRows.value) {
    const existingDraft = draftByKey.value[row.t_key]
    const existingOriginal = originalByKey.value[row.t_key]
    const keepExisting = preserveDirty &&
      existingDraft &&
      existingOriginal &&
      FIELDS.some(f => existingDraft[f] !== existingOriginal[f])
    if (keepExisting) {
      draft[row.t_key] = { ...existingDraft }
      original[row.t_key] = { ...existingOriginal }
      continue
    }
    draft[row.t_key] = {
      source_text_tr: row.source_text_tr || '',
      source_type: row.source_type || 'dummy',
      en: row.en || '',
      de: row.de || '',
      es: row.es || '',
      it: row.it || '',
      ru: row.ru || '',
      ar: row.ar || ''
    }
    original[row.t_key] = { ...draft[row.t_key] }
  }
  draftByKey.value = draft
  originalByKey.value = original
  // Keep only selections that still exist after the rebuild.
  selectedKeys.value = selectedKeys.value.filter(k => draft[k])
}
// Return the editable draft for a key, lazily creating an empty draft so the
// template's v-model bindings never dereference undefined.
function rowDraft (key) {
  const existing = draftByKey.value[key]
  if (existing) return existing
  const blank = {
    source_text_tr: '',
    source_type: 'dummy',
    en: '',
    de: '',
    es: '',
    it: '',
    ru: '',
    ar: ''
  }
  draftByKey.value[key] = blank
  return blank
}
// Build the request filter object; an empty search string becomes undefined
// so the parameter is omitted from the query.
function buildFilters () {
  const query = String(filters.value.q || '').trim()
  return { q: query.length > 0 ? query : undefined }
}
/**
 * True when the row's draft differs from its pristine snapshot in any
 * editable field. Returns false for unknown keys.
 */
function rowHasChanges (key) {
  const draft = draftByKey.value[key]
  const orig = originalByKey.value[key]
  if (!draft || !orig) return false
  const FIELDS = ['source_text_tr', 'source_type', 'en', 'de', 'es', 'it', 'ru', 'ar']
  return FIELDS.some(f => draft[f] !== orig[f])
}
// Whether the stored row for (key, lang) is still awaiting approval.
function isPending (key, lang) {
  const row = pivotRows.value.find(r => r.t_key === key)
  return row?.langs?.[lang]?.status === 'pending'
}
/**
 * CSS class for a table cell: 'cell-dirty' when the draft differs from the
 * snapshot, 'cell-new' when the backing row for that language is pending,
 * otherwise ''.
 */
function cellClass (key, field) {
  const draft = draftByKey.value[key]
  const orig = originalByKey.value[key]
  if (!draft || !orig) return ''
  if (draft[field] !== orig[field]) return 'cell-dirty'
  // Map the column field to the language code whose pending status marks it.
  const langForField = field === 'source_text_tr'
    ? 'tr'
    : (['en', 'de', 'es', 'it', 'ru', 'ar'].includes(field) ? field : null)
  if (langForField && isPending(key, langForField)) return 'cell-new'
  return ''
}
// Add or remove a key from the selection, keeping the array deduplicated and
// replacing it immutably so reactivity triggers.
function toggleSelected (key, checked) {
  if (!checked) {
    selectedKeys.value = selectedKeys.value.filter(k => k !== key)
    return
  }
  if (selectedKeys.value.includes(key)) return
  selectedKeys.value = [...selectedKeys.value, key]
}
// Debounced per-row auto-save: restart the 250 ms timer on every change and
// persist only if the row is still dirty when the timer fires.
function queueAutoSave (key) {
  if (!key) return
  const pending = autoSaveTimers.get(key)
  if (pending) clearTimeout(pending)
  autoSaveTimers.set(key, setTimeout(() => {
    autoSaveTimers.delete(key)
    if (rowHasChanges(key)) {
      void saveRow(key)
    }
  }, 250))
}
// Fetch one chunk of translation rows from the store and refresh draft state.
// With append=true the chunk is added after already-loaded rows and dirty
// drafts are preserved; otherwise the table reloads from offset 0.
async function fetchRowsChunk (append = false) {
  const params = {
    ...buildFilters(),
    limit: FETCH_LIMIT,
    offset: append ? loadedOffset.value : 0
  }
  await store.fetchRows(params, { append })
  // NOTE(review): assumes store.count is the size of the chunk just fetched,
  // not the total matching row count — verify against translationStore.
  const incomingCount = Number(store.count) || 0
  if (append) {
    loadedOffset.value += incomingCount
  } else {
    loadedOffset.value = incomingCount
  }
  // Heuristic: a full chunk implies more rows may exist; a short chunk ends paging.
  hasMoreRows.value = incomingCount === FETCH_LIMIT
  snapshotDrafts({ preserveDirty: append })
}
// Reset paging and reload the first chunk; failures are logged and surfaced
// as a negative notification.
async function loadRows () {
  loadedOffset.value = 0
  hasMoreRows.value = true
  try {
    await fetchRowsChunk(false)
  } catch (err) {
    const message = err?.message || 'Çeviri satırları yüklenemedi'
    console.error('[translation-sync][ui] loadRows:error', { message })
    $q.notify({ type: 'negative', message })
  }
}
// Append the next chunk unless paging is exhausted or a load/save is in
// flight; the loadingMore flag guards against re-entrant calls.
async function loadMoreRows () {
  const busy = loadingMore.value || store.loading || store.saving
  if (busy || !hasMoreRows.value) return
  loadingMore.value = true
  try {
    await fetchRowsChunk(true)
  } finally {
    loadingMore.value = false
  }
}
// Keep fetching chunks (bounded by maxBatches) until at least minRows rows
// are available or the backend reports no more data.
async function ensureEnoughVisibleRows (minRows = 120, maxBatches = 4) {
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || pivotRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
// When the virtual scroller renders within 15 rows of the end, fetch the
// next chunk in the background.
function onVirtualScroll (details) {
  const lastVisible = Number(details?.to || 0)
  if (!Number.isFinite(lastVisible)) return
  const threshold = pivotRows.value.length - 15
  if (lastVisible >= threshold) {
    void loadMoreRows()
  }
}
// Debounce filter changes: restart the 350 ms timer and reload when it fires.
function scheduleFilterReload () {
  if (filterReloadTimer) clearTimeout(filterReloadTimer)
  filterReloadTimer = setTimeout(() => {
    filterReloadTimer = null
    void loadRows()
  }, 350)
}
/**
 * Insert placeholder rows for languages that have draft text but no backing
 * DB row yet, so a later bulkUpdate has ids to target.
 * @returns {Promise<boolean>} true when at least one row was inserted.
 */
async function ensureMissingLangRows (key, draft, langs) {
  const candidateLangs = ['en', 'de', 'es', 'it', 'ru', 'ar']
  const missingLangs = candidateLangs.filter(
    lang => !langs[lang] && String(draft[lang] || '').trim() !== ''
  )
  if (missingLangs.length === 0) return false
  await store.upsertMissing(
    [
      {
        t_key: key,
        // Fall back to the key itself when no source text is drafted.
        source_text_tr: draft.source_text_tr || key
      }
    ],
    missingLangs
  )
  return true
}
/**
 * Build the bulkUpdate payload for one pivoted row: one item per language
 * whose text changed (or for every language when source_type changed).
 * Only languages with a backing row id are included. Pure function.
 * @param {object} row       pivoted row with per-language metadata in row.langs
 * @param {object} draft     current editable values
 * @param {object} original  pristine snapshot to diff against
 * @param {string} [approveStatus='approved'] status stamped on each item
 * @returns {Array<object>} update items (possibly empty)
 */
function buildRowUpdates (row, draft, original, approveStatus = 'approved') {
  const items = []
  const langs = row.langs || {}
  // A source_type change must be propagated to every language row.
  const sourceTypeChanged = draft.source_type !== original.source_type
  // The Turkish row carries the source text itself: translated_text mirrors it.
  if (langs.tr?.id && (draft.source_text_tr !== original.source_text_tr || sourceTypeChanged)) {
    items.push({
      id: langs.tr.id,
      source_text_tr: draft.source_text_tr,
      translated_text: draft.source_text_tr,
      source_type: draft.source_type,
      status: approveStatus,
      is_manual: true
    })
  }
  // Every other language only updates its own translated_text.
  for (const lang of ['en', 'de', 'es', 'it', 'ru', 'ar']) {
    const meta = langs[lang]
    if (!meta?.id) continue
    if (draft[lang] === original[lang] && !sourceTypeChanged) continue
    items.push({
      id: meta.id,
      translated_text: draft[lang],
      source_type: draft.source_type,
      status: approveStatus,
      is_manual: true
    })
  }
  return items
}
// Persist one row's draft changes: first create any missing language rows,
// then push the changed fields via bulkUpdate, then reload the table.
async function saveRow (key) {
  const row = pivotRows.value.find(r => r.t_key === key)
  const draft = draftByKey.value[key]
  const original = originalByKey.value[key]
  // Nothing to do for unknown keys or clean rows.
  if (!row || !draft || !original || !rowHasChanges(key)) return
  try {
    const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
    if (insertedMissing) {
      // Reload so freshly inserted language rows get ids we can update.
      // NOTE(review): loadRows() re-snapshots drafts without preserveDirty,
      // which looks like it can reset this row's draft before the update
      // below runs — confirm that edits survive this path.
      await loadRows()
    }
    // Re-read the row and drafts in case the reload above replaced them.
    const refreshed = pivotRows.value.find(r => r.t_key === key)
    if (!refreshed) return
    const refreshDraft = draftByKey.value[key]
    const refreshOriginal = originalByKey.value[key]
    const items = buildRowUpdates(refreshed, refreshDraft, refreshOriginal)
    if (items.length > 0) {
      await store.bulkUpdate(items)
    }
    await loadRows()
    $q.notify({ type: 'positive', message: 'Satır güncellendi' })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Güncelleme hatası' })
  }
}
// Approve every pending language row among the selected keys in one call,
// then reload and report how many records were approved.
async function bulkApproveSelected () {
  try {
    const pendingIds = new Set()
    const allLangs = ['tr', 'en', 'de', 'es', 'it', 'ru', 'ar']
    for (const key of selectedKeys.value) {
      const row = pivotRows.value.find(r => r.t_key === key)
      if (!row) continue
      for (const lang of allLangs) {
        const meta = row.langs?.[lang]
        if (meta?.id && meta?.status === 'pending') {
          pendingIds.add(meta.id)
        }
      }
    }
    const unique = [...pendingIds]
    if (unique.length === 0) {
      $q.notify({ type: 'warning', message: 'Onaylanacak pending kayıt bulunamadı' })
      return
    }
    await store.bulkApprove(unique)
    await loadRows()
    $q.notify({ type: 'positive', message: `${unique.length} kayıt onaylandı` })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Toplu onay hatası' })
  }
}
// Send the selected keys to the machine-translation endpoint, reload the
// table, and report the translated count (with trace id when available).
async function translateSelectedRows () {
  try {
    const uniqueKeys = [...new Set(selectedKeys.value.filter(Boolean))]
    if (uniqueKeys.length === 0) {
      $q.notify({ type: 'warning', message: 'Çevrilecek seçim bulunamadı' })
      return
    }
    const response = await store.translateSelected({
      t_keys: uniqueKeys,
      languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
      // Cap the request at 6 target languages per key, bounded at 50k rows.
      limit: Math.min(50000, uniqueKeys.length * 6)
    })
    const translated = Number(response?.translated_count || 0)
    const traceId = response?.trace_id || null
    await loadRows()
    const suffix = traceId ? ` | Trace: ${traceId}` : ''
    $q.notify({
      type: 'positive',
      message: `Seçilenler çevrildi: ${translated}${suffix}`
    })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Seçili çeviri işlemi başarısız' })
  }
}
// Save every selected row that has unsaved changes with one bulkUpdate call,
// inserting any missing language rows first.
// NOTE(review): loadRows() inside the loop re-snapshots drafts without
// preserveDirty, which looks like it can discard edits on the remaining
// selected rows — confirm intended behavior.
async function bulkSaveSelected () {
  try {
    const items = []
    for (const key of selectedKeys.value) {
      const row = pivotRows.value.find(r => r.t_key === key)
      const draft = draftByKey.value[key]
      const original = originalByKey.value[key]
      if (!row || !draft || !original) continue
      if (!rowHasChanges(key)) continue
      const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
      if (insertedMissing) {
        // Reload so the new language rows get ids before building updates.
        await loadRows()
      }
      const refreshed = pivotRows.value.find(r => r.t_key === key)
      if (!refreshed) continue
      const refreshDraft = draftByKey.value[key]
      const refreshOriginal = originalByKey.value[key]
      items.push(...buildRowUpdates(refreshed, refreshDraft, refreshOriginal))
    }
    if (items.length === 0) {
      $q.notify({ type: 'warning', message: 'Toplu güncellenecek değişiklik yok' })
      return
    }
    await store.bulkUpdate(items)
    await loadRows()
    $q.notify({ type: 'positive', message: `${items.length} kayıt toplu güncellendi` })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Toplu güncelleme hatası' })
  }
}
// Trigger a backend source scan for new translatable strings (optionally
// auto-translating them), then reload the table and report the outcome.
// Instrumented with console.info markers for the translation-sync trace.
async function syncSources () {
  const clickedAt = Date.now()
  const rowsBefore = pivotRows.value.length
  console.info('[translation-sync][ui] button:click', {
    at: new Date(clickedAt).toISOString(),
    auto_translate: autoTranslate.value,
    only_new: true,
    before_row_count: rowsBefore
  })
  try {
    const response = await store.syncSources({
      auto_translate: autoTranslate.value,
      languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
      limit: 1000,
      only_new: true
    })
    const result = response?.result || response || {}
    const traceId = result?.trace_id || response?.trace_id || null
    console.info('[translation-sync][ui] sync:response', {
      trace_id: traceId,
      seed_count: result.seed_count || 0,
      affected_count: result.affected_count || 0,
      auto_translated: result.auto_translated || 0,
      duration_ms: result.duration_ms || null
    })
    await loadRows()
    const rowsAfter = pivotRows.value.length
    console.info('[translation-sync][ui] chain:reload-complete', {
      trace_id: traceId,
      duration_ms: Date.now() - clickedAt,
      before_row_count: rowsBefore,
      after_row_count: rowsAfter,
      delta_row_count: rowsAfter - rowsBefore
    })
    $q.notify({
      type: 'positive',
      message: `Tarama tamamlandı. Seed: ${result.seed_count || 0}, Oto çeviri: ${result.auto_translated || 0}`
    })
  } catch (err) {
    $q.notify({
      type: 'negative',
      message: err?.message || 'Kaynak tarama hatası'
    })
  }
}
// Initial data load.
onMounted(() => {
  void loadRows()
})
// Cancel any pending debounced filter reload on teardown.
onBeforeUnmount(() => {
  if (filterReloadTimer) {
    clearTimeout(filterReloadTimer)
    filterReloadTimer = null
  }
})
// Debounced reload whenever the search text changes.
watch(
  () => filters.value.q,
  () => { scheduleFilterReload() }
)
// After a sort change, make sure enough rows are loaded for a meaningful sort.
watch(
  [() => tablePagination.value.sortBy, () => tablePagination.value.descending],
  () => { void ensureEnoughVisibleRows(120, 4) }
)
</script>
<style scoped>
.translation-page {
height: calc(100vh - 120px);
display: flex;
flex-direction: column;
overflow: hidden;
}
.translation-toolbar {
background: #fff;
padding-top: 6px;
}
.sticky-toolbar {
position: sticky;
top: 0;
z-index: 35;
}
.translation-table {
flex: 1;
min-height: 0;
}
.translation-table :deep(.q-table__middle) {
max-height: calc(100vh - 280px);
overflow: auto;
}
.translation-table :deep(.q-table thead tr th) {
position: sticky;
top: 0;
z-index: 30;
background: #fff;
}
.translation-table :deep(.q-table tbody td) {
vertical-align: top;
padding: 6px;
}
.translation-table :deep(.q-field__native) {
line-height: 1.35;
word-break: break-word;
}
.source-text-label {
white-space: pre-wrap;
word-break: break-word;
line-height: 1.4;
max-height: 11.2em;
overflow: auto;
}
.cell-dirty {
background: #fff3cd;
}
.cell-new {
background: #d9f7e8;
}
</style>

View File

@@ -42,7 +42,7 @@
<q-date <q-date
v-model="dateFrom" v-model="dateFrom"
mask="YYYY-MM-DD" mask="YYYY-MM-DD"
locale="tr-TR" :locale="dateLocale"
:options="isValidFromDate" :options="isValidFromDate"
/> />
</q-popup-proxy> </q-popup-proxy>
@@ -65,7 +65,7 @@
<q-date <q-date
v-model="dateTo" v-model="dateTo"
mask="YYYY-MM-DD" mask="YYYY-MM-DD"
locale="tr-TR" :locale="dateLocale"
:options="isValidToDate" :options="isValidToDate"
/> />
</q-popup-proxy> </q-popup-proxy>
@@ -281,12 +281,18 @@ import { useStatementdetailStore } from 'src/stores/statementdetailStore'
import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore' import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
import dayjs from 'dayjs' import dayjs from 'dayjs'
import { usePermission } from 'src/composables/usePermission' import { usePermission } from 'src/composables/usePermission'
import { useLocaleStore } from 'src/stores/localeStore'
import { getDateLocale } from 'src/i18n/dayjsLocale'
import { useI18n } from 'src/composables/useI18n'
const { canRead, canExport } = usePermission() const { canRead, canExport } = usePermission()
const canReadFinance = canRead('finance') const canReadFinance = canRead('finance')
const canExportFinance = canExport('finance') const canExportFinance = canExport('finance')
const $q = useQuasar() const $q = useQuasar()
const localeStore = useLocaleStore()
const { t } = useI18n()
const dateLocale = computed(() => getDateLocale(localeStore.locale))
const accountStore = useAccountStore() const accountStore = useAccountStore()
const statementheaderStore = useStatementheaderStore() const statementheaderStore = useStatementheaderStore()
@@ -360,7 +366,7 @@ function hasInvalidDateRange () {
function notifyInvalidDateRange () { function notifyInvalidDateRange () {
$q.notify({ $q.notify({
type: 'warning', type: 'warning',
message: '⚠️ Başlangıç tarihi bitiş tarihinden sonra olamaz.', message: t('statement.invalidDateRange'),
position: 'top-right' position: 'top-right'
}) })
} }
@@ -402,7 +408,7 @@ async function onFilterClick() {
if (!selectedCari.value || !dateFrom.value || !dateTo.value) { if (!selectedCari.value || !dateFrom.value || !dateTo.value) {
$q.notify({ $q.notify({
type: 'warning', type: 'warning',
message: '⚠️ Lütfen cari ve tarih aralığını seçiniz.', message: t('statement.selectFilters'),
position: 'top-right' position: 'top-right'
}) })
return return
@@ -417,7 +423,7 @@ async function onFilterClick() {
startdate: dateFrom.value, startdate: dateFrom.value,
enddate: dateTo.value, enddate: dateTo.value,
accountcode: selectedCari.value, accountcode: selectedCari.value,
langcode: 'TR', langcode: localeStore.backendLangCode,
parislemler: selectedMonType.value, parislemler: selectedMonType.value,
excludeopening: excludeOpening.value excludeopening: excludeOpening.value
}) })
@@ -483,7 +489,7 @@ function toggleFiltersCollapsed () {
function normalizeText (str) { function normalizeText (str) {
return (str || '') return (str || '')
.toString() .toString()
.toLocaleLowerCase('tr-TR') // Türkçe uyumlu .toLocaleLowerCase(dateLocale.value)
.normalize('NFD') // aksan temizleme .normalize('NFD') // aksan temizleme
.replace(/[\u0300-\u036f]/g, '') .replace(/[\u0300-\u036f]/g, '')
.trim() .trim()
@@ -503,7 +509,7 @@ function resetFilters() {
/* Format */ /* Format */
function formatAmount(n) { function formatAmount(n) {
if (n == null || isNaN(n)) return '0,00' if (n == null || isNaN(n)) return '0,00'
return new Intl.NumberFormat('tr-TR', { return new Intl.NumberFormat(dateLocale.value, {
minimumFractionDigits: 2, minimumFractionDigits: 2,
maximumFractionDigits: 2 maximumFractionDigits: 2
}).format(n) }).format(n)
@@ -562,7 +568,8 @@ async function handleDownload() {
selectedCari.value, // accountCode selectedCari.value, // accountCode
dateFrom.value, // startDate dateFrom.value, // startDate
dateTo.value, // endDate dateTo.value, // endDate
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3']) selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
localeStore.backendLangCode
) )
console.log("[DEBUG] Storedan gelen result:", result) console.log("[DEBUG] Storedan gelen result:", result)
@@ -608,7 +615,8 @@ async function CurrheadDownload() {
selectedCari.value, // accountCode selectedCari.value, // accountCode
dateFrom.value, // startDate dateFrom.value, // startDate
dateTo.value, // endDate dateTo.value, // endDate
selectedMonType.value // parasal işlem tipi (parislemler) selectedMonType.value, // parasal işlem tipi (parislemler)
localeStore.backendLangCode
) )
console.log("[DEBUG] CurrheadDownloadresult:", result) console.log("[DEBUG] CurrheadDownloadresult:", result)

View File

@@ -228,6 +228,12 @@ const routes = [
component: () => import('../pages/MarketMailMapping.vue'), component: () => import('../pages/MarketMailMapping.vue'),
meta: { permission: 'system:update' } meta: { permission: 'system:update' }
}, },
{
path: 'language/translations',
name: 'translation-table',
component: () => import('pages/TranslationTable.vue'),
meta: { permission: 'language:update' }
},
/* ================= ORDERS ================= */ /* ================= ORDERS ================= */
@@ -311,6 +317,14 @@ const routes = [
meta: { permission: 'order:view' } meta: { permission: 'order:view' }
}, },
/* ================= PRICING ================= */
{
path: 'pricing/product-pricing',
name: 'product-pricing',
component: () => import('pages/ProductPricing.vue'),
meta: { permission: 'order:view' }
},
/* ================= PASSWORD ================= */ /* ================= PASSWORD ================= */

View File

@@ -1,12 +1,14 @@
import axios from 'axios' import axios from 'axios'
import qs from 'qs' import qs from 'qs'
import { useAuthStore } from 'stores/authStore' import { useAuthStore } from 'stores/authStore'
import { DEFAULT_LOCALE, normalizeLocale } from 'src/i18n/languages'
const rawBaseUrl = const rawBaseUrl =
(typeof process !== 'undefined' && process.env?.VITE_API_BASE_URL) || '/api' (typeof process !== 'undefined' && process.env?.VITE_API_BASE_URL) || '/api'
export const API_BASE_URL = String(rawBaseUrl).trim().replace(/\/+$/, '') export const API_BASE_URL = String(rawBaseUrl).trim().replace(/\/+$/, '')
const AUTH_REFRESH_PATH = '/auth/refresh' const AUTH_REFRESH_PATH = '/auth/refresh'
const LOCALE_STORAGE_KEY = 'bss.locale'
const api = axios.create({ const api = axios.create({
baseURL: API_BASE_URL, baseURL: API_BASE_URL,
@@ -74,6 +76,11 @@ function redirectToLogin() {
window.location.hash = '/login' window.location.hash = '/login'
} }
function getRequestLocale() {
if (typeof window === 'undefined') return DEFAULT_LOCALE
return normalizeLocale(window.localStorage.getItem(LOCALE_STORAGE_KEY))
}
api.interceptors.request.use((config) => { api.interceptors.request.use((config) => {
const auth = useAuthStore() const auth = useAuthStore()
const url = config.url || '' const url = config.url || ''
@@ -82,6 +89,8 @@ api.interceptors.request.use((config) => {
config.headers ||= {} config.headers ||= {}
config.headers.Authorization = `Bearer ${auth.token}` config.headers.Authorization = `Bearer ${auth.token}`
} }
config.headers ||= {}
config.headers['Accept-Language'] = getRequestLocale()
return config return config
}) })

View File

@@ -6,12 +6,16 @@ function extractApiErrorMessage (err, fallback) {
const data = err?.response?.data const data = err?.response?.data
if (typeof data === 'string' && data.trim()) return data if (typeof data === 'string' && data.trim()) return data
if (data && typeof data === 'object') { if (data && typeof data === 'object') {
const validationMessages = Array.isArray(data.barcodeValidations)
? data.barcodeValidations.map(v => String(v?.message || '').trim()).filter(Boolean)
: []
const msg = String(data.message || '').trim() const msg = String(data.message || '').trim()
const step = String(data.step || '').trim() const step = String(data.step || '').trim()
const detail = String(data.detail || '').trim() const detail = String(data.detail || '').trim()
const parts = [msg] const parts = [msg]
if (step) parts.push(`step=${step}`) if (step) parts.push(`step=${step}`)
if (detail) parts.push(detail) if (detail) parts.push(detail)
if (validationMessages.length) parts.push(validationMessages.join(' | '))
const merged = parts.filter(Boolean).join(' | ') const merged = parts.filter(Boolean).join(' | ')
if (merged) return merged if (merged) return merged
} }
@@ -36,6 +40,53 @@ function nowMs () {
return Date.now() return Date.now()
} }
const applyInFlightByOrder = new Map()
const YAS_NUMERIC_SIZES = new Set(['2', '4', '6', '8', '10', '12', '14'])
// Coerce any value to a trimmed string; null/undefined become ''.
function safeStr (value) {
  if (value == null) return ''
  return String(value).trim()
}
// Normalize a production dim1 (size) label: uppercase it and collapse
// age-style labels ("4 YAS", "4Y", "4 YAŞ") to the bare number when it is a
// known numeric age size; everything else passes through uppercased.
function normalizeProductionDim1Label (value) {
  const text = safeStr(value).toUpperCase()
  if (!text) return ''
  const yasMatch = text.match(/^(\d+)\s*(Y|YAS|YAŞ)$/)
  const numericPart = yasMatch?.[1]
  return numericPart && YAS_NUMERIC_SIZES.has(numericPart) ? numericPart : text
}
/**
 * Pick between a current and a next raw payload label (both upper-cased),
 * preferring a label that explicitly ends with a "YAS"/"YAŞ" suffix.
 * An empty side always loses to the non-empty one; otherwise the current
 * label is kept unless only the next one carries the suffix.
 */
function pickPreferredProductionYasPayloadLabel (currentRaw, nextRaw) {
  const current = safeStr(currentRaw).toUpperCase()
  const next = safeStr(nextRaw).toUpperCase()
  if (!next) return current
  if (!current) return next
  const endsWithYas = (label) => /YAS$|YAŞ$/.test(label)
  if (endsWithYas(next) && !endsWithYas(current)) return next
  return current
}
/**
 * Convert a dim1 label to the payload code the backend expects.
 * Non-age sizes pass through normalized; known age sizes are looked up in the
 * row's yasPayloadMap when present, otherwise default to "<n>Y".
 */
function toProductionPayloadDim1 (row, value) {
  const base = normalizeProductionDim1Label(value)
  if (!base || !YAS_NUMERIC_SIZES.has(base)) return base
  const payloadMap =
    (row?.yasPayloadMap && typeof row.yasPayloadMap === 'object')
      ? row.yasPayloadMap
      : {}
  const mapped = safeStr(payloadMap[base]).toUpperCase()
  return mapped || `${base}Y`
}
export const useOrderProductionItemStore = defineStore('orderproductionitems', { export const useOrderProductionItemStore = defineStore('orderproductionitems', {
state: () => ({ state: () => ({
items: [], items: [],
@@ -50,9 +101,11 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
secondColorRequestsByKey: {}, secondColorRequestsByKey: {},
newSecondColorRequestsByKey: {}, newSecondColorRequestsByKey: {},
productAttributesByItemType: {}, productAttributesByItemType: {},
productItemAttributesByKey: {},
cdItemLookups: null, cdItemLookups: null,
cdItemDraftsByCode: {}, cdItemDraftsByCode: {},
productAttributeDraftsByCode: {}, productAttributeDraftsByCode: {},
knownExistingItemCodes: {},
loading: false, loading: false,
saving: false, saving: false,
error: null error: null
@@ -70,18 +123,35 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
}, },
actions: { actions: {
normalizeDim1ForUi (value) {
return normalizeProductionDim1Label(value)
},
pickPreferredYasPayloadLabel (currentRaw, nextRaw) {
return pickPreferredProductionYasPayloadLabel(currentRaw, nextRaw)
},
toPayloadDim1Code (row, value) {
return toProductionPayloadDim1(row, value)
},
classifyItemCode (value) { classifyItemCode (value) {
const normalized = String(value || '').trim().toUpperCase() const normalized = String(value || '').trim().toUpperCase()
if (!normalized) { if (!normalized) {
return { normalized: '', mode: 'empty', exists: false } return { normalized: '', mode: 'empty', exists: false }
} }
const exists = this.productCodeSet.has(normalized) const exists = this.productCodeSet.has(normalized) || !!this.knownExistingItemCodes[normalized]
return { return {
normalized, normalized,
mode: exists ? 'existing' : 'new', mode: exists ? 'existing' : 'new',
exists exists
} }
}, },
// Record that an item code is known to exist server-side so classifyItemCode
// can treat it as "existing" even before the product code list is refreshed.
markItemCodeKnownExisting (itemCode, exists = true) {
  const code = String(itemCode || '').trim().toUpperCase()
  if (!code) return
  // Replace the map object instead of mutating it so reactivity fires.
  this.knownExistingItemCodes = {
    ...this.knownExistingItemCodes,
    [code]: !!exists
  }
},
async fetchHeader (orderHeaderID) { async fetchHeader (orderHeaderID) {
if (!orderHeaderID) { if (!orderHeaderID) {
@@ -133,6 +203,20 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
this.error = err?.response?.data || err?.message || 'Urun listesi alinamadi' this.error = err?.response?.data || err?.message || 'Urun listesi alinamadi'
} }
}, },
// Fetch a single CD item record by code from GET /product-cditem.
// On success the code is marked as known-existing; on failure (or missing
// code) null is returned and the error is only logged — this is a
// best-effort lookup, callers handle null.
async fetchCdItemByCode (code) {
  if (!code) return null
  try {
    const res = await api.get('/product-cditem', { params: { code } })
    const data = res?.data || null
    if (data) {
      this.markItemCodeKnownExisting(code, true)
    }
    return data
  } catch (err) {
    console.error('[OrderProductionItemStore] fetchCdItemByCode failed', err)
    return null
  }
},
async fetchColors (productCode) { async fetchColors (productCode) {
const code = String(productCode || '').trim() const code = String(productCode || '').trim()
if (!code) return [] if (!code) return []
@@ -151,6 +235,7 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
const res = await api.get('/product-colors', { params: { code } }) const res = await api.get('/product-colors', { params: { code } })
const data = res?.data const data = res?.data
const list = Array.isArray(data) ? data : [] const list = Array.isArray(data) ? data : []
if (list.length) this.markItemCodeKnownExisting(code, true)
this.colorOptionsByCode[code] = list this.colorOptionsByCode[code] = list
console.info('[OrderProductionItemStore] fetchColors done', { code, count: list.length, durationMs: Math.round(nowMs() - t0) }) console.info('[OrderProductionItemStore] fetchColors done', { code, count: list.length, durationMs: Math.round(nowMs() - t0) })
return list return list
@@ -272,6 +357,25 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
return [] return []
} }
}, },
// Fetch existing attribute rows for an item (cached per "itemTypeCode|code"
// key in productItemAttributesByKey; pass force=true to bypass the cache).
// A non-empty result marks the code as known-existing. Returns [] on error
// and stores a readable message in this.error.
async fetchProductItemAttributes (itemCode, itemTypeCode = 1, force = false) {
  const code = String(itemCode || '').trim().toUpperCase()
  const itc = Number(itemTypeCode || 1)
  if (!code) return []
  const key = `${itc}|${code}`
  if (!force && this.productItemAttributesByKey[key]) {
    return this.productItemAttributesByKey[key]
  }
  try {
    const res = await api.get('/product-item-attributes', { params: { itemTypeCode: itc, itemCode: code } })
    const list = Array.isArray(res?.data) ? res.data : []
    if (list.length) this.markItemCodeKnownExisting(code, true)
    this.productItemAttributesByKey[key] = list
    return list
  } catch (err) {
    this.error = err?.response?.data || err?.message || 'Urunun mevcut ozellikleri alinamadi'
    return []
  }
},
async fetchCdItemLookups (force = false) { async fetchCdItemLookups (force = false) {
if (this.cdItemLookups && !force) return this.cdItemLookups if (this.cdItemLookups && !force) return this.cdItemLookups
try { try {
@@ -321,7 +425,7 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
if (!code) return [] if (!code) return []
return this.productAttributeDraftsByCode[code] || [] return this.productAttributeDraftsByCode[code] || []
}, },
async validateUpdates (orderHeaderID, lines) { async validateUpdates (orderHeaderID, lines, cdItems = []) {
if (!orderHeaderID) return { missingCount: 0, missing: [] } if (!orderHeaderID) return { missingCount: 0, missing: [] }
this.saving = true this.saving = true
@@ -332,7 +436,7 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
console.info('[OrderProductionItemStore] validateUpdates start', { orderHeaderID, lineCount: lines?.length || 0 }) console.info('[OrderProductionItemStore] validateUpdates start', { orderHeaderID, lineCount: lines?.length || 0 })
const res = await api.post( const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/validate`, `/orders/production-items/${encodeURIComponent(orderHeaderID)}/validate`,
{ lines } { lines, cdItems }
) )
const data = res?.data || { missingCount: 0, missing: [] } const data = res?.data || { missingCount: 0, missing: [] }
const rid = res?.headers?.['x-debug-request-id'] || '' const rid = res?.headers?.['x-debug-request-id'] || ''
@@ -340,6 +444,7 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
orderHeaderID, orderHeaderID,
lineCount: lines?.length || 0, lineCount: lines?.length || 0,
missingCount: Number(data?.missingCount || 0), missingCount: Number(data?.missingCount || 0),
barcodeValidationCount: Number(data?.barcodeValidationCount || 0),
requestId: rid, requestId: rid,
durationMs: Math.round(nowMs() - t0) durationMs: Math.round(nowMs() - t0)
}) })
@@ -352,9 +457,19 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
this.saving = false this.saving = false
} }
}, },
async applyUpdates (orderHeaderID, lines, insertMissing, cdItems = [], productAttributes = []) { async applyUpdates (orderHeaderID, lines, insertMissing, cdItems = [], productAttributes = [], headerAverageDueDate = null) {
if (!orderHeaderID) return { updated: 0, inserted: 0 } if (!orderHeaderID) return { updated: 0, inserted: 0 }
const orderKey = String(orderHeaderID).trim().toUpperCase()
if (applyInFlightByOrder.has(orderKey)) {
console.warn('[OrderProductionItemStore] applyUpdates deduped (in-flight)', {
orderHeaderID: orderKey,
lineCount: lines?.length || 0
})
return await applyInFlightByOrder.get(orderKey)
}
const applyPromise = (async () => {
this.saving = true this.saving = true
this.error = null this.error = null
@@ -365,11 +480,18 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
lineCount: lines?.length || 0, lineCount: lines?.length || 0,
insertMissing: !!insertMissing, insertMissing: !!insertMissing,
cdItemCount: cdItems?.length || 0, cdItemCount: cdItems?.length || 0,
attributeCount: productAttributes?.length || 0 attributeCount: productAttributes?.length || 0,
headerAverageDueDate
}) })
const res = await api.post( const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/apply`, `/orders/production-items/${encodeURIComponent(orderHeaderID)}/apply`,
{ lines, insertMissing, cdItems, productAttributes } {
lines,
insertMissing,
cdItems,
productAttributes,
HeaderAverageDueDate: headerAverageDueDate
}
) )
const data = res?.data || { updated: 0, inserted: 0 } const data = res?.data || { updated: 0, inserted: 0 }
const rid = res?.headers?.['x-debug-request-id'] || '' const rid = res?.headers?.['x-debug-request-id'] || ''
@@ -377,7 +499,9 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
orderHeaderID, orderHeaderID,
updated: Number(data?.updated || 0), updated: Number(data?.updated || 0),
inserted: Number(data?.inserted || 0), inserted: Number(data?.inserted || 0),
barcodeInserted: Number(data?.barcodeInserted || 0),
attributeUpserted: Number(data?.attributeUpserted || 0), attributeUpserted: Number(data?.attributeUpserted || 0),
headerUpdated: !!data?.headerUpdated,
requestId: rid, requestId: rid,
durationMs: Math.round(nowMs() - t0) durationMs: Math.round(nowMs() - t0)
}) })
@@ -389,6 +513,17 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
} finally { } finally {
this.saving = false this.saving = false
} }
})()
applyInFlightByOrder.set(orderKey, applyPromise)
try {
return await applyPromise
} finally {
if (applyInFlightByOrder.get(orderKey) === applyPromise) {
applyInFlightByOrder.delete(orderKey)
}
}
} }
} }
}) })

View File

@@ -0,0 +1,148 @@
import { defineStore } from 'pinia'
import api from 'src/services/api'
/** Coerce to a trimmed string; null and undefined yield ''. */
function toText (value) {
  if (value === null || value === undefined) return ''
  return String(value).trim()
}
/** Parse to a number rounded to 2 decimal places; non-finite input yields 0. */
function toNumber (value) {
  const parsed = Number(value)
  if (!Number.isFinite(parsed)) return 0
  return Number(parsed.toFixed(2))
}
/**
 * Map one raw pricing API row into the grid row model.
 * id is 1-based and offset by baseIndex so appended pages keep counting;
 * all editable price fields start at 0.
 */
function mapRow (raw, index, baseIndex = 0) {
  // Build the usd1..usd6 / eur1..eur6 / try1..try6 fields, all zeroed.
  const zeroPriceFields = {}
  for (const currency of ['usd', 'eur', 'try']) {
    for (let slot = 1; slot <= 6; slot += 1) {
      zeroPriceFields[`${currency}${slot}`] = 0
    }
  }
  return {
    id: baseIndex + index + 1,
    productCode: toText(raw?.ProductCode),
    stockQty: toNumber(raw?.StockQty),
    stockEntryDate: toText(raw?.StockEntryDate),
    lastPricingDate: toText(raw?.LastPricingDate),
    askiliYan: toText(raw?.AskiliYan),
    kategori: toText(raw?.Kategori),
    urunIlkGrubu: toText(raw?.UrunIlkGrubu),
    urunAnaGrubu: toText(raw?.UrunAnaGrubu),
    urunAltGrubu: toText(raw?.UrunAltGrubu),
    icerik: toText(raw?.Icerik),
    karisim: toText(raw?.Karisim),
    marka: toText(raw?.Marka),
    brandGroupSelection: toText(raw?.BrandGroupSec),
    costPrice: toNumber(raw?.CostPrice),
    expenseForBasePrice: 0,
    basePriceUsd: 0,
    basePriceTry: 0,
    ...zeroPriceFields
  }
}
/**
 * Pinia store for the product pricing grid.
 * Loads rows page-by-page from GET /pricing/products using a keyset cursor
 * (after_product_code) and supports appending pages for infinite scroll.
 */
export const useProductPricingStore = defineStore('product-pricing-store', {
  state: () => ({
    rows: [],       // mapped grid rows (see mapRow)
    loading: false, // true while a fetch is in flight
    error: '',      // last fetch error message ('' when none)
    hasMore: true   // whether another page is still available
  }),
  actions: {
    /**
     * Fetch one page of pricing rows.
     * options: { limit?: number, afterProductCode?: string, append?: boolean }
     * Returns { traceId, fetched, hasMore, nextCursor }; rethrows on failure.
     */
    async fetchRows (options = {}) {
      this.loading = true
      this.error = ''
      const limit = Number(options?.limit) > 0 ? Number(options.limit) : 500
      const afterProductCode = toText(options?.afterProductCode)
      const append = Boolean(options?.append)
      // Row ids continue from the current count when appending a page.
      const baseIndex = append ? this.rows.length : 0
      const startedAt = Date.now()
      console.info('[product-pricing][frontend] request:start', {
        at: new Date(startedAt).toISOString(),
        timeout_ms: 180000,
        limit,
        after_product_code: afterProductCode || null,
        append
      })
      try {
        const params = { limit }
        if (afterProductCode) params.after_product_code = afterProductCode
        // 3-minute timeout: the pricing query can be slow on large catalogs.
        const res = await api.request({
          method: 'GET',
          url: '/pricing/products',
          params,
          timeout: 180000
        })
        const traceId = res?.headers?.['x-trace-id'] || null
        const hasMoreHeader = String(res?.headers?.['x-has-more'] || '').toLowerCase()
        const nextCursorHeader = toText(res?.headers?.['x-next-cursor'])
        const data = Array.isArray(res?.data) ? res.data : []
        const mapped = data.map((x, i) => mapRow(x, i, baseIndex))
        if (append) {
          // Merge new rows, skipping product codes already present.
          const merged = [...this.rows]
          const seen = new Set(this.rows.map((x) => x?.productCode))
          for (const row of mapped) {
            const key = row?.productCode
            if (key && seen.has(key)) continue
            merged.push(row)
            if (key) seen.add(key)
          }
          this.rows = merged
        } else {
          this.rows = mapped
        }
        // Prefer the explicit X-Has-More header; otherwise infer "more"
        // from receiving a full page.
        this.hasMore = hasMoreHeader ? hasMoreHeader === 'true' : mapped.length === limit
        console.info('[product-pricing][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - startedAt,
          row_count: this.rows.length,
          fetched_count: mapped.length,
          has_more: this.hasMore,
          next_cursor: nextCursorHeader || null
        })
        return {
          traceId,
          fetched: mapped.length,
          hasMore: this.hasMore,
          nextCursor: nextCursorHeader
        }
      } catch (err) {
        // On a fresh (non-append) load the grid is cleared; appended rows stay.
        if (!append) this.rows = []
        this.hasMore = false
        // NOTE(review): response data may be an object here; toText would then
        // yield "[object Object]" — confirm the backend sends string errors.
        const msg = err?.response?.data || err?.message || 'Urun fiyatlandirma listesi alinamadi'
        this.error = toText(msg)
        console.error('[product-pricing][frontend] request:error', {
          trace_id: err?.response?.headers?.['x-trace-id'] || null,
          duration_ms: Date.now() - startedAt,
          timeout_ms: err?.config?.timeout ?? null,
          status: err?.response?.status || null,
          message: this.error
        })
        throw err
      } finally {
        this.loading = false
      }
    },
    // Set a numeric cell on a row; comma decimal separators are accepted.
    updateCell (row, field, val) {
      if (!row || !field) return
      row[field] = toNumber(String(val ?? '').replace(',', '.'))
    },
    // Set the brand-group selection text on a row.
    updateBrandGroupSelection (row, val) {
      if (!row) return
      row.brandGroupSelection = toText(val)
    }
  }
})

View File

@@ -9,14 +9,16 @@ export const useDownloadstHeadStore = defineStore('downloadstHead', {
accountCode, accountCode,
startDate, startDate,
endDate, endDate,
parislemler parislemler,
langcode = 'TR'
) { ) {
try { try {
// ✅ Params (axios paramsSerializer array=repeat destekliyor) // ✅ Params (axios paramsSerializer array=repeat destekliyor)
const params = { const params = {
accountcode: accountCode, accountcode: accountCode,
startdate: startDate, startdate: startDate,
enddate: endDate enddate: endDate,
langcode: langcode || 'TR'
} }
if (Array.isArray(parislemler) && parislemler.length > 0) { if (Array.isArray(parislemler) && parislemler.length > 0) {

View File

@@ -7,13 +7,14 @@ export const useDownloadstpdfStore = defineStore('downloadstpdf', {
/* ========================================================== /* ==========================================================
📄 PDF İNDİR / AÇ 📄 PDF İNDİR / AÇ
========================================================== */ ========================================================== */
async downloadPDF(accountCode, startDate, endDate, parislemler = []) { async downloadPDF(accountCode, startDate, endDate, parislemler = [], langcode = 'TR') {
try { try {
// 🔹 Query params // 🔹 Query params
const params = { const params = {
accountcode: accountCode, accountcode: accountCode,
startdate: startDate, startdate: startDate,
enddate: endDate enddate: endDate,
langcode: langcode || 'TR'
} }
if (Array.isArray(parislemler) && parislemler.length > 0) { if (Array.isArray(parislemler) && parislemler.length > 0) {

View File

@@ -0,0 +1,35 @@
import { defineStore } from 'pinia'
import { computed, ref } from 'vue'
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
import { DEFAULT_LOCALE, normalizeLocale, toBackendLangCode } from 'src/i18n/languages'
const STORAGE_KEY = 'bss.locale'
/**
 * Resolve the initial UI locale from localStorage.
 * SSR-safe: without a window object the default locale is returned.
 */
function readInitialLocale() {
  if (typeof window === 'undefined') return DEFAULT_LOCALE
  const stored = window.localStorage.getItem(STORAGE_KEY)
  return normalizeLocale(stored)
}
/**
 * Setup-style Pinia store holding the active UI locale.
 * Exposes the reactive locale, the derived backend language code, and a
 * setter that also syncs dayjs and persists the choice.
 */
export const useLocaleStore = defineStore('locale', () => {
  // Current locale, seeded from localStorage (or the default when absent).
  const locale = ref(readInitialLocale())

  // Apply the initial locale to dayjs once, on store creation.
  applyDayjsLocale(locale.value)

  // Language code the backend expects for the current locale.
  const backendLangCode = computed(() => toBackendLangCode(locale.value))

  /**
   * Switch the active locale: normalize the input, update dayjs, and
   * persist the choice for future sessions.
   */
  const setLocale = (nextLocale) => {
    const normalized = normalizeLocale(nextLocale)
    locale.value = normalized
    applyDayjsLocale(normalized)
    if (typeof window !== 'undefined') {
      window.localStorage.setItem(STORAGE_KEY, normalized)
    }
  }

  return {
    locale,
    backendLangCode,
    setLocale
  }
})

View File

@@ -212,6 +212,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
orders: [], orders: [],
header: {}, header: {},
summaryRows: [], summaryRows: [],
originalHeader: {},
originalLines: [],
lastSavedAt: null, lastSavedAt: null,
@@ -534,6 +536,62 @@ export const useOrderEntryStore = defineStore('orderentry', {
const normalized = Array.isArray(lines) ? lines : [] const normalized = Array.isArray(lines) ? lines : []
const mapLabel = (ln) => this.buildMailLineLabel(ln) const mapLabel = (ln) => this.buildMailLineLabel(ln)
const formatDate = (d) => {
if (!d) return ''
const s = String(d).split('T')[0]
return s
}
const getLineDueDate = (ln) => (
formatDate(
ln?.DueDate ||
ln?.DeliveryDate ||
ln?.PlannedDateOfLading ||
''
)
)
const oldDate = formatDate(this.originalHeader?.AverageDueDate)
const newDate = formatDate(this.header?.AverageDueDate)
const origMap = new Map()
if (Array.isArray(this.originalLines)) {
this.originalLines.forEach(ln => {
if (ln.OrderLineID) origMap.set(String(ln.OrderLineID), ln)
})
}
const buildDueDateChanges = () => {
const out = []
const seen = new Set()
normalized.forEach(ln => {
if (ln?._deleteSignal || !ln?.OrderLineID) return
const orig = origMap.get(String(ln.OrderLineID))
if (!orig) return
const itemCode = String(ln?.ItemCode || '').trim().toUpperCase()
const colorCode = String(ln?.ColorCode || '').trim().toUpperCase()
const itemDim2Code = String(ln?.ItemDim2Code || '').trim().toUpperCase()
const oldLnDate = getLineDueDate(orig)
const newLnDate = getLineDueDate(ln)
if (!itemCode || !newLnDate || oldLnDate === newLnDate) return
const key = [itemCode, colorCode, itemDim2Code, oldLnDate, newLnDate].join('||')
if (seen.has(key)) return
seen.add(key)
out.push({
itemCode,
colorCode,
itemDim2Code,
oldDueDate: oldLnDate,
newDueDate: newLnDate
})
})
return out
}
if (isNew) { if (isNew) {
return { return {
operation: 'create', operation: 'create',
@@ -543,7 +601,10 @@ export const useOrderEntryStore = defineStore('orderentry', {
normalized normalized
.filter(ln => !ln?._deleteSignal) .filter(ln => !ln?._deleteSignal)
.map(mapLabel) .map(mapLabel)
) ),
oldDueDate: '',
newDueDate: '',
dueDateChanges: []
} }
} }
@@ -553,11 +614,22 @@ export const useOrderEntryStore = defineStore('orderentry', {
.map(mapLabel) .map(mapLabel)
) )
const updatedItems = uniq( const updatedItems = []
normalized
.filter(ln => !ln?._deleteSignal && !!ln?.OrderLineID && ln?._dirty === true) normalized.forEach(ln => {
.map(mapLabel) if (!ln?._deleteSignal && !!ln?.OrderLineID && ln?._dirty === true) {
) let label = mapLabel(ln)
const orig = origMap.get(String(ln.OrderLineID))
if (orig) {
const oldLnDate = getLineDueDate(orig)
const newLnDate = getLineDueDate(ln)
if (newLnDate && oldLnDate !== newLnDate) {
label += ` (Termin: ${oldLnDate} -> ${newLnDate})`
}
}
updatedItems.push(label)
}
})
const addedItems = uniq( const addedItems = uniq(
normalized normalized
@@ -568,8 +640,11 @@ export const useOrderEntryStore = defineStore('orderentry', {
return { return {
operation: 'update', operation: 'update',
deletedItems, deletedItems,
updatedItems, updatedItems: uniq(updatedItems),
addedItems addedItems,
oldDueDate: oldDate,
newDueDate: newDate,
dueDateChanges: buildDueDateChanges()
} }
} }
, ,
@@ -586,7 +661,10 @@ export const useOrderEntryStore = defineStore('orderentry', {
operation: payload?.operation || 'create', operation: payload?.operation || 'create',
deletedItems: Array.isArray(payload?.deletedItems) ? payload.deletedItems : [], deletedItems: Array.isArray(payload?.deletedItems) ? payload.deletedItems : [],
updatedItems: Array.isArray(payload?.updatedItems) ? payload.updatedItems : [], updatedItems: Array.isArray(payload?.updatedItems) ? payload.updatedItems : [],
addedItems: Array.isArray(payload?.addedItems) ? payload.addedItems : [] addedItems: Array.isArray(payload?.addedItems) ? payload.addedItems : [],
oldDueDate: payload?.oldDueDate || '',
newDueDate: payload?.newDueDate || '',
dueDateChanges: Array.isArray(payload?.dueDateChanges) ? payload.dueDateChanges : []
}) })
return res?.data || {} return res?.data || {}
} catch (err) { } catch (err) {
@@ -598,6 +676,69 @@ export const useOrderEntryStore = defineStore('orderentry', {
} }
, ,
/**
 * Bulk-set the due date on every line of an order via
 * POST /order/:id/bulk-due-date. Falls back to the current header's
 * OrderHeaderID when no explicit orderId is given.
 * Throws an Error with a human-readable detail on failure.
 */
async bulkUpdateOrderLineDueDate(orderId, dueDate) {
  const id = String(orderId || this.header?.OrderHeaderID || '').trim()
  const dateText = String(dueDate || '').trim()
  if (!id) {
    throw new Error('Siparis ID bulunamadi')
  }
  if (!dateText) {
    throw new Error('Termin tarihi secilmedi')
  }
  try {
    this.loading = true
    const res = await api.post(`/order/${encodeURIComponent(id)}/bulk-due-date`, {
      dueDate: dateText
    })
    return res?.data || {}
  } catch (err) {
    // Surface a readable message; log status + detail for the console trail.
    const detail = await extractApiErrorDetail(err)
    const status = err?.status || err?.response?.status || '-'
    console.error(`❌ bulkUpdateOrderLineDueDate hata [${status}] order=${id}: ${detail}`)
    throw new Error(detail)
  } finally {
    this.loading = false
  }
}
,
/**
 * Mirror a bulk due-date change in local state without refetching:
 * patches all date aliases on every order/summary row and updates the
 * header's AverageDueDate. originalHeader/originalLines are intentionally
 * left untouched so the submit-mail diff can still report the change.
 */
applyBulkLineDueDateLocally(dueDate) {
  const dateText = String(dueDate || '').trim()
  if (!dateText) return
  const hadUnsavedChanges = this.hasUnsavedChanges
  // Patch every known date field so all consumers see the new value.
  const patchRow = (row) => ({
    ...row,
    terminTarihi: dateText,
    DueDate: dateText,
    DeliveryDate: dateText,
    PlannedDateOfLading: dateText
  })
  this.orders = Array.isArray(this.orders)
    ? this.orders.map(patchRow)
    : []
  this.summaryRows = Array.isArray(this.summaryRows)
    ? this.summaryRows.map(patchRow)
    : []
  this.header = {
    ...(this.header || {}),
    AverageDueDate: dateText
  }
  // Keep originalHeader/originalLines untouched for submit-mail diff.
  // Otherwise due-date change table becomes empty.
  this.persistLocalStorage?.()
  if (!hadUnsavedChanges) {
    this.markAsSaved?.()
  }
}
,
async downloadOrderPdf(id = null) { async downloadOrderPdf(id = null) {
try { try {
const orderId = id || this.header?.OrderHeaderID const orderId = id || this.header?.OrderHeaderID
@@ -1113,6 +1254,10 @@ export const useOrderEntryStore = defineStore('orderentry', {
this.orders = Array.isArray(normalized) ? normalized : [] this.orders = Array.isArray(normalized) ? normalized : []
this.summaryRows = [...this.orders] this.summaryRows = [...this.orders]
// 💾 Snapshot for email comparison (v3.5)
this.originalHeader = JSON.parse(JSON.stringify(this.header))
this.originalLines = JSON.parse(JSON.stringify(this.summaryRows))
/* ======================================================= /* =======================================================
🔹 MODE KARARI (BACKEND SATIRLARI ÜZERİNDEN) 🔹 MODE KARARI (BACKEND SATIRLARI ÜZERİNDEN)
- herhangi bir isClosed=true → view - herhangi bir isClosed=true → view
@@ -3142,6 +3287,14 @@ export const useOrderEntryStore = defineStore('orderentry', {
if (!serverOrderId) { if (!serverOrderId) {
throw new Error('OrderHeaderID backendden dönmedi') throw new Error('OrderHeaderID backendden dönmedi')
} }
const mailPayload = this.buildOrderMailPayload(lines, isNew)
console.info('[orderentryStore] mail payload prepared', {
operation: mailPayload?.operation,
deletedCount: Array.isArray(mailPayload?.deletedItems) ? mailPayload.deletedItems.length : 0,
updatedCount: Array.isArray(mailPayload?.updatedItems) ? mailPayload.updatedItems.length : 0,
addedCount: Array.isArray(mailPayload?.addedItems) ? mailPayload.addedItems.length : 0,
dueDateChangeCount: Array.isArray(mailPayload?.dueDateChanges) ? mailPayload.dueDateChanges.length : 0
})
purgeNewDraftOnExit = isNew purgeNewDraftOnExit = isNew
/* ======================================================= /* =======================================================
@@ -3201,7 +3354,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
// 📧 Piyasa eşleşen alıcılara sipariş PDF gönderimi (kayıt başarılı olduktan sonra) // 📧 Piyasa eşleşen alıcılara sipariş PDF gönderimi (kayıt başarılı olduktan sonra)
try { try {
const mailPayload = this.buildOrderMailPayload(lines, isNew) // UPDATE durumunda da mail gönderimi istendiği için isNew kontrolü kaldırıldı (v3.5)
const mailRes = await this.sendOrderToMarketMails(serverOrderId, mailPayload) const mailRes = await this.sendOrderToMarketMails(serverOrderId, mailPayload)
const sentCount = Number(mailRes?.sentCount || 0) const sentCount = Number(mailRes?.sentCount || 0)
$q.notify({ $q.notify({
@@ -3480,7 +3633,9 @@ export const useOrderEntryStore = defineStore('orderentry', {
IsCancelOrder: boolOr(this.header?.IsCancelOrder, false), IsCancelOrder: boolOr(this.header?.IsCancelOrder, false),
IsInclutedVat: boolOr(this.header?.IsInclutedVat, false), IsInclutedVat: boolOr(this.header?.IsInclutedVat, false),
IsCreditSale: boolOr(this.header?.IsCreditSale, true), IsCreditSale: boolOr(this.header?.IsCreditSale, true),
IsCreditableConfirmed: boolOr(this.header?.IsCreditableConfirmed, false), IsCreditableConfirmed: true,
CreditableConfirmedUser: who,
CreditableConfirmedDate: formatDateTime(now),
IsSalesViaInternet: boolOr(this.header?.IsSalesViaInternet, false), IsSalesViaInternet: boolOr(this.header?.IsSalesViaInternet, false),
IsSuspended: boolOr(this.header?.IsSuspended, false), IsSuspended: boolOr(this.header?.IsSuspended, false),
IsCompleted: boolOr(this.header?.IsCompleted, true), IsCompleted: boolOr(this.header?.IsCompleted, true),

View File

@@ -0,0 +1,143 @@
import { defineStore } from 'pinia'
import api from 'src/services/api'
/**
 * Pinia store for the translation management screen.
 * Wraps the /language/translations endpoints: listing, per-row and bulk
 * updates, source sync, and machine-translation of selected rows.
 */
export const useTranslationStore = defineStore('translation', {
  state: () => ({
    loading: false, // true while fetchRows is in flight
    saving: false,  // true while any mutating call is in flight
    rows: [],       // translation rows as returned by the backend
    count: 0        // total count reported by the backend (see fetchRows)
  }),
  actions: {
    /**
     * Load translation rows filtered by `filters` (passed straight through
     * as query params). With options.append=true, incoming rows are appended
     * and deduped by id.
     */
    async fetchRows (filters = {}, options = {}) {
      this.loading = true
      const append = Boolean(options?.append)
      try {
        const res = await api.get('/language/translations', { params: filters })
        const payload = res?.data || {}
        const incoming = Array.isArray(payload.rows) ? payload.rows : []
        if (append) {
          // Dedupe by row id so re-fetching a page does not duplicate rows.
          const merged = [...this.rows]
          const seen = new Set(this.rows.map((x) => x?.id))
          for (const row of incoming) {
            const id = row?.id
            if (!seen.has(id)) {
              merged.push(row)
              seen.add(id)
            }
          }
          this.rows = merged
        } else {
          this.rows = incoming
        }
        // NOTE(review): a backend count of 0 falls back to rows.length —
        // confirm this is intended for empty filter results.
        this.count = Number(payload.count) || this.rows.length
      } finally {
        this.loading = false
      }
    },
    /** Update a single translation row by id; returns the backend response. */
    async updateRow (id, payload) {
      this.saving = true
      try {
        const res = await api.put(`/language/translations/${id}`, payload)
        return res?.data || null
      } finally {
        this.saving = false
      }
    },
    /**
     * Create missing translation entries for the given items across the
     * given target languages (defaults to the supported language set).
     */
    async upsertMissing (items, languages = ['en', 'de', 'it', 'es', 'ru', 'ar']) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/upsert-missing', {
          items: Array.isArray(items) ? items : [],
          languages: Array.isArray(languages) ? languages : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    },
    /**
     * Trigger a source-string sync on the backend. The client timeout is
     * disabled (timeout: 0) because the sync can run long; start/finish are
     * logged with a trace id for correlation.
     */
    async syncSources (payload = {}) {
      this.saving = true
      const startedAt = Date.now()
      console.info('[translation-sync][frontend] request:start', {
        at: new Date(startedAt).toISOString(),
        payload
      })
      try {
        const res = await api.post('/language/translations/sync-sources', payload, { timeout: 0 })
        const data = res?.data || null
        const traceId = data?.trace_id || data?.result?.trace_id || res?.headers?.['x-trace-id'] || null
        console.info('[translation-sync][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - startedAt,
          result: data?.result || null
        })
        return data
      } catch (err) {
        console.error('[translation-sync][frontend] request:error', {
          duration_ms: Date.now() - startedAt,
          message: err?.message || 'sync-sources failed'
        })
        throw err
      } finally {
        this.saving = false
      }
    },
    /**
     * Machine-translate the rows selected in `payload`. Like syncSources,
     * the client timeout is disabled and timing is logged.
     */
    async translateSelected (payload = {}) {
      this.saving = true
      const startedAt = Date.now()
      console.info('[translation-selected][frontend] request:start', {
        at: new Date(startedAt).toISOString(),
        payload
      })
      try {
        const res = await api.post('/language/translations/translate-selected', payload, { timeout: 0 })
        const data = res?.data || null
        const traceId = data?.trace_id || res?.headers?.['x-trace-id'] || null
        console.info('[translation-selected][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - startedAt,
          translated_count: data?.translated_count || 0
        })
        return data
      } catch (err) {
        console.error('[translation-selected][frontend] request:error', {
          duration_ms: Date.now() - startedAt,
          message: err?.message || 'translate-selected failed'
        })
        throw err
      } finally {
        this.saving = false
      }
    },
    /** Approve the translation rows with the given ids in one call. */
    async bulkApprove (ids = []) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/bulk-approve', {
          ids: Array.isArray(ids) ? ids : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    },
    /** Persist edits for multiple translation rows in one call. */
    async bulkUpdate (items = []) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/bulk-update', {
          items: Array.isArray(items) ? items : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    }
  }
})