Compare commits

..

23 Commits

Author SHA1 Message Date
M_Kececi
e6ae925f1c Merge remote-tracking branch 'origin/master' 2026-04-20 10:32:07 +03:00
M_Kececi
18c9a99a57 Merge remote-tracking branch 'origin/master' 2026-04-20 10:19:02 +03:00
M_Kececi
8462df878f Merge remote-tracking branch 'origin/master' 2026-04-20 10:04:45 +03:00
M_Kececi
7ef12df93a Merge remote-tracking branch 'origin/master' 2026-04-20 09:42:34 +03:00
M_Kececi
a1f5c653c6 Merge remote-tracking branch 'origin/master' 2026-04-20 08:50:41 +03:00
M_Kececi
c6bdf83f05 Merge remote-tracking branch 'origin/master' 2026-04-17 12:16:50 +03:00
M_Kececi
f9728b8a4c Merge remote-tracking branch 'origin/master' 2026-04-16 17:46:50 +03:00
M_Kececi
307282928c Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:59 +03:00
M_Kececi
29909f3609 Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:55 +03:00
M_Kececi
bb856cb082 Merge remote-tracking branch 'origin/master' 2026-04-16 16:25:45 +03:00
M_Kececi
b065e7192d Merge remote-tracking branch 'origin/master' 2026-04-16 16:00:24 +03:00
M_Kececi
2d369e7d7d Merge remote-tracking branch 'origin/master' 2026-04-16 15:18:44 +03:00
M_Kececi
1831c45a0c Merge remote-tracking branch 'origin/master' 2026-04-15 17:03:25 +03:00
M_Kececi
1a80184cac Merge remote-tracking branch 'origin/master' 2026-04-15 16:43:21 +03:00
M_Kececi
5be7315bdb Merge remote-tracking branch 'origin/master' 2026-04-15 15:54:44 +03:00
M_Kececi
c925af5ba1 Merge remote-tracking branch 'origin/master' 2026-04-14 18:04:19 +03:00
M_Kececi
352a7e26ea Merge remote-tracking branch 'origin/master' 2026-04-14 17:53:58 +03:00
M_Kececi
9ee70eb05a Merge remote-tracking branch 'origin/master' 2026-04-14 17:52:38 +03:00
M_Kececi
8694511e79 Merge remote-tracking branch 'origin/master' 2026-04-14 17:46:15 +03:00
M_Kececi
69ba4b2ecb Merge remote-tracking branch 'origin/master' 2026-04-14 17:34:46 +03:00
M_Kececi
eb628e99c2 Merge remote-tracking branch 'origin/master' 2026-04-14 17:23:24 +03:00
M_Kececi
431441802e Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:18 +03:00
M_Kececi
7457d95bac Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:14 +03:00
50 changed files with 5907 additions and 634 deletions

View File

@@ -0,0 +1,52 @@
# i18n + Dinamik Çeviri Standardı
Bu projede çok dilli yapı iki katmanlıdır:
1. Statik UI metinleri `i18n` ile yönetilir.
2. Dinamik içerikler `mk_translator` + otomatik çeviri servisi (OpenAI) ile yönetilir.
## 1) Statik UI (Deterministik)
Kullanım alanı:
- buton metinleri
- menüler
- form label'ları
- validasyon mesajları
- sabit ekran başlıkları
- route/meta/title
Kural:
- her metin key bazlı tutulur (`$t('common.save')`)
- locale dosyaları: `tr`, `en`, `de`, `it`, `es`, `ru`, `ar`
- fallback sırası: hedef dil -> `en` -> `tr`
## 2) Dinamik İçerik (DB/CMS/Serbest metin)
Akış:
1. Kaynak metin için `mk_translator` kontrol edilir.
2. Hedef dil karşılığı yoksa OpenAI ile çeviri üretilir.
3. Sonuç `mk_translator` tablosuna yazılır.
4. Sonraki isteklerde DB sonucu kullanılır (cache etkisi).
Kullanım alanı:
- ürün/kategori açıklamaları
- CMS içerikleri
- admin panelden girilen serbest metinler
- şablon bazlı metin içerikleri
## Kalite ve Güvenlik Kuralları
- Prompt net olmalı: sadece çeviri dönsün, açıklama eklemesin.
- Placeholder/format korunsun: `{name}`, `{{count}}`, `%s` gibi yapılar bozulmasın.
- HTML tag'leri ve kod/SKU değerleri çevrilmesin.
- API key sadece backend'de tutulur (`OPENAI_API_KEY` client'a verilmez).
- 429/5xx için retry + exponential backoff uygulanır.
- Hassas veri içeriği olan metinlerde veri politikası kontrolü yapılır.
## Özet
Bu servis, `i18n`'in alternatifi değildir; `i18n`'i tamamlayan dinamik çeviri katmanıdır.
- Statik UI: `i18n`
- Dinamik içerik: `mk_translator` + OpenAI + cache

View File

@@ -16,4 +16,9 @@
| Cloudflare | bt@baggi.com.tr | Baggi2025!.? |
| 172.16.0.3 | ct | pasauras |
## Dil ve Çeviri Standardı
Detaylı mimari dokümanı:
- [docs/i18n-dynamic-translation-standard.md](docs/i18n-dynamic-translation-standard.md)

View File

@@ -0,0 +1,48 @@
-- language_module_seed.sql
-- Idempotent seed for the "language" module: registers its routes and clones
-- role / role+department permissions from the existing "system" module.
-- Safe to re-run: every INSERT is guarded by ON CONFLICT.
-- 1) Register language module routes if missing.
--    NOTE(review): the GET route is deliberately mapped to the 'update'
--    action — it mirrors the Go route bindings in this changeset; confirm
--    that read access really should require 'update'.
INSERT INTO mk_sys_routes (path, method, module_code, action)
VALUES
('/api/language/translations', 'GET', 'language', 'update'),
('/api/language/translations/{id}', 'PUT', 'language', 'update'),
('/api/language/translations/upsert-missing', 'POST', 'language', 'update'),
('/api/language/translations/sync-sources', 'POST', 'language', 'update'),
('/api/language/translations/translate-selected', 'POST', 'language', 'update'),
('/api/language/translations/bulk-approve', 'POST', 'language', 'update'),
('/api/language/translations/bulk-update', 'POST', 'language', 'update')
ON CONFLICT (path, method) DO UPDATE
SET
module_code = EXCLUDED.module_code,
action = EXCLUDED.action;
-- 2) Remove legacy system translation routes (optional, destructive cleanup).
--    Deletes every route whose path starts with /api/system/translations.
DELETE FROM mk_sys_routes
WHERE path LIKE '/api/system/translations%';
-- 3) Seed role permissions for the language module by cloning the existing
--    "system" module permissions for the standard action set.
--    ON CONFLICT DO NOTHING (no explicit target) relies on the table's
--    unique constraint over (role_id, module_code, action) — verify it exists.
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT rp.role_id, 'language', rp.action, rp.allowed
FROM mk_sys_role_permissions rp
WHERE rp.module_code = 'system'
AND rp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;
-- 4) Ensure the admin role can update translations.
--    NOTE(review): role id 3 is hardcoded as admin here (same convention as
--    the route auto-registration code in this changeset) — confirm.
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT r.id, 'language', 'update', true
FROM dfrole r
WHERE r.id = 3
ON CONFLICT DO NOTHING;
-- 5) Seed role+department permissions for the language module by cloning
--    the "system" module rows for the same action set.
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT DISTINCT
rdp.role_id,
rdp.department_code,
'language',
rdp.action,
rdp.allowed
FROM mk_sys_role_department_permissions rdp
WHERE rdp.module_code = 'system'
AND rdp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;

View File

@@ -32,3 +32,6 @@ API_HOST=0.0.0.0
API_PORT=8080
AZURE_TRANSLATOR_KEY=d055c693-a84e-4594-8aef-a6c05c42623a
AZURE_TRANSLATOR_ENDPOINT=https://api.cognitive.microsofttranslator.com
AZURE_TRANSLATOR_REGION=westeurope

View File

@@ -0,0 +1,72 @@
package main

import (
	"bssapp-backend/db"
	"bssapp-backend/routes"
	"fmt"
	"log"
	"os"
	"strconv"
	"strings"

	"github.com/joho/godotenv"
)

// syncLimitFromEnv returns the row limit for a sync run, taken from
// TRANSLATION_SYNC_LIMIT when it parses as a positive integer, otherwise the
// provided default. Unlike the previous inline logic, an unparsable or
// non-positive value is logged instead of being silently ignored.
func syncLimitFromEnv(def int) int {
	raw := os.Getenv("TRANSLATION_SYNC_LIMIT")
	if raw == "" {
		return def
	}
	parsed, err := strconv.Atoi(raw)
	if err != nil || parsed <= 0 {
		log.Printf("TRANSLATION_SYNC_LIMIT=%q is not a positive integer; using default %d", raw, def)
		return def
	}
	return parsed
}

// syncLangsFromEnv returns the target language codes, parsed from the
// comma-separated TRANSLATION_SYNC_LANGS (entries are trimmed and
// lower-cased). Falls back to def when the variable is empty or yields no
// non-empty entries.
func syncLangsFromEnv(def []string) []string {
	raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LANGS"))
	if raw == "" {
		return def
	}
	parts := strings.Split(raw, ",")
	custom := make([]string, 0, len(parts))
	for _, p := range parts {
		if v := strings.TrimSpace(strings.ToLower(p)); v != "" {
			custom = append(custom, v)
		}
	}
	if len(custom) == 0 {
		return def
	}
	return custom
}

// autoTranslateFromEnv reports whether automatic translation is enabled.
// Only the explicit values "0", "false" and "off" (case-insensitive, after
// trimming) disable it; anything else — including an unset variable — keeps
// the default of true.
func autoTranslateFromEnv() bool {
	switch strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_AUTO_TRANSLATE"))) {
	case "0", "false", "off":
		return false
	default:
		return true
	}
}

// main runs a one-shot translation sync: it connects to MSSQL (source) and
// Postgres (translation store), reads its options from the environment, and
// delegates to routes.PerformTranslationSync, printing a summary on success.
func main() {
	// Best effort: missing env files are not fatal.
	_ = godotenv.Load(".env", "mail.env", ".env.local")

	if err := db.ConnectMSSQL(); err != nil {
		log.Fatalf("mssql connect failed: %v", err)
	}
	pgDB, err := db.ConnectPostgres()
	if err != nil {
		log.Fatalf("postgres connect failed: %v", err)
	}
	defer pgDB.Close()

	result, err := routes.PerformTranslationSync(pgDB, db.MssqlDB, routes.TranslationSyncOptions{
		AutoTranslate: autoTranslateFromEnv(),
		Languages:     syncLangsFromEnv([]string{"en", "de", "it", "es", "ru", "ar"}),
		Limit:         syncLimitFromEnv(30000),
		OnlyNew:       true,
	})
	if err != nil {
		log.Fatalf("manual sync failed: %v", err)
	}
	fmt.Printf("translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v\n",
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
		result.TargetLangs,
	)
}

122
svc/internal/i18n/lang.go Normal file
View File

@@ -0,0 +1,122 @@
package i18n

import "strings"

// DefaultLang is the fallback language code returned whenever an empty or
// unsupported code is supplied.
const DefaultLang = "TR"

// supported is the closed set of language codes this package accepts.
var supported = map[string]struct{}{
	"TR": {},
	"EN": {},
	"DE": {},
	"IT": {},
	"ES": {},
	"RU": {},
	"AR": {},
}

// NormalizeLangCode trims and upper-cases raw and returns it when it is one
// of the supported codes; any other input yields DefaultLang.
func NormalizeLangCode(raw string) string {
	code := strings.ToUpper(strings.TrimSpace(raw))
	if _, ok := supported[code]; !ok {
		return DefaultLang
	}
	return code
}
// ResolveLangCode picks the effective language: an explicit query parameter
// wins when it names a supported language (including an explicit "tr", which
// normalizes to the default); otherwise the first entry of the
// Accept-Language header is consulted; otherwise DefaultLang.
func ResolveLangCode(queryLangCode, acceptLanguage string) string {
	normalized := NormalizeLangCode(queryLangCode)
	if normalized != DefaultLang {
		return normalized
	}
	// NormalizeLangCode maps both "tr" and garbage to DefaultLang; only an
	// explicit "tr" should short-circuit the header lookup.
	if strings.EqualFold(strings.TrimSpace(queryLangCode), DefaultLang) {
		return DefaultLang
	}
	header := strings.TrimSpace(acceptLanguage)
	if header == "" {
		return DefaultLang
	}
	// Take the first entry, drop any ";q=..." weight, keep the 2-letter base.
	primary := strings.Split(header, ",")[0]
	primary = strings.TrimSpace(strings.Split(primary, ";")[0])
	if len(primary) < 2 {
		return DefaultLang
	}
	return NormalizeLangCode(primary[:2])
}
// T looks key up in the dictionary of langCode, walking the fallback chain
// produced by fallbackLangs. When no language provides the key, the key
// itself is returned so missing translations stay visible.
func T(langCode, key string) string {
	for _, candidate := range fallbackLangs(langCode) {
		translations, ok := dict[candidate]
		if !ok {
			continue
		}
		if val, found := translations[key]; found {
			return val
		}
	}
	return key
}
// fallbackLangs returns the lookup order for a language code:
// TR stands alone, EN falls back to TR, and every other supported
// language falls back through EN to TR.
func fallbackLangs(langCode string) []string {
	lang := NormalizeLangCode(langCode)
	if lang == "TR" {
		return []string{"TR"}
	}
	if lang == "EN" {
		return []string{"EN", "TR"}
	}
	return []string{lang, "EN", "TR"}
}
// dict holds the static PDF label strings per language code, keyed as
// "pdf.<section>.<field>". Only TR and EN are materialized; the other
// supported codes resolve through the fallback chain (lang -> EN -> TR).
var dict = map[string]map[string]string{
	// Turkish — the source language of the labels.
	"TR": {
		"pdf.report_title": "Cari Hesap Raporu",
		"pdf.date": "Tarih",
		"pdf.customer": "Cari",
		"pdf.date_range": "Tarih Aralığı",
		"pdf.page": "Sayfa",
		"pdf.ending_balance": "Son Bakiye",
		"pdf.currency_prefix": "Para Birimi",
		"pdf.balance_prefix": "Bakiye",
		"pdf.main.doc_no": "Belge No",
		"pdf.main.date": "Tarih",
		"pdf.main.due_date": "Vade",
		"pdf.main.operation": "İşlem",
		"pdf.main.description": "Açıklama",
		"pdf.main.currency": "Para",
		"pdf.main.debit": "Borç",
		"pdf.main.credit": "Alacak",
		"pdf.main.balance": "Bakiye",
		"pdf.detail.main_group": "Ana Grup",
		"pdf.detail.sub_group": "Alt Grup",
		"pdf.detail.waiter": "Garson",
		"pdf.detail.fit": "Fit",
		"pdf.detail.content": "İçerik",
		"pdf.detail.product": "Ürün",
		"pdf.detail.color": "Renk",
		"pdf.detail.qty": "Adet",
		"pdf.detail.price": "Fiyat",
		"pdf.detail.total": "Tutar",
	},
	// English translations of the same keys.
	"EN": {
		"pdf.report_title": "Customer Account Report",
		"pdf.date": "Date",
		"pdf.customer": "Customer",
		"pdf.date_range": "Date Range",
		"pdf.page": "Page",
		"pdf.ending_balance": "Ending Balance",
		"pdf.currency_prefix": "Currency",
		"pdf.balance_prefix": "Balance",
		"pdf.main.doc_no": "Document No",
		"pdf.main.date": "Date",
		"pdf.main.due_date": "Due Date",
		"pdf.main.operation": "Operation",
		"pdf.main.description": "Description",
		"pdf.main.currency": "Curr.",
		"pdf.main.debit": "Debit",
		"pdf.main.credit": "Credit",
		"pdf.main.balance": "Balance",
		"pdf.detail.main_group": "Main Group",
		"pdf.detail.sub_group": "Sub Group",
		"pdf.detail.waiter": "Waiter",
		"pdf.detail.fit": "Fit",
		"pdf.detail.content": "Content",
		"pdf.detail.product": "Product",
		"pdf.detail.color": "Color",
		"pdf.detail.qty": "Qty",
		"pdf.detail.price": "Price",
		"pdf.detail.total": "Total",
	},
}

View File

@@ -3,6 +3,7 @@ package mailer
import (
"context"
"crypto/tls"
"encoding/base64"
"errors"
"fmt"
"net"
@@ -138,11 +139,13 @@ func (m *Mailer) Send(ctx context.Context, msg Message) error {
}
func buildMIME(from string, to []string, subject, contentType, body string) string {
// Subject UTF-8 basit hali (gerekirse sonra MIME encoded-word ekleriz)
// Encode Subject to UTF-8
encodedSubject := "=?UTF-8?B?" + base64.StdEncoding.EncodeToString([]byte(subject)) + "?="
headers := []string{
"From: " + from,
"To: " + strings.Join(to, ", "),
"Subject: " + subject,
"Subject: " + encodedSubject,
"MIME-Version: 1.0",
"Content-Type: " + contentType,
"",

View File

@@ -104,7 +104,26 @@ func autoRegisterRouteV3(
return
}
// 2) ADMIN AUTO PERMISSION (module+action bazlı)
// 2) MODULE LOOKUP AUTO SEED (permission ekranları için)
moduleLabel := strings.TrimSpace(strings.ReplaceAll(module, "_", " "))
if moduleLabel == "" {
moduleLabel = module
}
_, err = tx.Exec(`
INSERT INTO mk_sys_modules (code, name)
VALUES ($1::text, $2::text)
ON CONFLICT (code) DO UPDATE
SET name = COALESCE(NULLIF(EXCLUDED.name, ''), mk_sys_modules.name)
`,
module,
moduleLabel,
)
if err != nil {
log.Printf("❌ Module seed error (%s %s): %v", method, path, err)
return
}
// 3) ROLE PERMISSION AUTO SEED (admin=true, diğer roller=false)
_, err = tx.Exec(`
INSERT INTO mk_sys_role_permissions
(role_id, module_code, action, allowed)
@@ -112,16 +131,50 @@ func autoRegisterRouteV3(
id,
$1,
$2,
true
CASE
WHEN id = 3 OR LOWER(code) = 'admin' THEN true
ELSE false
END
FROM dfrole
WHERE id = 3 -- ADMIN
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Admin perm seed error (%s %s): %v", method, path, err)
log.Printf("❌ Role perm seed error (%s %s): %v", method, path, err)
return
}
// 4) ROLE+DEPARTMENT PERMISSION AUTO SEED
// Existing role+department kombinasyonlarına yeni module+action satırı eklenir.
_, err = tx.Exec(`
WITH role_dept_scope AS (
SELECT DISTINCT role_id, department_code
FROM mk_sys_role_department_permissions
UNION
SELECT 3 AS role_id, d.code AS department_code
FROM mk_dprt d
)
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT
rds.role_id,
rds.department_code,
$1,
$2,
CASE
WHEN rds.role_id = 3 THEN true
ELSE false
END
FROM role_dept_scope rds
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Role+Dept perm seed error (%s %s): %v", method, path, err)
return
}
@@ -265,6 +318,41 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"system", "update",
wrapV3(routes.SaveMarketMailMappingHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations", "GET",
"language", "update",
wrapV3(routes.GetTranslationRowsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/{id}", "PUT",
"language", "update",
wrapV3(routes.UpdateTranslationRowHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/upsert-missing", "POST",
"language", "update",
wrapV3(routes.UpsertMissingTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/sync-sources", "POST",
"language", "update",
wrapV3(routes.SyncTranslationSourcesHandler(pgDB, mssql)),
)
bindV3(r, pgDB,
"/api/language/translations/translate-selected", "POST",
"language", "update",
wrapV3(routes.TranslateSelectedTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-approve", "POST",
"language", "update",
wrapV3(routes.BulkApproveTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-update", "POST",
"language", "update",
wrapV3(routes.BulkUpdateTranslationsHandler(pgDB)),
)
// ============================================================
// PERMISSIONS
@@ -759,6 +847,11 @@ func main() {
auditlog.Init(pgDB, 1000)
log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)")
// -------------------------------------------------------
// 🚀 TRANSLATION QUERY PERFORMANCE INDEXES
// -------------------------------------------------------
routes.EnsureTranslationPerfIndexes(pgDB)
// -------------------------------------------------------
// ✉️ MAILER INIT
// -------------------------------------------------------
@@ -777,6 +870,7 @@ func main() {
// 🌍 SERVER
// -------------------------------------------------------
router := InitRoutes(pgDB, db.MssqlDB, graphMailer)
startTranslationSyncScheduler(pgDB, db.MssqlDB)
handler := enableCORS(
middlewares.GlobalAuthMiddleware(

16
svc/models/translator.go Normal file
View File

@@ -0,0 +1,16 @@
package models

import "time"

// TranslatorRow represents a single dynamic-translation record — presumably
// one row of the mk_translator table referenced elsewhere in this changeset;
// TODO confirm the backing table.
type TranslatorRow struct {
	ID             int64     `json:"id"`              // surrogate primary key
	TKey           string    `json:"t_key"`           // translation key
	LangCode       string    `json:"lang_code"`       // target language code
	SourceType     string    `json:"source_type"`     // origin/category of the source text
	SourceTextTR   string    `json:"source_text_tr"`  // Turkish source text
	TranslatedText string    `json:"translated_text"` // translated value for LangCode
	IsManual       bool      `json:"is_manual"`       // true when edited by hand — presumably distinguishes manual from auto-translated rows; confirm
	Status         string    `json:"status"`          // workflow status (value set not visible here)
	Provider       string    `json:"provider"`        // translation provider identifier
	UpdatedAt      time.Time `json:"updated_at"`      // last modification time
}

View File

@@ -560,6 +560,8 @@ func UpdateOrderLinesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderPr
query := fmt.Sprintf(`
SET NOCOUNT ON;
DECLARE @updated TABLE (OrderLineID UNIQUEIDENTIFIER);
;WITH src (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate) AS (
SELECT *
FROM (VALUES %s) AS v (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate)
@@ -574,26 +576,114 @@ SET
l.DeliveryDate = CASE WHEN ISDATE(s.NewDueDate) = 1 THEN CAST(s.NewDueDate AS DATETIME) ELSE l.DeliveryDate END,
l.LastUpdatedUserName = @p%d,
l.LastUpdatedDate = GETDATE()
OUTPUT inserted.OrderLineID INTO @updated(OrderLineID)
FROM dbo.trOrderLine l
JOIN src s
ON CAST(l.OrderLineID AS NVARCHAR(50)) = s.OrderLineID
WHERE l.OrderHeaderID = @p%d;
ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d);
SELECT COUNT(1) AS UpdatedCount FROM @updated;
`, strings.Join(values, ","), usernameParam, orderHeaderParam)
chunkStart := time.Now()
res, execErr := tx.Exec(query, args...)
var chunkUpdated int64
execErr := tx.QueryRow(query, args...).Scan(&chunkUpdated)
if execErr != nil {
log.Printf("[UpdateOrderLinesTx] ERROR orderHeaderID=%s chunk=%d-%d err=%v", orderHeaderID, i, end, execErr)
return updated, fmt.Errorf("update lines chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), execErr)
}
log.Printf("[UpdateOrderLinesTx] orderHeaderID=%s chunk=%d-%d duration_ms=%d", orderHeaderID, i, end, time.Since(chunkStart).Milliseconds())
if rows, rowsErr := res.RowsAffected(); rowsErr == nil {
updated += rows
}
log.Printf("[UpdateOrderLinesTx] orderHeaderID=%s chunk=%d-%d updated=%d duration_ms=%d", orderHeaderID, i, end, chunkUpdated, time.Since(chunkStart).Milliseconds())
updated += chunkUpdated
}
return updated, nil
}
// VerifyOrderLineUpdatesTx cross-checks that the expected item/color/dim2
// values for the given order lines actually match what is stored in
// dbo.trOrderLine. It compares the upper-cased, trimmed expected values
// against the stored columns (normalized the same way in SQL) and returns:
//   - the number of mismatching lines,
//   - up to 5 human-readable sample diffs for logging,
//   - any query/scan error (counts gathered so far are still returned).
func VerifyOrderLineUpdatesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine) (int64, []string, error) {
	if len(lines) == 0 {
		return 0, nil, nil
	}
	// 300 lines per query keeps the generated VALUES list and parameter count
	// bounded — NOTE(review): presumably to stay under SQL Server's parameter
	// limit (4 params per line + 1 header param); confirm.
	const chunkSize = 300
	var mismatchCount int64
	samples := make([]string, 0, 5)
	for i := 0; i < len(lines); i += chunkSize {
		end := i + chunkSize
		if end > len(lines) {
			end = len(lines)
		}
		chunk := lines[i:end]
		// Build "(@p1,@p2,@p3,@p4),..." placeholders and the matching args,
		// normalizing the expected values exactly like the SQL normalizes the
		// stored ones (trim + upper-case).
		values := make([]string, 0, len(chunk))
		args := make([]any, 0, len(chunk)*4+1)
		paramPos := 1
		for _, line := range chunk {
			values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3))
			args = append(args,
				strings.TrimSpace(line.OrderLineID),
				strings.ToUpper(strings.TrimSpace(line.NewItemCode)),
				strings.ToUpper(strings.TrimSpace(line.NewColor)),
				strings.ToUpper(strings.TrimSpace(line.NewDim2)),
			)
			paramPos += 4
		}
		// The order-header id is the last parameter, after all line params.
		orderHeaderParam := paramPos
		args = append(args, orderHeaderID)
		// Selects only the lines whose stored values differ from the expected
		// ones; a fully-applied update returns zero rows.
		query := fmt.Sprintf(`
SET NOCOUNT ON;
WITH src (OrderLineID, NewItemCode, NewColor, NewDim2) AS (
SELECT *
FROM (VALUES %s) v(OrderLineID, NewItemCode, NewColor, NewDim2)
)
SELECT
s.OrderLineID,
ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') AS ActualItemCode,
ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') AS ActualColorCode,
ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') AS ActualDim2Code,
s.NewItemCode,
s.NewColor,
s.NewDim2
FROM src s
JOIN dbo.trOrderLine l
ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d)
AND (
ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') <> s.NewItemCode OR
ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') <> s.NewColor OR
ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') <> s.NewDim2
);
`, strings.Join(values, ","), orderHeaderParam)
		rows, err := tx.Query(query, args...)
		if err != nil {
			return mismatchCount, samples, err
		}
		// rows is closed manually on every exit path of this chunk; defer is
		// avoided here since the loop may run many iterations.
		for rows.Next() {
			var lineID, actualItem, actualColor, actualDim2, expectedItem, expectedColor, expectedDim2 string
			if err := rows.Scan(&lineID, &actualItem, &actualColor, &actualDim2, &expectedItem, &expectedColor, &expectedDim2); err != nil {
				rows.Close()
				return mismatchCount, samples, err
			}
			mismatchCount++
			// Keep at most 5 sample diffs to cap log size.
			if len(samples) < 5 {
				samples = append(samples, fmt.Sprintf(
					"lineID=%s expected=(%s,%s,%s) actual=(%s,%s,%s)",
					lineID, expectedItem, expectedColor, expectedDim2, actualItem, actualColor, actualDim2,
				))
			}
		}
		if err := rows.Err(); err != nil {
			rows.Close()
			return mismatchCount, samples, err
		}
		rows.Close()
	}
	return mismatchCount, samples, nil
}
func UpdateOrderHeaderAverageDueDateTx(tx *sql.Tx, orderHeaderID string, averageDueDate *string, username string) error {
if averageDueDate == nil {
return nil
@@ -617,6 +707,24 @@ WHERE OrderHeaderID = @p3;
return err
}
// TouchOrderHeaderTx bumps the audit columns (LastUpdatedUserName /
// LastUpdatedDate) of the given order header inside the transaction and
// returns the number of rows affected (0 when the header does not exist).
func TouchOrderHeaderTx(tx *sql.Tx, orderHeaderID string, username string) (int64, error) {
	res, err := tx.Exec(`
UPDATE dbo.trOrderHeader
SET
LastUpdatedUserName = @p1,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p2;
`, username, orderHeaderID)
	if err != nil {
		return 0, err
	}
	rows, rowsErr := res.RowsAffected()
	if rowsErr != nil {
		// BUG FIX: previously returned (0, nil) here, silently swallowing the
		// RowsAffected error and making the touch look successful.
		return 0, rowsErr
	}
	return rows, nil
}
// sqlQueryRower abstracts the single-row query method shared by *sql.DB and
// *sql.Tx, so lookup helpers can run either inside or outside a transaction.
type sqlQueryRower interface {
	QueryRow(query string, args ...any) *sql.Row
}
@@ -664,8 +772,19 @@ WHERE Barcode = @p1
return true, nil
}
func existingVariantBarcode(q sqlQueryRower, barcodeTypeCode string, itemTypeCode int16, itemCode string, colorCode string, dim1 string, dim2 string, dim3 string) (string, bool, error) {
func existingVariantBarcode(
q sqlQueryRower,
barcodeTypeCode string,
itemTypeCode int16,
itemCode string,
colorCode string,
dim1 string,
dim2 string,
dim3 string,
) (string, bool, error) {
var barcode string
err := q.QueryRow(`
SELECT TOP 1 LTRIM(RTRIM(ISNULL(Barcode, '')))
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
@@ -677,23 +796,48 @@ WHERE BarcodeTypeCode = @p1
AND ISNULL(LTRIM(RTRIM(ItemDim2Code)), '') = @p6
AND ISNULL(LTRIM(RTRIM(ItemDim3Code)), '') = @p7
AND ISNULL(LTRIM(RTRIM(UnitOfMeasureCode)), '') = 'AD'
ORDER BY TRY_CONVERT(BIGINT, NULLIF(LTRIM(RTRIM(Barcode)), '')) DESC, Barcode DESC
`, strings.TrimSpace(barcodeTypeCode), itemTypeCode, strings.TrimSpace(itemCode), strings.TrimSpace(colorCode), strings.TrimSpace(dim1), strings.TrimSpace(dim2), strings.TrimSpace(dim3)).Scan(&barcode)
ORDER BY
CASE
WHEN ISNUMERIC(Barcode) = 1
THEN CAST(Barcode AS BIGINT)
ELSE 0
END DESC,
Barcode DESC
`,
strings.TrimSpace(barcodeTypeCode),
itemTypeCode,
strings.TrimSpace(itemCode),
strings.TrimSpace(colorCode),
strings.TrimSpace(dim1),
strings.TrimSpace(dim2),
strings.TrimSpace(dim3),
).Scan(&barcode)
if err == sql.ErrNoRows {
return "", false, nil
}
if err != nil {
return "", false, err
}
return strings.TrimSpace(barcode), true, nil
}
func maxNumericBarcode(q sqlQueryRower) (int64, error) {
var maxBarcode int64
err := q.QueryRow(`
SELECT ISNULL(MAX(TRY_CONVERT(BIGINT, NULLIF(LTRIM(RTRIM(Barcode)), ''))), 0)
SELECT ISNULL(MAX(
CASE
WHEN ISNUMERIC(Barcode) = 1
THEN CAST(Barcode AS BIGINT)
ELSE NULL
END
), 0)
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
`).Scan(&maxBarcode)
return maxBarcode, err
}
@@ -784,15 +928,17 @@ func ValidateProductionBarcodePlan(q sqlQueryRower, variants []models.OrderProdu
return validations, nil
}
func UpsertItemBarcodesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
func InsertItemBarcodesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
start := time.Now()
if len(lines) == 0 {
log.Printf("[UpsertItemBarcodesTx] lines=0 inserted=0 duration_ms=0")
log.Printf("[InsertItemBarcodesTx] lines=0 inserted=0 duration_ms=0")
return 0, nil
}
lineIDs := make([]string, 0, len(lines))
seen := make(map[string]struct{}, len(lines))
for _, line := range lines {
lineID := strings.TrimSpace(line.OrderLineID)
if lineID == "" {
@@ -804,78 +950,19 @@ func UpsertItemBarcodesTx(tx *sql.Tx, orderHeaderID string, lines []models.Order
seen[lineID] = struct{}{}
lineIDs = append(lineIDs, lineID)
}
if len(lineIDs) == 0 {
log.Printf("[UpsertItemBarcodesTx] lines=%d uniqueLineIDs=0 inserted=0 duration_ms=%d", len(lines), time.Since(start).Milliseconds())
log.Printf("[InsertItemBarcodesTx] uniqueLineIDs=0 inserted=0")
return 0, nil
}
const chunkSize = 900
var inserted int64
for i := 0; i < len(lineIDs); i += chunkSize {
end := i + chunkSize
if end > len(lineIDs) {
end = len(lineIDs)
}
chunk := lineIDs[i:end]
values := make([]string, 0, len(chunk))
args := make([]any, 0, len(chunk)+2)
paramPos := 1
for _, lineID := range chunk {
values = append(values, fmt.Sprintf("(@p%d)", paramPos))
args = append(args, lineID)
paramPos++
}
orderHeaderParam := paramPos
usernameParam := paramPos + 1
args = append(args, orderHeaderID, username)
query := fmt.Sprintf(`
singleLineQuery := `
SET NOCOUNT ON;
;WITH srcLine (OrderLineID) AS (
SELECT *
FROM (VALUES %s) AS v (OrderLineID)
),
src AS (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode, '')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode, '')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code, '')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code, '')))) AS ItemDim2Code,
CAST('' AS NVARCHAR(50)) AS ItemDim3Code
FROM dbo.trOrderLine l WITH (UPDLOCK, HOLDLOCK)
JOIN srcLine s
ON CAST(l.OrderLineID AS NVARCHAR(50)) = s.OrderLineID
WHERE l.OrderHeaderID = @p%d
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode, ''))), '') IS NOT NULL
),
missing AS (
SELECT
s.ItemTypeCode,
s.ItemCode,
s.ColorCode,
s.ItemDim1Code,
s.ItemDim2Code,
s.ItemDim3Code,
ROW_NUMBER() OVER (
ORDER BY s.ItemCode, s.ColorCode, s.ItemDim1Code, s.ItemDim2Code, s.ItemDim3Code
) AS RowNo
FROM src s
LEFT JOIN dbo.prItemBarcode b WITH (UPDLOCK, HOLDLOCK)
ON UPPER(LTRIM(RTRIM(ISNULL(b.BarcodeTypeCode, '')))) = 'BAGGI3'
AND b.ItemTypeCode = s.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode, '')))) = s.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode, '')))) = s.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code, '')))) = s.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code, '')))) = s.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code, '')))) = s.ItemDim3Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.UnitOfMeasureCode, '')))) = 'AD'
WHERE b.Barcode IS NULL
)
INSERT INTO dbo.prItemBarcode (
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
@@ -893,14 +980,146 @@ INSERT INTO dbo.prItemBarcode (
RowGuid
)
SELECT
CAST(seed.MaxBarcode + m.RowNo AS NVARCHAR(50)) AS Barcode,
CAST(seed.MaxBarcode + 1 AS NVARCHAR(50)),
'BAGGI3',
m.ItemTypeCode,
m.ItemCode,
m.ColorCode,
m.ItemDim1Code,
m.ItemDim2Code,
m.ItemDim3Code,
src.ItemTypeCode,
src.ItemCode,
src.ColorCode,
src.ItemDim1Code,
src.ItemDim2Code,
src.ItemDim3Code,
'AD',
1,
@p3,
GETDATE(),
@p3,
GETDATE(),
NEWID()
FROM (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
FROM dbo.trOrderLine l
WHERE l.OrderHeaderID = @p2
AND CAST(l.OrderLineID AS NVARCHAR(50)) = @p1
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
) src
CROSS JOIN (
SELECT
CASE
WHEN ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0) < 36999999
THEN 36999999
ELSE ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0)
END AS MaxBarcode
FROM dbo.prItemBarcode
WHERE BarcodeTypeCode = 'BAGGI3'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
) seed
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = src.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
);
`
execSingle := func(globalIndex int, lineID string) error {
lineStart := time.Now()
res, err := tx.Exec(singleLineQuery, lineID, orderHeaderID, username)
if err != nil {
if isDuplicateBarcodeInsertErr(err) {
log.Printf("[InsertItemBarcodesTx] skip duplicate lineIndex=%d lineID=%s err=%v", globalIndex, lineID, err)
return nil
}
return fmt.Errorf("upsert item barcodes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", globalIndex, globalIndex+1, time.Since(lineStart).Milliseconds(), err)
}
rows, _ := res.RowsAffected()
inserted += rows
log.Printf(
"[InsertItemBarcodesTx] lineIndex=%d lineID=%s inserted=%d cumulative=%d duration_ms=%d",
globalIndex,
lineID,
rows,
inserted,
time.Since(lineStart).Milliseconds(),
)
return nil
}
const chunkSize = 200
for i := 0; i < len(lineIDs); i += chunkSize {
end := i + chunkSize
if end > len(lineIDs) {
end = len(lineIDs)
}
chunk := lineIDs[i:end]
values := make([]string, 0, len(chunk))
args := make([]any, 0, len(chunk)+2)
paramPos := 1
for _, lineID := range chunk {
values = append(values, fmt.Sprintf("(@p%d)", paramPos))
args = append(args, lineID)
paramPos++
}
orderHeaderParam := paramPos
usernameParam := paramPos + 1
args = append(args, orderHeaderID, username)
batchQuery := fmt.Sprintf(`
SET NOCOUNT ON;
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
CAST(seed.MaxBarcode + ROW_NUMBER() OVER (
ORDER BY src.ItemTypeCode, src.ItemCode, src.ColorCode, src.ItemDim1Code, src.ItemDim2Code, src.ItemDim3Code
) AS NVARCHAR(50)),
'BAGGI3',
src.ItemTypeCode,
src.ItemCode,
src.ColorCode,
src.ItemDim1Code,
src.ItemDim2Code,
src.ItemDim3Code,
'AD',
1,
@p%d,
@@ -908,30 +1127,376 @@ SELECT
@p%d,
GETDATE(),
NEWID()
FROM missing m
FROM (
SELECT DISTINCT
l.ItemTypeCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
FROM dbo.trOrderLine l
JOIN (VALUES %s) ids(OrderLineID)
ON CAST(l.OrderLineID AS NVARCHAR(50)) = ids.OrderLineID
WHERE l.OrderHeaderID = @p%d
AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
) src
CROSS JOIN (
SELECT ISNULL(MAX(TRY_CONVERT(BIGINT, NULLIF(LTRIM(RTRIM(Barcode)), ''))), 0) AS MaxBarcode
FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
) seed;
SELECT @@ROWCOUNT AS Inserted;
`, strings.Join(values, ","), orderHeaderParam, usernameParam, usernameParam)
SELECT
CASE
WHEN ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0) < 36999999
THEN 36999999
ELSE ISNULL(MAX(
CASE
WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
ELSE NULL
END
), 0)
END AS MaxBarcode
FROM dbo.prItemBarcode
WHERE BarcodeTypeCode = 'BAGGI3'
AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
) seed
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = src.ItemTypeCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
);
`, usernameParam, usernameParam, strings.Join(values, ","), orderHeaderParam)
chunkStart := time.Now()
var chunkInserted int64
if err := tx.QueryRow(query, args...).Scan(&chunkInserted); err != nil {
return inserted, fmt.Errorf("upsert item barcodes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), err)
res, err := tx.Exec(batchQuery, args...)
if err == nil {
rows, _ := res.RowsAffected()
inserted += rows
log.Printf(
"[InsertItemBarcodesTx] batch=%d-%d inserted=%d cumulative=%d duration_ms=%d",
i,
end,
rows,
inserted,
time.Since(chunkStart).Milliseconds(),
)
continue
}
log.Printf("[InsertItemBarcodesTx] batch fallback=%d-%d err=%v", i, end, err)
for j, lineID := range chunk {
if lineErr := execSingle(i+j, lineID); lineErr != nil {
log.Printf("[InsertItemBarcodesTx] ERROR lineIndex=%d lineID=%s err=%v", i+j, lineID, lineErr)
return inserted, lineErr
}
}
inserted += chunkInserted
log.Printf("[UpsertItemBarcodesTx] orderHeaderID=%s chunk=%d-%d chunkInserted=%d cumulative=%d duration_ms=%d",
orderHeaderID, i, end, chunkInserted, inserted, time.Since(chunkStart).Milliseconds())
}
log.Printf("[UpsertItemBarcodesTx] orderHeaderID=%s lines=%d uniqueLineIDs=%d inserted=%d duration_ms=%d",
orderHeaderID, len(lines), len(lineIDs), inserted, time.Since(start).Milliseconds())
log.Printf(
"[InsertItemBarcodesTx] lines=%d unique=%d inserted=%d duration_ms=%d",
len(lines),
len(lineIDs),
inserted,
time.Since(start).Milliseconds(),
)
return inserted, nil
}
// InsertItemBarcodesByTargetsTx allocates and inserts one 'BAGGI3' barcode per
// missing item variant inside the caller-owned transaction tx.
//
// Behavior (all within tx):
//   - Targets are de-duplicated on the normalized tuple
//     (ItemTypeCode, ItemCode, ColorCode, ItemDim1Code..ItemDim3Code),
//     upper-cased and trimmed; targets with an empty ItemCode are dropped.
//   - A numeric barcode seed is read under exclusive locks from
//     dbo.prItemBarcode (and dbo.tbStokBarkodu when that table exists),
//     floored at 36999999.
//   - For each variant that does not already have a barcode row, candidate
//     numbers seed+1, seed+2, ... are probed until one is free, then inserted
//     with a NOT EXISTS guard. Duplicate-key insert errors are treated as a
//     lost race and retried with the next number (bounded at 2000 attempts).
//
// Returns the count of rows actually inserted. username stamps the
// Created/LastUpdated audit columns.
func InsertItemBarcodesByTargetsTx(tx *sql.Tx, targets []models.OrderProductionMissingVariant, username string) (int64, error) {
start := time.Now()
if len(targets) == 0 {
log.Printf("[InsertItemBarcodesByTargetsTx] targets=0 inserted=0 duration_ms=0")
return 0, nil
}
// De-duplicate targets on the normalized variant key; normalization
// (upper-case + trim) mirrors the UPPER(LTRIM(RTRIM(...))) comparisons used
// in the SQL below, so Go-side and SQL-side identity agree.
uniqueTargets := make([]models.OrderProductionMissingVariant, 0, len(targets))
seen := make(map[string]struct{}, len(targets))
for _, t := range targets {
itemCode := strings.ToUpper(strings.TrimSpace(t.ItemCode))
if itemCode == "" {
continue
}
key := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
t.ItemTypeCode,
itemCode,
strings.ToUpper(strings.TrimSpace(t.ColorCode)),
strings.ToUpper(strings.TrimSpace(t.ItemDim1Code)),
strings.ToUpper(strings.TrimSpace(t.ItemDim2Code)),
strings.ToUpper(strings.TrimSpace(t.ItemDim3Code)),
)
if _, ok := seen[key]; ok {
continue
}
seen[key] = struct{}{}
// t is a copy, so rewriting its fields with the normalized values does
// not mutate the caller's slice elements.
t.ItemCode = itemCode
t.ColorCode = strings.ToUpper(strings.TrimSpace(t.ColorCode))
t.ItemDim1Code = strings.ToUpper(strings.TrimSpace(t.ItemDim1Code))
t.ItemDim2Code = strings.ToUpper(strings.TrimSpace(t.ItemDim2Code))
t.ItemDim3Code = strings.ToUpper(strings.TrimSpace(t.ItemDim3Code))
uniqueTargets = append(uniqueTargets, t)
}
if len(uniqueTargets) == 0 {
log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=0 inserted=0 duration_ms=%d", len(targets), time.Since(start).Milliseconds())
return 0, nil
}
if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/start"); err != nil {
return 0, err
}
// Compute the barcode seed by reading both prItemBarcode and (when present)
// tbStokBarkodu under locks; this reduces the duplicate-key risk coming from
// the trigger side. Only all-digit barcodes up to 18 characters are
// considered so CAST(... AS BIGINT) cannot fail.
var maxBarcode int64
maxPrQuery := `
SELECT ISNULL(MAX(v.BarcodeNum), 0)
FROM (
SELECT
CASE
WHEN LTRIM(RTRIM(ISNULL(pb.Barcode,''))) NOT LIKE '%[^0-9]%'
AND LEN(LTRIM(RTRIM(ISNULL(pb.Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(pb.Barcode,''))) AS BIGINT)
ELSE NULL
END AS BarcodeNum
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
WHERE pb.BarcodeTypeCode = 'BAGGI3'
) v
WHERE v.BarcodeNum IS NOT NULL;
`
if err := tx.QueryRow(maxPrQuery).Scan(&maxBarcode); err != nil {
return 0, fmt.Errorf("barcode seed query failed: %w", err)
}
// tbStokBarkodu is optional in some databases; probe for it before querying.
var hasTb int
if err := tx.QueryRow(`SELECT CASE WHEN OBJECT_ID(N'dbo.tbStokBarkodu', N'U') IS NULL THEN 0 ELSE 1 END`).Scan(&hasTb); err != nil {
return 0, fmt.Errorf("barcode seed object check failed: %w", err)
}
if hasTb == 1 {
var maxTb int64
maxTbQuery := `
SELECT ISNULL(MAX(v.BarcodeNum), 0)
FROM (
SELECT
CASE
WHEN LTRIM(RTRIM(ISNULL(sb.Barcode,''))) NOT LIKE '%[^0-9]%'
AND LEN(LTRIM(RTRIM(ISNULL(sb.Barcode,'')))) BETWEEN 1 AND 18
THEN CAST(LTRIM(RTRIM(ISNULL(sb.Barcode,''))) AS BIGINT)
ELSE NULL
END AS BarcodeNum
FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
) v
WHERE v.BarcodeNum IS NOT NULL;
`
if err := tx.QueryRow(maxTbQuery).Scan(&maxTb); err != nil {
return 0, fmt.Errorf("barcode seed tbStokBarkodu query failed: %w", err)
}
// Seed from whichever table currently holds the larger numeric barcode.
if maxTb > maxBarcode {
maxBarcode = maxTb
}
}
// Business floor: generated barcodes start above 36999999 regardless of the
// tables' current maxima.
if maxBarcode < 36999999 {
maxBarcode = 36999999
}
// Candidate-existence probe against prItemBarcode only (used when
// tbStokBarkodu is absent).
existsBarcodeQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
) THEN 1 ELSE 0 END;
`
// Same probe, but also checking tbStokBarkodu (used when that table exists).
existsBarcodeWithTbQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
) OR EXISTS (
SELECT 1
FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK)
WHERE LTRIM(RTRIM(ISNULL(sb.Barcode,''))) = @p1
) THEN 1 ELSE 0 END;
`
// Checks whether the variant already has ANY barcode row, so existing
// variants are skipped without allocating a number.
hasVariantBarcodeQuery := `
SELECT CASE WHEN EXISTS (
SELECT 1
FROM dbo.prItemBarcode b WITH (UPDLOCK, HOLDLOCK)
WHERE b.ItemTypeCode = @p1
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p2
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p3
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p4
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p5
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p6
) THEN 1 ELSE 0 END;
`
// Guarded single-row insert: the NOT EXISTS clause re-checks the variant so
// a concurrent insert between our probe and this statement yields 0 rows
// affected instead of a duplicate variant row.
insertOneQuery := `
INSERT INTO dbo.prItemBarcode
(
Barcode,
BarcodeTypeCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
UnitOfMeasureCode,
Qty,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate,
RowGuid
)
SELECT
@p1,
'BAGGI3',
@p2,
@p3,
@p4,
@p5,
@p6,
@p7,
'AD',
1,
@p8,
GETDATE(),
@p8,
GETDATE(),
NEWID()
WHERE NOT EXISTS (
SELECT 1
FROM dbo.prItemBarcode b
WHERE b.ItemTypeCode = @p2
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p3
AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p4
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p5
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p6
AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p7
);
`
var inserted int64
for _, t := range uniqueTargets {
if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/before_target"); err != nil {
return inserted, err
}
// Skip variants that already carry a barcode of any type.
var hasVariant int
if err := tx.QueryRow(
hasVariantBarcodeQuery,
t.ItemTypeCode,
t.ItemCode,
t.ColorCode,
t.ItemDim1Code,
t.ItemDim2Code,
t.ItemDim3Code,
).Scan(&hasVariant); err != nil {
return inserted, fmt.Errorf("variant barcode exists check failed: %w", err)
}
if hasVariant == 1 {
continue
}
// Allocation loop: walk candidate numbers upward until an insert lands.
// Bounded to 2000 attempts per variant to guarantee termination.
retry := 0
for {
retry++
if retry > 2000 {
return inserted, fmt.Errorf("barcode allocation exceeded retry limit item=%s color=%s dim1=%s", t.ItemCode, t.ColorCode, t.ItemDim1Code)
}
candidateNum := maxBarcode + 1
candidate := strconv.FormatInt(candidateNum, 10)
var exists int
if hasTb == 1 {
if err := tx.QueryRow(existsBarcodeWithTbQuery, candidate).Scan(&exists); err != nil {
return inserted, fmt.Errorf("barcode exists check(tb) failed: %w", err)
}
} else {
if err := tx.QueryRow(existsBarcodeQuery, candidate).Scan(&exists); err != nil {
return inserted, fmt.Errorf("barcode exists check failed: %w", err)
}
}
if exists == 1 {
// Candidate already taken; advance the seed and try the next number.
maxBarcode = candidateNum
continue
}
res, err := tx.Exec(
insertOneQuery,
candidate,
t.ItemTypeCode,
t.ItemCode,
t.ColorCode,
t.ItemDim1Code,
t.ItemDim2Code,
t.ItemDim3Code,
username,
)
if err != nil {
if isDuplicateBarcodeInsertErr(err) {
// Lost a race for this number (possibly to a trigger-maintained
// table); burn the candidate and retry with the next one.
maxBarcode = candidateNum
continue
}
return inserted, fmt.Errorf("insert item barcode failed item=%s color=%s dim1=%s duration_ms=%d: %w",
t.ItemCode, t.ColorCode, t.ItemDim1Code, time.Since(start).Milliseconds(), err)
}
// affected == 0 means the NOT EXISTS guard fired (variant appeared
// concurrently); in that case neither the counter nor the seed moves.
affected, _ := res.RowsAffected()
if affected > 0 {
inserted += affected
maxBarcode = candidateNum
}
break
}
}
if txErr := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/after_batch"); txErr != nil {
return inserted, txErr
}
log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=%d inserted=%d duration_ms=%d",
len(targets), len(uniqueTargets), inserted, time.Since(start).Milliseconds())
return inserted, nil
}
// ensureTxStillActive verifies that tx is usable: it must be non-nil and the
// underlying SQL Server session must still report an open transaction
// (@@TRANCOUNT > 0). The where tag is embedded in every error message so the
// failing call site is identifiable from logs.
func ensureTxStillActive(tx *sql.Tx, where string) error {
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	count := 0
	err := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&count)
	switch {
	case err != nil:
		return fmt.Errorf("tx state query failed at %s: %w", where, err)
	case count <= 0:
		return fmt.Errorf("tx closed unexpectedly at %s (trancount=%d)", where, count)
	default:
		return nil
	}
}
func isDuplicateBarcodeInsertErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(err.Error())
if !strings.Contains(msg, "duplicate key") {
return false
}
if strings.Contains(msg, "tbstokbarkodu") {
return true
}
if strings.Contains(msg, "pritembarcode") {
return true
}
return strings.Contains(msg, "unique")
}
func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttributeRow, username string) (int64, error) {
start := time.Now()
if len(attrs) == 0 {
@@ -939,6 +1504,28 @@ func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttrib
return 0, nil
}
// FK_prItemAttribute_ItemCode hatasini engellemek icin, attribute yazmadan once
// ilgili item kodlarinin cdItem tarafinda varligini transaction icinde garanti et.
seenCodes := make(map[string]struct{}, len(attrs))
for _, a := range attrs {
itemTypeCode := a.ItemTypeCode
if itemTypeCode <= 0 {
itemTypeCode = 1
}
itemCode := strings.ToUpper(strings.TrimSpace(a.ItemCode))
if itemCode == "" {
continue
}
key := NormalizeCdItemMapKey(int16(itemTypeCode), itemCode)
if _, ok := seenCodes[key]; ok {
continue
}
seenCodes[key] = struct{}{}
if err := ensureCdItemTx(tx, int16(itemTypeCode), itemCode, username, nil); err != nil {
return 0, fmt.Errorf("ensure cdItem before item attributes failed itemCode=%s: %w", itemCode, err)
}
}
// SQL Server parameter limiti (2100) nedeniyle batch'li set-based upsert kullanilir.
const chunkSize = 400 // 400 * 4 param + 1 username = 1601
var affected int64
@@ -961,11 +1548,7 @@ func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttrib
args = append(args, username)
query := fmt.Sprintf(`
SET NOCOUNT ON;
DECLARE @updated INT = 0;
DECLARE @inserted INT = 0;
;WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
SELECT *
FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
)
@@ -979,9 +1562,8 @@ JOIN src
ON src.ItemTypeCode = tgt.ItemTypeCode
AND src.ItemCode = tgt.ItemCode
AND src.AttributeTypeCode = tgt.AttributeTypeCode;
SET @updated = @@ROWCOUNT;
;WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
SELECT *
FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
)
@@ -1012,18 +1594,18 @@ LEFT JOIN dbo.prItemAttribute tgt
AND src.ItemCode = tgt.ItemCode
AND src.AttributeTypeCode = tgt.AttributeTypeCode
WHERE tgt.ItemCode IS NULL;
SET @inserted = @@ROWCOUNT;
SELECT (@updated + @inserted) AS Affected;
`, strings.Join(values, ","), usernameParam, strings.Join(values, ","), usernameParam, usernameParam)
var chunkAffected int64
if err := tx.QueryRow(query, args...).Scan(&chunkAffected); err != nil {
return affected, err
chunkStart := time.Now()
res, err := tx.Exec(query, args...)
if err != nil {
log.Printf("[UpsertItemAttributesTx] ERROR chunk=%d-%d err=%v", i, end, err)
return affected, fmt.Errorf("upsert item attributes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), err)
}
chunkAffected, _ := res.RowsAffected()
affected += chunkAffected
log.Printf("[UpsertItemAttributesTx] chunk=%d-%d chunkAffected=%d cumulative=%d",
i, end, chunkAffected, affected)
log.Printf("[UpsertItemAttributesTx] chunk=%d-%d chunkAffected=%d cumulative=%d duration_ms=%d",
i, end, chunkAffected, affected, time.Since(chunkStart).Milliseconds())
}
log.Printf("[UpsertItemAttributesTx] attrs=%d affected=%d duration_ms=%d",
len(attrs), affected, time.Since(start).Milliseconds())

View File

@@ -3,178 +3,72 @@ package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"strconv"
"strings"
"time"
)
func GetProductPricingList() ([]models.ProductPricing, error) {
rows, err := db.MssqlDB.Query(`
WITH base_products AS (
SELECT
LTRIM(RTRIM(ProductCode)) AS ProductCode,
COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan,
COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori,
COALESCE(LTRIM(RTRIM(ProductAtt42Desc)), '') AS UrunIlkGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt01Desc)), '') AS UrunAnaGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt02Desc)), '') AS UrunAltGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt41Desc)), '') AS Icerik,
COALESCE(LTRIM(RTRIM(ProductAtt29Desc)), '') AS Karisim,
COALESCE(LTRIM(RTRIM(ProductAtt10Desc)), '') AS Marka
FROM ProductFilterWithDescription('TR')
WHERE ProductAtt42 IN ('SERI', 'AKSESUAR')
AND IsBlocked = 0
AND LEN(LTRIM(RTRIM(ProductCode))) = 13
),
latest_base_price AS (
SELECT
LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
ROW_NUMBER() OVER (
PARTITION BY LTRIM(RTRIM(b.ItemCode))
ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
) AS rn
FROM prItemBasePrice b
WHERE b.ItemTypeCode = 1
AND b.BasePriceCode = 1
AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(b.ItemCode))
)
),
stock_entry_dates AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
FROM trStock s WITH(NOLOCK)
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND s.In_Qty1 > 0
AND LTRIM(RTRIM(s.WarehouseCode)) IN (
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(s.ItemCode))
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
FROM trStock s WITH(NOLOCK)
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(s.ItemCode))
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
SELECT
LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
SUM(p.Qty1) AS PickingQty1
FROM PickingStates p
WHERE p.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(p.ItemCode))
)
GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
SELECT
LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
SUM(r.Qty1) AS ReserveQty1
FROM ReserveStates r
WHERE r.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(r.ItemCode))
)
GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
SELECT
LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
SUM(d.Qty1) AS DispOrderQty1
FROM DispOrderStates d
WHERE d.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(d.ItemCode))
)
GROUP BY LTRIM(RTRIM(d.ItemCode))
),
stock_totals AS (
SELECT
bp.ProductCode AS ItemCode,
CAST(ROUND(
ISNULL(sb.InventoryQty1, 0)
- ISNULL(pb.PickingQty1, 0)
- ISNULL(rb.ReserveQty1, 0)
- ISNULL(db.DispOrderQty1, 0)
, 2) AS DECIMAL(18, 2)) AS StockQty
FROM base_products bp
LEFT JOIN stock_base sb
ON sb.ItemCode = bp.ProductCode
LEFT JOIN pick_base pb
ON pb.ItemCode = bp.ProductCode
LEFT JOIN reserve_base rb
ON rb.ItemCode = bp.ProductCode
LEFT JOIN disp_base db
ON db.ItemCode = bp.ProductCode
)
SELECT
bp.ProductCode AS ProductCode,
COALESCE(lp.CostPrice, 0) AS CostPrice,
COALESCE(st.StockQty, 0) AS StockQty,
COALESCE(se.StockEntryDate, '') AS StockEntryDate,
COALESCE(lp.LastPricingDate, '') AS LastPricingDate,
bp.AskiliYan,
bp.Kategori,
bp.UrunIlkGrubu,
bp.UrunAnaGrubu,
bp.UrunAltGrubu,
bp.Icerik,
bp.Karisim,
bp.Marka
FROM base_products bp
LEFT JOIN latest_base_price lp
ON lp.ItemCode = bp.ProductCode
AND lp.rn = 1
LEFT JOIN stock_entry_dates se
ON se.ItemCode = bp.ProductCode
LEFT JOIN stock_totals st
ON st.ItemCode = bp.ProductCode
ORDER BY bp.ProductCode;
`)
if err != nil {
return nil, err
func GetProductPricingList(ctx context.Context, limit int, afterProductCode string) ([]models.ProductPricing, error) {
if limit <= 0 {
limit = 500
}
afterProductCode = strings.TrimSpace(afterProductCode)
// Stage 1: fetch only paged products first (fast path).
productQuery := `
SELECT TOP (` + strconv.Itoa(limit) + `)
LTRIM(RTRIM(ProductCode)) AS ProductCode,
COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan,
COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori,
COALESCE(LTRIM(RTRIM(ProductAtt42Desc)), '') AS UrunIlkGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt01Desc)), '') AS UrunAnaGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt02Desc)), '') AS UrunAltGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt41Desc)), '') AS Icerik,
COALESCE(LTRIM(RTRIM(ProductAtt29Desc)), '') AS Karisim,
COALESCE(LTRIM(RTRIM(ProductAtt10Desc)), '') AS Marka
FROM ProductFilterWithDescription('TR')
WHERE ProductAtt42 IN ('SERI', 'AKSESUAR')
AND IsBlocked = 0
AND LEN(LTRIM(RTRIM(ProductCode))) = 13
AND (@p1 = '' OR LTRIM(RTRIM(ProductCode)) > @p1)
ORDER BY LTRIM(RTRIM(ProductCode));
`
var (
rows *sql.Rows
rowsErr error
)
for attempt := 1; attempt <= 3; attempt++ {
var err error
rows, err = db.MssqlDB.QueryContext(ctx, productQuery, afterProductCode)
if err == nil {
rowsErr = nil
break
}
rowsErr = err
if ctx.Err() != nil || !isTransientMSSQLNetworkError(err) || attempt == 3 {
break
}
wait := time.Duration(attempt*300) * time.Millisecond
select {
case <-ctx.Done():
break
case <-time.After(wait):
}
}
if rowsErr != nil {
return nil, rowsErr
}
defer rows.Close()
var out []models.ProductPricing
out := make([]models.ProductPricing, 0, limit)
for rows.Next() {
var item models.ProductPricing
if err := rows.Scan(
&item.ProductCode,
&item.CostPrice,
&item.StockQty,
&item.StockEntryDate,
&item.LastPricingDate,
&item.AskiliYan,
&item.Kategori,
&item.UrunIlkGrubu,
@@ -188,6 +82,191 @@ func GetProductPricingList() ([]models.ProductPricing, error) {
}
out = append(out, item)
}
if err := rows.Err(); err != nil {
return nil, err
}
if len(out) == 0 {
return out, nil
}
// Stage 2: fetch metrics only for paged product codes.
codes := make([]string, 0, len(out))
for _, item := range out {
codes = append(codes, strings.TrimSpace(item.ProductCode))
}
valueRows := make([]string, 0, len(codes))
metricArgs := make([]any, 0, len(codes))
for i, code := range codes {
paramName := "@p" + strconv.Itoa(i+1)
valueRows = append(valueRows, "("+paramName+")")
metricArgs = append(metricArgs, code)
}
metricsQuery := `
WITH req_codes AS (
SELECT DISTINCT LTRIM(RTRIM(v.ProductCode)) AS ProductCode
FROM (VALUES ` + strings.Join(valueRows, ",") + `) v(ProductCode)
WHERE LEN(LTRIM(RTRIM(v.ProductCode))) > 0
),
latest_base_price AS (
SELECT
LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
ROW_NUMBER() OVER (
PARTITION BY LTRIM(RTRIM(b.ItemCode))
ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
) AS rn
FROM prItemBasePrice b
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(b.ItemCode))
WHERE b.ItemTypeCode = 1
AND b.BasePriceCode = 1
AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
),
stock_entry_dates AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
FROM trStock s WITH(NOLOCK)
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(s.ItemCode))
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND s.In_Qty1 > 0
AND LTRIM(RTRIM(s.WarehouseCode)) IN (
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
FROM trStock s WITH(NOLOCK)
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(s.ItemCode))
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
SELECT
LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
SUM(p.Qty1) AS PickingQty1
FROM PickingStates p
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(p.ItemCode))
WHERE p.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
SELECT
LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
SUM(r.Qty1) AS ReserveQty1
FROM ReserveStates r
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(r.ItemCode))
WHERE r.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
SELECT
LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
SUM(d.Qty1) AS DispOrderQty1
FROM DispOrderStates d
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(d.ItemCode))
WHERE d.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(d.ItemCode))
)
SELECT
rc.ProductCode,
COALESCE(lp.CostPrice, 0) AS CostPrice,
CAST(ROUND(
ISNULL(sb.InventoryQty1, 0)
- ISNULL(pb.PickingQty1, 0)
- ISNULL(rb.ReserveQty1, 0)
- ISNULL(db.DispOrderQty1, 0)
, 2) AS DECIMAL(18, 2)) AS StockQty,
COALESCE(se.StockEntryDate, '') AS StockEntryDate,
COALESCE(lp.LastPricingDate, '') AS LastPricingDate
FROM req_codes rc
LEFT JOIN latest_base_price lp
ON lp.ItemCode = rc.ProductCode
AND lp.rn = 1
LEFT JOIN stock_entry_dates se
ON se.ItemCode = rc.ProductCode
LEFT JOIN stock_base sb
ON sb.ItemCode = rc.ProductCode
LEFT JOIN pick_base pb
ON pb.ItemCode = rc.ProductCode
LEFT JOIN reserve_base rb
ON rb.ItemCode = rc.ProductCode
LEFT JOIN disp_base db
ON db.ItemCode = rc.ProductCode;
`
metricsRows, err := db.MssqlDB.QueryContext(ctx, metricsQuery, metricArgs...)
if err != nil {
return nil, fmt.Errorf("metrics query failed: %w", err)
}
defer metricsRows.Close()
type metrics struct {
CostPrice float64
StockQty float64
StockEntryDate string
LastPricingDate string
}
metricsByCode := make(map[string]metrics, len(out))
for metricsRows.Next() {
var (
code string
m metrics
)
if err := metricsRows.Scan(
&code,
&m.CostPrice,
&m.StockQty,
&m.StockEntryDate,
&m.LastPricingDate,
); err != nil {
return nil, err
}
metricsByCode[strings.TrimSpace(code)] = m
}
if err := metricsRows.Err(); err != nil {
return nil, err
}
for i := range out {
if m, ok := metricsByCode[strings.TrimSpace(out[i].ProductCode)]; ok {
out[i].CostPrice = m.CostPrice
out[i].StockQty = m.StockQty
out[i].StockEntryDate = m.StockEntryDate
out[i].LastPricingDate = m.LastPricingDate
}
}
return out, nil
}
func isTransientMSSQLNetworkError(err error) bool {
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "broken pipe") ||
strings.Contains(e, "connection reset")
}

View File

@@ -2,6 +2,7 @@ package queries
import (
"bssapp-backend/db"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"context"
"database/sql"
@@ -14,9 +15,7 @@ func GetStatements(ctx context.Context, params models.StatementParams) ([]models
// AccountCode normalize: "ZLA0127" → "ZLA 0127"
params.AccountCode = normalizeMasterAccountCode(params.AccountCode)
if strings.TrimSpace(params.LangCode) == "" {
params.LangCode = "TR"
}
params.LangCode = i18n.NormalizeLangCode(params.LangCode)
// Parislemler []string → '1','2','3'
parislemFilter := "''"
@@ -221,8 +220,8 @@ SELECT
CONVERT(varchar(10), @startdate, 23) AS Vade_Tarihi,
'Baslangic_devir' AS Belge_No,
'Devir' AS Islem_Tipi,
'Devir Bakiyesi' AS Aciklama,
CASE WHEN @LangCode = 'EN' THEN 'Opening' ELSE 'Devir' END AS Islem_Tipi,
CASE WHEN @LangCode = 'EN' THEN 'Opening Balance' ELSE 'Devir Bakiyesi' END AS Aciklama,
o.Para_Birimi,

View File

@@ -6,8 +6,8 @@ import (
"log"
)
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler)
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
log.Printf("Header query error: %v", err)
return nil, nil, err

View File

@@ -10,13 +10,14 @@ func getStatementsForPDF(
accountCode string,
startDate string,
endDate string,
langCode string,
parislemler []string,
) ([]models.StatementHeader, error) {
return GetStatements(ctx, models.StatementParams{
AccountCode: accountCode,
StartDate: startDate,
EndDate: endDate,
LangCode: "TR",
LangCode: langCode,
Parislemler: parislemler,
})
}

View File

@@ -11,8 +11,8 @@ import (
"strings"
)
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler)
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
log.Printf("Header query error: %v", err)
return nil, nil, err

View File

@@ -180,16 +180,33 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
missing := make([]models.OrderProductionMissingVariant, 0)
targets := make([]models.OrderProductionMissingVariant, 0)
stepStart := time.Now()
missing, err := buildMissingVariants(mssql, id, payload.Lines)
if err != nil {
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(payload.Lines), err)
return
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("validate_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
targets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, targets)
return stepErr
})
if err != nil {
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(newLines), err)
return
}
}
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s lineCount=%d missingCount=%d build_missing_ms=%d total_ms=%d",
rid, id, len(payload.Lines), len(missing), time.Since(stepStart).Milliseconds(), time.Since(start).Milliseconds())
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d total_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(targets), len(missing), time.Since(stepStart).Milliseconds(), time.Since(start).Milliseconds())
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
resp := map[string]any{
"missingCount": len(missing),
@@ -230,17 +247,57 @@ func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handl
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
stepMissingStart := time.Now()
missing, err := buildMissingVariants(mssql, id, payload.Lines)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepMissingStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(payload.Lines), err)
return
if len(payload.Lines) > 0 {
limit := 5
if len(payload.Lines) < limit {
limit = len(payload.Lines)
}
samples := make([]string, 0, limit)
for i := 0; i < limit; i++ {
ln := payload.Lines[i]
dim1 := ""
if ln.ItemDim1Code != nil {
dim1 = strings.TrimSpace(*ln.ItemDim1Code)
}
samples = append(samples, fmt.Sprintf(
"lineID=%s newItem=%s newColor=%s newDim1=%s newDim2=%s",
strings.TrimSpace(ln.OrderLineID),
strings.ToUpper(strings.TrimSpace(ln.NewItemCode)),
strings.ToUpper(strings.TrimSpace(ln.NewColor)),
strings.ToUpper(strings.TrimSpace(dim1)),
strings.ToUpper(strings.TrimSpace(ln.NewDim2)),
))
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineSamples=%v", rid, id, samples)
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s lineCount=%d missingCount=%d build_missing_ms=%d",
rid, id, len(payload.Lines), len(missing), time.Since(stepMissingStart).Milliseconds())
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
stepMissingStart := time.Now()
missing := make([]models.OrderProductionMissingVariant, 0)
barcodeTargets := make([]models.OrderProductionMissingVariant, 0)
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("apply_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
barcodeTargets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, barcodeTargets)
return stepErr
})
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepMissingStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(newLines), err)
return
}
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(barcodeTargets), len(missing), time.Since(stepMissingStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
if len(missing) > 0 && !payload.InsertMissing {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s early_exit=missing_variants total_ms=%d",
@@ -269,30 +326,83 @@ func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handl
writeDBError(w, http.StatusInternalServerError, "begin_tx", id, username, len(payload.Lines), err)
return
}
defer tx.Rollback()
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=begin_tx duration_ms=%d", rid, id, time.Since(stepBeginStart).Milliseconds())
committed := false
currentStep := "begin_tx"
applyTxSettings := func(tx *sql.Tx) error {
// XACT_ABORT OFF:
// Barcode insert path intentionally tolerates duplicate-key errors (fallback/skip duplicate).
// With XACT_ABORT ON, that expected error aborts the whole transaction and causes COMMIT 3902.
_, execErr := tx.Exec(`SET XACT_ABORT OFF; SET LOCK_TIMEOUT 15000;`)
return execErr
}
defer func() {
if committed {
return
}
rbStart := time.Now()
if rbErr := tx.Rollback(); rbErr != nil && rbErr != sql.ErrTxDone {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s failed duration_ms=%d err=%v",
rid, id, currentStep, time.Since(rbStart).Milliseconds(), rbErr)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s ok duration_ms=%d",
rid, id, currentStep, time.Since(rbStart).Milliseconds())
}()
stepTxSettingsStart := time.Now()
if _, err := tx.Exec(`SET XACT_ABORT ON; SET LOCK_TIMEOUT 15000;`); err != nil {
currentStep = "tx_settings"
if err := applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=tx_settings duration_ms=%d", rid, id, time.Since(stepTxSettingsStart).Milliseconds())
if err := ensureTxAlive(tx, "after_tx_settings"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings", id, username, len(payload.Lines), err)
return
}
var inserted int64
if payload.InsertMissing {
if payload.InsertMissing && len(newLines) > 0 {
currentStep = "insert_missing_variants"
cdItemByCode := buildCdItemDraftMap(payload.CdItems)
stepInsertMissingStart := time.Now()
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
if err != nil && isTransientMSSQLNetworkErr(err) {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing transient_error retry=1 err=%v",
rid, id, err)
_ = tx.Rollback()
tx, err = mssql.Begin()
if err != nil {
writeDBError(w, http.StatusInternalServerError, "begin_tx_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "tx_settings_retry_insert_missing"
if err = applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
if err = ensureTxAlive(tx, "after_tx_settings_retry_insert_missing"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "insert_missing_variants_retry"
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
}
if err != nil {
writeDBError(w, http.StatusInternalServerError, "insert_missing_variants", id, username, len(missing), err)
return
}
if err := ensureTxAlive(tx, "after_insert_missing_variants"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_insert_missing_variants", id, username, len(missing), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing inserted=%d duration_ms=%d",
rid, id, inserted, time.Since(stepInsertMissingStart).Milliseconds())
}
stepValidateAttrStart := time.Now()
currentStep = "validate_attributes"
if err := validateProductAttributes(payload.ProductAttributes); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
@@ -300,46 +410,131 @@ func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handl
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=validate_attributes count=%d duration_ms=%d",
rid, id, len(payload.ProductAttributes), time.Since(stepValidateAttrStart).Milliseconds())
stepUpsertAttrStart := time.Now()
currentStep = "upsert_item_attributes"
attributeAffected, err := queries.UpsertItemAttributesTx(tx, payload.ProductAttributes, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertAttrStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes affected=%d duration_ms=%d",
rid, id, attributeAffected, time.Since(stepUpsertAttrStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemAttribute inputRows=%d affectedRows=%d",
rid, id, len(payload.ProductAttributes), attributeAffected)
if err := ensureTxAlive(tx, "after_upsert_item_attributes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
var barcodeInserted int64
// Barkod adimi:
// - Eski kodlara girmemeli
// - Yeni kod satirlari icin, varyant daha once olusmus olsa bile eksik barkod varsa tamamlamali
// Bu nedenle "inserted > 0" yerine "newLineCount > 0" kosulu kullanilir.
if len(newLines) > 0 && len(barcodeTargets) > 0 {
stepUpsertBarcodeStart := time.Now()
currentStep = "upsert_item_barcodes"
barcodeInserted, err = queries.InsertItemBarcodesByTargetsTx(tx, barcodeTargets, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertBarcodeStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes inserted=%d duration_ms=%d",
rid, id, barcodeInserted, time.Since(stepUpsertBarcodeStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemBarcode targetVariantRows=%d insertedRows=%d",
rid, id, len(barcodeTargets), barcodeInserted)
if err := ensureTxAlive(tx, "after_upsert_item_barcodes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
} else {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes skipped newLineCount=%d targetVariantRows=%d",
rid, id, len(newLines), len(barcodeTargets))
}
stepUpdateHeaderStart := time.Now()
currentStep = "update_order_header_average_due_date"
if err := queries.UpdateOrderHeaderAverageDueDateTx(tx, id, payload.HeaderAverageDueDate, username); err != nil {
writeDBError(w, http.StatusInternalServerError, "update_order_header_average_due_date", id, username, 0, err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_header_average_due_date changed=%t duration_ms=%d",
rid, id, payload.HeaderAverageDueDate != nil, time.Since(stepUpdateHeaderStart).Milliseconds())
if err := ensureTxAlive(tx, "after_update_order_header_average_due_date"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_header_average_due_date", id, username, 0, err)
return
}
currentStep = "touch_order_header"
headerTouched, err := queries.TouchOrderHeaderTx(tx, id, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "touch_order_header", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderHeader touchedRows=%d",
rid, id, headerTouched)
if err := ensureTxAlive(tx, "after_touch_order_header"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_touch_order_header", id, username, len(payload.Lines), err)
return
}
stepUpdateLinesStart := time.Now()
currentStep = "update_order_lines"
updated, err := queries.UpdateOrderLinesTx(tx, id, payload.Lines, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpdateLinesStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "update_order_lines", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines updated=%d duration_ms=%d",
rid, id, updated, time.Since(stepUpdateLinesStart).Milliseconds())
stepUpsertBarcodeStart := time.Now()
barcodeInserted, err := queries.UpsertItemBarcodesTx(tx, id, payload.Lines, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "upsert_item_barcodes", id, username, len(payload.Lines), err)
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine targetRows=%d updatedRows=%d",
rid, id, len(payload.Lines), updated)
if err := ensureTxAlive(tx, "after_update_order_lines"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_lines", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes inserted=%d duration_ms=%d",
rid, id, barcodeInserted, time.Since(stepUpsertBarcodeStart).Milliseconds())
stepUpsertAttrStart := time.Now()
attributeAffected, err := queries.UpsertItemAttributesTx(tx, payload.ProductAttributes, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
currentStep = "verify_order_lines"
verifyMismatchCount, verifySamples, verifyErr := queries.VerifyOrderLineUpdatesTx(tx, id, payload.Lines)
if verifyErr != nil {
writeDBError(w, http.StatusInternalServerError, "verify_order_lines", id, username, len(payload.Lines), verifyErr)
return
}
if verifyMismatchCount > 0 {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=%d samples=%v",
rid, id, verifyMismatchCount, verifySamples)
currentStep = "verify_order_lines_mismatch"
w.WriteHeader(http.StatusInternalServerError)
_ = json.NewEncoder(w).Encode(map[string]any{
"message": "Order satirlari beklenen kod/renk degerlerine guncellenemedi",
"step": "verify_order_lines_mismatch",
"detail": fmt.Sprintf("mismatchCount=%d", verifyMismatchCount),
"samples": verifySamples,
})
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=0",
rid, id)
if err := ensureTxAlive(tx, "before_commit_tx"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_before_commit_tx", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes affected=%d duration_ms=%d",
rid, id, attributeAffected, time.Since(stepUpsertAttrStart).Milliseconds())
stepCommitStart := time.Now()
currentStep = "commit_tx"
if err := tx.Commit(); err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit failed duration_ms=%d err=%v",
rid, id, time.Since(stepCommitStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "commit_tx", id, username, len(payload.Lines), err)
return
}
committed = true
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit duration_ms=%d total_ms=%d",
rid, id, time.Since(stepCommitStart).Milliseconds(), time.Since(start).Milliseconds())
@@ -364,6 +559,8 @@ func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handl
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s result updated=%d inserted=%d barcodeInserted=%d attributeUpserted=%d",
rid, id, updated, inserted, barcodeInserted, attributeAffected)
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s summary tables cdItem/prItemVariant(newOnly)=%d trOrderLine(updated)=%d prItemBarcode(inserted,newOnly)=%d prItemAttribute(affected)=%d trOrderHeader(touched)=%d",
rid, id, inserted, updated, barcodeInserted, attributeAffected, headerTouched)
if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err)
}
@@ -408,6 +605,14 @@ func buildCdItemDraftMap(list []models.OrderProductionCdItemDraft) map[string]mo
return out
}
func isNoCorrespondingBeginTxErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
return strings.Contains(msg, "commit transaction request has no corresponding begin transaction")
}
func buildTargetVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
start := time.Now()
lineDimsMap, err := queries.GetOrderLineDimsMap(mssql, orderHeaderID)
@@ -460,11 +665,15 @@ func buildTargetVariants(mssql *sql.DB, orderHeaderID string, lines []models.Ord
}
// buildMissingVariants resolves the target variants implied by the given order
// lines and filters them down to the ones not yet present in the database.
// It is a thin composition of buildTargetVariants and
// buildMissingVariantsFromTargets; timing is logged inside the latter.
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
	// NOTE: the previous `start := time.Now()` was removed — it was never read
	// after the timing log moved into buildMissingVariantsFromTargets, and an
	// unused local variable is a compile error in Go.
	targets, err := buildTargetVariants(mssql, orderHeaderID, lines)
	if err != nil {
		return nil, err
	}
	return buildMissingVariantsFromTargets(mssql, orderHeaderID, targets)
}
func buildMissingVariantsFromTargets(mssql *sql.DB, orderHeaderID string, targets []models.OrderProductionMissingVariant) ([]models.OrderProductionMissingVariant, error) {
start := time.Now()
missing := make([]models.OrderProductionMissingVariant, 0, len(targets))
existsCache := make(map[string]bool, len(targets))
@@ -491,11 +700,69 @@ func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.Or
}
}
log.Printf("[buildMissingVariants] orderHeaderID=%s lineCount=%d dimMapCount=%d missingCount=%d total_ms=%d",
orderHeaderID, len(lines), len(targets), len(missing), time.Since(start).Milliseconds())
log.Printf("[buildMissingVariants] orderHeaderID=%s targetCount=%d missingCount=%d total_ms=%d",
orderHeaderID, len(targets), len(missing), time.Since(start).Milliseconds())
return missing, nil
}
// runWithTransientMSSQLRetry executes fn and retries it — up to maxAttempts
// times total — when the failure looks like a transient MSSQL network error.
// The wait before retry n grows linearly (n * baseDelay). With maxAttempts <= 1
// fn runs exactly once with no retry machinery.
func runWithTransientMSSQLRetry(op string, maxAttempts int, baseDelay time.Duration, fn func() error) error {
	if maxAttempts <= 1 {
		return fn()
	}
	var lastErr error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		lastErr = fn()
		if lastErr == nil {
			return nil
		}
		// Stop immediately once attempts are exhausted or the error is not
		// a transient network failure worth retrying.
		if attempt == maxAttempts || !isTransientMSSQLNetworkErr(lastErr) {
			return lastErr
		}
		backoff := time.Duration(attempt) * baseDelay
		log.Printf("[MSSQLRetry] op=%s attempt=%d/%d wait_ms=%d err=%v",
			op, attempt, maxAttempts, backoff.Milliseconds(), lastErr)
		time.Sleep(backoff)
	}
	return lastErr
}
func isTransientMSSQLNetworkErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
needles := []string{
"wsarecv",
"read tcp",
"connection reset",
"connection refused",
"broken pipe",
"i/o timeout",
"timeout",
}
for _, needle := range needles {
if strings.Contains(msg, needle) {
return true
}
}
return false
}
func ensureTxAlive(tx *sql.Tx, where string) error {
if tx == nil {
return fmt.Errorf("tx is nil at %s", where)
}
var tranCount int
if err := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&tranCount); err != nil {
return fmt.Errorf("tx state query failed at %s: %w", where, err)
}
if tranCount <= 0 {
return fmt.Errorf("transaction no longer active at %s (trancount=%d)", where, tranCount)
}
return nil
}
func validateUpdateLines(lines []models.OrderProductionUpdateLine) error {
for _, line := range lines {
if strings.TrimSpace(line.OrderLineID) == "" {
@@ -512,6 +779,54 @@ func validateUpdateLines(lines []models.OrderProductionUpdateLine) error {
return nil
}
// splitLinesByCdItemDraft partitions lines into (newLines, existingLines):
// a line counts as "new" when its NewItemCode matches one of the draft
// cdItems' codes after trimming and upper-casing. Empty input yields
// (nil, nil); when no usable draft code exists, every line is "existing".
func splitLinesByCdItemDraft(lines []models.OrderProductionUpdateLine, cdItems []models.OrderProductionCdItemDraft) ([]models.OrderProductionUpdateLine, []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return nil, nil
	}
	draftCodes := make(map[string]struct{}, len(cdItems))
	for _, draft := range cdItems {
		if code := strings.ToUpper(strings.TrimSpace(draft.ItemCode)); code != "" {
			draftCodes[code] = struct{}{}
		}
	}
	if len(draftCodes) == 0 {
		// No new item codes declared: everything goes to the existing bucket.
		existingOnly := make([]models.OrderProductionUpdateLine, 0, len(lines))
		existingOnly = append(existingOnly, lines...)
		return nil, existingOnly
	}
	newLines := make([]models.OrderProductionUpdateLine, 0, len(lines))
	existingLines := make([]models.OrderProductionUpdateLine, 0, len(lines))
	for _, line := range lines {
		normalized := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
		if _, isNew := draftCodes[normalized]; isNew {
			newLines = append(newLines, line)
		} else {
			existingLines = append(existingLines, line)
		}
	}
	return newLines, existingLines
}
// uniqueCodesFromLines collects the distinct NewItemCode values from lines,
// normalized by trimming and upper-casing, in first-seen order; blank codes
// are skipped.
func uniqueCodesFromLines(lines []models.OrderProductionUpdateLine) []string {
	seen := make(map[string]struct{}, len(lines))
	codes := make([]string, 0, len(lines))
	for _, line := range lines {
		normalized := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
		if normalized == "" {
			continue
		}
		if _, dup := seen[normalized]; !dup {
			seen[normalized] = struct{}{}
			codes = append(codes, normalized)
		}
	}
	return codes
}
func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID string, username string, lineCount int, err error) {
var sqlErr mssql.Error
if errors.As(err, &sqlErr) {
@@ -554,7 +869,7 @@ func sendProductionUpdateMails(db *sql.DB, ml *mailer.GraphMailer, orderHeaderID
subject := fmt.Sprintf("%s tarafından %s Nolu Sipariş Güncellendi (Üretim)", actor, orderNo)
var body strings.Builder
body.WriteString("<html><body>")
body.WriteString("<html><head><meta charset='utf-8'></head><body>")
body.WriteString(fmt.Sprintf("<p><b>Sipariş No:</b> %s</p>", orderNo))
body.WriteString(fmt.Sprintf("<p><b>Cari:</b> %s</p>", currAccCode))
body.WriteString(fmt.Sprintf("<p><b>Piyasa:</b> %s (%s)</p>", marketTitle, marketCode))

View File

@@ -3,24 +3,124 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/queries"
"context"
"encoding/json"
"errors"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GET /api/pricing/products
// GetProductPricingListHandler serves GET /api/pricing/products: a keyset-
// paginated product pricing list. Query params: limit (1..10000, default 500)
// and after_product_code (cursor = last ProductCode of the previous page).
// Pagination state is reported via the X-Has-More / X-Next-Cursor headers.
// NOTE: the stale call `rows, err := queries.GetProductPricingList()` (left
// over from a merge) was removed — it redeclared rows/err and used the
// pre-pagination signature, which does not compile.
func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
	started := time.Now()
	traceID := buildPricingTraceID(r)
	w.Header().Set("X-Trace-ID", traceID)
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		log.Printf("[ProductPricing] trace=%s unauthorized method=%s path=%s", traceID, r.Method, r.URL.Path)
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)
	// Cloudflare upstream timeout is lower than 180s; fail fast and return API 504 instead of CDN 524.
	ctx, cancel := context.WithTimeout(r.Context(), 110*time.Second)
	defer cancel()
	limit := 500
	if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 10000 {
			limit = parsed
		}
	}
	afterProductCode := strings.TrimSpace(r.URL.Query().Get("after_product_code"))
	// Fetch one extra row beyond the limit so we can tell whether another page exists.
	rows, err := queries.GetProductPricingList(ctx, limit+1, afterProductCode)
	if err != nil {
		if isPricingTimeoutLike(err, ctx.Err()) {
			log.Printf(
				"[ProductPricing] trace=%s timeout user=%s id=%d duration_ms=%d err=%v",
				traceID,
				claims.Username,
				claims.ID,
				time.Since(started).Milliseconds(),
				err,
			)
			http.Error(w, "Urun fiyatlandirma listesi zaman asimina ugradi", http.StatusGatewayTimeout)
			return
		}
		log.Printf(
			"[ProductPricing] trace=%s query_error user=%s id=%d duration_ms=%d err=%v",
			traceID,
			claims.Username,
			claims.ID,
			time.Since(started).Milliseconds(),
			err,
		)
		http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	hasMore := len(rows) > limit
	if hasMore {
		// Drop the sentinel row; it belongs to the next page.
		rows = rows[:limit]
	}
	nextCursor := ""
	if hasMore && len(rows) > 0 {
		nextCursor = strings.TrimSpace(rows[len(rows)-1].ProductCode)
	}
	log.Printf(
		"[ProductPricing] trace=%s success user=%s id=%d limit=%d after=%q count=%d has_more=%t next=%q duration_ms=%d",
		traceID,
		claims.Username,
		claims.ID,
		limit,
		afterProductCode,
		len(rows),
		hasMore,
		nextCursor,
		time.Since(started).Milliseconds(),
	)
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	if hasMore {
		w.Header().Set("X-Has-More", "true")
	} else {
		w.Header().Set("X-Has-More", "false")
	}
	if nextCursor != "" {
		w.Header().Set("X-Next-Cursor", nextCursor)
	}
	_ = json.NewEncoder(w).Encode(rows)
}
func buildPricingTraceID(r *http.Request) string {
if r != nil {
if id := strings.TrimSpace(r.Header.Get("X-Request-ID")); id != "" {
return id
}
if id := strings.TrimSpace(r.Header.Get("X-Correlation-ID")); id != "" {
return id
}
}
return "pricing-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
func isPricingTimeoutLike(err error, ctxErr error) bool {
if errors.Is(err, context.DeadlineExceeded) || errors.Is(ctxErr, context.DeadlineExceeded) {
return true
}
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "timeout") ||
strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "failed to respond")
}

View File

@@ -2,6 +2,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
@@ -22,7 +23,7 @@ func GetStatementHeadersHandler(w http.ResponseWriter, r *http.Request) {
StartDate: r.URL.Query().Get("startdate"),
EndDate: r.URL.Query().Get("enddate"),
AccountCode: r.URL.Query().Get("accountcode"),
LangCode: r.URL.Query().Get("langcode"),
LangCode: i18n.ResolveLangCode(r.URL.Query().Get("langcode"), r.Header.Get("Accept-Language")),
Parislemler: r.URL.Query()["parislemler"],
ExcludeOpening: false,
}

View File

@@ -2,6 +2,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -40,9 +41,18 @@ const (
)
// Kolonlar
var hMainCols = []string{
"Belge No", "Tarih", "Vade", "İşlem",
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
// hMainCols returns the localized column headers of the statement-header
// report's main table, in render order.
func hMainCols(lang string) []string {
	keys := [...]string{
		"pdf.main.doc_no",
		"pdf.main.date",
		"pdf.main.due_date",
		"pdf.main.operation",
		"pdf.main.description",
		"pdf.main.currency",
		"pdf.main.debit",
		"pdf.main.credit",
		"pdf.main.balance",
	}
	cols := make([]string, 0, len(keys))
	for _, key := range keys {
		cols = append(cols, i18n.T(lang, key))
	}
	return cols
}
var hMainWbase = []float64{
@@ -136,7 +146,7 @@ func hCalcRowHeightForText(pdf *gofpdf.Fpdf, text string, colWidth, lineHeight,
/* ============================ HEADER ============================ */
func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
func hDrawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
}
@@ -149,13 +159,13 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6
@@ -163,11 +173,11 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi
y := boxY + 13
@@ -180,7 +190,7 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
/* ============================ TABLO ============================ */
func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
func hDrawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
x := hMarginL
y := pdf.GetY()
w := hPageWidth - hMarginL - hMarginR
@@ -194,9 +204,9 @@ func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2])
pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, currency, "", 0, "L", false, 0, "")
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + hFormatCurrencyTR(sonBakiye)
txt := i18n.T(lang, "pdf.ending_balance") + " = " + hFormatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -282,6 +292,10 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
rawParis := r.URL.Query()["parislemler"]
var parislemler []string
@@ -292,7 +306,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
}
}
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, parislemler)
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -348,7 +362,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
newPage := func() {
pageNum++
pdf.AddPage()
tableTop := hDrawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
tableTop := hDrawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
pdf.SetY(tableTop)
}
@@ -356,8 +370,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
for _, cur := range order {
g := groups[cur]
hDrawGroupBar(pdf, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
rowIndex := 0
for _, h := range g.rows {
@@ -372,8 +386,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX)
if hNeedNewPage(pdf, rh+hHeaderRowH) {
newPage()
hDrawGroupBar(pdf, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
}
hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex)

View File

@@ -3,6 +3,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -48,10 +49,18 @@ const (
logoW = 42.0
)
// Ana tablo kolonları
var mainCols = []string{
"Belge No", "Tarih", "Vade", "İşlem",
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
// mainCols returns the localized column headers of the statement PDF's main
// table, in render order.
func mainCols(lang string) []string {
	keys := [...]string{
		"pdf.main.doc_no",
		"pdf.main.date",
		"pdf.main.due_date",
		"pdf.main.operation",
		"pdf.main.description",
		"pdf.main.currency",
		"pdf.main.debit",
		"pdf.main.credit",
		"pdf.main.balance",
	}
	cols := make([]string, 0, len(keys))
	for _, key := range keys {
		cols = append(cols, i18n.T(lang, key))
	}
	return cols
}
// Ana tablo kolon genişlikleri (ilk 3 geniş)
@@ -68,10 +77,21 @@ var mainWbase = []float64{
}
// Detay tablo kolonları ve genişlikleri
var dCols = []string{
"Ana Grup", "Alt Grup", "Garson", "Fit", "İçerik",
"Ürün", "Renk", "Adet", "Fiyat", "Tutar",
// detailCols returns the localized column headers of the statement PDF's
// order-detail table, in render order.
func detailCols(lang string) []string {
	keys := [...]string{
		"pdf.detail.main_group",
		"pdf.detail.sub_group",
		"pdf.detail.waiter",
		"pdf.detail.fit",
		"pdf.detail.content",
		"pdf.detail.product",
		"pdf.detail.color",
		"pdf.detail.qty",
		"pdf.detail.price",
		"pdf.detail.total",
	}
	cols := make([]string, 0, len(keys))
	for _, key := range keys {
		cols = append(cols, i18n.T(lang, key))
	}
	return cols
}
var dWbase = []float64{
30, 28, 22, 20, 56, 30, 22, 20, 20, 26}
@@ -224,7 +244,7 @@ func drawLabeledBox(pdf *gofpdf.Fpdf, x, y, w, h float64, label, value string, a
}
}
func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
func drawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
}
@@ -237,13 +257,13 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6
@@ -251,11 +271,11 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi
y := boxY + 13
@@ -268,7 +288,7 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
/* ============================ GROUP BAR ============================ */
func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
func drawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
// Kutu alanı (tam genişlik)
x := marginL
y := pdf.GetY()
@@ -285,9 +305,9 @@ func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2])
pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s", currency), "", 0, "L", false, 0, "")
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + formatCurrencyTR(sonBakiye)
txt := i18n.T(lang, "pdf.ending_balance") + " = " + formatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -430,6 +450,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
// parislemler sanitize
rawParis := r.URL.Query()["parislemler"]
@@ -445,7 +469,7 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode, startDate, endDate, parislemler)
// 1) Header verileri
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, parislemler)
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -520,12 +544,12 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
pdf.AddPage()
// drawPageHeader tablo başlangıç yüksekliğini döndürüyor
tableTop := drawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
tableTop := drawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
// Sayfa numarası
pdf.SetFont(fontFamilyReg, "", 6)
pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3)
pdf.CellFormat(28, 5, fmt.Sprintf("Sayfa %d", pageNum), "", 0, "R", false, 0, "")
pdf.CellFormat(28, 5, fmt.Sprintf("%s %d", i18n.T(langCode, "pdf.page"), pageNum), "", 0, "R", false, 0, "")
// Tablo Y konumunu ayarla
pdf.SetY(tableTop)
@@ -540,8 +564,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, groupBarH+headerRowH) {
newPage()
}
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
for _, h := range g.rows {
row := []string{
@@ -557,8 +581,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh+headerRowH) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
}
drawMainDataRow(pdf, row, mainWn, rh)
@@ -567,10 +591,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if len(details) > 0 {
if needNewPage(pdf, subHeaderRowH) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
}
drawDetailHeaderRow(pdf, dCols, dWn)
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
for i, d := range details {
drow := []string{
@@ -591,9 +615,9 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh2) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawDetailHeaderRow(pdf, dCols, dWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
}
// zebra: çift indekslerde açık zemin
fill := (i%2 == 0)

View File

@@ -0,0 +1,41 @@
package routes
import (
"database/sql"
"log"
"strings"
)
// EnsureTranslationPerfIndexes creates helpful indexes for translation listing/search.
// It is safe to run on each startup; failures are logged and do not stop the service.
func EnsureTranslationPerfIndexes(db *sql.DB) {
	if db == nil {
		return
	}
	// All statements are idempotent (IF NOT EXISTS), so re-running on every
	// boot is harmless; each one is executed best-effort and independently.
	for _, ddl := range []string{
		`CREATE EXTENSION IF NOT EXISTS pg_trgm`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_t_key_lang ON mk_translator (t_key, lang_code)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_status_lang_updated ON mk_translator (status, lang_code, updated_at DESC)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_manual_status ON mk_translator (is_manual, status)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_type_expr ON mk_translator ((COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy')))`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_text_trgm ON mk_translator USING gin (source_text_tr gin_trgm_ops)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_translated_text_trgm ON mk_translator USING gin (translated_text gin_trgm_ops)`,
	} {
		if _, execErr := db.Exec(ddl); execErr != nil {
			// A failed index (e.g. missing pg_trgm) is a warning, not fatal.
			log.Printf("[TranslationPerf] index_setup_warn sql=%q err=%v", summarizeSQL(ddl), execErr)
			continue
		}
		log.Printf("[TranslationPerf] index_ready sql=%q", summarizeSQL(ddl))
	}
}
func summarizeSQL(sqlText string) string {
s := strings.TrimSpace(sqlText)
if len(s) <= 100 {
return s
}
return s[:100] + "..."
}

1911
svc/routes/translations.go Normal file
View File

@@ -0,0 +1,1911 @@
package routes
import (
"bssapp-backend/models"
"bytes"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"io"
"io/fs"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"sync/atomic"
"time"
"github.com/gorilla/mux"
"github.com/lib/pq"
)
// Allowed language codes for translation rows; anything outside this set is
// rejected by the normalize* helpers.
var translationLangSet = map[string]struct{}{
	"tr": {},
	"en": {},
	"de": {},
	"it": {},
	"es": {},
	"ru": {},
	"ar": {},
}

// Valid workflow states of a translation row.
var translationStatusSet = map[string]struct{}{
	"pending":  {},
	"approved": {},
	"rejected": {},
}

// Recognized seed origins, persisted in provider_meta->>'source_type'.
var translationSourceTypeSet = map[string]struct{}{
	"dummy":   {},
	"postgre": {},
	"mssql":   {},
}

// Regexes used when mining UI sources for translatable text.
var (
	// At least one Latin/Turkish letter must be present for a candidate text.
	reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
	// Obvious non-copy tokens (HTTP verbs, JS literals).
	reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
	// Characters that are stripped/forbidden in generated t_keys.
	reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
	// Section extraction from .vue single-file components.
	reVueTemplate = regexp.MustCompile(`(?is)<template[^>]*>(.*?)</template>`)
	reVueScript   = regexp.MustCompile(`(?is)<script[^>]*>(.*?)</script>`)
	// Visible attribute values (label="...", title="...", etc.).
	reTemplateAttr = regexp.MustCompile(`\b(?:label|title|placeholder|aria-label|hint)\s*=\s*['"]([^'"]{2,180})['"]`)
	// Bare text between tags (3-180 chars).
	reTemplateText = regexp.MustCompile(`>([^<]{3,180})<`)
	// UI-facing string props in script blocks.
	reScriptLabelProp = regexp.MustCompile(`\blabel\s*:\s*['"]([^'"]{2,180})['"]`)
	reScriptUIProp    = regexp.MustCompile(`\b(?:label|message|title|placeholder|hint)\s*:\s*['"]([^'"]{2,180})['"]`)
	// Mustache interpolation markers ({{ or }}) disqualify a text.
	reTemplateDynamic = regexp.MustCompile(`[{][{]|[}][}]`)
	// Heuristic markers that a string is code, not human-readable copy.
	reCodeLikeText = regexp.MustCompile(`(?i)(\bconst\b|\blet\b|\bvar\b|\breturn\b|\bfunction\b|=>|\|\||&&|\?\?|//|/\*|\*/|\.trim\(|\.replace\(|\.map\(|\.filter\()`)
)

// Framework/attribute words; a text made only of these (plus Vue binding
// prefixes) is treated as attribute noise by extractVisibleUIText.
var translationNoiseTokens = map[string]struct{}{
	"flat":        {},
	"dense":       {},
	"filled":      {},
	"outlined":    {},
	"borderless":  {},
	"clearable":   {},
	"loading":     {},
	"disable":     {},
	"readonly":    {},
	"hide-bottom": {},
	"stack-label": {},
	"emit-value":  {},
	"map-options": {},
	"use-input":   {},
	"multiple":    {},
	"options":     {},
	"rows":        {},
	"cols":        {},
	"class":       {},
	"style":       {},
}

// Directories (relative to ui/src) scanned for "dummy" seeds.
var translationDummyAllowedVueDirs = []string{
	"pages/",
	"components/",
	"layouts/",
}
var translationDummyAllowedStoreDirs = []string{
	"stores/",
}

// TranslationUpdatePayload is the PATCH body for a single row; nil pointer
// fields mean "leave the stored value unchanged".
type TranslationUpdatePayload struct {
	SourceTextTR   *string `json:"source_text_tr"`
	TranslatedText *string `json:"translated_text"`
	SourceType     *string `json:"source_type"`
	IsManual       *bool   `json:"is_manual"`
	Status         *string `json:"status"`
}

// UpsertMissingPayload carries missing-key seeds plus target languages.
type UpsertMissingPayload struct {
	Items     []UpsertMissingItem `json:"items"`
	Languages []string            `json:"languages"`
}

// UpsertMissingItem pairs a translation key with its Turkish source text.
type UpsertMissingItem struct {
	TKey         string `json:"t_key"`
	SourceTextTR string `json:"source_text_tr"`
}

// SyncSourcesPayload is the request body of the source-sync endpoint.
// OnlyNew is a pointer so that an omitted field can default to true.
type SyncSourcesPayload struct {
	AutoTranslate bool     `json:"auto_translate"`
	Languages     []string `json:"languages"`
	Limit         int      `json:"limit"`
	OnlyNew       *bool    `json:"only_new"`
}

// BulkApprovePayload lists row ids to approve in one statement.
type BulkApprovePayload struct {
	IDs []int64 `json:"ids"`
}

// BulkUpdatePayload carries per-row partial updates.
type BulkUpdatePayload struct {
	Items []BulkUpdateItem `json:"items"`
}

// TranslateSelectedPayload requests machine translation for specific keys.
type TranslateSelectedPayload struct {
	TKeys     []string `json:"t_keys"`
	Languages []string `json:"languages"`
	Limit     int      `json:"limit"`
}

// BulkUpdateItem mirrors TranslationUpdatePayload but addresses a row by id.
type BulkUpdateItem struct {
	ID             int64   `json:"id"`
	SourceTextTR   *string `json:"source_text_tr"`
	TranslatedText *string `json:"translated_text"`
	SourceType     *string `json:"source_type"`
	IsManual       *bool   `json:"is_manual"`
	Status         *string `json:"status"`
}

// TranslationSyncOptions configures PerformTranslationSync.
type TranslationSyncOptions struct {
	AutoTranslate bool
	Languages     []string
	Limit         int
	OnlyNew       bool
	TraceID       string
}

// TranslationSyncResult summarizes one sync run for the API response/logs.
type TranslationSyncResult struct {
	SeedCount      int      `json:"seed_count"`
	AffectedCount  int      `json:"affected_count"`
	AutoTranslated int      `json:"auto_translated"`
	TargetLangs    []string `json:"target_languages"`
	TraceID        string   `json:"trace_id"`
	DurationMS     int64    `json:"duration_ms"`
}

// sourceSeed is one candidate text mined from a source (db schema or UI file).
type sourceSeed struct {
	TKey       string
	SourceText string
	SourceType string
}
// GetTranslationRowsHandler lists mk_translator rows as JSON, filtered by the
// query string:
//   q           - ILIKE substring match on source or translated text
//   lang        - language code (validated by normalizeTranslationLang)
//   status      - pending/approved/rejected
//   source_type - dummy/postgre/mssql (read from provider_meta->>'source_type')
//   manual      - "true"/"false" filter on is_manual
//   missing     - "true" selects rows with empty/NULL translated_text
//   limit/offset - optional paging (limit capped at 50000, offset at 1000000)
func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		q := strings.TrimSpace(r.URL.Query().Get("q"))
		lang := normalizeTranslationLang(r.URL.Query().Get("lang"))
		status := normalizeTranslationStatus(r.URL.Query().Get("status"))
		sourceType := normalizeTranslationSourceType(r.URL.Query().Get("source_type"))
		manualFilter := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("manual")))
		missingOnly := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("missing"))) == "true"
		// limit == 0 means "no LIMIT clause"; invalid or oversized values are
		// silently ignored rather than rejected.
		limit := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 50000 {
				limit = parsed
			}
		}
		offset := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("offset")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 1000000 {
				offset = parsed
			}
		}
		// WHERE is built from numbered placeholders; argIndex tracks the next
		// $n so filters can be combined in any order. "1=1" keeps the join of
		// clauses valid when no filter is active.
		clauses := []string{"1=1"}
		args := make([]any, 0, 8)
		argIndex := 1
		if q != "" {
			// One placeholder reused for both columns (same search pattern).
			clauses = append(clauses, fmt.Sprintf("(source_text_tr ILIKE $%d OR translated_text ILIKE $%d)", argIndex, argIndex))
			args = append(args, "%"+q+"%")
			argIndex++
		}
		if lang != "" {
			clauses = append(clauses, fmt.Sprintf("lang_code = $%d", argIndex))
			args = append(args, lang)
			argIndex++
		}
		if status != "" {
			clauses = append(clauses, fmt.Sprintf("status = $%d", argIndex))
			args = append(args, status)
			argIndex++
		}
		if sourceType != "" {
			// Rows without an explicit source_type default to 'dummy'.
			clauses = append(clauses, fmt.Sprintf("COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy') = $%d", argIndex))
			args = append(args, sourceType)
			argIndex++
		}
		switch manualFilter {
		case "true":
			clauses = append(clauses, "is_manual = true")
		case "false":
			clauses = append(clauses, "is_manual = false")
		}
		if missingOnly {
			clauses = append(clauses, "(translated_text IS NULL OR btrim(translated_text) = '')")
		}
		query := fmt.Sprintf(`
		SELECT
			id,
			t_key,
			lang_code,
			COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
			source_text_tr,
			COALESCE(translated_text, '') AS translated_text,
			is_manual,
			status,
			COALESCE(provider, '') AS provider,
			updated_at
		FROM mk_translator
		WHERE %s
		ORDER BY t_key, lang_code
		`, strings.Join(clauses, " AND "))
		// The template ends with a newline, so appending LIMIT/OFFSET without a
		// leading separator is safe.
		if limit > 0 {
			query += fmt.Sprintf("LIMIT $%d", argIndex)
			args = append(args, limit)
			argIndex++
		}
		if offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIndex)
			args = append(args, offset)
		}
		rows, err := db.Query(query, args...)
		if err != nil {
			http.Error(w, "translation query error", http.StatusInternalServerError)
			return
		}
		defer rows.Close()
		list := make([]models.TranslatorRow, 0, 1024)
		for rows.Next() {
			var row models.TranslatorRow
			if err := rows.Scan(
				&row.ID,
				&row.TKey,
				&row.LangCode,
				&row.SourceType,
				&row.SourceTextTR,
				&row.TranslatedText,
				&row.IsManual,
				&row.Status,
				&row.Provider,
				&row.UpdatedAt,
			); err != nil {
				http.Error(w, "translation scan error", http.StatusInternalServerError)
				return
			}
			list = append(list, row)
		}
		if err := rows.Err(); err != nil {
			http.Error(w, "translation rows error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"rows":  list,
			"count": len(list),
		})
	}
}
// UpdateTranslationRowHandler updates a single mk_translator row addressed by
// the {id} path variable. Omitted (nil) payload fields keep the stored value
// via COALESCE; status and source_type are validated first, and source_type is
// written into provider_meta->'source_type' rather than a column. The updated
// row is returned as JSON; an unknown id yields 404.
func UpdateTranslationRowHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		id, err := strconv.ParseInt(strings.TrimSpace(mux.Vars(r)["id"]), 10, 64)
		if err != nil || id <= 0 {
			http.Error(w, "invalid row id", http.StatusBadRequest)
			return
		}
		var payload TranslationUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		// Replace provided status/source_type with their normalized form; an
		// unrecognized value is a client error, not a silent no-op.
		if payload.Status != nil {
			normalized := normalizeTranslationStatus(*payload.Status)
			if normalized == "" {
				http.Error(w, "invalid status", http.StatusBadRequest)
				return
			}
			payload.Status = &normalized
		}
		if payload.SourceType != nil {
			normalized := normalizeTranslationSourceType(*payload.SourceType)
			if normalized == "" {
				http.Error(w, "invalid source_type", http.StatusBadRequest)
				return
			}
			payload.SourceType = &normalized
		}
		// $6 (source_type) only touches provider_meta when non-NULL; RETURNING
		// avoids a follow-up SELECT for the response body.
		updateQuery := `
		UPDATE mk_translator
		SET
			source_text_tr = COALESCE($2, source_text_tr),
			translated_text = COALESCE($3, translated_text),
			is_manual = COALESCE($4, is_manual),
			status = COALESCE($5, status),
			provider_meta = CASE
				WHEN $6::text IS NULL THEN provider_meta
				ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
			END,
			updated_at = NOW()
		WHERE id = $1
		RETURNING
			id,
			t_key,
			lang_code,
			COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
			source_text_tr,
			COALESCE(translated_text, '') AS translated_text,
			is_manual,
			status,
			COALESCE(provider, '') AS provider,
			updated_at
		`
		var row models.TranslatorRow
		err = db.QueryRow(
			updateQuery,
			id,
			nullableString(payload.SourceTextTR),
			nullableString(payload.TranslatedText),
			payload.IsManual,
			payload.Status,
			nullableString(payload.SourceType),
		).Scan(
			&row.ID,
			&row.TKey,
			&row.LangCode,
			&row.SourceType,
			&row.SourceTextTR,
			&row.TranslatedText,
			&row.IsManual,
			&row.Status,
			&row.Provider,
			&row.UpdatedAt,
		)
		if err == sql.ErrNoRows {
			http.Error(w, "translation row not found", http.StatusNotFound)
			return
		}
		if err != nil {
			http.Error(w, "translation update error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(row)
	}
}
// UpsertMissingTranslationsHandler inserts or refreshes the supplied missing
// translation items (forced to source_type "dummy") for the requested target
// languages, replying with how many rows were touched.
func UpsertMissingTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		payload := UpsertMissingPayload{}
		if decodeErr := json.NewDecoder(r.Body).Decode(&payload); decodeErr != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}

		cleanItems := normalizeMissingItems(payload.Items)
		if len(cleanItems) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}

		targetLangs := normalizeTargetLanguages(payload.Languages)
		affectedRows, upsertErr := upsertMissingRows(db, cleanItems, targetLangs, "dummy")
		if upsertErr != nil {
			http.Error(w, "upsert missing error", http.StatusInternalServerError)
			return
		}

		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"items":          len(cleanItems),
			"target_langs":   targetLangs,
			"affected_count": affectedRows,
		})
	}
}
// SyncTranslationSourcesHandler triggers PerformTranslationSync from an HTTP
// request and echoes the result plus a trace id (also set as X-Trace-ID).
func SyncTranslationSourcesHandler(pgDB *sql.DB, mssqlDB *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var payload SyncSourcesPayload
		// Decode errors are deliberately ignored: an empty/absent body runs a
		// sync with all defaults.
		_ = json.NewDecoder(r.Body).Decode(&payload)
		traceID := requestTraceID(r)
		w.Header().Set("X-Trace-ID", traceID)
		start := time.Now()
		// OnlyNew defaults to true when the field is omitted (nil pointer).
		onlyNew := payload.OnlyNew == nil || *payload.OnlyNew
		log.Printf(
			"[TranslationSync] trace=%s stage=request auto_translate=%t only_new=%t limit=%d langs=%v",
			traceID,
			payload.AutoTranslate,
			onlyNew,
			payload.Limit,
			payload.Languages,
		)
		result, err := PerformTranslationSync(pgDB, mssqlDB, TranslationSyncOptions{
			AutoTranslate: payload.AutoTranslate,
			Languages:     payload.Languages,
			Limit:         payload.Limit,
			OnlyNew:       onlyNew,
			TraceID:       traceID,
		})
		if err != nil {
			log.Printf(
				"[TranslationSync] trace=%s stage=error duration_ms=%d err=%v",
				traceID,
				time.Since(start).Milliseconds(),
				err,
			)
			http.Error(w, "translation source sync error", http.StatusInternalServerError)
			return
		}
		log.Printf(
			"[TranslationSync] trace=%s stage=response duration_ms=%d seeds=%d affected=%d auto_translated=%d",
			traceID,
			time.Since(start).Milliseconds(),
			result.SeedCount,
			result.AffectedCount,
			result.AutoTranslated,
		)
		// Top-level copies of the counters are kept alongside "result" for
		// backward compatibility with existing consumers.
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         traceID,
			"result":           result,
			"seed_count":       result.SeedCount,
			"affected_count":   result.AffectedCount,
			"auto_translated":  result.AutoTranslated,
			"target_languages": result.TargetLangs,
		})
	}
}
// TranslateSelectedTranslationsHandler machine-translates the pending rows of
// an explicit set of translation keys. The effective limit defaults to
// keys x languages, floors at 1000 and caps at 50000.
func TranslateSelectedTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var payload TranslateSelectedPayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		// At most 5000 distinct keys are accepted per request.
		keys := normalizeStringList(payload.TKeys, 5000)
		if len(keys) == 0 {
			http.Error(w, "t_keys required", http.StatusBadRequest)
			return
		}
		targetLangs := normalizeTargetLanguages(payload.Languages)
		limit := payload.Limit
		if limit <= 0 {
			limit = len(keys) * len(targetLangs)
		}
		// Still zero when targetLangs is empty; fall back to a fixed budget.
		if limit <= 0 {
			limit = 1000
		}
		if limit > 50000 {
			limit = 50000
		}
		traceID := requestTraceID(r)
		w.Header().Set("X-Trace-ID", traceID)
		start := time.Now()
		log.Printf(
			"[TranslationSelected] trace=%s stage=request keys=%d limit=%d langs=%v",
			traceID,
			len(keys),
			limit,
			targetLangs,
		)
		translatedCount, err := autoTranslatePendingRowsForKeys(db, targetLangs, limit, keys, traceID)
		if err != nil {
			log.Printf(
				"[TranslationSelected] trace=%s stage=error duration_ms=%d err=%v",
				traceID,
				time.Since(start).Milliseconds(),
				err,
			)
			http.Error(w, "translate selected error", http.StatusInternalServerError)
			return
		}
		log.Printf(
			"[TranslationSelected] trace=%s stage=done duration_ms=%d translated=%d",
			traceID,
			time.Since(start).Milliseconds(),
			translatedCount,
		)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         traceID,
			"translated_count": translatedCount,
			"key_count":        len(keys),
			"target_languages": targetLangs,
			"duration_ms":      time.Since(start).Milliseconds(),
		})
	}
}
// BulkApproveTranslationsHandler approves the given row ids in a single
// UPDATE: status becomes 'approved', is_manual is set, and the provider_meta
// 'is_new' flag is cleared. Responds with the number of affected rows.
func BulkApproveTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		payload := BulkApprovePayload{}
		if decodeErr := json.NewDecoder(r.Body).Decode(&payload); decodeErr != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}

		rowIDs := normalizeIDListInt64(payload.IDs)
		if len(rowIDs) == 0 {
			http.Error(w, "ids required", http.StatusBadRequest)
			return
		}

		result, execErr := db.Exec(`
		UPDATE mk_translator
		SET
			status = 'approved',
			is_manual = true,
			updated_at = NOW(),
			provider_meta = jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{is_new}', 'false'::jsonb, true)
		WHERE id = ANY($1)
		`, pq.Array(rowIDs))
		if execErr != nil {
			http.Error(w, "bulk approve error", http.StatusInternalServerError)
			return
		}

		rowCount, _ := result.RowsAffected()
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": rowCount,
		})
	}
}
// BulkUpdateTranslationsHandler applies per-row partial updates inside one
// transaction. Items with non-positive ids are skipped; nil fields keep the
// stored value (COALESCE). On any statement error the handler returns 500 and
// the deferred Rollback discards all changes made so far.
func BulkUpdateTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		var payload BulkUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		if len(payload.Items) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}
		tx, err := db.Begin()
		if err != nil {
			http.Error(w, "transaction start error", http.StatusInternalServerError)
			return
		}
		// No-op after a successful Commit; undoes everything on early return.
		defer tx.Rollback()
		affected := 0
		for _, it := range payload.Items {
			if it.ID <= 0 {
				continue
			}
			// normalizeOptional* pass nil through and validate set values.
			status := normalizeOptionalStatus(it.Status)
			sourceType := normalizeOptionalSourceType(it.SourceType)
			res, err := tx.Exec(`
			UPDATE mk_translator
			SET
				source_text_tr = COALESCE($2, source_text_tr),
				translated_text = COALESCE($3, translated_text),
				is_manual = COALESCE($4, is_manual),
				status = COALESCE($5, status),
				provider_meta = CASE
					WHEN $6::text IS NULL THEN provider_meta
					ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
				END,
				updated_at = NOW()
			WHERE id = $1
			`, it.ID, nullableString(it.SourceTextTR), nullableString(it.TranslatedText), it.IsManual, status, sourceType)
			if err != nil {
				http.Error(w, "bulk update error", http.StatusInternalServerError)
				return
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
		if err := tx.Commit(); err != nil {
			http.Error(w, "transaction commit error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": affected,
		})
	}
}
// PerformTranslationSync runs one full seed-sync pass: collect candidate
// texts (Postgres/MSSQL schema metadata + UI dummy strings), optionally keep
// only texts not yet stored, upsert the survivors into mk_translator for the
// target languages, and optionally machine-translate the pending rows.
// Every stage is logged under the same trace id for correlation.
func PerformTranslationSync(pgDB *sql.DB, mssqlDB *sql.DB, options TranslationSyncOptions) (TranslationSyncResult, error) {
	traceID := strings.TrimSpace(options.TraceID)
	if traceID == "" {
		// Fallback trace id: nanosecond timestamp encoded in base36.
		traceID = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	start := time.Now()
	// Clamp the working budget to a sane default when unset or excessive.
	limit := options.Limit
	if limit <= 0 || limit > 100000 {
		limit = 20000
	}
	targetLangs := normalizeTargetLanguages(options.Languages)
	log.Printf(
		"[TranslationSync] trace=%s stage=start auto_translate=%t only_new=%t limit=%d langs=%v",
		traceID,
		options.AutoTranslate,
		options.OnlyNew,
		limit,
		targetLangs,
	)
	collectStart := time.Now()
	seeds := collectSourceSeeds(pgDB, mssqlDB, limit)
	// reuseExistingSeedKeys presumably remaps seeds whose text already exists
	// under a stored key (reusedByText counts those) — helper not visible
	// here; confirm against its definition.
	seeds, reusedByText := reuseExistingSeedKeys(pgDB, seeds)
	log.Printf(
		"[TranslationSync] trace=%s stage=collect done_ms=%d total=%d reused_by_text=%d sources=%s",
		traceID,
		time.Since(collectStart).Milliseconds(),
		len(seeds),
		reusedByText,
		formatSourceCounts(countSeedsBySource(seeds)),
	)
	if options.OnlyNew {
		before := len(seeds)
		filterStart := time.Now()
		seeds = filterNewSeeds(pgDB, seeds)
		log.Printf(
			"[TranslationSync] trace=%s stage=filter_only_new done_ms=%d before=%d after=%d skipped=%d",
			traceID,
			time.Since(filterStart).Milliseconds(),
			before,
			len(seeds),
			before-len(seeds),
		)
	}
	// Nothing left to write: return an empty but successful result.
	if len(seeds) == 0 {
		return TranslationSyncResult{
			TargetLangs: targetLangs,
			TraceID:     traceID,
			DurationMS:  time.Since(start).Milliseconds(),
		}, nil
	}
	upsertStart := time.Now()
	affected, err := upsertSourceSeeds(pgDB, seeds, targetLangs)
	if err != nil {
		return TranslationSyncResult{}, err
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=upsert done_ms=%d affected=%d",
		traceID,
		time.Since(upsertStart).Milliseconds(),
		affected,
	)
	autoTranslated := 0
	if options.AutoTranslate {
		autoStart := time.Now()
		var autoErr error
		// Auto-translate failures are logged but do not fail the sync run.
		autoTranslated, autoErr = autoTranslatePendingRowsForKeys(pgDB, targetLangs, limit, uniqueSeedKeys(seeds), traceID)
		if autoErr != nil {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d err=%v",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
				autoErr,
			)
		} else {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
			)
		}
	}
	result := TranslationSyncResult{
		SeedCount:      len(seeds),
		AffectedCount:  affected,
		AutoTranslated: autoTranslated,
		TargetLangs:    targetLangs,
		TraceID:        traceID,
		DurationMS:     time.Since(start).Milliseconds(),
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=done duration_ms=%d seeds=%d affected=%d auto_translated=%d",
		traceID,
		result.DurationMS,
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
	)
	return result, nil
}
// upsertMissingRows writes one 'tr' base row (translated_text = source text,
// pre-approved, provider 'seed') plus one pending row per target language for
// each item, all in a single transaction. Existing rows get their source text
// and provider_meta source_type refreshed. Returns the summed RowsAffected.
func upsertMissingRows(db *sql.DB, items []UpsertMissingItem, languages []string, forcedSourceType string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// Discards everything if we return before Commit.
	defer tx.Rollback()
	affected := 0
	for _, it := range items {
		sourceType := forcedSourceType
		if sourceType == "" {
			sourceType = "dummy"
		}
		// Turkish base row: already "translated" (text is the source itself).
		res, err := tx.Exec(`
		INSERT INTO mk_translator
			(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
		VALUES
			($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text))
		ON CONFLICT (t_key, lang_code) DO UPDATE
		SET
			source_text_tr = EXCLUDED.source_text_tr,
			provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($3::text), true),
			updated_at = NOW()
		`, it.TKey, it.SourceTextTR, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}
		// One pending, untranslated row per requested target language.
		for _, lang := range languages {
			res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($4::text), true),
				updated_at = NOW()
			`, it.TKey, lang, it.SourceTextTR, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}
	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
// upsertSourceSeeds persists collected seeds transactionally, mirroring
// upsertMissingRows but tagging rows with an 'is_new' flag (false for the
// Turkish base row, true for fresh target-language rows). Unlike
// upsertMissingRows, an existing provider_meta source_type is preserved on
// conflict — the seed's type is used only when none is stored yet.
func upsertSourceSeeds(db *sql.DB, seeds []sourceSeed, languages []string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// Discards everything if we return before Commit.
	defer tx.Rollback()
	affected := 0
	for _, seed := range seeds {
		if seed.TKey == "" || seed.SourceText == "" {
			continue
		}
		sourceType := normalizeTranslationSourceType(seed.SourceType)
		if sourceType == "" {
			sourceType = "dummy"
		}
		res, err := tx.Exec(`
		INSERT INTO mk_translator
			(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
		VALUES
			($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text, 'is_new', false))
		ON CONFLICT (t_key, lang_code) DO UPDATE
		SET
			source_text_tr = EXCLUDED.source_text_tr,
			provider_meta = jsonb_set(
				COALESCE(mk_translator.provider_meta, '{}'::jsonb),
				'{source_type}',
				to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $3::text)),
				true
			),
			updated_at = NOW()
		`, seed.TKey, seed.SourceText, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}
		// Pending rows per target language, marked is_new=true on insert.
		for _, lang := range languages {
			res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text, 'is_new', true))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(
					COALESCE(mk_translator.provider_meta, '{}'::jsonb),
					'{source_type}',
					to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $4::text)),
					true
				),
				updated_at = NOW()
			`, seed.TKey, lang, seed.SourceText, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}
	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
// collectSourceSeeds gathers candidate seeds from PostgreSQL schema metadata,
// then MSSQL schema metadata, then UI "dummy" texts, stopping at limit.
// Seeds are de-duplicated across sources by their normalized source text, so
// the earlier sources win for identical texts.
func collectSourceSeeds(pgDB *sql.DB, mssqlDB *sql.DB, limit int) []sourceSeed {
	collected := make([]sourceSeed, 0, limit)
	dedup := make(map[string]struct{})

	// add records a seed unless it is incomplete or a duplicate; the return
	// value reports whether there is still room for more.
	add := func(s sourceSeed) bool {
		if s.TKey != "" && s.SourceText != "" && s.SourceType != "" {
			textKey := normalizeSeedTextKey(s.SourceText)
			if _, dup := dedup[textKey]; !dup {
				dedup[textKey] = struct{}{}
				collected = append(collected, s)
			}
		}
		return len(collected) < limit
	}

	for _, s := range collectPostgreSeeds(pgDB, limit) {
		if !add(s) {
			return collected
		}
	}
	for _, s := range collectMSSQLSeeds(mssqlDB, limit-len(collected)) {
		if !add(s) {
			return collected
		}
	}
	for _, s := range collectDummySeeds(limit - len(collected)) {
		if !add(s) {
			return collected
		}
	}
	return collected
}
// collectPostgreSeeds derives seed texts from PostgreSQL column names in the
// public schema (information_schema.columns), up to limit rows. Best-effort:
// failures are logged and yield nil/partial results instead of an error,
// matching collectMSSQLSeeds.
func collectPostgreSeeds(pgDB *sql.DB, limit int) []sourceSeed {
	if pgDB == nil || limit <= 0 {
		return nil
	}
	rows, err := pgDB.Query(`
		SELECT table_name, column_name
		FROM information_schema.columns
		WHERE table_schema = 'public'
		ORDER BY table_name, ordinal_position
		LIMIT $1
	`, limit)
	if err != nil {
		// Previously this failure was silent; log it like the MSSQL collector
		// does so sync diagnostics show why no Postgres seeds were produced.
		log.Printf("[TranslationSync] stage=collect_postgre skipped err=%v", err)
		return nil
	}
	defer rows.Close()
	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		// tableName is scanned to satisfy the two-column result but only the
		// column name feeds the seed text.
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue
		}
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "postgre",
		})
	}
	// Surface iteration errors too, still without failing the collection.
	if err := rows.Err(); err != nil {
		log.Printf("[TranslationSync] stage=collect_postgre partial err=%v", err)
	}
	return out
}
// collectMSSQLSeeds derives seed texts from MSSQL column names via
// INFORMATION_SCHEMA.COLUMNS. The per-run row cap and the query timeout are
// tunable through TRANSLATION_MSSQL_SEED_LIMIT and
// TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC. Best-effort: failures are logged and
// return nil.
func collectMSSQLSeeds(mssqlDB *sql.DB, limit int) []sourceSeed {
	if mssqlDB == nil || limit <= 0 {
		return nil
	}
	maxPerRun := parsePositiveIntEnv("TRANSLATION_MSSQL_SEED_LIMIT", 2500)
	if limit > maxPerRun {
		limit = maxPerRun
	}
	timeoutSec := parsePositiveIntEnv("TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC", 20)
	// TOP cannot be parameterized in T-SQL; limit is an int we computed, so
	// the Sprintf is not an injection risk.
	query := fmt.Sprintf(`
	SELECT TOP (%d) TABLE_NAME, COLUMN_NAME
	FROM INFORMATION_SCHEMA.COLUMNS
	ORDER BY TABLE_NAME, ORDINAL_POSITION
	`, limit)
	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeoutSec)*time.Second)
	defer cancel()
	rows, err := mssqlDB.QueryContext(ctx, query)
	if err != nil {
		log.Printf("[TranslationSync] stage=collect_mssql skipped err=%v", err)
		return nil
	}
	defer rows.Close()
	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		// tableName is scanned to satisfy the result shape; only the column
		// name feeds the seed text.
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue
		}
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "mssql",
		})
	}
	return out
}
// collectDummySeeds walks the frontend source tree (ui/src) and turns each
// unique visible UI string into a "dummy" seed, up to limit. Best-effort: any
// missing directory or unreadable file simply yields fewer seeds.
func collectDummySeeds(limit int) []sourceSeed {
	if limit <= 0 {
		return nil
	}
	root := detectProjectRoot()
	if root == "" {
		return nil
	}
	uiRoot := filepath.Join(root, "ui", "src")
	if _, err := os.Stat(uiRoot); err != nil {
		return nil
	}
	out := make([]sourceSeed, 0, limit)
	seen := make(map[string]struct{}, limit)
	_ = filepath.WalkDir(uiRoot, func(path string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return nil
		}
		ext := strings.ToLower(filepath.Ext(path))
		if ext != ".vue" && ext != ".js" && ext != ".ts" {
			return nil
		}
		// Only whitelisted UI directories (pages/components/layouts/stores).
		if !shouldCollectDummySeedFile(uiRoot, path, ext) {
			return nil
		}
		b, err := os.ReadFile(path)
		if err != nil {
			return nil
		}
		texts := extractVisibleUIText(string(b), ext)
		for _, text := range texts {
			if !isCandidateText(text) {
				continue
			}
			if _, ok := seen[text]; ok {
				continue
			}
			seen[text] = struct{}{}
			key := makeTextBasedSeedKey(text)
			out = append(out, sourceSeed{
				TKey:       key,
				SourceText: text,
				SourceType: "dummy",
			})
			if len(out) >= limit {
				// Sentinel error only aborts the walk early; the WalkDir
				// return value is deliberately discarded.
				return errors.New("limit reached")
			}
		}
		return nil
	})
	return out
}
// shouldCollectDummySeedFile decides whether a UI source file is eligible for
// dummy-seed extraction: test/mock directories are always excluded, .vue files
// must live under the allowed Vue directories and .js/.ts files under the
// allowed store directories (paths compared case-insensitively, relative to
// uiRoot, with forward slashes).
func shouldCollectDummySeedFile(uiRoot, fullPath, ext string) bool {
	relPath, relErr := filepath.Rel(uiRoot, fullPath)
	if relErr != nil {
		return false
	}
	relPath = strings.ToLower(filepath.ToSlash(relPath))

	// Never harvest from test or mock fixtures.
	for _, fragment := range []string{"/__tests__/", "/tests/", "/mock/", "/mocks/"} {
		if strings.Contains(relPath, fragment) {
			return false
		}
	}

	// Pick the directory whitelist that applies to this extension.
	var allowedDirs []string
	switch ext {
	case ".vue":
		allowedDirs = translationDummyAllowedVueDirs
	case ".js", ".ts":
		allowedDirs = translationDummyAllowedStoreDirs
	default:
		return false
	}

	for _, prefix := range allowedDirs {
		if strings.HasPrefix(relPath, prefix) {
			return true
		}
	}
	return false
}
// extractVisibleUIText pulls human-visible strings out of a UI source file.
// For .vue files it scans the <template> section (attribute values and bare
// tag text) and the <script> section (label/message/... string props); for
// .js/.ts it scans only script-style props. Results are whitespace-normalized,
// de-duplicated, and filtered against obvious non-copy (markup characters,
// {{ }} bindings, '='/'#' fragments, attribute soups of framework tokens).
func extractVisibleUIText(content string, ext string) []string {
	out := make([]string, 0, 32)
	seen := map[string]struct{}{}
	// isLikelyAttrNoise: 2..16 tokens where EVERY token is a known framework
	// word or starts with a Vue binding prefix (:, @, v-, #) — i.e. an
	// attribute soup captured by the tag-text regex, not real copy.
	isLikelyAttrNoise := func(text string) bool {
		tokens := strings.Fields(strings.ToLower(text))
		if len(tokens) < 2 || len(tokens) > 16 {
			return false
		}
		matched := 0
		for _, t := range tokens {
			if _, ok := translationNoiseTokens[t]; ok {
				matched++
				continue
			}
			if strings.HasPrefix(t, ":") || strings.HasPrefix(t, "@") || strings.HasPrefix(t, "v-") || strings.HasPrefix(t, "#") {
				matched++
				continue
			}
		}
		return matched == len(tokens)
	}
	// appendText normalizes and applies all rejection filters before
	// recording a candidate once.
	appendText := func(raw string) {
		// Multi-line captures are regex over-matches; drop them outright.
		if strings.ContainsAny(raw, "\r\n\t") {
			return
		}
		// Collapse runs of whitespace to single spaces.
		text := strings.TrimSpace(strings.Join(strings.Fields(raw), " "))
		if text == "" {
			return
		}
		if strings.ContainsAny(text, "<>{}[]`") {
			return
		}
		if strings.Contains(text, "=") || strings.Contains(text, "#") {
			return
		}
		if reTemplateDynamic.MatchString(text) {
			return
		}
		if isLikelyAttrNoise(text) {
			return
		}
		if _, ok := seen[text]; ok {
			return
		}
		seen[text] = struct{}{}
		out = append(out, text)
	}
	switch ext {
	case ".vue":
		// If no <template> block matches, fall back to scanning the whole file.
		template := content
		if m := reVueTemplate.FindStringSubmatch(content); len(m) > 1 {
			template = m[1]
		}
		for _, m := range reTemplateAttr.FindAllStringSubmatch(template, -1) {
			if len(m) > 1 {
				appendText(m[1])
			}
		}
		for _, m := range reTemplateText.FindAllStringSubmatch(template, -1) {
			if len(m) > 1 {
				appendText(m[1])
			}
		}
		// Same fallback for the <script> section.
		script := content
		if m := reVueScript.FindStringSubmatch(content); len(m) > 1 {
			script = m[1]
		}
		for _, m := range reScriptLabelProp.FindAllStringSubmatch(script, -1) {
			if len(m) > 1 {
				appendText(m[1])
			}
		}
		for _, m := range reScriptUIProp.FindAllStringSubmatch(script, -1) {
			if len(m) > 1 {
				appendText(m[1])
			}
		}
	case ".js", ".ts":
		for _, m := range reScriptUIProp.FindAllStringSubmatch(content, -1) {
			if len(m) > 1 {
				appendText(m[1])
			}
		}
	}
	return out
}
// autoTranslatePendingRows is the key-less entry point for the batch
// translator. NOTE(review): it forwards a nil keys slice, and the worker
// returns immediately when len(keys) == 0 ("stage=skip reason=no_keys"),
// so this wrapper currently translates nothing — confirm whether it
// should instead process all pending rows.
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
	return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
}
// autoTranslatePendingRowsForKeys machine-translates pending mk_translator
// rows (empty translated_text, is_manual = false) restricted to the given
// t_keys and target languages. Each successful Azure translation is written
// back with provider 'azure_translator' and status 'pending'. Returns the
// number of rows translated and persisted; per-row failures are counted
// and logged but do not abort the batch.
func autoTranslatePendingRowsForKeys(db *sql.DB, langs []string, limit int, keys []string, traceID string) (int, error) {
	traceID = strings.TrimSpace(traceID)
	if traceID == "" {
		// Fallback trace id: nanosecond wall clock rendered in base36.
		traceID = "trauto-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	if len(keys) == 0 {
		// No keys means nothing to scope the batch to — skip entirely.
		log.Printf("[TranslationAuto] trace=%s stage=skip reason=no_keys", traceID)
		return 0, nil
	}
	start := time.Now()
	// Placeholder order is intentionally $1=langs, $3=keys, $2=limit to
	// match the argument list below. pqArray trims and sorts langs; keys
	// are passed through pq.Array unchanged.
	rows, err := db.Query(`
		SELECT id, lang_code, source_text_tr
		FROM mk_translator
		WHERE lang_code = ANY($1)
		  AND t_key = ANY($3)
		  AND (translated_text IS NULL OR btrim(translated_text) = '')
		  AND is_manual = false
		ORDER BY updated_at ASC
		LIMIT $2
	`, pqArray(langs), limit, pq.Array(keys))
	if err != nil {
		return 0, err
	}
	defer rows.Close()
	// pending is one translatable row fetched from the queue.
	type pending struct {
		ID   int64
		Lang string
		Text string
	}
	list := make([]pending, 0, limit)
	pendingByLang := map[string]int{}
	sourceChars := 0
	for rows.Next() {
		var p pending
		// Rows that fail to scan, have blank source text, or carry an
		// unsupported language code are skipped silently.
		if err := rows.Scan(&p.ID, &p.Lang, &p.Text); err != nil {
			continue
		}
		if strings.TrimSpace(p.Text) == "" {
			continue
		}
		p.Lang = normalizeTranslationLang(p.Lang)
		if p.Lang == "" {
			continue
		}
		list = append(list, p)
		pendingByLang[p.Lang]++
		// Rune count of the trimmed source, used only for the prepare log.
		sourceChars += len([]rune(strings.TrimSpace(p.Text)))
	}
	if err := rows.Err(); err != nil {
		return 0, err
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=prepare candidates=%d limit=%d keys=%d langs=%v source_chars=%d pending_by_lang=%s",
		traceID,
		len(list),
		limit,
		len(keys),
		langs,
		sourceChars,
		formatLangCounts(pendingByLang),
	)
	if len(list) == 0 {
		log.Printf(
			"[TranslationAuto] trace=%s stage=done duration_ms=%d translated=0 failed_translate=0 failed_update=0 rps=0.00",
			traceID,
			time.Since(start).Milliseconds(),
		)
		return 0, nil
	}
	done := 0
	failedTranslate := 0
	failedUpdate := 0
	doneByLang := map[string]int{}
	// Atomic mirrors of the loop-local counters: the translate loop below
	// is the single writer, the heartbeat goroutine only reads them.
	var processedCount int64
	var translatedCount int64
	var failedTranslateCount int64
	var failedUpdateCount int64
	progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
	if progressEvery <= 0 {
		progressEvery = 100
	}
	progressSec := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_SEC", 15)
	if progressSec <= 0 {
		progressSec = 15
	}
	progressTicker := time.Duration(progressSec) * time.Second
	lastProgress := time.Now()
	heartbeatDone := make(chan struct{})
	// Heartbeat goroutine: logs throughput periodically until the deferred
	// close(heartbeatDone) stops it when this function returns.
	go func() {
		ticker := time.NewTicker(progressTicker)
		defer ticker.Stop()
		for {
			select {
			case <-ticker.C:
				processed := int(atomic.LoadInt64(&processedCount))
				translated := int(atomic.LoadInt64(&translatedCount))
				failedTr := int(atomic.LoadInt64(&failedTranslateCount))
				failedUpd := int(atomic.LoadInt64(&failedUpdateCount))
				elapsed := time.Since(start)
				rps := float64(translated)
				if elapsed > 0 {
					rps = float64(translated) / elapsed.Seconds()
				}
				log.Printf(
					"[TranslationAuto] trace=%s stage=heartbeat processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f",
					traceID,
					processed,
					len(list),
					translated,
					failedTr,
					failedUpd,
					elapsed.Milliseconds(),
					rps,
				)
			case <-heartbeatDone:
				return
			}
		}
	}()
	defer close(heartbeatDone)
	for i, p := range list {
		// One synchronous Azure call per row; an error or blank result is
		// a translate failure, not a batch failure.
		tr, err := callAzureTranslate(p.Text, p.Lang)
		if err != nil || strings.TrimSpace(tr) == "" {
			failedTranslate++
			atomic.StoreInt64(&failedTranslateCount, int64(failedTranslate))
			atomic.StoreInt64(&processedCount, int64(i+1))
			continue
		}
		_, err = db.Exec(`
			UPDATE mk_translator
			SET translated_text = $2,
			    status = 'pending',
			    is_manual = false,
			    provider = 'azure_translator',
			    updated_at = NOW()
			WHERE id = $1
		`, p.ID, strings.TrimSpace(tr))
		if err != nil {
			failedUpdate++
			atomic.StoreInt64(&failedUpdateCount, int64(failedUpdate))
			atomic.StoreInt64(&processedCount, int64(i+1))
			continue
		}
		done++
		atomic.StoreInt64(&translatedCount, int64(done))
		atomic.StoreInt64(&processedCount, int64(i+1))
		doneByLang[p.Lang]++
		processed := i + 1
		// Progress logs every N rows, at least every progressTicker, and
		// always on the final row.
		shouldLogProgress := processed%progressEvery == 0 || time.Since(lastProgress) >= progressTicker || processed == len(list)
		if shouldLogProgress {
			elapsed := time.Since(start)
			rps := float64(done)
			if elapsed > 0 {
				rps = float64(done) / elapsed.Seconds()
			}
			log.Printf(
				"[TranslationAuto] trace=%s stage=progress processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f done_by_lang=%s",
				traceID,
				processed,
				len(list),
				done,
				failedTranslate,
				failedUpdate,
				elapsed.Milliseconds(),
				rps,
				formatLangCounts(doneByLang),
			)
			lastProgress = time.Now()
		}
	}
	elapsed := time.Since(start)
	rps := float64(done)
	if elapsed > 0 {
		rps = float64(done) / elapsed.Seconds()
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=done duration_ms=%d candidates=%d translated=%d failed_translate=%d failed_update=%d rps=%.2f done_by_lang=%s",
		traceID,
		elapsed.Milliseconds(),
		len(list),
		done,
		failedTranslate,
		failedUpdate,
		rps,
		formatLangCounts(doneByLang),
	)
	return done, nil
}
// formatLangCounts renders lang->count pairs as "k=v" joined by commas,
// with keys sorted alphabetically so log lines are stable. Returns "-"
// for an empty or nil map.
func formatLangCounts(counts map[string]int) string {
	if len(counts) == 0 {
		return "-"
	}
	langs := make([]string, 0, len(counts))
	for lang := range counts {
		langs = append(langs, lang)
	}
	sort.Strings(langs)
	var b strings.Builder
	for i, lang := range langs {
		if i > 0 {
			b.WriteByte(',')
		}
		fmt.Fprintf(&b, "%s=%d", lang, counts[lang])
	}
	return b.String()
}
// filterNewSeeds drops seeds whose t_key or normalized source text already
// exists in mk_translator. Best-effort: a nil DB or any query error returns
// the input unchanged rather than failing the caller.
func filterNewSeeds(pgDB *sql.DB, seeds []sourceSeed) []sourceSeed {
	if pgDB == nil || len(seeds) == 0 {
		return seeds
	}
	keys := uniqueSeedKeys(seeds)
	if len(keys) == 0 {
		return nil
	}
	textKeys := uniqueSeedTextKeys(seeds)
	rows, err := pgDB.Query(`
		SELECT DISTINCT t_key, lower(btrim(source_text_tr)) AS text_key
		FROM mk_translator
		WHERE t_key = ANY($1)
		   OR lower(btrim(source_text_tr)) = ANY($2)
	`, pq.Array(keys), pq.Array(textKeys))
	if err != nil {
		return seeds
	}
	defer rows.Close()
	knownKeys := make(map[string]struct{}, len(keys))
	knownTexts := make(map[string]struct{}, len(textKeys))
	for rows.Next() {
		var tKey string
		var textKey sql.NullString
		// Scan failures skip the row; both columns are optional matches.
		if scanErr := rows.Scan(&tKey, &textKey); scanErr != nil {
			continue
		}
		if strings.TrimSpace(tKey) != "" {
			knownKeys[tKey] = struct{}{}
		}
		if textKey.Valid {
			if trimmed := strings.TrimSpace(textKey.String); trimmed != "" {
				knownTexts[trimmed] = struct{}{}
			}
		}
	}
	fresh := make([]sourceSeed, 0, len(seeds))
	for _, seed := range seeds {
		if _, dup := knownKeys[seed.TKey]; dup {
			continue
		}
		if _, dup := knownTexts[normalizeSeedTextKey(seed.SourceText)]; dup {
			continue
		}
		fresh = append(fresh, seed)
	}
	return fresh
}
// uniqueSeedKeys collects the non-empty TKey values of seeds, keeping the
// first occurrence of each key and preserving input order.
func uniqueSeedKeys(seeds []sourceSeed) []string {
	visited := make(map[string]struct{}, len(seeds))
	keys := make([]string, 0, len(seeds))
	for _, seed := range seeds {
		key := seed.TKey
		if key == "" {
			continue
		}
		if _, dup := visited[key]; dup {
			continue
		}
		visited[key] = struct{}{}
		keys = append(keys, key)
	}
	return keys
}
// uniqueSeedTextKeys collects the normalized (lowercased, whitespace-
// collapsed) source texts of seeds, deduplicated in input order; empty
// normalizations are dropped.
func uniqueSeedTextKeys(seeds []sourceSeed) []string {
	visited := make(map[string]struct{}, len(seeds))
	textKeys := make([]string, 0, len(seeds))
	for _, seed := range seeds {
		textKey := normalizeSeedTextKey(seed.SourceText)
		if textKey == "" {
			continue
		}
		if _, dup := visited[textKey]; dup {
			continue
		}
		visited[textKey] = struct{}{}
		textKeys = append(textKeys, textKey)
	}
	return textKeys
}
// reuseExistingSeedKeys rewrites each seed's TKey to the canonical t_key
// already stored in mk_translator for the same normalized source text (the
// lowest-id row wins via ROW_NUMBER), so duplicate texts share one key.
// Returns the possibly-mutated seeds and how many keys were swapped.
// Best-effort: a nil DB or query error returns the input unchanged with 0.
func reuseExistingSeedKeys(pgDB *sql.DB, seeds []sourceSeed) ([]sourceSeed, int) {
	if pgDB == nil || len(seeds) == 0 {
		return seeds, 0
	}
	textKeys := uniqueSeedTextKeys(seeds)
	if len(textKeys) == 0 {
		return seeds, 0
	}
	// For every matching normalized text, pick the t_key of the oldest row
	// (rn = 1 after ordering by id ascending).
	rows, err := pgDB.Query(`
		SELECT x.text_key, x.t_key
		FROM (
			SELECT
				lower(btrim(source_text_tr)) AS text_key,
				t_key,
				ROW_NUMBER() OVER (
					PARTITION BY lower(btrim(source_text_tr))
					ORDER BY id ASC
				) AS rn
			FROM mk_translator
			WHERE lower(btrim(source_text_tr)) = ANY($1)
		) x
		WHERE x.rn = 1
	`, pq.Array(textKeys))
	if err != nil {
		return seeds, 0
	}
	defer rows.Close()
	existingByText := make(map[string]string, len(textKeys))
	for rows.Next() {
		var textKey, tKey string
		// Bad rows are skipped rather than failing the whole pass.
		if err := rows.Scan(&textKey, &tKey); err != nil {
			continue
		}
		textKey = strings.TrimSpace(strings.ToLower(textKey))
		tKey = strings.TrimSpace(tKey)
		if textKey == "" || tKey == "" {
			continue
		}
		existingByText[textKey] = tKey
	}
	reused := 0
	// Swap in the canonical key wherever it differs from the seed's own.
	for i := range seeds {
		textKey := normalizeSeedTextKey(seeds[i].SourceText)
		if textKey == "" {
			continue
		}
		if existingKey, ok := existingByText[textKey]; ok && existingKey != "" && seeds[i].TKey != existingKey {
			seeds[i].TKey = existingKey
			reused++
		}
	}
	return seeds, reused
}
// countSeedsBySource tallies seeds per normalized source type. Unknown or
// empty types count as "dummy"; all three buckets are always present so
// log formatting stays stable.
func countSeedsBySource(seeds []sourceSeed) map[string]int {
	counts := map[string]int{"dummy": 0, "postgre": 0, "mssql": 0}
	for _, seed := range seeds {
		bucket := normalizeTranslationSourceType(seed.SourceType)
		if bucket == "" {
			bucket = "dummy"
		}
		counts[bucket]++
	}
	return counts
}
// formatSourceCounts renders the three source buckets in a fixed order so
// log output is stable across runs; missing keys read as zero.
func formatSourceCounts(counts map[string]int) string {
	names := []string{"dummy", "postgre", "mssql"}
	parts := make([]string, len(names))
	for i, name := range names {
		parts[i] = fmt.Sprintf("%s=%d", name, counts[name])
	}
	return strings.Join(parts, " ")
}
func requestTraceID(r *http.Request) string {
if r == nil {
return "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
id := strings.TrimSpace(r.Header.Get("X-Request-ID"))
if id == "" {
id = strings.TrimSpace(r.Header.Get("X-Correlation-ID"))
}
if id == "" {
id = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
return id
}
// callAzureTranslate sends sourceText to the Azure Translator REST API and
// returns the translated text for targetLang. The source language defaults
// to "tr" (override with TRANSLATION_SOURCE_LANG). Requires the
// AZURE_TRANSLATOR_KEY, AZURE_TRANSLATOR_ENDPOINT and
// AZURE_TRANSLATOR_REGION environment variables.
func callAzureTranslate(sourceText, targetLang string) (string, error) {
	key := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_KEY"))
	endpoint := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_ENDPOINT"))
	region := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_REGION"))
	if key == "" {
		return "", errors.New("AZURE_TRANSLATOR_KEY not set")
	}
	if endpoint == "" {
		return "", errors.New("AZURE_TRANSLATOR_ENDPOINT not set")
	}
	if region == "" {
		return "", errors.New("AZURE_TRANSLATOR_REGION not set")
	}
	sourceLang := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SOURCE_LANG")))
	if sourceLang == "" {
		sourceLang = "tr"
	}
	// Translating into the source language or an unsupported code is an error.
	targetLang = normalizeTranslationLang(targetLang)
	if targetLang == "" || targetLang == "tr" {
		return "", fmt.Errorf("invalid target language: %q", targetLang)
	}
	endpoint = strings.TrimRight(endpoint, "/")
	normalizedEndpoint := strings.ToLower(endpoint)
	translatePath := "/translate"
	// Azure custom endpoint requires the translator path with version in URL.
	if strings.Contains(normalizedEndpoint, ".cognitiveservices.azure.com") {
		translatePath = "/translator/text/v3.0/translate"
	}
	baseURL, err := url.Parse(endpoint + translatePath)
	if err != nil {
		return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
	}
	q := baseURL.Query()
	// The global endpoint takes the API version as a query parameter; the
	// cognitiveservices path above already embeds v3.0.
	if translatePath == "/translate" {
		q.Set("api-version", "3.0")
	}
	q.Set("from", sourceLang)
	q.Set("to", targetLang)
	baseURL.RawQuery = q.Encode()
	// Request body: JSON array with a single {"text": ...} element, per the
	// Translator v3 contract.
	payload := []map[string]string{
		{"text": sourceText},
	}
	body, _ := json.Marshal(payload)
	req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	req.Header.Set("Ocp-Apim-Subscription-Key", key)
	req.Header.Set("Ocp-Apim-Subscription-Region", region)
	req.Header.Set("Content-Type", "application/json; charset=UTF-8")
	timeoutSec := parsePositiveIntEnv("TRANSLATION_HTTP_TIMEOUT_SEC", 60)
	client := &http.Client{Timeout: time.Duration(timeoutSec) * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		// Surface at most 1 KiB of the error body in the returned error.
		raw, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
		return "", fmt.Errorf("azure translator status=%d body=%s", resp.StatusCode, strings.TrimSpace(string(raw)))
	}
	// Response shape: [{"translations": [{"text": ..., "to": ...}]}].
	var result []struct {
		Translations []struct {
			Text string `json:"text"`
			To   string `json:"to"`
		} `json:"translations"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result) == 0 || len(result[0].Translations) == 0 {
		return "", errors.New("azure translator empty response")
	}
	return strings.TrimSpace(result[0].Translations[0].Text), nil
}
// nullableString maps a nil pointer to SQL NULL and any other pointer to
// its trimmed value. Note: an all-whitespace input yields the empty
// string, not NULL.
func nullableString(v *string) any {
	if v == nil {
		return nil
	}
	return strings.TrimSpace(*v)
}
// normalizeTranslationLang lowercases and trims a language code, returning
// it only when it appears in translationLangSet; otherwise "".
func normalizeTranslationLang(v string) string {
	lang := strings.ToLower(strings.TrimSpace(v))
	if _, supported := translationLangSet[lang]; !supported {
		return ""
	}
	return lang
}
// normalizeTranslationStatus lowercases and trims a status value, returning
// it only when it appears in translationStatusSet; otherwise "".
func normalizeTranslationStatus(v string) string {
	status := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationStatusSet[status]; !known {
		return ""
	}
	return status
}
// normalizeTranslationSourceType lowercases and trims a source-type value,
// returning it only when it appears in translationSourceTypeSet; otherwise "".
func normalizeTranslationSourceType(v string) string {
	sourceType := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationSourceTypeSet[sourceType]; !known {
		return ""
	}
	return sourceType
}
// normalizeTargetLanguages normalizes and de-duplicates the requested
// target languages, dropping the source language ("tr"). When nothing
// usable remains the full default target set is returned.
func normalizeTargetLanguages(list []string) []string {
	defaults := func() []string {
		return []string{"en", "de", "it", "es", "ru", "ar"}
	}
	seen := make(map[string]struct{}, len(list))
	out := make([]string, 0, len(list))
	for _, raw := range list {
		lang := normalizeTranslationLang(raw)
		if lang == "" || lang == "tr" {
			continue
		}
		if _, dup := seen[lang]; dup {
			continue
		}
		seen[lang] = struct{}{}
		out = append(out, lang)
	}
	if len(out) == 0 {
		return defaults()
	}
	return out
}
// normalizeOptionalStatus maps a nil or unrecognized status pointer to SQL
// NULL, otherwise to the canonical status string.
func normalizeOptionalStatus(v *string) any {
	if v == nil {
		return nil
	}
	if s := normalizeTranslationStatus(*v); s != "" {
		return s
	}
	return nil
}
// normalizeOptionalSourceType maps a nil or unrecognized source-type
// pointer to SQL NULL, otherwise to the canonical source type.
func normalizeOptionalSourceType(v *string) any {
	if v == nil {
		return nil
	}
	if s := normalizeTranslationSourceType(*v); s != "" {
		return s
	}
	return nil
}
// normalizeMissingItems trims key and source text, drops incomplete
// entries and de-duplicates by t_key (first occurrence wins).
func normalizeMissingItems(items []UpsertMissingItem) []UpsertMissingItem {
	visited := make(map[string]struct{}, len(items))
	cleaned := make([]UpsertMissingItem, 0, len(items))
	for _, item := range items {
		tKey := strings.TrimSpace(item.TKey)
		source := strings.TrimSpace(item.SourceTextTR)
		if tKey == "" || source == "" {
			continue
		}
		if _, dup := visited[tKey]; dup {
			continue
		}
		visited[tKey] = struct{}{}
		cleaned = append(cleaned, UpsertMissingItem{TKey: tKey, SourceTextTR: source})
	}
	return cleaned
}
// normalizeIDListInt64 keeps only positive ids, removes duplicates, and
// returns them sorted in ascending order.
func normalizeIDListInt64(ids []int64) []int64 {
	unique := make(map[int64]struct{}, len(ids))
	for _, id := range ids {
		if id > 0 {
			unique[id] = struct{}{}
		}
	}
	out := make([]int64, 0, len(unique))
	for id := range unique {
		out = append(out, id)
	}
	sort.Slice(out, func(a, b int) bool { return out[a] < out[b] })
	return out
}
// detectProjectRoot walks up from the working directory (self, parent,
// grandparent) and returns the first directory containing a "ui" entry,
// or "" when none of the three qualify.
func detectProjectRoot() string {
	wd, err := os.Getwd()
	if err != nil {
		return ""
	}
	dir := wd
	for i := 0; i < 3; i++ {
		if _, statErr := os.Stat(filepath.Join(dir, "ui")); statErr == nil {
			return dir
		}
		dir = filepath.Dir(dir)
	}
	return ""
}
// isCandidateText is a heuristic filter for human-visible UI strings:
// sane byte length (3..120), contains at least one letter, and does not
// look like code, markup, API paths, or symbol-heavy noise.
func isCandidateText(s string) bool {
	s = strings.TrimSpace(s)
	switch {
	case len(s) < 3 || len(s) > 120:
		return false
	case reBadText.MatchString(s):
		return false
	case !reHasLetter.MatchString(s):
		return false
	case strings.Contains(s, "/api/"):
		return false
	case reCodeLikeText.MatchString(s):
		return false
	case strings.ContainsAny(s, "{}[];`"):
		return false
	}
	// Reject strings with four or more code-ish symbols.
	symbols := 0
	for _, r := range s {
		if strings.ContainsRune("()=:/\\|&*<>_", r) {
			symbols++
		}
	}
	return symbols < 4
}
// sanitizeKey lowercases and snake_cases a string, replaces characters
// matched by reKeyUnsafe with underscores, and strips edge underscores.
// Inputs that reduce to nothing yield the placeholder "x".
func sanitizeKey(s string) string {
	key := strings.ToLower(strings.TrimSpace(s))
	key = strings.ReplaceAll(key, " ", "_")
	key = reKeyUnsafe.ReplaceAllString(key, "_")
	if key = strings.Trim(key, "_"); key == "" {
		return "x"
	}
	return key
}
// normalizeDisplayText turns underscores into spaces and collapses all
// whitespace runs into single spaces; joining zero fields yields "".
func normalizeDisplayText(s string) string {
	words := strings.Fields(strings.ReplaceAll(s, "_", " "))
	return strings.Join(words, " ")
}
// hashKey builds a short, stable key: the sanitized prefix (capped at 40
// bytes) plus a cheap rune-sum fingerprint of the raw input modulo 1e6.
// NOTE(review): the byte-wise truncation could split a multi-byte rune —
// confirm reKeyUnsafe keeps sanitized keys ASCII-only.
func hashKey(s string) string {
	base := sanitizeKey(s)
	if len(base) > 40 {
		base = base[:40]
	}
	var sum int
	for _, r := range s {
		sum += int(r)
	}
	return fmt.Sprintf("%s_%d", base, sum%1000000)
}
// makeTextBasedSeedKey derives a deterministic "txt."-prefixed key from
// the normalized (lowercased, whitespace-collapsed) source text, so the
// same visible string always maps to the same seed key.
func makeTextBasedSeedKey(sourceText string) string {
	return "txt." + hashKey(normalizeSeedTextKey(sourceText))
}
// normalizeSeedTextKey produces the canonical identity of a source text:
// display-normalized, then trimmed and lowercased.
func normalizeSeedTextKey(s string) string {
	normalized := normalizeDisplayText(s)
	return strings.ToLower(strings.TrimSpace(normalized))
}
// pqArray wraps a trimmed, sorted copy of values for use as a Postgres
// array parameter; the input slice is never mutated.
func pqArray(values []string) any {
	trimmed := make([]string, len(values))
	for i, v := range values {
		trimmed[i] = strings.TrimSpace(v)
	}
	sort.Strings(trimmed)
	return pq.Array(trimmed)
}
func parsePositiveIntEnv(name string, fallback int) int {
raw := strings.TrimSpace(os.Getenv(name))
if raw == "" {
return fallback
}
n, err := strconv.Atoi(raw)
if err != nil || n <= 0 {
return fallback
}
return n
}
// normalizeStringList trims entries, drops empties, de-duplicates (first
// occurrence wins) and caps the result at max entries; max <= 0 means no
// explicit cap. A nil/empty input returns nil.
func normalizeStringList(items []string, max int) []string {
	if len(items) == 0 {
		return nil
	}
	limit := max
	if limit <= 0 {
		limit = len(items)
	}
	seen := make(map[string]struct{}, len(items))
	out := make([]string, 0, len(items))
	for _, raw := range items {
		item := strings.TrimSpace(raw)
		if item == "" {
			continue
		}
		if _, dup := seen[item]; dup {
			continue
		}
		seen[item] = struct{}{}
		out = append(out, item)
		if len(out) >= limit {
			break
		}
	}
	return out
}

View File

@@ -0,0 +1,69 @@
package main
import (
"bssapp-backend/routes"
"database/sql"
"log"
"os"
"strconv"
"strings"
"time"
)
// startTranslationSyncScheduler launches a background loop that runs the
// daily translation sync (routes.PerformTranslationSync) once per day at a
// fixed hour. Configured via environment variables:
//   - TRANSLATION_SYNC_ENABLED: "0"/"false"/"off" disables the scheduler.
//   - TRANSLATION_SYNC_HOUR:    hour of day 0-23 (default 4).
//   - TRANSLATION_SYNC_LIMIT:   positive row limit (default 30000).
// The goroutine runs forever; sync errors are logged and the loop simply
// waits for the next scheduled run.
func startTranslationSyncScheduler(pgDB *sql.DB, mssqlDB *sql.DB) {
	enabled := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_ENABLED")))
	if enabled == "0" || enabled == "false" || enabled == "off" {
		log.Println("🛑 Translation sync scheduler disabled")
		return
	}
	hour := 4
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_HOUR")); raw != "" {
		// Out-of-range or unparsable hours silently keep the default.
		if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 23 {
			hour = parsed
		}
	}
	limit := 30000
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LIMIT")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
			limit = parsed
		}
	}
	go func() {
		for {
			// Sleep until the next hour:00 boundary, then sync once.
			next := nextRunAt(time.Now(), hour)
			wait := time.Until(next)
			log.Printf("🕓 Translation sync next run at %s (in %s)", next.Format(time.RFC3339), wait.Round(time.Second))
			time.Sleep(wait)
			result, err := routes.PerformTranslationSync(pgDB, mssqlDB, routes.TranslationSyncOptions{
				AutoTranslate: true,
				Languages:     []string{"en", "de", "it", "es", "ru", "ar"},
				Limit:         limit,
				OnlyNew:       true,
			})
			if err != nil {
				log.Printf("❌ Translation sync failed: %v", err)
				continue
			}
			log.Printf(
				"✅ Translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v",
				result.SeedCount,
				result.AffectedCount,
				result.AutoTranslated,
				result.TargetLangs,
			)
		}
	}()
}
func nextRunAt(now time.Time, hour int) time.Time {
next := time.Date(now.Year(), now.Month(), now.Day(), hour, 0, 0, 0, now.Location())
if !next.After(now) {
next = next.Add(24 * time.Hour)
}
return next
}

View File

@@ -146,7 +146,11 @@ createQuasarApp(createApp, quasarUserOptions)
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs')
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')

View File

@@ -144,7 +144,11 @@ createQuasarApp(createApp, quasarUserOptions)
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs')
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')

View File

@@ -15,7 +15,7 @@ export default defineConfig(() => {
/* =====================================================
BOOT FILES
===================================================== */
boot: ['dayjs'],
boot: ['dayjs', 'locale', 'resizeObserverGuard'],
/* =====================================================
GLOBAL CSS
@@ -56,6 +56,13 @@ export default defineConfig(() => {
server: { type: 'http' },
port: 9000,
open: true,
client: {
overlay: {
errors: true,
warnings: false,
runtimeErrors: false
}
},
// DEV proxy (CORS'suz)
proxy: [
@@ -63,7 +70,10 @@ export default defineConfig(() => {
context: ['/api'],
target: 'http://localhost:8080',
changeOrigin: true,
secure: false
secure: false,
ws: true,
timeout: 0,
proxyTimeout: 0
}
]
},

View File

@@ -27,7 +27,7 @@ var quasar_config_default = defineConfig(() => {
/* =====================================================
BOOT FILES
===================================================== */
boot: ["dayjs"],
boot: ["dayjs", "locale", "resizeObserverGuard"],
/* =====================================================
GLOBAL CSS
===================================================== */
@@ -62,13 +62,23 @@ var quasar_config_default = defineConfig(() => {
server: { type: "http" },
port: 9e3,
open: true,
client: {
overlay: {
errors: true,
warnings: false,
runtimeErrors: false
}
},
// DEV proxy (CORS'suz)
proxy: [
{
context: ["/api"],
target: "http://localhost:8080",
changeOrigin: true,
secure: false
secure: false,
ws: true,
timeout: 0,
proxyTimeout: 0
}
]
},

View File

@@ -3,12 +3,12 @@ import dayjs from 'dayjs'
import customParseFormat from 'dayjs/plugin/customParseFormat.js'
import relativeTime from 'dayjs/plugin/relativeTime.js'
import localizedFormat from 'dayjs/plugin/localizedFormat.js'
import 'dayjs/locale/tr.js'
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
// 🔹 Pluginleri aktif et
dayjs.extend(customParseFormat)
dayjs.extend(relativeTime)
dayjs.extend(localizedFormat)
dayjs.locale('tr')
applyDayjsLocale('tr')
export default dayjs

7
ui/src/boot/locale.js Normal file
View File

@@ -0,0 +1,7 @@
import { boot } from 'quasar/wrappers'
import { useLocaleStore } from 'src/stores/localeStore'

// Boot file: re-applies the store's current locale at app startup by
// feeding it back through setLocale, so any locale-dependent side effects
// of setLocale run before the UI renders.
export default boot(() => {
  const localeStore = useLocaleStore()
  localeStore.setLocale(localeStore.locale)
})

View File

@@ -0,0 +1,36 @@
// Dev-only guard that swallows the benign "ResizeObserver loop ..." browser
// errors (both the error event and unhandled rejections) so the dev overlay
// does not surface them. No-op in production builds and non-browser contexts.
export default () => {
  if (process.env.PROD || typeof window === 'undefined') return

  const matchesResizeObserverNoise = (message) => {
    const text = String(message || '')
    return (
      text.includes('ResizeObserver loop completed with undelivered notifications') ||
      text.includes('ResizeObserver loop limit exceeded')
    )
  }

  // Prevent default handling and stop other listeners from seeing it.
  const swallow = (event) => {
    event.preventDefault()
    event.stopImmediatePropagation()
  }

  window.addEventListener('error', (event) => {
    if (matchesResizeObserverNoise(event?.message)) swallow(event)
  }, true)

  window.addEventListener('unhandledrejection', (event) => {
    const reason = event?.reason
    const message = typeof reason === 'string'
      ? reason
      : (reason?.message || reason?.toString?.() || '')
    if (matchesResizeObserverNoise(message)) swallow(event)
  }, true)
}

View File

@@ -0,0 +1,42 @@
import { computed } from 'vue'
import { messages } from 'src/i18n/messages'
import { DEFAULT_LOCALE } from 'src/i18n/languages'
import { useLocaleStore } from 'src/stores/localeStore'
// Resolve a dotted path ("a.b.c") against a nested object, returning
// undefined when any segment is missing or null along the way.
function lookup(obj, path) {
  const segments = String(path || '').split('.').filter(Boolean)
  let node = obj
  for (const segment of segments) {
    if (!node || node[segment] == null) return undefined
    node = node[segment]
  }
  return node
}
// Minimal i18n composable: resolves message keys against the active locale
// with en/tr fallbacks, then the default locale, finally returning the key
// itself so missing translations stay visible rather than rendering blank.
export function useI18n() {
  const localeStore = useLocaleStore()
  const currentLocale = computed(() => localeStore.locale)

  // tr is terminal; en falls back to tr; everything else tries en then tr.
  const fallbackChain = (locale) => {
    const code = String(locale || '').toLowerCase()
    if (code === 'tr') return ['tr']
    if (code === 'en') return ['en', 'tr']
    return [code, 'en', 'tr']
  }

  const t = (key) => {
    const chain = fallbackChain(currentLocale.value).concat(DEFAULT_LOCALE)
    for (const locale of chain) {
      const value = lookup(messages[locale] || {}, key)
      if (value != null) return value
    }
    return key
  }

  return { locale: currentLocale, t }
}

View File

@@ -0,0 +1,30 @@
import dayjs from 'dayjs'
import 'dayjs/locale/tr.js'
import 'dayjs/locale/en.js'
import 'dayjs/locale/de.js'
import 'dayjs/locale/it.js'
import 'dayjs/locale/es.js'
import 'dayjs/locale/ru.js'
import 'dayjs/locale/ar.js'
import { normalizeLocale } from './languages.js'
// App locale code -> BCP-47 style tag used for date formatting.
export const DATE_LOCALE_MAP = {
  tr: 'tr-TR',
  en: 'en-US',
  de: 'de-DE',
  it: 'it-IT',
  es: 'es-ES',
  ru: 'ru-RU',
  ar: 'ar'
}

// Switch dayjs' global locale to the normalized code (unknown -> default).
export function applyDayjsLocale(locale) {
  dayjs.locale(normalizeLocale(locale))
}

// Return the date-locale tag for a locale, defaulting to Turkish.
export function getDateLocale(locale) {
  return DATE_LOCALE_MAP[normalizeLocale(locale)] || DATE_LOCALE_MAP.tr
}

32
ui/src/i18n/languages.js Normal file
View File

@@ -0,0 +1,32 @@
// Canonical locale configuration shared by the i18n layer and the UI
// language picker.
export const DEFAULT_LOCALE = 'tr'
export const SUPPORTED_LOCALES = ['tr', 'en', 'de', 'it', 'es', 'ru', 'ar']

// Picker entries: label in the language itself, short code, and flag emoji.
export const UI_LANGUAGE_OPTIONS = [
  { label: 'Türkçe', value: 'tr', short: 'TUR', flag: '🇹🇷' },
  { label: 'English', value: 'en', short: 'ENG', flag: '🇬🇧' },
  { label: 'Deutsch', value: 'de', short: 'DEU', flag: '🇩🇪' },
  { label: 'Italiano', value: 'it', short: 'ITA', flag: '🇮🇹' },
  { label: 'Español', value: 'es', short: 'ESP', flag: '🇪🇸' },
  { label: 'Русский', value: 'ru', short: 'RUS', flag: '🇷🇺' },
  { label: 'العربية', value: 'ar', short: 'ARA', flag: '🇸🇦' }
]

// Uppercase codes expected by the backend API.
export const BACKEND_LANG_MAP = {
  tr: 'TR',
  en: 'EN',
  de: 'DE',
  it: 'IT',
  es: 'ES',
  ru: 'RU',
  ar: 'AR'
}

// Map any input to a supported lowercase locale; unknown/empty -> default.
export function normalizeLocale(value) {
  const candidate = String(value || '').trim().toLowerCase()
  if (SUPPORTED_LOCALES.includes(candidate)) return candidate
  return DEFAULT_LOCALE
}

// Convert a UI locale to the backend's uppercase language code.
export function toBackendLangCode(locale) {
  const normalized = normalizeLocale(locale)
  return BACKEND_LANG_MAP[normalized] || BACKEND_LANG_MAP[DEFAULT_LOCALE]
}

28
ui/src/i18n/messages.js Normal file
View File

@@ -0,0 +1,28 @@
// Static UI message catalog keyed by locale then feature area ("app",
// "statement"). Locales without a catalog fall back through useI18n.
// FIX: tr.app.logoutTitle was mojibake-garbled (`ıkış Yap'` — the opening
// quote and `Ç` were lost in a bad encoding pass, leaving a syntax error);
// restored to the intended 'Çıkış Yap' matching the logout dialog title.
export const messages = {
  tr: {
    app: {
      title: 'Baggi Software System',
      logoutTitle: 'Çıkış Yap',
      logoutConfirm: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
      changePassword: 'Şifre Değiştir',
      language: 'Dil'
    },
    statement: {
      invalidDateRange: 'Başlangıç tarihi bitiş tarihinden sonra olamaz.',
      selectFilters: 'Lütfen cari ve tarih aralığını seçiniz.'
    }
  },
  en: {
    app: {
      title: 'Baggi Software System',
      logoutTitle: 'Log Out',
      logoutConfirm: 'Are you sure you want to end your session?',
      changePassword: 'Change Password',
      language: 'Language'
    },
    statement: {
      invalidDateRange: 'Start date cannot be later than end date.',
      selectFilters: 'Please select account and date range.'
    }
  }
}

View File

@@ -11,9 +11,41 @@
<q-avatar class="bg-secondary q-mr-sm">
<img src="/images/Baggi-tekstilas-logolu.jpg" />
</q-avatar>
Baggi Software System
{{ t('app.title') }}
</q-toolbar-title>
<q-select
v-model="selectedLocale"
dense
outlined
emit-value
map-options
options-dense
class="q-mr-sm lang-select"
option-value="value"
option-label="label"
:options="languageOptions"
>
<template #selected-item="scope">
<div class="lang-item">
<span class="lang-flag">{{ scope.opt.flag }}</span>
<span class="lang-short">{{ scope.opt.short }}</span>
</div>
</template>
<template #option="scope">
<q-item v-bind="scope.itemProps">
<q-item-section>
<div class="lang-item">
<span class="lang-flag">{{ scope.opt.flag }}</span>
<span class="lang-short">{{ scope.opt.short }}</span>
<span>{{ scope.opt.label }}</span>
</div>
</q-item-section>
</q-item>
</template>
</q-select>
<q-btn flat dense round icon="logout" @click="confirmLogout" />
</q-toolbar>
@@ -99,7 +131,7 @@
</q-item-section>
<q-item-section>
Şifre Değiştir
{{ t('app.changePassword') }}
</q-item-section>
</q-item>
@@ -122,7 +154,7 @@
<q-toolbar class="bg-secondary">
<q-toolbar-title>
Baggi Software System
{{ t('app.title') }}
</q-toolbar-title>
</q-toolbar>
@@ -138,6 +170,9 @@ import { Dialog, useQuasar } from 'quasar'
import { useAuthStore } from 'stores/authStore'
import { usePermissionStore } from 'stores/permissionStore'
import { useI18n } from 'src/composables/useI18n'
import { UI_LANGUAGE_OPTIONS } from 'src/i18n/languages'
import { useLocaleStore } from 'src/stores/localeStore'
/* ================= STORES ================= */
@@ -147,6 +182,16 @@ const route = useRoute()
const $q = useQuasar()
const auth = useAuthStore()
const perm = usePermissionStore()
const localeStore = useLocaleStore()
const { t } = useI18n()
const languageOptions = UI_LANGUAGE_OPTIONS
const selectedLocale = computed({
get: () => localeStore.locale,
set: (value) => {
localeStore.setLocale(value)
}
})
/* ================= UI ================= */
@@ -159,8 +204,8 @@ function toggleLeftDrawer () {
function confirmLogout () {
Dialog.create({
title: 'Çıkış Yap',
message: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
title: t('app.logoutTitle'),
message: t('app.logoutConfirm'),
cancel: true,
persistent: true
}).onOk(() => {
@@ -330,6 +375,18 @@ const menuItems = [
]
},
{
label: 'Dil Çeviri',
icon: 'translate',
children: [
{
label: 'Çeviri Tablosu',
to: '/app/language/translations',
permission: 'language:update'
}
]
},
{
label: 'Kullanıcı Yönetimi',
@@ -387,5 +444,27 @@ const filteredMenu = computed(() => {
-webkit-overflow-scrolling: touch;
touch-action: pan-y;
}
.lang-select {
width: 140px;
background: #fff;
border-radius: 6px;
}
.lang-item {
display: inline-flex;
align-items: center;
gap: 8px;
}
.lang-flag {
font-size: 15px;
line-height: 1;
}
.lang-short {
font-weight: 700;
letter-spacing: 0.3px;
}
</style>

View File

@@ -278,8 +278,8 @@
color="primary"
icon="save"
class="q-ml-sm"
:loading="orderStore.loading"
:disable="!canSubmitOrder"
:loading="orderStore.loading || isSubmitAllInFlight"
:disable="!canSubmitOrder || orderStore.loading || isSubmitAllInFlight"
@click="confirmAndSubmit"
/>
</div>
@@ -773,16 +773,18 @@
v-if="canMutateRows"
:color="isEditing ? 'positive' : 'primary'"
:label="isEditing ? 'Güncelle' : 'Kaydet'"
:loading="isRowSaveInFlight"
@click="onSaveOrUpdateRow"
:disable="isClosedRow || isViewOnly || !canMutateRows"
:disable="isClosedRow || isViewOnly || !canMutateRows || isRowSaveInFlight"
/>
<q-btn
v-if="canMutateRows"
color="secondary"
label="Kaydet ve Diğer Renge Geç"
:loading="isRowSaveInFlight"
@click="onSaveAndNextColor"
:disable="isClosedRow || isViewOnly || !canMutateRows"
:disable="isClosedRow || isViewOnly || !canMutateRows || isRowSaveInFlight"
/>
<q-btn
v-if="isEditing && canMutateRows"
@@ -930,8 +932,60 @@ const aktifPB = ref('USD') // Varsayılan para birimi (Cari seç
const productCache = reactive({})
const showBulkDueDateDialog = ref(false)
const bulkDueDateValue = ref('')
const isSubmitAllInFlight = ref(false)
const isRowSaveInFlight = ref(false)
function showEditorQtyPriceBlockingDialog(message, details = '') {
const detailHtml = details ? `<br><br><b>Detay:</b><br>${details}` : ''
$q.dialog({
title: 'Kayit Engellendi',
message: `${message}${detailHtml}`,
html: true,
ok: { label: 'Tamam', color: 'negative' }
})
}
function validateEditorRowBeforeSave() {
const adet = Number(form.adet || 0)
const fiyatRaw = String(form.fiyat ?? '').trim()
const fiyat = Number(form.fiyat || 0)
if (adet <= 0) {
showEditorQtyPriceBlockingDialog('Siparis adeti toplam 0 olamaz.')
return false
}
if (!fiyatRaw || !Number.isFinite(fiyat) || fiyat <= 0) {
showEditorQtyPriceBlockingDialog('Urun fiyati girmeden ilerleyemezsiniz.')
return false
}
return true
}
function validateSummaryRowsBeforeSubmit() {
const rows = Array.isArray(orderStore.summaryRows) ? orderStore.summaryRows : []
const invalidRows = rows.filter(r => {
const adet = Number(r?.adet || 0)
const fiyatRaw = String(r?.fiyat ?? '').trim()
const fiyat = Number(r?.fiyat || 0)
return adet <= 0 || !fiyatRaw || !Number.isFinite(fiyat) || fiyat <= 0
})
if (!invalidRows.length) return true
const preview = invalidRows
.slice(0, 8)
.map(r => `${String(r?.model || '').trim() || '-'} / ${String(r?.renk || '').trim() || '-'} (adet=${Number(r?.adet || 0)}, fiyat=${String(r?.fiyat ?? '')})`)
.join('<br>')
showEditorQtyPriceBlockingDialog(
'Urun fiyati girmeden ilerleyemezsiniz.',
preview
)
return false
}
const confirmAndSubmit = async () => {
if (orderStore.loading) return
if (orderStore.loading || isSubmitAllInFlight.value) return
if (!hasSubmitPermission()) {
notifyNoPermission(
@@ -951,6 +1005,11 @@ const confirmAndSubmit = async () => {
return
}
if (!validateSummaryRowsBeforeSubmit()) {
return
}
isSubmitAllInFlight.value = true
try {
// NEW veya EDIT ayrımı store.mode üzerinden
await orderStore.submitAllReal(
@@ -962,6 +1021,8 @@ const confirmAndSubmit = async () => {
)
} catch (err) {
console.error('❌ confirmAndSubmit hata:', err)
} finally {
isSubmitAllInFlight.value = false
}
}
@@ -3077,6 +3138,8 @@ function warnIfSecondColorMissing() {
}
const onSaveOrUpdateRow = async () => {
if (isRowSaveInFlight.value) return
if (!hasRowMutationPermission()) {
notifyNoPermission(
isEditMode.value
@@ -3086,23 +3149,32 @@ const onSaveOrUpdateRow = async () => {
return
}
if (!validateEditorRowBeforeSave()) return
warnIfSecondColorMissing()
await orderStore.saveOrUpdateRowUnified({
form,
isRowSaveInFlight.value = true
try {
const ok = await orderStore.saveOrUpdateRowUnified({
form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
resetEditor: typeof resetEditor === 'function' ? resetEditor : null,
loadProductSizes: async () => {
await orderStore.loadProductSizes(form, true, $q, productCache)
await loadOrderInventory(true)
},
recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
resetEditor: typeof resetEditor === 'function' ? resetEditor : null,
loadProductSizes: async () => {
await orderStore.loadProductSizes(form, true, $q, productCache)
await loadOrderInventory(true)
},
// gerekiyorsa pass edebilirsin (store tarafında zaten optional)
stockMap,
$q
})
showEditor.value = false
// gerekiyorsa pass edebilirsin (store tarafında zaten optional)
stockMap,
$q
})
if (ok !== false) {
showEditor.value = false
}
} finally {
isRowSaveInFlight.value = false
}
}
function normalizeColorValue(val) {
@@ -3122,6 +3194,8 @@ function getNextColorValue() {
}
const onSaveAndNextColor = async () => {
if (isRowSaveInFlight.value) return
if (!hasRowMutationPermission()) {
notifyNoPermission(
isEditMode.value
@@ -3141,19 +3215,27 @@ const onSaveAndNextColor = async () => {
return
}
if (!validateEditorRowBeforeSave()) return
warnIfSecondColorMissing()
const ok = await orderStore.saveOrUpdateRowUnified({
form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
resetEditor: () => {},
loadProductSizes: async () => {
await orderStore.loadProductSizes(form, true, $q, productCache)
await loadOrderInventory(true)
},
stockMap,
$q
})
isRowSaveInFlight.value = true
let ok = false
try {
ok = await orderStore.saveOrUpdateRowUnified({
form,
recalcVat: typeof recalcVat === 'function' ? recalcVat : null,
resetEditor: () => {},
loadProductSizes: async () => {
await orderStore.loadProductSizes(form, true, $q, productCache)
await loadOrderInventory(true)
},
stockMap,
$q
})
} finally {
isRowSaveInFlight.value = false
}
if (!ok) return

View File

@@ -17,6 +17,7 @@
icon="save"
label="Secili Degisiklikleri Kaydet"
:loading="store.saving"
:disable="store.loading || store.saving || isBulkSubmitting"
@click="onBulkSubmit"
/>
</div>
@@ -101,6 +102,7 @@
<q-checkbox
size="sm"
:model-value="!!selectedMap[props.row.RowKey]"
:disable="store.saving"
@update:model-value="(val) => toggleRowSelection(props.row.RowKey, val)"
/>
</q-td>
@@ -389,10 +391,14 @@
<q-card-actions align="right">
<q-btn flat label="Vazgec" color="grey-8" v-close-popup />
<q-btn color="primary" label="Ozellikleri Kaydet" @click="saveAttributeDraft" />
<q-btn color="primary" label="Ozellikleri Taslaga Kaydet" @click="saveAttributeDraft" />
</q-card-actions>
</q-card>
</q-dialog>
<q-inner-loading :showing="store.saving">
<q-spinner-gears size="50px" color="primary" />
<div class="q-mt-md text-subtitle1">Degisiklikler kaydediliyor, lutfen bekleyiniz...</div>
</q-inner-loading>
</q-page>
</template>
@@ -435,10 +441,12 @@ const headerAverageDueDate = ref('')
const cdItemDialogOpen = ref(false)
const cdItemTargetCode = ref('')
const copySourceCode = ref(null)
const suppressAutoSetupDialogs = ref(false)
const cdItemDraftForm = ref(createEmptyCdItemDraft(''))
const attributeDialogOpen = ref(false)
const attributeTargetCode = ref('')
const attributeRows = ref([])
const isBulkSubmitting = ref(false)
const columns = [
{ name: 'select', label: '', field: 'select', align: 'center', sortable: false, style: 'width:44px;', headerStyle: 'width:44px;' },
@@ -663,13 +671,14 @@ function onNewItemChange (row, val, source = 'typed') {
row.NewColor = ''
row.NewDim2 = ''
row.NewDesc = mergeDescWithAutoNote(row, row.NewDesc || row.OldDesc)
if (row.NewItemCode) {
if (row.NewItemCode && isValidBaggiModelCode(row.NewItemCode)) {
if (row.NewItemMode === 'new') {
store.fetchNewColors(row.NewItemCode)
} else {
store.fetchColors(row.NewItemCode)
}
}
if (suppressAutoSetupDialogs.value) return
if (row.NewItemMode === 'new' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) {
openNewCodeSetupFlow(row.NewItemCode)
} else if (row.NewItemMode === 'existing' && isValidBaggiModelCode(row.NewItemCode) && row.NewItemCode !== prevCode) {
@@ -896,6 +905,59 @@ function collectLinesFromRows (selectedRows) {
return { errMsg: '', lines }
}
// Returns true when any editable field of the row differs from its
// original ("Old*") value after normalization: item code, color, second
// color (Dim2), description, or due date.
function hasRowChange (row) {
  // Normalized "new" side of the row.
  const nextItemCode = String(row?.NewItemCode || '').trim().toUpperCase()
  const nextColor = normalizeShortCode(row?.NewColor, 3)
  const nextDim2 = normalizeShortCode(row?.NewDim2, 3)
  const nextDesc = mergeDescWithAutoNote(row, row?.NewDesc || row?.OldDesc)
  const nextDueDate = row?.NewDueDate || ''
  // Normalized "old" (baseline) side.
  const prevItemCode = String(row?.OldItemCode || '').trim().toUpperCase()
  const prevColor = normalizeShortCode(row?.OldColor, 3)
  const prevDim2 = normalizeShortCode(row?.OldDim2, 3)
  const prevDesc = String(row?.OldDesc || '').trim()
  const prevDueDate = row?.OldDueDate || ''
  if (nextItemCode !== prevItemCode) return true
  if (nextColor !== prevColor) return true
  if (nextDim2 !== prevDim2) return true
  if (String(nextDesc || '').trim() !== prevDesc) return true
  return nextDueDate !== prevDueDate
}
// Builds deduplicated, user-facing (Turkish) warning messages for rows
// whose optional color fields were left empty. A missing primary color
// supersedes the secondary-color warning for the same row.
function collectOptionalColorWarnings (rows) {
  const seen = new Set()
  for (const row of (rows || [])) {
    const code = String(row?.NewItemCode || '').trim().toUpperCase()
    if (!code) continue
    if (!normalizeShortCode(row?.NewColor, 3)) {
      seen.add(`${code} icin renk secmediniz.`)
    } else if (!normalizeShortCode(row?.NewDim2, 3)) {
      seen.add(`${code} icin 2. renk bos kalacak.`)
    }
  }
  // Set preserves insertion order, matching first-occurrence ordering.
  return [...seen]
}
// Asks the user to confirm proceeding despite missing optional colors.
// Resolves true immediately when there are no warnings; otherwise resolves
// true only on explicit OK, false on cancel or dismiss.
function confirmOptionalColorWarnings (rows) {
  const warnings = collectOptionalColorWarnings(rows)
  if (warnings.length === 0) return Promise.resolve(true)
  return new Promise((resolve) => {
    const dialog = $q.dialog({
      title: 'Renk Uyarisi',
      message: `${warnings.join('<br>')}<br><br>Devam etmek istiyor musunuz?`,
      html: true,
      ok: { label: 'Evet, Devam Et', color: 'warning' },
      cancel: { label: 'Vazgec', flat: true }
    })
    dialog.onOk(() => resolve(true))
    dialog.onCancel(() => resolve(false))
    dialog.onDismiss(() => resolve(false))
  })
}
function createEmptyCdItemDraft (itemCode) {
return {
ItemTypeCode: '1',
@@ -964,13 +1026,15 @@ async function copyFromOldProduct (targetType = 'cdItem') {
if (targetType === 'cdItem') {
const data = await store.fetchCdItemByCode(sourceCode)
if (data) {
const targetCode = cdItemTargetCode.value
const targetCode = String(cdItemTargetCode.value || '').trim().toUpperCase()
const draft = createEmptyCdItemDraft(targetCode)
for (const k of Object.keys(draft)) {
if (data[k] !== undefined && data[k] !== null) {
draft[k] = String(data[k])
}
}
// Source item kopyalansa da hedef popup kodu degismemeli.
draft.ItemCode = targetCode
cdItemDraftForm.value = draft
persistCdItemDraft()
$q.notify({ type: 'positive', message: 'Boyutlandirma bilgileri kopyalandi.' })
@@ -1032,7 +1096,11 @@ async function openCdItemDialog (itemCode) {
}
function persistCdItemDraft () {
const payload = normalizeCdItemDraftForPayload(cdItemDraftForm.value)
const targetCode = String(cdItemTargetCode.value || '').trim().toUpperCase()
const payload = normalizeCdItemDraftForPayload({
...(cdItemDraftForm.value || {}),
ItemCode: targetCode || String(cdItemDraftForm.value?.ItemCode || '').trim().toUpperCase()
})
if (!payload.ItemCode) return null
store.setCdItemDraft(payload.ItemCode, payload)
return payload
@@ -1177,7 +1245,7 @@ async function openAttributeDialog (itemCode) {
if (!code) return
copySourceCode.value = null
attributeTargetCode.value = code
const existingDraft = store.getProductAttributeDraft(code)
const existingDraft = JSON.parse(JSON.stringify(store.getProductAttributeDraft(code) || []))
const modeInfo = store.classifyItemCode(code)
const fetched = await store.fetchProductAttributes(1)
const fromLookup = buildAttributeRowsFromLookup(fetched)
@@ -1191,6 +1259,32 @@ async function openAttributeDialog (itemCode) {
$q.notify({ type: 'negative', message: 'Urun ozellikleri listesi alinamadi. Lutfen daha sonra tekrar deneyin.' })
return
}
// Draft varsa popup her zaman draft'tan acilir (yeniden acinca secimler kaybolmasin).
if (Array.isArray(existingDraft) && existingDraft.length) {
attributeRows.value = JSON.parse(JSON.stringify(
mergeAttributeDraftWithLookupOptions(existingDraft, fromLookup)
))
console.info('[OrderProductionUpdate] openAttributeDialog rowsPrepared', {
code,
mode: modeInfo.mode,
useDraft: true,
rowCount: Array.isArray(attributeRows.value) ? attributeRows.value.length : 0,
optionCounts: (attributeRows.value || []).map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
options: Array.isArray(r?.Options) ? r.Options.length : 0,
allOptions: Array.isArray(r?.AllOptions) ? r.AllOptions.length : 0,
selected: String(r?.AttributeCode || '').trim()
}))
})
for (const row of (attributeRows.value || [])) {
if (!Array.isArray(row.AllOptions)) row.AllOptions = Array.isArray(row.Options) ? [...row.Options] : []
if (!Array.isArray(row.Options)) row.Options = [...row.AllOptions]
}
attributeDialogOpen.value = true
return
}
const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
console.info('[OrderProductionUpdate] openAttributeDialog dbCurrent', {
code,
@@ -1226,13 +1320,11 @@ async function openAttributeDialog (itemCode) {
})
const useDraft = Array.isArray(existingDraft) && existingDraft.length
attributeRows.value = useDraft
? JSON.parse(JSON.stringify(mergeAttributeDraftWithLookupOptions(existingDraft, baseRows)))
: JSON.parse(JSON.stringify(baseRows))
attributeRows.value = JSON.parse(JSON.stringify(baseRows))
console.info('[OrderProductionUpdate] openAttributeDialog rowsPrepared', {
code,
mode: modeInfo.mode,
useDraft,
useDraft: false,
rowCount: Array.isArray(attributeRows.value) ? attributeRows.value.length : 0,
optionCounts: (attributeRows.value || []).map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
@@ -1249,27 +1341,26 @@ async function openAttributeDialog (itemCode) {
row.Options = [...row.AllOptions]
}
}
if ((!existingDraft || !existingDraft.length) && baseRows.length) {
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(baseRows)))
}
attributeDialogOpen.value = true
}
function saveAttributeDraft () {
const code = String(attributeTargetCode.value || '').trim().toUpperCase()
if (!code) return
for (const row of (attributeRows.value || [])) {
const rows = JSON.parse(JSON.stringify(attributeRows.value || []))
for (const row of rows) {
const selected = String(row?.AttributeCode || '').trim()
if (!selected) {
$q.notify({ type: 'negative', message: `Urun ozelliklerinde secim zorunlu: ${row?.TypeLabel || ''}` })
return
}
}
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(attributeRows.value || [])))
store.setProductAttributeDraft(code, rows)
console.info('[OrderProductionUpdate] saveAttributeDraft', {
code,
rowCount: (attributeRows.value || []).length,
selected: (attributeRows.value || []).map(r => ({
rowCount: rows.length,
selectedCount: rows.length,
selected: rows.map(r => ({
type: Number(r?.AttributeTypeCodeNumber || 0),
code: String(r?.AttributeCode || '').trim()
}))
@@ -1287,17 +1378,6 @@ watch(
{ deep: true }
)
watch(
attributeRows,
(rows) => {
if (!attributeDialogOpen.value) return
const code = String(attributeTargetCode.value || '').trim().toUpperCase()
if (!code) return
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(rows || [])))
},
{ deep: true }
)
async function collectProductAttributesFromSelectedRows (selectedRows) {
const codeSet = [...new Set(
(selectedRows || [])
@@ -1309,22 +1389,24 @@ async function collectProductAttributesFromSelectedRows (selectedRows) {
for (const code of codeSet) {
const modeInfo = store.classifyItemCode(code)
let rows = store.getProductAttributeDraft(code)
let dbMap = new Map()
const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
const dbMap = new Map(
(dbCurrent || []).map(x => [
Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
String(x?.attribute_code || x?.AttributeCode || '').trim()
]).filter(x => x[0] > 0)
)
const hasDbAttributes = dbMap.size > 0
const effectiveMode = hasDbAttributes ? 'existing' : modeInfo.mode
console.info('[OrderProductionUpdate] collectProductAttributes start', {
code,
mode: modeInfo.mode,
effectiveMode,
hasDbAttributes,
draftRowCount: Array.isArray(rows) ? rows.length : 0
})
if (modeInfo.mode === 'existing') {
const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
dbMap = new Map(
(dbCurrent || []).map(x => [
Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
String(x?.attribute_code || x?.AttributeCode || '').trim()
]).filter(x => x[0] > 0)
)
if (effectiveMode === 'existing') {
// Existing kodda kullanıcı değişiklik yaptıysa draftı koru.
// Draft yoksa DB'den zorunlu/fresh çek.
if (!Array.isArray(rows) || !rows.length) {
@@ -1354,26 +1436,7 @@ async function collectProductAttributesFromSelectedRows (selectedRows) {
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(rows)))
}
} else if (!Array.isArray(rows) || !rows.length) {
const lookup = await store.fetchProductAttributes(1)
const baseRows = buildAttributeRowsFromLookup(lookup)
const dbCurrent = await store.fetchProductItemAttributes(code, 1, true)
const dbMap = new Map(
(dbCurrent || []).map(x => [
Number(x?.attribute_type_code || x?.AttributeTypeCode || 0),
String(x?.attribute_code || x?.AttributeCode || '').trim()
]).filter(x => x[0] > 0)
)
rows = baseRows.map(row => ({
...row,
AttributeCode: dbMap.get(Number(row.AttributeTypeCodeNumber || 0)) || ''
}))
console.info('[OrderProductionUpdate] collectProductAttributes new init', {
code,
lookupCount: Array.isArray(lookup) ? lookup.length : 0,
baseRowCount: baseRows.length,
dbCurrentCount: Array.isArray(dbCurrent) ? dbCurrent.length : 0
})
store.setProductAttributeDraft(code, JSON.parse(JSON.stringify(rows)))
return { errMsg: `${code} icin urun ozellikleri taslagi kaydedilmedi`, productAttributes: [] }
}
if (!Array.isArray(rows) || !rows.length) {
@@ -1387,7 +1450,7 @@ async function collectProductAttributesFromSelectedRows (selectedRows) {
return { errMsg: `${code} icin urun ozellikleri eksik`, productAttributes: [] }
}
if (modeInfo.mode === 'existing') {
if (effectiveMode === 'existing') {
const originalCode =
dbMap.get(attributeTypeCode) ||
String(row?.OriginalAttributeCode || '').trim()
@@ -1410,6 +1473,7 @@ async function collectProductAttributesFromSelectedRows (selectedRows) {
console.info('[OrderProductionUpdate] collectProductAttributes done', {
code,
mode: modeInfo.mode,
effectiveMode,
outCount: out.filter(x => x.ItemCode === code).length,
rowCount: rows.length,
optionCounts: rows.map(r => ({
@@ -1738,60 +1802,77 @@ async function refreshAll () {
}
async function onBulkSubmit () {
if (isBulkSubmitting.value || store.saving) {
console.info('[OrderProductionUpdate] onBulkSubmit ignored (already running)', {
orderHeaderID: orderHeaderID.value,
isBulkSubmitting: isBulkSubmitting.value,
storeSaving: store.saving
})
return
}
isBulkSubmitting.value = true
const flowStart = nowMs()
const selectedRows = rows.value.filter(r => !!selectedMap.value[r.RowKey])
const headerAverageDueDateValue = normalizeDateInput(headerAverageDueDate.value)
const headerDateChanged = hasHeaderAverageDueDateChange.value
if (!selectedRows.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Lutfen en az bir satir seciniz veya ustteki termin tarihini degistiriniz.' })
return
}
const prepStart = nowMs()
const { errMsg, lines } = collectLinesFromRows(selectedRows)
if (errMsg) {
$q.notify({ type: 'negative', message: errMsg })
return
}
if (!lines.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Secili satirlarda degisiklik yok.' })
return
}
let cdItems = []
let productAttributes = []
if (lines.length > 0) {
const { errMsg: cdErrMsg, cdItems: nextCdItems } = await collectCdItemsFromSelectedRows(selectedRows)
if (cdErrMsg) {
$q.notify({ type: 'negative', message: cdErrMsg })
const firstCode = String(cdErrMsg.split(' ')[0] || '').trim()
if (firstCode) openCdItemDialog(firstCode)
return
}
cdItems = nextCdItems
const { errMsg: attrErrMsg, productAttributes: nextProductAttributes } = await collectProductAttributesFromSelectedRows(selectedRows)
if (attrErrMsg) {
$q.notify({ type: 'negative', message: attrErrMsg })
const firstCode = String(attrErrMsg.split(' ')[0] || '').trim()
if (firstCode) openAttributeDialog(firstCode)
return
}
productAttributes = nextProductAttributes
}
console.info('[OrderProductionUpdate] onBulkSubmit prepared', {
orderHeaderID: orderHeaderID.value,
selectedRowCount: selectedRows.length,
lineCount: lines.length,
cdItemCount: cdItems.length,
attributeCount: productAttributes.length,
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
prepDurationMs: Math.round(nowMs() - prepStart)
})
try {
suppressAutoSetupDialogs.value = true
const selectedRows = rows.value.filter(r => !!selectedMap.value[r.RowKey])
const headerAverageDueDateValue = normalizeDateInput(headerAverageDueDate.value)
const headerDateChanged = hasHeaderAverageDueDateChange.value
if (!selectedRows.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Lutfen en az bir satir seciniz veya ustteki termin tarihini degistiriniz.' })
return
}
const prepStart = nowMs()
const { errMsg, lines } = collectLinesFromRows(selectedRows)
if (errMsg) {
$q.notify({ type: 'negative', message: errMsg })
return
}
if (!lines.length && !headerDateChanged) {
$q.notify({ type: 'warning', message: 'Secili satirlarda degisiklik yok.' })
return
}
if (lines.length > 0) {
const changedRows = selectedRows.filter(hasRowChange)
const confirmed = await confirmOptionalColorWarnings(changedRows)
if (!confirmed) return
}
let cdItems = []
let productAttributes = []
if (lines.length > 0) {
const { errMsg: cdErrMsg, cdItems: nextCdItems } = await collectCdItemsFromSelectedRows(selectedRows)
if (cdErrMsg) {
$q.notify({ type: 'negative', message: cdErrMsg })
return
}
cdItems = nextCdItems
const { errMsg: attrErrMsg, productAttributes: nextProductAttributes } = await collectProductAttributesFromSelectedRows(selectedRows)
if (attrErrMsg) {
$q.notify({ type: 'negative', message: attrErrMsg })
const firstCode = String(attrErrMsg.split(' ')[0] || '').trim().toUpperCase()
if (isValidBaggiModelCode(firstCode)) {
await openAttributeDialog(firstCode)
}
return
}
productAttributes = nextProductAttributes
}
console.info('[OrderProductionUpdate] onBulkSubmit prepared', {
orderHeaderID: orderHeaderID.value,
selectedRowCount: selectedRows.length,
lineCount: lines.length,
cdItemCount: cdItems.length,
attributeCount: productAttributes.length,
headerAverageDueDate: headerAverageDueDateValue,
headerDateChanged,
prepDurationMs: Math.round(nowMs() - prepStart)
})
const applyChanges = async (insertMissing) => {
const applyStart = nowMs()
const applyResult = await store.applyUpdates(
@@ -1825,7 +1906,7 @@ async function onBulkSubmit () {
if (lines.length > 0) {
const validateStart = nowMs()
const validate = await store.validateUpdates(orderHeaderID.value, lines)
const validate = await store.validateUpdates(orderHeaderID.value, lines, cdItems)
console.info('[OrderProductionUpdate] validate finished', {
orderHeaderID: orderHeaderID.value,
lineCount: lines.length,
@@ -1869,11 +1950,14 @@ async function onBulkSubmit () {
return
}
$q.notify({ type: 'negative', message: store.error || 'Toplu kayit islemi basarisiz.' })
} finally {
isBulkSubmitting.value = false
suppressAutoSetupDialogs.value = false
console.info('[OrderProductionUpdate] onBulkSubmit total', {
orderHeaderID: orderHeaderID.value,
durationMs: Math.round(nowMs() - flowStart)
})
}
console.info('[OrderProductionUpdate] onBulkSubmit total', {
orderHeaderID: orderHeaderID.value,
durationMs: Math.round(nowMs() - flowStart)
})
}
</script>

View File

@@ -53,9 +53,10 @@
:virtual-scroll-sticky-size-start="headerHeight"
:virtual-scroll-slice-size="36"
:rows-per-page-options="[0]"
:pagination="{ rowsPerPage: 0 }"
v-model:pagination="tablePagination"
hide-bottom
:table-style="tableStyle"
@virtual-scroll="onTableVirtualScroll"
>
<template #header="props">
<q-tr :props="props" class="header-row-fixed">
@@ -309,10 +310,13 @@
</template>
<script setup>
import { computed, onMounted, ref } from 'vue'
import { computed, onMounted, ref, watch } from 'vue'
import { useProductPricingStore } from 'src/stores/ProductPricingStore'
const store = useProductPricingStore()
const FETCH_LIMIT = 500
const nextCursor = ref('')
const loadingMore = ref(false)
const usdToTry = 38.25
const eurToTry = 41.6
@@ -381,6 +385,12 @@ const headerFilterFieldSet = new Set([
])
const mainTableRef = ref(null)
const tablePagination = ref({
page: 1,
rowsPerPage: 0,
sortBy: 'productCode',
descending: false
})
const selectedMap = ref({})
const selectedCurrencies = ref(['USD', 'EUR', 'TRY'])
const showSelectedOnly = ref(false)
@@ -570,6 +580,7 @@ const selectedRowCount = computed(() => Object.values(selectedMap.value).filter(
const selectedVisibleCount = computed(() => visibleRowIds.value.filter((id) => !!selectedMap.value[id]).length)
const allSelectedVisible = computed(() => visibleRowIds.value.length > 0 && selectedVisibleCount.value === visibleRowIds.value.length)
const someSelectedVisible = computed(() => selectedVisibleCount.value > 0)
const hasMoreRows = computed(() => Boolean(store.hasMore))
function isHeaderFilterField (field) {
return headerFilterFieldSet.has(field)
@@ -691,10 +702,26 @@ function round2 (value) {
}
// Parses a localized numeric string into a finite number.
// Supports both "1.234,56" (EU) and "1,234.56" (US) styles by treating the
// LAST separator found as the decimal mark. Returns 0 for blank or
// unparseable input; finite numbers pass through unchanged.
//
// Fix: the previous body contained merge residue — the old
// `const normalized = ...` chain coexisted with the new `let normalized`,
// a duplicate-declaration SyntaxError. Only the new logic is kept.
function parseNumber (val) {
  if (typeof val === 'number') return Number.isFinite(val) ? val : 0
  const text = String(val ?? '').trim().replace(/\s/g, '')
  if (!text) return 0
  const lastComma = text.lastIndexOf(',')
  const lastDot = text.lastIndexOf('.')
  let normalized = text
  if (lastComma >= 0 && lastDot >= 0) {
    if (lastComma > lastDot) {
      // EU style: dots are thousands separators, comma is the decimal mark.
      normalized = text.replace(/\./g, '').replace(',', '.')
    } else {
      // US style: commas are thousands separators.
      normalized = text.replace(/,/g, '')
    }
  } else if (lastComma >= 0) {
    // Only commas present: treat the comma as the decimal mark.
    normalized = text.replace(/\./g, '').replace(',', '.')
  } else {
    normalized = text.replace(/,/g, '')
  }
  const n = Number(normalized)
  return Number.isFinite(n) ? n : 0
}
@@ -702,11 +729,7 @@ function parseNumber (val) {
// Like parseNumber, but returns null (instead of 0) for blank input so
// callers can distinguish "not entered" from an explicit zero. Delegates
// locale-aware normalization to parseNumber.
//
// Fix: merge residue left two `const n` declarations (the old
// `Number(normalized)` with `normalized` no longer defined, plus the new
// `parseNumber(text)`) — a SyntaxError. Only the new logic is kept.
function parseNullableNumber (val) {
  const text = String(val ?? '').trim()
  if (!text) return null
  const n = parseNumber(text)
  return Number.isFinite(n) ? n : null
}
@@ -840,14 +863,83 @@ function clearAllCurrencies () {
selectedCurrencies.value = []
}
// Fetches one page of pricing rows from the store. With reset=true the
// keyset cursor starts from the beginning and the store replaces its rows;
// otherwise the fetch continues from the saved cursor and appends.
// Returns the number of rows fetched.
async function fetchChunk ({ reset = false } = {}) {
  const cursor = reset ? '' : nextCursor.value
  const result = await store.fetchRows({
    limit: FETCH_LIMIT,
    afterProductCode: cursor,
    append: !reset
  })
  // Persist the server-provided cursor for the next page.
  nextCursor.value = String(result?.nextCursor || '')
  return Number(result?.fetched) || 0
}
// Loads the next page unless a fetch is already in flight or the server
// reported no more rows. The loadingMore flag guards against re-entry from
// rapid virtual-scroll events.
async function loadMoreRows () {
  const busy = loadingMore.value || store.loading
  if (busy || !hasMoreRows.value) return
  loadingMore.value = true
  try {
    await fetchChunk({ reset: false })
  } finally {
    loadingMore.value = false
  }
}
// q-table @virtual-scroll handler: prefetch the next page once the viewport
// comes within 25 rows of the end of the currently filtered rows.
function onTableVirtualScroll (details) {
  const lastVisibleIndex = Number(details?.to || 0)
  if (!Number.isFinite(lastVisibleIndex)) return
  const nearBottom = lastVisibleIndex >= filteredRows.value.length - 25
  if (nearBottom) {
    // Fire-and-forget; loadMoreRows self-guards against re-entry.
    void loadMoreRows()
  }
}
// Keeps fetching pages until at least minRows rows pass the active filters,
// the server runs out of rows, or maxBatches fetches have been attempted
// (the batch cap prevents unbounded loops on highly selective filters).
async function ensureEnoughVisibleRows (minRows = 80, maxBatches = 4) {
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || filteredRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
// Full reload: resets the keyset cursor, fetches the first chunk, then tops
// up until enough rows are visible. Clears the row selection afterwards.
// Errors are logged and swallowed so the page stays usable; the done-log
// reports duration, row count, and whether the store captured an error.
//
// Fix: removed the stray pre-refactor `await store.fetchRows()` left above
// the try block by a merge — it issued a duplicate, unpaginated fetch on
// every reload before fetchChunk({ reset: true }) ran.
async function reloadData () {
  const startedAt = Date.now()
  console.info('[product-pricing][ui] reload:start', {
    at: new Date(startedAt).toISOString()
  })
  try {
    nextCursor.value = ''
    await fetchChunk({ reset: true })
    await ensureEnoughVisibleRows(120, 6)
  } catch (err) {
    console.error('[product-pricing][ui] reload:error', {
      duration_ms: Date.now() - startedAt,
      message: String(err?.message || err || 'reload failed')
    })
  }
  console.info('[product-pricing][ui] reload:done', {
    duration_ms: Date.now() - startedAt,
    row_count: Array.isArray(store.rows) ? store.rows.length : 0,
    has_error: Boolean(store.error)
  })
  selectedMap.value = {}
}
onMounted(async () => {
await reloadData()
})
watch(
[
columnFilters,
numberRangeFilters,
dateRangeFilters,
showSelectedOnly,
() => tablePagination.value.sortBy,
() => tablePagination.value.descending
],
() => { void ensureEnoughVisibleRows(80, 4) },
{ deep: true }
)
</script>
<style scoped>

View File

@@ -47,7 +47,7 @@
<template #append>
<q-icon name="event" class="cursor-pointer">
<q-popup-proxy cover transition-show="scale" transition-hide="scale">
<q-date v-model="dateFrom" mask="YYYY-MM-DD" locale="tr-TR"/>
<q-date v-model="dateFrom" mask="YYYY-MM-DD" :locale="dateLocale"/>
</q-popup-proxy>
</q-icon>
</template>
@@ -63,7 +63,7 @@
<template #append>
<q-icon name="event" class="cursor-pointer">
<q-popup-proxy cover transition-show="scale" transition-hide="scale">
<q-date v-model="dateTo" mask="YYYY-MM-DD" locale="tr-TR" />
<q-date v-model="dateTo" mask="YYYY-MM-DD" :locale="dateLocale" />
</q-popup-proxy>
</q-icon>
</template>
@@ -277,12 +277,16 @@ import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
import dayjs from 'dayjs'
import { usePermission } from 'src/composables/usePermission'
import { normalizeSearchText } from 'src/utils/searchText'
import { useLocaleStore } from 'src/stores/localeStore'
import { getDateLocale } from 'src/i18n/dayjsLocale'
const { canRead, canExport } = usePermission()
const canReadFinance = canRead('finance')
const canExportFinance = canExport('finance')
const $q = useQuasar()
const localeStore = useLocaleStore()
const dateLocale = computed(() => getDateLocale(localeStore.locale))
const accountStore = useAccountStore()
const statementheaderStore = useStatementheaderStore()
@@ -363,7 +367,7 @@ async function onFilterClick() {
startdate: dateFrom.value,
enddate: dateTo.value,
accountcode: selectedCari.value,
langcode: 'TR',
langcode: localeStore.backendLangCode,
parislemler: selectedMonType.value
})
@@ -411,7 +415,7 @@ function resetFilters() {
/* Format */
function formatAmount(n) {
if (n == null || isNaN(n)) return '0,00'
return new Intl.NumberFormat('tr-TR', {
return new Intl.NumberFormat(dateLocale.value, {
minimumFractionDigits: 2,
maximumFractionDigits: 2
}).format(n)
@@ -467,7 +471,8 @@ async function handleDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3'])
selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
localeStore.backendLangCode
)
console.log("📤 [DEBUG] Storedan gelen result:", result)
@@ -508,7 +513,8 @@ async function CurrheadDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // parasal işlem tipi (parislemler)
selectedMonType.value, // parasal işlem tipi (parislemler)
localeStore.backendLangCode
)
console.log("📤 [DEBUG] CurrheadDownloadresult:", result)

View File

@@ -0,0 +1,852 @@
<template>
<q-page v-if="canUpdateLanguage" class="q-pa-md translation-page">
<div class="translation-toolbar sticky-toolbar">
<div class="row q-col-gutter-sm items-end q-mb-md">
<div class="col-12 col-md-4">
<q-input
v-model="filters.q"
dense
outlined
clearable
label="Kelime ara"
/>
</div>
<div class="col-auto">
<q-btn color="primary" icon="search" label="Getir" @click="loadRows" />
</div>
<div class="col-auto">
<q-btn
color="secondary"
icon="sync"
label="YENİ KELİMELERİ GETİR"
:loading="store.saving"
@click="syncSources"
/>
</div>
<div class="col-auto">
<q-toggle v-model="autoTranslate" dense color="primary" label="Oto Çeviri" />
</div>
</div>
<div class="row q-gutter-sm q-mb-sm">
<q-btn
color="accent"
icon="g_translate"
label="Seçilenleri Çevir"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="translateSelectedRows"
/>
<q-btn
color="secondary"
icon="done_all"
label="Seçilenleri Onayla"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="bulkApproveSelected"
/>
<q-btn
color="primary"
icon="save"
label="Seçilenleri Toplu Güncelle"
:disable="selectedKeys.length === 0"
:loading="store.saving"
@click="bulkSaveSelected"
/>
</div>
</div>
<q-table
ref="tableRef"
class="translation-table"
flat
bordered
virtual-scroll
:virtual-scroll-sticky-size-start="56"
row-key="t_key"
:loading="store.loading || store.saving"
:rows="pivotRows"
:columns="columns"
:rows-per-page-options="[0]"
v-model:pagination="tablePagination"
hide-bottom
@virtual-scroll="onVirtualScroll"
>
<template #body-cell-actions="props">
<q-td :props="props">
<q-btn
dense
color="primary"
icon="save"
label="Güncelle"
:disable="!rowHasChanges(props.row.t_key)"
:loading="store.saving"
@click="saveRow(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-select="props">
<q-td :props="props">
<q-checkbox
dense
:model-value="selectedKeys.includes(props.row.t_key)"
@update:model-value="(v) => toggleSelected(props.row.t_key, v)"
/>
</q-td>
</template>
<template #body-cell-source_text_tr="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_text_tr')">
<div class="source-text-label" :title="rowDraft(props.row.t_key).source_text_tr">
{{ rowDraft(props.row.t_key).source_text_tr }}
</div>
</q-td>
</template>
<template #body-cell-source_type="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_type')">
<q-badge
color="primary"
text-color="white"
class="source-type-badge"
:label="sourceTypeLabel(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-en="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'en')">
<q-input
v-model="rowDraft(props.row.t_key).en"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-de="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'de')">
<q-input
v-model="rowDraft(props.row.t_key).de"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-es="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'es')">
<q-input
v-model="rowDraft(props.row.t_key).es"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-it="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'it')">
<q-input
v-model="rowDraft(props.row.t_key).it"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-ru="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ru')">
<q-input
v-model="rowDraft(props.row.t_key).ru"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
<template #body-cell-ar="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ar')">
<q-input
v-model="rowDraft(props.row.t_key).ar"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td>
</template>
</q-table>
</q-page>
<q-page v-else class="q-pa-md flex flex-center">
<div class="text-negative text-subtitle1">
Bu modüle erişim yetkiniz yok.
</div>
</q-page>
</template>
<script setup>
import { computed, onBeforeUnmount, onMounted, ref, watch } from 'vue'
import { useQuasar } from 'quasar'
import { usePermission } from 'src/composables/usePermission'
import { useTranslationStore } from 'src/stores/translationStore'
const $q = useQuasar()
const store = useTranslationStore()
const { canUpdate } = usePermission()
const canUpdateLanguage = canUpdate('language')
// Toolbar filter state; `q` is the free-text search query.
const filters = ref({
q: ''
})
// When true, the source-sync action also machine-translates newly found keys.
const autoTranslate = ref(false)
const tableRef = ref(null)
// Chunk size for paged row fetching (infinite scroll).
const FETCH_LIMIT = 1400
// Offset to request for the next appended chunk.
const loadedOffset = ref(0)
const hasMoreRows = ref(true)
const loadingMore = ref(false)
// q-table pagination; rowsPerPage=0 disables client-side paging (virtual scroll).
const tablePagination = ref({
page: 1,
rowsPerPage: 0,
sortBy: 'source_text_tr',
descending: false
})
// Debounce handle for filter-triggered reloads (cleared on unmount).
let filterReloadTimer = null
// Selectable source systems for a translation key.
const sourceTypeOptions = [
{ label: 'dummy', value: 'dummy' },
{ label: 'postgre', value: 'postgre' },
{ label: 'mssql', value: 'mssql' }
]
// Display labels for source_type values.
const sourceTypeLabelMap = {
dummy: 'UI',
postgre: 'PostgreSQL',
mssql: 'MSSQL'
}
// Table column definitions (labels are Turkish UI text).
const columns = [
{ name: 'actions', label: 'Güncelle', field: 'actions', align: 'left' },
{ name: 'select', label: 'Seç', field: 'select', align: 'left' },
{ name: 'source_text_tr', label: 'Türkçe Metin', field: 'source_text_tr', align: 'left', style: 'min-width: 340px' },
{ name: 'source_type', label: 'Kaynak', field: 'source_type', align: 'left', style: 'min-width: 140px' },
{ name: 'en', label: 'İngilizce', field: 'en', align: 'left', style: 'min-width: 220px' },
{ name: 'de', label: 'Almanca', field: 'de', align: 'left', style: 'min-width: 220px' },
{ name: 'es', label: 'İspanyolca', field: 'es', align: 'left', style: 'min-width: 220px' },
{ name: 'it', label: 'İtalyanca', field: 'it', align: 'left', style: 'min-width: 220px' },
{ name: 'ru', label: 'Rusça', field: 'ru', align: 'left', style: 'min-width: 220px' },
{ name: 'ar', label: 'Arapça', field: 'ar', align: 'left', style: 'min-width: 220px' }
]
// Editable drafts per t_key, and snapshots of their last-loaded values.
const draftByKey = ref({})
const originalByKey = ref({})
// t_keys currently selected for bulk operations.
const selectedKeys = ref([])
// Per-key debounce timers for auto-save; deliberately a plain Map (not reactive).
const autoSaveTimers = new Map()
// Pivot the flat per-language store rows into one table row per translation
// key: one column per language plus per-language row metadata under `langs`.
const pivotRows = computed(() => {
  // Non-Turkish languages map 1:1 onto a column of the same name.
  const TARGET_LANGS = ['en', 'de', 'es', 'it', 'ru', 'ar']
  const byKey = new Map()
  for (const row of store.rows) {
    const key = row.t_key
    if (!byKey.has(key)) {
      byKey.set(key, {
        t_key: key,
        source_text_tr: '',
        source_type: 'dummy',
        en: '',
        de: '',
        es: '',
        it: '',
        ru: '',
        ar: '',
        langs: {}
      })
    }
    const target = byKey.get(key)
    // Keep the backing row's DB id / approval status / manual flag per language.
    target.langs[row.lang_code] = {
      id: row.id,
      status: row.status,
      is_manual: row.is_manual
    }
    if (row.lang_code === 'tr') {
      // The Turkish row carries the source text and the originating system.
      target.source_text_tr = row.translated_text || row.source_text_tr || ''
      target.source_type = row.source_type || 'dummy'
    } else if (TARGET_LANGS.includes(row.lang_code)) {
      target[row.lang_code] = row.translated_text || ''
    }
  }
  // Stable alphabetical order by key keeps the table deterministic.
  return Array.from(byKey.values()).sort((a, b) => a.t_key.localeCompare(b.t_key))
})
/**
 * Rebuild the per-key draft/original snapshots from the current pivot rows.
 * With `preserveDirty: true` (used when appending chunks), rows whose draft
 * diverged from its original keep their unsaved edits instead of being
 * overwritten by freshly loaded data.
 */
function snapshotDrafts (options = {}) {
  const preserveDirty = Boolean(options?.preserveDirty)
  // Fields compared to detect a dirty draft. Note: includes source_type,
  // unlike rowHasChanges(), so an edited source type also survives a
  // preserving snapshot.
  const fields = ['source_text_tr', 'source_type', 'en', 'de', 'es', 'it', 'ru', 'ar']
  const draft = {}
  const original = {}
  for (const row of pivotRows.value) {
    const existingDraft = draftByKey.value[row.t_key]
    const existingOriginal = originalByKey.value[row.t_key]
    const keepExisting = preserveDirty &&
      existingDraft &&
      existingOriginal &&
      fields.some((f) => existingDraft[f] !== existingOriginal[f])
    if (keepExisting) {
      draft[row.t_key] = { ...existingDraft }
      original[row.t_key] = { ...existingOriginal }
      continue
    }
    draft[row.t_key] = {
      source_text_tr: row.source_text_tr || '',
      source_type: row.source_type || 'dummy',
      en: row.en || '',
      de: row.de || '',
      es: row.es || '',
      it: row.it || '',
      ru: row.ru || '',
      ar: row.ar || ''
    }
    original[row.t_key] = { ...draft[row.t_key] }
  }
  draftByKey.value = draft
  originalByKey.value = original
  // Drop selections for keys that no longer exist after the refresh.
  selectedKeys.value = selectedKeys.value.filter(k => draft[k])
}
// Return the editable draft object for a key, lazily creating an empty one
// so template v-model bindings always have a target.
function rowDraft (key) {
  let draft = draftByKey.value[key]
  if (!draft) {
    draft = {
      source_text_tr: '',
      source_type: 'dummy',
      en: '',
      de: '',
      es: '',
      it: '',
      ru: '',
      ar: ''
    }
    draftByKey.value[key] = draft
  }
  return draft
}
// Build the query-parameter object for row fetches; an empty search text
// becomes `undefined` so it is omitted from the request.
function buildFilters () {
  const trimmed = String(filters.value.q || '').trim()
  return { q: trimmed === '' ? undefined : trimmed }
}
// True when any text field of the draft differs from its loaded original.
// `source_type` is intentionally not part of this check.
function rowHasChanges (key) {
  const draft = draftByKey.value[key]
  const orig = originalByKey.value[key]
  if (!draft || !orig) return false
  const fields = ['source_text_tr', 'en', 'de', 'es', 'it', 'ru', 'ar']
  return fields.some((f) => draft[f] !== orig[f])
}
// True when the given key's row for `lang` exists and awaits approval.
function isPending (key, lang) {
  const match = pivotRows.value.find(r => r.t_key === key)
  return match?.langs?.[lang]?.status === 'pending'
}
// CSS class for a table cell: 'cell-dirty' for unsaved edits (takes priority),
// 'cell-new' for cells whose backing row is still pending approval.
function cellClass (key, field) {
  const draft = draftByKey.value[key]
  const orig = originalByKey.value[key]
  if (!draft || !orig) return ''
  if (field !== 'source_type' && draft[field] !== orig[field]) return 'cell-dirty'
  // The Turkish column is backed by the 'tr' language row; others map 1:1.
  const highlightable = ['en', 'de', 'es', 'it', 'ru', 'ar', 'source_text_tr']
  if (highlightable.includes(field)) {
    const lang = field === 'source_text_tr' ? 'tr' : field
    if (isPending(key, lang)) return 'cell-new'
  }
  return ''
}
// Human-readable label for the row's source type, falling back to the raw
// (lowercased) value and then to '-'.
function sourceTypeLabel (key) {
  const raw = rowDraft(key).source_type || 'dummy'
  const normalized = String(raw).toLowerCase()
  return sourceTypeLabelMap[normalized] || normalized || '-'
}
// Add or remove a key from the bulk-operation selection.
function toggleSelected (key, checked) {
  if (!checked) {
    selectedKeys.value = selectedKeys.value.filter(k => k !== key)
    return
  }
  // Keep existing position if the key is already selected.
  if (!selectedKeys.value.includes(key)) {
    selectedKeys.value = selectedKeys.value.concat(key)
  }
}
// Debounced auto-save: (re)start a 250 ms timer per row key; when it fires,
// persist the row only if it still has unsaved changes.
function queueAutoSave (key) {
  if (!key) return
  const pending = autoSaveTimers.get(key)
  if (pending) clearTimeout(pending)
  autoSaveTimers.set(key, setTimeout(() => {
    autoSaveTimers.delete(key)
    if (rowHasChanges(key)) void saveRow(key)
  }, 250))
}
// Fetch one chunk of translation rows from the backend. append=false
// replaces the table content; append=true extends it (infinite scroll).
async function fetchRowsChunk (append = false) {
const params = {
...buildFilters(),
limit: FETCH_LIMIT,
offset: append ? loadedOffset.value : 0
}
await store.fetchRows(params, { append })
// NOTE(review): store.count is read here as "rows returned by THIS request".
// If the backend reports the TOTAL matching row count instead, the offset
// and has-more bookkeeping below would be wrong — confirm against the store.
const incomingCount = Number(store.count) || 0
if (append) {
loadedOffset.value += incomingCount
} else {
loadedOffset.value = incomingCount
}
// A full page implies more rows may exist on the server.
hasMoreRows.value = incomingCount === FETCH_LIMIT
// Preserve unsaved edits only when appending; a fresh load resets drafts.
snapshotDrafts({ preserveDirty: append })
}
// Reset pagination state and load the first chunk; failures are logged and
// surfaced as a negative notification.
async function loadRows () {
  try {
    loadedOffset.value = 0
    hasMoreRows.value = true
    await fetchRowsChunk(false)
  } catch (err) {
    const message = err?.message || 'Çeviri satırları yüklenemedi'
    console.error('[translation-sync][ui] loadRows:error', { message })
    $q.notify({ type: 'negative', message })
  }
}
// Append the next chunk unless exhausted or another load/save is running.
async function loadMoreRows () {
  const busy = loadingMore.value || store.loading || store.saving
  if (!hasMoreRows.value || busy) return
  loadingMore.value = true
  try {
    await fetchRowsChunk(true)
  } finally {
    loadingMore.value = false
  }
}
// Keep fetching chunks (up to maxBatches) until at least minRows pivot rows
// are available, so the virtual-scroll viewport stays filled.
async function ensureEnoughVisibleRows (minRows = 120, maxBatches = 4) {
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || pivotRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
// Virtual-scroll handler: prefetch the next chunk when the last visible row
// is within 15 rows of the bottom.
function onVirtualScroll (details) {
  const lastVisible = Number(details?.to || 0)
  if (!Number.isFinite(lastVisible)) return
  const nearBottom = lastVisible >= pivotRows.value.length - 15
  if (nearBottom) void loadMoreRows()
}
// Debounce reloads triggered by filter typing (350 ms trailing edge).
function scheduleFilterReload () {
  if (filterReloadTimer) clearTimeout(filterReloadTimer)
  filterReloadTimer = setTimeout(() => {
    filterReloadTimer = null
    void loadRows()
  }, 350)
}
/**
 * Create backend rows for languages that have no DB record yet but where the
 * user typed a non-empty translation into the draft.
 * Returns true when at least one row was inserted (caller should reload).
 */
async function ensureMissingLangRows (key, draft, langs) {
  const candidates = ['en', 'de', 'es', 'it', 'ru', 'ar']
  const missingLangs = candidates.filter(
    (lang) => !langs[lang] && String(draft[lang] || '').trim() !== ''
  )
  if (missingLangs.length === 0) return false
  await store.upsertMissing([
    {
      t_key: key,
      // Fall back to the key itself when the source text is still empty.
      source_text_tr: draft.source_text_tr || key
    }
  ], missingLangs)
  return true
}
/**
 * Build the bulk-update payload for one pivot row: one item per language row
 * whose text changed (or whose source type changed, which forces an update of
 * every existing language row). Rows without a DB id are skipped — those are
 * handled by ensureMissingLangRows(). Item order is tr, en, de, es, it, ru, ar.
 *
 * @param {object} row       pivot row (provides `langs` with DB ids)
 * @param {object} draft     current editable values
 * @param {object} original  values as last loaded
 * @param {string} approveStatus  status written on every touched row
 * @returns {Array<object>}  items for store.bulkUpdate()
 */
function buildRowUpdates (row, draft, original, approveStatus = 'approved') {
  const items = []
  const langs = row.langs || {}
  const sourceTypeChanged = draft.source_type !== original.source_type
  // Turkish is special: the edited source text is written to both
  // source_text_tr and translated_text on the 'tr' row.
  if (langs.tr?.id && (draft.source_text_tr !== original.source_text_tr || sourceTypeChanged)) {
    items.push({
      id: langs.tr.id,
      source_text_tr: draft.source_text_tr,
      translated_text: draft.source_text_tr,
      source_type: draft.source_type,
      status: approveStatus,
      is_manual: true
    })
  }
  for (const lang of ['en', 'de', 'es', 'it', 'ru', 'ar']) {
    if (langs[lang]?.id && (draft[lang] !== original[lang] || sourceTypeChanged)) {
      items.push({
        id: langs[lang].id,
        translated_text: draft[lang],
        source_type: draft.source_type,
        status: approveStatus,
        is_manual: true
      })
    }
  }
  return items
}
// Persist a single row: first create any missing language rows for non-empty
// drafts, then bulk-update every changed language entry, then reload.
async function saveRow (key) {
const row = pivotRows.value.find(r => r.t_key === key)
const draft = draftByKey.value[key]
const original = originalByKey.value[key]
if (!row || !draft || !original || !rowHasChanges(key)) return
try {
const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
// Reload so the freshly inserted language rows get real DB ids.
if (insertedMissing) {
await loadRows()
}
const refreshed = pivotRows.value.find(r => r.t_key === key)
if (!refreshed) return
// Re-read draft/original: loadRows() re-snapshots them (preserveDirty=false).
// NOTE(review): after that reset, draft === original again, so buildRowUpdates
// may produce no items for the just-inserted rows — confirm this is intended.
const refreshDraft = draftByKey.value[key]
const refreshOriginal = originalByKey.value[key]
const items = buildRowUpdates(refreshed, refreshDraft, refreshOriginal)
if (items.length > 0) {
await store.bulkUpdate(items)
}
await loadRows()
$q.notify({ type: 'positive', message: 'Satır güncellendi' })
} catch (err) {
$q.notify({ type: 'negative', message: err?.message || 'Güncelleme hatası' })
}
}
// Approve every pending language row among the selected keys in one call.
async function bulkApproveSelected () {
  const ALL_LANGS = ['tr', 'en', 'de', 'es', 'it', 'ru', 'ar']
  try {
    // A Set both collects and de-duplicates the pending row ids.
    const pendingIds = new Set()
    for (const key of selectedKeys.value) {
      const row = pivotRows.value.find(r => r.t_key === key)
      if (!row) continue
      for (const lang of ALL_LANGS) {
        const meta = row.langs?.[lang]
        if (meta?.id && meta?.status === 'pending') pendingIds.add(meta.id)
      }
    }
    const unique = Array.from(pendingIds)
    if (unique.length === 0) {
      $q.notify({ type: 'warning', message: 'Onaylanacak pending kayıt bulunamadı' })
      return
    }
    await store.bulkApprove(unique)
    await loadRows()
    $q.notify({ type: 'positive', message: `${unique.length} kayıt onaylandı` })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Toplu onay hatası' })
  }
}
// Machine-translate the selected keys into all target languages, then reload.
async function translateSelectedRows () {
  try {
    const keys = Array.from(new Set(selectedKeys.value.filter(Boolean)))
    if (!keys.length) {
      $q.notify({ type: 'warning', message: 'Çevrilecek seçim bulunamadı' })
      return
    }
    // Cap the request: at most 6 language rows per key, hard-limited to 50k.
    const response = await store.translateSelected({
      t_keys: keys,
      languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
      limit: Math.min(50000, keys.length * 6)
    })
    const translated = Number(response?.translated_count || 0)
    const traceId = response?.trace_id || null
    await loadRows()
    const suffix = traceId ? ` | Trace: ${traceId}` : ''
    $q.notify({
      type: 'positive',
      message: `Seçilenler çevrildi: ${translated}${suffix}`
    })
  } catch (err) {
    $q.notify({ type: 'negative', message: err?.message || 'Seçili çeviri işlemi başarısız' })
  }
}
// Save every selected row that has unsaved changes via one bulkUpdate call.
// Missing language rows are inserted first, one key at a time.
async function bulkSaveSelected () {
try {
const items = []
for (const key of selectedKeys.value) {
const row = pivotRows.value.find(r => r.t_key === key)
const draft = draftByKey.value[key]
const original = originalByKey.value[key]
if (!row || !draft || !original) continue
if (!rowHasChanges(key)) continue
// Insert rows for languages the user filled in that don't exist yet.
const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
// NOTE(review): loadRows() re-snapshots ALL drafts (preserveDirty=false),
// which can wipe pending edits of keys processed later in this loop —
// confirm whether this mid-loop reload is intended.
if (insertedMissing) {
await loadRows()
}
const refreshed = pivotRows.value.find(r => r.t_key === key)
if (!refreshed) continue
const refreshDraft = draftByKey.value[key]
const refreshOriginal = originalByKey.value[key]
items.push(...buildRowUpdates(refreshed, refreshDraft, refreshOriginal))
}
if (items.length === 0) {
$q.notify({ type: 'warning', message: 'Toplu güncellenecek değişiklik yok' })
return
}
await store.bulkUpdate(items)
await loadRows()
$q.notify({ type: 'positive', message: `${items.length} kayıt toplu güncellendi` })
} catch (err) {
$q.notify({ type: 'negative', message: err?.message || 'Toplu güncelleme hatası' })
}
}
// Trigger a backend scan of the source systems for new translation keys
// (optionally machine-translating them), then reload and log the row delta.
async function syncSources () {
const startedAt = Date.now()
const beforeCount = pivotRows.value.length
console.info('[translation-sync][ui] button:click', {
at: new Date(startedAt).toISOString(),
auto_translate: autoTranslate.value,
only_new: true,
before_row_count: beforeCount
})
try {
const response = await store.syncSources({
auto_translate: autoTranslate.value,
languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
limit: 1000,
only_new: true
})
// Tolerate both { result: {...} } and a bare result payload.
const result = response?.result || response || {}
const traceId = result?.trace_id || response?.trace_id || null
console.info('[translation-sync][ui] sync:response', {
trace_id: traceId,
seed_count: result.seed_count || 0,
affected_count: result.affected_count || 0,
auto_translated: result.auto_translated || 0,
duration_ms: result.duration_ms || null
})
await loadRows()
const afterCount = pivotRows.value.length
// Row-count delta makes the sync's effect visible in the console logs.
console.info('[translation-sync][ui] chain:reload-complete', {
trace_id: traceId,
duration_ms: Date.now() - startedAt,
before_row_count: beforeCount,
after_row_count: afterCount,
delta_row_count: afterCount - beforeCount
})
$q.notify({
type: 'positive',
message: `Tarama tamamlandı. Seed: ${result.seed_count || 0}, Oto çeviri: ${result.auto_translated || 0}`
})
} catch (err) {
$q.notify({
type: 'negative',
message: err?.message || 'Kaynak tarama hatası'
})
}
}
// Initial data load.
onMounted(() => {
void loadRows()
})
// Cancel a pending debounced reload on teardown.
onBeforeUnmount(() => {
if (filterReloadTimer) {
clearTimeout(filterReloadTimer)
filterReloadTimer = null
}
})
// Debounced reload whenever the search text changes.
watch(
() => filters.value.q,
() => { scheduleFilterReload() }
)
// After a sort change, ensure enough rows are loaded to fill the viewport.
watch(
[() => tablePagination.value.sortBy, () => tablePagination.value.descending],
() => { void ensureEnoughVisibleRows(120, 4) }
)
</script>
<style scoped>
/* Full-viewport page layout: toolbar on top, table fills the remainder. */
.translation-page {
height: calc(100vh - 120px);
display: flex;
flex-direction: column;
overflow: hidden;
}
.translation-toolbar {
background: #fff;
padding-top: 6px;
}
/* Keeps the toolbar pinned above the scrolling table. */
.sticky-toolbar {
position: sticky;
top: 0;
z-index: 35;
}
/* Table takes all remaining height; min-height 0 lets flex children shrink. */
.translation-table {
flex: 1;
min-height: 0;
}
.translation-table :deep(.q-table__middle) {
max-height: calc(100vh - 280px);
overflow: auto;
}
/* Sticky header row inside the scrolling table body. */
.translation-table :deep(.q-table thead tr th) {
position: sticky;
top: 0;
z-index: 30;
background: #fff;
}
.translation-table :deep(.q-table tbody td) {
vertical-align: top;
padding: 6px;
}
.translation-table :deep(.q-field__native) {
line-height: 1.35;
word-break: break-word;
}
/* Read-only Turkish source text; capped at ~8 lines then scrolls. */
.source-text-label {
white-space: pre-wrap;
word-break: break-word;
line-height: 1.4;
max-height: 11.2em;
overflow: auto;
}
/* Yellow: cell has unsaved edits. */
.cell-dirty {
background: #fff3cd;
}
/* Green: backing row is newly created / pending approval. */
.cell-new {
background: #d9f7e8;
}
</style>

View File

@@ -42,7 +42,7 @@
<q-date
v-model="dateFrom"
mask="YYYY-MM-DD"
locale="tr-TR"
:locale="dateLocale"
:options="isValidFromDate"
/>
</q-popup-proxy>
@@ -65,7 +65,7 @@
<q-date
v-model="dateTo"
mask="YYYY-MM-DD"
locale="tr-TR"
:locale="dateLocale"
:options="isValidToDate"
/>
</q-popup-proxy>
@@ -281,12 +281,18 @@ import { useStatementdetailStore } from 'src/stores/statementdetailStore'
import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
import dayjs from 'dayjs'
import { usePermission } from 'src/composables/usePermission'
import { useLocaleStore } from 'src/stores/localeStore'
import { getDateLocale } from 'src/i18n/dayjsLocale'
import { useI18n } from 'src/composables/useI18n'
const { canRead, canExport } = usePermission()
const canReadFinance = canRead('finance')
const canExportFinance = canExport('finance')
const $q = useQuasar()
const localeStore = useLocaleStore()
const { t } = useI18n()
const dateLocale = computed(() => getDateLocale(localeStore.locale))
const accountStore = useAccountStore()
const statementheaderStore = useStatementheaderStore()
@@ -360,7 +366,7 @@ function hasInvalidDateRange () {
function notifyInvalidDateRange () {
$q.notify({
type: 'warning',
message: '⚠️ Başlangıç tarihi bitiş tarihinden sonra olamaz.',
message: t('statement.invalidDateRange'),
position: 'top-right'
})
}
@@ -402,7 +408,7 @@ async function onFilterClick() {
if (!selectedCari.value || !dateFrom.value || !dateTo.value) {
$q.notify({
type: 'warning',
message: '⚠️ Lütfen cari ve tarih aralığını seçiniz.',
message: t('statement.selectFilters'),
position: 'top-right'
})
return
@@ -417,7 +423,7 @@ async function onFilterClick() {
startdate: dateFrom.value,
enddate: dateTo.value,
accountcode: selectedCari.value,
langcode: 'TR',
langcode: localeStore.backendLangCode,
parislemler: selectedMonType.value,
excludeopening: excludeOpening.value
})
@@ -483,7 +489,7 @@ function toggleFiltersCollapsed () {
function normalizeText (str) {
return (str || '')
.toString()
.toLocaleLowerCase('tr-TR') // Türkçe uyumlu
.toLocaleLowerCase(dateLocale.value)
.normalize('NFD') // aksan temizleme
.replace(/[\u0300-\u036f]/g, '')
.trim()
@@ -503,7 +509,7 @@ function resetFilters() {
/* Format */
function formatAmount(n) {
if (n == null || isNaN(n)) return '0,00'
return new Intl.NumberFormat('tr-TR', {
return new Intl.NumberFormat(dateLocale.value, {
minimumFractionDigits: 2,
maximumFractionDigits: 2
}).format(n)
@@ -562,7 +568,8 @@ async function handleDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3'])
selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
localeStore.backendLangCode
)
console.log("[DEBUG] Storedan gelen result:", result)
@@ -608,7 +615,8 @@ async function CurrheadDownload() {
selectedCari.value, // accountCode
dateFrom.value, // startDate
dateTo.value, // endDate
selectedMonType.value // parasal işlem tipi (parislemler)
selectedMonType.value, // parasal işlem tipi (parislemler)
localeStore.backendLangCode
)
console.log("[DEBUG] CurrheadDownloadresult:", result)

View File

@@ -228,6 +228,12 @@ const routes = [
component: () => import('../pages/MarketMailMapping.vue'),
meta: { permission: 'system:update' }
},
{
path: 'language/translations',
name: 'translation-table',
component: () => import('pages/TranslationTable.vue'),
meta: { permission: 'language:update' }
},
/* ================= ORDERS ================= */

View File

@@ -1,12 +1,14 @@
import axios from 'axios'
import qs from 'qs'
import { useAuthStore } from 'stores/authStore'
import { DEFAULT_LOCALE, normalizeLocale } from 'src/i18n/languages'
const rawBaseUrl =
(typeof process !== 'undefined' && process.env?.VITE_API_BASE_URL) || '/api'
export const API_BASE_URL = String(rawBaseUrl).trim().replace(/\/+$/, '')
const AUTH_REFRESH_PATH = '/auth/refresh'
const LOCALE_STORAGE_KEY = 'bss.locale'
const api = axios.create({
baseURL: API_BASE_URL,
@@ -74,6 +76,11 @@ function redirectToLogin() {
window.location.hash = '/login'
}
function getRequestLocale() {
if (typeof window === 'undefined') return DEFAULT_LOCALE
return normalizeLocale(window.localStorage.getItem(LOCALE_STORAGE_KEY))
}
api.interceptors.request.use((config) => {
const auth = useAuthStore()
const url = config.url || ''
@@ -82,6 +89,8 @@ api.interceptors.request.use((config) => {
config.headers ||= {}
config.headers.Authorization = `Bearer ${auth.token}`
}
config.headers ||= {}
config.headers['Accept-Language'] = getRequestLocale()
return config
})

View File

@@ -40,6 +40,8 @@ function nowMs () {
return Date.now()
}
const applyInFlightByOrder = new Map()
const YAS_NUMERIC_SIZES = new Set(['2', '4', '6', '8', '10', '12', '14'])
function safeStr (value) {
@@ -423,7 +425,7 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
if (!code) return []
return this.productAttributeDraftsByCode[code] || []
},
async validateUpdates (orderHeaderID, lines) {
async validateUpdates (orderHeaderID, lines, cdItems = []) {
if (!orderHeaderID) return { missingCount: 0, missing: [] }
this.saving = true
@@ -434,7 +436,7 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
console.info('[OrderProductionItemStore] validateUpdates start', { orderHeaderID, lineCount: lines?.length || 0 })
const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/validate`,
{ lines }
{ lines, cdItems }
)
const data = res?.data || { missingCount: 0, missing: [] }
const rid = res?.headers?.['x-debug-request-id'] || ''
@@ -458,48 +460,69 @@ export const useOrderProductionItemStore = defineStore('orderproductionitems', {
async applyUpdates (orderHeaderID, lines, insertMissing, cdItems = [], productAttributes = [], headerAverageDueDate = null) {
if (!orderHeaderID) return { updated: 0, inserted: 0 }
this.saving = true
this.error = null
const orderKey = String(orderHeaderID).trim().toUpperCase()
if (applyInFlightByOrder.has(orderKey)) {
console.warn('[OrderProductionItemStore] applyUpdates deduped (in-flight)', {
orderHeaderID: orderKey,
lineCount: lines?.length || 0
})
return await applyInFlightByOrder.get(orderKey)
}
const applyPromise = (async () => {
this.saving = true
this.error = null
try {
const t0 = nowMs()
console.info('[OrderProductionItemStore] applyUpdates start', {
orderHeaderID,
lineCount: lines?.length || 0,
insertMissing: !!insertMissing,
cdItemCount: cdItems?.length || 0,
attributeCount: productAttributes?.length || 0,
headerAverageDueDate
})
const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/apply`,
{
lines,
insertMissing,
cdItems,
productAttributes,
HeaderAverageDueDate: headerAverageDueDate
}
)
const data = res?.data || { updated: 0, inserted: 0 }
const rid = res?.headers?.['x-debug-request-id'] || ''
console.info('[OrderProductionItemStore] applyUpdates done', {
orderHeaderID,
updated: Number(data?.updated || 0),
inserted: Number(data?.inserted || 0),
barcodeInserted: Number(data?.barcodeInserted || 0),
attributeUpserted: Number(data?.attributeUpserted || 0),
headerUpdated: !!data?.headerUpdated,
requestId: rid,
durationMs: Math.round(nowMs() - t0)
})
return data
} catch (err) {
logApiError('applyUpdates', err, { orderHeaderID, lineCount: lines?.length || 0, insertMissing })
this.error = extractApiErrorMessage(err, 'Guncelleme basarisiz')
throw err
} finally {
this.saving = false
}
})()
applyInFlightByOrder.set(orderKey, applyPromise)
try {
const t0 = nowMs()
console.info('[OrderProductionItemStore] applyUpdates start', {
orderHeaderID,
lineCount: lines?.length || 0,
insertMissing: !!insertMissing,
cdItemCount: cdItems?.length || 0,
attributeCount: productAttributes?.length || 0,
headerAverageDueDate
})
const res = await api.post(
`/orders/production-items/${encodeURIComponent(orderHeaderID)}/apply`,
{
lines,
insertMissing,
cdItems,
productAttributes,
HeaderAverageDueDate: headerAverageDueDate
}
)
const data = res?.data || { updated: 0, inserted: 0 }
const rid = res?.headers?.['x-debug-request-id'] || ''
console.info('[OrderProductionItemStore] applyUpdates done', {
orderHeaderID,
updated: Number(data?.updated || 0),
inserted: Number(data?.inserted || 0),
barcodeInserted: Number(data?.barcodeInserted || 0),
attributeUpserted: Number(data?.attributeUpserted || 0),
headerUpdated: !!data?.headerUpdated,
requestId: rid,
durationMs: Math.round(nowMs() - t0)
})
return data
} catch (err) {
logApiError('applyUpdates', err, { orderHeaderID, lineCount: lines?.length || 0, insertMissing })
this.error = extractApiErrorMessage(err, 'Guncelleme basarisiz')
throw err
return await applyPromise
} finally {
this.saving = false
if (applyInFlightByOrder.get(orderKey) === applyPromise) {
applyInFlightByOrder.delete(orderKey)
}
}
}
}

View File

@@ -6,13 +6,39 @@ function toText (value) {
}
function toNumber (value) {
const n = Number(value)
const n = parseFlexibleNumber(value)
return Number.isFinite(n) ? Number(n.toFixed(2)) : 0
}
function mapRow (raw, index) {
function parseFlexibleNumber (value) {
if (typeof value === 'number') return value
const text = String(value ?? '').trim().replace(/\s/g, '')
if (!text) return 0
const lastComma = text.lastIndexOf(',')
const lastDot = text.lastIndexOf('.')
let normalized = text
if (lastComma >= 0 && lastDot >= 0) {
// Keep the last separator as decimal, remove the other as thousand.
if (lastComma > lastDot) {
normalized = text.replace(/\./g, '').replace(',', '.')
} else {
normalized = text.replace(/,/g, '')
}
} else if (lastComma >= 0) {
normalized = text.replace(/\./g, '').replace(',', '.')
} else {
normalized = text.replace(/,/g, '')
}
const n = Number(normalized)
return Number.isFinite(n) ? n : 0
}
function mapRow (raw, index, baseIndex = 0) {
return {
id: index + 1,
id: baseIndex + index + 1,
productCode: toText(raw?.ProductCode),
stockQty: toNumber(raw?.StockQty),
stockEntryDate: toText(raw?.StockEntryDate),
@@ -55,21 +81,85 @@ export const useProductPricingStore = defineStore('product-pricing-store', {
state: () => ({
rows: [],
loading: false,
error: ''
error: '',
hasMore: true
}),
actions: {
async fetchRows () {
async fetchRows (options = {}) {
this.loading = true
this.error = ''
const limit = Number(options?.limit) > 0 ? Number(options.limit) : 500
const afterProductCode = toText(options?.afterProductCode)
const append = Boolean(options?.append)
const baseIndex = append ? this.rows.length : 0
const startedAt = Date.now()
console.info('[product-pricing][frontend] request:start', {
at: new Date(startedAt).toISOString(),
timeout_ms: 180000,
limit,
after_product_code: afterProductCode || null,
append
})
try {
const res = await api.get('/pricing/products')
const params = { limit }
if (afterProductCode) params.after_product_code = afterProductCode
const res = await api.request({
method: 'GET',
url: '/pricing/products',
params,
timeout: 180000
})
const traceId = res?.headers?.['x-trace-id'] || null
const hasMoreHeader = String(res?.headers?.['x-has-more'] || '').toLowerCase()
const nextCursorHeader = toText(res?.headers?.['x-next-cursor'])
const data = Array.isArray(res?.data) ? res.data : []
this.rows = data.map((x, i) => mapRow(x, i))
const mapped = data.map((x, i) => mapRow(x, i, baseIndex))
const fallbackNextCursor = mapped.length > 0
? toText(mapped[mapped.length - 1]?.productCode)
: ''
const nextCursor = nextCursorHeader || fallbackNextCursor
if (append) {
const merged = [...this.rows]
const seen = new Set(this.rows.map((x) => x?.productCode))
for (const row of mapped) {
const key = row?.productCode
if (key && seen.has(key)) continue
merged.push(row)
if (key) seen.add(key)
}
this.rows = merged
} else {
this.rows = mapped
}
this.hasMore = hasMoreHeader ? hasMoreHeader === 'true' : mapped.length === limit
console.info('[product-pricing][frontend] request:success', {
trace_id: traceId,
duration_ms: Date.now() - startedAt,
row_count: this.rows.length,
fetched_count: mapped.length,
has_more: this.hasMore,
next_cursor: nextCursor || null
})
return {
traceId,
fetched: mapped.length,
hasMore: this.hasMore,
nextCursor
}
} catch (err) {
this.rows = []
if (!append) this.rows = []
this.hasMore = false
const msg = err?.response?.data || err?.message || 'Urun fiyatlandirma listesi alinamadi'
this.error = toText(msg)
console.error('[product-pricing][frontend] request:error', {
trace_id: err?.response?.headers?.['x-trace-id'] || null,
duration_ms: Date.now() - startedAt,
timeout_ms: err?.config?.timeout ?? null,
status: err?.response?.status || null,
message: this.error
})
throw err
} finally {
this.loading = false
}
@@ -77,7 +167,7 @@ export const useProductPricingStore = defineStore('product-pricing-store', {
updateCell (row, field, val) {
if (!row || !field) return
row[field] = toNumber(String(val ?? '').replace(',', '.'))
row[field] = toNumber(val)
},
updateBrandGroupSelection (row, val) {

View File

@@ -9,14 +9,16 @@ export const useDownloadstHeadStore = defineStore('downloadstHead', {
accountCode,
startDate,
endDate,
parislemler
parislemler,
langcode = 'TR'
) {
try {
// ✅ Params (axios paramsSerializer array=repeat destekliyor)
const params = {
accountcode: accountCode,
startdate: startDate,
enddate: endDate
enddate: endDate,
langcode: langcode || 'TR'
}
if (Array.isArray(parislemler) && parislemler.length > 0) {

View File

@@ -7,13 +7,14 @@ export const useDownloadstpdfStore = defineStore('downloadstpdf', {
/* ==========================================================
📄 PDF İNDİR / AÇ
========================================================== */
async downloadPDF(accountCode, startDate, endDate, parislemler = []) {
async downloadPDF(accountCode, startDate, endDate, parislemler = [], langcode = 'TR') {
try {
// 🔹 Query params
const params = {
accountcode: accountCode,
startdate: startDate,
enddate: endDate
enddate: endDate,
langcode: langcode || 'TR'
}
if (Array.isArray(parislemler) && parislemler.length > 0) {

View File

@@ -0,0 +1,35 @@
import { defineStore } from 'pinia'
import { computed, ref } from 'vue'
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
import { DEFAULT_LOCALE, normalizeLocale, toBackendLangCode } from 'src/i18n/languages'
const STORAGE_KEY = 'bss.locale'
function readInitialLocale() {
if (typeof window === 'undefined') return DEFAULT_LOCALE
return normalizeLocale(window.localStorage.getItem(STORAGE_KEY))
}
export const useLocaleStore = defineStore('locale', () => {
const locale = ref(readInitialLocale())
function setLocale(nextLocale) {
const normalized = normalizeLocale(nextLocale)
locale.value = normalized
applyDayjsLocale(normalized)
if (typeof window !== 'undefined') {
window.localStorage.setItem(STORAGE_KEY, normalized)
}
}
const backendLangCode = computed(() => toBackendLangCode(locale.value))
applyDayjsLocale(locale.value)
return {
locale,
backendLangCode,
setLocale
}
})

View File

@@ -541,6 +541,14 @@ export const useOrderEntryStore = defineStore('orderentry', {
const s = String(d).split('T')[0]
return s
}
const getLineDueDate = (ln) => (
formatDate(
ln?.DueDate ||
ln?.DeliveryDate ||
ln?.PlannedDateOfLading ||
''
)
)
const oldDate = formatDate(this.originalHeader?.AverageDueDate)
const newDate = formatDate(this.header?.AverageDueDate)
@@ -556,7 +564,7 @@ export const useOrderEntryStore = defineStore('orderentry', {
const seen = new Set()
normalized.forEach(ln => {
if (ln?._deleteSignal || !ln?.OrderLineID || ln?._dirty !== true) return
if (ln?._deleteSignal || !ln?.OrderLineID) return
const orig = origMap.get(String(ln.OrderLineID))
if (!orig) return
@@ -564,8 +572,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
const itemCode = String(ln?.ItemCode || '').trim().toUpperCase()
const colorCode = String(ln?.ColorCode || '').trim().toUpperCase()
const itemDim2Code = String(ln?.ItemDim2Code || '').trim().toUpperCase()
const oldLnDate = formatDate(orig?.DueDate)
const newLnDate = formatDate(ln?.DueDate)
const oldLnDate = getLineDueDate(orig)
const newLnDate = getLineDueDate(ln)
if (!itemCode || !newLnDate || oldLnDate === newLnDate) return
const key = [itemCode, colorCode, itemDim2Code, oldLnDate, newLnDate].join('||')
@@ -613,8 +621,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
let label = mapLabel(ln)
const orig = origMap.get(String(ln.OrderLineID))
if (orig) {
const oldLnDate = formatDate(orig.DueDate)
const newLnDate = formatDate(ln.DueDate)
const oldLnDate = getLineDueDate(orig)
const newLnDate = getLineDueDate(ln)
if (newLnDate && oldLnDate !== newLnDate) {
label += ` (Termin: ${oldLnDate} -> ${newLnDate})`
}
@@ -721,16 +729,8 @@ export const useOrderEntryStore = defineStore('orderentry', {
AverageDueDate: dateText
}
if (this.originalHeader && typeof this.originalHeader === 'object') {
this.originalHeader = {
...this.originalHeader,
AverageDueDate: dateText
}
}
if (Array.isArray(this.originalLines)) {
this.originalLines = this.originalLines.map(patchRow)
}
// Keep originalHeader/originalLines untouched for submit-mail diff.
// Otherwise due-date change table becomes empty.
this.persistLocalStorage?.()
if (!hadUnsavedChanges) {
@@ -3288,6 +3288,13 @@ export const useOrderEntryStore = defineStore('orderentry', {
throw new Error('OrderHeaderID backendden dönmedi')
}
const mailPayload = this.buildOrderMailPayload(lines, isNew)
console.info('[orderentryStore] mail payload prepared', {
operation: mailPayload?.operation,
deletedCount: Array.isArray(mailPayload?.deletedItems) ? mailPayload.deletedItems.length : 0,
updatedCount: Array.isArray(mailPayload?.updatedItems) ? mailPayload.updatedItems.length : 0,
addedCount: Array.isArray(mailPayload?.addedItems) ? mailPayload.addedItems.length : 0,
dueDateChangeCount: Array.isArray(mailPayload?.dueDateChanges) ? mailPayload.dueDateChanges.length : 0
})
purgeNewDraftOnExit = isNew
/* =======================================================

View File

@@ -0,0 +1,143 @@
import { defineStore } from 'pinia'
import api from 'src/services/api'
/**
 * Pinia store for the translation-management screen.
 *
 * Wraps the `/language/translations` backend endpoints. `loading` tracks the
 * read path (fetchRows); `saving` tracks every mutating action. Long-running
 * operations (sync-sources, translate-selected) disable the HTTP timeout and
 * emit structured console logs with a trace id for support diagnostics.
 */
export const useTranslationStore = defineStore('translation', {
  state: () => ({
    loading: false, // true while fetchRows is in flight
    saving: false,  // true while any mutating action is in flight
    rows: [],       // translation rows as returned by the backend
    count: 0        // backend-reported total; falls back to rows.length
  }),
  actions: {
    /**
     * Fetch translation rows.
     * @param {Object} filters  query params forwarded verbatim to the API.
     * @param {Object} options  { append?: boolean } — when true, merge the
     *                          incoming page into the existing rows (dedup by id)
     *                          instead of replacing them.
     */
    async fetchRows (filters = {}, options = {}) {
      this.loading = true
      const append = Boolean(options?.append)
      try {
        const res = await api.get('/language/translations', { params: filters })
        const payload = res?.data || {}
        const incoming = Array.isArray(payload.rows) ? payload.rows : []
        if (append) {
          const merged = [...this.rows]
          // Dedupe by id only for rows that actually carry one. Rows without
          // an id cannot be identified and are always appended — previously
          // `undefined` was added to the seen-set, silently dropping every
          // id-less row after the first.
          const seen = new Set(
            this.rows.map((x) => x?.id).filter((id) => id != null)
          )
          for (const row of incoming) {
            const id = row?.id
            if (id == null) {
              merged.push(row)
            } else if (!seen.has(id)) {
              merged.push(row)
              seen.add(id)
            }
          }
          this.rows = merged
        } else {
          this.rows = incoming
        }
        // Backend count wins; fall back to local length when absent/zero.
        this.count = Number(payload.count) || this.rows.length
      } finally {
        this.loading = false
      }
    },
    /**
     * Update a single translation row.
     * @param {string|number} id  row id (interpolated into the URL path).
     * @param {Object} payload    fields to persist.
     * @returns {Object|null} backend response body, or null.
     */
    async updateRow (id, payload) {
      this.saving = true
      try {
        const res = await api.put(`/language/translations/${id}`, payload)
        return res?.data || null
      } finally {
        this.saving = false
      }
    },
    /**
     * Create placeholder rows for source strings missing a translation.
     * @param {Array} items       source items to upsert.
     * @param {string[]} languages target language codes (defaults to the
     *                            project's supported set).
     * @returns {Object|null} backend response body, or null.
     */
    async upsertMissing (items, languages = ['en', 'de', 'it', 'es', 'ru', 'ar']) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/upsert-missing', {
          items: Array.isArray(items) ? items : [],
          languages: Array.isArray(languages) ? languages : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    },
    /**
     * Trigger a backend scan of source strings. Potentially long-running:
     * the HTTP timeout is disabled and start/success/error are logged with
     * duration and trace id for diagnostics.
     * @returns {Object|null} backend response body, or null.
     * @throws re-throws the API error after logging it.
     */
    async syncSources (payload = {}) {
      this.saving = true
      const startedAt = Date.now()
      console.info('[translation-sync][frontend] request:start', {
        at: new Date(startedAt).toISOString(),
        payload
      })
      try {
        // timeout: 0 → no client-side timeout for this long-running job.
        const res = await api.post('/language/translations/sync-sources', payload, { timeout: 0 })
        const data = res?.data || null
        // Trace id may arrive in the body, a nested result, or a header.
        const traceId = data?.trace_id || data?.result?.trace_id || res?.headers?.['x-trace-id'] || null
        console.info('[translation-sync][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - startedAt,
          result: data?.result || null
        })
        return data
      } catch (err) {
        console.error('[translation-sync][frontend] request:error', {
          duration_ms: Date.now() - startedAt,
          message: err?.message || 'sync-sources failed'
        })
        throw err
      } finally {
        this.saving = false
      }
    },
    /**
     * Machine-translate the selected rows. Long-running: timeout disabled,
     * start/success/error logged with duration and trace id.
     * @returns {Object|null} backend response body, or null.
     * @throws re-throws the API error after logging it.
     */
    async translateSelected (payload = {}) {
      this.saving = true
      const startedAt = Date.now()
      console.info('[translation-selected][frontend] request:start', {
        at: new Date(startedAt).toISOString(),
        payload
      })
      try {
        const res = await api.post('/language/translations/translate-selected', payload, { timeout: 0 })
        const data = res?.data || null
        const traceId = data?.trace_id || res?.headers?.['x-trace-id'] || null
        console.info('[translation-selected][frontend] request:success', {
          trace_id: traceId,
          duration_ms: Date.now() - startedAt,
          translated_count: data?.translated_count || 0
        })
        return data
      } catch (err) {
        console.error('[translation-selected][frontend] request:error', {
          duration_ms: Date.now() - startedAt,
          message: err?.message || 'translate-selected failed'
        })
        throw err
      } finally {
        this.saving = false
      }
    },
    /**
     * Approve many rows at once.
     * @param {Array} ids  row ids to approve.
     * @returns {Object|null} backend response body, or null.
     */
    async bulkApprove (ids = []) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/bulk-approve', {
          ids: Array.isArray(ids) ? ids : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    },
    /**
     * Persist edits to many rows at once.
     * @param {Array} items  row payloads to update.
     * @returns {Object|null} backend response body, or null.
     */
    async bulkUpdate (items = []) {
      this.saving = true
      try {
        const res = await api.post('/language/translations/bulk-update', {
          items: Array.isArray(items) ? items : []
        })
        return res?.data || null
      } finally {
        this.saving = false
      }
    }
  }
})