Merge remote-tracking branch 'origin/master'
This commit is contained in:
52
docs/i18n-dynamic-translation-standard.md
Normal file
52
docs/i18n-dynamic-translation-standard.md
Normal file
@@ -0,0 +1,52 @@
|
||||
# i18n + Dinamik Çeviri Standardı
|
||||
|
||||
Bu projede çok dilli yapı iki katmanlıdır:
|
||||
|
||||
1. Statik UI metinleri `i18n` ile yönetilir.
|
||||
2. Dinamik içerikler `mk_translator` + otomatik çeviri servisi (OpenAI) ile yönetilir.
|
||||
|
||||
## 1) Statik UI (Deterministik)
|
||||
|
||||
Kullanım alanı:
|
||||
- buton metinleri
|
||||
- menüler
|
||||
- form label'ları
|
||||
- validasyon mesajları
|
||||
- sabit ekran başlıkları
|
||||
- route/meta/title
|
||||
|
||||
Kural:
|
||||
- her metin key bazlı tutulur (`$t('common.save')`)
|
||||
- locale dosyaları: `tr`, `en`, `de`, `it`, `es`, `ru`, `ar`
|
||||
- fallback sırası: hedef dil -> `en` -> `tr`
|
||||
|
||||
## 2) Dinamik İçerik (DB/CMS/Serbest metin)
|
||||
|
||||
Akış:
|
||||
1. Kaynak metin için `mk_translator` kontrol edilir.
|
||||
2. Hedef dil karşılığı yoksa OpenAI ile çeviri üretilir.
|
||||
3. Sonuç `mk_translator` tablosuna yazılır.
|
||||
4. Sonraki isteklerde DB sonucu kullanılır (cache etkisi).
|
||||
|
||||
Kullanım alanı:
|
||||
- ürün/kategori açıklamaları
|
||||
- CMS içerikleri
|
||||
- admin panelden girilen serbest metinler
|
||||
- şablon bazlı metin içerikleri
|
||||
|
||||
## Kalite ve Güvenlik Kuralları
|
||||
|
||||
- Prompt net olmalı: sadece çeviri dönsün, açıklama eklemesin.
|
||||
- Placeholder/format korunsun: `{name}`, `{{count}}`, `%s` gibi yapılar bozulmasın.
|
||||
- HTML tag'leri ve kod/SKU değerleri çevrilmesin.
|
||||
- API key sadece backend'de tutulur (`OPENAI_API_KEY` client'a verilmez).
|
||||
- 429/5xx için retry + exponential backoff uygulanır.
|
||||
- Hassas veri içeriği olan metinlerde veri politikası kontrolü yapılır.
|
||||
|
||||
## Özet
|
||||
|
||||
Bu servis, `i18n`'in alternatifi değildir; `i18n`'i tamamlayan dinamik çeviri katmanıdır.
|
||||
|
||||
- Statik UI: `i18n`
|
||||
- Dinamik içerik: `mk_translator` + OpenAI + cache
|
||||
|
||||
@@ -16,4 +16,9 @@
|
||||
| Cloudflare | bt@baggi.com.tr | «REDACTED — credential removed from docs; rotate this password and move it to a secrets manager» |
|
||||
| 172.16.0.3 | ct | «REDACTED — credential removed from docs; rotate this password and move it to a secrets manager» |
|
||||
|
||||
## Dil ve Çeviri Standardı
|
||||
|
||||
Detaylı mimari dokümanı:
|
||||
- [docs/i18n-dynamic-translation-standard.md](docs/i18n-dynamic-translation-standard.md)
|
||||
|
||||
|
||||
|
||||
48
scripts/sql/language_module_seed.sql
Normal file
48
scripts/sql/language_module_seed.sql
Normal file
@@ -0,0 +1,48 @@
|
||||
-- language_module_seed.sql
--
-- Idempotent seed for the "language" module: routes, role permissions and
-- role+department permissions. Safe to re-run (every statement is an upsert
-- or guarded by ON CONFLICT DO NOTHING).

-- 1) Register language module routes if missing.
--    NOTE(review): every route (including the read-only GET) is gated on the
--    'update' action — confirm listing translations really requires update rights.
INSERT INTO mk_sys_routes (path, method, module_code, action)
VALUES
    ('/api/language/translations',                    'GET',  'language', 'update'),
    ('/api/language/translations/{id}',               'PUT',  'language', 'update'),
    ('/api/language/translations/upsert-missing',     'POST', 'language', 'update'),
    ('/api/language/translations/sync-sources',       'POST', 'language', 'update'),
    ('/api/language/translations/translate-selected', 'POST', 'language', 'update'),
    ('/api/language/translations/bulk-approve',       'POST', 'language', 'update'),
    ('/api/language/translations/bulk-update',        'POST', 'language', 'update')
ON CONFLICT (path, method) DO UPDATE
SET module_code = EXCLUDED.module_code,
    action      = EXCLUDED.action;

-- 2) Remove legacy system translation routes (optional cleanup).
DELETE FROM mk_sys_routes
WHERE path LIKE '/api/system/translations%';

-- 3) Seed role permissions for the language module by cloning the existing
--    'system' module permissions (one row per role+action).
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT rp.role_id, 'language', rp.action, rp.allowed
FROM mk_sys_role_permissions rp
WHERE rp.module_code = 'system'
  AND rp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;

-- 4) Ensure the admin role (id = 3) can update translations even when no
--    'system' permission existed to clone from.
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT r.id, 'language', 'update', true
FROM dfrole r
WHERE r.id = 3
ON CONFLICT DO NOTHING;

-- 5) Seed role+department permissions for the language module by cloning
--    the 'system' module's role+department grants.
INSERT INTO mk_sys_role_department_permissions
    (role_id, department_code, module_code, action, allowed)
SELECT DISTINCT
    rdp.role_id,
    rdp.department_code,
    'language',
    rdp.action,
    rdp.allowed
FROM mk_sys_role_department_permissions rdp
WHERE rdp.module_code = 'system'
  AND rdp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;
|
||||
@@ -32,3 +32,6 @@ API_HOST=0.0.0.0
|
||||
API_PORT=8080
|
||||
|
||||
|
||||
AZURE_TRANSLATOR_KEY=<set-via-secret-store>  # NOTE(review): a real key was previously committed here — rotate the Azure Translator key immediately and never commit secrets
|
||||
AZURE_TRANSLATOR_ENDPOINT=https://api.cognitive.microsofttranslator.com
|
||||
AZURE_TRANSLATOR_REGION=westeurope
|
||||
|
||||
72
svc/cmd/translation_sync_once/main.go
Normal file
72
svc/cmd/translation_sync_once/main.go
Normal file
@@ -0,0 +1,72 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bssapp-backend/db"
|
||||
"bssapp-backend/routes"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/joho/godotenv"
|
||||
)
|
||||
|
||||
func main() {
|
||||
_ = godotenv.Load(".env", "mail.env", ".env.local")
|
||||
|
||||
if err := db.ConnectMSSQL(); err != nil {
|
||||
log.Fatalf("mssql connect failed: %v", err)
|
||||
}
|
||||
pgDB, err := db.ConnectPostgres()
|
||||
if err != nil {
|
||||
log.Fatalf("postgres connect failed: %v", err)
|
||||
}
|
||||
defer pgDB.Close()
|
||||
|
||||
limit := 30000
|
||||
if raw := os.Getenv("TRANSLATION_SYNC_LIMIT"); raw != "" {
|
||||
if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
|
||||
limit = parsed
|
||||
}
|
||||
}
|
||||
|
||||
langs := []string{"en", "de", "it", "es", "ru", "ar"}
|
||||
if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LANGS")); raw != "" {
|
||||
parts := strings.Split(raw, ",")
|
||||
custom := make([]string, 0, len(parts))
|
||||
for _, p := range parts {
|
||||
v := strings.TrimSpace(strings.ToLower(p))
|
||||
if v != "" {
|
||||
custom = append(custom, v)
|
||||
}
|
||||
}
|
||||
if len(custom) > 0 {
|
||||
langs = custom
|
||||
}
|
||||
}
|
||||
|
||||
autoTranslate := true
|
||||
if raw := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_AUTO_TRANSLATE"))); raw != "" {
|
||||
if raw == "0" || raw == "false" || raw == "off" {
|
||||
autoTranslate = false
|
||||
}
|
||||
}
|
||||
|
||||
result, err := routes.PerformTranslationSync(pgDB, db.MssqlDB, routes.TranslationSyncOptions{
|
||||
AutoTranslate: autoTranslate,
|
||||
Languages: langs,
|
||||
Limit: limit,
|
||||
OnlyNew: true,
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatalf("manual sync failed: %v", err)
|
||||
}
|
||||
|
||||
fmt.Printf("translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v\n",
|
||||
result.SeedCount,
|
||||
result.AffectedCount,
|
||||
result.AutoTranslated,
|
||||
result.TargetLangs,
|
||||
)
|
||||
}
|
||||
122
svc/internal/i18n/lang.go
Normal file
122
svc/internal/i18n/lang.go
Normal file
@@ -0,0 +1,122 @@
|
||||
package i18n
|
||||
|
||||
import "strings"
|
||||
|
||||
// DefaultLang is the fallback language code used whenever a requested
// language is missing or unsupported.
const DefaultLang = "TR"

// supported is the set of language codes this package recognises.
var supported = map[string]struct{}{
	"TR": {},
	"EN": {},
	"DE": {},
	"IT": {},
	"ES": {},
	"RU": {},
	"AR": {},
}

// NormalizeLangCode trims and upper-cases raw and returns it when it is a
// supported code; any other input yields DefaultLang.
func NormalizeLangCode(raw string) string {
	code := strings.ToUpper(strings.TrimSpace(raw))
	if _, known := supported[code]; known {
		return code
	}
	return DefaultLang
}

// ResolveLangCode picks the effective language for a request.
// An explicit, supported query parameter wins (including an explicit "tr");
// otherwise the first entry of the Accept-Language header is consulted;
// failing both, DefaultLang is returned.
func ResolveLangCode(queryLangCode, acceptLanguage string) string {
	fromQuery := NormalizeLangCode(queryLangCode)
	explicitDefault := strings.EqualFold(strings.TrimSpace(queryLangCode), DefaultLang)
	if fromQuery != DefaultLang || explicitDefault {
		return fromQuery
	}

	header := strings.TrimSpace(acceptLanguage)
	if header == "" {
		return DefaultLang
	}
	// Accept-Language looks like "de-DE,de;q=0.9,...": keep the first tag,
	// drop any quality parameters, and use its two-letter primary subtag.
	primary := strings.TrimSpace(strings.Split(strings.Split(header, ",")[0], ";")[0])
	if len(primary) < 2 {
		return DefaultLang
	}
	return NormalizeLangCode(primary[:2])
}
|
||||
|
||||
func T(langCode, key string) string {
|
||||
for _, lang := range fallbackLangs(langCode) {
|
||||
if val, ok := dict[lang][key]; ok {
|
||||
return val
|
||||
}
|
||||
}
|
||||
return key
|
||||
}
|
||||
|
||||
func fallbackLangs(langCode string) []string {
|
||||
lang := NormalizeLangCode(langCode)
|
||||
switch lang {
|
||||
case "TR":
|
||||
return []string{"TR"}
|
||||
case "EN":
|
||||
return []string{"EN", "TR"}
|
||||
default:
|
||||
return []string{lang, "EN", "TR"}
|
||||
}
|
||||
}
|
||||
|
||||
// dict holds the static UI strings used by the PDF exports, keyed first by
// language code and then by translation key. Only TR and EN are populated;
// other supported languages resolve through fallbackLangs (lang -> EN -> TR).
var dict = map[string]map[string]string{
	"TR": {
		// Report header area
		"pdf.report_title":    "Cari Hesap Raporu",
		"pdf.date":            "Tarih",
		"pdf.customer":        "Cari",
		"pdf.date_range":      "Tarih Aralığı",
		"pdf.page":            "Sayfa",
		"pdf.ending_balance":  "Son Bakiye",
		"pdf.currency_prefix": "Para Birimi",
		"pdf.balance_prefix":  "Bakiye",
		// Main statement table columns
		"pdf.main.doc_no":      "Belge No",
		"pdf.main.date":        "Tarih",
		"pdf.main.due_date":    "Vade",
		"pdf.main.operation":   "İşlem",
		"pdf.main.description": "Açıklama",
		"pdf.main.currency":    "Para",
		"pdf.main.debit":       "Borç",
		"pdf.main.credit":      "Alacak",
		"pdf.main.balance":     "Bakiye",
		// Detail table columns
		"pdf.detail.main_group": "Ana Grup",
		"pdf.detail.sub_group":  "Alt Grup",
		"pdf.detail.waiter":     "Garson",
		"pdf.detail.fit":        "Fit",
		"pdf.detail.content":    "İçerik",
		"pdf.detail.product":    "Ürün",
		"pdf.detail.color":      "Renk",
		"pdf.detail.qty":        "Adet",
		"pdf.detail.price":      "Fiyat",
		"pdf.detail.total":      "Tutar",
	},
	"EN": {
		// Report header area
		"pdf.report_title":    "Customer Account Report",
		"pdf.date":            "Date",
		"pdf.customer":        "Customer",
		"pdf.date_range":      "Date Range",
		"pdf.page":            "Page",
		"pdf.ending_balance":  "Ending Balance",
		"pdf.currency_prefix": "Currency",
		"pdf.balance_prefix":  "Balance",
		// Main statement table columns
		"pdf.main.doc_no":      "Document No",
		"pdf.main.date":        "Date",
		"pdf.main.due_date":    "Due Date",
		"pdf.main.operation":   "Operation",
		"pdf.main.description": "Description",
		"pdf.main.currency":    "Curr.",
		"pdf.main.debit":       "Debit",
		"pdf.main.credit":      "Credit",
		"pdf.main.balance":     "Balance",
		// Detail table columns
		"pdf.detail.main_group": "Main Group",
		"pdf.detail.sub_group":  "Sub Group",
		"pdf.detail.waiter":     "Waiter",
		"pdf.detail.fit":        "Fit",
		"pdf.detail.content":    "Content",
		"pdf.detail.product":    "Product",
		"pdf.detail.color":      "Color",
		"pdf.detail.qty":        "Qty",
		"pdf.detail.price":      "Price",
		"pdf.detail.total":      "Total",
	},
}
|
||||
97
svc/main.go
97
svc/main.go
@@ -104,7 +104,26 @@ func autoRegisterRouteV3(
|
||||
return
|
||||
}
|
||||
|
||||
// 2) ADMIN AUTO PERMISSION (module+action bazlı)
|
||||
// 2) MODULE LOOKUP AUTO SEED (permission ekranları için)
|
||||
moduleLabel := strings.TrimSpace(strings.ReplaceAll(module, "_", " "))
|
||||
if moduleLabel == "" {
|
||||
moduleLabel = module
|
||||
}
|
||||
_, err = tx.Exec(`
|
||||
INSERT INTO mk_sys_modules (code, name)
|
||||
VALUES ($1::text, $2::text)
|
||||
ON CONFLICT (code) DO UPDATE
|
||||
SET name = COALESCE(NULLIF(EXCLUDED.name, ''), mk_sys_modules.name)
|
||||
`,
|
||||
module,
|
||||
moduleLabel,
|
||||
)
|
||||
if err != nil {
|
||||
log.Printf("❌ Module seed error (%s %s): %v", method, path, err)
|
||||
return
|
||||
}
|
||||
|
||||
// 3) ROLE PERMISSION AUTO SEED (admin=true, diğer roller=false)
|
||||
_, err = tx.Exec(`
|
||||
INSERT INTO mk_sys_role_permissions
|
||||
(role_id, module_code, action, allowed)
|
||||
@@ -112,16 +131,50 @@ func autoRegisterRouteV3(
|
||||
id,
|
||||
$1,
|
||||
$2,
|
||||
true
|
||||
CASE
|
||||
WHEN id = 3 OR LOWER(code) = 'admin' THEN true
|
||||
ELSE false
|
||||
END
|
||||
FROM dfrole
|
||||
WHERE id = 3 -- ADMIN
|
||||
ON CONFLICT DO NOTHING
|
||||
`,
|
||||
module,
|
||||
action,
|
||||
)
|
||||
if err != nil {
|
||||
log.Printf("❌ Admin perm seed error (%s %s): %v", method, path, err)
|
||||
log.Printf("❌ Role perm seed error (%s %s): %v", method, path, err)
|
||||
return
|
||||
}
|
||||
|
||||
// 4) ROLE+DEPARTMENT PERMISSION AUTO SEED
|
||||
// Existing role+department kombinasyonlarına yeni module+action satırı açılır.
|
||||
_, err = tx.Exec(`
|
||||
WITH role_dept_scope AS (
|
||||
SELECT DISTINCT role_id, department_code
|
||||
FROM mk_sys_role_department_permissions
|
||||
UNION
|
||||
SELECT 3 AS role_id, d.code AS department_code
|
||||
FROM mk_dprt d
|
||||
)
|
||||
INSERT INTO mk_sys_role_department_permissions
|
||||
(role_id, department_code, module_code, action, allowed)
|
||||
SELECT
|
||||
rds.role_id,
|
||||
rds.department_code,
|
||||
$1,
|
||||
$2,
|
||||
CASE
|
||||
WHEN rds.role_id = 3 THEN true
|
||||
ELSE false
|
||||
END
|
||||
FROM role_dept_scope rds
|
||||
ON CONFLICT DO NOTHING
|
||||
`,
|
||||
module,
|
||||
action,
|
||||
)
|
||||
if err != nil {
|
||||
log.Printf("❌ Role+Dept perm seed error (%s %s): %v", method, path, err)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -265,6 +318,41 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
|
||||
"system", "update",
|
||||
wrapV3(routes.SaveMarketMailMappingHandler(pgDB)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations", "GET",
|
||||
"language", "update",
|
||||
wrapV3(routes.GetTranslationRowsHandler(pgDB)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations/{id}", "PUT",
|
||||
"language", "update",
|
||||
wrapV3(routes.UpdateTranslationRowHandler(pgDB)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations/upsert-missing", "POST",
|
||||
"language", "update",
|
||||
wrapV3(routes.UpsertMissingTranslationsHandler(pgDB)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations/sync-sources", "POST",
|
||||
"language", "update",
|
||||
wrapV3(routes.SyncTranslationSourcesHandler(pgDB, mssql)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations/translate-selected", "POST",
|
||||
"language", "update",
|
||||
wrapV3(routes.TranslateSelectedTranslationsHandler(pgDB)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations/bulk-approve", "POST",
|
||||
"language", "update",
|
||||
wrapV3(routes.BulkApproveTranslationsHandler(pgDB)),
|
||||
)
|
||||
bindV3(r, pgDB,
|
||||
"/api/language/translations/bulk-update", "POST",
|
||||
"language", "update",
|
||||
wrapV3(routes.BulkUpdateTranslationsHandler(pgDB)),
|
||||
)
|
||||
|
||||
// ============================================================
|
||||
// PERMISSIONS
|
||||
@@ -777,6 +865,7 @@ func main() {
|
||||
// 🌍 SERVER
|
||||
// -------------------------------------------------------
|
||||
router := InitRoutes(pgDB, db.MssqlDB, graphMailer)
|
||||
startTranslationSyncScheduler(pgDB, db.MssqlDB)
|
||||
|
||||
handler := enableCORS(
|
||||
middlewares.GlobalAuthMiddleware(
|
||||
|
||||
16
svc/models/translator.go
Normal file
16
svc/models/translator.go
Normal file
@@ -0,0 +1,16 @@
|
||||
package models
|
||||
|
||||
import "time"
|
||||
|
||||
type TranslatorRow struct {
|
||||
ID int64 `json:"id"`
|
||||
TKey string `json:"t_key"`
|
||||
LangCode string `json:"lang_code"`
|
||||
SourceType string `json:"source_type"`
|
||||
SourceTextTR string `json:"source_text_tr"`
|
||||
TranslatedText string `json:"translated_text"`
|
||||
IsManual bool `json:"is_manual"`
|
||||
Status string `json:"status"`
|
||||
Provider string `json:"provider"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
@@ -3,10 +3,14 @@ package queries
|
||||
import (
|
||||
"bssapp-backend/db"
|
||||
"bssapp-backend/models"
|
||||
"context"
|
||||
"database/sql"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func GetProductPricingList() ([]models.ProductPricing, error) {
|
||||
rows, err := db.MssqlDB.Query(`
|
||||
func GetProductPricingList(ctx context.Context) ([]models.ProductPricing, error) {
|
||||
const query = `
|
||||
WITH base_products AS (
|
||||
SELECT
|
||||
LTRIM(RTRIM(ProductCode)) AS ProductCode,
|
||||
@@ -160,9 +164,32 @@ func GetProductPricingList() ([]models.ProductPricing, error) {
|
||||
LEFT JOIN stock_totals st
|
||||
ON st.ItemCode = bp.ProductCode
|
||||
ORDER BY bp.ProductCode;
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
`
|
||||
|
||||
var (
|
||||
rows *sql.Rows
|
||||
rowsErr error
|
||||
)
|
||||
for attempt := 1; attempt <= 3; attempt++ {
|
||||
var err error
|
||||
rows, err = db.MssqlDB.QueryContext(ctx, query)
|
||||
if err == nil {
|
||||
rowsErr = nil
|
||||
break
|
||||
}
|
||||
rowsErr = err
|
||||
if ctx.Err() != nil || !isTransientMSSQLNetworkError(err) || attempt == 3 {
|
||||
break
|
||||
}
|
||||
wait := time.Duration(attempt*300) * time.Millisecond
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
break
|
||||
case <-time.After(wait):
|
||||
}
|
||||
}
|
||||
if rowsErr != nil {
|
||||
return nil, rowsErr
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
@@ -191,3 +218,17 @@ func GetProductPricingList() ([]models.ProductPricing, error) {
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func isTransientMSSQLNetworkError(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
e := strings.ToLower(err.Error())
|
||||
return strings.Contains(e, "i/o timeout") ||
|
||||
strings.Contains(e, "timeout") ||
|
||||
strings.Contains(e, "wsarecv") ||
|
||||
strings.Contains(e, "connection attempt failed") ||
|
||||
strings.Contains(e, "no connection could be made") ||
|
||||
strings.Contains(e, "broken pipe") ||
|
||||
strings.Contains(e, "connection reset")
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package queries
|
||||
|
||||
import (
|
||||
"bssapp-backend/db"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"context"
|
||||
"database/sql"
|
||||
@@ -14,9 +15,7 @@ func GetStatements(ctx context.Context, params models.StatementParams) ([]models
|
||||
|
||||
// AccountCode normalize: "ZLA0127" → "ZLA 0127"
|
||||
params.AccountCode = normalizeMasterAccountCode(params.AccountCode)
|
||||
if strings.TrimSpace(params.LangCode) == "" {
|
||||
params.LangCode = "TR"
|
||||
}
|
||||
params.LangCode = i18n.NormalizeLangCode(params.LangCode)
|
||||
|
||||
// Parislemler []string → '1','2','3'
|
||||
parislemFilter := "''"
|
||||
@@ -221,8 +220,8 @@ SELECT
|
||||
CONVERT(varchar(10), @startdate, 23) AS Vade_Tarihi,
|
||||
|
||||
'Baslangic_devir' AS Belge_No,
|
||||
'Devir' AS Islem_Tipi,
|
||||
'Devir Bakiyesi' AS Aciklama,
|
||||
CASE WHEN @LangCode = 'EN' THEN 'Opening' ELSE 'Devir' END AS Islem_Tipi,
|
||||
CASE WHEN @LangCode = 'EN' THEN 'Opening Balance' ELSE 'Devir Bakiyesi' END AS Aciklama,
|
||||
|
||||
o.Para_Birimi,
|
||||
|
||||
|
||||
@@ -6,8 +6,8 @@ import (
|
||||
"log"
|
||||
)
|
||||
|
||||
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
|
||||
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler)
|
||||
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
|
||||
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
|
||||
if err != nil {
|
||||
log.Printf("Header query error: %v", err)
|
||||
return nil, nil, err
|
||||
|
||||
@@ -10,13 +10,14 @@ func getStatementsForPDF(
|
||||
accountCode string,
|
||||
startDate string,
|
||||
endDate string,
|
||||
langCode string,
|
||||
parislemler []string,
|
||||
) ([]models.StatementHeader, error) {
|
||||
return GetStatements(ctx, models.StatementParams{
|
||||
AccountCode: accountCode,
|
||||
StartDate: startDate,
|
||||
EndDate: endDate,
|
||||
LangCode: "TR",
|
||||
LangCode: langCode,
|
||||
Parislemler: parislemler,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -11,8 +11,8 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
|
||||
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, parislemler)
|
||||
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
|
||||
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
|
||||
if err != nil {
|
||||
log.Printf("Header query error: %v", err)
|
||||
return nil, nil, err
|
||||
|
||||
@@ -3,24 +3,94 @@ package routes
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/queries"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"log"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// GET /api/pricing/products
|
||||
func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
|
||||
started := time.Now()
|
||||
traceID := buildPricingTraceID(r)
|
||||
w.Header().Set("X-Trace-ID", traceID)
|
||||
claims, ok := auth.GetClaimsFromContext(r.Context())
|
||||
if !ok || claims == nil {
|
||||
log.Printf("[ProductPricing] trace=%s unauthorized method=%s path=%s", traceID, r.Method, r.URL.Path)
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)
|
||||
|
||||
rows, err := queries.GetProductPricingList()
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 180*time.Second)
|
||||
defer cancel()
|
||||
|
||||
rows, err := queries.GetProductPricingList(ctx)
|
||||
if err != nil {
|
||||
if isPricingTimeoutLike(err, ctx.Err()) {
|
||||
log.Printf(
|
||||
"[ProductPricing] trace=%s timeout user=%s id=%d duration_ms=%d err=%v",
|
||||
traceID,
|
||||
claims.Username,
|
||||
claims.ID,
|
||||
time.Since(started).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "Urun fiyatlandirma listesi zaman asimina ugradi", http.StatusGatewayTimeout)
|
||||
return
|
||||
}
|
||||
log.Printf(
|
||||
"[ProductPricing] trace=%s query_error user=%s id=%d duration_ms=%d err=%v",
|
||||
traceID,
|
||||
claims.Username,
|
||||
claims.ID,
|
||||
time.Since(started).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
log.Printf(
|
||||
"[ProductPricing] trace=%s success user=%s id=%d count=%d duration_ms=%d",
|
||||
traceID,
|
||||
claims.Username,
|
||||
claims.ID,
|
||||
len(rows),
|
||||
time.Since(started).Milliseconds(),
|
||||
)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
_ = json.NewEncoder(w).Encode(rows)
|
||||
}
|
||||
|
||||
func buildPricingTraceID(r *http.Request) string {
|
||||
if r != nil {
|
||||
if id := strings.TrimSpace(r.Header.Get("X-Request-ID")); id != "" {
|
||||
return id
|
||||
}
|
||||
if id := strings.TrimSpace(r.Header.Get("X-Correlation-ID")); id != "" {
|
||||
return id
|
||||
}
|
||||
}
|
||||
return "pricing-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
|
||||
func isPricingTimeoutLike(err error, ctxErr error) bool {
|
||||
if errors.Is(err, context.DeadlineExceeded) || errors.Is(ctxErr, context.DeadlineExceeded) {
|
||||
return true
|
||||
}
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
e := strings.ToLower(err.Error())
|
||||
return strings.Contains(e, "timeout") ||
|
||||
strings.Contains(e, "i/o timeout") ||
|
||||
strings.Contains(e, "wsarecv") ||
|
||||
strings.Contains(e, "connection attempt failed") ||
|
||||
strings.Contains(e, "no connection could be made") ||
|
||||
strings.Contains(e, "failed to respond")
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"bssapp-backend/queries"
|
||||
"encoding/json"
|
||||
@@ -22,7 +23,7 @@ func GetStatementHeadersHandler(w http.ResponseWriter, r *http.Request) {
|
||||
StartDate: r.URL.Query().Get("startdate"),
|
||||
EndDate: r.URL.Query().Get("enddate"),
|
||||
AccountCode: r.URL.Query().Get("accountcode"),
|
||||
LangCode: r.URL.Query().Get("langcode"),
|
||||
LangCode: i18n.ResolveLangCode(r.URL.Query().Get("langcode"), r.Header.Get("Accept-Language")),
|
||||
Parislemler: r.URL.Query()["parislemler"],
|
||||
ExcludeOpening: false,
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"bssapp-backend/queries"
|
||||
"bytes"
|
||||
@@ -40,9 +41,18 @@ const (
|
||||
)
|
||||
|
||||
// Kolonlar
|
||||
var hMainCols = []string{
|
||||
"Belge No", "Tarih", "Vade", "İşlem",
|
||||
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
|
||||
func hMainCols(lang string) []string {
|
||||
return []string{
|
||||
i18n.T(lang, "pdf.main.doc_no"),
|
||||
i18n.T(lang, "pdf.main.date"),
|
||||
i18n.T(lang, "pdf.main.due_date"),
|
||||
i18n.T(lang, "pdf.main.operation"),
|
||||
i18n.T(lang, "pdf.main.description"),
|
||||
i18n.T(lang, "pdf.main.currency"),
|
||||
i18n.T(lang, "pdf.main.debit"),
|
||||
i18n.T(lang, "pdf.main.credit"),
|
||||
i18n.T(lang, "pdf.main.balance"),
|
||||
}
|
||||
}
|
||||
|
||||
var hMainWbase = []float64{
|
||||
@@ -136,7 +146,7 @@ func hCalcRowHeightForText(pdf *gofpdf.Fpdf, text string, colWidth, lineHeight,
|
||||
|
||||
/* ============================ HEADER ============================ */
|
||||
|
||||
func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
|
||||
func hDrawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
|
||||
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
|
||||
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
|
||||
}
|
||||
@@ -149,13 +159,13 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
|
||||
|
||||
pdf.SetFont(hFontFamilyBold, "", 12)
|
||||
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
|
||||
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
|
||||
|
||||
// Bugünün tarihi (sağ üst)
|
||||
today := time.Now().Format("02.01.2006")
|
||||
pdf.SetFont(hFontFamilyReg, "", 9)
|
||||
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
|
||||
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
|
||||
|
||||
// Cari & Tarih kutuları (daha yukarı taşındı)
|
||||
boxY := hMarginT + hLogoW - 6
|
||||
@@ -163,11 +173,11 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
|
||||
|
||||
pdf.Rect(hMarginL, boxY, 140, 11, "")
|
||||
pdf.SetXY(hMarginL+2, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
|
||||
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
|
||||
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
|
||||
|
||||
// Alt çizgi
|
||||
y := boxY + 13
|
||||
@@ -180,7 +190,7 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
|
||||
|
||||
/* ============================ TABLO ============================ */
|
||||
|
||||
func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
func hDrawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
|
||||
x := hMarginL
|
||||
y := pdf.GetY()
|
||||
w := hPageWidth - hMarginL - hMarginR
|
||||
@@ -194,9 +204,9 @@ func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2])
|
||||
|
||||
pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6, 5.0, currency, "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
|
||||
|
||||
txt := "Son Bakiye = " + hFormatCurrencyTR(sonBakiye)
|
||||
txt := i18n.T(lang, "pdf.ending_balance") + " = " + hFormatCurrencyTR(sonBakiye)
|
||||
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
|
||||
|
||||
@@ -282,6 +292,10 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
accountCode := r.URL.Query().Get("accountcode")
|
||||
startDate := r.URL.Query().Get("startdate")
|
||||
endDate := r.URL.Query().Get("enddate")
|
||||
langCode := i18n.ResolveLangCode(
|
||||
r.URL.Query().Get("langcode"),
|
||||
r.Header.Get("Accept-Language"),
|
||||
)
|
||||
rawParis := r.URL.Query()["parislemler"]
|
||||
|
||||
var parislemler []string
|
||||
@@ -292,7 +306,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
}
|
||||
}
|
||||
|
||||
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, parislemler)
|
||||
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
@@ -348,7 +362,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
newPage := func() {
|
||||
pageNum++
|
||||
pdf.AddPage()
|
||||
tableTop := hDrawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
|
||||
tableTop := hDrawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
|
||||
pdf.SetY(tableTop)
|
||||
}
|
||||
|
||||
@@ -356,8 +370,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
|
||||
for _, cur := range order {
|
||||
g := groups[cur]
|
||||
hDrawGroupBar(pdf, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
|
||||
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
|
||||
|
||||
rowIndex := 0
|
||||
for _, h := range g.rows {
|
||||
@@ -372,8 +386,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX)
|
||||
if hNeedNewPage(pdf, rh+hHeaderRowH) {
|
||||
newPage()
|
||||
hDrawGroupBar(pdf, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
|
||||
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
|
||||
}
|
||||
|
||||
hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex)
|
||||
|
||||
@@ -3,6 +3,7 @@ package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"bssapp-backend/queries"
|
||||
"bytes"
|
||||
@@ -48,10 +49,18 @@ const (
|
||||
logoW = 42.0
|
||||
)
|
||||
|
||||
// Ana tablo kolonları
|
||||
var mainCols = []string{
|
||||
"Belge No", "Tarih", "Vade", "İşlem",
|
||||
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
|
||||
func mainCols(lang string) []string {
|
||||
return []string{
|
||||
i18n.T(lang, "pdf.main.doc_no"),
|
||||
i18n.T(lang, "pdf.main.date"),
|
||||
i18n.T(lang, "pdf.main.due_date"),
|
||||
i18n.T(lang, "pdf.main.operation"),
|
||||
i18n.T(lang, "pdf.main.description"),
|
||||
i18n.T(lang, "pdf.main.currency"),
|
||||
i18n.T(lang, "pdf.main.debit"),
|
||||
i18n.T(lang, "pdf.main.credit"),
|
||||
i18n.T(lang, "pdf.main.balance"),
|
||||
}
|
||||
}
|
||||
|
||||
// Ana tablo kolon genişlikleri (ilk 3 geniş)
|
||||
@@ -68,10 +77,21 @@ var mainWbase = []float64{
|
||||
}
|
||||
|
||||
// Detay tablo kolonları ve genişlikleri
|
||||
var dCols = []string{
|
||||
"Ana Grup", "Alt Grup", "Garson", "Fit", "İçerik",
|
||||
"Ürün", "Renk", "Adet", "Fiyat", "Tutar",
|
||||
func detailCols(lang string) []string {
|
||||
return []string{
|
||||
i18n.T(lang, "pdf.detail.main_group"),
|
||||
i18n.T(lang, "pdf.detail.sub_group"),
|
||||
i18n.T(lang, "pdf.detail.waiter"),
|
||||
i18n.T(lang, "pdf.detail.fit"),
|
||||
i18n.T(lang, "pdf.detail.content"),
|
||||
i18n.T(lang, "pdf.detail.product"),
|
||||
i18n.T(lang, "pdf.detail.color"),
|
||||
i18n.T(lang, "pdf.detail.qty"),
|
||||
i18n.T(lang, "pdf.detail.price"),
|
||||
i18n.T(lang, "pdf.detail.total"),
|
||||
}
|
||||
}
|
||||
|
||||
var dWbase = []float64{
|
||||
30, 28, 22, 20, 56, 30, 22, 20, 20, 26}
|
||||
|
||||
@@ -224,7 +244,7 @@ func drawLabeledBox(pdf *gofpdf.Fpdf, x, y, w, h float64, label, value string, a
|
||||
}
|
||||
}
|
||||
|
||||
func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
|
||||
func drawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
|
||||
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
|
||||
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
|
||||
}
|
||||
@@ -237,13 +257,13 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
|
||||
|
||||
pdf.SetFont(hFontFamilyBold, "", 12)
|
||||
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
|
||||
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
|
||||
|
||||
// Bugünün tarihi (sağ üst)
|
||||
today := time.Now().Format("02.01.2006")
|
||||
pdf.SetFont(hFontFamilyReg, "", 9)
|
||||
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
|
||||
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
|
||||
|
||||
// Cari & Tarih kutuları (daha yukarı taşındı)
|
||||
boxY := hMarginT + hLogoW - 6
|
||||
@@ -251,11 +271,11 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
|
||||
|
||||
pdf.Rect(hMarginL, boxY, 140, 11, "")
|
||||
pdf.SetXY(hMarginL+2, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
|
||||
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
|
||||
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
|
||||
|
||||
// Alt çizgi
|
||||
y := boxY + 13
|
||||
@@ -268,7 +288,7 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
|
||||
|
||||
/* ============================ GROUP BAR ============================ */
|
||||
|
||||
func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
func drawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
|
||||
// Kutu alanı (tam genişlik)
|
||||
x := marginL
|
||||
y := pdf.GetY()
|
||||
@@ -285,9 +305,9 @@ func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2])
|
||||
|
||||
pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s", currency), "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
|
||||
|
||||
txt := "Son Bakiye = " + formatCurrencyTR(sonBakiye)
|
||||
txt := i18n.T(lang, "pdf.ending_balance") + " = " + formatCurrencyTR(sonBakiye)
|
||||
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
|
||||
|
||||
@@ -430,6 +450,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
accountCode := r.URL.Query().Get("accountcode")
|
||||
startDate := r.URL.Query().Get("startdate")
|
||||
endDate := r.URL.Query().Get("enddate")
|
||||
langCode := i18n.ResolveLangCode(
|
||||
r.URL.Query().Get("langcode"),
|
||||
r.Header.Get("Accept-Language"),
|
||||
)
|
||||
|
||||
// parislemler sanitize
|
||||
rawParis := r.URL.Query()["parislemler"]
|
||||
@@ -445,7 +469,7 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
accountCode, startDate, endDate, parislemler)
|
||||
|
||||
// 1) Header verileri
|
||||
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, parislemler)
|
||||
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
@@ -520,12 +544,12 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
pdf.AddPage()
|
||||
|
||||
// drawPageHeader tablo başlangıç yüksekliğini döndürüyor
|
||||
tableTop := drawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
|
||||
tableTop := drawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
|
||||
|
||||
// Sayfa numarası
|
||||
pdf.SetFont(fontFamilyReg, "", 6)
|
||||
pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3)
|
||||
pdf.CellFormat(28, 5, fmt.Sprintf("Sayfa %d", pageNum), "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(28, 5, fmt.Sprintf("%s %d", i18n.T(langCode, "pdf.page"), pageNum), "", 0, "R", false, 0, "")
|
||||
|
||||
// Tablo Y konumunu ayarla
|
||||
pdf.SetY(tableTop)
|
||||
@@ -540,8 +564,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
if needNewPage(pdf, groupBarH+headerRowH) {
|
||||
newPage()
|
||||
}
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
|
||||
for _, h := range g.rows {
|
||||
row := []string{
|
||||
@@ -557,8 +581,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
|
||||
if needNewPage(pdf, rh+headerRowH) {
|
||||
newPage()
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
}
|
||||
drawMainDataRow(pdf, row, mainWn, rh)
|
||||
|
||||
@@ -567,10 +591,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
if len(details) > 0 {
|
||||
if needNewPage(pdf, subHeaderRowH) {
|
||||
newPage()
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
}
|
||||
drawDetailHeaderRow(pdf, dCols, dWn)
|
||||
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
|
||||
|
||||
for i, d := range details {
|
||||
drow := []string{
|
||||
@@ -591,9 +615,9 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
|
||||
if needNewPage(pdf, rh2) {
|
||||
newPage()
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawDetailHeaderRow(pdf, dCols, dWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
|
||||
}
|
||||
// zebra: çift indekslerde açık zemin
|
||||
fill := (i%2 == 0)
|
||||
|
||||
1669
svc/routes/translations.go
Normal file
1669
svc/routes/translations.go
Normal file
@@ -0,0 +1,1669 @@
|
||||
package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/models"
|
||||
"bytes"
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/lib/pq"
|
||||
)
|
||||
|
||||
var translationLangSet = map[string]struct{}{
|
||||
"tr": {},
|
||||
"en": {},
|
||||
"de": {},
|
||||
"it": {},
|
||||
"es": {},
|
||||
"ru": {},
|
||||
"ar": {},
|
||||
}
|
||||
|
||||
var translationStatusSet = map[string]struct{}{
|
||||
"pending": {},
|
||||
"approved": {},
|
||||
"rejected": {},
|
||||
}
|
||||
|
||||
var translationSourceTypeSet = map[string]struct{}{
|
||||
"dummy": {},
|
||||
"postgre": {},
|
||||
"mssql": {},
|
||||
}
|
||||
|
||||
var (
|
||||
reQuotedText = regexp.MustCompile(`['"]([^'"]{3,120})['"]`)
|
||||
reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
|
||||
reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
|
||||
reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
|
||||
)
|
||||
|
||||
type TranslationUpdatePayload struct {
|
||||
SourceTextTR *string `json:"source_text_tr"`
|
||||
TranslatedText *string `json:"translated_text"`
|
||||
SourceType *string `json:"source_type"`
|
||||
IsManual *bool `json:"is_manual"`
|
||||
Status *string `json:"status"`
|
||||
}
|
||||
|
||||
type UpsertMissingPayload struct {
|
||||
Items []UpsertMissingItem `json:"items"`
|
||||
Languages []string `json:"languages"`
|
||||
}
|
||||
|
||||
type UpsertMissingItem struct {
|
||||
TKey string `json:"t_key"`
|
||||
SourceTextTR string `json:"source_text_tr"`
|
||||
}
|
||||
|
||||
type SyncSourcesPayload struct {
|
||||
AutoTranslate bool `json:"auto_translate"`
|
||||
Languages []string `json:"languages"`
|
||||
Limit int `json:"limit"`
|
||||
OnlyNew *bool `json:"only_new"`
|
||||
}
|
||||
|
||||
type BulkApprovePayload struct {
|
||||
IDs []int64 `json:"ids"`
|
||||
}
|
||||
|
||||
type BulkUpdatePayload struct {
|
||||
Items []BulkUpdateItem `json:"items"`
|
||||
}
|
||||
|
||||
type TranslateSelectedPayload struct {
|
||||
TKeys []string `json:"t_keys"`
|
||||
Languages []string `json:"languages"`
|
||||
Limit int `json:"limit"`
|
||||
}
|
||||
|
||||
type BulkUpdateItem struct {
|
||||
ID int64 `json:"id"`
|
||||
SourceTextTR *string `json:"source_text_tr"`
|
||||
TranslatedText *string `json:"translated_text"`
|
||||
SourceType *string `json:"source_type"`
|
||||
IsManual *bool `json:"is_manual"`
|
||||
Status *string `json:"status"`
|
||||
}
|
||||
|
||||
type TranslationSyncOptions struct {
|
||||
AutoTranslate bool
|
||||
Languages []string
|
||||
Limit int
|
||||
OnlyNew bool
|
||||
TraceID string
|
||||
}
|
||||
|
||||
type TranslationSyncResult struct {
|
||||
SeedCount int `json:"seed_count"`
|
||||
AffectedCount int `json:"affected_count"`
|
||||
AutoTranslated int `json:"auto_translated"`
|
||||
TargetLangs []string `json:"target_languages"`
|
||||
TraceID string `json:"trace_id"`
|
||||
DurationMS int64 `json:"duration_ms"`
|
||||
}
|
||||
|
||||
type sourceSeed struct {
|
||||
TKey string
|
||||
SourceText string
|
||||
SourceType string
|
||||
}
|
||||
|
||||
func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
q := strings.TrimSpace(r.URL.Query().Get("q"))
|
||||
lang := normalizeTranslationLang(r.URL.Query().Get("lang"))
|
||||
status := normalizeTranslationStatus(r.URL.Query().Get("status"))
|
||||
sourceType := normalizeTranslationSourceType(r.URL.Query().Get("source_type"))
|
||||
|
||||
manualFilter := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("manual")))
|
||||
missingOnly := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("missing"))) == "true"
|
||||
|
||||
limit := 0
|
||||
if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
|
||||
if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 50000 {
|
||||
limit = parsed
|
||||
}
|
||||
}
|
||||
|
||||
clauses := []string{"1=1"}
|
||||
args := make([]any, 0, 8)
|
||||
argIndex := 1
|
||||
|
||||
if q != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("(source_text_tr ILIKE $%d OR translated_text ILIKE $%d)", argIndex, argIndex))
|
||||
args = append(args, "%"+q+"%")
|
||||
argIndex++
|
||||
}
|
||||
|
||||
if lang != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("lang_code = $%d", argIndex))
|
||||
args = append(args, lang)
|
||||
argIndex++
|
||||
}
|
||||
|
||||
if status != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("status = $%d", argIndex))
|
||||
args = append(args, status)
|
||||
argIndex++
|
||||
}
|
||||
|
||||
if sourceType != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy') = $%d", argIndex))
|
||||
args = append(args, sourceType)
|
||||
argIndex++
|
||||
}
|
||||
|
||||
switch manualFilter {
|
||||
case "true":
|
||||
clauses = append(clauses, "is_manual = true")
|
||||
case "false":
|
||||
clauses = append(clauses, "is_manual = false")
|
||||
}
|
||||
|
||||
if missingOnly {
|
||||
clauses = append(clauses, "(translated_text IS NULL OR btrim(translated_text) = '')")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
id,
|
||||
t_key,
|
||||
lang_code,
|
||||
COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
|
||||
source_text_tr,
|
||||
COALESCE(translated_text, '') AS translated_text,
|
||||
is_manual,
|
||||
status,
|
||||
COALESCE(provider, '') AS provider,
|
||||
updated_at
|
||||
FROM mk_translator
|
||||
WHERE %s
|
||||
ORDER BY t_key, lang_code
|
||||
`, strings.Join(clauses, " AND "))
|
||||
if limit > 0 {
|
||||
query += fmt.Sprintf("LIMIT $%d", argIndex)
|
||||
args = append(args, limit)
|
||||
}
|
||||
|
||||
rows, err := db.Query(query, args...)
|
||||
if err != nil {
|
||||
http.Error(w, "translation query error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
list := make([]models.TranslatorRow, 0, 1024)
|
||||
for rows.Next() {
|
||||
var row models.TranslatorRow
|
||||
if err := rows.Scan(
|
||||
&row.ID,
|
||||
&row.TKey,
|
||||
&row.LangCode,
|
||||
&row.SourceType,
|
||||
&row.SourceTextTR,
|
||||
&row.TranslatedText,
|
||||
&row.IsManual,
|
||||
&row.Status,
|
||||
&row.Provider,
|
||||
&row.UpdatedAt,
|
||||
); err != nil {
|
||||
http.Error(w, "translation scan error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
list = append(list, row)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
http.Error(w, "translation rows error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"rows": list,
|
||||
"count": len(list),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func UpdateTranslationRowHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
id, err := strconv.ParseInt(strings.TrimSpace(mux.Vars(r)["id"]), 10, 64)
|
||||
if err != nil || id <= 0 {
|
||||
http.Error(w, "invalid row id", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var payload TranslationUpdatePayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if payload.Status != nil {
|
||||
normalized := normalizeTranslationStatus(*payload.Status)
|
||||
if normalized == "" {
|
||||
http.Error(w, "invalid status", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
payload.Status = &normalized
|
||||
}
|
||||
if payload.SourceType != nil {
|
||||
normalized := normalizeTranslationSourceType(*payload.SourceType)
|
||||
if normalized == "" {
|
||||
http.Error(w, "invalid source_type", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
payload.SourceType = &normalized
|
||||
}
|
||||
|
||||
updateQuery := `
|
||||
UPDATE mk_translator
|
||||
SET
|
||||
source_text_tr = COALESCE($2, source_text_tr),
|
||||
translated_text = COALESCE($3, translated_text),
|
||||
is_manual = COALESCE($4, is_manual),
|
||||
status = COALESCE($5, status),
|
||||
provider_meta = CASE
|
||||
WHEN $6::text IS NULL THEN provider_meta
|
||||
ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
|
||||
END,
|
||||
updated_at = NOW()
|
||||
WHERE id = $1
|
||||
RETURNING
|
||||
id,
|
||||
t_key,
|
||||
lang_code,
|
||||
COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
|
||||
source_text_tr,
|
||||
COALESCE(translated_text, '') AS translated_text,
|
||||
is_manual,
|
||||
status,
|
||||
COALESCE(provider, '') AS provider,
|
||||
updated_at
|
||||
`
|
||||
|
||||
var row models.TranslatorRow
|
||||
err = db.QueryRow(
|
||||
updateQuery,
|
||||
id,
|
||||
nullableString(payload.SourceTextTR),
|
||||
nullableString(payload.TranslatedText),
|
||||
payload.IsManual,
|
||||
payload.Status,
|
||||
nullableString(payload.SourceType),
|
||||
).Scan(
|
||||
&row.ID,
|
||||
&row.TKey,
|
||||
&row.LangCode,
|
||||
&row.SourceType,
|
||||
&row.SourceTextTR,
|
||||
&row.TranslatedText,
|
||||
&row.IsManual,
|
||||
&row.Status,
|
||||
&row.Provider,
|
||||
&row.UpdatedAt,
|
||||
)
|
||||
if err == sql.ErrNoRows {
|
||||
http.Error(w, "translation row not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
http.Error(w, "translation update error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(row)
|
||||
}
|
||||
}
|
||||
|
||||
func UpsertMissingTranslationsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload UpsertMissingPayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
items := normalizeMissingItems(payload.Items)
|
||||
if len(items) == 0 {
|
||||
http.Error(w, "items required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
languages := normalizeTargetLanguages(payload.Languages)
|
||||
affected, err := upsertMissingRows(db, items, languages, "dummy")
|
||||
if err != nil {
|
||||
http.Error(w, "upsert missing error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"items": len(items),
|
||||
"target_langs": languages,
|
||||
"affected_count": affected,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func SyncTranslationSourcesHandler(pgDB *sql.DB, mssqlDB *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload SyncSourcesPayload
|
||||
_ = json.NewDecoder(r.Body).Decode(&payload)
|
||||
traceID := requestTraceID(r)
|
||||
w.Header().Set("X-Trace-ID", traceID)
|
||||
start := time.Now()
|
||||
onlyNew := payload.OnlyNew == nil || *payload.OnlyNew
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=request auto_translate=%t only_new=%t limit=%d langs=%v",
|
||||
traceID,
|
||||
payload.AutoTranslate,
|
||||
onlyNew,
|
||||
payload.Limit,
|
||||
payload.Languages,
|
||||
)
|
||||
|
||||
result, err := PerformTranslationSync(pgDB, mssqlDB, TranslationSyncOptions{
|
||||
AutoTranslate: payload.AutoTranslate,
|
||||
Languages: payload.Languages,
|
||||
Limit: payload.Limit,
|
||||
OnlyNew: onlyNew,
|
||||
TraceID: traceID,
|
||||
})
|
||||
if err != nil {
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=error duration_ms=%d err=%v",
|
||||
traceID,
|
||||
time.Since(start).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "translation source sync error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=response duration_ms=%d seeds=%d affected=%d auto_translated=%d",
|
||||
traceID,
|
||||
time.Since(start).Milliseconds(),
|
||||
result.SeedCount,
|
||||
result.AffectedCount,
|
||||
result.AutoTranslated,
|
||||
)
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"trace_id": traceID,
|
||||
"result": result,
|
||||
"seed_count": result.SeedCount,
|
||||
"affected_count": result.AffectedCount,
|
||||
"auto_translated": result.AutoTranslated,
|
||||
"target_languages": result.TargetLangs,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TranslateSelectedTranslationsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload TranslateSelectedPayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
keys := normalizeStringList(payload.TKeys, 5000)
|
||||
if len(keys) == 0 {
|
||||
http.Error(w, "t_keys required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
targetLangs := normalizeTargetLanguages(payload.Languages)
|
||||
limit := payload.Limit
|
||||
if limit <= 0 {
|
||||
limit = len(keys) * len(targetLangs)
|
||||
}
|
||||
if limit <= 0 {
|
||||
limit = 1000
|
||||
}
|
||||
if limit > 50000 {
|
||||
limit = 50000
|
||||
}
|
||||
|
||||
traceID := requestTraceID(r)
|
||||
w.Header().Set("X-Trace-ID", traceID)
|
||||
start := time.Now()
|
||||
log.Printf(
|
||||
"[TranslationSelected] trace=%s stage=request keys=%d limit=%d langs=%v",
|
||||
traceID,
|
||||
len(keys),
|
||||
limit,
|
||||
targetLangs,
|
||||
)
|
||||
|
||||
translatedCount, err := autoTranslatePendingRowsForKeys(db, targetLangs, limit, keys, traceID)
|
||||
if err != nil {
|
||||
log.Printf(
|
||||
"[TranslationSelected] trace=%s stage=error duration_ms=%d err=%v",
|
||||
traceID,
|
||||
time.Since(start).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "translate selected error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf(
|
||||
"[TranslationSelected] trace=%s stage=done duration_ms=%d translated=%d",
|
||||
traceID,
|
||||
time.Since(start).Milliseconds(),
|
||||
translatedCount,
|
||||
)
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"trace_id": traceID,
|
||||
"translated_count": translatedCount,
|
||||
"key_count": len(keys),
|
||||
"target_languages": targetLangs,
|
||||
"duration_ms": time.Since(start).Milliseconds(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func BulkApproveTranslationsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload BulkApprovePayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
ids := normalizeIDListInt64(payload.IDs)
|
||||
if len(ids) == 0 {
|
||||
http.Error(w, "ids required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
res, err := db.Exec(`
|
||||
UPDATE mk_translator
|
||||
SET
|
||||
status = 'approved',
|
||||
is_manual = true,
|
||||
updated_at = NOW(),
|
||||
provider_meta = jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{is_new}', 'false'::jsonb, true)
|
||||
WHERE id = ANY($1)
|
||||
`, pq.Array(ids))
|
||||
if err != nil {
|
||||
http.Error(w, "bulk approve error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
affected, _ := res.RowsAffected()
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"affected_count": affected,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func BulkUpdateTranslationsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload BulkUpdatePayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if len(payload.Items) == 0 {
|
||||
http.Error(w, "items required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
tx, err := db.Begin()
|
||||
if err != nil {
|
||||
http.Error(w, "transaction start error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
affected := 0
|
||||
for _, it := range payload.Items {
|
||||
if it.ID <= 0 {
|
||||
continue
|
||||
}
|
||||
status := normalizeOptionalStatus(it.Status)
|
||||
sourceType := normalizeOptionalSourceType(it.SourceType)
|
||||
res, err := tx.Exec(`
|
||||
UPDATE mk_translator
|
||||
SET
|
||||
source_text_tr = COALESCE($2, source_text_tr),
|
||||
translated_text = COALESCE($3, translated_text),
|
||||
is_manual = COALESCE($4, is_manual),
|
||||
status = COALESCE($5, status),
|
||||
provider_meta = CASE
|
||||
WHEN $6::text IS NULL THEN provider_meta
|
||||
ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
|
||||
END,
|
||||
updated_at = NOW()
|
||||
WHERE id = $1
|
||||
`, it.ID, nullableString(it.SourceTextTR), nullableString(it.TranslatedText), it.IsManual, status, sourceType)
|
||||
if err != nil {
|
||||
http.Error(w, "bulk update error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
if n, _ := res.RowsAffected(); n > 0 {
|
||||
affected += int(n)
|
||||
}
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
http.Error(w, "transaction commit error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"affected_count": affected,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func PerformTranslationSync(pgDB *sql.DB, mssqlDB *sql.DB, options TranslationSyncOptions) (TranslationSyncResult, error) {
|
||||
traceID := strings.TrimSpace(options.TraceID)
|
||||
if traceID == "" {
|
||||
traceID = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
start := time.Now()
|
||||
limit := options.Limit
|
||||
if limit <= 0 || limit > 100000 {
|
||||
limit = 20000
|
||||
}
|
||||
targetLangs := normalizeTargetLanguages(options.Languages)
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=start auto_translate=%t only_new=%t limit=%d langs=%v",
|
||||
traceID,
|
||||
options.AutoTranslate,
|
||||
options.OnlyNew,
|
||||
limit,
|
||||
targetLangs,
|
||||
)
|
||||
|
||||
collectStart := time.Now()
|
||||
seeds := collectSourceSeeds(pgDB, mssqlDB, limit)
|
||||
seeds, reusedByText := reuseExistingSeedKeys(pgDB, seeds)
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=collect done_ms=%d total=%d reused_by_text=%d sources=%s",
|
||||
traceID,
|
||||
time.Since(collectStart).Milliseconds(),
|
||||
len(seeds),
|
||||
reusedByText,
|
||||
formatSourceCounts(countSeedsBySource(seeds)),
|
||||
)
|
||||
if options.OnlyNew {
|
||||
before := len(seeds)
|
||||
filterStart := time.Now()
|
||||
seeds = filterNewSeeds(pgDB, seeds)
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=filter_only_new done_ms=%d before=%d after=%d skipped=%d",
|
||||
traceID,
|
||||
time.Since(filterStart).Milliseconds(),
|
||||
before,
|
||||
len(seeds),
|
||||
before-len(seeds),
|
||||
)
|
||||
}
|
||||
if len(seeds) == 0 {
|
||||
return TranslationSyncResult{
|
||||
TargetLangs: targetLangs,
|
||||
TraceID: traceID,
|
||||
DurationMS: time.Since(start).Milliseconds(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
upsertStart := time.Now()
|
||||
affected, err := upsertSourceSeeds(pgDB, seeds, targetLangs)
|
||||
if err != nil {
|
||||
return TranslationSyncResult{}, err
|
||||
}
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=upsert done_ms=%d affected=%d",
|
||||
traceID,
|
||||
time.Since(upsertStart).Milliseconds(),
|
||||
affected,
|
||||
)
|
||||
|
||||
autoTranslated := 0
|
||||
if options.AutoTranslate {
|
||||
autoStart := time.Now()
|
||||
var autoErr error
|
||||
autoTranslated, autoErr = autoTranslatePendingRowsForKeys(pgDB, targetLangs, limit, uniqueSeedKeys(seeds), traceID)
|
||||
if autoErr != nil {
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d err=%v",
|
||||
traceID,
|
||||
time.Since(autoStart).Milliseconds(),
|
||||
autoTranslated,
|
||||
autoErr,
|
||||
)
|
||||
} else {
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d",
|
||||
traceID,
|
||||
time.Since(autoStart).Milliseconds(),
|
||||
autoTranslated,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
result := TranslationSyncResult{
|
||||
SeedCount: len(seeds),
|
||||
AffectedCount: affected,
|
||||
AutoTranslated: autoTranslated,
|
||||
TargetLangs: targetLangs,
|
||||
TraceID: traceID,
|
||||
DurationMS: time.Since(start).Milliseconds(),
|
||||
}
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=done duration_ms=%d seeds=%d affected=%d auto_translated=%d",
|
||||
traceID,
|
||||
result.DurationMS,
|
||||
result.SeedCount,
|
||||
result.AffectedCount,
|
||||
result.AutoTranslated,
|
||||
)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func upsertMissingRows(db *sql.DB, items []UpsertMissingItem, languages []string, forcedSourceType string) (int, error) {
|
||||
tx, err := db.Begin()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
affected := 0
|
||||
for _, it := range items {
|
||||
sourceType := forcedSourceType
|
||||
if sourceType == "" {
|
||||
sourceType = "dummy"
|
||||
}
|
||||
|
||||
res, err := tx.Exec(`
|
||||
INSERT INTO mk_translator
|
||||
(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
|
||||
VALUES
|
||||
($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text))
|
||||
ON CONFLICT (t_key, lang_code) DO UPDATE
|
||||
SET
|
||||
source_text_tr = EXCLUDED.source_text_tr,
|
||||
provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($3::text), true),
|
||||
updated_at = NOW()
|
||||
`, it.TKey, it.SourceTextTR, sourceType)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if n, _ := res.RowsAffected(); n > 0 {
|
||||
affected += int(n)
|
||||
}
|
||||
|
||||
for _, lang := range languages {
|
||||
res, err := tx.Exec(`
|
||||
INSERT INTO mk_translator
|
||||
(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
|
||||
VALUES
|
||||
($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text))
|
||||
ON CONFLICT (t_key, lang_code) DO UPDATE
|
||||
SET
|
||||
source_text_tr = EXCLUDED.source_text_tr,
|
||||
provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($4::text), true),
|
||||
updated_at = NOW()
|
||||
`, it.TKey, lang, it.SourceTextTR, sourceType)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if n, _ := res.RowsAffected(); n > 0 {
|
||||
affected += int(n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return affected, nil
|
||||
}
|
||||
|
||||
// upsertSourceSeeds persists collected seeds the same way upsertMissingRows
// does (one approved 'tr' row + pending rows per language, single
// transaction), with two differences: blank seeds are skipped, and on
// conflict an already-stored provider_meta.source_type is preserved rather
// than overwritten. provider_meta also records an 'is_new' flag
// (false for the 'tr' row, true for the pending language rows).
func upsertSourceSeeds(db *sql.DB, seeds []sourceSeed, languages []string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// Rollback is a no-op once Commit has succeeded.
	defer tx.Rollback()

	affected := 0
	for _, seed := range seeds {
		if seed.TKey == "" || seed.SourceText == "" {
			continue // incomplete seed; nothing to store
		}
		sourceType := normalizeTranslationSourceType(seed.SourceType)
		if sourceType == "" {
			sourceType = "dummy"
		}

		res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text, 'is_new', false))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(
					COALESCE(mk_translator.provider_meta, '{}'::jsonb),
					'{source_type}',
					to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $3::text)),
					true
				),
				updated_at = NOW()
		`, seed.TKey, seed.SourceText, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}

		for _, lang := range languages {
			res, err := tx.Exec(`
				INSERT INTO mk_translator
					(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
				VALUES
					($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text, 'is_new', true))
				ON CONFLICT (t_key, lang_code) DO UPDATE
				SET
					source_text_tr = EXCLUDED.source_text_tr,
					provider_meta = jsonb_set(
						COALESCE(mk_translator.provider_meta, '{}'::jsonb),
						'{source_type}',
						to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $4::text)),
						true
					),
					updated_at = NOW()
			`, seed.TKey, lang, seed.SourceText, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}

	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
|
||||
|
||||
func collectSourceSeeds(pgDB *sql.DB, mssqlDB *sql.DB, limit int) []sourceSeed {
|
||||
seen := map[string]struct{}{}
|
||||
out := make([]sourceSeed, 0, limit)
|
||||
|
||||
appendSeed := func(seed sourceSeed) {
|
||||
if seed.TKey == "" || seed.SourceText == "" || seed.SourceType == "" {
|
||||
return
|
||||
}
|
||||
key := normalizeSeedTextKey(seed.SourceText)
|
||||
if _, ok := seen[key]; ok {
|
||||
return
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
out = append(out, seed)
|
||||
}
|
||||
|
||||
for _, row := range collectPostgreSeeds(pgDB, limit) {
|
||||
appendSeed(row)
|
||||
if len(out) >= limit {
|
||||
return out
|
||||
}
|
||||
}
|
||||
for _, row := range collectMSSQLSeeds(mssqlDB, limit-len(out)) {
|
||||
appendSeed(row)
|
||||
if len(out) >= limit {
|
||||
return out
|
||||
}
|
||||
}
|
||||
for _, row := range collectDummySeeds(limit - len(out)) {
|
||||
appendSeed(row)
|
||||
if len(out) >= limit {
|
||||
return out
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// collectPostgreSeeds derives seed texts from column names in the Postgres
// public schema (information_schema.columns), tagged SourceType "postgre".
// It is best-effort: any query/scan error simply yields fewer (or zero)
// seeds, never an error to the caller.
func collectPostgreSeeds(pgDB *sql.DB, limit int) []sourceSeed {
	if pgDB == nil || limit <= 0 {
		return nil
	}
	rows, err := pgDB.Query(`
		SELECT table_name, column_name
		FROM information_schema.columns
		WHERE table_schema = 'public'
		ORDER BY table_name, ordinal_position
		LIMIT $1
	`, limit)
	if err != nil {
		return nil
	}
	defer rows.Close()

	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue // skip unreadable rows rather than aborting
		}
		// Only the column name feeds the seed; tableName is scanned to
		// satisfy the SELECT list but otherwise unused.
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "postgre",
		})
	}
	return out
}
|
||||
|
||||
// collectMSSQLSeeds mirrors collectPostgreSeeds for the MSSQL side: seed
// texts come from INFORMATION_SCHEMA column names, tagged SourceType "mssql".
// The per-run row count is capped by TRANSLATION_MSSQL_SEED_LIMIT (default
// 2500) and the schema query is bounded by
// TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC (default 20s). Failures are logged
// and swallowed (returns nil).
func collectMSSQLSeeds(mssqlDB *sql.DB, limit int) []sourceSeed {
	if mssqlDB == nil || limit <= 0 {
		return nil
	}
	maxPerRun := parsePositiveIntEnv("TRANSLATION_MSSQL_SEED_LIMIT", 2500)
	if limit > maxPerRun {
		limit = maxPerRun
	}
	timeoutSec := parsePositiveIntEnv("TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC", 20)
	// TOP(%d) is built with Sprintf because T-SQL TOP does not accept a bind
	// parameter here; limit is an int, so no injection surface.
	query := fmt.Sprintf(`
		SELECT TOP (%d) TABLE_NAME, COLUMN_NAME
		FROM INFORMATION_SCHEMA.COLUMNS
		ORDER BY TABLE_NAME, ORDINAL_POSITION
	`, limit)
	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeoutSec)*time.Second)
	defer cancel()
	rows, err := mssqlDB.QueryContext(ctx, query)
	if err != nil {
		log.Printf("[TranslationSync] stage=collect_mssql skipped err=%v", err)
		return nil
	}
	defer rows.Close()

	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		if err := rows.Scan(&tableName, &columnName); err != nil {
			continue // skip unreadable rows rather than aborting
		}
		// Only the column name feeds the seed; tableName is unused.
		text := normalizeDisplayText(columnName)
		key := makeTextBasedSeedKey(text)
		out = append(out, sourceSeed{
			TKey:       key,
			SourceText: text,
			SourceType: "mssql",
		})
	}
	return out
}
|
||||
|
||||
// collectDummySeeds scrapes quoted string literals out of the UI source tree
// (ui/src, files *.vue/*.js/*.ts) and turns plausible display texts into
// seeds tagged SourceType "dummy". Best-effort: unreadable files and walk
// errors are ignored.
func collectDummySeeds(limit int) []sourceSeed {
	if limit <= 0 {
		return nil
	}

	root := detectProjectRoot()
	if root == "" {
		return nil
	}
	uiRoot := filepath.Join(root, "ui", "src")
	if _, err := os.Stat(uiRoot); err != nil {
		return nil // no UI tree next to the binary; nothing to scrape
	}

	out := make([]sourceSeed, 0, limit)
	seen := make(map[string]struct{}, limit)

	_ = filepath.WalkDir(uiRoot, func(path string, d fs.DirEntry, err error) error {
		if err != nil || d.IsDir() {
			return nil
		}
		ext := strings.ToLower(filepath.Ext(path))
		if ext != ".vue" && ext != ".js" && ext != ".ts" {
			return nil
		}

		b, err := os.ReadFile(path)
		if err != nil {
			return nil // unreadable file: skip, keep walking
		}
		// reQuotedText captures quoted string literals (package-level regex).
		matches := reQuotedText.FindAllStringSubmatch(string(b), -1)
		for _, m := range matches {
			text := strings.TrimSpace(m[1])
			if !isCandidateText(text) {
				continue
			}
			if _, ok := seen[text]; ok {
				continue // dedupe by exact text within this walk
			}
			seen[text] = struct{}{}
			key := makeTextBasedSeedKey(text)
			out = append(out, sourceSeed{
				TKey:       key,
				SourceText: text,
				SourceType: "dummy",
			})
			if len(out) >= limit {
				// Sentinel error is only used to abort WalkDir early;
				// it is deliberately discarded by the caller of WalkDir.
				return errors.New("limit reached")
			}
		}
		return nil
	})

	return out
}
|
||||
|
||||
// autoTranslatePendingRows is a compatibility wrapper that delegates with a
// nil key list and an auto-generated trace ID.
// NOTE(review): autoTranslatePendingRowsForKeys returns immediately when
// keys is empty, so this wrapper currently translates nothing — confirm
// whether it should instead select all pending rows.
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
	return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
}
|
||||
|
||||
// autoTranslatePendingRowsForKeys machine-translates up to limit rows of
// mk_translator that belong to the given t_keys, are in one of langs, have
// no translation yet, and are not manual. Each successful translation is
// written back individually (no transaction; partial progress survives a
// failure). Returns the number of rows updated. Heavy structured logging is
// keyed by traceID for correlation across stages.
func autoTranslatePendingRowsForKeys(db *sql.DB, langs []string, limit int, keys []string, traceID string) (int, error) {
	traceID = strings.TrimSpace(traceID)
	if traceID == "" {
		traceID = "trauto-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}

	if len(keys) == 0 {
		log.Printf("[TranslationAuto] trace=%s stage=skip reason=no_keys", traceID)
		return 0, nil
	}

	start := time.Now()
	// Placeholders are used out of declaration order: $1=langs, $2=limit,
	// $3=keys — matching the argument order below.
	rows, err := db.Query(`
		SELECT id, lang_code, source_text_tr
		FROM mk_translator
		WHERE lang_code = ANY($1)
		  AND t_key = ANY($3)
		  AND (translated_text IS NULL OR btrim(translated_text) = '')
		  AND is_manual = false
		ORDER BY updated_at ASC
		LIMIT $2
	`, pqArray(langs), limit, pq.Array(keys))
	if err != nil {
		return 0, err
	}
	defer rows.Close()

	// pending is one row awaiting machine translation.
	type pending struct {
		ID   int64
		Lang string
		Text string
	}
	list := make([]pending, 0, limit)
	pendingByLang := map[string]int{}
	sourceChars := 0
	for rows.Next() {
		var p pending
		if err := rows.Scan(&p.ID, &p.Lang, &p.Text); err != nil {
			continue // skip unreadable rows
		}
		if strings.TrimSpace(p.Text) == "" {
			continue // nothing to translate
		}
		p.Lang = normalizeTranslationLang(p.Lang)
		if p.Lang == "" {
			continue // unsupported language code in DB; skip
		}
		list = append(list, p)
		pendingByLang[p.Lang]++
		sourceChars += len([]rune(strings.TrimSpace(p.Text))) // rune count, not bytes
	}
	if err := rows.Err(); err != nil {
		return 0, err
	}

	log.Printf(
		"[TranslationAuto] trace=%s stage=prepare candidates=%d limit=%d keys=%d langs=%v source_chars=%d pending_by_lang=%s",
		traceID,
		len(list),
		limit,
		len(keys),
		langs,
		sourceChars,
		formatLangCounts(pendingByLang),
	)
	if len(list) == 0 {
		log.Printf(
			"[TranslationAuto] trace=%s stage=done duration_ms=%d translated=0 failed_translate=0 failed_update=0 rps=0.00",
			traceID,
			time.Since(start).Milliseconds(),
		)
		return 0, nil
	}

	done := 0
	failedTranslate := 0
	failedUpdate := 0
	doneByLang := map[string]int{}
	// Progress is logged every N rows or every S seconds, whichever first.
	progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
	if progressEvery <= 0 {
		progressEvery = 100
	}
	progressSec := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_SEC", 15)
	if progressSec <= 0 {
		progressSec = 15
	}
	progressTicker := time.Duration(progressSec) * time.Second
	lastProgress := time.Now()

	for i, p := range list {
		tr, err := callAzureTranslate(p.Text, p.Lang)
		if err != nil || strings.TrimSpace(tr) == "" {
			failedTranslate++
			continue // keep going; this row stays pending for a later run
		}
		// Status stays 'pending' after machine translation — presumably the
		// machine output still awaits human approval; confirm with the
		// review workflow.
		_, err = db.Exec(`
			UPDATE mk_translator
			SET translated_text = $2,
				status = 'pending',
				is_manual = false,
				provider = 'azure_translator',
				updated_at = NOW()
			WHERE id = $1
		`, p.ID, strings.TrimSpace(tr))
		if err != nil {
			failedUpdate++
			continue
		}
		done++
		doneByLang[p.Lang]++

		processed := i + 1
		shouldLogProgress := processed%progressEvery == 0 || time.Since(lastProgress) >= progressTicker || processed == len(list)
		if shouldLogProgress {
			elapsed := time.Since(start)
			rps := float64(done)
			if elapsed > 0 {
				rps = float64(done) / elapsed.Seconds()
			}
			log.Printf(
				"[TranslationAuto] trace=%s stage=progress processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f done_by_lang=%s",
				traceID,
				processed,
				len(list),
				done,
				failedTranslate,
				failedUpdate,
				elapsed.Milliseconds(),
				rps,
				formatLangCounts(doneByLang),
			)
			lastProgress = time.Now()
		}
	}

	elapsed := time.Since(start)
	rps := float64(done)
	if elapsed > 0 {
		rps = float64(done) / elapsed.Seconds()
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=done duration_ms=%d candidates=%d translated=%d failed_translate=%d failed_update=%d rps=%.2f done_by_lang=%s",
		traceID,
		elapsed.Milliseconds(),
		len(list),
		done,
		failedTranslate,
		failedUpdate,
		rps,
		formatLangCounts(doneByLang),
	)
	return done, nil
}
|
||||
|
||||
// formatLangCounts renders a lang->count map as "de=3,en=5" with keys sorted
// alphabetically so log lines are stable; an empty map renders as "-".
func formatLangCounts(counts map[string]int) string {
	if len(counts) == 0 {
		return "-"
	}
	langs := make([]string, 0, len(counts))
	for lang := range counts {
		langs = append(langs, lang)
	}
	sort.Strings(langs)

	var b strings.Builder
	for i, lang := range langs {
		if i > 0 {
			b.WriteByte(',')
		}
		fmt.Fprintf(&b, "%s=%d", lang, counts[lang])
	}
	return b.String()
}
|
||||
|
||||
// filterNewSeeds drops seeds that already exist in mk_translator, matched
// either by exact t_key or by normalized (lowercased, trimmed) source text.
// Fail-open: if the existence query errors, the full input is returned so a
// DB hiccup never silently discards seeds (the upsert is idempotent anyway).
func filterNewSeeds(pgDB *sql.DB, seeds []sourceSeed) []sourceSeed {
	if pgDB == nil || len(seeds) == 0 {
		return seeds
	}

	keys := uniqueSeedKeys(seeds)
	if len(keys) == 0 {
		return nil
	}
	textKeys := uniqueSeedTextKeys(seeds)

	rows, err := pgDB.Query(`
		SELECT DISTINCT t_key, lower(btrim(source_text_tr)) AS text_key
		FROM mk_translator
		WHERE t_key = ANY($1)
		   OR lower(btrim(source_text_tr)) = ANY($2)
	`, pq.Array(keys), pq.Array(textKeys))
	if err != nil {
		return seeds // fail-open: treat everything as new
	}
	defer rows.Close()

	// Two lookup sets: existing t_keys and existing normalized texts.
	existing := make(map[string]struct{}, len(keys))
	existingText := make(map[string]struct{}, len(textKeys))
	for rows.Next() {
		var key string
		var textKey sql.NullString
		if err := rows.Scan(&key, &textKey); err == nil {
			if strings.TrimSpace(key) != "" {
				existing[key] = struct{}{}
			}
			if textKey.Valid {
				t := strings.TrimSpace(textKey.String)
				if t != "" {
					existingText[t] = struct{}{}
				}
			}
		}
	}

	out := make([]sourceSeed, 0, len(seeds))
	for _, seed := range seeds {
		if _, ok := existing[seed.TKey]; ok {
			continue // key already stored
		}
		if _, ok := existingText[normalizeSeedTextKey(seed.SourceText)]; ok {
			continue // same text stored under another key
		}
		out = append(out, seed)
	}
	return out
}
|
||||
|
||||
func uniqueSeedKeys(seeds []sourceSeed) []string {
|
||||
seen := make(map[string]struct{}, len(seeds))
|
||||
out := make([]string, 0, len(seeds))
|
||||
for _, seed := range seeds {
|
||||
if seed.TKey == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[seed.TKey]; ok {
|
||||
continue
|
||||
}
|
||||
seen[seed.TKey] = struct{}{}
|
||||
out = append(out, seed.TKey)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func uniqueSeedTextKeys(seeds []sourceSeed) []string {
|
||||
seen := make(map[string]struct{}, len(seeds))
|
||||
out := make([]string, 0, len(seeds))
|
||||
for _, seed := range seeds {
|
||||
k := normalizeSeedTextKey(seed.SourceText)
|
||||
if k == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[k]; ok {
|
||||
continue
|
||||
}
|
||||
seen[k] = struct{}{}
|
||||
out = append(out, k)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// reuseExistingSeedKeys rewrites each seed's TKey to the t_key already stored
// in mk_translator for the same normalized source text, so duplicate texts
// converge on one canonical key. For each text the earliest stored row
// (lowest id, via ROW_NUMBER) wins. Mutates the seeds slice in place and
// returns it along with the number of keys rewritten. Fail-open on query
// errors: the input is returned untouched.
func reuseExistingSeedKeys(pgDB *sql.DB, seeds []sourceSeed) ([]sourceSeed, int) {
	if pgDB == nil || len(seeds) == 0 {
		return seeds, 0
	}

	textKeys := uniqueSeedTextKeys(seeds)
	if len(textKeys) == 0 {
		return seeds, 0
	}

	// One canonical t_key per normalized text: the row with the lowest id.
	rows, err := pgDB.Query(`
		SELECT x.text_key, x.t_key
		FROM (
			SELECT
				lower(btrim(source_text_tr)) AS text_key,
				t_key,
				ROW_NUMBER() OVER (
					PARTITION BY lower(btrim(source_text_tr))
					ORDER BY id ASC
				) AS rn
			FROM mk_translator
			WHERE lower(btrim(source_text_tr)) = ANY($1)
		) x
		WHERE x.rn = 1
	`, pq.Array(textKeys))
	if err != nil {
		return seeds, 0 // fail-open: keep generated keys
	}
	defer rows.Close()

	existingByText := make(map[string]string, len(textKeys))
	for rows.Next() {
		var textKey, tKey string
		if err := rows.Scan(&textKey, &tKey); err != nil {
			continue
		}
		textKey = strings.TrimSpace(strings.ToLower(textKey))
		tKey = strings.TrimSpace(tKey)
		if textKey == "" || tKey == "" {
			continue
		}
		existingByText[textKey] = tKey
	}

	reused := 0
	for i := range seeds {
		textKey := normalizeSeedTextKey(seeds[i].SourceText)
		if textKey == "" {
			continue
		}
		// Only count an actual rewrite, not a no-op match.
		if existingKey, ok := existingByText[textKey]; ok && existingKey != "" && seeds[i].TKey != existingKey {
			seeds[i].TKey = existingKey
			reused++
		}
	}

	return seeds, reused
}
|
||||
|
||||
func countSeedsBySource(seeds []sourceSeed) map[string]int {
|
||||
out := map[string]int{
|
||||
"dummy": 0,
|
||||
"postgre": 0,
|
||||
"mssql": 0,
|
||||
}
|
||||
for _, s := range seeds {
|
||||
key := normalizeTranslationSourceType(s.SourceType)
|
||||
if key == "" {
|
||||
key = "dummy"
|
||||
}
|
||||
out[key]++
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// formatSourceCounts renders the three source-type buckets in a fixed order
// so log lines stay grep-friendly.
func formatSourceCounts(counts map[string]int) string {
	return fmt.Sprintf(
		"dummy=%d postgre=%d mssql=%d",
		counts["dummy"],
		counts["postgre"],
		counts["mssql"],
	)
}
|
||||
|
||||
func requestTraceID(r *http.Request) string {
|
||||
if r == nil {
|
||||
return "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
id := strings.TrimSpace(r.Header.Get("X-Request-ID"))
|
||||
if id == "" {
|
||||
id = strings.TrimSpace(r.Header.Get("X-Correlation-ID"))
|
||||
}
|
||||
if id == "" {
|
||||
id = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// callAzureTranslate translates sourceText into targetLang using the Azure
// Translator REST API (v3.0 /translate). Configuration comes entirely from
// the environment: AZURE_TRANSLATOR_KEY / _ENDPOINT / _REGION (required),
// TRANSLATION_SOURCE_LANG (default "tr") and TRANSLATION_HTTP_TIMEOUT_SEC
// (default 60). Returns the trimmed translation of the first result.
// NOTE(review): no retry/backoff on 429/5xx here — the standard calls for
// it; confirm whether retrying is handled by the caller.
func callAzureTranslate(sourceText, targetLang string) (string, error) {
	key := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_KEY"))
	endpoint := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_ENDPOINT"))
	region := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_REGION"))
	if key == "" {
		return "", errors.New("AZURE_TRANSLATOR_KEY not set")
	}
	if endpoint == "" {
		return "", errors.New("AZURE_TRANSLATOR_ENDPOINT not set")
	}
	if region == "" {
		return "", errors.New("AZURE_TRANSLATOR_REGION not set")
	}

	sourceLang := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SOURCE_LANG")))
	if sourceLang == "" {
		sourceLang = "tr"
	}
	targetLang = normalizeTranslationLang(targetLang)
	// Translating tr->tr is rejected; "tr" rows are seeded as self-translations.
	if targetLang == "" || targetLang == "tr" {
		return "", fmt.Errorf("invalid target language: %q", targetLang)
	}

	// Build the URL with url.Values rather than string concatenation.
	endpoint = strings.TrimRight(endpoint, "/")
	baseURL, err := url.Parse(endpoint + "/translate")
	if err != nil {
		return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
	}
	q := baseURL.Query()
	q.Set("api-version", "3.0")
	q.Set("from", sourceLang)
	q.Set("to", targetLang)
	baseURL.RawQuery = q.Encode()

	// Azure expects a JSON array of {"Text": ...} objects.
	payload := []map[string]string{
		{"Text": sourceText},
	}
	body, _ := json.Marshal(payload)
	req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	req.Header.Set("Ocp-Apim-Subscription-Key", key)
	req.Header.Set("Ocp-Apim-Subscription-Region", region)
	req.Header.Set("Content-Type", "application/json; charset=UTF-8")

	timeoutSec := parsePositiveIntEnv("TRANSLATION_HTTP_TIMEOUT_SEC", 60)
	client := &http.Client{Timeout: time.Duration(timeoutSec) * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode >= 300 {
		// Include at most 1 KiB of the error body in the returned error.
		raw, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
		return "", fmt.Errorf("azure translator status=%d body=%s", resp.StatusCode, strings.TrimSpace(string(raw)))
	}

	// Response shape: [{"translations":[{"text":"...","to":"en"}]}]
	var result []struct {
		Translations []struct {
			Text string `json:"text"`
			To   string `json:"to"`
		} `json:"translations"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result) == 0 || len(result[0].Translations) == 0 {
		return "", errors.New("azure translator empty response")
	}
	return strings.TrimSpace(result[0].Translations[0].Text), nil
}
|
||||
|
||||
// nullableString maps a nil pointer to SQL NULL and otherwise yields the
// trimmed string value.
// NOTE(review): a string that trims to "" is returned as "" (not NULL) —
// confirm callers rely on that distinction.
func nullableString(v *string) any {
	if v == nil {
		return nil
	}
	return strings.TrimSpace(*v)
}
|
||||
|
||||
func normalizeTranslationLang(v string) string {
|
||||
lang := strings.ToLower(strings.TrimSpace(v))
|
||||
if _, ok := translationLangSet[lang]; ok {
|
||||
return lang
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func normalizeTranslationStatus(v string) string {
|
||||
status := strings.ToLower(strings.TrimSpace(v))
|
||||
if _, ok := translationStatusSet[status]; ok {
|
||||
return status
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func normalizeTranslationSourceType(v string) string {
|
||||
sourceType := strings.ToLower(strings.TrimSpace(v))
|
||||
if _, ok := translationSourceTypeSet[sourceType]; ok {
|
||||
return sourceType
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func normalizeTargetLanguages(list []string) []string {
|
||||
if len(list) == 0 {
|
||||
return []string{"en", "de", "it", "es", "ru", "ar"}
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{}, len(list))
|
||||
out := make([]string, 0, len(list))
|
||||
for _, v := range list {
|
||||
lang := normalizeTranslationLang(v)
|
||||
if lang == "" || lang == "tr" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[lang]; ok {
|
||||
continue
|
||||
}
|
||||
seen[lang] = struct{}{}
|
||||
out = append(out, lang)
|
||||
}
|
||||
if len(out) == 0 {
|
||||
return []string{"en", "de", "it", "es", "ru", "ar"}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func normalizeOptionalStatus(v *string) any {
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
s := normalizeTranslationStatus(*v)
|
||||
if s == "" {
|
||||
return nil
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func normalizeOptionalSourceType(v *string) any {
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
s := normalizeTranslationSourceType(*v)
|
||||
if s == "" {
|
||||
return nil
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func normalizeMissingItems(items []UpsertMissingItem) []UpsertMissingItem {
|
||||
seen := make(map[string]struct{}, len(items))
|
||||
out := make([]UpsertMissingItem, 0, len(items))
|
||||
|
||||
for _, it := range items {
|
||||
key := strings.TrimSpace(it.TKey)
|
||||
source := strings.TrimSpace(it.SourceTextTR)
|
||||
if key == "" || source == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
out = append(out, UpsertMissingItem{
|
||||
TKey: key,
|
||||
SourceTextTR: source,
|
||||
})
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func normalizeIDListInt64(ids []int64) []int64 {
|
||||
seen := make(map[int64]struct{}, len(ids))
|
||||
out := make([]int64, 0, len(ids))
|
||||
for _, id := range ids {
|
||||
if id <= 0 {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[id]; ok {
|
||||
continue
|
||||
}
|
||||
seen[id] = struct{}{}
|
||||
out = append(out, id)
|
||||
}
|
||||
sort.Slice(out, func(i, j int) bool { return out[i] < out[j] })
|
||||
return out
|
||||
}
|
||||
|
||||
// detectProjectRoot walks up from the working directory (at most two levels)
// looking for a directory that contains a "ui" subdirectory, which marks the
// repository root. Returns "" when neither the cwd nor its two ancestors
// qualify.
func detectProjectRoot() string {
	wd, err := os.Getwd()
	if err != nil {
		return ""
	}
	// cwd, parent, grandparent — covers running from svc/ or a build dir.
	candidates := []string{
		wd,
		filepath.Dir(wd),
		filepath.Dir(filepath.Dir(wd)),
	}
	for _, c := range candidates {
		if _, err := os.Stat(filepath.Join(c, "ui")); err == nil {
			return c
		}
	}
	return ""
}
|
||||
|
||||
func isCandidateText(s string) bool {
|
||||
s = strings.TrimSpace(s)
|
||||
if len(s) < 3 || len(s) > 120 {
|
||||
return false
|
||||
}
|
||||
if reBadText.MatchString(s) {
|
||||
return false
|
||||
}
|
||||
if !reHasLetter.MatchString(s) {
|
||||
return false
|
||||
}
|
||||
if strings.Contains(s, "/api/") {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func sanitizeKey(s string) string {
|
||||
s = strings.ToLower(strings.TrimSpace(s))
|
||||
s = strings.ReplaceAll(s, " ", "_")
|
||||
s = reKeyUnsafe.ReplaceAllString(s, "_")
|
||||
s = strings.Trim(s, "_")
|
||||
if s == "" {
|
||||
return "x"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// normalizeDisplayText turns an identifier-like string into display text:
// underscores become spaces and runs of whitespace collapse to single
// spaces; an input with no words yields "".
func normalizeDisplayText(s string) string {
	words := strings.Fields(strings.ReplaceAll(s, "_", " "))
	return strings.Join(words, " ")
}
|
||||
|
||||
// hashKey builds a stable, human-readable pseudo-hash for s: the sanitized
// text truncated to 40 characters plus the rune-sum modulo 1,000,000.
// NOTE(review): this is deliberately weak — anagrams of the same truncated
// prefix collide. It is tolerable only because seeding also deduplicates by
// full source text. Do NOT change the formula: generated t_keys are
// persisted in mk_translator and must stay reproducible.
func hashKey(s string) string {
	base := sanitizeKey(s)
	if len(base) > 40 {
		base = base[:40] // byte truncation of the sanitized (ASCII-ish) key
	}
	sum := 0
	for _, r := range s {
		sum += int(r) // order-insensitive rune sum over the ORIGINAL text
	}
	return fmt.Sprintf("%s_%d", base, sum%1000000)
}
|
||||
|
||||
// makeTextBasedSeedKey derives a deterministic "txt."-prefixed t_key from a
// source text, so the same (normalized) text always maps to the same key.
func makeTextBasedSeedKey(sourceText string) string {
	return "txt." + hashKey(normalizeSeedTextKey(sourceText))
}
|
||||
|
||||
// normalizeSeedTextKey canonicalizes a source text for dedupe comparisons:
// underscores/whitespace normalized, trimmed, and lowercased. Must match the
// SQL-side expression lower(btrim(source_text_tr)) used in the lookups.
func normalizeSeedTextKey(s string) string {
	return strings.ToLower(strings.TrimSpace(normalizeDisplayText(s)))
}
|
||||
|
||||
func pqArray(values []string) any {
|
||||
if len(values) == 0 {
|
||||
return pq.Array([]string{})
|
||||
}
|
||||
out := make([]string, 0, len(values))
|
||||
for _, v := range values {
|
||||
out = append(out, strings.TrimSpace(v))
|
||||
}
|
||||
sort.Strings(out)
|
||||
return pq.Array(out)
|
||||
}
|
||||
|
||||
func parsePositiveIntEnv(name string, fallback int) int {
|
||||
raw := strings.TrimSpace(os.Getenv(name))
|
||||
if raw == "" {
|
||||
return fallback
|
||||
}
|
||||
n, err := strconv.Atoi(raw)
|
||||
if err != nil || n <= 0 {
|
||||
return fallback
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func normalizeStringList(items []string, max int) []string {
|
||||
if len(items) == 0 {
|
||||
return nil
|
||||
}
|
||||
if max <= 0 {
|
||||
max = len(items)
|
||||
}
|
||||
out := make([]string, 0, len(items))
|
||||
seen := make(map[string]struct{}, len(items))
|
||||
for _, raw := range items {
|
||||
v := strings.TrimSpace(raw)
|
||||
if v == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[v]; ok {
|
||||
continue
|
||||
}
|
||||
seen[v] = struct{}{}
|
||||
out = append(out, v)
|
||||
if len(out) >= max {
|
||||
break
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
69
svc/translation_scheduler.go
Normal file
69
svc/translation_scheduler.go
Normal file
@@ -0,0 +1,69 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bssapp-backend/routes"
|
||||
"database/sql"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// startTranslationSyncScheduler launches a background goroutine that runs the
// nightly translation sync once per day at a configured hour.
// Env knobs: TRANSLATION_SYNC_ENABLED ("0"/"false"/"off" disables),
// TRANSLATION_SYNC_HOUR (0-23, default 4), TRANSLATION_SYNC_LIMIT
// (default 30000). Sync failures are logged and the loop continues with the
// next day's run.
func startTranslationSyncScheduler(pgDB *sql.DB, mssqlDB *sql.DB) {
	enabled := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_ENABLED")))
	if enabled == "0" || enabled == "false" || enabled == "off" {
		log.Println("🛑 Translation sync scheduler disabled")
		return
	}

	hour := 4 // default run hour (server local time)
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_HOUR")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 23 {
			hour = parsed
		}
	}

	limit := 30000 // max seeds processed per run
	if raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LIMIT")); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
			limit = parsed
		}
	}

	go func() {
		for {
			// Sleep until the next hour:00 boundary, then run the sync.
			next := nextRunAt(time.Now(), hour)
			wait := time.Until(next)
			log.Printf("🕓 Translation sync next run at %s (in %s)", next.Format(time.RFC3339), wait.Round(time.Second))
			time.Sleep(wait)

			result, err := routes.PerformTranslationSync(pgDB, mssqlDB, routes.TranslationSyncOptions{
				AutoTranslate: true,
				Languages:     []string{"en", "de", "it", "es", "ru", "ar"},
				Limit:         limit,
				OnlyNew:       true,
			})
			if err != nil {
				// Failure is non-fatal: log and wait for tomorrow's slot.
				log.Printf("❌ Translation sync failed: %v", err)
				continue
			}

			log.Printf(
				"✅ Translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v",
				result.SeedCount,
				result.AffectedCount,
				result.AutoTranslated,
				result.TargetLangs,
			)
		}
	}()
}
|
||||
|
||||
func nextRunAt(now time.Time, hour int) time.Time {
|
||||
next := time.Date(now.Year(), now.Month(), now.Day(), hour, 0, 0, 0, now.Location())
|
||||
if !next.After(now) {
|
||||
next = next.Add(24 * time.Hour)
|
||||
}
|
||||
return next
|
||||
}
|
||||
@@ -148,6 +148,8 @@ createQuasarApp(createApp, quasarUserOptions)
|
||||
|
||||
import(/* webpackMode: "eager" */ 'boot/dayjs'),
|
||||
|
||||
import(/* webpackMode: "eager" */ 'boot/locale'),
|
||||
|
||||
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
|
||||
|
||||
]).then(bootFiles => {
|
||||
|
||||
@@ -15,7 +15,7 @@ export default defineConfig(() => {
|
||||
/* =====================================================
|
||||
BOOT FILES
|
||||
===================================================== */
|
||||
boot: ['dayjs', 'resizeObserverGuard'],
|
||||
boot: ['dayjs', 'locale', 'resizeObserverGuard'],
|
||||
|
||||
/* =====================================================
|
||||
GLOBAL CSS
|
||||
@@ -70,7 +70,10 @@ export default defineConfig(() => {
|
||||
context: ['/api'],
|
||||
target: 'http://localhost:8080',
|
||||
changeOrigin: true,
|
||||
secure: false
|
||||
secure: false,
|
||||
ws: true,
|
||||
timeout: 0,
|
||||
proxyTimeout: 0
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
@@ -27,7 +27,7 @@ var quasar_config_default = defineConfig(() => {
|
||||
/* =====================================================
|
||||
BOOT FILES
|
||||
===================================================== */
|
||||
boot: ["dayjs", "resizeObserverGuard"],
|
||||
boot: ["dayjs", "locale", "resizeObserverGuard"],
|
||||
/* =====================================================
|
||||
GLOBAL CSS
|
||||
===================================================== */
|
||||
@@ -75,7 +75,10 @@ var quasar_config_default = defineConfig(() => {
|
||||
context: ["/api"],
|
||||
target: "http://localhost:8080",
|
||||
changeOrigin: true,
|
||||
secure: false
|
||||
secure: false,
|
||||
ws: true,
|
||||
timeout: 0,
|
||||
proxyTimeout: 0
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -3,12 +3,12 @@ import dayjs from 'dayjs'
|
||||
import customParseFormat from 'dayjs/plugin/customParseFormat.js'
|
||||
import relativeTime from 'dayjs/plugin/relativeTime.js'
|
||||
import localizedFormat from 'dayjs/plugin/localizedFormat.js'
|
||||
import 'dayjs/locale/tr.js'
|
||||
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
|
||||
|
||||
// 🔹 Plugin’leri aktif et
|
||||
dayjs.extend(customParseFormat)
|
||||
dayjs.extend(relativeTime)
|
||||
dayjs.extend(localizedFormat)
|
||||
dayjs.locale('tr')
|
||||
applyDayjsLocale('tr')
|
||||
|
||||
export default dayjs
|
||||
|
||||
7
ui/src/boot/locale.js
Normal file
7
ui/src/boot/locale.js
Normal file
@@ -0,0 +1,7 @@
|
||||
import { boot } from 'quasar/wrappers'
import { useLocaleStore } from 'src/stores/localeStore'

// Boot file: re-applies the store's current locale at startup, so every side
// effect of setLocale runs before the app mounts. Passing the store's own
// current value back into setLocale is intentional (it triggers the side
// effects, not a change of value).
export default boot(() => {
  const localeStore = useLocaleStore()
  localeStore.setLocale(localeStore.locale)
})
|
||||
42
ui/src/composables/useI18n.js
Normal file
42
ui/src/composables/useI18n.js
Normal file
@@ -0,0 +1,42 @@
|
||||
import { computed } from 'vue'
|
||||
|
||||
import { messages } from 'src/i18n/messages'
|
||||
import { DEFAULT_LOCALE } from 'src/i18n/languages'
|
||||
import { useLocaleStore } from 'src/stores/localeStore'
|
||||
|
||||
// Resolve a dotted path ("a.b.c") inside a nested object. Returns undefined
// when any segment along the way is missing or null; empty segments (and an
// empty path) are skipped, so lookup(obj, '') yields obj itself.
function lookup(obj, path) {
  let node = obj
  for (const segment of String(path || '').split('.')) {
    if (!segment) continue
    if (!node || node[segment] == null) return undefined
    node = node[segment]
  }
  return node
}
|
||||
|
||||
// Lightweight i18n composable backed by the Pinia locale store.
// Lookup order: current locale -> fallback chain ('en' then 'tr') ->
// DEFAULT_LOCALE -> the key itself, so missing keys render visibly instead
// of as blanks.
export function useI18n() {
  const localeStore = useLocaleStore()

  const currentLocale = computed(() => localeStore.locale)

  // Fallback chain per the project standard: target -> 'en' -> 'tr'.
  function fallbackLocales(locale) {
    const normalized = String(locale || '').toLowerCase()
    if (normalized === 'tr') return ['tr']
    if (normalized === 'en') return ['en', 'tr']
    return [normalized, 'en', 'tr']
  }

  // Translate a dotted message key for the current locale.
  function t(key) {
    for (const locale of fallbackLocales(currentLocale.value)) {
      const val = lookup(messages[locale] || {}, key)
      if (val != null) return val
    }

    // NOTE(review): with DEFAULT_LOCALE === 'tr' this lookup is already
    // covered by the chain above; kept as a safety net in case the default
    // locale ever changes.
    const byDefault = lookup(messages[DEFAULT_LOCALE] || {}, key)
    if (byDefault != null) return byDefault

    // Last resort: show the key so untranslated strings are easy to spot.
    return key
  }

  return {
    locale: currentLocale,
    t
  }
}
|
||||
30
ui/src/i18n/dayjsLocale.js
Normal file
30
ui/src/i18n/dayjsLocale.js
Normal file
@@ -0,0 +1,30 @@
|
||||
import dayjs from 'dayjs'
|
||||
import 'dayjs/locale/tr.js'
|
||||
import 'dayjs/locale/en.js'
|
||||
import 'dayjs/locale/de.js'
|
||||
import 'dayjs/locale/it.js'
|
||||
import 'dayjs/locale/es.js'
|
||||
import 'dayjs/locale/ru.js'
|
||||
import 'dayjs/locale/ar.js'
|
||||
|
||||
import { normalizeLocale } from './languages.js'
|
||||
|
||||
export const DATE_LOCALE_MAP = {
|
||||
tr: 'tr-TR',
|
||||
en: 'en-US',
|
||||
de: 'de-DE',
|
||||
it: 'it-IT',
|
||||
es: 'es-ES',
|
||||
ru: 'ru-RU',
|
||||
ar: 'ar'
|
||||
}
|
||||
|
||||
export function applyDayjsLocale(locale) {
|
||||
const normalized = normalizeLocale(locale)
|
||||
dayjs.locale(normalized)
|
||||
}
|
||||
|
||||
export function getDateLocale(locale) {
|
||||
const normalized = normalizeLocale(locale)
|
||||
return DATE_LOCALE_MAP[normalized] || DATE_LOCALE_MAP.tr
|
||||
}
|
||||
32
ui/src/i18n/languages.js
Normal file
32
ui/src/i18n/languages.js
Normal file
@@ -0,0 +1,32 @@
|
||||
export const DEFAULT_LOCALE = 'tr'
|
||||
|
||||
export const SUPPORTED_LOCALES = ['tr', 'en', 'de', 'it', 'es', 'ru', 'ar']
|
||||
|
||||
export const UI_LANGUAGE_OPTIONS = [
|
||||
{ label: 'Türkçe', value: 'tr', short: 'TUR', flag: '🇹🇷' },
|
||||
{ label: 'English', value: 'en', short: 'ENG', flag: '🇬🇧' },
|
||||
{ label: 'Deutsch', value: 'de', short: 'DEU', flag: '🇩🇪' },
|
||||
{ label: 'Italiano', value: 'it', short: 'ITA', flag: '🇮🇹' },
|
||||
{ label: 'Español', value: 'es', short: 'ESP', flag: '🇪🇸' },
|
||||
{ label: 'Русский', value: 'ru', short: 'RUS', flag: '🇷🇺' },
|
||||
{ label: 'العربية', value: 'ar', short: 'ARA', flag: '🇸🇦' }
|
||||
]
|
||||
|
||||
export const BACKEND_LANG_MAP = {
|
||||
tr: 'TR',
|
||||
en: 'EN',
|
||||
de: 'DE',
|
||||
it: 'IT',
|
||||
es: 'ES',
|
||||
ru: 'RU',
|
||||
ar: 'AR'
|
||||
}
|
||||
|
||||
export function normalizeLocale(value) {
|
||||
const locale = String(value || '').trim().toLowerCase()
|
||||
return SUPPORTED_LOCALES.includes(locale) ? locale : DEFAULT_LOCALE
|
||||
}
|
||||
|
||||
export function toBackendLangCode(locale) {
|
||||
return BACKEND_LANG_MAP[normalizeLocale(locale)] || BACKEND_LANG_MAP[DEFAULT_LOCALE]
|
||||
}
|
||||
28
ui/src/i18n/messages.js
Normal file
28
ui/src/i18n/messages.js
Normal file
@@ -0,0 +1,28 @@
|
||||
export const messages = {
|
||||
tr: {
|
||||
app: {
|
||||
title: 'Baggi Software System',
|
||||
logoutTitle: 'Çıkış Yap',
|
||||
logoutConfirm: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
|
||||
changePassword: 'Şifre Değiştir',
|
||||
language: 'Dil'
|
||||
},
|
||||
statement: {
|
||||
invalidDateRange: 'Başlangıç tarihi bitiş tarihinden sonra olamaz.',
|
||||
selectFilters: 'Lütfen cari ve tarih aralığını seçiniz.'
|
||||
}
|
||||
},
|
||||
en: {
|
||||
app: {
|
||||
title: 'Baggi Software System',
|
||||
logoutTitle: 'Log Out',
|
||||
logoutConfirm: 'Are you sure you want to end your session?',
|
||||
changePassword: 'Change Password',
|
||||
language: 'Language'
|
||||
},
|
||||
statement: {
|
||||
invalidDateRange: 'Start date cannot be later than end date.',
|
||||
selectFilters: 'Please select account and date range.'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,9 +11,41 @@
|
||||
<q-avatar class="bg-secondary q-mr-sm">
|
||||
<img src="/images/Baggi-tekstilas-logolu.jpg" />
|
||||
</q-avatar>
|
||||
Baggi Software System
|
||||
{{ t('app.title') }}
|
||||
</q-toolbar-title>
|
||||
|
||||
<q-select
|
||||
v-model="selectedLocale"
|
||||
dense
|
||||
outlined
|
||||
emit-value
|
||||
map-options
|
||||
options-dense
|
||||
class="q-mr-sm lang-select"
|
||||
option-value="value"
|
||||
option-label="label"
|
||||
:options="languageOptions"
|
||||
>
|
||||
<template #selected-item="scope">
|
||||
<div class="lang-item">
|
||||
<span class="lang-flag">{{ scope.opt.flag }}</span>
|
||||
<span class="lang-short">{{ scope.opt.short }}</span>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<template #option="scope">
|
||||
<q-item v-bind="scope.itemProps">
|
||||
<q-item-section>
|
||||
<div class="lang-item">
|
||||
<span class="lang-flag">{{ scope.opt.flag }}</span>
|
||||
<span class="lang-short">{{ scope.opt.short }}</span>
|
||||
<span>{{ scope.opt.label }}</span>
|
||||
</div>
|
||||
</q-item-section>
|
||||
</q-item>
|
||||
</template>
|
||||
</q-select>
|
||||
|
||||
<q-btn flat dense round icon="logout" @click="confirmLogout" />
|
||||
|
||||
</q-toolbar>
|
||||
@@ -99,7 +131,7 @@
|
||||
</q-item-section>
|
||||
|
||||
<q-item-section>
|
||||
Şifre Değiştir
|
||||
{{ t('app.changePassword') }}
|
||||
</q-item-section>
|
||||
|
||||
</q-item>
|
||||
@@ -122,7 +154,7 @@
|
||||
<q-toolbar class="bg-secondary">
|
||||
|
||||
<q-toolbar-title>
|
||||
Baggi Software System
|
||||
{{ t('app.title') }}
|
||||
</q-toolbar-title>
|
||||
|
||||
</q-toolbar>
|
||||
@@ -138,6 +170,9 @@ import { Dialog, useQuasar } from 'quasar'
|
||||
|
||||
import { useAuthStore } from 'stores/authStore'
|
||||
import { usePermissionStore } from 'stores/permissionStore'
|
||||
import { useI18n } from 'src/composables/useI18n'
|
||||
import { UI_LANGUAGE_OPTIONS } from 'src/i18n/languages'
|
||||
import { useLocaleStore } from 'src/stores/localeStore'
|
||||
|
||||
|
||||
/* ================= STORES ================= */
|
||||
@@ -147,6 +182,16 @@ const route = useRoute()
|
||||
const $q = useQuasar()
|
||||
const auth = useAuthStore()
|
||||
const perm = usePermissionStore()
|
||||
const localeStore = useLocaleStore()
|
||||
const { t } = useI18n()
|
||||
|
||||
const languageOptions = UI_LANGUAGE_OPTIONS
|
||||
const selectedLocale = computed({
|
||||
get: () => localeStore.locale,
|
||||
set: (value) => {
|
||||
localeStore.setLocale(value)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
/* ================= UI ================= */
|
||||
@@ -159,8 +204,8 @@ function toggleLeftDrawer () {
|
||||
|
||||
function confirmLogout () {
|
||||
Dialog.create({
|
||||
title: 'Çıkış Yap',
|
||||
message: 'Oturumunuzu kapatmak istediğinize emin misiniz?',
|
||||
title: t('app.logoutTitle'),
|
||||
message: t('app.logoutConfirm'),
|
||||
cancel: true,
|
||||
persistent: true
|
||||
}).onOk(() => {
|
||||
@@ -330,6 +375,18 @@ const menuItems = [
|
||||
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Dil Çeviri',
|
||||
icon: 'translate',
|
||||
|
||||
children: [
|
||||
{
|
||||
label: 'Çeviri Tablosu',
|
||||
to: '/app/language/translations',
|
||||
permission: 'language:update'
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
{
|
||||
label: 'Kullanıcı Yönetimi',
|
||||
@@ -387,5 +444,27 @@ const filteredMenu = computed(() => {
|
||||
-webkit-overflow-scrolling: touch;
|
||||
touch-action: pan-y;
|
||||
}
|
||||
|
||||
.lang-select {
|
||||
width: 140px;
|
||||
background: #fff;
|
||||
border-radius: 6px;
|
||||
}
|
||||
|
||||
.lang-item {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.lang-flag {
|
||||
font-size: 15px;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.lang-short {
|
||||
font-weight: 700;
|
||||
letter-spacing: 0.3px;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
@@ -841,7 +841,16 @@ function clearAllCurrencies () {
|
||||
}
|
||||
|
||||
async function reloadData () {
|
||||
const startedAt = Date.now()
|
||||
console.info('[product-pricing][ui] reload:start', {
|
||||
at: new Date(startedAt).toISOString()
|
||||
})
|
||||
await store.fetchRows()
|
||||
console.info('[product-pricing][ui] reload:done', {
|
||||
duration_ms: Date.now() - startedAt,
|
||||
row_count: Array.isArray(store.rows) ? store.rows.length : 0,
|
||||
has_error: Boolean(store.error)
|
||||
})
|
||||
selectedMap.value = {}
|
||||
}
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@
|
||||
<template #append>
|
||||
<q-icon name="event" class="cursor-pointer">
|
||||
<q-popup-proxy cover transition-show="scale" transition-hide="scale">
|
||||
<q-date v-model="dateFrom" mask="YYYY-MM-DD" locale="tr-TR"/>
|
||||
<q-date v-model="dateFrom" mask="YYYY-MM-DD" :locale="dateLocale"/>
|
||||
</q-popup-proxy>
|
||||
</q-icon>
|
||||
</template>
|
||||
@@ -63,7 +63,7 @@
|
||||
<template #append>
|
||||
<q-icon name="event" class="cursor-pointer">
|
||||
<q-popup-proxy cover transition-show="scale" transition-hide="scale">
|
||||
<q-date v-model="dateTo" mask="YYYY-MM-DD" locale="tr-TR" />
|
||||
<q-date v-model="dateTo" mask="YYYY-MM-DD" :locale="dateLocale" />
|
||||
</q-popup-proxy>
|
||||
</q-icon>
|
||||
</template>
|
||||
@@ -277,12 +277,16 @@ import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
|
||||
import dayjs from 'dayjs'
|
||||
import { usePermission } from 'src/composables/usePermission'
|
||||
import { normalizeSearchText } from 'src/utils/searchText'
|
||||
import { useLocaleStore } from 'src/stores/localeStore'
|
||||
import { getDateLocale } from 'src/i18n/dayjsLocale'
|
||||
|
||||
const { canRead, canExport } = usePermission()
|
||||
const canReadFinance = canRead('finance')
|
||||
const canExportFinance = canExport('finance')
|
||||
|
||||
const $q = useQuasar()
|
||||
const localeStore = useLocaleStore()
|
||||
const dateLocale = computed(() => getDateLocale(localeStore.locale))
|
||||
|
||||
const accountStore = useAccountStore()
|
||||
const statementheaderStore = useStatementheaderStore()
|
||||
@@ -363,7 +367,7 @@ async function onFilterClick() {
|
||||
startdate: dateFrom.value,
|
||||
enddate: dateTo.value,
|
||||
accountcode: selectedCari.value,
|
||||
langcode: 'TR',
|
||||
langcode: localeStore.backendLangCode,
|
||||
parislemler: selectedMonType.value
|
||||
})
|
||||
|
||||
@@ -411,7 +415,7 @@ function resetFilters() {
|
||||
/* Format */
|
||||
function formatAmount(n) {
|
||||
if (n == null || isNaN(n)) return '0,00'
|
||||
return new Intl.NumberFormat('tr-TR', {
|
||||
return new Intl.NumberFormat(dateLocale.value, {
|
||||
minimumFractionDigits: 2,
|
||||
maximumFractionDigits: 2
|
||||
}).format(n)
|
||||
@@ -467,7 +471,8 @@ async function handleDownload() {
|
||||
selectedCari.value, // accountCode
|
||||
dateFrom.value, // startDate
|
||||
dateTo.value, // endDate
|
||||
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3'])
|
||||
selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
|
||||
localeStore.backendLangCode
|
||||
)
|
||||
|
||||
console.log("📤 [DEBUG] Store’dan gelen result:", result)
|
||||
@@ -508,7 +513,8 @@ async function CurrheadDownload() {
|
||||
selectedCari.value, // accountCode
|
||||
dateFrom.value, // startDate
|
||||
dateTo.value, // endDate
|
||||
selectedMonType.value // parasal işlem tipi (parislemler)
|
||||
selectedMonType.value, // parasal işlem tipi (parislemler)
|
||||
localeStore.backendLangCode
|
||||
)
|
||||
|
||||
console.log("📤 [DEBUG] CurrheadDownloadresult:", result)
|
||||
|
||||
633
ui/src/pages/TranslationTable.vue
Normal file
633
ui/src/pages/TranslationTable.vue
Normal file
@@ -0,0 +1,633 @@
|
||||
<template>
|
||||
<q-page v-if="canUpdateLanguage" class="q-pa-md">
|
||||
<div class="row q-col-gutter-sm items-end q-mb-md">
|
||||
<div class="col-12 col-md-4">
|
||||
<q-input
|
||||
v-model="filters.q"
|
||||
dense
|
||||
outlined
|
||||
clearable
|
||||
label="Kelime ara"
|
||||
/>
|
||||
</div>
|
||||
<div class="col-auto">
|
||||
<q-btn color="primary" icon="search" label="Getir" @click="loadRows" />
|
||||
</div>
|
||||
<div class="col-auto">
|
||||
<q-btn
|
||||
color="secondary"
|
||||
icon="sync"
|
||||
label="YENİ KELİMELERİ GETİR"
|
||||
:loading="store.saving"
|
||||
@click="syncSources"
|
||||
/>
|
||||
</div>
|
||||
<div class="col-auto">
|
||||
<q-toggle v-model="autoTranslate" dense color="primary" label="Oto Çeviri" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row q-gutter-sm q-mb-sm">
|
||||
<q-btn
|
||||
color="accent"
|
||||
icon="g_translate"
|
||||
label="Seçilenleri Çevir"
|
||||
:disable="selectedKeys.length === 0"
|
||||
:loading="store.saving"
|
||||
@click="translateSelectedRows"
|
||||
/>
|
||||
<q-btn
|
||||
color="secondary"
|
||||
icon="done_all"
|
||||
label="Seçilenleri Onayla"
|
||||
:disable="selectedKeys.length === 0"
|
||||
:loading="store.saving"
|
||||
@click="bulkApproveSelected"
|
||||
/>
|
||||
<q-btn
|
||||
color="primary"
|
||||
icon="save"
|
||||
label="Seçilenleri Toplu Güncelle"
|
||||
:disable="selectedKeys.length === 0"
|
||||
:loading="store.saving"
|
||||
@click="bulkSaveSelected"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<q-table
|
||||
flat
|
||||
bordered
|
||||
dense
|
||||
row-key="t_key"
|
||||
:loading="store.loading || store.saving"
|
||||
:rows="pivotRows"
|
||||
:columns="columns"
|
||||
:rows-per-page-options="[0]"
|
||||
:pagination="{ rowsPerPage: 0 }"
|
||||
>
|
||||
<template #body-cell-actions="props">
|
||||
<q-td :props="props">
|
||||
<q-btn
|
||||
dense
|
||||
color="primary"
|
||||
icon="save"
|
||||
label="Güncelle"
|
||||
:disable="!rowHasChanges(props.row.t_key)"
|
||||
:loading="store.saving"
|
||||
@click="saveRow(props.row.t_key)"
|
||||
/>
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-select="props">
|
||||
<q-td :props="props">
|
||||
<q-checkbox
|
||||
dense
|
||||
:model-value="selectedKeys.includes(props.row.t_key)"
|
||||
@update:model-value="(v) => toggleSelected(props.row.t_key, v)"
|
||||
/>
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-source_text_tr="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_text_tr')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).source_text_tr" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-source_type="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_type')">
|
||||
<q-select
|
||||
v-model="rowDraft(props.row.t_key).source_type"
|
||||
dense
|
||||
outlined
|
||||
emit-value
|
||||
map-options
|
||||
:options="sourceTypeOptions"
|
||||
@update:model-value="() => queueAutoSave(props.row.t_key)"
|
||||
/>
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-en="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'en')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).en" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-de="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'de')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).de" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-es="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'es')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).es" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-it="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'it')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).it" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-ru="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'ru')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).ru" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
|
||||
<template #body-cell-ar="props">
|
||||
<q-td :props="props" :class="cellClass(props.row.t_key, 'ar')">
|
||||
<q-input v-model="rowDraft(props.row.t_key).ar" dense outlined @blur="queueAutoSave(props.row.t_key)" />
|
||||
</q-td>
|
||||
</template>
|
||||
</q-table>
|
||||
</q-page>
|
||||
|
||||
<q-page v-else class="q-pa-md flex flex-center">
|
||||
<div class="text-negative text-subtitle1">
|
||||
Bu module erisim yetkiniz yok.
|
||||
</div>
|
||||
</q-page>
|
||||
</template>
|
||||
|
||||
<script setup>
|
||||
import { computed, onMounted, ref } from 'vue'
|
||||
import { useQuasar } from 'quasar'
|
||||
import { usePermission } from 'src/composables/usePermission'
|
||||
import { useTranslationStore } from 'src/stores/translationStore'
|
||||
|
||||
const $q = useQuasar()
|
||||
const store = useTranslationStore()
|
||||
const { canUpdate } = usePermission()
|
||||
const canUpdateLanguage = canUpdate('language')
|
||||
|
||||
const filters = ref({
|
||||
q: ''
|
||||
})
|
||||
const autoTranslate = ref(false)
|
||||
|
||||
const sourceTypeOptions = [
|
||||
{ label: 'dummy', value: 'dummy' },
|
||||
{ label: 'postgre', value: 'postgre' },
|
||||
{ label: 'mssql', value: 'mssql' }
|
||||
]
|
||||
|
||||
const columns = [
|
||||
{ name: 'actions', label: 'Güncelle', field: 'actions', align: 'left' },
|
||||
{ name: 'select', label: 'Seç', field: 'select', align: 'left' },
|
||||
{ name: 't_key', label: 'Key', field: 't_key', align: 'left', sortable: true },
|
||||
{ name: 'source_text_tr', label: 'Türkçe kaynak', field: 'source_text_tr', align: 'left' },
|
||||
{ name: 'source_type', label: 'Veri tipi', field: 'source_type', align: 'left' },
|
||||
{ name: 'en', label: 'English', field: 'en', align: 'left' },
|
||||
{ name: 'de', label: 'Deutch', field: 'de', align: 'left' },
|
||||
{ name: 'es', label: 'Espanol', field: 'es', align: 'left' },
|
||||
{ name: 'it', label: 'Italiano', field: 'it', align: 'left' },
|
||||
{ name: 'ru', label: 'Русский', field: 'ru', align: 'left' },
|
||||
{ name: 'ar', label: 'العربية', field: 'ar', align: 'left' }
|
||||
]
|
||||
|
||||
const draftByKey = ref({})
|
||||
const originalByKey = ref({})
|
||||
const selectedKeys = ref([])
|
||||
const autoSaveTimers = new Map()
|
||||
|
||||
const pivotRows = computed(() => {
|
||||
const byKey = new Map()
|
||||
for (const row of store.rows) {
|
||||
const key = row.t_key
|
||||
if (!byKey.has(key)) {
|
||||
byKey.set(key, {
|
||||
t_key: key,
|
||||
source_text_tr: '',
|
||||
source_type: 'dummy',
|
||||
en: '',
|
||||
de: '',
|
||||
es: '',
|
||||
it: '',
|
||||
ru: '',
|
||||
ar: '',
|
||||
langs: {}
|
||||
})
|
||||
}
|
||||
|
||||
const target = byKey.get(key)
|
||||
target.langs[row.lang_code] = {
|
||||
id: row.id,
|
||||
status: row.status,
|
||||
is_manual: row.is_manual
|
||||
}
|
||||
|
||||
if (row.lang_code === 'tr') {
|
||||
target.source_text_tr = row.translated_text || row.source_text_tr || ''
|
||||
target.source_type = row.source_type || 'dummy'
|
||||
} else if (row.lang_code === 'en') {
|
||||
target.en = row.translated_text || ''
|
||||
} else if (row.lang_code === 'de') {
|
||||
target.de = row.translated_text || ''
|
||||
} else if (row.lang_code === 'es') {
|
||||
target.es = row.translated_text || ''
|
||||
} else if (row.lang_code === 'it') {
|
||||
target.it = row.translated_text || ''
|
||||
} else if (row.lang_code === 'ru') {
|
||||
target.ru = row.translated_text || ''
|
||||
} else if (row.lang_code === 'ar') {
|
||||
target.ar = row.translated_text || ''
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(byKey.values()).sort((a, b) => a.t_key.localeCompare(b.t_key))
|
||||
})
|
||||
|
||||
function snapshotDrafts () {
|
||||
const draft = {}
|
||||
const original = {}
|
||||
for (const row of pivotRows.value) {
|
||||
draft[row.t_key] = {
|
||||
source_text_tr: row.source_text_tr || '',
|
||||
source_type: row.source_type || 'dummy',
|
||||
en: row.en || '',
|
||||
de: row.de || '',
|
||||
es: row.es || '',
|
||||
it: row.it || '',
|
||||
ru: row.ru || '',
|
||||
ar: row.ar || ''
|
||||
}
|
||||
original[row.t_key] = { ...draft[row.t_key] }
|
||||
}
|
||||
draftByKey.value = draft
|
||||
originalByKey.value = original
|
||||
selectedKeys.value = selectedKeys.value.filter(k => draft[k])
|
||||
}
|
||||
|
||||
function rowDraft (key) {
|
||||
if (!draftByKey.value[key]) {
|
||||
draftByKey.value[key] = {
|
||||
source_text_tr: '',
|
||||
source_type: 'dummy',
|
||||
en: '',
|
||||
de: '',
|
||||
es: '',
|
||||
it: '',
|
||||
ru: '',
|
||||
ar: ''
|
||||
}
|
||||
}
|
||||
return draftByKey.value[key]
|
||||
}
|
||||
|
||||
function buildFilters () {
|
||||
return {
|
||||
q: filters.value.q || undefined
|
||||
}
|
||||
}
|
||||
|
||||
function rowHasChanges (key) {
|
||||
const draft = draftByKey.value[key]
|
||||
const orig = originalByKey.value[key]
|
||||
if (!draft || !orig) return false
|
||||
return (
|
||||
draft.source_text_tr !== orig.source_text_tr ||
|
||||
draft.source_type !== orig.source_type ||
|
||||
draft.en !== orig.en ||
|
||||
draft.de !== orig.de ||
|
||||
draft.es !== orig.es ||
|
||||
draft.it !== orig.it ||
|
||||
draft.ru !== orig.ru ||
|
||||
draft.ar !== orig.ar
|
||||
)
|
||||
}
|
||||
|
||||
function isPending (key, lang) {
|
||||
const row = pivotRows.value.find(r => r.t_key === key)
|
||||
const meta = row?.langs?.[lang]
|
||||
return meta?.status === 'pending'
|
||||
}
|
||||
|
||||
function cellClass (key, field) {
|
||||
const draft = draftByKey.value[key]
|
||||
const orig = originalByKey.value[key]
|
||||
if (!draft || !orig) return ''
|
||||
|
||||
if (draft[field] !== orig[field]) return 'cell-dirty'
|
||||
|
||||
if (field === 'en' && isPending(key, 'en')) return 'cell-new'
|
||||
if (field === 'de' && isPending(key, 'de')) return 'cell-new'
|
||||
if (field === 'es' && isPending(key, 'es')) return 'cell-new'
|
||||
if (field === 'it' && isPending(key, 'it')) return 'cell-new'
|
||||
if (field === 'ru' && isPending(key, 'ru')) return 'cell-new'
|
||||
if (field === 'ar' && isPending(key, 'ar')) return 'cell-new'
|
||||
if (field === 'source_text_tr' && isPending(key, 'tr')) return 'cell-new'
|
||||
return ''
|
||||
}
|
||||
|
||||
function toggleSelected (key, checked) {
|
||||
if (checked) {
|
||||
if (!selectedKeys.value.includes(key)) {
|
||||
selectedKeys.value = [...selectedKeys.value, key]
|
||||
}
|
||||
return
|
||||
}
|
||||
selectedKeys.value = selectedKeys.value.filter(k => k !== key)
|
||||
}
|
||||
|
||||
function queueAutoSave (key) {
|
||||
if (!key) return
|
||||
const existing = autoSaveTimers.get(key)
|
||||
if (existing) {
|
||||
clearTimeout(existing)
|
||||
}
|
||||
const timer = setTimeout(() => {
|
||||
autoSaveTimers.delete(key)
|
||||
if (rowHasChanges(key)) {
|
||||
void saveRow(key)
|
||||
}
|
||||
}, 250)
|
||||
autoSaveTimers.set(key, timer)
|
||||
}
|
||||
|
||||
async function loadRows () {
|
||||
try {
|
||||
await store.fetchRows(buildFilters())
|
||||
snapshotDrafts()
|
||||
} catch (err) {
|
||||
console.error('[translation-sync][ui] loadRows:error', {
|
||||
message: err?.message || 'Ceviri satirlari yuklenemedi'
|
||||
})
|
||||
$q.notify({
|
||||
type: 'negative',
|
||||
message: err?.message || 'Çeviri satırları yüklenemedi'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureMissingLangRows (key, draft, langs) {
|
||||
const missingLangs = []
|
||||
if (!langs.en && String(draft.en || '').trim() !== '') missingLangs.push('en')
|
||||
if (!langs.de && String(draft.de || '').trim() !== '') missingLangs.push('de')
|
||||
if (!langs.es && String(draft.es || '').trim() !== '') missingLangs.push('es')
|
||||
if (!langs.it && String(draft.it || '').trim() !== '') missingLangs.push('it')
|
||||
if (!langs.ru && String(draft.ru || '').trim() !== '') missingLangs.push('ru')
|
||||
if (!langs.ar && String(draft.ar || '').trim() !== '') missingLangs.push('ar')
|
||||
if (missingLangs.length === 0) return false
|
||||
|
||||
await store.upsertMissing([
|
||||
{
|
||||
t_key: key,
|
||||
source_text_tr: draft.source_text_tr || key
|
||||
}
|
||||
], missingLangs)
|
||||
return true
|
||||
}
|
||||
|
||||
function buildRowUpdates (row, draft, original, approveStatus = 'approved') {
|
||||
const items = []
|
||||
const langs = row.langs || {}
|
||||
const sourceTypeChanged = draft.source_type !== original.source_type
|
||||
|
||||
if (langs.tr?.id && (draft.source_text_tr !== original.source_text_tr || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.tr.id,
|
||||
source_text_tr: draft.source_text_tr,
|
||||
translated_text: draft.source_text_tr,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
if (langs.en?.id && (draft.en !== original.en || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.en.id,
|
||||
translated_text: draft.en,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
if (langs.de?.id && (draft.de !== original.de || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.de.id,
|
||||
translated_text: draft.de,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
if (langs.es?.id && (draft.es !== original.es || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.es.id,
|
||||
translated_text: draft.es,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
if (langs.it?.id && (draft.it !== original.it || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.it.id,
|
||||
translated_text: draft.it,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
if (langs.ru?.id && (draft.ru !== original.ru || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.ru.id,
|
||||
translated_text: draft.ru,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
if (langs.ar?.id && (draft.ar !== original.ar || sourceTypeChanged)) {
|
||||
items.push({
|
||||
id: langs.ar.id,
|
||||
translated_text: draft.ar,
|
||||
source_type: draft.source_type,
|
||||
status: approveStatus,
|
||||
is_manual: true
|
||||
})
|
||||
}
|
||||
return items
|
||||
}
|
||||
|
||||
async function saveRow (key) {
|
||||
const row = pivotRows.value.find(r => r.t_key === key)
|
||||
const draft = draftByKey.value[key]
|
||||
const original = originalByKey.value[key]
|
||||
if (!row || !draft || !original || !rowHasChanges(key)) return
|
||||
|
||||
try {
|
||||
const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
|
||||
if (insertedMissing) {
|
||||
await loadRows()
|
||||
}
|
||||
|
||||
const refreshed = pivotRows.value.find(r => r.t_key === key)
|
||||
if (!refreshed) return
|
||||
const refreshDraft = draftByKey.value[key]
|
||||
const refreshOriginal = originalByKey.value[key]
|
||||
const items = buildRowUpdates(refreshed, refreshDraft, refreshOriginal)
|
||||
if (items.length > 0) {
|
||||
await store.bulkUpdate(items)
|
||||
}
|
||||
|
||||
await loadRows()
|
||||
$q.notify({ type: 'positive', message: 'Satır güncellendi' })
|
||||
} catch (err) {
|
||||
$q.notify({ type: 'negative', message: err?.message || 'Güncelleme hatası' })
|
||||
}
|
||||
}
|
||||
|
||||
async function bulkApproveSelected () {
|
||||
try {
|
||||
const ids = []
|
||||
for (const key of selectedKeys.value) {
|
||||
const row = pivotRows.value.find(r => r.t_key === key)
|
||||
if (!row) continue
|
||||
for (const lang of ['tr', 'en', 'de', 'es', 'it', 'ru', 'ar']) {
|
||||
const meta = row.langs?.[lang]
|
||||
if (meta?.id && meta?.status === 'pending') {
|
||||
ids.push(meta.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
const unique = Array.from(new Set(ids))
|
||||
if (unique.length === 0) {
|
||||
$q.notify({ type: 'warning', message: 'Onaylanacak pending kayıt bulunamadı' })
|
||||
return
|
||||
}
|
||||
await store.bulkApprove(unique)
|
||||
await loadRows()
|
||||
$q.notify({ type: 'positive', message: `${unique.length} kayıt onaylandı` })
|
||||
} catch (err) {
|
||||
$q.notify({ type: 'negative', message: err?.message || 'Toplu onay hatası' })
|
||||
}
|
||||
}
|
||||
|
||||
async function translateSelectedRows () {
|
||||
try {
|
||||
const keys = Array.from(new Set(selectedKeys.value.filter(Boolean)))
|
||||
if (keys.length === 0) {
|
||||
$q.notify({ type: 'warning', message: 'Çevrilecek seçim bulunamadı' })
|
||||
return
|
||||
}
|
||||
|
||||
const response = await store.translateSelected({
|
||||
t_keys: keys,
|
||||
languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
|
||||
limit: Math.min(50000, keys.length * 6)
|
||||
})
|
||||
|
||||
const translated = Number(response?.translated_count || 0)
|
||||
const traceId = response?.trace_id || null
|
||||
|
||||
await loadRows()
|
||||
$q.notify({
|
||||
type: 'positive',
|
||||
message: `Seçilenler çevrildi: ${translated}${traceId ? ` | Trace: ${traceId}` : ''}`
|
||||
})
|
||||
} catch (err) {
|
||||
$q.notify({ type: 'negative', message: err?.message || 'Seçili çeviri işlemi başarısız' })
|
||||
}
|
||||
}
|
||||
|
||||
async function bulkSaveSelected () {
|
||||
try {
|
||||
const items = []
|
||||
for (const key of selectedKeys.value) {
|
||||
const row = pivotRows.value.find(r => r.t_key === key)
|
||||
const draft = draftByKey.value[key]
|
||||
const original = originalByKey.value[key]
|
||||
if (!row || !draft || !original) continue
|
||||
if (!rowHasChanges(key)) continue
|
||||
|
||||
const insertedMissing = await ensureMissingLangRows(key, draft, row.langs || {})
|
||||
if (insertedMissing) {
|
||||
await loadRows()
|
||||
}
|
||||
|
||||
const refreshed = pivotRows.value.find(r => r.t_key === key)
|
||||
if (!refreshed) continue
|
||||
const refreshDraft = draftByKey.value[key]
|
||||
const refreshOriginal = originalByKey.value[key]
|
||||
items.push(...buildRowUpdates(refreshed, refreshDraft, refreshOriginal))
|
||||
}
|
||||
|
||||
if (items.length === 0) {
|
||||
$q.notify({ type: 'warning', message: 'Toplu güncellenecek değişiklik yok' })
|
||||
return
|
||||
}
|
||||
|
||||
await store.bulkUpdate(items)
|
||||
await loadRows()
|
||||
$q.notify({ type: 'positive', message: `${items.length} kayıt toplu güncellendi` })
|
||||
} catch (err) {
|
||||
$q.notify({ type: 'negative', message: err?.message || 'Toplu güncelleme hatası' })
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Trigger a backend scan of translation sources, then reload the table.
 * Logs click/response/reload timing (with trace id when available) to the
 * console and reports the outcome to the user via Quasar notifications.
 */
async function syncSources () {
  const clickedAt = Date.now()
  const rowsBefore = pivotRows.value.length

  console.info('[translation-sync][ui] button:click', {
    at: new Date(clickedAt).toISOString(),
    auto_translate: autoTranslate.value,
    only_new: true,
    before_row_count: rowsBefore
  })

  try {
    // Kick off the scan; the target language set is currently fixed here.
    const response = await store.syncSources({
      auto_translate: autoTranslate.value,
      languages: ['en', 'de', 'it', 'es', 'ru', 'ar'],
      limit: 1000,
      only_new: true
    })

    // The backend may wrap its payload in `result`; accept either shape.
    const result = response?.result || response || {}
    const traceId = result?.trace_id || response?.trace_id || null

    console.info('[translation-sync][ui] sync:response', {
      trace_id: traceId,
      seed_count: result.seed_count || 0,
      affected_count: result.affected_count || 0,
      auto_translated: result.auto_translated || 0,
      duration_ms: result.duration_ms || null
    })

    // Refresh the grid, then log how the row count changed.
    await loadRows()
    const rowsAfter = pivotRows.value.length
    console.info('[translation-sync][ui] chain:reload-complete', {
      trace_id: traceId,
      duration_ms: Date.now() - clickedAt,
      before_row_count: rowsBefore,
      after_row_count: rowsAfter,
      delta_row_count: rowsAfter - rowsBefore
    })

    $q.notify({
      type: 'positive',
      message: `Tarama tamamlandı. Seed: ${result.seed_count || 0}, Oto çeviri: ${result.auto_translated || 0}`
    })
  } catch (err) {
    $q.notify({
      type: 'negative',
      message: err?.message || 'Kaynak tarama hatası'
    })
  }
}
|
||||
|
||||
// Load the translation rows as soon as the component is mounted.
onMounted(() => {
  loadRows()
})
|
||||
</script>
|
||||
|
||||
<style scoped>
/* Cell with unsaved (dirty) edits — light amber highlight. */
.cell-dirty {
  background: #fff3cd;
}

/* Cell presumably marking a newly created entry — light green highlight. */
.cell-new {
  background: #d9f7e8;
}
</style>
|
||||
@@ -42,7 +42,7 @@
|
||||
<q-date
|
||||
v-model="dateFrom"
|
||||
mask="YYYY-MM-DD"
|
||||
locale="tr-TR"
|
||||
:locale="dateLocale"
|
||||
:options="isValidFromDate"
|
||||
/>
|
||||
</q-popup-proxy>
|
||||
@@ -65,7 +65,7 @@
|
||||
<q-date
|
||||
v-model="dateTo"
|
||||
mask="YYYY-MM-DD"
|
||||
locale="tr-TR"
|
||||
:locale="dateLocale"
|
||||
:options="isValidToDate"
|
||||
/>
|
||||
</q-popup-proxy>
|
||||
@@ -281,12 +281,18 @@ import { useStatementdetailStore } from 'src/stores/statementdetailStore'
|
||||
import { useDownloadstpdfStore } from 'src/stores/downloadstpdfStore'
|
||||
import dayjs from 'dayjs'
|
||||
import { usePermission } from 'src/composables/usePermission'
|
||||
import { useLocaleStore } from 'src/stores/localeStore'
|
||||
import { getDateLocale } from 'src/i18n/dayjsLocale'
|
||||
import { useI18n } from 'src/composables/useI18n'
|
||||
|
||||
const { canRead, canExport } = usePermission()
|
||||
const canReadFinance = canRead('finance')
|
||||
const canExportFinance = canExport('finance')
|
||||
|
||||
const $q = useQuasar()
|
||||
const localeStore = useLocaleStore()
|
||||
const { t } = useI18n()
|
||||
const dateLocale = computed(() => getDateLocale(localeStore.locale))
|
||||
|
||||
const accountStore = useAccountStore()
|
||||
const statementheaderStore = useStatementheaderStore()
|
||||
@@ -360,7 +366,7 @@ function hasInvalidDateRange () {
|
||||
function notifyInvalidDateRange () {
|
||||
$q.notify({
|
||||
type: 'warning',
|
||||
message: '⚠️ Başlangıç tarihi bitiş tarihinden sonra olamaz.',
|
||||
message: t('statement.invalidDateRange'),
|
||||
position: 'top-right'
|
||||
})
|
||||
}
|
||||
@@ -402,7 +408,7 @@ async function onFilterClick() {
|
||||
if (!selectedCari.value || !dateFrom.value || !dateTo.value) {
|
||||
$q.notify({
|
||||
type: 'warning',
|
||||
message: '⚠️ Lütfen cari ve tarih aralığını seçiniz.',
|
||||
message: t('statement.selectFilters'),
|
||||
position: 'top-right'
|
||||
})
|
||||
return
|
||||
@@ -417,7 +423,7 @@ async function onFilterClick() {
|
||||
startdate: dateFrom.value,
|
||||
enddate: dateTo.value,
|
||||
accountcode: selectedCari.value,
|
||||
langcode: 'TR',
|
||||
langcode: localeStore.backendLangCode,
|
||||
parislemler: selectedMonType.value,
|
||||
excludeopening: excludeOpening.value
|
||||
})
|
||||
@@ -483,7 +489,7 @@ function toggleFiltersCollapsed () {
|
||||
function normalizeText (str) {
|
||||
return (str || '')
|
||||
.toString()
|
||||
.toLocaleLowerCase('tr-TR') // Türkçe uyumlu
|
||||
.toLocaleLowerCase(dateLocale.value)
|
||||
.normalize('NFD') // aksan temizleme
|
||||
.replace(/[\u0300-\u036f]/g, '')
|
||||
.trim()
|
||||
@@ -503,7 +509,7 @@ function resetFilters() {
|
||||
/* Format */
|
||||
function formatAmount(n) {
|
||||
if (n == null || isNaN(n)) return '0,00'
|
||||
return new Intl.NumberFormat('tr-TR', {
|
||||
return new Intl.NumberFormat(dateLocale.value, {
|
||||
minimumFractionDigits: 2,
|
||||
maximumFractionDigits: 2
|
||||
}).format(n)
|
||||
@@ -562,7 +568,8 @@ async function handleDownload() {
|
||||
selectedCari.value, // accountCode
|
||||
dateFrom.value, // startDate
|
||||
dateTo.value, // endDate
|
||||
selectedMonType.value // <-- eklendi (['1','2'] veya ['1','3'])
|
||||
selectedMonType.value, // <-- eklendi (['1','2'] veya ['1','3'])
|
||||
localeStore.backendLangCode
|
||||
)
|
||||
|
||||
console.log("[DEBUG] Store’dan gelen result:", result)
|
||||
@@ -608,7 +615,8 @@ async function CurrheadDownload() {
|
||||
selectedCari.value, // accountCode
|
||||
dateFrom.value, // startDate
|
||||
dateTo.value, // endDate
|
||||
selectedMonType.value // parasal işlem tipi (parislemler)
|
||||
selectedMonType.value, // parasal işlem tipi (parislemler)
|
||||
localeStore.backendLangCode
|
||||
)
|
||||
|
||||
console.log("[DEBUG] CurrheadDownloadresult:", result)
|
||||
|
||||
@@ -228,6 +228,12 @@ const routes = [
|
||||
component: () => import('../pages/MarketMailMapping.vue'),
|
||||
meta: { permission: 'system:update' }
|
||||
},
|
||||
{
|
||||
path: 'language/translations',
|
||||
name: 'translation-table',
|
||||
component: () => import('pages/TranslationTable.vue'),
|
||||
meta: { permission: 'language:update' }
|
||||
},
|
||||
|
||||
|
||||
/* ================= ORDERS ================= */
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
import axios from 'axios'
|
||||
import qs from 'qs'
|
||||
import { useAuthStore } from 'stores/authStore'
|
||||
import { DEFAULT_LOCALE, normalizeLocale } from 'src/i18n/languages'
|
||||
|
||||
const rawBaseUrl =
|
||||
(typeof process !== 'undefined' && process.env?.VITE_API_BASE_URL) || '/api'
|
||||
|
||||
export const API_BASE_URL = String(rawBaseUrl).trim().replace(/\/+$/, '')
|
||||
const AUTH_REFRESH_PATH = '/auth/refresh'
|
||||
const LOCALE_STORAGE_KEY = 'bss.locale'
|
||||
|
||||
const api = axios.create({
|
||||
baseURL: API_BASE_URL,
|
||||
@@ -74,6 +76,11 @@ function redirectToLogin() {
|
||||
window.location.hash = '/login'
|
||||
}
|
||||
|
||||
// Locale to attach to each outgoing API request (Accept-Language).
// Outside a browser (SSR/build) there is no localStorage, so fall back
// to DEFAULT_LOCALE; otherwise normalize whatever was persisted.
function getRequestLocale() {
  if (typeof window === 'undefined') {
    return DEFAULT_LOCALE
  }
  const stored = window.localStorage.getItem(LOCALE_STORAGE_KEY)
  return normalizeLocale(stored)
}
|
||||
|
||||
api.interceptors.request.use((config) => {
|
||||
const auth = useAuthStore()
|
||||
const url = config.url || ''
|
||||
@@ -82,6 +89,8 @@ api.interceptors.request.use((config) => {
|
||||
config.headers ||= {}
|
||||
config.headers.Authorization = `Bearer ${auth.token}`
|
||||
}
|
||||
config.headers ||= {}
|
||||
config.headers['Accept-Language'] = getRequestLocale()
|
||||
|
||||
return config
|
||||
})
|
||||
|
||||
@@ -62,14 +62,36 @@ export const useProductPricingStore = defineStore('product-pricing-store', {
|
||||
async fetchRows () {
|
||||
this.loading = true
|
||||
this.error = ''
|
||||
const startedAt = Date.now()
|
||||
console.info('[product-pricing][frontend] request:start', {
|
||||
at: new Date(startedAt).toISOString(),
|
||||
timeout_ms: 600000
|
||||
})
|
||||
try {
|
||||
const res = await api.get('/pricing/products')
|
||||
const res = await api.request({
|
||||
method: 'GET',
|
||||
url: '/pricing/products',
|
||||
timeout: 600000
|
||||
})
|
||||
const traceId = res?.headers?.['x-trace-id'] || null
|
||||
const data = Array.isArray(res?.data) ? res.data : []
|
||||
this.rows = data.map((x, i) => mapRow(x, i))
|
||||
console.info('[product-pricing][frontend] request:success', {
|
||||
trace_id: traceId,
|
||||
duration_ms: Date.now() - startedAt,
|
||||
row_count: this.rows.length
|
||||
})
|
||||
} catch (err) {
|
||||
this.rows = []
|
||||
const msg = err?.response?.data || err?.message || 'Urun fiyatlandirma listesi alinamadi'
|
||||
this.error = toText(msg)
|
||||
console.error('[product-pricing][frontend] request:error', {
|
||||
trace_id: err?.response?.headers?.['x-trace-id'] || null,
|
||||
duration_ms: Date.now() - startedAt,
|
||||
timeout_ms: err?.config?.timeout ?? null,
|
||||
status: err?.response?.status || null,
|
||||
message: this.error
|
||||
})
|
||||
} finally {
|
||||
this.loading = false
|
||||
}
|
||||
|
||||
@@ -9,14 +9,16 @@ export const useDownloadstHeadStore = defineStore('downloadstHead', {
|
||||
accountCode,
|
||||
startDate,
|
||||
endDate,
|
||||
parislemler
|
||||
parislemler,
|
||||
langcode = 'TR'
|
||||
) {
|
||||
try {
|
||||
// ✅ Params (axios paramsSerializer array=repeat destekliyor)
|
||||
const params = {
|
||||
accountcode: accountCode,
|
||||
startdate: startDate,
|
||||
enddate: endDate
|
||||
enddate: endDate,
|
||||
langcode: langcode || 'TR'
|
||||
}
|
||||
|
||||
if (Array.isArray(parislemler) && parislemler.length > 0) {
|
||||
|
||||
@@ -7,13 +7,14 @@ export const useDownloadstpdfStore = defineStore('downloadstpdf', {
|
||||
/* ==========================================================
|
||||
📄 PDF İNDİR / AÇ
|
||||
========================================================== */
|
||||
async downloadPDF(accountCode, startDate, endDate, parislemler = []) {
|
||||
async downloadPDF(accountCode, startDate, endDate, parislemler = [], langcode = 'TR') {
|
||||
try {
|
||||
// 🔹 Query params
|
||||
const params = {
|
||||
accountcode: accountCode,
|
||||
startdate: startDate,
|
||||
enddate: endDate
|
||||
enddate: endDate,
|
||||
langcode: langcode || 'TR'
|
||||
}
|
||||
|
||||
if (Array.isArray(parislemler) && parislemler.length > 0) {
|
||||
|
||||
35
ui/src/stores/localeStore.js
Normal file
35
ui/src/stores/localeStore.js
Normal file
@@ -0,0 +1,35 @@
|
||||
import { defineStore } from 'pinia'
|
||||
import { computed, ref } from 'vue'
|
||||
|
||||
import { applyDayjsLocale } from 'src/i18n/dayjsLocale'
|
||||
import { DEFAULT_LOCALE, normalizeLocale, toBackendLangCode } from 'src/i18n/languages'
|
||||
|
||||
const STORAGE_KEY = 'bss.locale'
|
||||
|
||||
// Initial locale for the store: the persisted localStorage value,
// normalized; DEFAULT_LOCALE when not running in a browser.
function readInitialLocale() {
  const inBrowser = typeof window !== 'undefined'
  return inBrowser
    ? normalizeLocale(window.localStorage.getItem(STORAGE_KEY))
    : DEFAULT_LOCALE
}
|
||||
|
||||
/**
 * Application locale store (Pinia setup style).
 * Holds the active UI locale, keeps dayjs in sync with it, persists it
 * to localStorage, and derives the backend language code from it.
 */
export const useLocaleStore = defineStore('locale', () => {
  // Active UI locale, seeded from localStorage (see readInitialLocale).
  const locale = ref(readInitialLocale())

  // Apply the restored locale to dayjs once, on store creation.
  applyDayjsLocale(locale.value)

  // Language-code variant expected by backend APIs (via toBackendLangCode).
  const backendLangCode = computed(() => toBackendLangCode(locale.value))

  /**
   * Switch the application locale: normalize it, update dayjs, and
   * persist it so the next visit starts in the same language.
   */
  function setLocale(nextLocale) {
    const next = normalizeLocale(nextLocale)
    locale.value = next
    applyDayjsLocale(next)
    if (typeof window !== 'undefined') {
      window.localStorage.setItem(STORAGE_KEY, next)
    }
  }

  return { locale, backendLangCode, setLocale }
})
|
||||
128
ui/src/stores/translationStore.js
Normal file
128
ui/src/stores/translationStore.js
Normal file
@@ -0,0 +1,128 @@
|
||||
import { defineStore } from 'pinia'
|
||||
import api from 'src/services/api'
|
||||
|
||||
// Default target languages for translation actions; single source of
// truth (previously duplicated inline per action).
const DEFAULT_TARGET_LANGUAGES = ['en', 'de', 'it', 'es', 'ru', 'ar']

/**
 * Translation management store.
 * Wraps the /language/translations backend endpoints: listing, single and
 * bulk updates, source scanning and on-demand machine translation.
 */
export const useTranslationStore = defineStore('translation', {
  state: () => ({
    loading: false, // true while fetchRows is in flight
    saving: false,  // true while any mutating action is in flight
    rows: [],       // rows returned for the current filter set
    count: 0        // total row count reported by the backend
  }),

  actions: {
    /**
     * Internal helper: run a mutating request with the shared `saving`
     * flag toggled around it. Errors propagate to the caller unchanged.
     * Extracted to remove the try/finally boilerplate repeated in every
     * mutating action below.
     */
    async _withSaving (fn) {
      this.saving = true
      try {
        return await fn()
      } finally {
        this.saving = false
      }
    },

    /**
     * Load translation rows (and total count) for the given filters.
     * Errors propagate; `loading` is always cleared.
     */
    async fetchRows (filters = {}) {
      this.loading = true
      try {
        const res = await api.get('/language/translations', { params: filters })
        const payload = res?.data || {}
        this.rows = Array.isArray(payload.rows) ? payload.rows : []
        // Fall back to the local length when the backend omits `count`.
        this.count = Number(payload.count) || this.rows.length
      } finally {
        this.loading = false
      }
    },

    /** Update a single translation row; returns the response body or null. */
    async updateRow (id, payload) {
      return this._withSaving(async () => {
        const res = await api.put(`/language/translations/${id}`, payload)
        return res?.data || null
      })
    },

    /**
     * Create missing language rows for the given source items.
     * `languages` defaults to a fresh copy of DEFAULT_TARGET_LANGUAGES.
     */
    async upsertMissing (items, languages = [...DEFAULT_TARGET_LANGUAGES]) {
      return this._withSaving(async () => {
        const res = await api.post('/language/translations/upsert-missing', {
          items: Array.isArray(items) ? items : [],
          languages: Array.isArray(languages) ? languages : []
        })
        return res?.data || null
      })
    },

    /**
     * Scan translation sources on the backend (long-running: no client
     * timeout). Logs start/success/error with duration and trace id.
     * Rethrows on failure so callers can notify the user.
     */
    async syncSources (payload = {}) {
      return this._withSaving(async () => {
        const startedAt = Date.now()
        console.info('[translation-sync][frontend] request:start', {
          at: new Date(startedAt).toISOString(),
          payload
        })
        try {
          const res = await api.post('/language/translations/sync-sources', payload, { timeout: 0 })
          const data = res?.data || null
          const traceId = data?.trace_id || data?.result?.trace_id || res?.headers?.['x-trace-id'] || null
          console.info('[translation-sync][frontend] request:success', {
            trace_id: traceId,
            duration_ms: Date.now() - startedAt,
            result: data?.result || null
          })
          return data
        } catch (err) {
          console.error('[translation-sync][frontend] request:error', {
            duration_ms: Date.now() - startedAt,
            message: err?.message || 'sync-sources failed'
          })
          throw err
        }
      })
    },

    /**
     * Machine-translate the selected rows (long-running: no client
     * timeout). Logs timing/trace info; rethrows on failure.
     */
    async translateSelected (payload = {}) {
      return this._withSaving(async () => {
        const startedAt = Date.now()
        console.info('[translation-selected][frontend] request:start', {
          at: new Date(startedAt).toISOString(),
          payload
        })
        try {
          const res = await api.post('/language/translations/translate-selected', payload, { timeout: 0 })
          const data = res?.data || null
          const traceId = data?.trace_id || res?.headers?.['x-trace-id'] || null
          console.info('[translation-selected][frontend] request:success', {
            trace_id: traceId,
            duration_ms: Date.now() - startedAt,
            translated_count: data?.translated_count || 0
          })
          return data
        } catch (err) {
          console.error('[translation-selected][frontend] request:error', {
            duration_ms: Date.now() - startedAt,
            message: err?.message || 'translate-selected failed'
          })
          throw err
        }
      })
    },

    /** Approve the rows with the given ids; returns the response body. */
    async bulkApprove (ids = []) {
      return this._withSaving(async () => {
        const res = await api.post('/language/translations/bulk-approve', {
          ids: Array.isArray(ids) ? ids : []
        })
        return res?.data || null
      })
    },

    /** Apply a batch of row updates; returns the response body or null. */
    async bulkUpdate (items = []) {
      return this._withSaving(async () => {
        const res = await api.post('/language/translations/bulk-update', {
          items: Array.isArray(items) ? items : []
        })
        return res?.data || null
      })
    }
  }
})
|
||||
Reference in New Issue
Block a user