Compare commits

..

11 Commits

Author SHA1 Message Date
M_Kececi
e6ae925f1c Merge remote-tracking branch 'origin/master' 2026-04-20 10:32:07 +03:00
M_Kececi
18c9a99a57 Merge remote-tracking branch 'origin/master' 2026-04-20 10:19:02 +03:00
M_Kececi
8462df878f Merge remote-tracking branch 'origin/master' 2026-04-20 10:04:45 +03:00
M_Kececi
7ef12df93a Merge remote-tracking branch 'origin/master' 2026-04-20 09:42:34 +03:00
M_Kececi
a1f5c653c6 Merge remote-tracking branch 'origin/master' 2026-04-20 08:50:41 +03:00
M_Kececi
c6bdf83f05 Merge remote-tracking branch 'origin/master' 2026-04-17 12:16:50 +03:00
M_Kececi
f9728b8a4c Merge remote-tracking branch 'origin/master' 2026-04-16 17:46:50 +03:00
M_Kececi
307282928c Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:59 +03:00
M_Kececi
29909f3609 Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:55 +03:00
M_Kececi
bb856cb082 Merge remote-tracking branch 'origin/master' 2026-04-16 16:25:45 +03:00
M_Kececi
b065e7192d Merge remote-tracking branch 'origin/master' 2026-04-16 16:00:24 +03:00
13 changed files with 1399 additions and 286 deletions

View File

@@ -847,6 +847,11 @@ func main() {
auditlog.Init(pgDB, 1000) auditlog.Init(pgDB, 1000)
log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)") log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)")
// -------------------------------------------------------
// 🚀 TRANSLATION QUERY PERFORMANCE INDEXES
// -------------------------------------------------------
routes.EnsureTranslationPerfIndexes(pgDB)
// ------------------------------------------------------- // -------------------------------------------------------
// ✉️ MAILER INIT // ✉️ MAILER INIT
// ------------------------------------------------------- // -------------------------------------------------------

View File

@@ -5,14 +5,21 @@ import (
"bssapp-backend/models" "bssapp-backend/models"
"context" "context"
"database/sql" "database/sql"
"fmt"
"strconv"
"strings" "strings"
"time" "time"
) )
func GetProductPricingList(ctx context.Context) ([]models.ProductPricing, error) { func GetProductPricingList(ctx context.Context, limit int, afterProductCode string) ([]models.ProductPricing, error) {
const query = ` if limit <= 0 {
WITH base_products AS ( limit = 500
SELECT }
afterProductCode = strings.TrimSpace(afterProductCode)
// Stage 1: fetch only paged products first (fast path).
productQuery := `
SELECT TOP (` + strconv.Itoa(limit) + `)
LTRIM(RTRIM(ProductCode)) AS ProductCode, LTRIM(RTRIM(ProductCode)) AS ProductCode,
COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan, COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan,
COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori, COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori,
@@ -26,144 +33,8 @@ func GetProductPricingList(ctx context.Context) ([]models.ProductPricing, error)
WHERE ProductAtt42 IN ('SERI', 'AKSESUAR') WHERE ProductAtt42 IN ('SERI', 'AKSESUAR')
AND IsBlocked = 0 AND IsBlocked = 0
AND LEN(LTRIM(RTRIM(ProductCode))) = 13 AND LEN(LTRIM(RTRIM(ProductCode))) = 13
), AND (@p1 = '' OR LTRIM(RTRIM(ProductCode)) > @p1)
latest_base_price AS ( ORDER BY LTRIM(RTRIM(ProductCode));
SELECT
LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
ROW_NUMBER() OVER (
PARTITION BY LTRIM(RTRIM(b.ItemCode))
ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
) AS rn
FROM prItemBasePrice b
WHERE b.ItemTypeCode = 1
AND b.BasePriceCode = 1
AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(b.ItemCode))
)
),
stock_entry_dates AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
FROM trStock s WITH(NOLOCK)
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND s.In_Qty1 > 0
AND LTRIM(RTRIM(s.WarehouseCode)) IN (
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(s.ItemCode))
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
FROM trStock s WITH(NOLOCK)
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(s.ItemCode))
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
SELECT
LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
SUM(p.Qty1) AS PickingQty1
FROM PickingStates p
WHERE p.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(p.ItemCode))
)
GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
SELECT
LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
SUM(r.Qty1) AS ReserveQty1
FROM ReserveStates r
WHERE r.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(r.ItemCode))
)
GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
SELECT
LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
SUM(d.Qty1) AS DispOrderQty1
FROM DispOrderStates d
WHERE d.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM base_products bp
WHERE bp.ProductCode = LTRIM(RTRIM(d.ItemCode))
)
GROUP BY LTRIM(RTRIM(d.ItemCode))
),
stock_totals AS (
SELECT
bp.ProductCode AS ItemCode,
CAST(ROUND(
ISNULL(sb.InventoryQty1, 0)
- ISNULL(pb.PickingQty1, 0)
- ISNULL(rb.ReserveQty1, 0)
- ISNULL(db.DispOrderQty1, 0)
, 2) AS DECIMAL(18, 2)) AS StockQty
FROM base_products bp
LEFT JOIN stock_base sb
ON sb.ItemCode = bp.ProductCode
LEFT JOIN pick_base pb
ON pb.ItemCode = bp.ProductCode
LEFT JOIN reserve_base rb
ON rb.ItemCode = bp.ProductCode
LEFT JOIN disp_base db
ON db.ItemCode = bp.ProductCode
)
SELECT
bp.ProductCode AS ProductCode,
COALESCE(lp.CostPrice, 0) AS CostPrice,
COALESCE(st.StockQty, 0) AS StockQty,
COALESCE(se.StockEntryDate, '') AS StockEntryDate,
COALESCE(lp.LastPricingDate, '') AS LastPricingDate,
bp.AskiliYan,
bp.Kategori,
bp.UrunIlkGrubu,
bp.UrunAnaGrubu,
bp.UrunAltGrubu,
bp.Icerik,
bp.Karisim,
bp.Marka
FROM base_products bp
LEFT JOIN latest_base_price lp
ON lp.ItemCode = bp.ProductCode
AND lp.rn = 1
LEFT JOIN stock_entry_dates se
ON se.ItemCode = bp.ProductCode
LEFT JOIN stock_totals st
ON st.ItemCode = bp.ProductCode
ORDER BY bp.ProductCode;
` `
var ( var (
@@ -172,7 +43,7 @@ func GetProductPricingList(ctx context.Context) ([]models.ProductPricing, error)
) )
for attempt := 1; attempt <= 3; attempt++ { for attempt := 1; attempt <= 3; attempt++ {
var err error var err error
rows, err = db.MssqlDB.QueryContext(ctx, query) rows, err = db.MssqlDB.QueryContext(ctx, productQuery, afterProductCode)
if err == nil { if err == nil {
rowsErr = nil rowsErr = nil
break break
@@ -193,15 +64,11 @@ func GetProductPricingList(ctx context.Context) ([]models.ProductPricing, error)
} }
defer rows.Close() defer rows.Close()
var out []models.ProductPricing out := make([]models.ProductPricing, 0, limit)
for rows.Next() { for rows.Next() {
var item models.ProductPricing var item models.ProductPricing
if err := rows.Scan( if err := rows.Scan(
&item.ProductCode, &item.ProductCode,
&item.CostPrice,
&item.StockQty,
&item.StockEntryDate,
&item.LastPricingDate,
&item.AskiliYan, &item.AskiliYan,
&item.Kategori, &item.Kategori,
&item.UrunIlkGrubu, &item.UrunIlkGrubu,
@@ -215,6 +82,177 @@ func GetProductPricingList(ctx context.Context) ([]models.ProductPricing, error)
} }
out = append(out, item) out = append(out, item)
} }
if err := rows.Err(); err != nil {
return nil, err
}
if len(out) == 0 {
return out, nil
}
// Stage 2: fetch metrics only for paged product codes.
codes := make([]string, 0, len(out))
for _, item := range out {
codes = append(codes, strings.TrimSpace(item.ProductCode))
}
valueRows := make([]string, 0, len(codes))
metricArgs := make([]any, 0, len(codes))
for i, code := range codes {
paramName := "@p" + strconv.Itoa(i+1)
valueRows = append(valueRows, "("+paramName+")")
metricArgs = append(metricArgs, code)
}
metricsQuery := `
WITH req_codes AS (
SELECT DISTINCT LTRIM(RTRIM(v.ProductCode)) AS ProductCode
FROM (VALUES ` + strings.Join(valueRows, ",") + `) v(ProductCode)
WHERE LEN(LTRIM(RTRIM(v.ProductCode))) > 0
),
latest_base_price AS (
SELECT
LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
ROW_NUMBER() OVER (
PARTITION BY LTRIM(RTRIM(b.ItemCode))
ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
) AS rn
FROM prItemBasePrice b
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(b.ItemCode))
WHERE b.ItemTypeCode = 1
AND b.BasePriceCode = 1
AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
),
stock_entry_dates AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
FROM trStock s WITH(NOLOCK)
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(s.ItemCode))
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND s.In_Qty1 > 0
AND LTRIM(RTRIM(s.WarehouseCode)) IN (
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
FROM trStock s WITH(NOLOCK)
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(s.ItemCode))
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
SELECT
LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
SUM(p.Qty1) AS PickingQty1
FROM PickingStates p
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(p.ItemCode))
WHERE p.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
SELECT
LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
SUM(r.Qty1) AS ReserveQty1
FROM ReserveStates r
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(r.ItemCode))
WHERE r.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
SELECT
LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
SUM(d.Qty1) AS DispOrderQty1
FROM DispOrderStates d
INNER JOIN req_codes rc
ON rc.ProductCode = LTRIM(RTRIM(d.ItemCode))
WHERE d.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
GROUP BY LTRIM(RTRIM(d.ItemCode))
)
SELECT
rc.ProductCode,
COALESCE(lp.CostPrice, 0) AS CostPrice,
CAST(ROUND(
ISNULL(sb.InventoryQty1, 0)
- ISNULL(pb.PickingQty1, 0)
- ISNULL(rb.ReserveQty1, 0)
- ISNULL(db.DispOrderQty1, 0)
, 2) AS DECIMAL(18, 2)) AS StockQty,
COALESCE(se.StockEntryDate, '') AS StockEntryDate,
COALESCE(lp.LastPricingDate, '') AS LastPricingDate
FROM req_codes rc
LEFT JOIN latest_base_price lp
ON lp.ItemCode = rc.ProductCode
AND lp.rn = 1
LEFT JOIN stock_entry_dates se
ON se.ItemCode = rc.ProductCode
LEFT JOIN stock_base sb
ON sb.ItemCode = rc.ProductCode
LEFT JOIN pick_base pb
ON pb.ItemCode = rc.ProductCode
LEFT JOIN reserve_base rb
ON rb.ItemCode = rc.ProductCode
LEFT JOIN disp_base db
ON db.ItemCode = rc.ProductCode;
`
metricsRows, err := db.MssqlDB.QueryContext(ctx, metricsQuery, metricArgs...)
if err != nil {
return nil, fmt.Errorf("metrics query failed: %w", err)
}
defer metricsRows.Close()
type metrics struct {
CostPrice float64
StockQty float64
StockEntryDate string
LastPricingDate string
}
metricsByCode := make(map[string]metrics, len(out))
for metricsRows.Next() {
var (
code string
m metrics
)
if err := metricsRows.Scan(
&code,
&m.CostPrice,
&m.StockQty,
&m.StockEntryDate,
&m.LastPricingDate,
); err != nil {
return nil, err
}
metricsByCode[strings.TrimSpace(code)] = m
}
if err := metricsRows.Err(); err != nil {
return nil, err
}
for i := range out {
if m, ok := metricsByCode[strings.TrimSpace(out[i].ProductCode)]; ok {
out[i].CostPrice = m.CostPrice
out[i].StockQty = m.StockQty
out[i].StockEntryDate = m.StockEntryDate
out[i].LastPricingDate = m.LastPricingDate
}
}
return out, nil return out, nil
} }

View File

@@ -26,10 +26,19 @@ func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
} }
log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID) log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)
ctx, cancel := context.WithTimeout(r.Context(), 180*time.Second) // Cloudflare upstream timeout is lower than 180s; fail fast and return API 504 instead of CDN 524.
ctx, cancel := context.WithTimeout(r.Context(), 110*time.Second)
defer cancel() defer cancel()
rows, err := queries.GetProductPricingList(ctx) limit := 500
if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 10000 {
limit = parsed
}
}
afterProductCode := strings.TrimSpace(r.URL.Query().Get("after_product_code"))
rows, err := queries.GetProductPricingList(ctx, limit+1, afterProductCode)
if err != nil { if err != nil {
if isPricingTimeoutLike(err, ctx.Err()) { if isPricingTimeoutLike(err, ctx.Err()) {
log.Printf( log.Printf(
@@ -54,16 +63,37 @@ func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError) http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
return return
} }
hasMore := len(rows) > limit
if hasMore {
rows = rows[:limit]
}
nextCursor := ""
if hasMore && len(rows) > 0 {
nextCursor = strings.TrimSpace(rows[len(rows)-1].ProductCode)
}
log.Printf( log.Printf(
"[ProductPricing] trace=%s success user=%s id=%d count=%d duration_ms=%d", "[ProductPricing] trace=%s success user=%s id=%d limit=%d after=%q count=%d has_more=%t next=%q duration_ms=%d",
traceID, traceID,
claims.Username, claims.Username,
claims.ID, claims.ID,
limit,
afterProductCode,
len(rows), len(rows),
hasMore,
nextCursor,
time.Since(started).Milliseconds(), time.Since(started).Milliseconds(),
) )
w.Header().Set("Content-Type", "application/json; charset=utf-8") w.Header().Set("Content-Type", "application/json; charset=utf-8")
if hasMore {
w.Header().Set("X-Has-More", "true")
} else {
w.Header().Set("X-Has-More", "false")
}
if nextCursor != "" {
w.Header().Set("X-Next-Cursor", nextCursor)
}
_ = json.NewEncoder(w).Encode(rows) _ = json.NewEncoder(w).Encode(rows)
} }

View File

@@ -0,0 +1,41 @@
package routes
import (
	"context"
	"database/sql"
	"log"
	"strings"
	"time"
)
// EnsureTranslationPerfIndexes creates helpful indexes for translation listing/search.
// It is safe to run on each startup; failures are logged and do not stop the service.
// EnsureTranslationPerfIndexes creates helpful indexes for translation
// listing/search. It is safe to run on each startup; failures are logged
// and do not stop the service.
//
// Each statement runs with its own deadline so that a slow first-time index
// build (e.g. a trigram GIN index on a large mk_translator table) cannot
// block service startup indefinitely.
func EnsureTranslationPerfIndexes(db *sql.DB) {
	if db == nil {
		return
	}
	statements := []string{
		// pg_trgm must exist before the gin_trgm_ops indexes below can build.
		`CREATE EXTENSION IF NOT EXISTS pg_trgm`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_t_key_lang ON mk_translator (t_key, lang_code)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_status_lang_updated ON mk_translator (status, lang_code, updated_at DESC)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_manual_status ON mk_translator (is_manual, status)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_type_expr ON mk_translator ((COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy')))`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_text_trgm ON mk_translator USING gin (source_text_tr gin_trgm_ops)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_translated_text_trgm ON mk_translator USING gin (translated_text gin_trgm_ops)`,
	}
	for _, stmt := range statements {
		// Bound every DDL statement; a plain db.Exec would otherwise hang
		// startup for as long as the index build (or a lock wait) takes.
		ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
		_, err := db.ExecContext(ctx, stmt)
		cancel()
		if err != nil {
			log.Printf("[TranslationPerf] index_setup_warn sql=%q err=%v", summarizeSQL(stmt), err)
			continue
		}
		log.Printf("[TranslationPerf] index_ready sql=%q", summarizeSQL(stmt))
	}
}
func summarizeSQL(sqlText string) string {
s := strings.TrimSpace(sqlText)
if len(s) <= 100 {
return s
}
return s[:100] + "..."
}

View File

@@ -19,6 +19,7 @@ import (
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
"sync/atomic"
"time" "time"
"github.com/gorilla/mux" "github.com/gorilla/mux"
@@ -48,12 +49,52 @@ var translationSourceTypeSet = map[string]struct{}{
} }
var ( var (
reQuotedText = regexp.MustCompile(`['"]([^'"]{3,120})['"]`)
reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`) reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`) reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`) reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
reVueTemplate = regexp.MustCompile(`(?is)<template[^>]*>(.*?)</template>`)
reVueScript = regexp.MustCompile(`(?is)<script[^>]*>(.*?)</script>`)
reTemplateAttr = regexp.MustCompile(`\b(?:label|title|placeholder|aria-label|hint)\s*=\s*['"]([^'"]{2,180})['"]`)
reTemplateText = regexp.MustCompile(`>([^<]{3,180})<`)
reScriptLabelProp = regexp.MustCompile(`\blabel\s*:\s*['"]([^'"]{2,180})['"]`)
reScriptUIProp = regexp.MustCompile(`\b(?:label|message|title|placeholder|hint)\s*:\s*['"]([^'"]{2,180})['"]`)
reTemplateDynamic = regexp.MustCompile(`[{][{]|[}][}]`)
reCodeLikeText = regexp.MustCompile(`(?i)(\bconst\b|\blet\b|\bvar\b|\breturn\b|\bfunction\b|=>|\|\||&&|\?\?|//|/\*|\*/|\.trim\(|\.replace\(|\.map\(|\.filter\()`)
) )
// translationNoiseTokens holds lowercase tokens that look like Quasar/Vue
// component prop names rather than human-visible UI copy. A candidate
// string composed entirely of these tokens (or of binding/directive
// prefixes) is treated as attribute noise and skipped when harvesting
// translation seeds (see extractVisibleUIText).
var translationNoiseTokens = map[string]struct{}{
"flat": {},
"dense": {},
"filled": {},
"outlined": {},
"borderless": {},
"clearable": {},
"loading": {},
"disable": {},
"readonly": {},
"hide-bottom": {},
"stack-label": {},
"emit-value": {},
"map-options": {},
"use-input": {},
"multiple": {},
"options": {},
"rows": {},
"cols": {},
"class": {},
"style": {},
}
// translationDummyAllowedVueDirs lists directory prefixes (relative to the
// UI root, slash-separated, lowercased) whose .vue files are scanned for
// dummy translation seeds.
var translationDummyAllowedVueDirs = []string{
"pages/",
"components/",
"layouts/",
}
// translationDummyAllowedStoreDirs lists directory prefixes whose .js/.ts
// files are scanned for dummy translation seeds.
var translationDummyAllowedStoreDirs = []string{
"stores/",
}
type TranslationUpdatePayload struct { type TranslationUpdatePayload struct {
SourceTextTR *string `json:"source_text_tr"` SourceTextTR *string `json:"source_text_tr"`
TranslatedText *string `json:"translated_text"` TranslatedText *string `json:"translated_text"`
@@ -143,6 +184,12 @@ func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
limit = parsed limit = parsed
} }
} }
offset := 0
if raw := strings.TrimSpace(r.URL.Query().Get("offset")); raw != "" {
if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 1000000 {
offset = parsed
}
}
clauses := []string{"1=1"} clauses := []string{"1=1"}
args := make([]any, 0, 8) args := make([]any, 0, 8)
@@ -202,6 +249,11 @@ ORDER BY t_key, lang_code
if limit > 0 { if limit > 0 {
query += fmt.Sprintf("LIMIT $%d", argIndex) query += fmt.Sprintf("LIMIT $%d", argIndex)
args = append(args, limit) args = append(args, limit)
argIndex++
}
if offset > 0 {
query += fmt.Sprintf(" OFFSET $%d", argIndex)
args = append(args, offset)
} }
rows, err := db.Query(query, args...) rows, err := db.Query(query, args...)
@@ -967,14 +1019,16 @@ func collectDummySeeds(limit int) []sourceSeed {
if ext != ".vue" && ext != ".js" && ext != ".ts" { if ext != ".vue" && ext != ".js" && ext != ".ts" {
return nil return nil
} }
if !shouldCollectDummySeedFile(uiRoot, path, ext) {
return nil
}
b, err := os.ReadFile(path) b, err := os.ReadFile(path)
if err != nil { if err != nil {
return nil return nil
} }
matches := reQuotedText.FindAllStringSubmatch(string(b), -1) texts := extractVisibleUIText(string(b), ext)
for _, m := range matches { for _, text := range texts {
text := strings.TrimSpace(m[1])
if !isCandidateText(text) { if !isCandidateText(text) {
continue continue
} }
@@ -998,6 +1052,126 @@ func collectDummySeeds(limit int) []sourceSeed {
return out return out
} }
// shouldCollectDummySeedFile reports whether a UI source file under uiRoot
// should be scanned for dummy translation seeds. Vue files are accepted only
// under the whitelisted Vue directories, JS/TS files only under the store
// directories, and anything inside test/mock folders is always rejected.
func shouldCollectDummySeedFile(uiRoot, fullPath, ext string) bool {
	rel, err := filepath.Rel(uiRoot, fullPath)
	if err != nil {
		return false
	}
	rel = strings.ToLower(filepath.ToSlash(rel))

	// Never harvest strings out of test or mock fixtures.
	for _, excluded := range []string{"/__tests__/", "/tests/", "/mock/", "/mocks/"} {
		if strings.Contains(rel, excluded) {
			return false
		}
	}

	// Pick the whitelist that applies to this extension.
	var allowed []string
	switch ext {
	case ".vue":
		allowed = translationDummyAllowedVueDirs
	case ".js", ".ts":
		allowed = translationDummyAllowedStoreDirs
	default:
		return false
	}
	for _, prefix := range allowed {
		if strings.HasPrefix(rel, prefix) {
			return true
		}
	}
	return false
}
// extractVisibleUIText pulls human-visible UI strings out of a source file.
// For .vue files it scans the <template> section (attribute values and inner
// text) plus the <script> section (label/message/title/placeholder/hint
// props); for .js/.ts files it scans only the script-style UI props. Results
// are de-duplicated, whitespace-normalized, and filtered against markup,
// dynamic bindings, and Quasar attribute noise. Order of discovery is
// preserved.
func extractVisibleUIText(content string, ext string) []string {
	texts := make([]string, 0, 32)
	seen := make(map[string]struct{})

	// noiseOnly reports whether every whitespace-separated token of text is
	// either a known Quasar prop name or a Vue binding/directive prefix.
	noiseOnly := func(text string) bool {
		tokens := strings.Fields(strings.ToLower(text))
		if len(tokens) < 2 || len(tokens) > 16 {
			return false
		}
		for _, tok := range tokens {
			if _, ok := translationNoiseTokens[tok]; ok {
				continue
			}
			if strings.HasPrefix(tok, ":") || strings.HasPrefix(tok, "@") ||
				strings.HasPrefix(tok, "v-") || strings.HasPrefix(tok, "#") {
				continue
			}
			return false
		}
		return true
	}

	// add normalizes a raw capture and appends it unless it is filtered out
	// or already collected.
	add := func(raw string) {
		if strings.ContainsAny(raw, "\r\n\t") {
			return
		}
		text := strings.TrimSpace(strings.Join(strings.Fields(raw), " "))
		switch {
		case text == "",
			strings.ContainsAny(text, "<>{}[]`"),
			strings.Contains(text, "="),
			strings.Contains(text, "#"),
			reTemplateDynamic.MatchString(text),
			noiseOnly(text):
			return
		}
		if _, dup := seen[text]; dup {
			return
		}
		seen[text] = struct{}{}
		texts = append(texts, text)
	}

	// collect feeds every first capture group of re within section into add.
	collect := func(re *regexp.Regexp, section string) {
		for _, m := range re.FindAllStringSubmatch(section, -1) {
			if len(m) > 1 {
				add(m[1])
			}
		}
	}

	switch ext {
	case ".vue":
		// Fall back to the whole file if no <template> block is present.
		template := content
		if m := reVueTemplate.FindStringSubmatch(content); len(m) > 1 {
			template = m[1]
		}
		collect(reTemplateAttr, template)
		collect(reTemplateText, template)

		script := content
		if m := reVueScript.FindStringSubmatch(content); len(m) > 1 {
			script = m[1]
		}
		collect(reScriptLabelProp, script)
		collect(reScriptUIProp, script)
	case ".js", ".ts":
		collect(reScriptUIProp, content)
	}
	return texts
}
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) { func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "") return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
} }
@@ -1080,6 +1254,10 @@ LIMIT $2
failedTranslate := 0 failedTranslate := 0
failedUpdate := 0 failedUpdate := 0
doneByLang := map[string]int{} doneByLang := map[string]int{}
var processedCount int64
var translatedCount int64
var failedTranslateCount int64
var failedUpdateCount int64
progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100) progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
if progressEvery <= 0 { if progressEvery <= 0 {
progressEvery = 100 progressEvery = 100
@@ -1090,11 +1268,47 @@ LIMIT $2
} }
progressTicker := time.Duration(progressSec) * time.Second progressTicker := time.Duration(progressSec) * time.Second
lastProgress := time.Now() lastProgress := time.Now()
heartbeatDone := make(chan struct{})
go func() {
ticker := time.NewTicker(progressTicker)
defer ticker.Stop()
for {
select {
case <-ticker.C:
processed := int(atomic.LoadInt64(&processedCount))
translated := int(atomic.LoadInt64(&translatedCount))
failedTr := int(atomic.LoadInt64(&failedTranslateCount))
failedUpd := int(atomic.LoadInt64(&failedUpdateCount))
elapsed := time.Since(start)
rps := float64(translated)
if elapsed > 0 {
rps = float64(translated) / elapsed.Seconds()
}
log.Printf(
"[TranslationAuto] trace=%s stage=heartbeat processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f",
traceID,
processed,
len(list),
translated,
failedTr,
failedUpd,
elapsed.Milliseconds(),
rps,
)
case <-heartbeatDone:
return
}
}
}()
defer close(heartbeatDone)
for i, p := range list { for i, p := range list {
tr, err := callAzureTranslate(p.Text, p.Lang) tr, err := callAzureTranslate(p.Text, p.Lang)
if err != nil || strings.TrimSpace(tr) == "" { if err != nil || strings.TrimSpace(tr) == "" {
failedTranslate++ failedTranslate++
atomic.StoreInt64(&failedTranslateCount, int64(failedTranslate))
atomic.StoreInt64(&processedCount, int64(i+1))
continue continue
} }
_, err = db.Exec(` _, err = db.Exec(`
@@ -1108,9 +1322,13 @@ WHERE id = $1
`, p.ID, strings.TrimSpace(tr)) `, p.ID, strings.TrimSpace(tr))
if err != nil { if err != nil {
failedUpdate++ failedUpdate++
atomic.StoreInt64(&failedUpdateCount, int64(failedUpdate))
atomic.StoreInt64(&processedCount, int64(i+1))
continue continue
} }
done++ done++
atomic.StoreInt64(&translatedCount, int64(done))
atomic.StoreInt64(&processedCount, int64(i+1))
doneByLang[p.Lang]++ doneByLang[p.Lang]++
processed := i + 1 processed := i + 1
@@ -1375,18 +1593,26 @@ func callAzureTranslate(sourceText, targetLang string) (string, error) {
} }
endpoint = strings.TrimRight(endpoint, "/") endpoint = strings.TrimRight(endpoint, "/")
baseURL, err := url.Parse(endpoint + "/translate") normalizedEndpoint := strings.ToLower(endpoint)
translatePath := "/translate"
// Azure custom endpoint requires the translator path with version in URL.
if strings.Contains(normalizedEndpoint, ".cognitiveservices.azure.com") {
translatePath = "/translator/text/v3.0/translate"
}
baseURL, err := url.Parse(endpoint + translatePath)
if err != nil { if err != nil {
return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err) return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
} }
q := baseURL.Query() q := baseURL.Query()
if translatePath == "/translate" {
q.Set("api-version", "3.0") q.Set("api-version", "3.0")
}
q.Set("from", sourceLang) q.Set("from", sourceLang)
q.Set("to", targetLang) q.Set("to", targetLang)
baseURL.RawQuery = q.Encode() baseURL.RawQuery = q.Encode()
payload := []map[string]string{ payload := []map[string]string{
{"Text": sourceText}, {"text": sourceText},
} }
body, _ := json.Marshal(payload) body, _ := json.Marshal(payload)
req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body)) req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
@@ -1575,6 +1801,22 @@ func isCandidateText(s string) bool {
if strings.Contains(s, "/api/") { if strings.Contains(s, "/api/") {
return false return false
} }
if reCodeLikeText.MatchString(s) {
return false
}
if strings.ContainsAny(s, "{}[];`") {
return false
}
symbolCount := 0
for _, r := range s {
switch r {
case '(', ')', '=', ':', '/', '\\', '|', '&', '*', '<', '>', '_':
symbolCount++
}
}
if symbolCount >= 4 {
return false
}
return true return true
} }

View File

@@ -0,0 +1,75 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking at adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import { Quasar } from 'quasar'
import { markRaw } from 'vue'
import RootComponent from 'app/src/App.vue'
import createStore from 'app/src/stores/index'
import createRouter from 'app/src/router/index'
// Bootstraps the Quasar application instance: creates the Vue app, installs
// Quasar with the user's options, resolves the store and router (each may be
// a factory function or a ready instance), wires the router onto the store,
// and returns all three without mounting.
export default async function (createAppFn, quasarUserOptions) {
// Create the app instance.
// Here we inject into it the Quasar UI, the router & possibly the store.
const app = createAppFn(RootComponent)
app.use(Quasar, quasarUserOptions)
// Store export may be a factory (called here with an empty options object)
// or a plain instance.
const store = typeof createStore === 'function'
? await createStore({})
: createStore
app.use(store)
// markRaw keeps the router instance out of Vue's reactivity tracking.
const router = markRaw(
typeof createRouter === 'function'
? await createRouter({store})
: createRouter
)
// make router instance available in store
store.use(({ store }) => { store.router = router })
// Expose the app, the router and the store.
// Note that we are not mounting the app here, since bootstrapping will be
// different depending on whether we are in a browser or on the server.
return {
app,
store,
router
}
}

View File

@@ -0,0 +1,158 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking at adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import { createApp } from 'vue'
import '@quasar/extras/roboto-font/roboto-font.css'
import '@quasar/extras/material-icons/material-icons.css'
// We load Quasar stylesheet file
import 'quasar/dist/quasar.sass'
import 'src/css/app.css'
import createQuasarApp from './app.js'
import quasarUserOptions from './quasar-user-options.js'
const publicPath = `/`
// Runs the provided boot files sequentially against the created app, then
// installs the router and mounts the app on #q-app. A boot file can abort
// the chain by calling redirect() or by throwing/rejecting with an error
// that carries a `url` property.
async function start ({
app,
router
, store
}, bootFiles) {
let hasRedirected = false
// Resolve a router location to an href; objects that fail to resolve yield
// null (nothing to navigate to), plain strings fall through unchanged.
const getRedirectUrl = url => {
try { return router.resolve(url).href }
catch (err) {}
return Object(url) === url
? null
: url
}
// Performs a hard navigation; absolute http(s) URLs bypass the router.
const redirect = url => {
hasRedirected = true
if (typeof url === 'string' && /^https?:\/\//.test(url)) {
window.location.href = url
return
}
const href = getRedirectUrl(url)
// continue if we didn't fail to resolve the url
if (href !== null) {
window.location.href = href
window.location.reload()
}
}
const urlPath = window.location.href.replace(window.location.origin, '')
// Boot files run one at a time; the first redirect or error stops the chain.
for (let i = 0; hasRedirected === false && i < bootFiles.length; i++) {
try {
await bootFiles[i]({
app,
router,
store,
ssrContext: null,
redirect,
urlPath,
publicPath
})
}
catch (err) {
// Errors carrying a url are treated as redirects; anything else aborts
// startup after logging.
if (err && err.url) {
redirect(err.url)
return
}
console.error('[Quasar] boot error:', err)
return
}
}
if (hasRedirected === true) return
app.use(router)
app.mount('#q-app')
}
// Entry point: create the app, then eagerly load the configured boot files
// and hand them to start(). Promise.allSettled is preferred when available
// so that one failed boot-file import logs an error instead of rejecting
// the whole batch.
createQuasarApp(createApp, quasarUserOptions)
.then(app => {
// eventually remove this when Cordova/Capacitor/Electron support becomes old
const [ method, mapFn ] = Promise.allSettled !== void 0
? [
'allSettled',
bootFiles => bootFiles.map(result => {
if (result.status === 'rejected') {
console.error('[Quasar] boot error:', result.reason)
return
}
return result.value.default
})
]
: [
'all',
bootFiles => bootFiles.map(entry => entry.default)
]
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
// Keep only callable default exports; rejected or default-less modules
// were mapped to undefined above.
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')
start(app, boot)
})
})

View File

@@ -0,0 +1,116 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking at adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
import App from 'app/src/App.vue'
// App-level preFetch hook, if App.vue defines one; false otherwise. It is
// consumed exactly once by the first route resolution in addPreFetchHooks
// and then reset to false.
let appPrefetch = typeof App.preFetch === 'function'
? App.preFetch
: (
// Class components return the component options (and the preFetch hook) inside __c property
App.__c !== void 0 && typeof App.__c.preFetch === 'function'
? App.__c.preFetch
: false
)
// Returns the components matched by `to` (or by the current route when `to`
// is absent), flattened into { path, c } pairs — one entry per named view of
// each matched route record.
function getMatchedComponents (to, router) {
const route = to
? (to.matched ? to : router.resolve(to).route)
: router.currentRoute.value
if (!route) { return [] }
// Only records that actually declare components contribute entries.
const matched = route.matched.filter(m => m.components !== void 0)
if (matched.length === 0) { return [] }
return Array.prototype.concat.apply([], matched.map(m => {
return Object.keys(m.components).map(key => {
const comp = m.components[key]
return {
path: m.path,
c: comp
}
})
}))
}
// Registers a router.beforeResolve guard that runs component preFetch hooks
// before each navigation completes. Only components that differ from the
// previous route (or whose path carries params) are fetched; the app-level
// preFetch, when present, is prepended on the first resolution only.
export function addPreFetchHooks ({ router, store, publicPath }) {
// Add router hook for handling preFetch.
// Doing it after initial route is resolved so that we don't double-fetch
// the data that we already have. Using router.beforeResolve() so that all
// async components are resolved.
router.beforeResolve((to, from, next) => {
const
urlPath = window.location.href.replace(window.location.origin, ''),
matched = getMatchedComponents(to, router),
prevMatched = getMatchedComponents(from, router)
let diffed = false
const preFetchList = matched
.filter((m, i) => {
// Once one component differs from the previous route, everything deeper
// in the chain is treated as changed too.
return diffed || (diffed = (
!prevMatched[i] ||
prevMatched[i].c !== m.c ||
m.path.indexOf('/:') > -1 // does it have params?
))
})
.filter(m => m.c !== void 0 && (
typeof m.c.preFetch === 'function'
// Class components return the component options (and the preFetch hook) inside __c property
|| (m.c.__c !== void 0 && typeof m.c.__c.preFetch === 'function')
))
.map(m => m.c.__c !== void 0 ? m.c.__c.preFetch : m.c.preFetch)
if (appPrefetch !== false) {
// The app-level hook runs only for the very first resolved navigation.
preFetchList.unshift(appPrefetch)
appPrefetch = false
}
if (preFetchList.length === 0) {
return next()
}
let hasRedirected = false
const redirect = url => {
hasRedirected = true
next(url)
}
const proceed = () => {
if (hasRedirected === false) { next() }
}
// Run the hooks sequentially; once a hook redirects, later hooks are skipped.
preFetchList.reduce(
(promise, preFetch) => promise.then(() => hasRedirected === false && preFetch({
store,
currentRoute: to,
previousRoute: from,
redirect,
urlPath,
publicPath
})),
Promise.resolve()
)
.then(proceed)
.catch(e => {
console.error(e)
proceed()
})
})
}

View File

@@ -0,0 +1,23 @@
/* eslint-disable */
/**
* THIS FILE IS GENERATED AUTOMATICALLY.
* DO NOT EDIT.
*
* You are probably looking on adding startup/initialization code.
* Use "quasar new boot <name>" and add it there.
* One boot file per concern. Then reference the file(s) in quasar.config file > boot:
* boot: ['file', ...] // do not add ".js" extension to it.
*
* Boot files are your "main.js"
**/
// Generated Quasar user options: Turkish language pack plus the
// Loading/Dialog/Notify plugins; Notify defaults to top-positioned
// toasts with a 2500 ms timeout.
import lang from 'quasar/lang/tr.js'
import {Loading,Dialog,Notify} from 'quasar'
export default { config: {"notify":{"position":"top","timeout":2500}},lang,plugins: {Loading,Dialog,Notify} }

View File

@@ -53,9 +53,10 @@
:virtual-scroll-sticky-size-start="headerHeight" :virtual-scroll-sticky-size-start="headerHeight"
:virtual-scroll-slice-size="36" :virtual-scroll-slice-size="36"
:rows-per-page-options="[0]" :rows-per-page-options="[0]"
:pagination="{ rowsPerPage: 0 }" v-model:pagination="tablePagination"
hide-bottom hide-bottom
:table-style="tableStyle" :table-style="tableStyle"
@virtual-scroll="onTableVirtualScroll"
> >
<template #header="props"> <template #header="props">
<q-tr :props="props" class="header-row-fixed"> <q-tr :props="props" class="header-row-fixed">
@@ -309,10 +310,13 @@
</template> </template>
<script setup> <script setup>
import { computed, onMounted, ref } from 'vue' import { computed, onMounted, ref, watch } from 'vue'
import { useProductPricingStore } from 'src/stores/ProductPricingStore' import { useProductPricingStore } from 'src/stores/ProductPricingStore'
const store = useProductPricingStore() const store = useProductPricingStore()
const FETCH_LIMIT = 500
const nextCursor = ref('')
const loadingMore = ref(false)
const usdToTry = 38.25 const usdToTry = 38.25
const eurToTry = 41.6 const eurToTry = 41.6
@@ -381,6 +385,12 @@ const headerFilterFieldSet = new Set([
]) ])
const mainTableRef = ref(null) const mainTableRef = ref(null)
const tablePagination = ref({
page: 1,
rowsPerPage: 0,
sortBy: 'productCode',
descending: false
})
const selectedMap = ref({}) const selectedMap = ref({})
const selectedCurrencies = ref(['USD', 'EUR', 'TRY']) const selectedCurrencies = ref(['USD', 'EUR', 'TRY'])
const showSelectedOnly = ref(false) const showSelectedOnly = ref(false)
@@ -570,6 +580,7 @@ const selectedRowCount = computed(() => Object.values(selectedMap.value).filter(
const selectedVisibleCount = computed(() => visibleRowIds.value.filter((id) => !!selectedMap.value[id]).length) const selectedVisibleCount = computed(() => visibleRowIds.value.filter((id) => !!selectedMap.value[id]).length)
const allSelectedVisible = computed(() => visibleRowIds.value.length > 0 && selectedVisibleCount.value === visibleRowIds.value.length) const allSelectedVisible = computed(() => visibleRowIds.value.length > 0 && selectedVisibleCount.value === visibleRowIds.value.length)
const someSelectedVisible = computed(() => selectedVisibleCount.value > 0) const someSelectedVisible = computed(() => selectedVisibleCount.value > 0)
const hasMoreRows = computed(() => Boolean(store.hasMore))
function isHeaderFilterField (field) { function isHeaderFilterField (field) {
return headerFilterFieldSet.has(field) return headerFilterFieldSet.has(field)
@@ -691,10 +702,26 @@ function round2 (value) {
} }
function parseNumber (val) { function parseNumber (val) {
const normalized = String(val ?? '') if (typeof val === 'number') return Number.isFinite(val) ? val : 0
.replace(/\s/g, '') const text = String(val ?? '').trim().replace(/\s/g, '')
.replace(/\./g, '') if (!text) return 0
.replace(',', '.')
const lastComma = text.lastIndexOf(',')
const lastDot = text.lastIndexOf('.')
let normalized = text
if (lastComma >= 0 && lastDot >= 0) {
if (lastComma > lastDot) {
normalized = text.replace(/\./g, '').replace(',', '.')
} else {
normalized = text.replace(/,/g, '')
}
} else if (lastComma >= 0) {
normalized = text.replace(/\./g, '').replace(',', '.')
} else {
normalized = text.replace(/,/g, '')
}
const n = Number(normalized) const n = Number(normalized)
return Number.isFinite(n) ? n : 0 return Number.isFinite(n) ? n : 0
} }
@@ -702,11 +729,7 @@ function parseNumber (val) {
function parseNullableNumber (val) { function parseNullableNumber (val) {
const text = String(val ?? '').trim() const text = String(val ?? '').trim()
if (!text) return null if (!text) return null
const normalized = text const n = parseNumber(text)
.replace(/\s/g, '')
.replace(/\./g, '')
.replace(',', '.')
const n = Number(normalized)
return Number.isFinite(n) ? n : null return Number.isFinite(n) ? n : null
} }
@@ -840,12 +863,59 @@ function clearAllCurrencies () {
selectedCurrencies.value = [] selectedCurrencies.value = []
} }
// Fetch one page of pricing rows from the store. reset=true starts from the
// beginning and replaces the table; reset=false continues from nextCursor
// and appends. Updates nextCursor from the response and returns the number
// of rows fetched.
async function fetchChunk ({ reset = false } = {}) {
  const cursor = reset ? '' : nextCursor.value
  const result = await store.fetchRows({
    limit: FETCH_LIMIT,
    afterProductCode: cursor,
    append: !reset
  })
  nextCursor.value = String(result?.nextCursor || '')
  return Number(result?.fetched) || 0
}
// Append the next page of rows. No-ops while a load is already in flight
// or when the backend reports no further pages.
async function loadMoreRows () {
  if (loadingMore.value) return
  if (store.loading) return
  if (!hasMoreRows.value) return

  loadingMore.value = true
  try {
    await fetchChunk({ reset: false })
  } finally {
    // Always release the flag, even if the fetch throws.
    loadingMore.value = false
  }
}
// q-table @virtual-scroll handler: when the viewport approaches the end of
// the currently filtered rows (within 25 rows), kick off the next page load.
function onTableVirtualScroll (details) {
  const lastVisibleIndex = Number(details?.to || 0)
  if (!Number.isFinite(lastVisibleIndex)) return
  const nearEnd = lastVisibleIndex >= filteredRows.value.length - 25
  if (nearEnd) {
    void loadMoreRows()
  }
}
// Keep loading pages until at least minRows rows pass the current filters,
// capped at maxBatches fetches so a narrow filter cannot loop forever.
async function ensureEnoughVisibleRows (minRows = 80, maxBatches = 4) {
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || filteredRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
async function reloadData () { async function reloadData () {
const startedAt = Date.now() const startedAt = Date.now()
console.info('[product-pricing][ui] reload:start', { console.info('[product-pricing][ui] reload:start', {
at: new Date(startedAt).toISOString() at: new Date(startedAt).toISOString()
}) })
await store.fetchRows() try {
nextCursor.value = ''
await fetchChunk({ reset: true })
await ensureEnoughVisibleRows(120, 6)
} catch (err) {
console.error('[product-pricing][ui] reload:error', {
duration_ms: Date.now() - startedAt,
message: String(err?.message || err || 'reload failed')
})
}
console.info('[product-pricing][ui] reload:done', { console.info('[product-pricing][ui] reload:done', {
duration_ms: Date.now() - startedAt, duration_ms: Date.now() - startedAt,
row_count: Array.isArray(store.rows) ? store.rows.length : 0, row_count: Array.isArray(store.rows) ? store.rows.length : 0,
@@ -857,6 +927,19 @@ async function reloadData () {
onMounted(async () => { onMounted(async () => {
await reloadData() await reloadData()
}) })
// When any filter, the selected-only toggle, or the sort changes, top up the
// loaded data so the (possibly narrowed) view still shows enough rows.
watch(
  [
    columnFilters,
    numberRangeFilters,
    dateRangeFilters,
    showSelectedOnly,
    () => tablePagination.value.sortBy,
    () => tablePagination.value.descending
  ],
  () => { void ensureEnoughVisibleRows(80, 4) },
  // deep: the filter objects are mutated in place by the header inputs.
  { deep: true }
)
</script> </script>
<style scoped> <style scoped>

View File

@@ -1,5 +1,6 @@
<template> <template>
<q-page v-if="canUpdateLanguage" class="q-pa-md"> <q-page v-if="canUpdateLanguage" class="q-pa-md translation-page">
<div class="translation-toolbar sticky-toolbar">
<div class="row q-col-gutter-sm items-end q-mb-md"> <div class="row q-col-gutter-sm items-end q-mb-md">
<div class="col-12 col-md-4"> <div class="col-12 col-md-4">
<q-input <q-input
@@ -31,7 +32,7 @@
<q-btn <q-btn
color="accent" color="accent"
icon="g_translate" icon="g_translate"
label="Seçilenleri Çevir" label="Seçilenleri Çevir"
:disable="selectedKeys.length === 0" :disable="selectedKeys.length === 0"
:loading="store.saving" :loading="store.saving"
@click="translateSelectedRows" @click="translateSelectedRows"
@@ -53,17 +54,23 @@
@click="bulkSaveSelected" @click="bulkSaveSelected"
/> />
</div> </div>
</div>
<q-table <q-table
ref="tableRef"
class="translation-table"
flat flat
bordered bordered
dense virtual-scroll
:virtual-scroll-sticky-size-start="56"
row-key="t_key" row-key="t_key"
:loading="store.loading || store.saving" :loading="store.loading || store.saving"
:rows="pivotRows" :rows="pivotRows"
:columns="columns" :columns="columns"
:rows-per-page-options="[0]" :rows-per-page-options="[0]"
:pagination="{ rowsPerPage: 0 }" v-model:pagination="tablePagination"
hide-bottom
@virtual-scroll="onVirtualScroll"
> >
<template #body-cell-actions="props"> <template #body-cell-actions="props">
<q-td :props="props"> <q-td :props="props">
@@ -91,57 +98,98 @@
<template #body-cell-source_text_tr="props"> <template #body-cell-source_text_tr="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_text_tr')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'source_text_tr')">
<q-input v-model="rowDraft(props.row.t_key).source_text_tr" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <div class="source-text-label" :title="rowDraft(props.row.t_key).source_text_tr">
{{ rowDraft(props.row.t_key).source_text_tr }}
</div>
</q-td> </q-td>
</template> </template>
<template #body-cell-source_type="props"> <template #body-cell-source_type="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'source_type')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'source_type')">
<q-select <q-badge
v-model="rowDraft(props.row.t_key).source_type" color="primary"
dense text-color="white"
outlined class="source-type-badge"
emit-value :label="sourceTypeLabel(props.row.t_key)"
map-options
:options="sourceTypeOptions"
@update:model-value="() => queueAutoSave(props.row.t_key)"
/> />
</q-td> </q-td>
</template> </template>
<template #body-cell-en="props"> <template #body-cell-en="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'en')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'en')">
<q-input v-model="rowDraft(props.row.t_key).en" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <q-input
v-model="rowDraft(props.row.t_key).en"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td> </q-td>
</template> </template>
<template #body-cell-de="props"> <template #body-cell-de="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'de')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'de')">
<q-input v-model="rowDraft(props.row.t_key).de" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <q-input
v-model="rowDraft(props.row.t_key).de"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td> </q-td>
</template> </template>
<template #body-cell-es="props"> <template #body-cell-es="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'es')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'es')">
<q-input v-model="rowDraft(props.row.t_key).es" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <q-input
v-model="rowDraft(props.row.t_key).es"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td> </q-td>
</template> </template>
<template #body-cell-it="props"> <template #body-cell-it="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'it')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'it')">
<q-input v-model="rowDraft(props.row.t_key).it" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <q-input
v-model="rowDraft(props.row.t_key).it"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td> </q-td>
</template> </template>
<template #body-cell-ru="props"> <template #body-cell-ru="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ru')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'ru')">
<q-input v-model="rowDraft(props.row.t_key).ru" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <q-input
v-model="rowDraft(props.row.t_key).ru"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td> </q-td>
</template> </template>
<template #body-cell-ar="props"> <template #body-cell-ar="props">
<q-td :props="props" :class="cellClass(props.row.t_key, 'ar')"> <q-td :props="props" :class="cellClass(props.row.t_key, 'ar')">
<q-input v-model="rowDraft(props.row.t_key).ar" dense outlined @blur="queueAutoSave(props.row.t_key)" /> <q-input
v-model="rowDraft(props.row.t_key).ar"
type="textarea"
autogrow
:max-rows="8"
outlined
@blur="queueAutoSave(props.row.t_key)"
/>
</q-td> </q-td>
</template> </template>
</q-table> </q-table>
@@ -149,13 +197,13 @@
<q-page v-else class="q-pa-md flex flex-center"> <q-page v-else class="q-pa-md flex flex-center">
<div class="text-negative text-subtitle1"> <div class="text-negative text-subtitle1">
Bu module erisim yetkiniz yok. Bu modüle erişim yetkiniz yok.
</div> </div>
</q-page> </q-page>
</template> </template>
<script setup> <script setup>
import { computed, onMounted, ref } from 'vue' import { computed, onBeforeUnmount, onMounted, ref, watch } from 'vue'
import { useQuasar } from 'quasar' import { useQuasar } from 'quasar'
import { usePermission } from 'src/composables/usePermission' import { usePermission } from 'src/composables/usePermission'
import { useTranslationStore } from 'src/stores/translationStore' import { useTranslationStore } from 'src/stores/translationStore'
@@ -169,25 +217,41 @@ const filters = ref({
q: '' q: ''
}) })
const autoTranslate = ref(false) const autoTranslate = ref(false)
const tableRef = ref(null)
// Page size for each /language/translations fetch.
const FETCH_LIMIT = 1400
// Offset of the next chunk to request (rows loaded so far).
const loadedOffset = ref(0)
// True while the server may still have unloaded rows.
const hasMoreRows = ref(true)
// Guards against overlapping load-more requests.
const loadingMore = ref(false)
// Bound via v-model:pagination; rowsPerPage 0 = show all (virtual scroll).
const tablePagination = ref({
  page: 1,
  rowsPerPage: 0,
  sortBy: 'source_text_tr',
  descending: false
})
// Debounce handle for filter-triggered reloads (see scheduleFilterReload).
let filterReloadTimer = null
const sourceTypeOptions = [ const sourceTypeOptions = [
{ label: 'dummy', value: 'dummy' }, { label: 'dummy', value: 'dummy' },
{ label: 'postgre', value: 'postgre' }, { label: 'postgre', value: 'postgre' },
{ label: 'mssql', value: 'mssql' } { label: 'mssql', value: 'mssql' }
] ]
// Display labels for the source_type values shown in the "Kaynak" badge.
const sourceTypeLabelMap = {
  dummy: 'UI',
  postgre: 'PostgreSQL',
  mssql: 'MSSQL'
}
const columns = [ const columns = [
{ name: 'actions', label: 'Güncelle', field: 'actions', align: 'left' }, { name: 'actions', label: 'Güncelle', field: 'actions', align: 'left' },
{ name: 'select', label: 'Seç', field: 'select', align: 'left' }, { name: 'select', label: 'Seç', field: 'select', align: 'left' },
{ name: 't_key', label: 'Key', field: 't_key', align: 'left', sortable: true }, { name: 'source_text_tr', label: 'Türkçe Metin', field: 'source_text_tr', align: 'left', style: 'min-width: 340px' },
{ name: 'source_text_tr', label: 'Türkçe kaynak', field: 'source_text_tr', align: 'left' }, { name: 'source_type', label: 'Kaynak', field: 'source_type', align: 'left', style: 'min-width: 140px' },
{ name: 'source_type', label: 'Veri tipi', field: 'source_type', align: 'left' }, { name: 'en', label: 'İngilizce', field: 'en', align: 'left', style: 'min-width: 220px' },
{ name: 'en', label: 'English', field: 'en', align: 'left' }, { name: 'de', label: 'Almanca', field: 'de', align: 'left', style: 'min-width: 220px' },
{ name: 'de', label: 'Deutch', field: 'de', align: 'left' }, { name: 'es', label: 'İspanyolca', field: 'es', align: 'left', style: 'min-width: 220px' },
{ name: 'es', label: 'Espanol', field: 'es', align: 'left' }, { name: 'it', label: 'İtalyanca', field: 'it', align: 'left', style: 'min-width: 220px' },
{ name: 'it', label: 'Italiano', field: 'it', align: 'left' }, { name: 'ru', label: 'Rusça', field: 'ru', align: 'left', style: 'min-width: 220px' },
{ name: 'ru', label: 'Русский', field: 'ru', align: 'left' }, { name: 'ar', label: 'Arapça', field: 'ar', align: 'left', style: 'min-width: 220px' }
{ name: 'ar', label: 'العربية', field: 'ar', align: 'left' }
] ]
const draftByKey = ref({}) const draftByKey = ref({})
@@ -242,10 +306,33 @@ const pivotRows = computed(() => {
return Array.from(byKey.values()).sort((a, b) => a.t_key.localeCompare(b.t_key)) return Array.from(byKey.values()).sort((a, b) => a.t_key.localeCompare(b.t_key))
}) })
function snapshotDrafts () { function snapshotDrafts (options = {}) {
const preserveDirty = Boolean(options?.preserveDirty)
const draft = {} const draft = {}
const original = {} const original = {}
for (const row of pivotRows.value) { for (const row of pivotRows.value) {
const existingDraft = draftByKey.value[row.t_key]
const existingOriginal = originalByKey.value[row.t_key]
const keepExisting = preserveDirty &&
existingDraft &&
existingOriginal &&
(
existingDraft.source_text_tr !== existingOriginal.source_text_tr ||
existingDraft.source_type !== existingOriginal.source_type ||
existingDraft.en !== existingOriginal.en ||
existingDraft.de !== existingOriginal.de ||
existingDraft.es !== existingOriginal.es ||
existingDraft.it !== existingOriginal.it ||
existingDraft.ru !== existingOriginal.ru ||
existingDraft.ar !== existingOriginal.ar
)
if (keepExisting) {
draft[row.t_key] = { ...existingDraft }
original[row.t_key] = { ...existingOriginal }
continue
}
draft[row.t_key] = { draft[row.t_key] = {
source_text_tr: row.source_text_tr || '', source_text_tr: row.source_text_tr || '',
source_type: row.source_type || 'dummy', source_type: row.source_type || 'dummy',
@@ -280,8 +367,9 @@ function rowDraft (key) {
} }
function buildFilters () { function buildFilters () {
const query = String(filters.value.q || '').trim()
return { return {
q: filters.value.q || undefined q: query || undefined
} }
} }
@@ -291,7 +379,6 @@ function rowHasChanges (key) {
if (!draft || !orig) return false if (!draft || !orig) return false
return ( return (
draft.source_text_tr !== orig.source_text_tr || draft.source_text_tr !== orig.source_text_tr ||
draft.source_type !== orig.source_type ||
draft.en !== orig.en || draft.en !== orig.en ||
draft.de !== orig.de || draft.de !== orig.de ||
draft.es !== orig.es || draft.es !== orig.es ||
@@ -312,7 +399,7 @@ function cellClass (key, field) {
const orig = originalByKey.value[key] const orig = originalByKey.value[key]
if (!draft || !orig) return '' if (!draft || !orig) return ''
if (draft[field] !== orig[field]) return 'cell-dirty' if (field !== 'source_type' && draft[field] !== orig[field]) return 'cell-dirty'
if (field === 'en' && isPending(key, 'en')) return 'cell-new' if (field === 'en' && isPending(key, 'en')) return 'cell-new'
if (field === 'de' && isPending(key, 'de')) return 'cell-new' if (field === 'de' && isPending(key, 'de')) return 'cell-new'
@@ -324,6 +411,11 @@ function cellClass (key, field) {
return '' return ''
} }
// Human-readable label for a row's source_type; falls back to the raw
// (lowercased) value, or '-' when empty.
function sourceTypeLabel (key) {
  const rawType = rowDraft(key).source_type || 'dummy'
  const normalized = String(rawType).toLowerCase()
  return sourceTypeLabelMap[normalized] || normalized || '-'
}
function toggleSelected (key, checked) { function toggleSelected (key, checked) {
if (checked) { if (checked) {
if (!selectedKeys.value.includes(key)) { if (!selectedKeys.value.includes(key)) {
@@ -349,13 +441,33 @@ function queueAutoSave (key) {
autoSaveTimers.set(key, timer) autoSaveTimers.set(key, timer)
} }
// Fetch one page of translation rows. append=false restarts from offset 0
// and replaces the table; append=true continues from loadedOffset and merges.
// A full page (== FETCH_LIMIT) signals that more rows may remain.
async function fetchRowsChunk (append = false) {
  const offset = append ? loadedOffset.value : 0
  await store.fetchRows(
    { ...buildFilters(), limit: FETCH_LIMIT, offset },
    { append }
  )

  // store.count reflects the size of the chunk just received.
  const incomingCount = Number(store.count) || 0
  loadedOffset.value = append ? loadedOffset.value + incomingCount : incomingCount
  hasMoreRows.value = incomingCount === FETCH_LIMIT

  // Rebuild draft snapshots; on append, rows with unsaved edits keep them.
  snapshotDrafts({ preserveDirty: append })
}
async function loadRows () { async function loadRows () {
try { try {
await store.fetchRows(buildFilters()) loadedOffset.value = 0
snapshotDrafts() hasMoreRows.value = true
await fetchRowsChunk(false)
} catch (err) { } catch (err) {
console.error('[translation-sync][ui] loadRows:error', { console.error('[translation-sync][ui] loadRows:error', {
message: err?.message || 'Ceviri satirlari yuklenemedi' message: err?.message || 'Çeviri satırları yüklenemedi'
}) })
$q.notify({ $q.notify({
type: 'negative', type: 'negative',
@@ -364,6 +476,42 @@ async function loadRows () {
} }
} }
// Append the next page of translation rows. Skipped while any fetch or save
// is in flight, or when the server has no more pages.
async function loadMoreRows () {
  if (!hasMoreRows.value) return
  if (loadingMore.value) return
  if (store.loading || store.saving) return

  loadingMore.value = true
  try {
    await fetchRowsChunk(true)
  } finally {
    // Always clear the in-flight flag, even on error.
    loadingMore.value = false
  }
}
// Keep fetching until at least minRows pivot rows are available, bounded by
// maxBatches so a sparse result set cannot trigger an endless loop.
async function ensureEnoughVisibleRows (minRows = 120, maxBatches = 4) {
  for (let batch = 0; batch < maxBatches; batch++) {
    if (!hasMoreRows.value || pivotRows.value.length >= minRows) break
    await loadMoreRows()
  }
}
// q-table @virtual-scroll handler: load the next chunk once the viewport
// comes within 15 rows of the end of the loaded data.
function onVirtualScroll (details) {
  const lastVisibleIndex = Number(details?.to || 0)
  if (!Number.isFinite(lastVisibleIndex)) return
  const nearEnd = lastVisibleIndex >= pivotRows.value.length - 15
  if (nearEnd) {
    void loadMoreRows()
  }
}
// Debounce filter changes: restart the 350 ms timer on every call and only
// reload rows once the user pauses typing.
function scheduleFilterReload () {
  if (filterReloadTimer) {
    clearTimeout(filterReloadTimer)
  }
  const runReload = () => {
    filterReloadTimer = null
    void loadRows()
  }
  filterReloadTimer = setTimeout(runReload, 350)
}
async function ensureMissingLangRows (key, draft, langs) { async function ensureMissingLangRows (key, draft, langs) {
const missingLangs = [] const missingLangs = []
if (!langs.en && String(draft.en || '').trim() !== '') missingLangs.push('en') if (!langs.en && String(draft.en || '').trim() !== '') missingLangs.push('en')
@@ -513,7 +661,7 @@ async function translateSelectedRows () {
try { try {
const keys = Array.from(new Set(selectedKeys.value.filter(Boolean))) const keys = Array.from(new Set(selectedKeys.value.filter(Boolean)))
if (keys.length === 0) { if (keys.length === 0) {
$q.notify({ type: 'warning', message: 'Çevrilecek seçim bulunamadı' }) $q.notify({ type: 'warning', message: 'Çevrilecek seçim bulunamadı' })
return return
} }
@@ -529,10 +677,10 @@ async function translateSelectedRows () {
await loadRows() await loadRows()
$q.notify({ $q.notify({
type: 'positive', type: 'positive',
message: `Seçilenler çevrildi: ${translated}${traceId ? ` | Trace: ${traceId}` : ''}` message: `Seçilenler çevrildi: ${translated}${traceId ? ` | Trace: ${traceId}` : ''}`
}) })
} catch (err) { } catch (err) {
$q.notify({ type: 'negative', message: err?.message || 'Seçili çeviri iÅŸlemi baÅŸarısız' }) $q.notify({ type: 'negative', message: err?.message || 'Seçili çeviri işlemi başarısız' })
} }
} }
@@ -618,11 +766,81 @@ async function syncSources () {
} }
onMounted(() => { onMounted(() => {
loadRows() void loadRows()
}) })
// Cancel any pending debounced filter reload when the page is torn down,
// so loadRows() cannot fire against an unmounted component.
onBeforeUnmount(() => {
  if (filterReloadTimer) {
    clearTimeout(filterReloadTimer)
    filterReloadTimer = null
  }
})
// Reload (debounced) whenever the free-text search query changes.
watch(
  () => filters.value.q,
  () => { scheduleFilterReload() }
)
// After a sort change, top up the loaded rows so sorting has enough data.
watch(
  [() => tablePagination.value.sortBy, () => tablePagination.value.descending],
  () => { void ensureEnoughVisibleRows(120, 4) }
)
</script> </script>
<style scoped> <style scoped>
.translation-page {
height: calc(100vh - 120px);
display: flex;
flex-direction: column;
overflow: hidden;
}
.translation-toolbar {
background: #fff;
padding-top: 6px;
}
.sticky-toolbar {
position: sticky;
top: 0;
z-index: 35;
}
.translation-table {
flex: 1;
min-height: 0;
}
.translation-table :deep(.q-table__middle) {
max-height: calc(100vh - 280px);
overflow: auto;
}
.translation-table :deep(.q-table thead tr th) {
position: sticky;
top: 0;
z-index: 30;
background: #fff;
}
.translation-table :deep(.q-table tbody td) {
vertical-align: top;
padding: 6px;
}
.translation-table :deep(.q-field__native) {
line-height: 1.35;
word-break: break-word;
}
.source-text-label {
white-space: pre-wrap;
word-break: break-word;
line-height: 1.4;
max-height: 11.2em;
overflow: auto;
}
.cell-dirty { .cell-dirty {
background: #fff3cd; background: #fff3cd;
} }
@@ -631,3 +849,4 @@ onMounted(() => {
background: #d9f7e8; background: #d9f7e8;
} }
</style> </style>

View File

@@ -6,13 +6,39 @@ function toText (value) {
} }
function toNumber (value) { function toNumber (value) {
const n = Number(value) const n = parseFlexibleNumber(value)
return Number.isFinite(n) ? Number(n.toFixed(2)) : 0 return Number.isFinite(n) ? Number(n.toFixed(2)) : 0
} }
function mapRow (raw, index) { function parseFlexibleNumber (value) {
if (typeof value === 'number') return value
const text = String(value ?? '').trim().replace(/\s/g, '')
if (!text) return 0
const lastComma = text.lastIndexOf(',')
const lastDot = text.lastIndexOf('.')
let normalized = text
if (lastComma >= 0 && lastDot >= 0) {
// Keep the last separator as decimal, remove the other as thousand.
if (lastComma > lastDot) {
normalized = text.replace(/\./g, '').replace(',', '.')
} else {
normalized = text.replace(/,/g, '')
}
} else if (lastComma >= 0) {
normalized = text.replace(/\./g, '').replace(',', '.')
} else {
normalized = text.replace(/,/g, '')
}
const n = Number(normalized)
return Number.isFinite(n) ? n : 0
}
function mapRow (raw, index, baseIndex = 0) {
return { return {
id: index + 1, id: baseIndex + index + 1,
productCode: toText(raw?.ProductCode), productCode: toText(raw?.ProductCode),
stockQty: toNumber(raw?.StockQty), stockQty: toNumber(raw?.StockQty),
stockEntryDate: toText(raw?.StockEntryDate), stockEntryDate: toText(raw?.StockEntryDate),
@@ -55,34 +81,75 @@ export const useProductPricingStore = defineStore('product-pricing-store', {
state: () => ({ state: () => ({
rows: [], rows: [],
loading: false, loading: false,
error: '' error: '',
hasMore: true
}), }),
actions: { actions: {
async fetchRows () { async fetchRows (options = {}) {
this.loading = true this.loading = true
this.error = '' this.error = ''
const limit = Number(options?.limit) > 0 ? Number(options.limit) : 500
const afterProductCode = toText(options?.afterProductCode)
const append = Boolean(options?.append)
const baseIndex = append ? this.rows.length : 0
const startedAt = Date.now() const startedAt = Date.now()
console.info('[product-pricing][frontend] request:start', { console.info('[product-pricing][frontend] request:start', {
at: new Date(startedAt).toISOString(), at: new Date(startedAt).toISOString(),
timeout_ms: 600000 timeout_ms: 180000,
limit,
after_product_code: afterProductCode || null,
append
}) })
try { try {
const params = { limit }
if (afterProductCode) params.after_product_code = afterProductCode
const res = await api.request({ const res = await api.request({
method: 'GET', method: 'GET',
url: '/pricing/products', url: '/pricing/products',
timeout: 600000 params,
timeout: 180000
}) })
const traceId = res?.headers?.['x-trace-id'] || null const traceId = res?.headers?.['x-trace-id'] || null
const hasMoreHeader = String(res?.headers?.['x-has-more'] || '').toLowerCase()
const nextCursorHeader = toText(res?.headers?.['x-next-cursor'])
const data = Array.isArray(res?.data) ? res.data : [] const data = Array.isArray(res?.data) ? res.data : []
this.rows = data.map((x, i) => mapRow(x, i)) const mapped = data.map((x, i) => mapRow(x, i, baseIndex))
const fallbackNextCursor = mapped.length > 0
? toText(mapped[mapped.length - 1]?.productCode)
: ''
const nextCursor = nextCursorHeader || fallbackNextCursor
if (append) {
const merged = [...this.rows]
const seen = new Set(this.rows.map((x) => x?.productCode))
for (const row of mapped) {
const key = row?.productCode
if (key && seen.has(key)) continue
merged.push(row)
if (key) seen.add(key)
}
this.rows = merged
} else {
this.rows = mapped
}
this.hasMore = hasMoreHeader ? hasMoreHeader === 'true' : mapped.length === limit
console.info('[product-pricing][frontend] request:success', { console.info('[product-pricing][frontend] request:success', {
trace_id: traceId, trace_id: traceId,
duration_ms: Date.now() - startedAt, duration_ms: Date.now() - startedAt,
row_count: this.rows.length row_count: this.rows.length,
fetched_count: mapped.length,
has_more: this.hasMore,
next_cursor: nextCursor || null
}) })
return {
traceId,
fetched: mapped.length,
hasMore: this.hasMore,
nextCursor
}
} catch (err) { } catch (err) {
this.rows = [] if (!append) this.rows = []
this.hasMore = false
const msg = err?.response?.data || err?.message || 'Urun fiyatlandirma listesi alinamadi' const msg = err?.response?.data || err?.message || 'Urun fiyatlandirma listesi alinamadi'
this.error = toText(msg) this.error = toText(msg)
console.error('[product-pricing][frontend] request:error', { console.error('[product-pricing][frontend] request:error', {
@@ -92,6 +159,7 @@ export const useProductPricingStore = defineStore('product-pricing-store', {
status: err?.response?.status || null, status: err?.response?.status || null,
message: this.error message: this.error
}) })
throw err
} finally { } finally {
this.loading = false this.loading = false
} }
@@ -99,7 +167,7 @@ export const useProductPricingStore = defineStore('product-pricing-store', {
updateCell (row, field, val) { updateCell (row, field, val) {
if (!row || !field) return if (!row || !field) return
row[field] = toNumber(String(val ?? '').replace(',', '.')) row[field] = toNumber(val)
}, },
updateBrandGroupSelection (row, val) { updateBrandGroupSelection (row, val) {

View File

@@ -10,12 +10,27 @@ export const useTranslationStore = defineStore('translation', {
}), }),
actions: { actions: {
async fetchRows (filters = {}) { async fetchRows (filters = {}, options = {}) {
this.loading = true this.loading = true
const append = Boolean(options?.append)
try { try {
const res = await api.get('/language/translations', { params: filters }) const res = await api.get('/language/translations', { params: filters })
const payload = res?.data || {} const payload = res?.data || {}
this.rows = Array.isArray(payload.rows) ? payload.rows : [] const incoming = Array.isArray(payload.rows) ? payload.rows : []
if (append) {
const merged = [...this.rows]
const seen = new Set(this.rows.map((x) => x?.id))
for (const row of incoming) {
const id = row?.id
if (!seen.has(id)) {
merged.push(row)
seen.add(id)
}
}
this.rows = merged
} else {
this.rows = incoming
}
this.count = Number(payload.count) || this.rows.length this.count = Number(payload.count) || this.rows.length
} finally { } finally {
this.loading = false this.loading = false