Merge remote-tracking branch 'origin/master'
This commit is contained in:
@@ -3,24 +3,94 @@ package routes
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/queries"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"log"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// GET /api/pricing/products
|
||||
func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
|
||||
started := time.Now()
|
||||
traceID := buildPricingTraceID(r)
|
||||
w.Header().Set("X-Trace-ID", traceID)
|
||||
claims, ok := auth.GetClaimsFromContext(r.Context())
|
||||
if !ok || claims == nil {
|
||||
log.Printf("[ProductPricing] trace=%s unauthorized method=%s path=%s", traceID, r.Method, r.URL.Path)
|
||||
http.Error(w, "unauthorized", http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)
|
||||
|
||||
rows, err := queries.GetProductPricingList()
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 180*time.Second)
|
||||
defer cancel()
|
||||
|
||||
rows, err := queries.GetProductPricingList(ctx)
|
||||
if err != nil {
|
||||
if isPricingTimeoutLike(err, ctx.Err()) {
|
||||
log.Printf(
|
||||
"[ProductPricing] trace=%s timeout user=%s id=%d duration_ms=%d err=%v",
|
||||
traceID,
|
||||
claims.Username,
|
||||
claims.ID,
|
||||
time.Since(started).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "Urun fiyatlandirma listesi zaman asimina ugradi", http.StatusGatewayTimeout)
|
||||
return
|
||||
}
|
||||
log.Printf(
|
||||
"[ProductPricing] trace=%s query_error user=%s id=%d duration_ms=%d err=%v",
|
||||
traceID,
|
||||
claims.Username,
|
||||
claims.ID,
|
||||
time.Since(started).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
log.Printf(
|
||||
"[ProductPricing] trace=%s success user=%s id=%d count=%d duration_ms=%d",
|
||||
traceID,
|
||||
claims.Username,
|
||||
claims.ID,
|
||||
len(rows),
|
||||
time.Since(started).Milliseconds(),
|
||||
)
|
||||
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
_ = json.NewEncoder(w).Encode(rows)
|
||||
}
|
||||
|
||||
func buildPricingTraceID(r *http.Request) string {
|
||||
if r != nil {
|
||||
if id := strings.TrimSpace(r.Header.Get("X-Request-ID")); id != "" {
|
||||
return id
|
||||
}
|
||||
if id := strings.TrimSpace(r.Header.Get("X-Correlation-ID")); id != "" {
|
||||
return id
|
||||
}
|
||||
}
|
||||
return "pricing-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
|
||||
func isPricingTimeoutLike(err error, ctxErr error) bool {
|
||||
if errors.Is(err, context.DeadlineExceeded) || errors.Is(ctxErr, context.DeadlineExceeded) {
|
||||
return true
|
||||
}
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
e := strings.ToLower(err.Error())
|
||||
return strings.Contains(e, "timeout") ||
|
||||
strings.Contains(e, "i/o timeout") ||
|
||||
strings.Contains(e, "wsarecv") ||
|
||||
strings.Contains(e, "connection attempt failed") ||
|
||||
strings.Contains(e, "no connection could be made") ||
|
||||
strings.Contains(e, "failed to respond")
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"bssapp-backend/queries"
|
||||
"encoding/json"
|
||||
@@ -22,7 +23,7 @@ func GetStatementHeadersHandler(w http.ResponseWriter, r *http.Request) {
|
||||
StartDate: r.URL.Query().Get("startdate"),
|
||||
EndDate: r.URL.Query().Get("enddate"),
|
||||
AccountCode: r.URL.Query().Get("accountcode"),
|
||||
LangCode: r.URL.Query().Get("langcode"),
|
||||
LangCode: i18n.ResolveLangCode(r.URL.Query().Get("langcode"), r.Header.Get("Accept-Language")),
|
||||
Parislemler: r.URL.Query()["parislemler"],
|
||||
ExcludeOpening: false,
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"bssapp-backend/queries"
|
||||
"bytes"
|
||||
@@ -40,9 +41,18 @@ const (
|
||||
)
|
||||
|
||||
// Kolonlar
|
||||
var hMainCols = []string{
|
||||
"Belge No", "Tarih", "Vade", "İşlem",
|
||||
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
|
||||
func hMainCols(lang string) []string {
|
||||
return []string{
|
||||
i18n.T(lang, "pdf.main.doc_no"),
|
||||
i18n.T(lang, "pdf.main.date"),
|
||||
i18n.T(lang, "pdf.main.due_date"),
|
||||
i18n.T(lang, "pdf.main.operation"),
|
||||
i18n.T(lang, "pdf.main.description"),
|
||||
i18n.T(lang, "pdf.main.currency"),
|
||||
i18n.T(lang, "pdf.main.debit"),
|
||||
i18n.T(lang, "pdf.main.credit"),
|
||||
i18n.T(lang, "pdf.main.balance"),
|
||||
}
|
||||
}
|
||||
|
||||
var hMainWbase = []float64{
|
||||
@@ -136,7 +146,7 @@ func hCalcRowHeightForText(pdf *gofpdf.Fpdf, text string, colWidth, lineHeight,
|
||||
|
||||
/* ============================ HEADER ============================ */
|
||||
|
||||
func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
|
||||
func hDrawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
|
||||
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
|
||||
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
|
||||
}
|
||||
@@ -149,13 +159,13 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
|
||||
|
||||
pdf.SetFont(hFontFamilyBold, "", 12)
|
||||
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
|
||||
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
|
||||
|
||||
// Bugünün tarihi (sağ üst)
|
||||
today := time.Now().Format("02.01.2006")
|
||||
pdf.SetFont(hFontFamilyReg, "", 9)
|
||||
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
|
||||
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
|
||||
|
||||
// Cari & Tarih kutuları (daha yukarı taşındı)
|
||||
boxY := hMarginT + hLogoW - 6
|
||||
@@ -163,11 +173,11 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
|
||||
|
||||
pdf.Rect(hMarginL, boxY, 140, 11, "")
|
||||
pdf.SetXY(hMarginL+2, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
|
||||
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
|
||||
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
|
||||
|
||||
// Alt çizgi
|
||||
y := boxY + 13
|
||||
@@ -180,7 +190,7 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
|
||||
|
||||
/* ============================ TABLO ============================ */
|
||||
|
||||
func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
func hDrawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
|
||||
x := hMarginL
|
||||
y := pdf.GetY()
|
||||
w := hPageWidth - hMarginL - hMarginR
|
||||
@@ -194,9 +204,9 @@ func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2])
|
||||
|
||||
pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6, 5.0, currency, "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
|
||||
|
||||
txt := "Son Bakiye = " + hFormatCurrencyTR(sonBakiye)
|
||||
txt := i18n.T(lang, "pdf.ending_balance") + " = " + hFormatCurrencyTR(sonBakiye)
|
||||
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
|
||||
|
||||
@@ -282,6 +292,10 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
accountCode := r.URL.Query().Get("accountcode")
|
||||
startDate := r.URL.Query().Get("startdate")
|
||||
endDate := r.URL.Query().Get("enddate")
|
||||
langCode := i18n.ResolveLangCode(
|
||||
r.URL.Query().Get("langcode"),
|
||||
r.Header.Get("Accept-Language"),
|
||||
)
|
||||
rawParis := r.URL.Query()["parislemler"]
|
||||
|
||||
var parislemler []string
|
||||
@@ -292,7 +306,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
}
|
||||
}
|
||||
|
||||
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, parislemler)
|
||||
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
@@ -348,7 +362,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
newPage := func() {
|
||||
pageNum++
|
||||
pdf.AddPage()
|
||||
tableTop := hDrawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
|
||||
tableTop := hDrawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
|
||||
pdf.SetY(tableTop)
|
||||
}
|
||||
|
||||
@@ -356,8 +370,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
|
||||
for _, cur := range order {
|
||||
g := groups[cur]
|
||||
hDrawGroupBar(pdf, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
|
||||
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
|
||||
|
||||
rowIndex := 0
|
||||
for _, h := range g.rows {
|
||||
@@ -372,8 +386,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX)
|
||||
if hNeedNewPage(pdf, rh+hHeaderRowH) {
|
||||
newPage()
|
||||
hDrawGroupBar(pdf, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
|
||||
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
|
||||
}
|
||||
|
||||
hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex)
|
||||
|
||||
@@ -3,6 +3,7 @@ package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/auth"
|
||||
"bssapp-backend/internal/i18n"
|
||||
"bssapp-backend/models"
|
||||
"bssapp-backend/queries"
|
||||
"bytes"
|
||||
@@ -48,10 +49,18 @@ const (
|
||||
logoW = 42.0
|
||||
)
|
||||
|
||||
// Ana tablo kolonları
|
||||
var mainCols = []string{
|
||||
"Belge No", "Tarih", "Vade", "İşlem",
|
||||
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
|
||||
func mainCols(lang string) []string {
|
||||
return []string{
|
||||
i18n.T(lang, "pdf.main.doc_no"),
|
||||
i18n.T(lang, "pdf.main.date"),
|
||||
i18n.T(lang, "pdf.main.due_date"),
|
||||
i18n.T(lang, "pdf.main.operation"),
|
||||
i18n.T(lang, "pdf.main.description"),
|
||||
i18n.T(lang, "pdf.main.currency"),
|
||||
i18n.T(lang, "pdf.main.debit"),
|
||||
i18n.T(lang, "pdf.main.credit"),
|
||||
i18n.T(lang, "pdf.main.balance"),
|
||||
}
|
||||
}
|
||||
|
||||
// Ana tablo kolon genişlikleri (ilk 3 geniş)
|
||||
@@ -68,10 +77,21 @@ var mainWbase = []float64{
|
||||
}
|
||||
|
||||
// Detay tablo kolonları ve genişlikleri
|
||||
var dCols = []string{
|
||||
"Ana Grup", "Alt Grup", "Garson", "Fit", "İçerik",
|
||||
"Ürün", "Renk", "Adet", "Fiyat", "Tutar",
|
||||
func detailCols(lang string) []string {
|
||||
return []string{
|
||||
i18n.T(lang, "pdf.detail.main_group"),
|
||||
i18n.T(lang, "pdf.detail.sub_group"),
|
||||
i18n.T(lang, "pdf.detail.waiter"),
|
||||
i18n.T(lang, "pdf.detail.fit"),
|
||||
i18n.T(lang, "pdf.detail.content"),
|
||||
i18n.T(lang, "pdf.detail.product"),
|
||||
i18n.T(lang, "pdf.detail.color"),
|
||||
i18n.T(lang, "pdf.detail.qty"),
|
||||
i18n.T(lang, "pdf.detail.price"),
|
||||
i18n.T(lang, "pdf.detail.total"),
|
||||
}
|
||||
}
|
||||
|
||||
var dWbase = []float64{
|
||||
30, 28, 22, 20, 56, 30, 22, 20, 20, 26}
|
||||
|
||||
@@ -224,7 +244,7 @@ func drawLabeledBox(pdf *gofpdf.Fpdf, x, y, w, h float64, label, value string, a
|
||||
}
|
||||
}
|
||||
|
||||
func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
|
||||
func drawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
|
||||
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
|
||||
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
|
||||
}
|
||||
@@ -237,13 +257,13 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
|
||||
|
||||
pdf.SetFont(hFontFamilyBold, "", 12)
|
||||
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
|
||||
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
|
||||
|
||||
// Bugünün tarihi (sağ üst)
|
||||
today := time.Now().Format("02.01.2006")
|
||||
pdf.SetFont(hFontFamilyReg, "", 9)
|
||||
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
|
||||
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
|
||||
|
||||
// Cari & Tarih kutuları (daha yukarı taşındı)
|
||||
boxY := hMarginT + hLogoW - 6
|
||||
@@ -251,11 +271,11 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
|
||||
|
||||
pdf.Rect(hMarginL, boxY, 140, 11, "")
|
||||
pdf.SetXY(hMarginL+2, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
|
||||
|
||||
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
|
||||
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
|
||||
|
||||
// Alt çizgi
|
||||
y := boxY + 13
|
||||
@@ -268,7 +288,7 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
|
||||
|
||||
/* ============================ GROUP BAR ============================ */
|
||||
|
||||
func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
func drawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
|
||||
// Kutu alanı (tam genişlik)
|
||||
x := marginL
|
||||
y := pdf.GetY()
|
||||
@@ -285,9 +305,9 @@ func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
|
||||
pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2])
|
||||
|
||||
pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s", currency), "", 0, "L", false, 0, "")
|
||||
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
|
||||
|
||||
txt := "Son Bakiye = " + formatCurrencyTR(sonBakiye)
|
||||
txt := i18n.T(lang, "pdf.ending_balance") + " = " + formatCurrencyTR(sonBakiye)
|
||||
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
|
||||
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
|
||||
|
||||
@@ -430,6 +450,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
accountCode := r.URL.Query().Get("accountcode")
|
||||
startDate := r.URL.Query().Get("startdate")
|
||||
endDate := r.URL.Query().Get("enddate")
|
||||
langCode := i18n.ResolveLangCode(
|
||||
r.URL.Query().Get("langcode"),
|
||||
r.Header.Get("Accept-Language"),
|
||||
)
|
||||
|
||||
// parislemler sanitize
|
||||
rawParis := r.URL.Query()["parislemler"]
|
||||
@@ -445,7 +469,7 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
accountCode, startDate, endDate, parislemler)
|
||||
|
||||
// 1) Header verileri
|
||||
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, parislemler)
|
||||
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
@@ -520,12 +544,12 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
pdf.AddPage()
|
||||
|
||||
// drawPageHeader tablo başlangıç yüksekliğini döndürüyor
|
||||
tableTop := drawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
|
||||
tableTop := drawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
|
||||
|
||||
// Sayfa numarası
|
||||
pdf.SetFont(fontFamilyReg, "", 6)
|
||||
pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3)
|
||||
pdf.CellFormat(28, 5, fmt.Sprintf("Sayfa %d", pageNum), "", 0, "R", false, 0, "")
|
||||
pdf.CellFormat(28, 5, fmt.Sprintf("%s %d", i18n.T(langCode, "pdf.page"), pageNum), "", 0, "R", false, 0, "")
|
||||
|
||||
// Tablo Y konumunu ayarla
|
||||
pdf.SetY(tableTop)
|
||||
@@ -540,8 +564,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
if needNewPage(pdf, groupBarH+headerRowH) {
|
||||
newPage()
|
||||
}
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
|
||||
for _, h := range g.rows {
|
||||
row := []string{
|
||||
@@ -557,8 +581,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
|
||||
if needNewPage(pdf, rh+headerRowH) {
|
||||
newPage()
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
}
|
||||
drawMainDataRow(pdf, row, mainWn, rh)
|
||||
|
||||
@@ -567,10 +591,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
if len(details) > 0 {
|
||||
if needNewPage(pdf, subHeaderRowH) {
|
||||
newPage()
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
}
|
||||
drawDetailHeaderRow(pdf, dCols, dWn)
|
||||
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
|
||||
|
||||
for i, d := range details {
|
||||
drow := []string{
|
||||
@@ -591,9 +615,9 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
|
||||
|
||||
if needNewPage(pdf, rh2) {
|
||||
newPage()
|
||||
drawGroupBar(pdf, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols, mainWn)
|
||||
drawDetailHeaderRow(pdf, dCols, dWn)
|
||||
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
|
||||
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
|
||||
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
|
||||
}
|
||||
// zebra: çift indekslerde açık zemin
|
||||
fill := (i%2 == 0)
|
||||
|
||||
1669
svc/routes/translations.go
Normal file
1669
svc/routes/translations.go
Normal file
@@ -0,0 +1,1669 @@
|
||||
package routes
|
||||
|
||||
import (
|
||||
"bssapp-backend/models"
|
||||
"bytes"
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/lib/pq"
|
||||
)
|
||||
|
||||
var translationLangSet = map[string]struct{}{
|
||||
"tr": {},
|
||||
"en": {},
|
||||
"de": {},
|
||||
"it": {},
|
||||
"es": {},
|
||||
"ru": {},
|
||||
"ar": {},
|
||||
}
|
||||
|
||||
var translationStatusSet = map[string]struct{}{
|
||||
"pending": {},
|
||||
"approved": {},
|
||||
"rejected": {},
|
||||
}
|
||||
|
||||
var translationSourceTypeSet = map[string]struct{}{
|
||||
"dummy": {},
|
||||
"postgre": {},
|
||||
"mssql": {},
|
||||
}
|
||||
|
||||
var (
|
||||
reQuotedText = regexp.MustCompile(`['"]([^'"]{3,120})['"]`)
|
||||
reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
|
||||
reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
|
||||
reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
|
||||
)
|
||||
|
||||
type TranslationUpdatePayload struct {
|
||||
SourceTextTR *string `json:"source_text_tr"`
|
||||
TranslatedText *string `json:"translated_text"`
|
||||
SourceType *string `json:"source_type"`
|
||||
IsManual *bool `json:"is_manual"`
|
||||
Status *string `json:"status"`
|
||||
}
|
||||
|
||||
type UpsertMissingPayload struct {
|
||||
Items []UpsertMissingItem `json:"items"`
|
||||
Languages []string `json:"languages"`
|
||||
}
|
||||
|
||||
type UpsertMissingItem struct {
|
||||
TKey string `json:"t_key"`
|
||||
SourceTextTR string `json:"source_text_tr"`
|
||||
}
|
||||
|
||||
type SyncSourcesPayload struct {
|
||||
AutoTranslate bool `json:"auto_translate"`
|
||||
Languages []string `json:"languages"`
|
||||
Limit int `json:"limit"`
|
||||
OnlyNew *bool `json:"only_new"`
|
||||
}
|
||||
|
||||
type BulkApprovePayload struct {
|
||||
IDs []int64 `json:"ids"`
|
||||
}
|
||||
|
||||
type BulkUpdatePayload struct {
|
||||
Items []BulkUpdateItem `json:"items"`
|
||||
}
|
||||
|
||||
type TranslateSelectedPayload struct {
|
||||
TKeys []string `json:"t_keys"`
|
||||
Languages []string `json:"languages"`
|
||||
Limit int `json:"limit"`
|
||||
}
|
||||
|
||||
type BulkUpdateItem struct {
|
||||
ID int64 `json:"id"`
|
||||
SourceTextTR *string `json:"source_text_tr"`
|
||||
TranslatedText *string `json:"translated_text"`
|
||||
SourceType *string `json:"source_type"`
|
||||
IsManual *bool `json:"is_manual"`
|
||||
Status *string `json:"status"`
|
||||
}
|
||||
|
||||
type TranslationSyncOptions struct {
|
||||
AutoTranslate bool
|
||||
Languages []string
|
||||
Limit int
|
||||
OnlyNew bool
|
||||
TraceID string
|
||||
}
|
||||
|
||||
type TranslationSyncResult struct {
|
||||
SeedCount int `json:"seed_count"`
|
||||
AffectedCount int `json:"affected_count"`
|
||||
AutoTranslated int `json:"auto_translated"`
|
||||
TargetLangs []string `json:"target_languages"`
|
||||
TraceID string `json:"trace_id"`
|
||||
DurationMS int64 `json:"duration_ms"`
|
||||
}
|
||||
|
||||
type sourceSeed struct {
|
||||
TKey string
|
||||
SourceText string
|
||||
SourceType string
|
||||
}
|
||||
|
||||
func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
q := strings.TrimSpace(r.URL.Query().Get("q"))
|
||||
lang := normalizeTranslationLang(r.URL.Query().Get("lang"))
|
||||
status := normalizeTranslationStatus(r.URL.Query().Get("status"))
|
||||
sourceType := normalizeTranslationSourceType(r.URL.Query().Get("source_type"))
|
||||
|
||||
manualFilter := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("manual")))
|
||||
missingOnly := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("missing"))) == "true"
|
||||
|
||||
limit := 0
|
||||
if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
|
||||
if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 50000 {
|
||||
limit = parsed
|
||||
}
|
||||
}
|
||||
|
||||
clauses := []string{"1=1"}
|
||||
args := make([]any, 0, 8)
|
||||
argIndex := 1
|
||||
|
||||
if q != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("(source_text_tr ILIKE $%d OR translated_text ILIKE $%d)", argIndex, argIndex))
|
||||
args = append(args, "%"+q+"%")
|
||||
argIndex++
|
||||
}
|
||||
|
||||
if lang != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("lang_code = $%d", argIndex))
|
||||
args = append(args, lang)
|
||||
argIndex++
|
||||
}
|
||||
|
||||
if status != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("status = $%d", argIndex))
|
||||
args = append(args, status)
|
||||
argIndex++
|
||||
}
|
||||
|
||||
if sourceType != "" {
|
||||
clauses = append(clauses, fmt.Sprintf("COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy') = $%d", argIndex))
|
||||
args = append(args, sourceType)
|
||||
argIndex++
|
||||
}
|
||||
|
||||
switch manualFilter {
|
||||
case "true":
|
||||
clauses = append(clauses, "is_manual = true")
|
||||
case "false":
|
||||
clauses = append(clauses, "is_manual = false")
|
||||
}
|
||||
|
||||
if missingOnly {
|
||||
clauses = append(clauses, "(translated_text IS NULL OR btrim(translated_text) = '')")
|
||||
}
|
||||
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
id,
|
||||
t_key,
|
||||
lang_code,
|
||||
COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
|
||||
source_text_tr,
|
||||
COALESCE(translated_text, '') AS translated_text,
|
||||
is_manual,
|
||||
status,
|
||||
COALESCE(provider, '') AS provider,
|
||||
updated_at
|
||||
FROM mk_translator
|
||||
WHERE %s
|
||||
ORDER BY t_key, lang_code
|
||||
`, strings.Join(clauses, " AND "))
|
||||
if limit > 0 {
|
||||
query += fmt.Sprintf("LIMIT $%d", argIndex)
|
||||
args = append(args, limit)
|
||||
}
|
||||
|
||||
rows, err := db.Query(query, args...)
|
||||
if err != nil {
|
||||
http.Error(w, "translation query error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
list := make([]models.TranslatorRow, 0, 1024)
|
||||
for rows.Next() {
|
||||
var row models.TranslatorRow
|
||||
if err := rows.Scan(
|
||||
&row.ID,
|
||||
&row.TKey,
|
||||
&row.LangCode,
|
||||
&row.SourceType,
|
||||
&row.SourceTextTR,
|
||||
&row.TranslatedText,
|
||||
&row.IsManual,
|
||||
&row.Status,
|
||||
&row.Provider,
|
||||
&row.UpdatedAt,
|
||||
); err != nil {
|
||||
http.Error(w, "translation scan error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
list = append(list, row)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
http.Error(w, "translation rows error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"rows": list,
|
||||
"count": len(list),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func UpdateTranslationRowHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
id, err := strconv.ParseInt(strings.TrimSpace(mux.Vars(r)["id"]), 10, 64)
|
||||
if err != nil || id <= 0 {
|
||||
http.Error(w, "invalid row id", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var payload TranslationUpdatePayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if payload.Status != nil {
|
||||
normalized := normalizeTranslationStatus(*payload.Status)
|
||||
if normalized == "" {
|
||||
http.Error(w, "invalid status", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
payload.Status = &normalized
|
||||
}
|
||||
if payload.SourceType != nil {
|
||||
normalized := normalizeTranslationSourceType(*payload.SourceType)
|
||||
if normalized == "" {
|
||||
http.Error(w, "invalid source_type", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
payload.SourceType = &normalized
|
||||
}
|
||||
|
||||
updateQuery := `
|
||||
UPDATE mk_translator
|
||||
SET
|
||||
source_text_tr = COALESCE($2, source_text_tr),
|
||||
translated_text = COALESCE($3, translated_text),
|
||||
is_manual = COALESCE($4, is_manual),
|
||||
status = COALESCE($5, status),
|
||||
provider_meta = CASE
|
||||
WHEN $6::text IS NULL THEN provider_meta
|
||||
ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
|
||||
END,
|
||||
updated_at = NOW()
|
||||
WHERE id = $1
|
||||
RETURNING
|
||||
id,
|
||||
t_key,
|
||||
lang_code,
|
||||
COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
|
||||
source_text_tr,
|
||||
COALESCE(translated_text, '') AS translated_text,
|
||||
is_manual,
|
||||
status,
|
||||
COALESCE(provider, '') AS provider,
|
||||
updated_at
|
||||
`
|
||||
|
||||
var row models.TranslatorRow
|
||||
err = db.QueryRow(
|
||||
updateQuery,
|
||||
id,
|
||||
nullableString(payload.SourceTextTR),
|
||||
nullableString(payload.TranslatedText),
|
||||
payload.IsManual,
|
||||
payload.Status,
|
||||
nullableString(payload.SourceType),
|
||||
).Scan(
|
||||
&row.ID,
|
||||
&row.TKey,
|
||||
&row.LangCode,
|
||||
&row.SourceType,
|
||||
&row.SourceTextTR,
|
||||
&row.TranslatedText,
|
||||
&row.IsManual,
|
||||
&row.Status,
|
||||
&row.Provider,
|
||||
&row.UpdatedAt,
|
||||
)
|
||||
if err == sql.ErrNoRows {
|
||||
http.Error(w, "translation row not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
http.Error(w, "translation update error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(row)
|
||||
}
|
||||
}
|
||||
|
||||
func UpsertMissingTranslationsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload UpsertMissingPayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
items := normalizeMissingItems(payload.Items)
|
||||
if len(items) == 0 {
|
||||
http.Error(w, "items required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
languages := normalizeTargetLanguages(payload.Languages)
|
||||
affected, err := upsertMissingRows(db, items, languages, "dummy")
|
||||
if err != nil {
|
||||
http.Error(w, "upsert missing error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"items": len(items),
|
||||
"target_langs": languages,
|
||||
"affected_count": affected,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func SyncTranslationSourcesHandler(pgDB *sql.DB, mssqlDB *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload SyncSourcesPayload
|
||||
_ = json.NewDecoder(r.Body).Decode(&payload)
|
||||
traceID := requestTraceID(r)
|
||||
w.Header().Set("X-Trace-ID", traceID)
|
||||
start := time.Now()
|
||||
onlyNew := payload.OnlyNew == nil || *payload.OnlyNew
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=request auto_translate=%t only_new=%t limit=%d langs=%v",
|
||||
traceID,
|
||||
payload.AutoTranslate,
|
||||
onlyNew,
|
||||
payload.Limit,
|
||||
payload.Languages,
|
||||
)
|
||||
|
||||
result, err := PerformTranslationSync(pgDB, mssqlDB, TranslationSyncOptions{
|
||||
AutoTranslate: payload.AutoTranslate,
|
||||
Languages: payload.Languages,
|
||||
Limit: payload.Limit,
|
||||
OnlyNew: onlyNew,
|
||||
TraceID: traceID,
|
||||
})
|
||||
if err != nil {
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=error duration_ms=%d err=%v",
|
||||
traceID,
|
||||
time.Since(start).Milliseconds(),
|
||||
err,
|
||||
)
|
||||
http.Error(w, "translation source sync error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf(
|
||||
"[TranslationSync] trace=%s stage=response duration_ms=%d seeds=%d affected=%d auto_translated=%d",
|
||||
traceID,
|
||||
time.Since(start).Milliseconds(),
|
||||
result.SeedCount,
|
||||
result.AffectedCount,
|
||||
result.AutoTranslated,
|
||||
)
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"trace_id": traceID,
|
||||
"result": result,
|
||||
"seed_count": result.SeedCount,
|
||||
"affected_count": result.AffectedCount,
|
||||
"auto_translated": result.AutoTranslated,
|
||||
"target_languages": result.TargetLangs,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TranslateSelectedTranslationsHandler machine-translates the still-pending
// rows belonging to an explicit set of t_keys (capped at 5000 keys per
// request) for the requested target languages.
func TranslateSelectedTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var payload TranslateSelectedPayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}

		keys := normalizeStringList(payload.TKeys, 5000)
		if len(keys) == 0 {
			http.Error(w, "t_keys required", http.StatusBadRequest)
			return
		}

		targetLangs := normalizeTargetLanguages(payload.Languages)
		// Row limit: default to one row per key/language pair, guard against
		// a zero product, then clamp to a 50k hard ceiling.
		limit := payload.Limit
		if limit <= 0 {
			limit = len(keys) * len(targetLangs)
		}
		if limit <= 0 {
			limit = 1000
		}
		if limit > 50000 {
			limit = 50000
		}

		traceID := requestTraceID(r)
		w.Header().Set("X-Trace-ID", traceID)
		start := time.Now()
		log.Printf(
			"[TranslationSelected] trace=%s stage=request keys=%d limit=%d langs=%v",
			traceID,
			len(keys),
			limit,
			targetLangs,
		)

		translatedCount, err := autoTranslatePendingRowsForKeys(db, targetLangs, limit, keys, traceID)
		if err != nil {
			log.Printf(
				"[TranslationSelected] trace=%s stage=error duration_ms=%d err=%v",
				traceID,
				time.Since(start).Milliseconds(),
				err,
			)
			http.Error(w, "translate selected error", http.StatusInternalServerError)
			return
		}

		log.Printf(
			"[TranslationSelected] trace=%s stage=done duration_ms=%d translated=%d",
			traceID,
			time.Since(start).Milliseconds(),
			translatedCount,
		)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         traceID,
			"translated_count": translatedCount,
			"key_count":        len(keys),
			"target_languages": targetLangs,
			"duration_ms":      time.Since(start).Milliseconds(),
		})
	}
}
|
||||
|
||||
func BulkApproveTranslationsHandler(db *sql.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
||||
|
||||
var payload BulkApprovePayload
|
||||
if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
|
||||
http.Error(w, "invalid payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
ids := normalizeIDListInt64(payload.IDs)
|
||||
if len(ids) == 0 {
|
||||
http.Error(w, "ids required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
res, err := db.Exec(`
|
||||
UPDATE mk_translator
|
||||
SET
|
||||
status = 'approved',
|
||||
is_manual = true,
|
||||
updated_at = NOW(),
|
||||
provider_meta = jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{is_new}', 'false'::jsonb, true)
|
||||
WHERE id = ANY($1)
|
||||
`, pq.Array(ids))
|
||||
if err != nil {
|
||||
http.Error(w, "bulk approve error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
affected, _ := res.RowsAffected()
|
||||
|
||||
_ = json.NewEncoder(w).Encode(map[string]any{
|
||||
"success": true,
|
||||
"affected_count": affected,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// BulkUpdateTranslationsHandler applies partial updates to a batch of
// translation rows inside a single transaction. Each field in an item is
// optional: NULL parameters fall through the SQL COALESCE so the stored
// value is kept untouched.
func BulkUpdateTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var payload BulkUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		if len(payload.Items) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}

		tx, err := db.Begin()
		if err != nil {
			http.Error(w, "transaction start error", http.StatusInternalServerError)
			return
		}
		// Rollback is a no-op after a successful Commit; it only fires on the
		// early-return error paths below.
		defer tx.Rollback()

		affected := 0
		for _, it := range payload.Items {
			// Skip malformed entries rather than failing the whole batch.
			if it.ID <= 0 {
				continue
			}
			status := normalizeOptionalStatus(it.Status)
			sourceType := normalizeOptionalSourceType(it.SourceType)
			// source_type lives inside provider_meta; only rewrite the jsonb
			// when an explicit value was supplied ($6 non-NULL).
			res, err := tx.Exec(`
				UPDATE mk_translator
				SET
					source_text_tr = COALESCE($2, source_text_tr),
					translated_text = COALESCE($3, translated_text),
					is_manual = COALESCE($4, is_manual),
					status = COALESCE($5, status),
					provider_meta = CASE
						WHEN $6::text IS NULL THEN provider_meta
						ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
					END,
					updated_at = NOW()
				WHERE id = $1
			`, it.ID, nullableString(it.SourceTextTR), nullableString(it.TranslatedText), it.IsManual, status, sourceType)
			if err != nil {
				// deferred Rollback undoes any prior item updates.
				http.Error(w, "bulk update error", http.StatusInternalServerError)
				return
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}

		if err := tx.Commit(); err != nil {
			http.Error(w, "transaction commit error", http.StatusInternalServerError)
			return
		}

		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": affected,
		})
	}
}
|
||||
|
||||
// PerformTranslationSync runs the full source-sync pipeline:
//
//  1. collect seed texts from Postgres, MSSQL and the UI sources (capped by limit);
//  2. remap seeds whose text already exists onto the existing t_key;
//  3. optionally drop seeds that already exist (OnlyNew);
//  4. upsert the remaining seeds for 'tr' plus every target language;
//  5. optionally machine-translate the pending rows that were just created.
//
// Every stage is logged under the supplied (or generated) trace id.
func PerformTranslationSync(pgDB *sql.DB, mssqlDB *sql.DB, options TranslationSyncOptions) (TranslationSyncResult, error) {
	traceID := strings.TrimSpace(options.TraceID)
	if traceID == "" {
		traceID = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	start := time.Now()
	// Clamp the seed limit to a sane default; 0/negative and absurd values
	// both fall back to 20000.
	limit := options.Limit
	if limit <= 0 || limit > 100000 {
		limit = 20000
	}
	targetLangs := normalizeTargetLanguages(options.Languages)
	log.Printf(
		"[TranslationSync] trace=%s stage=start auto_translate=%t only_new=%t limit=%d langs=%v",
		traceID,
		options.AutoTranslate,
		options.OnlyNew,
		limit,
		targetLangs,
	)

	collectStart := time.Now()
	seeds := collectSourceSeeds(pgDB, mssqlDB, limit)
	// Reuse must happen before the OnlyNew filter so that remapped keys are
	// recognized as already existing.
	seeds, reusedByText := reuseExistingSeedKeys(pgDB, seeds)
	log.Printf(
		"[TranslationSync] trace=%s stage=collect done_ms=%d total=%d reused_by_text=%d sources=%s",
		traceID,
		time.Since(collectStart).Milliseconds(),
		len(seeds),
		reusedByText,
		formatSourceCounts(countSeedsBySource(seeds)),
	)
	if options.OnlyNew {
		before := len(seeds)
		filterStart := time.Now()
		seeds = filterNewSeeds(pgDB, seeds)
		log.Printf(
			"[TranslationSync] trace=%s stage=filter_only_new done_ms=%d before=%d after=%d skipped=%d",
			traceID,
			time.Since(filterStart).Milliseconds(),
			before,
			len(seeds),
			before-len(seeds),
		)
	}
	// Nothing new to write: report an empty (but successful) result.
	if len(seeds) == 0 {
		return TranslationSyncResult{
			TargetLangs: targetLangs,
			TraceID:     traceID,
			DurationMS:  time.Since(start).Milliseconds(),
		}, nil
	}

	upsertStart := time.Now()
	affected, err := upsertSourceSeeds(pgDB, seeds, targetLangs)
	if err != nil {
		return TranslationSyncResult{}, err
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=upsert done_ms=%d affected=%d",
		traceID,
		time.Since(upsertStart).Milliseconds(),
		affected,
	)

	autoTranslated := 0
	if options.AutoTranslate {
		autoStart := time.Now()
		var autoErr error
		// Only translate rows for the keys touched by this run.
		autoTranslated, autoErr = autoTranslatePendingRowsForKeys(pgDB, targetLangs, limit, uniqueSeedKeys(seeds), traceID)
		// Auto-translate failures are logged but do not fail the sync:
		// the upsert already succeeded and translation can be retried.
		if autoErr != nil {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d err=%v",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
				autoErr,
			)
		} else {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
			)
		}
	}

	result := TranslationSyncResult{
		SeedCount:      len(seeds),
		AffectedCount:  affected,
		AutoTranslated: autoTranslated,
		TargetLangs:    targetLangs,
		TraceID:        traceID,
		DurationMS:     time.Since(start).Milliseconds(),
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=done duration_ms=%d seeds=%d affected=%d auto_translated=%d",
		traceID,
		result.DurationMS,
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
	)
	return result, nil
}
|
||||
|
||||
// upsertMissingRows inserts (or refreshes) a batch of translation items in a
// single transaction: one approved 'tr' seed row per item, plus one pending
// row per target language. Returns the total number of rows touched.
func upsertMissingRows(db *sql.DB, items []UpsertMissingItem, languages []string, forcedSourceType string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// No-op after a successful Commit; undoes partial work on error returns.
	defer tx.Rollback()

	affected := 0
	for _, it := range items {
		sourceType := forcedSourceType
		if sourceType == "" {
			sourceType = "dummy"
		}

		// 'tr' row: source text doubles as the translation, pre-approved.
		res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($3::text), true),
				updated_at = NOW()
		`, it.TKey, it.SourceTextTR, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}

		// Target-language rows start life as pending with no translation;
		// on conflict only the source text and meta are refreshed, an
		// existing translation is left alone.
		for _, lang := range languages {
			res, err := tx.Exec(`
				INSERT INTO mk_translator
					(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
				VALUES
					($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text))
				ON CONFLICT (t_key, lang_code) DO UPDATE
				SET
					source_text_tr = EXCLUDED.source_text_tr,
					provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($4::text), true),
					updated_at = NOW()
			`, it.TKey, lang, it.SourceTextTR, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}

	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
|
||||
|
||||
// upsertSourceSeeds writes collected seeds into mk_translator within one
// transaction: an approved 'tr' row plus a pending row per target language.
// Unlike upsertMissingRows it tracks an "is_new" flag in provider_meta and,
// on conflict, keeps a pre-existing source_type rather than overwriting it.
func upsertSourceSeeds(db *sql.DB, seeds []sourceSeed, languages []string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// No-op after a successful Commit; undoes partial work on error returns.
	defer tx.Rollback()

	affected := 0
	for _, seed := range seeds {
		// Skip unusable seeds instead of aborting the whole batch.
		if seed.TKey == "" || seed.SourceText == "" {
			continue
		}
		sourceType := normalizeTranslationSourceType(seed.SourceType)
		if sourceType == "" {
			sourceType = "dummy"
		}

		// 'tr' seed row: source text doubles as the translation, marked
		// approved and is_new=false. On conflict the COALESCE(NULLIF(...))
		// keeps any source_type already stored on the row.
		res, err := tx.Exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text, 'is_new', false))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(
					COALESCE(mk_translator.provider_meta, '{}'::jsonb),
					'{source_type}',
					to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $3::text)),
					true
				),
				updated_at = NOW()
		`, seed.TKey, seed.SourceText, sourceType)
		if err != nil {
			return 0, err
		}
		if n, _ := res.RowsAffected(); n > 0 {
			affected += int(n)
		}

		// One pending row per target language, flagged is_new=true so the
		// auto-translate stage can pick it up; existing translations are
		// not clobbered on conflict.
		for _, lang := range languages {
			res, err := tx.Exec(`
				INSERT INTO mk_translator
					(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
				VALUES
					($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text, 'is_new', true))
				ON CONFLICT (t_key, lang_code) DO UPDATE
				SET
					source_text_tr = EXCLUDED.source_text_tr,
					provider_meta = jsonb_set(
						COALESCE(mk_translator.provider_meta, '{}'::jsonb),
						'{source_type}',
						to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $4::text)),
						true
					),
					updated_at = NOW()
			`, seed.TKey, lang, seed.SourceText, sourceType)
			if err != nil {
				return 0, err
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}
	}

	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return affected, nil
}
|
||||
|
||||
func collectSourceSeeds(pgDB *sql.DB, mssqlDB *sql.DB, limit int) []sourceSeed {
|
||||
seen := map[string]struct{}{}
|
||||
out := make([]sourceSeed, 0, limit)
|
||||
|
||||
appendSeed := func(seed sourceSeed) {
|
||||
if seed.TKey == "" || seed.SourceText == "" || seed.SourceType == "" {
|
||||
return
|
||||
}
|
||||
key := normalizeSeedTextKey(seed.SourceText)
|
||||
if _, ok := seen[key]; ok {
|
||||
return
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
out = append(out, seed)
|
||||
}
|
||||
|
||||
for _, row := range collectPostgreSeeds(pgDB, limit) {
|
||||
appendSeed(row)
|
||||
if len(out) >= limit {
|
||||
return out
|
||||
}
|
||||
}
|
||||
for _, row := range collectMSSQLSeeds(mssqlDB, limit-len(out)) {
|
||||
appendSeed(row)
|
||||
if len(out) >= limit {
|
||||
return out
|
||||
}
|
||||
}
|
||||
for _, row := range collectDummySeeds(limit - len(out)) {
|
||||
appendSeed(row)
|
||||
if len(out) >= limit {
|
||||
return out
|
||||
}
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func collectPostgreSeeds(pgDB *sql.DB, limit int) []sourceSeed {
|
||||
if pgDB == nil || limit <= 0 {
|
||||
return nil
|
||||
}
|
||||
rows, err := pgDB.Query(`
|
||||
SELECT table_name, column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
ORDER BY table_name, ordinal_position
|
||||
LIMIT $1
|
||||
`, limit)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
out := make([]sourceSeed, 0, limit)
|
||||
for rows.Next() && len(out) < limit {
|
||||
var tableName, columnName string
|
||||
if err := rows.Scan(&tableName, &columnName); err != nil {
|
||||
continue
|
||||
}
|
||||
text := normalizeDisplayText(columnName)
|
||||
key := makeTextBasedSeedKey(text)
|
||||
out = append(out, sourceSeed{
|
||||
TKey: key,
|
||||
SourceText: text,
|
||||
SourceType: "postgre",
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func collectMSSQLSeeds(mssqlDB *sql.DB, limit int) []sourceSeed {
|
||||
if mssqlDB == nil || limit <= 0 {
|
||||
return nil
|
||||
}
|
||||
maxPerRun := parsePositiveIntEnv("TRANSLATION_MSSQL_SEED_LIMIT", 2500)
|
||||
if limit > maxPerRun {
|
||||
limit = maxPerRun
|
||||
}
|
||||
timeoutSec := parsePositiveIntEnv("TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC", 20)
|
||||
query := fmt.Sprintf(`
|
||||
SELECT TOP (%d) TABLE_NAME, COLUMN_NAME
|
||||
FROM INFORMATION_SCHEMA.COLUMNS
|
||||
ORDER BY TABLE_NAME, ORDINAL_POSITION
|
||||
`, limit)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeoutSec)*time.Second)
|
||||
defer cancel()
|
||||
rows, err := mssqlDB.QueryContext(ctx, query)
|
||||
if err != nil {
|
||||
log.Printf("[TranslationSync] stage=collect_mssql skipped err=%v", err)
|
||||
return nil
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
out := make([]sourceSeed, 0, limit)
|
||||
for rows.Next() && len(out) < limit {
|
||||
var tableName, columnName string
|
||||
if err := rows.Scan(&tableName, &columnName); err != nil {
|
||||
continue
|
||||
}
|
||||
text := normalizeDisplayText(columnName)
|
||||
key := makeTextBasedSeedKey(text)
|
||||
out = append(out, sourceSeed{
|
||||
TKey: key,
|
||||
SourceText: text,
|
||||
SourceType: "mssql",
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func collectDummySeeds(limit int) []sourceSeed {
|
||||
if limit <= 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
root := detectProjectRoot()
|
||||
if root == "" {
|
||||
return nil
|
||||
}
|
||||
uiRoot := filepath.Join(root, "ui", "src")
|
||||
if _, err := os.Stat(uiRoot); err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
out := make([]sourceSeed, 0, limit)
|
||||
seen := make(map[string]struct{}, limit)
|
||||
|
||||
_ = filepath.WalkDir(uiRoot, func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil || d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
ext := strings.ToLower(filepath.Ext(path))
|
||||
if ext != ".vue" && ext != ".js" && ext != ".ts" {
|
||||
return nil
|
||||
}
|
||||
|
||||
b, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
matches := reQuotedText.FindAllStringSubmatch(string(b), -1)
|
||||
for _, m := range matches {
|
||||
text := strings.TrimSpace(m[1])
|
||||
if !isCandidateText(text) {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[text]; ok {
|
||||
continue
|
||||
}
|
||||
seen[text] = struct{}{}
|
||||
key := makeTextBasedSeedKey(text)
|
||||
out = append(out, sourceSeed{
|
||||
TKey: key,
|
||||
SourceText: text,
|
||||
SourceType: "dummy",
|
||||
})
|
||||
if len(out) >= limit {
|
||||
return errors.New("limit reached")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// autoTranslatePendingRows is a convenience wrapper that runs the keyed
// auto-translation with no key filter and an auto-generated trace id.
// NOTE(review): with nil keys the keyed variant returns (0, nil) immediately
// (it skips when len(keys)==0) — confirm whether an unfiltered run was ever
// intended here.
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
	return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
}
|
||||
|
||||
// autoTranslatePendingRowsForKeys machine-translates (via Azure Translator)
// up to limit untranslated, non-manual rows whose t_key is in keys and whose
// lang_code is in langs. Each successful translation is written back
// immediately; per-row failures are counted but do not abort the run.
// Returns the number of rows translated and persisted.
func autoTranslatePendingRowsForKeys(db *sql.DB, langs []string, limit int, keys []string, traceID string) (int, error) {
	traceID = strings.TrimSpace(traceID)
	if traceID == "" {
		traceID = "trauto-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}

	// An empty key set means nothing to do (the t_key = ANY($3) filter below
	// would match no rows anyway).
	if len(keys) == 0 {
		log.Printf("[TranslationAuto] trace=%s stage=skip reason=no_keys", traceID)
		return 0, nil
	}

	start := time.Now()
	// Oldest rows first so repeated runs make forward progress.
	// NOTE(review): langs goes through pqArray while keys uses pq.Array —
	// presumably pqArray is a local alias; confirm and unify.
	rows, err := db.Query(`
		SELECT id, lang_code, source_text_tr
		FROM mk_translator
		WHERE lang_code = ANY($1)
		  AND t_key = ANY($3)
		  AND (translated_text IS NULL OR btrim(translated_text) = '')
		  AND is_manual = false
		ORDER BY updated_at ASC
		LIMIT $2
	`, pqArray(langs), limit, pq.Array(keys))
	if err != nil {
		return 0, err
	}
	defer rows.Close()

	// pending is one translatable row awaiting an Azure call.
	type pending struct {
		ID   int64
		Lang string
		Text string
	}
	list := make([]pending, 0, limit)
	pendingByLang := map[string]int{}
	sourceChars := 0
	for rows.Next() {
		var p pending
		if err := rows.Scan(&p.ID, &p.Lang, &p.Text); err != nil {
			continue
		}
		if strings.TrimSpace(p.Text) == "" {
			continue
		}
		p.Lang = normalizeTranslationLang(p.Lang)
		if p.Lang == "" {
			continue
		}
		list = append(list, p)
		pendingByLang[p.Lang]++
		// Character volume is tracked for cost/throughput visibility.
		sourceChars += len([]rune(strings.TrimSpace(p.Text)))
	}
	if err := rows.Err(); err != nil {
		return 0, err
	}

	log.Printf(
		"[TranslationAuto] trace=%s stage=prepare candidates=%d limit=%d keys=%d langs=%v source_chars=%d pending_by_lang=%s",
		traceID,
		len(list),
		limit,
		len(keys),
		langs,
		sourceChars,
		formatLangCounts(pendingByLang),
	)
	if len(list) == 0 {
		log.Printf(
			"[TranslationAuto] trace=%s stage=done duration_ms=%d translated=0 failed_translate=0 failed_update=0 rps=0.00",
			traceID,
			time.Since(start).Milliseconds(),
		)
		return 0, nil
	}

	done := 0
	failedTranslate := 0
	failedUpdate := 0
	doneByLang := map[string]int{}
	// Progress logging cadence: every N rows or every S seconds, whichever
	// comes first (both env-tunable).
	progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
	if progressEvery <= 0 {
		progressEvery = 100
	}
	progressSec := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_SEC", 15)
	if progressSec <= 0 {
		progressSec = 15
	}
	progressTicker := time.Duration(progressSec) * time.Second
	lastProgress := time.Now()

	for i, p := range list {
		tr, err := callAzureTranslate(p.Text, p.Lang)
		if err != nil || strings.TrimSpace(tr) == "" {
			failedTranslate++
			continue
		}
		// Write-back stays 'pending' so a human still reviews the machine
		// translation before approval.
		_, err = db.Exec(`
			UPDATE mk_translator
			SET translated_text = $2,
				status = 'pending',
				is_manual = false,
				provider = 'azure_translator',
				updated_at = NOW()
			WHERE id = $1
		`, p.ID, strings.TrimSpace(tr))
		if err != nil {
			failedUpdate++
			continue
		}
		done++
		doneByLang[p.Lang]++

		processed := i + 1
		shouldLogProgress := processed%progressEvery == 0 || time.Since(lastProgress) >= progressTicker || processed == len(list)
		if shouldLogProgress {
			elapsed := time.Since(start)
			rps := float64(done)
			if elapsed > 0 {
				rps = float64(done) / elapsed.Seconds()
			}
			log.Printf(
				"[TranslationAuto] trace=%s stage=progress processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f done_by_lang=%s",
				traceID,
				processed,
				len(list),
				done,
				failedTranslate,
				failedUpdate,
				elapsed.Milliseconds(),
				rps,
				formatLangCounts(doneByLang),
			)
			lastProgress = time.Now()
		}
	}

	elapsed := time.Since(start)
	rps := float64(done)
	if elapsed > 0 {
		rps = float64(done) / elapsed.Seconds()
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=done duration_ms=%d candidates=%d translated=%d failed_translate=%d failed_update=%d rps=%.2f done_by_lang=%s",
		traceID,
		elapsed.Milliseconds(),
		len(list),
		done,
		failedTranslate,
		failedUpdate,
		rps,
		formatLangCounts(doneByLang),
	)
	return done, nil
}
|
||||
|
||||
// formatLangCounts renders a language→count map as a deterministic,
// comma-separated "lang=count" list sorted by language, or "-" when empty.
func formatLangCounts(counts map[string]int) string {
	if len(counts) == 0 {
		return "-"
	}
	langs := make([]string, 0, len(counts))
	for lang := range counts {
		langs = append(langs, lang)
	}
	sort.Strings(langs)
	var b strings.Builder
	for i, lang := range langs {
		if i > 0 {
			b.WriteString(",")
		}
		fmt.Fprintf(&b, "%s=%d", lang, counts[lang])
	}
	return b.String()
}
|
||||
|
||||
// filterNewSeeds drops seeds that already exist in mk_translator, matching
// either by t_key or by normalized (lowercased, trimmed) source text.
// Best-effort: on query failure the unfiltered input is returned so the sync
// degrades to "upsert everything" rather than failing.
func filterNewSeeds(pgDB *sql.DB, seeds []sourceSeed) []sourceSeed {
	if pgDB == nil || len(seeds) == 0 {
		return seeds
	}

	keys := uniqueSeedKeys(seeds)
	if len(keys) == 0 {
		return nil
	}
	textKeys := uniqueSeedTextKeys(seeds)

	rows, err := pgDB.Query(`
		SELECT DISTINCT t_key, lower(btrim(source_text_tr)) AS text_key
		FROM mk_translator
		WHERE t_key = ANY($1)
		   OR lower(btrim(source_text_tr)) = ANY($2)
	`, pq.Array(keys), pq.Array(textKeys))
	if err != nil {
		return seeds
	}
	defer rows.Close()

	// Build lookup sets of what already exists, by key and by text.
	existing := make(map[string]struct{}, len(keys))
	existingText := make(map[string]struct{}, len(textKeys))
	for rows.Next() {
		var key string
		var textKey sql.NullString
		if err := rows.Scan(&key, &textKey); err == nil {
			if strings.TrimSpace(key) != "" {
				existing[key] = struct{}{}
			}
			if textKey.Valid {
				t := strings.TrimSpace(textKey.String)
				if t != "" {
					existingText[t] = struct{}{}
				}
			}
		}
	}

	// Keep only seeds unknown by both key and text.
	out := make([]sourceSeed, 0, len(seeds))
	for _, seed := range seeds {
		if _, ok := existing[seed.TKey]; ok {
			continue
		}
		if _, ok := existingText[normalizeSeedTextKey(seed.SourceText)]; ok {
			continue
		}
		out = append(out, seed)
	}
	return out
}
|
||||
|
||||
func uniqueSeedKeys(seeds []sourceSeed) []string {
|
||||
seen := make(map[string]struct{}, len(seeds))
|
||||
out := make([]string, 0, len(seeds))
|
||||
for _, seed := range seeds {
|
||||
if seed.TKey == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[seed.TKey]; ok {
|
||||
continue
|
||||
}
|
||||
seen[seed.TKey] = struct{}{}
|
||||
out = append(out, seed.TKey)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func uniqueSeedTextKeys(seeds []sourceSeed) []string {
|
||||
seen := make(map[string]struct{}, len(seeds))
|
||||
out := make([]string, 0, len(seeds))
|
||||
for _, seed := range seeds {
|
||||
k := normalizeSeedTextKey(seed.SourceText)
|
||||
if k == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[k]; ok {
|
||||
continue
|
||||
}
|
||||
seen[k] = struct{}{}
|
||||
out = append(out, k)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// reuseExistingSeedKeys rewrites each seed's TKey to the t_key already stored
// in mk_translator for the same normalized source text, so re-collected texts
// map onto their existing rows instead of creating duplicates. For a text
// with several stored keys, the lowest-id row wins. Returns the (mutated in
// place) seeds and the number of keys remapped. Best-effort: on query failure
// the seeds are returned unchanged.
func reuseExistingSeedKeys(pgDB *sql.DB, seeds []sourceSeed) ([]sourceSeed, int) {
	if pgDB == nil || len(seeds) == 0 {
		return seeds, 0
	}

	textKeys := uniqueSeedTextKeys(seeds)
	if len(textKeys) == 0 {
		return seeds, 0
	}

	// ROW_NUMBER picks one canonical t_key per text (oldest row by id).
	rows, err := pgDB.Query(`
		SELECT x.text_key, x.t_key
		FROM (
			SELECT
				lower(btrim(source_text_tr)) AS text_key,
				t_key,
				ROW_NUMBER() OVER (
					PARTITION BY lower(btrim(source_text_tr))
					ORDER BY id ASC
				) AS rn
			FROM mk_translator
			WHERE lower(btrim(source_text_tr)) = ANY($1)
		) x
		WHERE x.rn = 1
	`, pq.Array(textKeys))
	if err != nil {
		return seeds, 0
	}
	defer rows.Close()

	existingByText := make(map[string]string, len(textKeys))
	for rows.Next() {
		var textKey, tKey string
		if err := rows.Scan(&textKey, &tKey); err != nil {
			continue
		}
		textKey = strings.TrimSpace(strings.ToLower(textKey))
		tKey = strings.TrimSpace(tKey)
		if textKey == "" || tKey == "" {
			continue
		}
		existingByText[textKey] = tKey
	}

	// Remap in place; count only actual key changes.
	reused := 0
	for i := range seeds {
		textKey := normalizeSeedTextKey(seeds[i].SourceText)
		if textKey == "" {
			continue
		}
		if existingKey, ok := existingByText[textKey]; ok && existingKey != "" && seeds[i].TKey != existingKey {
			seeds[i].TKey = existingKey
			reused++
		}
	}

	return seeds, reused
}
|
||||
|
||||
func countSeedsBySource(seeds []sourceSeed) map[string]int {
|
||||
out := map[string]int{
|
||||
"dummy": 0,
|
||||
"postgre": 0,
|
||||
"mssql": 0,
|
||||
}
|
||||
for _, s := range seeds {
|
||||
key := normalizeTranslationSourceType(s.SourceType)
|
||||
if key == "" {
|
||||
key = "dummy"
|
||||
}
|
||||
out[key]++
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// formatSourceCounts renders the three fixed seed-source buckets as a single
// space-separated log fragment; missing keys read as zero.
func formatSourceCounts(counts map[string]int) string {
	dummy, postgre, mssql := counts["dummy"], counts["postgre"], counts["mssql"]
	return fmt.Sprintf("dummy=%d postgre=%d mssql=%d", dummy, postgre, mssql)
}
|
||||
|
||||
func requestTraceID(r *http.Request) string {
|
||||
if r == nil {
|
||||
return "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
id := strings.TrimSpace(r.Header.Get("X-Request-ID"))
|
||||
if id == "" {
|
||||
id = strings.TrimSpace(r.Header.Get("X-Correlation-ID"))
|
||||
}
|
||||
if id == "" {
|
||||
id = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
||||
// callAzureTranslate translates sourceText into targetLang via the Azure
// Translator REST API (v3.0). Key/endpoint/region come from the
// AZURE_TRANSLATOR_* environment variables; the source language defaults to
// "tr" (overridable via TRANSLATION_SOURCE_LANG). Returns the trimmed
// translation or an error.
func callAzureTranslate(sourceText, targetLang string) (string, error) {
	key := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_KEY"))
	endpoint := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_ENDPOINT"))
	region := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_REGION"))
	if key == "" {
		return "", errors.New("AZURE_TRANSLATOR_KEY not set")
	}
	if endpoint == "" {
		return "", errors.New("AZURE_TRANSLATOR_ENDPOINT not set")
	}
	if region == "" {
		return "", errors.New("AZURE_TRANSLATOR_REGION not set")
	}

	sourceLang := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SOURCE_LANG")))
	if sourceLang == "" {
		sourceLang = "tr"
	}
	// Translating tr -> tr is pointless; treat it as invalid.
	targetLang = normalizeTranslationLang(targetLang)
	if targetLang == "" || targetLang == "tr" {
		return "", fmt.Errorf("invalid target language: %q", targetLang)
	}

	endpoint = strings.TrimRight(endpoint, "/")
	baseURL, err := url.Parse(endpoint + "/translate")
	if err != nil {
		return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
	}
	q := baseURL.Query()
	q.Set("api-version", "3.0")
	q.Set("from", sourceLang)
	q.Set("to", targetLang)
	baseURL.RawQuery = q.Encode()

	// Azure expects a JSON array of {"Text": ...} objects.
	payload := []map[string]string{
		{"Text": sourceText},
	}
	body, _ := json.Marshal(payload)
	req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	req.Header.Set("Ocp-Apim-Subscription-Key", key)
	req.Header.Set("Ocp-Apim-Subscription-Region", region)
	req.Header.Set("Content-Type", "application/json; charset=UTF-8")

	// NOTE(review): a fresh http.Client per call prevents connection reuse;
	// in the auto-translate loop this is one TLS handshake per row — consider
	// a shared package-level client.
	timeoutSec := parsePositiveIntEnv("TRANSLATION_HTTP_TIMEOUT_SEC", 60)
	client := &http.Client{Timeout: time.Duration(timeoutSec) * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode >= 300 {
		// Include at most 1 KiB of the error body for diagnostics.
		raw, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
		return "", fmt.Errorf("azure translator status=%d body=%s", resp.StatusCode, strings.TrimSpace(string(raw)))
	}

	// Response shape: [{"translations":[{"text":...,"to":...}]}].
	var result []struct {
		Translations []struct {
			Text string `json:"text"`
			To   string `json:"to"`
		} `json:"translations"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result) == 0 || len(result[0].Translations) == 0 {
		return "", errors.New("azure translator empty response")
	}
	return strings.TrimSpace(result[0].Translations[0].Text), nil
}
|
||||
|
||||
// nullableString converts an optional string into a SQL parameter value:
// nil pointer -> SQL NULL, otherwise the trimmed text.
// Fix: a whitespace-only/empty value now also maps to NULL. Previously it
// returned "", a non-NULL value that defeated the COALESCE($n, col) partial
// updates in BulkUpdateTranslationsHandler by overwriting stored text with
// an empty string.
func nullableString(v *string) any {
	if v == nil {
		return nil
	}
	s := strings.TrimSpace(*v)
	if s == "" {
		return nil
	}
	return s
}
|
||||
|
||||
func normalizeTranslationLang(v string) string {
|
||||
lang := strings.ToLower(strings.TrimSpace(v))
|
||||
if _, ok := translationLangSet[lang]; ok {
|
||||
return lang
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func normalizeTranslationStatus(v string) string {
|
||||
status := strings.ToLower(strings.TrimSpace(v))
|
||||
if _, ok := translationStatusSet[status]; ok {
|
||||
return status
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func normalizeTranslationSourceType(v string) string {
|
||||
sourceType := strings.ToLower(strings.TrimSpace(v))
|
||||
if _, ok := translationSourceTypeSet[sourceType]; ok {
|
||||
return sourceType
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func normalizeTargetLanguages(list []string) []string {
|
||||
if len(list) == 0 {
|
||||
return []string{"en", "de", "it", "es", "ru", "ar"}
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{}, len(list))
|
||||
out := make([]string, 0, len(list))
|
||||
for _, v := range list {
|
||||
lang := normalizeTranslationLang(v)
|
||||
if lang == "" || lang == "tr" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[lang]; ok {
|
||||
continue
|
||||
}
|
||||
seen[lang] = struct{}{}
|
||||
out = append(out, lang)
|
||||
}
|
||||
if len(out) == 0 {
|
||||
return []string{"en", "de", "it", "es", "ru", "ar"}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func normalizeOptionalStatus(v *string) any {
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
s := normalizeTranslationStatus(*v)
|
||||
if s == "" {
|
||||
return nil
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func normalizeOptionalSourceType(v *string) any {
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
s := normalizeTranslationSourceType(*v)
|
||||
if s == "" {
|
||||
return nil
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func normalizeMissingItems(items []UpsertMissingItem) []UpsertMissingItem {
|
||||
seen := make(map[string]struct{}, len(items))
|
||||
out := make([]UpsertMissingItem, 0, len(items))
|
||||
|
||||
for _, it := range items {
|
||||
key := strings.TrimSpace(it.TKey)
|
||||
source := strings.TrimSpace(it.SourceTextTR)
|
||||
if key == "" || source == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
out = append(out, UpsertMissingItem{
|
||||
TKey: key,
|
||||
SourceTextTR: source,
|
||||
})
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// normalizeIDListInt64 filters out non-positive IDs, removes duplicates, and
// returns the surviving IDs in ascending order. The result is always non-nil.
func normalizeIDListInt64(ids []int64) []int64 {
	uniq := make(map[int64]struct{}, len(ids))
	for _, id := range ids {
		if id > 0 {
			uniq[id] = struct{}{}
		}
	}
	out := make([]int64, 0, len(uniq))
	for id := range uniq {
		out = append(out, id)
	}
	sort.Slice(out, func(i, j int) bool { return out[i] < out[j] })
	return out
}
|
||||
|
||||
// detectProjectRoot looks for the project root by checking whether a "ui"
// directory exists in the current working directory or up to two parent
// directories above it. Returns "" when none of the candidates qualify or
// the working directory cannot be determined.
func detectProjectRoot() string {
	wd, err := os.Getwd()
	if err != nil {
		return ""
	}
	dir := wd
	// Check wd, its parent, and its grandparent — same three candidates,
	// walked iteratively.
	for depth := 0; depth < 3; depth++ {
		if _, statErr := os.Stat(filepath.Join(dir, "ui")); statErr == nil {
			return dir
		}
		dir = filepath.Dir(dir)
	}
	return ""
}
|
||||
|
||||
func isCandidateText(s string) bool {
|
||||
s = strings.TrimSpace(s)
|
||||
if len(s) < 3 || len(s) > 120 {
|
||||
return false
|
||||
}
|
||||
if reBadText.MatchString(s) {
|
||||
return false
|
||||
}
|
||||
if !reHasLetter.MatchString(s) {
|
||||
return false
|
||||
}
|
||||
if strings.Contains(s, "/api/") {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func sanitizeKey(s string) string {
|
||||
s = strings.ToLower(strings.TrimSpace(s))
|
||||
s = strings.ReplaceAll(s, " ", "_")
|
||||
s = reKeyUnsafe.ReplaceAllString(s, "_")
|
||||
s = strings.Trim(s, "_")
|
||||
if s == "" {
|
||||
return "x"
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// normalizeDisplayText turns an identifier-like string into display text:
// underscores become spaces and every run of whitespace collapses to a
// single space, with leading/trailing whitespace removed.
func normalizeDisplayText(s string) string {
	// strings.Fields splits on any whitespace and drops empty tokens, so
	// Join already yields "" for blank input — no separate empty check is
	// needed (the original's `if s == ""` branch after the Join was dead
	// code: s was already "" there).
	return strings.Join(strings.Fields(strings.ReplaceAll(s, "_", " ")), " ")
}
|
||||
|
||||
func hashKey(s string) string {
|
||||
base := sanitizeKey(s)
|
||||
if len(base) > 40 {
|
||||
base = base[:40]
|
||||
}
|
||||
sum := 0
|
||||
for _, r := range s {
|
||||
sum += int(r)
|
||||
}
|
||||
return fmt.Sprintf("%s_%d", base, sum%1000000)
|
||||
}
|
||||
|
||||
func makeTextBasedSeedKey(sourceText string) string {
|
||||
return "txt." + hashKey(normalizeSeedTextKey(sourceText))
|
||||
}
|
||||
|
||||
func normalizeSeedTextKey(s string) string {
|
||||
return strings.ToLower(strings.TrimSpace(normalizeDisplayText(s)))
|
||||
}
|
||||
|
||||
func pqArray(values []string) any {
|
||||
if len(values) == 0 {
|
||||
return pq.Array([]string{})
|
||||
}
|
||||
out := make([]string, 0, len(values))
|
||||
for _, v := range values {
|
||||
out = append(out, strings.TrimSpace(v))
|
||||
}
|
||||
sort.Strings(out)
|
||||
return pq.Array(out)
|
||||
}
|
||||
|
||||
// parsePositiveIntEnv reads a positive integer from the named environment
// variable. Unset, blank, non-numeric, zero, or negative values all fall
// back to the supplied default.
func parsePositiveIntEnv(name string, fallback int) int {
	if raw := strings.TrimSpace(os.Getenv(name)); raw != "" {
		if n, err := strconv.Atoi(raw); err == nil && n > 0 {
			return n
		}
	}
	return fallback
}
|
||||
|
||||
// normalizeStringList trims and deduplicates the given strings, preserving
// first-occurrence order and keeping at most max entries (max <= 0 means no
// effective cap). An empty input yields nil.
func normalizeStringList(items []string, max int) []string {
	if len(items) == 0 {
		return nil
	}
	limit := max
	if limit <= 0 {
		limit = len(items)
	}
	seen := make(map[string]struct{}, len(items))
	out := make([]string, 0, len(items))
	for _, raw := range items {
		v := strings.TrimSpace(raw)
		if v == "" {
			continue
		}
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
		if len(out) == limit {
			break
		}
	}
	return out
}
|
||||
Reference in New Issue
Block a user