package routes
|
|
|
|
import (
|
|
"database/sql"
|
|
"encoding/json"
|
|
"fmt"
|
|
"log/slog"
|
|
"net/http"
|
|
"os"
|
|
"path/filepath"
|
|
"regexp"
|
|
"strconv"
|
|
"strings"
|
|
|
|
"github.com/google/uuid"
|
|
"github.com/gorilla/mux"
|
|
)
|
|
|
|
// ProductImageItem is one image entry in the JSON response of the
// product-image listing endpoint (GET /api/product-images).
type ProductImageItem struct {
	ID         int64  `json:"id"`           // dfblob row id
	FileName   string `json:"file_name"`    // stored file name ('' when missing in DB)
	FileSize   int64  `json:"file_size"`    // stored file size (0 when missing in DB)
	Storage    string `json:"storage_path"` // raw storage_path value from dfblob
	ContentURL string `json:"content_url"`  // API URL that streams this image's bytes
	UUID       string `json:"uuid,omitempty"`      // lowercase UUID extracted from storage path or file name
	ThumbURL   string `json:"thumb_url,omitempty"` // static thumbnail URL; set only when UUID was found
	FullURL    string `json:"full_url,omitempty"`  // static full-size URL; set only when UUID was found
}
|
|
|
|
// uuidPattern matches a standard 8-4-4-4-12 hex UUID, case-insensitively.
var uuidPattern = regexp.MustCompile(`(?i)[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`)

// normalizeDimParam trims a dimension query parameter and collapses the
// "absent" spellings ("" and "0") to the empty string.
func normalizeDimParam(v string) string {
	trimmed := strings.TrimSpace(v)
	switch trimmed {
	case "", "0":
		return ""
	}
	return trimmed
}

// extractImageUUID pulls the first UUID found in the storage path, falling
// back to the file name, and returns it lowercased. Returns "" when neither
// contains a UUID.
func extractImageUUID(storagePath, fileName string) string {
	for _, candidate := range []string{storagePath, fileName} {
		if match := uuidPattern.FindString(candidate); match != "" {
			return strings.ToLower(match)
		}
	}
	return ""
}
|
|
|
|
// GET /api/product-images?code=...&dim1=...&dim3=...
|
|
func GetProductImagesHandler(pg *sql.DB) http.HandlerFunc {
|
|
return func(w http.ResponseWriter, r *http.Request) {
|
|
reqID := strings.TrimSpace(r.Header.Get("X-Request-ID"))
|
|
if reqID == "" {
|
|
reqID = uuid.NewString()
|
|
}
|
|
w.Header().Set("X-Request-ID", reqID)
|
|
|
|
code := strings.TrimSpace(r.URL.Query().Get("code"))
|
|
dim1 := strings.TrimSpace(r.URL.Query().Get("dim1"))
|
|
if dim1 == "" {
|
|
dim1 = strings.TrimSpace(r.URL.Query().Get("color"))
|
|
}
|
|
dim3 := strings.TrimSpace(r.URL.Query().Get("dim3"))
|
|
if dim3 == "" {
|
|
dim3 = strings.TrimSpace(r.URL.Query().Get("yaka"))
|
|
}
|
|
if dim3 == "" {
|
|
dim3 = strings.TrimSpace(r.URL.Query().Get("renk2"))
|
|
}
|
|
|
|
dim1ID := strings.TrimSpace(r.URL.Query().Get("dim1_id"))
|
|
if dim1ID == "" {
|
|
dim1ID = strings.TrimSpace(r.URL.Query().Get("itemdim1"))
|
|
}
|
|
dim3ID := strings.TrimSpace(r.URL.Query().Get("dim3_id"))
|
|
if dim3ID == "" {
|
|
dim3ID = strings.TrimSpace(r.URL.Query().Get("itemdim3"))
|
|
}
|
|
|
|
if code == "" {
|
|
http.Error(w, "Eksik parametre: code gerekli", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
// Rule: code -> mmitem.id
|
|
var mmItemID int64
|
|
err := pg.QueryRow(`
|
|
SELECT id
|
|
FROM mmitem
|
|
WHERE UPPER(REPLACE(COALESCE(code,''), ' ', '')) = UPPER(REPLACE(COALESCE($1,''), ' ', ''))
|
|
ORDER BY id
|
|
LIMIT 1
|
|
`, code).Scan(&mmItemID)
|
|
if err == sql.ErrNoRows {
|
|
err = pg.QueryRow(`
|
|
SELECT id
|
|
FROM mmitem
|
|
WHERE UPPER(REPLACE(REGEXP_REPLACE(COALESCE(code,''), '^.*-', ''), ' ', '')) =
|
|
UPPER(REPLACE(REGEXP_REPLACE(COALESCE($1,''), '^.*-', ''), ' ', ''))
|
|
ORDER BY id
|
|
LIMIT 1
|
|
`, code).Scan(&mmItemID)
|
|
}
|
|
if err != nil {
|
|
if err == sql.ErrNoRows {
|
|
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
|
_ = json.NewEncoder(w).Encode([]ProductImageItem{})
|
|
return
|
|
}
|
|
http.Error(w, "Gorsel sorgu hatasi: "+err.Error(), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
// Rule:
|
|
// dim1!=0 && dim3!=0 => dimval1=dim1 AND dimval3=dim3
|
|
// dim1!=0 && dim3==0 => dimval1=dim1
|
|
// dim1==0 && dim3==0 => generic photos
|
|
dim1Filter := normalizeDimParam(dim1ID)
|
|
if dim1Filter == "" {
|
|
dim1Filter = normalizeDimParam(dim1)
|
|
}
|
|
dim3Filter := normalizeDimParam(dim3ID)
|
|
if dim3Filter == "" {
|
|
dim3Filter = normalizeDimParam(dim3)
|
|
}
|
|
|
|
query := `
|
|
SELECT
|
|
id,
|
|
COALESCE(file_name,'') AS file_name,
|
|
COALESCE(file_size,0) AS file_size,
|
|
COALESCE(storage_path,'') AS storage_path
|
|
FROM dfblob
|
|
WHERE typ='img'
|
|
AND src_table='mmitem'
|
|
AND src_id=$1`
|
|
args := []interface{}{mmItemID}
|
|
argPos := 2
|
|
if dim1Filter != "" {
|
|
query += fmt.Sprintf(" AND COALESCE(dimval1::text,'') = $%d", argPos)
|
|
args = append(args, dim1Filter)
|
|
argPos++
|
|
if dim3Filter != "" {
|
|
query += fmt.Sprintf(" AND COALESCE(dimval3::text,'') = $%d", argPos)
|
|
args = append(args, dim3Filter)
|
|
argPos++
|
|
}
|
|
}
|
|
query += `
|
|
ORDER BY
|
|
COALESCE(sort_order,999999),
|
|
zlins_dttm DESC,
|
|
id DESC`
|
|
|
|
rows, err := pg.Query(query, args...)
|
|
if err != nil {
|
|
slog.Error("product_images.list.query_failed",
|
|
"req_id", reqID,
|
|
"code", code,
|
|
"dim1", dim1,
|
|
"dim1_id", dim1ID,
|
|
"dim3", dim3,
|
|
"dim3_id", dim3ID,
|
|
"err", err.Error(),
|
|
)
|
|
http.Error(w, "Gorsel sorgu hatasi: "+err.Error(), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
defer rows.Close()
|
|
|
|
items := make([]ProductImageItem, 0, 16)
|
|
for rows.Next() {
|
|
var it ProductImageItem
|
|
if err := rows.Scan(&it.ID, &it.FileName, &it.FileSize, &it.Storage); err != nil {
|
|
continue
|
|
}
|
|
it.ContentURL = fmt.Sprintf("/api/product-images/%d/content", it.ID)
|
|
if u := extractImageUUID(it.Storage, it.FileName); u != "" {
|
|
it.UUID = u
|
|
it.ThumbURL = "/uploads/image/t300/" + u + ".jpg"
|
|
it.FullURL = "/uploads/image/" + u + ".jpg"
|
|
}
|
|
items = append(items, it)
|
|
}
|
|
|
|
slog.Info("product_images.list.ok",
|
|
"req_id", reqID,
|
|
"code", code,
|
|
"dim1", dim1,
|
|
"dim1_id", dim1ID,
|
|
"dim3", dim3,
|
|
"dim3_id", dim3ID,
|
|
"count", len(items),
|
|
)
|
|
|
|
w.Header().Set("Content-Type", "application/json; charset=utf-8")
|
|
_ = json.NewEncoder(w).Encode(items)
|
|
}
|
|
}
|
|
|
|
// GET /api/product-images/{id}/content
|
|
func GetProductImageContentHandler(pg *sql.DB) http.HandlerFunc {
|
|
return func(w http.ResponseWriter, r *http.Request) {
|
|
reqID := strings.TrimSpace(r.Header.Get("X-Request-ID"))
|
|
if reqID == "" {
|
|
reqID = uuid.NewString()
|
|
}
|
|
w.Header().Set("X-Request-ID", reqID)
|
|
|
|
idStr := mux.Vars(r)["id"]
|
|
id, err := strconv.ParseInt(idStr, 10, 64)
|
|
if err != nil || id <= 0 {
|
|
http.Error(w, "Gecersiz gorsel id", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
var (
|
|
fileName string
|
|
storagePath string
|
|
storedInDB bool
|
|
binData []byte
|
|
)
|
|
|
|
err = pg.QueryRow(`
|
|
SELECT
|
|
COALESCE(file_name,''),
|
|
COALESCE(storage_path,''),
|
|
COALESCE(stored_in_db,false),
|
|
bin
|
|
FROM dfblob
|
|
WHERE id = $1
|
|
AND typ = 'img'
|
|
`, id).Scan(&fileName, &storagePath, &storedInDB, &binData)
|
|
if err != nil {
|
|
if err == sql.ErrNoRows {
|
|
http.NotFound(w, r)
|
|
return
|
|
}
|
|
http.Error(w, "Gorsel okunamadi: "+err.Error(), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
if storedInDB && len(binData) > 0 {
|
|
w.Header().Set("Content-Type", http.DetectContentType(binData))
|
|
w.Header().Set("Cache-Control", "public, max-age=3600")
|
|
_, _ = w.Write(binData)
|
|
return
|
|
}
|
|
|
|
resolved, _ := resolveStoragePath(storagePath)
|
|
if resolved == "" {
|
|
http.NotFound(w, r)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Cache-Control", "public, max-age=3600")
|
|
http.ServeFile(w, r, resolved)
|
|
}
|
|
}
|
|
|
|
// resolveStoragePath maps a stored storage_path value to a file on disk.
// It normalizes the raw value (strips query strings, backslashes, URL
// scheme+host, leading "./", "/" and "uploads/" prefixes), builds a list of
// candidate locations (including BLOB_ROOT-relative ones when that env var is
// set), and returns the first candidate that exists as a regular file along
// with the full candidate list for diagnostics. A blank input yields
// ("", nil); no match yields ("", candidates).
func resolveStoragePath(storagePath string) (string, []string) {
	raw := strings.TrimSpace(storagePath)
	if raw == "" {
		return "", nil
	}
	// Drop any query string.
	if q := strings.Index(raw, "?"); q >= 0 {
		raw = raw[:q]
	}

	// Normalize separators, then strip a URL scheme and host if present.
	raw = strings.ReplaceAll(raw, "\\", "/")
	if at := strings.Index(raw, "://"); at >= 0 {
		tail := raw[at+3:]
		if slash := strings.Index(tail, "/"); slash >= 0 {
			raw = tail[slash:]
		}
	}

	raw = strings.TrimPrefix(raw, "./")
	raw = strings.TrimPrefix(raw, "/")
	raw = strings.TrimPrefix(raw, "uploads/")
	raw = filepath.ToSlash(filepath.Clean(raw))

	relUploads := filepath.FromSlash(filepath.Join("uploads", raw))
	candidates := []string{
		filepath.Clean(storagePath),
		filepath.FromSlash(filepath.Clean(strings.TrimPrefix(storagePath, "/"))),
		filepath.FromSlash(filepath.Clean(raw)),
		relUploads,
		filepath.Join(".", relUploads),
		filepath.Join("..", relUploads),
		filepath.Join("..", "..", relUploads),
	}

	// BLOB_ROOT, when set, contributes absolute candidate locations.
	if root := strings.TrimSpace(os.Getenv("BLOB_ROOT")); root != "" {
		candidates = append(candidates,
			filepath.Join(root, raw),
			filepath.Join(root, relUploads),
			filepath.Join(root, "uploads", raw),
		)
	}

	for _, candidate := range candidates {
		if candidate == "" {
			continue
		}
		if info, statErr := os.Stat(candidate); statErr == nil && !info.IsDir() {
			return candidate, candidates
		}
	}

	return "", candidates
}
|