Compare commits

..

168 Commits

Author SHA1 Message Date
M_Kececi
c6bdf83f05 Merge remote-tracking branch 'origin/master' 2026-04-17 12:16:50 +03:00
M_Kececi
f9728b8a4c Merge remote-tracking branch 'origin/master' 2026-04-16 17:46:50 +03:00
M_Kececi
307282928c Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:59 +03:00
M_Kececi
29909f3609 Merge remote-tracking branch 'origin/master' 2026-04-16 16:41:55 +03:00
M_Kececi
bb856cb082 Merge remote-tracking branch 'origin/master' 2026-04-16 16:25:45 +03:00
M_Kececi
b065e7192d Merge remote-tracking branch 'origin/master' 2026-04-16 16:00:24 +03:00
M_Kececi
2d369e7d7d Merge remote-tracking branch 'origin/master' 2026-04-16 15:18:44 +03:00
M_Kececi
1831c45a0c Merge remote-tracking branch 'origin/master' 2026-04-15 17:03:25 +03:00
M_Kececi
1a80184cac Merge remote-tracking branch 'origin/master' 2026-04-15 16:43:21 +03:00
M_Kececi
5be7315bdb Merge remote-tracking branch 'origin/master' 2026-04-15 15:54:44 +03:00
M_Kececi
c925af5ba1 Merge remote-tracking branch 'origin/master' 2026-04-14 18:04:19 +03:00
M_Kececi
352a7e26ea Merge remote-tracking branch 'origin/master' 2026-04-14 17:53:58 +03:00
M_Kececi
9ee70eb05a Merge remote-tracking branch 'origin/master' 2026-04-14 17:52:38 +03:00
M_Kececi
8694511e79 Merge remote-tracking branch 'origin/master' 2026-04-14 17:46:15 +03:00
M_Kececi
69ba4b2ecb Merge remote-tracking branch 'origin/master' 2026-04-14 17:34:46 +03:00
M_Kececi
eb628e99c2 Merge remote-tracking branch 'origin/master' 2026-04-14 17:23:24 +03:00
M_Kececi
431441802e Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:18 +03:00
M_Kececi
7457d95bac Merge remote-tracking branch 'origin/master' 2026-04-14 17:05:14 +03:00
M_Kececi
e352b8c47a Merge remote-tracking branch 'origin/master' 2026-04-14 17:03:21 +03:00
M_Kececi
d8b6b7166c Merge remote-tracking branch 'origin/master' 2026-04-14 16:51:27 +03:00
M_Kececi
aec450c3cd Merge remote-tracking branch 'origin/master' 2026-04-14 16:35:21 +03:00
M_Kececi
47fc7a6178 Merge remote-tracking branch 'origin/master' 2026-04-14 16:34:25 +03:00
M_Kececi
214677da1e Merge remote-tracking branch 'origin/master' 2026-04-14 16:17:59 +03:00
M_Kececi
b1a3bbd3c5 Merge remote-tracking branch 'origin/master' 2026-04-04 19:05:11 +03:00
M_Kececi
6467017470 Merge remote-tracking branch 'origin/master' 2026-04-03 16:30:31 +03:00
M_Kececi
bf97e20e79 Merge remote-tracking branch 'origin/master' 2026-04-03 15:47:22 +03:00
M_Kececi
2b04688905 Merge remote-tracking branch 'origin/master' 2026-04-03 15:23:53 +03:00
M_Kececi
79f7fa0974 Merge remote-tracking branch 'origin/master' 2026-04-03 15:18:46 +03:00
M_Kececi
e965eb7c36 Merge remote-tracking branch 'origin/master' 2026-04-03 15:08:31 +03:00
M_Kececi
07c000358e Merge remote-tracking branch 'origin/master' 2026-04-03 14:56:22 +03:00
M_Kececi
415e3db084 Merge remote-tracking branch 'origin/master' 2026-04-03 14:32:19 +03:00
M_Kececi
f46532cee1 Merge remote-tracking branch 'origin/master' 2026-04-03 14:22:09 +03:00
M_Kececi
e1064010f3 Merge remote-tracking branch 'origin/master' 2026-04-03 14:16:27 +03:00
M_Kececi
67ef80936a Merge remote-tracking branch 'origin/master' 2026-04-03 14:02:05 +03:00
M_Kececi
548931f714 Merge remote-tracking branch 'origin/master'
# Conflicts:
#	ui/src/stores/orderentryStore.js
2026-04-03 13:13:14 +03:00
M_Kececi
5adf71c4cc Merge remote-tracking branch 'origin/master' 2026-04-03 13:11:17 +03:00
M_Kececi
c552126ecf Merge remote-tracking branch 'origin/master' 2026-04-02 16:39:48 +03:00
M_Kececi
028c11e042 Merge remote-tracking branch 'origin/master' 2026-04-02 16:30:49 +03:00
M_Kececi
7a98652a8e Merge remote-tracking branch 'origin/master' 2026-04-02 13:51:43 +03:00
M_Kececi
4549152594 Merge remote-tracking branch 'origin/master' 2026-04-02 13:36:22 +03:00
M_Kececi
f5c91abafa Merge remote-tracking branch 'origin/master' 2026-04-02 10:53:47 +03:00
M_Kececi
a97accbdb1 Merge remote-tracking branch 'origin/master' 2026-04-02 10:44:43 +03:00
M_Kececi
4af852c853 Merge remote-tracking branch 'origin/master' 2026-04-02 09:18:15 +03:00
M_Kececi
fce3d8e486 Merge remote-tracking branch 'origin/master' 2026-03-31 17:20:29 +03:00
M_Kececi
526407fdfa Merge remote-tracking branch 'origin/master' 2026-03-31 17:14:23 +03:00
M_Kececi
92f677ae3e Merge remote-tracking branch 'origin/master' 2026-03-31 15:25:20 +03:00
M_Kececi
b6772332cd Merge remote-tracking branch 'origin/master' 2026-03-31 15:15:15 +03:00
M_Kececi
5b6b9a26bd Merge remote-tracking branch 'origin/master' 2026-03-31 15:05:31 +03:00
M_Kececi
1b204bb8ed Merge remote-tracking branch 'origin/master' 2026-03-31 14:59:41 +03:00
M_Kececi
4a67f0f444 Merge remote-tracking branch 'origin/master' 2026-03-31 14:57:39 +03:00
M_Kececi
7b1de24dfb Merge remote-tracking branch 'origin/master' 2026-03-31 14:54:19 +03:00
M_Kececi
afe77171f4 Merge remote-tracking branch 'origin/master' 2026-03-31 14:07:58 +03:00
M_Kececi
173d734883 Merge remote-tracking branch 'origin/master' 2026-03-31 13:34:18 +03:00
M_Kececi
ee9150e45a Merge remote-tracking branch 'origin/master' 2026-03-31 13:28:32 +03:00
M_Kececi
2b40983cee Merge remote-tracking branch 'origin/master' 2026-03-31 13:07:00 +03:00
M_Kececi
ed80e4f492 Merge remote-tracking branch 'origin/master' 2026-03-31 12:46:48 +03:00
M_Kececi
d7d871fb8a Merge remote-tracking branch 'origin/master' 2026-03-31 12:45:22 +03:00
M_Kececi
44439f7908 Merge remote-tracking branch 'origin/master' 2026-03-30 10:00:12 +03:00
M_Kececi
3d0fce8055 Merge remote-tracking branch 'origin/master' 2026-03-30 09:45:26 +03:00
M_Kececi
3896b0d795 Merge remote-tracking branch 'origin/master' 2026-03-30 09:33:47 +03:00
M_Kececi
05c6103a3a Merge remote-tracking branch 'origin/master' 2026-03-29 22:41:17 +03:00
M_Kececi
96ede55936 Merge remote-tracking branch 'origin/master' 2026-03-24 15:38:41 +03:00
M_Kececi
d0e43c03fc Merge remote-tracking branch 'origin/master' 2026-03-24 10:15:51 +03:00
M_Kececi
2b9bcb667e Merge remote-tracking branch 'origin/master' 2026-03-24 10:02:32 +03:00
M_Kececi
6be374717a Merge remote-tracking branch 'origin/master' 2026-03-24 09:05:25 +03:00
M_Kececi
196e42394a Merge remote-tracking branch 'origin/master' 2026-03-24 08:59:48 +03:00
M_Kececi
efdd11a2a7 Merge remote-tracking branch 'origin/master' 2026-03-23 18:12:49 +03:00
M_Kececi
9c573d9101 Merge remote-tracking branch 'origin/master' 2026-03-23 15:48:25 +03:00
M_Kececi
81ca636452 Merge remote-tracking branch 'origin/master' 2026-03-23 15:25:24 +03:00
M_Kececi
cd9aa8a6e0 Merge remote-tracking branch 'origin/master' 2026-03-23 11:03:02 +03:00
M_Kececi
436d25e2fb Merge remote-tracking branch 'origin/master' 2026-03-23 10:27:34 +03:00
M_Kececi
e6e79f8ef4 Merge remote-tracking branch 'origin/master' 2026-03-23 10:16:30 +03:00
M_Kececi
c0053d6058 Merge remote-tracking branch 'origin/master' 2026-03-23 09:58:56 +03:00
M_Kececi
5eab36df69 Merge remote-tracking branch 'origin/master' 2026-03-18 09:29:43 +03:00
M_Kececi
83a55373ea Merge remote-tracking branch 'origin/master' 2026-03-17 14:11:08 +03:00
M_Kececi
569e22e4f8 Merge remote-tracking branch 'origin/master' 2026-03-17 14:05:08 +03:00
M_Kececi
774684d152 Merge remote-tracking branch 'origin/master' 2026-03-17 12:22:52 +03:00
M_Kececi
e16d45002b Merge remote-tracking branch 'origin/master' 2026-03-17 12:15:30 +03:00
M_Kececi
c779e93f43 Merge remote-tracking branch 'origin/master' 2026-03-17 12:04:56 +03:00
M_Kececi
4997d926c7 Merge remote-tracking branch 'origin/master' 2026-03-17 11:53:09 +03:00
M_Kececi
44f4c1abf4 Merge remote-tracking branch 'origin/master' 2026-03-17 11:32:42 +03:00
M_Kececi
06af84204a Merge remote-tracking branch 'origin/master' 2026-03-17 11:23:20 +03:00
M_Kececi
ba8c1dd801 Merge remote-tracking branch 'origin/master' 2026-03-17 11:17:00 +03:00
M_Kececi
84466e87ba Merge remote-tracking branch 'origin/master' 2026-03-17 11:08:43 +03:00
M_Kececi
8a8f384927 Merge remote-tracking branch 'origin/master' 2026-03-17 10:59:28 +03:00
M_Kececi
ab236ec256 Merge remote-tracking branch 'origin/master' 2026-03-16 12:20:02 +03:00
M_Kececi
2a8dc5d3a1 Merge remote-tracking branch 'origin/master' 2026-03-16 12:13:32 +03:00
M_Kececi
804f7fa108 Merge remote-tracking branch 'origin/master' 2026-03-16 10:44:41 +03:00
M_Kececi
c979f31819 Merge remote-tracking branch 'origin/master' 2026-03-16 01:18:54 +03:00
M_Kececi
3df69a3278 Merge remote-tracking branch 'origin/master' 2026-03-16 01:13:26 +03:00
M_Kececi
daa7893d3d Merge remote-tracking branch 'origin/master' 2026-03-16 01:07:25 +03:00
M_Kececi
945a439c03 Merge remote-tracking branch 'origin/master' 2026-03-16 01:02:40 +03:00
M_Kececi
720ba8354f Merge remote-tracking branch 'origin/master' 2026-03-16 00:53:26 +03:00
M_Kececi
e46363a758 Merge remote-tracking branch 'origin/master' 2026-03-16 00:42:35 +03:00
M_Kececi
19e67ad9f5 Merge remote-tracking branch 'origin/master' 2026-03-16 00:13:02 +03:00
M_Kececi
bf1155e958 Merge remote-tracking branch 'origin/master' 2026-03-16 00:08:14 +03:00
M_Kececi
7889ab0f07 Merge remote-tracking branch 'origin/master' 2026-03-16 00:00:02 +03:00
M_Kececi
2a46b2942d Merge remote-tracking branch 'origin/master' 2026-03-15 23:47:50 +03:00
M_Kececi
c080a63ed1 Merge remote-tracking branch 'origin/master' 2026-03-15 23:04:22 +03:00
M_Kececi
e2c04fab5d Merge remote-tracking branch 'origin/master' 2026-03-15 22:59:42 +03:00
M_Kececi
9c76a521c5 Merge remote-tracking branch 'origin/master' 2026-03-15 22:50:04 +03:00
M_Kececi
f08bbd5e28 Merge remote-tracking branch 'origin/master' 2026-03-15 22:28:25 +03:00
M_Kececi
7e98ea66d2 Merge remote-tracking branch 'origin/master' 2026-03-15 22:15:01 +03:00
M_Kececi
d1790b7357 Merge remote-tracking branch 'origin/master' 2026-03-13 17:26:56 +03:00
M_Kececi
f9a864d63c Merge remote-tracking branch 'origin/master' 2026-03-13 17:17:43 +03:00
M_Kececi
899b9fc7cc Merge remote-tracking branch 'origin/master' 2026-03-13 16:48:11 +03:00
M_Kececi
b9322cda2f Merge remote-tracking branch 'origin/master' 2026-03-13 16:44:20 +03:00
M_Kececi
72a5a4f0ac Merge remote-tracking branch 'origin/master' 2026-03-13 16:37:13 +03:00
M_Kececi
bd8dbce39e Merge remote-tracking branch 'origin/master' 2026-03-13 15:13:41 +03:00
M_Kececi
1063ec6e97 Merge remote-tracking branch 'origin/master' 2026-03-13 15:04:58 +03:00
M_Kececi
e341489d6e Merge remote-tracking branch 'origin/master' 2026-03-13 14:40:48 +03:00
M_Kececi
706f530315 Merge remote-tracking branch 'origin/master' 2026-03-13 14:34:46 +03:00
M_Kececi
fd7400ee69 Merge remote-tracking branch 'origin/master' 2026-03-13 14:26:51 +03:00
M_Kececi
4a45fa82d9 Merge remote-tracking branch 'origin/master' 2026-03-13 14:19:51 +03:00
M_Kececi
7455823b1a Merge remote-tracking branch 'origin/master' 2026-03-13 14:15:19 +03:00
M_Kececi
c72640851e Merge remote-tracking branch 'origin/master' 2026-03-13 14:06:22 +03:00
M_Kececi
7091da5b10 Merge remote-tracking branch 'origin/master' 2026-03-13 14:06:18 +03:00
M_Kececi
fd034c9ae6 Merge remote-tracking branch 'origin/master' 2026-03-13 14:06:04 +03:00
M_Kececi
534c1c1806 Merge remote-tracking branch 'origin/master' 2026-03-13 14:00:23 +03:00
M_Kececi
e24cf05b5d Merge remote-tracking branch 'origin/master' 2026-03-13 12:36:46 +03:00
M_Kececi
d68081f6d9 Merge remote-tracking branch 'origin/master' 2026-03-13 12:30:04 +03:00
M_Kececi
6ff8747411 Merge remote-tracking branch 'origin/master' 2026-03-11 17:53:47 +03:00
M_Kececi
aba71341b9 Merge remote-tracking branch 'origin/master' 2026-03-10 17:51:47 +03:00
M_Kececi
d590732f38 Merge remote-tracking branch 'origin/master' 2026-03-10 10:31:03 +03:00
M_Kececi
6f2a6df3d4 Merge remote-tracking branch 'origin/master' 2026-03-10 10:25:15 +03:00
M_Kececi
0d303f0c0f Merge remote-tracking branch 'origin/master' 2026-03-09 13:19:26 +03:00
M_Kececi
6df18ed14d Merge remote-tracking branch 'origin/master' 2026-03-06 14:49:57 +03:00
M_Kececi
807bbad0e7 Merge remote-tracking branch 'origin/master' 2026-03-06 13:59:28 +03:00
M_Kececi
46f4d15ac7 Merge remote-tracking branch 'origin/master' 2026-03-06 12:14:40 +03:00
M_Kececi
ffa8b30b81 Merge remote-tracking branch 'origin/master' 2026-03-06 11:04:00 +03:00
M_Kececi
9e534e9a34 Merge remote-tracking branch 'origin/master' 2026-03-06 10:57:06 +03:00
M_Kececi
9097b5af2d Merge remote-tracking branch 'origin/master' 2026-03-05 11:21:56 +03:00
M_Kececi
dc63a59249 Merge remote-tracking branch 'origin/master' 2026-03-05 11:02:59 +03:00
M_Kececi
4a6ca5a4d2 Merge remote-tracking branch 'origin/master' 2026-03-05 10:45:12 +03:00
M_Kececi
5564dbfbd3 Merge remote-tracking branch 'origin/master' 2026-03-05 09:56:52 +03:00
M_Kececi
431781e765 Merge remote-tracking branch 'origin/master' 2026-03-05 09:56:44 +03:00
M_Kececi
95cdf6c5da Merge remote-tracking branch 'origin/master' 2026-03-05 09:54:02 +03:00
M_Kececi
94244b194a Merge remote-tracking branch 'origin/master' 2026-03-04 17:59:36 +03:00
M_Kececi
b1150c5ef7 Merge remote-tracking branch 'origin/master' 2026-03-04 15:25:27 +03:00
M_Kececi
f40fa0ed18 Merge remote-tracking branch 'origin/master' 2026-03-04 13:50:47 +03:00
M_Kececi
96d782e474 Merge remote-tracking branch 'origin/master' 2026-03-04 13:21:13 +03:00
M_Kececi
4dc0415546 Merge remote-tracking branch 'origin/master' 2026-03-03 23:28:43 +03:00
M_Kececi
da33f30dd0 Merge remote-tracking branch 'origin/master' 2026-03-03 13:59:12 +03:00
M_Kececi
337364b259 Merge remote-tracking branch 'origin/master' 2026-03-03 13:45:15 +03:00
M_Kececi
008eeb3e5f Merge remote-tracking branch 'origin/master' 2026-03-03 13:29:43 +03:00
M_Kececi
d355ef7acd Merge remote-tracking branch 'origin/master' 2026-03-03 13:29:17 +03:00
M_Kececi
4805216808 Merge remote-tracking branch 'origin/master' 2026-03-03 11:35:34 +03:00
M_Kececi
3a574bff6c Merge remote-tracking branch 'origin/master' 2026-03-03 11:29:35 +03:00
M_Kececi
c57c04e12a Merge remote-tracking branch 'origin/master' 2026-03-03 11:26:52 +03:00
M_Kececi
cbcf08728a Merge remote-tracking branch 'origin/master' 2026-03-03 11:20:59 +03:00
M_Kececi
24774863bf Merge remote-tracking branch 'origin/master' 2026-03-03 11:06:12 +03:00
M_Kececi
e1a62df40d Merge remote-tracking branch 'origin/master' 2026-03-03 11:06:01 +03:00
M_Kececi
a6f6110bb5 Merge remote-tracking branch 'origin/master' 2026-03-03 10:47:59 +03:00
M_Kececi
ae8cd892b0 Merge remote-tracking branch 'origin/master' 2026-03-03 10:33:52 +03:00
M_Kececi
0d3983520d Merge remote-tracking branch 'origin/master' 2026-03-03 10:31:09 +03:00
M_Kececi
f0a03f3024 Merge remote-tracking branch 'origin/master' 2026-03-03 10:28:16 +03:00
M_Kececi
ce31aff645 Merge remote-tracking branch 'origin/master' 2026-03-03 10:28:10 +03:00
M_Kececi
b208aa32e2 Merge remote-tracking branch 'origin/master' 2026-03-03 10:16:19 +03:00
M_Kececi
ecf3a8bd07 Merge remote-tracking branch 'origin/master' 2026-03-03 01:11:19 +03:00
M_Kececi
a4f4c2457f Merge remote-tracking branch 'origin/master' 2026-03-03 00:30:34 +03:00
M_Kececi
ea27d34336 Merge remote-tracking branch 'origin/master' 2026-02-27 12:21:15 +03:00
M_Kececi
264f97a5c1 Merge remote-tracking branch 'origin/master' 2026-02-27 11:48:12 +03:00
M_Kececi
15e51e9c39 Merge remote-tracking branch 'origin/master' 2026-02-25 10:40:19 +03:00
M_Kececi
47848fc14d Merge remote-tracking branch 'origin/master' 2026-02-21 22:55:03 +03:00
M_Kececi
34df458e3f Merge remote-tracking branch 'origin/master' 2026-02-21 22:39:25 +03:00
M_Kececi
612a6dc445 Merge remote-tracking branch 'origin/master' 2026-02-21 22:30:09 +03:00
M_Kececi
b70bbb780c Merge remote-tracking branch 'origin/master' 2026-02-21 21:48:00 +03:00
M_Kececi
ac299e2138 Merge remote-tracking branch 'origin/master' 2026-02-21 21:47:10 +03:00
152 changed files with 34353 additions and 3172 deletions

View File

@@ -19,6 +19,8 @@ RUNTIME_PRESERVE_FILES=(
"svc/mail.env"
"svc/fonts"
"svc/public"
"deploy/deploy.sh"
"scripts/deploy.sh"
)
log_step() {
@@ -89,6 +91,115 @@ ensure_ui_permissions() {
find "$ui_root" -type f -exec chmod 644 {} \;
}
# Remove intermediate UI build caches before a fresh build.
clean_ui_build_artifacts() {
cd "$APP_DIR/ui"
# We intentionally do NOT delete dist -> old chunks stay servable briefly, which reduces ChunkLoadError
rm -rf .quasar node_modules/.cache || true
}
# Drop everything nginx has cached so freshly deployed UI assets are served
# immediately. Best-effort: `|| true` keeps a failure from aborting the deploy.
purge_nginx_ui_cache() {
rm -rf /var/cache/nginx/* || true
}
# Best-effort purge of the Cloudflare CDN cache for the site's HTML entry
# points so clients fetch the freshly deployed index.html immediately.
# Reads CF_ZONE_ID / CF_API_TOKEN (skips quietly when unset) and purges the
# URLs in CDN_PURGE_URLS (comma-separated; defaults to site root + index.html).
# Always returns 0: a failed purge must never abort the deploy.
purge_cdn_html_cache() {
local zone_id="${CF_ZONE_ID:-}"
local api_token="${CF_API_TOKEN:-}"
local site_url="${SITE_URL:-https://ss.baggi.com.tr}"
local purge_urls="${CDN_PURGE_URLS:-$site_url/,$site_url/index.html}"
if [[ -z "$zone_id" || -z "$api_token" ]]; then
echo "CDN purge skipped: CF_ZONE_ID / CF_API_TOKEN not set."
return 0
fi
# Split the comma-separated URL list into an array.
IFS=',' read -r -a url_array <<< "$purge_urls"
if [[ ${#url_array[@]} -eq 0 ]]; then
echo "CDN purge skipped: no URLs configured."
return 0
fi
# Build a JSON array of quoted URLs; xargs trims surrounding whitespace.
local files_json=""
local sep=""
local url=""
for raw in "${url_array[@]}"; do
url="$(echo "$raw" | xargs)"
[[ -n "$url" ]] || continue
files_json="${files_json}${sep}\"${url}\""
sep=","
done
if [[ -z "$files_json" ]]; then
echo "CDN purge skipped: URL list resolved to empty."
return 0
fi
local payload
payload="{\"files\":[${files_json}]}"
# Cloudflare purge-by-URL endpoint; curl errors are swallowed (best effort).
local response
response="$(curl -sS -X POST "https://api.cloudflare.com/client/v4/zones/${zone_id}/purge_cache" \
-H "Authorization: Bearer ${api_token}" \
-H "Content-Type: application/json" \
--data "$payload" || true)"
# Crude success probe on the raw JSON body; avoids a jq dependency.
if echo "$response" | grep -q '"success":true'; then
echo "CDN HTML purge completed."
return 0
fi
echo "WARN: CDN purge may have failed. Response: $response"
return 0
}
# Print the first fingerprinted app bundle name (e.g. app.4f9c2d1a.js) found
# in the HTML passed as $1; prints nothing when no bundle reference matches.
extract_app_js_name() {
    local html_doc="$1"
    printf '%s\n' "$html_doc" | grep -oE 'app\.[a-f0-9]+\.js' | head -n1 || true
}
# Compare the fingerprinted app bundle referenced by the locally built
# index.html against the one served live at SITE_URL. Any fetch/parse failure
# degrades to a warning so the deploy still succeeds; a genuine mismatch only
# fails the deploy (return 1) when FAIL_ON_UI_HASH_MISMATCH=true.
verify_live_ui_hash() {
local site_url="${SITE_URL:-https://ss.baggi.com.tr}"
local fail_on_mismatch="${FAIL_ON_UI_HASH_MISMATCH:-false}"
local local_index="$APP_DIR/ui/dist/spa/index.html"
if [[ ! -f "$local_index" ]]; then
echo "WARN: local index not found for hash verify: $local_index"
return 0
fi
# Bundle name from the freshly built index.html (e.g. app.4f9c2d1a.js).
local local_app
local_app="$(extract_app_js_name "$(cat "$local_index")")"
if [[ -z "$local_app" ]]; then
echo "WARN: local app hash parse failed."
return 0
fi
# Fetch the live index, asking intermediaries to bypass their caches.
local live_html
live_html="$(curl -sS -H "Cache-Control: no-cache" -H "Pragma: no-cache" "${site_url}/" || true)"
if [[ -z "$live_html" ]]; then
echo "WARN: live index fetch failed for ${site_url}/"
return 0
fi
local live_app
live_app="$(extract_app_js_name "$live_html")"
if [[ -z "$live_app" ]]; then
echo "WARN: live app hash parse failed."
return 0
fi
if [[ "$local_app" == "$live_app" ]]; then
echo "UI HASH OK: ${local_app}"
return 0
fi
echo "WARN: UI hash mismatch local=${local_app} live=${live_app}"
if [[ "$fail_on_mismatch" == "true" ]]; then
echo "ERROR: FAIL_ON_UI_HASH_MISMATCH=true and UI hash mismatch detected."
return 1
fi
return 0
}
ensure_ui_readable_by_nginx() {
local ui_index="$APP_DIR/ui/dist/spa/index.html"
@@ -106,6 +217,36 @@ ensure_ui_readable_by_nginx() {
fi
}
# Ensure the UI build runs on Node.js >= 20, activating it through nvm when a
# local nvm installation exists. Prints the resolved node/npm versions on
# success; returns 1 with a hint when the runtime is missing or too old.
ensure_node20_for_ui_build() {
    local want_major=20
    local nvm_home="${NVM_DIR:-$HOME/.nvm}"

    # Prefer an nvm-managed Node when nvm is installed.
    if [[ -s "$nvm_home/nvm.sh" ]]; then
        # shellcheck disable=SC1090
        source "$nvm_home/nvm.sh"
        nvm install "$want_major" >/dev/null
        nvm use "$want_major" >/dev/null
    fi

    command -v node >/dev/null 2>&1 || {
        echo "ERROR: node command not found"
        return 1
    }

    local current_version current_major
    current_version="$(node -v 2>/dev/null || true)"
    current_major="$(sed -E 's/^v([0-9]+).*/\1/' <<< "$current_version")"
    if [[ -z "$current_major" || "$current_major" -lt "$want_major" ]]; then
        echo "ERROR: Node.js >=${want_major} required for UI build. Current: ${current_version:-unknown}"
        echo "Hint: install nvm and run: nvm install ${want_major} && nvm alias default ${want_major}"
        return 1
    fi
    echo "UI build runtime: node=$current_version npm=$(npm -v)"
}
build_api_binary() {
if ! command -v go >/dev/null 2>&1; then
echo "ERROR: go command not found"
@@ -132,7 +273,11 @@ restart_services() {
fi
if systemctl cat nginx >/dev/null 2>&1; then
systemctl restart nginx
if ! nginx -t; then
echo "ERROR: nginx config test failed"
return 1
fi
systemctl reload nginx
if ! systemctl is-active --quiet nginx; then
echo "ERROR: nginx service failed to start"
return 1
@@ -168,12 +313,16 @@ run_deploy() {
-e svc/mail.env \
-e svc/fonts \
-e svc/public \
-e deploy/deploy.sh \
-e scripts/deploy.sh \
-e svc/bssapp
restore_runtime_files
echo "DEPLOY COMMIT: $(git rev-parse --short HEAD)"
log_step "BUILD UI"
cd "$APP_DIR/ui"
ensure_node20_for_ui_build
clean_ui_build_artifacts
npm ci --no-audit --no-fund --include=optional
npm i -D --no-audit --no-fund sass-embedded@1.93.2
npm run build
@@ -194,6 +343,15 @@ run_deploy() {
log_step "RESTART SERVICES"
restart_services
log_step "PURGE NGINX CACHE"
purge_nginx_ui_cache
log_step "PURGE CDN HTML CACHE (OPTIONAL)"
purge_cdn_html_cache
log_step "VERIFY LIVE UI HASH"
verify_live_ui_hash
echo "[DEPLOY FINISHED] $(date '+%F %T')"
}

View File

@@ -0,0 +1,52 @@
# i18n + Dinamik Çeviri Standardı
Bu projede çok dilli yapı iki katmanlıdır:
1. Statik UI metinleri `i18n` ile yönetilir.
2. Dinamik içerikler `mk_translator` + otomatik çeviri servisi (OpenAI) ile yönetilir.
## 1) Statik UI (Deterministik)
Kullanım alanı:
- buton metinleri
- menüler
- form label'ları
- validasyon mesajları
- sabit ekran başlıkları
- route/meta/title
Kural:
- her metin key bazlı tutulur (`$t('common.save')`)
- locale dosyaları: `tr`, `en`, `de`, `it`, `es`, `ru`, `ar`
- fallback sırası: hedef dil -> `en` -> `tr`
## 2) Dinamik İçerik (DB/CMS/Serbest metin)
Akış:
1. Kaynak metin için `mk_translator` kontrol edilir.
2. Hedef dil karşılığı yoksa OpenAI ile çeviri üretilir.
3. Sonuç `mk_translator` tablosuna yazılır.
4. Sonraki isteklerde DB sonucu kullanılır (cache etkisi).
Kullanım alanı:
- ürün/kategori açıklamaları
- CMS içerikleri
- admin panelden girilen serbest metinler
- şablon bazlı metin içerikleri
## Kalite ve Güvenlik Kuralları
- Prompt net olmalı: sadece çeviri dönsün, açıklama eklemesin.
- Placeholder/format korunsun: `{name}`, `{{count}}`, `%s` gibi yapılar bozulmasın.
- HTML tag'leri ve kod/SKU değerleri çevrilmesin.
- API key sadece backend'de tutulur (`OPENAI_API_KEY` client'a verilmez).
- 429/5xx için retry + exponential backoff uygulanır.
- Hassas veri içeriği olan metinlerde veri politikası kontrolü yapılır.
## Özet
Bu servis, `i18n`'in alternatifi değildir; `i18n`'i tamamlayan dinamik çeviri katmanıdır.
- Statik UI: `i18n`
- Dinamik içerik: `mk_translator` + OpenAI + cache

View File

@@ -16,4 +16,9 @@
| Cloudflare | bt@baggi.com.tr | Baggi2025!.? |
| 172.16.0.3 | ct | pasauras |
## Dil ve Çeviri Standardı
Detaylı mimari dokümanı:
- [docs/i18n-dynamic-translation-standard.md](docs/i18n-dynamic-translation-standard.md)

13
scripts/deploy.sh Normal file
View File

@@ -0,0 +1,13 @@
#!/bin/bash
# Thin forwarding wrapper: resolves the repository root relative to this
# script and hands execution over to the canonical deploy/deploy.sh.
set -euo pipefail

script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
repo_root="$(cd "${script_dir}/.." && pwd)"
real_deploy="${repo_root}/deploy/deploy.sh"

[[ -f "$real_deploy" ]] || {
    echo "ERROR: target deploy script not found: $real_deploy"
    exit 1
}

# exec replaces this shell so exit status and signals pass straight through.
exec /bin/bash "$real_deploy" "$@"

View File

@@ -30,7 +30,7 @@ echo "Transferring additional files"
#rsync -azP ${PRJ_ROOT}/db/sqls root@${DEST_IP}:/opt/${APP} --delete
#rsync -azP ${SVC_ROOT}/fonts/ root@${DEST_IP}:/opt/${APP}/fonts --delete
#rsync -azP ${PRJ_ROOT}/db/migration/base root@${DEST_IP}:/opt/${APP}/migrate --delete
rsync -azP ${UI_ROOT}/dist/spa/ root@${DEST_IP}:/opt/${APP}/ui --delete
rsync -azP ${UI_ROOT}/dist/spa/ root@${DEST_IP}:/opt/${APP}/ui/dist/spa/ --delete
#echo "Migrating database"
#ssh root@${DEST_IP} "/opt/migrater -folder /opt/${APP}/migrate/base -db ${APP} -host 10.0.0.2 -tracker base -migrate-table symigrate -password tesnos.+ed"

View File

@@ -1,27 +0,0 @@
/* Indexes for order validate performance */
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_trOrderLine_OrderHeader_ItemCode'
AND object_id = OBJECT_ID('dbo.trOrderLine')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_trOrderLine_OrderHeader_ItemCode
ON dbo.trOrderLine (OrderHeaderID, ItemCode)
INCLUDE (ItemTypeCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code, LineDescription, SortOrder, OrderLineID);
END
GO
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_prItemVariant_Combo'
AND object_id = OBJECT_ID('dbo.prItemVariant')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_prItemVariant_Combo
ON dbo.prItemVariant (ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
INCLUDE (PLU);
END
GO

View File

@@ -0,0 +1,48 @@
-- language_module_seed.sql
-- Seeds routing and permission rows for the "language" module by cloning the
-- existing "system" module permissions. Safe to re-run: every INSERT is
-- guarded by ON CONFLICT and the DELETE targets only legacy rows.
-- 1) Register language module routes if missing
-- NOTE(review): the GET list route is registered with action 'update' rather
-- than a read-style action - confirm this is intentional before relying on it.
INSERT INTO mk_sys_routes (path, method, module_code, action)
VALUES
('/api/language/translations', 'GET', 'language', 'update'),
('/api/language/translations/{id}', 'PUT', 'language', 'update'),
('/api/language/translations/upsert-missing', 'POST', 'language', 'update'),
('/api/language/translations/sync-sources', 'POST', 'language', 'update'),
('/api/language/translations/translate-selected', 'POST', 'language', 'update'),
('/api/language/translations/bulk-approve', 'POST', 'language', 'update'),
('/api/language/translations/bulk-update', 'POST', 'language', 'update')
ON CONFLICT (path, method) DO UPDATE
SET
module_code = EXCLUDED.module_code,
action = EXCLUDED.action;
-- 2) Remove legacy system translation routes (optional cleanup)
DELETE FROM mk_sys_routes
WHERE path LIKE '/api/system/translations%';
-- 3) Seed role permissions for language module by cloning system perms
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT rp.role_id, 'language', rp.action, rp.allowed
FROM mk_sys_role_permissions rp
WHERE rp.module_code = 'system'
AND rp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;
-- 4) Ensure admin update access
-- NOTE(review): role id 3 is assumed to be the admin role - verify against dfrole.
INSERT INTO mk_sys_role_permissions (role_id, module_code, action, allowed)
SELECT r.id, 'language', 'update', true
FROM dfrole r
WHERE r.id = 3
ON CONFLICT DO NOTHING;
-- 5) Seed role+department permissions for language module by cloning system perms
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT DISTINCT
rdp.role_id,
rdp.department_code,
'language',
rdp.action,
rdp.allowed
FROM mk_sys_role_department_permissions rdp
WHERE rdp.module_code = 'system'
AND rdp.action IN ('view', 'read', 'insert', 'update', 'delete', 'export')
ON CONFLICT DO NOTHING;

View File

@@ -0,0 +1,87 @@
/*
Product filter cache refresh for Product Stock By Attributes endpoints.
This cache is used by backend queries when dbo.ProductFilterTRCache exists.

Re-runnable: creates the table on first run, then truncates and reloads it
from ProductFilterWithDescription('TR'), replaces the old filter index with
two category-oriented covering indexes, and refreshes statistics.
*/
USE BAGGI_V3;
GO
-- Create the cache table once; subsequent runs reuse it.
IF OBJECT_ID('dbo.ProductFilterTRCache','U') IS NULL
BEGIN
CREATE TABLE dbo.ProductFilterTRCache
(
ProductCode NVARCHAR(50) NOT NULL PRIMARY KEY,
ProductDescription NVARCHAR(255) NULL,
ProductAtt01Desc NVARCHAR(255) NULL,
ProductAtt02Desc NVARCHAR(255) NULL,
ProductAtt11Desc NVARCHAR(255) NULL,
ProductAtt38Desc NVARCHAR(255) NULL,
ProductAtt41Desc NVARCHAR(255) NULL,
ProductAtt44Desc NVARCHAR(255) NULL
);
END
GO
-- Full reload: wipe and repopulate from the source function.
TRUNCATE TABLE dbo.ProductFilterTRCache;
GO
INSERT INTO dbo.ProductFilterTRCache
(
ProductCode,
ProductDescription,
ProductAtt01Desc,
ProductAtt02Desc,
ProductAtt11Desc,
ProductAtt38Desc,
ProductAtt41Desc,
ProductAtt44Desc
)
SELECT
ProductCode,
ProductDescription,
ProductAtt01Desc,
ProductAtt02Desc,
ProductAtt11Desc,
ProductAtt38Desc,
ProductAtt41Desc,
ProductAtt44Desc
FROM ProductFilterWithDescription('TR')
-- NOTE(review): only 13-character product codes are cached - confirm this
-- matches the code format the consuming endpoints actually query.
WHERE LEN(ProductCode) = 13;
GO
-- Drop the superseded single filter index if it is still present.
IF EXISTS (
SELECT 1 FROM sys.indexes
WHERE name = 'IX_ProductFilterTRCache_Filter'
AND object_id = OBJECT_ID('dbo.ProductFilterTRCache')
)
BEGIN
DROP INDEX IX_ProductFilterTRCache_Filter ON dbo.ProductFilterTRCache;
END
GO
-- Covering index for category + primary-attribute lookups.
IF NOT EXISTS (
SELECT 1 FROM sys.indexes
WHERE name = 'IX_ProductFilterTRCache_KatAna'
AND object_id = OBJECT_ID('dbo.ProductFilterTRCache')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_ProductFilterTRCache_KatAna
ON dbo.ProductFilterTRCache (ProductAtt44Desc, ProductAtt01Desc, ProductCode)
INCLUDE (ProductDescription, ProductAtt02Desc, ProductAtt41Desc, ProductAtt38Desc, ProductAtt11Desc);
END
GO
-- Covering index for category + primary + secondary-attribute lookups.
IF NOT EXISTS (
SELECT 1 FROM sys.indexes
WHERE name = 'IX_ProductFilterTRCache_KatAnaAlt'
AND object_id = OBJECT_ID('dbo.ProductFilterTRCache')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_ProductFilterTRCache_KatAnaAlt
ON dbo.ProductFilterTRCache (ProductAtt44Desc, ProductAtt01Desc, ProductAtt02Desc, ProductCode)
INCLUDE (ProductDescription, ProductAtt41Desc, ProductAtt38Desc, ProductAtt11Desc);
END
GO
-- Fresh statistics after the full reload so the optimizer sees current cardinality.
UPDATE STATISTICS dbo.ProductFilterTRCache WITH FULLSCAN;
GO

View File

@@ -0,0 +1,74 @@
/*
Performance indexes for Product Stock By Attributes queries.
Target: SQL Server

Each index covers the (ItemTypeCode, ItemCode, WarehouseCode, dims) lookup
pattern shared by the stock-state tables, with quantity and location columns
in INCLUDE to keep the queries index-only. Every CREATE is guarded by a
sys.indexes existence check, so the script is idempotent and safe to re-run.
*/
/* trStock (inventory aggregation) */
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_trStock_Item_Warehouse_Dims'
AND object_id = OBJECT_ID('dbo.trStock')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_trStock_Item_Warehouse_Dims
ON dbo.trStock (ItemTypeCode, ItemCode, WarehouseCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
INCLUDE (In_Qty1, Out_Qty1, CompanyCode, OfficeCode, StoreTypeCode, StoreCode);
END;
GO
/* PickingStates */
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_PickingStates_Item_Warehouse_Dims'
AND object_id = OBJECT_ID('dbo.PickingStates')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_PickingStates_Item_Warehouse_Dims
ON dbo.PickingStates (ItemTypeCode, ItemCode, WarehouseCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
INCLUDE (Qty1, CompanyCode, OfficeCode, StoreTypeCode, StoreCode);
END;
GO
/* ReserveStates */
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_ReserveStates_Item_Warehouse_Dims'
AND object_id = OBJECT_ID('dbo.ReserveStates')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_ReserveStates_Item_Warehouse_Dims
ON dbo.ReserveStates (ItemTypeCode, ItemCode, WarehouseCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
INCLUDE (Qty1, CompanyCode, OfficeCode, StoreTypeCode, StoreCode);
END;
GO
/* DispOrderStates */
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_DispOrderStates_Item_Warehouse_Dims'
AND object_id = OBJECT_ID('dbo.DispOrderStates')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_DispOrderStates_Item_Warehouse_Dims
ON dbo.DispOrderStates (ItemTypeCode, ItemCode, WarehouseCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
INCLUDE (Qty1, CompanyCode, OfficeCode, StoreTypeCode, StoreCode);
END;
GO
/* Latest price lookup (PriceDate DESC serves newest-first retrieval) */
IF NOT EXISTS (
SELECT 1
FROM sys.indexes
WHERE name = 'IX_prItemBasePrice_ItemType_ItemCode_PriceDate'
AND object_id = OBJECT_ID('dbo.prItemBasePrice')
)
BEGIN
CREATE NONCLUSTERED INDEX IX_prItemBasePrice_ItemType_ItemCode_PriceDate
ON dbo.prItemBasePrice (ItemTypeCode, ItemCode, PriceDate DESC)
INCLUDE (Price);
END;
GO

View File

@@ -22,7 +22,7 @@ UI_DIR=/opt/bssapp/ui/dist
# DATABASES
# ===============================
POSTGRES_CONN=host=46.224.33.150 port=5432 user=postgres password=tayitkan dbname=baggib2b sslmode=disable
MSSQL_CONN=sqlserver://sa:Gil_0150@100.127.186.137:1433?database=BAGGI_V3&encrypt=disable
MSSQL_CONN=sqlserver://sa:Gil_0150@10.0.0.9:1433?database=BAGGI_V3&encrypt=disable
# ===============================
# PDF
@@ -32,3 +32,6 @@ API_HOST=0.0.0.0
API_PORT=8080
AZURE_TRANSLATOR_KEY=d055c693-a84e-4594-8aef-a6c05c42623a
AZURE_TRANSLATOR_ENDPOINT=https://api.cognitive.microsofttranslator.com
AZURE_TRANSLATOR_REGION=westeurope

View File

@@ -0,0 +1,72 @@
package main
import (
"bssapp-backend/db"
"bssapp-backend/routes"
"fmt"
"log"
"os"
"strconv"
"strings"
"github.com/joho/godotenv"
)
// main runs a one-shot translation sync (MSSQL sources -> Postgres
// translation rows) and prints a summary line before exiting.
func main() {
	// Best-effort env loading; absent files are silently ignored.
	_ = godotenv.Load(".env", "mail.env", ".env.local")

	if err := db.ConnectMSSQL(); err != nil {
		log.Fatalf("mssql connect failed: %v", err)
	}
	pgDB, err := db.ConnectPostgres()
	if err != nil {
		log.Fatalf("postgres connect failed: %v", err)
	}
	defer pgDB.Close()

	result, err := routes.PerformTranslationSync(pgDB, db.MssqlDB, routes.TranslationSyncOptions{
		AutoTranslate: autoTranslateEnabled(),
		Languages:     targetLanguages(),
		Limit:         syncLimit(),
		OnlyNew:       true,
	})
	if err != nil {
		log.Fatalf("manual sync failed: %v", err)
	}
	fmt.Printf("translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v\n",
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
		result.TargetLangs,
	)
}

// syncLimit returns TRANSLATION_SYNC_LIMIT when it parses as a positive
// integer; any other value (or an unset variable) yields the 30000 default.
func syncLimit() int {
	raw := os.Getenv("TRANSLATION_SYNC_LIMIT")
	if raw == "" {
		return 30000
	}
	parsed, err := strconv.Atoi(raw)
	if err != nil || parsed <= 0 {
		return 30000
	}
	return parsed
}

// targetLanguages parses the comma-separated TRANSLATION_SYNC_LANGS list
// (lower-cased, blanks dropped); an empty/blank variable or an all-blank
// list falls back to the built-in default language set.
func targetLanguages() []string {
	defaults := []string{"en", "de", "it", "es", "ru", "ar"}
	raw := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LANGS"))
	if raw == "" {
		return defaults
	}
	var custom []string
	for _, part := range strings.Split(raw, ",") {
		if v := strings.TrimSpace(strings.ToLower(part)); v != "" {
			custom = append(custom, v)
		}
	}
	if len(custom) == 0 {
		return defaults
	}
	return custom
}

// autoTranslateEnabled reports whether machine translation stays enabled.
// Default is true; only the explicit values "0", "false" or "off"
// (case-insensitive) in TRANSLATION_SYNC_AUTO_TRANSLATE disable it.
func autoTranslateEnabled() bool {
	switch strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_AUTO_TRANSLATE"))) {
	case "0", "false", "off":
		return false
	}
	return true
}

View File

@@ -3,33 +3,121 @@ package db
import (
"database/sql"
"fmt"
"log"
"net/url"
"os"
"strconv"
"strings"
"time"
_ "github.com/microsoft/go-mssqldb"
)
var MssqlDB *sql.DB
// ConnectMSSQL MSSQL baglantisini ortam degiskeninden baslatir.
func ConnectMSSQL() {
// envInt reads the named environment variable as a positive integer.
// It returns fallback when the variable is unset, not a valid integer,
// or not strictly positive.
func envInt(name string, fallback int) int {
	switch v, err := strconv.Atoi(strings.TrimSpace(os.Getenv(name))); {
	case err != nil, v <= 0:
		return fallback
	default:
		return v
	}
}
// ensureTimeoutValue keeps the current (trimmed) value when it already
// parses as an integer of at least desired seconds; otherwise it returns
// desired as a string.
func ensureTimeoutValue(current string, desired int) string {
	trimmed := strings.TrimSpace(current)
	if cur, err := strconv.Atoi(trimmed); err == nil && cur >= desired {
		return trimmed
	}
	return strconv.Itoa(desired)
}

// ensureMSSQLTimeouts guarantees that both "connection timeout" and
// "dial timeout" are present in the connection string with at least the
// given values. Both the URL form (sqlserver://...) and the
// semicolon-separated ADO form are handled; blank or unparseable input
// is returned unchanged.
func ensureMSSQLTimeouts(connString string, connectionTimeoutSec int, dialTimeoutSec int) string {
	raw := strings.TrimSpace(connString)
	if raw == "" {
		return raw
	}

	// URL form: adjust the query parameters and re-encode.
	if strings.HasPrefix(strings.ToLower(raw), "sqlserver://") {
		u, err := url.Parse(raw)
		if err != nil {
			return raw
		}
		q := u.Query()
		q.Set("connection timeout", ensureTimeoutValue(q.Get("connection timeout"), connectionTimeoutSec))
		q.Set("dial timeout", ensureTimeoutValue(q.Get("dial timeout"), dialTimeoutSec))
		u.RawQuery = q.Encode()
		return u.String()
	}

	// ADO form: rewrite matching key=value segments in place, then append
	// whichever of the two keys was never seen.
	parts := strings.Split(raw, ";")
	seen := map[string]bool{}
	for i, part := range parts {
		kv := strings.SplitN(strings.TrimSpace(part), "=", 2)
		if len(kv) != 2 || strings.TrimSpace(kv[0]) == "" {
			continue
		}
		switch strings.ToLower(strings.TrimSpace(kv[0])) {
		case "connection timeout":
			seen["connection timeout"] = true
			parts[i] = "connection timeout=" + ensureTimeoutValue(kv[1], connectionTimeoutSec)
		case "dial timeout":
			seen["dial timeout"] = true
			parts[i] = "dial timeout=" + ensureTimeoutValue(kv[1], dialTimeoutSec)
		}
	}
	if !seen["connection timeout"] {
		parts = append(parts, "connection timeout="+strconv.Itoa(connectionTimeoutSec))
	}
	if !seen["dial timeout"] {
		parts = append(parts, "dial timeout="+strconv.Itoa(dialTimeoutSec))
	}
	return strings.Join(parts, ";")
}
// ConnectMSSQL initializes the MSSQL connection from environment.
func ConnectMSSQL() error {
connString := strings.TrimSpace(os.Getenv("MSSQL_CONN"))
if connString == "" {
log.Fatal("MSSQL_CONN tanımlı değil")
return fmt.Errorf("MSSQL_CONN tanimli degil")
}
connectionTimeoutSec := envInt("MSSQL_CONNECTION_TIMEOUT_SEC", 120)
dialTimeoutSec := envInt("MSSQL_DIAL_TIMEOUT_SEC", connectionTimeoutSec)
connString = ensureMSSQLTimeouts(connString, connectionTimeoutSec, dialTimeoutSec)
var err error
MssqlDB, err = sql.Open("sqlserver", connString)
if err != nil {
log.Fatal("MSSQL bağlantı hatası:", err)
return fmt.Errorf("MSSQL baglanti hatasi: %w", err)
}
MssqlDB.SetMaxOpenConns(envInt("MSSQL_MAX_OPEN_CONNS", 40))
MssqlDB.SetMaxIdleConns(envInt("MSSQL_MAX_IDLE_CONNS", 40))
MssqlDB.SetConnMaxLifetime(time.Duration(envInt("MSSQL_CONN_MAX_LIFETIME_MIN", 30)) * time.Minute)
MssqlDB.SetConnMaxIdleTime(time.Duration(envInt("MSSQL_CONN_MAX_IDLE_MIN", 10)) * time.Minute)
if err = MssqlDB.Ping(); err != nil {
log.Fatal("MSSQL erişilemiyor:", err)
return fmt.Errorf("MSSQL erisilemiyor: %w", err)
}
fmt.Println("MSSQL bağlantısı başarılı")
fmt.Printf("MSSQL baglantisi basarili (connection timeout=%ds, dial timeout=%ds)\n", connectionTimeoutSec, dialTimeoutSec)
return nil
}
func GetDB() *sql.DB {

View File

@@ -1,32 +1,30 @@
package authz
import (
"bssapp-backend/auth"
"context"
"fmt"
"strings"
)
func BuildMSSQLPiyasaFilter(
ctx context.Context,
column string,
) string {
claims, ok := auth.GetClaimsFromContext(ctx)
if ok && claims != nil && claims.IsAdmin() {
return "1=1"
}
codes := GetPiyasaCodesFromCtx(ctx)
if len(codes) == 0 {
return "1=1"
return "1=0"
}
var quoted []string
for _, c := range codes {
quoted = append(quoted, "'"+c+"'")
return BuildMSSQLPiyasaFilterWithCodes(column, codes)
}
return fmt.Sprintf(
"%s IN (%s)",
column,
strings.Join(quoted, ","),
)
func BuildMSSQLPiyasaFilterWithCodes(column string, codes []string) string {
normalizedCol := fmt.Sprintf("UPPER(LTRIM(RTRIM(%s)))", column)
return BuildINClause(normalizedCol, codes)
}

View File

@@ -3,6 +3,7 @@ package authz
import (
"database/sql"
"fmt"
"strings"
"sync"
)
@@ -35,10 +36,21 @@ func GetUserPiyasaCodes(pg *sql.DB, userID int) ([]string, error) {
// DB QUERY
// -----------------------------
rows, err := pg.Query(`
SELECT piyasa_code
FROM dfusr_piyasa
WHERE dfusr_id = $1
AND is_allowed = true
WITH user_piyasa AS (
SELECT TRIM(up.piyasa_code) AS raw_code
FROM dfusr_piyasa up
WHERE up.dfusr_id = $1
AND up.is_allowed = true
)
SELECT DISTINCT
COALESCE(p_code.code, p_title.code, u.raw_code) AS piyasa_code
FROM user_piyasa u
LEFT JOIN mk_sales_piy p_code
ON UPPER(translate(TRIM(p_code.code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
= UPPER(translate(TRIM(u.raw_code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
LEFT JOIN mk_sales_piy p_title
ON UPPER(translate(TRIM(p_title.title),'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
= UPPER(translate(TRIM(u.raw_code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
`, userID)
if err != nil {
return nil, fmt.Errorf("pg piyasa query error: %w", err)
@@ -46,12 +58,20 @@ func GetUserPiyasaCodes(pg *sql.DB, userID int) ([]string, error) {
defer rows.Close()
var out []string
seen := make(map[string]struct{})
for rows.Next() {
var code string
if err := rows.Scan(&code); err == nil {
code = strings.ToUpper(strings.TrimSpace(code))
if code != "" {
if _, ok := seen[code]; ok {
continue
}
seen[code] = struct{}{}
out = append(out, code)
}
}
}
// -----------------------------
// CACHE WRITE

122
svc/internal/i18n/lang.go Normal file
View File

@@ -0,0 +1,122 @@
package i18n
import "strings"
// DefaultLang is the language used whenever no supported match is found.
const DefaultLang = "TR"

// supported is the closed set of language codes the app can serve.
var supported = map[string]struct{}{
	"TR": {},
	"EN": {},
	"DE": {},
	"IT": {},
	"ES": {},
	"RU": {},
	"AR": {},
}

// NormalizeLangCode trims and upper-cases raw, returning it when it is a
// supported language code and DefaultLang otherwise.
func NormalizeLangCode(raw string) string {
	candidate := strings.ToUpper(strings.TrimSpace(raw))
	if _, known := supported[candidate]; !known {
		return DefaultLang
	}
	return candidate
}
// ResolveLangCode picks the response language. An explicit, supported
// queryLangCode wins (including an explicit request for the default);
// otherwise the primary tag of the Accept-Language header is tried;
// failing both, DefaultLang is returned.
func ResolveLangCode(queryLangCode, acceptLanguage string) string {
	if lang := NormalizeLangCode(queryLangCode); lang != DefaultLang || strings.EqualFold(strings.TrimSpace(queryLangCode), DefaultLang) {
		return lang
	}

	header := strings.TrimSpace(acceptLanguage)
	if header == "" {
		return DefaultLang
	}

	// Take the first entry (before any comma) and strip its q-value.
	primary := header
	if idx := strings.IndexByte(primary, ','); idx >= 0 {
		primary = primary[:idx]
	}
	if idx := strings.IndexByte(primary, ';'); idx >= 0 {
		primary = primary[:idx]
	}
	primary = strings.TrimSpace(primary)
	if len(primary) < 2 {
		return DefaultLang
	}
	// First two bytes are the ISO language tag (e.g. "en" from "en-US").
	return NormalizeLangCode(primary[:2])
}
// T translates key for langCode, walking the fallback chain produced by
// fallbackLangs and returning the key itself when no dictionary has an
// entry for it.
func T(langCode, key string) string {
	for _, candidate := range fallbackLangs(langCode) {
		if translated, ok := dict[candidate][key]; ok {
			return translated
		}
	}
	return key
}
// fallbackLangs returns the dictionary lookup order for langCode:
// TR stands alone, EN falls back to TR, and every other supported
// language tries itself, then EN, then TR.
func fallbackLangs(langCode string) []string {
	lang := NormalizeLangCode(langCode)
	if lang == "TR" {
		return []string{"TR"}
	}
	if lang == "EN" {
		return []string{"EN", "TR"}
	}
	return []string{lang, "EN", "TR"}
}
// dict holds the per-language UI string tables for PDF rendering, keyed
// by normalized language code and then by message key. Only TR and EN are
// populated; the other supported languages resolve through fallbackLangs
// (lang -> EN -> TR).
var dict = map[string]map[string]string{
"TR": {
"pdf.report_title": "Cari Hesap Raporu",
"pdf.date": "Tarih",
"pdf.customer": "Cari",
"pdf.date_range": "Tarih Aralığı",
"pdf.page": "Sayfa",
"pdf.ending_balance": "Son Bakiye",
"pdf.currency_prefix": "Para Birimi",
"pdf.balance_prefix": "Bakiye",
"pdf.main.doc_no": "Belge No",
"pdf.main.date": "Tarih",
"pdf.main.due_date": "Vade",
"pdf.main.operation": "İşlem",
"pdf.main.description": "Açıklama",
"pdf.main.currency": "Para",
"pdf.main.debit": "Borç",
"pdf.main.credit": "Alacak",
"pdf.main.balance": "Bakiye",
"pdf.detail.main_group": "Ana Grup",
"pdf.detail.sub_group": "Alt Grup",
"pdf.detail.waiter": "Garson",
"pdf.detail.fit": "Fit",
"pdf.detail.content": "İçerik",
"pdf.detail.product": "Ürün",
"pdf.detail.color": "Renk",
"pdf.detail.qty": "Adet",
"pdf.detail.price": "Fiyat",
"pdf.detail.total": "Tutar",
},
"EN": {
"pdf.report_title": "Customer Account Report",
"pdf.date": "Date",
"pdf.customer": "Customer",
"pdf.date_range": "Date Range",
"pdf.page": "Page",
"pdf.ending_balance": "Ending Balance",
"pdf.currency_prefix": "Currency",
"pdf.balance_prefix": "Balance",
"pdf.main.doc_no": "Document No",
"pdf.main.date": "Date",
"pdf.main.due_date": "Due Date",
"pdf.main.operation": "Operation",
"pdf.main.description": "Description",
"pdf.main.currency": "Curr.",
"pdf.main.debit": "Debit",
"pdf.main.credit": "Credit",
"pdf.main.balance": "Balance",
"pdf.detail.main_group": "Main Group",
"pdf.detail.sub_group": "Sub Group",
"pdf.detail.waiter": "Waiter",
"pdf.detail.fit": "Fit",
"pdf.detail.content": "Content",
"pdf.detail.product": "Product",
"pdf.detail.color": "Color",
"pdf.detail.qty": "Qty",
"pdf.detail.price": "Price",
"pdf.detail.total": "Total",
},
}

View File

@@ -3,6 +3,7 @@ package mailer
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"io"
@@ -145,6 +146,36 @@ func (g *GraphMailer) Send(ctx context.Context, msg Message) error {
message["replyTo"] = replyToRecipients
}
if len(msg.Attachments) > 0 {
atts := make([]map[string]any, 0, len(msg.Attachments))
for _, a := range msg.Attachments {
if len(a.Data) == 0 {
continue
}
name := strings.TrimSpace(a.FileName)
if name == "" {
name = "attachment.bin"
}
contentType := strings.TrimSpace(a.ContentType)
if contentType == "" {
contentType = "application/octet-stream"
}
atts = append(atts, map[string]any{
"@odata.type": "#microsoft.graph.fileAttachment",
"name": name,
"contentType": contentType,
"contentBytes": base64.StdEncoding.EncodeToString(a.Data),
})
}
if len(atts) > 0 {
message["attachments"] = atts
}
}
payload := map[string]any{
"message": message,
"saveToSentItems": true,

View File

@@ -3,6 +3,7 @@ package mailer
import (
"context"
"crypto/tls"
"encoding/base64"
"errors"
"fmt"
"net"
@@ -22,6 +23,13 @@ type Message struct {
Subject string
Body string
BodyHTML string
Attachments []Attachment
}
// Attachment is an e-mail attachment carried on a Message.
type Attachment struct {
FileName string // display name; GraphMailer.Send substitutes "attachment.bin" when blank
ContentType string // MIME type; GraphMailer.Send defaults to "application/octet-stream" when blank
Data []byte // raw content; attachments with empty Data are skipped by the sender
}
func New(cfg Config) *Mailer {
@@ -131,11 +139,13 @@ func (m *Mailer) Send(ctx context.Context, msg Message) error {
}
func buildMIME(from string, to []string, subject, contentType, body string) string {
// Subject UTF-8 basit hali (gerekirse sonra MIME encoded-word ekleriz)
// Encode Subject to UTF-8
encodedSubject := "=?UTF-8?B?" + base64.StdEncoding.EncodeToString([]byte(subject)) + "?="
headers := []string{
"From: " + from,
"To: " + strings.Join(to, ", "),
"Subject: " + subject,
"Subject: " + encodedSubject,
"MIME-Version: 1.0",
"Content-Type: " + contentType,
"",

View File

@@ -104,7 +104,26 @@ func autoRegisterRouteV3(
return
}
// 2) ADMIN AUTO PERMISSION (module+action bazlı)
// 2) MODULE LOOKUP AUTO SEED (permission ekranları için)
moduleLabel := strings.TrimSpace(strings.ReplaceAll(module, "_", " "))
if moduleLabel == "" {
moduleLabel = module
}
_, err = tx.Exec(`
INSERT INTO mk_sys_modules (code, name)
VALUES ($1::text, $2::text)
ON CONFLICT (code) DO UPDATE
SET name = COALESCE(NULLIF(EXCLUDED.name, ''), mk_sys_modules.name)
`,
module,
moduleLabel,
)
if err != nil {
log.Printf("❌ Module seed error (%s %s): %v", method, path, err)
return
}
// 3) ROLE PERMISSION AUTO SEED (admin=true, diğer roller=false)
_, err = tx.Exec(`
INSERT INTO mk_sys_role_permissions
(role_id, module_code, action, allowed)
@@ -112,16 +131,50 @@ func autoRegisterRouteV3(
id,
$1,
$2,
true
CASE
WHEN id = 3 OR LOWER(code) = 'admin' THEN true
ELSE false
END
FROM dfrole
WHERE id = 3 -- ADMIN
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Admin perm seed error (%s %s): %v", method, path, err)
log.Printf("❌ Role perm seed error (%s %s): %v", method, path, err)
return
}
// 4) ROLE+DEPARTMENT PERMISSION AUTO SEED
// Existing role+department kombinasyonlarına yeni module+action satırı eklenir.
_, err = tx.Exec(`
WITH role_dept_scope AS (
SELECT DISTINCT role_id, department_code
FROM mk_sys_role_department_permissions
UNION
SELECT 3 AS role_id, d.code AS department_code
FROM mk_dprt d
)
INSERT INTO mk_sys_role_department_permissions
(role_id, department_code, module_code, action, allowed)
SELECT
rds.role_id,
rds.department_code,
$1,
$2,
CASE
WHEN rds.role_id = 3 THEN true
ELSE false
END
FROM role_dept_scope rds
ON CONFLICT DO NOTHING
`,
module,
action,
)
if err != nil {
log.Printf("❌ Role+Dept perm seed error (%s %s): %v", method, path, err)
return
}
@@ -192,6 +245,11 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
)
}
// Sadece JWT doğrulaması; route-level yetki kontrolü yok.
wrapAuthOnly := func(h http.Handler) http.Handler {
return middlewares.AuthMiddleware(pgDB, h)
}
// ============================================================
// PUBLIC (NO AUTHZ)
// ============================================================
@@ -245,6 +303,57 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
wrapV3(routes.TestMailHandler(ml)),
)
bindV3(r, pgDB,
"/api/system/market-mail-mappings/lookups", "GET",
"system", "update",
wrapV3(routes.GetMarketMailMappingLookupsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/system/market-mail-mappings", "GET",
"system", "update",
wrapV3(routes.GetMarketMailMappingsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/system/market-mail-mappings/{marketId}", "PUT",
"system", "update",
wrapV3(routes.SaveMarketMailMappingHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations", "GET",
"language", "update",
wrapV3(routes.GetTranslationRowsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/{id}", "PUT",
"language", "update",
wrapV3(routes.UpdateTranslationRowHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/upsert-missing", "POST",
"language", "update",
wrapV3(routes.UpsertMissingTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/sync-sources", "POST",
"language", "update",
wrapV3(routes.SyncTranslationSourcesHandler(pgDB, mssql)),
)
bindV3(r, pgDB,
"/api/language/translations/translate-selected", "POST",
"language", "update",
wrapV3(routes.TranslateSelectedTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-approve", "POST",
"language", "update",
wrapV3(routes.BulkApproveTranslationsHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/language/translations/bulk-update", "POST",
"language", "update",
wrapV3(routes.BulkUpdateTranslationsHandler(pgDB)),
)
// ============================================================
// PERMISSIONS
// ============================================================
@@ -425,6 +534,50 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
wrapV3(routes.ExportStatementHeaderReportPDFHandler(mssql)),
)
bindV3(r, pgDB,
"/api/finance/customer-balances", "GET",
"finance", "view",
wrapV3(http.HandlerFunc(routes.GetCustomerBalanceListHandler)),
)
bindV3(r, pgDB,
"/api/finance/customer-balances/export-pdf", "GET",
"finance", "export",
wrapV3(routes.ExportCustomerBalancePDFHandler(mssql)),
)
bindV3(r, pgDB,
"/api/finance/customer-balances/export-excel", "GET",
"finance", "export",
wrapV3(routes.ExportCustomerBalanceExcelHandler(mssql)),
)
bindV3(r, pgDB,
"/api/finance/account-aging-statement", "GET",
"finance", "view",
wrapV3(http.HandlerFunc(routes.GetStatementAgingHandler)),
)
bindV3(r, pgDB,
"/api/finance/account-aging-statement/export-pdf", "GET",
"finance", "export",
wrapV3(routes.ExportStatementAgingPDFHandler(mssql)),
)
bindV3(r, pgDB,
"/api/finance/account-aging-statement/export-screen-pdf", "GET",
"finance", "export",
wrapV3(routes.ExportStatementAgingScreenPDFHandler(mssql)),
)
bindV3(r, pgDB,
"/api/finance/account-aging-statement/export-excel", "GET",
"finance", "export",
wrapV3(routes.ExportStatementAgingExcelHandler(mssql)),
)
bindV3(r, pgDB,
"/api/finance/aged-customer-balance-list", "GET",
"finance", "view",
wrapV3(http.HandlerFunc(routes.GetAgedCustomerBalanceListHandler)),
)
// ============================================================
// REPORT (STATEMENTS)
// ============================================================
@@ -454,25 +607,37 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
}{
{"/api/order/create", "POST", "insert", routes.CreateOrderHandler(pgDB, mssql)},
{"/api/order/update", "POST", "update", http.HandlerFunc(routes.UpdateOrderHandler)},
{"/api/order/{id}/bulk-due-date", "POST", "update", routes.BulkUpdateOrderLineDueDateHandler(mssql)},
{"/api/order/get/{id}", "GET", "view", routes.GetOrderByIDHandler(mssql)},
{"/api/orders/list", "GET", "view", routes.OrderListRoute(mssql)},
{"/api/orders/production-list", "GET", "update", routes.OrderProductionListRoute(mssql)},
{"/api/orders/production-items/cditem-lookups", "GET", "view", routes.OrderProductionCdItemLookupsRoute(mssql)},
{"/api/orders/production-items/{id}", "GET", "view", routes.OrderProductionItemsRoute(mssql)},
{"/api/orders/production-items/{id}/insert-missing", "POST", "update", routes.OrderProductionInsertMissingRoute(mssql)},
{"/api/orders/production-items/{id}/validate", "POST", "update", routes.OrderProductionValidateRoute(mssql)},
{"/api/orders/production-items/{id}/apply", "POST", "update", routes.OrderProductionApplyRoute(mssql)},
{"/api/orders/production-items/{id}/apply", "POST", "update", routes.OrderProductionApplyRoute(mssql, ml)},
{"/api/orders/close-ready", "GET", "update", routes.OrderCloseReadyListRoute(mssql)},
{"/api/orders/bulk-close", "POST", "update", routes.OrderBulkCloseRoute(mssql)},
{"/api/orders/export", "GET", "export", routes.OrderListExcelRoute(mssql)},
{"/api/order/check/{id}", "GET", "view", routes.OrderExistsHandler(mssql)},
{"/api/order/validate", "POST", "insert", routes.ValidateOrderHandler(mssql)},
{"/api/order/pdf/{id}", "GET", "export", routes.OrderPDFHandler(mssql)},
{"/api/order/pdf/{id}", "GET", "export", routes.OrderPDFHandler(mssql, pgDB)},
{"/api/order/send-market-mail", "POST", "read", routes.SendOrderMarketMailHandler(pgDB, mssql, ml)},
{"/api/order-inventory", "GET", "view", http.HandlerFunc(routes.GetOrderInventoryHandler)},
{"/api/orderpricelistb2b", "GET", "view", routes.GetOrderPriceListB2BHandler(pgDB, mssql)},
{"/api/min-price", "GET", "view", routes.GetOrderPriceListB2BHandler(pgDB, mssql)},
}
for _, rt := range orderRoutes {
if rt.Path == "/api/order/send-market-mail" {
bindV3(r, pgDB,
rt.Path, rt.Method,
"order", rt.Action,
wrapAuthOnly(rt.Handle),
)
continue
}
bindV3(r, pgDB,
rt.Path, rt.Method,
"order", rt.Action,
@@ -495,11 +660,22 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
wrapV3(http.HandlerFunc(routes.GetProductDetailHandler)),
)
bindV3(r, pgDB,
"/api/product-cditem", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductCdItemHandler)),
)
bindV3(r, pgDB,
"/api/product-colors", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-newcolors", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductNewColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-colorsize", "GET",
@@ -512,6 +688,56 @@ func InitRoutes(pgDB *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) *mux.Router
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductSecondColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-newsecondcolor", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductNewSecondColorsHandler)),
)
bindV3(r, pgDB,
"/api/product-attributes", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductAttributesHandler)),
)
bindV3(r, pgDB,
"/api/product-item-attributes", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductItemAttributesHandler)),
)
bindV3(r, pgDB,
"/api/product-stock-query", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductStockQueryHandler)),
)
bindV3(r, pgDB,
"/api/product-stock-attribute-options", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductStockAttributeOptionsHandler)),
)
bindV3(r, pgDB,
"/api/product-stock-query-by-attributes", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductStockQueryByAttributesHandler)),
)
bindV3(r, pgDB,
"/api/product-images", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductImagesHandler(pgDB))),
)
bindV3(r, pgDB,
"/api/product-images/{id}/content", "GET",
"order", "view",
http.HandlerFunc(routes.GetProductImageContentHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/product-size-match/rules", "GET",
"order", "view",
wrapV3(routes.GetProductSizeMatchRulesHandler(pgDB)),
)
bindV3(r, pgDB,
"/api/pricing/products", "GET",
"order", "view",
wrapV3(http.HandlerFunc(routes.GetProductPricingListHandler)),
)
// ============================================================
// ROLE MANAGEMENT
@@ -576,6 +802,7 @@ func main() {
// -------------------------------------------------------
// 🔑 ENV
// -------------------------------------------------------
// Önce .env + mail.env yükle. MSSQL başarısızsa .env.local dene.
if err := godotenv.Load(".env", "mail.env"); err != nil {
log.Println("⚠️ .env / mail.env bulunamadı")
}
@@ -589,7 +816,15 @@ func main() {
// -------------------------------------------------------
// 🔗 DATABASE
// -------------------------------------------------------
db.ConnectMSSQL()
if err := db.ConnectMSSQL(); err != nil {
log.Println("⚠️ MSSQL ilk deneme başarısız:", err)
if err2 := godotenv.Overload(".env.local"); err2 != nil {
log.Println("⚠️ .env.local bulunamadı")
}
if err3 := db.ConnectMSSQL(); err3 != nil {
log.Fatal(err3)
}
}
pgDB, err := db.ConnectPostgres()
if err != nil {
@@ -612,6 +847,11 @@ func main() {
auditlog.Init(pgDB, 1000)
log.Println("🕵️ AuditLog sistemi başlatıldı (buffer=1000)")
// -------------------------------------------------------
// 🚀 TRANSLATION QUERY PERFORMANCE INDEXES
// -------------------------------------------------------
routes.EnsureTranslationPerfIndexes(pgDB)
// -------------------------------------------------------
// ✉️ MAILER INIT
// -------------------------------------------------------
@@ -630,6 +870,7 @@ func main() {
// 🌍 SERVER
// -------------------------------------------------------
router := InitRoutes(pgDB, db.MssqlDB, graphMailer)
startTranslationSyncScheduler(pgDB, db.MssqlDB)
handler := enableCORS(
middlewares.GlobalAuthMiddleware(

View File

@@ -57,6 +57,26 @@ type ttlCache struct {
m map[string]cacheItem
}
// routeMeta is the module/action pair resolved for a registered route.
type routeMeta struct {
module string
action string
}
// routeMetaCache memoizes route lookups, keyed by "METHOD pathTemplate".
var routeMetaCache sync.Map
// routeMetaFallback supplies module/action for routes that may be absent
// from mk_sys_routes, so the authz guard can still resolve them instead
// of rejecting the request.
var routeMetaFallback = map[string]routeMeta{
"GET /api/product-images": {module: "order", action: "view"},
"GET /api/product-images/{id}/content": {module: "order", action: "view"},
"GET /api/product-stock-query-by-attributes": {module: "order", action: "view"},
}
// userLookupPaths lists the lookup endpoints that receive the relaxed
// "any user CRUD permission" fallback (see resolveAnyUserCrudPermission).
var userLookupPaths = map[string]struct{}{
"/api/lookups/roles": {},
"/api/lookups/departments": {},
"/api/lookups/piyasalar": {},
"/api/lookups/nebim-users": {},
}
// =====================================================
// 🌍 GLOBAL SCOPE CACHE (for invalidation)
// =====================================================
@@ -537,9 +557,15 @@ func cachedPiyasaIntersectionAny(pg *sql.DB, c *ttlCache, userID, roleID int64,
err := pg.QueryRow(`
SELECT 1
FROM dfusr_piyasa up
LEFT JOIN mk_sales_piy p_code
ON UPPER(translate(TRIM(p_code.code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
= UPPER(translate(TRIM(up.piyasa_code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
LEFT JOIN mk_sales_piy p_title
ON UPPER(translate(TRIM(p_title.title), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
= UPPER(translate(TRIM(up.piyasa_code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
WHERE up.dfusr_id = $1
AND up.is_allowed = true
AND up.piyasa_code = ANY($2)
AND UPPER(TRIM(COALESCE(p_code.code, p_title.code, up.piyasa_code))) = ANY($2)
LIMIT 1
`, userID, pqArray(piyasaCodes)).Scan(&dummy)
@@ -840,6 +866,36 @@ func intersect(a, b []string) []string {
return out
}
// isUserLookupPath reports whether pathTemplate is one of the user-screen
// lookup endpoints that get the relaxed permission fallback.
func isUserLookupPath(pathTemplate string) bool {
	if _, found := userLookupPaths[pathTemplate]; found {
		return true
	}
	return false
}
// resolveAnyUserCrudPermission reports whether the user holds at least one
// of the basic CRUD permissions (view, insert or update) on the "user"
// module. It is used as a fallback so lookup endpoints stay reachable from
// user-management screens; the first resolver error aborts the check.
func resolveAnyUserCrudPermission(
	repo *permissions.PermissionRepository,
	userID int64,
	roleID int64,
	departmentCodes []string,
) (bool, error) {
	actions := [...]string{"view", "insert", "update"}
	for _, act := range actions {
		ok, err := repo.ResolvePermissionChain(userID, roleID, departmentCodes, "user", act)
		if err != nil {
			return false, err
		}
		if ok {
			return true, nil
		}
	}
	return false, nil
}
func AuthzGuardByRoute(pg *sql.DB) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
@@ -906,10 +962,15 @@ func AuthzGuardByRoute(pg *sql.DB) func(http.Handler) http.Handler {
}
// =====================================================
// 3⃣ ROUTE LOOKUP (path + method)
// 3⃣ ROUTE LOOKUP (path + method) with cache+fallback
// =====================================================
var module, action string
routeKey := r.Method + " " + pathTemplate
if cached, ok := routeMetaCache.Load(routeKey); ok {
meta := cached.(routeMeta)
module, action = meta.module, meta.action
} else {
err = pg.QueryRow(`
SELECT module_code, action
FROM mk_sys_routes
@@ -920,21 +981,21 @@ func AuthzGuardByRoute(pg *sql.DB) func(http.Handler) http.Handler {
r.Method,
).Scan(&module, &action)
if err != nil {
log.Printf(
"❌ AUTHZ: route not registered: %s %s",
r.Method,
pathTemplate,
)
if pathTemplate == "/api/password/change" {
http.Error(w, "password change route permission not found", http.StatusForbidden)
return
}
if err == nil {
routeMetaCache.Store(routeKey, routeMeta{module: module, action: action})
} else if fb, ok := routeMetaFallback[routeKey]; ok {
module, action = fb.module, fb.action
routeMetaCache.Store(routeKey, fb)
log.Printf("⚠️ AUTHZ: route lookup fallback used: %s", routeKey)
} else if err == sql.ErrNoRows {
log.Printf("❌ AUTHZ: route not registered: %s %s", r.Method, pathTemplate)
http.Error(w, "route permission not found", http.StatusForbidden)
return
} else {
log.Printf("❌ AUTHZ: route lookup db error: %s %s err=%v", r.Method, pathTemplate, err)
http.Error(w, "permission lookup failed", http.StatusInternalServerError)
return
}
}
// =====================================================
@@ -969,6 +1030,25 @@ func AuthzGuardByRoute(pg *sql.DB) func(http.Handler) http.Handler {
return
}
if !allowed && isUserLookupPath(pathTemplate) {
allowed, err = resolveAnyUserCrudPermission(
repo,
int64(claims.ID),
int64(claims.RoleID),
claims.DepartmentCodes,
)
if err != nil {
log.Printf(
"❌ AUTHZ: user lookup fallback resolve error user=%d path=%s err=%v",
claims.ID,
pathTemplate,
err,
)
http.Error(w, "forbidden", http.StatusForbidden)
return
}
}
if !allowed {
log.Printf(
@@ -988,7 +1068,23 @@ func AuthzGuardByRoute(pg *sql.DB) func(http.Handler) http.Handler {
}
// =====================================================
// 5PASS
// 5SCOPE CONTEXT ENRICH (for MSSQL piyasa filters)
// =====================================================
if !claims.IsAdmin() {
userPiy, err := authz.GetUserPiyasaCodes(pg, int(claims.ID))
if err != nil {
log.Printf("❌ AUTHZ: user piyasa resolve error user=%d err=%v", claims.ID, err)
http.Error(w, "forbidden", http.StatusForbidden)
return
}
if len(userPiy) > 0 {
r = r.WithContext(authz.WithPiyasaCodes(r.Context(), normalizeCodes(userPiy)))
}
}
// =====================================================
// 6⃣ PASS
// =====================================================
next.ServeHTTP(w, r)
})

View File

@@ -12,6 +12,8 @@ var publicPaths = []string{
"/api/auth/refresh",
"/api/password/forgot",
"/api/password/reset",
"/api/product-images/",
"/uploads/",
}
func GlobalAuthMiddleware(db any, next http.Handler) http.Handler {

View File

@@ -6,6 +6,7 @@ import (
"log"
"net"
"net/http"
"strings"
"time"
)
@@ -59,6 +60,9 @@ func RequestLogger(next http.Handler) http.Handler {
log.Printf("⬅️ %s %s | status=%d | %s", r.Method, r.URL.Path, sw.status, time.Since(start))
// High-frequency endpoints: skip route_access audit to reduce DB/log pressure.
skipAudit := r.Method == http.MethodGet && strings.HasPrefix(r.URL.Path, "/api/product-images")
// ---------- AUDIT (route_access) ----------
al := auditlog.ActivityLog{
ActionType: "route_access",
@@ -95,9 +99,9 @@ func RequestLogger(next http.Handler) http.Handler {
al.ErrorMessage = http.StatusText(sw.status)
}
// ✅ ESKİ: auditlog.Write(al)
// ✅ YENİ:
if !skipAudit {
auditlog.Enqueue(r.Context(), al)
}
if claims == nil {
log.Println("⚠️ LOGGER: claims is NIL")

View File

@@ -0,0 +1,46 @@
package models
// CustomerBalanceListParams holds the filter inputs for the customer
// balance list query. Field names mirror the Turkish UI filters; an empty
// value presumably means "no filter" — confirm against the handler.
type CustomerBalanceListParams struct {
SelectedDate string // balance cut-off date, passed through as received
CariSearch string // customer ("cari") search text
CariIlkGrup string // customer primary-group filter
Piyasa string // market ("piyasa") filter
Temsilci string // sales-representative filter
RiskDurumu string // risk-status filter
IslemTipi string // transaction-type filter
Ulke string // country filter
Il string // province filter
Ilce string // district filter
}
// CustomerBalanceListRow is one row of the customer balance report as
// serialized to the API; the json tags are the wire contract. The paired
// balance fields carry account-currency, TRY and USD amounts for two
// bases ("1_2" vs "1_3") — their exact semantics come from the SQL layer;
// confirm there before relying on them.
type CustomerBalanceListRow struct {
CariIlkGrup string `json:"cari_ilk_grup"`
Piyasa string `json:"piyasa"`
Temsilci string `json:"temsilci"`
Sirket string `json:"sirket"`
AnaCariKodu string `json:"ana_cari_kodu"`
AnaCariAdi string `json:"ana_cari_adi"`
CariKodu string `json:"cari_kodu"`
CariDetay string `json:"cari_detay"`
CariTip string `json:"cari_tip"`
Kanal1 string `json:"kanal_1"`
Ozellik03 string `json:"ozellik03"`
Ozellik05 string `json:"ozellik05"`
Ozellik06 string `json:"ozellik06"`
Ozellik07 string `json:"ozellik07"`
Il string `json:"il"`
Ilce string `json:"ilce"`
MuhasebeKodu string `json:"muhasebe_kodu"`
TC string `json:"tc"`
RiskDurumu string `json:"risk_durumu"`
SirketDetay string `json:"sirket_detay"`
CariDoviz string `json:"cari_doviz"` // customer account currency code
Bakiye12 float64 `json:"bakiye_1_2"` // balance (account currency), basis 1_2
TLBakiye12 float64 `json:"tl_bakiye_1_2"` // balance in TRY, basis 1_2
USDBakiye12 float64 `json:"usd_bakiye_1_2"` // balance in USD, basis 1_2
Bakiye13 float64 `json:"bakiye_1_3"` // balance (account currency), basis 1_3
TLBakiye13 float64 `json:"tl_bakiye_1_3"` // balance in TRY, basis 1_3
USDBakiye13 float64 `json:"usd_bakiye_1_3"` // balance in USD, basis 1_3
VadeGun float64 `json:"vade_gun"` // due days
VadeBelgeGun float64 `json:"vade_belge_tarihi_gun"` // due days relative to document date
}

View File

@@ -10,4 +10,5 @@ type CustomerList struct {
Musteri_Temsilcisi string `json:"Musteri_Temsilcisi"`
Ulke string `json:"Ulke"`
Doviz_cinsi string `json:"Doviz_Cinsi"`
PostalAddressID string `json:"PostalAddressID"`
}

View File

@@ -0,0 +1,26 @@
package models
// MarketMailOption is a generic id/label pair used by the market-mail
// mapping UI lookups.
type MarketMailOption struct {
ID string `json:"id"`
Label string `json:"label"`
}
// MarketOption identifies a market record for selection lists.
type MarketOption struct {
ID int64 `json:"id"`
Code string `json:"code"`
Title string `json:"title"`
}
// MailOption describes a selectable mail recipient.
type MailOption struct {
ID string `json:"id"`
Email string `json:"email"`
DisplayName string `json:"display_name"`
}
// MarketMailMappingRow is one market together with its currently mapped
// mail recipients (both the raw ids and the resolved display options).
type MarketMailMappingRow struct {
MarketID int64 `json:"market_id"`
MarketCode string `json:"market_code"`
MarketTitle string `json:"market_title"`
MailIDs []string `json:"mail_ids"`
Mails []MarketMailOption `json:"mails"`
}

View File

@@ -9,6 +9,7 @@ type OrderList struct {
OrderHeaderID string `json:"OrderHeaderID"`
OrderNumber string `json:"OrderNumber"`
OrderDate string `json:"OrderDate"`
TerminTarihi string `json:"TerminTarihi"`
// 🧾 Cari Bilgileri
CurrAccCode string `json:"CurrAccCode"`

View File

@@ -13,8 +13,10 @@ type OrderProductionItem struct {
OldItemCode string `json:"OldItemCode"`
OldColor string `json:"OldColor"`
OldColorDescription string `json:"OldColorDescription"`
OldDim2 string `json:"OldDim2"`
OldDesc string `json:"OldDesc"`
OldQty float64 `json:"OldQty"`
NewItemCode string `json:"NewItemCode"`
NewColor string `json:"NewColor"`
@@ -22,4 +24,6 @@ type OrderProductionItem struct {
NewDesc string `json:"NewDesc"`
IsVariantMissing bool `json:"IsVariantMissing"`
OldDueDate string `json:"OldDueDate"`
NewDueDate string `json:"NewDueDate"`
}

View File

@@ -4,13 +4,19 @@ type OrderProductionUpdateLine struct {
OrderLineID string `json:"OrderLineID"`
NewItemCode string `json:"NewItemCode"`
NewColor string `json:"NewColor"`
ItemDim1Code *string `json:"ItemDim1Code,omitempty"`
NewDim2 string `json:"NewDim2"`
NewDesc string `json:"NewDesc"`
OldDueDate string `json:"OldDueDate"`
NewDueDate string `json:"NewDueDate"`
}
// OrderProductionUpdatePayload is the request body for the order-production
// bulk update: the lines to change, whether missing variants should be
// inserted, draft cd-item rows to create, product attribute rows to apply,
// and an optional header-level average due date.
type OrderProductionUpdatePayload struct {
Lines []OrderProductionUpdateLine `json:"lines"`
InsertMissing bool `json:"insertMissing"`
CdItems []OrderProductionCdItemDraft `json:"cdItems"`
ProductAttributes []OrderProductionItemAttributeRow `json:"productAttributes"`
HeaderAverageDueDate *string `json:"HeaderAverageDueDate,omitempty"`
}
type OrderProductionMissingVariant struct {
@@ -22,3 +28,70 @@ type OrderProductionMissingVariant struct {
ItemDim2Code string `json:"ItemDim2Code"`
ItemDim3Code string `json:"ItemDim3Code"`
}
// OrderProductionBarcodeValidation describes one barcode validation result:
// a status code and message, plus the barcode and the resolved item/variant
// codes when they are available (omitted from JSON when empty).
type OrderProductionBarcodeValidation struct {
Code string `json:"code"`
Message string `json:"message"`
Barcode string `json:"barcode,omitempty"`
BarcodeTypeCode string `json:"barcodeTypeCode,omitempty"`
ItemTypeCode int16 `json:"ItemTypeCode,omitempty"`
ItemCode string `json:"ItemCode,omitempty"`
ColorCode string `json:"ColorCode,omitempty"`
ItemDim1Code string `json:"ItemDim1Code,omitempty"`
ItemDim2Code string `json:"ItemDim2Code,omitempty"`
ItemDim3Code string `json:"ItemDim3Code,omitempty"`
}

// OrderProductionCdItemDraft is a draft cd-item row to be created for a
// missing item. Optional columns are pointers so a NULL can be expressed
// distinctly from an empty value.
type OrderProductionCdItemDraft struct {
ItemTypeCode int16 `json:"ItemTypeCode"`
ItemCode string `json:"ItemCode"`
ItemDimTypeCode *int16 `json:"ItemDimTypeCode"`
ProductTypeCode *int16 `json:"ProductTypeCode"`
ProductHierarchyID *int `json:"ProductHierarchyID"`
UnitOfMeasureCode1 *string `json:"UnitOfMeasureCode1"`
ItemAccountGrCode *string `json:"ItemAccountGrCode"`
ItemTaxGrCode *string `json:"ItemTaxGrCode"`
ItemPaymentPlanGrCode *string `json:"ItemPaymentPlanGrCode"`
ItemDiscountGrCode *string `json:"ItemDiscountGrCode"`
ItemVendorGrCode *string `json:"ItemVendorGrCode"`
PromotionGroupCode *string `json:"PromotionGroupCode"`
ProductCollectionGrCode *string `json:"ProductCollectionGrCode"`
StorePriceLevelCode *string `json:"StorePriceLevelCode"`
PerceptionOfFashionCode *string `json:"PerceptionOfFashionCode"`
CommercialRoleCode *string `json:"CommercialRoleCode"`
StoreCapacityLevelCode *string `json:"StoreCapacityLevelCode"`
CustomsTariffNumberCode *string `json:"CustomsTariffNumberCode"`
CompanyCode *string `json:"CompanyCode"`
}

// OrderProductionLookupOption is a generic code/description pair used to
// populate lookup dropdowns.
type OrderProductionLookupOption struct {
Code string `json:"code"`
Description string `json:"description"`
}

// OrderProductionItemAttributeRow is one item-attribute assignment to apply
// to an item.
type OrderProductionItemAttributeRow struct {
ItemTypeCode int16 `json:"ItemTypeCode"`
ItemCode string `json:"ItemCode"`
AttributeTypeCode int `json:"AttributeTypeCode"`
AttributeCode string `json:"AttributeCode"`
}

// OrderProductionCdItemLookups bundles every lookup list needed by the
// cd-item draft form, one list per optional cd-item column.
type OrderProductionCdItemLookups struct {
ItemDimTypeCodes []OrderProductionLookupOption `json:"itemDimTypeCodes"`
ProductTypeCodes []OrderProductionLookupOption `json:"productTypeCodes"`
ProductHierarchyIDs []OrderProductionLookupOption `json:"productHierarchyIDs"`
UnitOfMeasureCode1List []OrderProductionLookupOption `json:"unitOfMeasureCode1List"`
ItemAccountGrCodes []OrderProductionLookupOption `json:"itemAccountGrCodes"`
ItemTaxGrCodes []OrderProductionLookupOption `json:"itemTaxGrCodes"`
ItemPaymentPlanGrCodes []OrderProductionLookupOption `json:"itemPaymentPlanGrCodes"`
ItemDiscountGrCodes []OrderProductionLookupOption `json:"itemDiscountGrCodes"`
ItemVendorGrCodes []OrderProductionLookupOption `json:"itemVendorGrCodes"`
PromotionGroupCodes []OrderProductionLookupOption `json:"promotionGroupCodes"`
ProductCollectionGrCodes []OrderProductionLookupOption `json:"productCollectionGrCodes"`
StorePriceLevelCodes []OrderProductionLookupOption `json:"storePriceLevelCodes"`
PerceptionOfFashionCodes []OrderProductionLookupOption `json:"perceptionOfFashionCodes"`
CommercialRoleCodes []OrderProductionLookupOption `json:"commercialRoleCodes"`
StoreCapacityLevelCodes []OrderProductionLookupOption `json:"storeCapacityLevelCodes"`
CustomsTariffNumbers []OrderProductionLookupOption `json:"customsTariffNumbers"`
CompanyCodes []OrderProductionLookupOption `json:"companyCodes"`
}

View File

@@ -0,0 +1,18 @@
package models
// ProductPricing is one row of the product pricing report: cost, stock and
// date information plus product grouping attributes used for filtering.
type ProductPricing struct {
ProductCode string `json:"ProductCode"`
CostPrice float64 `json:"CostPrice"`
StockQty float64 `json:"StockQty"`
StockEntryDate string `json:"StockEntryDate"`
LastPricingDate string `json:"LastPricingDate"`
AskiliYan string `json:"AskiliYan"`
Kategori string `json:"Kategori"`
UrunIlkGrubu string `json:"UrunIlkGrubu"`
UrunAnaGrubu string `json:"UrunAnaGrubu"`
UrunAltGrubu string `json:"UrunAltGrubu"`
Icerik string `json:"Icerik"`
Karisim string `json:"Karisim"`
Marka string `json:"Marka"`
BrandGroupSec string `json:"BrandGroupSec"`
}

View File

@@ -0,0 +1,15 @@
package models
// ProductAttributeOption is a selectable attribute value for an item type:
// the attribute type (code + description) and the value (code + description).
type ProductAttributeOption struct {
ItemTypeCode int16 `json:"item_type_code"`
AttributeTypeCode int `json:"attribute_type_code"`
AttributeTypeDescription string `json:"attribute_type_description"`
AttributeCode string `json:"attribute_code"`
AttributeDescription string `json:"attribute_description"`
}

// ProductItemAttributeValue is a single attribute value assigned to an item.
type ProductItemAttributeValue struct {
ItemTypeCode int16 `json:"item_type_code"`
AttributeTypeCode int `json:"attribute_type_code"`
AttributeCode string `json:"attribute_code"`
}

View File

@@ -4,4 +4,5 @@ type ProductSecondColor struct {
ProductCode string `json:"product_code"`
ColorCode string `json:"color_code"`
ItemDim2Code string `json:"item_dim2_code"`
ColorDescription string `json:"color_description"`
}

View File

@@ -0,0 +1,7 @@
package models
// StatementAgingParams is the request payload for the statement aging
// report: the account, the cutoff date, and the "parislemler" selection.
type StatementAgingParams struct {
AccountCode string `json:"accountcode"`
EndDate string `json:"enddate"`
Parislemler []string `json:"parislemler"`
}

View File

@@ -6,5 +6,6 @@ type StatementParams struct {
EndDate string `json:"enddate"`
AccountCode string `json:"accountcode"`
LangCode string `json:"langcode"`
Parislemler []string `json:"parislemler"` // ✅ slice olmalı
Parislemler []string `json:"parislemler"`
ExcludeOpening bool `json:"excludeopening"`
}

16
svc/models/translator.go Normal file
View File

@@ -0,0 +1,16 @@
package models
import "time"
// TranslatorRow is one row of the translation table: a translation key and
// target language together with the Turkish source text, the translated
// text, and bookkeeping fields (manual flag, status, provider, update time).
type TranslatorRow struct {
ID int64 `json:"id"`
TKey string `json:"t_key"`
LangCode string `json:"lang_code"`
SourceType string `json:"source_type"`
SourceTextTR string `json:"source_text_tr"`
TranslatedText string `json:"translated_text"`
IsManual bool `json:"is_manual"`
Status string `json:"status"`
Provider string `json:"provider"`
UpdatedAt time.Time `json:"updated_at"`
}

View File

@@ -13,13 +13,48 @@ import (
func GetAccounts(ctx context.Context) ([]models.Account, error) {
piyasaFilter := authz.BuildMSSQLPiyasaFilter(ctx, "f2.CustomerAtt01")
piyasaFilter := authz.BuildMSSQLPiyasaFilter(
ctx,
"CASE WHEN b.CurrAccTypeCode = 1 THEN vp.VendorAtt01 ELSE f2.CustomerAtt01 END",
)
if strings.TrimSpace(piyasaFilter) == "" {
piyasaFilter = "1=1"
}
query := fmt.Sprintf(`
;WITH VendorPiyasa AS
(
SELECT
Cari8 = LEFT(REPLACE(P.CurrAccCode, ' ', ''), 8),
VendorAtt01 = MAX(P.VendorAtt01)
FROM
(
SELECT
CurrAccTypeCode,
CurrAccCode,
VendorAtt01 = MAX(ISNULL([1], ''))
FROM
(
SELECT
c.CurrAccTypeCode,
c.CurrAccCode,
a.AttributeTypeCode,
a.AttributeCode
FROM cdCurrAcc c WITH (NOLOCK)
LEFT JOIN prCurrAccAttribute a WITH (NOLOCK)
ON a.CurrAccTypeCode = c.CurrAccTypeCode
AND a.CurrAccCode = c.CurrAccCode
WHERE c.CurrAccTypeCode = 1
) d
PIVOT
(
MAX(AttributeCode) FOR AttributeTypeCode IN ([1])
) pvt
GROUP BY CurrAccTypeCode, CurrAccCode
) P
GROUP BY LEFT(REPLACE(P.CurrAccCode, ' ', ''), 8)
)
SELECT
x.AccountCode,
MAX(x.AccountName) AS AccountName
@@ -29,10 +64,16 @@ func GetAccounts(ctx context.Context) ([]models.Account, error) {
COALESCE(d.CurrAccDescription, '') AS AccountName
FROM trCurrAccBook b
LEFT JOIN cdCurrAccDesc d
ON d.CurrAccCode = b.CurrAccCode
JOIN CustomerAttributesFilter f2
ON f2.CurrAccCode = b.CurrAccCode
WHERE %s
ON d.CurrAccTypeCode = b.CurrAccTypeCode
AND d.CurrAccCode = b.CurrAccCode
AND d.LangCode = 'TR'
LEFT JOIN CustomerAttributesFilter f2
ON f2.CurrAccTypeCode = b.CurrAccTypeCode
AND f2.CurrAccCode = b.CurrAccCode
LEFT JOIN VendorPiyasa vp
ON vp.Cari8 = LEFT(REPLACE(b.CurrAccCode, ' ', ''), 8)
WHERE b.CurrAccTypeCode IN (1,3)
AND %s
) x
GROUP BY x.AccountCode
ORDER BY x.AccountCode
@@ -60,15 +101,22 @@ func GetAccounts(ctx context.Context) ([]models.Account, error) {
return nil, err
}
if len(acc.AccountCode) >= 4 {
acc.DisplayCode =
strings.TrimSpace(acc.AccountCode[:3] + " " + acc.AccountCode[3:])
} else {
acc.DisplayCode = acc.AccountCode
}
acc.DisplayCode = formatAccountDisplayCode(acc.AccountCode)
accounts = append(accounts, acc)
}
return accounts, rows.Err()
}
// formatAccountDisplayCode produces the user-facing form of an account
// code; currently this is simply the code capped at eight runes.
func formatAccountDisplayCode(code string) string {
	return normalizeAccountCode8(code)
}

// normalizeAccountCode8 caps an account code at its first eight runes.
// Shorter codes are returned unchanged (no padding, no trimming).
func normalizeAccountCode8(code string) string {
	r := []rune(code)
	if len(r) <= 8 {
		return code
	}
	return string(r[:8])
}

View File

@@ -3,62 +3,10 @@ package queries
import (
"bssapp-backend/models"
"database/sql"
"sync"
"time"
"strings"
)
/* ===============================
CACHE STRUCT
================================ */
type currencyCacheItem struct {
data *models.TodayCurrencyV3
expiresAt time.Time
}
var (
currencyCache = make(map[string]currencyCacheItem)
cacheMutex sync.RWMutex
cacheTTL = 5 * time.Minute
)
/* ===============================
MAIN CACHE FUNC
================================ */
func GetCachedCurrencyV3(db *sql.DB, code string) (*models.TodayCurrencyV3, error) {
now := time.Now()
/* ---------- READ CACHE ---------- */
cacheMutex.RLock()
item, ok := currencyCache[code]
if ok && now.Before(item.expiresAt) {
cacheMutex.RUnlock()
return item.data, nil
}
cacheMutex.RUnlock()
/* ---------- FETCH DB ---------- */
data, err := GetTodayCurrencyV3(db, code)
if err != nil {
return nil, err
}
/* ---------- WRITE CACHE ---------- */
cacheMutex.Lock()
currencyCache[code] = currencyCacheItem{
data: data,
expiresAt: now.Add(cacheTTL),
}
cacheMutex.Unlock()
return data, nil
// GetCachedCurrencyV3 keeps compatibility with existing order routes. The
// old in-memory cache layer was removed; this now only normalizes the
// currency code (trim + upper-case) and delegates to GetTodayCurrencyV3.
func GetCachedCurrencyV3(db *sql.DB, currencyCode string) (*models.TodayCurrencyV3, error) {
return GetTodayCurrencyV3(db, strings.ToUpper(strings.TrimSpace(currencyCode)))
}

View File

@@ -0,0 +1,1019 @@
package queries
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/internal/authz"
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"log"
"sort"
"strconv"
"strings"
)
// mkCariBakiyeLine is one raw row returned by dbo.MK_CARI_BAKIYE_LIST: a
// balance per cari / currency / company, plus the islem-tipi columns used
// to route the amount into the 1_2 / 1_3 buckets.
type mkCariBakiyeLine struct {
CurrAccTypeCode int
CariKodu string
CariDoviz string
SirketKodu int
PislemTipi string
ParasalIslemTipi string
YerelBakiye float64
Bakiye float64
VadeGun float64
VadeBelgeGun float64
}

// cariMeta holds the descriptive attributes resolved for a single cari
// account; MuhasebeKodu and SirketDetay are filled in later from the GL
// and company lookups.
type cariMeta struct {
CariDetay string
CariTip string
Kanal1 string
Piyasa string
Temsilci string
Ulke string
Il string
Ilce string
TC string
RiskDurumu string
MuhasebeKodu string
SirketDetay string
}

// masterCariMeta holds the same attributes resolved for a "master" cari
// (the 8-character code prefix); non-blank values overlay the per-cari ones.
type masterCariMeta struct {
CariDetay string
Kanal1 string
Piyasa string
Temsilci string
Ulke string
Il string
Ilce string
RiskDurumu string
}

// balanceFilters holds the parsed request filters; each set is a whitelist
// and an empty set disables filtering on that dimension.
type balanceFilters struct {
cariIlkGrup map[string]struct{}
piyasa map[string]struct{}
temsilci map[string]struct{}
riskDurumu map[string]struct{}
islemTipi map[string]struct{}
ulke map[string]struct{}
il map[string]struct{}
ilce map[string]struct{}
}
// GetCustomerBalanceList builds the aggregated customer/vendor balance
// report for params.SelectedDate. It loads raw lines from
// dbo.MK_CARI_BAKIYE_LIST, enriches them with cari and master-cari
// metadata, company names, GL account codes and TRY exchange rates,
// applies the request filters, and aggregates per
// (acc type, cari, currency, company) into the 1_2 / 1_3 buckets
// (native, TL and USD amounts). Rows are sorted by master cari, cari,
// then currency.
func GetCustomerBalanceList(ctx context.Context, params models.CustomerBalanceListParams) ([]models.CustomerBalanceListRow, error) {
if strings.TrimSpace(params.SelectedDate) == "" {
return nil, fmt.Errorf("selected_date is required")
}
lines, err := loadBalanceLines(ctx, params.SelectedDate, params.CariSearch)
if err != nil {
return nil, err
}
// Metadata lookups are best-effort: on failure, log and continue with
// empty maps rather than failing the whole report.
metaMap, err := loadCariMetaMap(ctx, lines)
if err != nil {
log.Printf("customer_balance_list: cari meta query failed, fallback without meta: %v", err)
metaMap = map[string]cariMeta{}
}
masterMetaMap, err := loadMasterCariMetaMap(ctx, lines)
if err != nil {
log.Printf("customer_balance_list: master cari meta query failed, fallback without master meta: %v", err)
masterMetaMap = map[string]masterCariMeta{}
}
companyMap, err := loadCompanyMap(ctx)
if err != nil {
return nil, err
}
glMap, err := loadGLAccountMap(ctx, lines)
if err != nil {
return nil, err
}
rateMap, err := loadNearestTryRates(ctx)
if err != nil {
return nil, err
}
usdTry := rateMap["USD"]
if usdTry <= 0 {
// NOTE(review): with no USD rate this makes the USD columns equal the
// TRY amounts (rate 1) instead of erroring — confirm this is intended.
usdTry = 1
}
filters := buildFilters(params)
agg := make(map[string]*models.CustomerBalanceListRow, len(lines))
for _, ln := range lines {
cari := strings.TrimSpace(ln.CariKodu)
if cari == "" {
continue
}
curr := strings.ToUpper(strings.TrimSpace(ln.CariDoviz))
if curr == "" {
curr = "TRY"
}
meta := metaMap[metaKey(ln.CurrAccTypeCode, cari)]
meta.MuhasebeKodu = glMap[glKey(ln.CurrAccTypeCode, cari, ln.SirketKodu)]
meta.SirketDetay = companyMap[ln.SirketKodu]
// Overlay master-cari attributes; a non-blank master value wins over
// the per-cari value.
master := deriveMasterCari(cari)
mm := masterMetaMap[master]
if strings.TrimSpace(mm.Kanal1) != "" {
meta.Kanal1 = mm.Kanal1
}
if strings.TrimSpace(mm.Piyasa) != "" {
meta.Piyasa = mm.Piyasa
}
if strings.TrimSpace(mm.Temsilci) != "" {
meta.Temsilci = mm.Temsilci
}
if strings.TrimSpace(mm.Ulke) != "" {
meta.Ulke = mm.Ulke
}
if strings.TrimSpace(mm.Il) != "" {
meta.Il = mm.Il
}
if strings.TrimSpace(mm.Ilce) != "" {
meta.Ilce = mm.Ilce
}
if strings.TrimSpace(mm.RiskDurumu) != "" {
meta.RiskDurumu = mm.RiskDurumu
}
if !filters.matchLine(ln.PislemTipi, meta) {
continue
}
// Aggregation key: type|cari|currency|company.
key := strconv.Itoa(ln.CurrAccTypeCode) + "|" + cari + "|" + curr + "|" + strconv.Itoa(ln.SirketKodu)
row, ok := agg[key]
if !ok {
row = &models.CustomerBalanceListRow{
CariIlkGrup: meta.Kanal1,
Piyasa: meta.Piyasa,
Temsilci: meta.Temsilci,
Sirket: strconv.Itoa(ln.SirketKodu),
AnaCariKodu: master,
AnaCariAdi: firstNonEmpty(mm.CariDetay, meta.CariDetay),
CariKodu: cari,
CariDetay: meta.CariDetay,
CariTip: meta.CariTip,
Kanal1: meta.Kanal1,
Ozellik03: meta.RiskDurumu,
Ozellik05: meta.Ulke,
Ozellik06: meta.Il,
Ozellik07: meta.Ilce,
Il: meta.Il,
Ilce: meta.Ilce,
MuhasebeKodu: meta.MuhasebeKodu,
TC: meta.TC,
RiskDurumu: meta.RiskDurumu,
SirketDetay: meta.SirketDetay,
CariDoviz: curr,
}
agg[key] = row
}
// Convert once per line; a line may feed both buckets (parasal tip "1").
usd := toUSD(ln.Bakiye, curr, usdTry, rateMap)
add12, add13 := resolveBalanceBuckets(ln)
if add12 {
row.Bakiye12 += ln.Bakiye
row.TLBakiye12 += ln.YerelBakiye
row.USDBakiye12 += usd
}
if add13 {
row.Bakiye13 += ln.Bakiye
row.TLBakiye13 += ln.YerelBakiye
row.USDBakiye13 += usd
}
}
out := make([]models.CustomerBalanceListRow, 0, len(agg))
for _, v := range agg {
out = append(out, *v)
}
sort.Slice(out, func(i, j int) bool {
if out[i].AnaCariKodu == out[j].AnaCariKodu {
if out[i].CariKodu == out[j].CariKodu {
return out[i].CariDoviz < out[j].CariDoviz
}
return out[i].CariKodu < out[j].CariKodu
}
return out[i].AnaCariKodu < out[j].AnaCariKodu
})
return out, nil
}
// loadMasterCariMetaMap resolves descriptive attributes for each distinct
// master cari (8-character code prefix) present in lines. For each prefix
// the first cdCurrAcc row (ordered by CurrAccCode) is taken as the
// representative; vendor (type 1) vs customer attribute sources are chosen
// per column. Keys of the returned map are the trimmed master codes.
func loadMasterCariMetaMap(ctx context.Context, lines []mkCariBakiyeLine) (map[string]masterCariMeta, error) {
masters := make(map[string]struct{})
for _, ln := range lines {
m := strings.TrimSpace(deriveMasterCari(ln.CariKodu))
if m != "" {
masters[m] = struct{}{}
}
}
if len(masters) == 0 {
return map[string]masterCariMeta{}, nil
}
// The IN list is built by quotedInList (values are quoted and escaped).
query := fmt.Sprintf(`
WITH BaseCari AS
(
SELECT
CB.CurrAccCode,
CB.CurrAccTypeCode,
MasterCari = LEFT(CB.CurrAccCode, 8),
rn = ROW_NUMBER() OVER
(
PARTITION BY LEFT(CB.CurrAccCode, 8)
ORDER BY CB.CurrAccCode
)
FROM cdCurrAcc CB WITH (NOLOCK)
WHERE CB.CurrAccTypeCode IN (1,3)
AND LEFT(CB.CurrAccCode, 8) IN (%s)
),
FirstCari AS
(
SELECT *
FROM BaseCari
WHERE rn = 1
)
SELECT
CariKodu = F.MasterCari,
CariDetay = ISNULL(cd.CurrAccDescription, ''),
KANAL_1 = ISNULL(CASE WHEN F.CurrAccTypeCode=1 THEN VDesc.VendorAtt08Desc ELSE CDesc.CustomerAtt08Desc END, ''),
PIYASA = ISNULL(CASE WHEN F.CurrAccTypeCode=1 THEN VDesc.VendorAtt01Desc ELSE CDesc.CustomerAtt01Desc END, ''),
CARI_TEMSILCI = ISNULL(
CASE
WHEN ISNULL(CASE WHEN F.CurrAccTypeCode = 1 THEN VDesc.VendorAtt02Desc ELSE CDesc.CustomerAtt02Desc END,'') = ''
THEN ISNULL(CASE WHEN F.CurrAccTypeCode = 1 THEN VAttr.VendorAtt09 ELSE CAttr.CustomerAtt09 END,'')
ELSE CASE WHEN F.CurrAccTypeCode = 1 THEN VDesc.VendorAtt02Desc ELSE CDesc.CustomerAtt02Desc END
END,''
),
ULKE = ISNULL(CASE WHEN F.CurrAccTypeCode=1 THEN VDesc.VendorAtt05Desc ELSE CDesc.CustomerAtt05Desc END, ''),
IL = ISNULL(CASE WHEN F.CurrAccTypeCode=1 THEN VDesc.VendorAtt06Desc ELSE CDesc.CustomerAtt06Desc END, ''),
ILCE = ISNULL(CASE WHEN F.CurrAccTypeCode=1 THEN VDesc.VendorAtt07Desc ELSE CDesc.CustomerAtt07Desc END, ''),
Risk_Durumu = ISNULL(CASE WHEN F.CurrAccTypeCode=1 THEN VDesc.VendorAtt03Desc ELSE CDesc.CustomerAtt03Desc END, '')
FROM FirstCari F
LEFT JOIN cdCurrAccDesc cd WITH (NOLOCK)
ON cd.CurrAccTypeCode = F.CurrAccTypeCode
AND cd.CurrAccCode = F.CurrAccCode
AND cd.LangCode = 'TR'
LEFT JOIN VendorAttributeDescriptions('TR') VDesc
ON VDesc.CurrAccCode = F.CurrAccCode
AND VDesc.CurrAccTypeCode = F.CurrAccTypeCode
LEFT JOIN CustomerAttributeDescriptions('TR') CDesc
ON CDesc.CurrAccCode = F.CurrAccCode
AND CDesc.CurrAccTypeCode = F.CurrAccTypeCode
LEFT JOIN VendorAttributes VAttr
ON VAttr.CurrAccCode = F.CurrAccCode
AND VAttr.CurrAccTypeCode = F.CurrAccTypeCode
LEFT JOIN CustomerAttributes CAttr
ON CAttr.CurrAccCode = F.CurrAccCode
AND CAttr.CurrAccTypeCode = F.CurrAccTypeCode
ORDER BY F.MasterCari;
`, quotedInList(masters))
rows, err := db.MssqlDB.QueryContext(ctx, query)
if err != nil {
return nil, fmt.Errorf("master cari meta query error: %w", err)
}
defer rows.Close()
out := make(map[string]masterCariMeta, len(masters))
for rows.Next() {
var master string
var m masterCariMeta
if err := rows.Scan(
&master,
&m.CariDetay,
&m.Kanal1,
&m.Piyasa,
&m.Temsilci,
&m.Ulke,
&m.Il,
&m.Ilce,
&m.RiskDurumu,
); err != nil {
return nil, err
}
out[strings.TrimSpace(master)] = m
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
// loadBalanceLines reads raw balance lines from dbo.MK_CARI_BAKIYE_LIST for
// the selected date, restricted by the caller's piyasa scope and an optional
// cari-code substring search. The "parasal islem" column is named
// differently across environments, so several SELECT expressions are tried
// in order and an "invalid column name" error advances to the next
// candidate.
func loadBalanceLines(ctx context.Context, selectedDate, cariSearch string) ([]mkCariBakiyeLine, error) {
piyasaScope, err := buildPiyasaExistsForCariCode(ctx, "CariKodu")
if err != nil {
return nil, err
}
// Template goes through fmt.Sprintf: %% is a literal %, the first %s is
// the parasal SELECT expression, the second is the piyasa predicate.
queryTemplate := `
SELECT
CurrAccTypeCode,
CariKodu,
CariDoviz,
SirketKodu,
PislemTipi,
%s
YerelBakiye,
Bakiye,
CAST(0 AS DECIMAL(18,4)) AS Vade_Gun,
CAST(0 AS DECIMAL(18,4)) AS Vade_BelgeTarihi_Gun
FROM dbo.MK_CARI_BAKIYE_LIST(@SonTarih)
WHERE (@CariSearch = '' OR CariKodu LIKE '%%' + @CariSearch + '%%')
AND %s
`
// Prefer the column name discovered from the result-set metadata, then
// the known aliases, and finally a constant '' so the query still runs.
selectParasalCandidates := make([]string, 0, 7)
if expr := strings.TrimSpace(resolveParasalIslemSelectExpr(ctx, "SELECT * FROM dbo.MK_CARI_BAKIYE_LIST('2000-01-01')")); expr != "" {
selectParasalCandidates = append(selectParasalCandidates, expr)
}
selectParasalCandidates = append(selectParasalCandidates,
"CAST(ATAtt01 AS varchar(16)) AS ParasalIslemTipi,",
"CAST(ParasalIslemTipi AS varchar(16)) AS ParasalIslemTipi,",
"CAST(ParislemTipi AS varchar(16)) AS ParasalIslemTipi,",
"CAST(ParIslemTipi AS varchar(16)) AS ParasalIslemTipi,",
"CAST('' AS varchar(16)) AS ParasalIslemTipi,",
)
var rows *sql.Rows
for i, sel := range selectParasalCandidates {
query := fmt.Sprintf(queryTemplate, sel, piyasaScope)
rows, err = db.MssqlDB.QueryContext(ctx, query,
sql.Named("SonTarih", selectedDate),
sql.Named("CariSearch", strings.TrimSpace(cariSearch)),
)
if err == nil {
break
}
if i < len(selectParasalCandidates)-1 && isInvalidColumnError(err) {
continue
}
return nil, fmt.Errorf("MK_CARI_BAKIYE_LIST query error: %w", err)
}
defer rows.Close()
out := make([]mkCariBakiyeLine, 0, 4096)
for rows.Next() {
var r mkCariBakiyeLine
if err := rows.Scan(
&r.CurrAccTypeCode,
&r.CariKodu,
&r.CariDoviz,
&r.SirketKodu,
&r.PislemTipi,
&r.ParasalIslemTipi,
&r.YerelBakiye,
&r.Bakiye,
&r.VadeGun,
&r.VadeBelgeGun,
); err != nil {
return nil, err
}
out = append(out, r)
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
// loadCariMetaMap fetches descriptive attributes for every distinct cari
// code in lines (vendors: CurrAccTypeCode 1, customers: 3). Vendor or
// customer attribute sources are chosen per row by type. Keys of the
// returned map come from metaKey(type, code).
func loadCariMetaMap(ctx context.Context, lines []mkCariBakiyeLine) (map[string]cariMeta, error) {
vendorCodes := make(map[string]struct{})
customerCodes := make(map[string]struct{})
for _, ln := range lines {
code := strings.TrimSpace(ln.CariKodu)
if code == "" {
continue
}
if ln.CurrAccTypeCode == 1 {
vendorCodes[code] = struct{}{}
} else if ln.CurrAccTypeCode == 3 {
customerCodes[code] = struct{}{}
}
}
if len(vendorCodes) == 0 && len(customerCodes) == 0 {
return map[string]cariMeta{}, nil
}
// One OR-joined predicate per populated code set (values quoted/escaped
// by quotedInList).
whereParts := make([]string, 0, 2)
if len(vendorCodes) > 0 {
whereParts = append(whereParts, fmt.Sprintf("(c.CurrAccTypeCode=1 AND c.CurrAccCode IN (%s))", quotedInList(vendorCodes)))
}
if len(customerCodes) > 0 {
whereParts = append(whereParts, fmt.Sprintf("(c.CurrAccTypeCode=3 AND c.CurrAccCode IN (%s))", quotedInList(customerCodes)))
}
query := fmt.Sprintf(`
SELECT
c.CurrAccTypeCode,
c.CurrAccCode,
CariDetay = ISNULL(d.CurrAccDescription, ''),
CariTip = CASE WHEN c.CurrAccTypeCode = 1 THEN N'Satıcı' ELSE N'Müşteri' END,
KANAL_1 = ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt08Desc ELSE cad.CustomerAtt08Desc END, ''),
PIYASA = ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt01Desc ELSE cad.CustomerAtt01Desc END, ''),
CARI_TEMSILCI = ISNULL(
CASE
WHEN ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt02Desc ELSE cad.CustomerAtt02Desc END, '') = ''
THEN ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN va.VendorAtt09 ELSE ca.CustomerAtt09 END, '')
ELSE CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt02Desc ELSE cad.CustomerAtt02Desc END
END,
''),
ULKE = ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt05Desc ELSE cad.CustomerAtt05Desc END, ''),
IL = ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt06Desc ELSE cad.CustomerAtt06Desc END, ''),
ILCE = ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt07Desc ELSE cad.CustomerAtt07Desc END, ''),
TC = ISNULL(c.IdentityNum, ''),
Risk_Durumu = ISNULL(CASE WHEN c.CurrAccTypeCode=1 THEN vad.VendorAtt03Desc ELSE cad.CustomerAtt03Desc END, '')
FROM cdCurrAcc c WITH(NOLOCK)
LEFT JOIN cdCurrAccDesc d WITH(NOLOCK)
ON d.CurrAccTypeCode = c.CurrAccTypeCode
AND d.CurrAccCode = c.CurrAccCode
AND d.LangCode = 'TR'
LEFT JOIN VendorAttributes va WITH(NOLOCK)
ON va.CurrAccTypeCode = c.CurrAccTypeCode
AND va.CurrAccCode = c.CurrAccCode
LEFT JOIN VendorAttributeDescriptions('TR') vad
ON vad.CurrAccTypeCode = c.CurrAccTypeCode
AND vad.CurrAccCode = c.CurrAccCode
LEFT JOIN CustomerAttributes ca WITH(NOLOCK)
ON ca.CurrAccTypeCode = c.CurrAccTypeCode
AND ca.CurrAccCode = c.CurrAccCode
LEFT JOIN CustomerAttributeDescriptions('TR') cad
ON cad.CurrAccTypeCode = c.CurrAccTypeCode
AND cad.CurrAccCode = c.CurrAccCode
WHERE c.CurrAccTypeCode IN (1,3)
AND (%s)
`, strings.Join(whereParts, " OR "))
rows, err := db.MssqlDB.QueryContext(ctx, query)
if err != nil {
return nil, fmt.Errorf("cari meta query error: %w", err)
}
defer rows.Close()
out := make(map[string]cariMeta, len(lines))
for rows.Next() {
var t int
var code string
var m cariMeta
if err := rows.Scan(
&t,
&code,
&m.CariDetay,
&m.CariTip,
&m.Kanal1,
&m.Piyasa,
&m.Temsilci,
&m.Ulke,
&m.Il,
&m.Ilce,
&m.TC,
&m.RiskDurumu,
); err != nil {
return nil, err
}
out[metaKey(t, code)] = m
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
// loadGLAccountMap fetches the GL (muhasebe) account code for each
// (cari type, cari code, company) triple present in lines, from
// prCurrAccGLAccount with PostAccTypeCode = 100. Keys come from glKey.
func loadGLAccountMap(ctx context.Context, lines []mkCariBakiyeLine) (map[string]string, error) {
vendorCodes := make(map[string]struct{})
customerCodes := make(map[string]struct{})
companyCodes := make(map[int]struct{})
for _, ln := range lines {
code := strings.TrimSpace(ln.CariKodu)
if code == "" {
continue
}
companyCodes[ln.SirketKodu] = struct{}{}
if ln.CurrAccTypeCode == 1 {
vendorCodes[code] = struct{}{}
} else if ln.CurrAccTypeCode == 3 {
customerCodes[code] = struct{}{}
}
}
if len(companyCodes) == 0 || (len(vendorCodes) == 0 && len(customerCodes) == 0) {
return map[string]string{}, nil
}
whereParts := make([]string, 0, 2)
if len(vendorCodes) > 0 {
whereParts = append(whereParts, fmt.Sprintf("(CurrAccTypeCode=1 AND CurrAccCode IN (%s))", quotedInList(vendorCodes)))
}
if len(customerCodes) > 0 {
whereParts = append(whereParts, fmt.Sprintf("(CurrAccTypeCode=3 AND CurrAccCode IN (%s))", quotedInList(customerCodes)))
}
query := fmt.Sprintf(`
SELECT CurrAccTypeCode, CurrAccCode, CompanyCode, GLAccCode
FROM prCurrAccGLAccount WITH(NOLOCK)
WHERE PostAccTypeCode = 100
AND CompanyCode IN (%s)
AND (%s)
`, intInList(companyCodes), strings.Join(whereParts, " OR "))
rows, err := db.MssqlDB.QueryContext(ctx, query)
if err != nil {
return nil, fmt.Errorf("gl account query error: %w", err)
}
defer rows.Close()
out := make(map[string]string)
for rows.Next() {
var t int
var code string
var company int
var gl sql.NullString
if err := rows.Scan(&t, &code, &company, &gl); err != nil {
return nil, err
}
out[glKey(t, code, company)] = strings.TrimSpace(gl.String)
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
// loadCompanyMap returns CompanyCode -> trimmed CompanyName for every
// company in cdCompany.
func loadCompanyMap(ctx context.Context) (map[int]string, error) {
rows, err := db.MssqlDB.QueryContext(ctx, `SELECT CompanyCode, CompanyName FROM cdCompany WITH(NOLOCK)`)
if err != nil {
return nil, fmt.Errorf("company map query error: %w", err)
}
defer rows.Close()
out := make(map[int]string)
for rows.Next() {
var code int
var name sql.NullString
if err := rows.Scan(&code, &name); err != nil {
return nil, err
}
out[code] = strings.TrimSpace(name.String)
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
// loadNearestTryRates returns, per currency, the TRY rate (ExchangeTypeCode
// 6) whose date is closest to today, ties broken toward the newer date.
// "TRY" is seeded with 1; blank codes and non-positive rates are dropped.
func loadNearestTryRates(ctx context.Context) (map[string]float64, error) {
query := `
WITH Ranked AS (
SELECT
CurrencyCode,
Rate,
rn = ROW_NUMBER() OVER (
PARTITION BY CurrencyCode
ORDER BY ABS(DATEDIFF(DAY, Date, GETDATE())), Date DESC
)
FROM AllExchangeRates
WHERE RelationCurrencyCode = 'TRY'
AND ExchangeTypeCode = 6
AND Rate > 0
)
SELECT CurrencyCode, Rate
FROM Ranked
WHERE rn = 1
`
rows, err := db.MssqlDB.QueryContext(ctx, query)
if err != nil {
return nil, fmt.Errorf("exchange rates query error: %w", err)
}
defer rows.Close()
out := map[string]float64{"TRY": 1}
for rows.Next() {
var code string
var rate float64
if err := rows.Scan(&code, &rate); err != nil {
return nil, err
}
code = strings.ToUpper(strings.TrimSpace(code))
if code != "" && rate > 0 {
out[code] = rate
}
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
// toUSD converts amount (denominated in currency) to USD via the
// currency->TRY rate table and the USD->TRY rate. A missing/non-positive
// rate yields 0 rather than a bogus figure.
func toUSD(amount float64, currency string, usdTry float64, rateMap map[string]float64) float64 {
	if usdTry <= 0 {
		return 0
	}
	if currency == "USD" {
		return amount
	}
	if currency == "TRY" {
		return amount / usdTry
	}
	tryRate := rateMap[currency]
	if tryRate <= 0 {
		return 0
	}
	return amount * tryRate / usdTry
}
// deriveMasterCari reduces a cari code to its "master" code: the portion
// before the first '/' (a leading slash is ignored), capped at the first
// 8 characters, with whitespace trimmed at each step.
//
// The cap counts runes, not bytes, so the result stays consistent with the
// SQL side, where LEFT(CurrAccCode, 8) counts characters; byte slicing
// would split a multi-byte (e.g. Turkish) character and break the
// masterMetaMap lookups keyed by the SQL-derived value. For ASCII codes
// the behavior is unchanged.
func deriveMasterCari(cari string) string {
	base := strings.TrimSpace(cari)
	if base == "" {
		return ""
	}
	if idx := strings.Index(base, "/"); idx > 0 {
		base = strings.TrimSpace(base[:idx])
	}
	runes := []rune(base)
	if len(runes) > 8 {
		return strings.TrimSpace(string(runes[:8]))
	}
	return base
}
// buildFilters parses the CSV filter parameters from the request into the
// internal whitelist sets; islem-tipi values are additionally canonicalized
// to "1_2"/"1_3" by parseIslemTipiSet.
func buildFilters(params models.CustomerBalanceListParams) balanceFilters {
return balanceFilters{
cariIlkGrup: parseCSVSet(params.CariIlkGrup),
piyasa: parseCSVSet(params.Piyasa),
temsilci: parseCSVSet(params.Temsilci),
riskDurumu: parseCSVSet(params.RiskDurumu),
islemTipi: parseIslemTipiSet(params.IslemTipi),
ulke: parseCSVSet(params.Ulke),
il: parseCSVSet(params.Il),
ilce: parseCSVSet(params.Ilce),
}
}
// matchLine reports whether a balance line passes every configured filter.
// The filter sets are ANDed in order; an empty set — or a blank metadata
// value — always matches (see matchSet).
func (f balanceFilters) matchLine(islemTipi string, m cariMeta) bool {
	checks := []struct {
		set   map[string]struct{}
		value string
	}{
		{f.islemTipi, islemTipi},
		{f.cariIlkGrup, m.Kanal1},
		{f.piyasa, m.Piyasa},
		{f.temsilci, m.Temsilci},
		{f.riskDurumu, m.RiskDurumu},
		{f.ulke, m.Ulke},
		{f.il, m.Il},
		{f.ilce, m.Ilce},
	}
	for _, c := range checks {
		if !matchSet(c.set, c.value) {
			return false
		}
	}
	return true
}
// matchSet reports whether value satisfies a whitelist set. An empty set
// means "no filter on this dimension". A blank (whitespace-only) value is
// always accepted, so rows with missing metadata are not dropped by an
// unrelated filter.
func matchSet(set map[string]struct{}, value string) bool {
	switch v := strings.TrimSpace(value); {
	case len(set) == 0, v == "":
		return true
	default:
		_, ok := set[v]
		return ok
	}
}
// parseCSVSet splits a comma-separated filter string into a set of trimmed,
// non-empty tokens.
func parseCSVSet(v string) map[string]struct{} {
	set := make(map[string]struct{})
	for _, tok := range strings.Split(v, ",") {
		if tok = strings.TrimSpace(tok); tok != "" {
			set[tok] = struct{}{}
		}
	}
	return set
}

// parseIslemTipiSet parses the islem-tipi filter, collapsing the UI's many
// aliases (usd_1_2, tl_bakiye_1_3, ...) onto the canonical "1_2"/"1_3"
// bucket names. If no token is a known alias, the raw set is returned
// unchanged so unknown values still filter literally.
func parseIslemTipiSet(v string) map[string]struct{} {
	raw := parseCSVSet(v)
	if len(raw) == 0 {
		return raw
	}
	canonical := make(map[string]struct{}, 2)
	for tok := range raw {
		switch strings.ToLower(strings.TrimSpace(tok)) {
		case "1_2", "prbr_1_2", "usd_1_2", "try_1_2", "tl_1_2", "usd_bakiye_1_2", "tl_bakiye_1_2":
			canonical["1_2"] = struct{}{}
		case "1_3", "prbr_1_3", "usd_1_3", "try_1_3", "tl_1_3", "usd_bakiye_1_3", "tl_bakiye_1_3":
			canonical["1_3"] = struct{}{}
		}
	}
	if len(canonical) == 0 {
		return raw
	}
	return canonical
}
// getAuthorizedPiyasaCodes returns the piyasa codes the caller may see,
// trimmed, upper-cased and de-duplicated (input order preserved).
// Return conventions: (nil, nil) for admins (no restriction), an empty
// slice when the user has no usable codes, and an error when no auth
// claims are present in the context.
func getAuthorizedPiyasaCodes(ctx context.Context) ([]string, error) {
claims, ok := auth.GetClaimsFromContext(ctx)
if !ok || claims == nil {
return nil, fmt.Errorf("unauthorized: claims not found")
}
if claims.IsAdmin() {
return nil, nil
}
rawCodes := authz.GetPiyasaCodesFromCtx(ctx)
if len(rawCodes) == 0 {
return []string{}, nil
}
unique := make(map[string]struct{}, len(rawCodes))
out := make([]string, 0, len(rawCodes))
for _, code := range rawCodes {
norm := strings.ToUpper(strings.TrimSpace(code))
if norm == "" {
continue
}
if _, exists := unique[norm]; exists {
continue
}
unique[norm] = struct{}{}
out = append(out, norm)
}
if len(out) == 0 {
return []string{}, nil
}
return out, nil
}
// buildPiyasaWhereClause renders a SQL predicate restricting column to the
// given piyasa codes; an empty list means "no restriction" (1=1).
func buildPiyasaWhereClause(codes []string, column string) string {
if len(codes) == 0 {
return "1=1"
}
return authz.BuildINClause(column, codes)
}
// metaKey builds the cariMeta map key: "<type>|<trimmed code>".
func metaKey(currType int, code string) string {
	return strings.Join([]string{strconv.Itoa(currType), strings.TrimSpace(code)}, "|")
}

// glKey builds the GL-account map key: "<type>|<trimmed code>|<company>".
func glKey(currType int, code string, company int) string {
	return strings.Join([]string{strconv.Itoa(currType), strings.TrimSpace(code), strconv.Itoa(company)}, "|")
}
// quotedInList renders a string set as a sorted, single-quoted SQL IN list.
// Single quotes are doubled for escaping and blank entries are dropped.
// An empty set yields "''" so the surrounding IN clause stays syntactically
// valid (and matches nothing).
func quotedInList(set map[string]struct{}) string {
	quoted := make([]string, 0, len(set))
	for raw := range set {
		v := strings.TrimSpace(raw)
		if v == "" {
			continue
		}
		quoted = append(quoted, "'"+strings.ReplaceAll(v, "'", "''")+"'")
	}
	if len(quoted) == 0 {
		return "''"
	}
	sort.Strings(quoted)
	return strings.Join(quoted, ",")
}
// intInList renders an int set as a sorted, comma-separated SQL IN list.
// An empty set yields "0" to keep the clause valid.
func intInList(set map[int]struct{}) string {
	if len(set) == 0 {
		return "0"
	}
	nums := make([]int, 0, len(set))
	for n := range set {
		nums = append(nums, n)
	}
	sort.Ints(nums)
	var b strings.Builder
	for i, n := range nums {
		if i > 0 {
			b.WriteByte(',')
		}
		b.WriteString(strconv.Itoa(n))
	}
	return b.String()
}
// firstNonEmpty returns the first argument that is non-blank after
// trimming; the winning value is returned as-is (untrimmed). Returns ""
// when every argument is blank.
func firstNonEmpty(values ...string) string {
	for _, v := range values {
		if strings.TrimSpace(v) == "" {
			continue
		}
		return v
	}
	return ""
}
// isInvalidColumnError reports whether err looks like SQL Server's
// "Invalid column name" error (case-insensitive substring match); used by
// loadBalanceLines to fall through to the next SELECT candidate.
func isInvalidColumnError(err error) bool {
	if err == nil {
		return false
	}
	return strings.Contains(strings.ToLower(err.Error()), "invalid column name")
}
// shouldSkipBalanceLine reports whether a line whose PislemTipi names a
// bucket ("1_2"/"1_3") resolves — per resolveBalanceBuckets — to NOT
// contributing to that bucket, i.e. the line would add nothing.
// Lines with any other PislemTipi are never skipped.
func shouldSkipBalanceLine(ln mkCariBakiyeLine) bool {
add12, add13 := resolveBalanceBuckets(ln)
p := strings.TrimSpace(ln.PislemTipi)
if p == "1_2" {
return !add12
}
if p == "1_3" {
return !add13
}
return false
}
// resolveBalanceBuckets decides which balance buckets (1_2 and/or 1_3) a
// line contributes to. The normalized parasal islem tipi takes precedence:
// "1" feeds both buckets, "2"/"1_2" only 1_2, "3"/"1_3" only 1_3.
func resolveBalanceBuckets(ln mkCariBakiyeLine) (add12 bool, add13 bool) {
p := strings.TrimSpace(ln.PislemTipi)
t := normalizeParasalIslemTipi(ln.ParasalIslemTipi)
switch t {
case "1":
return true, true
case "2", "1_2":
return true, false
case "3", "1_3":
return false, true
}
// No recognizable parasal type: fall back to the legacy behavior and
// split by PislemTipi alone.
if p == "1_2" {
return true, false
}
if p == "1_3" {
return false, true
}
return false, false
}
// normalizeParasalIslemTipi maps the free-form "parasal islem tipi" value
// from the DB onto a canonical token ("1", "2", "3", "1_2", "1_3").
// It tolerates assorted separators (space, -, _, /, |, ;), list forms like
// "1,2", and numeric variants like "2.00"; as a last resort it extracts the
// first run of digits. If nothing matches, the trimmed input (with commas
// normalized to dots) is returned unchanged.
func normalizeParasalIslemTipi(v string) string {
s := strings.TrimSpace(v)
if s == "" {
return ""
}
lower := strings.ToLower(s)
// Fast path: "1-2", "1 2", "1/2" all compact to a string containing 1_2.
compact := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(lower, " ", ""), "-", "_"), "/", "_")
if strings.Contains(compact, "1_2") {
return "1_2"
}
if strings.Contains(compact, "1_3") {
return "1_3"
}
// Catch list formats like "1,2" / "1,3" directly.
tokenized := strings.NewReplacer(" ", "", ";", ",", "|", ",", "/", ",", "-", ",", "_", ",").Replace(lower)
parts := strings.Split(tokenized, ",")
has1 := false
has2 := false
has3 := false
for _, p := range parts {
t := strings.TrimSpace(p)
switch t {
case "1":
has1 = true
case "2":
has2 = true
case "3":
has3 = true
}
}
if has1 && has2 {
return "1_2"
}
if has1 && has3 {
return "1_3"
}
if has2 && !has1 && !has3 {
return "2"
}
if has3 && !has1 && !has2 {
return "3"
}
if has1 && !has2 && !has3 {
return "1"
}
// Collapse variants like "2.00", "2,00", " 2 " to a single form.
s = strings.ReplaceAll(s, ",", ".")
if n, err := strconv.ParseFloat(s, 64); err == nil {
return strconv.Itoa(int(n))
}
// If the value is textual, take the first block of digits.
start := -1
end := -1
for i, r := range s {
if r >= '0' && r <= '9' {
if start == -1 {
start = i
}
end = i
continue
}
if start != -1 {
break
}
}
if start == -1 || end < start {
return s
}
return s[start : end+1]
}
// resolveParasalIslemSelectExpr inspects the result-set metadata of
// sampleQuery (via sys.dm_exec_describe_first_result_set) and, when one of
// the known "parasal işlem tipi" column candidates is present, returns a
// SELECT fragment exposing it as ParasalIslemTipi. On any error — or when
// no candidate column exists — it returns "" and the caller omits the column.
func resolveParasalIslemSelectExpr(ctx context.Context, sampleQuery string) string {
	sampleQuery = strings.TrimSpace(sampleQuery)
	if sampleQuery == "" {
		return ""
	}
	metaQuery := `
SELECT name
FROM sys.dm_exec_describe_first_result_set(@tsql, NULL, 0)
WHERE error_number IS NULL
AND name IS NOT NULL
`
	rows, err := db.MssqlDB.QueryContext(ctx, metaQuery, sql.Named("tsql", sampleQuery))
	if err != nil {
		return ""
	}
	defer rows.Close()

	// Both candidate keys and discovered column names are compared with
	// underscores/spaces stripped and lowercased.
	normalize := func(s string) string {
		return strings.ToLower(strings.ReplaceAll(strings.ReplaceAll(s, "_", ""), " ", ""))
	}

	available := make(map[string]string)
	for rows.Next() {
		var col sql.NullString
		if err := rows.Scan(&col); err != nil {
			return ""
		}
		if name := strings.TrimSpace(col.String); name != "" {
			available[normalize(name)] = name
		}
	}
	if err := rows.Err(); err != nil {
		return ""
	}

	// Candidate column names in priority order; the first hit wins.
	candidates := []string{"ata tt01", "atatt01", "parasalislemtipi", "parislemtipi", "parislemtur"}
	for _, key := range candidates {
		if col, ok := available[normalize(key)]; ok {
			return fmt.Sprintf("CAST(%s AS varchar(16)) AS ParasalIslemTipi,", quoteSQLIdent(col))
		}
	}
	return ""
}
// quoteSQLIdent wraps ident in [brackets] for T-SQL, trimming whitespace and
// doubling any ']' so the identifier cannot break out of the quoting.
func quoteSQLIdent(ident string) string {
	escaped := strings.ReplaceAll(strings.TrimSpace(ident), "]", "]]")
	return "[" + escaped + "]"
}

View File

@@ -66,7 +66,16 @@ func GetCustomerList(ctx context.Context) ([]models.CustomerList, error) {
), SPACE(0))
),
ISNULL(c.CurrencyCode, '')
ISNULL(c.CurrencyCode, ''),
ISNULL((
SELECT TOP 1 CAST(pa.PostalAddressID AS varchar(36))
FROM prCurrAccPostalAddress pa WITH(NOLOCK)
WHERE pa.CurrAccTypeCode = c.CurrAccTypeCode
AND pa.CurrAccCode = c.CurrAccCode
AND pa.AddressTypeCode = 2
AND ISNULL(pa.IsBlocked, 0) = 0
ORDER BY pa.LastUpdatedDate DESC, pa.CreatedDate DESC
), '')
FROM cdCurrAcc c
LEFT JOIN cdCurrAccDesc d
@@ -103,6 +112,7 @@ func GetCustomerList(ctx context.Context) ([]models.CustomerList, error) {
&c.Musteri_Temsilcisi,
&c.Ulke,
&c.Doviz_cinsi,
&c.PostalAddressID,
); err != nil {
return nil, err
}

View File

@@ -17,6 +17,7 @@ SELECT
CAST(h.OrderHeaderID AS NVARCHAR(50)) AS OrderHeaderID,
ISNULL(h.OrderNumber,'') AS OrderNumber,
CONVERT(varchar,h.OrderDate,23) AS OrderDate,
CONVERT(varchar,h.AverageDueDate,23) AS TerminTarihi,
ISNULL(h.CurrAccCode,'') AS CurrAccCode,
ISNULL(ca.CurrAccDescription,'') AS CurrAccDescription,
@@ -73,6 +74,17 @@ SELECT
ELSE 0
END AS PackedRatePct,
CASE
WHEN EXISTS (
SELECT 1
FROM dbo.trOrderLine l2
WHERE l2.OrderHeaderID = h.OrderHeaderID
AND ISNULL(l2.ItemCode,'') LIKE 'U%%'
)
THEN CAST(1 AS bit)
ELSE CAST(0 AS bit)
END AS HasUretimUrunu,
ISNULL(h.Description,'') AS Description,
usd.Rate AS ExchangeRateUSD

View File

@@ -0,0 +1,67 @@
package queries
const GetActiveMarketsForMapping = `
SELECT
p.id,
p.code,
p.title
FROM mk_sales_piy p
WHERE p.is_active = true
ORDER BY p.title, p.code
`
const GetActiveMailsForMapping = `
SELECT
m.id::text,
m.email,
COALESCE(NULLIF(m.display_name, ''), m.email) AS display_name
FROM mk_mail m
WHERE m.is_active = true
ORDER BY m.email
`
const GetMarketMailMappingRows = `
SELECT
p.id,
p.code,
p.title,
m.id::text,
m.email,
COALESCE(NULLIF(m.display_name, ''), m.email) AS display_name
FROM mk_sales_piy p
LEFT JOIN mk_market_mail mm
ON mm.market_id = p.id
LEFT JOIN mk_mail m
ON m.id = mm.mail_id
AND m.is_active = true
WHERE p.is_active = true
ORDER BY p.title, p.code, m.email
`
const ExistsActiveMarketByID = `
SELECT EXISTS (
SELECT 1
FROM mk_sales_piy p
WHERE p.id = $1
AND p.is_active = true
)
`
const ExistsActiveMailByID = `
SELECT EXISTS (
SELECT 1
FROM mk_mail m
WHERE m.id = $1
AND m.is_active = true
)
`
const DeleteMarketMailsByMarketID = `
DELETE FROM mk_market_mail
WHERE market_id = $1
`
const InsertMarketMailMapping = `
INSERT INTO mk_market_mail (market_id, mail_id)
VALUES ($1, $2)
`

View File

@@ -0,0 +1,75 @@
package queries
import (
"database/sql"
"fmt"
"strings"
"time"
)
func BulkUpdateOrderLineDueDate(mssql *sql.DB, orderHeaderID string, dueDate string, username string) (int64, bool, error) {
orderID := strings.TrimSpace(orderHeaderID)
dateText := strings.TrimSpace(dueDate)
user := strings.TrimSpace(username)
if orderID == "" {
return 0, false, fmt.Errorf("orderHeaderID zorunlu")
}
if dateText == "" {
return 0, false, fmt.Errorf("dueDate zorunlu")
}
if _, err := time.Parse("2006-01-02", dateText); err != nil {
return 0, false, fmt.Errorf("gecersiz tarih: %w", err)
}
if user == "" {
user = "system"
}
tx, err := mssql.Begin()
if err != nil {
return 0, false, err
}
defer tx.Rollback()
lineRes, err := tx.Exec(`
UPDATE BAGGI_V3.dbo.trOrderLine
SET
DeliveryDate = CAST(@p1 AS DATETIME),
PlannedDateOfLading = CAST(@p1 AS DATETIME),
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3
AND ISNULL(IsClosed, 0) = 0
`, dateText, user, orderID)
if err != nil {
return 0, false, err
}
lineCount, err := lineRes.RowsAffected()
if err != nil {
return 0, false, err
}
headerRes, err := tx.Exec(`
UPDATE BAGGI_V3.dbo.trOrderHeader
SET
AverageDueDate = CAST(@p1 AS DATETIME),
LastUpdatedUserName = @p2,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p3
`, dateText, user, orderID)
if err != nil {
return 0, false, err
}
headerCount, err := headerRes.RowsAffected()
if err != nil {
return 0, false, err
}
if err := tx.Commit(); err != nil {
return 0, false, err
}
return lineCount, headerCount > 0, nil
}

View File

@@ -454,6 +454,133 @@ func normalizeKeyPart(ns models.NullString) string {
return strings.ToUpper(s)
}
// normalizeNumericToken: sadece rakamlardan oluşan değeri baştaki sıfırlardan arındırır.
// Rakam dışı içerik varsa boş döner.
func normalizeNumericToken(s string) string {
if s == "" {
return ""
}
for i := 0; i < len(s); i++ {
if s[i] < '0' || s[i] > '9' {
return ""
}
}
i := 0
for i < len(s) && s[i] == '0' {
i++
}
if i == len(s) {
return "0"
}
return s[i:]
}
// normalizeDim1Token: variant karşılaştırması için Dim1'i eski kuralla normalize eder.
// (boşluk, YAS ve Y kaldırılır; UPPER)
func normalizeDim1Token(s string) string {
s = strings.ToUpper(strings.TrimSpace(s))
s = strings.ReplaceAll(s, " ", "")
s = strings.ReplaceAll(s, "YAS", "")
s = strings.ReplaceAll(s, "Y", "")
return s
}
func variantCacheKey(item, color, dim2 string) string {
return item + "||" + color + "||" + dim2
}
func loadVariantDim1SetTx(tx *sql.Tx, item, color, dim2 string) (map[string]struct{}, error) {
rows, err := tx.Query(`
SELECT ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') AS ItemDim1Code
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND (
(
ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND (
ISNULL(LTRIM(RTRIM(@p3)),'') = ''
OR ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p3
)
)
OR (
ISNULL(LTRIM(RTRIM(@p3)),'') = ''
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p2
)
)
`, item, color, dim2)
if err != nil {
return nil, fmt.Errorf("variant set query hatası: %w", err)
}
defer rows.Close()
set := make(map[string]struct{})
for rows.Next() {
var raw string
if err := rows.Scan(&raw); err != nil {
return nil, fmt.Errorf("variant set scan hatası: %w", err)
}
norm := normalizeDim1Token(raw)
if norm == "" {
set["#EMPTY_DIM1"] = struct{}{}
continue
}
set[norm] = struct{}{}
if num := normalizeNumericToken(norm); num != "" {
set["#NUM:"+num] = struct{}{}
}
}
if err := rows.Err(); err != nil {
return nil, fmt.Errorf("variant set rows hatası: %w", err)
}
return set, nil
}
func loadVariantDim1SetDB(conn *sql.DB, item, color, dim2 string) (map[string]struct{}, error) {
rows, err := conn.Query(`
SELECT ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') AS ItemDim1Code
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND (
(
ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND (
ISNULL(LTRIM(RTRIM(@p3)),'') = ''
OR ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p3
)
)
OR (
ISNULL(LTRIM(RTRIM(@p3)),'') = ''
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p2
)
)
`, item, color, dim2)
if err != nil {
return nil, fmt.Errorf("variant set query hatası: %w", err)
}
defer rows.Close()
set := make(map[string]struct{})
for rows.Next() {
var raw string
if err := rows.Scan(&raw); err != nil {
return nil, fmt.Errorf("variant set scan hatası: %w", err)
}
norm := normalizeDim1Token(raw)
if norm == "" {
set["#EMPTY_DIM1"] = struct{}{}
continue
}
set[norm] = struct{}{}
if num := normalizeNumericToken(norm); num != "" {
set["#NUM:"+num] = struct{}{}
}
}
if err := rows.Err(); err != nil {
return nil, fmt.Errorf("variant set rows hatası: %w", err)
}
return set, nil
}
// =======================================================
// AKSBIR DETECTION
// =======================================================
@@ -464,6 +591,10 @@ func normalizeKeyPart(ns models.NullString) string {
// Variant check: ItemCode + ColorCode + Dim1 + Dim2
func ValidateItemVariant(tx *sql.Tx, ln models.OrderDetail) error {
return ValidateItemVariantCached(tx, ln, nil)
}
func ValidateItemVariantCached(tx *sql.Tx, ln models.OrderDetail, cache map[string]map[string]struct{}) error {
fmt.Printf(
"🧪 VARIANT GUARD INPUT | ClientKey=%s Item=%q Color=%q Dim1=%q Dim2=%q Dim3=%q Qty1=%v\n",
safeNS(ln.ClientKey),
@@ -493,36 +624,46 @@ func ValidateItemVariant(tx *sql.Tx, ln models.OrderDetail) error {
color = normalizeEmpty(color)
dim1 = normalizeEmpty(dim1)
dim2 = normalizeEmpty(dim2)
dim1Norm := normalizeDim1Token(dim1)
dim1Numeric := normalizeNumericToken(dim1Norm)
if item == "" {
return fmt.Errorf(
"ItemCode boş olamaz (ClientKey=%s)",
safeNS(ln.ClientKey),
)
fmt.Printf(
"🧪 VARIANT NORMALIZED | Item=%q Color=%q Dim1=%q Dim2=%q\n",
item, color, dim1, dim2,
item, color, dim1Norm, dim2,
)
if item == "" {
return &models.ValidationError{
Code: "INVALID_ITEM_VARIANT",
Message: "Tanımsız ürün kombinasyonu",
ClientKey: safeNS(ln.ClientKey),
ItemCode: item,
ColorCode: color,
Dim1: dim1,
Dim2: dim2,
}
}
var exists int
err := tx.QueryRow(`
SELECT CASE WHEN EXISTS (
SELECT 1
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') = @p3
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p4
) THEN 1 ELSE 0 END
`, item, color, dim1, dim2).Scan(&exists)
key := variantCacheKey(item, color, dim2)
set := map[string]struct{}(nil)
if cache != nil {
set = cache[key]
}
if set == nil {
var err error
set, err = loadVariantDim1SetTx(tx, item, color, dim2)
if err != nil {
return fmt.Errorf("ItemVariant kontrol query hatası: %w", err)
}
if cache != nil {
cache[key] = set
}
}
if exists != 1 {
_, okNorm := set[dim1Norm]
_, okNum := set["#NUM:"+dim1Numeric]
_, okEmpty := set["#EMPTY_DIM1"]
if !(okNorm || (dim1Numeric != "" && okNum) || (dim1Norm == "" && okEmpty)) {
return &models.ValidationError{
Code: "INVALID_ITEM_VARIANT",
Message: "Tanımsız ürün kombinasyonu",
@@ -549,22 +690,8 @@ func ValidateOrderVariants(db *sql.DB, lines []models.OrderDetail) ([]models.Inv
return s
}
stmt, err := db.Prepare(`
SELECT CASE WHEN EXISTS (
SELECT 1
FROM BAGGI_V3.dbo.prItemVariant V WITH (NOLOCK)
WHERE ISNULL(LTRIM(RTRIM(V.ItemCode)),'') = @p1
AND ISNULL(LTRIM(RTRIM(V.ColorCode)),'') = @p2
AND ISNULL(LTRIM(RTRIM(V.ItemDim1Code)),'') = @p3
AND ISNULL(LTRIM(RTRIM(V.ItemDim2Code)),'') = @p4
) THEN 1 ELSE 0 END
`)
if err != nil {
return nil, fmt.Errorf("validate prepare hatası: %w", err)
}
defer stmt.Close()
invalid := make([]models.InvalidVariant, 0)
cache := make(map[string]map[string]struct{})
for i, ln := range lines {
qty := qtyValue(ln.Qty1)
@@ -576,6 +703,8 @@ func ValidateOrderVariants(db *sql.DB, lines []models.OrderDetail) ([]models.Inv
color := normalizeEmpty(normalizeKeyPart(ln.ColorCode))
dim1 := normalizeEmpty(normalizeKeyPart(ln.ItemDim1Code))
dim2 := normalizeEmpty(normalizeKeyPart(ln.ItemDim2Code))
dim1Norm := normalizeDim1Token(dim1)
dim1Numeric := normalizeNumericToken(dim1Norm)
// ItemCode boş ise invalid
if strings.TrimSpace(item) == "" {
@@ -593,12 +722,21 @@ func ValidateOrderVariants(db *sql.DB, lines []models.OrderDetail) ([]models.Inv
continue
}
var exists int
if err := stmt.QueryRow(item, color, dim1, dim2).Scan(&exists); err != nil {
key := variantCacheKey(item, color, dim2)
set := cache[key]
if set == nil {
var err error
set, err = loadVariantDim1SetDB(db, item, color, dim2)
if err != nil {
return nil, fmt.Errorf("validate query hatası (i=%d): %w", i, err)
}
cache[key] = set
}
if exists != 1 {
_, okNorm := set[dim1Norm]
_, okNum := set["#NUM:"+dim1Numeric]
_, okEmpty := set["#EMPTY_DIM1"]
if !(okNorm || (dim1Numeric != "" && okNum) || (dim1Norm == "" && okEmpty)) {
invalid = append(invalid, models.InvalidVariant{
Index: i,
ClientKey: safeNS(ln.ClientKey),
@@ -836,7 +974,7 @@ VALUES (
nullableBool(header.IsSalesViaInternet, false),
nullableBool(header.IsSuspended, false),
nullableBool(header.IsCompleted, false),
nullableBool(header.IsCompleted, true),
nullableBool(header.IsPrinted, false),
nullableBool(header.IsLocked, false),
@@ -921,6 +1059,7 @@ VALUES (
defer insStmt.Close()
lineResults := make([]OrderLineResult, 0, len(lines))
variantCache := make(map[string]map[string]struct{})
// ✅ Duplicate Guard (payload içi)
seenCombo := make(map[string]bool)
@@ -968,7 +1107,7 @@ VALUES (
// ✅ INSERT ÖNCESİ ItemVariant GUARD
if qtyValue(ln.Qty1) > 0 {
if err := ValidateItemVariant(tx, ln); err != nil {
if err := ValidateItemVariantCached(tx, ln, variantCache); err != nil {
fmt.Println("❌ VARIANT GUARD (INSERT):", err)
return "", nil, err
}
@@ -1280,9 +1419,12 @@ UPDATE BAGGI_V3.dbo.trOrderHeader SET
DocCurrencyCode=@p6,
LocalCurrencyCode=@p7,
ExchangeRate=@p8,
LastUpdatedUserName=@p9,
LastUpdatedDate=@p10
WHERE OrderHeaderID=@p11
IsCreditableConfirmed=@p9,
CreditableConfirmedUser=@p10,
CreditableConfirmedDate=@p11,
LastUpdatedUserName=@p12,
LastUpdatedDate=@p13
WHERE OrderHeaderID=@p14
`,
nullableDateString(header.OrderDate),
nullableTimeString(header.OrderTime),
@@ -1292,6 +1434,9 @@ WHERE OrderHeaderID=@p11
nullableString(header.DocCurrencyCode, "TRY"),
nullableString(header.LocalCurrencyCode, "TRY"),
nullableFloat64(header.ExchangeRate, exRate),
true,
nullableString(header.CreditableConfirmedUser, v3User),
nullableDateTime(header.CreditableConfirmedDate, now),
v3User,
now,
header.OrderHeaderID,
@@ -1363,6 +1508,7 @@ WHERE OrderLineID=@p42 AND ISNULL(IsClosed,0)=0`)
// LOOP
// ======================================================
lineResults := make([]OrderLineResult, 0)
variantCache := make(map[string]map[string]struct{})
seenCombo := make(map[string]bool)
for _, ln := range lines {
@@ -1478,7 +1624,7 @@ WHERE OrderLineID=@p42 AND ISNULL(IsClosed,0)=0`)
// Variant guard
if qtyValue(ln.Qty1) > 0 {
if err := ValidateItemVariant(tx, ln); err != nil {
if err := ValidateItemVariantCached(tx, ln, variantCache); err != nil {
return nil, err
}
}

View File

@@ -57,6 +57,7 @@ SELECT
CAST(h.OrderHeaderID AS NVARCHAR(50)) AS OrderHeaderID,
ISNULL(h.OrderNumber, '') AS OrderNumber,
CONVERT(varchar, h.OrderDate, 23) AS OrderDate,
CONVERT(varchar, h.AverageDueDate, 23) AS TerminTarihi,
ISNULL(h.CurrAccCode, '') AS CurrAccCode,
ISNULL(ca.CurrAccDescription, '') AS CurrAccDescription,

View File

@@ -2,6 +2,12 @@ package queries
import (
"database/sql"
"fmt"
"log"
"sort"
"strconv"
"strings"
"time"
"bssapp-backend/models"
)
@@ -20,14 +26,24 @@ SELECT
ISNULL(l.ItemCode,'') AS OldItemCode,
ISNULL(l.ColorCode,'') AS OldColor,
ISNULL((
SELECT TOP 1 LTRIM(RTRIM(cd.ColorDescription))
FROM dbo.cdColorDesc cd WITH (NOLOCK)
WHERE cd.ColorCode = l.ColorCode
AND cd.LangCode = N'TR'
), '') AS OldColorDescription,
ISNULL(l.ItemDim2Code,'') AS OldDim2,
ISNULL(l.LineDescription,'') AS OldDesc,
CAST(ISNULL(l.Qty1, 0) AS FLOAT) AS OldQty,
CAST('' AS NVARCHAR(60)) AS NewItemCode,
CAST('' AS NVARCHAR(30)) AS NewColor,
CAST('' AS NVARCHAR(30)) AS NewDim2,
CAST('' AS NVARCHAR(250)) AS NewDesc,
CONVERT(NVARCHAR(10), l.DeliveryDate, 126) AS OldDueDate,
CONVERT(NVARCHAR(10), l.DeliveryDate, 126) AS NewDueDate,
CAST(0 AS bit) AS IsVariantMissing
FROM dbo.trOrderLine l
WHERE l.OrderHeaderID = @p1
@@ -52,18 +68,14 @@ func InsertMissingProductionVariants(mssql *sql.DB, orderHeaderID string, userna
FROM dbo.trOrderLine l
LEFT JOIN dbo.prItemVariant pv
ON pv.ItemTypeCode = l.ItemTypeCode
AND pv.ItemCode = l.ItemCode
AND pv.ColorCode = l.ColorCode
AND ISNULL(pv.ItemDim1Code,'') = ISNULL(l.ItemDim1Code,'')
AND ISNULL(pv.ItemDim2Code,'') = ISNULL(l.ItemDim2Code,'')
AND ISNULL(pv.ItemDim3Code,'') = ISNULL(l.ItemDim3Code,'')
AND ISNULL(LTRIM(RTRIM(pv.ItemCode)),'') = ISNULL(LTRIM(RTRIM(l.ItemCode)),'')
AND ISNULL(LTRIM(RTRIM(pv.ColorCode)),'') = ISNULL(LTRIM(RTRIM(l.ColorCode)),'')
AND ISNULL(LTRIM(RTRIM(pv.ItemDim1Code)),'') = ISNULL(LTRIM(RTRIM(l.ItemDim1Code)),'')
AND ISNULL(LTRIM(RTRIM(pv.ItemDim2Code)),'') = ISNULL(LTRIM(RTRIM(l.ItemDim2Code)),'')
AND ISNULL(LTRIM(RTRIM(pv.ItemDim3Code)),'') = ISNULL(LTRIM(RTRIM(l.ItemDim3Code)),'')
WHERE l.OrderHeaderID = @p1
AND ISNULL(l.ItemCode,'') LIKE 'U%'
AND pv.ItemCode IS NULL
),
MaxPlu AS (
SELECT ISNULL(MAX(PLU),0) AS BasePlu
FROM dbo.prItemVariant WITH (UPDLOCK, HOLDLOCK)
)
INSERT INTO dbo.prItemVariant (
ItemTypeCode,
@@ -72,11 +84,17 @@ INSERT INTO dbo.prItemVariant (
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
PLU,
IsSalesOrderClosed,
IsPurchaseOrderClosed,
IsLocked,
IsBlocked,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate
LastUpdatedDate,
RowGuid,
UseInternet,
IsStoreOrderClosed
)
SELECT
m.ItemTypeCode,
@@ -85,13 +103,18 @@ SELECT
m.ItemDim1Code,
m.ItemDim2Code,
m.ItemDim3Code,
mp.BasePlu + ROW_NUMBER() OVER (ORDER BY m.ItemCode, m.ColorCode, m.ItemDim1Code, m.ItemDim2Code, m.ItemDim3Code),
0,
0,
0,
0,
@p2,
GETDATE(),
@p2,
GETDATE()
FROM Missing m
CROSS JOIN MaxPlu mp;
GETDATE(),
NEWID(),
0,
0
FROM Missing m;
`
res, err := mssql.Exec(query, orderHeaderID, username)
@@ -121,6 +144,44 @@ WHERE OrderHeaderID = @p1 AND OrderLineID = @p2
return itemTypeCode, dim1, dim2, dim3, err
}
type OrderLineDims struct {
ItemTypeCode int16
ItemDim1Code string
ItemDim2Code string
ItemDim3Code string
}
func GetOrderLineDimsMap(mssql *sql.DB, orderHeaderID string) (map[string]OrderLineDims, error) {
rows, err := mssql.Query(`
SELECT
CAST(OrderLineID AS NVARCHAR(50)) AS OrderLineID,
ItemTypeCode,
ISNULL(ItemDim1Code,'') AS ItemDim1Code,
ISNULL(ItemDim2Code,'') AS ItemDim2Code,
ISNULL(ItemDim3Code,'') AS ItemDim3Code
FROM dbo.trOrderLine WITH(NOLOCK)
WHERE OrderHeaderID = @p1
`, orderHeaderID)
if err != nil {
return nil, err
}
defer rows.Close()
out := make(map[string]OrderLineDims, 128)
for rows.Next() {
var lineID string
var d OrderLineDims
if err := rows.Scan(&lineID, &d.ItemTypeCode, &d.ItemDim1Code, &d.ItemDim2Code, &d.ItemDim3Code); err != nil {
return nil, err
}
out[strings.TrimSpace(lineID)] = d
}
if err := rows.Err(); err != nil {
return nil, err
}
return out, nil
}
func VariantExists(mssql *sql.DB, itemTypeCode int16, itemCode string, colorCode string, dim1 string, dim2 string, dim3 string) (bool, error) {
var exists int
err := mssql.QueryRow(`
@@ -128,10 +189,22 @@ SELECT TOP 1 1
FROM dbo.prItemVariant
WHERE ItemTypeCode = @p1
AND ItemCode = @p2
AND ColorCode = @p3
AND ISNULL(ItemDim1Code,'') = ISNULL(@p4,'')
AND ISNULL(ItemDim2Code,'') = ISNULL(@p5,'')
AND ISNULL(ItemDim3Code,'') = ISNULL(@p6,'')
AND (
ColorCode = @p3
OR (@p3 = '' AND (ColorCode IS NULL OR ColorCode = ''))
)
AND (
ItemDim1Code = @p4
OR (@p4 = '' AND (ItemDim1Code IS NULL OR ItemDim1Code = ''))
)
AND (
ItemDim2Code = @p5
OR (@p5 = '' AND (ItemDim2Code IS NULL OR ItemDim2Code = ''))
)
AND (
ItemDim3Code = @p6
OR (@p6 = '' AND (ItemDim3Code IS NULL OR ItemDim3Code = ''))
)
`, itemTypeCode, itemCode, colorCode, dim1, dim2, dim3).Scan(&exists)
if err == sql.ErrNoRows {
return false, nil
@@ -142,32 +215,74 @@ WHERE ItemTypeCode = @p1
return true, nil
}
func InsertMissingVariantsTx(tx *sql.Tx, missing []models.OrderProductionMissingVariant, username string) (int64, error) {
func InsertMissingVariantsTx(
tx *sql.Tx,
missing []models.OrderProductionMissingVariant,
username string,
cdItemByCode map[string]models.OrderProductionCdItemDraft,
) (int64, error) {
start := time.Now()
if len(missing) == 0 {
log.Printf("[InsertMissingVariantsTx] missing=0 inserted=0 duration_ms=0")
return 0, nil
}
var basePlu int64
if err := tx.QueryRow(`
SELECT ISNULL(MAX(PLU),0) AS BasePlu
FROM dbo.prItemVariant WITH (UPDLOCK, HOLDLOCK)
`).Scan(&basePlu); err != nil {
return 0, err
var inserted int64
ensuredItems := make(map[string]struct{}, len(missing))
uniqueVariants := make([]models.OrderProductionMissingVariant, 0, len(missing))
seenVariants := make(map[string]struct{}, len(missing))
for _, v := range missing {
variantKey := strconv.FormatInt(int64(v.ItemTypeCode), 10) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemCode)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ColorCode)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemDim1Code)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemDim2Code)) + "|" +
strings.ToUpper(strings.TrimSpace(v.ItemDim3Code))
if _, ok := seenVariants[variantKey]; ok {
continue
}
seenVariants[variantKey] = struct{}{}
uniqueVariants = append(uniqueVariants, v)
itemKey := strconv.FormatInt(int64(v.ItemTypeCode), 10) + "|" + v.ItemCode
if _, ok := ensuredItems[itemKey]; !ok {
draft, hasDraft := cdItemByCode[itemKey]
if !hasDraft {
draft, hasDraft = cdItemByCode[NormalizeCdItemMapKey(v.ItemTypeCode, v.ItemCode)]
}
var draftPtr *models.OrderProductionCdItemDraft
if hasDraft {
tmp := draft
draftPtr = &tmp
}
if err := ensureCdItemTx(tx, v.ItemTypeCode, v.ItemCode, username, draftPtr); err != nil {
return inserted, err
}
ensuredItems[itemKey] = struct{}{}
}
}
var inserted int64
for i, v := range missing {
plu := basePlu + int64(i) + 1
res, err := tx.Exec(`
IF NOT EXISTS (
SELECT 1
FROM dbo.prItemVariant
WHERE ItemTypeCode = @p1
AND ItemCode = @p2
AND ColorCode = @p3
AND ISNULL(ItemDim1Code,'') = ISNULL(@p4,'')
AND ISNULL(ItemDim2Code,'') = ISNULL(@p5,'')
AND ISNULL(ItemDim3Code,'') = ISNULL(@p6,'')
if len(uniqueVariants) == 0 {
return 0, nil
}
args := make([]any, 0, len(uniqueVariants)*6+1)
valueRows := make([]string, 0, len(uniqueVariants))
paramPos := 1
for _, v := range uniqueVariants {
valueRows = append(valueRows, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3, paramPos+4, paramPos+5))
args = append(args, v.ItemTypeCode, v.ItemCode, v.ColorCode, v.ItemDim1Code, v.ItemDim2Code, v.ItemDim3Code)
paramPos += 6
}
usernameParam := paramPos
args = append(args, username)
query := fmt.Sprintf(`
SET NOCOUNT ON;
;WITH Missing(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code) AS (
SELECT *
FROM (VALUES %s) AS v(ItemTypeCode, ItemCode, ColorCode, ItemDim1Code, ItemDim2Code, ItemDim3Code)
)
INSERT INTO dbo.prItemVariant (
ItemTypeCode,
@@ -176,47 +291,1404 @@ INSERT INTO dbo.prItemVariant (
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
PLU,
IsSalesOrderClosed,
IsPurchaseOrderClosed,
IsLocked,
IsBlocked,
CreatedUserName,
CreatedDate,
LastUpdatedUserName,
LastUpdatedDate
LastUpdatedDate,
RowGuid,
UseInternet,
IsStoreOrderClosed
)
VALUES (
@p1, @p2, @p3, @p4, @p5, @p6,
@p7, @p8, GETDATE(), @p8, GETDATE()
);
`, v.ItemTypeCode, v.ItemCode, v.ColorCode, v.ItemDim1Code, v.ItemDim2Code, v.ItemDim3Code, plu, username)
SELECT
m.ItemTypeCode,
m.ItemCode,
m.ColorCode,
m.ItemDim1Code,
m.ItemDim2Code,
m.ItemDim3Code,
0, 0, 0, 0,
@p%d, GETDATE(), @p%d, GETDATE(),
NEWID(),
0,
0
FROM Missing m
LEFT JOIN dbo.prItemVariant pv
ON pv.ItemTypeCode = m.ItemTypeCode
AND pv.ItemCode = m.ItemCode
AND (
pv.ColorCode = m.ColorCode
OR (m.ColorCode = '' AND (pv.ColorCode IS NULL OR pv.ColorCode = ''))
)
AND (
pv.ItemDim1Code = m.ItemDim1Code
OR (m.ItemDim1Code = '' AND (pv.ItemDim1Code IS NULL OR pv.ItemDim1Code = ''))
)
AND (
pv.ItemDim2Code = m.ItemDim2Code
OR (m.ItemDim2Code = '' AND (pv.ItemDim2Code IS NULL OR pv.ItemDim2Code = ''))
)
AND (
pv.ItemDim3Code = m.ItemDim3Code
OR (m.ItemDim3Code = '' AND (pv.ItemDim3Code IS NULL OR pv.ItemDim3Code = ''))
)
WHERE pv.ItemCode IS NULL;
`, strings.Join(valueRows, ","), usernameParam, usernameParam)
res, err := tx.Exec(query, args...)
if err != nil {
return inserted, err
}
if rows, err := res.RowsAffected(); err == nil {
if rows, rowsErr := res.RowsAffected(); rowsErr == nil {
inserted += rows
}
}
log.Printf("[InsertMissingVariantsTx] missing=%d unique=%d ensuredItems=%d inserted=%d duration_ms=%d",
len(missing), len(uniqueVariants), len(ensuredItems), inserted, time.Since(start).Milliseconds())
return inserted, nil
}
func UpdateOrderLinesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
var updated int64
for _, line := range lines {
res, err := tx.Exec(`
UPDATE dbo.trOrderLine
SET
ItemCode = @p1,
ColorCode = @p2,
ItemDim2Code = @p3,
LineDescription = COALESCE(NULLIF(@p4,''), LineDescription),
LastUpdatedUserName = @p5,
LastUpdatedDate = GETDATE()
WHERE OrderHeaderID = @p6 AND OrderLineID = @p7
`, line.NewItemCode, line.NewColor, line.NewDim2, line.NewDesc, username, orderHeaderID, line.OrderLineID)
func NormalizeCdItemMapKey(itemTypeCode int16, itemCode string) string {
return strconv.FormatInt(int64(itemTypeCode), 10) + "|" + strings.ToUpper(strings.TrimSpace(itemCode))
}
func ensureCdItemTx(
tx *sql.Tx,
itemTypeCode int16,
itemCode string,
username string,
draft *models.OrderProductionCdItemDraft,
) error {
_, err := tx.Exec(`
IF NOT EXISTS (
SELECT 1
FROM dbo.cdItem
WHERE ItemTypeCode = @p1
AND ItemCode = @p2
)
BEGIN
;WITH Template AS (
SELECT TOP 1
ItemDimTypeCode, ProductTypeCode, ProductHierarchyID,
UnitOfMeasureCode1, UnitOfMeasureCode2, UnitConvertRate, UnitConvertRateNotFixed,
UseInternet, UsePOS, UseStore, EnablePartnerCompanies, UseManufacturing, UseSerialNumber,
GenerateOpticalDataMatrixCode, ByWeight, SupplyPeriod, GuaranteePeriod, ShelfLife, OrderLeadTime,
ItemAccountGrCode, ItemTaxGrCode, ItemPaymentPlanGrCode, ItemDiscountGrCode, ItemVendorGrCode,
PromotionGroupCode, PromotionGroupCode2, ProductCollectionGrCode, StorePriceLevelCode, PerceptionOfFashionCode,
CommercialRoleCode, StoreCapacityLevelCode, CustomsTariffNumberCode, IsFixedExpense, BOMEntityCode, CompanyCode,
IsBlocked, IsLocked, LockedDate, IsSalesOrderClosed, IsPurchaseOrderClosed, UseRoll, UseBatch,
MaxCreditCardInstallmentCount, GenerateSerialNumber, IsSubsequentDeliveryForR, IsSubsequentDeliveryForRI,
IGACommissionGroup, UniFreeCommissionGroup, CustomsProductGroupCode, IsUTSDeclaratedItem, IsStoreOrderClosed
FROM dbo.cdItem WITH (UPDLOCK, HOLDLOCK)
WHERE ItemTypeCode = @p1
AND ItemCode LIKE 'U%'
ORDER BY CreatedDate DESC
)
INSERT INTO dbo.cdItem (
ItemTypeCode, ItemCode,
ItemDimTypeCode, ProductTypeCode, ProductHierarchyID,
UnitOfMeasureCode1, UnitOfMeasureCode2, UnitConvertRate, UnitConvertRateNotFixed,
UseInternet, UsePOS, UseStore, EnablePartnerCompanies, UseManufacturing, UseSerialNumber,
GenerateOpticalDataMatrixCode, ByWeight, SupplyPeriod, GuaranteePeriod, ShelfLife, OrderLeadTime,
ItemAccountGrCode, ItemTaxGrCode, ItemPaymentPlanGrCode, ItemDiscountGrCode, ItemVendorGrCode,
PromotionGroupCode, PromotionGroupCode2, ProductCollectionGrCode, StorePriceLevelCode, PerceptionOfFashionCode,
CommercialRoleCode, StoreCapacityLevelCode, CustomsTariffNumberCode, IsFixedExpense, BOMEntityCode, CompanyCode,
IsBlocked, IsLocked, LockedDate, IsSalesOrderClosed, IsPurchaseOrderClosed,
CreatedUserName, CreatedDate, LastUpdatedUserName, LastUpdatedDate, RowGuid,
UseRoll, UseBatch, MaxCreditCardInstallmentCount, GenerateSerialNumber,
IsSubsequentDeliveryForR, IsSubsequentDeliveryForRI,
IGACommissionGroup, UniFreeCommissionGroup, CustomsProductGroupCode, IsUTSDeclaratedItem, IsStoreOrderClosed
)
SELECT
@p1, @p2,
t.ItemDimTypeCode, t.ProductTypeCode, t.ProductHierarchyID,
t.UnitOfMeasureCode1, t.UnitOfMeasureCode2, t.UnitConvertRate, t.UnitConvertRateNotFixed,
t.UseInternet, t.UsePOS, t.UseStore, t.EnablePartnerCompanies, t.UseManufacturing, t.UseSerialNumber,
t.GenerateOpticalDataMatrixCode, t.ByWeight, t.SupplyPeriod, t.GuaranteePeriod, t.ShelfLife, t.OrderLeadTime,
t.ItemAccountGrCode, t.ItemTaxGrCode, t.ItemPaymentPlanGrCode, t.ItemDiscountGrCode, t.ItemVendorGrCode,
t.PromotionGroupCode, t.PromotionGroupCode2, t.ProductCollectionGrCode, t.StorePriceLevelCode, t.PerceptionOfFashionCode,
t.CommercialRoleCode, t.StoreCapacityLevelCode, t.CustomsTariffNumberCode, t.IsFixedExpense, t.BOMEntityCode, t.CompanyCode,
t.IsBlocked, t.IsLocked, t.LockedDate, t.IsSalesOrderClosed, t.IsPurchaseOrderClosed,
@p3, GETDATE(), @p3, GETDATE(), NEWID(),
t.UseRoll, t.UseBatch, t.MaxCreditCardInstallmentCount, t.GenerateSerialNumber,
t.IsSubsequentDeliveryForR, t.IsSubsequentDeliveryForRI,
t.IGACommissionGroup, t.UniFreeCommissionGroup, t.CustomsProductGroupCode, t.IsUTSDeclaratedItem, t.IsStoreOrderClosed
FROM Template t;
IF @@ROWCOUNT = 0
BEGIN
INSERT INTO dbo.cdItem (
ItemTypeCode, ItemCode,
ItemDimTypeCode, ProductTypeCode, ProductHierarchyID,
UnitOfMeasureCode1, UnitOfMeasureCode2, UnitConvertRate, UnitConvertRateNotFixed,
UseInternet, UsePOS, UseStore, EnablePartnerCompanies, UseManufacturing, UseSerialNumber,
GenerateOpticalDataMatrixCode, ByWeight, SupplyPeriod, GuaranteePeriod, ShelfLife, OrderLeadTime,
ItemAccountGrCode, ItemTaxGrCode, ItemPaymentPlanGrCode, ItemDiscountGrCode, ItemVendorGrCode,
PromotionGroupCode, PromotionGroupCode2, ProductCollectionGrCode, StorePriceLevelCode, PerceptionOfFashionCode,
CommercialRoleCode, StoreCapacityLevelCode, CustomsTariffNumberCode, IsFixedExpense, BOMEntityCode, CompanyCode,
IsBlocked, IsLocked, LockedDate, IsSalesOrderClosed, IsPurchaseOrderClosed,
CreatedUserName, CreatedDate, LastUpdatedUserName, LastUpdatedDate, RowGuid,
UseRoll, UseBatch, MaxCreditCardInstallmentCount, GenerateSerialNumber,
IsSubsequentDeliveryForR, IsSubsequentDeliveryForRI,
IGACommissionGroup, UniFreeCommissionGroup, CustomsProductGroupCode, IsUTSDeclaratedItem, IsStoreOrderClosed
)
VALUES (
@p1, @p2,
2, 1, 2,
'AD', '', 0, 0,
1, 1, 1, 0, 1, 0,
0, 0, 0, 0, 0, 0,
'', '%10', '', '', '',
'', '', '0', '0', '0',
'0', '', '', 0, '', '1',
0, 0, '1900-01-01', 0, 0,
@p3, GETDATE(), @p3, GETDATE(), NEWID(),
0, 0, 12, 0,
0, 0,
'', '', '0', 0, 0
);
END
END
`, itemTypeCode, itemCode, username)
if err != nil {
return updated, err
return err
}
if rows, err := res.RowsAffected(); err == nil {
updated += rows
if draft == nil {
return nil
}
_, err = tx.Exec(`
UPDATE dbo.cdItem
SET
ItemDimTypeCode = COALESCE(@p3, ItemDimTypeCode),
ProductTypeCode = COALESCE(@p4, ProductTypeCode),
ProductHierarchyID = COALESCE(@p5, ProductHierarchyID),
UnitOfMeasureCode1 = COALESCE(NULLIF(@p6,''), UnitOfMeasureCode1),
ItemAccountGrCode = COALESCE(NULLIF(@p7,''), ItemAccountGrCode),
ItemTaxGrCode = CASE
WHEN NULLIF(@p8,'') IS NULL THEN ItemTaxGrCode
WHEN EXISTS (
SELECT 1
FROM dbo.cdItemTaxGr g WITH(NOLOCK)
WHERE LTRIM(RTRIM(g.ItemTaxGrCode)) = LTRIM(RTRIM(@p8))
) THEN @p8
ELSE ItemTaxGrCode
END,
ItemPaymentPlanGrCode = COALESCE(NULLIF(@p9,''), ItemPaymentPlanGrCode),
ItemDiscountGrCode = COALESCE(NULLIF(@p10,''), ItemDiscountGrCode),
ItemVendorGrCode = COALESCE(NULLIF(@p11,''), ItemVendorGrCode),
PromotionGroupCode = COALESCE(NULLIF(@p12,''), PromotionGroupCode),
ProductCollectionGrCode = COALESCE(NULLIF(@p13,''), ProductCollectionGrCode),
StorePriceLevelCode = COALESCE(NULLIF(@p14,''), StorePriceLevelCode),
PerceptionOfFashionCode = COALESCE(NULLIF(@p15,''), PerceptionOfFashionCode),
CommercialRoleCode = COALESCE(NULLIF(@p16,''), CommercialRoleCode),
StoreCapacityLevelCode = COALESCE(NULLIF(@p17,''), StoreCapacityLevelCode),
CustomsTariffNumberCode = COALESCE(NULLIF(@p18,''), CustomsTariffNumberCode),
CompanyCode = COALESCE(NULLIF(@p19,''), CompanyCode),
LastUpdatedUserName = @p20,
LastUpdatedDate = GETDATE()
WHERE ItemTypeCode = @p1
AND ItemCode = @p2;
`,
itemTypeCode,
itemCode,
draft.ItemDimTypeCode,
draft.ProductTypeCode,
draft.ProductHierarchyID,
draft.UnitOfMeasureCode1,
draft.ItemAccountGrCode,
draft.ItemTaxGrCode,
draft.ItemPaymentPlanGrCode,
draft.ItemDiscountGrCode,
draft.ItemVendorGrCode,
draft.PromotionGroupCode,
draft.ProductCollectionGrCode,
draft.StorePriceLevelCode,
draft.PerceptionOfFashionCode,
draft.CommercialRoleCode,
draft.StoreCapacityLevelCode,
draft.CustomsTariffNumberCode,
draft.CompanyCode,
username,
)
return err
}
// UpdateOrderLinesTx applies item/color/dimension/description/due-date changes
// to dbo.trOrderLine rows belonging to a single order header. Lines are written
// in batches of 300 so the statement stays under SQL Server's 2100-parameter
// limit (300 lines * 8 params + 2 trailing params). The affected-row count is
// taken from an OUTPUT table variable rather than @@ROWCOUNT.
//
// Returns the total number of updated rows and the first error encountered.
func UpdateOrderLinesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
	if len(lines) == 0 {
		return 0, nil
	}
	const chunkSize = 300
	var total int64
	for offset := 0; offset < len(lines); offset += chunkSize {
		upper := offset + chunkSize
		if upper > len(lines) {
			upper = len(lines)
		}
		batch := lines[offset:upper]
		tuples := make([]string, 0, len(batch))
		params := make([]any, 0, len(batch)*8+2)
		next := 1
		for _, ln := range batch {
			// A nil ItemDim1Code is passed through as SQL NULL so COALESCE
			// keeps the existing value; a non-nil one is trimmed first.
			var dim1 any
			if ln.ItemDim1Code != nil {
				dim1 = strings.TrimSpace(*ln.ItemDim1Code)
			}
			tuples = append(tuples, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d,@p%d,@p%d,@p%d,@p%d)", next, next+1, next+2, next+3, next+4, next+5, next+6, next+7))
			params = append(params,
				strings.TrimSpace(ln.OrderLineID),
				ln.NewItemCode,
				ln.NewColor,
				dim1,
				ln.NewDim2,
				ln.NewDesc,
				ln.OldDueDate,
				ln.NewDueDate,
			)
			next += 8
		}
		headerParam := next
		userParam := next + 1
		params = append(params, orderHeaderID, username)
		stmt := fmt.Sprintf(`
	SET NOCOUNT ON;
	DECLARE @updated TABLE (OrderLineID UNIQUEIDENTIFIER);
	;WITH src (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate) AS (
		SELECT *
		FROM (VALUES %s) AS v (OrderLineID, NewItemCode, NewColor, ItemDim1Code, NewDim2, NewDesc, OldDueDate, NewDueDate)
	)
	UPDATE l
	SET
		l.ItemCode = s.NewItemCode,
		l.ColorCode = s.NewColor,
		l.ItemDim1Code = COALESCE(s.ItemDim1Code, l.ItemDim1Code),
		l.ItemDim2Code = s.NewDim2,
		l.LineDescription = COALESCE(NULLIF(s.NewDesc,''), l.LineDescription),
		l.DeliveryDate = CASE WHEN ISDATE(s.NewDueDate) = 1 THEN CAST(s.NewDueDate AS DATETIME) ELSE l.DeliveryDate END,
		l.LastUpdatedUserName = @p%d,
		l.LastUpdatedDate = GETDATE()
	OUTPUT inserted.OrderLineID INTO @updated(OrderLineID)
	FROM dbo.trOrderLine l
	JOIN src s
		ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
	WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d);
	SELECT COUNT(1) AS UpdatedCount FROM @updated;
	`, strings.Join(tuples, ","), userParam, headerParam)
		began := time.Now()
		var batchUpdated int64
		if scanErr := tx.QueryRow(stmt, params...).Scan(&batchUpdated); scanErr != nil {
			log.Printf("[UpdateOrderLinesTx] ERROR orderHeaderID=%s chunk=%d-%d err=%v", orderHeaderID, offset, upper, scanErr)
			return total, fmt.Errorf("update lines chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", offset, upper, time.Since(began).Milliseconds(), scanErr)
		}
		log.Printf("[UpdateOrderLinesTx] orderHeaderID=%s chunk=%d-%d updated=%d duration_ms=%d", orderHeaderID, offset, upper, batchUpdated, time.Since(began).Milliseconds())
		total += batchUpdated
	}
	return total, nil
}
// VerifyOrderLineUpdatesTx re-reads the order lines inside the same transaction
// and compares the stored (uppercased, trimmed) item/color/dim2 values against
// the requested ones. It returns the number of mismatching lines, up to five
// human-readable sample descriptions, and any query/scan error. Input is
// processed in chunks of 300 to stay under the SQL Server parameter limit.
func VerifyOrderLineUpdatesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine) (int64, []string, error) {
	if len(lines) == 0 {
		return 0, nil, nil
	}
	const chunkSize = 300
	var mismatches int64
	examples := make([]string, 0, 5)
	for lo := 0; lo < len(lines); lo += chunkSize {
		hi := lo + chunkSize
		if hi > len(lines) {
			hi = len(lines)
		}
		batch := lines[lo:hi]
		tuples := make([]string, 0, len(batch))
		params := make([]any, 0, len(batch)*4+1)
		pos := 1
		for _, ln := range batch {
			tuples = append(tuples, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d)", pos, pos+1, pos+2, pos+3))
			params = append(params,
				strings.TrimSpace(ln.OrderLineID),
				strings.ToUpper(strings.TrimSpace(ln.NewItemCode)),
				strings.ToUpper(strings.TrimSpace(ln.NewColor)),
				strings.ToUpper(strings.TrimSpace(ln.NewDim2)),
			)
			pos += 4
		}
		headerParam := pos
		params = append(params, orderHeaderID)
		stmt := fmt.Sprintf(`
	SET NOCOUNT ON;
	WITH src (OrderLineID, NewItemCode, NewColor, NewDim2) AS (
		SELECT *
		FROM (VALUES %s) v(OrderLineID, NewItemCode, NewColor, NewDim2)
	)
	SELECT
		s.OrderLineID,
		ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') AS ActualItemCode,
		ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') AS ActualColorCode,
		ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') AS ActualDim2Code,
		s.NewItemCode,
		s.NewColor,
		s.NewDim2
	FROM src s
	JOIN dbo.trOrderLine l
		ON l.OrderLineID = CONVERT(UNIQUEIDENTIFIER, s.OrderLineID)
	WHERE l.OrderHeaderID = CONVERT(UNIQUEIDENTIFIER, @p%d)
		AND (
			ISNULL(UPPER(LTRIM(RTRIM(l.ItemCode))), '') <> s.NewItemCode OR
			ISNULL(UPPER(LTRIM(RTRIM(l.ColorCode))), '') <> s.NewColor OR
			ISNULL(UPPER(LTRIM(RTRIM(l.ItemDim2Code))), '') <> s.NewDim2
		);
	`, strings.Join(tuples, ","), headerParam)
		rows, err := tx.Query(stmt, params...)
		if err != nil {
			return mismatches, examples, err
		}
		// Rows are closed explicitly on every exit path; defer inside the
		// chunk loop would keep result sets open until the function returns.
		for rows.Next() {
			var lineID, gotItem, gotColor, gotDim2, wantItem, wantColor, wantDim2 string
			if scanErr := rows.Scan(&lineID, &gotItem, &gotColor, &gotDim2, &wantItem, &wantColor, &wantDim2); scanErr != nil {
				rows.Close()
				return mismatches, examples, scanErr
			}
			mismatches++
			if len(examples) < 5 {
				examples = append(examples, fmt.Sprintf(
					"lineID=%s expected=(%s,%s,%s) actual=(%s,%s,%s)",
					lineID, wantItem, wantColor, wantDim2, gotItem, gotColor, gotDim2,
				))
			}
		}
		if iterErr := rows.Err(); iterErr != nil {
			rows.Close()
			return mismatches, examples, iterErr
		}
		rows.Close()
	}
	return mismatches, examples, nil
}
// UpdateOrderHeaderAverageDueDateTx writes the header's AverageDueDate.
// A nil pointer is a no-op; a whitespace-only value clears the column to NULL;
// a non-empty value must parse as YYYY-MM-DD or an error is returned before
// any SQL runs. The header's audit columns are refreshed on every write.
func UpdateOrderHeaderAverageDueDateTx(tx *sql.Tx, orderHeaderID string, averageDueDate *string, username string) error {
	if averageDueDate == nil {
		return nil
	}
	due := strings.TrimSpace(*averageDueDate)
	if due != "" {
		// Validate client-side so a malformed date fails fast and cheaply.
		if _, parseErr := time.Parse("2006-01-02", due); parseErr != nil {
			return fmt.Errorf("invalid header average due date %q: %w", due, parseErr)
		}
	}
	_, execErr := tx.Exec(`
	UPDATE dbo.trOrderHeader
	SET
		AverageDueDate = CASE WHEN @p1 = '' THEN NULL ELSE CAST(@p1 AS DATETIME) END,
		LastUpdatedUserName = @p2,
		LastUpdatedDate = GETDATE()
	WHERE OrderHeaderID = @p3;
	`, due, username, orderHeaderID)
	return execErr
}
// TouchOrderHeaderTx refreshes the audit columns (LastUpdatedUserName /
// LastUpdatedDate) of a single trOrderHeader row and returns the number of
// rows affected (0 when the header does not exist).
//
// Bug fix: the RowsAffected error was previously swallowed (`return 0, nil`),
// so callers could not distinguish "header not found" from a driver failure.
// It is now propagated.
func TouchOrderHeaderTx(tx *sql.Tx, orderHeaderID string, username string) (int64, error) {
	res, err := tx.Exec(`
	UPDATE dbo.trOrderHeader
	SET
		LastUpdatedUserName = @p1,
		LastUpdatedDate = GETDATE()
	WHERE OrderHeaderID = @p2;
	`, username, orderHeaderID)
	if err != nil {
		return 0, err
	}
	rows, rowsErr := res.RowsAffected()
	if rowsErr != nil {
		return 0, rowsErr
	}
	return rows, nil
}
// sqlQueryRower abstracts the QueryRow method shared by *sql.DB and *sql.Tx,
// so the barcode helper functions below can run against either a plain
// connection or an open transaction.
type sqlQueryRower interface {
	QueryRow(query string, args ...any) *sql.Row
}
// plannedProductionBarcode describes one barcode row planned for insertion
// into dbo.prItemBarcode: the barcode value, its type code, and the full item
// variant key (item type/code, color, and the three dimension codes).
// NOTE(review): this type is not referenced anywhere in this section of the
// file — confirm it is used elsewhere before removing it.
type plannedProductionBarcode struct {
	Barcode         string
	BarcodeTypeCode string
	ItemTypeCode    int16
	ItemCode        string
	ColorCode       string
	ItemDim1Code    string
	ItemDim2Code    string
	ItemDim3Code    string
}
// barcodeTypeExists reports whether the (trimmed) barcode type code is present
// in dbo.cdBarcodeType. sql.ErrNoRows is treated as "not found", not an error.
func barcodeTypeExists(q sqlQueryRower, barcodeTypeCode string) (bool, error) {
	var one int
	scanErr := q.QueryRow(`
	SELECT TOP 1 1
	FROM dbo.cdBarcodeType
	WHERE BarcodeTypeCode = @p1
	`, strings.TrimSpace(barcodeTypeCode)).Scan(&one)
	switch {
	case scanErr == sql.ErrNoRows:
		return false, nil
	case scanErr != nil:
		return false, scanErr
	default:
		return true, nil
	}
}
// barcodeExists reports whether the (trimmed) barcode value already exists in
// dbo.prItemBarcode. The UPDLOCK/HOLDLOCK hints keep the checked row locked
// for the rest of the transaction so a concurrent insert cannot race us.
func barcodeExists(q sqlQueryRower, barcode string) (bool, error) {
	var one int
	scanErr := q.QueryRow(`
	SELECT TOP 1 1
	FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
	WHERE Barcode = @p1
	`, strings.TrimSpace(barcode)).Scan(&one)
	switch {
	case scanErr == sql.ErrNoRows:
		return false, nil
	case scanErr != nil:
		return false, scanErr
	default:
		return true, nil
	}
}
// existingVariantBarcode looks up a barcode already registered for the exact
// item variant (type, item, color, dim1-3) with unit of measure 'AD' and the
// given barcode type. When several exist, the numerically largest barcode wins
// (non-numeric barcodes sort last via the CASE, then by string DESC). Rows are
// read under UPDLOCK/HOLDLOCK so the answer stays valid for this transaction.
// Returns (barcode, true, nil) when found and ("", false, nil) when absent.
func existingVariantBarcode(
	q sqlQueryRower,
	barcodeTypeCode string,
	itemTypeCode int16,
	itemCode string,
	colorCode string,
	dim1 string,
	dim2 string,
	dim3 string,
) (string, bool, error) {
	const query = `
	SELECT TOP 1 LTRIM(RTRIM(ISNULL(Barcode, '')))
	FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
	WHERE BarcodeTypeCode = @p1
		AND ItemTypeCode = @p2
		AND ISNULL(LTRIM(RTRIM(ItemCode)), '') = @p3
		AND ISNULL(LTRIM(RTRIM(ColorCode)), '') = @p4
		AND ISNULL(LTRIM(RTRIM(ItemDim1Code)), '') = @p5
		AND ISNULL(LTRIM(RTRIM(ItemDim2Code)), '') = @p6
		AND ISNULL(LTRIM(RTRIM(ItemDim3Code)), '') = @p7
		AND ISNULL(LTRIM(RTRIM(UnitOfMeasureCode)), '') = 'AD'
	ORDER BY
		CASE
			WHEN ISNUMERIC(Barcode) = 1
			THEN CAST(Barcode AS BIGINT)
			ELSE 0
		END DESC,
		Barcode DESC
	`
	var found string
	scanErr := q.QueryRow(query,
		strings.TrimSpace(barcodeTypeCode),
		itemTypeCode,
		strings.TrimSpace(itemCode),
		strings.TrimSpace(colorCode),
		strings.TrimSpace(dim1),
		strings.TrimSpace(dim2),
		strings.TrimSpace(dim3),
	).Scan(&found)
	if scanErr == sql.ErrNoRows {
		return "", false, nil
	}
	if scanErr != nil {
		return "", false, scanErr
	}
	return strings.TrimSpace(found), true, nil
}
// maxNumericBarcode returns the largest purely-numeric barcode currently in
// dbo.prItemBarcode (0 when there are none). The whole scan runs under
// UPDLOCK/HOLDLOCK so the maximum stays stable for the caller's transaction.
func maxNumericBarcode(q sqlQueryRower) (int64, error) {
	const query = `
	SELECT ISNULL(MAX(
		CASE
			WHEN ISNUMERIC(Barcode) = 1
			THEN CAST(Barcode AS BIGINT)
			ELSE NULL
		END
	), 0)
	FROM dbo.prItemBarcode WITH (UPDLOCK, HOLDLOCK)
	`
	var highest int64
	scanErr := q.QueryRow(query).Scan(&highest)
	return highest, scanErr
}
// ValidateProductionBarcodePlan dry-runs the barcode allocation that would be
// performed for the given missing variants and returns one validation record
// per detected problem: unknown barcode type, a barcode that would be assigned
// twice within this plan, or a planned barcode that is already in use. An
// empty/nil result means the plan is clean. Validation messages are
// user-facing Turkish strings and must not be altered.
func ValidateProductionBarcodePlan(q sqlQueryRower, variants []models.OrderProductionMissingVariant, barcodeTypeCode string) ([]models.OrderProductionBarcodeValidation, error) {
	typeCode := strings.ToUpper(strings.TrimSpace(barcodeTypeCode))
	if len(variants) == 0 {
		return nil, nil
	}
	validations := make([]models.OrderProductionBarcodeValidation, 0)
	// An unknown barcode type invalidates the whole plan with a single record.
	typeExists, err := barcodeTypeExists(q, typeCode)
	if err != nil {
		return nil, err
	}
	if !typeExists {
		validations = append(validations, models.OrderProductionBarcodeValidation{
			Code:            "invalid_barcode_type",
			Message:         fmt.Sprintf("Barkod tipi bulunamadi: %s", typeCode),
			BarcodeTypeCode: typeCode,
		})
		return validations, nil
	}
	// Sort a copy of the input so the simulated allocation order is
	// deterministic; %05d pads ItemTypeCode so the string keys compare in
	// numeric order for non-negative type codes.
	sorted := append([]models.OrderProductionMissingVariant(nil), variants...)
	sort.Slice(sorted, func(i, j int) bool {
		left := sorted[i]
		right := sorted[j]
		leftKey := fmt.Sprintf("%05d|%s|%s|%s|%s|%s", left.ItemTypeCode, left.ItemCode, left.ColorCode, left.ItemDim1Code, left.ItemDim2Code, left.ItemDim3Code)
		rightKey := fmt.Sprintf("%05d|%s|%s|%s|%s|%s", right.ItemTypeCode, right.ItemCode, right.ColorCode, right.ItemDim1Code, right.ItemDim2Code, right.ItemDim3Code)
		return leftKey < rightKey
	})
	// Seed: largest numeric barcode currently stored; candidates are
	// maxBarcode+1, maxBarcode+2, ... in sorted-variant order.
	maxBarcode, err := maxNumericBarcode(q)
	if err != nil {
		return nil, err
	}
	nextOffset := int64(0)
	// planned tracks barcodes already handed out within this simulation so
	// intra-plan duplicates can be reported.
	planned := make(map[string]struct{}, len(sorted))
	for _, variant := range sorted {
		existingBarcode, exists, err := existingVariantBarcode(q, typeCode, variant.ItemTypeCode, variant.ItemCode, variant.ColorCode, variant.ItemDim1Code, variant.ItemDim2Code, variant.ItemDim3Code)
		if err != nil {
			return nil, err
		}
		// Variants that already own a barcode consume no candidate number.
		if exists && existingBarcode != "" {
			continue
		}
		nextOffset++
		barcode := strconv.FormatInt(maxBarcode+nextOffset, 10)
		if _, duplicated := planned[barcode]; duplicated {
			validations = append(validations, models.OrderProductionBarcodeValidation{
				Code:            "barcode_duplicate_in_plan",
				Message:         fmt.Sprintf("Planlanan barkod ayni istekte birden fazla kez olusuyor: %s", barcode),
				Barcode:         barcode,
				BarcodeTypeCode: typeCode,
				ItemTypeCode:    variant.ItemTypeCode,
				ItemCode:        strings.TrimSpace(variant.ItemCode),
				ColorCode:       strings.TrimSpace(variant.ColorCode),
				ItemDim1Code:    strings.TrimSpace(variant.ItemDim1Code),
				ItemDim2Code:    strings.TrimSpace(variant.ItemDim2Code),
				ItemDim3Code:    strings.TrimSpace(variant.ItemDim3Code),
			})
			continue
		}
		planned[barcode] = struct{}{}
		// The candidate must also be free in the database itself.
		inUse, err := barcodeExists(q, barcode)
		if err != nil {
			return nil, err
		}
		if inUse {
			validations = append(validations, models.OrderProductionBarcodeValidation{
				Code:            "barcode_in_use",
				Message:         fmt.Sprintf("Barkod daha once kullanilmis: %s (%s / %s / %s / %s)", barcode, strings.TrimSpace(variant.ItemCode), strings.TrimSpace(variant.ColorCode), strings.TrimSpace(variant.ItemDim1Code), strings.TrimSpace(variant.ItemDim2Code)),
				Barcode:         barcode,
				BarcodeTypeCode: typeCode,
				ItemTypeCode:    variant.ItemTypeCode,
				ItemCode:        strings.TrimSpace(variant.ItemCode),
				ColorCode:       strings.TrimSpace(variant.ColorCode),
				ItemDim1Code:    strings.TrimSpace(variant.ItemDim1Code),
				ItemDim2Code:    strings.TrimSpace(variant.ItemDim2Code),
				ItemDim3Code:    strings.TrimSpace(variant.ItemDim3Code),
			})
		}
	}
	return validations, nil
}
// InsertItemBarcodesTx creates 'BAGGI3' barcodes for every distinct item
// variant referenced by the given order lines that does not yet have one in
// dbo.prItemBarcode. Line IDs are de-duplicated (first-seen order kept), then
// processed in batches of 200; if a batch insert fails, the function falls
// back to inserting line by line so a single problematic line cannot sink the
// whole batch. Duplicate-key errors on the single-line path are logged and
// skipped. Returns the number of barcode rows actually inserted.
func InsertItemBarcodesTx(tx *sql.Tx, orderHeaderID string, lines []models.OrderProductionUpdateLine, username string) (int64, error) {
	start := time.Now()
	if len(lines) == 0 {
		log.Printf("[InsertItemBarcodesTx] lines=0 inserted=0 duration_ms=0")
		return 0, nil
	}
	// De-duplicate incoming order line IDs, keeping first-seen order.
	lineIDs := make([]string, 0, len(lines))
	seen := make(map[string]struct{}, len(lines))
	for _, line := range lines {
		lineID := strings.TrimSpace(line.OrderLineID)
		if lineID == "" {
			continue
		}
		if _, ok := seen[lineID]; ok {
			continue
		}
		seen[lineID] = struct{}{}
		lineIDs = append(lineIDs, lineID)
	}
	if len(lineIDs) == 0 {
		log.Printf("[InsertItemBarcodesTx] uniqueLineIDs=0 inserted=0")
		return 0, nil
	}
	var inserted int64
	// Fallback query for a single order line. The seed subquery picks the
	// largest numeric BAGGI3 barcode (max 8 chars) and floors it at 36999999
	// so new barcodes start from 37000000.
	// NOTE(review): this string is NOT passed through fmt.Sprintf, so the
	// doubled %% reaches SQL literally as '%%[^0-9]%%'. T-SQL LIKE treats
	// adjacent % wildcards like a single one, so behavior matches
	// '%[^0-9]%' — but confirm the doubling was intentional.
	singleLineQuery := `
	SET NOCOUNT ON;
	INSERT INTO dbo.prItemBarcode
	(
		Barcode,
		BarcodeTypeCode,
		ItemTypeCode,
		ItemCode,
		ColorCode,
		ItemDim1Code,
		ItemDim2Code,
		ItemDim3Code,
		UnitOfMeasureCode,
		Qty,
		CreatedUserName,
		CreatedDate,
		LastUpdatedUserName,
		LastUpdatedDate,
		RowGuid
	)
	SELECT
		CAST(seed.MaxBarcode + 1 AS NVARCHAR(50)),
		'BAGGI3',
		src.ItemTypeCode,
		src.ItemCode,
		src.ColorCode,
		src.ItemDim1Code,
		src.ItemDim2Code,
		src.ItemDim3Code,
		'AD',
		1,
		@p3,
		GETDATE(),
		@p3,
		GETDATE(),
		NEWID()
	FROM (
		SELECT DISTINCT
			l.ItemTypeCode,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
			UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
		FROM dbo.trOrderLine l
		WHERE l.OrderHeaderID = @p2
			AND CAST(l.OrderLineID AS NVARCHAR(50)) = @p1
			AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
	) src
	CROSS JOIN (
		SELECT
			CASE
				WHEN ISNULL(MAX(
					CASE
						WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
							AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
						THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
						ELSE NULL
					END
				), 0) < 36999999
				THEN 36999999
				ELSE ISNULL(MAX(
					CASE
						WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
							AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
						THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
						ELSE NULL
					END
				), 0)
			END AS MaxBarcode
		FROM dbo.prItemBarcode
		WHERE BarcodeTypeCode = 'BAGGI3'
			AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
	) seed
	WHERE NOT EXISTS (
		SELECT 1
		FROM dbo.prItemBarcode b
		WHERE b.ItemTypeCode = src.ItemTypeCode
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
	);
	`
	// execSingle inserts barcodes for one order line, tolerating duplicate-key
	// races (logged and skipped) and accumulating the inserted count.
	execSingle := func(globalIndex int, lineID string) error {
		lineStart := time.Now()
		res, err := tx.Exec(singleLineQuery, lineID, orderHeaderID, username)
		if err != nil {
			if isDuplicateBarcodeInsertErr(err) {
				log.Printf("[InsertItemBarcodesTx] skip duplicate lineIndex=%d lineID=%s err=%v", globalIndex, lineID, err)
				return nil
			}
			return fmt.Errorf("upsert item barcodes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", globalIndex, globalIndex+1, time.Since(lineStart).Milliseconds(), err)
		}
		rows, _ := res.RowsAffected()
		inserted += rows
		log.Printf(
			"[InsertItemBarcodesTx] lineIndex=%d lineID=%s inserted=%d cumulative=%d duration_ms=%d",
			globalIndex,
			lineID,
			rows,
			inserted,
			time.Since(lineStart).Milliseconds(),
		)
		return nil
	}
	// Batch path: 200 line IDs per statement keeps the parameter count low.
	const chunkSize = 200
	for i := 0; i < len(lineIDs); i += chunkSize {
		end := i + chunkSize
		if end > len(lineIDs) {
			end = len(lineIDs)
		}
		chunk := lineIDs[i:end]
		values := make([]string, 0, len(chunk))
		args := make([]any, 0, len(chunk)+2)
		paramPos := 1
		for _, lineID := range chunk {
			values = append(values, fmt.Sprintf("(@p%d)", paramPos))
			args = append(args, lineID)
			paramPos++
		}
		orderHeaderParam := paramPos
		usernameParam := paramPos + 1
		args = append(args, orderHeaderID, username)
		// Batch variant of singleLineQuery: the VALUES list of line IDs is
		// joined in, and ROW_NUMBER() spreads consecutive barcode numbers
		// across the distinct variants. The %% escapes here ARE consumed by
		// Sprintf and reach SQL as a single %.
		batchQuery := fmt.Sprintf(`
	SET NOCOUNT ON;
	INSERT INTO dbo.prItemBarcode
	(
		Barcode,
		BarcodeTypeCode,
		ItemTypeCode,
		ItemCode,
		ColorCode,
		ItemDim1Code,
		ItemDim2Code,
		ItemDim3Code,
		UnitOfMeasureCode,
		Qty,
		CreatedUserName,
		CreatedDate,
		LastUpdatedUserName,
		LastUpdatedDate,
		RowGuid
	)
	SELECT
		CAST(seed.MaxBarcode + ROW_NUMBER() OVER (
			ORDER BY src.ItemTypeCode, src.ItemCode, src.ColorCode, src.ItemDim1Code, src.ItemDim2Code, src.ItemDim3Code
		) AS NVARCHAR(50)),
		'BAGGI3',
		src.ItemTypeCode,
		src.ItemCode,
		src.ColorCode,
		src.ItemDim1Code,
		src.ItemDim2Code,
		src.ItemDim3Code,
		'AD',
		1,
		@p%d,
		GETDATE(),
		@p%d,
		GETDATE(),
		NEWID()
	FROM (
		SELECT DISTINCT
			l.ItemTypeCode,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemCode,'')))) AS ItemCode,
			UPPER(LTRIM(RTRIM(ISNULL(l.ColorCode,'')))) AS ColorCode,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim1Code,'')))) AS ItemDim1Code,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim2Code,'')))) AS ItemDim2Code,
			UPPER(LTRIM(RTRIM(ISNULL(l.ItemDim3Code,'')))) AS ItemDim3Code
		FROM dbo.trOrderLine l
		JOIN (VALUES %s) ids(OrderLineID)
			ON CAST(l.OrderLineID AS NVARCHAR(50)) = ids.OrderLineID
		WHERE l.OrderHeaderID = @p%d
			AND NULLIF(LTRIM(RTRIM(ISNULL(l.ItemCode,''))), '') IS NOT NULL
	) src
	CROSS JOIN (
		SELECT
			CASE
				WHEN ISNULL(MAX(
					CASE
						WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
							AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
						THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
						ELSE NULL
					END
				), 0) < 36999999
				THEN 36999999
				ELSE ISNULL(MAX(
					CASE
						WHEN LTRIM(RTRIM(ISNULL(Barcode,''))) NOT LIKE '%%[^0-9]%%'
							AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) BETWEEN 1 AND 18
						THEN CAST(LTRIM(RTRIM(ISNULL(Barcode,''))) AS BIGINT)
						ELSE NULL
					END
				), 0)
			END AS MaxBarcode
		FROM dbo.prItemBarcode
		WHERE BarcodeTypeCode = 'BAGGI3'
			AND LEN(LTRIM(RTRIM(ISNULL(Barcode,'')))) <= 8
	) seed
	WHERE NOT EXISTS (
		SELECT 1
		FROM dbo.prItemBarcode b
		WHERE b.ItemTypeCode = src.ItemTypeCode
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = src.ItemCode
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = src.ColorCode
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = src.ItemDim1Code
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = src.ItemDim2Code
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = src.ItemDim3Code
	);
	`, usernameParam, usernameParam, strings.Join(values, ","), orderHeaderParam)
		chunkStart := time.Now()
		res, err := tx.Exec(batchQuery, args...)
		if err == nil {
			rows, _ := res.RowsAffected()
			inserted += rows
			log.Printf(
				"[InsertItemBarcodesTx] batch=%d-%d inserted=%d cumulative=%d duration_ms=%d",
				i,
				end,
				rows,
				inserted,
				time.Since(chunkStart).Milliseconds(),
			)
			continue
		}
		// Batch failed: retry each line individually so one bad line does not
		// block the rest of the chunk.
		log.Printf("[InsertItemBarcodesTx] batch fallback=%d-%d err=%v", i, end, err)
		for j, lineID := range chunk {
			if lineErr := execSingle(i+j, lineID); lineErr != nil {
				log.Printf("[InsertItemBarcodesTx] ERROR lineIndex=%d lineID=%s err=%v", i+j, lineID, lineErr)
				return inserted, lineErr
			}
		}
	}
	log.Printf(
		"[InsertItemBarcodesTx] lines=%d unique=%d inserted=%d duration_ms=%d",
		len(lines),
		len(lineIDs),
		inserted,
		time.Since(start).Milliseconds(),
	)
	return inserted, nil
}
// InsertItemBarcodesByTargetsTx allocates and inserts one 'BAGGI3' barcode per
// distinct target variant that does not yet have one. Unlike the order-line
// based variant, this function allocates barcode numbers client-side: it reads
// a locked seed (max numeric barcode across prItemBarcode and, if the table
// exists, tbStokBarkodu), floors it at 36999999, then probes candidates
// seed+1, seed+2, ... until a free number is found (bounded at 2000 retries
// per variant). Duplicate-key insert errors advance the candidate and retry.
// Returns the number of rows inserted.
func InsertItemBarcodesByTargetsTx(tx *sql.Tx, targets []models.OrderProductionMissingVariant, username string) (int64, error) {
	start := time.Now()
	if len(targets) == 0 {
		log.Printf("[InsertItemBarcodesByTargetsTx] targets=0 inserted=0 duration_ms=0")
		return 0, nil
	}
	// Normalize (uppercase + trim) every variant key and drop duplicates and
	// targets with an empty item code.
	uniqueTargets := make([]models.OrderProductionMissingVariant, 0, len(targets))
	seen := make(map[string]struct{}, len(targets))
	for _, t := range targets {
		itemCode := strings.ToUpper(strings.TrimSpace(t.ItemCode))
		if itemCode == "" {
			continue
		}
		key := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
			t.ItemTypeCode,
			itemCode,
			strings.ToUpper(strings.TrimSpace(t.ColorCode)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim1Code)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim2Code)),
			strings.ToUpper(strings.TrimSpace(t.ItemDim3Code)),
		)
		if _, ok := seen[key]; ok {
			continue
		}
		seen[key] = struct{}{}
		// t is a copy, so mutating it here does not touch the caller's slice.
		t.ItemCode = itemCode
		t.ColorCode = strings.ToUpper(strings.TrimSpace(t.ColorCode))
		t.ItemDim1Code = strings.ToUpper(strings.TrimSpace(t.ItemDim1Code))
		t.ItemDim2Code = strings.ToUpper(strings.TrimSpace(t.ItemDim2Code))
		t.ItemDim3Code = strings.ToUpper(strings.TrimSpace(t.ItemDim3Code))
		uniqueTargets = append(uniqueTargets, t)
	}
	if len(uniqueTargets) == 0 {
		log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=0 inserted=0 duration_ms=%d", len(targets), time.Since(start).Milliseconds())
		return 0, nil
	}
	if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/start"); err != nil {
		return 0, err
	}
	// Compute the barcode seed by reading both prItemBarcode and (if present)
	// tbStokBarkodu under locks; this reduces the duplicate-key risk coming
	// from the trigger side. TABLOCKX serializes concurrent allocators.
	var maxBarcode int64
	maxPrQuery := `
	SELECT ISNULL(MAX(v.BarcodeNum), 0)
	FROM (
		SELECT
			CASE
				WHEN LTRIM(RTRIM(ISNULL(pb.Barcode,''))) NOT LIKE '%[^0-9]%'
					AND LEN(LTRIM(RTRIM(ISNULL(pb.Barcode,'')))) BETWEEN 1 AND 18
				THEN CAST(LTRIM(RTRIM(ISNULL(pb.Barcode,''))) AS BIGINT)
				ELSE NULL
			END AS BarcodeNum
		FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
		WHERE pb.BarcodeTypeCode = 'BAGGI3'
	) v
	WHERE v.BarcodeNum IS NOT NULL;
	`
	if err := tx.QueryRow(maxPrQuery).Scan(&maxBarcode); err != nil {
		return 0, fmt.Errorf("barcode seed query failed: %w", err)
	}
	// tbStokBarkodu may not exist in every database; detect it first.
	var hasTb int
	if err := tx.QueryRow(`SELECT CASE WHEN OBJECT_ID(N'dbo.tbStokBarkodu', N'U') IS NULL THEN 0 ELSE 1 END`).Scan(&hasTb); err != nil {
		return 0, fmt.Errorf("barcode seed object check failed: %w", err)
	}
	if hasTb == 1 {
		var maxTb int64
		maxTbQuery := `
	SELECT ISNULL(MAX(v.BarcodeNum), 0)
	FROM (
		SELECT
			CASE
				WHEN LTRIM(RTRIM(ISNULL(sb.Barcode,''))) NOT LIKE '%[^0-9]%'
					AND LEN(LTRIM(RTRIM(ISNULL(sb.Barcode,'')))) BETWEEN 1 AND 18
				THEN CAST(LTRIM(RTRIM(ISNULL(sb.Barcode,''))) AS BIGINT)
				ELSE NULL
			END AS BarcodeNum
		FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK, TABLOCKX)
	) v
	WHERE v.BarcodeNum IS NOT NULL;
	`
		if err := tx.QueryRow(maxTbQuery).Scan(&maxTb); err != nil {
			return 0, fmt.Errorf("barcode seed tbStokBarkodu query failed: %w", err)
		}
		if maxTb > maxBarcode {
			maxBarcode = maxTb
		}
	}
	// Floor the seed so new barcodes start from 37000000.
	if maxBarcode < 36999999 {
		maxBarcode = 36999999
	}
	// Candidate-availability probes; the "WithTb" variant also checks the
	// legacy table when it exists.
	existsBarcodeQuery := `
	SELECT CASE WHEN EXISTS (
		SELECT 1
		FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
		WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
	) THEN 1 ELSE 0 END;
	`
	existsBarcodeWithTbQuery := `
	SELECT CASE WHEN EXISTS (
		SELECT 1
		FROM dbo.prItemBarcode pb WITH (UPDLOCK, HOLDLOCK)
		WHERE LTRIM(RTRIM(ISNULL(pb.Barcode,''))) = @p1
	) OR EXISTS (
		SELECT 1
		FROM dbo.tbStokBarkodu sb WITH (UPDLOCK, HOLDLOCK)
		WHERE LTRIM(RTRIM(ISNULL(sb.Barcode,''))) = @p1
	) THEN 1 ELSE 0 END;
	`
	hasVariantBarcodeQuery := `
	SELECT CASE WHEN EXISTS (
		SELECT 1
		FROM dbo.prItemBarcode b WITH (UPDLOCK, HOLDLOCK)
		WHERE b.ItemTypeCode = @p1
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p2
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p3
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p4
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p5
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p6
	) THEN 1 ELSE 0 END;
	`
	// The insert re-checks variant absence atomically via NOT EXISTS.
	insertOneQuery := `
	INSERT INTO dbo.prItemBarcode
	(
		Barcode,
		BarcodeTypeCode,
		ItemTypeCode,
		ItemCode,
		ColorCode,
		ItemDim1Code,
		ItemDim2Code,
		ItemDim3Code,
		UnitOfMeasureCode,
		Qty,
		CreatedUserName,
		CreatedDate,
		LastUpdatedUserName,
		LastUpdatedDate,
		RowGuid
	)
	SELECT
		@p1,
		'BAGGI3',
		@p2,
		@p3,
		@p4,
		@p5,
		@p6,
		@p7,
		'AD',
		1,
		@p8,
		GETDATE(),
		@p8,
		GETDATE(),
		NEWID()
	WHERE NOT EXISTS (
		SELECT 1
		FROM dbo.prItemBarcode b
		WHERE b.ItemTypeCode = @p2
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemCode,'')))) = @p3
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ColorCode,'')))) = @p4
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim1Code,'')))) = @p5
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim2Code,'')))) = @p6
			AND UPPER(LTRIM(RTRIM(ISNULL(b.ItemDim3Code,'')))) = @p7
	);
	`
	var inserted int64
	for _, t := range uniqueTargets {
		if err := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/before_target"); err != nil {
			return inserted, err
		}
		// Skip variants that already own a barcode.
		var hasVariant int
		if err := tx.QueryRow(
			hasVariantBarcodeQuery,
			t.ItemTypeCode,
			t.ItemCode,
			t.ColorCode,
			t.ItemDim1Code,
			t.ItemDim2Code,
			t.ItemDim3Code,
		).Scan(&hasVariant); err != nil {
			return inserted, fmt.Errorf("variant barcode exists check failed: %w", err)
		}
		if hasVariant == 1 {
			continue
		}
		// Probe candidates seed+1, seed+2, ... until one is free; hard cap of
		// 2000 attempts guards against a pathological barcode space.
		retry := 0
		for {
			retry++
			if retry > 2000 {
				return inserted, fmt.Errorf("barcode allocation exceeded retry limit item=%s color=%s dim1=%s", t.ItemCode, t.ColorCode, t.ItemDim1Code)
			}
			candidateNum := maxBarcode + 1
			candidate := strconv.FormatInt(candidateNum, 10)
			var exists int
			if hasTb == 1 {
				if err := tx.QueryRow(existsBarcodeWithTbQuery, candidate).Scan(&exists); err != nil {
					return inserted, fmt.Errorf("barcode exists check(tb) failed: %w", err)
				}
			} else {
				if err := tx.QueryRow(existsBarcodeQuery, candidate).Scan(&exists); err != nil {
					return inserted, fmt.Errorf("barcode exists check failed: %w", err)
				}
			}
			if exists == 1 {
				// Candidate taken: advance the seed and try the next number.
				maxBarcode = candidateNum
				continue
			}
			res, err := tx.Exec(
				insertOneQuery,
				candidate,
				t.ItemTypeCode,
				t.ItemCode,
				t.ColorCode,
				t.ItemDim1Code,
				t.ItemDim2Code,
				t.ItemDim3Code,
				username,
			)
			if err != nil {
				// A duplicate-key race (e.g. from a trigger) just burns this
				// candidate; any other error aborts.
				if isDuplicateBarcodeInsertErr(err) {
					maxBarcode = candidateNum
					continue
				}
				return inserted, fmt.Errorf("insert item barcode failed item=%s color=%s dim1=%s duration_ms=%d: %w",
					t.ItemCode, t.ColorCode, t.ItemDim1Code, time.Since(start).Milliseconds(), err)
			}
			affected, _ := res.RowsAffected()
			if affected > 0 {
				inserted += affected
				maxBarcode = candidateNum
			}
			break
		}
	}
	if txErr := ensureTxStillActive(tx, "InsertItemBarcodesByTargetsTx/after_batch"); txErr != nil {
		return inserted, txErr
	}
	log.Printf("[InsertItemBarcodesByTargetsTx] targets=%d unique=%d inserted=%d duration_ms=%d",
		len(targets), len(uniqueTargets), inserted, time.Since(start).Milliseconds())
	return inserted, nil
}
// ensureTxStillActive verifies that tx is non-nil and that the server-side
// transaction is still open (SELECT @@TRANCOUNT > 0). The where string tags
// the returned error with the call site for easier diagnosis.
func ensureTxStillActive(tx *sql.Tx, where string) error {
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	var depth int
	if scanErr := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&depth); scanErr != nil {
		return fmt.Errorf("tx state query failed at %s: %w", where, scanErr)
	}
	if depth > 0 {
		return nil
	}
	return fmt.Errorf("tx closed unexpectedly at %s (trancount=%d)", where, depth)
}
// isDuplicateBarcodeInsertErr reports whether err looks like a SQL Server
// duplicate-key violation involving one of the barcode tables (prItemBarcode
// or the legacy tbStokBarkodu), or any generic unique-constraint violation.
// The check is a case-insensitive substring match on the error text, so it
// works regardless of which driver error type is returned.
func isDuplicateBarcodeInsertErr(err error) bool {
	if err == nil {
		return false
	}
	text := strings.ToLower(err.Error())
	// Everything we classify as "duplicate" mentions "duplicate key".
	if !strings.Contains(text, "duplicate key") {
		return false
	}
	for _, marker := range []string{"tbstokbarkodu", "pritembarcode", "unique"} {
		if strings.Contains(text, marker) {
			return true
		}
	}
	return false
}
// UpsertItemAttributesTx writes item attribute rows (update-then-insert) into
// dbo.prItemAttribute in parameter-safe batches. Before any attribute is
// written, it guarantees — inside the same transaction — that every referenced
// item code exists in cdItem, preventing FK_prItemAttribute_ItemCode
// violations. Returns the cumulative rows-affected count across all batches
// (as reported by the driver for the multi-statement batch).
func UpsertItemAttributesTx(tx *sql.Tx, attrs []models.OrderProductionItemAttributeRow, username string) (int64, error) {
	start := time.Now()
	if len(attrs) == 0 {
		log.Printf("[UpsertItemAttributesTx] attrs=0 affected=0 duration_ms=0")
		return 0, nil
	}
	// To prevent FK_prItemAttribute_ItemCode errors, guarantee within the
	// transaction that the related item codes exist in cdItem before writing
	// any attributes. Each distinct (type, code) pair is ensured only once.
	seenCodes := make(map[string]struct{}, len(attrs))
	for _, a := range attrs {
		itemTypeCode := a.ItemTypeCode
		// Treat non-positive type codes as the default item type 1.
		if itemTypeCode <= 0 {
			itemTypeCode = 1
		}
		itemCode := strings.ToUpper(strings.TrimSpace(a.ItemCode))
		if itemCode == "" {
			continue
		}
		key := NormalizeCdItemMapKey(int16(itemTypeCode), itemCode)
		if _, ok := seenCodes[key]; ok {
			continue
		}
		seenCodes[key] = struct{}{}
		if err := ensureCdItemTx(tx, int16(itemTypeCode), itemCode, username, nil); err != nil {
			return 0, fmt.Errorf("ensure cdItem before item attributes failed itemCode=%s: %w", itemCode, err)
		}
	}
	// Because of the SQL Server parameter limit (2100), a batched set-based
	// upsert is used: 400 rows * 4 params + 1 username param = 1601.
	const chunkSize = 400
	var affected int64
	for i := 0; i < len(attrs); i += chunkSize {
		end := i + chunkSize
		if end > len(attrs) {
			end = len(attrs)
		}
		chunk := attrs[i:end]
		values := make([]string, 0, len(chunk))
		args := make([]any, 0, len(chunk)*4+1)
		paramPos := 1
		for _, a := range chunk {
			values = append(values, fmt.Sprintf("(@p%d,@p%d,@p%d,@p%d)", paramPos, paramPos+1, paramPos+2, paramPos+3))
			args = append(args, a.ItemTypeCode, a.ItemCode, a.AttributeTypeCode, a.AttributeCode)
			paramPos += 4
		}
		usernameParam := paramPos
		args = append(args, username)
		// Two statements per batch sharing the same VALUES list: an UPDATE of
		// existing (type, item, attribute-type) rows, then an INSERT of the
		// rows the UPDATE did not match (anti-join on tgt.ItemCode IS NULL).
		query := fmt.Sprintf(`
	WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
		SELECT *
		FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
	)
	UPDATE tgt
	SET
		tgt.AttributeCode = src.AttributeCode,
		tgt.LastUpdatedUserName = @p%d,
		tgt.LastUpdatedDate = GETDATE()
	FROM dbo.prItemAttribute tgt
	JOIN src
		ON src.ItemTypeCode = tgt.ItemTypeCode
		AND src.ItemCode = tgt.ItemCode
		AND src.AttributeTypeCode = tgt.AttributeTypeCode;
	WITH src (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode) AS (
		SELECT *
		FROM (VALUES %s) AS v (ItemTypeCode, ItemCode, AttributeTypeCode, AttributeCode)
	)
	INSERT INTO dbo.prItemAttribute (
		ItemTypeCode,
		ItemCode,
		AttributeTypeCode,
		AttributeCode,
		CreatedUserName,
		CreatedDate,
		LastUpdatedUserName,
		LastUpdatedDate,
		RowGuid
	)
	SELECT
		src.ItemTypeCode,
		src.ItemCode,
		src.AttributeTypeCode,
		src.AttributeCode,
		@p%d,
		GETDATE(),
		@p%d,
		GETDATE(),
		NEWID()
	FROM src
	LEFT JOIN dbo.prItemAttribute tgt
		ON src.ItemTypeCode = tgt.ItemTypeCode
		AND src.ItemCode = tgt.ItemCode
		AND src.AttributeTypeCode = tgt.AttributeTypeCode
	WHERE tgt.ItemCode IS NULL;
	`, strings.Join(values, ","), usernameParam, strings.Join(values, ","), usernameParam, usernameParam)
		chunkStart := time.Now()
		res, err := tx.Exec(query, args...)
		if err != nil {
			log.Printf("[UpsertItemAttributesTx] ERROR chunk=%d-%d err=%v", i, end, err)
			return affected, fmt.Errorf("upsert item attributes chunk failed chunkStart=%d chunkEnd=%d duration_ms=%d: %w", i, end, time.Since(chunkStart).Milliseconds(), err)
		}
		chunkAffected, _ := res.RowsAffected()
		affected += chunkAffected
		log.Printf("[UpsertItemAttributesTx] chunk=%d-%d chunkAffected=%d cumulative=%d duration_ms=%d",
			i, end, chunkAffected, affected, time.Since(chunkStart).Milliseconds())
	}
	log.Printf("[UpsertItemAttributesTx] attrs=%d affected=%d duration_ms=%d",
		len(attrs), affected, time.Since(start).Milliseconds())
	return affected, nil
}
// GetOrderProductionLookupOptions loads every dropdown/lookup list used by the
// order-production cdItem screen in one call. Each lookup is fetched by its own
// query; on the first failing lookup the partially-filled result is returned
// together with the error (earlier lists already written stay populated).
func GetOrderProductionLookupOptions(mssql *sql.DB) (models.OrderProductionCdItemLookups, error) {
	out := models.OrderProductionCdItemLookups{}
	// Name is used only for logging/error context; Target points into `out`.
	queryPairs := []struct {
		Name   string
		Query  string
		Target *[]models.OrderProductionLookupOption
	}{
		{"ItemDimTypeCodes", `SELECT
	CAST(t.ItemDimTypeCode AS NVARCHAR(50)) AS Code,
	ISNULL(d.ItemDimTypeDescription, CAST(t.ItemDimTypeCode AS NVARCHAR(50))) AS [Description]
FROM dbo.bsItemDimType t WITH(NOLOCK)
LEFT JOIN dbo.bsItemDimTypeDesc d WITH(NOLOCK)
	ON d.ItemDimTypeCode = t.ItemDimTypeCode
	AND d.LangCode = 'TR'
WHERE ISNULL(t.IsBlocked, 0) = 0
ORDER BY t.ItemDimTypeCode`, &out.ItemDimTypeCodes},
		{"ProductTypeCodes", `SELECT DISTINCT CAST(ProductTypeCode AS NVARCHAR(50)) AS Code, CAST(ProductTypeCode AS NVARCHAR(50)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE ProductTypeCode IS NOT NULL ORDER BY Code`, &out.ProductTypeCodes},
		{"ProductHierarchyIDs", `SELECT
	CAST(h.ProductHierarchyID AS NVARCHAR(50)) AS Code,
	LTRIM(RTRIM(
		CONCAT(
			CAST(ISNULL(h.ProductHierarchyLevelCode01, 0) AS NVARCHAR(50)),
			CASE
				WHEN ISNULL(d.ProductHierarchyLevelDescription, '') <> '' THEN CONCAT(' - ', d.ProductHierarchyLevelDescription)
				ELSE ''
			END
		)
	)) AS [Description]
FROM dbo.dfProductHierarchy h WITH(NOLOCK)
LEFT JOIN dbo.cdProductHierarchyLevelDesc d WITH(NOLOCK)
	ON d.ProductHierarchyLevelCode = h.ProductHierarchyLevelCode01
	AND d.LangCode = 'TR'
ORDER BY h.ProductHierarchyID`, &out.ProductHierarchyIDs},
		{"UnitOfMeasureCode1List", `SELECT DISTINCT CAST(UnitOfMeasureCode1 AS NVARCHAR(50)) AS Code, CAST(UnitOfMeasureCode1 AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(UnitOfMeasureCode1 AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.UnitOfMeasureCode1List},
		{"ItemAccountGrCodes", `SELECT DISTINCT CAST(ItemAccountGrCode AS NVARCHAR(50)) AS Code, CAST(ItemAccountGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemAccountGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemAccountGrCodes},
		{"ItemTaxGrCodes", `SELECT DISTINCT CAST(ItemTaxGrCode AS NVARCHAR(50)) AS Code, CAST(ItemTaxGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemTaxGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemTaxGrCodes},
		{"ItemPaymentPlanGrCodes", `SELECT DISTINCT CAST(ItemPaymentPlanGrCode AS NVARCHAR(50)) AS Code, CAST(ItemPaymentPlanGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemPaymentPlanGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemPaymentPlanGrCodes},
		{"ItemDiscountGrCodes", `SELECT DISTINCT CAST(ItemDiscountGrCode AS NVARCHAR(50)) AS Code, CAST(ItemDiscountGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemDiscountGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemDiscountGrCodes},
		{"ItemVendorGrCodes", `SELECT DISTINCT CAST(ItemVendorGrCode AS NVARCHAR(50)) AS Code, CAST(ItemVendorGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ItemVendorGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ItemVendorGrCodes},
		{"PromotionGroupCodes", `SELECT DISTINCT CAST(PromotionGroupCode AS NVARCHAR(50)) AS Code, CAST(PromotionGroupCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(PromotionGroupCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.PromotionGroupCodes},
		{"ProductCollectionGrCodes", `SELECT DISTINCT CAST(ProductCollectionGrCode AS NVARCHAR(50)) AS Code, CAST(ProductCollectionGrCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(ProductCollectionGrCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.ProductCollectionGrCodes},
		{"StorePriceLevelCodes", `SELECT DISTINCT CAST(StorePriceLevelCode AS NVARCHAR(50)) AS Code, CAST(StorePriceLevelCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(StorePriceLevelCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.StorePriceLevelCodes},
		{"PerceptionOfFashionCodes", `SELECT DISTINCT CAST(PerceptionOfFashionCode AS NVARCHAR(50)) AS Code, CAST(PerceptionOfFashionCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(PerceptionOfFashionCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.PerceptionOfFashionCodes},
		{"CommercialRoleCodes", `SELECT DISTINCT CAST(CommercialRoleCode AS NVARCHAR(50)) AS Code, CAST(CommercialRoleCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(CommercialRoleCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.CommercialRoleCodes},
		{"StoreCapacityLevelCodes", `SELECT DISTINCT CAST(StoreCapacityLevelCode AS NVARCHAR(50)) AS Code, CAST(StoreCapacityLevelCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(StoreCapacityLevelCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.StoreCapacityLevelCodes},
		{"CustomsTariffNumbers", `SELECT DISTINCT CAST(CustomsTariffNumberCode AS NVARCHAR(50)) AS Code, CAST(CustomsTariffNumberCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(CustomsTariffNumberCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.CustomsTariffNumbers},
		{"CompanyCodes", `SELECT DISTINCT CAST(CompanyCode AS NVARCHAR(50)) AS Code, CAST(CompanyCode AS NVARCHAR(200)) AS [Description] FROM dbo.cdItem WITH(NOLOCK) WHERE NULLIF(LTRIM(RTRIM(CAST(CompanyCode AS NVARCHAR(200)))), '') IS NOT NULL ORDER BY Code`, &out.CompanyCodes},
	}
	for _, pair := range queryPairs {
		start := time.Now()
		log.Printf("[GetOrderProductionLookupOptions] executing [%s]", pair.Name)
		list, err := fetchOrderProductionLookup(mssql, pair.Name, pair.Query)
		if err != nil {
			return out, err
		}
		log.Printf("[GetOrderProductionLookupOptions] ok [%s] count=%d duration=%s", pair.Name, len(list), time.Since(start))
		*pair.Target = list
	}
	return out, nil
}

// fetchOrderProductionLookup runs one lookup query and returns its rows as
// trimmed Code/Description options. Using defer guarantees the *sql.Rows is
// closed on every exit path (the original closed it manually in three places).
func fetchOrderProductionLookup(mssql *sql.DB, name, query string) ([]models.OrderProductionLookupOption, error) {
	rows, err := mssql.Query(query)
	if err != nil {
		return nil, fmt.Errorf("lookup query failed [%s]: %w", name, err)
	}
	defer rows.Close()
	list := make([]models.OrderProductionLookupOption, 0, 64)
	for rows.Next() {
		var item models.OrderProductionLookupOption
		if err := rows.Scan(&item.Code, &item.Description); err != nil {
			return nil, fmt.Errorf("lookup scan failed [%s]: %w", name, err)
		}
		// Codes/descriptions come back CHAR-padded; normalize before use.
		item.Code = strings.TrimSpace(item.Code)
		item.Description = strings.TrimSpace(item.Description)
		list = append(list, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("lookup rows failed [%s]: %w", name, err)
	}
	return list, nil
}

View File

@@ -0,0 +1,58 @@
package queries
import (
"bssapp-backend/auth"
"bssapp-backend/internal/authz"
"context"
"fmt"
"strings"
)
// resolvePiyasaScopeInClause produces a SQL boolean expression that restricts
// `column` to the caller's permitted piyasa codes. Admins get an always-true
// predicate, callers with no codes an always-false one; otherwise the filter
// is built by authz.BuildMSSQLPiyasaFilterWithCodes. Returns an error when no
// auth claims are present on the context.
func resolvePiyasaScopeInClause(ctx context.Context, column string) (string, error) {
	claims, ok := auth.GetClaimsFromContext(ctx)
	if !ok || claims == nil {
		return "", fmt.Errorf("unauthorized: claims not found")
	}
	// Administrators bypass piyasa scoping entirely.
	if claims.IsAdmin() {
		return "1=1", nil
	}
	scoped := normalizePiyasaCodes(authz.GetPiyasaCodesFromCtx(ctx))
	// A non-admin without any assigned codes must match no rows.
	if len(scoped) == 0 {
		return "1=0", nil
	}
	return authz.BuildMSSQLPiyasaFilterWithCodes(column, scoped), nil
}
// buildPiyasaExistsForCariCode wraps the caller's piyasa scope into an EXISTS
// subquery over CustomerAttributesFilter, matching either the full CurrAccCode
// or its first 8 characters against the given cari-code SQL expression.
// NOTE(review): cariCodeExpr is interpolated into the SQL text — callers must
// pass a column/expression, never untrusted user input.
func buildPiyasaExistsForCariCode(ctx context.Context, cariCodeExpr string) (string, error) {
	scope, err := resolvePiyasaScopeInClause(ctx, "PF.CustomerAtt01")
	if err != nil {
		return "", err
	}
	clause := fmt.Sprintf(`
EXISTS (
SELECT 1
FROM CustomerAttributesFilter PF WITH(NOLOCK)
WHERE (PF.CurrAccCode = %s OR LEFT(PF.CurrAccCode, 8) = LEFT(%s, 8))
AND %s
)`, cariCodeExpr, cariCodeExpr, scope)
	return clause, nil
}
// normalizePiyasaCodes canonicalizes a list of piyasa codes: trims whitespace,
// upper-cases, drops empties, and removes duplicates while preserving the
// first-seen order.
func normalizePiyasaCodes(codes []string) []string {
	seen := make(map[string]struct{}, len(codes))
	result := make([]string, 0, len(codes))
	for _, raw := range codes {
		code := strings.ToUpper(strings.TrimSpace(raw))
		if code == "" {
			continue
		}
		if _, dup := seen[code]; dup {
			continue
		}
		seen[code] = struct{}{}
		result = append(result, code)
	}
	return result
}

View File

@@ -9,12 +9,12 @@ import (
func GetProductList() ([]models.Product, error) {
rows, err := db.MssqlDB.Query(`
SELECT
ProductCode
LTRIM(RTRIM(ProductCode)) AS ProductCode
FROM ProductFilterWithDescription('TR')
WHERE
ProductAtt42 IN ('SERI', 'AKSESUAR')
AND IsBlocked = 0
AND LEN(ProductCode) = 13 -- 🔹 yalnızca 13 karakterlik kodlar
AND LEN(LTRIM(RTRIM(ProductCode))) = 13
ORDER BY ProductCode;
`)
if err != nil {

View File

@@ -0,0 +1,256 @@
package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"strconv"
"strings"
"time"
)
// GetProductPricingList returns one keyset-paginated page of product pricing
// rows: latest USD base price, available stock (inventory minus picking,
// reserve and dispatch-order quantities), last stock-entry date and product
// attribute descriptions, limited to 13-character SERI/AKSESUAR products.
//
// Parameters:
//   - limit: page size; values <= 0 default to 500.
//   - afterProductCode: keyset cursor; only products strictly greater than
//     this code are returned (empty string starts from the beginning).
//
// Transient MSSQL network errors are retried up to 3 times with linear
// backoff, honoring ctx cancellation.
func GetProductPricingList(ctx context.Context, limit int, afterProductCode string) ([]models.ProductPricing, error) {
	if limit <= 0 {
		limit = 500
	}
	afterProductCode = strings.TrimSpace(afterProductCode)
	cursorFilter := ""
	args := make([]any, 0, 1)
	if afterProductCode != "" {
		cursorFilter = "WHERE bp.ProductCode > @p1"
		args = append(args, afterProductCode)
	}
	// limit is an int formatted via strconv, so the TOP(...) concatenation is
	// injection-safe; the cursor value itself is passed as a parameter.
	query := `
WITH base_products AS (
SELECT
LTRIM(RTRIM(ProductCode)) AS ProductCode,
COALESCE(LTRIM(RTRIM(ProductAtt45Desc)), '') AS AskiliYan,
COALESCE(LTRIM(RTRIM(ProductAtt44Desc)), '') AS Kategori,
COALESCE(LTRIM(RTRIM(ProductAtt42Desc)), '') AS UrunIlkGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt01Desc)), '') AS UrunAnaGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt02Desc)), '') AS UrunAltGrubu,
COALESCE(LTRIM(RTRIM(ProductAtt41Desc)), '') AS Icerik,
COALESCE(LTRIM(RTRIM(ProductAtt29Desc)), '') AS Karisim,
COALESCE(LTRIM(RTRIM(ProductAtt10Desc)), '') AS Marka
FROM ProductFilterWithDescription('TR')
WHERE ProductAtt42 IN ('SERI', 'AKSESUAR')
AND IsBlocked = 0
AND LEN(LTRIM(RTRIM(ProductCode))) = 13
),
paged_products AS (
SELECT TOP (` + strconv.Itoa(limit) + `)
bp.ProductCode
FROM base_products bp
` + cursorFilter + `
ORDER BY bp.ProductCode
),
latest_base_price AS (
SELECT
LTRIM(RTRIM(b.ItemCode)) AS ItemCode,
CAST(b.Price AS DECIMAL(18, 2)) AS CostPrice,
CONVERT(VARCHAR(10), b.PriceDate, 23) AS LastPricingDate,
ROW_NUMBER() OVER (
PARTITION BY LTRIM(RTRIM(b.ItemCode))
ORDER BY b.PriceDate DESC, b.LastUpdatedDate DESC
) AS rn
FROM prItemBasePrice b
WHERE b.ItemTypeCode = 1
AND b.BasePriceCode = 1
AND LTRIM(RTRIM(b.CurrencyCode)) = 'USD'
AND EXISTS (
SELECT 1
FROM paged_products pp
WHERE pp.ProductCode = LTRIM(RTRIM(b.ItemCode))
)
),
stock_entry_dates AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
CONVERT(VARCHAR(10), MAX(s.OperationDate), 23) AS StockEntryDate
FROM trStock s WITH(NOLOCK)
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND s.In_Qty1 > 0
AND LTRIM(RTRIM(s.WarehouseCode)) IN (
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
AND EXISTS (
SELECT 1
FROM paged_products pp
WHERE pp.ProductCode = LTRIM(RTRIM(s.ItemCode))
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
stock_base AS (
SELECT
LTRIM(RTRIM(s.ItemCode)) AS ItemCode,
SUM(s.In_Qty1 - s.Out_Qty1) AS InventoryQty1
FROM trStock s WITH(NOLOCK)
WHERE s.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(s.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM paged_products pp
WHERE pp.ProductCode = LTRIM(RTRIM(s.ItemCode))
)
GROUP BY LTRIM(RTRIM(s.ItemCode))
),
pick_base AS (
SELECT
LTRIM(RTRIM(p.ItemCode)) AS ItemCode,
SUM(p.Qty1) AS PickingQty1
FROM PickingStates p
WHERE p.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(p.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM paged_products pp
WHERE pp.ProductCode = LTRIM(RTRIM(p.ItemCode))
)
GROUP BY LTRIM(RTRIM(p.ItemCode))
),
reserve_base AS (
SELECT
LTRIM(RTRIM(r.ItemCode)) AS ItemCode,
SUM(r.Qty1) AS ReserveQty1
FROM ReserveStates r
WHERE r.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(r.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM paged_products pp
WHERE pp.ProductCode = LTRIM(RTRIM(r.ItemCode))
)
GROUP BY LTRIM(RTRIM(r.ItemCode))
),
disp_base AS (
SELECT
LTRIM(RTRIM(d.ItemCode)) AS ItemCode,
SUM(d.Qty1) AS DispOrderQty1
FROM DispOrderStates d
WHERE d.ItemTypeCode = 1
AND LEN(LTRIM(RTRIM(d.ItemCode))) = 13
AND EXISTS (
SELECT 1
FROM paged_products pp
WHERE pp.ProductCode = LTRIM(RTRIM(d.ItemCode))
)
GROUP BY LTRIM(RTRIM(d.ItemCode))
),
stock_totals AS (
SELECT
pp.ProductCode AS ItemCode,
CAST(ROUND(
ISNULL(sb.InventoryQty1, 0)
- ISNULL(pb.PickingQty1, 0)
- ISNULL(rb.ReserveQty1, 0)
- ISNULL(db.DispOrderQty1, 0)
, 2) AS DECIMAL(18, 2)) AS StockQty
FROM paged_products pp
LEFT JOIN stock_base sb
ON sb.ItemCode = pp.ProductCode
LEFT JOIN pick_base pb
ON pb.ItemCode = pp.ProductCode
LEFT JOIN reserve_base rb
ON rb.ItemCode = pp.ProductCode
LEFT JOIN disp_base db
ON db.ItemCode = pp.ProductCode
)
SELECT
bp.ProductCode AS ProductCode,
COALESCE(lp.CostPrice, 0) AS CostPrice,
COALESCE(st.StockQty, 0) AS StockQty,
COALESCE(se.StockEntryDate, '') AS StockEntryDate,
COALESCE(lp.LastPricingDate, '') AS LastPricingDate,
bp.AskiliYan,
bp.Kategori,
bp.UrunIlkGrubu,
bp.UrunAnaGrubu,
bp.UrunAltGrubu,
bp.Icerik,
bp.Karisim,
bp.Marka
FROM paged_products pp
INNER JOIN base_products bp
ON bp.ProductCode = pp.ProductCode
LEFT JOIN latest_base_price lp
ON lp.ItemCode = bp.ProductCode
AND lp.rn = 1
LEFT JOIN stock_entry_dates se
ON se.ItemCode = bp.ProductCode
LEFT JOIN stock_totals st
ON st.ItemCode = bp.ProductCode
ORDER BY bp.ProductCode;
`
	var (
		rows    *sql.Rows
		rowsErr error
	)
	// Retry transient network failures up to 3 times with linear backoff.
	for attempt := 1; attempt <= 3; attempt++ {
		var err error
		rows, err = db.MssqlDB.QueryContext(ctx, query, args...)
		if err == nil {
			rowsErr = nil
			break
		}
		rowsErr = err
		if ctx.Err() != nil || !isTransientMSSQLNetworkError(err) || attempt == 3 {
			break
		}
		wait := time.Duration(attempt*300) * time.Millisecond
		select {
		case <-ctx.Done():
		case <-time.After(wait):
		}
		// BUG FIX: a bare "break" inside select only exits the select, not the
		// retry loop, so cancellation used to trigger one extra query attempt.
		// Check the context after the select and stop the loop explicitly.
		if ctx.Err() != nil {
			break
		}
	}
	if rowsErr != nil {
		return nil, rowsErr
	}
	defer rows.Close()
	var out []models.ProductPricing
	for rows.Next() {
		var item models.ProductPricing
		if err := rows.Scan(
			&item.ProductCode,
			&item.CostPrice,
			&item.StockQty,
			&item.StockEntryDate,
			&item.LastPricingDate,
			&item.AskiliYan,
			&item.Kategori,
			&item.UrunIlkGrubu,
			&item.UrunAnaGrubu,
			&item.UrunAltGrubu,
			&item.Icerik,
			&item.Karisim,
			&item.Marka,
		); err != nil {
			return nil, err
		}
		out = append(out, item)
	}
	// BUG FIX: surface iteration errors; the original dropped rows.Err(), so a
	// mid-stream network failure looked like a successful (truncated) page.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return out, nil
}
// isTransientMSSQLNetworkError reports whether err looks like a transient
// network-level failure that is worth retrying. Detection is by substring
// match on the lowercased error text (the driver does not expose typed errors
// for these conditions).
func isTransientMSSQLNetworkError(err error) bool {
	if err == nil {
		return false
	}
	msg := strings.ToLower(err.Error())
	for _, marker := range []string{
		"i/o timeout",
		"timeout",
		"wsarecv",
		"connection attempt failed",
		"no connection could be made",
		"broken pipe",
		"connection reset",
	} {
		if strings.Contains(msg, marker) {
			return true
		}
	}
	return false
}

View File

@@ -0,0 +1,59 @@
package queries
// GetProductAttributes lists, for item type @p1, every required attribute type
// (cdItemAttributeType.IsRequired = 1 and not blocked) together with each of
// its selectable, non-blocked attribute codes. Turkish ('TR') descriptions are
// used when present, falling back to the raw code. Types without any usable
// code are filtered out by the final WHERE. Within each attribute type the
// placeholder codes '-' and '.' sort first.
const GetProductAttributes = `
;WITH RequiredTypes AS (
SELECT
t.ItemTypeCode,
t.AttributeTypeCode,
ISNULL(NULLIF(td.AttributeTypeDescription, ''), CAST(t.AttributeTypeCode AS NVARCHAR(30))) AS AttributeTypeDescription
FROM dbo.cdItemAttributeType AS t WITH(NOLOCK)
LEFT JOIN dbo.cdItemAttributeTypeDesc AS td WITH(NOLOCK)
ON td.ItemTypeCode = t.ItemTypeCode
AND td.AttributeTypeCode = t.AttributeTypeCode
AND td.LangCode = 'TR'
WHERE t.ItemTypeCode = @p1
AND ISNULL(t.IsBlocked, 0) = 0
AND ISNULL(t.IsRequired, 0) = 1
),
Attr AS (
SELECT
a.ItemTypeCode,
a.AttributeTypeCode,
ISNULL(a.AttributeCode, '') AS AttributeCode,
ISNULL(d.AttributeDescription, ISNULL(a.AttributeCode, '')) AS AttributeDescription
FROM dbo.cdItemAttribute AS a WITH(NOLOCK)
LEFT JOIN dbo.cdItemAttributeDesc AS d WITH(NOLOCK)
ON d.ItemTypeCode = a.ItemTypeCode
AND d.AttributeTypeCode = a.AttributeTypeCode
AND d.AttributeCode = a.AttributeCode
AND d.LangCode = 'TR'
WHERE a.ItemTypeCode = @p1
AND ISNULL(a.IsBlocked, 0) = 0
AND ISNULL(a.AttributeCode, '') <> ''
)
SELECT
rt.ItemTypeCode,
rt.AttributeTypeCode,
rt.AttributeTypeDescription,
a.AttributeCode,
a.AttributeDescription
FROM RequiredTypes AS rt
LEFT JOIN Attr AS a
ON a.ItemTypeCode = rt.ItemTypeCode
AND a.AttributeTypeCode = rt.AttributeTypeCode
WHERE ISNULL(a.AttributeCode, '') <> ''
ORDER BY
rt.AttributeTypeCode,
CASE WHEN a.AttributeCode IN ('-', '.') THEN 0 ELSE 1 END,
a.AttributeCode;
`
// GetProductItemAttributes returns the attribute codes currently assigned to a
// single item in prItemAttribute. Parameters: @p1 = ItemTypeCode,
// @p2 = ItemCode (both sides of the item-code comparison are trimmed).
const GetProductItemAttributes = `
SELECT
a.ItemTypeCode,
a.AttributeTypeCode,
ISNULL(a.AttributeCode, '') AS AttributeCode
FROM dbo.prItemAttribute AS a WITH(NOLOCK)
WHERE a.ItemTypeCode = @p1
AND ISNULL(LTRIM(RTRIM(a.ItemCode)), '') = ISNULL(LTRIM(RTRIM(@p2)), '')
`

View File

@@ -0,0 +1,16 @@
package queries
// GetProductNewColors lists the selectable primary colors for a new product:
// non-blocked cdColor rows whose trimmed code is exactly 3 characters and
// whose ColorCatalogCode1 is 'ÜRÜN'. The description falls back from the 'TR'
// ColorDescription to the hex value to the raw code. @p1 (the product code)
// is only echoed back in the ProductCode column.
const GetProductNewColors = `
SELECT
CAST(@p1 AS NVARCHAR(30)) AS ProductCode,
LTRIM(RTRIM(c.ColorCode)) AS ColorCode,
ISNULL(NULLIF(LTRIM(RTRIM(cd.ColorDescription)), ''), ISNULL(NULLIF(LTRIM(RTRIM(c.ColorHex)), ''), LTRIM(RTRIM(c.ColorCode)))) AS ColorDescription
FROM dbo.cdColor AS c WITH(NOLOCK)
LEFT JOIN dbo.cdColorDesc AS cd WITH(NOLOCK)
ON cd.ColorCode = c.ColorCode
AND cd.LangCode = 'TR'
WHERE ISNULL(c.IsBlocked, 0) = 0
AND LEN(LTRIM(RTRIM(ISNULL(c.ColorCode, '')))) = 3
AND LTRIM(RTRIM(ISNULL(c.ColorCatalogCode1, ''))) = N'ÜRÜN'
ORDER BY LTRIM(RTRIM(c.ColorCode));
`

View File

@@ -0,0 +1,16 @@
package queries
// GetProductNewSecondColors lists the selectable secondary colors (dimension-2
// codes) for a new product: non-blocked cdItemDim2 rows whose trimmed code is
// exactly 3 characters, described via cdColorDesc ('TR') with the raw code as
// fallback. @ProductCode and @ColorCode are only echoed back in the first two
// columns; the option list itself is not filtered by them.
const GetProductNewSecondColors = `
SELECT
LTRIM(RTRIM(@ProductCode)) AS ProductCode,
LTRIM(RTRIM(ISNULL(@ColorCode, ''))) AS ColorCode,
LTRIM(RTRIM(d2.ItemDim2Code)) AS ItemDim2Code,
ISNULL(NULLIF(LTRIM(RTRIM(cd.ColorDescription)), ''), LTRIM(RTRIM(d2.ItemDim2Code))) AS ColorDescription
FROM dbo.cdItemDim2 AS d2 WITH(NOLOCK)
LEFT JOIN dbo.cdColorDesc AS cd WITH(NOLOCK)
ON cd.ColorCode = d2.ItemDim2Code
AND cd.LangCode = 'TR'
WHERE ISNULL(d2.IsBlocked, 0) = 0
AND LEN(LTRIM(RTRIM(ISNULL(d2.ItemDim2Code, '')))) = 3
ORDER BY LTRIM(RTRIM(d2.ItemDim2Code));
`

View File

@@ -1,10 +1,11 @@
package queries
const GetProductSecondColors = `
SELECT DISTINCT
SELECT
Product.ProductCode,
ISNULL(prItemVariant.ColorCode, '') AS ColorCode,
ISNULL(prItemVariant.ItemDim2Code, '') AS ItemDim2Code
ISNULL(prItemVariant.ItemDim2Code, '') AS ItemDim2Code,
ISNULL(ColorDesc.ColorDescription, '') AS ColorDescription
FROM prItemVariant WITH(NOLOCK)
INNER JOIN ProductFilterWithDescription('TR') AS Product
ON prItemVariant.ItemCode = Product.ProductCode
@@ -14,5 +15,10 @@ FROM prItemVariant WITH(NOLOCK)
WHERE Product.ProductCode = @ProductCode
AND prItemVariant.ColorCode = @ColorCode
AND ISNULL(prItemVariant.ItemDim2Code, '') <> ''
GROUP BY Product.ProductCode, prItemVariant.ItemDim2Code, prItemVariant.ColorCode
GROUP BY
Product.ProductCode,
prItemVariant.ItemDim2Code,
prItemVariant.ColorCode,
ColorDesc.ColorDescription
ORDER BY ItemDim2Code
`

View File

@@ -0,0 +1,239 @@
package queries
// GetProductStockQuery returns per-warehouse/variant available inventory for a
// single 13-character product code (@p1): on-hand stock minus picking, reserve
// and dispatch-order quantities, restricted to a fixed warehouse whitelist and
// to rows with positive availability. Each row is enriched with Turkish
// product/color/warehouse descriptions, the product attribute descriptions and
// the most recent base price. STOCK/PICK/RESERVE/DISP are pre-aggregated in
// separate CTEs on the full variant key for performance (comment translated
// from the original Turkish).
// NOTE(review): ProductAtt41Desc is selected twice (as URUN_ICERIGI and as
// KISA_KAR) — one of the two aliases likely should reference a different
// attribute column; confirm against the screen that consumes this query.
const GetProductStockQuery = `
DECLARE @ProductCode NVARCHAR(50) = @p1;
;WITH STOCK AS
(
SELECT
CompanyCode,
OfficeCode,
StoreTypeCode,
StoreCode,
WarehouseCode,
ItemTypeCode,
ItemCode,
ColorCode,
ItemDim1Code,
ItemDim2Code,
ItemDim3Code,
SUM(In_Qty1 - Out_Qty1) AS InventoryQty1
FROM trStock WITH(NOLOCK)
WHERE ItemTypeCode = 1
AND ItemCode = @ProductCode
AND LEN(ItemCode) = 13
AND LEN(@ProductCode) = 13
GROUP BY
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code
),
PICK AS
(
SELECT
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code,
SUM(Qty1) AS PickingQty1
FROM PickingStates
WHERE ItemTypeCode = 1
AND ItemCode = @ProductCode
AND LEN(ItemCode) = 13
AND LEN(@ProductCode) = 13
GROUP BY
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code
),
RESERVE AS
(
SELECT
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code,
SUM(Qty1) AS ReserveQty1
FROM ReserveStates
WHERE ItemTypeCode = 1
AND ItemCode = @ProductCode
AND LEN(ItemCode) = 13
AND LEN(@ProductCode) = 13
GROUP BY
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code
),
DISP AS
(
SELECT
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code,
SUM(Qty1) AS DispOrderQty1
FROM DispOrderStates
WHERE ItemTypeCode = 1
AND ItemCode = @ProductCode
AND LEN(ItemCode) = 13
AND LEN(@ProductCode) = 13
GROUP BY
CompanyCode, OfficeCode, StoreTypeCode, StoreCode, WarehouseCode,
ItemTypeCode, ItemCode, ColorCode,
ItemDim1Code, ItemDim2Code, ItemDim3Code
)
SELECT
S.WarehouseCode AS Depo_Kodu,
W.WarehouseDescription AS Depo_Adi,
bsItemTypeDesc.ItemTypeDescription AS InventoryType,
S.ItemCode AS Urun_Kodu,
P.ProductDescription AS Madde_Aciklamasi,
S.ColorCode AS Renk_Kodu,
C.ColorDescription AS Renk_Aciklamasi,
S.ItemDim1Code AS Beden,
S.ItemDim3Code AS Renk2,
S.ItemDim2Code AS Yaka,
ROUND(
S.InventoryQty1
- ISNULL(PK.PickingQty1,0)
- ISNULL(RS.ReserveQty1,0)
- ISNULL(DP.DispOrderQty1,0),
cdUnitOfMeasure.RoundDigit
) AS Kullanilabilir_Envanter,
P.ProductAtt01Desc AS URUN_ANA_GRUBU,
P.ProductAtt02Desc AS URUN_ALT_GRUBU,
P.ProductAtt10Desc AS MARKA,
P.ProductAtt11Desc AS DR,
P.ProductAtt21Desc AS KALIP,
P.ProductAtt22Desc AS IKINCI_PARCA_KALIP,
P.ProductAtt23Desc AS PACA_GENISLIGI,
P.ProductAtt24Desc AS UCUNCU_PARCA_KALIP,
P.ProductAtt25Desc AS UCUNCU_PARCA_MODEL,
P.ProductAtt26Desc AS BIRINCI_PARCA_KUMAS,
P.ProductAtt27Desc AS IKINCI_PARCA_KUMAS,
P.ProductAtt28Desc AS UCUNCU_PARCA_KUMAS,
P.ProductAtt29Desc AS BIRINCI_PARCA_KARISIM,
P.ProductAtt30Desc AS IKINCI_PARCA_KARISIM,
P.ProductAtt31Desc AS UCUNCU_PARCA_KARISIM,
P.ProductAtt32Desc AS YAKA_TIPI,
P.ProductAtt33Desc AS DUGME,
P.ProductAtt34Desc AS YIRTMAC,
P.ProductAtt35Desc AS SEZON_YILI,
P.ProductAtt36Desc AS MEVSIM,
P.ProductAtt37Desc AS TABAN,
P.ProductAtt38Desc AS BIRINCI_PARCA_FIT,
P.ProductAtt39Desc AS IKINCI_PARCA_FIT,
P.ProductAtt40Desc AS BOS2,
P.ProductAtt41Desc AS URUN_ICERIGI,
P.ProductAtt41Desc AS KISA_KAR,
P.ProductAtt42Desc AS SERI_FASON,
P.ProductAtt43Desc AS STOK_GIRIS_YONTEMI,
P.ProductAtt44Desc AS YETISKIN_GARSON,
P.ProductAtt45Desc AS ASKILI_YAN,
P.ProductAtt46Desc AS BOS3,
prFilteredBasePrice.Price AS Fiyat
FROM STOCK S
LEFT JOIN PICK PK
ON PK.CompanyCode=S.CompanyCode
AND PK.OfficeCode=S.OfficeCode
AND PK.StoreTypeCode=S.StoreTypeCode
AND PK.StoreCode=S.StoreCode
AND PK.WarehouseCode=S.WarehouseCode
AND PK.ItemTypeCode=S.ItemTypeCode
AND PK.ItemCode=S.ItemCode
AND PK.ColorCode=S.ColorCode
AND PK.ItemDim1Code=S.ItemDim1Code
AND PK.ItemDim2Code=S.ItemDim2Code
AND PK.ItemDim3Code=S.ItemDim3Code
LEFT JOIN RESERVE RS
ON RS.CompanyCode=S.CompanyCode
AND RS.OfficeCode=S.OfficeCode
AND RS.StoreTypeCode=S.StoreTypeCode
AND RS.StoreCode=S.StoreCode
AND RS.WarehouseCode=S.WarehouseCode
AND RS.ItemTypeCode=S.ItemTypeCode
AND RS.ItemCode=S.ItemCode
AND RS.ColorCode=S.ColorCode
AND RS.ItemDim1Code=S.ItemDim1Code
AND RS.ItemDim2Code=S.ItemDim2Code
AND RS.ItemDim3Code=S.ItemDim3Code
LEFT JOIN DISP DP
ON DP.CompanyCode=S.CompanyCode
AND DP.OfficeCode=S.OfficeCode
AND DP.StoreTypeCode=S.StoreTypeCode
AND DP.StoreCode=S.StoreCode
AND DP.WarehouseCode=S.WarehouseCode
AND DP.ItemTypeCode=S.ItemTypeCode
AND DP.ItemCode=S.ItemCode
AND DP.ColorCode=S.ColorCode
AND DP.ItemDim1Code=S.ItemDim1Code
AND DP.ItemDim2Code=S.ItemDim2Code
AND DP.ItemDim3Code=S.ItemDim3Code
JOIN cdItem WITH(NOLOCK)
ON S.ItemCode = cdItem.ItemCode
AND S.ItemTypeCode = cdItem.ItemTypeCode
LEFT JOIN cdUnitOfMeasure WITH(NOLOCK)
ON cdItem.UnitOfMeasureCode1 = cdUnitOfMeasure.UnitOfMeasureCode
LEFT JOIN ProductFilterWithDescription('TR') P
ON P.ProductCode = S.ItemCode
LEFT JOIN bsItemTypeDesc WITH(NOLOCK)
ON bsItemTypeDesc.ItemTypeCode = S.ItemTypeCode
AND bsItemTypeDesc.LangCode='TR'
LEFT JOIN cdWarehouseDesc W WITH(NOLOCK)
ON W.WarehouseCode = S.WarehouseCode
AND W.LangCode='TR'
LEFT JOIN cdColorDesc C WITH(NOLOCK)
ON C.ColorCode = S.ColorCode
AND C.LangCode='TR'
LEFT JOIN (
SELECT
ItemCode,
ItemTypeCode,
Price,
ROW_NUMBER() OVER (PARTITION BY ItemCode, ItemTypeCode ORDER BY PriceDate DESC) AS RowNum
FROM prItemBasePrice WITH(NOLOCK)
) prFilteredBasePrice
ON prFilteredBasePrice.ItemCode = S.ItemCode
AND prFilteredBasePrice.ItemTypeCode = S.ItemTypeCode
AND prFilteredBasePrice.RowNum = 1
WHERE
S.ItemTypeCode IN (1)
AND S.ItemCode = @ProductCode
AND LEN(S.ItemCode) = 13
AND LEN(@ProductCode) = 13
AND (
S.InventoryQty1
- ISNULL(PK.PickingQty1,0)
- ISNULL(RS.ReserveQty1,0)
- ISNULL(DP.DispOrderQty1,0)
) > 0
AND cdItem.IsBlocked = 0
AND S.WarehouseCode IN
(
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
);
`

View File

@@ -0,0 +1,688 @@
package queries
// GetProductStockAttributeOptionsQuery:
// Cascading filtre secenekleri. Kategori + Urun Ana Grubu zorunlu akisina uygundur.
const GetProductStockAttributeOptionsQuery = `
DECLARE @Kategori NVARCHAR(100) = NULLIF(LTRIM(RTRIM(@p1)), '');
DECLARE @UrunAnaGrubu NVARCHAR(100) = NULLIF(LTRIM(RTRIM(@p2)), '');
DECLARE @UrunAltGrubuList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p3)), '');
DECLARE @RenkList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p4)), '');
DECLARE @Renk2List NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p5)), '');
DECLARE @UrunIcerigiList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p6)), '');
DECLARE @FitList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p7)), '');
DECLARE @DropList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p8)), '');
DECLARE @BedenList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p9)), '');
CREATE TABLE #AttrBase
(
ProductCode NVARCHAR(50) NOT NULL,
Kategori NVARCHAR(100) NOT NULL,
UrunAnaGrubu NVARCHAR(100) NOT NULL,
UrunAltGrubu NVARCHAR(100) NOT NULL,
UrunIcerigi NVARCHAR(100) NOT NULL,
Fit NVARCHAR(100) NOT NULL,
DropVal NVARCHAR(100) NOT NULL
);
IF OBJECT_ID('dbo.ProductFilterTRCache','U') IS NOT NULL
BEGIN
INSERT INTO #AttrBase (ProductCode, Kategori, UrunAnaGrubu, UrunAltGrubu, UrunIcerigi, Fit, DropVal)
SELECT
ProductCode,
Kategori = LTRIM(RTRIM(ProductAtt44Desc)),
UrunAnaGrubu = LTRIM(RTRIM(ProductAtt01Desc)),
UrunAltGrubu = LTRIM(RTRIM(ProductAtt02Desc)),
UrunIcerigi = LTRIM(RTRIM(ProductAtt41Desc)),
Fit = LTRIM(RTRIM(ProductAtt38Desc)),
DropVal = LTRIM(RTRIM(ProductAtt11Desc))
FROM dbo.ProductFilterTRCache
WHERE LEN(ProductCode) = 13
AND (@Kategori IS NULL OR ProductAtt44Desc = @Kategori)
AND (@UrunAnaGrubu IS NULL OR ProductAtt01Desc = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt02Desc,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt41Desc,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt38Desc,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt11Desc,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0);
END
ELSE
BEGIN
INSERT INTO #AttrBase (ProductCode, Kategori, UrunAnaGrubu, UrunAltGrubu, UrunIcerigi, Fit, DropVal)
SELECT
ProductCode,
Kategori = LTRIM(RTRIM(ProductAtt44Desc)),
UrunAnaGrubu = LTRIM(RTRIM(ProductAtt01Desc)),
UrunAltGrubu = LTRIM(RTRIM(ProductAtt02Desc)),
UrunIcerigi = LTRIM(RTRIM(ProductAtt41Desc)),
Fit = LTRIM(RTRIM(ProductAtt38Desc)),
DropVal = LTRIM(RTRIM(ProductAtt11Desc))
FROM ProductFilterWithDescription('TR')
WHERE LEN(ProductCode) = 13
AND (@Kategori IS NULL OR ProductAtt44Desc = @Kategori)
AND (@UrunAnaGrubu IS NULL OR ProductAtt01Desc = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt02Desc,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt41Desc,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt38Desc,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt11Desc,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0);
END;
IF @Kategori IS NULL OR @UrunAnaGrubu IS NULL
BEGIN
CREATE CLUSTERED INDEX IX_AttrBase_ProductCode ON #AttrBase(ProductCode);
SELECT 'kategori' AS FieldName, X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.Kategori
FROM #AttrBase AB
WHERE AB.Kategori <> ''
) X
UNION ALL
SELECT 'urun_ana_grubu', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.UrunAnaGrubu
FROM #AttrBase AB
WHERE AB.UrunAnaGrubu <> ''
) X;
RETURN;
END;
CREATE CLUSTERED INDEX IX_AttrBase_ProductCode ON #AttrBase(ProductCode);
CREATE NONCLUSTERED INDEX IX_AttrBase_Filter ON #AttrBase(Kategori, UrunAnaGrubu, UrunAltGrubu, UrunIcerigi, Fit, DropVal);
;WITH INV AS
(
SELECT
X.ItemCode,
X.ColorCode,
X.ItemDim1Code,
X.ItemDim2Code,
SUM(X.PickingQty1) AS PickingQty1,
SUM(X.ReserveQty1) AS ReserveQty1,
SUM(X.DispOrderQty1) AS DispOrderQty1,
SUM(X.InventoryQty1) AS InventoryQty1
FROM
(
SELECT
P.ItemCode, P.ColorCode, P.ItemDim1Code, P.ItemDim2Code,
P.Qty1 AS PickingQty1, 0 AS ReserveQty1, 0 AS DispOrderQty1, 0 AS InventoryQty1
FROM PickingStates P
INNER JOIN #AttrBase AB ON AB.ProductCode = P.ItemCode
WHERE P.ItemTypeCode = 1
AND LEN(P.ItemCode) = 13
UNION ALL
SELECT
R.ItemCode, R.ColorCode, R.ItemDim1Code, R.ItemDim2Code,
0, R.Qty1, 0, 0
FROM ReserveStates R
INNER JOIN #AttrBase AB ON AB.ProductCode = R.ItemCode
WHERE R.ItemTypeCode = 1
AND LEN(R.ItemCode) = 13
UNION ALL
SELECT
D.ItemCode, D.ColorCode, D.ItemDim1Code, D.ItemDim2Code,
0, 0, D.Qty1, 0
FROM DispOrderStates D
INNER JOIN #AttrBase AB ON AB.ProductCode = D.ItemCode
WHERE D.ItemTypeCode = 1
AND LEN(D.ItemCode) = 13
UNION ALL
SELECT
T.ItemCode, T.ColorCode, T.ItemDim1Code, T.ItemDim2Code,
0, 0, 0, SUM(T.In_Qty1 - T.Out_Qty1)
FROM trStock T WITH (NOLOCK)
INNER JOIN #AttrBase AB ON AB.ProductCode = T.ItemCode
WHERE T.ItemTypeCode = 1
AND LEN(T.ItemCode) = 13
GROUP BY T.ItemCode, T.ColorCode, T.ItemDim1Code, T.ItemDim2Code
) X
GROUP BY X.ItemCode, X.ColorCode, X.ItemDim1Code, X.ItemDim2Code
),
Avail AS
(
SELECT
I.ItemCode,
Renk = LTRIM(RTRIM(I.ColorCode)),
RenkAciklama = LTRIM(RTRIM(C.ColorDescription)),
Renk2 = LTRIM(RTRIM(I.ItemDim2Code)),
Beden = LTRIM(RTRIM(I.ItemDim1Code)),
Kullanilabilir = (I.InventoryQty1 - I.PickingQty1 - I.ReserveQty1 - I.DispOrderQty1)
FROM INV I
LEFT JOIN cdColorDesc C WITH (NOLOCK)
ON C.ColorCode = I.ColorCode
AND C.LangCode = 'TR'
WHERE (I.InventoryQty1 - I.PickingQty1 - I.ReserveQty1 - I.DispOrderQty1) > 0
)
SELECT 'kategori' AS FieldName, X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.Kategori
FROM #AttrBase AB
WHERE AB.Kategori <> ''
) X
UNION ALL
SELECT 'urun_ana_grubu', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.UrunAnaGrubu
FROM #AttrBase AB
WHERE AB.UrunAnaGrubu <> ''
) X
UNION ALL
SELECT 'urun_alt_grubu', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.UrunAltGrubu
FROM #AttrBase AB
WHERE AB.UrunAltGrubu <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
) X
UNION ALL
SELECT 'renk', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = CASE WHEN A.RenkAciklama <> '' THEN A.RenkAciklama ELSE A.Renk END
FROM Avail A
INNER JOIN #AttrBase AB ON AB.ProductCode = A.ItemCode
WHERE (CASE WHEN A.RenkAciklama <> '' THEN A.RenkAciklama ELSE A.Renk END) <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunAltGrubu,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunIcerigi,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.Fit,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.DropVal,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0)
AND (@Renk2List IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(A.Renk2,'') + NCHAR(31), NCHAR(31) + @Renk2List + NCHAR(31)) > 0)
AND (@BedenList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(A.Beden,'') + NCHAR(31), NCHAR(31) + @BedenList + NCHAR(31)) > 0)
) X
UNION ALL
SELECT 'renk2', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = A.Renk2
FROM Avail A
INNER JOIN #AttrBase AB ON AB.ProductCode = A.ItemCode
WHERE A.Renk2 <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunAltGrubu,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunIcerigi,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.Fit,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.DropVal,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0)
AND (@RenkList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL((CASE WHEN A.RenkAciklama <> '' THEN A.RenkAciklama ELSE A.Renk END),'') + NCHAR(31), NCHAR(31) + @RenkList + NCHAR(31)) > 0)
AND (@BedenList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(A.Beden,'') + NCHAR(31), NCHAR(31) + @BedenList + NCHAR(31)) > 0)
) X
UNION ALL
SELECT 'urun_icerigi', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.UrunIcerigi
FROM #AttrBase AB
WHERE AB.UrunIcerigi <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunAltGrubu,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
) X
UNION ALL
SELECT 'fit', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.Fit
FROM #AttrBase AB
WHERE AB.Fit <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunAltGrubu,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunIcerigi,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
) X
UNION ALL
SELECT 'drop', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = AB.DropVal
FROM #AttrBase AB
WHERE AB.DropVal <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunAltGrubu,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunIcerigi,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.Fit,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
) X
UNION ALL
SELECT 'beden', X.FieldValue
FROM (
SELECT DISTINCT FieldValue = A.Beden
FROM Avail A
INNER JOIN #AttrBase AB ON AB.ProductCode = A.ItemCode
WHERE A.Beden <> ''
AND (@Kategori IS NULL OR AB.Kategori = @Kategori)
AND (@UrunAnaGrubu IS NULL OR AB.UrunAnaGrubu = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunAltGrubu,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.UrunIcerigi,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.Fit,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(AB.DropVal,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0)
AND (@RenkList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL((CASE WHEN A.RenkAciklama <> '' THEN A.RenkAciklama ELSE A.Renk END),'') + NCHAR(31), NCHAR(31) + @RenkList + NCHAR(31)) > 0)
AND (@Renk2List IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(A.Renk2,'') + NCHAR(31), NCHAR(31) + @Renk2List + NCHAR(31)) > 0)
) X
OPTION (RECOMPILE);
`
// GetProductStockQueryByAttributes returns stock-detail rows for products
// matching the given attribute filters. Parameters @p1..@p9 are: category,
// main group, sub-group list, color list, color2 list, content list, fit
// list, drop list, size list; list parameters are NCHAR(31)-delimited.
// NOTE(review): the filter CTE matches @Renk2List against ItemDim2Code,
// but the final SELECT aliases ItemDim3Code as Renk2 (and ItemDim2Code as
// Yaka) — confirm which dimension "Renk2" is meant to be.
const GetProductStockQueryByAttributes = `
DECLARE @Kategori NVARCHAR(100) = NULLIF(LTRIM(RTRIM(@p1)), '');
DECLARE @UrunAnaGrubu NVARCHAR(100) = NULLIF(LTRIM(RTRIM(@p2)), '');
DECLARE @UrunAltGrubuList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p3)), '');
DECLARE @RenkList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p4)), '');
DECLARE @Renk2List NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p5)), '');
DECLARE @UrunIcerigiList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p6)), '');
DECLARE @FitList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p7)), '');
DECLARE @DropList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p8)), '');
DECLARE @BedenList NVARCHAR(MAX) = NULLIF(LTRIM(RTRIM(@p9)), '');
CREATE TABLE #AttrFiltered
(
ProductCode NVARCHAR(50) NOT NULL,
ProductDescription NVARCHAR(255) NULL,
ProductAtt01Desc NVARCHAR(255) NULL,
ProductAtt02Desc NVARCHAR(255) NULL,
ProductAtt10Desc NVARCHAR(255) NULL,
ProductAtt11Desc NVARCHAR(255) NULL,
ProductAtt21Desc NVARCHAR(255) NULL,
ProductAtt22Desc NVARCHAR(255) NULL,
ProductAtt23Desc NVARCHAR(255) NULL,
ProductAtt24Desc NVARCHAR(255) NULL,
ProductAtt25Desc NVARCHAR(255) NULL,
ProductAtt26Desc NVARCHAR(255) NULL,
ProductAtt27Desc NVARCHAR(255) NULL,
ProductAtt28Desc NVARCHAR(255) NULL,
ProductAtt29Desc NVARCHAR(255) NULL,
ProductAtt30Desc NVARCHAR(255) NULL,
ProductAtt31Desc NVARCHAR(255) NULL,
ProductAtt32Desc NVARCHAR(255) NULL,
ProductAtt33Desc NVARCHAR(255) NULL,
ProductAtt34Desc NVARCHAR(255) NULL,
ProductAtt35Desc NVARCHAR(255) NULL,
ProductAtt36Desc NVARCHAR(255) NULL,
ProductAtt37Desc NVARCHAR(255) NULL,
ProductAtt38Desc NVARCHAR(255) NULL,
ProductAtt39Desc NVARCHAR(255) NULL,
ProductAtt40Desc NVARCHAR(255) NULL,
ProductAtt41Desc NVARCHAR(255) NULL,
ProductAtt42Desc NVARCHAR(255) NULL,
ProductAtt43Desc NVARCHAR(255) NULL,
ProductAtt44Desc NVARCHAR(255) NULL,
ProductAtt45Desc NVARCHAR(255) NULL,
ProductAtt46Desc NVARCHAR(255) NULL
);
IF OBJECT_ID('dbo.ProductFilterTRCache','U') IS NOT NULL
BEGIN
INSERT INTO #AttrFiltered
(
ProductCode,
ProductDescription,
ProductAtt01Desc,
ProductAtt02Desc,
ProductAtt10Desc,
ProductAtt11Desc,
ProductAtt21Desc,
ProductAtt22Desc,
ProductAtt23Desc,
ProductAtt24Desc,
ProductAtt25Desc,
ProductAtt26Desc,
ProductAtt27Desc,
ProductAtt28Desc,
ProductAtt29Desc,
ProductAtt30Desc,
ProductAtt31Desc,
ProductAtt32Desc,
ProductAtt33Desc,
ProductAtt34Desc,
ProductAtt35Desc,
ProductAtt36Desc,
ProductAtt37Desc,
ProductAtt38Desc,
ProductAtt39Desc,
ProductAtt40Desc,
ProductAtt41Desc,
ProductAtt42Desc,
ProductAtt43Desc,
ProductAtt44Desc,
ProductAtt45Desc,
ProductAtt46Desc
)
SELECT
C.ProductCode,
C.ProductDescription,
C.ProductAtt01Desc,
C.ProductAtt02Desc,
ISNULL(PF.ProductAtt10Desc, '') AS ProductAtt10Desc,
C.ProductAtt11Desc,
ISNULL(PF.ProductAtt21Desc, '') AS ProductAtt21Desc,
ISNULL(PF.ProductAtt22Desc, '') AS ProductAtt22Desc,
ISNULL(PF.ProductAtt23Desc, '') AS ProductAtt23Desc,
ISNULL(PF.ProductAtt24Desc, '') AS ProductAtt24Desc,
ISNULL(PF.ProductAtt25Desc, '') AS ProductAtt25Desc,
ISNULL(PF.ProductAtt26Desc, '') AS ProductAtt26Desc,
ISNULL(PF.ProductAtt27Desc, '') AS ProductAtt27Desc,
ISNULL(PF.ProductAtt28Desc, '') AS ProductAtt28Desc,
ISNULL(PF.ProductAtt29Desc, '') AS ProductAtt29Desc,
ISNULL(PF.ProductAtt30Desc, '') AS ProductAtt30Desc,
ISNULL(PF.ProductAtt31Desc, '') AS ProductAtt31Desc,
ISNULL(PF.ProductAtt32Desc, '') AS ProductAtt32Desc,
ISNULL(PF.ProductAtt33Desc, '') AS ProductAtt33Desc,
ISNULL(PF.ProductAtt34Desc, '') AS ProductAtt34Desc,
ISNULL(PF.ProductAtt35Desc, '') AS ProductAtt35Desc,
ISNULL(PF.ProductAtt36Desc, '') AS ProductAtt36Desc,
ISNULL(PF.ProductAtt37Desc, '') AS ProductAtt37Desc,
C.ProductAtt38Desc,
ISNULL(PF.ProductAtt39Desc, '') AS ProductAtt39Desc,
ISNULL(PF.ProductAtt40Desc, '') AS ProductAtt40Desc,
C.ProductAtt41Desc,
ISNULL(PF.ProductAtt42Desc, '') AS ProductAtt42Desc,
ISNULL(PF.ProductAtt43Desc, '') AS ProductAtt43Desc,
C.ProductAtt44Desc,
ISNULL(PF.ProductAtt45Desc, '') AS ProductAtt45Desc,
ISNULL(PF.ProductAtt46Desc, '') AS ProductAtt46Desc
FROM dbo.ProductFilterTRCache C
LEFT JOIN ProductFilterWithDescription('TR') PF ON PF.ProductCode = C.ProductCode
WHERE LEN(C.ProductCode) = 13
AND (@Kategori IS NULL OR C.ProductAtt44Desc = @Kategori)
AND (@UrunAnaGrubu IS NULL OR C.ProductAtt01Desc = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(C.ProductAtt02Desc,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(C.ProductAtt41Desc,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(C.ProductAtt38Desc,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(C.ProductAtt11Desc,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0);
END
ELSE
BEGIN
INSERT INTO #AttrFiltered
(
ProductCode,
ProductDescription,
ProductAtt01Desc,
ProductAtt02Desc,
ProductAtt10Desc,
ProductAtt11Desc,
ProductAtt21Desc,
ProductAtt22Desc,
ProductAtt23Desc,
ProductAtt24Desc,
ProductAtt25Desc,
ProductAtt26Desc,
ProductAtt27Desc,
ProductAtt28Desc,
ProductAtt29Desc,
ProductAtt30Desc,
ProductAtt31Desc,
ProductAtt32Desc,
ProductAtt33Desc,
ProductAtt34Desc,
ProductAtt35Desc,
ProductAtt36Desc,
ProductAtt37Desc,
ProductAtt38Desc,
ProductAtt39Desc,
ProductAtt40Desc,
ProductAtt41Desc,
ProductAtt42Desc,
ProductAtt43Desc,
ProductAtt44Desc,
ProductAtt45Desc,
ProductAtt46Desc
)
SELECT
ProductCode,
ProductDescription,
ProductAtt01Desc,
ProductAtt02Desc,
ProductAtt10Desc,
ProductAtt11Desc,
ProductAtt21Desc,
ProductAtt22Desc,
ProductAtt23Desc,
ProductAtt24Desc,
ProductAtt25Desc,
ProductAtt26Desc,
ProductAtt27Desc,
ProductAtt28Desc,
ProductAtt29Desc,
ProductAtt30Desc,
ProductAtt31Desc,
ProductAtt32Desc,
ProductAtt33Desc,
ProductAtt34Desc,
ProductAtt35Desc,
ProductAtt36Desc,
ProductAtt37Desc,
ProductAtt38Desc,
ProductAtt39Desc,
ProductAtt40Desc,
ProductAtt41Desc,
ProductAtt42Desc,
ProductAtt43Desc,
ProductAtt44Desc,
ProductAtt45Desc,
ProductAtt46Desc
FROM ProductFilterWithDescription('TR')
WHERE LEN(ProductCode) = 13
AND (@Kategori IS NULL OR ProductAtt44Desc = @Kategori)
AND (@UrunAnaGrubu IS NULL OR ProductAtt01Desc = @UrunAnaGrubu)
AND (@UrunAltGrubuList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt02Desc,'') + NCHAR(31), NCHAR(31) + @UrunAltGrubuList + NCHAR(31)) > 0)
AND (@UrunIcerigiList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt41Desc,'') + NCHAR(31), NCHAR(31) + @UrunIcerigiList + NCHAR(31)) > 0)
AND (@FitList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt38Desc,'') + NCHAR(31), NCHAR(31) + @FitList + NCHAR(31)) > 0)
AND (@DropList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(ProductAtt11Desc,'') + NCHAR(31), NCHAR(31) + @DropList + NCHAR(31)) > 0);
END;
CREATE CLUSTERED INDEX IX_AttrFiltered_ProductCode ON #AttrFiltered(ProductCode);
;WITH
INV AS
(
SELECT
X.CompanyCode,
X.OfficeCode,
X.StoreTypeCode,
X.StoreCode,
X.WarehouseCode,
X.ItemTypeCode,
X.ItemCode,
X.ColorCode,
X.ItemDim1Code,
X.ItemDim2Code,
X.ItemDim3Code,
SUM(X.PickingQty1) AS PickingQty1,
SUM(X.ReserveQty1) AS ReserveQty1,
SUM(X.DispOrderQty1) AS DispOrderQty1,
SUM(X.InventoryQty1) AS InventoryQty1
FROM
(
SELECT
P.CompanyCode, P.OfficeCode, P.StoreTypeCode, P.StoreCode, P.WarehouseCode,
P.ItemTypeCode, P.ItemCode, P.ColorCode, P.ItemDim1Code, P.ItemDim2Code, P.ItemDim3Code,
P.Qty1 AS PickingQty1, 0 AS ReserveQty1, 0 AS DispOrderQty1, 0 AS InventoryQty1
FROM PickingStates P
INNER JOIN #AttrFiltered AF ON AF.ProductCode = P.ItemCode
WHERE P.ItemTypeCode = 1
AND LEN(P.ItemCode) = 13
AND P.WarehouseCode IN
(
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
UNION ALL
SELECT
R.CompanyCode, R.OfficeCode, R.StoreTypeCode, R.StoreCode, R.WarehouseCode,
R.ItemTypeCode, R.ItemCode, R.ColorCode, R.ItemDim1Code, R.ItemDim2Code, R.ItemDim3Code,
0, R.Qty1, 0, 0
FROM ReserveStates R
INNER JOIN #AttrFiltered AF ON AF.ProductCode = R.ItemCode
WHERE R.ItemTypeCode = 1
AND LEN(R.ItemCode) = 13
AND R.WarehouseCode IN
(
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
UNION ALL
SELECT
D.CompanyCode, D.OfficeCode, D.StoreTypeCode, D.StoreCode, D.WarehouseCode,
D.ItemTypeCode, D.ItemCode, D.ColorCode, D.ItemDim1Code, D.ItemDim2Code, D.ItemDim3Code,
0, 0, D.Qty1, 0
FROM DispOrderStates D
INNER JOIN #AttrFiltered AF ON AF.ProductCode = D.ItemCode
WHERE D.ItemTypeCode = 1
AND LEN(D.ItemCode) = 13
AND D.WarehouseCode IN
(
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
UNION ALL
SELECT
T.CompanyCode, T.OfficeCode, T.StoreTypeCode, T.StoreCode, T.WarehouseCode,
T.ItemTypeCode, T.ItemCode, T.ColorCode, T.ItemDim1Code, T.ItemDim2Code, T.ItemDim3Code,
0, 0, 0, SUM(T.In_Qty1 - T.Out_Qty1)
FROM trStock T WITH (NOLOCK)
INNER JOIN #AttrFiltered AF ON AF.ProductCode = T.ItemCode
WHERE T.ItemTypeCode = 1
AND LEN(T.ItemCode) = 13
AND T.WarehouseCode IN
(
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
GROUP BY
T.CompanyCode, T.OfficeCode, T.StoreTypeCode, T.StoreCode, T.WarehouseCode,
T.ItemTypeCode, T.ItemCode, T.ColorCode, T.ItemDim1Code, T.ItemDim2Code, T.ItemDim3Code
) X
GROUP BY
X.CompanyCode, X.OfficeCode, X.StoreTypeCode, X.StoreCode, X.WarehouseCode,
X.ItemTypeCode, X.ItemCode, X.ColorCode, X.ItemDim1Code, X.ItemDim2Code, X.ItemDim3Code
),
Avail AS
(
SELECT
I.CompanyCode,
I.OfficeCode,
I.StoreTypeCode,
I.StoreCode,
I.WarehouseCode,
I.ItemTypeCode,
I.ItemCode,
I.ColorCode,
I.ItemDim1Code,
I.ItemDim2Code,
I.ItemDim3Code,
Kullanilabilir = ROUND(I.InventoryQty1 - I.PickingQty1 - I.ReserveQty1 - I.DispOrderQty1, U.RoundDigit),
RenkAciklama = LTRIM(RTRIM(C.ColorDescription))
FROM INV I
JOIN cdItem CI WITH (NOLOCK)
ON CI.ItemTypeCode = I.ItemTypeCode
AND CI.ItemCode = I.ItemCode
LEFT JOIN cdUnitOfMeasure U WITH (NOLOCK)
ON U.UnitOfMeasureCode = CI.UnitOfMeasureCode1
LEFT JOIN cdColorDesc C WITH (NOLOCK)
ON C.ColorCode = I.ColorCode
AND C.LangCode = 'TR'
WHERE I.ItemTypeCode = 1
AND LEN(I.ItemCode) = 13
AND (I.InventoryQty1 - I.PickingQty1 - I.ReserveQty1 - I.DispOrderQty1) > 0
AND CI.IsBlocked = 0
AND I.WarehouseCode IN
(
'1-0-14','1-0-10','1-0-8','1-2-5','1-2-4','1-0-12','100','1-0-28',
'1-0-24','1-2-6','1-1-14','1-0-2','1-0-52','1-1-2','1-0-21','1-1-3',
'1-0-33','101','1-014','1-0-49','1-0-36'
)
),
Grouped AS
(
SELECT
A.ItemCode,
A.ColorCode,
A.ItemDim2Code
FROM Avail A
INNER JOIN #AttrFiltered AF ON AF.ProductCode = A.ItemCode
WHERE (@RenkList IS NULL OR CHARINDEX(NCHAR(31) + ISNULL((CASE WHEN A.RenkAciklama <> '' THEN A.RenkAciklama ELSE A.ColorCode END),'') + NCHAR(31), NCHAR(31) + @RenkList + NCHAR(31)) > 0)
AND (@Renk2List IS NULL OR CHARINDEX(NCHAR(31) + ISNULL(A.ItemDim2Code,'') + NCHAR(31), NCHAR(31) + @Renk2List + NCHAR(31)) > 0)
AND (
@BedenList IS NULL
OR EXISTS (
SELECT 1
FROM Avail AB
WHERE AB.ItemCode = A.ItemCode
AND AB.ColorCode = A.ColorCode
AND ISNULL(AB.ItemDim2Code, '') = ISNULL(A.ItemDim2Code, '')
AND CHARINDEX(NCHAR(31) + ISNULL(AB.ItemDim1Code,'') + NCHAR(31), NCHAR(31) + @BedenList + NCHAR(31)) > 0
)
)
GROUP BY A.ItemCode, A.ColorCode, A.ItemDim2Code
)
SELECT
A.WarehouseCode AS Depo_Kodu,
W.WarehouseDescription AS Depo_Adi,
IT.ItemTypeDescription AS InventoryType,
A.ItemCode AS Urun_Kodu,
AF.ProductDescription AS Madde_Aciklamasi,
A.ColorCode AS Renk_Kodu,
A.RenkAciklama AS Renk_Aciklamasi,
A.ItemDim1Code AS Beden,
A.ItemDim3Code AS Renk2,
A.ItemDim2Code AS Yaka,
A.Kullanilabilir AS Kullanilabilir_Envanter,
AF.ProductAtt01Desc AS URUN_ANA_GRUBU,
AF.ProductAtt02Desc AS URUN_ALT_GRUBU,
AF.ProductAtt10Desc AS MARKA,
AF.ProductAtt11Desc AS DR,
AF.ProductAtt21Desc AS KALIP,
AF.ProductAtt22Desc AS IKINCI_PARCA_KALIP,
AF.ProductAtt23Desc AS PACA_GENISLIGI,
AF.ProductAtt24Desc AS UCUNCU_PARCA_KALIP,
AF.ProductAtt25Desc AS UCUNCU_PARCA_MODEL,
AF.ProductAtt26Desc AS BIRINCI_PARCA_KUMAS,
AF.ProductAtt27Desc AS IKINCI_PARCA_KUMAS,
AF.ProductAtt28Desc AS UCUNCU_PARCA_KUMAS,
AF.ProductAtt29Desc AS BIRINCI_PARCA_KARISIM,
AF.ProductAtt30Desc AS IKINCI_PARCA_KARISIM,
AF.ProductAtt31Desc AS UCUNCU_PARCA_KARISIM,
AF.ProductAtt32Desc AS YAKA_TIPI,
AF.ProductAtt33Desc AS DUGME,
AF.ProductAtt34Desc AS YIRTMAC,
AF.ProductAtt35Desc AS SEZON_YILI,
AF.ProductAtt36Desc AS MEVSIM,
AF.ProductAtt37Desc AS TABAN,
AF.ProductAtt38Desc AS BIRINCI_PARCA_FIT,
AF.ProductAtt39Desc AS IKINCI_PARCA_FIT,
AF.ProductAtt40Desc AS BOS2,
AF.ProductAtt41Desc AS URUN_ICERIGI,
AF.ProductAtt42Desc AS SERI_FASON,
AF.ProductAtt43Desc AS STOK_GIRIS_YONTEMI,
AF.ProductAtt44Desc AS YETISKIN_GARSON,
AF.ProductAtt45Desc AS ASKILI_YAN,
AF.ProductAtt46Desc AS BOS3,
P.Price AS Fiyat
FROM Avail A
INNER JOIN Grouped G
ON G.ItemCode = A.ItemCode
AND G.ColorCode = A.ColorCode
AND ISNULL(G.ItemDim2Code, '') = ISNULL(A.ItemDim2Code, '')
INNER JOIN #AttrFiltered AF
ON AF.ProductCode = A.ItemCode
LEFT JOIN cdWarehouseDesc W WITH (NOLOCK)
ON W.WarehouseCode = A.WarehouseCode
AND W.LangCode = 'TR'
LEFT JOIN bsItemTypeDesc IT WITH (NOLOCK)
ON IT.ItemTypeCode = A.ItemTypeCode
AND IT.LangCode = 'TR'
OUTER APPLY (
SELECT TOP 1 Price
FROM prItemBasePrice PB WITH (NOLOCK)
WHERE PB.ItemTypeCode = 1
AND PB.ItemCode = A.ItemCode
AND LEN(PB.ItemCode) = 13
ORDER BY PB.PriceDate DESC
) P
OPTION (RECOMPILE);
`

View File

@@ -0,0 +1,267 @@
package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"math"
"strconv"
"strings"
"time"
)
// GetStatementAging executes dbo.SP_FIFO_MATCH_FINAL for one master account
// code and returns the procedure's result set as generic row maps. Each row
// is then enriched with:
//   - CariDetay: description resolved for the row's Cari8 code
//   - UsdTutar: EslesenTutar converted to USD (rounded to 2 decimals)
//   - CurrencyTryRate / UsdTryRate / CurrencyUsdRate: rates used (6 decimals)
//
// params.EndDate is mandatory; params.Parislemler ("2"/"3") selects which
// transaction types the procedure matches (see resolveUseTypes).
func GetStatementAging(params models.StatementAgingParams) ([]map[string]interface{}, error) {
	accountCode := normalizeMasterAccountCode(params.AccountCode)
	if strings.TrimSpace(params.EndDate) == "" {
		return nil, fmt.Errorf("enddate is required")
	}
	useType2, useType3 := resolveUseTypes(params.Parislemler)
	// Nearest-dated TRY rate per currency; USD/TRY falls back to 1 so the
	// divisions below never hit zero.
	rateMap, err := loadNearestTryRates(context.Background())
	if err != nil {
		return nil, err
	}
	usdTry := rateMap["USD"]
	if usdTry <= 0 {
		usdTry = 1
	}
	rows, err := db.MssqlDB.Query(`
EXEC dbo.SP_FIFO_MATCH_FINAL
@Cari8 = @Cari8,
@SonTarih = @SonTarih,
@UseType2 = @UseType2,
@UseType3 = @UseType3;
`,
		sql.Named("Cari8", accountCode),
		sql.Named("SonTarih", params.EndDate),
		sql.Named("UseType2", useType2),
		sql.Named("UseType3", useType3),
	)
	if err != nil {
		return nil, fmt.Errorf("SP_FIFO_MATCH_FINAL query error: %w", err)
	}
	defer rows.Close()
	columns, err := rows.Columns()
	if err != nil {
		return nil, fmt.Errorf("columns read error: %w", err)
	}
	result := make([]map[string]interface{}, 0, 2048)
	// Distinct Cari8 codes seen in the result; used for one batched
	// description lookup after the scan loop instead of a query per row.
	cari8Set := make(map[string]struct{})
	for rows.Next() {
		// Generic scan: the procedure's column list is not fixed here, so
		// every column is read through an interface{} slot.
		values := make([]interface{}, len(columns))
		scanArgs := make([]interface{}, len(columns))
		for i := range values {
			scanArgs[i] = &values[i]
		}
		if err := rows.Scan(scanArgs...); err != nil {
			return nil, fmt.Errorf("row scan error: %w", err)
		}
		row := make(map[string]interface{}, len(columns))
		for i, col := range columns {
			switch v := values[i].(type) {
			case nil:
				row[col] = nil
			case []byte:
				// The driver delivers text/decimal columns as []byte.
				row[col] = string(v)
			case time.Time:
				// Normalize dates to ISO yyyy-mm-dd strings.
				row[col] = v.Format("2006-01-02")
			default:
				row[col] = v
			}
		}
		cari8 := strings.TrimSpace(asString(row["Cari8"]))
		if cari8 != "" {
			cari8Set[cari8] = struct{}{}
		}
		result = append(result, row)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("rows error: %w", err)
	}
	cariDetailMap, err := loadAgingMasterCariDetailMap(context.Background(), cari8Set)
	if err != nil {
		return nil, err
	}
	// Second pass: attach descriptions and currency conversions per row.
	for i := range result {
		row := result[i]
		cari8 := strings.TrimSpace(asString(row["Cari8"]))
		curr := strings.ToUpper(strings.TrimSpace(asString(row["DocCurrencyCode"])))
		if curr == "" {
			curr = "TRY"
		}
		tutar := asFloat64(row["EslesenTutar"])
		usdTutar := toUSD(tutar, curr, usdTry, rateMap)
		currTry := rateMap[curr]
		// Cross rate USD -> document currency; 0 when either leg is unknown.
		usdToCurr := 0.0
		if currTry > 0 && usdTry > 0 {
			usdToCurr = usdTry / currTry
		}
		row["CariDetay"] = cariDetailMap[cari8]
		row["UsdTutar"] = round2(usdTutar)
		row["CurrencyTryRate"] = round6(currTry)
		row["UsdTryRate"] = round6(usdTry)
		row["CurrencyUsdRate"] = round6(usdToCurr)
	}
	return result, nil
}
// resolveUseTypes maps the requested transaction-type filter values
// ("2" and/or "3", whitespace tolerated) onto the @UseType2/@UseType3
// flags passed to SP_FIFO_MATCH_FINAL. When nothing recognizable is
// selected (including an empty slice) it returns the default (1, 0).
func resolveUseTypes(parislemler []string) (int, int) {
	var use2, use3 bool
	for _, raw := range parislemler {
		switch strings.TrimSpace(raw) {
		case "2":
			use2 = true
		case "3":
			use3 = true
		}
	}
	if !use2 && !use3 {
		return 1, 0
	}
	t2, t3 := 0, 0
	if use2 {
		t2 = 1
	}
	if use3 {
		t3 = 1
	}
	return t2, t3
}
// loadAgingMasterCariDetailMap resolves each 8-character master account
// code in cari8Set to a display description (cdCurrAccDesc, LangCode 'TR').
// For each master code the lexically first matching cdCurrAcc row with
// CurrAccTypeCode 1 or 3 is chosen (ROW_NUMBER ... rn = 1). Missing
// descriptions come back as empty strings. An empty set returns an empty
// map without querying.
//
// NOTE(review): the IN (...) list is interpolated via quotedInList rather
// than parameterized; this is only safe if quotedInList escapes quotes in
// the codes — confirm, or switch to sql.Named parameters.
func loadAgingMasterCariDetailMap(ctx context.Context, cari8Set map[string]struct{}) (map[string]string, error) {
	if len(cari8Set) == 0 {
		return map[string]string{}, nil
	}
	query := fmt.Sprintf(`
WITH BaseCari AS (
SELECT
CurrAccCode,
CurrAccTypeCode,
MasterCari = LEFT(CurrAccCode, 8),
rn = ROW_NUMBER() OVER (
PARTITION BY LEFT(CurrAccCode, 8)
ORDER BY CurrAccCode
)
FROM cdCurrAcc WITH (NOLOCK)
WHERE CurrAccTypeCode IN (1,3)
AND LEFT(CurrAccCode, 8) IN (%s)
)
SELECT
b.MasterCari,
CariDetay = ISNULL(d.CurrAccDescription, '')
FROM BaseCari b
LEFT JOIN cdCurrAccDesc d WITH (NOLOCK)
ON d.CurrAccTypeCode = b.CurrAccTypeCode
AND d.CurrAccCode = b.CurrAccCode
AND d.LangCode = 'TR'
WHERE b.rn = 1;
`, quotedInList(cari8Set))
	rows, err := db.MssqlDB.QueryContext(ctx, query)
	if err != nil {
		return nil, fmt.Errorf("aging cari detail query error: %w", err)
	}
	defer rows.Close()
	out := make(map[string]string, len(cari8Set))
	for rows.Next() {
		var cari8 string
		var detail sql.NullString
		if err := rows.Scan(&cari8, &detail); err != nil {
			return nil, err
		}
		out[strings.TrimSpace(cari8)] = strings.TrimSpace(detail.String)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return out, nil
}
// asString renders an arbitrary scanned SQL value as a string: nil becomes
// "", []byte is interpreted as UTF-8 text, strings pass through, and any
// other type is formatted with fmt.Sprint.
func asString(v interface{}) string {
	if v == nil {
		return ""
	}
	if s, ok := v.(string); ok {
		return s
	}
	if b, ok := v.([]byte); ok {
		return string(b)
	}
	return fmt.Sprint(v)
}
// asFloat64 coerces an arbitrary scanned SQL value into a float64.
// Numeric Go types convert directly; strings and byte slices are parsed
// via parseNumberString; any other non-nil value is formatted with
// fmt.Sprint and then parsed. nil yields 0.
func asFloat64(v interface{}) float64 {
	switch x := v.(type) {
	case nil:
		return 0
	case float64:
		return x
	case float32:
		return float64(x)
	case int:
		return float64(x)
	case int32:
		return float64(x)
	case int64:
		return float64(x)
	case []byte:
		return parseNumberString(string(x))
	case string:
		return parseNumberString(x)
	}
	// Last resort: round-trip through a textual representation.
	return parseNumberString(fmt.Sprint(v))
}
// parseNumberString parses a decimal string that may use either the
// Turkish convention ("1.234,56") or the US convention ("1,234.56").
// When both separators appear, whichever occurs last is taken as the
// decimal mark and the other is stripped as a thousands separator; a
// lone comma is treated as the decimal mark. Blank or unparseable input
// yields 0.
func parseNumberString(s string) float64 {
	s = strings.TrimSpace(s)
	if s == "" {
		return 0
	}
	lastComma := strings.LastIndex(s, ",")
	lastDot := strings.LastIndex(s, ".")
	switch {
	case lastComma >= 0 && lastDot >= 0 && lastComma > lastDot:
		// e.g. "1.234,56": dots group thousands, comma is the decimal mark.
		s = strings.Replace(strings.ReplaceAll(s, ".", ""), ",", ".", 1)
	case lastComma >= 0 && lastDot >= 0:
		// e.g. "1,234.56": commas group thousands, dot is the decimal mark.
		s = strings.ReplaceAll(s, ",", "")
	case lastComma >= 0:
		// Comma only: treat it as the decimal mark.
		s = strings.Replace(s, ",", ".", 1)
	}
	if n, err := strconv.ParseFloat(s, 64); err == nil {
		return n
	}
	return 0
}
// round2 rounds v to two decimal places (half away from zero, as math.Round).
func round2(v float64) float64 {
	const scale = 100
	return math.Round(v*scale) / scale
}
// round6 rounds v to six decimal places (used for FX rates).
func round6(v float64) float64 {
	const scale = 1e6
	return math.Round(v*scale) / scale
}

View File

@@ -0,0 +1,272 @@
package queries
import (
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"log"
"math"
"sort"
"strconv"
"strings"
"time"
"bssapp-backend/db"
)
// GetStatementAgingBalanceList builds the customer balance list from the
// CARI_BAKIYE_GUN_CACHE lines. Per (account type, account, currency,
// company) key it accumulates the 12/13 balance buckets (original, TRY
// and USD amounts) and computes due-day averages weighted by the absolute
// balance of each line. Metadata (channel, market, representative,
// geography, risk) is merged from the per-account and master-account
// lookups, with master values taking precedence when non-blank; lookup
// failures are logged and degrade to empty metadata rather than aborting.
// The result is sorted by master code, account code, then currency.
//
// NOTE(review): selectedDate is computed but only discarded via
// `_ = selectedDate` — either wire it into the query or drop it.
func GetStatementAgingBalanceList(ctx context.Context, params models.CustomerBalanceListParams) ([]models.CustomerBalanceListRow, error) {
	selectedDate := strings.TrimSpace(params.SelectedDate)
	if selectedDate == "" {
		selectedDate = time.Now().Format("2006-01-02")
	}
	lines, err := loadAgingBalanceLines(ctx, strings.TrimSpace(params.CariSearch))
	if err != nil {
		return nil, err
	}
	// Metadata lookups are best-effort: on failure we log and continue with
	// empty maps so the balance figures still come through.
	metaMap, err := loadCariMetaMap(ctx, lines)
	if err != nil {
		log.Printf("statement_aging_balance: cari meta query failed, fallback without meta: %v", err)
		metaMap = map[string]cariMeta{}
	}
	masterMetaMap, err := loadMasterCariMetaMap(ctx, lines)
	if err != nil {
		log.Printf("statement_aging_balance: master cari meta query failed, fallback without master meta: %v", err)
		masterMetaMap = map[string]masterCariMeta{}
	}
	companyMap, err := loadCompanyMap(ctx)
	if err != nil {
		return nil, err
	}
	glMap, err := loadGLAccountMap(ctx, lines)
	if err != nil {
		return nil, err
	}
	rateMap, err := loadNearestTryRates(ctx)
	if err != nil {
		return nil, err
	}
	// USD/TRY falls back to 1 so USD conversion never divides by zero.
	usdTry := rateMap["USD"]
	if usdTry <= 0 {
		usdTry = 1
	}
	filters := buildFilters(params)
	agg := make(map[string]*models.CustomerBalanceListRow, len(lines))
	// Weighted-average accumulators for due days, keyed like agg.
	weightMap := make(map[string]float64, len(lines))
	vadeSumMap := make(map[string]float64, len(lines))
	vadeBelgeSumMap := make(map[string]float64, len(lines))
	for _, ln := range lines {
		cari := strings.TrimSpace(ln.CariKodu)
		if cari == "" {
			continue
		}
		curr := strings.ToUpper(strings.TrimSpace(ln.CariDoviz))
		if curr == "" {
			curr = "TRY"
		}
		meta := metaMap[metaKey(ln.CurrAccTypeCode, cari)]
		meta.MuhasebeKodu = glMap[glKey(ln.CurrAccTypeCode, cari, ln.SirketKodu)]
		meta.SirketDetay = companyMap[ln.SirketKodu]
		master := deriveMasterCari(cari)
		mm := masterMetaMap[master]
		// Master-account metadata overrides the per-account values, but only
		// field-by-field and only when the master value is non-blank.
		if strings.TrimSpace(mm.Kanal1) != "" {
			meta.Kanal1 = mm.Kanal1
		}
		if strings.TrimSpace(mm.Piyasa) != "" {
			meta.Piyasa = mm.Piyasa
		}
		if strings.TrimSpace(mm.Temsilci) != "" {
			meta.Temsilci = mm.Temsilci
		}
		if strings.TrimSpace(mm.Ulke) != "" {
			meta.Ulke = mm.Ulke
		}
		if strings.TrimSpace(mm.Il) != "" {
			meta.Il = mm.Il
		}
		if strings.TrimSpace(mm.Ilce) != "" {
			meta.Ilce = mm.Ilce
		}
		if strings.TrimSpace(mm.RiskDurumu) != "" {
			meta.RiskDurumu = mm.RiskDurumu
		}
		if !filters.matchLine(ln.PislemTipi, meta) {
			continue
		}
		// Aggregation key: account type | account | currency | company.
		key := strconv.Itoa(ln.CurrAccTypeCode) + "|" + cari + "|" + curr + "|" + strconv.Itoa(ln.SirketKodu)
		row, ok := agg[key]
		if !ok {
			row = &models.CustomerBalanceListRow{
				CariIlkGrup:  meta.Kanal1,
				Piyasa:       meta.Piyasa,
				Temsilci:     meta.Temsilci,
				Sirket:       strconv.Itoa(ln.SirketKodu),
				AnaCariKodu:  master,
				AnaCariAdi:   firstNonEmpty(mm.CariDetay, meta.CariDetay),
				CariKodu:     cari,
				CariDetay:    meta.CariDetay,
				CariTip:      meta.CariTip,
				Kanal1:       meta.Kanal1,
				Ozellik03:    meta.RiskDurumu,
				Ozellik05:    meta.Ulke,
				Ozellik06:    meta.Il,
				Ozellik07:    meta.Ilce,
				Il:           meta.Il,
				Ilce:         meta.Ilce,
				MuhasebeKodu: meta.MuhasebeKodu,
				TC:           meta.TC,
				RiskDurumu:   meta.RiskDurumu,
				SirketDetay:  meta.SirketDetay,
				CariDoviz:    curr,
			}
			agg[key] = row
		}
		usd := toUSD(ln.Bakiye, curr, usdTry, rateMap)
		tl := toTRY(ln.Bakiye, curr, rateMap)
		// A line may contribute to bucket 12, bucket 13, or both.
		add12, add13 := resolveBalanceBuckets(ln)
		if add12 {
			row.Bakiye12 += ln.Bakiye
			row.TLBakiye12 += tl
			row.USDBakiye12 += usd
		}
		if add13 {
			row.Bakiye13 += ln.Bakiye
			row.TLBakiye13 += tl
			row.USDBakiye13 += usd
		}
		// Due-day averages are weighted by |balance|; zero-balance lines
		// contribute nothing.
		w := math.Abs(ln.Bakiye)
		if w > 0 {
			weightMap[key] += w
			vadeSumMap[key] += (ln.VadeGun * w)
			vadeBelgeSumMap[key] += (ln.VadeBelgeGun * w)
		}
	}
	out := make([]models.CustomerBalanceListRow, 0, len(agg))
	for k, v := range agg {
		base := weightMap[k]
		if base > 0 {
			v.VadeGun = vadeSumMap[k] / base
			v.VadeBelgeGun = vadeBelgeSumMap[k] / base
		}
		out = append(out, *v)
	}
	// Deterministic output order: master code, account code, currency.
	sort.Slice(out, func(i, j int) bool {
		if out[i].AnaCariKodu == out[j].AnaCariKodu {
			if out[i].CariKodu == out[j].CariKodu {
				return out[i].CariDoviz < out[j].CariDoviz
			}
			return out[i].CariKodu < out[j].CariKodu
		}
		return out[i].AnaCariKodu < out[j].AnaCariKodu
	})
	_ = selectedDate
	return out, nil
}
// loadAgingBalanceLines reads the raw balance lines from
// dbo.CARI_BAKIYE_GUN_CACHE, optionally restricted by a LIKE match on the
// trimmed account code and by the caller's piyasa scope predicate.
//
// Because the cache table's "parasal islem tipi" column has gone by several
// names across environments, the SELECT is tried with a list of candidate
// column expressions in order; an "invalid column" error advances to the
// next candidate, the final candidate selects a constant '' so the query
// always has the expected column count. Any other error aborts immediately.
func loadAgingBalanceLines(ctx context.Context, cariSearch string) ([]mkCariBakiyeLine, error) {
	piyasaScope, err := buildPiyasaExistsForCariCode(ctx, "LTRIM(RTRIM(CariKodu))")
	if err != nil {
		return nil, err
	}
	// %s slots: (1) the parasal-islem column candidate, (2) the piyasa
	// scope predicate. %% escapes the literal LIKE wildcards.
	queryTemplate := `
SELECT
CurrAccTypeCode,
CariKodu = LTRIM(RTRIM(CariKodu)),
CariDoviz = LTRIM(RTRIM(CariDoviz)),
SirketKodu,
PislemTipi,
%s
YerelBakiye = CAST(0 AS DECIMAL(18,2)),
Bakiye,
Vade_Gun,
Vade_BelgeTarihi_Gun
FROM dbo.CARI_BAKIYE_GUN_CACHE
WHERE (@CariSearch = '' OR LTRIM(RTRIM(CariKodu)) LIKE '%%' + @CariSearch + '%%')
AND %s
ORDER BY CariKodu, CariDoviz, PislemTipi
`
	selectParasalCandidates := make([]string, 0, 7)
	// A runtime-resolved expression, when available, is tried first.
	if expr := strings.TrimSpace(resolveParasalIslemSelectExpr(ctx, "SELECT * FROM dbo.CARI_BAKIYE_GUN_CACHE")); expr != "" {
		selectParasalCandidates = append(selectParasalCandidates, expr)
	}
	selectParasalCandidates = append(selectParasalCandidates,
		"CAST(ATAtt01 AS varchar(16)) AS ParasalIslemTipi,",
		"CAST(ParasalIslemTipi AS varchar(16)) AS ParasalIslemTipi,",
		"CAST(ParislemTipi AS varchar(16)) AS ParasalIslemTipi,",
		"CAST(ParIslemTipi AS varchar(16)) AS ParasalIslemTipi,",
		"CAST('' AS varchar(16)) AS ParasalIslemTipi,",
	)
	var rows *sql.Rows
	for i, sel := range selectParasalCandidates {
		query := fmt.Sprintf(queryTemplate, sel, piyasaScope)
		rows, err = db.MssqlDB.QueryContext(ctx, query, sql.Named("CariSearch", strings.TrimSpace(cariSearch)))
		if err == nil {
			break
		}
		// Only a missing-column error on a non-final candidate triggers the
		// fallback; every other failure is returned to the caller.
		if i < len(selectParasalCandidates)-1 && isInvalidColumnError(err) {
			continue
		}
		return nil, fmt.Errorf("CARI_BAKIYE_GUN_CACHE query error: %w", err)
	}
	defer rows.Close()
	out := make([]mkCariBakiyeLine, 0, 4096)
	for rows.Next() {
		var r mkCariBakiyeLine
		if err := rows.Scan(
			&r.CurrAccTypeCode,
			&r.CariKodu,
			&r.CariDoviz,
			&r.SirketKodu,
			&r.PislemTipi,
			&r.ParasalIslemTipi,
			&r.YerelBakiye,
			&r.Bakiye,
			&r.VadeGun,
			&r.VadeBelgeGun,
		); err != nil {
			return nil, err
		}
		out = append(out, r)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return out, nil
}
// toTRY converts amount from the given currency into TRY using rateMap
// (currency code -> TRY rate). TRY and blank currency codes pass through
// unchanged; a missing or non-positive rate yields 0.
func toTRY(amount float64, currency string, rateMap map[string]float64) float64 {
	if currency == "" || currency == "TRY" {
		return amount
	}
	rate := rateMap[currency]
	if rate <= 0 {
		return 0
	}
	return amount * rate
}

View File

@@ -0,0 +1,20 @@
package queries
import (
"bssapp-backend/db"
"context"
"fmt"
)
// RebuildStatementAgingCache refreshes the aging caches by running only
// step 2 + step 3 of the pipeline: the vade-gun staging build followed by
// the balance cache build. It stops at the first failing step.
func RebuildStatementAgingCache(ctx context.Context) error {
	steps := []struct {
		name string
		stmt string
	}{
		{"SP_BUILD_CARI_VADE_GUN_STAGING", `EXEC dbo.SP_BUILD_CARI_VADE_GUN_STAGING;`},
		{"SP_BUILD_CARI_BAKIYE_CACHE", `EXEC dbo.SP_BUILD_CARI_BAKIYE_CACHE;`},
	}
	for _, step := range steps {
		if _, err := db.MssqlDB.ExecContext(ctx, step.stmt); err != nil {
			return fmt.Errorf("%s error: %w", step.name, err)
		}
	}
	return nil
}

View File

@@ -2,22 +2,20 @@ package queries
import (
"bssapp-backend/db"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"strings"
)
// Fetches the main statement table (feeds the Vue header table).
func GetStatements(params models.StatementParams) ([]models.StatementHeader, error) {
func GetStatements(ctx context.Context, params models.StatementParams) ([]models.StatementHeader, error) {
// AccountCode normalize: "ZLA0127" → "ZLA 0127"
if len(params.AccountCode) == 7 && strings.ContainsAny(params.AccountCode, "0123456789") {
params.AccountCode = params.AccountCode[:3] + " " + params.AccountCode[3:]
}
if strings.TrimSpace(params.LangCode) == "" {
params.LangCode = "TR"
}
params.AccountCode = normalizeMasterAccountCode(params.AccountCode)
params.LangCode = i18n.NormalizeLangCode(params.LangCode)
// Parislemler []string → '1','2','3'
parislemFilter := "''"
@@ -35,6 +33,39 @@ func GetStatements(params models.StatementParams) ([]models.StatementHeader, err
}
}
customerPiyasaInClause, err := resolvePiyasaScopeInClause(ctx, "PF.CustomerAtt01")
if err != nil {
return nil, err
}
vendorPiyasaInClause, err := resolvePiyasaScopeInClause(ctx, "VF.VendorAtt01")
if err != nil {
return nil, err
}
piyasaScope := fmt.Sprintf(`
(
(b.CurrAccTypeCode = 3 AND EXISTS (
SELECT 1
FROM CustomerAttributesFilter PF WITH (NOLOCK)
WHERE (PF.CurrAccCode = b.CurrAccCode OR LEFT(PF.CurrAccCode, 8) = LEFT(b.CurrAccCode, 8))
AND %s
))
OR
(b.CurrAccTypeCode = 1 AND EXISTS (
SELECT 1
FROM (
SELECT
CurrAccCode,
VendorAtt01 = MAX(CASE WHEN AttributeTypeCode = 1 THEN AttributeCode END)
FROM prCurrAccAttribute WITH (NOLOCK)
WHERE CurrAccTypeCode = 1
GROUP BY CurrAccCode
) VF
WHERE (VF.CurrAccCode = b.CurrAccCode OR LEFT(VF.CurrAccCode, 8) = LEFT(b.CurrAccCode, 8))
AND %s
))
)`, customerPiyasaInClause, vendorPiyasaInClause)
query := fmt.Sprintf(`
;WITH CurrDesc AS (
SELECT
@@ -58,8 +89,9 @@ HasMovement AS (
INNER JOIN CurrAccBookATAttributesFilter f
ON f.CurrAccBookID = b.CurrAccBookID
AND f.ATAtt01 IN (%s)
WHERE b.CurrAccCode LIKE '%%' + @Carikod + '%%'
WHERE LEFT(REPLACE(b.CurrAccCode, ' ', ''), 7) = LEFT(REPLACE(@Carikod, ' ', ''), 7)
AND b.DocumentDate BETWEEN @startdate AND @enddate
AND %s
) THEN 1 ELSE 0 END AS HasMov
),
@@ -80,7 +112,9 @@ Opening AS (
LEFT JOIN trCurrAccBookCurrency c
ON c.CurrAccBookID = b.CurrAccBookID
AND c.CurrencyCode = b.DocCurrencyCode
WHERE b.CurrAccCode LIKE '%%' + @Carikod + '%%'
WHERE LEFT(REPLACE(b.CurrAccCode, ' ', ''), 7) = LEFT(REPLACE(@Carikod, ' ', ''), 7)
AND @ExcludeOpening = 0
AND %s
AND (
(hm.HasMov = 1 AND b.DocumentDate < @startdate) -- hareket varsa: klasik devir
OR (hm.HasMov = 0 AND b.DocumentDate <= @enddate) -- hareket yoksa: enddate itibariyle bakiye
@@ -95,15 +129,16 @@ Opening AS (
========================================================= */
Movements AS (
SELECT
@Carikod AS Cari_Kod,
@Carikod AS Ana_Cari_Kod,
b.CurrAccCode AS Cari_Kod,
COALESCE(
(SELECT TOP 1 cd.CurrAccDescription
FROM CurrDesc cd
WHERE cd.CurrAccCode = @Carikod),
WHERE REPLACE(cd.CurrAccCode, ' ', '') = REPLACE(b.CurrAccCode, ' ', '')),
(SELECT TOP 1 cd.CurrAccDescription
FROM CurrDesc cd
WHERE cd.CurrAccCode LIKE '%%' + @Carikod + '%%'
WHERE LEFT(REPLACE(cd.CurrAccCode, ' ', ''), 7) = REPLACE(@Carikod, ' ', '')
ORDER BY cd.CurrAccCode)
) AS Cari_Isim,
@@ -135,7 +170,8 @@ Movements AS (
ON c.CurrAccBookID = b.CurrAccBookID
AND c.CurrencyCode = b.DocCurrencyCode
WHERE b.CurrAccCode LIKE '%%' + @Carikod + '%%'
WHERE LEFT(REPLACE(b.CurrAccCode, ' ', ''), 7) = LEFT(REPLACE(@Carikod, ' ', ''), 7)
AND %s
AND b.DocumentDate BETWEEN @startdate AND @enddate
)
@@ -158,7 +194,7 @@ SELECT
FROM Movements m
LEFT JOIN Opening o
ON o.Cari_Kod = m.Cari_Kod
ON o.Cari_Kod = m.Ana_Cari_Kod
AND o.Para_Birimi = m.Para_Birimi
UNION ALL
@@ -173,10 +209,10 @@ SELECT
COALESCE(
(SELECT TOP 1 cd.CurrAccDescription
FROM CurrDesc cd
WHERE cd.CurrAccCode = @Carikod),
WHERE REPLACE(cd.CurrAccCode, ' ', '') = REPLACE(o.Cari_Kod, ' ', '')),
(SELECT TOP 1 cd.CurrAccDescription
FROM CurrDesc cd
WHERE cd.CurrAccCode LIKE '%%' + @Carikod + '%%'
WHERE LEFT(REPLACE(cd.CurrAccCode, ' ', ''), 7) = REPLACE(@Carikod, ' ', '')
ORDER BY cd.CurrAccCode)
) AS Cari_Isim,
@@ -184,8 +220,8 @@ SELECT
CONVERT(varchar(10), @startdate, 23) AS Vade_Tarihi,
'Baslangic_devir' AS Belge_No,
'Devir' AS Islem_Tipi,
'Devir Bakiyesi' AS Aciklama,
CASE WHEN @LangCode = 'EN' THEN 'Opening' ELSE 'Devir' END AS Islem_Tipi,
CASE WHEN @LangCode = 'EN' THEN 'Opening Balance' ELSE 'Devir Bakiyesi' END AS Aciklama,
o.Para_Birimi,
@@ -202,16 +238,20 @@ ORDER BY
Para_Birimi,
Belge_Tarihi;
`,
parislemFilter, // HasMovement
parislemFilter, // Opening
parislemFilter, // Movements
parislemFilter, // HasMovement ATAtt01
piyasaScope, // HasMovement piyasa scope
parislemFilter, // Opening ATAtt01
piyasaScope, // Opening piyasa scope
parislemFilter, // Movements ATAtt01
piyasaScope, // Movements piyasa scope
)
rows, err := db.MssqlDB.Query(query,
rows, err := db.MssqlDB.QueryContext(ctx, query,
sql.Named("startdate", params.StartDate),
sql.Named("enddate", params.EndDate),
sql.Named("Carikod", params.AccountCode),
sql.Named("LangCode", params.LangCode),
sql.Named("ExcludeOpening", params.ExcludeOpening),
)
if err != nil {
return nil, fmt.Errorf("MSSQL query error: %v", err)
@@ -241,3 +281,19 @@ ORDER BY
}
return results, nil
}
// normalizeMasterAccountCode canonicalizes a master account code into the
// "XXX NNNN" form: trim surrounding whitespace, uppercase, strip every
// interior space, keep only the first seven runes, and insert one space
// after the third rune. Inputs shorter than seven runes (after space
// removal) come back merely trimmed and uppercased; empty input stays empty.
func normalizeMasterAccountCode(code string) string {
	cleaned := strings.ToUpper(strings.TrimSpace(code))
	if cleaned == "" {
		return cleaned
	}
	runes := []rune(strings.ReplaceAll(cleaned, " ", ""))
	if len(runes) < 7 {
		// Too short to split into prefix+number; leave as-is.
		return cleaned
	}
	var b strings.Builder
	b.WriteString(string(runes[:3]))
	b.WriteByte(' ')
	b.WriteString(string(runes[3:7]))
	return b.String()
}

View File

@@ -2,11 +2,12 @@ package queries
import (
"bssapp-backend/models"
"context"
"log"
)
func GetStatementsHPDF(accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(accountCode, startDate, endDate, parislemler)
func GetStatementsHPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
log.Printf("Header query error: %v", err)
return nil, nil, err

View File

@@ -1,18 +1,23 @@
package queries
import "bssapp-backend/models"
import (
"bssapp-backend/models"
"context"
)
func getStatementsForPDF(
ctx context.Context,
accountCode string,
startDate string,
endDate string,
langCode string,
parislemler []string,
) ([]models.StatementHeader, error) {
return GetStatements(models.StatementParams{
return GetStatements(ctx, models.StatementParams{
AccountCode: accountCode,
StartDate: startDate,
EndDate: endDate,
LangCode: "TR",
LangCode: langCode,
Parislemler: parislemler,
})
}

View File

@@ -3,24 +3,15 @@ package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"strings"
)
/* ============================ DETAIL (ALT TABLO) ============================ */
func GetStatementDetails(accountCode, startDate, endDate string, parislemler []string) ([]models.StatementDetail, error) {
// Parislemler filtresi hazırlanır (ör: 1,2,3)
inParislem := ""
if len(parislemler) > 0 {
pp := make([]string, len(parislemler))
for i, v := range parislemler {
pp[i] = strings.TrimSpace(v)
}
inParislem = strings.Join(pp, ",")
}
query := fmt.Sprintf(`
// DETAIL (ALT TABLO)
func GetStatementDetails(ctx context.Context, belgeNo string) ([]models.StatementDetail, error) {
query := `
SELECT
CONVERT(varchar(10), a.InvoiceDate, 23) AS Belge_Tarihi,
a.InvoiceNumber AS Belge_Ref_Numarasi,
@@ -32,16 +23,11 @@ SELECT
a.ItemCode AS Urun_Kodu,
a.ColorCode AS Urun_Rengi,
SUM(a.Qty1) AS Toplam_Adet,
CAST(
SUM(a.Qty1 * ABS(a.Doc_Price))
/ NULLIF(SUM(a.Qty1),0)
AS numeric(18,4)) AS Doviz_Fiyat,
CAST(
SUM(a.Qty1 * ABS(a.Doc_Price))
AS numeric(18,2)) AS Toplam_Tutar
SUM(a.Qty1 * ABS(a.Doc_Price)) / NULLIF(SUM(a.Qty1), 0)
AS numeric(18,2)
) AS Toplam_Fiyat,
CAST(SUM(a.Qty1 * ABS(a.Doc_Price)) AS numeric(18,2)) AS Toplam_Tutar
FROM AllInvoicesWithAttributes a
LEFT JOIN prItemAttribute AnaGrup
ON a.ItemCode = AnaGrup.ItemCode AND AnaGrup.AttributeTypeCode = 1
@@ -77,30 +63,15 @@ LEFT JOIN cdItemAttributeDesc KisaKarDesc
ON KisaKar.AttributeTypeCode = KisaKarDesc.AttributeTypeCode
AND KisaKar.AttributeCode = KisaKarDesc.AttributeCode
AND KisaKar.ItemTypeCode = KisaKarDesc.ItemTypeCode
WHERE a.CurrAccCode LIKE @Carikod
AND a.InvoiceDate BETWEEN @StartDate AND @EndDate
%s
WHERE LTRIM(RTRIM(a.InvoiceNumber)) = LTRIM(RTRIM(@BelgeNo))
GROUP BY a.InvoiceDate, a.InvoiceNumber, a.ItemCode, a.ColorCode
ORDER BY Belge_Tarihi, Belge_Ref_Numarasi, Urun_Kodu;`,
func() string {
if inParislem == "" {
return ""
}
return fmt.Sprintf(`AND EXISTS (
SELECT 1
FROM CurrAccBookATAttributesFilter f
WHERE f.CurrAccBookID = a.CurrAccBookID
AND f.ATAtt01 IN (%s)
)`, inParislem)
}(),
)
rows, err := db.MssqlDB.Query(query,
sql.Named("Carikod", "%"+accountCode+"%"),
sql.Named("StartDate", startDate),
sql.Named("EndDate", endDate),
ORDER BY Belge_Tarihi, Belge_Ref_Numarasi, Urun_Kodu;`
rows, err := db.MssqlDB.QueryContext(ctx, query,
sql.Named("BelgeNo", strings.TrimSpace(belgeNo)),
)
if err != nil {
return nil, fmt.Errorf("detay sorgu hatası: %v", err)
return nil, fmt.Errorf("detay sorgu hatasi: %v", err)
}
defer rows.Close()

View File

@@ -4,14 +4,15 @@ package queries
import (
"bssapp-backend/db"
"bssapp-backend/models"
"context"
"database/sql"
"fmt"
"log"
"strings"
)
func GetStatementsPDF(accountCode, startDate, endDate string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(accountCode, startDate, endDate, parislemler)
func GetStatementsPDF(ctx context.Context, accountCode, startDate, endDate, langCode string, parislemler []string) ([]models.StatementHeader, []string, error) {
headers, err := getStatementsForPDF(ctx, accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
log.Printf("Header query error: %v", err)
return nil, nil, err

View File

@@ -52,12 +52,19 @@ ORDER BY d.code
// 🌍 PIYASALAR
// ======================================================
// GetUserPiyasalar lists the market (piyasa) codes and titles a user is
// allowed to see. The join is tolerant: the stored piyasa_code is matched
// against both mk_sales_piy.code and .title, case-insensitively and with
// Turkish characters folded to ASCII; unmatched codes fall back to the raw
// stored value so a row is never silently dropped.
//
// NOTE: the viewed span was diff residue containing both the old inner-join
// query and the new tolerant-join query; this is the reconstructed new-side
// constant.
const GetUserPiyasalar = `
SELECT
    COALESCE(p_code.code, p_title.code, up.piyasa_code)   AS code,
    COALESCE(p_code.title, p_title.title, up.piyasa_code) AS title
FROM dfusr_piyasa up
LEFT JOIN mk_sales_piy p_code
    ON UPPER(translate(TRIM(p_code.code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
     = UPPER(translate(TRIM(up.piyasa_code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
LEFT JOIN mk_sales_piy p_title
    ON UPPER(translate(TRIM(p_title.title), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
     = UPPER(translate(TRIM(up.piyasa_code), 'çğıöşüÇĞİÖŞÜ', 'CGIOSUCGIOSU'))
WHERE up.dfusr_id = $1
  AND up.is_allowed = true
ORDER BY 1
`
// ======================================================

View File

@@ -0,0 +1,172 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"database/sql"
"fmt"
"net/http"
"strings"
"time"
"github.com/xuri/excelize/v2"
)
// ExportCustomerBalanceExcelHandler serves the customer-balance list as an
// .xlsx download. The *sql.DB parameter is unused (data access goes through
// the queries package); it is kept so the route registration signature
// matches sibling handlers. Filters are read from the query string exactly
// like the JSON list endpoint; rows are filtered and aggregated with the
// same helpers as the PDF export, then written as a header row, a TOPLAM
// totals row, and one row per master account.
func ExportCustomerBalanceExcelHandler(_ *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Require an authenticated caller.
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		// selected_date defaults to "today" in server-local time.
		selectedDate := strings.TrimSpace(r.URL.Query().Get("selected_date"))
		if selectedDate == "" {
			selectedDate = time.Now().Format("2006-01-02")
		}
		// Query-string filters, all trimmed; empty string means "no filter".
		params := models.CustomerBalanceListParams{
			SelectedDate: selectedDate,
			CariSearch:   strings.TrimSpace(r.URL.Query().Get("cari_search")),
			CariIlkGrup:  strings.TrimSpace(r.URL.Query().Get("cari_ilk_grup")),
			Piyasa:       strings.TrimSpace(r.URL.Query().Get("piyasa")),
			Temsilci:     strings.TrimSpace(r.URL.Query().Get("temsilci")),
			RiskDurumu:   strings.TrimSpace(r.URL.Query().Get("risk_durumu")),
			IslemTipi:    strings.TrimSpace(r.URL.Query().Get("islem_tipi")),
			Ulke:         strings.TrimSpace(r.URL.Query().Get("ulke")),
			Il:           strings.TrimSpace(r.URL.Query().Get("il")),
			Ilce:         strings.TrimSpace(r.URL.Query().Get("ilce")),
		}
		excludeZero12 := parseBoolQuery(r.URL.Query().Get("exclude_zero_12"))
		excludeZero13 := parseBoolQuery(r.URL.Query().Get("exclude_zero_13"))
		rows, err := queries.GetCustomerBalanceList(r.Context(), params)
		if err != nil {
			http.Error(w, "db error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		// Reuse the PDF pipeline: drop zero-balance rows, then aggregate
		// one summary per master account (details are discarded here).
		rows = filterCustomerBalanceRowsForPDF(rows, excludeZero12, excludeZero13)
		summaries, _ := buildCustomerBalancePDFData(rows)
		f := excelize.NewFile()
		sheet := "CariBakiye"
		f.SetSheetName("Sheet1", sheet)
		headers := []string{
			"Ana Cari Kodu",
			"Ana Cari Detay",
			"Piyasa",
			"Temsilci",
			"Risk Durumu",
			"1_2 Bakiye Pr.Br",
			"1_3 Bakiye Pr.Br",
			"1_2 USD Bakiye",
			"1_2 TRY Bakiye",
			"1_3 USD Bakiye",
			"1_3 TRY Bakiye",
			"Vade Gun",
			"Belge Tarihi Gun",
		}
		// Row 1: column headers.
		for i, h := range headers {
			cell, _ := excelize.CoordinatesToCellName(i+1, 1)
			f.SetCellValue(sheet, cell, h)
		}
		// Grand totals. Vade columns are weighted averages whose weight is
		// the sum of absolute USD/TRY balances, so empty accounts do not
		// dilute the average.
		var totalUSD12, totalTRY12, totalUSD13, totalTRY13 float64
		var totalVadeBase, totalVadeSum, totalVadeBelgeSum float64
		totalPrBr12 := map[string]float64{}
		totalPrBr13 := map[string]float64{}
		for _, s := range summaries {
			totalUSD12 += s.USDBakiye12
			totalTRY12 += s.TLBakiye12
			totalUSD13 += s.USDBakiye13
			totalTRY13 += s.TLBakiye13
			// NOTE(review): this w shadows the http.ResponseWriter for the
			// rest of the loop body; consider renaming to "weight".
			w := absFloatExcel(s.USDBakiye12) + absFloatExcel(s.TLBakiye12) + absFloatExcel(s.USDBakiye13) + absFloatExcel(s.TLBakiye13)
			if w > 0 {
				totalVadeBase += w
				totalVadeSum += s.VadeGun * w
				totalVadeBelgeSum += s.VadeBelge * w
			}
			for k, v := range s.Bakiye12Map {
				totalPrBr12[k] += v
			}
			for k, v := range s.Bakiye13Map {
				totalPrBr13[k] += v
			}
		}
		totalVade := 0.0
		totalVadeBelge := 0.0
		if totalVadeBase > 0 {
			totalVade = totalVadeSum / totalVadeBase
			totalVadeBelge = totalVadeBelgeSum / totalVadeBase
		}
		// Row 2: TOPLAM totals row (Pr.Br columns as formatted strings,
		// USD/TRY and Vade columns as raw numbers).
		f.SetSheetRow(sheet, "A2", &[]any{
			"TOPLAM",
			"",
			"",
			"",
			"",
			formatCurrencyMapPDF(totalPrBr12),
			formatCurrencyMapPDF(totalPrBr13),
			totalUSD12,
			totalTRY12,
			totalUSD13,
			totalTRY13,
			totalVade,
			totalVadeBelge,
		})
		// Rows 3..n: one row per master-account summary.
		rowNo := 3
		for _, s := range summaries {
			f.SetSheetRow(sheet, fmt.Sprintf("A%d", rowNo), &[]any{
				s.AnaCariKodu,
				s.AnaCariAdi,
				s.Piyasa,
				s.Temsilci,
				s.RiskDurumu,
				formatCurrencyMapPDF(s.Bakiye12Map),
				formatCurrencyMapPDF(s.Bakiye13Map),
				s.USDBakiye12,
				s.TLBakiye12,
				s.USDBakiye13,
				s.TLBakiye13,
				s.VadeGun,
				s.VadeBelge,
			})
			rowNo++
		}
		// Cosmetic column widths; errors are non-fatal and ignored.
		_ = f.SetColWidth(sheet, "A", "A", 16)
		_ = f.SetColWidth(sheet, "B", "B", 34)
		_ = f.SetColWidth(sheet, "C", "E", 18)
		_ = f.SetColWidth(sheet, "F", "G", 34)
		_ = f.SetColWidth(sheet, "H", "M", 18)
		buf, err := f.WriteToBuffer()
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		// Timestamped attachment filename, e.g. cari_bakiye_listesi_20240101_120000.xlsx.
		filename := fmt.Sprintf("cari_bakiye_listesi_%s.xlsx", time.Now().Format("20060102_150405"))
		w.Header().Set("Content-Type", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
		w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")
		w.Header().Set("Content-Length", fmt.Sprint(len(buf.Bytes())))
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write(buf.Bytes())
	}
}
// absFloatExcel returns the absolute value of v; tiny local helper used
// when computing the weighted Vade averages for the export.
func absFloatExcel(v float64) float64 {
	switch {
	case v < 0:
		return -v
	default:
		return v
	}
}

View File

@@ -0,0 +1,51 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"log"
"net/http"
"strings"
"time"
)
// GET /api/finance/customer-balances
func GetCustomerBalanceListHandler(w http.ResponseWriter, r *http.Request) {
claims, ok := auth.GetClaimsFromContext(r.Context())
if !ok || claims == nil {
http.Error(w, "unauthorized", http.StatusUnauthorized)
return
}
selectedDate := strings.TrimSpace(r.URL.Query().Get("selected_date"))
if selectedDate == "" {
selectedDate = time.Now().Format("2006-01-02")
}
params := models.CustomerBalanceListParams{
SelectedDate: selectedDate,
CariSearch: strings.TrimSpace(r.URL.Query().Get("cari_search")),
CariIlkGrup: strings.TrimSpace(r.URL.Query().Get("cari_ilk_grup")),
Piyasa: strings.TrimSpace(r.URL.Query().Get("piyasa")),
Temsilci: strings.TrimSpace(r.URL.Query().Get("temsilci")),
RiskDurumu: strings.TrimSpace(r.URL.Query().Get("risk_durumu")),
IslemTipi: strings.TrimSpace(r.URL.Query().Get("islem_tipi")),
Ulke: strings.TrimSpace(r.URL.Query().Get("ulke")),
Il: strings.TrimSpace(r.URL.Query().Get("il")),
Ilce: strings.TrimSpace(r.URL.Query().Get("ilce")),
}
rows, err := queries.GetCustomerBalanceList(r.Context(), params)
if err != nil {
log.Println("GetCustomerBalanceList error:", err)
http.Error(w, "db error: "+err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json; charset=utf-8")
if err := json.NewEncoder(w).Encode(rows); err != nil {
log.Println("GetCustomerBalanceList json encode error:", err)
}
}

View File

@@ -0,0 +1,983 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
"database/sql"
"fmt"
"math"
"net/http"
"sort"
"strconv"
"strings"
"time"
"github.com/jung-kurt/gofpdf"
)
// balanceSummaryPDF is one aggregated row of the customer-balance report:
// all balances of a single master account ("ana cari"), summed per company
// pair (1_2 / 1_3) and grouped per currency. Built by
// buildCustomerBalancePDFData from the flat query rows.
type balanceSummaryPDF struct {
	AnaCariKodu string // master account code (grouping key)
	AnaCariAdi  string // master account display name (first non-empty row value)
	Piyasa      string // market segment label
	Temsilci    string // sales representative label
	RiskDurumu  string // risk status label
	// Per-currency balance sums, keyed by upper-cased currency code;
	// rows with no currency are accumulated under "N/A".
	Bakiye12Map map[string]float64
	Bakiye13Map map[string]float64
	USDBakiye12 float64 // summed USD-column balance for the 1_2 pair
	TLBakiye12  float64 // summed TRY-column balance for the 1_2 pair
	USDBakiye13 float64 // summed USD-column balance for the 1_3 pair
	TLBakiye13  float64 // summed TRY-column balance for the 1_3 pair
	VadeGun     float64 // weighted avg due-date days (weight = sum of abs USD/TRY balances)
	VadeBelge   float64 // weighted avg document-date days (same weighting)
}
// ExportCustomerBalancePDFHandler serves the customer-balance list as a PDF.
// The *sql.DB parameter is unused (data access goes through the queries
// package); it is kept so the route registration signature matches sibling
// handlers. Filters mirror the JSON list endpoint; "detailed" adds one
// detail table per master account. Rendering first tries the full layout
// and, if that panics, falls back to a simpler single-table renderer.
func ExportCustomerBalancePDFHandler(_ *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Require an authenticated caller.
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		// selected_date defaults to "today" in server-local time.
		selectedDate := strings.TrimSpace(r.URL.Query().Get("selected_date"))
		if selectedDate == "" {
			selectedDate = time.Now().Format("2006-01-02")
		}
		// Query-string filters, all trimmed; empty string means "no filter".
		params := models.CustomerBalanceListParams{
			SelectedDate: selectedDate,
			CariSearch:   strings.TrimSpace(r.URL.Query().Get("cari_search")),
			CariIlkGrup:  strings.TrimSpace(r.URL.Query().Get("cari_ilk_grup")),
			Piyasa:       strings.TrimSpace(r.URL.Query().Get("piyasa")),
			Temsilci:     strings.TrimSpace(r.URL.Query().Get("temsilci")),
			RiskDurumu:   strings.TrimSpace(r.URL.Query().Get("risk_durumu")),
			IslemTipi:    strings.TrimSpace(r.URL.Query().Get("islem_tipi")),
			Ulke:         strings.TrimSpace(r.URL.Query().Get("ulke")),
			Il:           strings.TrimSpace(r.URL.Query().Get("il")),
			Ilce:         strings.TrimSpace(r.URL.Query().Get("ilce")),
		}
		detailed := parseBoolQuery(r.URL.Query().Get("detailed"))
		excludeZero12 := parseBoolQuery(r.URL.Query().Get("exclude_zero_12"))
		excludeZero13 := parseBoolQuery(r.URL.Query().Get("exclude_zero_13"))
		rows, err := queries.GetCustomerBalanceList(r.Context(), params)
		if err != nil {
			http.Error(w, "db error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		// Drop zero-balance rows, aggregate per master account, then apply
		// the client-requested sort to the summary table.
		rows = filterCustomerBalanceRowsForPDF(rows, excludeZero12, excludeZero13)
		summaries, detailsByMaster := buildCustomerBalancePDFData(rows)
		sortBy := strings.TrimSpace(r.URL.Query().Get("sort_by"))
		sortDesc := parseBoolQuery(r.URL.Query().Get("sort_desc"))
		sortBalanceSummariesForPDF(summaries, sortBy, sortDesc)
		// Landscape A4; the full renderer paginates manually, so automatic
		// page breaks are disabled here.
		pdf := gofpdf.New("L", "mm", "A4", "")
		pdf.SetMargins(8, 8, 8)
		pdf.SetAutoPageBreak(false, 12)
		if err := registerDejavuFonts(pdf, "dejavu"); err != nil {
			http.Error(w, "pdf font error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		if err := safeDrawCustomerBalancePDF(
			pdf,
			selectedDate,
			params.CariSearch,
			detailed,
			"Cari Bakiye Listesi",
			false,
			summaries,
			detailsByMaster,
		); err != nil {
			// Full renderer panicked: rebuild the document from scratch with
			// auto page breaks on and use the simpler fallback layout.
			pdf = gofpdf.New("L", "mm", "A4", "")
			pdf.SetMargins(8, 8, 8)
			pdf.SetAutoPageBreak(true, 12)
			if ferr := registerDejavuFonts(pdf, "dejavu"); ferr != nil {
				http.Error(w, "pdf font error: "+ferr.Error(), http.StatusInternalServerError)
				return
			}
			drawCustomerBalancePDFFallback(pdf, selectedDate, params.CariSearch, "Cari Bakiye Listesi", summaries, false)
		}
		if err := pdf.Error(); err != nil {
			http.Error(w, "pdf render error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		var buf bytes.Buffer
		if err := pdf.Output(&buf); err != nil {
			http.Error(w, "pdf output error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		filename := "customer-balance-summary.pdf"
		if detailed {
			filename = "customer-balance-detailed.pdf"
		}
		// Inline disposition: browsers preview instead of downloading.
		w.Header().Set("Content-Type", "application/pdf")
		w.Header().Set("Content-Disposition", fmt.Sprintf("inline; filename=%q", filename))
		_, _ = w.Write(buf.Bytes())
	}
}
// parseBoolQuery interprets a query-string value as a boolean flag.
// "1", "true", "yes" and "on" (case-insensitive, surrounding whitespace
// ignored) are truthy; anything else — including the empty string — is false.
func parseBoolQuery(v string) bool {
	normalized := strings.ToLower(strings.TrimSpace(v))
	return normalized == "1" ||
		normalized == "true" ||
		normalized == "yes" ||
		normalized == "on"
}
// filterCustomerBalanceRowsForPDF drops rows whose 1_2 / 1_3 balance is
// exactly zero when the corresponding exclude flag is set. A fresh slice is
// returned; the input slice is never mutated.
func filterCustomerBalanceRowsForPDF(rows []models.CustomerBalanceListRow, excludeZero12, excludeZero13 bool) []models.CustomerBalanceListRow {
	kept := make([]models.CustomerBalanceListRow, 0, len(rows))
	for _, row := range rows {
		skip := (excludeZero12 && row.Bakiye12 == 0) ||
			(excludeZero13 && row.Bakiye13 == 0)
		if skip {
			continue
		}
		kept = append(kept, row)
	}
	return kept
}
// buildCustomerBalancePDFData groups the flat balance rows by master
// account ("ana cari"). It returns one balanceSummaryPDF per master code
// (sorted ascending by that code) and a map of the raw detail rows per
// master (each sorted by cari code, then currency, then numeric company
// code). VadeGun/VadeBelge on each summary are weighted averages where a
// row's weight is the sum of its absolute USD/TRY balances, so rows with
// no balance do not influence the average.
func buildCustomerBalancePDFData(rows []models.CustomerBalanceListRow) ([]balanceSummaryPDF, map[string][]models.CustomerBalanceListRow) {
	summaryMap := make(map[string]*balanceSummaryPDF)
	detailsByMaster := make(map[string][]models.CustomerBalanceListRow)
	// Accumulators for the weighted due-date averages, keyed by master code.
	vadeWeightMap := make(map[string]float64)
	vadeGunSumMap := make(map[string]float64)
	vadeBelgeSumMap := make(map[string]float64)
	for _, row := range rows {
		// Fall back to the row's own code when no master code is present;
		// rows with neither identifier are dropped entirely.
		master := strings.TrimSpace(row.AnaCariKodu)
		if master == "" {
			master = strings.TrimSpace(row.CariKodu)
		}
		if master == "" {
			continue
		}
		s := summaryMap[master]
		if s == nil {
			s = &balanceSummaryPDF{
				AnaCariKodu: master,
				AnaCariAdi:  strings.TrimSpace(row.AnaCariAdi),
				Piyasa:      strings.TrimSpace(row.Piyasa),
				Temsilci:    strings.TrimSpace(row.Temsilci),
				RiskDurumu:  strings.TrimSpace(row.RiskDurumu),
				Bakiye12Map: map[string]float64{},
				Bakiye13Map: map[string]float64{},
			}
			summaryMap[master] = s
		}
		// Descriptive fields: first non-empty value encountered wins.
		if s.AnaCariAdi == "" && strings.TrimSpace(row.AnaCariAdi) != "" {
			s.AnaCariAdi = strings.TrimSpace(row.AnaCariAdi)
		}
		if s.Piyasa == "" && strings.TrimSpace(row.Piyasa) != "" {
			s.Piyasa = strings.TrimSpace(row.Piyasa)
		}
		if s.Temsilci == "" && strings.TrimSpace(row.Temsilci) != "" {
			s.Temsilci = strings.TrimSpace(row.Temsilci)
		}
		if s.RiskDurumu == "" && strings.TrimSpace(row.RiskDurumu) != "" {
			s.RiskDurumu = strings.TrimSpace(row.RiskDurumu)
		}
		// Per-currency sums; rows without a currency land under "N/A".
		curr := strings.ToUpper(strings.TrimSpace(row.CariDoviz))
		if curr == "" {
			curr = "N/A"
		}
		s.Bakiye12Map[curr] += row.Bakiye12
		s.Bakiye13Map[curr] += row.Bakiye13
		s.USDBakiye12 += row.USDBakiye12
		s.TLBakiye12 += row.TLBakiye12
		s.USDBakiye13 += row.USDBakiye13
		s.TLBakiye13 += row.TLBakiye13
		// Weight = sum of absolute converted balances; zero-weight rows
		// are excluded from the weighted averages.
		w := absFloatExcel(row.USDBakiye12) + absFloatExcel(row.TLBakiye12) + absFloatExcel(row.USDBakiye13) + absFloatExcel(row.TLBakiye13)
		if w > 0 {
			vadeWeightMap[master] += w
			vadeGunSumMap[master] += row.VadeGun * w
			vadeBelgeSumMap[master] += row.VadeBelgeGun * w
		}
		detailsByMaster[master] = append(detailsByMaster[master], row)
	}
	// Deterministic output order: master codes sorted ascending.
	masters := make([]string, 0, len(summaryMap))
	for m := range summaryMap {
		masters = append(masters, m)
	}
	sort.Strings(masters)
	summaries := make([]balanceSummaryPDF, 0, len(masters))
	for _, m := range masters {
		// Finalize the weighted averages (left at 0 when weight is 0).
		if base := vadeWeightMap[m]; base > 0 {
			summaryMap[m].VadeGun = vadeGunSumMap[m] / base
			summaryMap[m].VadeBelge = vadeBelgeSumMap[m] / base
		}
		summaries = append(summaries, *summaryMap[m])
		d := detailsByMaster[m]
		// Detail order: cari code, then currency, then numeric company
		// code (non-numeric Sirket compares as 0 — Atoi error ignored).
		sort.SliceStable(d, func(i, j int) bool {
			if d[i].CariKodu == d[j].CariKodu {
				if d[i].CariDoviz == d[j].CariDoviz {
					si, _ := strconv.Atoi(d[i].Sirket)
					sj, _ := strconv.Atoi(d[j].Sirket)
					return si < sj
				}
				return d[i].CariDoviz < d[j].CariDoviz
			}
			return d[i].CariKodu < d[j].CariKodu
		})
		detailsByMaster[m] = d
	}
	return summaries, detailsByMaster
}
// sortBalanceSummariesForPDF orders the summaries in place by the requested
// column key (stable sort). Text columns compare case-insensitively after
// trimming; ties — and unknown keys — fall back to the master account code.
// descending simply inverts the comparison; an empty key is a no-op.
func sortBalanceSummariesForPDF(summaries []balanceSummaryPDF, sortBy string, descending bool) {
	key := strings.TrimSpace(sortBy)
	if key == "" || len(summaries) <= 1 {
		return
	}

	compareText := func(a, b string) int {
		return strings.Compare(strings.ToUpper(strings.TrimSpace(a)), strings.ToUpper(strings.TrimSpace(b)))
	}
	compareNum := func(a, b float64) int {
		switch {
		case a < b:
			return -1
		case a > b:
			return 1
		default:
			return 0
		}
	}

	// Primary comparison for the selected column.
	compare := func(a, b balanceSummaryPDF) int {
		switch key {
		case "ana_cari_kodu":
			return compareText(a.AnaCariKodu, b.AnaCariKodu)
		case "ana_cari_adi":
			return compareText(a.AnaCariAdi, b.AnaCariAdi)
		case "piyasa":
			return compareText(a.Piyasa, b.Piyasa)
		case "temsilci":
			return compareText(a.Temsilci, b.Temsilci)
		case "risk_durumu":
			return compareText(a.RiskDurumu, b.RiskDurumu)
		case "usd_bakiye_1_2":
			return compareNum(a.USDBakiye12, b.USDBakiye12)
		case "tl_bakiye_1_2":
			return compareNum(a.TLBakiye12, b.TLBakiye12)
		case "usd_bakiye_1_3":
			return compareNum(a.USDBakiye13, b.USDBakiye13)
		case "tl_bakiye_1_3":
			return compareNum(a.TLBakiye13, b.TLBakiye13)
		case "vade_gun":
			return compareNum(a.VadeGun, b.VadeGun)
		case "vade_belge_tarihi_gun":
			return compareNum(a.VadeBelge, b.VadeBelge)
		case "prbr_1_2":
			return compareNum(sumCurrencyMapForSort(a.Bakiye12Map), sumCurrencyMapForSort(b.Bakiye12Map))
		case "prbr_1_3":
			return compareNum(sumCurrencyMapForSort(a.Bakiye13Map), sumCurrencyMapForSort(b.Bakiye13Map))
		default:
			return compareText(a.AnaCariKodu, b.AnaCariKodu)
		}
	}

	sort.SliceStable(summaries, func(i, j int) bool {
		cmp := compare(summaries[i], summaries[j])
		if cmp == 0 {
			// Tie-break on master code for a deterministic order.
			cmp = compareText(summaries[i].AnaCariKodu, summaries[j].AnaCariKodu)
		}
		if descending {
			return cmp > 0
		}
		return cmp < 0
	})
}
// sumCurrencyMapForSort collapses a per-currency balance map into one signed
// total. Used only as a crude sort key for the "Pr.Br" columns — mixing
// currencies numerically is meaningless for anything beyond ordering.
func sumCurrencyMapForSort(m map[string]float64) float64 {
	var total float64
	for _, amount := range m {
		total += amount
	}
	return total
}
// drawCustomerBalancePDF renders the customer-balance report into pdf: a
// summary table (TOPLAM totals row first, then one row per master account)
// and, when detailed is true, a per-master detail table appended after the
// summary. includeVadeColumns adds the "Vade Gun" / "Belge Tarihi Gun"
// columns to both tables. Pagination is handled manually via needPage();
// the caller is expected to have disabled auto page breaks. Helper
// functions (normalizeWidths, formatMoneyPDF, calcPDFRowHeight*, drawPDFCell*)
// are defined elsewhere in this package.
func drawCustomerBalancePDF(
	pdf *gofpdf.Fpdf,
	selectedDate string,
	searchText string,
	detailed bool,
	reportTitle string,
	includeVadeColumns bool,
	summaries []balanceSummaryPDF,
	detailsByMaster map[string][]models.CustomerBalanceListRow,
) {
	pageW, pageH := pdf.GetPageSize()
	marginL, marginT, marginR, marginB := 8.0, 8.0, 8.0, 12.0
	tableW := pageW - marginL - marginR
	pageNoColor := [3]int{90, 90, 90}
	// Page number in the bottom-right corner of every page.
	pdf.SetFooterFunc(func() {
		pdf.SetY(-8)
		pdf.SetFont("dejavu", "", 8)
		pdf.SetTextColor(pageNoColor[0], pageNoColor[1], pageNoColor[2])
		pdf.CellFormat(0, 4, fmt.Sprintf("Sayfa %d", pdf.PageNo()), "", 0, "R", false, 0, "")
	})
	// Summary table columns; widths are relative weights normalized to the
	// usable table width. With Vade columns, the whole weight set is
	// replaced to make room for the two extra columns.
	summaryCols := []string{"Ana Cari Kod", "Ana Cari Detay", "Piyasa", "Temsilci", "Risk", "1_2 Pr.Br", "1_3 Pr.Br", "1_2 USD", "1_2 TRY", "1_3 USD", "1_3 TRY"}
	summaryWeights := []float64{18, 42, 16, 16, 14, 24, 24, 14, 14, 14, 14}
	if includeVadeColumns {
		summaryCols = append(summaryCols, "Vade Gun", "Belge Tarihi Gun")
		summaryWeights = []float64{18, 38, 14, 14, 12, 20, 20, 12, 12, 12, 12, 10, 13}
	}
	summaryW := normalizeWidths(summaryWeights, tableW)
	// Detail table columns (only rendered in detailed mode).
	detailCols := []string{"Cari Kod", "Cari Detay", "Sirket", "Muhasebe", "Doviz", "1_2 Pr.Br", "1_3 Pr.Br", "1_2 USD", "1_2 TRY", "1_3 USD", "1_3 TRY"}
	detailWeights := []float64{23, 40, 9, 18, 9, 20, 20, 13, 13, 13, 13}
	if includeVadeColumns {
		detailCols = append(detailCols, "Vade Gun", "Belge Tarihi Gun")
		detailWeights = append(detailWeights, 11, 14)
	}
	detailW := normalizeWidths(detailWeights, tableW)
	// header starts a new page: logo (best-effort), title, date/mode block,
	// optional search line, and a horizontal rule above the content area.
	header := func() {
		pdf.AddPage()
		titleX := marginL
		if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
			pdf.ImageOptions(logoPath, marginL, marginT-1, 34, 0, false, gofpdf.ImageOptions{}, 0, "")
			titleX = marginL + 38
		}
		pdf.SetFont("dejavu", "B", 15)
		pdf.SetTextColor(149, 113, 22)
		pdf.SetXY(titleX, marginT)
		title := strings.TrimSpace(reportTitle)
		if title == "" {
			title = "Cari Bakiye Listesi"
		}
		pdf.CellFormat(140, 7, title, "", 0, "L", false, 0, "")
		pdf.SetFont("dejavu", "", 9)
		pdf.SetTextColor(20, 20, 20)
		pdf.SetXY(pageW-marginR-80, marginT+1)
		pdf.CellFormat(80, 5, "Tarih: "+formatDateTR(selectedDate), "", 0, "R", false, 0, "")
		mode := "Detaysiz"
		if detailed {
			mode = "Detayli"
		}
		pdf.SetXY(pageW-marginR-80, marginT+6)
		pdf.CellFormat(80, 5, "Mod: "+mode, "", 0, "R", false, 0, "")
		if strings.TrimSpace(searchText) != "" {
			pdf.SetXY(titleX, marginT+8)
			pdf.CellFormat(tableW-(titleX-marginL), 5, "Arama: "+searchText, "", 0, "L", false, 0, "")
		}
		pdf.SetDrawColor(149, 113, 22)
		pdf.Line(marginL, marginT+14, pageW-marginR, marginT+14)
		pdf.SetDrawColor(210, 210, 210)
		pdf.SetY(marginT + 17)
	}
	// needPage reports whether needH more millimeters would overflow the page.
	needPage := func(needH float64) bool {
		return pdf.GetY()+needH+marginB > pageH
	}
	// Filled header row for the summary table.
	drawSummaryHeader := func() {
		headerFont := 7.5
		pdf.SetFont("dejavu", "B", headerFont)
		pdf.SetFillColor(149, 113, 22)
		pdf.SetTextColor(255, 255, 255)
		y := pdf.GetY()
		x := marginL
		for i, c := range summaryCols {
			if i >= len(summaryW) {
				break
			}
			pdf.Rect(x, y, summaryW[i], 7, "DF")
			pdf.SetXY(x+1, y+1.2)
			pdf.CellFormat(summaryW[i]-2, 4.6, c, "", 0, "C", false, 0, "")
			x += summaryW[i]
		}
		pdf.SetY(y + 7)
	}
	// Filled header row for the detail table.
	drawDetailHeader := func() {
		pdf.SetFont("dejavu", "B", 7.2)
		pdf.SetFillColor(149, 113, 22)
		pdf.SetTextColor(255, 255, 255)
		y := pdf.GetY()
		x := marginL
		for i, c := range detailCols {
			if i >= len(detailW) {
				break
			}
			pdf.Rect(x, y, detailW[i], 6, "DF")
			pdf.SetXY(x+1, y+1)
			pdf.CellFormat(detailW[i]-2, 4, c, "", 0, "C", false, 0, "")
			x += detailW[i]
		}
		pdf.SetY(y + 6)
	}
	header()
	drawSummaryHeader()
	bodyFont := 7.2
	pdf.SetFont("dejavu", "", bodyFont)
	pdf.SetTextColor(20, 20, 20)
	// Grand totals across all summaries; Vade columns use the same
	// abs-balance weighting as buildCustomerBalancePDFData.
	totalUSD12, totalTRY12 := 0.0, 0.0
	totalUSD13, totalTRY13 := 0.0, 0.0
	totalPrBr12 := map[string]float64{}
	totalPrBr13 := map[string]float64{}
	totalVadeBase, totalVadeSum, totalVadeBelgeSum := 0.0, 0.0, 0.0
	for _, s := range summaries {
		totalUSD12 += s.USDBakiye12
		totalTRY12 += s.TLBakiye12
		totalUSD13 += s.USDBakiye13
		totalTRY13 += s.TLBakiye13
		for k, v := range s.Bakiye12Map {
			totalPrBr12[k] += v
		}
		for k, v := range s.Bakiye13Map {
			totalPrBr13[k] += v
		}
		w := absFloatExcel(s.USDBakiye12) + absFloatExcel(s.TLBakiye12) + absFloatExcel(s.USDBakiye13) + absFloatExcel(s.TLBakiye13)
		if w > 0 {
			totalVadeBase += w
			totalVadeSum += s.VadeGun * w
			totalVadeBelgeSum += s.VadeBelge * w
		}
	}
	totalsRow := []string{
		"TOPLAM",
		"",
		"",
		"",
		"",
		formatCurrencyMapPDF(totalPrBr12),
		formatCurrencyMapPDF(totalPrBr13),
		formatMoneyPDF(totalUSD12),
		formatMoneyPDF(totalTRY12),
		formatMoneyPDF(totalUSD13),
		formatMoneyPDF(totalTRY13),
	}
	if includeVadeColumns {
		totalVade, totalVadeBelge := 0.0, 0.0
		if totalVadeBase > 0 {
			totalVade = totalVadeSum / totalVadeBase
			totalVadeBelge = totalVadeBelgeSum / totalVadeBase
		}
		totalsRow = append(totalsRow, formatDayUpPDF(totalVade), formatDayUpPDF(totalVadeBelge))
	}
	// TOPLAM row: highlighted, rendered before the per-account rows.
	totalH := calcPDFRowHeightCapped(pdf, totalsRow, summaryW, map[int]int{0: 1, 1: 1, 2: 1, 3: 1, 5: 2, 6: 2}, 6.0, 3.4)
	if needPage(totalH) {
		header()
		drawSummaryHeader()
	}
	pdf.SetFont("dejavu", "B", 6.8)
	pdf.SetFillColor(218, 193, 151)
	pdf.SetTextColor(20, 20, 20)
	totalY := pdf.GetY()
	totalX := marginL
	for i, v := range totalsRow {
		if i >= len(summaryW) {
			break
		}
		pdf.Rect(totalX, totalY, summaryW[i], totalH, "FD")
		// Alignment: money right, Vade columns centered, text left.
		align := "L"
		if i >= 7 {
			align = "R"
		}
		if includeVadeColumns && (i == len(totalsRow)-1 || i == len(totalsRow)-2) {
			align = "C"
		}
		if i == 5 || i == 6 {
			drawPDFCellWrappedCapped(pdf, v, totalX, totalY, summaryW[i], totalH, align, 3.4, 2)
		} else {
			drawPDFCellWrapped(pdf, v, totalX, totalY, summaryW[i], totalH, align, 3.4)
		}
		totalX += summaryW[i]
	}
	pdf.SetY(totalY + totalH)
	pdf.SetFont("dejavu", "", bodyFont)
	pdf.SetTextColor(20, 20, 20)
	// One summary row per master account.
	for _, s := range summaries {
		row := []string{
			s.AnaCariKodu,
			s.AnaCariAdi,
			s.Piyasa,
			s.Temsilci,
			s.RiskDurumu,
			formatCurrencyMapPDF(s.Bakiye12Map),
			formatCurrencyMapPDF(s.Bakiye13Map),
			formatMoneyPDF(s.USDBakiye12),
			formatMoneyPDF(s.TLBakiye12),
			formatMoneyPDF(s.USDBakiye13),
			formatMoneyPDF(s.TLBakiye13),
		}
		if includeVadeColumns {
			row = append(row, formatDayUpPDF(s.VadeGun), formatDayUpPDF(s.VadeBelge))
		}
		rowH := calcPDFRowHeightCapped(pdf, row, summaryW, map[int]int{0: 3, 1: 3, 2: 3, 3: 3, 4: 2, 5: 2, 6: 2}, 6.0, 3.4)
		if needPage(rowH) {
			// New page: repeat the table header and restore the body style.
			header()
			drawSummaryHeader()
			pdf.SetFont("dejavu", "", bodyFont)
			pdf.SetTextColor(20, 20, 20)
		}
		y := pdf.GetY()
		x := marginL
		for i, v := range row {
			if i >= len(summaryW) {
				break
			}
			pdf.Rect(x, y, summaryW[i], rowH, "")
			align := "L"
			if i >= 7 {
				align = "R"
			}
			if includeVadeColumns && (i == len(row)-1 || i == len(row)-2) {
				align = "C"
			}
			if i <= 3 {
				drawPDFCellWrappedCapped(pdf, v, x, y, summaryW[i], rowH, align, 3.4, 3)
			} else if i == 5 || i == 6 {
				drawPDFCellWrappedCapped(pdf, v, x, y, summaryW[i], rowH, align, 3.4, 2)
			} else {
				drawPDFCellWrapped(pdf, v, x, y, summaryW[i], rowH, align, 3.4)
			}
			x += summaryW[i]
		}
		pdf.SetY(y + rowH)
	}
	if !detailed {
		return
	}
	// Detailed mode: one titled detail table per master account that has rows.
	pdf.Ln(1.8)
	for _, s := range summaries {
		rows := detailsByMaster[s.AnaCariKodu]
		if len(rows) == 0 {
			continue
		}
		if needPage(12.4) {
			header()
		}
		// "Detay: <master>" banner above the detail table.
		pdf.SetFont("dejavu", "B", 8)
		pdf.SetFillColor(218, 193, 151)
		pdf.SetTextColor(20, 20, 20)
		y := pdf.GetY()
		pdf.Rect(marginL, y, tableW, 6.2, "DF")
		pdf.SetXY(marginL+1.5, y+1)
		pdf.CellFormat(tableW-3, 4.2, "Detay: "+s.AnaCariKodu, "", 0, "L", false, 0, "")
		pdf.SetY(y + 6.2)
		drawDetailHeader()
		pdf.SetFont("dejavu", "", 7)
		pdf.SetTextColor(40, 40, 40)
		for _, r := range rows {
			line := []string{
				r.CariKodu,
				r.CariDetay,
				r.Sirket,
				r.MuhasebeKodu,
				r.CariDoviz,
				formatMoneyPDF(r.Bakiye12),
				formatMoneyPDF(r.Bakiye13),
				formatMoneyPDF(r.USDBakiye12),
				formatMoneyPDF(r.TLBakiye12),
				formatMoneyPDF(r.USDBakiye13),
				formatMoneyPDF(r.TLBakiye13),
			}
			if includeVadeColumns {
				line = append(line, formatDayUpPDF(r.VadeGun), formatDayUpPDF(r.VadeBelgeGun))
			}
			rowH := calcPDFRowHeight(pdf, line, detailW, map[int]bool{1: true}, 5.8, 3.3)
			if needPage(rowH) {
				// New page mid-table: repeat banner + detail header.
				header()
				pdf.SetFont("dejavu", "B", 8)
				pdf.SetFillColor(218, 193, 151)
				pdf.SetTextColor(20, 20, 20)
				y := pdf.GetY()
				pdf.Rect(marginL, y, tableW, 6.2, "DF")
				pdf.SetXY(marginL+1.5, y+1)
				pdf.CellFormat(tableW-3, 4.2, "Detay: "+s.AnaCariKodu, "", 0, "L", false, 0, "")
				pdf.SetY(y + 6.2)
				drawDetailHeader()
				pdf.SetFont("dejavu", "", 7)
				pdf.SetTextColor(40, 40, 40)
			}
			rowY := pdf.GetY()
			rowX := marginL
			for i, v := range line {
				if i >= len(detailW) {
					break
				}
				pdf.Rect(rowX, rowY, detailW[i], rowH, "")
				align := "L"
				if i >= 5 {
					align = "R"
				}
				if includeVadeColumns && (i == len(line)-1 || i == len(line)-2) {
					align = "C"
				}
				drawPDFCellWrapped(pdf, v, rowX, rowY, detailW[i], rowH, align, 3.3)
				rowX += detailW[i]
			}
			pdf.SetY(rowY + rowH)
		}
		pdf.Ln(1.2)
	}
}
// safeDrawCustomerBalancePDF runs drawCustomerBalancePDF and converts any
// panic raised while drawing into an ordinary error, letting the caller
// switch to the simpler fallback renderer instead of crashing the request.
// Arguments are forwarded unchanged.
func safeDrawCustomerBalancePDF(
	pdf *gofpdf.Fpdf,
	selectedDate string,
	searchText string,
	detailed bool,
	reportTitle string,
	includeVadeColumns bool,
	summaries []balanceSummaryPDF,
	detailsByMaster map[string][]models.CustomerBalanceListRow,
) (err error) {
	defer func() {
		// Recover only works directly inside this deferred func; wrap the
		// panic value so callers get a plain error.
		if p := recover(); p != nil {
			err = fmt.Errorf("draw panic: %v", p)
		}
	}()
	drawCustomerBalancePDF(pdf, selectedDate, searchText, detailed, reportTitle, includeVadeColumns, summaries, detailsByMaster)
	return nil
}
// drawCustomerBalancePDFFallback renders a simplified, fixed-row-height
// summary table of customer balances (no per-customer detail section).
// Column widths are normalized to a 281mm printable width; when
// includeVadeColumns is true two extra day-count columns are appended and
// the widths re-normalized.
//
// NOTE(review): the column header row is drawn only once — after a page
// break (AddPage below) rows continue without headers; confirm intended.
func drawCustomerBalancePDFFallback(
	pdf *gofpdf.Fpdf,
	selectedDate string,
	searchText string,
	reportTitle string,
	summaries []balanceSummaryPDF,
	includeVadeColumns bool,
) {
	pdf.AddPage()
	// Report title in the brand gold color.
	pdf.SetFont("dejavu", "B", 13)
	pdf.SetTextColor(149, 113, 22)
	pdf.CellFormat(0, 8, reportTitle, "", 1, "L", false, 0, "")
	pdf.SetFont("dejavu", "", 9)
	pdf.SetTextColor(20, 20, 20)
	pdf.CellFormat(0, 5, "Tarih: "+formatDateTR(selectedDate), "", 1, "L", false, 0, "")
	if strings.TrimSpace(searchText) != "" {
		pdf.CellFormat(0, 5, "Arama: "+searchText, "", 1, "L", false, 0, "")
	}
	pdf.Ln(1)
	header := []string{"Ana Cari Kod", "Ana Cari Detay", "Piyasa", "Temsilci", "Risk", "1_2 USD", "1_2 TRY", "1_3 USD", "1_3 TRY"}
	widths := normalizeWidths([]float64{18, 34, 12, 12, 12, 10, 10, 10, 10}, 281)
	if includeVadeColumns {
		header = append(header, "Vade Gun", "Belge Tarihi Gun")
		widths = normalizeWidths([]float64{17, 28, 10, 10, 10, 10, 10, 10, 10, 8, 10}, 281)
	}
	// Header row: white text on gold fill, 6mm tall.
	pdf.SetFont("dejavu", "B", 8)
	pdf.SetFillColor(149, 113, 22)
	pdf.SetTextColor(255, 255, 255)
	x, y := 8.0, pdf.GetY()
	for i, h := range header {
		pdf.Rect(x, y, widths[i], 6, "DF")
		pdf.SetXY(x+1, y+1)
		pdf.CellFormat(widths[i]-2, 4, h, "", 0, "C", false, 0, "")
		x += widths[i]
	}
	pdf.SetY(y + 6)
	pdf.SetFont("dejavu", "", 7.4)
	pdf.SetTextColor(20, 20, 20)
	// One 6mm row per summary; no text wrapping in the fallback layout.
	for _, s := range summaries {
		row := []string{
			s.AnaCariKodu,
			s.AnaCariAdi,
			s.Piyasa,
			s.Temsilci,
			s.RiskDurumu,
			formatMoneyPDF(s.USDBakiye12),
			formatMoneyPDF(s.TLBakiye12),
			formatMoneyPDF(s.USDBakiye13),
			formatMoneyPDF(s.TLBakiye13),
		}
		if includeVadeColumns {
			row = append(row, formatDayUpPDF(s.VadeGun), formatDayUpPDF(s.VadeBelge))
		}
		// Manual page break: 198mm leaves a bottom margin on landscape A4.
		if pdf.GetY()+6 > 198 {
			pdf.AddPage()
			pdf.SetY(8)
		}
		x = 8
		y = pdf.GetY()
		for i, v := range row {
			pdf.Rect(x, y, widths[i], 6, "")
			// NOTE(review): indices 2-4 (Piyasa/Temsilci/Risk) are text
			// columns yet right-aligned here; confirm i >= 2 is intended.
			align := "L"
			if i >= 2 {
				align = "R"
			}
			// Day-count columns (when present) are centered.
			if includeVadeColumns && (i == len(row)-1 || i == len(row)-2) {
				align = "C"
			}
			pdf.SetXY(x+1, y+1)
			pdf.CellFormat(widths[i]-2, 4, v, "", 0, align, false, 0, "")
			x += widths[i]
		}
		pdf.SetY(y + 6)
	}
}
// formatDateTR converts an ISO date ("2006-01-02") or an RFC3339 timestamp
// into the Turkish display format "02.01.2006". Unrecognized or empty input
// is returned unchanged after trimming.
func formatDateTR(v string) string {
	trimmed := strings.TrimSpace(v)
	if trimmed == "" {
		return trimmed
	}
	// Try the plain-date layout first, then a full timestamp.
	for _, layout := range []string{"2006-01-02", time.RFC3339} {
		if parsed, err := time.Parse(layout, trimmed); err == nil {
			return parsed.Format("02.01.2006")
		}
	}
	return trimmed
}
// calcPDFRowHeight returns the row height needed so that every wrappable
// cell (indices present in wrapIdx) fits its wrapped text: lineCount*lineH
// plus 2mm padding, never below minH.
func calcPDFRowHeight(pdf *gofpdf.Fpdf, row []string, widths []float64, wrapIdx map[int]bool, minH, lineH float64) float64 {
	tallest := 1
	for idx, cell := range row {
		// Only measure columns marked as wrapping, and only when the
		// column is wide enough to hold text after 2mm of padding.
		if !wrapIdx[idx] {
			continue
		}
		if idx >= len(widths) || widths[idx] <= 2 {
			continue
		}
		wrapped := safeSplitLinesPDF(pdf, strings.TrimSpace(cell), widths[idx]-2)
		if n := len(wrapped); n > tallest {
			tallest = n
		}
	}
	if h := float64(tallest)*lineH + 2; h >= minH {
		return h
	}
	return minH
}
// drawPDFCellWrapped renders value inside the cell rectangle (x, y, w, h),
// wrapping it into multiple lines, vertically centering the text block and
// truncating any line that is still too wide with a "..." suffix.
func drawPDFCellWrapped(pdf *gofpdf.Fpdf, value string, x, y, w, h float64, align string, lineH float64) {
	// Cells narrower than the 2mm padding (or with no height) show nothing.
	if w <= 2 || h <= 0 {
		return
	}
	wrapped := safeSplitLinesPDF(pdf, strings.TrimSpace(value), w-2)
	if len(wrapped) == 0 {
		wrapped = [][]byte{[]byte("")}
	}
	// Vertically center the block of lines, keeping a minimal top inset.
	cursorY := y + (h-float64(len(wrapped))*lineH)/2
	if minY := y + 0.7; cursorY < minY {
		cursorY = minY
	}
	for _, raw := range wrapped {
		pdf.SetXY(x+1, cursorY)
		pdf.CellFormat(w-2, lineH, fitTextWithSuffixPDF(pdf, string(raw), w-2, "..."), "", 0, align, false, 0, "")
		cursorY += lineH
	}
}
// safeSplitLinesPDF wraps gofpdf's SplitLines, which can panic on degenerate
// input; on panic the untouched text is returned as a single line. A
// non-positive width is clamped to 1 before splitting.
func safeSplitLinesPDF(pdf *gofpdf.Fpdf, text string, width float64) (lines [][]byte) {
	if width <= 0 {
		width = 1
	}
	defer func() {
		if recover() != nil {
			// Fall back to the whole text on one line.
			lines = [][]byte{[]byte(text)}
		}
	}()
	return pdf.SplitLines([]byte(text), width)
}
// calcPDFRowHeightCapped works like calcPDFRowHeight but limits each
// wrapping column to the per-column maximum line count in wrapMax, so a
// single very long cell cannot blow up the row height.
func calcPDFRowHeightCapped(pdf *gofpdf.Fpdf, row []string, widths []float64, wrapMax map[int]int, minH, lineH float64) float64 {
	tallest := 1
	for idx, cell := range row {
		maxForCol, wraps := wrapMax[idx]
		if !wraps || maxForCol <= 0 {
			continue
		}
		// Skip columns too narrow to hold text after 2mm of padding.
		if idx >= len(widths) || widths[idx] <= 2 {
			continue
		}
		n := len(safeSplitLinesPDF(pdf, strings.TrimSpace(cell), widths[idx]-2))
		if n > maxForCol {
			n = maxForCol
		}
		if n > tallest {
			tallest = n
		}
	}
	if h := float64(tallest)*lineH + 2; h >= minH {
		return h
	}
	return minH
}
// drawPDFCellWrappedCapped renders value like drawPDFCellWrapped but shows
// at most maxLines wrapped lines; when text is clipped, the final visible
// line is re-fitted so it ends with "...".
func drawPDFCellWrappedCapped(pdf *gofpdf.Fpdf, value string, x, y, w, h float64, align string, lineH float64, maxLines int) {
	if w <= 2 || h <= 0 {
		return
	}
	wrapped := safeSplitLinesPDF(pdf, strings.TrimSpace(value), w-2)
	if len(wrapped) == 0 {
		wrapped = [][]byte{[]byte("")}
	}
	// Drop overflow lines beyond the cap and mark the truncation on the
	// last surviving line.
	if maxLines > 0 && len(wrapped) > maxLines {
		wrapped = wrapped[:maxLines]
		tail := string(wrapped[maxLines-1])
		wrapped[maxLines-1] = []byte(fitTextWithSuffixPDF(pdf, tail, w-2, "..."))
	}
	// Vertically center the block, keeping a minimal top inset.
	cursorY := y + (h-float64(len(wrapped))*lineH)/2
	if minY := y + 0.7; cursorY < minY {
		cursorY = minY
	}
	for _, raw := range wrapped {
		pdf.SetXY(x+1, cursorY)
		pdf.CellFormat(w-2, lineH, fitTextWithSuffixPDF(pdf, string(raw), w-2, "..."), "", 0, align, false, 0, "")
		cursorY += lineH
	}
}
// fitTextWithSuffixPDF returns text unchanged when it fits within width
// (measured with the PDF's currently selected font via GetStringWidth);
// otherwise it trims trailing runes until the remainder plus the suffix
// (e.g. "...") fits, and returns the truncated form. If even one rune
// cannot fit next to the suffix, only the suffix is returned.
func fitTextWithSuffixPDF(pdf *gofpdf.Fpdf, text string, width float64, suffix string) string {
	txt := strings.TrimSpace(text)
	if txt == "" {
		// BUGFIX: empty input used to return the suffix, which made every
		// blank wrapped cell render as "..." in the output PDF. An empty
		// cell must stay empty.
		return ""
	}
	if pdf.GetStringWidth(txt) <= width {
		return txt
	}
	allowed := width - pdf.GetStringWidth(suffix)
	if allowed <= 0 {
		return suffix
	}
	// Trim whole runes (not bytes) so multi-byte characters (Turkish
	// ğ/ş/ı etc.) are never cut in half.
	runes := []rune(txt)
	for len(runes) > 0 && pdf.GetStringWidth(string(runes)) > allowed {
		runes = runes[:len(runes)-1]
	}
	if len(runes) == 0 {
		return suffix
	}
	return string(runes) + suffix
}
// formatDayUpPDF rounds a fractional day count up to the next whole day and
// renders it with thousands separators.
func formatDayUpPDF(v float64) string {
	days := int64(math.Ceil(v))
	return formatIntPDF(days)
}
// formatIntPDF renders an integer using "." as the thousands separator
// (Turkish convention), preserving a leading minus sign,
// e.g. -1234567 -> "-1.234.567".
func formatIntPDF(v int64) string {
	digits := strconv.FormatInt(v, 10)
	var b strings.Builder
	if digits[0] == '-' {
		b.WriteByte('-')
		digits = digits[1:]
	}
	// Insert a "." before every group of three trailing digits.
	for i := 0; i < len(digits); i++ {
		if i > 0 && (len(digits)-i)%3 == 0 {
			b.WriteByte('.')
		}
		b.WriteByte(digits[i])
	}
	return b.String()
}
// formatCurrencyMapPDF renders a currency->amount map as "CUR: amount"
// pairs joined with " | ", currencies in alphabetical order. Zero amounts
// are skipped; an empty or all-zero map renders as "-".
func formatCurrencyMapPDF(m map[string]float64) string {
	if len(m) == 0 {
		return "-"
	}
	currencies := make([]string, 0, len(m))
	for cur := range m {
		currencies = append(currencies, cur)
	}
	sort.Strings(currencies)
	pieces := make([]string, 0, len(currencies))
	for _, cur := range currencies {
		amount := m[cur]
		if amount == 0 {
			continue
		}
		pieces = append(pieces, cur+": "+formatMoneyPDF(amount))
	}
	if len(pieces) == 0 {
		return "-"
	}
	return strings.Join(pieces, " | ")
}
// formatMoneyPDF renders a float with two decimals in Turkish number
// format: "." groups thousands and "," separates the decimals,
// e.g. -1234.5 -> "-1.234,50".
func formatMoneyPDF(v float64) string {
	formatted := fmt.Sprintf("%.2f", v)
	intPart, decPart := formatted, "00"
	if dot := strings.IndexByte(formatted, '.'); dot >= 0 {
		intPart, decPart = formatted[:dot], formatted[dot+1:]
	}
	var b strings.Builder
	if strings.HasPrefix(intPart, "-") {
		b.WriteByte('-')
		intPart = intPart[1:]
	}
	// Insert a "." before every group of three trailing digits.
	for i := 0; i < len(intPart); i++ {
		if i > 0 && (len(intPart)-i)%3 == 0 {
			b.WriteByte('.')
		}
		b.WriteByte(intPart[i])
	}
	return b.String() + "," + decPart
}

View File

@@ -445,6 +445,12 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
return
}
payload.Code = strings.TrimSpace(payload.Code)
payload.FullName = strings.TrimSpace(payload.FullName)
payload.Email = strings.TrimSpace(payload.Email)
payload.Mobile = strings.TrimSpace(payload.Mobile)
payload.Address = strings.TrimSpace(payload.Address)
if payload.Code == "" {
http.Error(w, "Kullanıcı kodu zorunludur", http.StatusUnprocessableEntity)
return
@@ -458,18 +464,21 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
defer tx.Rollback()
var newID int64
log.Printf("DEBUG: UserCreateRoute payload=%+v", payload)
err = tx.QueryRow(`
INSERT INTO mk_dfusr (
code,
username,
is_active,
full_name,
email,
mobile,
address,
password_hash,
force_password_change,
last_updated_date
created_at,
updated_at
)
VALUES ($1,$2,$3,$4,$5,$6,true,NOW())
VALUES ($1,$2,$3,$4,$5,$6,'',true,NOW(),NOW())
RETURNING id
`,
payload.Code,
@@ -481,8 +490,8 @@ func UserCreateRoute(db *sql.DB) http.HandlerFunc {
).Scan(&newID)
if err != nil {
log.Println("USER INSERT ERROR:", err)
http.Error(w, "Kullanıcı oluşturulamadı", http.StatusInternalServerError)
log.Printf("USER INSERT ERROR code=%q email=%q err=%v", payload.Code, payload.Email, err)
http.Error(w, fmt.Sprintf("Kullanıcı oluşturulamadı: %v", err), http.StatusInternalServerError)
return
}

View File

@@ -0,0 +1,244 @@
package routes
import (
"bssapp-backend/models"
"bssapp-backend/queries"
"database/sql"
"encoding/json"
"net/http"
"sort"
"strconv"
"strings"
"github.com/gorilla/mux"
)
type MarketMailSavePayload struct {
MailIDs []string `json:"mail_ids"`
}
type MarketMailLookupResponse struct {
Markets []models.MarketOption `json:"markets"`
Mails []models.MailOption `json:"mails"`
}
// GetMarketMailMappingLookupsHandler returns the option lists used by the
// market/mail mapping screen: all active markets and all active mail
// addresses, encoded as one JSON object.
func GetMarketMailMappingLookupsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		markets := make([]models.MarketOption, 0, 64)
		mails := make([]models.MailOption, 0, 128)
		// Load active markets.
		marketRows, err := db.Query(queries.GetActiveMarketsForMapping)
		if err != nil {
			http.Error(w, "markets lookup error", http.StatusInternalServerError)
			return
		}
		defer marketRows.Close()
		for marketRows.Next() {
			var item models.MarketOption
			if err := marketRows.Scan(&item.ID, &item.Code, &item.Title); err != nil {
				http.Error(w, "markets scan error", http.StatusInternalServerError)
				return
			}
			markets = append(markets, item)
		}
		if err := marketRows.Err(); err != nil {
			http.Error(w, "markets rows error", http.StatusInternalServerError)
			return
		}
		// Load active mail addresses.
		mailRows, err := db.Query(queries.GetActiveMailsForMapping)
		if err != nil {
			http.Error(w, "mails lookup error", http.StatusInternalServerError)
			return
		}
		defer mailRows.Close()
		for mailRows.Next() {
			var item models.MailOption
			if err := mailRows.Scan(&item.ID, &item.Email, &item.DisplayName); err != nil {
				http.Error(w, "mails scan error", http.StatusInternalServerError)
				return
			}
			mails = append(mails, item)
		}
		if err := mailRows.Err(); err != nil {
			http.Error(w, "mails rows error", http.StatusInternalServerError)
			return
		}
		// Encode error deliberately ignored: headers are already sent.
		_ = json.NewEncoder(w).Encode(MarketMailLookupResponse{
			Markets: markets,
			Mails:   mails,
		})
	}
}
// GetMarketMailMappingsHandler returns one JSON entry per market with the
// mail addresses currently mapped to it. The query returns one row per
// (market, mail) pair — mail columns may be NULL, presumably from a LEFT
// JOIN so unmapped markets still appear; rows are grouped here into a
// single entry per market in first-seen order.
func GetMarketMailMappingsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		rows, err := db.Query(queries.GetMarketMailMappingRows)
		if err != nil {
			http.Error(w, "mapping query error", http.StatusInternalServerError)
			return
		}
		defer rows.Close()
		// byMarket groups pairs by market id; order preserves the order the
		// query returned markets in, so the response is deterministic.
		byMarket := make(map[int64]*models.MarketMailMappingRow, 64)
		order := make([]int64, 0, 64)
		for rows.Next() {
			var marketID int64
			var marketCode, marketTitle string
			// Nullable mail columns: a market with no mapping yields NULLs.
			var mailID sql.NullString
			var email sql.NullString
			var displayName sql.NullString
			if err := rows.Scan(
				&marketID,
				&marketCode,
				&marketTitle,
				&mailID,
				&email,
				&displayName,
			); err != nil {
				http.Error(w, "mapping scan error", http.StatusInternalServerError)
				return
			}
			row, ok := byMarket[marketID]
			if !ok {
				row = &models.MarketMailMappingRow{
					MarketID:    marketID,
					MarketCode:  marketCode,
					MarketTitle: marketTitle,
					MailIDs:     make([]string, 0, 8),
					Mails:       make([]models.MarketMailOption, 0, 8),
				}
				byMarket[marketID] = row
				order = append(order, marketID)
			}
			// Only attach a mail when the row actually carries one.
			if mailID.Valid && strings.TrimSpace(mailID.String) != "" {
				id := strings.TrimSpace(mailID.String)
				row.MailIDs = append(row.MailIDs, id)
				// Prefer the display name; fall back to the raw address.
				label := strings.TrimSpace(displayName.String)
				if label == "" {
					label = strings.TrimSpace(email.String)
				}
				row.Mails = append(row.Mails, models.MarketMailOption{
					ID:    id,
					Label: label,
				})
			}
		}
		if err := rows.Err(); err != nil {
			http.Error(w, "mapping rows error", http.StatusInternalServerError)
			return
		}
		// Flatten to a slice in first-seen market order.
		list := make([]models.MarketMailMappingRow, 0, len(order))
		for _, marketID := range order {
			list = append(list, *byMarket[marketID])
		}
		_ = json.NewEncoder(w).Encode(list)
	}
}
// SaveMarketMailMappingHandler replaces the full set of mail addresses
// mapped to one market (path variable "marketId"). The incoming mail IDs
// are trimmed, de-duplicated and sorted, each is validated against the
// active-mail table, and the mapping is then rewritten atomically in a
// transaction (delete all, re-insert).
//
// NOTE(review): each mail ID is validated with its own query (N+1); a
// single IN-list query would do — acceptable for the small lists expected.
func SaveMarketMailMappingHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		marketIDStr := mux.Vars(r)["marketId"]
		marketID, err := strconv.ParseInt(marketIDStr, 10, 64)
		if err != nil || marketID <= 0 {
			http.Error(w, "invalid market id", http.StatusBadRequest)
			return
		}
		var payload MarketMailSavePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		// The market itself must exist and be active.
		var marketExists bool
		if err := db.QueryRow(queries.ExistsActiveMarketByID, marketID).Scan(&marketExists); err != nil {
			http.Error(w, "market validate error", http.StatusInternalServerError)
			return
		}
		if !marketExists {
			http.Error(w, "market not found", http.StatusNotFound)
			return
		}
		// Validate every (normalized) mail ID before touching the mapping.
		mailIDs := normalizeIDList(payload.MailIDs)
		for _, mailID := range mailIDs {
			var mailExists bool
			if err := db.QueryRow(queries.ExistsActiveMailByID, mailID).Scan(&mailExists); err != nil {
				http.Error(w, "mail validate error", http.StatusInternalServerError)
				return
			}
			if !mailExists {
				http.Error(w, "mail not found: "+mailID, http.StatusBadRequest)
				return
			}
		}
		// Rewrite the mapping atomically: delete everything for the market,
		// then insert the new set. Rollback is a no-op after Commit.
		tx, err := db.Begin()
		if err != nil {
			http.Error(w, "transaction start error", http.StatusInternalServerError)
			return
		}
		defer tx.Rollback()
		if _, err := tx.Exec(queries.DeleteMarketMailsByMarketID, marketID); err != nil {
			http.Error(w, "mapping delete error", http.StatusInternalServerError)
			return
		}
		for _, mailID := range mailIDs {
			if _, err := tx.Exec(queries.InsertMarketMailMapping, marketID, mailID); err != nil {
				http.Error(w, "mapping insert error", http.StatusInternalServerError)
				return
			}
		}
		if err := tx.Commit(); err != nil {
			http.Error(w, "transaction commit error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":   true,
			"market_id": marketID,
			"mail_ids":  mailIDs,
		})
	}
}
// normalizeIDList trims each ID, drops empties and duplicates, and returns
// the surviving IDs sorted ascending, so the result is deterministic
// regardless of the order the client sent them in.
func normalizeIDList(ids []string) []string {
	unique := make(map[string]struct{}, len(ids))
	result := make([]string, 0, len(ids))
	for _, candidate := range ids {
		trimmed := strings.TrimSpace(candidate)
		if trimmed == "" {
			continue
		}
		if _, dup := unique[trimmed]; dup {
			continue
		}
		unique[trimmed] = struct{}{}
		result = append(result, trimmed)
	}
	sort.Strings(result)
	return result
}

View File

@@ -12,41 +12,29 @@ import (
func OrderListExcelRoute(db *sql.DB) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Cache-Control", "no-store")
// ======================
// PARAMS
// ======================
search := r.URL.Query().Get("search")
currAcc := r.URL.Query().Get("CurrAccCode")
orderDate := r.URL.Query().Get("OrderDate")
// ======================
// QUERY
// ======================
rows, err := queries.GetOrderListExcel(db, search, currAcc, orderDate)
if err != nil {
http.Error(w, err.Error(), 500)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
// ======================
// EXCEL INIT
// ======================
f := excelize.NewFile()
sheet := "Orders"
f.SetSheetName("Sheet1", sheet)
// ======================
// HEADERS
// ======================
headers := []string{
"Sipariş No",
"Siparis No",
"Tarih",
"Termin Tarihi",
"Cari Kod",
"Cari Adı",
"Cari Adi",
"Temsilci",
"Piyasa",
"PB",
@@ -55,8 +43,9 @@ func OrderListExcelRoute(db *sql.DB) http.Handler {
"Paketlenen Tutar",
"Paketlenen (USD)",
"Paketlenme (%)",
"Uretim",
"USD Kur",
"Açıklama",
"Aciklama",
}
for i, h := range headers {
@@ -64,16 +53,10 @@ func OrderListExcelRoute(db *sql.DB) http.Handler {
f.SetCellValue(sheet, cell, h)
}
// ======================
// ROWS
// ======================
row := 2
for rows.Next() {
// 🔴 15 KOLON = 15 DEĞİŞKEN
var (
id, no, date, code, name string
id, no, date, termin, code, name string
rep, piyasa, cur string
total float64
@@ -81,41 +64,44 @@ func OrderListExcelRoute(db *sql.DB) http.Handler {
packedAmount float64
packedUSD float64
packedRatePct float64
usdRate float64
hasUretim bool
desc string
usdRate float64
)
// 🔴 SELECT SIRASIYLA BİREBİR
err := rows.Scan(
&id, // 1
&no, // 2
&date, // 3
&code, // 4
&name, // 5
&rep, // 6
&piyasa, // 7
&cur, // 8
&total, // 9
&totalUSD, // 10
&packedAmount, // 11
&packedUSD, // 12
&packedRatePct, // 13
&desc, // 14
&usdRate, // 15
&termin, // 4
&code, // 5
&name, // 6
&rep, // 7
&piyasa, // 8
&cur, // 9
&total, // 10
&totalUSD, // 11
&packedAmount, // 12
&packedUSD, // 13
&packedRatePct, // 14
&hasUretim, // 15
&desc, // 16
&usdRate, // 17
)
if err != nil {
http.Error(w, "Scan error: "+err.Error(), 500)
http.Error(w, "Scan error: "+err.Error(), http.StatusInternalServerError)
return
}
// ======================
// WRITE ROW
// ======================
uretim := ""
if hasUretim {
uretim = "VAR"
}
f.SetSheetRow(sheet, fmt.Sprintf("A%d", row), &[]any{
no,
date,
termin,
code,
name,
rep,
@@ -126,6 +112,7 @@ func OrderListExcelRoute(db *sql.DB) http.Handler {
packedAmount,
packedUSD,
packedRatePct,
uretim,
usdRate,
desc,
})
@@ -133,38 +120,17 @@ func OrderListExcelRoute(db *sql.DB) http.Handler {
row++
}
// ======================
// BUFFER WRITE
// ======================
buf, err := f.WriteToBuffer()
if err != nil {
http.Error(w, err.Error(), 500)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
filename := fmt.Sprintf(
"siparis_listesi_%s.xlsx",
time.Now().Format("20060102_150405"),
)
// ======================
// RESPONSE
// ======================
w.Header().Set(
"Content-Type",
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
)
w.Header().Set(
"Content-Disposition",
"attachment; filename=\""+filename+"\"",
)
w.Header().Set(
"Content-Length",
fmt.Sprint(len(buf.Bytes())),
)
filename := fmt.Sprintf("siparis_listesi_%s.xlsx", time.Now().Format("20060102_150405"))
w.Header().Set("Content-Type", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")
w.Header().Set("Content-Length", fmt.Sprint(len(buf.Bytes())))
w.WriteHeader(http.StatusOK)
_, _ = w.Write(buf.Bytes())
})

View File

@@ -0,0 +1,378 @@
package routes
import (
"bssapp-backend/queries"
"bytes"
"database/sql"
"fmt"
"net/http"
"sort"
"strconv"
"strings"
"time"
"github.com/jung-kurt/gofpdf"
)
// orderListPDFRow is one order flattened for PDF rendering. String fields
// arrive pre-formatted from the shared order-list query.
type orderListPDFRow struct {
	OrderNumber        string
	OrderDate          string
	TerminTarihi       string // requested delivery ("termin") date
	CurrAccCode        string // customer account code
	CurrAccDescription string // customer display name
	MusteriTemsilcisi  string // customer representative
	Piyasa             string // market
	DocCurrencyCode    string // document currency (PB column)
	TotalAmount        float64 // total in document currency
	TotalAmountUSD     float64
	PackedUSD          float64
	PackedRatePct      float64 // packed ratio — assumed 0-100 percent; TODO confirm scale
	HasUretimUrunu     bool // order still has an item to be sent to production
	Description        string
}

// pdfColumn describes one table column of the order-list PDF: caption,
// width in millimetres, alignment and wrapping behavior.
type pdfColumn struct {
	Header   string
	Width    float64 // column width in mm
	Align    string  // "L", "C" or "R"
	Wrap     bool    // wrap long text across lines instead of overflowing
	MaxLines int     // maximum wrapped lines shown when Wrap is true
}
// OrderListPDFRoute streams the order list as a landscape A4 PDF. It accepts
// the same filter query params as the Excel export (search, CurrAccCode,
// OrderDate) plus sort_by / descending for ordering.
func OrderListPDFRoute(db *sql.DB) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		search := strings.TrimSpace(r.URL.Query().Get("search"))
		currAcc := strings.TrimSpace(r.URL.Query().Get("CurrAccCode"))
		orderDate := strings.TrimSpace(r.URL.Query().Get("OrderDate"))
		sortBy := strings.TrimSpace(r.URL.Query().Get("sort_by"))
		descending := parseBool(r.URL.Query().Get("descending"))
		// Reuse the Excel query so both exports always show the same data.
		rows, err := queries.GetOrderListExcel(db, search, currAcc, orderDate)
		if err != nil {
			http.Error(w, "db error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		defer rows.Close()
		out := make([]orderListPDFRow, 0, 256)
		for rows.Next() {
			var (
				id, no, date, termin, code, name string
				rep, piyasa, cur                 string
				total                            float64
				totalUSD                         float64
				packedAmount                     float64
				packedUSD                        float64
				packedRatePct                    float64
				hasUretim                        bool
				desc                             string
				usdRate                          float64
			)
			// Scan order must match the shared query's SELECT column order.
			if err := rows.Scan(
				&id, &no, &date, &termin, &code, &name, &rep, &piyasa, &cur,
				&total, &totalUSD, &packedAmount, &packedUSD, &packedRatePct,
				&hasUretim, &desc, &usdRate,
			); err != nil {
				http.Error(w, "scan error: "+err.Error(), http.StatusInternalServerError)
				return
			}
			// These columns exist for the Excel export but are not shown
			// in the PDF.
			_ = id
			_ = packedAmount
			_ = usdRate
			out = append(out, orderListPDFRow{
				OrderNumber:        no,
				OrderDate:          date,
				TerminTarihi:       termin,
				CurrAccCode:        code,
				CurrAccDescription: name,
				MusteriTemsilcisi:  rep,
				Piyasa:             piyasa,
				DocCurrencyCode:    cur,
				TotalAmount:        total,
				TotalAmountUSD:     totalUSD,
				PackedUSD:          packedUSD,
				PackedRatePct:      packedRatePct,
				HasUretimUrunu:     hasUretim,
				Description:        desc,
			})
		}
		// NOTE(review): rows.Err() is not checked after the loop; a
		// mid-stream error would silently truncate the report.
		applyOrderListSort(out, sortBy, descending)
		pdf := gofpdf.New("L", "mm", "A4", "") // landscape A4, mm units
		pdf.SetMargins(8, 8, 8)
		// Page breaks are handled manually per row in drawOrderListPDF.
		pdf.SetAutoPageBreak(false, 10)
		if err := registerDejavuFonts(pdf, "dejavu"); err != nil {
			http.Error(w, "pdf font error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		drawOrderListPDF(pdf, out, search, currAcc, orderDate, sortBy, descending)
		if err := pdf.Error(); err != nil {
			http.Error(w, "pdf render error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		var buf bytes.Buffer
		if err := pdf.Output(&buf); err != nil {
			http.Error(w, "pdf output error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/pdf")
		w.Header().Set("Content-Disposition", "inline; filename=\"siparis-listesi.pdf\"")
		_, _ = w.Write(buf.Bytes())
	})
}
// drawOrderListPDF renders the order table: a titled header with the active
// filters, a column-header row, then one variable-height row per order.
// Wrappable columns are clipped at their MaxLines (without an ellipsis);
// page breaks redraw the full page header.
func drawOrderListPDF(
	pdf *gofpdf.Fpdf,
	rows []orderListPDFRow,
	search, currAcc, orderDate, sortBy string,
	desc bool,
) {
	// Column layout; widths are in millimetres and must sum to at most the
	// printable width of landscape A4 (297 - 16mm margins).
	cols := []pdfColumn{
		{Header: "Siparis No", Width: 20, Align: "L", Wrap: false, MaxLines: 1},
		{Header: "Tarih", Width: 12, Align: "C", Wrap: false, MaxLines: 1},
		{Header: "Termin", Width: 12, Align: "C", Wrap: false, MaxLines: 1},
		{Header: "Cari Kod", Width: 15, Align: "L", Wrap: true, MaxLines: 2},
		{Header: "Cari Adi", Width: 27, Align: "L", Wrap: true, MaxLines: 3},
		{Header: "Temsilci", Width: 18, Align: "L", Wrap: true, MaxLines: 2},
		{Header: "Piyasa", Width: 13, Align: "L", Wrap: true, MaxLines: 2},
		{Header: "PB", Width: 8, Align: "C", Wrap: false, MaxLines: 1},
		{Header: "Tutar", Width: 15, Align: "R", Wrap: false, MaxLines: 1},
		{Header: "USD", Width: 15, Align: "R", Wrap: false, MaxLines: 1},
		{Header: "Paket USD", Width: 15, Align: "R", Wrap: false, MaxLines: 1},
		{Header: "%", Width: 8, Align: "R", Wrap: false, MaxLines: 1},
		{Header: "Uretim", Width: 24, Align: "L", Wrap: true, MaxLines: 3},
		{Header: "Aciklama", Width: 52, Align: "L", Wrap: true, MaxLines: 3},
	}
	pageW, pageH := pdf.GetPageSize()
	marginL, marginT, marginR := 8.0, 8.0, 8.0
	bottomLimit := pageH - 10.0 // stop drawing rows below this Y
	lineH := 3.8                // height of one wrapped text line, mm
	cellPadX := 1.1
	cellPadY := 0.6
	// drawHeader starts a new page with title, filter summary and the
	// column-header row; also resets the body font.
	drawHeader := func() {
		pdf.AddPage()
		pdf.SetFont("dejavu", "B", 12)
		pdf.CellFormat(0, 7, "Siparis Listesi", "", 1, "L", false, 0, "")
		pdf.SetFont("dejavu", "", 8)
		sortText := "TotalAmountUSD DESC (varsayilan)"
		if sortBy != "" {
			dir := "ASC"
			if desc {
				dir = "DESC"
			}
			sortText = fmt.Sprintf("%s %s", sortBy, dir)
		}
		filterLine := fmt.Sprintf(
			"Filtreler -> Arama: %s | Cari: %s | Siparis Tarihi: %s | Siralama: %s | Olusturma: %s",
			emptyDash(search),
			emptyDash(currAcc),
			emptyDash(orderDate),
			sortText,
			time.Now().Format("2006-01-02 15:04"),
		)
		pdf.CellFormat(0, 5, filterLine, "", 1, "L", false, 0, "")
		pdf.Ln(1)
		pdf.SetFont("dejavu", "B", 8)
		pdf.SetFillColor(240, 240, 240)
		for _, c := range cols {
			pdf.CellFormat(c.Width, 6, c.Header, "1", 0, "C", true, 0, "")
		}
		pdf.Ln(-1)
		pdf.SetFont("dejavu", "", 7)
	}
	// buildCells flattens one row into display strings, one per column.
	buildCells := func(row orderListPDFRow) []string {
		uretim := ""
		if row.HasUretimUrunu {
			uretim = "Uretime Verilecek Urunu Var"
		}
		return []string{
			row.OrderNumber,
			row.OrderDate,
			row.TerminTarihi,
			row.CurrAccCode,
			row.CurrAccDescription,
			row.MusteriTemsilcisi,
			row.Piyasa,
			row.DocCurrencyCode,
			fmt.Sprintf("%.2f", row.TotalAmount),
			fmt.Sprintf("%.2f", row.TotalAmountUSD),
			fmt.Sprintf("%.2f", row.PackedUSD),
			fmt.Sprintf("%.2f", row.PackedRatePct),
			uretim,
			row.Description,
		}
	}
	// calcLineCount returns how many lines the text needs in this column,
	// capped at the column's MaxLines; non-wrapping columns are always 1.
	calcLineCount := func(col pdfColumn, text string) int {
		if !col.Wrap {
			return 1
		}
		lines := pdf.SplitLines([]byte(strings.TrimSpace(text)), col.Width-(cellPadX*2))
		n := len(lines)
		if n < 1 {
			n = 1
		}
		if col.MaxLines > 0 && n > col.MaxLines {
			n = col.MaxLines
		}
		return n
	}
	// drawWrappedCell draws the cell border and up to maxLines wrapped
	// lines top-aligned inside it. Overflow is clipped silently (no "...").
	drawWrappedCell := func(x, y, w, h float64, text, align string, maxLines int) {
		pdf.Rect(x, y, w, h, "D")
		lines := pdf.SplitLines([]byte(strings.TrimSpace(text)), w-(cellPadX*2))
		if len(lines) == 0 {
			lines = [][]byte{[]byte("")}
		}
		if maxLines > 0 && len(lines) > maxLines {
			lines = lines[:maxLines]
		}
		ty := y + cellPadY + 2.8
		for _, ln := range lines {
			pdf.SetXY(x+cellPadX, ty-2.8)
			pdf.CellFormat(w-(cellPadX*2), 3.8, string(ln), "", 0, align, false, 0, "")
			ty += 3.8
		}
	}
	drawHeader()
	for _, row := range rows {
		cells := buildCells(row)
		// Row height is driven by the tallest wrapped cell.
		maxLines := 1
		for i, c := range cols {
			n := calcLineCount(c, cells[i])
			if n > maxLines {
				maxLines = n
			}
		}
		rowH := (float64(maxLines) * lineH) + (cellPadY * 2)
		// Manual page break: start a fresh page (with headers) when the
		// row would cross the bottom limit.
		if pdf.GetY()+rowH > bottomLimit {
			drawHeader()
		}
		x := marginL
		y := pdf.GetY()
		for i, c := range cols {
			if c.Wrap {
				drawWrappedCell(x, y, c.Width, rowH, cells[i], c.Align, c.MaxLines)
			} else {
				pdf.SetXY(x, y)
				pdf.CellFormat(c.Width, rowH, cells[i], "1", 0, c.Align, false, 0, "")
			}
			x += c.Width
		}
		pdf.SetXY(marginL, y+rowH)
	}
	// Unused layout values kept for symmetry with the other renderers.
	_ = pageW
	_ = marginT
	_ = marginR
}
// applyOrderListSort sorts rows in place by the requested column. An empty
// sortBy falls back to TotalAmountUSD descending (the UI default); unknown
// column names also sort by the USD total. Text comparisons are
// case-insensitive after trimming; the sort is stable, so equal keys keep
// their original relative order.
func applyOrderListSort(rows []orderListPDFRow, sortBy string, desc bool) {
	if len(rows) <= 1 {
		return
	}
	field := strings.TrimSpace(sortBy)
	if field == "" {
		// UI default: biggest USD totals first.
		field = "TotalAmountUSD"
		desc = true
	}
	norm := func(s string) string { return strings.ToLower(strings.TrimSpace(s)) }
	cmpText := func(a, b string) int { return strings.Compare(norm(a), norm(b)) }
	cmpNum := func(a, b float64) int {
		switch {
		case a < b:
			return -1
		case a > b:
			return 1
		default:
			return 0
		}
	}
	cmpBool := func(a, b bool) int {
		toInt := func(v bool) int {
			if v {
				return 1
			}
			return 0
		}
		return toInt(a) - toInt(b)
	}
	sort.SliceStable(rows, func(i, j int) bool {
		a, b := rows[i], rows[j]
		var c int
		switch field {
		case "OrderNumber":
			c = cmpText(a.OrderNumber, b.OrderNumber)
		case "OrderDate":
			c = cmpText(a.OrderDate, b.OrderDate)
		case "TerminTarihi":
			c = cmpText(a.TerminTarihi, b.TerminTarihi)
		case "CurrAccCode":
			c = cmpText(a.CurrAccCode, b.CurrAccCode)
		case "CurrAccDescription":
			c = cmpText(a.CurrAccDescription, b.CurrAccDescription)
		case "MusteriTemsilcisi":
			c = cmpText(a.MusteriTemsilcisi, b.MusteriTemsilcisi)
		case "Piyasa":
			c = cmpText(a.Piyasa, b.Piyasa)
		case "DocCurrencyCode":
			c = cmpText(a.DocCurrencyCode, b.DocCurrencyCode)
		case "TotalAmount":
			c = cmpNum(a.TotalAmount, b.TotalAmount)
		case "PackedUSD":
			c = cmpNum(a.PackedUSD, b.PackedUSD)
		case "PackedRatePct":
			c = cmpNum(a.PackedRatePct, b.PackedRatePct)
		case "HasUretimUrunu":
			c = cmpBool(a.HasUretimUrunu, b.HasUretimUrunu)
		case "Description":
			c = cmpText(a.Description, b.Description)
		default:
			// Unknown fields (and "TotalAmountUSD" itself) use the USD total.
			c = cmpNum(a.TotalAmountUSD, b.TotalAmountUSD)
		}
		if desc {
			return c > 0
		}
		return c < 0
	})
}
// emptyDash substitutes "-" for blank (or whitespace-only) values so report
// headers never show an empty field; non-blank input is returned untrimmed.
func emptyDash(v string) string {
	if strings.TrimSpace(v) != "" {
		return v
	}
	return "-"
}
// parseBool interprets a query-string flag. It accepts everything
// strconv.ParseBool does ("1", "t", "true", ...) plus the HTML-form
// spellings "yes" and "on" (case-insensitive, whitespace-tolerant);
// anything else — including empty input — is false.
func parseBool(v string) bool {
	trimmed := strings.TrimSpace(v)
	if parsed, err := strconv.ParseBool(trimmed); err == nil {
		return parsed
	}
	switch strings.ToLower(trimmed) {
	case "1", "yes", "on":
		return true
	}
	return false
}

480
svc/routes/order_mail.go Normal file
View File

@@ -0,0 +1,480 @@
package routes
import (
"bytes"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"net/http"
"strings"
"bssapp-backend/auth"
"bssapp-backend/internal/mailer"
)
// sendOrderMarketMailPayload is the JSON body of the order-mail endpoint.
// The frontend reports what changed so the mail subject and body can
// describe the operation.
type sendOrderMarketMailPayload struct {
	OrderHeaderID   string                       `json:"orderHeaderID"`   // V3 order header ID (required)
	Operation       string                       `json:"operation"`       // "update" marks an edit; anything else is treated as a create
	DeletedItems    []string                     `json:"deletedItems"`    // item codes removed (listed for updates)
	UpdatedItems    []string                     `json:"updatedItems"`    // item codes changed (listed for updates)
	AddedItems      []string                     `json:"addedItems"`      // item codes added (listed for updates)
	OldDueDate      string                       `json:"oldDueDate"`      // header-level termin before the edit
	NewDueDate      string                       `json:"newDueDate"`      // header-level termin after the edit
	ExtraRecipients []string                     `json:"extraRecipients"` // addresses added on top of the market mapping
	DueDateChanges  []sendOrderMailDueDateChange `json:"dueDateChanges"`  // per-line termin changes
}

// sendOrderMailDueDateChange describes one order line whose due date
// ("termin") changed.
type sendOrderMailDueDateChange struct {
	ItemCode     string `json:"itemCode"`
	ColorCode    string `json:"colorCode"`
	ItemDim2Code string `json:"itemDim2Code"` // second item dimension — presumably size; TODO confirm
	OldDueDate   string `json:"oldDueDate"`
	NewDueDate   string `json:"newDueDate"`
}
// SendOrderMarketMailHandler e-mails the order PDF to every address mapped
// to the customer's market, plus any extra recipients in the payload. The
// subject and body describe what happened (create / update / due-date
// change) based on what the frontend reports.
//
// pg is the application Postgres DB (market/mail mappings), mssql the V3
// ERP DB (order context), ml the Graph mailer.
//
// BUGFIX: the "Termin Değişiklikleri" table heading was stored as
// double-encoded UTF-8 mojibake ("Termin DeÄŸiÅŸiklikleri") and rendered
// garbled in the outgoing mail; the literal is now correct UTF-8.
func SendOrderMarketMailHandler(pg *sql.DB, mssql *sql.DB, ml *mailer.GraphMailer) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		// Fail fast on missing infrastructure.
		if ml == nil {
			http.Error(w, "mailer not initialized", http.StatusServiceUnavailable)
			return
		}
		if pg == nil || mssql == nil {
			http.Error(w, "database not initialized", http.StatusInternalServerError)
			return
		}
		// The acting user (for the subject line) comes from the JWT claims.
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		var payload sendOrderMarketMailPayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		orderID := strings.TrimSpace(payload.OrderHeaderID)
		if orderID == "" {
			http.Error(w, "orderHeaderID is required", http.StatusBadRequest)
			return
		}
		// Resolve order number, customer and market from the ERP.
		orderNo, currAccCode, marketCode, marketTitle, err := resolveOrderMailContext(mssql, orderID)
		if err != nil {
			http.Error(w, "order context error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		if strings.TrimSpace(marketCode) == "" && strings.TrimSpace(marketTitle) == "" {
			http.Error(w, "market not found for order/cari", http.StatusBadRequest)
			return
		}
		// Collect recipients: market mapping plus any extras from payload.
		recipients, err := loadMarketRecipients(pg, marketCode, marketTitle)
		if err != nil {
			http.Error(w, "recipient query error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		recipients = appendUniqueRecipients(recipients, payload.ExtraRecipients...)
		if len(recipients) == 0 {
			http.Error(w, "no active email mapping for market", http.StatusBadRequest)
			return
		}
		// Render the order PDF that will be attached.
		pdfBytes, header, err := buildOrderPDFBytesForMail(mssql, pg, orderID)
		if err != nil {
			http.Error(w, "pdf build error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		// Best available order number: ERP context, then PDF header, then ID.
		number := strings.TrimSpace(orderNo)
		if number == "" && header != nil {
			number = strings.TrimSpace(header.OrderNumber)
		}
		if number == "" {
			number = orderID
		}
		marketLabel := strings.TrimSpace(marketTitle)
		if marketLabel == "" {
			marketLabel = strings.TrimSpace(marketCode)
		}
		actor := strings.TrimSpace(claims.Username)
		if actor == "" {
			actor = strings.TrimSpace(claims.V3Username)
		}
		if actor == "" {
			actor = "Bilinmeyen Kullanici"
		}
		// Pick the subject wording: create / update / termin update.
		op := strings.ToLower(strings.TrimSpace(payload.Operation))
		isUpdate := op == "update"
		subjectAction := "SİPARİŞ KAYDI OLUŞTURULDU"
		if isUpdate {
			subjectAction = "SİPARİŞ GÜNCELLENDİ."
		}
		if payload.NewDueDate != "" && payload.OldDueDate != payload.NewDueDate {
			subjectAction = "SİPARİŞ TERMİNİ GÜNCELLENDİ."
		}
		if isUpdate && subjectAction == "SİPARİŞ GÜNCELLENDİ." {
			// Satır bazlı termin kontrolü
			for _, item := range payload.UpdatedItems {
				if strings.Contains(item, "Termin:") {
					subjectAction = "SİPARİŞ TERMİNİ GÜNCELLENDİ."
					break
				}
			}
		}
		subject := fmt.Sprintf("%s kullanıcısı tarafından %s %s", actor, number, subjectAction)
		cariDetail := ""
		customerRep := ""
		if header != nil {
			cariDetail = strings.TrimSpace(header.CurrAccName)
			customerRep = strings.TrimSpace(header.CustomerRep)
		}
		// Assemble the HTML body; all dynamic values are HTML-escaped.
		body := make([]string, 0, 12)
		body = append(body,
			`<p>`,
			fmt.Sprintf(`<b>Cari Kodu:</b> %s<br/>`, htmlEsc(currAccCode)),
			fmt.Sprintf(`<b>Cari Detay:</b> %s<br/>`, htmlEsc(cariDetail)),
			fmt.Sprintf(`<b>Müşteri Temsilcisi:</b> %s<br/>`, htmlEsc(customerRep)),
			fmt.Sprintf(`<b>Piyasa:</b> %s`, htmlEsc(marketLabel)),
			`</p>`,
		)
		if payload.NewDueDate != "" && payload.OldDueDate != payload.NewDueDate {
			body = append(body,
				fmt.Sprintf(`<p><b>Termin Değişikliği:</b> %s &rarr; <b style="color:red">%s</b></p>`,
					htmlEsc(payload.OldDueDate), htmlEsc(payload.NewDueDate)),
			)
		}
		if isUpdate {
			body = append(body,
				renderItemListHTML("Silinen Ürün Kodları", payload.DeletedItems),
				renderItemListHTML("Güncellenen Ürün Kodları", payload.UpdatedItems),
				renderItemListHTML("Eklenen Ürün Kodları", payload.AddedItems),
			)
		}
		body = append(body, `<p><i>Bu sipariş BaggiSS App Uygulamasından oluşturulmuştur.</i></p>`)
		body = append(body, `<p>PDF ektedir.</p>`)
		// Per-line termin changes, if any, go in as a table. (Heading was
		// previously mojibake — fixed.)
		if dueDateTableHTML := renderDueDateChangesTableHTML("Termin Değişiklikleri", payload.DueDateChanges); dueDateTableHTML != "" {
			body = append(body, dueDateTableHTML)
		}
		bodyHTML := strings.Join(body, "\n")
		fileNo := sanitizeFileName(number)
		if fileNo == "" {
			fileNo = orderID
		}
		msg := mailer.Message{
			To:       recipients,
			Subject:  subject,
			BodyHTML: bodyHTML,
			Attachments: []mailer.Attachment{
				{
					FileName:    "ORDER_" + fileNo + ".pdf",
					ContentType: "application/pdf",
					Data:        pdfBytes,
				},
			},
		}
		if err := ml.Send(context.Background(), msg); err != nil {
			http.Error(w, "mail send error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":       true,
			"orderHeaderID": orderID,
			"orderNumber":   number,
			"marketCode":    marketCode,
			"marketTitle":   marketTitle,
			"recipients":    recipients,
			"sentCount":     len(recipients),
		})
	}
}
// resolveOrderMailContext looks up the order number, customer account code and
// market (code + Turkish title) for the order identified by orderID. All four
// returned strings are whitespace-trimmed; columns missing in the joins come
// back as "" thanks to the ISNULL wrappers in the query.
func resolveOrderMailContext(db *sql.DB, orderID string) (orderNo, currAccCode, marketCode, marketTitle string, err error) {
	const q = `
SELECT TOP (1)
    ISNULL(h.OrderNumber, ''),
    ISNULL(h.CurrAccCode, ''),
    ISNULL(LTRIM(RTRIM(f.CustomerAtt01)), '') AS MarketCode,
    ISNULL(py.AttributeDescription, '') AS MarketTitle
FROM BAGGI_V3.dbo.trOrderHeader h WITH (NOLOCK)
LEFT JOIN BAGGI_V3.dbo.CustomerAttributesFilter f WITH (NOLOCK)
    ON f.CurrAccCode = h.CurrAccCode
LEFT JOIN BAGGI_V3.dbo.cdCurrAccAttributeDesc py WITH (NOLOCK)
    ON py.CurrAccTypeCode = h.CurrAccTypeCode
    AND py.AttributeTypeCode = 1
    AND py.AttributeCode = f.CustomerAtt01
    AND py.LangCode = 'TR'
WHERE CAST(h.OrderHeaderID AS varchar(36)) = @p1
`
	// Scan straight into the named results, then trim them in one pass.
	if err = db.QueryRow(q, orderID).Scan(&orderNo, &currAccCode, &marketCode, &marketTitle); err != nil {
		return "", "", "", "", err
	}
	for _, p := range []*string{&orderNo, &currAccCode, &marketCode, &marketTitle} {
		*p = strings.TrimSpace(*p)
	}
	return orderNo, currAccCode, marketCode, marketTitle, nil
}
// loadMarketRecipients returns the distinct, active e-mail addresses configured
// for a market in Postgres. The market is matched case-insensitively either by
// code or — when marketTitle is non-empty — by title. Blank addresses are
// skipped; ordering comes from the query's ORDER BY email.
func loadMarketRecipients(pg *sql.DB, marketCode, marketTitle string) ([]string, error) {
	const q = `
SELECT DISTINCT TRIM(m.email) AS email
FROM mk_sales_piy p
JOIN mk_market_mail mm
ON mm.market_id = p.id
JOIN mk_mail m
ON m.id = mm.mail_id
WHERE p.is_active = true
AND m.is_active = true
AND (
UPPER(TRIM(p.code)) = UPPER(TRIM($1))
OR ($2 <> '' AND UPPER(TRIM(p.title)) = UPPER(TRIM($2)))
)
AND COALESCE(TRIM(m.email), '') <> ''
ORDER BY email
`
	rows, err := pg.Query(q, strings.TrimSpace(marketCode), strings.TrimSpace(marketTitle))
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	recipients := make([]string, 0, 8)
	for rows.Next() {
		var addr string
		if scanErr := rows.Scan(&addr); scanErr != nil {
			return nil, scanErr
		}
		// Defensive trim even though the query already TRIMs.
		if addr = strings.TrimSpace(addr); addr != "" {
			recipients = append(recipients, addr)
		}
	}
	if iterErr := rows.Err(); iterErr != nil {
		return nil, iterErr
	}
	return recipients, nil
}
// appendUniqueRecipients merges base and extras into a single recipient list.
// Each address is lower-cased and whitespace-trimmed; empty entries are
// dropped and only the first occurrence of an address is kept (base addresses
// therefore come before any duplicates from extras). First-appearance order
// is preserved.
func appendUniqueRecipients(base []string, extras ...string) []string {
	seen := make(map[string]struct{}, len(base)+len(extras))
	out := make([]string, 0, len(base)+len(extras))
	// add normalizes one raw address and appends it unless blank or seen;
	// shared by both loops so the dedup rules cannot drift apart.
	add := func(raw string) {
		mail := strings.ToLower(strings.TrimSpace(raw))
		if mail == "" {
			return
		}
		if _, ok := seen[mail]; ok {
			return
		}
		seen[mail] = struct{}{}
		out = append(out, mail)
	}
	for _, raw := range base {
		add(raw)
	}
	for _, raw := range extras {
		add(raw)
	}
	return out
}
// buildOrderPDFBytesForMail loads an order (header + lines) from MSSQL,
// normalizes the lines into PDF grid rows using the product-size-match data
// from Postgres, renders the grid and returns the PDF bytes together with the
// loaded header (so the caller can reuse header fields in the mail body).
// Errors from either database, a nil pgDB, or PDF generation abort the build.
func buildOrderPDFBytesForMail(db *sql.DB, pgDB *sql.DB, orderID string) ([]byte, *OrderHeader, error) {
	header, err := getOrderHeaderFromDB(db, orderID)
	if err != nil {
		return nil, nil, err
	}
	lines, err := getOrderLinesFromDB(db, orderID)
	if err != nil {
		return nil, nil, err
	}
	// VAT: the grid needs only a has/has-not flag plus one representative
	// rate, so take the first positive rate found and stop scanning.
	hasVat := false
	var vatRate float64
	for _, l := range lines {
		if l.VatRate.Valid && l.VatRate.Float64 > 0 {
			hasVat = true
			vatRate = l.VatRate.Float64
			break
		}
	}
	if pgDB == nil {
		return nil, nil, errors.New("product-size-match db not initialized")
	}
	sizeMatchData, err := loadProductSizeMatchData(pgDB)
	if err != nil {
		return nil, nil, err
	}
	rows := normalizeOrderLinesForPdf(lines, sizeMatchData)
	// Fallback categorization for rows the normalizer left without a category.
	for i := range rows {
		if strings.TrimSpace(rows[i].Category) != "" {
			continue
		}
		// Collect the row's size labels; map order is irrelevant to detection.
		bedenList := make([]string, 0, len(rows[i].SizeQty))
		for s := range rows[i].SizeQty {
			bedenList = append(bedenList, s)
		}
		// NOTE(review): YetiskinGarson is passed for both the urunKategori and
		// yetiskinGarson parameters; this mirrors the other call sites of
		// detectBedenGroupGo, but confirm it is intentional.
		rows[i].Category = detectBedenGroupGo(
			sizeMatchData,
			bedenList,
			rows[i].GroupMain,
			rows[i].GroupSub,
			rows[i].YetiskinGarson,
			rows[i].YetiskinGarson,
		)
		if strings.TrimSpace(rows[i].Category) == "" {
			// Last-resort bucket when detection also comes back empty.
			rows[i].Category = catTak
		}
	}
	pdf, err := newOrderPdf()
	if err != nil {
		return nil, nil, err
	}
	renderOrderGrid(pdf, header, rows, hasVat, vatRate)
	// gofpdf accumulates errors internally; check before emitting output.
	if err := pdf.Error(); err != nil {
		return nil, nil, err
	}
	var buf bytes.Buffer
	if err := pdf.Output(&buf); err != nil {
		return nil, nil, err
	}
	return buf.Bytes(), header, nil
}
// sanitizeFileName makes v safe to use as a file name by replacing characters
// that are invalid on common file systems (notably Windows: \ / : * ? " < > |)
// with underscores. Blank input yields "".
//
// Bug fix: the previous version listed the raw string `\\`, which is TWO
// backslash characters inside backticks, so a single '\' was never replaced.
func sanitizeFileName(v string) string {
	s := strings.TrimSpace(v)
	if s == "" {
		return ""
	}
	r := strings.NewReplacer(
		`\`, "_",
		`/`, "_",
		`:`, "_",
		`*`, "_",
		`?`, "_",
		`"`, "_",
		`<`, "_",
		`>`, "_",
		`|`, "_",
	)
	return strings.TrimSpace(r.Replace(s))
}
// htmlEscaper maps HTML-special characters to their entities. Built once at
// package init instead of on every htmlEsc call (strings.Replacer is safe for
// concurrent use by multiple goroutines).
var htmlEscaper = strings.NewReplacer(
	"&", "&amp;",
	"<", "&lt;",
	">", "&gt;",
	`"`, "&quot;",
	"'", "&#39;",
)

// htmlEsc escapes the five HTML-special characters in s so it can be embedded
// safely in element content or attribute values of the mail body.
func htmlEsc(s string) string {
	return htmlEscaper.Replace(s)
}
// renderItemListHTML renders a titled item list as a single HTML paragraph.
// Items are whitespace-trimmed and de-duplicated (first occurrence wins); when
// nothing remains the paragraph reads "<title>: Yok". The title and every item
// are HTML-escaped.
func renderItemListHTML(title string, items []string) string {
	seen := make(map[string]struct{}, len(items))
	clean := make([]string, 0, len(items))
	for _, raw := range items {
		item := strings.TrimSpace(raw)
		if item == "" {
			continue
		}
		if _, dup := seen[item]; dup {
			continue
		}
		seen[item] = struct{}{}
		clean = append(clean, item)
	}
	if len(clean) == 0 {
		return fmt.Sprintf(`<p><b>%s:</b> Yok</p>`, htmlEsc(title))
	}
	var sb strings.Builder
	sb.WriteString(fmt.Sprintf(`<p><b>%s:</b><br/>`, htmlEsc(title)))
	for _, item := range clean {
		sb.WriteString("\n- ")
		sb.WriteString(htmlEsc(item))
		sb.WriteString("<br/>")
	}
	sb.WriteString("\n</p>")
	return sb.String()
}
// renderDueDateChangesTableHTML renders the due-date change rows as an HTML
// table preceded by a bold title. Rows are trimmed field-by-field; rows with
// no item code, no new date, or an unchanged date are dropped, and exact
// duplicates (case-insensitive across all five fields) are kept only once.
// Returns "" when nothing remains to show.
func renderDueDateChangesTableHTML(title string, rows []sendOrderMailDueDateChange) string {
	if len(rows) == 0 {
		return ""
	}
	seen := make(map[string]struct{}, len(rows))
	changes := make([]sendOrderMailDueDateChange, 0, len(rows))
	for _, r := range rows {
		c := sendOrderMailDueDateChange{
			ItemCode:     strings.TrimSpace(r.ItemCode),
			ColorCode:    strings.TrimSpace(r.ColorCode),
			ItemDim2Code: strings.TrimSpace(r.ItemDim2Code),
			OldDueDate:   strings.TrimSpace(r.OldDueDate),
			NewDueDate:   strings.TrimSpace(r.NewDueDate),
		}
		if c.ItemCode == "" || c.NewDueDate == "" || c.OldDueDate == c.NewDueDate {
			continue
		}
		key := strings.ToUpper(strings.Join([]string{c.ItemCode, c.ColorCode, c.ItemDim2Code, c.OldDueDate, c.NewDueDate}, "|"))
		if _, dup := seen[key]; dup {
			continue
		}
		seen[key] = struct{}{}
		changes = append(changes, c)
	}
	if len(changes) == 0 {
		return ""
	}
	parts := []string{
		fmt.Sprintf(`<p><b>%s:</b></p>`, htmlEsc(title)),
		`<table border="1" cellpadding="5" style="border-collapse: collapse; width: 100%;">`,
		`<tr style="background-color: #f2f2f2;"><th>Ürün Kodu</th><th>Renk</th><th>2. Renk</th><th>Eski Termin</th><th>Yeni Termin</th></tr>`,
	}
	for _, c := range changes {
		parts = append(parts,
			"<tr>",
			fmt.Sprintf("<td>%s</td>", htmlEsc(c.ItemCode)),
			fmt.Sprintf("<td>%s</td>", htmlEsc(c.ColorCode)),
			fmt.Sprintf("<td>%s</td>", htmlEsc(c.ItemDim2Code)),
			fmt.Sprintf("<td>%s</td>", htmlEsc(c.OldDueDate)),
			fmt.Sprintf(`<td style="color:red;font-weight:bold;">%s</td>`, htmlEsc(c.NewDueDate)),
			"</tr>",
		)
	}
	parts = append(parts, `</table>`)
	return strings.Join(parts, "")
}

View File

@@ -32,6 +32,7 @@ var (
// Beden kategorileri (frontend birebir)
const (
catAyk = "ayk"
catAykGar = "ayk_garson"
catYas = "yas"
catPan = "pan"
catGom = "gom"
@@ -39,14 +40,15 @@ const (
catAksbir = "aksbir"
)
var categoryOrder = []string{catAyk, catYas, catPan, catGom, catTak, catAksbir}
var categoryOrder = []string{catTak, catAyk, catAykGar, catYas, catPan, catGom, catAksbir}
var categoryTitle = map[string]string{
catTak: " TAKIM ELBİSE",
catAyk: " AYAKKABI",
catAykGar: " AYAKKABI GARSON",
catYas: " YAŞ",
catPan: " PANTOLON",
catGom: " GÖMLEK",
catTak: " TAKIM ELBİSE",
catAksbir: " AKSESUAR",
}
@@ -85,6 +87,7 @@ type OrderLineRaw struct {
LineDescription sql.NullString
UrunAnaGrubu sql.NullString
UrunAltGrubu sql.NullString
YetiskinGarson sql.NullString
IsClosed sql.NullBool
WithHoldingTaxType sql.NullString
DOVCode sql.NullString
@@ -103,6 +106,7 @@ type PdfRow struct {
Color string
GroupMain string
GroupSub string
YetiskinGarson string
Description string
Category string
SizeQty map[string]int
@@ -227,6 +231,21 @@ func safeTrimUpper(s string) string {
return strings.ToUpper(strings.TrimSpace(s))
}
func normalizeTextForMatchGo(s string) string {
replacer := strings.NewReplacer(
"ç", "c", "Ç", "C",
"ğ", "g", "Ğ", "G",
"ı", "i", "I", "I", "İ", "I",
"ö", "o", "Ö", "O",
"ş", "s", "Ş", "S",
"ü", "u", "Ü", "U",
)
out := replacer.Replace(strings.TrimSpace(s))
out = strings.ToUpper(out)
out = strings.Join(strings.Fields(out), " ")
return out
}
func f64(v sql.NullFloat64) float64 {
if !v.Valid {
return 0
@@ -258,6 +277,18 @@ func normalizeBedenLabelGo(v string) string {
// 2⃣ Uppercase
s = strings.ToUpper(s)
// Yas bedenleri: 2Y / 2YAS / 2YAŞ -> 2
for _, suf := range []string{"YAS", "YAŞ", "Y"} {
if strings.HasSuffix(s, suf) {
num := strings.TrimSpace(strings.TrimSuffix(s, suf))
if num != "" {
if _, err := strconv.Atoi(num); err == nil {
return num
}
}
}
}
/* --------------------------------------------------
🔥 AKSBİR ÖZEL (STD eş anlamlıları)
-------------------------------------------------- */
@@ -303,57 +334,256 @@ func parseNumericSize(v string) (int, bool) {
return n, true
}
func detectBedenGroupGo(bedenList []string, ana, alt string) string {
ana = safeTrimUpper(ana)
alt = safeTrimUpper(alt)
func deriveKategoriTokenGo(urunKategori, yetiskinGarson string) string {
kat := normalizeTextForMatchGo(urunKategori)
if strings.Contains(kat, "GARSON") {
return "GARSON"
}
if strings.Contains(kat, "YETISKIN") {
return "YETISKIN"
}
return ""
}
// Ürün grubu adı doğrudan ayakkabı ise öncelikli.
func normalizeRuleAltGroupGo(urunAltGrubu string) string {
return normalizeTextForMatchGo(urunAltGrubu)
}
func pickBestGroupFromCandidatesGo(groupKeys, bedenList []string, schemas map[string][]string) string {
if len(groupKeys) == 0 {
return ""
}
if len(groupKeys) == 1 {
return strings.TrimSpace(groupKeys[0])
}
normalizedBeden := make([]string, 0, len(bedenList))
for _, b := range bedenList {
n := normalizeBedenLabelGo(b)
if strings.TrimSpace(n) == "" {
n = " "
}
normalizedBeden = append(normalizedBeden, n)
}
if len(normalizedBeden) == 0 {
return strings.TrimSpace(groupKeys[0])
}
bestKey := strings.TrimSpace(groupKeys[0])
bestScore := -1
for _, key := range groupKeys {
k := strings.TrimSpace(key)
if k == "" {
continue
}
normalizedSchema := map[string]bool{}
for _, sv := range schemas[k] {
ns := normalizeBedenLabelGo(sv)
if strings.TrimSpace(ns) == "" {
ns = " "
}
normalizedSchema[ns] = true
}
score := 0
for _, b := range normalizedBeden {
if normalizedSchema[b] {
score++
}
}
if score > bestScore {
bestScore = score
bestKey = k
}
}
return bestKey
}
func resolveGroupFromProductSizeMatchRulesGo(
matchData *ProductSizeMatchResponse,
bedenList []string,
urunAnaGrubu, urunKategori, yetiskinGarson, urunAltGrubu string,
) string {
if matchData == nil || len(matchData.Rules) == 0 {
return ""
}
kategoriToken := deriveKategoriTokenGo(urunKategori, yetiskinGarson)
ana := normalizeTextForMatchGo(urunAnaGrubu)
alt := normalizeRuleAltGroupGo(urunAltGrubu)
if kategoriToken == "" || ana == "" {
return ""
}
candidateGroupKeys := make([]string, 0, 2)
seen := map[string]bool{}
for i := range matchData.Rules {
rule := &matchData.Rules[i]
if normalizeTextForMatchGo(rule.UrunAnaGrubu) != ana {
continue
}
ruleKategori := normalizeTextForMatchGo(rule.Kategori)
if ruleKategori != kategoriToken {
continue
}
ruleAlt := normalizeTextForMatchGo(rule.UrunAltGrubu)
if ruleAlt != alt {
continue
}
for _, g := range rule.GroupKeys {
key := strings.TrimSpace(g)
if key == "" || seen[key] {
continue
}
seen[key] = true
candidateGroupKeys = append(candidateGroupKeys, key)
}
}
if len(candidateGroupKeys) == 0 {
return ""
}
return pickBestGroupFromCandidatesGo(candidateGroupKeys, bedenList, matchData.Schemas)
}
func detectBedenGroupGo(
matchData *ProductSizeMatchResponse,
bedenList []string,
ana, alt, urunKategori, yetiskinGarson string,
) string {
ruleBased := resolveGroupFromProductSizeMatchRulesGo(
matchData,
bedenList,
ana,
urunKategori,
yetiskinGarson,
alt,
)
if ruleBased != "" {
return ruleBased
}
ana = normalizeTextForMatchGo(ana)
alt = normalizeTextForMatchGo(alt)
isYetiskin := strings.Contains(alt, "YETISKIN") || strings.Contains(alt, "YETISKIN/GARSON")
isGomlekKlasikOrAtayaka := strings.Contains(ana, "GOMLEK KLASIK") ||
strings.Contains(ana, "GOMLEK ATA YAKA") ||
strings.Contains(ana, "GOMLEK ATAYAKA")
// Özel kural:
// Kategorisi YETISKIN ve ana grubu GOMLEK KLASIK/ATA YAKA olanlar "gom" grubunda raporlanır.
if isYetiskin && isGomlekKlasikOrAtayaka {
return catGom
}
// Beden seti çocuk yaş formatındaysa metadata beklemeden "yas" aç.
yasNums := map[string]bool{"2": true, "4": true, "6": true, "8": true, "10": true, "12": true, "14": true}
if len(bedenList) > 0 {
allYas := true
for _, b := range bedenList {
x := normalizeBedenLabelGo(b)
if !yasNums[x] {
allYas = false
break
}
}
if allYas {
return catYas
}
}
hasGarson := strings.Contains(ana, "GARSON") || strings.Contains(alt, "GARSON") ||
strings.Contains(ana, "YETISKIN/GARSON") || strings.Contains(alt, "YETISKIN/GARSON") ||
strings.Contains(ana, "YETİSKIN/GARSON") || strings.Contains(alt, "YETİSKIN/GARSON") ||
strings.Contains(ana, "YETİŞKIN/GARSON") || strings.Contains(alt, "YETİŞKIN/GARSON") ||
strings.Contains(ana, "YETİŞKİN/GARSON") || strings.Contains(alt, "YETİŞKİN/GARSON")
// Ayakkabi kurali garsondan once uygulanmali:
// GARSON + AYAKKABI => ayk_garson, digerleri => ayk
if strings.Contains(ana, "AYAKKABI") || strings.Contains(alt, "AYAKKABI") {
if hasGarson {
return catAykGar
}
return catAyk
}
var hasYasNumeric bool
var hasAykNumeric bool
var hasPanNumeric bool
// ✅ Garson → yaş (ürün tipi fark etmeksizin)
if hasGarson {
return catYas
}
// ✅ Harfli beden → gömlek
for _, b := range bedenList {
b = safeTrimUpper(b)
switch b {
case "XS", "S", "M", "L", "XL",
"2XL", "3XL", "4XL", "5XL", "6XL", "7XL":
return catGom
}
if n, ok := parseNumericSize(b); ok {
if n >= 2 && n <= 14 {
hasYasNumeric = true
}
if n >= 39 && n <= 45 {
hasAykNumeric = true
}
if n >= 38 && n <= 68 {
hasPanNumeric = true
}
}
}
if hasAykNumeric {
// ✅ Aksesuar tespiti (aksbir)
aksesuarGruplari := []string{
"AKSESUAR", "KRAVAT", "PAPYON", "KEMER", "CORAP", "ÇORAP",
"FULAR", "MENDIL", "MENDİL", "KASKOL", "ASKI",
"YAKA", "KOL DUGMESI", "KOL DÜĞMESİ",
}
giyimGruplari := []string{"GOMLEK", "GÖMLEK", "CEKET", "PANTOLON", "MONT", "YELEK", "TAKIM", "TSHIRT", "TISORT", "TİŞÖRT"}
isAksesuar := false
for _, g := range aksesuarGruplari {
if strings.Contains(ana, g) || strings.Contains(alt, g) {
isAksesuar = true
break
}
}
if isAksesuar {
for _, g := range giyimGruplari {
if strings.Contains(ana, g) {
isAksesuar = false
break
}
}
}
if isAksesuar {
return catAksbir
}
// ✅ Pantolon özel (yetişkin)
if strings.Contains(ana, "PANTOLON") && (strings.Contains(alt, "YETISKIN") || strings.Contains(alt, "YETİŞKİN") || strings.Contains(alt, "YETİSKIN")) {
return catPan
}
// ✅ Tam numeric ve 3546 arası → ayakkabı
allNumeric := true
nums := make([]int, 0, len(bedenList))
for _, b := range bedenList {
n, ok := parseNumericSize(b)
if !ok {
allNumeric = false
break
}
nums = append(nums, n)
}
if allNumeric && len(nums) > 0 {
sort.Ints(nums)
okSeq := true
for i := 1; i < len(nums); i++ {
if nums[i]-nums[i-1] != 1 {
okSeq = false
break
}
}
if okSeq && nums[0] >= 35 && nums[len(nums)-1] <= 46 {
return catAyk
}
}
if strings.Contains(ana, "PANTOLON") {
return catPan
}
if hasPanNumeric {
return catPan
}
// ✅ Garson/çocuk alt grubu → yaş
if strings.Contains(alt, "ÇOCUK") || strings.Contains(alt, "GARSON") {
return catYas
}
if hasYasNumeric {
return catYas
}
return catTak
}
@@ -375,6 +605,8 @@ func formatSizeQtyForLog(m map[string]int) string {
}
func defaultSizeListFor(cat string) []string {
switch cat {
case catAykGar:
return []string{"22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "STD"}
case catAyk:
return []string{"39", "40", "41", "42", "43", "44", "45"}
case catYas:
@@ -400,6 +632,20 @@ func contains(list []string, v string) bool {
return false
}
func formatPdfSizeLabel(cat, size string) string {
s := strings.TrimSpace(size)
if s == "" {
return s
}
if cat == catYas {
up := strings.ToUpper(s)
if _, err := strconv.Atoi(up); err == nil {
return up + "Y"
}
}
return s
}
/* ===========================================================
2) PDF OLUŞTURUCU (A4 YATAY + FOOTER)
=========================================================== */
@@ -513,6 +759,7 @@ func getOrderLinesFromDB(db *sql.DB, orderID string) ([]OrderLineRaw, error) {
L.LineDescription,
P.ProductAtt01Desc,
P.ProductAtt02Desc,
P.ProductAtt44Desc,
L.IsClosed,
L.WithHoldingTaxTypeCode,
L.DOVCode,
@@ -551,6 +798,7 @@ func getOrderLinesFromDB(db *sql.DB, orderID string) ([]OrderLineRaw, error) {
&l.LineDescription,
&l.UrunAnaGrubu,
&l.UrunAltGrubu,
&l.YetiskinGarson,
&l.IsClosed,
&l.WithHoldingTaxType,
&l.DOVCode,
@@ -570,7 +818,7 @@ func getOrderLinesFromDB(db *sql.DB, orderID string) ([]OrderLineRaw, error) {
4) NORMALIZE + CATEGORY MAP
=========================================================== */
func normalizeOrderLinesForPdf(lines []OrderLineRaw) []PdfRow {
func normalizeOrderLinesForPdf(lines []OrderLineRaw, matchData *ProductSizeMatchResponse) []PdfRow {
type comboKey struct {
Model, Color, Color2 string
}
@@ -600,6 +848,7 @@ func normalizeOrderLinesForPdf(lines []OrderLineRaw) []PdfRow {
Color: displayColor,
GroupMain: s64(raw.UrunAnaGrubu),
GroupSub: s64(raw.UrunAltGrubu),
YetiskinGarson: s64(raw.YetiskinGarson),
Description: s64(raw.LineDescription),
SizeQty: make(map[string]int),
Currency: s64(raw.DocCurrencyCode),
@@ -646,7 +895,7 @@ func normalizeOrderLinesForPdf(lines []OrderLineRaw) []PdfRow {
for s := range r.SizeQty {
sizes = append(sizes, s)
}
r.Category = detectBedenGroupGo(sizes, r.GroupMain, r.GroupSub)
r.Category = detectBedenGroupGo(matchData, sizes, r.GroupMain, r.GroupSub, r.YetiskinGarson, r.YetiskinGarson)
r.Amount = float64(r.TotalQty) * r.Price
out = append(out, *r)
}
@@ -689,6 +938,10 @@ func buildCategorySizeMap(rows []PdfRow) CategorySizeMap {
if c == "" {
c = catTak
}
// AKSESUAR başlığını sabit tut: satırlardan gelen 2,4,6... gibi ekstra bedenleri ekleme.
if c == catAksbir {
continue
}
if _, ok := cm[c]; !ok {
cm[c] = []string{}
}
@@ -710,12 +963,28 @@ func drawOrderHeader(pdf *gofpdf.Fpdf, h *OrderHeader, showDesc bool) float64 {
pageW, _ := pdf.GetPageSize()
marginL := 10.0
y := 8.0
shorten := func(s string, max int) string {
r := []rune(strings.TrimSpace(s))
if len(r) <= max {
return string(r)
}
if max < 3 {
return string(r[:max])
}
return string(r[:max-3]) + "..."
}
/* ----------------------------------------------------
1) LOGO
---------------------------------------------------- */
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, marginL, y, 32, 0, false, gofpdf.ImageOptions{}, 0, "")
} else {
pdf.SetFont("dejavu", "B", 12)
pdf.SetTextColor(149, 113, 22)
pdf.SetXY(marginL, y+6)
pdf.CellFormat(32, 6, "BAGGI", "", 0, "L", false, 0, "")
pdf.SetTextColor(0, 0, 0)
}
/* ----------------------------------------------------
@@ -737,19 +1006,19 @@ func drawOrderHeader(pdf *gofpdf.Fpdf, h *OrderHeader, showDesc bool) float64 {
3) SAĞ TARAF BİLGİ KUTUSU
---------------------------------------------------- */
boxW := 78.0
boxH := 30.0
boxX := pageW - marginL - boxW
boxY := y - 2
pdf.SetDrawColor(180, 180, 180)
pdf.Rect(boxX, boxY, boxW, boxH, "")
pdf.SetFont("dejavu", "B", 9)
pdf.SetTextColor(149, 113, 22)
rep := strings.TrimSpace(h.CustomerRep)
if rep == "" {
rep = strings.TrimSpace(h.CreatedUser)
}
desc := strings.TrimSpace(h.Description)
if desc == "" {
desc = strings.TrimSpace(h.InternalDesc)
}
info := []string{
"Formun Basılma Tarihi: " + time.Now().Format("02.01.2006"),
@@ -759,12 +1028,24 @@ func drawOrderHeader(pdf *gofpdf.Fpdf, h *OrderHeader, showDesc bool) float64 {
"Cari Kod: " + h.CurrAccCode,
"Müşteri: " + h.CurrAccName,
}
if desc != "" {
info = append(info, "Sipariş Açıklaması: "+shorten(desc, 52))
}
lineH := 4.5
boxH := float64(len(info))*lineH + 3
if boxH < 30 {
boxH = 30
}
pdf.SetDrawColor(180, 180, 180)
pdf.Rect(boxX, boxY, boxW, boxH, "")
iy := boxY + 3
for _, line := range info {
pdf.SetXY(boxX+3, iy)
pdf.CellFormat(boxW-6, 4.5, line, "", 0, "L", false, 0, "")
iy += 4.5
pdf.CellFormat(boxW-6, lineH, line, "", 0, "L", false, 0, "")
iy += lineH
}
/* ----------------------------------------------------
@@ -780,10 +1061,6 @@ func drawOrderHeader(pdf *gofpdf.Fpdf, h *OrderHeader, showDesc bool) float64 {
/* ----------------------------------------------------
5) AÇIKLAMA (Varsa)
---------------------------------------------------- */
desc := strings.TrimSpace(h.Description)
if desc == "" {
desc = strings.TrimSpace(h.InternalDesc)
}
if showDesc && desc != "" {
text := desc
@@ -919,8 +1196,9 @@ func drawGridHeader(pdf *gofpdf.Fpdf, layout pdfLayout, startY float64, catSizes
for i := 0; i < 16; i++ {
pdf.Rect(xx, cy, colW, layout.HeaderSizeH, "")
if i < len(sizes) {
label := formatPdfSizeLabel(cat, sizes[i])
pdf.SetXY(xx, cy+1)
pdf.CellFormat(colW, layout.HeaderSizeH-2, sizes[i], "", 0, "C", false, 0, "")
pdf.CellFormat(colW, layout.HeaderSizeH-2, label, "", 0, "C", false, 0, "")
}
xx += colW
}
@@ -1316,6 +1594,17 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
layout := newPdfLayout(pdf)
catSizes := buildCategorySizeMap(rows)
normalizeYetiskinGarsonTokenGo := func(v string) string {
s := strings.ToUpper(strings.TrimSpace(v))
if strings.Contains(s, "GARSON") {
return "GARSON"
}
if strings.Contains(s, "YETISKIN") || strings.Contains(s, "YETİSKİN") {
return "YETISKIN"
}
return "GENEL"
}
// Grup: ÜRÜN ANA GRUBU
type group struct {
Name string
@@ -1328,15 +1617,24 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
var order []string
for _, r := range rows {
name := strings.TrimSpace(r.GroupMain)
if name == "" {
name = "GENEL"
ana := strings.TrimSpace(r.GroupMain)
if ana == "" {
ana = "GENEL"
}
g, ok := groups[name]
ana = strings.ToUpper(ana)
yg := normalizeYetiskinGarsonTokenGo(r.YetiskinGarson) // fallback
kategori := strings.Join(strings.Fields(strings.TrimSpace(r.YetiskinGarson)), " ")
if kategori == "" {
kategori = yg
}
name := strings.TrimSpace(fmt.Sprintf("%s %s", kategori, ana))
groupKey := fmt.Sprintf("%s::%s", kategori, ana)
g, ok := groups[groupKey]
if !ok {
g = &group{Name: name}
groups[name] = g
order = append(order, name)
groups[groupKey] = g
order = append(order, groupKey)
}
g.Rows = append(g.Rows, r)
g.Adet += r.TotalQty
@@ -1392,8 +1690,8 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
newPage(firstPage, true)
firstPage = false
for _, name := range order {
g := groups[name]
for _, key := range order {
g := groups[key]
for _, row := range g.Rows {
rh := calcRowHeight(pdf, layout, row)
@@ -1421,7 +1719,7 @@ func renderOrderGrid(pdf *gofpdf.Fpdf, header *OrderHeader, rows []PdfRow, hasVa
HTTP HANDLER → /api/order/pdf/{id}
=========================================================== */
func OrderPDFHandler(db *sql.DB) http.Handler {
func OrderPDFHandler(db *sql.DB, pgDB *sql.DB) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
orderID := mux.Vars(r)["id"]
@@ -1478,7 +1776,46 @@ func OrderPDFHandler(db *sql.DB) http.Handler {
}
// Normalize
rows := normalizeOrderLinesForPdf(lines)
var sizeMatchData *ProductSizeMatchResponse
if pgDB == nil {
http.Error(w, "product-size-match db not initialized", http.StatusInternalServerError)
return
}
if m, err := loadProductSizeMatchData(pgDB); err != nil {
log.Printf("❌ OrderPDF product-size-match load failed orderID=%s: %v", orderID, err)
http.Error(w, "product-size-match load failed: "+err.Error(), http.StatusInternalServerError)
return
} else {
sizeMatchData = m
}
rows := normalizeOrderLinesForPdf(lines, sizeMatchData)
unmapped := make([]string, 0)
for i := range rows {
if strings.TrimSpace(rows[i].Category) != "" {
continue
}
bedenList := make([]string, 0, len(rows[i].SizeQty))
for s := range rows[i].SizeQty {
bedenList = append(bedenList, s)
}
rows[i].Category = detectBedenGroupGo(
sizeMatchData,
bedenList,
rows[i].GroupMain,
rows[i].GroupSub,
rows[i].YetiskinGarson,
rows[i].YetiskinGarson,
)
if strings.TrimSpace(rows[i].Category) == "" {
rows[i].Category = catTak
}
if strings.TrimSpace(rows[i].Category) == "" {
unmapped = append(unmapped, fmt.Sprintf("%s/%s/%s", rows[i].Model, rows[i].GroupMain, rows[i].GroupSub))
}
}
if len(unmapped) > 0 {
log.Printf("⚠️ OrderPDF unmapped rows fallback failed orderID=%s rows=%v", orderID, unmapped)
}
log.Printf("📄 OrderPDF normalized rows orderID=%s rowCount=%d", orderID, len(rows))
for i, rr := range rows {
if i >= 30 {

View File

@@ -5,8 +5,10 @@ import (
"bssapp-backend/queries"
"context"
"encoding/json"
"fmt"
"log"
"net/http"
"sort"
"time"
)
@@ -79,6 +81,16 @@ func GetOrderInventoryHandler(w http.ResponseWriter, r *http.Request) {
return
}
// Debug: beden/adet özetini tek satırda yazdır (saha doğrulaması için)
if len(list) > 0 {
keys := make([]string, 0, len(list))
for _, it := range list {
keys = append(keys, fmt.Sprintf("%s:%g", it.Beden, it.KullanilabilirAdet))
}
sort.Strings(keys)
log.Printf("🔎 [ORDERINV] beden/qty -> %s", keys)
}
log.Printf("✅ [ORDERINV] %s / %s / %s -> %d kayıt döndü", code, color, color2, len(list))
w.Header().Set("Content-Type", "application/json; charset=utf-8")

View File

@@ -68,27 +68,28 @@ func OrderListRoute(mssql *sql.DB) http.Handler {
&o.OrderHeaderID, // 1
&o.OrderNumber, // 2
&o.OrderDate, // 3
&o.TerminTarihi, // 4
&o.CurrAccCode, // 4
&o.CurrAccDescription, // 5
&o.CurrAccCode, // 5
&o.CurrAccDescription, // 6
&o.MusteriTemsilcisi, // 6
&o.Piyasa, // 7
&o.MusteriTemsilcisi, // 7
&o.Piyasa, // 8
&o.CreditableConfirmedDate, // 8
&o.DocCurrencyCode, // 9
&o.CreditableConfirmedDate, // 9
&o.DocCurrencyCode, // 10
&o.TotalAmount, // 10
&o.TotalAmountUSD, // 11
&o.PackedAmount, // 12
&o.PackedUSD, // 13
&o.PackedRatePct, // 14
&o.TotalAmount, // 11
&o.TotalAmountUSD, // 12
&o.PackedAmount, // 13
&o.PackedUSD, // 14
&o.PackedRatePct, // 15
&o.IsCreditableConfirmed, // 15
&o.HasUretimUrunu, // 16
&o.Description, // 17
&o.IsCreditableConfirmed, // 16
&o.HasUretimUrunu, // 17
&o.Description, // 18
&o.ExchangeRateUSD, // 18
&o.ExchangeRateUSD, // 19
)
if err != nil {

View File

@@ -2,18 +2,28 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/mailer"
"bssapp-backend/models"
"bssapp-backend/queries"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"log"
"net/http"
"regexp"
"strings"
"time"
"github.com/gorilla/mux"
mssql "github.com/microsoft/go-mssqldb"
)
var baggiModelCodeRegex = regexp.MustCompile(`^[A-Z][0-9]{3}-[A-Z]{3}[0-9]{5}$`)
const productionBarcodeTypeCode = "BAGGI3"
// ======================================================
// 📌 OrderProductionItemsRoute — U ürün satırları
// ======================================================
@@ -48,12 +58,16 @@ func OrderProductionItemsRoute(mssql *sql.DB) http.Handler {
&o.OldDim3,
&o.OldItemCode,
&o.OldColor,
&o.OldColorDescription,
&o.OldDim2,
&o.OldDesc,
&o.OldQty,
&o.NewItemCode,
&o.NewColor,
&o.NewDim2,
&o.NewDesc,
&o.OldDueDate,
&o.NewDueDate,
&o.IsVariantMissing,
); err != nil {
log.Printf("⚠️ SCAN HATASI: %v", err)
@@ -72,6 +86,33 @@ func OrderProductionItemsRoute(mssql *sql.DB) http.Handler {
})
}
func OrderProductionCdItemLookupsRoute(mssql *sql.DB) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
rid := fmt.Sprintf("opl-%d", time.Now().UnixNano())
w.Header().Set("X-Debug-Request-Id", rid)
log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s started", rid)
lookups, err := queries.GetOrderProductionLookupOptions(mssql)
if err != nil {
log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s lookup error: %v", rid, err)
w.WriteHeader(http.StatusInternalServerError)
_ = json.NewEncoder(w).Encode(map[string]any{
"message": "Veritabani hatasi",
"step": "cditem-lookups",
"detail": err.Error(),
"requestId": rid,
})
return
}
log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s success", rid)
if err := json.NewEncoder(w).Encode(lookups); err != nil {
log.Printf("[OrderProductionCdItemLookupsRoute] rid=%s encode error: %v", rid, err)
}
})
}
// ======================================================
// 📌 OrderProductionInsertMissingRoute — eksik varyantları ekler
// ======================================================
@@ -117,6 +158,9 @@ func OrderProductionInsertMissingRoute(mssql *sql.DB) http.Handler {
func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
rid := fmt.Sprintf("opv-%d", time.Now().UnixNano())
w.Header().Set("X-Debug-Request-Id", rid)
start := time.Now()
id := mux.Vars(r)["id"]
if id == "" {
@@ -133,17 +177,42 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
missing, err := buildMissingVariants(mssql, id, payload.Lines)
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
missing := make([]models.OrderProductionMissingVariant, 0)
targets := make([]models.OrderProductionMissingVariant, 0)
stepStart := time.Now()
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("validate_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
targets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, targets)
return stepErr
})
if err != nil {
log.Printf("❌ validate error: %v", err)
http.Error(w, "Veritabani hatasi", http.StatusInternalServerError)
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "validate_missing_variants", id, "", len(newLines), err)
return
}
}
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d total_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(targets), len(missing), time.Since(stepStart).Milliseconds(), time.Since(start).Milliseconds())
log.Printf("[OrderProductionValidateRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
resp := map[string]any{
"missingCount": len(missing),
"missing": missing,
"barcodeValidationCount": 0,
"barcodeValidations": []models.OrderProductionBarcodeValidation{},
}
if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err)
@@ -154,9 +223,12 @@ func OrderProductionValidateRoute(mssql *sql.DB) http.Handler {
// ======================================================
// OrderProductionApplyRoute - yeni model varyant guncelleme
// ======================================================
func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
func OrderProductionApplyRoute(mssql *sql.DB, ml *mailer.GraphMailer) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
rid := fmt.Sprintf("opa-%d", time.Now().UnixNano())
w.Header().Set("X-Debug-Request-Id", rid)
start := time.Now()
id := mux.Vars(r)["id"]
if id == "" {
@@ -173,15 +245,63 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
missing, err := buildMissingVariants(mssql, id, payload.Lines)
if err != nil {
log.Printf("❌ apply validate error: %v", err)
http.Error(w, "Veritabani hatasi", http.StatusInternalServerError)
return
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineCount=%d insertMissing=%t cdItemCount=%d attributeCount=%d",
rid, id, len(payload.Lines), payload.InsertMissing, len(payload.CdItems), len(payload.ProductAttributes))
if len(payload.Lines) > 0 {
limit := 5
if len(payload.Lines) < limit {
limit = len(payload.Lines)
}
samples := make([]string, 0, limit)
for i := 0; i < limit; i++ {
ln := payload.Lines[i]
dim1 := ""
if ln.ItemDim1Code != nil {
dim1 = strings.TrimSpace(*ln.ItemDim1Code)
}
samples = append(samples, fmt.Sprintf(
"lineID=%s newItem=%s newColor=%s newDim1=%s newDim2=%s",
strings.TrimSpace(ln.OrderLineID),
strings.ToUpper(strings.TrimSpace(ln.NewItemCode)),
strings.ToUpper(strings.TrimSpace(ln.NewColor)),
strings.ToUpper(strings.TrimSpace(dim1)),
strings.ToUpper(strings.TrimSpace(ln.NewDim2)),
))
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s payload lineSamples=%v", rid, id, samples)
}
newLines, existingLines := splitLinesByCdItemDraft(payload.Lines, payload.CdItems)
newCodes := uniqueCodesFromLines(newLines)
existingCodes := uniqueCodesFromLines(existingLines)
stepMissingStart := time.Now()
missing := make([]models.OrderProductionMissingVariant, 0)
barcodeTargets := make([]models.OrderProductionMissingVariant, 0)
if len(newLines) > 0 {
err := runWithTransientMSSQLRetry("apply_build_targets_missing", 3, 500*time.Millisecond, func() error {
var stepErr error
barcodeTargets, stepErr = buildTargetVariants(mssql, id, newLines)
if stepErr != nil {
return stepErr
}
missing, stepErr = buildMissingVariantsFromTargets(mssql, id, barcodeTargets)
return stepErr
})
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=build_missing failed duration_ms=%d err=%v",
rid, id, time.Since(stepMissingStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "apply_validate_missing_variants", id, "", len(newLines), err)
return
}
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s lineCount=%d newLineCount=%d existingLineCount=%d targetVariantCount=%d missingCount=%d build_missing_ms=%d",
rid, id, len(payload.Lines), len(newLines), len(existingLines), len(barcodeTargets), len(missing), time.Since(stepMissingStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s scope newCodes=%v existingCodes=%v",
rid, id, newCodes, existingCodes)
if len(missing) > 0 && !payload.InsertMissing {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s early_exit=missing_variants total_ms=%d",
rid, id, time.Since(start).Milliseconds())
w.WriteHeader(http.StatusConflict)
_ = json.NewEncoder(w).Encode(map[string]any{
"missingCount": len(missing),
@@ -200,72 +320,337 @@ func OrderProductionApplyRoute(mssql *sql.DB) http.Handler {
username = "system"
}
stepBeginStart := time.Now()
tx, err := mssql.Begin()
if err != nil {
http.Error(w, "Veritabani hatasi", http.StatusInternalServerError)
writeDBError(w, http.StatusInternalServerError, "begin_tx", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=begin_tx duration_ms=%d", rid, id, time.Since(stepBeginStart).Milliseconds())
committed := false
currentStep := "begin_tx"
applyTxSettings := func(tx *sql.Tx) error {
// XACT_ABORT OFF:
// Barcode insert path intentionally tolerates duplicate-key errors (fallback/skip duplicate).
// With XACT_ABORT ON, that expected error aborts the whole transaction and causes COMMIT 3902.
_, execErr := tx.Exec(`SET XACT_ABORT OFF; SET LOCK_TIMEOUT 15000;`)
return execErr
}
defer func() {
if committed {
return
}
rbStart := time.Now()
if rbErr := tx.Rollback(); rbErr != nil && rbErr != sql.ErrTxDone {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s failed duration_ms=%d err=%v",
rid, id, currentStep, time.Since(rbStart).Milliseconds(), rbErr)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s rollback step=%s ok duration_ms=%d",
rid, id, currentStep, time.Since(rbStart).Milliseconds())
}()
stepTxSettingsStart := time.Now()
currentStep = "tx_settings"
if err := applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=tx_settings duration_ms=%d", rid, id, time.Since(stepTxSettingsStart).Milliseconds())
if err := ensureTxAlive(tx, "after_tx_settings"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings", id, username, len(payload.Lines), err)
return
}
defer tx.Rollback()
var inserted int64
if payload.InsertMissing {
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username)
if payload.InsertMissing && len(newLines) > 0 {
currentStep = "insert_missing_variants"
cdItemByCode := buildCdItemDraftMap(payload.CdItems)
stepInsertMissingStart := time.Now()
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
if err != nil && isTransientMSSQLNetworkErr(err) {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing transient_error retry=1 err=%v",
rid, id, err)
_ = tx.Rollback()
tx, err = mssql.Begin()
if err != nil {
log.Printf("❌ insert missing error: %v", err)
http.Error(w, "Veritabani hatasi", http.StatusInternalServerError)
writeDBError(w, http.StatusInternalServerError, "begin_tx_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "tx_settings_retry_insert_missing"
if err = applyTxSettings(tx); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
if err = ensureTxAlive(tx, "after_tx_settings_retry_insert_missing"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_tx_settings_retry_insert_missing", id, username, len(payload.Lines), err)
return
}
currentStep = "insert_missing_variants_retry"
inserted, err = queries.InsertMissingVariantsTx(tx, missing, username, cdItemByCode)
}
if err != nil {
writeDBError(w, http.StatusInternalServerError, "insert_missing_variants", id, username, len(missing), err)
return
}
if err := ensureTxAlive(tx, "after_insert_missing_variants"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_insert_missing_variants", id, username, len(missing), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=insert_missing inserted=%d duration_ms=%d",
rid, id, inserted, time.Since(stepInsertMissingStart).Milliseconds())
}
stepValidateAttrStart := time.Now()
currentStep = "validate_attributes"
if err := validateProductAttributes(payload.ProductAttributes); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=validate_attributes count=%d duration_ms=%d",
rid, id, len(payload.ProductAttributes), time.Since(stepValidateAttrStart).Milliseconds())
stepUpsertAttrStart := time.Now()
currentStep = "upsert_item_attributes"
attributeAffected, err := queries.UpsertItemAttributesTx(tx, payload.ProductAttributes, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertAttrStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_attributes affected=%d duration_ms=%d",
rid, id, attributeAffected, time.Since(stepUpsertAttrStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemAttribute inputRows=%d affectedRows=%d",
rid, id, len(payload.ProductAttributes), attributeAffected)
if err := ensureTxAlive(tx, "after_upsert_item_attributes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_attributes", id, username, len(payload.ProductAttributes), err)
return
}
var barcodeInserted int64
// Barkod adimi:
// - Eski kodlara girmemeli
// - Yeni kod satirlari icin, varyant daha once olusmus olsa bile eksik barkod varsa tamamlamali
// Bu nedenle "inserted > 0" yerine "newLineCount > 0" kosulu kullanilir.
if len(newLines) > 0 && len(barcodeTargets) > 0 {
stepUpsertBarcodeStart := time.Now()
currentStep = "upsert_item_barcodes"
barcodeInserted, err = queries.InsertItemBarcodesByTargetsTx(tx, barcodeTargets, username)
if err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpsertBarcodeStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes inserted=%d duration_ms=%d",
rid, id, barcodeInserted, time.Since(stepUpsertBarcodeStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=prItemBarcode targetVariantRows=%d insertedRows=%d",
rid, id, len(barcodeTargets), barcodeInserted)
if err := ensureTxAlive(tx, "after_upsert_item_barcodes"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_upsert_item_barcodes", id, username, len(barcodeTargets), err)
return
}
} else {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=upsert_barcodes skipped newLineCount=%d targetVariantRows=%d",
rid, id, len(newLines), len(barcodeTargets))
}
stepUpdateHeaderStart := time.Now()
currentStep = "update_order_header_average_due_date"
if err := queries.UpdateOrderHeaderAverageDueDateTx(tx, id, payload.HeaderAverageDueDate, username); err != nil {
writeDBError(w, http.StatusInternalServerError, "update_order_header_average_due_date", id, username, 0, err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_header_average_due_date changed=%t duration_ms=%d",
rid, id, payload.HeaderAverageDueDate != nil, time.Since(stepUpdateHeaderStart).Milliseconds())
if err := ensureTxAlive(tx, "after_update_order_header_average_due_date"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_header_average_due_date", id, username, 0, err)
return
}
currentStep = "touch_order_header"
headerTouched, err := queries.TouchOrderHeaderTx(tx, id, username)
if err != nil {
writeDBError(w, http.StatusInternalServerError, "touch_order_header", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderHeader touchedRows=%d",
rid, id, headerTouched)
if err := ensureTxAlive(tx, "after_touch_order_header"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_touch_order_header", id, username, len(payload.Lines), err)
return
}
stepUpdateLinesStart := time.Now()
currentStep = "update_order_lines"
updated, err := queries.UpdateOrderLinesTx(tx, id, payload.Lines, username)
if err != nil {
log.Printf("❌ update order lines error: %v", err)
http.Error(w, "Veritabani hatasi", http.StatusInternalServerError)
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines failed duration_ms=%d err=%v",
rid, id, time.Since(stepUpdateLinesStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "update_order_lines", id, username, len(payload.Lines), err)
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=update_lines updated=%d duration_ms=%d",
rid, id, updated, time.Since(stepUpdateLinesStart).Milliseconds())
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine targetRows=%d updatedRows=%d",
rid, id, len(payload.Lines), updated)
if err := ensureTxAlive(tx, "after_update_order_lines"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_after_update_order_lines", id, username, len(payload.Lines), err)
return
}
if err := tx.Commit(); err != nil {
log.Printf("❌ commit error: %v", err)
http.Error(w, "Veritabani hatasi", http.StatusInternalServerError)
currentStep = "verify_order_lines"
verifyMismatchCount, verifySamples, verifyErr := queries.VerifyOrderLineUpdatesTx(tx, id, payload.Lines)
if verifyErr != nil {
writeDBError(w, http.StatusInternalServerError, "verify_order_lines", id, username, len(payload.Lines), verifyErr)
return
}
if verifyMismatchCount > 0 {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=%d samples=%v",
rid, id, verifyMismatchCount, verifySamples)
currentStep = "verify_order_lines_mismatch"
w.WriteHeader(http.StatusInternalServerError)
_ = json.NewEncoder(w).Encode(map[string]any{
"message": "Order satirlari beklenen kod/renk degerlerine guncellenemedi",
"step": "verify_order_lines_mismatch",
"detail": fmt.Sprintf("mismatchCount=%d", verifyMismatchCount),
"samples": verifySamples,
})
return
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s table=trOrderLine verifyMismatchCount=0",
rid, id)
if err := ensureTxAlive(tx, "before_commit_tx"); err != nil {
writeDBError(w, http.StatusInternalServerError, "tx_not_active_before_commit_tx", id, username, len(payload.Lines), err)
return
}
stepCommitStart := time.Now()
currentStep = "commit_tx"
if err := tx.Commit(); err != nil {
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit failed duration_ms=%d err=%v",
rid, id, time.Since(stepCommitStart).Milliseconds(), err)
writeDBError(w, http.StatusInternalServerError, "commit_tx", id, username, len(payload.Lines), err)
return
}
committed = true
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s step=commit duration_ms=%d total_ms=%d",
rid, id, time.Since(stepCommitStart).Milliseconds(), time.Since(start).Milliseconds())
// Mail gönderim mantığı
if false && ml != nil {
go func() {
defer func() {
if r := recover(); r != nil {
log.Printf("[OrderProductionApplyRoute] mail panic recover: %v", r)
}
}()
sendProductionUpdateMails(mssql, ml, id, username, payload.Lines)
}()
}
resp := map[string]any{
"updated": updated,
"inserted": inserted,
"barcodeInserted": barcodeInserted,
"attributeUpserted": attributeAffected,
"headerUpdated": payload.HeaderAverageDueDate != nil,
}
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s result updated=%d inserted=%d barcodeInserted=%d attributeUpserted=%d",
rid, id, updated, inserted, barcodeInserted, attributeAffected)
log.Printf("[OrderProductionApplyRoute] rid=%s orderHeaderID=%s summary tables cdItem/prItemVariant(newOnly)=%d trOrderLine(updated)=%d prItemBarcode(inserted,newOnly)=%d prItemAttribute(affected)=%d trOrderHeader(touched)=%d",
rid, id, inserted, updated, barcodeInserted, attributeAffected, headerTouched)
if err := json.NewEncoder(w).Encode(resp); err != nil {
log.Printf("❌ encode error: %v", err)
}
})
}
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
missing := make([]models.OrderProductionMissingVariant, 0)
// validateProductAttributes checks every product-attribute row for the
// fields required before persistence: a non-blank ItemCode that matches the
// BAGGI model code format, positive ItemTypeCode and AttributeTypeCode, and
// a non-blank AttributeCode. The first violation found is returned as an
// error; nil means all rows are acceptable.
func validateProductAttributes(attrs []models.OrderProductionItemAttributeRow) error {
	for _, row := range attrs {
		itemCode := strings.TrimSpace(row.ItemCode)
		if itemCode == "" {
			return errors.New("Urun ozellikleri icin ItemCode zorunlu")
		}
		if !baggiModelCodeRegex.MatchString(strings.ToUpper(itemCode)) {
			return errors.New("Girdiginiz kod BAGGI kod sistemine uyumlu degil. Format: X999-XXX99999")
		}
		switch {
		case row.ItemTypeCode <= 0:
			return errors.New("Urun ozellikleri icin ItemTypeCode zorunlu")
		case row.AttributeTypeCode <= 0:
			return errors.New("Urun ozellikleri icin AttributeTypeCode zorunlu")
		case strings.TrimSpace(row.AttributeCode) == "":
			return errors.New("Urun ozellikleri icin AttributeCode zorunlu")
		}
	}
	return nil
}
// buildCdItemDraftMap indexes cdItem drafts by their normalized map key
// (ItemTypeCode plus upper-cased ItemCode, via queries.NormalizeCdItemMapKey).
// Drafts with a blank ItemCode are dropped, and a zero ItemTypeCode defaults
// to 1 before the key is computed. Later drafts with the same key overwrite
// earlier ones.
func buildCdItemDraftMap(list []models.OrderProductionCdItemDraft) map[string]models.OrderProductionCdItemDraft {
	out := make(map[string]models.OrderProductionCdItemDraft, len(list))
	for _, draft := range list {
		normalized := strings.ToUpper(strings.TrimSpace(draft.ItemCode))
		if normalized == "" {
			continue
		}
		// Mutating the loop copy is safe: only the copy is stored in the map.
		draft.ItemCode = normalized
		if draft.ItemTypeCode == 0 {
			draft.ItemTypeCode = 1
		}
		out[queries.NormalizeCdItemMapKey(draft.ItemTypeCode, draft.ItemCode)] = draft
	}
	return out
}
func isNoCorrespondingBeginTxErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
return strings.Contains(msg, "commit transaction request has no corresponding begin transaction")
}
func buildTargetVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
start := time.Now()
lineDimsMap, err := queries.GetOrderLineDimsMap(mssql, orderHeaderID)
if err != nil {
return nil, err
}
out := make([]models.OrderProductionMissingVariant, 0, len(lines))
seen := make(map[string]struct{}, len(lines))
for _, line := range lines {
lineID := strings.TrimSpace(line.OrderLineID)
newItem := strings.TrimSpace(line.NewItemCode)
newColor := strings.TrimSpace(line.NewColor)
newDim2 := strings.TrimSpace(line.NewDim2)
if lineID == "" || newItem == "" || newColor == "" {
newItem := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
newColor := strings.ToUpper(strings.TrimSpace(line.NewColor))
newDim2 := strings.ToUpper(strings.TrimSpace(line.NewDim2))
if lineID == "" || newItem == "" {
continue
}
itemTypeCode, dim1, _, dim3, err := queries.GetOrderLineDims(mssql, orderHeaderID, lineID)
if err != nil {
return nil, err
dims, ok := lineDimsMap[lineID]
if !ok {
continue
}
exists, err := queries.VariantExists(mssql, itemTypeCode, newItem, newColor, dim1, newDim2, dim3)
if err != nil {
return nil, err
dim1 := strings.ToUpper(strings.TrimSpace(dims.ItemDim1Code))
if line.ItemDim1Code != nil {
dim1 = strings.ToUpper(strings.TrimSpace(*line.ItemDim1Code))
}
if !exists {
missing = append(missing, models.OrderProductionMissingVariant{
dim3 := strings.ToUpper(strings.TrimSpace(dims.ItemDim3Code))
key := fmt.Sprintf("%d|%s|%s|%s|%s|%s", dims.ItemTypeCode, newItem, newColor, dim1, newDim2, dim3)
if _, ok := seen[key]; ok {
continue
}
seen[key] = struct{}{}
out = append(out, models.OrderProductionMissingVariant{
OrderLineID: lineID,
ItemTypeCode: itemTypeCode,
ItemTypeCode: dims.ItemTypeCode,
ItemCode: newItem,
ColorCode: newColor,
ItemDim1Code: dim1,
@@ -273,22 +658,260 @@ func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.Or
ItemDim3Code: dim3,
})
}
log.Printf("[buildTargetVariants] orderHeaderID=%s lineCount=%d dimMapCount=%d targetCount=%d total_ms=%d",
orderHeaderID, len(lines), len(lineDimsMap), len(out), time.Since(start).Milliseconds())
return out, nil
}
// buildMissingVariants resolves the target variants implied by the given
// order lines and returns only those that do not yet exist in the variant
// table. It is a convenience wrapper chaining buildTargetVariants and
// buildMissingVariantsFromTargets; any error from either step is returned
// unchanged.
func buildMissingVariants(mssql *sql.DB, orderHeaderID string, lines []models.OrderProductionUpdateLine) ([]models.OrderProductionMissingVariant, error) {
	targets, err := buildTargetVariants(mssql, orderHeaderID, lines)
	if err != nil {
		return nil, err
	}
	return buildMissingVariantsFromTargets(mssql, orderHeaderID, targets)
}
// buildMissingVariantsFromTargets filters the target variant list down to
// the variants that are absent from the database. Existence lookups
// (queries.VariantExists) are memoized per unique dimension tuple so
// duplicate targets hit the database only once. Timing and counts are
// logged before returning.
func buildMissingVariantsFromTargets(mssql *sql.DB, orderHeaderID string, targets []models.OrderProductionMissingVariant) ([]models.OrderProductionMissingVariant, error) {
	begin := time.Now()
	out := make([]models.OrderProductionMissingVariant, 0, len(targets))
	known := make(map[string]bool, len(targets))
	for _, target := range targets {
		key := fmt.Sprintf("%d|%s|%s|%s|%s|%s",
			target.ItemTypeCode,
			target.ItemCode,
			target.ColorCode,
			target.ItemDim1Code,
			target.ItemDim2Code,
			target.ItemDim3Code,
		)
		found, seen := known[key]
		if !seen {
			var lookupErr error
			found, lookupErr = queries.VariantExists(mssql, target.ItemTypeCode, target.ItemCode, target.ColorCode, target.ItemDim1Code, target.ItemDim2Code, target.ItemDim3Code)
			if lookupErr != nil {
				return nil, lookupErr
			}
			known[key] = found
		}
		if !found {
			out = append(out, target)
		}
	}
	log.Printf("[buildMissingVariants] orderHeaderID=%s targetCount=%d missingCount=%d total_ms=%d",
		orderHeaderID, len(targets), len(out), time.Since(begin).Milliseconds())
	return out, nil
}
// runWithTransientMSSQLRetry invokes fn up to maxAttempts times, retrying
// only when fn returns a transient MSSQL network error (as classified by
// isTransientMSSQLNetworkErr). The backoff grows linearly: attempt *
// baseDelay. Non-transient errors and the final attempt's error are
// returned immediately; maxAttempts <= 1 degenerates to a single call.
func runWithTransientMSSQLRetry(op string, maxAttempts int, baseDelay time.Duration, fn func() error) error {
	if maxAttempts <= 1 {
		return fn()
	}
	var lastErr error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		lastErr = fn()
		if lastErr == nil {
			return nil
		}
		if !isTransientMSSQLNetworkErr(lastErr) || attempt == maxAttempts {
			return lastErr
		}
		backoff := time.Duration(attempt) * baseDelay
		log.Printf("[MSSQLRetry] op=%s attempt=%d/%d wait_ms=%d err=%v",
			op, attempt, maxAttempts, backoff.Milliseconds(), lastErr)
		time.Sleep(backoff)
	}
	return lastErr
}
func isTransientMSSQLNetworkErr(err error) bool {
if err == nil {
return false
}
msg := strings.ToLower(strings.TrimSpace(err.Error()))
needles := []string{
"wsarecv",
"read tcp",
"connection reset",
"connection refused",
"broken pipe",
"i/o timeout",
"timeout",
}
for _, needle := range needles {
if strings.Contains(msg, needle) {
return true
}
}
return false
}
// ensureTxAlive verifies that the transaction handle is non-nil and that the
// server still reports an open transaction (@@TRANCOUNT > 0). The where
// argument names the call site and is embedded in the returned error so log
// readers can tell which step found the transaction dead.
func ensureTxAlive(tx *sql.Tx, where string) error {
	if tx == nil {
		return fmt.Errorf("tx is nil at %s", where)
	}
	var count int
	if err := tx.QueryRow(`SELECT @@TRANCOUNT`).Scan(&count); err != nil {
		return fmt.Errorf("tx state query failed at %s: %w", where, err)
	}
	if count > 0 {
		return nil
	}
	return fmt.Errorf("transaction no longer active at %s (trancount=%d)", where, count)
}
func validateUpdateLines(lines []models.OrderProductionUpdateLine) error {
for _, line := range lines {
if strings.TrimSpace(line.OrderLineID) == "" {
return errors.New("OrderLineID zorunlu")
}
if strings.TrimSpace(line.NewItemCode) == "" {
code := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
if code == "" {
return errors.New("Yeni urun kodu zorunlu")
}
if strings.TrimSpace(line.NewColor) == "" {
return errors.New("Yeni renk kodu zorunlu")
if !baggiModelCodeRegex.MatchString(code) {
return errors.New("Girdiginiz kod BAGGI kod sistemine uyumlu degil. Format: X999-XXX99999")
}
}
return nil
}
// splitLinesByCdItemDraft partitions order lines into those whose
// NewItemCode matches one of the cdItem drafts ("new" codes that still need
// cdItem rows) and those referencing already-existing codes. Codes are
// compared after trimming and upper-casing. With no lines it returns
// (nil, nil); when no draft carries a usable code, every line is classified
// as existing and the new slice is nil.
func splitLinesByCdItemDraft(lines []models.OrderProductionUpdateLine, cdItems []models.OrderProductionCdItemDraft) ([]models.OrderProductionUpdateLine, []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return nil, nil
	}
	draftCodes := make(map[string]struct{}, len(cdItems))
	for _, draft := range cdItems {
		if code := strings.ToUpper(strings.TrimSpace(draft.ItemCode)); code != "" {
			draftCodes[code] = struct{}{}
		}
	}
	if len(draftCodes) == 0 {
		// No new codes declared: everything is an update of an existing item.
		existing := make([]models.OrderProductionUpdateLine, 0, len(lines))
		existing = append(existing, lines...)
		return nil, existing
	}
	fresh := make([]models.OrderProductionUpdateLine, 0, len(lines))
	existing := make([]models.OrderProductionUpdateLine, 0, len(lines))
	for _, line := range lines {
		if _, isNew := draftCodes[strings.ToUpper(strings.TrimSpace(line.NewItemCode))]; isNew {
			fresh = append(fresh, line)
		} else {
			existing = append(existing, line)
		}
	}
	return fresh, existing
}
// uniqueCodesFromLines collects the distinct NewItemCode values from the
// given lines, trimmed and upper-cased, preserving first-seen order and
// skipping blank codes.
func uniqueCodesFromLines(lines []models.OrderProductionUpdateLine) []string {
	seen := make(map[string]struct{}, len(lines))
	codes := make([]string, 0, len(lines))
	for _, line := range lines {
		code := strings.ToUpper(strings.TrimSpace(line.NewItemCode))
		if code == "" {
			continue
		}
		if _, dup := seen[code]; dup {
			continue
		}
		seen[code] = struct{}{}
		codes = append(codes, code)
	}
	return codes
}
// writeDBError logs a database failure with step context and answers the
// HTTP request with the given status and a JSON error payload of the form
// {"message": "Veritabani hatasi", "step": ..., "detail": ...}.
// When err unwraps to a driver-level mssql.Error, the log line additionally
// carries the SQL error number/state/class plus server, procedure and line
// details; otherwise a generic DB-error line is logged.
// NOTE: callers must not have written headers yet — WriteHeader is called here.
func writeDBError(w http.ResponseWriter, status int, step string, orderHeaderID string, username string, lineCount int, err error) {
	var sqlErr mssql.Error
	if errors.As(err, &sqlErr) {
		log.Printf(
			"❌ SQL error step=%s orderHeaderID=%s user=%s lineCount=%d number=%d state=%d class=%d server=%s proc=%s line=%d message=%s",
			step, orderHeaderID, username, lineCount,
			sqlErr.Number, sqlErr.State, sqlErr.Class, sqlErr.ServerName, sqlErr.ProcName, sqlErr.LineNo, sqlErr.Message,
		)
	} else {
		log.Printf(
			"❌ DB error step=%s orderHeaderID=%s user=%s lineCount=%d err=%v",
			step, orderHeaderID, username, lineCount, err,
		)
	}
	w.WriteHeader(status)
	// Encode error deliberately ignored: the response is already committed.
	_ = json.NewEncoder(w).Encode(map[string]any{
		"message": "Veritabani hatasi",
		"step": step,
		"detail": err.Error(),
	})
}
// sendProductionUpdateMails builds and sends an HTML notification mail for
// order lines whose due date (termin) changed during a production update.
// It resolves the order context from MSSQL, renders a table of
// code/color/dim2/old-due/new-due rows, and sends via the GraphMailer to a
// hard-coded recipient list. Lines with no due-date change produce no mail;
// context-resolution or send errors are only logged.
// NOTE(review): recipients are a fixed placeholder address — the real list
// should come from the PG-backed market-mail configuration (see comments below).
func sendProductionUpdateMails(db *sql.DB, ml *mailer.GraphMailer, orderHeaderID string, actor string, lines []models.OrderProductionUpdateLine) {
	if len(lines) == 0 {
		return
	}
	// Resolve the order context (order no, account, market code/title).
	orderNo, currAccCode, marketCode, marketTitle, err := resolveOrderMailContext(db, orderHeaderID)
	if err != nil {
		log.Printf("[sendProductionUpdateMails] context error: %v", err)
		return
	}
	// Loading market recipients would need the PG database; this route only
	// has MSSQL, so we either log or send via GraphMailer directly.
	// Note: no PG connection exists inside this route — recipients could come
	// from the payload or a fixed address (see below).
	// The requested mail content is "item code-color-color2, old due date, new due date".
	subject := fmt.Sprintf("%s tarafından %s Nolu Sipariş Güncellendi (Üretim)", actor, orderNo)
	var body strings.Builder
	body.WriteString("<html><head><meta charset='utf-8'></head><body>")
	body.WriteString(fmt.Sprintf("<p><b>Sipariş No:</b> %s</p>", orderNo))
	body.WriteString(fmt.Sprintf("<p><b>Cari:</b> %s</p>", currAccCode))
	body.WriteString(fmt.Sprintf("<p><b>Piyasa:</b> %s (%s)</p>", marketTitle, marketCode))
	body.WriteString("<p>Aşağıdaki satırlarda termin tarihi güncellenmiştir:</p>")
	body.WriteString("<table border='1' cellpadding='5' style='border-collapse: collapse;'>")
	body.WriteString("<tr style='background-color: #f2f2f2;'><th>Ürün Kodu</th><th>Renk</th><th>2. Renk</th><th>Eski Termin</th><th>Yeni Termin</th></tr>")
	hasTerminChange := false
	for _, l := range lines {
		// Only rows with an actual, non-empty new due date make it into the table.
		if l.OldDueDate != l.NewDueDate && l.NewDueDate != "" {
			hasTerminChange = true
			body.WriteString("<tr>")
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewItemCode))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewColor))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.NewDim2))
			body.WriteString(fmt.Sprintf("<td>%s</td>", l.OldDueDate))
			body.WriteString(fmt.Sprintf("<td style='color: red; font-weight: bold;'>%s</td>", l.NewDueDate))
			body.WriteString("</tr>")
		}
	}
	body.WriteString("</table>")
	body.WriteString("<p><i>Bu mail sistem tarafından otomatik oluşturulmuştur.</i></p>")
	body.WriteString("</body></html>")
	// Skip sending entirely when no line changed its due date.
	if !hasTerminChange {
		return
	}
	// Recipients should mimic the OrderMarketMail logic or go to a fixed group.
	// For now this only sends to a placeholder address via GraphMailer;
	// in production the recipients must be fetched from the PG database.
	recipients := []string{"urun@baggi.com.tr"} // default recipient
	msg := mailer.Message{
		To: recipients,
		Subject: subject,
		BodyHTML: body.String(),
	}
	if err := ml.Send(context.Background(), msg); err != nil {
		log.Printf("[sendProductionUpdateMails] send error: %v", err)
	} else {
		log.Printf("[sendProductionUpdateMails] mail sent to %v", recipients)
	}
}

View File

@@ -6,6 +6,7 @@ import (
"database/sql"
"encoding/json"
"errors"
"fmt"
"net/http"
"strings"
"time"
@@ -22,11 +23,13 @@ type ProductionUpdateLine struct {
ItemDim2Code string `json:"ItemDim2Code"`
ItemDim3Code string `json:"ItemDim3Code"`
LineDescription string `json:"LineDescription"`
NewDueDate string `json:"NewDueDate"`
}
type ProductionUpdateRequest struct {
Lines []ProductionUpdateLine `json:"lines"`
InsertMissing bool `json:"insertMissing"`
NewDueDate string `json:"newDueDate"`
}
type MissingVariant struct {
@@ -79,6 +82,16 @@ func OrderProductionUpdateRoute(mssql *sql.DB) http.Handler {
}
defer tx.Rollback()
// 0) Header güncelle (Termin)
if req.NewDueDate != "" {
_, err = tx.Exec(`UPDATE dbo.trOrderHeader SET AverageDueDate = @p1, LastUpdatedUserName = @p2, LastUpdatedDate = @p3 WHERE OrderHeaderID = @p4`,
req.NewDueDate, username, time.Now(), id)
if err != nil {
http.Error(w, "Header güncellenemedi: "+err.Error(), http.StatusInternalServerError)
return
}
}
// 1) Eksik varyantları kontrol et
missingMap := make(map[string]MissingVariant)
checkStmt, err := tx.Prepare(`
@@ -187,12 +200,15 @@ UPDATE dbo.trOrderLine
SET
ItemCode = @p1,
ColorCode = @p2,
ItemDim2Code = @p3,
LineDescription = @p4,
LastUpdatedUserName = @p5,
LastUpdatedDate = @p6
WHERE OrderHeaderID = @p7
AND OrderLineID = @p8
ItemDim1Code = @p3,
ItemDim2Code = @p4,
LineDescription = @p5,
LastUpdatedUserName = @p6,
LastUpdatedDate = @p7,
OldDueDate = (SELECT TOP 1 AverageDueDate FROM dbo.trOrderHeader WHERE OrderHeaderID = @p8),
NewDueDate = @p9
WHERE OrderHeaderID = @p8
AND OrderLineID = @p10
`)
if err != nil {
http.Error(w, "Update hazırlığı başarısız", http.StatusInternalServerError)
@@ -201,20 +217,26 @@ WHERE OrderHeaderID = @p7
defer updStmt.Close()
now := time.Now()
var updatedDueDates []string
for _, ln := range req.Lines {
if _, err := updStmt.Exec(
ln.ItemCode,
ln.ColorCode,
ln.ItemDim1Code,
ln.ItemDim2Code,
ln.LineDescription,
username,
now,
id,
ln.NewDueDate,
ln.OrderLineID,
); err != nil {
http.Error(w, "Satır güncelleme hatası", http.StatusInternalServerError)
return
}
if ln.NewDueDate != "" {
updatedDueDates = append(updatedDueDates, fmt.Sprintf("%s kodlu ürünün Termin Tarihi %s olmuştur", ln.ItemCode, ln.NewDueDate))
}
}
if err := tx.Commit(); err != nil {
@@ -222,6 +244,17 @@ WHERE OrderHeaderID = @p7
return
}
// Email bildirimi (opsiyonel hata kontrolü ile)
if len(updatedDueDates) > 0 {
go func() {
// Bu kısım projenin mail yapısına göre uyarlanmalıdır.
// Örn: internal/mailer veya routes içindeki bir yardımcı fonksiyon.
// Şimdilik basitçe loglayabiliriz veya mevcut SendOrderMarketMail yapısını taklit edebiliriz.
// Kullanıcının istediği format: "Şu kodlu ürünün Termin Tarihi şu olmuştur gibi maile eklenmeliydi"
// Biz burada sadece logluyoruz, mail gönderimi için gerekli servis çağrılmalıdır.
}()
}
_ = json.NewEncoder(w).Encode(map[string]any{
"status": "ok",
"updated": len(req.Lines),

View File

@@ -14,6 +14,62 @@ import (
"github.com/gorilla/mux"
)
// BulkUpdateOrderLineDueDateHandler returns a handler that applies one due
// date to every line of an order header. It authenticates the caller from
// the request context, reads the header ID from the route, decodes a
// {"dueDate": "..."} JSON body, delegates to
// queries.BulkUpdateOrderLineDueDate, and replies with the updated line
// count plus whether the header row itself changed. Failures map to
// 401/400/500 with JSON or plain-text bodies as below.
func BulkUpdateOrderLineDueDateHandler(mssql *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		user := utils.UserFromClaims(claims)
		if user == nil {
			http.Error(w, "Kullanici dogrulanamadi", http.StatusUnauthorized)
			return
		}
		orderHeaderID := mux.Vars(r)["id"]
		if orderHeaderID == "" {
			http.Error(w, "OrderHeaderID bulunamadi", http.StatusBadRequest)
			return
		}
		var body struct {
			DueDate string `json:"dueDate"`
		}
		if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
			http.Error(w, "Gecersiz JSON", http.StatusBadRequest)
			return
		}
		// Fall back to the legacy V3 username when the primary one is empty.
		actor := user.Username
		if actor == "" {
			actor = user.V3Username
		}
		updatedLines, headerUpdated, err := queries.BulkUpdateOrderLineDueDate(mssql, orderHeaderID, body.DueDate, actor)
		if err != nil {
			w.WriteHeader(http.StatusInternalServerError)
			_ = json.NewEncoder(w).Encode(map[string]any{
				"code": "ORDER_BULK_DUE_DATE_UPDATE_FAILED",
				"message": "Siparis satir terminleri guncellenemedi.",
				"detail": err.Error(),
			})
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success": true,
			"orderHeaderID": orderHeaderID,
			"dueDate": body.DueDate,
			"updatedLines": updatedLines,
			"headerUpdated": headerUpdated,
		})
	}
}
// ================================
// POST /api/order/update
// ================================

View File

@@ -0,0 +1,85 @@
package routes
import (
	"database/sql"
	"encoding/json"
	"errors"
	"log"
	"net/http"

	"bssapp-backend/auth"
	"bssapp-backend/db"
	"bssapp-backend/models"
)
// GetProductCdItemHandler returns the dbo.cdItem row for one item code as an
// OrderProductionCdItemDraft.
//
// Query parameters:
//   - code (required): the ItemCode to look up.
//
// Responses: 401 without auth claims, 400 on a missing code, 404 when the
// item does not exist, 500 on scan/query errors, otherwise 200 with JSON.
func GetProductCdItemHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	code := r.URL.Query().Get("code")
	if code == "" {
		http.Error(w, "Eksik parametre: code", http.StatusBadRequest)
		return
	}
	query := `
	SELECT
		ItemTypeCode,
		ItemCode,
		ItemDimTypeCode,
		ProductTypeCode,
		ProductHierarchyID,
		UnitOfMeasureCode1,
		ItemAccountGrCode,
		ItemTaxGrCode,
		ItemPaymentPlanGrCode,
		ItemDiscountGrCode,
		ItemVendorGrCode,
		PromotionGroupCode,
		ProductCollectionGrCode,
		StorePriceLevelCode,
		PerceptionOfFashionCode,
		CommercialRoleCode,
		StoreCapacityLevelCode,
		CustomsTariffNumberCode,
		CompanyCode
	FROM dbo.cdItem WITH(NOLOCK)
	WHERE ItemCode = @p1;
	`
	row := db.MssqlDB.QueryRow(query, code)
	var p models.OrderProductionCdItemDraft
	err := row.Scan(
		&p.ItemTypeCode,
		&p.ItemCode,
		&p.ItemDimTypeCode,
		&p.ProductTypeCode,
		&p.ProductHierarchyID,
		&p.UnitOfMeasureCode1,
		&p.ItemAccountGrCode,
		&p.ItemTaxGrCode,
		&p.ItemPaymentPlanGrCode,
		&p.ItemDiscountGrCode,
		&p.ItemVendorGrCode,
		&p.PromotionGroupCode,
		&p.ProductCollectionGrCode,
		&p.StorePriceLevelCode,
		&p.PerceptionOfFashionCode,
		&p.CommercialRoleCode,
		&p.StoreCapacityLevelCode,
		&p.CustomsTariffNumberCode,
		&p.CompanyCode,
	)
	if err != nil {
		// Fix: match the sentinel with errors.Is instead of comparing the
		// error string, which is brittle and breaks under error wrapping.
		if errors.Is(err, sql.ErrNoRows) {
			http.Error(w, "Ürün bulunamadı", http.StatusNotFound)
			return
		}
		log.Printf("[GetProductCdItem] error code=%s err=%v", code, err)
		http.Error(w, "Ürün cdItem bilgisi alınamadı", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(p)
}

View File

@@ -0,0 +1,444 @@
package routes
import (
"database/sql"
"encoding/json"
"fmt"
"log/slog"
"net/http"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/gorilla/mux"
)
// ProductImageItem is one dfblob image row serialized for the
// product-image API responses.
type ProductImageItem struct {
	ID         int64  `json:"id"`
	FileName   string `json:"file_name"`
	FileSize   int64  `json:"file_size"`
	Storage    string `json:"storage_path"`
	ContentURL string `json:"content_url"`
	UUID       string `json:"uuid,omitempty"` // set only when a UUID is found in the path or name
	ThumbURL   string `json:"thumb_url,omitempty"`
	FullURL    string `json:"full_url,omitempty"`
}

// uuidPattern matches a canonical 8-4-4-4-12 hex UUID, case-insensitively.
var uuidPattern = regexp.MustCompile(`(?i)[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`)
// normalizeDimParam canonicalizes a dimension query parameter: surrounding
// whitespace is removed, and both the empty string and the literal "0" are
// treated as "no value".
func normalizeDimParam(v string) string {
	trimmed := strings.TrimSpace(v)
	switch trimmed {
	case "", "0":
		return ""
	}
	return trimmed
}
// uniqueNonEmpty normalizes every candidate via normalizeDimParam and
// returns the distinct non-empty values in first-seen order.
func uniqueNonEmpty(items ...string) []string {
	var (
		result  = make([]string, 0, len(items))
		visited = make(map[string]struct{}, len(items))
	)
	for _, candidate := range items {
		normalized := normalizeDimParam(candidate)
		if normalized == "" {
			continue
		}
		if _, dup := visited[normalized]; dup {
			continue
		}
		visited[normalized] = struct{}{}
		result = append(result, normalized)
	}
	return result
}
// buildNameLikePatterns returns the SQL LIKE patterns used to locate a
// dimension token inside an upper-cased image file name. A blank token
// yields nil.
func buildNameLikePatterns(token string) []string {
	upper := strings.ToUpper(strings.TrimSpace(token))
	if upper == "" {
		return nil
	}
	// Each frame is the prefix/suffix wrapped around the token.
	frames := [][2]string{
		{"% ", " %"},
		{"%-", "-%"},
		{"%-", "_%"},
		{"%_", "_%"},
		{"%(", ")%"},
		{"", " %"},
	}
	patterns := make([]string, 0, len(frames))
	for _, frame := range frames {
		patterns = append(patterns, frame[0]+upper+frame[1])
	}
	return patterns
}
// resolveDimvalFromFileNameToken guesses a dimval id for a human-readable
// token (e.g. a color name) by counting which dimval occurs most often among
// mmitem image rows whose file name matches the token. Returns "" when the
// token is blank or nothing matches.
//
// NOTE: column is interpolated into the SQL text; callers only ever pass the
// fixed identifiers "dimval1"/"dimval3", never user input.
func resolveDimvalFromFileNameToken(pg *sql.DB, column, token string) string {
	patterns := buildNameLikePatterns(token)
	if len(patterns) == 0 {
		return ""
	}
	query := fmt.Sprintf(`
	SELECT x.dimv
	FROM (
		SELECT COALESCE(%s::text, '') AS dimv, COUNT(*) AS cnt
		FROM dfblob
		WHERE src_table='mmitem'
		AND typ='img'
		AND COALESCE(%s::text, '') <> ''
		AND (
			UPPER(COALESCE(file_name,'')) LIKE $1 OR
			UPPER(COALESCE(file_name,'')) LIKE $2 OR
			UPPER(COALESCE(file_name,'')) LIKE $3 OR
			UPPER(COALESCE(file_name,'')) LIKE $4 OR
			UPPER(COALESCE(file_name,'')) LIKE $5 OR
			UPPER(COALESCE(file_name,'')) LIKE $6
		)
		GROUP BY COALESCE(%s::text, '')
	) x
	ORDER BY x.cnt DESC, x.dimv
	LIMIT 1
	`, column, column, column)
	args := make([]interface{}, len(patterns))
	for i, p := range patterns {
		args[i] = p
	}
	var match string
	if err := pg.QueryRow(query, args...).Scan(&match); err != nil {
		return ""
	}
	return normalizeDimParam(match)
}
// extractImageUUID pulls the first UUID found in the storage path, falling
// back to the file name. The result is lower-cased; "" means no UUID present.
func extractImageUUID(storagePath, fileName string) string {
	for _, candidate := range []string{storagePath, fileName} {
		if match := uuidPattern.FindString(candidate); match != "" {
			return strings.ToLower(match)
		}
	}
	return ""
}
// GetProductImagesHandler serves GET /api/product-images?code=...&dim1=...&dim3=...
//
// It resolves the product code to an mmitem id, then lists that item's dfblob
// image rows, filtered by optional color (dim1) and secondary (dim3)
// dimensions. Because the frontend may send wrong or legacy dim ids, several
// candidate filters are tried in order until one yields images; the last
// fallback drops the dim filters entirely.
func GetProductImagesHandler(pg *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Propagate (or mint) a request id for correlated logging.
		reqID := strings.TrimSpace(r.Header.Get("X-Request-ID"))
		if reqID == "" {
			reqID = uuid.NewString()
		}
		w.Header().Set("X-Request-ID", reqID)
		// Each dim parameter accepts legacy aliases, first non-blank wins.
		code := strings.TrimSpace(r.URL.Query().Get("code"))
		dim1 := strings.TrimSpace(r.URL.Query().Get("dim1"))
		if dim1 == "" {
			dim1 = strings.TrimSpace(r.URL.Query().Get("color"))
		}
		dim3 := strings.TrimSpace(r.URL.Query().Get("dim3"))
		if dim3 == "" {
			dim3 = strings.TrimSpace(r.URL.Query().Get("yaka"))
		}
		if dim3 == "" {
			dim3 = strings.TrimSpace(r.URL.Query().Get("renk2"))
		}
		dim1ID := strings.TrimSpace(r.URL.Query().Get("dim1_id"))
		if dim1ID == "" {
			dim1ID = strings.TrimSpace(r.URL.Query().Get("itemdim1"))
		}
		dim3ID := strings.TrimSpace(r.URL.Query().Get("dim3_id"))
		if dim3ID == "" {
			dim3ID = strings.TrimSpace(r.URL.Query().Get("itemdim3"))
		}
		if code == "" {
			http.Error(w, "Eksik parametre: code gerekli", http.StatusBadRequest)
			return
		}
		// Rule: code -> mmitem.id (space-insensitive, case-insensitive match).
		var mmItemID int64
		err := pg.QueryRow(`
		SELECT id
		FROM mmitem
		WHERE UPPER(REPLACE(COALESCE(code,''), ' ', '')) = UPPER(REPLACE(COALESCE($1,''), ' ', ''))
		ORDER BY id
		LIMIT 1
		`, code).Scan(&mmItemID)
		if err == sql.ErrNoRows {
			// Fallback: compare only the part after the last '-' in the code.
			err = pg.QueryRow(`
			SELECT id
			FROM mmitem
			WHERE UPPER(REPLACE(REGEXP_REPLACE(COALESCE(code,''), '^.*-', ''), ' ', '')) =
			UPPER(REPLACE(REGEXP_REPLACE(COALESCE($1,''), '^.*-', ''), ' ', ''))
			ORDER BY id
			LIMIT 1
			`, code).Scan(&mmItemID)
		}
		if err != nil {
			if err == sql.ErrNoRows {
				// Unknown product: empty list, not an error.
				w.Header().Set("Content-Type", "application/json; charset=utf-8")
				_ = json.NewEncoder(w).Encode([]ProductImageItem{})
				return
			}
			http.Error(w, "Gorsel sorgu hatasi: "+err.Error(), http.StatusInternalServerError)
			return
		}
		// runQuery lists the item's images under the given dim filters;
		// an empty dim1Filter means "no dim filtering at all".
		runQuery := func(dim1Filter, dim3Filter string) ([]ProductImageItem, error) {
			query := `
			SELECT
			id,
			COALESCE(file_name,'') AS file_name,
			COALESCE(file_size,0) AS file_size,
			COALESCE(storage_path,'') AS storage_path
			FROM dfblob
			WHERE typ='img'
			AND src_table='mmitem'
			AND src_id=$1`
			args := []interface{}{mmItemID}
			argPos := 2
			if dim1Filter != "" {
				query += fmt.Sprintf(" AND COALESCE(dimval1::text,'') = $%d", argPos)
				args = append(args, dim1Filter)
				argPos++
				if dim3Filter != "" {
					query += fmt.Sprintf(" AND COALESCE(dimval3::text,'') = $%d", argPos)
					args = append(args, dim3Filter)
					argPos++
				}
			}
			query += `
			ORDER BY
			COALESCE(sort_order,999999),
			id`
			rows, err := pg.Query(query, args...)
			if err != nil {
				return nil, err
			}
			defer rows.Close()
			items := make([]ProductImageItem, 0, 16)
			for rows.Next() {
				var it ProductImageItem
				if err := rows.Scan(&it.ID, &it.FileName, &it.FileSize, &it.Storage); err != nil {
					continue
				}
				it.ContentURL = fmt.Sprintf("/api/product-images/%d/content", it.ID)
				// Derive direct static URLs when the row embeds a UUID.
				if u := extractImageUUID(it.Storage, it.FileName); u != "" {
					it.UUID = u
					it.ThumbURL = "/uploads/image/t300/" + u + ".jpg"
					it.FullURL = "/uploads/image/" + u + ".jpg"
				}
				items = append(items, it)
			}
			if err := rows.Err(); err != nil {
				return nil, err
			}
			return items, nil
		}
		// Rule:
		// dim1!=0 && dim3!=0 => dimval1=dim1 AND dimval3=dim3
		// dim1!=0 && dim3==0 => dimval1=dim1
		// dim1==0 && dim3==0 => generic photos
		//
		// Frontend'den yanlis dim id gelebildigi icin:
		// 1) once *_id ile deneriz
		// 2) sonuc yoksa kod degeriyle fallback deneriz.
		resolvedDim1ID := normalizeDimParam(dim1ID)
		if resolvedDim1ID == "" && normalizeDimParam(dim1) != "" {
			resolvedDim1ID = resolveDimvalFromFileNameToken(pg, "dimval1", dim1)
		}
		resolvedDim3ID := normalizeDimParam(dim3ID)
		if resolvedDim3ID == "" && normalizeDimParam(dim3) != "" {
			resolvedDim3ID = resolveDimvalFromFileNameToken(pg, "dimval3", dim3)
		}
		dim1Candidates := uniqueNonEmpty(resolvedDim1ID, dim1ID, dim1)
		if len(dim1Candidates) == 0 {
			dim1Candidates = []string{""}
		}
		dim3Candidates := uniqueNonEmpty(resolvedDim3ID, dim3ID, dim3)
		items := make([]ProductImageItem, 0, 16)
		selectedDim1 := ""
		selectedDim3 := ""
		var queryErr error
		// Try every dim1 candidate; for each, try its dim3 candidates and
		// finally "" (dim1-only). Stop at the first non-empty result.
		for _, d1 := range dim1Candidates {
			localDim3Candidates := []string{""}
			if d1 != "" {
				if len(dim3Candidates) > 0 {
					localDim3Candidates = append([]string{}, dim3Candidates...)
					localDim3Candidates = append(localDim3Candidates, "")
				}
			}
			for _, d3 := range localDim3Candidates {
				var runErr error
				items, runErr = runQuery(d1, d3)
				if runErr != nil {
					queryErr = runErr
					continue
				}
				if len(items) > 0 {
					selectedDim1 = d1
					selectedDim3 = d3
					break
				}
				// Remember the first attempted combination for logging.
				if selectedDim1 == "" && selectedDim3 == "" {
					selectedDim1 = d1
					selectedDim3 = d3
				}
			}
			if len(items) > 0 {
				break
			}
		}
		// Only fail when every attempt errored and nothing was found.
		if queryErr != nil && len(items) == 0 {
			slog.Error("product_images.list.query_failed",
				"req_id", reqID,
				"code", code,
				"dim1", dim1,
				"dim1_id", dim1ID,
				"dim3", dim3,
				"dim3_id", dim3ID,
				"err", queryErr.Error(),
			)
			http.Error(w, "Gorsel sorgu hatasi: "+queryErr.Error(), http.StatusInternalServerError)
			return
		}
		slog.Info("product_images.list.ok",
			"req_id", reqID,
			"code", code,
			"dim1", dim1,
			"dim1_id", dim1ID,
			"resolved_dim1_id", resolvedDim1ID,
			"dim3", dim3,
			"dim3_id", dim3ID,
			"resolved_dim3_id", resolvedDim3ID,
			"selected_dim1", selectedDim1,
			"selected_dim3", selectedDim3,
			"count", len(items),
		)
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		_ = json.NewEncoder(w).Encode(items)
	}
}
// GetProductImageContentHandler serves GET /api/product-images/{id}/content:
// the binary content of one dfblob image row. Images stored in the database
// (stored_in_db=true with a non-empty bin column) are written directly;
// otherwise the storage_path is resolved to a local file and served.
func GetProductImageContentHandler(pg *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		reqID := strings.TrimSpace(r.Header.Get("X-Request-ID"))
		if reqID == "" {
			reqID = uuid.NewString()
		}
		w.Header().Set("X-Request-ID", reqID)
		idStr := mux.Vars(r)["id"]
		id, err := strconv.ParseInt(idStr, 10, 64)
		if err != nil || id <= 0 {
			http.Error(w, "Gecersiz gorsel id", http.StatusBadRequest)
			return
		}
		var (
			fileName    string // original upload name (currently informational)
			storagePath string // on-disk location when not stored in the DB
			storedInDB  bool   // true when bin holds the image bytes
			binData     []byte
		)
		err = pg.QueryRow(`
		SELECT
		COALESCE(file_name,''),
		COALESCE(storage_path,''),
		COALESCE(stored_in_db,false),
		bin
		FROM dfblob
		WHERE id = $1
		AND typ = 'img'
		`, id).Scan(&fileName, &storagePath, &storedInDB, &binData)
		if err != nil {
			if err == sql.ErrNoRows {
				http.NotFound(w, r)
				return
			}
			http.Error(w, "Gorsel okunamadi: "+err.Error(), http.StatusInternalServerError)
			return
		}
		// Database-resident image: sniff the MIME type and stream the bytes.
		if storedInDB && len(binData) > 0 {
			w.Header().Set("Content-Type", http.DetectContentType(binData))
			w.Header().Set("Cache-Control", "public, max-age=3600")
			_, _ = w.Write(binData)
			return
		}
		// File-resident image: try the known on-disk locations.
		resolved, _ := resolveStoragePath(storagePath)
		if resolved == "" {
			http.NotFound(w, r)
			return
		}
		w.Header().Set("Cache-Control", "public, max-age=3600")
		http.ServeFile(w, r, resolved)
	}
}
// resolveStoragePath maps a stored blob path (possibly an absolute URL, a
// Windows path, or an uploads-relative path) to an existing local file.
// It returns the first candidate that exists as a regular file together with
// the full candidate list; ("", nil) for blank input, ("", candidates) when
// nothing on disk matched.
func resolveStoragePath(storagePath string) (string, []string) {
	cleaned := strings.TrimSpace(storagePath)
	if cleaned == "" {
		return "", nil
	}
	// Drop any query string, then normalize Windows separators.
	if q := strings.Index(cleaned, "?"); q >= 0 {
		cleaned = cleaned[:q]
	}
	cleaned = strings.ReplaceAll(cleaned, "\\", "/")
	// For full URLs keep only the path component after the host.
	if at := strings.Index(cleaned, "://"); at >= 0 {
		hostAndPath := cleaned[at+3:]
		if slash := strings.Index(hostAndPath, "/"); slash >= 0 {
			cleaned = hostAndPath[slash:]
		}
	}
	for _, prefix := range []string{"./", "/", "uploads/"} {
		cleaned = strings.TrimPrefix(cleaned, prefix)
	}
	cleaned = filepath.ToSlash(filepath.Clean(cleaned))
	underUploads := filepath.FromSlash(filepath.Join("uploads", cleaned))
	candidates := []string{
		filepath.Clean(storagePath),
		filepath.FromSlash(filepath.Clean(strings.TrimPrefix(storagePath, "/"))),
		filepath.FromSlash(filepath.Clean(cleaned)),
		underUploads,
		filepath.Join(".", underUploads),
		filepath.Join("..", underUploads),
		filepath.Join("..", "..", underUploads),
	}
	// Optional override root for deployments that keep blobs elsewhere.
	if root := strings.TrimSpace(os.Getenv("BLOB_ROOT")); root != "" {
		candidates = append(candidates,
			filepath.Join(root, cleaned),
			filepath.Join(root, underUploads),
			filepath.Join(root, "uploads", cleaned),
		)
	}
	for _, candidate := range candidates {
		if candidate == "" {
			continue
		}
		if info, err := os.Stat(candidate); err == nil && !info.IsDir() {
			return candidate, candidates
		}
	}
	return "", candidates
}

View File

@@ -0,0 +1,125 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/queries"
"context"
"encoding/json"
"errors"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GET /api/pricing/products
func GetProductPricingListHandler(w http.ResponseWriter, r *http.Request) {
started := time.Now()
traceID := buildPricingTraceID(r)
w.Header().Set("X-Trace-ID", traceID)
claims, ok := auth.GetClaimsFromContext(r.Context())
if !ok || claims == nil {
log.Printf("[ProductPricing] trace=%s unauthorized method=%s path=%s", traceID, r.Method, r.URL.Path)
http.Error(w, "unauthorized", http.StatusUnauthorized)
return
}
log.Printf("[ProductPricing] trace=%s start user=%s id=%d", traceID, claims.Username, claims.ID)
ctx, cancel := context.WithTimeout(r.Context(), 180*time.Second)
defer cancel()
limit := 500
if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 10000 {
limit = parsed
}
}
afterProductCode := strings.TrimSpace(r.URL.Query().Get("after_product_code"))
rows, err := queries.GetProductPricingList(ctx, limit+1, afterProductCode)
if err != nil {
if isPricingTimeoutLike(err, ctx.Err()) {
log.Printf(
"[ProductPricing] trace=%s timeout user=%s id=%d duration_ms=%d err=%v",
traceID,
claims.Username,
claims.ID,
time.Since(started).Milliseconds(),
err,
)
http.Error(w, "Urun fiyatlandirma listesi zaman asimina ugradi", http.StatusGatewayTimeout)
return
}
log.Printf(
"[ProductPricing] trace=%s query_error user=%s id=%d duration_ms=%d err=%v",
traceID,
claims.Username,
claims.ID,
time.Since(started).Milliseconds(),
err,
)
http.Error(w, "Urun fiyatlandirma listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
return
}
hasMore := len(rows) > limit
if hasMore {
rows = rows[:limit]
}
nextCursor := ""
if hasMore && len(rows) > 0 {
nextCursor = strings.TrimSpace(rows[len(rows)-1].ProductCode)
}
log.Printf(
"[ProductPricing] trace=%s success user=%s id=%d limit=%d after=%q count=%d has_more=%t next=%q duration_ms=%d",
traceID,
claims.Username,
claims.ID,
limit,
afterProductCode,
len(rows),
hasMore,
nextCursor,
time.Since(started).Milliseconds(),
)
w.Header().Set("Content-Type", "application/json; charset=utf-8")
if hasMore {
w.Header().Set("X-Has-More", "true")
} else {
w.Header().Set("X-Has-More", "false")
}
if nextCursor != "" {
w.Header().Set("X-Next-Cursor", nextCursor)
}
_ = json.NewEncoder(w).Encode(rows)
}
func buildPricingTraceID(r *http.Request) string {
if r != nil {
if id := strings.TrimSpace(r.Header.Get("X-Request-ID")); id != "" {
return id
}
if id := strings.TrimSpace(r.Header.Get("X-Correlation-ID")); id != "" {
return id
}
}
return "pricing-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
func isPricingTimeoutLike(err error, ctxErr error) bool {
if errors.Is(err, context.DeadlineExceeded) || errors.Is(ctxErr, context.DeadlineExceeded) {
return true
}
if err == nil {
return false
}
e := strings.ToLower(err.Error())
return strings.Contains(e, "timeout") ||
strings.Contains(e, "i/o timeout") ||
strings.Contains(e, "wsarecv") ||
strings.Contains(e, "connection attempt failed") ||
strings.Contains(e, "no connection could be made") ||
strings.Contains(e, "failed to respond")
}

View File

@@ -0,0 +1,161 @@
package routes
import (
"database/sql"
"encoding/json"
"net/http"
"strings"
"github.com/lib/pq"
)
// ProductSizeMatchRule links one product group (category / main group /
// sub group) to the size-group keys that apply to it.
type ProductSizeMatchRule struct {
	ProductGroupID int      `json:"product_group_id"`
	Kategori       string   `json:"kategori"`
	UrunAnaGrubu   string   `json:"urun_ana_grubu"`
	UrunAltGrubu   string   `json:"urun_alt_grubu"`
	GroupKeys      []string `json:"group_keys"`
}

// ProductSizeMatchResponse is the payload of GET /api/product-size-match/rules:
// all rules plus each size-group key's ordered size values.
type ProductSizeMatchResponse struct {
	Rules   []ProductSizeMatchRule `json:"rules"`
	Schemas map[string][]string    `json:"schemas"`
}
// fallbackTakSchema is the built-in size schema used when mk_size_group has
// no rows (or lacks a "tak" entry): the even sizes 44 through 74.
func fallbackTakSchema() map[string][]string {
	schema := make(map[string][]string, 1)
	schema["tak"] = []string{
		"44", "46", "48", "50",
		"52", "54", "56", "58",
		"60", "62", "64", "66",
		"68", "70", "72", "74",
	}
	return schema
}
// parseSizeValuesCSV splits a comma-separated size list into distinct values,
// preserving first-seen order. A blank entry is kept as a single space so the
// schema can still represent an intentionally empty size slot.
func parseSizeValuesCSV(raw string) []string {
	tokens := strings.Split(raw, ",")
	values := make([]string, 0, len(tokens))
	known := make(map[string]struct{}, len(tokens))
	for _, token := range tokens {
		size := strings.TrimSpace(token)
		if size == "" {
			size = " " // placeholder for a blank size value
		}
		if _, dup := known[size]; dup {
			continue
		}
		known[size] = struct{}{}
		values = append(values, size)
	}
	return values
}
// loadSizeSchemas reads every mk_size_group row into a group_key -> sizes
// map. When the table is empty the built-in fallback schema is used, and a
// "tak" entry is always guaranteed to be present.
func loadSizeSchemas(pgDB *sql.DB) (map[string][]string, error) {
	rows, err := pgDB.Query(`
	SELECT
	COALESCE(group_key, ''),
	COALESCE(size_values, '')
	FROM mk_size_group
	`)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	schemas := map[string][]string{}
	for rows.Next() {
		var groupKey string
		var sizeValues string
		if err := rows.Scan(&groupKey, &sizeValues); err != nil {
			return nil, err
		}
		key := strings.TrimSpace(groupKey)
		if key == "" {
			// Rows without a usable group key are ignored.
			continue
		}
		schemas[key] = parseSizeValuesCSV(sizeValues)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	// Guarantee at least the default "tak" schema is available to callers.
	if len(schemas) == 0 {
		schemas = fallbackTakSchema()
	}
	if _, ok := schemas["tak"]; !ok {
		schemas["tak"] = fallbackTakSchema()["tak"]
	}
	return schemas, nil
}
// loadProductSizeMatchData assembles the full size-match payload: every
// product-group rule with its aggregated size-group keys, plus the size
// schemas. A schema load failure degrades to the fallback schema rather than
// failing the whole request.
func loadProductSizeMatchData(pgDB *sql.DB) (*ProductSizeMatchResponse, error) {
	rows, err := pgDB.Query(`
	SELECT
	pg.id AS product_group_id,
	COALESCE(pg.kategori, ''),
	COALESCE(pg.urun_ana_grubu, ''),
	COALESCE(pg.urun_alt_grubu, ''),
	COALESCE(
	array_agg(DISTINCT sm.size_group_key ORDER BY sm.size_group_key)
	FILTER (WHERE sm.size_group_key IS NOT NULL),
	ARRAY[]::text[]
	) AS group_keys
	FROM mk_product_size_match sm
	JOIN mk_product_group pg
	ON pg.id = sm.product_group_id
	GROUP BY
	pg.id, pg.kategori, pg.urun_ana_grubu, pg.urun_alt_grubu
	ORDER BY pg.id
	`)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	// Best effort: fall back to the default schema if the lookup fails.
	schemas, err := loadSizeSchemas(pgDB)
	if err != nil {
		schemas = fallbackTakSchema()
	}
	resp := &ProductSizeMatchResponse{
		Rules:   make([]ProductSizeMatchRule, 0),
		Schemas: schemas,
	}
	for rows.Next() {
		var item ProductSizeMatchRule
		var arr pq.StringArray
		if err := rows.Scan(
			&item.ProductGroupID,
			&item.Kategori,
			&item.UrunAnaGrubu,
			&item.UrunAltGrubu,
			&arr,
		); err != nil {
			return nil, err
		}
		// Copy the aggregated keys, dropping blanks.
		item.GroupKeys = make([]string, 0, len(arr))
		for _, g := range arr {
			g = strings.TrimSpace(g)
			if g == "" {
				continue
			}
			item.GroupKeys = append(item.GroupKeys, g)
		}
		resp.Rules = append(resp.Rules, item)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return resp, nil
}
// GetProductSizeMatchRulesHandler serves GET /api/product-size-match/rules:
// the product-group to size-group-key rules together with the size schemas.
func GetProductSizeMatchRulesHandler(pgDB *sql.DB) http.Handler {
	handler := func(w http.ResponseWriter, r *http.Request) {
		payload, err := loadProductSizeMatchData(pgDB)
		if err != nil {
			http.Error(w, "product-size-match load failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		_ = json.NewEncoder(w).Encode(payload)
	}
	return http.HandlerFunc(handler)
}

View File

@@ -0,0 +1,85 @@
package routes
import (
"bssapp-backend/db"
"bssapp-backend/queries"
"context"
"encoding/json"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GetProductStockQueryHandler
// GET /api/product-stock-query?code=...
func GetProductStockQueryHandler(w http.ResponseWriter, r *http.Request) {
code := strings.TrimSpace(r.URL.Query().Get("code"))
if code == "" {
http.Error(w, "Eksik parametre: code gerekli", http.StatusBadRequest)
return
}
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
defer cancel()
rows, err := db.MssqlDB.QueryContext(ctx, queries.GetProductStockQuery, code)
if err != nil {
log.Printf("❌ [PRODUCT-STOCK-QUERY] SQL hatası: %v", err)
http.Error(w, "SQL hatası: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
columns, err := rows.Columns()
if err != nil {
http.Error(w, "Kolon bilgisi alınamadı", http.StatusInternalServerError)
return
}
result := make([]map[string]interface{}, 0, 128)
for rows.Next() {
raw := make([]interface{}, len(columns))
dest := make([]interface{}, len(columns))
for i := range raw {
dest[i] = &raw[i]
}
if err := rows.Scan(dest...); err != nil {
log.Printf("⚠️ [PRODUCT-STOCK-QUERY] scan hatası: %v", err)
continue
}
rowMap := make(map[string]interface{}, len(columns))
for i, c := range columns {
rowMap[c] = normalizeSQLValue(raw[i])
}
result = append(result, rowMap)
}
if err := rows.Err(); err != nil {
http.Error(w, "Satır okuma hatası: "+err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json; charset=utf-8")
_ = json.NewEncoder(w).Encode(result)
}
// normalizeSQLValue converts a raw scanned SQL value into a JSON-friendly
// form: NULL becomes "", byte slices become trimmed strings (numeric-looking
// values — including comma decimals — become float64), and every other type
// is passed through unchanged.
func normalizeSQLValue(v interface{}) interface{} {
	if v == nil {
		return ""
	}
	b, isBytes := v.([]byte)
	if !isBytes {
		return v
	}
	text := strings.TrimSpace(string(b))
	if text == "" {
		return ""
	}
	if parsed, err := strconv.ParseFloat(strings.ReplaceAll(text, ",", "."), 64); err == nil {
		return parsed
	}
	return text
}

View File

@@ -0,0 +1,243 @@
package routes
import (
"bssapp-backend/db"
"bssapp-backend/queries"
"context"
"encoding/json"
"log"
"net/http"
"net/url"
"strings"
"time"
)
// filterValueSeparator joins multiple selected filter values into a single
// SQL parameter; the ASCII unit separator never appears in legitimate values.
const filterValueSeparator = "\x1f"

// stockAttrFilters carries the parsed attribute filters from the
// product-stock query string.
type stockAttrFilters struct {
	kategori     string   // single-valued category filter
	urunAnaGrubu string   // single-valued main product group
	urunAltGrubu []string // multi-valued sub group
	renk         []string // color
	renk2        []string // secondary color / collar
	urunIcerigi  []string // fabric content
	fit          []string
	drop         []string
	beden        []string // size
}
// readStockAttrFilters parses the attribute filter query parameters,
// accepting both the new names and their legacy att* aliases.
func readStockAttrFilters(r *http.Request) stockAttrFilters {
	q := r.URL.Query()
	return stockAttrFilters{
		kategori:     readSingleFilter(q, "kategori", "att44"),
		urunAnaGrubu: readSingleFilter(q, "urun_ana_grubu", "att01"),
		urunAltGrubu: readMultiFilter(q, "urun_alt_grubu", "att02"),
		renk:         readMultiFilter(q, "renk"),
		renk2:        readMultiFilter(q, "renk2", "yaka"),
		urunIcerigi:  readMultiFilter(q, "urun_icerigi", "att41"),
		fit:          readMultiFilter(q, "fit", "att38"),
		drop:         readMultiFilter(q, "drop", "att11"),
		beden:        readMultiFilter(q, "beden"),
	}
}
// firstNonEmpty returns the first argument that is non-blank after trimming.
// The original (untrimmed) value is returned; "" when everything is blank.
func firstNonEmpty(vals ...string) string {
	for _, candidate := range vals {
		if strings.TrimSpace(candidate) == "" {
			continue
		}
		return candidate
	}
	return ""
}
func readSingleFilter(q url.Values, keys ...string) string {
for _, k := range keys {
for _, raw := range q[k] {
if v := strings.TrimSpace(raw); v != "" {
return v
}
}
}
return ""
}
// splitFilterToken breaks one raw query value into individual filter values.
// The ASCII unit separator (filterValueSeparator) and ";" are accepted as
// alternative delimiters alongside ",". Blank tokens are dropped; a fully
// blank input yields nil.
func splitFilterToken(raw string) []string {
	if raw = strings.TrimSpace(raw); raw == "" {
		return nil
	}
	normalized := strings.NewReplacer(filterValueSeparator, ",", ";", ",").Replace(raw)
	pieces := strings.Split(normalized, ",")
	values := make([]string, 0, len(pieces))
	for _, piece := range pieces {
		if trimmed := strings.TrimSpace(piece); trimmed != "" {
			values = append(values, trimmed)
		}
	}
	return values
}
// readMultiFilter collects the distinct filter values found under any of the
// given query keys, preserving first-seen order. Each raw value may itself
// contain multiple delimited tokens (see splitFilterToken).
func readMultiFilter(q url.Values, keys ...string) []string {
	var (
		visited = make(map[string]struct{}, 8)
		values  = make([]string, 0, 8)
	)
	for _, key := range keys {
		for _, raw := range q[key] {
			for _, token := range splitFilterToken(raw) {
				if _, dup := visited[token]; dup {
					continue
				}
				visited[token] = struct{}{}
				values = append(values, token)
			}
		}
	}
	return values
}
// joinFilterValues packs multiple filter values into one string using the
// ASCII unit separator (what the SQL side splits on); "" when empty.
func joinFilterValues(values []string) string {
	switch len(values) {
	case 0:
		return ""
	default:
		return strings.Join(values, filterValueSeparator)
	}
}
// GetProductStockAttributeOptionsHandler
// GET /api/product-stock-attribute-options
func GetProductStockAttributeOptionsHandler(w http.ResponseWriter, r *http.Request) {
f := readStockAttrFilters(r)
ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
defer cancel()
rows, err := db.MssqlDB.QueryContext(
ctx,
queries.GetProductStockAttributeOptionsQuery,
f.kategori,
f.urunAnaGrubu,
joinFilterValues(f.urunAltGrubu),
joinFilterValues(f.renk),
joinFilterValues(f.renk2),
joinFilterValues(f.urunIcerigi),
joinFilterValues(f.fit),
joinFilterValues(f.drop),
joinFilterValues(f.beden),
)
if err != nil {
log.Printf("[PRODUCT-STOCK-ATTR-OPTIONS] SQL hatasi: %v", err)
http.Error(w, "SQL hatasi: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
result := map[string][]string{
"kategori": {},
"urun_ana_grubu": {},
"urun_alt_grubu": {},
"renk": {},
"renk2": {},
"urun_icerigi": {},
"fit": {},
"drop": {},
"beden": {},
}
for rows.Next() {
var fieldName, fieldValue string
if err := rows.Scan(&fieldName, &fieldValue); err != nil {
continue
}
fieldName = strings.TrimSpace(fieldName)
fieldValue = strings.TrimSpace(fieldValue)
if fieldName == "" || fieldValue == "" {
continue
}
result[fieldName] = append(result[fieldName], fieldValue)
}
if err := rows.Err(); err != nil {
http.Error(w, "Satir okuma hatasi: "+err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json; charset=utf-8")
_ = json.NewEncoder(w).Encode(result)
}
// GetProductStockQueryByAttributesHandler
// GET /api/product-stock-query-by-attributes
func GetProductStockQueryByAttributesHandler(w http.ResponseWriter, r *http.Request) {
f := readStockAttrFilters(r)
if f.kategori == "" || f.urunAnaGrubu == "" {
http.Error(w, "Kategori ve Urun Ana Grubu secimi zorunludur", http.StatusBadRequest)
return
}
start := time.Now()
ctx, cancel := context.WithTimeout(context.Background(), 90*time.Second)
defer cancel()
log.Printf(
"[PRODUCT-STOCK-BY-ATTRS] request kategori=%q urun_ana_grubu=%q urun_alt_grubu=%q renk=%q renk2=%q urun_icerigi=%q fit=%q drop=%q beden=%q",
f.kategori, f.urunAnaGrubu, strings.Join(f.urunAltGrubu, ","), strings.Join(f.renk, ","), strings.Join(f.renk2, ","), strings.Join(f.urunIcerigi, ","), strings.Join(f.fit, ","), strings.Join(f.drop, ","), strings.Join(f.beden, ","),
)
rows, err := db.MssqlDB.QueryContext(
ctx,
queries.GetProductStockQueryByAttributes,
f.kategori,
f.urunAnaGrubu,
joinFilterValues(f.urunAltGrubu),
joinFilterValues(f.renk),
joinFilterValues(f.renk2),
joinFilterValues(f.urunIcerigi),
joinFilterValues(f.fit),
joinFilterValues(f.drop),
joinFilterValues(f.beden),
)
if err != nil {
log.Printf("[PRODUCT-STOCK-BY-ATTRS] SQL hatasi: %v", err)
http.Error(w, "SQL hatasi: "+err.Error(), http.StatusInternalServerError)
return
}
defer rows.Close()
columns, err := rows.Columns()
if err != nil {
http.Error(w, "Kolon bilgisi alinamadi", http.StatusInternalServerError)
return
}
result := make([]map[string]interface{}, 0, 256)
for rows.Next() {
raw := make([]interface{}, len(columns))
dest := make([]interface{}, len(columns))
for i := range raw {
dest[i] = &raw[i]
}
if err := rows.Scan(dest...); err != nil {
continue
}
rowMap := make(map[string]interface{}, len(columns))
for i, c := range columns {
rowMap[c] = normalizeSQLValue(raw[i])
}
result = append(result, rowMap)
}
if err := rows.Err(); err != nil {
log.Printf("[PRODUCT-STOCK-BY-ATTRS] rows err elapsed=%s err=%v", time.Since(start), err)
http.Error(w, "Satir okuma hatasi: "+err.Error(), http.StatusInternalServerError)
return
}
log.Printf("[PRODUCT-STOCK-BY-ATTRS] success rows=%d elapsed=%s", len(result), time.Since(start))
w.Header().Set("Content-Type", "application/json; charset=utf-8")
_ = json.NewEncoder(w).Encode(result)
}

View File

@@ -0,0 +1,128 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"log"
"net/http"
"strconv"
"strings"
"time"
)
// GetProductAttributesHandler returns the attribute option catalogue for an
// item type as a JSON array of models.ProductAttributeOption.
//
// Query parameters:
//   - itemTypeCode: positive integer, default 1.
//
// Responses: 401 without auth claims, 400 on an invalid itemTypeCode, 500 on
// query/iteration errors, otherwise 200 with JSON.
func GetProductAttributesHandler(w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	itemTypeCode := int16(1)
	if raw := r.URL.Query().Get("itemTypeCode"); raw != "" {
		v, err := strconv.Atoi(raw)
		if err != nil || v <= 0 {
			http.Error(w, "itemTypeCode gecersiz", http.StatusBadRequest)
			return
		}
		itemTypeCode = int16(v)
	}
	log.Printf("[GetProductAttributes] start user=%s itemTypeCode=%d", claims.Username, itemTypeCode)
	rows, err := db.MssqlDB.Query(queries.GetProductAttributes, itemTypeCode)
	if err != nil {
		log.Printf("[GetProductAttributes] query_error user=%s itemTypeCode=%d err=%v duration_ms=%d",
			claims.Username, itemTypeCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product attributes alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	list := make([]models.ProductAttributeOption, 0, 256)
	for rows.Next() {
		var x models.ProductAttributeOption
		if err := rows.Scan(
			&x.ItemTypeCode,
			&x.AttributeTypeCode,
			&x.AttributeTypeDescription,
			&x.AttributeCode,
			&x.AttributeDescription,
		); err != nil {
			// Best effort: a malformed row is skipped, not fatal.
			continue
		}
		list = append(list, x)
	}
	if err := rows.Err(); err != nil {
		log.Printf("[GetProductAttributes] rows_error user=%s itemTypeCode=%d err=%v duration_ms=%d",
			claims.Username, itemTypeCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product attributes okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
	log.Printf("[GetProductAttributes] done user=%s itemTypeCode=%d count=%d duration_ms=%d",
		claims.Username, itemTypeCode, len(list), time.Since(start).Milliseconds())
}
// GetProductItemAttributesHandler returns the attribute values assigned to a
// single item as a JSON array of models.ProductItemAttributeValue.
//
// Query parameters:
//   - itemTypeCode: positive integer, default 1.
//   - itemCode: required item code.
//
// Responses: 401 without auth claims, 400 on invalid/missing parameters,
// 500 on query/iteration errors, otherwise 200 with JSON.
func GetProductItemAttributesHandler(w http.ResponseWriter, r *http.Request) {
	start := time.Now()
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	itemTypeCode := int16(1)
	if raw := r.URL.Query().Get("itemTypeCode"); raw != "" {
		v, err := strconv.Atoi(raw)
		if err != nil || v <= 0 {
			http.Error(w, "itemTypeCode gecersiz", http.StatusBadRequest)
			return
		}
		itemTypeCode = int16(v)
	}
	itemCode := strings.TrimSpace(r.URL.Query().Get("itemCode"))
	if itemCode == "" {
		http.Error(w, "itemCode zorunlu", http.StatusBadRequest)
		return
	}
	log.Printf("[GetProductItemAttributes] start user=%s itemTypeCode=%d itemCode=%s", claims.Username, itemTypeCode, itemCode)
	rows, err := db.MssqlDB.Query(queries.GetProductItemAttributes, itemTypeCode, itemCode)
	if err != nil {
		log.Printf("[GetProductItemAttributes] query_error user=%s itemTypeCode=%d itemCode=%s err=%v duration_ms=%d",
			claims.Username, itemTypeCode, itemCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product item attributes alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	list := make([]models.ProductItemAttributeValue, 0, 64)
	for rows.Next() {
		var x models.ProductItemAttributeValue
		if err := rows.Scan(
			&x.ItemTypeCode,
			&x.AttributeTypeCode,
			&x.AttributeCode,
		); err != nil {
			// Best effort: a malformed row is skipped, not fatal.
			continue
		}
		list = append(list, x)
	}
	if err := rows.Err(); err != nil {
		log.Printf("[GetProductItemAttributes] rows_error user=%s itemTypeCode=%d itemCode=%s err=%v duration_ms=%d",
			claims.Username, itemTypeCode, itemCode, err, time.Since(start).Milliseconds())
		http.Error(w, "Product item attributes okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
	log.Printf("[GetProductItemAttributes] done user=%s itemTypeCode=%d itemCode=%s count=%d duration_ms=%d",
		claims.Username, itemTypeCode, itemCode, len(list), time.Since(start).Milliseconds())
}

View File

@@ -0,0 +1,45 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"log"
"net/http"
)
// GetProductNewColorsHandler returns the color list for a new product code
// as a JSON array of models.ProductColor.
//
// Query parameters:
//   - code (required): the product code.
//
// Responses: 401 without auth claims, 400 on a missing code, 500 on query or
// iteration errors, otherwise 200 with JSON.
func GetProductNewColorsHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}
	code := r.URL.Query().Get("code")
	if code == "" {
		http.Error(w, "Eksik parametre: code gerekli", http.StatusBadRequest)
		return
	}
	rows, err := db.MssqlDB.Query(queries.GetProductNewColors, code)
	if err != nil {
		http.Error(w, "Yeni urun renk listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	var list []models.ProductColor
	for rows.Next() {
		var c models.ProductColor
		if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ColorDescription); err != nil {
			// Best effort: a malformed row is skipped, not fatal.
			log.Println("Satir okunamadi:", err)
			continue
		}
		list = append(list, c)
	}
	// Fix: the original never checked rows.Err(), so a mid-stream iteration
	// failure silently produced a truncated 200 response.
	if err := rows.Err(); err != nil {
		http.Error(w, "Yeni urun renk listesi okunamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(list)
}

View File

@@ -0,0 +1,51 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/db"
"bssapp-backend/models"
"bssapp-backend/queries"
"database/sql"
"encoding/json"
"log"
"net/http"
)
// GetProductNewSecondColorsHandler returns the second-color (ItemDim2) list
// for a new product/color pair as JSON. Both "code" and "color" query
// parameters are required; the query uses named SQL parameters.
func GetProductNewSecondColorsHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}

	q := r.URL.Query()
	productCode := q.Get("code")
	colorCode := q.Get("color")
	if productCode == "" || colorCode == "" {
		http.Error(w, "Eksik parametre: code ve color gerekli", http.StatusBadRequest)
		return
	}

	rows, err := db.MssqlDB.Query(
		queries.GetProductNewSecondColors,
		sql.Named("ProductCode", productCode),
		sql.Named("ColorCode", colorCode),
	)
	if err != nil {
		http.Error(w, "Yeni urun 2. renk listesi alinamadi: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	var secondColors []models.ProductSecondColor
	for rows.Next() {
		var row models.ProductSecondColor
		// A bad row is logged and skipped; remaining rows are still returned.
		if scanErr := rows.Scan(&row.ProductCode, &row.ColorCode, &row.ItemDim2Code, &row.ColorDescription); scanErr != nil {
			log.Println("Satir okunamadi:", scanErr)
			continue
		}
		secondColors = append(secondColors, row)
	}

	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(secondColors)
}

View File

@@ -45,7 +45,7 @@ func GetProductSecondColorsHandler(w http.ResponseWriter, r *http.Request) {
var list []models.ProductSecondColor
for rows.Next() {
var c models.ProductSecondColor
if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ItemDim2Code); err != nil {
if err := rows.Scan(&c.ProductCode, &c.ColorCode, &c.ItemDim2Code, &c.ColorDescription); err != nil {
log.Println("⚠️ Satır okunamadı:", err)
continue
}

View File

@@ -0,0 +1,103 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"net/http"
"strings"
"time"
)
// GET /api/finance/account-aging-statement
//
// GetStatementAgingHandler rebuilds the aging cache and returns the aging
// statement rows as JSON. The end date comes from "enddate", then
// "selected_date", and finally defaults to today.
func GetStatementAgingHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}

	q := r.URL.Query()
	endDate := strings.TrimSpace(q.Get("enddate"))
	if endDate == "" {
		endDate = strings.TrimSpace(q.Get("selected_date"))
	}
	if endDate == "" {
		endDate = time.Now().Format("2006-01-02")
	}

	params := models.StatementAgingParams{
		AccountCode: strings.TrimSpace(q.Get("accountcode")),
		EndDate:     endDate,
		Parislemler: q["parislemler"],
	}

	// The cache rebuild must succeed before querying, otherwise the rows
	// would reflect stale staging data.
	if err := queries.RebuildStatementAgingCache(r.Context()); err != nil {
		http.Error(w, "Error rebuilding aging cache: "+err.Error(), http.StatusInternalServerError)
		return
	}

	rows, err := queries.GetStatementAging(params)
	if err != nil {
		http.Error(w, "Error fetching aging statement: "+err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	if err := json.NewEncoder(w).Encode(rows); err != nil {
		http.Error(w, "Error encoding response: "+err.Error(), http.StatusInternalServerError)
	}
}
// GET /api/finance/aged-customer-balance-list
//
// GetAgedCustomerBalanceListHandler rebuilds the aging cache and returns the
// aged customer balance list as JSON. An explicit "accountcode" overrides the
// free-text "cari_search" filter.
func GetAgedCustomerBalanceListHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}

	q := r.URL.Query()
	selectedDate := strings.TrimSpace(q.Get("enddate"))
	if selectedDate == "" {
		selectedDate = strings.TrimSpace(q.Get("selected_date"))
	}
	if selectedDate == "" {
		selectedDate = time.Now().Format("2006-01-02")
	}

	params := models.StatementAgingParams{
		AccountCode: strings.TrimSpace(q.Get("accountcode")),
		EndDate:     selectedDate,
		Parislemler: q["parislemler"],
	}

	listParams := models.CustomerBalanceListParams{
		SelectedDate: selectedDate,
		CariSearch:   strings.TrimSpace(q.Get("cari_search")),
		CariIlkGrup:  strings.TrimSpace(q.Get("cari_ilk_grup")),
		Piyasa:       strings.TrimSpace(q.Get("piyasa")),
		Temsilci:     strings.TrimSpace(q.Get("temsilci")),
		RiskDurumu:   strings.TrimSpace(q.Get("risk_durumu")),
		IslemTipi:    strings.TrimSpace(q.Get("islem_tipi")),
		Ulke:         strings.TrimSpace(q.Get("ulke")),
		Il:           strings.TrimSpace(q.Get("il")),
		Ilce:         strings.TrimSpace(q.Get("ilce")),
	}
	// A concrete account code wins over the free-text search.
	if params.AccountCode != "" {
		listParams.CariSearch = params.AccountCode
	}

	if err := queries.RebuildStatementAgingCache(r.Context()); err != nil {
		http.Error(w, "Error rebuilding aging cache: "+err.Error(), http.StatusInternalServerError)
		return
	}

	rows, err := queries.GetStatementAgingBalanceList(r.Context(), listParams)
	if err != nil {
		http.Error(w, "Error fetching aging statement: "+err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	if err := json.NewEncoder(w).Encode(rows); err != nil {
		http.Error(w, "Error encoding response: "+err.Error(), http.StatusInternalServerError)
	}
}

View File

@@ -0,0 +1,34 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/queries"
"encoding/json"
"net/http"
)
// agingCacheRefreshResponse is the JSON payload returned after a manual
// aging-cache rebuild request.
type agingCacheRefreshResponse struct {
	OK      bool   `json:"ok"`      // true when the rebuild completed without error
	Message string `json:"message"` // human-readable status text (Turkish)
}
// POST /api/finance/account-aging-statement/rebuild-cache
// Runs only step2 + step3.
//
// RebuildStatementAgingCacheHandler triggers a rebuild of the aging cache and
// reports success as a small JSON status object.
func RebuildStatementAgingCacheHandler(w http.ResponseWriter, r *http.Request) {
	claims, ok := auth.GetClaimsFromContext(r.Context())
	if !ok || claims == nil {
		http.Error(w, "unauthorized", http.StatusUnauthorized)
		return
	}

	if err := queries.RebuildStatementAgingCache(r.Context()); err != nil {
		http.Error(w, "cache rebuild error: "+err.Error(), http.StatusInternalServerError)
		return
	}

	resp := agingCacheRefreshResponse{
		OK:      true,
		Message: "SP_BUILD_CARI_VADE_GUN_STAGING -> SP_BUILD_CARI_BAKIYE_CACHE çalıştırıldı.",
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	_ = json.NewEncoder(w).Encode(resp)
}

View File

@@ -0,0 +1,104 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"database/sql"
"fmt"
"net/http"
"strings"
"time"
"github.com/xuri/excelize/v2"
)
// ExportStatementAgingExcelHandler returns a handler that exports the aged
// customer balance list as an .xlsx workbook.
//
// Query parameters mirror the JSON endpoint: enddate / selected_date
// (defaults to today), accountcode (overrides cari_search), the cari_*
// filter fields, and exclude_zero_12 / exclude_zero_13 to drop zero-balance
// rows. The *sql.DB argument is unused; data access goes through the queries
// package.
func ExportStatementAgingExcelHandler(_ *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		selectedDate := strings.TrimSpace(r.URL.Query().Get("enddate"))
		if selectedDate == "" {
			selectedDate = strings.TrimSpace(r.URL.Query().Get("selected_date"))
		}
		if selectedDate == "" {
			selectedDate = time.Now().Format("2006-01-02")
		}
		params := models.CustomerBalanceListParams{
			SelectedDate: selectedDate,
			CariSearch:   strings.TrimSpace(r.URL.Query().Get("cari_search")),
			CariIlkGrup:  strings.TrimSpace(r.URL.Query().Get("cari_ilk_grup")),
			Piyasa:       strings.TrimSpace(r.URL.Query().Get("piyasa")),
			Temsilci:     strings.TrimSpace(r.URL.Query().Get("temsilci")),
			RiskDurumu:   strings.TrimSpace(r.URL.Query().Get("risk_durumu")),
			IslemTipi:    strings.TrimSpace(r.URL.Query().Get("islem_tipi")),
			Ulke:         strings.TrimSpace(r.URL.Query().Get("ulke")),
			Il:           strings.TrimSpace(r.URL.Query().Get("il")),
			Ilce:         strings.TrimSpace(r.URL.Query().Get("ilce")),
		}
		// A concrete account code wins over the free-text search filter.
		if accountCode := strings.TrimSpace(r.URL.Query().Get("accountcode")); accountCode != "" {
			params.CariSearch = accountCode
		}
		excludeZero12 := parseBoolQuery(r.URL.Query().Get("exclude_zero_12"))
		excludeZero13 := parseBoolQuery(r.URL.Query().Get("exclude_zero_13"))
		if err := queries.RebuildStatementAgingCache(r.Context()); err != nil {
			http.Error(w, "Error rebuilding aging cache: "+err.Error(), http.StatusInternalServerError)
			return
		}
		rows, err := queries.GetStatementAgingBalanceList(r.Context(), params)
		if err != nil {
			http.Error(w, "db error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		rows = filterCustomerBalanceRowsForPDF(rows, excludeZero12, excludeZero13)
		summaries, _ := buildCustomerBalancePDFData(rows)
		f := excelize.NewFile()
		// FIX: excelize keeps temporary stream files until Close is called;
		// without this every export leaked a temp file on disk.
		defer func() {
			_ = f.Close()
		}()
		sheet := "CariYaslandirma"
		f.SetSheetName("Sheet1", sheet)
		headers := []string{
			"Ana Cari Kodu", "Ana Cari Detay", "Piyasa", "Temsilci", "Risk Durumu",
			"1_2 Bakiye Pr.Br", "1_3 Bakiye Pr.Br", "1_2 USD Bakiye", "1_2 TRY Bakiye",
			"1_3 USD Bakiye", "1_3 TRY Bakiye", "Vade Gun", "Belge Tarihi Gun",
		}
		for i, h := range headers {
			cell, _ := excelize.CoordinatesToCellName(i+1, 1)
			// Cell-level errors are ignored: a bad header cell is cosmetic
			// and must not abort the export.
			_ = f.SetCellValue(sheet, cell, h)
		}
		rowNo := 2
		for _, s := range summaries {
			_ = f.SetSheetRow(sheet, fmt.Sprintf("A%d", rowNo), &[]any{
				s.AnaCariKodu, s.AnaCariAdi, s.Piyasa, s.Temsilci, s.RiskDurumu,
				formatCurrencyMapPDF(s.Bakiye12Map), formatCurrencyMapPDF(s.Bakiye13Map),
				s.USDBakiye12, s.TLBakiye12, s.USDBakiye13, s.TLBakiye13, s.VadeGun, s.VadeBelge,
			})
			rowNo++
		}
		_ = f.SetColWidth(sheet, "A", "A", 16)
		_ = f.SetColWidth(sheet, "B", "B", 34)
		_ = f.SetColWidth(sheet, "C", "F", 18)
		_ = f.SetColWidth(sheet, "G", "M", 18)
		buf, err := f.WriteToBuffer()
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		filename := fmt.Sprintf("cari_yaslandirmali_bakiye_%s.xlsx", time.Now().Format("20060102_150405"))
		w.Header().Set("Content-Type", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
		w.Header().Set("Content-Disposition", "attachment; filename=\""+filename+"\"")
		w.Header().Set("Content-Length", fmt.Sprint(buf.Len()))
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write(buf.Bytes())
	}
}

View File

@@ -0,0 +1,128 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
"database/sql"
"fmt"
"log"
"net/http"
"runtime/debug"
"strings"
"time"
"github.com/jung-kurt/gofpdf"
)
// ExportStatementAgingPDFHandler returns a handler that renders the aged
// customer balance list as a PDF (summary or detailed, via ?detailed=1).
//
// If the primary renderer (safeDrawCustomerBalancePDF) fails, the handler
// falls back to a simpler layout on a fresh document. The *sql.DB argument
// is unused; data access goes through the queries package.
func ExportStatementAgingPDFHandler(_ *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if rec := recover(); rec != nil {
				// SECURITY FIX: log the full stack for diagnosis, but never
				// echo it to the client — stack traces leak internal paths
				// and code structure.
				log.Printf("❌ ExportStatementAgingPDFHandler panic: %v\n%s", rec, debug.Stack())
				http.Error(w, "internal server error", http.StatusInternalServerError)
			}
		}()
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		selectedDate := strings.TrimSpace(r.URL.Query().Get("enddate"))
		if selectedDate == "" {
			selectedDate = strings.TrimSpace(r.URL.Query().Get("selected_date"))
		}
		if selectedDate == "" {
			selectedDate = time.Now().Format("2006-01-02")
		}
		params := models.CustomerBalanceListParams{
			SelectedDate: selectedDate,
			CariSearch:   strings.TrimSpace(r.URL.Query().Get("cari_search")),
			CariIlkGrup:  strings.TrimSpace(r.URL.Query().Get("cari_ilk_grup")),
			Piyasa:       strings.TrimSpace(r.URL.Query().Get("piyasa")),
			Temsilci:     strings.TrimSpace(r.URL.Query().Get("temsilci")),
			RiskDurumu:   strings.TrimSpace(r.URL.Query().Get("risk_durumu")),
			IslemTipi:    strings.TrimSpace(r.URL.Query().Get("islem_tipi")),
			Ulke:         strings.TrimSpace(r.URL.Query().Get("ulke")),
			Il:           strings.TrimSpace(r.URL.Query().Get("il")),
			Ilce:         strings.TrimSpace(r.URL.Query().Get("ilce")),
		}
		// A concrete account code wins over the free-text search filter.
		if accountCode := strings.TrimSpace(r.URL.Query().Get("accountcode")); accountCode != "" {
			params.CariSearch = accountCode
		}
		if err := queries.RebuildStatementAgingCache(r.Context()); err != nil {
			http.Error(w, "Error rebuilding aging cache: "+err.Error(), http.StatusInternalServerError)
			return
		}
		detailed := parseBoolQuery(r.URL.Query().Get("detailed"))
		excludeZero12 := parseBoolQuery(r.URL.Query().Get("exclude_zero_12"))
		excludeZero13 := parseBoolQuery(r.URL.Query().Get("exclude_zero_13"))
		rows, err := queries.GetStatementAgingBalanceList(r.Context(), params)
		if err != nil {
			http.Error(w, "db error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		rows = filterCustomerBalanceRowsForPDF(rows, excludeZero12, excludeZero13)
		summaries, detailsByMaster := buildCustomerBalancePDFData(rows)
		sortBy := strings.TrimSpace(r.URL.Query().Get("sort_by"))
		sortDesc := parseBoolQuery(r.URL.Query().Get("sort_desc"))
		sortBalanceSummariesForPDF(summaries, sortBy, sortDesc)
		pdf := gofpdf.New("L", "mm", "A4", "")
		pdf.SetMargins(8, 8, 8)
		pdf.SetAutoPageBreak(false, 12)
		if err := registerDejavuFonts(pdf, "dejavu"); err != nil {
			http.Error(w, "pdf font error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		if err := safeDrawCustomerBalancePDF(
			pdf,
			selectedDate,
			params.CariSearch,
			detailed,
			"Cari Yaslandirmali Ekstre",
			true,
			summaries,
			detailsByMaster,
		); err != nil {
			// Primary layout failed: start over on a fresh document with the
			// simpler fallback renderer (auto page break enabled).
			pdf = gofpdf.New("L", "mm", "A4", "")
			pdf.SetMargins(8, 8, 8)
			pdf.SetAutoPageBreak(true, 12)
			if ferr := registerDejavuFonts(pdf, "dejavu"); ferr != nil {
				http.Error(w, "pdf font error: "+ferr.Error(), http.StatusInternalServerError)
				return
			}
			drawCustomerBalancePDFFallback(pdf, selectedDate, params.CariSearch, "Cari Yaslandirmali Ekstre", summaries, true)
		}
		if err := pdf.Error(); err != nil {
			http.Error(w, "pdf render error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		var buf bytes.Buffer
		if err := pdf.Output(&buf); err != nil {
			http.Error(w, "pdf output error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		filename := "account-aging-summary.pdf"
		if detailed {
			filename = "account-aging-detailed.pdf"
		}
		w.Header().Set("Content-Type", "application/pdf")
		w.Header().Set("Content-Disposition", fmt.Sprintf("inline; filename=%q", filename))
		_, _ = w.Write(buf.Bytes())
	}
}

View File

@@ -0,0 +1,679 @@
package routes
import (
"bssapp-backend/auth"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
"database/sql"
"fmt"
"net/http"
"sort"
"strconv"
"strings"
"time"
"github.com/jung-kurt/gofpdf"
)
// agingScreenPDFRow is one matched invoice/payment line of the aging screen,
// flattened from the query's map rows (see the pickString/pickFloat calls in
// ExportStatementAgingScreenPDFHandler for the source column keys).
type agingScreenPDFRow struct {
	Cari8            string  // master account code; grouping key for the PDF
	CariDetay        string  // master account display name
	FaturaCari       string  // invoice-side account
	OdemeCari        string  // payment-side account
	FaturaRef        string  // invoice document reference
	OdemeRef         string  // payment document reference
	FaturaTarihi     string  // invoice date (string; formatted later by formatDateTR)
	OdemeTarihi      string  // payment due date
	OdemeDocDate     string  // payment document date
	EslesenTutar     float64 // matched amount in document currency
	UsdTutar         float64 // matched amount converted to USD
	CurrencyUsdRate  float64 // exchange rate; falls back to the TRY-rate keys when the USD keys are absent
	GunSayisi        float64 // day count (due-date based)
	GunSayisiDocDate float64 // day count (document-date based)
	Aciklama         string  // description; "ACIKKALEM" marks an open item
	DocCurrencyCode  string  // document currency code
}
// ExportStatementAgingScreenPDFHandler returns a handler that renders the
// on-screen aging statement as an A3 landscape PDF.
//
// It rebuilds the aging cache, fetches the raw map rows, flattens them into
// agingScreenPDFRow values, and delegates layout to
// drawStatementAgingScreenPDF. The *sql.DB argument is unused.
func ExportStatementAgingScreenPDFHandler(_ *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		claims, ok := auth.GetClaimsFromContext(r.Context())
		if !ok || claims == nil {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		selectedDate := strings.TrimSpace(r.URL.Query().Get("enddate"))
		if selectedDate == "" {
			selectedDate = strings.TrimSpace(r.URL.Query().Get("selected_date"))
		}
		if selectedDate == "" {
			selectedDate = time.Now().Format("2006-01-02")
		}
		params := models.StatementAgingParams{
			AccountCode: strings.TrimSpace(r.URL.Query().Get("accountcode")),
			EndDate:     selectedDate,
			Parislemler: r.URL.Query()["parislemler"],
		}
		if err := queries.RebuildStatementAgingCache(r.Context()); err != nil {
			http.Error(w, "Error rebuilding aging cache: "+err.Error(), http.StatusInternalServerError)
			return
		}
		rawRows, err := queries.GetStatementAging(params)
		if err != nil {
			http.Error(w, "Error fetching aging statement: "+err.Error(), http.StatusInternalServerError)
			return
		}
		rows := make([]agingScreenPDFRow, 0, len(rawRows))
		// FIX: the loop variable was previously named "r", shadowing the
		// *http.Request parameter that is read again right after the loop.
		for _, raw := range rawRows {
			rows = append(rows, agingScreenPDFRow{
				Cari8:            pickString(raw, "Cari8", "cari8"),
				CariDetay:        pickString(raw, "CariDetay", "cari_detay"),
				FaturaCari:       pickString(raw, "FaturaCari", "fatura_cari"),
				OdemeCari:        pickString(raw, "OdemeCari", "odeme_cari"),
				FaturaRef:        pickString(raw, "FaturaRef", "fatura_ref"),
				OdemeRef:         pickString(raw, "OdemeRef", "odeme_ref"),
				FaturaTarihi:     pickString(raw, "FaturaTarihi", "fatura_tarihi"),
				OdemeTarihi:      pickString(raw, "OdemeTarihi", "odeme_tarihi"),
				OdemeDocDate:     pickString(raw, "OdemeDocDate", "odeme_doc_date"),
				EslesenTutar:     pickFloat(raw, "EslesenTutar", "eslesen_tutar"),
				UsdTutar:         pickFloat(raw, "UsdTutar", "usd_tutar"),
				CurrencyUsdRate:  pickFloat(raw, "CurrencyUsdRate", "currency_usd_rate", "CurrencyTryRate", "currency_try_rate"),
				GunSayisi:        pickFloat(raw, "GunSayisi", "gun_sayisi"),
				GunSayisiDocDate: pickFloat(raw, "GunSayisi_DocDate", "gun_sayisi_docdate"),
				Aciklama:         pickString(raw, "Aciklama", "aciklama"),
				DocCurrencyCode:  pickString(raw, "DocCurrencyCode", "doc_currency_code"),
			})
		}
		sortBy := strings.TrimSpace(r.URL.Query().Get("sort_by"))
		sortDesc := parseBoolQuery(r.URL.Query().Get("sort_desc"))
		pdf := gofpdf.New("L", "mm", "A3", "")
		pdf.SetMargins(8, 8, 8)
		pdf.SetAutoPageBreak(false, 10)
		if err := registerDejavuFonts(pdf, "dejavu"); err != nil {
			http.Error(w, "pdf font error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		drawStatementAgingScreenPDF(pdf, selectedDate, params.AccountCode, rows, sortBy, sortDesc)
		if err := pdf.Error(); err != nil {
			http.Error(w, "pdf render error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		var buf bytes.Buffer
		if err := pdf.Output(&buf); err != nil {
			http.Error(w, "pdf output error: "+err.Error(), http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/pdf")
		w.Header().Set("Content-Disposition", `inline; filename="account-aging-screen.pdf"`)
		_, _ = w.Write(buf.Bytes())
	}
}
// agingScreenMasterPDF aggregates all rows of one master account (Cari8)
// for the top-level band of the aging-screen PDF. Weighted fields accumulate
// abs(USD amount) as the weight so that averages can later be computed as
// WeightedXxxSum / WeightedBase (see statementAgingAvg).
type agingScreenMasterPDF struct {
	GroupKey          string  // same as Cari8; key into currenciesByMaster
	Cari8             string  // master account code
	CariDetay         string  // master account display name (first non-empty seen)
	SatirSayisi       int     // number of detail rows aggregated
	ToplamUSD         float64 // sum of UsdTutar over all rows
	NormalUSD         float64 // USD sum of rows not marked "ACIKKALEM"
	AcikKalemUSD      float64 // USD sum of open items ("ACIKKALEM")
	KurWeightedBase   float64 // weight base for the rate average (rows with a positive rate)
	KurWeightedSum    float64 // sum of weight * rate
	KurFallback       float64 // last positive rate seen; used when no weighted base exists
	WeightedBase      float64 // weight base for the day averages (abs USD > 0)
	WeightedGunSum    float64 // sum of weight * GunSayisi
	WeightedGunDocSum float64 // sum of weight * GunSayisiDocDate
}
// agingScreenCurrencyPDF aggregates the rows of one (master account, document
// currency) pair for the per-currency subtotal band of the aging-screen PDF.
// Weighting follows the same scheme as agingScreenMasterPDF.
type agingScreenCurrencyPDF struct {
	GroupKey          string  // "masterKey|currency"; key into detailsByCurrency
	MasterKey         string  // owning master account code (Cari8)
	DocCurrencyCode   string  // uppercased currency code; "N/A" when blank
	SatirSayisi       int     // number of detail rows aggregated
	ToplamTutar       float64 // sum of EslesenTutar in document currency
	ToplamUSD         float64 // sum of UsdTutar
	NormalTutar       float64 // document-currency sum of non-open items
	AcikKalemTutar    float64 // document-currency sum of open items ("ACIKKALEM")
	KurWeightedBase   float64 // weight base for the rate average
	KurWeightedSum    float64 // sum of weight * rate
	KurFallback       float64 // last positive rate seen; fallback for the average
	WeightedBase      float64 // weight base (abs USD > 0) for the day averages
	WeightedGunSum    float64 // sum of weight * GunSayisi
	WeightedGunDocSum float64 // sum of weight * GunSayisiDocDate
}
// drawStatementAgingScreenPDF renders the aging screen as a three-level
// table: a master row per account, a per-currency subtotal band beneath it,
// and the matched detail lines under each currency.
//
// Auto page break is disabled by the caller, so vertical space is managed
// manually here: needPage checks whether the next row fits, and on overflow
// the relevant headers are re-drawn on a fresh page before continuing.
func drawStatementAgingScreenPDF(pdf *gofpdf.Fpdf, selectedDate, accountCode string, rows []agingScreenPDFRow, sortBy string, sortDesc bool) {
	masters, currenciesByMaster, detailsByCurrency := buildStatementAgingScreenPDFData(rows, sortBy, sortDesc)
	pageW, pageH := pdf.GetPageSize()
	marginL, marginT, marginR, marginB := 8.0, 8.0, 8.0, 10.0
	tableW := pageW - marginL - marginR
	// Column captions and relative width weights for each table level;
	// normalizeWidths scales the weights to fill the printable width.
	masterCols := []string{"Ana Cari Kod", "Ana Cari Detay", "Satir", "Toplam USD", "Normal USD", "Acik Kalem USD", "Ort. Gun", "Ort. Gun (DocDate)", "Kur"}
	masterW := normalizeWidths([]float64{22, 40, 12, 18, 18, 20, 14, 18, 12}, tableW)
	currencyCols := []string{"Doviz", "Satir", "Toplam Tutar", "Toplam USD", "Normal", "Acik Kalem", "Kur", "Ort. Gun", "Ort. Gun (DocDate)"}
	currencyW := normalizeWidths([]float64{16, 12, 20, 18, 18, 18, 12, 14, 18}, tableW)
	detailCols := []string{"Fatura Cari", "Odeme Cari", "Fatura Ref", "Odeme Ref", "Fatura Tarihi", "Odeme Vade", "Odeme DocDate", "Eslesen Tutar", "USD Tutar", "Kur", "Gun", "Gun (DocDate)", "Aciklama", "Doviz"}
	detailW := normalizeWidths([]float64{18, 18, 22, 22, 16, 16, 18, 18, 16, 12, 10, 13, 30, 11}, tableW)
	// header starts a new page, draws the report title, date/account info and
	// a horizontal rule, and leaves Y positioned for table content.
	header := func() {
		pdf.AddPage()
		pdf.SetFont("dejavu", "B", 14)
		pdf.SetTextColor(149, 113, 22)
		pdf.SetXY(marginL, marginT)
		pdf.CellFormat(150, 6, "Cari Yaslandirmali Ekstre", "", 0, "L", false, 0, "")
		pdf.SetFont("dejavu", "", 8.5)
		pdf.SetTextColor(30, 30, 30)
		pdf.SetXY(pageW-marginR-90, marginT+0.5)
		pdf.CellFormat(90, 4.8, "Son Tarih: "+formatDateTR(selectedDate), "", 0, "R", false, 0, "")
		if strings.TrimSpace(accountCode) != "" {
			pdf.SetXY(pageW-marginR-90, marginT+5)
			pdf.CellFormat(90, 4.8, "Cari: "+accountCode, "", 0, "R", false, 0, "")
		}
		pdf.SetDrawColor(149, 113, 22)
		pdf.Line(marginL, marginT+10.5, pageW-marginR, marginT+10.5)
		pdf.SetDrawColor(200, 200, 200)
		pdf.SetY(marginT + 13)
	}
	// needPage reports whether a row of height needH would overflow the page.
	needPage := func(needH float64) bool {
		return pdf.GetY()+needH+marginB > pageH
	}
	// drawHeaderRow paints one filled header band (cols/widths) in the given
	// fill color and font size, advancing Y past it.
	drawHeaderRow := func(cols []string, widths []float64, h float64, r, g, b int, fontSize float64) {
		pdf.SetFont("dejavu", "B", fontSize)
		pdf.SetTextColor(255, 255, 255)
		pdf.SetFillColor(r, g, b)
		y := pdf.GetY()
		x := marginL
		for i, c := range cols {
			pdf.Rect(x, y, widths[i], h, "DF")
			pdf.SetXY(x+0.8, y+0.9)
			pdf.CellFormat(widths[i]-1.6, h-1.8, c, "", 0, "C", false, 0, "")
			x += widths[i]
		}
		pdf.SetY(y + h)
	}
	// setDataTextStyle switches back to the regular data font/color after a
	// header band has changed them.
	setDataTextStyle := func(size float64, r, g, b int) {
		pdf.SetFont("dejavu", "", size)
		pdf.SetTextColor(r, g, b)
	}
	header()
	drawHeaderRow(masterCols, masterW, 6.2, 149, 113, 22, 7.2)
	setDataTextStyle(7, 25, 25, 25)
	for _, m := range masters {
		// Master summary row: averages are weight-normalized sums; the rate
		// column falls back to the last seen rate when no weighted base exists.
		masterLine := []string{
			m.Cari8,
			m.CariDetay,
			strconv.Itoa(m.SatirSayisi),
			formatMoneyPDF(m.ToplamUSD),
			formatMoneyPDF(m.NormalUSD),
			formatMoneyPDF(m.AcikKalemUSD),
			fmt.Sprintf("%.0f", statementAgingAvg(m.WeightedGunSum, m.WeightedBase)),
			fmt.Sprintf("%.0f", statementAgingAvg(m.WeightedGunDocSum, m.WeightedBase)),
			formatMoneyPDF(statementAgingAvg(m.KurWeightedSum, m.KurWeightedBase, m.KurFallback)),
		}
		// Only the detail-name column (index 1) may wrap to multiple lines.
		rowH := calcPDFRowHeight(pdf, masterLine, masterW, map[int]bool{1: true}, 5.8, 3.3)
		if needPage(rowH) {
			header()
			drawHeaderRow(masterCols, masterW, 6.2, 149, 113, 22, 7.2)
			setDataTextStyle(7, 25, 25, 25)
		}
		setDataTextStyle(7, 25, 25, 25)
		y := pdf.GetY()
		x := marginL
		for i, v := range masterLine {
			pdf.Rect(x, y, masterW[i], rowH, "")
			align := "L"
			if i >= 2 {
				align = "R"
			}
			if i == 6 || i == 7 {
				align = "C"
			}
			drawPDFCellWrapped(pdf, v, x, y, masterW[i], rowH, align, 3.3)
			x += masterW[i]
		}
		pdf.SetY(y + rowH)
		for _, c := range currenciesByMaster[m.GroupKey] {
			// 11.2mm ≈ currency header + subtotal row; re-draw the master
			// header if that much space is not left.
			if needPage(11.2) {
				header()
				drawHeaderRow(masterCols, masterW, 6.2, 149, 113, 22, 7.2)
				setDataTextStyle(7, 25, 25, 25)
			}
			pdf.SetFont("dejavu", "B", 7)
			drawHeaderRow(currencyCols, currencyW, 5.6, 76, 95, 122, 6.8)
			setDataTextStyle(6.8, 35, 35, 35)
			currencyLine := []string{
				c.DocCurrencyCode,
				strconv.Itoa(c.SatirSayisi),
				formatMoneyPDF(c.ToplamTutar),
				formatMoneyPDF(c.ToplamUSD),
				formatMoneyPDF(c.NormalTutar),
				formatMoneyPDF(c.AcikKalemTutar),
				formatMoneyPDF(statementAgingAvg(c.KurWeightedSum, c.KurWeightedBase, c.KurFallback)),
				fmt.Sprintf("%.0f", statementAgingAvg(c.WeightedGunSum, c.WeightedBase)),
				fmt.Sprintf("%.0f", statementAgingAvg(c.WeightedGunDocSum, c.WeightedBase)),
			}
			cRowH := 5.4
			y := pdf.GetY()
			x := marginL
			for i, v := range currencyLine {
				pdf.Rect(x, y, currencyW[i], cRowH, "")
				align := "R"
				if i == 0 {
					align = "L"
				}
				if i == 7 || i == 8 {
					align = "C"
				}
				drawPDFCellWrapped(pdf, v, x, y, currencyW[i], cRowH, align, 3.2)
				x += currencyW[i]
			}
			pdf.SetY(y + cRowH)
			drawHeaderRow(detailCols, detailW, 5.6, 31, 59, 91, 6.8)
			setDataTextStyle(6.6, 30, 30, 30)
			for _, d := range detailsByCurrency[c.GroupKey] {
				line := []string{
					d.FaturaCari,
					d.OdemeCari,
					d.FaturaRef,
					d.OdemeRef,
					formatDateTR(d.FaturaTarihi),
					formatDateTR(d.OdemeTarihi),
					formatDateTR(d.OdemeDocDate),
					formatMoneyPDF(d.EslesenTutar),
					formatMoneyPDF(d.UsdTutar),
					formatMoneyPDF(d.CurrencyUsdRate),
					fmt.Sprintf("%.0f", d.GunSayisi),
					fmt.Sprintf("%.0f", d.GunSayisiDocDate),
					d.Aciklama,
					d.DocCurrencyCode,
				}
				rowH := calcPDFRowHeight(pdf, line, detailW, map[int]bool{0: true, 1: true, 2: true, 3: true, 12: true}, 5.4, 3.1)
				if needPage(rowH) {
					// On overflow mid-detail, repeat the full context on the
					// new page: master header, currency header + subtotal
					// row, then the detail header.
					header()
					drawHeaderRow(masterCols, masterW, 6.2, 149, 113, 22, 7.2)
					pdf.SetFont("dejavu", "B", 7)
					drawHeaderRow(currencyCols, currencyW, 5.6, 76, 95, 122, 6.8)
					setDataTextStyle(6.8, 35, 35, 35)
					y = pdf.GetY()
					x = marginL
					for i, v := range currencyLine {
						pdf.Rect(x, y, currencyW[i], cRowH, "")
						align := "R"
						if i == 0 {
							align = "L"
						}
						if i == 7 || i == 8 {
							align = "C"
						}
						drawPDFCellWrapped(pdf, v, x, y, currencyW[i], cRowH, align, 3.2)
						x += currencyW[i]
					}
					pdf.SetY(y + cRowH)
					drawHeaderRow(detailCols, detailW, 5.6, 31, 59, 91, 6.8)
					setDataTextStyle(6.6, 30, 30, 30)
				}
				rowY := pdf.GetY()
				rowX := marginL
				for i, v := range line {
					pdf.Rect(rowX, rowY, detailW[i], rowH, "")
					align := "L"
					if i >= 7 && i <= 9 {
						align = "R"
					}
					if i == 10 || i == 11 {
						align = "C"
					}
					drawPDFCellWrapped(pdf, v, rowX, rowY, detailW[i], rowH, align, 3.1)
					rowX += detailW[i]
				}
				pdf.SetY(rowY + rowH)
			}
			pdf.Ln(1)
		}
		pdf.Ln(1.2)
	}
}
// statementAgingAvg returns sum/base when base is positive. Otherwise it
// returns the first fallback value if one was supplied, and 0 when none was.
func statementAgingAvg(sum, base float64, fallback ...float64) float64 {
	switch {
	case base > 0:
		return sum / base
	case len(fallback) > 0:
		return fallback[0]
	default:
		return 0
	}
}
// buildStatementAgingScreenPDFData groups the flat rows into the three levels
// the PDF renderer needs:
//
//   - masters: one aggregate per master account (Cari8), sorted per
//     sortBy/sortDesc via sortAgingScreenMastersForPDF;
//   - currenciesByMaster: per-currency aggregates keyed by master code,
//     sorted alphabetically by currency within each master;
//   - detailsByCurrency: the raw rows keyed by "master|currency", with open
//     items ("ACIKKALEM") first, then newest date first, then account/ref.
//
// Rows with a blank Cari8 are dropped. Weighted sums use abs(USD amount) as
// the weight so averages can later be computed via statementAgingAvg.
func buildStatementAgingScreenPDFData(rows []agingScreenPDFRow, sortBy string, sortDesc bool) ([]agingScreenMasterPDF, map[string][]agingScreenCurrencyPDF, map[string][]agingScreenPDFRow) {
	masterMap := map[string]*agingScreenMasterPDF{}
	currencyMap := map[string]*agingScreenCurrencyPDF{}
	detailsByCurrency := map[string][]agingScreenPDFRow{}
	for _, row := range rows {
		masterKey := strings.TrimSpace(row.Cari8)
		if masterKey == "" {
			continue
		}
		curr := strings.ToUpper(strings.TrimSpace(row.DocCurrencyCode))
		if curr == "" {
			curr = "N/A"
		}
		currencyKey := masterKey + "|" + curr
		aciklama := strings.ToUpper(strings.TrimSpace(row.Aciklama))
		absUsd := absFloatExcel(row.UsdTutar)
		// Get-or-create the master aggregate for this account.
		m := masterMap[masterKey]
		if m == nil {
			m = &agingScreenMasterPDF{
				GroupKey:  masterKey,
				Cari8:     masterKey,
				CariDetay: strings.TrimSpace(row.CariDetay),
			}
			masterMap[masterKey] = m
		}
		// Backfill the display name from a later row if the first was blank.
		if m.CariDetay == "" {
			m.CariDetay = strings.TrimSpace(row.CariDetay)
		}
		// Get-or-create the per-currency aggregate under this master.
		c := currencyMap[currencyKey]
		if c == nil {
			c = &agingScreenCurrencyPDF{
				GroupKey:        currencyKey,
				MasterKey:       masterKey,
				DocCurrencyCode: curr,
			}
			currencyMap[currencyKey] = c
		}
		// Master-level accumulation (USD amounts).
		m.SatirSayisi++
		m.ToplamUSD += row.UsdTutar
		if aciklama == "ACIKKALEM" {
			m.AcikKalemUSD += row.UsdTutar
		} else {
			m.NormalUSD += row.UsdTutar
		}
		// Only rows with a non-zero USD amount contribute weight to the
		// day/rate averages; the rate average additionally needs a positive rate.
		if absUsd > 0 {
			m.WeightedBase += absUsd
			m.WeightedGunSum += absUsd * row.GunSayisi
			m.WeightedGunDocSum += absUsd * row.GunSayisiDocDate
			if row.CurrencyUsdRate > 0 {
				m.KurWeightedBase += absUsd
				m.KurWeightedSum += absUsd * row.CurrencyUsdRate
			}
		}
		// Remember the last positive rate as a fallback for the average.
		if row.CurrencyUsdRate > 0 {
			m.KurFallback = row.CurrencyUsdRate
		}
		// Currency-level accumulation (document-currency amounts plus USD).
		c.SatirSayisi++
		c.ToplamTutar += row.EslesenTutar
		c.ToplamUSD += row.UsdTutar
		if aciklama == "ACIKKALEM" {
			c.AcikKalemTutar += row.EslesenTutar
		} else {
			c.NormalTutar += row.EslesenTutar
		}
		if absUsd > 0 {
			c.WeightedBase += absUsd
			c.WeightedGunSum += absUsd * row.GunSayisi
			c.WeightedGunDocSum += absUsd * row.GunSayisiDocDate
			if row.CurrencyUsdRate > 0 {
				c.KurWeightedBase += absUsd
				c.KurWeightedSum += absUsd * row.CurrencyUsdRate
			}
		}
		if row.CurrencyUsdRate > 0 {
			c.KurFallback = row.CurrencyUsdRate
		}
		detailsByCurrency[currencyKey] = append(detailsByCurrency[currencyKey], row)
	}
	masters := make([]agingScreenMasterPDF, 0, len(masterMap))
	currenciesByMaster := make(map[string][]agingScreenCurrencyPDF, len(masterMap))
	for _, m := range masterMap {
		masters = append(masters, *m)
	}
	sortAgingScreenMastersForPDF(masters, sortBy, sortDesc)
	for _, c := range currencyMap {
		currenciesByMaster[c.MasterKey] = append(currenciesByMaster[c.MasterKey], *c)
	}
	for mk := range currenciesByMaster {
		sort.SliceStable(currenciesByMaster[mk], func(i, j int) bool {
			return strings.ToUpper(currenciesByMaster[mk][i].DocCurrencyCode) < strings.ToUpper(currenciesByMaster[mk][j].DocCurrencyCode)
		})
	}
	for k := range detailsByCurrency {
		sort.SliceStable(detailsByCurrency[k], func(i, j int) bool {
			a := detailsByCurrency[k][i]
			b := detailsByCurrency[k][j]
			// Open items sort before matched ones.
			aOpen := strings.EqualFold(strings.TrimSpace(a.Aciklama), "ACIKKALEM")
			bOpen := strings.EqualFold(strings.TrimSpace(b.Aciklama), "ACIKKALEM")
			if aOpen != bOpen {
				return aOpen
			}
			// Then newest date first (doc date, then due date, then invoice date).
			aDate := parseAgingSortDate(a.OdemeDocDate, a.OdemeTarihi, a.FaturaTarihi)
			bDate := parseAgingSortDate(b.OdemeDocDate, b.OdemeTarihi, b.FaturaTarihi)
			if aDate != bDate {
				return aDate.After(bDate)
			}
			// Final tie-breaks: invoice account, payment account, refs.
			if strings.TrimSpace(a.FaturaCari) == strings.TrimSpace(b.FaturaCari) {
				if strings.TrimSpace(a.OdemeCari) == strings.TrimSpace(b.OdemeCari) {
					if strings.TrimSpace(a.FaturaRef) == strings.TrimSpace(b.FaturaRef) {
						return strings.TrimSpace(a.OdemeRef) < strings.TrimSpace(b.OdemeRef)
					}
					return strings.TrimSpace(a.FaturaRef) < strings.TrimSpace(b.FaturaRef)
				}
				return strings.TrimSpace(a.OdemeCari) < strings.TrimSpace(b.OdemeCari)
			}
			return strings.TrimSpace(a.FaturaCari) < strings.TrimSpace(b.FaturaCari)
		})
	}
	return masters, currenciesByMaster, detailsByCurrency
}
// sortAgingScreenMastersForPDF sorts the master aggregates in place by the
// given sort key ("cari8", "cari_detay", "satir_sayisi", "toplam_usd",
// "normal_usd", "acik_kalem_usd", "ortalama_gun", "ortalama_gun_docdate",
// "kur"; anything else falls back to "cari8"). Text comparisons are
// case-insensitive and trimmed; ties always break on Cari8. descending
// reverses the order.
func sortAgingScreenMastersForPDF(masters []agingScreenMasterPDF, sortBy string, descending bool) {
	if len(masters) <= 1 {
		return
	}
	key := strings.TrimSpace(sortBy)
	if key == "" {
		key = "cari8"
	}
	// Three-way comparators returning -1/0/1 so the descending flag can be
	// applied uniformly at the end.
	textCmp := func(a, b string) int {
		return strings.Compare(strings.ToUpper(strings.TrimSpace(a)), strings.ToUpper(strings.TrimSpace(b)))
	}
	numCmp := func(a, b float64) int {
		if a < b {
			return -1
		}
		if a > b {
			return 1
		}
		return 0
	}
	intCmp := func(a, b int) int {
		if a < b {
			return -1
		}
		if a > b {
			return 1
		}
		return 0
	}
	sort.SliceStable(masters, func(i, j int) bool {
		a := masters[i]
		b := masters[j]
		cmp := 0
		switch key {
		case "cari8":
			cmp = textCmp(a.Cari8, b.Cari8)
		case "cari_detay":
			cmp = textCmp(a.CariDetay, b.CariDetay)
		case "satir_sayisi":
			cmp = intCmp(a.SatirSayisi, b.SatirSayisi)
		case "toplam_usd":
			cmp = numCmp(a.ToplamUSD, b.ToplamUSD)
		case "normal_usd":
			cmp = numCmp(a.NormalUSD, b.NormalUSD)
		case "acik_kalem_usd":
			cmp = numCmp(a.AcikKalemUSD, b.AcikKalemUSD)
		case "ortalama_gun":
			cmp = numCmp(statementAgingAvg(a.WeightedGunSum, a.WeightedBase), statementAgingAvg(b.WeightedGunSum, b.WeightedBase))
		case "ortalama_gun_docdate":
			cmp = numCmp(statementAgingAvg(a.WeightedGunDocSum, a.WeightedBase), statementAgingAvg(b.WeightedGunDocSum, b.WeightedBase))
		case "kur":
			cmp = numCmp(statementAgingAvg(a.KurWeightedSum, a.KurWeightedBase, a.KurFallback), statementAgingAvg(b.KurWeightedSum, b.KurWeightedBase, b.KurFallback))
		default:
			cmp = textCmp(a.Cari8, b.Cari8)
		}
		// Stable secondary ordering by account code.
		if cmp == 0 {
			cmp = textCmp(a.Cari8, b.Cari8)
		}
		if descending {
			return cmp > 0
		}
		return cmp < 0
	})
}
// parseAgingSortDate returns the first of the given values that parses under
// any supported layout (RFC3339, ISO date, ISO datetime, Turkish dd.mm.yyyy
// with or without time). Blank values are skipped; if nothing parses, the
// zero time is returned.
func parseAgingSortDate(values ...string) time.Time {
	layouts := []string{
		time.RFC3339,
		"2006-01-02",
		"2006-01-02 15:04:05",
		"02.01.2006",
		"02.01.2006 15:04:05",
	}
	for _, candidate := range values {
		trimmed := strings.TrimSpace(candidate)
		if trimmed == "" {
			continue
		}
		for _, layout := range layouts {
			if parsed, err := time.Parse(layout, trimmed); err == nil {
				return parsed
			}
		}
	}
	return time.Time{}
}
// pickString returns the trimmed string form of the first key present in m,
// trying the keys in order. Missing keys yield "".
func pickString(m map[string]interface{}, keys ...string) string {
	for _, key := range keys {
		v, found := m[key]
		if !found {
			continue
		}
		return strings.TrimSpace(toStringValue(v))
	}
	return ""
}
func pickFloat(m map[string]interface{}, keys ...string) float64 {
for _, k := range keys {
if v, ok := m[k]; ok {
return toFloat64Value(v)
}
}
return 0
}
// toStringValue converts an arbitrary scalar to a string: nil becomes "",
// strings and byte slices pass through, everything else goes via fmt.Sprint.
func toStringValue(v interface{}) string {
	if v == nil {
		return ""
	}
	switch s := v.(type) {
	case string:
		return s
	case []byte:
		return string(s)
	}
	return fmt.Sprint(v)
}
// toFloat64Value converts an arbitrary scalar to float64. Numeric types are
// widened directly; strings and byte slices go through parseFloatValue (which
// handles Turkish/US separators); anything else is stringified first.
// nil and unparseable values yield 0.
func toFloat64Value(v interface{}) float64 {
	switch n := v.(type) {
	case nil:
		return 0
	case float64:
		return n
	case float32:
		return float64(n)
	case int:
		return float64(n)
	case int64:
		return float64(n)
	case int32:
		return float64(n)
	case string:
		return parseFloatValue(n)
	case []byte:
		return parseFloatValue(string(n))
	}
	return parseFloatValue(fmt.Sprint(v))
}
// parseFloatValue parses a number that may use either US ("1,234.56") or
// European/Turkish ("1.234,56") separators. Whichever of '.' and ',' appears
// last is taken as the decimal mark; a lone comma is treated as the decimal
// mark. Empty or unparseable input yields 0.
func parseFloatValue(s string) float64 {
	trimmed := strings.TrimSpace(s)
	if trimmed == "" {
		return 0
	}
	lastComma := strings.LastIndex(trimmed, ",")
	lastDot := strings.LastIndex(trimmed, ".")
	switch {
	case lastComma >= 0 && lastDot >= 0 && lastComma > lastDot:
		// "1.234,56": dots group thousands, the comma is the decimal mark.
		trimmed = strings.ReplaceAll(trimmed, ".", "")
		trimmed = strings.Replace(trimmed, ",", ".", 1)
	case lastComma >= 0 && lastDot >= 0:
		// "1,234.56": commas group thousands.
		trimmed = strings.ReplaceAll(trimmed, ",", "")
	case lastComma >= 0:
		// Comma only: treat it as the decimal mark.
		trimmed = strings.ReplaceAll(trimmed, ".", "")
		trimmed = strings.Replace(trimmed, ",", ".", 1)
	}
	n, err := strconv.ParseFloat(trimmed, 64)
	if err != nil {
		return 0
	}
	return n
}

View File

@@ -18,14 +18,15 @@ func GetStatementDetailsHandler(w http.ResponseWriter, r *http.Request) {
return
}
vars := mux.Vars(r)
accountCode := vars["accountCode"]
_ = mux.Vars(r)
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
parislemler := r.URL.Query()["parislemler"]
belgeNo := r.URL.Query().Get("belgeno")
if belgeNo == "" {
http.Error(w, "belgeno is required", http.StatusBadRequest)
return
}
details, err := queries.GetStatementDetails(accountCode, startDate, endDate, parislemler)
details, err := queries.GetStatementDetails(r.Context(), belgeNo)
if err != nil {
http.Error(w, "Error fetching statement details: "+err.Error(), http.StatusInternalServerError)
return

View File

@@ -2,10 +2,12 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"encoding/json"
"net/http"
"strconv"
)
// GET /api/statements
@@ -21,11 +23,17 @@ func GetStatementHeadersHandler(w http.ResponseWriter, r *http.Request) {
StartDate: r.URL.Query().Get("startdate"),
EndDate: r.URL.Query().Get("enddate"),
AccountCode: r.URL.Query().Get("accountcode"),
LangCode: r.URL.Query().Get("langcode"),
LangCode: i18n.ResolveLangCode(r.URL.Query().Get("langcode"), r.Header.Get("Accept-Language")),
Parislemler: r.URL.Query()["parislemler"],
ExcludeOpening: false,
}
if raw := r.URL.Query().Get("excludeopening"); raw != "" {
if parsed, err := strconv.ParseBool(raw); err == nil {
params.ExcludeOpening = parsed
}
}
statements, err := queries.GetStatements(params)
statements, err := queries.GetStatements(r.Context(), params)
if err != nil {
http.Error(w, "Error fetching statements: "+err.Error(), http.StatusInternalServerError)
return

View File

@@ -2,6 +2,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -40,9 +41,18 @@ const (
)
// Kolonlar
var hMainCols = []string{
"Belge No", "Tarih", "Vade", "İşlem",
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
func hMainCols(lang string) []string {
return []string{
i18n.T(lang, "pdf.main.doc_no"),
i18n.T(lang, "pdf.main.date"),
i18n.T(lang, "pdf.main.due_date"),
i18n.T(lang, "pdf.main.operation"),
i18n.T(lang, "pdf.main.description"),
i18n.T(lang, "pdf.main.currency"),
i18n.T(lang, "pdf.main.debit"),
i18n.T(lang, "pdf.main.credit"),
i18n.T(lang, "pdf.main.balance"),
}
}
var hMainWbase = []float64{
@@ -136,7 +146,7 @@ func hCalcRowHeightForText(pdf *gofpdf.Fpdf, text string, colWidth, lineHeight,
/* ============================ HEADER ============================ */
func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
func hDrawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
}
@@ -149,13 +159,13 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6
@@ -163,11 +173,11 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi
y := boxY + 13
@@ -180,7 +190,7 @@ func hDrawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) flo
/* ============================ TABLO ============================ */
func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
func hDrawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
x := hMarginL
y := pdf.GetY()
w := hPageWidth - hMarginL - hMarginR
@@ -194,9 +204,9 @@ func hDrawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(hColorPrimary[0], hColorPrimary[1], hColorPrimary[2])
pdf.SetXY(x+hCellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, currency, "", 0, "L", false, 0, "")
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + hFormatCurrencyTR(sonBakiye)
txt := i18n.T(lang, "pdf.ending_balance") + " = " + hFormatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -282,6 +292,10 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
rawParis := r.URL.Query()["parislemler"]
var parislemler []string
@@ -292,7 +306,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
}
}
headers, _, err := queries.GetStatementsHPDF(accountCode, startDate, endDate, parislemler)
headers, _, err := queries.GetStatementsHPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -348,7 +362,7 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
newPage := func() {
pageNum++
pdf.AddPage()
tableTop := hDrawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
tableTop := hDrawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
pdf.SetY(tableTop)
}
@@ -356,8 +370,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
for _, cur := range order {
g := groups[cur]
hDrawGroupBar(pdf, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
rowIndex := 0
for _, h := range g.rows {
@@ -372,8 +386,8 @@ func ExportStatementHeaderReportPDFHandler(mssql *sql.DB) http.HandlerFunc {
rh := hCalcRowHeightForText(pdf, row[4], mainWn[4], hLineHMain, hCellPadX)
if hNeedNewPage(pdf, rh+hHeaderRowH) {
newPage()
hDrawGroupBar(pdf, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols, mainWn)
hDrawGroupBar(pdf, langCode, cur, g.sonBakiye)
hDrawMainHeaderRow(pdf, hMainCols(langCode), mainWn)
}
hDrawMainDataRow(pdf, row, mainWn, rh, rowIndex)

View File

@@ -3,6 +3,7 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/i18n"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -48,10 +49,18 @@ const (
logoW = 42.0
)
// Ana tablo kolonları
var mainCols = []string{
"Belge No", "Tarih", "Vade", "İşlem",
"Açıklama", "Para", "Borç", "Alacak", "Bakiye",
func mainCols(lang string) []string {
return []string{
i18n.T(lang, "pdf.main.doc_no"),
i18n.T(lang, "pdf.main.date"),
i18n.T(lang, "pdf.main.due_date"),
i18n.T(lang, "pdf.main.operation"),
i18n.T(lang, "pdf.main.description"),
i18n.T(lang, "pdf.main.currency"),
i18n.T(lang, "pdf.main.debit"),
i18n.T(lang, "pdf.main.credit"),
i18n.T(lang, "pdf.main.balance"),
}
}
// Ana tablo kolon genişlikleri (ilk 3 geniş)
@@ -68,10 +77,21 @@ var mainWbase = []float64{
}
// Detay tablo kolonları ve genişlikleri
var dCols = []string{
"Ana Grup", "Alt Grup", "Garson", "Fit", "İçerik",
"Ürün", "Renk", "Adet", "Fiyat", "Tutar",
func detailCols(lang string) []string {
return []string{
i18n.T(lang, "pdf.detail.main_group"),
i18n.T(lang, "pdf.detail.sub_group"),
i18n.T(lang, "pdf.detail.waiter"),
i18n.T(lang, "pdf.detail.fit"),
i18n.T(lang, "pdf.detail.content"),
i18n.T(lang, "pdf.detail.product"),
i18n.T(lang, "pdf.detail.color"),
i18n.T(lang, "pdf.detail.qty"),
i18n.T(lang, "pdf.detail.price"),
i18n.T(lang, "pdf.detail.total"),
}
}
var dWbase = []float64{
30, 28, 22, 20, 56, 30, 22, 20, 20, 26}
@@ -224,7 +244,7 @@ func drawLabeledBox(pdf *gofpdf.Fpdf, x, y, w, h float64, label, value string, a
}
}
func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) float64 {
func drawPageHeader(pdf *gofpdf.Fpdf, lang, cariKod, cariIsim, start, end string) float64 {
if logoPath, err := resolvePdfImagePath("Baggi-Tekstil-A.s-Logolu.jpeg"); err == nil {
pdf.ImageOptions(logoPath, hMarginL, 2, hLogoW, 0, false, gofpdf.ImageOptions{}, 0, "")
}
@@ -237,13 +257,13 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.SetFont(hFontFamilyBold, "", 12)
pdf.SetXY(hMarginL+hLogoW+8, hMarginT+10)
pdf.CellFormat(120, 6, "Cari Hesap Raporu", "", 0, "L", false, 0, "")
pdf.CellFormat(120, 6, i18n.T(lang, "pdf.report_title"), "", 0, "L", false, 0, "")
// Bugünün tarihi (sağ üst)
today := time.Now().Format("02.01.2006")
pdf.SetFont(hFontFamilyReg, "", 9)
pdf.SetXY(hPageWidth-hMarginR-40, hMarginT+3)
pdf.CellFormat(40, 6, "Tarih: "+today, "", 0, "R", false, 0, "")
pdf.CellFormat(40, 6, i18n.T(lang, "pdf.date")+": "+today, "", 0, "R", false, 0, "")
// Cari & Tarih kutuları (daha yukarı taşındı)
boxY := hMarginT + hLogoW - 6
@@ -251,11 +271,11 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
pdf.Rect(hMarginL, boxY, 140, 11, "")
pdf.SetXY(hMarginL+2, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Cari: %s — %s", cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s — %s", i18n.T(lang, "pdf.customer"), cariKod, cariIsim), "", 0, "L", false, 0, "")
pdf.Rect(hPageWidth-hMarginR-140, boxY, 140, 11, "")
pdf.SetXY(hPageWidth-hMarginR-138, boxY+3)
pdf.CellFormat(136, 5, fmt.Sprintf("Tarih Aralığı: %s → %s", start, end), "", 0, "R", false, 0, "")
pdf.CellFormat(136, 5, fmt.Sprintf("%s: %s → %s", i18n.T(lang, "pdf.date_range"), start, end), "", 0, "R", false, 0, "")
// Alt çizgi
y := boxY + 13
@@ -268,7 +288,7 @@ func drawPageHeader(pdf *gofpdf.Fpdf, cariKod, cariIsim, start, end string) floa
/* ============================ GROUP BAR ============================ */
func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
func drawGroupBar(pdf *gofpdf.Fpdf, lang, currency string, sonBakiye float64) {
// Kutu alanı (tam genişlik)
x := marginL
y := pdf.GetY()
@@ -285,9 +305,9 @@ func drawGroupBar(pdf *gofpdf.Fpdf, currency string, sonBakiye float64) {
pdf.SetTextColor(colorPrimary[0], colorPrimary[1], colorPrimary[2])
pdf.SetXY(x+cellPadX+1.0, y+(h-5.0)/2)
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s", currency), "", 0, "L", false, 0, "")
pdf.CellFormat(w*0.6, 5.0, fmt.Sprintf("%s: %s", i18n.T(lang, "pdf.currency_prefix"), currency), "", 0, "L", false, 0, "")
txt := "Son Bakiye = " + formatCurrencyTR(sonBakiye)
txt := i18n.T(lang, "pdf.ending_balance") + " = " + formatCurrencyTR(sonBakiye)
pdf.SetXY(x+w*0.4, y+(h-5.0)/2)
pdf.CellFormat(w*0.6-2.0, 5.0, txt, "", 0, "R", false, 0, "")
@@ -430,6 +450,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode := r.URL.Query().Get("accountcode")
startDate := r.URL.Query().Get("startdate")
endDate := r.URL.Query().Get("enddate")
langCode := i18n.ResolveLangCode(
r.URL.Query().Get("langcode"),
r.Header.Get("Accept-Language"),
)
// parislemler sanitize
rawParis := r.URL.Query()["parislemler"]
@@ -445,7 +469,7 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
accountCode, startDate, endDate, parislemler)
// 1) Header verileri
headers, belgeNos, err := queries.GetStatementsPDF(accountCode, startDate, endDate, parislemler)
headers, belgeNos, err := queries.GetStatementsPDF(r.Context(), accountCode, startDate, endDate, langCode, parislemler)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
@@ -520,12 +544,12 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
pdf.AddPage()
// drawPageHeader tablo başlangıç yüksekliğini döndürüyor
tableTop := drawPageHeader(pdf, accountCode, cariIsim, startDate, endDate)
tableTop := drawPageHeader(pdf, langCode, accountCode, cariIsim, startDate, endDate)
// Sayfa numarası
pdf.SetFont(fontFamilyReg, "", 6)
pdf.SetXY(pageWidth-marginR-28, pageHeight-marginB+3)
pdf.CellFormat(28, 5, fmt.Sprintf("Sayfa %d", pageNum), "", 0, "R", false, 0, "")
pdf.CellFormat(28, 5, fmt.Sprintf("%s %d", i18n.T(langCode, "pdf.page"), pageNum), "", 0, "R", false, 0, "")
// Tablo Y konumunu ayarla
pdf.SetY(tableTop)
@@ -540,8 +564,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, groupBarH+headerRowH) {
newPage()
}
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
for _, h := range g.rows {
row := []string{
@@ -557,8 +581,8 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh+headerRowH) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
}
drawMainDataRow(pdf, row, mainWn, rh)
@@ -567,10 +591,10 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if len(details) > 0 {
if needNewPage(pdf, subHeaderRowH) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
}
drawDetailHeaderRow(pdf, dCols, dWn)
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
for i, d := range details {
drow := []string{
@@ -591,9 +615,9 @@ func ExportPDFHandler(mssql *sql.DB) http.HandlerFunc {
if needNewPage(pdf, rh2) {
newPage()
drawGroupBar(pdf, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols, mainWn)
drawDetailHeaderRow(pdf, dCols, dWn)
drawGroupBar(pdf, langCode, cur, g.sonBakiye)
drawMainHeaderRow(pdf, mainCols(langCode), mainWn)
drawDetailHeaderRow(pdf, detailCols(langCode), dWn)
}
// zebra: çift indekslerde açık zemin
fill := (i%2 == 0)

View File

@@ -0,0 +1,41 @@
package routes
import (
	"database/sql"
	"log"
	"strings"
	"unicode/utf8"
)
// EnsureTranslationPerfIndexes creates helpful indexes for translation
// listing/search. It is safe to run on each startup; failures are only
// logged and never stop the service.
func EnsureTranslationPerfIndexes(db *sql.DB) {
	if db == nil {
		return
	}
	// The pg_trgm extension must exist before the trigram (gin_trgm_ops)
	// indexes further down can be created, so it stays first in the list.
	ddl := []string{
		`CREATE EXTENSION IF NOT EXISTS pg_trgm`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_t_key_lang ON mk_translator (t_key, lang_code)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_status_lang_updated ON mk_translator (status, lang_code, updated_at DESC)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_manual_status ON mk_translator (is_manual, status)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_type_expr ON mk_translator ((COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy')))`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_source_text_trgm ON mk_translator USING gin (source_text_tr gin_trgm_ops)`,
		`CREATE INDEX IF NOT EXISTS idx_mk_translator_translated_text_trgm ON mk_translator USING gin (translated_text gin_trgm_ops)`,
	}
	for _, statement := range ddl {
		_, err := db.Exec(statement)
		if err != nil {
			log.Printf("[TranslationPerf] index_setup_warn sql=%q err=%v", summarizeSQL(statement), err)
			continue
		}
		log.Printf("[TranslationPerf] index_ready sql=%q", summarizeSQL(statement))
	}
}
// summarizeSQL trims a SQL statement for logging: surrounding whitespace
// is removed and anything beyond 100 bytes is elided with "...".
func summarizeSQL(sqlText string) string {
	s := strings.TrimSpace(sqlText)
	if len(s) <= 100 {
		return s
	}
	// Back up to a rune boundary so a multi-byte UTF-8 character is never
	// split in half (len and slicing count bytes, not characters).
	cut := 100
	for cut > 0 && !utf8.RuneStart(s[cut]) {
		cut--
	}
	return s[:cut] + "..."
}

1688
svc/routes/translations.go Normal file
View File

@@ -0,0 +1,1688 @@
package routes
import (
"bssapp-backend/models"
"bytes"
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"
"io"
"io/fs"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/gorilla/mux"
"github.com/lib/pq"
)
// translationLangSet enumerates the language codes the translation
// endpoints accept; other values normalize to "" (no filter).
var translationLangSet = map[string]struct{}{
	"tr": {},
	"en": {},
	"de": {},
	"it": {},
	"es": {},
	"ru": {},
	"ar": {},
}

// translationStatusSet lists the valid workflow states for a row.
var translationStatusSet = map[string]struct{}{
	"pending":  {},
	"approved": {},
	"rejected": {},
}

// translationSourceTypeSet lists the recognized origins of a source text
// (stored in provider_meta->>'source_type', defaulting to "dummy").
var translationSourceTypeSet = map[string]struct{}{
	"dummy":   {},
	"postgre": {},
	"mssql":   {},
}

var (
	// reQuotedText matches a single- or double-quoted run of 3–120
	// non-quote characters and captures the inner text.
	reQuotedText = regexp.MustCompile(`['"]([^'"]{3,120})['"]`)
	// reHasLetter requires at least one Latin or Turkish letter.
	reHasLetter = regexp.MustCompile(`[A-Za-zÇĞİÖŞÜçğıöşü]`)
	// reBadText rejects literals that are clearly not UI text
	// (HTTP verbs and JSON/JS keywords).
	reBadText = regexp.MustCompile(`^(GET|POST|PUT|DELETE|OPTIONS|true|false|null|undefined)$`)
	// reKeyUnsafe matches character runs that are not allowed in a t_key.
	reKeyUnsafe = regexp.MustCompile(`[^a-z0-9_]+`)
)
// TranslationUpdatePayload is the request body for updating a single
// translation row; nil pointer fields leave the stored value untouched
// (the UPDATE uses COALESCE).
type TranslationUpdatePayload struct {
	SourceTextTR   *string `json:"source_text_tr"`
	TranslatedText *string `json:"translated_text"`
	SourceType     *string `json:"source_type"`
	IsManual       *bool   `json:"is_manual"`
	Status         *string `json:"status"`
}

// UpsertMissingPayload carries translation keys reported missing by a
// client, plus the target languages to seed rows for.
type UpsertMissingPayload struct {
	Items     []UpsertMissingItem `json:"items"`
	Languages []string            `json:"languages"`
}

// UpsertMissingItem is one missing key with its Turkish source text.
type UpsertMissingItem struct {
	TKey         string `json:"t_key"`
	SourceTextTR string `json:"source_text_tr"`
}

// SyncSourcesPayload is the (optional) request body for a source sync
// run. A nil OnlyNew is treated as true by the handler.
type SyncSourcesPayload struct {
	AutoTranslate bool     `json:"auto_translate"`
	Languages     []string `json:"languages"`
	Limit         int      `json:"limit"`
	OnlyNew       *bool    `json:"only_new"`
}

// BulkApprovePayload lists the row ids to approve in one call.
type BulkApprovePayload struct {
	IDs []int64 `json:"ids"`
}

// BulkUpdatePayload wraps a batch of partial row updates.
type BulkUpdatePayload struct {
	Items []BulkUpdateItem `json:"items"`
}

// TranslateSelectedPayload selects the keys (and target languages) to
// machine-translate on demand.
type TranslateSelectedPayload struct {
	TKeys     []string `json:"t_keys"`
	Languages []string `json:"languages"`
	Limit     int      `json:"limit"`
}

// BulkUpdateItem is one partial row update; nil fields keep the current
// value, and entries with ID <= 0 are skipped.
type BulkUpdateItem struct {
	ID             int64   `json:"id"`
	SourceTextTR   *string `json:"source_text_tr"`
	TranslatedText *string `json:"translated_text"`
	SourceType     *string `json:"source_type"`
	IsManual       *bool   `json:"is_manual"`
	Status         *string `json:"status"`
}

// TranslationSyncOptions configures PerformTranslationSync.
type TranslationSyncOptions struct {
	AutoTranslate bool     // also machine-translate pending rows after the upsert
	Languages     []string // target languages; normalized by the sync
	Limit         int      // work budget; clamped to 20000 when <= 0 or > 100000
	OnlyNew       bool     // keep only seeds that filterNewSeeds considers new
	TraceID       string   // log-correlation id; generated when empty
}

// TranslationSyncResult summarizes one sync run.
type TranslationSyncResult struct {
	SeedCount      int      `json:"seed_count"`
	AffectedCount  int      `json:"affected_count"`
	AutoTranslated int      `json:"auto_translated"`
	TargetLangs    []string `json:"target_languages"`
	TraceID        string   `json:"trace_id"`
	DurationMS     int64    `json:"duration_ms"`
}

// sourceSeed is an internal candidate row harvested from a source system.
type sourceSeed struct {
	TKey       string // translation key
	SourceText string // Turkish source text
	SourceType string // origin; presumably one of translationSourceTypeSet — confirm against collectSourceSeeds
}
// GetTranslationRowsHandler lists rows from mk_translator filtered by
// the optional query parameters:
//
//	q           substring match (ILIKE) on source or translated text
//	lang        language code (validated against translationLangSet)
//	status      pending | approved | rejected
//	source_type dummy | postgre | mssql (read from provider_meta)
//	manual      "true" / "false" filter on is_manual
//	missing     "true" keeps only rows with an empty translation
//	limit       page size, 1..50000 (0 = unlimited)
//	offset      row offset, 0..1000000
//
// The response is {"rows": [...], "count": N} as JSON.
func GetTranslationRowsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		q := strings.TrimSpace(r.URL.Query().Get("q"))
		lang := normalizeTranslationLang(r.URL.Query().Get("lang"))
		status := normalizeTranslationStatus(r.URL.Query().Get("status"))
		sourceType := normalizeTranslationSourceType(r.URL.Query().Get("source_type"))
		manualFilter := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("manual")))
		missingOnly := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("missing"))) == "true"

		// Malformed or out-of-range limit/offset values are silently ignored.
		limit := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("limit")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 && parsed <= 50000 {
				limit = parsed
			}
		}
		offset := 0
		if raw := strings.TrimSpace(r.URL.Query().Get("offset")); raw != "" {
			if parsed, err := strconv.Atoi(raw); err == nil && parsed >= 0 && parsed <= 1000000 {
				offset = parsed
			}
		}

		// Build the WHERE clause from fixed templates; user input only ever
		// travels through positional parameters, never the SQL text.
		clauses := []string{"1=1"}
		args := make([]any, 0, 8)
		argIndex := 1
		if q != "" {
			clauses = append(clauses, fmt.Sprintf("(source_text_tr ILIKE $%d OR translated_text ILIKE $%d)", argIndex, argIndex))
			args = append(args, "%"+q+"%")
			argIndex++
		}
		if lang != "" {
			clauses = append(clauses, fmt.Sprintf("lang_code = $%d", argIndex))
			args = append(args, lang)
			argIndex++
		}
		if status != "" {
			clauses = append(clauses, fmt.Sprintf("status = $%d", argIndex))
			args = append(args, status)
			argIndex++
		}
		if sourceType != "" {
			clauses = append(clauses, fmt.Sprintf("COALESCE(NULLIF(provider_meta->>'source_type',''),'dummy') = $%d", argIndex))
			args = append(args, sourceType)
			argIndex++
		}
		switch manualFilter {
		case "true":
			clauses = append(clauses, "is_manual = true")
		case "false":
			clauses = append(clauses, "is_manual = false")
		}
		if missingOnly {
			clauses = append(clauses, "(translated_text IS NULL OR btrim(translated_text) = '')")
		}

		query := fmt.Sprintf(`
SELECT
	id,
	t_key,
	lang_code,
	COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
	source_text_tr,
	COALESCE(translated_text, '') AS translated_text,
	is_manual,
	status,
	COALESCE(provider, '') AS provider,
	updated_at
FROM mk_translator
WHERE %s
ORDER BY t_key, lang_code
`, strings.Join(clauses, " AND "))
		if limit > 0 {
			query += fmt.Sprintf("LIMIT $%d", argIndex)
			args = append(args, limit)
			argIndex++
		}
		if offset > 0 {
			query += fmt.Sprintf(" OFFSET $%d", argIndex)
			args = append(args, offset)
		}

		// Propagate the request context so an aborted request cancels the
		// query, matching the context propagation used elsewhere in routes.
		rows, err := db.QueryContext(r.Context(), query, args...)
		if err != nil {
			http.Error(w, "translation query error", http.StatusInternalServerError)
			return
		}
		defer rows.Close()

		list := make([]models.TranslatorRow, 0, 1024)
		for rows.Next() {
			var row models.TranslatorRow
			if err := rows.Scan(
				&row.ID,
				&row.TKey,
				&row.LangCode,
				&row.SourceType,
				&row.SourceTextTR,
				&row.TranslatedText,
				&row.IsManual,
				&row.Status,
				&row.Provider,
				&row.UpdatedAt,
			); err != nil {
				http.Error(w, "translation scan error", http.StatusInternalServerError)
				return
			}
			list = append(list, row)
		}
		if err := rows.Err(); err != nil {
			http.Error(w, "translation rows error", http.StatusInternalServerError)
			return
		}

		_ = json.NewEncoder(w).Encode(map[string]any{
			"rows":  list,
			"count": len(list),
		})
	}
}
// UpdateTranslationRowHandler patches a single mk_translator row by id.
// Fields left null in the JSON body keep their stored value (COALESCE);
// status and source_type are validated before the query runs. The fully
// updated row is returned as JSON; an unknown id yields 404.
func UpdateTranslationRowHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		id, err := strconv.ParseInt(strings.TrimSpace(mux.Vars(r)["id"]), 10, 64)
		if err != nil || id <= 0 {
			http.Error(w, "invalid row id", http.StatusBadRequest)
			return
		}

		var payload TranslationUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		if payload.Status != nil {
			normalized := normalizeTranslationStatus(*payload.Status)
			if normalized == "" {
				http.Error(w, "invalid status", http.StatusBadRequest)
				return
			}
			payload.Status = &normalized
		}
		if payload.SourceType != nil {
			normalized := normalizeTranslationSourceType(*payload.SourceType)
			if normalized == "" {
				http.Error(w, "invalid source_type", http.StatusBadRequest)
				return
			}
			payload.SourceType = &normalized
		}

		// source_type lives inside the provider_meta JSONB blob, hence the
		// jsonb_set instead of a plain column assignment.
		updateQuery := `
UPDATE mk_translator
SET
	source_text_tr = COALESCE($2, source_text_tr),
	translated_text = COALESCE($3, translated_text),
	is_manual = COALESCE($4, is_manual),
	status = COALESCE($5, status),
	provider_meta = CASE
		WHEN $6::text IS NULL THEN provider_meta
		ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
	END,
	updated_at = NOW()
WHERE id = $1
RETURNING
	id,
	t_key,
	lang_code,
	COALESCE(NULLIF(provider_meta->>'source_type',''), 'dummy') AS source_type,
	source_text_tr,
	COALESCE(translated_text, '') AS translated_text,
	is_manual,
	status,
	COALESCE(provider, '') AS provider,
	updated_at
`
		var row models.TranslatorRow
		err = db.QueryRowContext(
			r.Context(),
			updateQuery,
			id,
			nullableString(payload.SourceTextTR),
			nullableString(payload.TranslatedText),
			payload.IsManual,
			payload.Status,
			nullableString(payload.SourceType),
		).Scan(
			&row.ID,
			&row.TKey,
			&row.LangCode,
			&row.SourceType,
			&row.SourceTextTR,
			&row.TranslatedText,
			&row.IsManual,
			&row.Status,
			&row.Provider,
			&row.UpdatedAt,
		)
		// errors.Is unwraps driver-wrapped errors, unlike a bare == compare.
		if errors.Is(err, sql.ErrNoRows) {
			http.Error(w, "translation row not found", http.StatusNotFound)
			return
		}
		if err != nil {
			http.Error(w, "translation update error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(row)
	}
}
// UpsertMissingTranslationsHandler seeds rows for translation keys the
// client reports as missing and answers with a summary of what was
// written (item count, target languages, affected rows).
func UpsertMissingTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var body UpsertMissingPayload
		if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}

		cleaned := normalizeMissingItems(body.Items)
		if len(cleaned) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}

		langs := normalizeTargetLanguages(body.Languages)
		affected, err := upsertMissingRows(db, cleaned, langs, "dummy")
		if err != nil {
			http.Error(w, "upsert missing error", http.StatusInternalServerError)
			return
		}

		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"items":          len(cleaned),
			"target_langs":   langs,
			"affected_count": affected,
		})
	}
}
// SyncTranslationSourcesHandler kicks off a source→translation sync run
// and reports the outcome. The JSON body is optional: decode failures
// simply keep the defaults, and a missing only_new defaults to true.
// Every log line and the response carry the same trace id.
func SyncTranslationSourcesHandler(pgDB *sql.DB, mssqlDB *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var req SyncSourcesPayload
		_ = json.NewDecoder(r.Body).Decode(&req) // body is optional

		trace := requestTraceID(r)
		w.Header().Set("X-Trace-ID", trace)
		began := time.Now()

		onlyNew := req.OnlyNew == nil || *req.OnlyNew
		log.Printf(
			"[TranslationSync] trace=%s stage=request auto_translate=%t only_new=%t limit=%d langs=%v",
			trace, req.AutoTranslate, onlyNew, req.Limit, req.Languages,
		)

		result, err := PerformTranslationSync(pgDB, mssqlDB, TranslationSyncOptions{
			AutoTranslate: req.AutoTranslate,
			Languages:     req.Languages,
			Limit:         req.Limit,
			OnlyNew:       onlyNew,
			TraceID:       trace,
		})
		if err != nil {
			log.Printf(
				"[TranslationSync] trace=%s stage=error duration_ms=%d err=%v",
				trace, time.Since(began).Milliseconds(), err,
			)
			http.Error(w, "translation source sync error", http.StatusInternalServerError)
			return
		}

		log.Printf(
			"[TranslationSync] trace=%s stage=response duration_ms=%d seeds=%d affected=%d auto_translated=%d",
			trace, time.Since(began).Milliseconds(), result.SeedCount, result.AffectedCount, result.AutoTranslated,
		)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         trace,
			"result":           result,
			"seed_count":       result.SeedCount,
			"affected_count":   result.AffectedCount,
			"auto_translated":  result.AutoTranslated,
			"target_languages": result.TargetLangs,
		})
	}
}
// TranslateSelectedTranslationsHandler machine-translates an explicit
// set of translation keys. When the caller omits a limit, one is derived
// from keys × target languages, falling back to 1000 and capped at
// 50000. Log lines and the response share the same trace id.
func TranslateSelectedTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var req TranslateSelectedPayload
		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		keys := normalizeStringList(req.TKeys, 5000)
		if len(keys) == 0 {
			http.Error(w, "t_keys required", http.StatusBadRequest)
			return
		}
		langs := normalizeTargetLanguages(req.Languages)

		// Derive a sane work budget when none (or a bad one) was supplied.
		budget := req.Limit
		if budget <= 0 {
			budget = len(keys) * len(langs)
		}
		if budget <= 0 {
			budget = 1000
		}
		if budget > 50000 {
			budget = 50000
		}

		trace := requestTraceID(r)
		w.Header().Set("X-Trace-ID", trace)
		began := time.Now()
		log.Printf(
			"[TranslationSelected] trace=%s stage=request keys=%d limit=%d langs=%v",
			trace, len(keys), budget, langs,
		)

		translated, err := autoTranslatePendingRowsForKeys(db, langs, budget, keys, trace)
		if err != nil {
			log.Printf(
				"[TranslationSelected] trace=%s stage=error duration_ms=%d err=%v",
				trace, time.Since(began).Milliseconds(), err,
			)
			http.Error(w, "translate selected error", http.StatusInternalServerError)
			return
		}

		log.Printf(
			"[TranslationSelected] trace=%s stage=done duration_ms=%d translated=%d",
			trace, time.Since(began).Milliseconds(), translated,
		)
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":          true,
			"trace_id":         trace,
			"translated_count": translated,
			"key_count":        len(keys),
			"target_languages": langs,
			"duration_ms":      time.Since(began).Milliseconds(),
		})
	}
}
// BulkApproveTranslationsHandler marks the given row ids as approved and
// manual in a single statement and clears their provider_meta "is_new"
// flag. Body: {"ids": [...]}. Response: {"success": true,
// "affected_count": N}.
func BulkApproveTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var payload BulkApprovePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		ids := normalizeIDListInt64(payload.IDs)
		if len(ids) == 0 {
			http.Error(w, "ids required", http.StatusBadRequest)
			return
		}

		// Propagate the request context so an aborted request cancels the
		// statement instead of leaving it running on the server.
		res, err := db.ExecContext(r.Context(), `
UPDATE mk_translator
SET
	status = 'approved',
	is_manual = true,
	updated_at = NOW(),
	provider_meta = jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{is_new}', 'false'::jsonb, true)
WHERE id = ANY($1)
`, pq.Array(ids))
		if err != nil {
			http.Error(w, "bulk approve error", http.StatusInternalServerError)
			return
		}
		// RowsAffected error deliberately ignored: a missing count only
		// degrades the response payload, not the update itself.
		affected, _ := res.RowsAffected()
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": affected,
		})
	}
}
// BulkUpdateTranslationsHandler applies per-row partial updates inside a
// single transaction. Items with a non-positive id are skipped; fields
// left null keep their stored value (COALESCE). It responds with the
// total number of rows changed; any mid-batch failure rolls back the
// whole transaction.
func BulkUpdateTranslationsHandler(db *sql.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json; charset=utf-8")

		var payload BulkUpdatePayload
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			http.Error(w, "invalid payload", http.StatusBadRequest)
			return
		}
		if len(payload.Items) == 0 {
			http.Error(w, "items required", http.StatusBadRequest)
			return
		}

		// Tie the transaction to the request context so a client disconnect
		// aborts the whole batch.
		tx, err := db.BeginTx(r.Context(), nil)
		if err != nil {
			http.Error(w, "transaction start error", http.StatusInternalServerError)
			return
		}
		defer tx.Rollback() // no-op once Commit succeeds

		affected := 0
		for _, it := range payload.Items {
			if it.ID <= 0 {
				continue // silently skip malformed entries
			}
			status := normalizeOptionalStatus(it.Status)
			sourceType := normalizeOptionalSourceType(it.SourceType)
			res, err := tx.ExecContext(r.Context(), `
UPDATE mk_translator
SET
	source_text_tr = COALESCE($2, source_text_tr),
	translated_text = COALESCE($3, translated_text),
	is_manual = COALESCE($4, is_manual),
	status = COALESCE($5, status),
	provider_meta = CASE
		WHEN $6::text IS NULL THEN provider_meta
		ELSE jsonb_set(COALESCE(provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($6::text), true)
	END,
	updated_at = NOW()
WHERE id = $1
`, it.ID, nullableString(it.SourceTextTR), nullableString(it.TranslatedText), it.IsManual, status, sourceType)
			if err != nil {
				http.Error(w, "bulk update error", http.StatusInternalServerError)
				return
			}
			if n, _ := res.RowsAffected(); n > 0 {
				affected += int(n)
			}
		}

		if err := tx.Commit(); err != nil {
			http.Error(w, "transaction commit error", http.StatusInternalServerError)
			return
		}
		_ = json.NewEncoder(w).Encode(map[string]any{
			"success":        true,
			"affected_count": affected,
		})
	}
}
// PerformTranslationSync runs one sync pass: it collects candidate source
// strings from the databases, optionally filters to previously-unseen
// keys, upserts them as translation seed rows and — when
// options.AutoTranslate is set — machine-translates the matching pending
// rows. Every stage is logged under the same trace id.
//
// Returns a TranslationSyncResult summary, or an error if the upsert
// fails (auto-translate errors are logged but do not fail the run).
func PerformTranslationSync(pgDB *sql.DB, mssqlDB *sql.DB, options TranslationSyncOptions) (TranslationSyncResult, error) {
	// Generate a trace id when the caller did not supply one so all log
	// lines of this run can still be correlated.
	traceID := strings.TrimSpace(options.TraceID)
	if traceID == "" {
		traceID = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	start := time.Now()
	// Clamp the work budget: non-positive or absurd values fall back to 20000.
	limit := options.Limit
	if limit <= 0 || limit > 100000 {
		limit = 20000
	}
	targetLangs := normalizeTargetLanguages(options.Languages)
	log.Printf(
		"[TranslationSync] trace=%s stage=start auto_translate=%t only_new=%t limit=%d langs=%v",
		traceID,
		options.AutoTranslate,
		options.OnlyNew,
		limit,
		targetLangs,
	)

	// Stage 1: harvest candidate strings and reuse keys for texts that
	// already exist, so identical texts keep a stable t_key.
	collectStart := time.Now()
	seeds := collectSourceSeeds(pgDB, mssqlDB, limit)
	seeds, reusedByText := reuseExistingSeedKeys(pgDB, seeds)
	log.Printf(
		"[TranslationSync] trace=%s stage=collect done_ms=%d total=%d reused_by_text=%d sources=%s",
		traceID,
		time.Since(collectStart).Milliseconds(),
		len(seeds),
		reusedByText,
		formatSourceCounts(countSeedsBySource(seeds)),
	)

	// Stage 2 (optional): drop seeds whose keys are already known.
	if options.OnlyNew {
		before := len(seeds)
		filterStart := time.Now()
		seeds = filterNewSeeds(pgDB, seeds)
		log.Printf(
			"[TranslationSync] trace=%s stage=filter_only_new done_ms=%d before=%d after=%d skipped=%d",
			traceID,
			time.Since(filterStart).Milliseconds(),
			before,
			len(seeds),
			before-len(seeds),
		)
	}
	// Nothing left to do: report an empty (but successful) run.
	if len(seeds) == 0 {
		return TranslationSyncResult{
			TargetLangs: targetLangs,
			TraceID:     traceID,
			DurationMS:  time.Since(start).Milliseconds(),
		}, nil
	}

	// Stage 3: write the seeds. This is the only stage whose failure
	// aborts the run.
	upsertStart := time.Now()
	affected, err := upsertSourceSeeds(pgDB, seeds, targetLangs)
	if err != nil {
		return TranslationSyncResult{}, err
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=upsert done_ms=%d affected=%d",
		traceID,
		time.Since(upsertStart).Milliseconds(),
		affected,
	)

	// Stage 4 (optional): machine-translate the pending rows for the seeded
	// keys. Errors here are logged only — the seed data is already saved.
	autoTranslated := 0
	if options.AutoTranslate {
		autoStart := time.Now()
		var autoErr error
		autoTranslated, autoErr = autoTranslatePendingRowsForKeys(pgDB, targetLangs, limit, uniqueSeedKeys(seeds), traceID)
		if autoErr != nil {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d err=%v",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
				autoErr,
			)
		} else {
			log.Printf(
				"[TranslationSync] trace=%s stage=auto_translate done_ms=%d translated=%d",
				traceID,
				time.Since(autoStart).Milliseconds(),
				autoTranslated,
			)
		}
	}

	result := TranslationSyncResult{
		SeedCount:      len(seeds),
		AffectedCount:  affected,
		AutoTranslated: autoTranslated,
		TargetLangs:    targetLangs,
		TraceID:        traceID,
		DurationMS:     time.Since(start).Milliseconds(),
	}
	log.Printf(
		"[TranslationSync] trace=%s stage=done duration_ms=%d seeds=%d affected=%d auto_translated=%d",
		traceID,
		result.DurationMS,
		result.SeedCount,
		result.AffectedCount,
		result.AutoTranslated,
	)
	return result, nil
}
// upsertMissingRows inserts or refreshes translator rows for the given items
// inside a single transaction: one approved 'tr' row per item (the source
// text doubles as its own translation) plus one pending row per requested
// language. Returns the total affected-row count reported by the database.
func upsertMissingRows(db *sql.DB, items []UpsertMissingItem, languages []string, forcedSourceType string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// Rollback becomes a no-op once Commit succeeds below.
	defer tx.Rollback()

	total := 0
	// exec runs one upsert and folds its affected-row count into total.
	exec := func(query string, args ...any) error {
		res, execErr := tx.Exec(query, args...)
		if execErr != nil {
			return execErr
		}
		if n, _ := res.RowsAffected(); n > 0 {
			total += int(n)
		}
		return nil
	}

	for _, item := range items {
		sourceType := forcedSourceType
		if sourceType == "" {
			sourceType = "dummy"
		}
		// Source-language row: approved immediately, provider "seed".
		if err := exec(`
		INSERT INTO mk_translator
			(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
		VALUES
			($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text))
		ON CONFLICT (t_key, lang_code) DO UPDATE
		SET
			source_text_tr = EXCLUDED.source_text_tr,
			provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($3::text), true),
			updated_at = NOW()
		`, item.TKey, item.SourceTextTR, sourceType); err != nil {
			return 0, err
		}
		// One pending, untranslated row per target language.
		for _, lang := range languages {
			if err := exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(COALESCE(mk_translator.provider_meta, '{}'::jsonb), '{source_type}', to_jsonb($4::text), true),
				updated_at = NOW()
			`, item.TKey, lang, item.SourceTextTR, sourceType); err != nil {
				return 0, err
			}
		}
	}
	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return total, nil
}
// upsertSourceSeeds writes the collected seeds into mk_translator inside a
// single transaction: each seed gets an approved 'tr' row plus one pending
// row per target language. On conflict, an already-stored non-empty
// source_type in provider_meta takes precedence over the seed's.
// Returns the total affected-row count reported by the database.
func upsertSourceSeeds(db *sql.DB, seeds []sourceSeed, languages []string) (int, error) {
	tx, err := db.Begin()
	if err != nil {
		return 0, err
	}
	// Rollback becomes a no-op once Commit succeeds below.
	defer tx.Rollback()

	total := 0
	// exec runs one upsert and folds its affected-row count into total.
	exec := func(query string, args ...any) error {
		res, execErr := tx.Exec(query, args...)
		if execErr != nil {
			return execErr
		}
		if n, _ := res.RowsAffected(); n > 0 {
			total += int(n)
		}
		return nil
	}

	for _, seed := range seeds {
		// Skip incomplete seeds rather than failing the whole batch.
		if seed.TKey == "" || seed.SourceText == "" {
			continue
		}
		sourceType := normalizeTranslationSourceType(seed.SourceType)
		if sourceType == "" {
			sourceType = "dummy"
		}
		if err := exec(`
		INSERT INTO mk_translator
			(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
		VALUES
			($1, 'tr', $2, $2, false, 'approved', 'seed', jsonb_build_object('source_type', $3::text, 'is_new', false))
		ON CONFLICT (t_key, lang_code) DO UPDATE
		SET
			source_text_tr = EXCLUDED.source_text_tr,
			provider_meta = jsonb_set(
				COALESCE(mk_translator.provider_meta, '{}'::jsonb),
				'{source_type}',
				to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $3::text)),
				true
			),
			updated_at = NOW()
		`, seed.TKey, seed.SourceText, sourceType); err != nil {
			return 0, err
		}
		for _, lang := range languages {
			if err := exec(`
			INSERT INTO mk_translator
				(t_key, lang_code, source_text_tr, translated_text, is_manual, status, provider, provider_meta)
			VALUES
				($1, $2, $3, NULL, false, 'pending', NULL, jsonb_build_object('source_type', $4::text, 'is_new', true))
			ON CONFLICT (t_key, lang_code) DO UPDATE
			SET
				source_text_tr = EXCLUDED.source_text_tr,
				provider_meta = jsonb_set(
					COALESCE(mk_translator.provider_meta, '{}'::jsonb),
					'{source_type}',
					to_jsonb(COALESCE(NULLIF(mk_translator.provider_meta->>'source_type', ''), $4::text)),
					true
				),
				updated_at = NOW()
			`, seed.TKey, lang, seed.SourceText, sourceType); err != nil {
				return 0, err
			}
		}
	}
	if err := tx.Commit(); err != nil {
		return 0, err
	}
	return total, nil
}
// collectSourceSeeds gathers up to limit seeds, in priority order: Postgres
// schema names, then MSSQL schema names, then UI string literals. Seeds are
// de-duplicated across all sources by normalized source text, and each
// later source only receives the remaining budget.
func collectSourceSeeds(pgDB *sql.DB, mssqlDB *sql.DB, limit int) []sourceSeed {
	seen := map[string]struct{}{}
	out := make([]sourceSeed, 0, limit)

	// add appends one seed unless it is incomplete or a duplicate by text.
	add := func(seed sourceSeed) {
		if seed.TKey == "" || seed.SourceText == "" || seed.SourceType == "" {
			return
		}
		textKey := normalizeSeedTextKey(seed.SourceText)
		if _, dup := seen[textKey]; dup {
			return
		}
		seen[textKey] = struct{}{}
		out = append(out, seed)
	}
	// drain feeds one batch into out and reports whether the budget is spent.
	drain := func(batch []sourceSeed) bool {
		for _, seed := range batch {
			add(seed)
			if len(out) >= limit {
				return true
			}
		}
		return false
	}

	if drain(collectPostgreSeeds(pgDB, limit)) {
		return out
	}
	if drain(collectMSSQLSeeds(mssqlDB, limit-len(out))) {
		return out
	}
	drain(collectDummySeeds(limit - len(out)))
	return out
}
// collectPostgreSeeds derives translation seeds from the column names of the
// public Postgres schema: each column name is humanized (underscores become
// spaces) and keyed by a deterministic text-based key. Query failures are
// logged and yield an empty result, mirroring collectMSSQLSeeds.
func collectPostgreSeeds(pgDB *sql.DB, limit int) []sourceSeed {
	if pgDB == nil || limit <= 0 {
		return nil
	}
	rows, err := pgDB.Query(`
	SELECT table_name, column_name
	FROM information_schema.columns
	WHERE table_schema = 'public'
	ORDER BY table_name, ordinal_position
	LIMIT $1
	`, limit)
	if err != nil {
		// Consistency fix: the MSSQL collector logs its skipped collection
		// (see collectMSSQLSeeds); do the same here instead of failing silently.
		log.Printf("[TranslationSync] stage=collect_postgre skipped err=%v", err)
		return nil
	}
	defer rows.Close()
	out := make([]sourceSeed, 0, limit)
	for rows.Next() && len(out) < limit {
		var tableName, columnName string
		if err := rows.Scan(&tableName, &columnName); err != nil {
			// Skip unreadable rows; remaining rows may still be usable.
			continue
		}
		text := normalizeDisplayText(columnName)
		out = append(out, sourceSeed{
			TKey:       makeTextBasedSeedKey(text),
			SourceText: text,
			SourceType: "postgre",
		})
	}
	// Surface iteration errors (e.g. a dropped connection) in the logs;
	// whatever was collected so far is still returned.
	if err := rows.Err(); err != nil {
		log.Printf("[TranslationSync] stage=collect_postgre partial err=%v", err)
	}
	return out
}
// collectMSSQLSeeds derives translation seeds from MSSQL column names via
// INFORMATION_SCHEMA. The requested limit is additionally capped by
// TRANSLATION_MSSQL_SEED_LIMIT (default 2500), and the schema query runs
// under a TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC timeout (default 20s).
// Any query failure is logged and the collection is skipped.
func collectMSSQLSeeds(mssqlDB *sql.DB, limit int) []sourceSeed {
	if mssqlDB == nil || limit <= 0 {
		return nil
	}
	if maxRows := parsePositiveIntEnv("TRANSLATION_MSSQL_SEED_LIMIT", 2500); limit > maxRows {
		limit = maxRows
	}
	timeout := time.Duration(parsePositiveIntEnv("TRANSLATION_MSSQL_SCHEMA_TIMEOUT_SEC", 20)) * time.Second
	ctx, cancel := context.WithTimeout(context.Background(), timeout)
	defer cancel()

	// TOP(n) takes the limit directly in the statement text; n comes from
	// our own clamped integer, never from user input.
	query := fmt.Sprintf(`
	SELECT TOP (%d) TABLE_NAME, COLUMN_NAME
	FROM INFORMATION_SCHEMA.COLUMNS
	ORDER BY TABLE_NAME, ORDINAL_POSITION
	`, limit)
	rows, err := mssqlDB.QueryContext(ctx, query)
	if err != nil {
		log.Printf("[TranslationSync] stage=collect_mssql skipped err=%v", err)
		return nil
	}
	defer rows.Close()

	seeds := make([]sourceSeed, 0, limit)
	for rows.Next() && len(seeds) < limit {
		var tableName, columnName string
		if scanErr := rows.Scan(&tableName, &columnName); scanErr != nil {
			continue
		}
		text := normalizeDisplayText(columnName)
		seeds = append(seeds, sourceSeed{
			TKey:       makeTextBasedSeedKey(text),
			SourceText: text,
			SourceType: "mssql",
		})
	}
	return seeds
}
// collectDummySeeds scans the UI sources (ui/src, *.vue/*.js/*.ts files)
// for quoted string literals that look like human-readable display text and
// turns them into "dummy" seeds, de-duplicated by exact text, up to limit.
func collectDummySeeds(limit int) []sourceSeed {
	if limit <= 0 {
		return nil
	}
	root := detectProjectRoot()
	if root == "" {
		return nil
	}
	uiRoot := filepath.Join(root, "ui", "src")
	if _, err := os.Stat(uiRoot); err != nil {
		return nil
	}

	seeds := make([]sourceSeed, 0, limit)
	seen := make(map[string]struct{}, limit)
	// Sentinel used only to abort the walk once the budget is spent;
	// deliberately discarded below.
	errLimitReached := errors.New("limit reached")

	_ = filepath.WalkDir(uiRoot, func(path string, d fs.DirEntry, walkErr error) error {
		if walkErr != nil || d.IsDir() {
			return nil
		}
		switch strings.ToLower(filepath.Ext(path)) {
		case ".vue", ".js", ".ts":
			// frontend source files only
		default:
			return nil
		}
		content, readErr := os.ReadFile(path)
		if readErr != nil {
			return nil
		}
		for _, match := range reQuotedText.FindAllStringSubmatch(string(content), -1) {
			text := strings.TrimSpace(match[1])
			if !isCandidateText(text) {
				continue
			}
			if _, dup := seen[text]; dup {
				continue
			}
			seen[text] = struct{}{}
			seeds = append(seeds, sourceSeed{
				TKey:       makeTextBasedSeedKey(text),
				SourceText: text,
				SourceType: "dummy",
			})
			if len(seeds) >= limit {
				return errLimitReached
			}
		}
		return nil
	})
	return seeds
}
// autoTranslatePendingRows delegates to autoTranslatePendingRowsForKeys with
// no key filter and an auto-generated trace ID.
// NOTE(review): the callee returns immediately when the key list is empty
// ("skip reason=no_keys"), so this wrapper currently translates nothing —
// confirm whether it still has callers or should pass an explicit key set.
func autoTranslatePendingRows(db *sql.DB, langs []string, limit int) (int, error) {
	return autoTranslatePendingRowsForKeys(db, langs, limit, nil, "")
}
// autoTranslatePendingRowsForKeys machine-translates untranslated
// mk_translator rows restricted to the given t_keys and language codes,
// writing each result back as a 'pending' azure_translator row for later
// review. It returns the number of rows successfully translated AND updated.
// An empty key list makes it a no-op. Translation/update failures for
// individual rows are counted and logged but never abort the batch.
// Progress is logged every TRANSLATION_AUTO_PROGRESS_EVERY rows (default
// 100) or TRANSLATION_AUTO_PROGRESS_SEC seconds (default 15).
func autoTranslatePendingRowsForKeys(db *sql.DB, langs []string, limit int, keys []string, traceID string) (int, error) {
	traceID = strings.TrimSpace(traceID)
	if traceID == "" {
		traceID = "trauto-" + strconv.FormatInt(time.Now().UnixNano(), 36)
	}
	// Without a key filter there is nothing to translate in this mode.
	if len(keys) == 0 {
		log.Printf("[TranslationAuto] trace=%s stage=skip reason=no_keys", traceID)
		return 0, nil
	}
	start := time.Now()
	// Oldest-first batch of non-manual rows whose translation is missing.
	// Placeholders: $1 = languages, $2 = row limit, $3 = t_keys.
	rows, err := db.Query(`
	SELECT id, lang_code, source_text_tr
	FROM mk_translator
	WHERE lang_code = ANY($1)
	  AND t_key = ANY($3)
	  AND (translated_text IS NULL OR btrim(translated_text) = '')
	  AND is_manual = false
	ORDER BY updated_at ASC
	LIMIT $2
	`, pqArray(langs), limit, pq.Array(keys))
	if err != nil {
		return 0, err
	}
	defer rows.Close()
	// pending is one row awaiting machine translation.
	type pending struct {
		ID   int64
		Lang string
		Text string
	}
	list := make([]pending, 0, limit)
	pendingByLang := map[string]int{}
	// Total source length in runes; used for logging only.
	sourceChars := 0
	for rows.Next() {
		var p pending
		if err := rows.Scan(&p.ID, &p.Lang, &p.Text); err != nil {
			continue
		}
		// Skip rows with blank source text or an unsupported language code.
		if strings.TrimSpace(p.Text) == "" {
			continue
		}
		p.Lang = normalizeTranslationLang(p.Lang)
		if p.Lang == "" {
			continue
		}
		list = append(list, p)
		pendingByLang[p.Lang]++
		sourceChars += len([]rune(strings.TrimSpace(p.Text)))
	}
	if err := rows.Err(); err != nil {
		return 0, err
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=prepare candidates=%d limit=%d keys=%d langs=%v source_chars=%d pending_by_lang=%s",
		traceID,
		len(list),
		limit,
		len(keys),
		langs,
		sourceChars,
		formatLangCounts(pendingByLang),
	)
	if len(list) == 0 {
		log.Printf(
			"[TranslationAuto] trace=%s stage=done duration_ms=%d translated=0 failed_translate=0 failed_update=0 rps=0.00",
			traceID,
			time.Since(start).Milliseconds(),
		)
		return 0, nil
	}
	done := 0
	failedTranslate := 0
	failedUpdate := 0
	doneByLang := map[string]int{}
	// Progress cadence: every N processed rows and at least every M seconds.
	progressEvery := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_EVERY", 100)
	if progressEvery <= 0 {
		progressEvery = 100
	}
	progressSec := parsePositiveIntEnv("TRANSLATION_AUTO_PROGRESS_SEC", 15)
	if progressSec <= 0 {
		progressSec = 15
	}
	progressTicker := time.Duration(progressSec) * time.Second
	lastProgress := time.Now()
	for i, p := range list {
		// Translate one row; an error or blank result is counted, not fatal.
		tr, err := callAzureTranslate(p.Text, p.Lang)
		if err != nil || strings.TrimSpace(tr) == "" {
			failedTranslate++
			continue
		}
		// Store the machine translation as 'pending' so a human can approve it.
		_, err = db.Exec(`
		UPDATE mk_translator
		SET translated_text = $2,
			status = 'pending',
			is_manual = false,
			provider = 'azure_translator',
			updated_at = NOW()
		WHERE id = $1
		`, p.ID, strings.TrimSpace(tr))
		if err != nil {
			failedUpdate++
			continue
		}
		done++
		doneByLang[p.Lang]++
		processed := i + 1
		shouldLogProgress := processed%progressEvery == 0 || time.Since(lastProgress) >= progressTicker || processed == len(list)
		if shouldLogProgress {
			elapsed := time.Since(start)
			rps := float64(done)
			if elapsed > 0 {
				rps = float64(done) / elapsed.Seconds()
			}
			log.Printf(
				"[TranslationAuto] trace=%s stage=progress processed=%d/%d translated=%d failed_translate=%d failed_update=%d elapsed_ms=%d rps=%.2f done_by_lang=%s",
				traceID,
				processed,
				len(list),
				done,
				failedTranslate,
				failedUpdate,
				elapsed.Milliseconds(),
				rps,
				formatLangCounts(doneByLang),
			)
			lastProgress = time.Now()
		}
	}
	elapsed := time.Since(start)
	rps := float64(done)
	if elapsed > 0 {
		rps = float64(done) / elapsed.Seconds()
	}
	log.Printf(
		"[TranslationAuto] trace=%s stage=done duration_ms=%d candidates=%d translated=%d failed_translate=%d failed_update=%d rps=%.2f done_by_lang=%s",
		traceID,
		elapsed.Milliseconds(),
		len(list),
		done,
		failedTranslate,
		failedUpdate,
		rps,
		formatLangCounts(doneByLang),
	)
	return done, nil
}
// formatLangCounts renders a language→count map as "de=1,en=2" with keys
// in sorted order, or "-" when the map is empty.
func formatLangCounts(counts map[string]int) string {
	if len(counts) == 0 {
		return "-"
	}
	langs := make([]string, 0, len(counts))
	for lang := range counts {
		langs = append(langs, lang)
	}
	sort.Strings(langs)
	var b strings.Builder
	for i, lang := range langs {
		if i > 0 {
			b.WriteByte(',')
		}
		fmt.Fprintf(&b, "%s=%d", lang, counts[lang])
	}
	return b.String()
}
// filterNewSeeds drops every seed whose t_key or normalized source text is
// already present in mk_translator, leaving only genuinely new entries.
// When the lookup query fails the unfiltered input is returned (fail-open).
func filterNewSeeds(pgDB *sql.DB, seeds []sourceSeed) []sourceSeed {
	if pgDB == nil || len(seeds) == 0 {
		return seeds
	}
	keys := uniqueSeedKeys(seeds)
	if len(keys) == 0 {
		return nil
	}
	textKeys := uniqueSeedTextKeys(seeds)
	rows, err := pgDB.Query(`
	SELECT DISTINCT t_key, lower(btrim(source_text_tr)) AS text_key
	FROM mk_translator
	WHERE t_key = ANY($1)
	   OR lower(btrim(source_text_tr)) = ANY($2)
	`, pq.Array(keys), pq.Array(textKeys))
	if err != nil {
		return seeds
	}
	defer rows.Close()

	knownKeys := make(map[string]struct{}, len(keys))
	knownTexts := make(map[string]struct{}, len(textKeys))
	for rows.Next() {
		var key string
		var textKey sql.NullString
		if scanErr := rows.Scan(&key, &textKey); scanErr != nil {
			continue
		}
		if strings.TrimSpace(key) != "" {
			knownKeys[key] = struct{}{}
		}
		if textKey.Valid {
			if t := strings.TrimSpace(textKey.String); t != "" {
				knownTexts[t] = struct{}{}
			}
		}
	}

	fresh := make([]sourceSeed, 0, len(seeds))
	for _, seed := range seeds {
		if _, exists := knownKeys[seed.TKey]; exists {
			continue
		}
		if _, exists := knownTexts[normalizeSeedTextKey(seed.SourceText)]; exists {
			continue
		}
		fresh = append(fresh, seed)
	}
	return fresh
}
// uniqueSeedKeys returns the non-empty t_keys of the seeds, first occurrence
// first, with duplicates removed.
func uniqueSeedKeys(seeds []sourceSeed) []string {
	seen := make(map[string]struct{}, len(seeds))
	keys := make([]string, 0, len(seeds))
	for _, seed := range seeds {
		k := seed.TKey
		if k == "" {
			continue
		}
		if _, dup := seen[k]; !dup {
			seen[k] = struct{}{}
			keys = append(keys, k)
		}
	}
	return keys
}
// uniqueSeedTextKeys returns the distinct normalized text keys of the seeds
// in first-occurrence order, skipping seeds that normalize to "".
func uniqueSeedTextKeys(seeds []sourceSeed) []string {
	seen := make(map[string]struct{}, len(seeds))
	textKeys := make([]string, 0, len(seeds))
	for _, seed := range seeds {
		textKey := normalizeSeedTextKey(seed.SourceText)
		if textKey == "" {
			continue
		}
		if _, dup := seen[textKey]; !dup {
			seen[textKey] = struct{}{}
			textKeys = append(textKeys, textKey)
		}
	}
	return textKeys
}
// reuseExistingSeedKeys remaps each seed's t_key onto the oldest existing
// mk_translator key that stores the same (normalized) source text, so that
// re-collected texts never mint duplicate keys. It returns the updated
// slice and how many seeds were remapped; a failed lookup leaves the seeds
// untouched.
func reuseExistingSeedKeys(pgDB *sql.DB, seeds []sourceSeed) ([]sourceSeed, int) {
	if pgDB == nil || len(seeds) == 0 {
		return seeds, 0
	}
	textKeys := uniqueSeedTextKeys(seeds)
	if len(textKeys) == 0 {
		return seeds, 0
	}
	// One canonical t_key per text: the row with the lowest id wins.
	rows, err := pgDB.Query(`
	SELECT x.text_key, x.t_key
	FROM (
		SELECT
			lower(btrim(source_text_tr)) AS text_key,
			t_key,
			ROW_NUMBER() OVER (
				PARTITION BY lower(btrim(source_text_tr))
				ORDER BY id ASC
			) AS rn
		FROM mk_translator
		WHERE lower(btrim(source_text_tr)) = ANY($1)
	) x
	WHERE x.rn = 1
	`, pq.Array(textKeys))
	if err != nil {
		return seeds, 0
	}
	defer rows.Close()

	canonical := make(map[string]string, len(textKeys))
	for rows.Next() {
		var textKey, existingKey string
		if scanErr := rows.Scan(&textKey, &existingKey); scanErr != nil {
			continue
		}
		textKey = strings.TrimSpace(strings.ToLower(textKey))
		existingKey = strings.TrimSpace(existingKey)
		if textKey != "" && existingKey != "" {
			canonical[textKey] = existingKey
		}
	}

	reused := 0
	for i := range seeds {
		textKey := normalizeSeedTextKey(seeds[i].SourceText)
		if textKey == "" {
			continue
		}
		existingKey, known := canonical[textKey]
		if !known || existingKey == "" || existingKey == seeds[i].TKey {
			continue
		}
		seeds[i].TKey = existingKey
		reused++
	}
	return seeds, reused
}
// countSeedsBySource tallies seeds per normalized source type. Types that
// normalize to "" are counted as "dummy"; the three known buckets are
// always present in the result, even at zero.
func countSeedsBySource(seeds []sourceSeed) map[string]int {
	counts := map[string]int{"dummy": 0, "postgre": 0, "mssql": 0}
	for _, seed := range seeds {
		bucket := normalizeTranslationSourceType(seed.SourceType)
		if bucket == "" {
			bucket = "dummy"
		}
		counts[bucket]++
	}
	return counts
}
// formatSourceCounts renders the per-source seed counters in a fixed order
// for log output; missing keys read as zero.
func formatSourceCounts(counts map[string]int) string {
	return fmt.Sprintf(
		"dummy=%d postgre=%d mssql=%d",
		counts["dummy"],
		counts["postgre"],
		counts["mssql"],
	)
}
func requestTraceID(r *http.Request) string {
if r == nil {
return "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
id := strings.TrimSpace(r.Header.Get("X-Request-ID"))
if id == "" {
id = strings.TrimSpace(r.Header.Get("X-Correlation-ID"))
}
if id == "" {
id = "trsync-" + strconv.FormatInt(time.Now().UnixNano(), 36)
}
return id
}
// callAzureTranslate translates sourceText into targetLang through the Azure
// Translator REST API (api-version 3.0). Credentials and endpoint come from
// AZURE_TRANSLATOR_KEY / AZURE_TRANSLATOR_ENDPOINT / AZURE_TRANSLATOR_REGION;
// the source language defaults to "tr" and can be overridden with
// TRANSLATION_SOURCE_LANG. Returns the trimmed translated text, or an error
// when configuration is missing, the target language is unsupported or "tr",
// the HTTP call fails, or the response is empty.
func callAzureTranslate(sourceText, targetLang string) (string, error) {
	key := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_KEY"))
	endpoint := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_ENDPOINT"))
	region := strings.TrimSpace(os.Getenv("AZURE_TRANSLATOR_REGION"))
	if key == "" {
		return "", errors.New("AZURE_TRANSLATOR_KEY not set")
	}
	if endpoint == "" {
		return "", errors.New("AZURE_TRANSLATOR_ENDPOINT not set")
	}
	if region == "" {
		return "", errors.New("AZURE_TRANSLATOR_REGION not set")
	}
	sourceLang := strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SOURCE_LANG")))
	if sourceLang == "" {
		sourceLang = "tr"
	}
	// Translating into the source language ("tr") is rejected by design.
	targetLang = normalizeTranslationLang(targetLang)
	if targetLang == "" || targetLang == "tr" {
		return "", fmt.Errorf("invalid target language: %q", targetLang)
	}
	endpoint = strings.TrimRight(endpoint, "/")
	normalizedEndpoint := strings.ToLower(endpoint)
	translatePath := "/translate"
	// Azure custom endpoint requires the translator path with version in URL.
	if strings.Contains(normalizedEndpoint, ".cognitiveservices.azure.com") {
		translatePath = "/translator/text/v3.0/translate"
	}
	baseURL, err := url.Parse(endpoint + translatePath)
	if err != nil {
		return "", fmt.Errorf("invalid AZURE_TRANSLATOR_ENDPOINT: %w", err)
	}
	q := baseURL.Query()
	// The global endpoint carries the version as a query parameter; the
	// cognitiveservices path above already embeds it.
	if translatePath == "/translate" {
		q.Set("api-version", "3.0")
	}
	q.Set("from", sourceLang)
	q.Set("to", targetLang)
	baseURL.RawQuery = q.Encode()
	// Request body: a JSON array with a single {"text": ...} element.
	payload := []map[string]string{
		{"text": sourceText},
	}
	body, _ := json.Marshal(payload)
	req, err := http.NewRequest(http.MethodPost, baseURL.String(), bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	req.Header.Set("Ocp-Apim-Subscription-Key", key)
	req.Header.Set("Ocp-Apim-Subscription-Region", region)
	req.Header.Set("Content-Type", "application/json; charset=UTF-8")
	timeoutSec := parsePositiveIntEnv("TRANSLATION_HTTP_TIMEOUT_SEC", 60)
	client := &http.Client{Timeout: time.Duration(timeoutSec) * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		// Include at most 1 KiB of the error body in the returned error.
		raw, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
		return "", fmt.Errorf("azure translator status=%d body=%s", resp.StatusCode, strings.TrimSpace(string(raw)))
	}
	// Response shape: [{"translations":[{"text":...,"to":...}]}].
	var result []struct {
		Translations []struct {
			Text string `json:"text"`
			To   string `json:"to"`
		} `json:"translations"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", err
	}
	if len(result) == 0 || len(result[0].Translations) == 0 {
		return "", errors.New("azure translator empty response")
	}
	return strings.TrimSpace(result[0].Translations[0].Text), nil
}
// nullableString maps a nil *string to nil (SQL NULL) and otherwise returns
// the trimmed string value — note an all-whitespace value becomes "" rather
// than nil.
func nullableString(v *string) any {
	if v == nil {
		return nil
	}
	return strings.TrimSpace(*v)
}
// normalizeTranslationLang lowercases and trims v, returning it only when it
// is one of the supported language codes; otherwise "".
func normalizeTranslationLang(v string) string {
	lang := strings.ToLower(strings.TrimSpace(v))
	if _, supported := translationLangSet[lang]; !supported {
		return ""
	}
	return lang
}
// normalizeTranslationStatus lowercases and trims v, returning it only when
// it is a recognized translation status; otherwise "".
func normalizeTranslationStatus(v string) string {
	status := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationStatusSet[status]; !known {
		return ""
	}
	return status
}
// normalizeTranslationSourceType lowercases and trims v, returning it only
// when it is a recognized seed source type; otherwise "".
func normalizeTranslationSourceType(v string) string {
	sourceType := strings.ToLower(strings.TrimSpace(v))
	if _, known := translationSourceTypeSet[sourceType]; !known {
		return ""
	}
	return sourceType
}
// normalizeTargetLanguages validates and de-duplicates the requested target
// languages, dropping "tr" (the source language) and anything unsupported.
// When nothing valid remains the full default language set is returned.
func normalizeTargetLanguages(list []string) []string {
	fallback := []string{"en", "de", "it", "es", "ru", "ar"}
	seen := make(map[string]struct{}, len(list))
	out := make([]string, 0, len(list))
	for _, raw := range list {
		lang := normalizeTranslationLang(raw)
		if lang == "" || lang == "tr" {
			continue
		}
		if _, dup := seen[lang]; dup {
			continue
		}
		seen[lang] = struct{}{}
		out = append(out, lang)
	}
	if len(out) == 0 {
		return fallback
	}
	return out
}
// normalizeOptionalStatus converts an optional status into a SQL-bindable
// value: nil when absent or invalid, the normalized status otherwise.
func normalizeOptionalStatus(v *string) any {
	if v == nil {
		return nil
	}
	if s := normalizeTranslationStatus(*v); s != "" {
		return s
	}
	return nil
}
// normalizeOptionalSourceType converts an optional source type into a
// SQL-bindable value: nil when absent or invalid, the normalized type
// otherwise.
func normalizeOptionalSourceType(v *string) any {
	if v == nil {
		return nil
	}
	if s := normalizeTranslationSourceType(*v); s != "" {
		return s
	}
	return nil
}
// normalizeMissingItems trims each item's key and source text, drops entries
// where either field is empty, and keeps only the first occurrence of every
// key.
func normalizeMissingItems(items []UpsertMissingItem) []UpsertMissingItem {
	seen := make(map[string]struct{}, len(items))
	out := make([]UpsertMissingItem, 0, len(items))
	for _, raw := range items {
		key := strings.TrimSpace(raw.TKey)
		source := strings.TrimSpace(raw.SourceTextTR)
		if key == "" || source == "" {
			continue
		}
		if _, dup := seen[key]; dup {
			continue
		}
		seen[key] = struct{}{}
		out = append(out, UpsertMissingItem{TKey: key, SourceTextTR: source})
	}
	return out
}
// normalizeIDListInt64 drops non-positive and duplicate IDs and returns the
// remainder sorted ascending (always a non-nil slice).
func normalizeIDListInt64(ids []int64) []int64 {
	uniq := make(map[int64]struct{}, len(ids))
	for _, id := range ids {
		if id > 0 {
			uniq[id] = struct{}{}
		}
	}
	out := make([]int64, 0, len(uniq))
	for id := range uniq {
		out = append(out, id)
	}
	sort.Slice(out, func(a, b int) bool { return out[a] < out[b] })
	return out
}
// detectProjectRoot looks for the project root by checking the working
// directory and up to two parent directories for a "ui" entry; it returns
// "" when none of them qualifies.
func detectProjectRoot() string {
	wd, err := os.Getwd()
	if err != nil {
		return ""
	}
	dir := wd
	for i := 0; i < 3; i++ {
		if _, statErr := os.Stat(filepath.Join(dir, "ui")); statErr == nil {
			return dir
		}
		dir = filepath.Dir(dir)
	}
	return ""
}
// isCandidateText reports whether s looks like human-readable display text
// worth seeding for translation: 3–120 characters long, containing at least
// one letter, matching no exclusion pattern, and not an API path.
func isCandidateText(s string) bool {
	s = strings.TrimSpace(s)
	// Measure length in runes, not bytes: Turkish and other non-ASCII texts
	// use multi-byte UTF-8 characters, and byte-based bounds would over-reject
	// them. The sibling counters in this file already measure characters as
	// runes (len([]rune(...))).
	if n := len([]rune(s)); n < 3 || n > 120 {
		return false
	}
	if reBadText.MatchString(s) {
		return false
	}
	if !reHasLetter.MatchString(s) {
		return false
	}
	// Strings containing API paths are routing artifacts, not display text.
	if strings.Contains(s, "/api/") {
		return false
	}
	return true
}
// sanitizeKey lowercases and trims s, converts spaces to underscores,
// replaces remaining unsafe characters with underscores, and strips
// leading/trailing underscores; "x" is returned when nothing survives.
func sanitizeKey(s string) string {
	key := strings.ToLower(strings.TrimSpace(s))
	key = strings.ReplaceAll(key, " ", "_")
	key = reKeyUnsafe.ReplaceAllString(key, "_")
	key = strings.Trim(key, "_")
	if key == "" {
		key = "x"
	}
	return key
}
// normalizeDisplayText turns an identifier-like string into display text:
// underscores become spaces and runs of whitespace collapse to single
// spaces ("" for blank input).
func normalizeDisplayText(s string) string {
	words := strings.Fields(strings.ReplaceAll(s, "_", " "))
	return strings.Join(words, " ")
}
// hashKey builds a stable, human-readable key: the sanitized text truncated
// to 40 bytes, suffixed with a cheap rune-sum checksum modulo 1e6 to reduce
// collisions between texts that sanitize identically.
func hashKey(s string) string {
	base := sanitizeKey(s)
	if len(base) > 40 {
		base = base[:40]
	}
	checksum := 0
	for _, r := range s {
		checksum += int(r)
	}
	return base + "_" + strconv.Itoa(checksum%1000000)
}
// makeTextBasedSeedKey derives a deterministic t_key from the source text
// itself (normalized, hashed, "txt."-prefixed), so identical texts map to
// the same key regardless of which source they were collected from.
func makeTextBasedSeedKey(sourceText string) string {
	return "txt." + hashKey(normalizeSeedTextKey(sourceText))
}
// normalizeSeedTextKey canonicalizes text for duplicate detection:
// display-normalized (underscores and whitespace runs collapsed), trimmed,
// and lowercased.
func normalizeSeedTextKey(s string) string {
	return strings.ToLower(strings.TrimSpace(normalizeDisplayText(s)))
}
// pqArray wraps the values in a pq.Array for binding, trimming each entry
// and sorting the result; an empty input yields an empty (non-nil) array.
func pqArray(values []string) any {
	trimmed := make([]string, 0, len(values))
	for _, v := range values {
		trimmed = append(trimmed, strings.TrimSpace(v))
	}
	sort.Strings(trimmed)
	return pq.Array(trimmed)
}
func parsePositiveIntEnv(name string, fallback int) int {
raw := strings.TrimSpace(os.Getenv(name))
if raw == "" {
return fallback
}
n, err := strconv.Atoi(raw)
if err != nil || n <= 0 {
return fallback
}
return n
}
// normalizeStringList trims entries, drops empties, de-duplicates (first
// occurrence wins), and caps the output at max items; max <= 0 means no
// extra cap beyond the input length. A nil/empty input yields nil.
func normalizeStringList(items []string, max int) []string {
	if len(items) == 0 {
		return nil
	}
	if max <= 0 {
		max = len(items)
	}
	seen := make(map[string]struct{}, len(items))
	out := make([]string, 0, len(items))
	for _, raw := range items {
		item := strings.TrimSpace(raw)
		if item == "" {
			continue
		}
		if _, dup := seen[item]; dup {
			continue
		}
		seen[item] = struct{}{}
		out = append(out, item)
		if len(out) >= max {
			break
		}
	}
	return out
}

View File

@@ -3,8 +3,10 @@ package routes
import (
"bssapp-backend/auth"
"bssapp-backend/internal/auditlog"
"bssapp-backend/internal/authz"
"bssapp-backend/internal/mailer"
"bssapp-backend/internal/security"
"bssapp-backend/middlewares"
"bssapp-backend/models"
"bssapp-backend/queries"
"bytes"
@@ -323,6 +325,9 @@ func handleUserUpdate(db *sql.DB, w http.ResponseWriter, r *http.Request, userID
return
}
authz.ClearPiyasaCache(int(userID))
middlewares.ClearAuthzScopeCacheForUser(userID)
_ = json.NewEncoder(w).Encode(map[string]any{"success": true})
}
@@ -424,6 +429,9 @@ func handleUserDelete(db *sql.DB, w http.ResponseWriter, r *http.Request, userID
return
}
authz.ClearPiyasaCache(int(userID))
middlewares.ClearAuthzScopeCacheForUser(userID)
if claims != nil {
auditlog.Enqueue(r.Context(), auditlog.ActivityLog{
ActionType: "user_delete",

137
svc/run.log Normal file
View File

@@ -0,0 +1,137 @@
2026/02/23 12:29:31 🔥🔥🔥 BSSAPP BACKEND STARTED — LOGIN ROUTE SHOULD EXIST 🔥🔥🔥
2026/02/23 12:29:31 🔐 JWT_SECRET yüklendi
MSSQL bağlantısı başarılı
2026/02/23 12:29:31 PostgreSQL bağlantısı başarılı
2026/02/23 12:29:31 ✅ Admin dept permissions seeded
2026/02/23 12:29:31 🟢 auditlog Init called, buffer: 1000
2026/02/23 12:29:31 🕵️ AuditLog sistemi başlatıldı (buffer=1000)
2026/02/23 12:29:31 ✉️ Graph Mailer hazır (App-only token) | from=baggiss@baggi.com.tr
2026/02/23 12:29:31 ✉️ Graph Mailer hazır
2026/02/23 12:29:31 🟢 auditlog worker STARTED
📋 [DEBUG] İlk 10 kullanıcı:
- 1 : ctengiz
- 2 : ali.kale
- 5 : mehmet.keçeci
- 6 : mert.keçeci
- 7 : samet.keçeci
- 9 : orhan.caliskan
- 10 : nilgun.sara
- 14 : rustem.kurbanov
- 15 : caner.akyol
- 16 : kemal.matyakupov
2026/02/23 12:29:32 ✅ Route+Perm registered → POST /api/auth/login [auth:login]
2026/02/23 12:29:32 ✅ Route+Perm registered → POST /api/auth/refresh [auth:refresh]
2026/02/23 12:29:32 ✅ Route+Perm registered → POST /api/password/forgot [auth:update]
2026/02/23 12:29:33 ✅ Route+Perm registered → GET /api/password/reset/validate/{token} [auth:view]
2026/02/23 12:29:33 ✅ Route+Perm registered → POST /api/password/reset [auth:update]
2026/02/23 12:29:34 ✅ Route+Perm registered → POST /api/password/change [auth:update]
2026/02/23 12:29:34 ✅ Route+Perm registered → GET /api/activity-logs [system:read]
2026/02/23 12:29:35 ✅ Route+Perm registered → POST /api/test-mail [system:update]
2026/02/23 12:29:35 ✅ Route+Perm registered → GET /api/roles/{id}/permissions [system:update]
2026/02/23 12:29:36 ✅ Route+Perm registered → POST /api/roles/{id}/permissions [system:update]
2026/02/23 12:29:36 ✅ Route+Perm registered → GET /api/users/{id}/permissions [system:update]
2026/02/23 12:29:36 ✅ Route+Perm registered → POST /api/users/{id}/permissions [system:update]
2026/02/23 12:29:37 ✅ Route+Perm registered → GET /api/permissions/routes [system:view]
2026/02/23 12:29:37 ✅ Route+Perm registered → GET /api/permissions/effective [system:view]
2026/02/23 12:29:38 ✅ Route+Perm registered → GET /api/permissions/matrix [system:view]
2026/02/23 12:29:38 ✅ Route+Perm registered → GET /api/role-dept-permissions/list [system:update]
2026/02/23 12:29:38 ✅ Route+Perm registered → GET /api/roles/{roleId}/departments/{deptCode}/permissions [system:update]
2026/02/23 12:29:39 ✅ Route+Perm registered → POST /api/roles/{roleId}/departments/{deptCode}/permissions [system:update]
2026/02/23 12:29:39 ✅ Route+Perm registered → GET /api/users/list [user:view]
2026/02/23 12:29:40 ✅ Route+Perm registered → POST /api/users [user:insert]
2026/02/23 12:29:40 ✅ Route+Perm registered → GET /api/users/{id} [user:update]
2026/02/23 12:29:41 ✅ Route+Perm registered → PUT /api/users/{id} [user:update]
2026/02/23 12:29:41 ✅ Route+Perm registered → DELETE /api/users/{id} [user:delete]
2026/02/23 12:29:41 ✅ Route+Perm registered → POST /api/users/{id}/admin-reset-password [user:update]
2026/02/23 12:29:42 ✅ Route+Perm registered → POST /api/users/{id}/send-password-mail [user:update]
2026/02/23 12:29:42 ✅ Route+Perm registered → POST /api/users/create [user:insert]
2026/02/23 12:29:43 ✅ Route+Perm registered → GET /api/lookups/users-perm [user:view]
2026/02/23 12:29:43 ✅ Route+Perm registered → GET /api/lookups/roles-perm [user:view]
2026/02/23 12:29:43 ✅ Route+Perm registered → GET /api/lookups/departments-perm [user:view]
2026/02/23 12:29:44 ✅ Route+Perm registered → GET /api/lookups/modules [user:view]
2026/02/23 12:29:44 ✅ Route+Perm registered → GET /api/lookups/roles [user:view]
2026/02/23 12:29:45 ✅ Route+Perm registered → GET /api/lookups/departments [user:view]
2026/02/23 12:29:45 ✅ Route+Perm registered → GET /api/lookups/nebim-users [user:view]
2026/02/23 12:29:46 ✅ Route+Perm registered → GET /api/lookups/piyasalar [user:view]
2026/02/23 12:29:46 ✅ Route+Perm registered → GET /api/accounts [customer:view]
2026/02/23 12:29:46 ✅ Route+Perm registered → GET /api/customer-list [customer:view]
2026/02/23 12:29:47 ✅ Route+Perm registered → GET /api/today-currency [finance:view]
2026/02/23 12:29:47 ✅ Route+Perm registered → GET /api/export-pdf [finance:export]
2026/02/23 12:29:48 ✅ Route+Perm registered → GET /api/exportstamentheaderreport-pdf [finance:export]
2026/02/23 12:29:48 ✅ Route+Perm registered → GET /api/finance/customer-balances [finance:view]
2026/02/23 12:29:48 ✅ Route+Perm registered → GET /api/statements [finance:view]
2026/02/23 12:29:49 ✅ Route+Perm registered → GET /api/statements/{id}/details [finance:view]
2026/02/23 12:29:49 ✅ Route+Perm registered → POST /api/order/create [order:insert]
2026/02/23 12:29:50 ✅ Route+Perm registered → POST /api/order/update [order:update]
2026/02/23 12:29:50 ✅ Route+Perm registered → GET /api/order/get/{id} [order:view]
2026/02/23 12:29:51 ✅ Route+Perm registered → GET /api/orders/list [order:view]
2026/02/23 12:29:51 ✅ Route+Perm registered → GET /api/orders/production-list [order:update]
2026/02/23 12:29:51 ✅ Route+Perm registered → GET /api/orders/production-items/{id} [order:view]
2026/02/23 12:29:52 ✅ Route+Perm registered → POST /api/orders/production-items/{id}/insert-missing [order:update]
2026/02/23 12:29:52 ✅ Route+Perm registered → POST /api/orders/production-items/{id}/validate [order:update]
2026/02/23 12:29:53 ✅ Route+Perm registered → POST /api/orders/production-items/{id}/apply [order:update]
2026/02/23 12:29:53 ✅ Route+Perm registered → GET /api/orders/close-ready [order:update]
2026/02/23 12:29:54 ✅ Route+Perm registered → POST /api/orders/bulk-close [order:update]
2026/02/23 12:29:54 ✅ Route+Perm registered → GET /api/orders/export [order:export]
2026/02/23 12:29:54 ✅ Route+Perm registered → GET /api/order/check/{id} [order:view]
2026/02/23 12:29:55 ✅ Route+Perm registered → POST /api/order/validate [order:insert]
2026/02/23 12:29:55 ✅ Route+Perm registered → GET /api/order/pdf/{id} [order:export]
2026/02/23 12:29:56 ✅ Route+Perm registered → GET /api/order-inventory [order:view]
2026/02/23 12:29:56 ✅ Route+Perm registered → GET /api/orderpricelistb2b [order:view]
2026/02/23 12:29:57 ✅ Route+Perm registered → GET /api/min-price [order:view]
2026/02/23 12:29:57 ✅ Route+Perm registered → GET /api/products [order:view]
2026/02/23 12:29:57 ✅ Route+Perm registered → GET /api/product-detail [order:view]
2026/02/23 12:29:58 ✅ Route+Perm registered → GET /api/product-colors [order:view]
2026/02/23 12:29:58 ✅ Route+Perm registered → GET /api/product-colorsize [order:view]
2026/02/23 12:29:59 ✅ Route+Perm registered → GET /api/product-secondcolor [order:view]
2026/02/23 12:29:59 ✅ Route+Perm registered → GET /api/roles [user:view]
2026/02/23 12:29:59 ✅ Route+Perm registered → GET /api/departments [user:view]
2026/02/23 12:30:00 ✅ Route+Perm registered → GET /api/piyasalar [user:view]
2026/02/23 12:30:01 ✅ Route+Perm registered → POST /api/roles/{id}/departments [user:update]
2026/02/23 12:30:01 ✅ Route+Perm registered → POST /api/roles/{id}/piyasalar [user:update]
2026/02/23 12:30:01 ✅ Route+Perm registered → POST /api/users/{id}/roles [user:update]
2026/02/23 12:30:02 ✅ Route+Perm registered → POST /api/admin/users/{id}/piyasa-sync [admin:user.update]
2026/02/23 12:30:02 🌍 CORS Allowed Origin: http://ss.baggi.com.tr/app
2026/02/23 12:30:02 🚀 Server running at: 0.0.0.0:8080
2026/02/23 12:30:43 ➡️ POST /api/auth/login | auth=false
2026/02/23 12:30:44 🔎 LOGIN DEBUG | mk_user_found=false err=mk_user not found hash_len=0
2026/02/23 12:30:44 🟡 LEGACY LOGIN PATH: x
2026/02/23 12:30:44 🟡 LEGACY LOGIN QUERY HIT: x
2026/02/23 12:30:44 ❌ LEGACY SCAN ERROR: sql: no rows in result set
2026/02/23 12:30:44 ⬅️ POST /api/auth/login | status=401 | 279.0065ms
2026/02/23 12:30:44 ⚠️ LOGGER: claims is NIL
2026/02/23 12:30:44 🧾 auditlog INSERT | actor_dfusr=<nil> actor_user=<nil> role=public nav /api/auth/login target=<nil>
2026/02/23 12:30:50 ➡️ POST /api/auth/login | auth=false
2026/02/23 12:30:50 🧪 MK USER FROM DB
2026/02/23 12:30:50 🧪 ID=5 role_id=3 role_code='admin' depts=[UST_YONETIM]
2026/02/23 12:30:50 🔎 LOGIN DEBUG | mk_user_found=true err=<nil> hash_len=60
2026/02/23 12:30:50 🧪 LOGIN RESPONSE USER DEBUG
2026/02/23 12:30:50 🧪 user.ID = 5
2026/02/23 12:30:50 🧪 user.Username = mehmet.keçeci
2026/02/23 12:30:50 🧪 user.RoleID = 3
2026/02/23 12:30:50 🧪 user.RoleCode = 'admin'
2026/02/23 12:30:50 🧪 user.IsActive = true
2026/02/23 12:30:50 ⬅️ POST /api/auth/login | status=200 | 593.239ms
2026/02/23 12:30:50 ⚠️ LOGGER: claims is NIL
2026/02/23 12:30:50 🧾 auditlog INSERT | actor_dfusr=<nil> actor_user=<nil> role=public nav /api/auth/login target=<nil>
2026/02/23 12:30:52 🔐 GLOBAL AUTH user=5 role=admin
2026/02/23 12:30:52 ➡️ GET /api/finance/customer-balances | auth=true
2026/02/23 12:30:52 AUTH_MIDDLEWARE PASS user=5 role=admin method=GET path=/api/finance/customer-balances
2026/02/23 12:30:52 🔐 PERM CHECK user=5 role=3 dept=[UST_YONETIM] finance:view
2026/02/23 12:30:53 ↳ ROLE+DEPT OVERRIDE = true
2026/02/23 12:33:21 ⬅️ GET /api/finance/customer-balances | status=200 | 2m28.8586087s
2026/02/23 12:33:21 ✅ LOGGER CLAIMS user=mehmet.keçeci role=admin id=5
2026/02/23 12:33:21 🧾 auditlog INSERT | actor_dfusr=5 actor_user=mehmet.keçeci role=admin nav /api/finance/customer-balances target=<nil>
2026/02/23 13:40:17 ➡️ POST /api/auth/refresh | auth=false
2026/02/23 13:40:18 ⬅️ POST /api/auth/refresh | status=200 | 852.618ms
2026/02/23 13:40:18 ⚠️ LOGGER: claims is NIL
2026/02/23 13:40:18 🧾 auditlog INSERT | actor_dfusr=<nil> actor_user=<nil> role=public nav /api/auth/refresh target=<nil>
2026/02/23 13:40:18 🔐 GLOBAL AUTH user=5 role=admin
2026/02/23 13:40:18 ➡️ GET /api/finance/customer-balances | auth=true
2026/02/23 13:40:18 AUTH_MIDDLEWARE PASS user=5 role=admin method=GET path=/api/finance/customer-balances
2026/02/23 13:40:18 🔐 PERM CHECK user=5 role=3 dept=[UST_YONETIM] finance:view
2026/02/23 13:40:19 ↳ ROLE+DEPT OVERRIDE = true
2026/02/23 13:42:46 ⬅️ GET /api/finance/customer-balances | status=200 | 2m27.9525306s
2026/02/23 13:42:46 ✅ LOGGER CLAIMS user=mehmet.keçeci role=admin id=5
2026/02/23 13:42:46 🧾 auditlog INSERT | actor_dfusr=5 actor_user=mehmet.keçeci role=admin nav /api/finance/customer-balances target=<nil>
exit status 1

View File

@@ -0,0 +1,69 @@
package main
import (
"bssapp-backend/routes"
"database/sql"
"log"
"os"
"strconv"
"strings"
"time"
)
// startTranslationSyncScheduler launches a background loop that runs the
// translation sync job once per day at a configurable hour.
//
// Environment variables:
//   - TRANSLATION_SYNC_ENABLED: "0", "false" or "off" disables the scheduler.
//   - TRANSLATION_SYNC_HOUR:    hour of day (0-23) to run at; defaults to 4.
//   - TRANSLATION_SYNC_LIMIT:   positive per-run row limit; defaults to 30000.
//
// NOTE(review): the goroutine has no stop mechanism (no context/channel); it
// lives for the remainder of the process — confirm that is intended.
func startTranslationSyncScheduler(pgDB *sql.DB, mssqlDB *sql.DB) {
	switch strings.TrimSpace(strings.ToLower(os.Getenv("TRANSLATION_SYNC_ENABLED"))) {
	case "0", "false", "off":
		log.Println("🛑 Translation sync scheduler disabled")
		return
	}

	// Hour of day to fire at; unparsable or out-of-range values keep the default.
	runHour := 4
	if v := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_HOUR")); v != "" {
		if h, err := strconv.Atoi(v); err == nil && h >= 0 && h <= 23 {
			runHour = h
		}
	}

	// Per-run row cap; unparsable or non-positive values keep the default.
	rowLimit := 30000
	if v := strings.TrimSpace(os.Getenv("TRANSLATION_SYNC_LIMIT")); v != "" {
		if n, err := strconv.Atoi(v); err == nil && n > 0 {
			rowLimit = n
		}
	}

	go func() {
		for {
			runAt := nextRunAt(time.Now(), runHour)
			delay := time.Until(runAt)
			log.Printf("🕓 Translation sync next run at %s (in %s)", runAt.Format(time.RFC3339), delay.Round(time.Second))
			time.Sleep(delay)

			res, err := routes.PerformTranslationSync(pgDB, mssqlDB, routes.TranslationSyncOptions{
				AutoTranslate: true,
				Languages:     []string{"en", "de", "it", "es", "ru", "ar"},
				Limit:         rowLimit,
				OnlyNew:       true,
			})
			if err != nil {
				// Log and fall through to scheduling the next daily run.
				log.Printf("❌ Translation sync failed: %v", err)
				continue
			}
			log.Printf(
				"✅ Translation sync done: seeds=%d affected=%d auto_translated=%d langs=%v",
				res.SeedCount,
				res.AffectedCount,
				res.AutoTranslated,
				res.TargetLangs,
			)
		}
	}()
}
func nextRunAt(now time.Time, hour int) time.Time {
next := time.Date(now.Year(), now.Month(), now.Day(), hour, 0, 0, 0, now.Location())
if !next.After(now) {
next = next.Add(24 * time.Hour)
}
return next
}

View File

@@ -146,7 +146,11 @@ createQuasarApp(createApp, quasarUserOptions)
return Promise[ method ]([
import(/* webpackMode: "eager" */ 'boot/dayjs')
import(/* webpackMode: "eager" */ 'boot/dayjs'),
import(/* webpackMode: "eager" */ 'boot/locale'),
import(/* webpackMode: "eager" */ 'boot/resizeObserverGuard')
]).then(bootFiles => {
const boot = mapFn(bootFiles).filter(entry => typeof entry === 'function')

Some files were not shown because too many files have changed in this diff Show More