latest-before-monorepo

cureb 2026-02-09 21:47:55 +00:00
parent 5eb79565c9
commit 5ac0a9e479
51 changed files with 3917 additions and 285 deletions

View File

@ -1,3 +1,10 @@
const { toSafeRedirectUrl } = require("../../utils/urlSafety")
const {
sanitizeDealDescriptionHtml,
sanitizeOptionalPlainText,
sanitizeRequiredPlainText,
} = require("../../utils/inputSanitizer")
function mapCreateDealRequestToDealCreateData(payload, userId) {
const {
title,
@ -13,26 +20,28 @@ function mapCreateDealRequestToDealCreateData(payload, userId) {
discountValue,
} = payload
const normalizedCouponCode =
couponCode === undefined || couponCode === null
? null
: String(couponCode).trim() || null
const hasUrl = Boolean(url)
const normalizedTitle = sanitizeRequiredPlainText(title, { fieldName: "TITLE", maxLength: 300 })
const normalizedDescription = sanitizeDealDescriptionHtml(description)
const normalizedCouponCode = sanitizeOptionalPlainText(couponCode, { maxLength: 120 })
const normalizedLocation = sanitizeOptionalPlainText(location, { maxLength: 150 })
const normalizedSellerName = sanitizeOptionalPlainText(sellerName ?? customSeller, {
maxLength: 120,
})
const normalizedUrl = toSafeRedirectUrl(url)
const hasUrl = Boolean(normalizedUrl)
const saleType = !hasUrl ? "OFFLINE" : normalizedCouponCode ? "CODE" : "ONLINE"
const hasPrice = price != null
const normalizedDiscountType = hasPrice ? null : discountType ?? null
const normalizedDiscountValue = hasPrice ? null : discountValue ?? null
const normalizedSellerName = sellerName ?? customSeller ?? null
return {
title,
description: description ?? null,
url: url ?? null,
title: normalizedTitle,
description: normalizedDescription,
url: normalizedUrl,
price: price ?? null,
originalPrice: originalPrice ?? null,
couponCode: normalizedCouponCode,
location: location ?? null,
location: normalizedLocation,
discountType: normalizedDiscountType,
discountValue: normalizedDiscountValue,
saletype: saleType,
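Every outbound link now passes through toSafeRedirectUrl before saleType is derived, so a deal with an unusable URL is classified OFFLINE instead of carrying raw input forward. A minimal sketch of the kind of guard this implies, assuming only plain http(s) targets are accepted (the shipped utils/urlSafety may differ):
// Illustrative sketch only; the actual utils/urlSafety implementation may differ.
function toSafeRedirectUrlSketch(value) {
  if (typeof value !== "string" || !value.trim()) return null
  let parsed
  try {
    parsed = new URL(value.trim())
  } catch {
    return null
  }
  // Reject javascript:, data:, file: and anything else that is not plain http(s).
  if (parsed.protocol !== "http:" && parsed.protocol !== "https:") return null
  return parsed.toString()
}
// toSafeRedirectUrlSketch("javascript:alert(1)") -> null, so hasUrl stays false.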

View File

@ -1,5 +1,6 @@
const formatDateAsString = (value) =>
value instanceof Date ? value.toISOString() : value ?? null
const { normalizeMediaPath } = require("../../utils/mediaPath")
function mapCommentToDealCommentResponse(comment) {
return {
@ -18,7 +19,7 @@ function mapCommentToDealCommentResponse(comment) {
user: {
id: comment.user.id,
username: comment.user.username,
avatarUrl: comment.user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(comment.user.avatarUrl) ?? null,
},
}
}
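The same normalizeMediaPath wrapper is applied to every avatar, badge icon, and deal image URL in the adapters below. Its source is not part of this commit view; a plausible sketch, assuming it collapses absolute storage URLs and bare keys onto one relative path (names here are hypothetical):
// Hypothetical sketch of utils/mediaPath; the shipped helper may differ.
function normalizeMediaPathSketch(value) {
  if (typeof value !== "string" || !value.trim()) return null
  const trimmed = value.trim()
  if (trimmed.startsWith("/")) return trimmed // already a relative media path
  try {
    return new URL(trimmed).pathname // absolute URL: keep the path, drop the host
  } catch {
    return `/${trimmed}` // bare storage key like "avatars/42.webp"
  }
}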

View File

@ -1,4 +1,5 @@
const formatDateAsString = (value) => (value instanceof Date ? value.toISOString() : value ?? null)
const { normalizeMediaPath } = require("../../utils/mediaPath")
function mapDealToDealCardResponse(deal) {
return {
@ -30,7 +31,7 @@ function mapDealToDealCardResponse(deal) {
user: {
id: deal.user.id,
username: deal.user.username,
avatarUrl: deal.user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(deal.user.avatarUrl) ?? null,
},
seller: deal.seller
@ -43,7 +44,7 @@ function mapDealToDealCardResponse(deal) {
url: null,
},
imageUrl: deal.images?.[0]?.imageUrl || "",
imageUrl: normalizeMediaPath(deal.images?.[0]?.imageUrl) || "",
}
}

View File

@ -1,5 +1,6 @@
// adapters/responses/dealDetail.adapter.js
const { mapBreadcrumbToResponse } = require("./breadCrumb.adapter")
const { normalizeMediaPath } = require("../../utils/mediaPath")
const formatDateAsString = (value) =>
value instanceof Date ? value.toISOString() : value ?? null
@ -35,7 +36,7 @@ function mapSimilarDealItem(d) {
title: d.title,
price: d.price ?? null,
score: Number.isFinite(d.score) ? d.score : 0,
imageUrl: d.imageUrl || "",
imageUrl: normalizeMediaPath(d.imageUrl) || "",
sellerName: d.sellerName || "Bilinmiyor",
createdAt: formatDateAsString(d.createdAt), // SimilarDealSchema: nullable OK
// url: d.url ?? null,
@ -78,7 +79,7 @@ function mapDealToDealDetailResponse(deal) {
user: {
id: deal.user.id,
username: deal.user.username,
avatarUrl: deal.user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(deal.user.avatarUrl) ?? null,
},
userStats: {
totalLikes: deal.userStats?.totalLikes ?? 0,
@ -100,7 +101,7 @@ function mapDealToDealDetailResponse(deal) {
images: (deal.images || []).map((img) => ({
id: img.id,
imageUrl: img.imageUrl,
imageUrl: normalizeMediaPath(img.imageUrl) || "",
order: img.order,
})),
@ -124,7 +125,7 @@ function mapDealToDealDetailResponse(deal) {
user: {
id: comment.user.id,
username: comment.user.username,
avatarUrl: comment.user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(comment.user.avatarUrl) ?? null,
},
}
}),

View File

@ -1,3 +1,5 @@
const { normalizeMediaPath } = require("../../utils/mediaPath")
function mapMeRequestToUserId(req) {
// authMiddleware -> req.user.userId
return req.user.userId;
@ -8,7 +10,7 @@ function mapMeResultToResponse(user) {
id: user.id,
username: user.username,
email: user.email,
avatarUrl: user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(user.avatarUrl) ?? null,
role: user.role,
};
}

View File

@ -1,12 +1,13 @@
const formatDateAsString = (value) =>
value instanceof Date ? value.toISOString() : value ?? null
const { normalizeMediaPath } = require("../../utils/mediaPath")
// adapters/responses/publicUser.adapter.js
function mapUserToPublicUserSummaryResponse(user) {
return {
id: user.id,
username: user.username,
avatarUrl: user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(user.avatarUrl) ?? null,
}
}
@ -14,7 +15,7 @@ function mapUserToPublicUserDetailsResponse(user) {
return {
id: user.id,
username: user.username,
avatarUrl: user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(user.avatarUrl) ?? null,
email: user.email,
createdAt: formatDateAsString(user.createdAt), // ISO string
}

View File

@ -3,6 +3,7 @@ const dealCardAdapter = require("./dealCard.adapter")
const dealCommentAdapter = require("./comment.adapter")
const publicUserAdapter = require("./publicUser.adapter") // create this adapter if it does not exist yet
const userProfileStatsAdapter = require("./userProfileStats.adapter")
const { normalizeMediaPath } = require("../../utils/mediaPath")
const formatDateAsString = (value) =>
value instanceof Date ? value.toISOString() : value ?? null
@ -14,7 +15,7 @@ function mapUserBadgeToResponse(item) {
? {
id: item.badge.id,
name: item.badge.name,
iconUrl: item.badge.iconUrl ?? null,
iconUrl: normalizeMediaPath(item.badge.iconUrl) ?? null,
description: item.badge.description ?? null,
}
: null,

View File

@ -0,0 +1,161 @@
const prisma = require("./client")
const DEFAULT_SATURATION_RATIO = 0.3
const DEFAULT_TX_USER_CHUNK_SIZE = Math.max(
1,
Number(process.env.USER_INTEREST_DB_TX_USER_CHUNK_SIZE) || 200
)
function getDb(db) {
return db || prisma
}
function normalizePositiveInt(value) {
const num = Number(value)
if (!Number.isInteger(num) || num <= 0) return null
return num
}
function normalizePoints(value) {
const num = Number(value)
if (!Number.isFinite(num) || num <= 0) return null
return Math.floor(num)
}
function normalizeScores(raw) {
if (!raw || typeof raw !== "object" || Array.isArray(raw)) return {}
return { ...raw }
}
function normalizeSaturationRatio(value) {
const num = Number(value)
if (!Number.isFinite(num)) return DEFAULT_SATURATION_RATIO
if (num <= 0 || num >= 1) return DEFAULT_SATURATION_RATIO
return num
}
function getMaxAllowedBySaturation({ currentCategoryScore, totalScore, ratio }) {
const current = Number(currentCategoryScore) || 0
const total = Number(totalScore) || 0
const otherTotal = Math.max(0, total - current)
if (otherTotal <= 0) return Number.POSITIVE_INFINITY
return Math.floor((otherTotal * ratio) / (1 - ratio))
}
function aggregateIncrements(increments = []) {
const map = new Map()
for (const item of Array.isArray(increments) ? increments : []) {
const userId = normalizePositiveInt(item?.userId)
const categoryId = normalizePositiveInt(item?.categoryId)
const points = normalizePoints(item?.points)
if (!userId || !categoryId || !points) continue
const key = `${userId}:${categoryId}`
map.set(key, (map.get(key) || 0) + points)
}
const groupedByUser = new Map()
for (const [key, points] of map.entries()) {
const [userIdRaw, categoryIdRaw] = key.split(":")
const userId = Number(userIdRaw)
const categoryId = Number(categoryIdRaw)
if (!groupedByUser.has(userId)) groupedByUser.set(userId, [])
groupedByUser.get(userId).push({ categoryId, points })
}
return groupedByUser
}
function chunkEntries(entries = [], size = DEFAULT_TX_USER_CHUNK_SIZE) {
const normalizedSize = Math.max(1, Number(size) || DEFAULT_TX_USER_CHUNK_SIZE)
const chunks = []
for (let i = 0; i < entries.length; i += normalizedSize) {
chunks.push(entries.slice(i, i + normalizedSize))
}
return chunks
}
async function getUserInterestProfile(userId, db) {
const uid = normalizePositiveInt(userId)
if (!uid) return null
const p = getDb(db)
const rows = await p.$queryRawUnsafe(
'SELECT "userId", "categoryScores", "totalScore", "createdAt", "updatedAt" FROM "UserInterestProfile" WHERE "userId" = $1 LIMIT 1',
uid
)
return Array.isArray(rows) && rows.length ? rows[0] : null
}
async function applyInterestIncrementsBatch(increments = [], options = {}, db) {
const groupedByUser = aggregateIncrements(increments)
if (!groupedByUser.size) {
return { updated: 0, appliedPoints: 0 }
}
const saturationRatio = normalizeSaturationRatio(options?.saturationRatio)
let updated = 0
let appliedPoints = 0
const userEntries = Array.from(groupedByUser.entries())
const chunks = chunkEntries(userEntries)
for (const chunk of chunks) {
await getDb(db).$transaction(async (tx) => {
for (const [userId, entries] of chunk) {
await tx.$executeRawUnsafe(
'INSERT INTO "UserInterestProfile" ("userId", "categoryScores", "totalScore", "createdAt", "updatedAt") VALUES ($1, \'{}\'::jsonb, 0, NOW(), NOW()) ON CONFLICT ("userId") DO NOTHING',
userId
)
const rows = await tx.$queryRawUnsafe(
'SELECT "userId", "categoryScores", "totalScore" FROM "UserInterestProfile" WHERE "userId" = $1 FOR UPDATE',
userId
)
const profile = Array.isArray(rows) && rows.length ? rows[0] : null
if (!profile) continue
const scores = normalizeScores(profile.categoryScores)
let totalScore = Number(profile.totalScore || 0)
let changed = false
for (const entry of entries) {
const categoryKey = String(entry.categoryId)
const currentCategoryScore = Number(scores[categoryKey] || 0)
const maxAllowedBySaturation = getMaxAllowedBySaturation({
currentCategoryScore,
totalScore,
ratio: saturationRatio,
})
let nextCategoryScore = currentCategoryScore + entry.points
if (Number.isFinite(maxAllowedBySaturation)) {
nextCategoryScore = Math.min(nextCategoryScore, maxAllowedBySaturation)
}
const applied = Math.max(0, Math.floor(nextCategoryScore - currentCategoryScore))
if (applied <= 0) continue
scores[categoryKey] = currentCategoryScore + applied
totalScore += applied
appliedPoints += applied
changed = true
}
if (!changed) continue
await tx.$executeRawUnsafe(
'UPDATE "UserInterestProfile" SET "categoryScores" = $1::jsonb, "totalScore" = $2, "updatedAt" = NOW() WHERE "userId" = $3',
JSON.stringify(scores),
totalScore,
userId
)
updated += 1
}
})
}
return { updated, appliedPoints }
}
module.exports = {
getUserInterestProfile,
applyInterestIncrementsBatch,
}
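The saturation cap bounds each category at `ratio` of the whole profile: the largest allowed score c satisfies c / (c + otherTotal) = ratio, hence c = otherTotal * ratio / (1 - ratio). A worked example with the default 0.3 ratio:
// Worked example (default ratio 0.3):
// profile { "5": 10, "9": 30 }, totalScore 40, increment of +50 for category 5
//   otherTotal = 40 - 10 = 30
//   cap        = floor(30 * 0.3 / 0.7) = 12
//   applied    = max(0, min(10 + 50, 12) - 10) = 2
// Category 5 ends at 12 of a new total of 42 (~28.6%), just under the 30% ceiling;
// the remaining 48 points are silently dropped rather than queued.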

View File

@ -3,6 +3,7 @@ const { getRedisConnectionOptions } = require("../services/redis/connection")
const connection = getRedisConnectionOptions()
const queue = new Queue("db-sync", { connection })
const DB_SYNC_REPEAT_MS = Math.max(2000, Number(process.env.DB_SYNC_REPEAT_MS) || 10000)
async function ensureDbSyncRepeatable() {
return queue.add(
@ -10,7 +11,7 @@ async function ensureDbSyncRepeatable() {
{},
{
jobId: "db-sync-batch",
repeat: { every: 30000 },
repeat: { every: DB_SYNC_REPEAT_MS },
removeOnComplete: true,
removeOnFail: 200,
}
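One operational note: BullMQ keys repeatable jobs by name plus their repeat options, so switching `every` from the hard-coded 30000 to DB_SYNC_REPEAT_MS leaves the old 30s scheduler registered until it is removed. A hedged cleanup sketch:
// One-off cleanup sketch; field names follow BullMQ's repeatable-job listing,
// where `id` is the jobId option and `key` identifies the scheduler entry.
async function removeStaleDbSyncRepeat() {
  const repeatables = await queue.getRepeatableJobs()
  for (const job of repeatables) {
    if (job.id === "db-sync-batch" && Number(job.every) === 30000) {
      await queue.removeRepeatableByKey(job.key)
    }
  }
}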

1776
package-lock.json generated

File diff suppressed because it is too large

View File

@ -14,9 +14,9 @@
"license": "ISC",
"type": "commonjs",
"dependencies": {
"@aws-sdk/client-s3": "^3.985.0",
"@prisma/client": "^6.18.0",
"@shared/contracts": "file:../Contracts",
"@supabase/supabase-js": "^2.78.0",
"axios": "^1.11.0",
"bcryptjs": "^3.0.2",
"bullmq": "^5.67.0",

View File

@ -0,0 +1,16 @@
-- CreateTable
CREATE TABLE "UserInterestProfile" (
"userId" INTEGER NOT NULL,
"categoryScores" JSONB NOT NULL DEFAULT '{}',
"totalScore" INTEGER NOT NULL DEFAULT 0,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "UserInterestProfile_pkey" PRIMARY KEY ("userId")
);
-- CreateIndex
CREATE INDEX "UserInterestProfile_updatedAt_idx" ON "UserInterestProfile"("updatedAt");
-- AddForeignKey
ALTER TABLE "UserInterestProfile" ADD CONSTRAINT "UserInterestProfile_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "UserInterestProfile" ALTER COLUMN "updatedAt" DROP DEFAULT;

View File

@ -0,0 +1,2 @@
ALTER TABLE "Notification"
ADD COLUMN "extras" JSONB;

View File

@ -49,6 +49,7 @@ model User {
auditEvents AuditEvent[]
userNotes UserNote[] @relation("UserNotes")
notesAuthored UserNote[] @relation("UserNotesAuthor")
interestProfile UserInterestProfile?
}
model UserNote {
@ -466,6 +467,18 @@ model AuditEvent {
@@index([action, createdAt])
}
model UserInterestProfile {
userId Int @id
categoryScores Json @default("{}")
totalScore Int @default(0)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@index([updatedAt])
}
model DealAnalyticsTotal {
dealId Int @id
impressions Int @default(0)
@ -499,6 +512,7 @@ model Notification {
userId Int
message String
type String @default("INFO")
extras Json?
createdAt DateTime @default(now())
readAt DateTime?
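The new nullable `extras` column lets a notification carry structured routing data next to its display message. A sketch of what a comment-reply row can look like (ids here are hypothetical; the commit itself writes rows through the dbSync queue rather than directly):
// Sketch: persisting a reply notification with structured extras.
async function createReplyNotification(prisma) {
  return prisma.notification.create({
    data: {
      userId: 7,
      message: "Yorumuna cevap geldi.",
      type: "COMMENT_REPLY",
      extras: { dealId: 42, commentId: 99, parentCommentId: 17 },
    },
  })
}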

View File

@ -9,6 +9,13 @@ function randInt(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min
}
function pickRandomCategoryId(categoryIds = [], fallbackCategoryId = 0) {
if (Array.isArray(categoryIds) && categoryIds.length) {
return categoryIds[randInt(0, categoryIds.length - 1)]
}
return fallbackCategoryId
}
// Stable real photo links (no redirects, little hotlink hassle)
function realImage(seed, w = 1200, h = 900) {
return `https://picsum.photos/seed/${encodeURIComponent(seed)}/${w}/${h}`
@ -246,7 +253,7 @@ function loadDealsJson(filePath) {
// Seed from deals.json + 3 photos per deal + score 0-200 + date distribution:
// - 70%: last 5 days
// - 30%: 9-11 days ago
async function seedDealsFromJson({ userId, sellerId, categoryId, dealsFilePath }) {
async function seedDealsFromJson({ userId, sellerId, categoryIds = [], defaultCategoryId = 0, dealsFilePath }) {
const baseItems = loadDealsJson(dealsFilePath)
// Multiply out to 30 items (so title/url stay unique)
@ -309,7 +316,7 @@ async function seedDealsFromJson({ userId, sellerId, categoryId, dealsFilePath }
commentCount: randInt(0, 25),
userId,
sellerId,
categoryId,
categoryId: pickRandomCategoryId(categoryIds, defaultCategoryId),
score: randInt(0, 200),
createdAt,
}
@ -388,6 +395,12 @@ async function main() {
where: { slug: "pc-ssd" },
select: { id: true },
})
const availableCategoryIds = (
await prisma.category.findMany({
where: { isActive: true, id: { gt: 0 } },
select: { id: true },
})
).map((cat) => cat.id)
// ---------- TAGS ----------
await upsertTagBySlug("ssd", "SSD")
@ -414,7 +427,7 @@ async function main() {
commentCount: 1,
userId: user.id,
sellerId: amazon.id,
categoryId: catSSD?.id ?? 0,
categoryId: pickRandomCategoryId(availableCategoryIds, catSSD?.id ?? 0),
// score: randInt(0, 200), // modelinde varsa aç
}
@ -440,7 +453,8 @@ async function main() {
await seedDealsFromJson({
userId: user.id,
sellerId: amazon.id,
categoryId: catSSD?.id ?? 0,
categoryIds: availableCategoryIds,
defaultCategoryId: catSSD?.id ?? 0,
dealsFilePath,
})

View File

@ -18,6 +18,20 @@ const { AUDIT_ACTIONS } = require("../services/auditActions")
const { account } = endpoints
function attachNotificationExtras(validatedList = [], sourceList = []) {
const extrasById = new Map(
(Array.isArray(sourceList) ? sourceList : []).map((item) => [
Number(item?.id),
item?.extras ?? null,
])
)
return (Array.isArray(validatedList) ? validatedList : []).map((item) => ({
...item,
extras: extrasById.get(Number(item?.id)) ?? null,
}))
}
router.post(
"/avatar",
requireAuth,
@ -51,7 +65,9 @@ router.post(
router.get("/me", requireAuth, async (req, res) => {
try {
const user = await getUserProfile(req.auth.userId)
res.json(account.accountMeResponseSchema.parse(user))
const payload = account.accountMeResponseSchema.parse(user)
payload.notifications = attachNotificationExtras(payload.notifications, user?.notifications)
res.json(payload)
} catch (err) {
res.status(400).json({ error: err.message })
}
@ -79,7 +95,9 @@ router.get("/notifications", requireAuth, async (req, res) => {
try {
const input = account.accountNotificationsListRequestSchema.parse(req.query)
const payload = await getUserNotificationsPage(req.auth.userId, input.page, 10)
res.json(account.accountNotificationsListResponseSchema.parse(payload))
const validated = account.accountNotificationsListResponseSchema.parse(payload)
validated.results = attachNotificationExtras(validated.results, payload?.results)
res.json(validated)
} catch (err) {
res.status(400).json({ error: err.message })
}
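attachNotificationExtras exists presumably because the shared response schema's parse() strips keys it does not declare; the helper merges `extras` back in by id from the pre-validation payload. Illustrated with hypothetical values:
// Hypothetical illustration:
const validated = [{ id: 7, message: "Yorumuna cevap geldi.", type: "COMMENT_REPLY" }]
const source = [{ id: 7, extras: { dealId: 42, commentId: 99 } }]
attachNotificationExtras(validated, source)
// -> [{ id: 7, message: "...", type: "COMMENT_REPLY", extras: { dealId: 42, commentId: 99 } }]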

View File

@ -7,6 +7,7 @@ const { mapDealToDealDetailResponse } = require("../adapters/responses/dealDetai
const { mapPaginatedDealsToDealCardResponse } = require("../adapters/responses/dealCard.adapter")
const { getClientIp } = require("../utils/requestInfo")
const { queueDealImpressions } = require("../services/redis/dealAnalytics.service")
const { trackUserCategoryInterest, USER_INTEREST_ACTIONS } = require("../services/userInterest.service")
router.get("/:slug", async (req, res) => {
@ -53,6 +54,14 @@ router.get("/:slug/deals", optionalAuth, async (req, res) => {
userId: req.auth?.userId ?? null,
ip: getClientIp(req),
}).catch((err) => console.error("Deal impression queue failed:", err?.message || err))
if (req.auth?.userId) {
trackUserCategoryInterest({
userId: req.auth.userId,
categoryId: category.id,
action: USER_INTEREST_ACTIONS.CATEGORY_VISIT,
}).catch((err) => console.error("User interest track failed:", err?.message || err))
}
res.json({

View File

@ -21,6 +21,7 @@ const {
} = require("../services/deal.service")
const dealSaveService = require("../services/dealSave.service")
const dealReportService = require("../services/dealReport.service")
const personalizedFeedService = require("../services/personalizedFeed.service")
const { mapCreateDealRequestToDealCreateData } = require("../adapters/requests/dealCreate.adapter")
const { mapDealToDealDetailResponse } = require("../adapters/responses/dealDetail.adapter")
@ -31,18 +32,32 @@ const {
queueDealView,
queueDealClick,
} = require("../services/redis/dealAnalytics.service")
const { trackUserCategoryInterest, USER_INTEREST_ACTIONS } = require("../services/userInterest.service")
const { getOrCacheDeal } = require("../services/redis/dealCache.service")
const { enqueueAuditFromRequest, buildAuditMeta } = require("../services/audit.service")
const { AUDIT_ACTIONS } = require("../services/auditActions")
const { toSafeRedirectUrl } = require("../utils/urlSafety")
const { deals, users } = endpoints
function isUserInterestDebugEnabled() {
const raw = String(process.env.USER_INTEREST_DEBUG || "0").trim().toLowerCase()
return raw === "1" || raw === "true" || raw === "yes" || raw === "on"
}
function parsePage(value) {
const num = Number(value)
if (!Number.isInteger(num) || num < 1) return 1
return num
}
function logUserInterestDebug(label, payload = {}) {
if (!isUserInterestDebugEnabled()) return
try {
console.log(`[user-interest] ${label}`, payload)
} catch {}
}
const listQueryValidator = validate(deals.dealsListRequestSchema, "query", "validatedDealListQuery")
const buildViewer = (req) =>
@ -147,6 +162,32 @@ router.get("/new", requireApiKey, optionalAuth, listQueryValidator, createListHa
router.get("/hot", requireApiKey, optionalAuth, listQueryValidator, createListHandler("HOT"))
router.get("/trending", requireApiKey, optionalAuth, listQueryValidator, createListHandler("TRENDING"))
router.get("/for-you", requireApiKey, requireAuth, async (req, res) => {
try {
const page = parsePage(req.query.page)
const payload = await personalizedFeedService.getPersonalizedDeals({
userId: req.auth.userId,
page,
})
const response = deals.dealsListResponseSchema.parse(
mapPaginatedDealsToDealCardResponse(payload)
)
const dealIds = payload?.results?.map((deal) => deal.id) || []
queueDealImpressions({
dealIds,
userId: req.auth?.userId ?? null,
ip: getClientIp(req),
}).catch((err) => console.error("Deal impression queue failed:", err?.message || err))
res.json({ ...response, personalizedListId: payload.personalizedListId ?? null })
} catch (err) {
console.error(err)
const status = err.statusCode || 500
res.status(status).json({ error: err.message || "Sunucu hatasi" })
}
})
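A hedged sketch of the new endpoint from the client side; the /deals mount path and the counts are assumptions, the shape follows dealsListResponseSchema plus the extra personalizedListId field:
// GET /deals/for-you?page=2  (requires x-api-key and an authenticated user)
// -> {
//      page: 2, total: 120, totalPages: 6,
//      results: [/* deal cards */],
//      personalizedListId: "1739212345678"  // feed id, or null
//    }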
router.get("/search/suggest", optionalAuth, async (req, res) => {
try {
const q = String(req.query.q || "").trim()
@ -166,20 +207,51 @@ router.get("/search/suggest", optionalAuth, async (req, res) => {
})
// Resolve deal URL (SSR uses api key; user token optional)
router.post("/url", requireApiKey, optionalAuth, async (req, res) => {
router.post(
"/url",
(req, res, next) => {
logUserInterestDebug("deal-click-request", {
hasApiKeyHeader: Boolean(req.headers?.["x-api-key"]),
hasAuthorizationHeader: Boolean(req.headers?.authorization),
hasAtCookie: Boolean(req.cookies?.at),
dealIdRaw: req.body?.dealId ?? null,
})
return next()
},
requireApiKey,
optionalAuth,
async (req, res) => {
try {
const dealId = Number(req.body?.dealId)
if (!Number.isInteger(dealId) || dealId <= 0) {
logUserInterestDebug("deal-click-skip", {
reason: "invalid_deal_id",
dealIdRaw: req.body?.dealId ?? null,
})
return res.status(400).json({ error: "dealId invalid" })
}
const deal = await getOrCacheDeal(dealId, { ttlSeconds: 15 * 60 })
if (!deal) return res.status(404).json({ error: "Deal bulunamadi" })
if (!deal) {
logUserInterestDebug("deal-click-skip", {
reason: "deal_not_found",
dealId,
})
return res.status(404).json({ error: "Deal bulunamadi" })
}
if (deal.status === "PENDING" || deal.status === "REJECTED") {
const isOwner = req.auth?.userId && Number(deal.userId) === Number(req.auth.userId)
const isMod = req.auth?.role === "MOD" || req.auth?.role === "ADMIN"
if (!isOwner && !isMod) return res.status(404).json({ error: "Deal bulunamadi" })
if (!isOwner && !isMod) {
logUserInterestDebug("deal-click-skip", {
reason: "deal_not_visible_for_user",
dealId,
status: deal.status,
userId: req.auth?.userId ?? null,
})
return res.status(404).json({ error: "Deal bulunamadi" })
}
}
const userId = req.auth?.userId ?? null
@ -187,8 +259,33 @@ router.post("/url", requireApiKey, optionalAuth, async (req, res) => {
queueDealClick({ dealId, userId, ip }).catch((err) =>
console.error("Deal click queue failed:", err?.message || err)
)
if (userId) {
trackUserCategoryInterest({
userId,
categoryId: deal.categoryId,
action: USER_INTEREST_ACTIONS.DEAL_CLICK,
}).catch((err) => console.error("User interest track failed:", err?.message || err))
logUserInterestDebug("deal-click-track", {
dealId,
userId,
categoryId: deal.categoryId ?? null,
status: deal.status,
})
} else {
logUserInterestDebug("deal-click-skip", {
reason: "missing_auth_user",
dealId,
categoryId: deal.categoryId ?? null,
hasAuthorizationHeader: Boolean(req.headers?.authorization),
hasAtCookie: Boolean(req.cookies?.at),
})
}
res.json({ url: deal.url ?? null })
const safeUrl = toSafeRedirectUrl(deal.url)
if (!safeUrl) {
return res.status(422).json({ error: "Deal URL gecersiz" })
}
res.json({ url: safeUrl })
} catch (err) {
console.error(err)
res.status(500).json({ error: "Sunucu hatasi" })
@ -436,6 +533,13 @@ router.get(
userId: req.auth?.userId ?? null,
ip: getClientIp(req),
}).catch((err) => console.error("Deal view queue failed:", err?.message || err))
if (req.auth?.userId) {
trackUserCategoryInterest({
userId: req.auth.userId,
categoryId: deal.categoryId,
action: USER_INTEREST_ACTIONS.DEAL_VIEW,
}).catch((err) => console.error("User interest track failed:", err?.message || err))
}
const mapped = mapDealToDealDetailResponse(deal)
res.json(deals.dealDetailResponseSchema.parse(mapped))
@ -475,7 +579,8 @@ router.post(
res.json(deals.dealCreateResponseSchema.parse(mapped))
} catch (err) {
console.error(err)
res.status(500).json({ error: "Sunucu hatasi" })
const status = err.statusCode || 500
res.status(status).json({ error: status >= 500 ? "Sunucu hatasi" : err.message })
}
}
)

View File

@ -25,10 +25,9 @@ router.post(
const key = uuidv4()
const webpBuffer = await makeWebp(req.file.buffer, { quality: 40 })
const path = `misc/${req.auth.userId}/${key}.webp`
const path = `images/dealDescription/${key}.webp`
const url = await uploadImage({
bucket: "deal",
path,
fileBuffer: webpBuffer,
contentType: "image/webp",

View File

@ -21,6 +21,8 @@ const {
} = require("./redis/dbSync.service")
const { ensureCategoryIdCounter, generateCategoryId } = require("./redis/categoryId.service")
const { ensureSellerIdCounter, generateSellerId } = require("./redis/sellerId.service")
const { sanitizeOptionalPlainText } = require("../utils/inputSanitizer")
const { normalizeMediaPath } = require("../utils/mediaPath")
function httpError(statusCode, message) {
const err = new Error(message)
@ -29,11 +31,19 @@ function httpError(statusCode, message) {
}
function normalizeCategoryPayload(input = {}, fallback = {}) {
const name = input.name !== undefined ? String(input.name || "").trim() : fallback.name
const rawSlug = input.slug !== undefined ? String(input.slug || "").trim() : fallback.slug
const name =
input.name !== undefined
? sanitizeOptionalPlainText(input.name, { maxLength: 120 }) || ""
: sanitizeOptionalPlainText(fallback.name, { maxLength: 120 }) || ""
const rawSlug =
input.slug !== undefined
? sanitizeOptionalPlainText(input.slug, { maxLength: 160 }) || ""
: sanitizeOptionalPlainText(fallback.slug, { maxLength: 160 }) || ""
const slug = rawSlug ? slugify(rawSlug) : name ? slugify(name) : fallback.slug
const description =
input.description !== undefined ? String(input.description || "").trim() : fallback.description
input.description !== undefined
? sanitizeOptionalPlainText(input.description, { maxLength: 300 }) || ""
: sanitizeOptionalPlainText(fallback.description, { maxLength: 300 }) || ""
const parentId =
input.parentId !== undefined && input.parentId !== null
? Number(input.parentId)
@ -169,10 +179,18 @@ async function updateCategory(categoryId, input = {}) {
}
function normalizeSellerPayload(input = {}, fallback = {}) {
const name = input.name !== undefined ? String(input.name || "").trim() : fallback.name
const url = input.url !== undefined ? String(input.url || "").trim() : fallback.url
const name =
input.name !== undefined
? sanitizeOptionalPlainText(input.name, { maxLength: 120 }) || ""
: sanitizeOptionalPlainText(fallback.name, { maxLength: 120 }) || ""
const url =
input.url !== undefined
? sanitizeOptionalPlainText(input.url, { maxLength: 500 }) || ""
: sanitizeOptionalPlainText(fallback.url, { maxLength: 500 }) || ""
const sellerLogo =
input.sellerLogo !== undefined ? String(input.sellerLogo || "").trim() : fallback.sellerLogo
input.sellerLogo !== undefined
? normalizeMediaPath(sanitizeOptionalPlainText(input.sellerLogo, { maxLength: 500 }) || "") || ""
: normalizeMediaPath(sanitizeOptionalPlainText(fallback.sellerLogo, { maxLength: 500 }) || "") || ""
const isActive =
input.isActive !== undefined ? Boolean(input.isActive) : Boolean(fallback.isActive ?? true)
return { name, url: url ?? "", sellerLogo: sellerLogo ?? "", isActive }
@ -191,7 +209,7 @@ async function listSellersCached() {
id: seller.id,
name: seller.name,
url: seller.url ?? "",
sellerLogo: seller.sellerLogo ?? "",
sellerLogo: normalizeMediaPath(seller.sellerLogo) ?? "",
isActive: seller.isActive !== undefined ? Boolean(seller.isActive) : true,
}))
}
@ -230,7 +248,7 @@ async function createSeller(input = {}, { createdById } = {}) {
}).catch((err) => console.error("DB sync seller create failed:", err?.message || err))
if (input.domain) {
const domain = String(input.domain || "").trim().toLowerCase()
const domain = (sanitizeOptionalPlainText(input.domain, { maxLength: 255 }) || "").toLowerCase()
if (domain) {
await setSellerDomainInRedis(domain, id)
queueSellerDomainUpsert({ sellerId: id, domain, createdById: creatorId }).catch((err) =>
@ -284,7 +302,7 @@ async function updateSeller(sellerId, input = {}, { createdById } = {}) {
}).catch((err) => console.error("DB sync seller update failed:", err?.message || err))
if (input.domain) {
const domain = String(input.domain || "").trim().toLowerCase()
const domain = (sanitizeOptionalPlainText(input.domain, { maxLength: 255 }) || "").toLowerCase()
if (domain) {
await setSellerDomainInRedis(domain, id)
queueSellerDomainUpsert({ sellerId: id, domain, createdById: creatorId }).catch((err) =>

View File

@ -8,6 +8,8 @@ const refreshTokenDb = require("../db/refreshToken.db")
const { queueAuditEvent } = require("./redis/dbSync.service")
const { AUDIT_ACTIONS } = require("./auditActions")
const { buildAuditMeta } = require("./audit.service")
const { sanitizeOptionalPlainText } = require("../utils/inputSanitizer")
const { normalizeMediaPath } = require("../utils/mediaPath")
const REUSE_GRACE_MS = Number(process.env.REFRESH_REUSE_GRACE_MS || 10000)
@ -51,7 +53,7 @@ function mapUserPublic(user) {
id: user.id,
username: user.username,
email: user.email,
avatarUrl: user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(user.avatarUrl) ?? null,
role: user.role,
}
}
@ -100,8 +102,13 @@ async function register({ username, email, password, meta = {} }) {
const existingUser = await authDb.findUserByEmail(email)
if (existingUser) throw httpError(400, "Bu e-posta zaten kayitli.")
const normalizedUsername = sanitizeOptionalPlainText(username, { maxLength: 18 })
if (!normalizedUsername || normalizedUsername.length < 5) {
throw httpError(400, "Kullanici adi gecersiz.")
}
const passwordHash = await bcrypt.hash(password, 10)
const user = await authDb.createUser({ username, email, passwordHash })
const user = await authDb.createUser({ username: normalizedUsername, email, passwordHash })
const { token: accessToken } = signAccessToken(user)

View File

@ -20,8 +20,7 @@ async function updateUserAvatar(userId, file) {
const webpBuffer = await makeWebp(buffer, { quality: 80 })
const imageUrl = await uploadImage({
bucket: "avatars",
path: `${userId}_${Date.now()}.webp`,
path: `avatars/${userId}_${Date.now()}.webp`,
fileBuffer: webpBuffer,
contentType: "image/webp",
})

View File

@ -1,6 +1,8 @@
const badgeDb = require("../db/badge.db")
const userBadgeDb = require("../db/userBadge.db")
const userDb = require("../db/user.db")
const { sanitizeOptionalPlainText } = require("../utils/inputSanitizer")
const { normalizeMediaPath } = require("../utils/mediaPath")
function assertPositiveInt(value, name) {
const num = Number(value)
@ -11,8 +13,13 @@ function assertPositiveInt(value, name) {
function normalizeOptionalString(value) {
if (value === undefined) return undefined
if (value === null) return null
const trimmed = String(value).trim()
return trimmed ? trimmed : null
return sanitizeOptionalPlainText(value, { maxLength: 500 })
}
function normalizeOptionalImagePath(value) {
if (value === undefined) return undefined
const normalized = normalizeMediaPath(value)
return normalized ?? null
}
async function listBadges() {
@ -20,12 +27,12 @@ async function listBadges() {
}
async function createBadge({ name, iconUrl, description }) {
const normalizedName = String(name || "").trim()
const normalizedName = sanitizeOptionalPlainText(name, { maxLength: 120 })
if (!normalizedName) throw new Error("Badge adı zorunlu.")
return badgeDb.createBadge({
name: normalizedName,
iconUrl: normalizeOptionalString(iconUrl),
iconUrl: normalizeOptionalImagePath(iconUrl),
description: normalizeOptionalString(description),
})
}
@ -35,11 +42,11 @@ async function updateBadge(badgeId, { name, iconUrl, description }) {
const data = {}
if (name !== undefined) {
const normalizedName = String(name || "").trim()
const normalizedName = sanitizeOptionalPlainText(name, { maxLength: 120 })
if (!normalizedName) throw new Error("Badge adı zorunlu.")
data.name = normalizedName
}
if (iconUrl !== undefined) data.iconUrl = normalizeOptionalString(iconUrl)
if (iconUrl !== undefined) data.iconUrl = normalizeOptionalImagePath(iconUrl)
if (description !== undefined) data.description = normalizeOptionalString(description)
if (!Object.keys(data).length) throw new Error("Güncellenecek alan yok.")
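Note the tri-state contract both normalizers keep: undefined means "leave the column untouched", null clears it, and a string is sanitized (and, for icons, normalized as a media path). For example:
// Tri-state behavior of normalizeOptionalImagePath:
normalizeOptionalImagePath(undefined)         // -> undefined (field omitted from update)
normalizeOptionalImagePath(null)              // -> null      (column cleared)
normalizeOptionalImagePath("icons/star.webp") // -> normalized path via normalizeMediaPath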

View File

@ -1,5 +1,107 @@
const categoryDb = require("../db/category.db")
const dealService = require("./deal.service")
const { listCategoriesFromRedis, setCategoriesInRedis, setCategoryInRedis } = require("./redis/categoryCache.service")
function normalizeCategory(category = {}) {
const id = Number(category.id)
if (!Number.isInteger(id) || id < 0) return null
const parentIdRaw = category.parentId
const parentId =
parentIdRaw === null || parentIdRaw === undefined ? null : Number(parentIdRaw)
return {
id,
name: category.name,
slug: String(category.slug || "").trim().toLowerCase(),
parentId: Number.isInteger(parentId) ? parentId : null,
isActive: category.isActive !== undefined ? Boolean(category.isActive) : true,
description: category.description ?? "",
}
}
function buildCategoryMaps(categories = []) {
const byId = new Map()
const bySlug = new Map()
categories.forEach((item) => {
const category = normalizeCategory(item)
if (!category) return
byId.set(category.id, category)
if (category.slug) bySlug.set(category.slug, category)
})
return { byId, bySlug }
}
function getCategoryBreadcrumbFromMap(categoryId, byId, { includeUndefined = false } = {}) {
const currentId = Number(categoryId)
if (!Number.isInteger(currentId)) return []
const path = []
const visited = new Set()
let nextId = currentId
while (true) {
if (visited.has(nextId)) break
visited.add(nextId)
const category = byId.get(nextId)
if (!category) break
if (includeUndefined || category.id !== 0) {
path.push({ id: category.id, name: category.name, slug: category.slug })
}
if (category.parentId === null || category.parentId === undefined) break
nextId = Number(category.parentId)
}
return path.reverse()
}
function getCategoryDescendantIdsFromMap(categoryId, categories = []) {
const rootId = Number(categoryId)
if (!Number.isInteger(rootId) || rootId <= 0) return []
const childrenByParent = new Map()
categories.forEach((item) => {
const category = normalizeCategory(item)
if (!category || category.parentId === null) return
const parentId = Number(category.parentId)
if (!Number.isInteger(parentId)) return
if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, [])
childrenByParent.get(parentId).push(category.id)
})
const seen = new Set([rootId])
const queue = [rootId]
while (queue.length) {
const current = queue.shift()
const children = childrenByParent.get(current) || []
children.forEach((childId) => {
if (seen.has(childId)) return
seen.add(childId)
queue.push(childId)
})
}
return Array.from(seen)
}
async function listCategoriesCached() {
let categories = await listCategoriesFromRedis()
if (categories.length) return categories
categories = await categoryDb.listCategories({
select: { id: true, name: true, slug: true, parentId: true, isActive: true, description: true },
orderBy: { id: "asc" },
})
if (categories.length) {
await setCategoriesInRedis(categories)
}
return categories
}
async function findCategoryBySlug(slug) {
const normalizedSlug = String(slug || "").trim()
@ -7,13 +109,25 @@ async function findCategoryBySlug(slug) {
throw new Error("INVALID_SLUG")
}
const categories = await listCategoriesCached()
if (categories.length) {
const { byId, bySlug } = buildCategoryMaps(categories)
const cachedCategory = bySlug.get(normalizedSlug.toLowerCase())
if (cachedCategory) {
const breadcrumb = getCategoryBreadcrumbFromMap(cachedCategory.id, byId)
return { category: cachedCategory, breadcrumb }
}
}
const category = await categoryDb.findCategoryBySlug(normalizedSlug)
if (!category) {
throw new Error("CATEGORY_NOT_FOUND")
}
const normalizedCategory = normalizeCategory(category) || category
await setCategoryInRedis(normalizedCategory)
const breadcrumb = await categoryDb.getCategoryBreadcrumb(category.id)
return { category, breadcrumb }
return { category: normalizedCategory, breadcrumb }
}
async function getDealsByCategoryId(categoryId, { page = 1, limit = 10, filters = {}, viewer = null, scope = "USER" } = {}) {
@ -22,7 +136,14 @@ async function getDealsByCategoryId(categoryId, { page = 1, limit = 10, filters
throw new Error("INVALID_CATEGORY_ID")
}
const categoryIds = await categoryDb.getCategoryDescendantIds(normalizedId)
let categoryIds = []
const categories = await listCategoriesCached()
if (categories.length) {
categoryIds = getCategoryDescendantIdsFromMap(normalizedId, categories)
}
if (!categoryIds.length) {
categoryIds = await categoryDb.getCategoryDescendantIds(normalizedId)
}
return dealService.getDeals({
preset: "NEW",
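getCategoryDescendantIdsFromMap is a breadth-first walk over the parentId edges, so a category page serves deals for the whole subtree from the Redis-cached list and only falls back to categoryDb when the cache is empty. A small example over a hypothetical tree:
// Hypothetical category tree: 2 and 3 under 1, 4 under 3.
const categories = [
  { id: 1, slug: "pc", parentId: null },
  { id: 2, slug: "pc-ssd", parentId: 1 },
  { id: 3, slug: "pc-gpu", parentId: 1 },
  { id: 4, slug: "pc-gpu-used", parentId: 3 },
]
getCategoryDescendantIdsFromMap(1, categories) // -> [1, 2, 3, 4]
getCategoryDescendantIdsFromMap(3, categories) // -> [3, 4]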

View File

@ -7,7 +7,14 @@ const {
} = require("./redis/commentCache.service")
const { getOrCacheDeal, getDealIdByCommentId } = require("./redis/dealCache.service")
const { generateCommentId } = require("./redis/commentId.service")
const { queueCommentCreate, queueCommentDelete } = require("./redis/dbSync.service")
const {
queueCommentCreate,
queueCommentDelete,
queueNotificationCreate,
} = require("./redis/dbSync.service")
const { publishNotification } = require("./redis/notificationPubsub.service")
const { trackUserCategoryInterest, USER_INTEREST_ACTIONS } = require("./userInterest.service")
const { sanitizeOptionalPlainText } = require("../utils/inputSanitizer")
function parseParentId(value) {
if (value === undefined || value === null || value === "" || value === "null") return null
@ -43,7 +50,8 @@ async function getCommentsByDealId(dealId, { parentId, page, limit, sort, viewer
}
async function createComment({ dealId, userId, text, parentId = null }) {
if (!text || typeof text !== "string" || !text.trim()) {
const normalizedText = sanitizeOptionalPlainText(text, { maxLength: 2000 })
if (!normalizedText) {
throw new Error("Yorum bos olamaz.")
}
@ -62,7 +70,11 @@ async function createComment({ dealId, userId, text, parentId = null }) {
if (Number(cachedParent.dealId) !== Number(dealId)) {
throw new Error("Yanıtlanan yorum bu deal'a ait degil.")
}
parent = { id: cachedParent.id, dealId: cachedParent.dealId }
parent = {
id: cachedParent.id,
dealId: cachedParent.dealId,
userId: Number(cachedParent.userId) || null,
}
}
const user = await userDB.findUser(
@ -75,7 +87,7 @@ async function createComment({ dealId, userId, text, parentId = null }) {
const commentId = await generateCommentId()
const comment = {
id: commentId,
text: text.trim(),
text: normalizedText,
userId,
dealId,
parentId: parent ? parent.id : null,
@ -94,11 +106,43 @@ async function createComment({ dealId, userId, text, parentId = null }) {
commentId,
dealId,
userId,
text: text.trim(),
text: normalizedText,
parentId: parent ? parent.id : null,
createdAt: createdAt.toISOString(),
}).catch((err) => console.error("DB sync comment create failed:", err?.message || err))
trackUserCategoryInterest({
userId,
categoryId: deal.categoryId,
action: USER_INTEREST_ACTIONS.COMMENT_CREATE,
}).catch((err) => console.error("User interest track failed:", err?.message || err))
const parentUserId = Number(parent?.userId)
if (
parent &&
Number.isInteger(parentUserId) &&
parentUserId > 0 &&
parentUserId !== Number(userId)
) {
const notificationPayload = {
userId: parentUserId,
message: "Yorumuna cevap geldi.",
type: "COMMENT_REPLY",
extras: {
dealId: Number(dealId),
commentId: Number(commentId),
parentCommentId: Number(parent.id),
},
createdAt: createdAt.toISOString(),
}
queueNotificationCreate(notificationPayload).catch((err) =>
console.error("DB sync comment reply notification failed:", err?.message || err)
)
publishNotification(notificationPayload).catch((err) =>
console.error("Comment reply notification publish failed:", err?.message || err)
)
}
return comment
}

View File

@ -1138,14 +1138,10 @@ async function createDeal(dealCreateData, files = []) {
const file = files[i]
const order = i
const key = uuidv4()
const basePath = `deals/${dealId}/${key}`
const detailPath = `${basePath}_detail.webp`
const thumbPath = `${basePath}_thumb.webp`
const BUCKET = "deal"
const detailPath = `images/details/${key}.webp`
const thumbPath = `images/thumbnail/${key}.webp`
const detailBuffer = await makeDetailWebp(file.buffer)
const detailUrl = await uploadImage({
bucket: BUCKET,
path: detailPath,
fileBuffer: detailBuffer,
contentType: "image/webp",
@ -1154,7 +1150,6 @@ async function createDeal(dealCreateData, files = []) {
if (order === 0) {
const thumbBuffer = await makeThumbWebp(file.buffer)
await uploadImage({
bucket: BUCKET,
path: thumbPath,
fileBuffer: thumbBuffer,
contentType: "image/webp",

View File

@ -1,6 +1,7 @@
const dealDB = require("../db/deal.db")
const dealReportDB = require("../db/dealReport.db")
const { queueDealReportStatusUpdate } = require("./redis/dbSync.service")
const { sanitizeOptionalPlainText } = require("../utils/inputSanitizer")
const PAGE_LIMIT = 20
const ALLOWED_REASONS = new Set([
@ -56,7 +57,7 @@ async function createDealReport({ dealId, userId, reason, note }) {
dealId: did,
userId: uid,
reason: normalizedReason,
note: note ? String(note).trim().slice(0, 500) : null,
note: sanitizeOptionalPlainText(note, { maxLength: 500 }),
})
return { reported: true }

View File

@ -11,6 +11,7 @@ const {
const { mapDealToRedisJson } = require("./redis/dealIndexing.service")
const { getOrCacheDeal, updateDealSavesInRedis, setDealInRedis } = require("./redis/dealCache.service")
const { queueDealSaveUpdate } = require("./redis/dbSync.service")
const { trackUserCategoryInterest, USER_INTEREST_ACTIONS } = require("./userInterest.service")
const PAGE_LIMIT = 20
const ALLOWED_STATUSES = new Set(["ACTIVE", "EXPIRED"])
@ -80,6 +81,12 @@ async function saveDealForUser({ userId, dealId }) {
action: "SAVE",
createdAt: new Date().toISOString(),
}).catch((err) => console.error("DB sync dealSave queue failed:", err?.message || err))
trackUserCategoryInterest({
userId: uid,
categoryId: deal.categoryId,
action: USER_INTEREST_ACTIONS.DEAL_SAVE,
}).catch((err) => console.error("User interest track failed:", err?.message || err))
return { saved: true }
}

View File

@ -2,11 +2,12 @@ const { cacheImageFromUrl } = require("./redis/linkPreviewImageCache.service")
function extractImageUrlsFromDescription(description, { max = 5 } = {}) {
if (!description || typeof description !== "string") return []
const regex = /<img[^>]+src=["']([^"']+)["'][^>]*>/gi
const regex = /<img[^>]+src\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s>]+))[^>]*>/gi
const urls = []
let match
while ((match = regex.exec(description)) !== null) {
if (match[1]) urls.push(match[1])
const src = match[1] || match[2] || match[3] || ""
if (src) urls.push(src)
if (urls.length >= max) break
}
return urls
@ -14,15 +15,19 @@ function extractImageUrlsFromDescription(description, { max = 5 } = {}) {
function replaceDescriptionImageUrls(description, urlMap, { maxImages = 5 } = {}) {
if (!description || typeof description !== "string") return description
if (!urlMap || urlMap.size === 0) return description
let seen = 0
return description.replace(/<img[^>]+src=["']([^"']+)["'][^>]*>/gi, (full, src) => {
const safeMap = urlMap instanceof Map ? urlMap : new Map()
return description.replace(
/<img[^>]+src\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s>]+))[^>]*>/gi,
(full, srcDq, srcSq, srcUnq) => {
seen += 1
if (seen > maxImages) return ""
const next = urlMap.get(src)
if (!next) return full
return full.replace(src, next)
})
const src = srcDq || srcSq || srcUnq || ""
const next = safeMap.get(src)
if (!next) return full
return full.replace(src, next)
}
)
}
async function cacheLinkPreviewImages({ product, baseUrl } = {}) {
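The widened regex now accepts double-quoted, single-quoted, and unquoted src attributes; previously the unquoted form fell through. A quick illustration:
// All three attribute styles now match:
extractImageUrlsFromDescription('<img src="https://a.test/1.png">') // ["https://a.test/1.png"]
extractImageUrlsFromDescription("<img src='https://a.test/2.png'>") // ["https://a.test/2.png"]
extractImageUrlsFromDescription("<img src=https://a.test/3.png>")   // ["https://a.test/3.png"]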

View File

@ -11,6 +11,12 @@ const { queueDealUpdate, queueNotificationCreate } = require("./redis/dbSync.ser
const { publishNotification } = require("./redis/notificationPubsub.service")
const { getSellerById } = require("./redis/sellerCache.service")
const { attachTagsToDeal, normalizeTags } = require("./tag.service")
const { toSafeRedirectUrl } = require("../utils/urlSafety")
const {
sanitizeDealDescriptionHtml,
sanitizeOptionalPlainText,
sanitizeRequiredPlainText,
} = require("../utils/inputSanitizer")
function normalizeDealForModResponse(deal) {
if (!deal) return deal
@ -146,6 +152,9 @@ async function approveDeal(dealId) {
userId: Number(deal.userId),
message: "Fırsatın onaylandı!",
type: "MODERATION",
extras: {
dealId: Number(id),
},
createdAt: updatedAt.toISOString(),
}
queueNotificationCreate(payload).catch((err) =>
@ -225,14 +234,34 @@ async function updateDealForMod(dealId, input = {}, viewer = null) {
const data = {}
if (input.title !== undefined) data.title = input.title
if (input.description !== undefined) data.description = input.description ?? null
if (input.url !== undefined) data.url = input.url ?? null
if (input.title !== undefined) {
data.title = sanitizeRequiredPlainText(input.title, { fieldName: "TITLE", maxLength: 300 })
}
if (input.description !== undefined) {
data.description = sanitizeDealDescriptionHtml(input.description)
}
if (input.url !== undefined) {
if (input.url === null) {
data.url = null
} else {
const safeUrl = toSafeRedirectUrl(input.url)
if (!safeUrl) {
const err = new Error("INVALID_URL")
err.statusCode = 400
throw err
}
data.url = safeUrl
}
}
if (input.price !== undefined) data.price = input.price ?? null
if (input.originalPrice !== undefined) data.originalPrice = input.originalPrice ?? null
if (input.shippingPrice !== undefined) data.shippingPrice = input.shippingPrice ?? null
if (input.couponCode !== undefined) data.couponCode = input.couponCode ?? null
if (input.location !== undefined) data.location = input.location ?? null
if (input.couponCode !== undefined) {
data.couponCode = sanitizeOptionalPlainText(input.couponCode, { maxLength: 120 })
}
if (input.location !== undefined) {
data.location = sanitizeOptionalPlainText(input.location, { maxLength: 150 })
}
if (input.discountValue !== undefined) data.discountValue = input.discountValue ?? null
if (input.discountType !== undefined) {
const normalized =
@ -272,9 +301,7 @@ async function updateDealForMod(dealId, input = {}, viewer = null) {
}
if (input.customSeller !== undefined) {
const normalized =
typeof input.customSeller === "string" ? input.customSeller.trim() : null
data.customSeller = normalized || null
data.customSeller = sanitizeOptionalPlainText(input.customSeller, { maxLength: 120 })
if (data.customSeller) data.sellerId = null
}

View File

@ -6,6 +6,7 @@ const {
getOrCacheUserModeration,
setUserModerationInRedis,
} = require("./redis/userModerationCache.service")
const { sanitizeOptionalPlainText } = require("../utils/inputSanitizer")
function assertUserId(userId) {
const id = Number(userId)
@ -94,7 +95,7 @@ async function updateUserRole(userId, role) {
async function addUserNote({ userId, createdById, note }) {
const uid = assertUserId(userId)
const cid = assertUserId(createdById)
const text = String(note || "").trim()
const text = sanitizeOptionalPlainText(note, { maxLength: 1000 })
if (!text) {
const err = new Error("NOTE_REQUIRED")
err.statusCode = 400
@ -104,11 +105,11 @@ async function addUserNote({ userId, createdById, note }) {
queueUserNoteCreate({
userId: uid,
createdById: cid,
note: text.slice(0, 1000),
note: text,
createdAt: new Date().toISOString(),
}).catch((err) => console.error("DB sync user note failed:", err?.message || err))
return { userId: uid, createdById: cid, note: text.slice(0, 1000) }
return { userId: uid, createdById: cid, note: text }
}
async function listUserNotes({ userId, page = 1, limit = 20 }) {

View File

@ -0,0 +1,327 @@
const { getRedisClient } = require("./redis/client")
const userInterestProfileDb = require("../db/userInterestProfile.db")
const { getCategoryDealIndexKey } = require("./redis/categoryDealIndex.service")
const { getDealsByIdsFromRedis } = require("./redis/hotDealList.service")
const { getNewDealIds } = require("./redis/newDealList.service")
const FEED_KEY_PREFIX = "deals:lists:personalized:user:"
const FEED_TTL_SECONDS = Math.max(60, Number(process.env.PERSONAL_FEED_TTL_SECONDS) || 2 * 60 * 60)
const FEED_REBUILD_THRESHOLD_SECONDS = Math.max(
60,
Number(process.env.PERSONAL_FEED_REBUILD_THRESHOLD_SECONDS) || 60 * 60
)
const FEED_CANDIDATE_LIMIT = Math.max(20, Number(process.env.PERSONAL_FEED_CANDIDATE_LIMIT) || 120)
const FEED_MAX_CATEGORIES = Math.max(1, Number(process.env.PERSONAL_FEED_MAX_CATEGORIES) || 8)
const FEED_PER_CATEGORY_LIMIT = Math.max(5, Number(process.env.PERSONAL_FEED_PER_CATEGORY_LIMIT) || 40)
const FEED_LOOKBACK_DAYS = Math.max(1, Number(process.env.PERSONAL_FEED_LOOKBACK_DAYS) || 30)
const FEED_NOISE_MAX = Math.max(0, Number(process.env.PERSONAL_FEED_NOISE_MAX) || 50)
const FEED_PAGE_LIMIT = 20
function normalizePositiveInt(value) {
const num = Number(value)
if (!Number.isInteger(num) || num <= 0) return null
return num
}
function normalizePagination({ page, limit }) {
const rawPage = Number(page)
const rawLimit = Number(limit)
const safePage = Number.isInteger(rawPage) && rawPage > 0 ? rawPage : 1
const safeLimit =
Number.isInteger(rawLimit) && rawLimit > 0 ? Math.min(rawLimit, 50) : 20
return { page: safePage, limit: safeLimit, skip: (safePage - 1) * safeLimit }
}
function getLatestKey(userId) {
return `${FEED_KEY_PREFIX}${userId}:latest`
}
function getFeedKey(userId, feedId) {
return `${FEED_KEY_PREFIX}${userId}:${feedId}`
}
function getFeedKeyMatchPattern(userId) {
return `${FEED_KEY_PREFIX}${userId}:*`
}
function parseCategoryScores(rawScores) {
if (!rawScores || typeof rawScores !== "object" || Array.isArray(rawScores)) return []
const entries = []
for (const [categoryIdRaw, scoreRaw] of Object.entries(rawScores)) {
const categoryId = normalizePositiveInt(categoryIdRaw)
const score = Number(scoreRaw)
if (!categoryId || !Number.isFinite(score) || score <= 0) continue
entries.push({ categoryId, score })
}
return entries.sort((a, b) => b.score - a.score)
}
function buildFallbackFeedIds(dealIds = []) {
return Array.from(
new Set(
(Array.isArray(dealIds) ? dealIds : [])
.map((id) => Number(id))
.filter((id) => Number.isInteger(id) && id > 0)
)
).slice(0, FEED_CANDIDATE_LIMIT)
}
function computePersonalScore({ categoryScore, dealScore }) {
const safeCategory = Math.max(0, Number(categoryScore) || 0)
const safeDealScore = Math.max(1, Number(dealScore) || 0)
const noise = FEED_NOISE_MAX > 0 ? Math.floor(Math.random() * (FEED_NOISE_MAX + 1)) : 0
return safeCategory * safeDealScore + noise
}
async function getFeedFromRedis(redis, userId) {
const latestId = await redis.get(getLatestKey(userId))
if (!latestId) return null
const key = getFeedKey(userId, latestId)
const raw = await redis.call("JSON.GET", key)
const ttl = Number(await redis.ttl(key))
if (!raw || ttl <= 0) return null
try {
const parsed = JSON.parse(raw)
return {
id: String(parsed.id || latestId),
dealIds: buildFallbackFeedIds(parsed.dealIds || []),
ttl,
}
} catch {
return null
}
}
async function listUserFeedKeys(redis, userId) {
const pattern = getFeedKeyMatchPattern(userId)
const keys = []
let cursor = "0"
do {
const [nextCursor, batch] = await redis.scan(cursor, "MATCH", pattern, "COUNT", 100)
cursor = String(nextCursor)
if (Array.isArray(batch) && batch.length) {
batch.forEach((key) => {
if (String(key).endsWith(":latest")) return
keys.push(String(key))
})
}
} while (cursor !== "0")
return Array.from(new Set(keys))
}
function extractFeedIdFromKey(userId, key) {
const prefix = `${FEED_KEY_PREFIX}${userId}:`
if (!String(key).startsWith(prefix)) return null
const feedId = String(key).slice(prefix.length)
return feedId || null
}
async function getBestFeedFromRedis(redis, userId) {
const keys = await listUserFeedKeys(redis, userId)
if (!keys.length) return null
const pipeline = redis.pipeline()
keys.forEach((key) => pipeline.ttl(key))
keys.forEach((key) => pipeline.call("JSON.GET", key))
const results = await pipeline.exec()
if (!Array.isArray(results) || !results.length) return null
const ttlResults = results.slice(0, keys.length)
const jsonResults = results.slice(keys.length)
let best = null
keys.forEach((key, idx) => {
try {
const ttl = Number(ttlResults[idx]?.[1] ?? -1)
if (!Number.isFinite(ttl) || ttl <= 0) return
const raw = jsonResults[idx]?.[1]
if (!raw) return
const parsed = JSON.parse(raw)
const dealIds = buildFallbackFeedIds(parsed?.dealIds || [])
const feedId = extractFeedIdFromKey(userId, key) || String(parsed?.id || "")
if (!feedId) return
if (!best || ttl > best.ttl) {
best = {
id: feedId,
dealIds,
ttl,
}
}
} catch {}
})
return best
}
async function setLatestPointer(redis, userId, feedId, ttlSeconds) {
const ttl = Math.max(1, Number(ttlSeconds) || FEED_TTL_SECONDS)
await redis.set(getLatestKey(userId), String(feedId), "EX", ttl)
}
async function collectCandidateIdsFromIndexes(redis, topCategories = []) {
if (!topCategories.length) return new Map()
const cutoffTs = Date.now() - FEED_LOOKBACK_DAYS * 24 * 60 * 60 * 1000
const pipeline = redis.pipeline()
const refs = []
topCategories.forEach((entry) => {
const key = getCategoryDealIndexKey(entry.categoryId)
if (!key) return
pipeline.zrevrangebyscore(key, "+inf", String(cutoffTs), "LIMIT", 0, FEED_PER_CATEGORY_LIMIT)
refs.push(entry)
})
if (!refs.length) return new Map()
const results = await pipeline.exec()
const categoryByDealId = new Map()
results.forEach((result, idx) => {
const [, rawIds] = result || []
const categoryEntry = refs[idx]
const ids = Array.isArray(rawIds) ? rawIds : []
ids.forEach((id) => {
const dealId = Number(id)
if (!Number.isInteger(dealId) || dealId <= 0) return
if (!categoryByDealId.has(dealId)) {
categoryByDealId.set(dealId, categoryEntry)
}
})
})
return categoryByDealId
}
async function buildPersonalizedFeedForUser(redis, userId) {
const profile = await userInterestProfileDb.getUserInterestProfile(userId)
const categories = parseCategoryScores(profile?.categoryScores).slice(0, FEED_MAX_CATEGORIES)
if (!categories.length) {
const fallback = await getNewDealIds({})
return {
id: String(Date.now()),
dealIds: buildFallbackFeedIds(fallback?.dealIds || []),
}
}
const categoryByDealId = await collectCandidateIdsFromIndexes(redis, categories)
const candidateIds = Array.from(categoryByDealId.keys()).slice(0, FEED_CANDIDATE_LIMIT * 3)
if (!candidateIds.length) {
const fallback = await getNewDealIds({})
return {
id: String(Date.now()),
dealIds: buildFallbackFeedIds(fallback?.dealIds || []),
}
}
const deals = await getDealsByIdsFromRedis(candidateIds, userId)
const scored = deals
.filter((deal) => String(deal?.status || "").toUpperCase() === "ACTIVE")
.map((deal) => {
const entry = categoryByDealId.get(Number(deal.id))
const categoryScore = Number(entry?.score || 0)
return {
id: Number(deal.id),
score: computePersonalScore({
categoryScore,
dealScore: Number(deal.score || 0),
}),
}
})
.filter((item) => Number.isInteger(item.id) && item.id > 0)
scored.sort((a, b) => b.score - a.score)
const rankedIds = Array.from(new Set(scored.map((item) => item.id))).slice(0, FEED_CANDIDATE_LIMIT)
if (!rankedIds.length) {
const fallback = await getNewDealIds({})
return {
id: String(Date.now()),
dealIds: buildFallbackFeedIds(fallback?.dealIds || []),
}
}
return {
id: String(Date.now()),
dealIds: rankedIds,
}
}
async function cacheFeed(redis, userId, feed) {
const feedId = String(feed?.id || Date.now())
const dealIds = buildFallbackFeedIds(feed?.dealIds || [])
const key = getFeedKey(userId, feedId)
const payload = {
id: feedId,
createdAt: new Date().toISOString(),
total: dealIds.length,
dealIds,
}
await redis.call("JSON.SET", key, "$", JSON.stringify(payload))
await redis.expire(key, FEED_TTL_SECONDS)
await setLatestPointer(redis, userId, feedId, FEED_TTL_SECONDS)
return { id: feedId, dealIds, ttl: FEED_TTL_SECONDS }
}
async function getOrBuildFeedIds(userId) {
const uid = normalizePositiveInt(userId)
if (!uid) return { id: null, dealIds: [] }
const redis = getRedisClient()
try {
const best = (await getBestFeedFromRedis(redis, uid)) || (await getFeedFromRedis(redis, uid))
if (best && best.ttl >= FEED_REBUILD_THRESHOLD_SECONDS) {
await setLatestPointer(redis, uid, best.id, best.ttl)
return best
}
if (best && best.ttl > 0) {
// Keep current feed as fallback while creating a fresh one.
const built = await buildPersonalizedFeedForUser(redis, uid)
const cached = await cacheFeed(redis, uid, built)
return cached?.dealIds?.length ? cached : best
}
} catch {}
try {
const built = await buildPersonalizedFeedForUser(redis, uid)
return cacheFeed(redis, uid, built)
} catch {
const fallback = await getNewDealIds({})
const dealIds = buildFallbackFeedIds(fallback?.dealIds || [])
const payload = { id: String(Date.now()), dealIds, ttl: 0 }
try {
return cacheFeed(redis, uid, payload)
} catch {
return payload
}
}
}
async function getPersonalizedDeals({
userId,
page = 1,
} = {}) {
const uid = normalizePositiveInt(userId)
if (!uid) return { page: 1, total: 0, totalPages: 0, results: [], personalizedListId: null }
const pagination = normalizePagination({ page, limit: FEED_PAGE_LIMIT })
const feed = await getOrBuildFeedIds(uid)
const ids = Array.isArray(feed?.dealIds) ? feed.dealIds : []
const pageIds = ids.slice(pagination.skip, pagination.skip + pagination.limit)
const deals = await getDealsByIdsFromRedis(pageIds, uid)
return {
page: pagination.page,
total: ids.length,
totalPages: ids.length ? Math.ceil(ids.length / pagination.limit) : 0,
results: deals,
personalizedListId: feed?.id || null,
}
}
module.exports = {
getPersonalizedDeals,
}
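
A minimal sketch of consuming getPersonalizedDeals from an HTTP layer. The route path, the auth middleware that sets req.user, and the require path are illustrative assumptions; only getPersonalizedDeals({ userId, page }) and its { page, total, totalPages, results, personalizedListId } shape come from the service above.

const express = require("express")
// Hypothetical path to the service shown above.
const { getPersonalizedDeals } = require("../services/personalizedFeed.service")

const router = express.Router()

router.get("/deals/personalized", async (req, res) => {
  // req.user is assumed to be populated by an upstream auth middleware.
  const page = Number(req.query.page) || 1
  const feed = await getPersonalizedDeals({ userId: req.user?.id, page })
  // personalizedListId lets the client keep paging the same cached snapshot.
  res.json(feed)
})

module.exports = router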

View File

@ -1,4 +1,5 @@
const axios = require("axios")
const { requestProductPreviewOverRedis } = require("./redis/scraperRpc.service")
function buildScraperUrl(baseUrl, targetUrl) {
if (!baseUrl) throw new Error("SCRAPER_API_URL missing")
@ -21,6 +22,14 @@ function buildScraperUrl(baseUrl, targetUrl) {
}
async function getProductPreviewFromUrl(url) {
const transport = String(process.env.SCRAPER_TRANSPORT || "http")
.trim()
.toLowerCase()
if (transport === "redis") {
return requestProductPreviewOverRedis(url, { timeoutMs: 20000 })
}
const baseUrl = process.env.SCRAPER_API_URL
const scraperUrl = buildScraperUrl(baseUrl, url)
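
The transport switch is driven entirely by environment variables. A minimal sketch of the relevant settings (names come from this file and from scraperRpc.service; hosts and values are illustrative):

SCRAPER_TRANSPORT=redis               # "http" (default) calls SCRAPER_API_URL directly
SCRAPER_API_URL=http://scraper:8080   # hypothetical host, used only on the http path
SCRAPER_QUEUE_KEY=scraper:requests    # request list the redis transport pushes to
SCRAPER_RPC_TIMEOUT_MS=20000          # redis-path timeout when the caller passes none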

View File

@ -1,22 +1,27 @@
const bcrypt = require("bcryptjs")
const userDb = require("../db/user.db")
const notificationDb = require("../db/notification.db")
const refreshTokenDb = require("../db/refreshToken.db")
const { queueNotificationReadAll } = require("./redis/dbSync.service")
const { normalizeMediaPath } = require("../utils/mediaPath")
function assertPositiveInt(v, name = "id") {
const n = Number(v)
if (!Number.isInteger(n) || n <= 0) throw new Error(`Geçersiz ${name}.`)
if (!Number.isInteger(n) || n <= 0) throw new Error(`Gecersiz ${name}.`)
return n
}
async function updateAvatarUrl(userId, url) {
const id = assertPositiveInt(userId, "userId")
if (!url || typeof url !== "string" || !url.trim())
throw new Error("Geçersiz URL.")
if (!url || typeof url !== "string" || !url.trim()) {
throw new Error("Gecersiz URL.")
}
const normalizedAvatarUrl = normalizeMediaPath(url)
if (!normalizedAvatarUrl) throw new Error("Gecersiz URL.")
const select = { id: true, username: true, avatarUrl: true }
return userDb.updateUser({ id }, { avatarUrl: url.trim() }, { select })
return userDb.updateUser({ id }, { avatarUrl: normalizedAvatarUrl }, { select })
}
async function getUserProfile(userId) {
@ -40,11 +45,13 @@ async function getUserProfile(userId) {
id: true,
message: true,
type: true,
extras: true,
createdAt: true,
readAt: true,
},
},
}
const user = await userDb.findUser({ id }, { select })
if (!user) return user
@ -53,6 +60,7 @@ async function getUserProfile(userId) {
const notifications = Array.isArray(user.notifications)
? user.notifications.map((n) => ({
...n,
extras: n.extras ?? null,
createdAt: formatDate(n.createdAt),
readAt: formatDate(n.readAt),
unread: n.readAt == null,
@ -65,7 +73,7 @@ async function getUserProfile(userId) {
? {
id: item.badge.id,
name: item.badge.name,
iconUrl: item.badge.iconUrl ?? null,
iconUrl: normalizeMediaPath(item.badge.iconUrl) ?? null,
description: item.badge.description ?? null,
}
: null,
@ -76,21 +84,13 @@ async function getUserProfile(userId) {
return {
id: user.id,
username: user.username,
avatarUrl: user.avatarUrl ?? null,
avatarUrl: normalizeMediaPath(user.avatarUrl) ?? null,
createdAt: formatDate(user.createdAt),
notifications,
badges,
}
}
module.exports = {
updateAvatarUrl,
getUserProfile,
markAllNotificationsRead,
getUserNotificationsPage,
changePassword,
}
async function markAllNotificationsRead(userId) {
const id = assertPositiveInt(userId, "userId")
const readAt = new Date().toISOString()
@ -116,6 +116,7 @@ async function getUserNotificationsPage(userId, page = 1, limit = 10) {
id: true,
message: true,
type: true,
extras: true,
createdAt: true,
readAt: true,
},
@ -127,6 +128,7 @@ async function getUserNotificationsPage(userId, page = 1, limit = 10) {
const results = Array.isArray(notifications)
? notifications.map((n) => ({
...n,
extras: n.extras ?? null,
createdAt: formatDate(n.createdAt),
readAt: formatDate(n.readAt),
unread: n.readAt == null,
@ -145,23 +147,33 @@ async function getUserNotificationsPage(userId, page = 1, limit = 10) {
async function changePassword(userId, { currentPassword, newPassword }) {
const id = assertPositiveInt(userId, "userId")
if (!currentPassword || typeof currentPassword !== "string")
throw new Error("Mevcut şifre gerekli.")
if (!newPassword || typeof newPassword !== "string")
throw new Error("Yeni şifre gerekli.")
if (!currentPassword || typeof currentPassword !== "string") {
throw new Error("Mevcut sifre gerekli.")
}
if (!newPassword || typeof newPassword !== "string") {
throw new Error("Yeni sifre gerekli.")
}
const user = await userDb.findUser(
{ id },
{ select: { id: true, passwordHash: true } }
)
if (!user) throw new Error("Kullanıcı bulunamadı.")
if (!user) throw new Error("Kullanici bulunamadi.")
const isMatch = await bcrypt.compare(currentPassword, user.passwordHash)
if (!isMatch) throw new Error("Mevcut şifre hatalı.")
if (!isMatch) throw new Error("Mevcut sifre hatali.")
const passwordHash = await bcrypt.hash(newPassword, 10)
await userDb.updateUser({ id }, { passwordHash })
await refreshTokenDb.revokeAllUserRefreshTokens(id)
return { message: "Şifre güncellendi." }
return { message: "Sifre guncellendi." }
}
module.exports = {
updateAvatarUrl,
getUserProfile,
markAllNotificationsRead,
getUserNotificationsPage,
changePassword,
}

View File

@ -0,0 +1,149 @@
const { getRedisClient } = require("./client")
const CATEGORY_DEAL_INDEX_KEY_PREFIX = "index:category:"
const CATEGORY_DEAL_INDEX_KEY_SUFFIX = ":deals"
function normalizePositiveInt(value) {
const num = Number(value)
if (!Number.isInteger(num) || num <= 0) return null
return num
}
function normalizeEpochMs(value) {
const num = Number(value)
if (!Number.isFinite(num) || num <= 0) return null
return Math.floor(num)
}
function toEpochMs(value) {
if (value instanceof Date) return value.getTime()
const date = new Date(value)
if (Number.isNaN(date.getTime())) return null
return date.getTime()
}
function isActiveStatus(status) {
return String(status || "").toUpperCase() === "ACTIVE"
}
function getCategoryDealIndexKey(categoryId) {
const cid = normalizePositiveInt(categoryId)
if (!cid) return null
return `${CATEGORY_DEAL_INDEX_KEY_PREFIX}${cid}${CATEGORY_DEAL_INDEX_KEY_SUFFIX}`
}
function normalizeDealIndexPayload(payload = {}) {
const dealId = normalizePositiveInt(payload.dealId ?? payload.id)
const categoryId = normalizePositiveInt(payload.categoryId)
const createdAtTs =
normalizeEpochMs(payload.createdAtTs) ?? normalizeEpochMs(toEpochMs(payload.createdAt))
const status = String(payload.status || "").toUpperCase()
return { dealId, categoryId, createdAtTs, status }
}
function isIndexableDeal(payload = {}) {
const normalized = normalizeDealIndexPayload(payload)
return Boolean(
normalized.dealId &&
normalized.categoryId &&
normalized.createdAtTs &&
isActiveStatus(normalized.status)
)
}
function addDealToCategoryIndexInPipeline(pipeline, payload = {}) {
const normalized = normalizeDealIndexPayload(payload)
if (!normalized.dealId || !normalized.categoryId || !normalized.createdAtTs) return false
if (!isActiveStatus(normalized.status)) return false
const key = getCategoryDealIndexKey(normalized.categoryId)
if (!key) return false
pipeline.zadd(key, String(normalized.createdAtTs), String(normalized.dealId))
return true
}
function removeDealFromCategoryIndexInPipeline(pipeline, payload = {}) {
const normalized = normalizeDealIndexPayload(payload)
if (!normalized.dealId || !normalized.categoryId) return false
const key = getCategoryDealIndexKey(normalized.categoryId)
if (!key) return false
pipeline.zrem(key, String(normalized.dealId))
return true
}
async function reconcileDealCategoryIndex({ before = null, after = null } = {}) {
const prev = normalizeDealIndexPayload(before || {})
const next = normalizeDealIndexPayload(after || {})
const prevIndexable = isIndexableDeal(prev)
const nextIndexable = isIndexableDeal(next)
const redis = getRedisClient()
const pipeline = redis.pipeline()
let commandCount = 0
if (prevIndexable) {
const removedForStatus = !nextIndexable
const movedCategory = nextIndexable && prev.categoryId !== next.categoryId
if (removedForStatus || movedCategory) {
if (removeDealFromCategoryIndexInPipeline(pipeline, prev)) commandCount += 1
}
}
if (nextIndexable) {
const isNew = !prevIndexable
const movedCategory = prevIndexable && prev.categoryId !== next.categoryId
const scoreChanged =
prevIndexable &&
prev.categoryId === next.categoryId &&
Number(prev.createdAtTs) !== Number(next.createdAtTs)
if (isNew || movedCategory || scoreChanged) {
if (addDealToCategoryIndexInPipeline(pipeline, next)) commandCount += 1
}
}
if (!commandCount) return 0
try {
await pipeline.exec()
return commandCount
} catch {
return 0
}
}
async function getRecentDealIdsByCategory({
categoryId,
sinceTs,
limit = 30,
} = {}) {
const cid = normalizePositiveInt(categoryId)
if (!cid) return []
const key = getCategoryDealIndexKey(cid)
if (!key) return []
const minTs = normalizeEpochMs(sinceTs) || 0
const safeLimit = Math.max(1, Math.min(Number(limit) || 30, 300))
const redis = getRedisClient()
try {
const ids = await redis.zrevrangebyscore(
key,
"+inf",
String(minTs),
"LIMIT",
0,
safeLimit
)
return Array.isArray(ids)
? ids.map((id) => Number(id)).filter((id) => Number.isInteger(id) && id > 0)
: []
} catch {
return []
}
}
module.exports = {
getCategoryDealIndexKey,
addDealToCategoryIndexInPipeline,
removeDealFromCategoryIndexInPipeline,
reconcileDealCategoryIndex,
getRecentDealIdsByCategory,
}
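
A minimal sketch of how the two entry points are meant to be used together, assuming the before/after payloads carry the fields normalizeDealIndexPayload reads (id/dealId, categoryId, createdAt or createdAtTs, status):

const {
  reconcileDealCategoryIndex,
  getRecentDealIdsByCategory,
} = require("./categoryDealIndex.service")

// ACTIVE -> EXPIRED: prev is indexable, next is not, so reconcile issues a ZREM.
async function onDealExpired(deal) {
  return reconcileDealCategoryIndex({
    before: { ...deal, status: "ACTIVE" },
    after: { ...deal, status: "EXPIRED" },
  })
}

// Newest-first deal ids created in the last 7 days, read straight off the ZSET.
async function lastWeekDealIds(categoryId) {
  return getRecentDealIdsByCategory({
    categoryId,
    sinceTs: Date.now() - 7 * 24 * 60 * 60 * 1000,
    limit: 50,
  })
}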

View File

@ -28,6 +28,15 @@ function createRedisClient() {
return getRedisClient()
}
function normalizeJsonValue(value) {
if (value === undefined || value === null) return null
try {
return JSON.parse(JSON.stringify(value))
} catch {
return null
}
}
async function tryQueue({ redisAction, fallbackAction, label }) {
try {
await redisAction()
@ -323,15 +332,17 @@ async function queueDealAiReviewUpdate({ dealId, data, updatedAt }) {
})
}
async function queueNotificationCreate({ userId, message, type = "INFO", createdAt }) {
async function queueNotificationCreate({ userId, message, type = "INFO", extras = null, createdAt }) {
if (!userId || !message) return
const redis = createRedisClient()
const normalizedExtras = normalizeJsonValue(extras)
const field = `notification:${userId}:${Date.now()}`
const payload = JSON.stringify({
userId: Number(userId),
message: String(message),
type: String(type || "INFO"),
extras: normalizedExtras,
createdAt,
})
@ -345,6 +356,7 @@ async function queueNotificationCreate({ userId, message, type = "INFO", created
userId: Number(userId),
message: String(message),
type: String(type || "INFO"),
extras: normalizedExtras,
createdAt: createdAt ? new Date(createdAt) : new Date(),
},
})

View File

@ -9,6 +9,8 @@ const {
setUserPublicInRedis,
ensureUserMinTtl,
} = require("./userPublicCache.service")
const { reconcileDealCategoryIndex } = require("./categoryDealIndex.service")
const { reconcileDealSellerIndex } = require("./sellerDealIndex.service")
const DEAL_KEY_PREFIX = "deals:cache:"
const DEAL_ANALYTICS_TOTAL_PREFIX = "deals:analytics:total:"
@ -226,6 +228,8 @@ async function cacheDealFromDb(dealId, { ttlSeconds = 1800 } = {}) {
}
await pipeline.exec()
await cacheVotesAndAnalytics(redis, deal.id, payload, { ttlSeconds })
await reconcileDealCategoryIndex({ before: null, after: payload })
await reconcileDealSellerIndex({ before: null, after: payload })
} catch {
// ignore cache failures
} finally {}
@ -271,8 +275,12 @@ async function updateDealInRedis(dealId, patch = {}, { updatedAt = new Date() }
const iso = toIso(updatedAt)
try {
const exists = await redis.call("JSON.GET", key)
if (!exists) return null
const beforeRaw = await redis.call("JSON.GET", key)
if (!beforeRaw) return null
let beforeDeal = null
try {
beforeDeal = JSON.parse(beforeRaw)
} catch {}
const pipeline = redis.pipeline()
Object.entries(patch || {}).forEach(([field, value]) => {
@ -284,7 +292,10 @@ async function updateDealInRedis(dealId, patch = {}, { updatedAt = new Date() }
await pipeline.exec()
const raw = await redis.call("JSON.GET", key)
return raw ? JSON.parse(raw) : null
const updated = raw ? JSON.parse(raw) : null
await reconcileDealCategoryIndex({ before: beforeDeal, after: updated })
await reconcileDealSellerIndex({ before: beforeDeal, after: updated })
return updated
} catch {
return null
} finally {}
@ -315,6 +326,8 @@ async function setDealInRedis(
ttlSeconds,
skipDbEnsure: skipAnalyticsInit,
})
await reconcileDealCategoryIndex({ before: null, after: payload })
await reconcileDealSellerIndex({ before: null, after: payload })
return payload
} catch {
return payload

View File

@ -6,6 +6,8 @@ const { getRedisClient } = require("./client")
const { setUsersPublicInRedis } = require("./userPublicCache.service")
const { setBadgesInRedis } = require("./badgeCache.service")
const badgeDb = require("../../db/badge.db")
const { addDealToCategoryIndexInPipeline } = require("./categoryDealIndex.service")
const { addDealToSellerIndexInPipeline } = require("./sellerDealIndex.service")
const DEAL_KEY_PREFIX = "deals:cache:"
const DEAL_ANALYTICS_TOTAL_PREFIX = "deals:analytics:total:"
@ -219,11 +221,32 @@ async function seedRecentDealsToRedis({ days = 30, ttlDays = 31, batchSize = 200
for (const deal of chunk) {
try {
const mapped = mapDealToRedisJson(deal)
const key = `${DEAL_KEY_PREFIX}${deal.id}`
const payload = JSON.stringify(mapDealToRedisJson(deal))
const payload = JSON.stringify(mapped)
pipeline.call("JSON.SET", key, "$", payload, "NX")
setCommands.push({ deal, index: cmdIndex })
cmdIndex += 1
if (
addDealToCategoryIndexInPipeline(pipeline, {
dealId: deal.id,
categoryId: mapped.categoryId,
createdAtTs: mapped.createdAtTs,
status: mapped.status,
})
) {
cmdIndex += 1
}
if (
addDealToSellerIndexInPipeline(pipeline, {
dealId: deal.id,
sellerId: mapped.sellerId,
createdAtTs: mapped.createdAtTs,
status: mapped.status,
})
) {
cmdIndex += 1
}
const totals = totalsById.get(deal.id) || { impressions: 0, views: 0, clicks: 0 }
pipeline.hset(
`${DEAL_ANALYTICS_TOTAL_PREFIX}${deal.id}`,

View File

@ -4,7 +4,13 @@ const { getRedisClient } = require("./client")
const IMAGE_KEY_PREFIX = "cache:deal_create:image:"
const DEFAULT_TTL_SECONDS = 5 * 60
const MAX_IMAGE_BYTES = 2 * 1024 * 1024
const DEFAULT_MAX_IMAGE_BYTES = 8 * 1024 * 1024
function getMaxImageBytes() {
const raw = Number(process.env.LINK_PREVIEW_MAX_IMAGE_BYTES)
if (!Number.isFinite(raw) || raw < 256 * 1024) return DEFAULT_MAX_IMAGE_BYTES
return Math.floor(raw)
}
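
The ceiling is env-tunable with a floor: anything below 256 KiB (or non-numeric) falls back to the 8 MiB default. Illustrative override:

LINK_PREVIEW_MAX_IMAGE_BYTES=4194304   # 4 MiB cap; values under 262144 are ignored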
function createRedisClient() {
return getRedisClient()
@ -32,6 +38,7 @@ function buildKey(normalizedUrl) {
async function cacheImageFromUrl(rawUrl, { ttlSeconds = DEFAULT_TTL_SECONDS } = {}) {
const normalized = normalizeUrl(rawUrl)
if (!normalized) return null
const maxImageBytes = getMaxImageBytes()
const key = buildKey(normalized)
const redis = createRedisClient()
@ -43,8 +50,8 @@ async function cacheImageFromUrl(rawUrl, { ttlSeconds = DEFAULT_TTL_SECONDS } =
const response = await axios.get(normalized, {
responseType: "arraybuffer",
timeout: 15000,
maxContentLength: MAX_IMAGE_BYTES,
maxBodyLength: MAX_IMAGE_BYTES,
maxContentLength: maxImageBytes,
maxBodyLength: maxImageBytes,
validateStatus: (status) => status >= 200 && status < 300,
})
@ -52,7 +59,7 @@ async function cacheImageFromUrl(rawUrl, { ttlSeconds = DEFAULT_TTL_SECONDS } =
if (!contentType.startsWith("image/")) return null
const buffer = Buffer.from(response.data || [])
if (!buffer.length || buffer.length > MAX_IMAGE_BYTES) return null
if (!buffer.length || buffer.length > maxImageBytes) return null
const payload = JSON.stringify({
ct: contentType,

View File

@ -0,0 +1,97 @@
const Redis = require("ioredis")
const { randomUUID } = require("crypto")
const { getRedisClient } = require("./client")
const { getRedisConnectionOptions } = require("./connection")
const DEFAULT_QUEUE_KEY = "scraper:requests"
const DEFAULT_RESPONSE_CHANNEL_PREFIX = "scraper:response:"
const DEFAULT_TIMEOUT_MS = 20000
function normalizeTimeoutMs(value) {
const num = Number(value)
if (!Number.isFinite(num) || num < 1000) return DEFAULT_TIMEOUT_MS
return Math.floor(num)
}
async function requestProductPreviewOverRedis(url, options = {}) {
if (!url) throw new Error("url parametresi zorunlu")
const queueKey = process.env.SCRAPER_QUEUE_KEY || DEFAULT_QUEUE_KEY
const responsePrefix =
process.env.SCRAPER_RESPONSE_CHANNEL_PREFIX || DEFAULT_RESPONSE_CHANNEL_PREFIX
const timeoutMs = normalizeTimeoutMs(
options.timeoutMs ?? process.env.SCRAPER_RPC_TIMEOUT_MS
)
const requestId = randomUUID()
const responseChannel = `${responsePrefix}${requestId}`
const redis = getRedisClient()
const subscriber = new Redis(getRedisConnectionOptions())
const requestPayload = {
requestId,
type: "PRODUCT_PREVIEW",
url: String(url),
createdAt: new Date().toISOString(),
}
let timeoutHandle = null
let settled = false
function finish() {
if (timeoutHandle) {
clearTimeout(timeoutHandle)
timeoutHandle = null
}
subscriber.removeAllListeners("message")
subscriber
.unsubscribe(responseChannel)
.catch(() => {})
.finally(() => subscriber.disconnect())
}
return new Promise(async (resolve, reject) => {
const safeResolve = (data) => {
if (settled) return
settled = true
finish()
resolve(data)
}
const safeReject = (err) => {
if (settled) return
settled = true
finish()
reject(err)
}
try {
subscriber.on("message", (channel, rawMessage) => {
if (channel !== responseChannel) return
try {
const parsed = JSON.parse(String(rawMessage || "{}"))
if (parsed?.error) {
return safeReject(new Error(String(parsed.error)))
}
if (parsed && typeof parsed === "object" && parsed.product) {
return safeResolve(parsed.product)
}
return safeResolve(parsed)
} catch {
return safeReject(new Error("Scraper yaniti parse edilemedi"))
}
})
await subscriber.subscribe(responseChannel)
await redis.rpush(queueKey, JSON.stringify(requestPayload))
timeoutHandle = setTimeout(() => {
safeReject(new Error("Scraper yaniti zaman asimina ugradi"))
}, timeoutMs)
} catch (err) {
safeReject(err instanceof Error ? err : new Error("Scraper RPC hatasi"))
}
})
}
module.exports = { requestProductPreviewOverRedis }
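
The consumer side of this contract is not in this commit; a minimal sketch of a compatible worker, where fetchPreview() is a hypothetical stand-in for the real scraper:

const Redis = require("ioredis")

async function runScraperWorker() {
  const consumer = new Redis()   // blocking BLPOP connection
  const publisher = new Redis()  // separate connection for PUBLISH
  for (;;) {
    const [, raw] = await consumer.blpop("scraper:requests", 0)
    const { requestId, url } = JSON.parse(raw)
    const channel = `scraper:response:${requestId}`
    try {
      const product = await fetchPreview(url) // hypothetical scrape call
      await publisher.publish(channel, JSON.stringify({ product }))
    } catch (err) {
      // The requester rejects on any payload carrying an `error` field.
      await publisher.publish(channel, JSON.stringify({ error: err.message }))
    }
  }
}

Because the requester subscribes to the response channel before RPUSHing the request, a worker that answers immediately cannot race past the listener.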

View File

@ -0,0 +1,157 @@
const { getRedisClient } = require("./client")
const SELLER_DEAL_INDEX_KEY_PREFIX = "index:seller:"
const SELLER_DEAL_INDEX_KEY_SUFFIX = ":deals"
function normalizePositiveInt(value) {
const num = Number(value)
if (!Number.isInteger(num) || num <= 0) return null
return num
}
function normalizeEpochMs(value) {
const num = Number(value)
if (!Number.isFinite(num) || num <= 0) return null
return Math.floor(num)
}
function toEpochMs(value) {
if (value instanceof Date) return value.getTime()
const date = new Date(value)
if (Number.isNaN(date.getTime())) return null
return date.getTime()
}
function isActiveStatus(status) {
return String(status || "").toUpperCase() === "ACTIVE"
}
function getSellerDealIndexKey(sellerId) {
const sid = normalizePositiveInt(sellerId)
if (!sid) return null
return `${SELLER_DEAL_INDEX_KEY_PREFIX}${sid}${SELLER_DEAL_INDEX_KEY_SUFFIX}`
}
function normalizeDealIndexPayload(payload = {}) {
const dealId = normalizePositiveInt(payload.dealId ?? payload.id)
const sellerId = normalizePositiveInt(payload.sellerId)
const createdAtTs =
normalizeEpochMs(payload.createdAtTs) ?? normalizeEpochMs(toEpochMs(payload.createdAt))
const status = String(payload.status || "").toUpperCase()
return { dealId, sellerId, createdAtTs, status }
}
function isIndexableDeal(payload = {}) {
const normalized = normalizeDealIndexPayload(payload)
return Boolean(
normalized.dealId &&
normalized.sellerId &&
normalized.createdAtTs &&
isActiveStatus(normalized.status)
)
}
function addDealToSellerIndexInPipeline(pipeline, payload = {}) {
const normalized = normalizeDealIndexPayload(payload)
if (!normalized.dealId || !normalized.sellerId || !normalized.createdAtTs) return false
if (!isActiveStatus(normalized.status)) return false
const key = getSellerDealIndexKey(normalized.sellerId)
if (!key) return false
pipeline.zadd(key, String(normalized.createdAtTs), String(normalized.dealId))
return true
}
function removeDealFromSellerIndexInPipeline(pipeline, payload = {}) {
const normalized = normalizeDealIndexPayload(payload)
if (!normalized.dealId || !normalized.sellerId) return false
const key = getSellerDealIndexKey(normalized.sellerId)
if (!key) return false
pipeline.zrem(key, String(normalized.dealId))
return true
}
async function reconcileDealSellerIndex({ before = null, after = null } = {}) {
const prev = normalizeDealIndexPayload(before || {})
const next = normalizeDealIndexPayload(after || {})
const prevIndexable = isIndexableDeal(prev)
const nextIndexable = isIndexableDeal(next)
const redis = getRedisClient()
const pipeline = redis.pipeline()
let commandCount = 0
if (prevIndexable) {
const removedForStatus = !nextIndexable
const movedSeller = nextIndexable && prev.sellerId !== next.sellerId
if (removedForStatus || movedSeller) {
if (removeDealFromSellerIndexInPipeline(pipeline, prev)) commandCount += 1
}
}
if (nextIndexable) {
const isNew = !prevIndexable
const movedSeller = prevIndexable && prev.sellerId !== next.sellerId
const scoreChanged =
prevIndexable &&
prev.sellerId === next.sellerId &&
Number(prev.createdAtTs) !== Number(next.createdAtTs)
if (isNew || movedSeller || scoreChanged) {
if (addDealToSellerIndexInPipeline(pipeline, next)) commandCount += 1
}
}
if (!commandCount) return 0
try {
await pipeline.exec()
return commandCount
} catch {
return 0
}
}
async function getRecentDealIdsBySeller({
sellerId,
offset = 0,
limit = 30,
} = {}) {
const sid = normalizePositiveInt(sellerId)
if (!sid) return []
const key = getSellerDealIndexKey(sid)
if (!key) return []
const safeOffset = Math.max(0, Number(offset) || 0)
const safeLimit = Math.max(1, Math.min(Number(limit) || 30, 300))
const redis = getRedisClient()
try {
const ids = await redis.zrevrange(key, safeOffset, safeOffset + safeLimit - 1)
return Array.isArray(ids)
? ids.map((id) => Number(id)).filter((id) => Number.isInteger(id) && id > 0)
: []
} catch {
return []
}
}
async function getSellerDealIndexCount(sellerId) {
const sid = normalizePositiveInt(sellerId)
if (!sid) return 0
const key = getSellerDealIndexKey(sid)
if (!key) return 0
const redis = getRedisClient()
try {
const count = await redis.zcard(key)
return Number.isFinite(Number(count)) ? Number(count) : 0
} catch {
return 0
}
}
module.exports = {
getSellerDealIndexKey,
addDealToSellerIndexInPipeline,
removeDealFromSellerIndexInPipeline,
reconcileDealSellerIndex,
getRecentDealIdsBySeller,
getSellerDealIndexCount,
}
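
A minimal sketch of paging a seller's deals off this index, mirroring how sellerService consumes it below:

const {
  getRecentDealIdsBySeller,
  getSellerDealIndexCount,
} = require("./sellerDealIndex.service")

async function getSellerDealPage(sellerId, page = 1, limit = 10) {
  const offset = (page - 1) * limit
  // ZCARD for the total, ZREVRANGE for the page slice.
  const [total, ids] = await Promise.all([
    getSellerDealIndexCount(sellerId),
    getRecentDealIdsBySeller({ sellerId, offset, limit }),
  ])
  return { total, totalPages: total ? Math.ceil(total / limit) : 0, ids }
}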

View File

@ -1,11 +1,95 @@
// services/seller/sellerService.js
const { findSeller, findSellers } = require("../db/seller.db")
const dealService = require("./deal.service")
const { listSellersFromRedis, setSellerInRedis, setSellersInRedis } = require("./redis/sellerCache.service")
const { getRecentDealIdsBySeller, getSellerDealIndexCount } = require("./redis/sellerDealIndex.service")
const { getDealsByIdsFromRedis } = require("./redis/hotDealList.service")
const DEFAULT_LIMIT = 10
const MAX_LIMIT = 50
function normalizeSellerName(value) {
return String(value || "").trim()
}
function normalizeSeller(seller = {}) {
const id = Number(seller.id)
if (!Number.isInteger(id) || id <= 0) return null
return {
id,
name: String(seller.name || "").trim(),
url: seller.url ?? null,
sellerLogo: seller.sellerLogo ?? null,
isActive: seller.isActive !== undefined ? Boolean(seller.isActive) : true,
}
}
async function listSellersCached() {
let sellers = await listSellersFromRedis()
if (sellers.length) return sellers
sellers = await findSellers(
{},
{ select: { id: true, name: true, url: true, sellerLogo: true, isActive: true }, orderBy: { name: "asc" } }
)
if (sellers.length) {
await setSellersInRedis(sellers)
}
return sellers
}
function clampPagination({ page, limit }) {
const rawPage = Number(page)
const rawLimit = Number(limit)
const normalizedPage = Number.isFinite(rawPage) ? Math.max(1, Math.floor(rawPage)) : 1
let normalizedLimit = Number.isFinite(rawLimit) ? Math.max(1, Math.floor(rawLimit)) : DEFAULT_LIMIT
normalizedLimit = Math.min(MAX_LIMIT, normalizedLimit)
const skip = (normalizedPage - 1) * normalizedLimit
return { page: normalizedPage, limit: normalizedLimit, skip }
}
function normalizeDealCardFromRedis(deal = {}) {
return {
...deal,
id: Number(deal.id),
score: Number.isFinite(deal.score) ? deal.score : 0,
commentCount: Number.isFinite(deal.commentCount) ? deal.commentCount : 0,
price: deal.price ?? null,
originalPrice: deal.originalPrice ?? null,
shippingPrice: deal.shippingPrice ?? null,
discountValue: deal.discountValue ?? null,
}
}
function hasSellerFilters(filters = {}) {
if (!filters || typeof filters !== "object") return false
const keys = [
"status",
"categoryId",
"categoryIds",
"saleType",
"affiliateType",
"minPrice",
"maxPrice",
"priceMin",
"priceMax",
"minScore",
"maxScore",
"sortBy",
"sortDir",
"createdAfter",
"createdBefore",
"from",
"to",
"hasImage",
]
return keys.some((key) => {
const value = filters[key]
return value !== undefined && value !== null && String(value).trim() !== ""
})
}
async function getSellerByName(name) {
const normalized = normalizeSellerName(name)
if (!normalized) {
@ -14,10 +98,28 @@ async function getSellerByName(name) {
throw err
}
return findSeller(
const sellers = await listSellersCached()
const lower = normalized.toLowerCase()
const cached = sellers
.map(normalizeSeller)
.filter(Boolean)
.find((seller) => seller.name.toLowerCase() === lower)
if (cached) {
return { id: cached.id, name: cached.name, url: cached.url, sellerLogo: cached.sellerLogo }
}
const seller = await findSeller(
{ name: { equals: normalized, mode: "insensitive" } },
{ select: { id: true, name: true, url: true, sellerLogo: true } }
{ select: { id: true, name: true, url: true, sellerLogo: true, isActive: true } }
)
if (seller) {
await setSellerInRedis(seller)
return { id: seller.id, name: seller.name, url: seller.url, sellerLogo: seller.sellerLogo }
}
return null
}
async function getDealsBySellerName(name, { page = 1, limit = 10, filters = {}, viewer = null, scope = "USER" } = {}) {
@ -28,9 +130,65 @@ async function getDealsBySellerName(name, { page = 1, limit = 10, filters = {},
throw err
}
const searchTerm = String(filters?.q || "").trim()
const useSellerIndex = !searchTerm && !hasSellerFilters(filters)
if (useSellerIndex) {
const pagination = clampPagination({ page, limit })
const [total, ids] = await Promise.all([
getSellerDealIndexCount(seller.id),
getRecentDealIdsBySeller({
sellerId: seller.id,
offset: pagination.skip,
limit: pagination.limit,
}),
])
if (!total) {
return {
seller,
payload: {
page: pagination.page,
total: 0,
totalPages: 0,
results: [],
},
}
}
if (!ids.length) {
return {
seller,
payload: {
page: pagination.page,
total,
totalPages: Math.ceil(total / pagination.limit),
results: [],
},
}
}
const viewerId = viewer?.userId ? Number(viewer.userId) : null
const deals = await getDealsByIdsFromRedis(ids, viewerId)
if (deals.length === ids.length) {
const activeDeals = deals.filter((deal) => String(deal?.status || "").toUpperCase() === "ACTIVE")
if (activeDeals.length === ids.length) {
return {
seller,
payload: {
page: pagination.page,
total,
totalPages: Math.ceil(total / pagination.limit),
results: activeDeals.map(normalizeDealCardFromRedis),
},
}
}
}
}
const payload = await dealService.getDeals({
preset: "NEW",
q: filters?.q,
q: searchTerm || undefined,
page,
limit,
viewer,
@ -44,10 +202,15 @@ async function getDealsBySellerName(name, { page = 1, limit = 10, filters = {},
}
async function getActiveSellers() {
return findSellers(
{ isActive: true },
{ select: { name: true, sellerLogo: true }, orderBy: { name: "asc" } }
)
const sellers = await listSellersCached()
return sellers
.map(normalizeSeller)
.filter((seller) => seller && seller.isActive)
.sort((a, b) => a.name.localeCompare(b.name))
.map((seller) => ({
name: seller.name,
sellerLogo: seller.sellerLogo,
}))
}
module.exports = {

View File

@ -1,39 +1,3 @@
const { createClient } = require("@supabase/supabase-js")
const supabase = createClient(
process.env.SUPABASE_URL,
process.env.SUPABASE_KEY
)
/**
* @param {Object} options
* @param {string} options.bucket - supabase bucket name
* @param {string} options.path - path inside the storage bucket
* @param {Buffer} options.fileBuffer - file buffer
* @param {string} options.contentType
*/
async function uploadImage({
bucket,
path,
fileBuffer,
contentType,
}) {
const { error } = await supabase.storage
.from(bucket)
.upload(path, fileBuffer, {
contentType,
upsert: true,
})
if (error) {
throw new Error(error.message)
}
const { data } = supabase.storage
.from(bucket)
.getPublicUrl(path)
return data.publicUrl
}
const { uploadImage } = require("./uploadImage.service")
module.exports = { uploadImage }

View File

@ -1,22 +1,58 @@
const { createClient } = require("@supabase/supabase-js")
const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3")
const supabase = createClient(
process.env.SUPABASE_URL,
process.env.SUPABASE_KEY
)
function createR2Client() {
const endpoint = process.env.R2_ENDPOINT
const accessKeyId = process.env.R2_ACCESS_KEY_ID
const secretAccessKey = process.env.R2_SECRET_ACCESS_KEY
if (!endpoint || !accessKeyId || !secretAccessKey) {
throw new Error("R2 config missing (R2_ENDPOINT/R2_ACCESS_KEY_ID/R2_SECRET_ACCESS_KEY)")
}
return new S3Client({
region: process.env.R2_REGION || "auto",
endpoint,
credentials: { accessKeyId, secretAccessKey },
})
}
let r2Client = null
function getR2Client() {
if (!r2Client) r2Client = createR2Client()
return r2Client
}
function encodeObjectKey(key) {
return String(key)
.split("/")
.map((part) => encodeURIComponent(part))
.join("/")
}
function toStoredPath(key) {
const normalized = String(key || "").replace(/^\/+/, "")
return `/${encodeObjectKey(normalized)}`
}
async function uploadImage({ bucket, path, fileBuffer, contentType }) {
const { error } = await supabase.storage
.from(bucket)
.upload(path, fileBuffer, {
contentType,
upsert: true,
if (!path || !fileBuffer) throw new Error("uploadImage params missing")
const targetBucket = bucket || process.env.R2_BUCKET_NAME
if (!targetBucket) throw new Error("R2 bucket missing (R2_BUCKET_NAME)")
const r2 = getR2Client()
await r2.send(
new PutObjectCommand({
Bucket: targetBucket,
Key: path,
Body: fileBuffer,
ContentType: contentType || "image/webp",
})
)
if (error) throw new Error(error.message)
const { data } = supabase.storage.from(bucket).getPublicUrl(path)
return data.publicUrl
return toStoredPath(path)
}
module.exports = { uploadImage }
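
A minimal usage sketch, assuming the R2_* env vars are configured; the key layout is an illustrative choice, not something the service enforces:

const { uploadImage } = require("./uploadImage.service")

async function storeAvatar(userId, webpBuffer) {
  // bucket omitted: falls back to R2_BUCKET_NAME.
  const storedPath = await uploadImage({
    path: `avatars/${userId}.webp`,
    fileBuffer: webpBuffer,
    contentType: "image/webp",
  })
  // Returns a root-relative encoded path such as "/avatars/42.webp",
  // which is the shape normalizeMediaPath expects downstream.
  return storedPath
}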

View File

@ -0,0 +1,204 @@
const { getRedisClient } = require("./redis/client")
const userInterestProfileDb = require("../db/userInterestProfile.db")
const USER_INTEREST_INCREMENT_HASH_KEY = "bull:dbsync:userInterestIncrements"
const USER_INTEREST_INCREMENT_HASH_KEY_PREFIX = `${USER_INTEREST_INCREMENT_HASH_KEY}:`
const DAILY_USER_KEY_PREFIX = "users:interest:daily:"
const USER_INTEREST_ACTIONS = Object.freeze({
CATEGORY_VISIT: "CATEGORY_VISIT",
DEAL_VIEW: "DEAL_VIEW",
DEAL_CLICK: "DEAL_CLICK",
DEAL_HOT_VOTE: "DEAL_HOT_VOTE",
DEAL_SAVE: "DEAL_SAVE",
COMMENT_CREATE: "COMMENT_CREATE",
})
const ACTION_POINTS = Object.freeze({
[USER_INTEREST_ACTIONS.CATEGORY_VISIT]: 1,
[USER_INTEREST_ACTIONS.DEAL_VIEW]: 2,
[USER_INTEREST_ACTIONS.DEAL_CLICK]: 12,
[USER_INTEREST_ACTIONS.DEAL_HOT_VOTE]: 5,
[USER_INTEREST_ACTIONS.DEAL_SAVE]: 8,
[USER_INTEREST_ACTIONS.COMMENT_CREATE]: 4,
})
const DEFAULT_DAILY_CAP = Number(process.env.USER_INTEREST_DAILY_CATEGORY_CAP) || 50
const DEFAULT_TTL_SECONDS = Number(process.env.USER_INTEREST_DAILY_TTL_SECONDS) || 24 * 60 * 60
const DEFAULT_FULL_LIMIT = Number(process.env.USER_INTEREST_ACTION_FULL_LIMIT) || 5
const DEFAULT_HALF_LIMIT = Number(process.env.USER_INTEREST_ACTION_HALF_LIMIT) || 10
const DEFAULT_SATURATION_RATIO = Number(process.env.USER_INTEREST_SATURATION_RATIO) || 0.3
const DEFAULT_INCREMENT_SHARDS = Math.max(
1,
Math.min(128, Number(process.env.USER_INTEREST_INCREMENT_SHARDS) || 32)
)
const APPLY_CAPS_SCRIPT = `
local actionCount = redis.call("HINCRBY", KEYS[1], ARGV[1], 1)
local basePoints = tonumber(ARGV[3]) or 0
local fullLimit = tonumber(ARGV[4]) or 5
local halfLimit = tonumber(ARGV[5]) or 10
local ttlSeconds = tonumber(ARGV[6]) or 86400
local dailyCap = tonumber(ARGV[7]) or 50
local awarded = 0
if actionCount <= fullLimit then
awarded = basePoints
elseif actionCount <= halfLimit then
awarded = math.floor(basePoints / 2)
else
awarded = 0
end
if awarded <= 0 then
local ttlNow = redis.call("TTL", KEYS[1])
if ttlNow < 0 then
redis.call("EXPIRE", KEYS[1], ttlSeconds)
end
return {0, actionCount}
end
local usedToday = tonumber(redis.call("HGET", KEYS[1], ARGV[2]) or "0")
local remaining = dailyCap - usedToday
if remaining <= 0 then
local ttlNow = redis.call("TTL", KEYS[1])
if ttlNow < 0 then
redis.call("EXPIRE", KEYS[1], ttlSeconds)
end
return {0, actionCount}
end
if awarded > remaining then
awarded = remaining
end
if awarded > 0 then
redis.call("HINCRBY", KEYS[1], ARGV[2], awarded)
end
local ttlNow = redis.call("TTL", KEYS[1])
if ttlNow < 0 then
redis.call("EXPIRE", KEYS[1], ttlSeconds)
end
return {awarded, actionCount}
`
function normalizePositiveInt(value) {
const num = Number(value)
if (!Number.isInteger(num) || num <= 0) return null
return num
}
function normalizeAction(action) {
const normalized = String(action || "").trim().toUpperCase()
if (!ACTION_POINTS[normalized]) return null
return normalized
}
function buildDailyUserKey(userId) {
return `${DAILY_USER_KEY_PREFIX}${userId}`
}
function buildDailyCategoryScoreField(categoryId) {
return `cat:${categoryId}:score`
}
function buildDailyActionField(categoryId, action) {
return `cat:${categoryId}:act:${action}`
}
async function applyRedisCaps({ redis, userId, categoryId, action, basePoints }) {
const userKey = buildDailyUserKey(userId)
const actionField = buildDailyActionField(categoryId, action)
const categoryScoreField = buildDailyCategoryScoreField(categoryId)
const result = await redis.eval(
APPLY_CAPS_SCRIPT,
1,
userKey,
actionField,
categoryScoreField,
String(basePoints),
String(DEFAULT_FULL_LIMIT),
String(DEFAULT_HALF_LIMIT),
String(DEFAULT_TTL_SECONDS),
String(DEFAULT_DAILY_CAP)
)
const awarded = Number(Array.isArray(result) ? result[0] : 0)
return Number.isFinite(awarded) && awarded > 0 ? Math.floor(awarded) : 0
}
async function queueIncrement({ redis, userId, categoryId, points }) {
const field = `${userId}:${categoryId}`
const key = getUserInterestIncrementHashKeyByUserId(userId)
await redis.hincrby(key, field, points)
}
async function persistFallback({ userId, categoryId, points }) {
return userInterestProfileDb.applyInterestIncrementsBatch(
[{ userId, categoryId, points }],
{ saturationRatio: DEFAULT_SATURATION_RATIO }
)
}
async function trackUserCategoryInterest({ userId, categoryId, action }) {
const uid = normalizePositiveInt(userId)
const cid = normalizePositiveInt(categoryId)
const normalizedAction = normalizeAction(action)
if (!uid || !cid || !normalizedAction) return { awarded: 0, queued: false }
const basePoints = Number(ACTION_POINTS[normalizedAction] || 0)
if (!Number.isInteger(basePoints) || basePoints <= 0) return { awarded: 0, queued: false }
const redis = getRedisClient()
try {
const awarded = await applyRedisCaps({
redis,
userId: uid,
categoryId: cid,
action: normalizedAction,
basePoints,
})
if (!awarded) return { awarded: 0, queued: false }
await queueIncrement({
redis,
userId: uid,
categoryId: cid,
points: awarded,
})
return { awarded, queued: true }
} catch (err) {
try {
await persistFallback({ userId: uid, categoryId: cid, points: basePoints })
return { awarded: basePoints, queued: false, fallback: true }
} catch {
return { awarded: 0, queued: false, fallback: false }
}
}
}
function getUserInterestIncrementHashKeyByUserId(userId) {
const uid = normalizePositiveInt(userId)
if (!uid || DEFAULT_INCREMENT_SHARDS <= 1) return USER_INTEREST_INCREMENT_HASH_KEY
const shard = uid % DEFAULT_INCREMENT_SHARDS
return `${USER_INTEREST_INCREMENT_HASH_KEY_PREFIX}${shard}`
}
function getUserInterestIncrementHashKeys() {
if (DEFAULT_INCREMENT_SHARDS <= 1) return [USER_INTEREST_INCREMENT_HASH_KEY]
const keys = [USER_INTEREST_INCREMENT_HASH_KEY]
for (let shard = 0; shard < DEFAULT_INCREMENT_SHARDS; shard += 1) {
keys.push(`${USER_INTEREST_INCREMENT_HASH_KEY_PREFIX}${shard}`)
}
return keys
}
module.exports = {
USER_INTEREST_ACTIONS,
USER_INTEREST_INCREMENT_HASH_KEY,
getUserInterestIncrementHashKeys,
trackUserCategoryInterest,
}
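
A worked pass through APPLY_CAPS_SCRIPT with the defaults (full limit 5, half limit 10, daily category cap 50): DEAL_CLICK is worth 12 base points, so a user's first four clicks in a category award 12 each (48 used), the fifth awards only the remaining 2 as the cap is hit, and every later click that day awards 0, even though clicks 6 through 10 would otherwise earn floor(12/2) = 6.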

View File

@ -2,6 +2,8 @@ const { updateDealVoteInRedis } = require("./redis/dealVote.service");
const { queueVoteUpdate, queueDealUpdate, queueNotificationCreate } = require("./redis/dbSync.service");
const { updateDealInRedis, getDealFromRedis } = require("./redis/dealCache.service");
const { publishNotification } = require("./redis/notificationPubsub.service");
const { trackUserCategoryInterest, USER_INTEREST_ACTIONS } = require("./userInterest.service");
const voteDb = require("../db/vote.db")
async function voteDeal({ dealId, userId, voteType }) {
if (!dealId || !userId || voteType === undefined) {
@ -60,12 +62,20 @@ async function voteDeal({ dealId, userId, voteType }) {
userId: ownerId,
message: "Fırsatın 100 beğeniyi geçti!",
type: "MILESTONE",
extras: {
dealId: Number(dealId),
milestone,
},
createdAt: updatedAt.toISOString(),
}).catch((err) => console.error("DB sync notification queue failed:", err?.message || err))
publishNotification({
userId: ownerId,
message: "Fırsatın 100 beğeniyi geçti!",
type: "MILESTONE",
extras: {
dealId: Number(dealId),
milestone,
},
createdAt: updatedAt.toISOString(),
}).catch((err) => console.error("Notification publish failed:", err?.message || err))
}
@ -80,6 +90,14 @@ async function voteDeal({ dealId, userId, voteType }) {
delta = 0;
}
if (Number(voteType) === 1) {
trackUserCategoryInterest({
userId,
categoryId: deal.categoryId,
action: USER_INTEREST_ACTIONS.DEAL_HOT_VOTE,
}).catch((err) => console.error("User interest track failed:", err?.message || err))
}
return {
dealId,
voteType,

utils/inputSanitizer.js Normal file
View File

@ -0,0 +1,158 @@
const { toSafeRedirectUrl } = require("./urlSafety")
const ALLOWED_DESCRIPTION_TAGS = new Set(["p", "strong", "ul", "ol", "li", "img", "br"])
const SELF_CLOSING_DESCRIPTION_TAGS = new Set(["img", "br"])
function stripControlChars(value) {
return String(value || "").replace(/[\u0000-\u001F\u007F]/g, "")
}
function stripHtmlTags(value) {
return String(value || "")
.replace(/<!--[\s\S]*?-->/g, "")
.replace(/<\/?[^>]+>/g, "")
}
function escapeHtml(value) {
return String(value || "")
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
}
function escapeHtmlAttribute(value) {
return String(value || "")
.replace(/&/g, "&amp;")
.replace(/"/g, "&quot;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
}
function sanitizeOptionalPlainText(value, { maxLength } = {}) {
if (value === undefined || value === null) return null
let normalized = stripControlChars(value)
normalized = stripHtmlTags(normalized).trim()
if (!normalized) return null
if (Number.isInteger(maxLength) && maxLength > 0 && normalized.length > maxLength) {
normalized = normalized.slice(0, maxLength)
}
return normalized
}
function sanitizeRequiredPlainText(value, { fieldName = "field", maxLength } = {}) {
const normalized = sanitizeOptionalPlainText(value, { maxLength })
if (!normalized) {
const err = new Error(`${fieldName}_REQUIRED`)
err.statusCode = 400
throw err
}
return normalized
}
function sanitizeImageSrc(value) {
const trimmed = stripControlChars(value).trim()
if (!trimmed) return null
const lower = trimmed.toLowerCase()
if (
lower.startsWith("javascript:") ||
lower.startsWith("data:") ||
lower.startsWith("vbscript:")
) {
return null
}
const isHttp = lower.startsWith("http://") || lower.startsWith("https://")
const isProtocolRelative = lower.startsWith("//")
const isRootRelative = trimmed.startsWith("/")
if (!isHttp && !isProtocolRelative && !isRootRelative) return null
return toSafeRedirectUrl(trimmed)
}
function sanitizeImgTagAttributes(rawAttrs = "") {
const attrs = {}
const attrRegex = /([a-zA-Z0-9:-]+)\s*=\s*("([^"]*)"|'([^']*)'|([^\s"'=<>`]+))/g
let match
while ((match = attrRegex.exec(rawAttrs)) !== null) {
const name = String(match[1] || "").toLowerCase()
const rawValue = match[3] ?? match[4] ?? match[5] ?? ""
if (name === "src") {
const safeSrc = sanitizeImageSrc(rawValue)
if (safeSrc) attrs.src = safeSrc
continue
}
if (name === "alt" || name === "title") {
const safeText = sanitizeOptionalPlainText(rawValue, { maxLength: 300 })
if (safeText) attrs[name] = safeText
}
}
return attrs
}
function sanitizeDealDescriptionHtml(value) {
if (value === undefined || value === null) return null
const raw = stripControlChars(value).trim()
if (!raw) return null
const tagRegex =
/<\/?([a-zA-Z0-9]+)((?:\s+[a-zA-Z0-9:-]+(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^\s"'=<>`]+))?)*)\s*\/?>/g
const stack = []
let output = ""
let cursor = 0
let match
while ((match = tagRegex.exec(raw)) !== null) {
const [fullTag, rawTagName, rawAttrs = ""] = match
const tagName = String(rawTagName || "").toLowerCase()
const isClosing = fullTag.startsWith("</")
output += escapeHtml(raw.slice(cursor, match.index))
if (ALLOWED_DESCRIPTION_TAGS.has(tagName)) {
if (isClosing) {
if (!SELF_CLOSING_DESCRIPTION_TAGS.has(tagName)) {
const idx = stack.lastIndexOf(tagName)
if (idx !== -1) {
for (let i = stack.length - 1; i >= idx; i -= 1) {
output += `</${stack.pop()}>`
}
}
}
} else if (tagName === "br") {
output += "<br>"
} else if (tagName === "img") {
const attrs = sanitizeImgTagAttributes(rawAttrs)
if (attrs.src) {
const altPart = attrs.alt ? ` alt="${escapeHtmlAttribute(attrs.alt)}"` : ""
const titlePart = attrs.title ? ` title="${escapeHtmlAttribute(attrs.title)}"` : ""
output += `<img src="${escapeHtmlAttribute(attrs.src)}"${altPart}${titlePart}>`
}
} else {
output += `<${tagName}>`
stack.push(tagName)
}
}
cursor = tagRegex.lastIndex
}
output += escapeHtml(raw.slice(cursor))
while (stack.length) {
output += `</${stack.pop()}>`
}
const normalized = output.trim()
return normalized || null
}
module.exports = {
sanitizeOptionalPlainText,
sanitizeRequiredPlainText,
sanitizeDealDescriptionHtml,
}
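
An illustration of what the description sanitizer keeps and drops (the input is a made-up example; note that the text inside a stripped <script> survives as escaped plain text):

const { sanitizeDealDescriptionHtml } = require("./inputSanitizer")

const dirty =
  '<p onclick="x()">Deal!</p><script>alert(1)</script>' +
  '<img src="javascript:evil()"><img src="/img/a.webp" alt="a">'

console.log(sanitizeDealDescriptionHtml(dirty))
// -> '<p>Deal!</p>alert(1)<img src="/img/a.webp" alt="a">'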

utils/mediaPath.js Normal file
View File

@ -0,0 +1,39 @@
function normalizeMediaPath(value) {
if (value === undefined) return undefined
if (value === null) return null
const raw = String(value).trim()
if (!raw) return null
const lower = raw.toLowerCase()
if (
lower.startsWith("javascript:") ||
lower.startsWith("data:") ||
lower.startsWith("vbscript:")
) {
return null
}
try {
if (raw.startsWith("http://") || raw.startsWith("https://")) {
const parsed = new URL(raw)
return parsed.pathname || "/"
}
if (raw.startsWith("//")) {
const parsed = new URL(`https:${raw}`)
return parsed.pathname || "/"
}
const domainLike = /^[a-z0-9.-]+\.[a-z]{2,}(?::\d+)?(\/|$)/i.test(raw)
if (domainLike) {
const parsed = new URL(`https://${raw}`)
return parsed.pathname || "/"
}
} catch {}
if (raw.startsWith("/")) return raw
return `/${raw.replace(/^\/+/, "")}`
}
module.exports = { normalizeMediaPath }
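
Representative behavior of normalizeMediaPath (hostnames are illustrative):

const { normalizeMediaPath } = require("./mediaPath")

normalizeMediaPath("https://cdn.example.com/avatars/1.webp") // -> "/avatars/1.webp"
normalizeMediaPath("//cdn.example.com/a.png")                // -> "/a.png"
normalizeMediaPath("cdn.example.com/a.png")                  // -> "/a.png" (domain-like)
normalizeMediaPath("avatars/1.webp")                         // -> "/avatars/1.webp"
normalizeMediaPath("javascript:alert(1)")                    // -> null
normalizeMediaPath(null)                                     // -> null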

utils/urlSafety.js Normal file
View File

@ -0,0 +1,17 @@
function toSafeRedirectUrl(rawUrl) {
if (rawUrl === undefined || rawUrl === null) return null
const trimmed = String(rawUrl).trim()
if (!trimmed) return null
// Strip control characters that are unsafe in response headers.
const cleaned = trimmed.replace(/[\u0000-\u001F\u007F]/g, "")
if (!cleaned) return null
try {
return encodeURI(cleaned)
} catch {
return null
}
}
module.exports = { toSafeRedirectUrl }
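
toSafeRedirectUrl strips header-dangerous control characters and percent-encodes; it does not reject schemes on its own, so scheme checks live in callers such as sanitizeImageSrc above. Illustrative inputs:

const { toSafeRedirectUrl } = require("./urlSafety")

toSafeRedirectUrl(" https://example.com/a b ")     // -> "https://example.com/a%20b"
toSafeRedirectUrl("https://ex.com/\r\nX-Evil: 1")  // -> "https://ex.com/X-Evil:%201"
toSafeRedirectUrl("")                              // -> null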

View File

@ -24,12 +24,28 @@ const {
const { DEAL_ANALYTICS_TOTAL_HASH_KEY } = require("../services/redis/dealAnalytics.service")
const commentLikeDb = require("../db/commentLike.db")
const dealAnalyticsDb = require("../db/dealAnalytics.db")
const userInterestProfileDb = require("../db/userInterestProfile.db")
const prisma = require("../db/client")
const { getUserInterestIncrementHashKeys } = require("../services/userInterest.service")
const USER_INTEREST_DB_APPLY_BATCH_SIZE = Math.max(
100,
Number(process.env.USER_INTEREST_DB_APPLY_BATCH_SIZE) || 2000
)
function createRedisClient() {
return new Redis(getRedisConnectionOptions())
}
function normalizeJsonValue(value) {
if (value === undefined || value === null) return null
try {
return JSON.parse(JSON.stringify(value))
} catch {
return null
}
}
async function consumeUserUpdates(redis) {
const data = await redis.eval(
"local data = redis.call('HGETALL', KEYS[1]); redis.call('DEL', KEYS[1]); return data;",
@ -339,6 +355,57 @@ async function consumeVoteUpdates(redis) {
return result?.count ?? batch.length
}
async function consumeUserInterestIncrements(redis) {
const hashKeys = getUserInterestIncrementHashKeys()
if (!hashKeys.length) return 0
const increments = []
for (const hashKey of hashKeys) {
const data = await redis.eval(
"local data = redis.call('HGETALL', KEYS[1]); redis.call('DEL', KEYS[1]); return data;",
1,
hashKey
)
if (!data || data.length === 0) continue
for (let i = 0; i < data.length; i += 2) {
try {
const field = String(data[i] || "")
const points = Number(data[i + 1] || 0)
if (!field || !Number.isFinite(points) || points <= 0) continue
const [userIdRaw, categoryIdRaw] = field.split(":")
const userId = Number(userIdRaw)
const categoryId = Number(categoryIdRaw)
if (!Number.isInteger(userId) || userId <= 0) continue
if (!Number.isInteger(categoryId) || categoryId <= 0) continue
increments.push({
userId,
categoryId,
points: Math.floor(points),
})
} catch (err) {
console.error("db-sync userInterest parse failed:", err?.message || err)
}
}
}
if (!increments.length) return 0
let updated = 0
for (let i = 0; i < increments.length; i += USER_INTEREST_DB_APPLY_BATCH_SIZE) {
const chunk = increments.slice(i, i + USER_INTEREST_DB_APPLY_BATCH_SIZE)
try {
const result = await userInterestProfileDb.applyInterestIncrementsBatch(chunk)
updated += Number(result?.updated || 0)
} catch (err) {
console.error("db-sync userInterest batch failed:", err?.message || err)
}
}
return updated
}
async function consumeCommentLikeUpdates(redis) {
const data = await redis.eval(
"local data = redis.call('HGETALL', KEYS[1]); redis.call('DEL', KEYS[1]); return data;",
@ -791,6 +858,7 @@ async function consumeNotifications(redis) {
userId: Number(parsed.userId),
message: String(parsed.message),
type: String(parsed.type || "INFO"),
extras: normalizeJsonValue(parsed.extras),
createdAt: parsed.createdAt ? new Date(parsed.createdAt) : new Date(),
})
} catch (err) {
@ -809,6 +877,7 @@ async function consumeNotifications(redis) {
userId: item.userId,
message: item.message,
type: item.type,
extras: item.extras,
createdAt: item.createdAt,
},
})
@ -1012,6 +1081,7 @@ async function handler() {
const categoryUpsertCount = await consumeCategoryUpserts(redis)
const sellerUpsertCount = await consumeSellerUpserts(redis)
const sellerDomainUpsertCount = await consumeSellerDomainUpserts(redis)
const userInterestCount = await consumeUserInterestIncrements(redis)
return {
votes: voteCount,
commentLikes: commentLikeCount,
@ -1031,6 +1101,7 @@ async function handler() {
categoryUpserts: categoryUpsertCount,
sellerUpserts: sellerUpsertCount,
sellerDomainUpserts: sellerDomainUpsertCount,
userInterests: userInterestCount,
}
} finally {
redis.disconnect()
@ -1045,7 +1116,7 @@ function startDbSyncWorker() {
worker.on("completed", (job) => {
console.log(
`DB sync batch done. Votes: ${job.returnvalue?.votes ?? 0} CommentLikes: ${job.returnvalue?.commentLikes ?? 0} CommentsCreated: ${job.returnvalue?.commentsCreated ?? 0} CommentsDeleted: ${job.returnvalue?.commentsDeleted ?? 0} DealSaves: ${job.returnvalue?.dealSaves ?? 0} DealEvents: ${job.returnvalue?.dealEvents ?? 0} DealCreates: ${job.returnvalue?.dealCreates ?? 0} DealAiReviews: ${job.returnvalue?.dealAiReviews ?? 0} NotificationsRead: ${job.returnvalue?.notificationsRead ?? 0} Notifications: ${job.returnvalue?.notifications ?? 0} DealUpdates: ${job.returnvalue?.dealUpdates ?? 0} Audits: ${job.returnvalue?.audits ?? 0} UserUpdates: ${job.returnvalue?.userUpdates ?? 0} UserNotes: ${job.returnvalue?.userNotes ?? 0} DealReportUpdates: ${job.returnvalue?.dealReportUpdates ?? 0} CategoryUpserts: ${job.returnvalue?.categoryUpserts ?? 0} SellerUpserts: ${job.returnvalue?.sellerUpserts ?? 0} SellerDomainUpserts: ${job.returnvalue?.sellerDomainUpserts ?? 0}`
`DB sync batch done. Votes: ${job.returnvalue?.votes ?? 0} CommentLikes: ${job.returnvalue?.commentLikes ?? 0} CommentsCreated: ${job.returnvalue?.commentsCreated ?? 0} CommentsDeleted: ${job.returnvalue?.commentsDeleted ?? 0} DealSaves: ${job.returnvalue?.dealSaves ?? 0} DealEvents: ${job.returnvalue?.dealEvents ?? 0} DealCreates: ${job.returnvalue?.dealCreates ?? 0} DealAiReviews: ${job.returnvalue?.dealAiReviews ?? 0} NotificationsRead: ${job.returnvalue?.notificationsRead ?? 0} Notifications: ${job.returnvalue?.notifications ?? 0} DealUpdates: ${job.returnvalue?.dealUpdates ?? 0} Audits: ${job.returnvalue?.audits ?? 0} UserUpdates: ${job.returnvalue?.userUpdates ?? 0} UserNotes: ${job.returnvalue?.userNotes ?? 0} DealReportUpdates: ${job.returnvalue?.dealReportUpdates ?? 0} CategoryUpserts: ${job.returnvalue?.categoryUpserts ?? 0} SellerUpserts: ${job.returnvalue?.sellerUpserts ?? 0} SellerDomainUpserts: ${job.returnvalue?.sellerDomainUpserts ?? 0} UserInterests: ${job.returnvalue?.userInterests ?? 0}`
)
})
@ -1057,3 +1128,4 @@ function startDbSyncWorker() {
}
module.exports = { startDbSyncWorker }
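
The recurring HGETALL + DEL Lua snippet in this worker is the whole concurrency story: both commands run in one script, so an increment written by the API can never be read twice or lost between the read and the delete. Isolated as a helper (a sketch, not code from this commit):

const Redis = require("ioredis")

async function drainHash(redis, key) {
  // Atomically read and clear the hash; returns [field, value, field, value, ...].
  const flat = await redis.eval(
    "local data = redis.call('HGETALL', KEYS[1]); redis.call('DEL', KEYS[1]); return data;",
    1,
    key
  )
  const entries = []
  for (let i = 0; i < flat.length; i += 2) entries.push([flat[i], flat[i + 1]])
  return entries
}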