updated

This commit is contained in:
parent e380d084d9
commit 5eb79565c9
@@ -12,6 +12,7 @@ function mapDealToDealCardResponse(deal) {
    location: deal.location ?? null,
    discountType: deal.discountType ?? null,
    discountValue: deal.discountValue ?? null,
    barcodeId: deal.barcodeId ?? null,

    score: deal.score,
    commentsCount: deal.commentCount,
@@ -61,6 +61,7 @@ function mapDealToDealDetailResponse(deal) {
    location: deal.location ?? null,
    discountType: deal.discountType ?? null,
    discountValue: deal.discountValue ?? null,
    barcodeId: deal.barcodeId ?? null,
    score: Number.isFinite(deal.score) ? deal.score : 0,
    myVote: deal.myVote ?? 0,
    isSaved: Boolean(deal.isSaved),
@@ -96,8 +96,39 @@ async function applyDealEventBatch(events = []) {
  return { inserted: data.length, increments }
}

async function applyDealTotalsBatch(increments = []) {
  const data = (Array.isArray(increments) ? increments : []).filter(
    (item) => item && Number.isInteger(Number(item.dealId))
  )
  if (!data.length) return { updated: 0 }

  await prisma.$transaction(async (tx) => {
    for (const inc of data) {
      const dealId = Number(inc.dealId)
      if (!Number.isInteger(dealId) || dealId <= 0) continue
      await tx.dealAnalyticsTotal.upsert({
        where: { dealId },
        create: {
          dealId,
          impressions: Number(inc.impressions || 0),
          views: Number(inc.views || 0),
          clicks: Number(inc.clicks || 0),
        },
        update: {
          impressions: { increment: Number(inc.impressions || 0) },
          views: { increment: Number(inc.views || 0) },
          clicks: { increment: Number(inc.clicks || 0) },
        },
      })
    }
  })

  return { updated: data.length }
}

module.exports = {
  ensureTotalsForDealIds,
  getTotalsByDealIds,
  applyDealEventBatch,
  applyDealTotalsBatch,
}
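A minimal usage sketch for the new batch helper (the caller, require path, and counter values are illustrative, not part of this commit):

const { applyDealTotalsBatch } = require("../db/dealAnalytics.db")

// Flush per-deal counters aggregated elsewhere; missing counters default to 0.
async function flushDealTotals() {
  const result = await applyDealTotalsBatch([
    { dealId: 42, impressions: 10, views: 3, clicks: 1 },
    { dealId: 43, views: 1 },
  ])
  console.log(result) // => { updated: 2 }
}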
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Deal" ADD COLUMN "barcodeId" TEXT;
@@ -224,6 +224,7 @@ model Deal {
  discountType         DiscountType? @default(AMOUNT)
  discountValue        Float?
  maxNotifiedMilestone Int           @default(0)
  barcodeId            String?
  userId               Int
  score                Int           @default(0)
  commentCount         Int           @default(0)
routes/cache.routes.js (new file, 19 lines)

@@ -0,0 +1,19 @@
const express = require("express")
const { getCachedImageByKey } = require("../services/redis/linkPreviewImageCache.service")

const router = express.Router()

router.get("/deal_create/:key", async (req, res) => {
  try {
    const key = req.params.key
    const cached = await getCachedImageByKey(key)
    if (!cached) return res.status(404).json({ error: "Not found" })
    res.setHeader("Content-Type", cached.contentType)
    res.setHeader("Cache-Control", "public, max-age=300")
    return res.status(200).send(cached.buffer)
  } catch (err) {
    return res.status(500).json({ error: "Sunucu hatasi" })
  }
})

module.exports = router
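The router is mounted at /cache in index.js (see below), so a cached link-preview image is reachable at /cache/deal_create/<key>. A hypothetical client-side lookup (base URL and key are placeholders):

// Node 18+ or browser fetch; <key> comes from a rewritten preview URL.
const res = await fetch("http://localhost:3000/cache/deal_create/<key>")
if (res.ok) {
  const contentType = res.headers.get("content-type") // mirrors the cached contentType
  const bytes = await res.arrayBuffer()
}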
@ -676,15 +676,7 @@ router.get("/categories", requireAuth, requireRole("MOD"), async (req, res) => {
|
|||
|
||||
router.get("/deals/reports", requireAuth, requireRole("MOD"), async (req, res) => {
|
||||
try {
|
||||
const page = Number(req.query.page || 1)
|
||||
const status = req.query.status
|
||||
const dealId = req.query.dealId
|
||||
const userId = req.query.userId
|
||||
const payload = await dealReportService.listDealReports({
|
||||
page,
|
||||
status,
|
||||
dealId,
|
||||
userId,
|
||||
})
|
||||
res.json(payload)
|
||||
} catch (err) {
|
||||
|
|
@ -693,6 +685,17 @@ router.get("/deals/reports", requireAuth, requireRole("MOD"), async (req, res) =
|
|||
}
|
||||
})
|
||||
|
||||
router.get("/deals/reports/pending", requireAuth, requireRole("MOD"), async (req, res) => {
|
||||
try {
|
||||
const page = Number(req.query.page || 1)
|
||||
const payload = await dealReportService.getPendingReports({ page })
|
||||
res.json(payload)
|
||||
} catch (err) {
|
||||
const status = err.statusCode || 500
|
||||
res.status(status).json({ error: err.message || "Sunucu hatasi" })
|
||||
}
|
||||
})
|
||||
|
||||
router.post("/badges", requireAuth, requireRole("MOD"), modBadgeCreateValidator, async (req, res) => {
|
||||
try {
|
||||
const badge = await badgeService.createBadge(req.validatedBadgeCreate)
|
||||
|
|
@ -797,4 +800,3 @@ router.delete(
|
|||
)
|
||||
|
||||
module.exports = router
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,8 @@ const { endpoints } = require("@shared/contracts")
|
|||
const { getSellerByName, getDealsBySellerName, getActiveSellers } = require("../services/seller.service")
|
||||
const { findSellerFromLink } = require("../services/sellerLookup.service")
|
||||
const { getProductPreviewFromUrl } = require("../services/productPreview.service")
|
||||
const { setBarcodeForUrl } = require("../services/redis/linkPreviewCache.service")
|
||||
const { cacheLinkPreviewImages } = require("../services/linkPreviewImage.service")
|
||||
const { mapSellerToSellerDetailsResponse } = require("../adapters/responses/sellerDetails.adapter")
|
||||
const { mapPaginatedDealsToDealCardResponse } = require("../adapters/responses/dealCard.adapter")
|
||||
const { getClientIp } = require("../utils/requestInfo")
|
||||
|
|
@ -24,10 +26,23 @@ router.post("/from-link", requireAuth, async (req, res) => {
|
|||
try {
|
||||
const sellerUrl = req.body.url
|
||||
if (!sellerUrl) return res.status(400).json({ error: "url parametresi zorunlu" })
|
||||
const [sellerLookup, product] = await Promise.all([
|
||||
const [sellerLookup, initialProduct] = await Promise.all([
|
||||
findSellerFromLink(sellerUrl),
|
||||
getProductPreviewFromUrl(sellerUrl),
|
||||
])
|
||||
let product = initialProduct
|
||||
|
||||
if (product?.barcodeId) {
|
||||
setBarcodeForUrl(sellerUrl, product.barcodeId, { ttlSeconds: 15 * 60 }).catch((err) =>
|
||||
console.error("Link preview barcode cache failed:", err?.message || err)
|
||||
)
|
||||
}
|
||||
|
||||
const baseUrl = `${req.protocol}://${req.get("host")}`
|
||||
if (product && baseUrl) {
|
||||
const cached = await cacheLinkPreviewImages({ product, baseUrl })
|
||||
product = cached.product
|
||||
}
|
||||
|
||||
const response = seller.sellerLookupResponseSchema.parse(
|
||||
sellerLookup
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ const requireAuth = require("../middleware/requireAuth")
|
|||
const requireNotRestricted = require("../middleware/requireNotRestricted")
|
||||
const { upload } = require("../middleware/upload.middleware")
|
||||
const { uploadImage } = require("../services/uploadImage.service")
|
||||
const { makeWebp } = require("../utils/processImage")
|
||||
const { enqueueAuditFromRequest, buildAuditMeta } = require("../services/audit.service")
|
||||
const { AUDIT_ACTIONS } = require("../services/auditActions")
|
||||
|
||||
|
|
@ -23,14 +24,14 @@ router.post(
|
|||
}
|
||||
|
||||
const key = uuidv4()
|
||||
const ext = req.file.originalname?.split(".").pop() || "jpg"
|
||||
const path = `misc/${req.auth.userId}/${key}.${ext}`
|
||||
const webpBuffer = await makeWebp(req.file.buffer, { quality: 40 })
|
||||
const path = `misc/${req.auth.userId}/${key}.webp`
|
||||
|
||||
const url = await uploadImage({
|
||||
bucket: "deal",
|
||||
path,
|
||||
fileBuffer: req.file.buffer,
|
||||
contentType: req.file.mimetype,
|
||||
fileBuffer: webpBuffer,
|
||||
contentType: "image/webp",
|
||||
})
|
||||
|
||||
enqueueAuditFromRequest(
|
||||
|
|
@ -39,7 +40,7 @@ router.post(
|
|||
buildAuditMeta({
|
||||
entityType: "MEDIA",
|
||||
entityId: path,
|
||||
extra: { contentType: req.file.mimetype },
|
||||
extra: { contentType: "image/webp" },
|
||||
})
|
||||
)
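Both this route and the avatar service further down now convert uploads with makeWebp before storage. A minimal sketch of that conversion step in isolation (only the makeWebp(buffer, { quality }) and uploadImage({ ... }) call shapes come from this diff; the wrapper function is assumed):

const { makeWebp } = require("../utils/processImage")
const { uploadImage } = require("../services/uploadImage.service")

// Convert any incoming image buffer to WebP, then upload it under a .webp path.
async function storeAsWebp(buffer, path) {
  const webpBuffer = await makeWebp(buffer, { quality: 40 })
  return uploadImage({ bucket: "deal", path, fileBuffer: webpBuffer, contentType: "image/webp" })
}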
@ -19,6 +19,7 @@ const categoryRoutes =require("./routes/category.routes")
|
|||
const modRoutes = require("./routes/mod.routes")
|
||||
const uploadRoutes = require("./routes/upload.routes")
|
||||
const badgeRoutes = require("./routes/badge.routes")
|
||||
const cacheRoutes = require("./routes/cache.routes")
|
||||
const { ensureDealSearchIndex } = require("./services/redis/searchIndex.service")
|
||||
const { seedRecentDealsToRedis, seedReferenceDataToRedis } = require("./services/redis/dealIndexing.service")
|
||||
const { ensureCommentIdCounter } = require("./services/redis/commentId.service")
|
||||
|
|
@ -69,6 +70,7 @@ app.use("/api/category", categoryRoutes);
|
|||
app.use("/api/mod", modRoutes);
|
||||
app.use("/api/uploads", uploadRoutes);
|
||||
app.use("/api/badges", badgeRoutes);
|
||||
app.use("/cache", cacheRoutes);
|
||||
|
||||
app.get("/api/openapi.json", (req, res) => {
|
||||
res.sendFile(path.join(__dirname, "docs", "openapi.json"));
|
||||
|
|
@ -79,12 +81,16 @@ app.get("/api/docs", (req, res) => {
|
|||
});
|
||||
|
||||
async function startServer() {
|
||||
try {
|
||||
await ensureDealSearchIndex()
|
||||
await seedReferenceDataToRedis()
|
||||
await ensureDealIdCounter()
|
||||
const ttlDays = Number(process.env.REDIS_DEAL_TTL_DAYS) || 31
|
||||
await seedRecentDealsToRedis({ days: 31, ttlDays })
|
||||
await ensureCommentIdCounter()
|
||||
} catch (err) {
|
||||
console.error("Redis init skipped:", err?.message || err)
|
||||
}
|
||||
// Sunucuyu dinlemeye ba??la
|
||||
app.listen(3000, () => console.log("Server running on http://localhost:3000"));
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -43,7 +43,7 @@ async function getPendingDealCount(redis) {
  try {
    const result = await redis.call(
      "FT.SEARCH",
      "idx:data:deals",
      "idx:deals",
      "@status:{PENDING}",
      "LIMIT",
      0,

@@ -111,6 +111,19 @@ async function getAdminMetrics() {
      },
      dbsyncQueues: queues,
    }
  } catch (err) {
    const openReports = await dealReportDb.countDealReports({ status: "OPEN" })
    return {
      pendingDeals: null,
      openReports,
      redis: {
        usedMemory: null,
        connectedClients: null,
        keyspaceHits: null,
        keyspaceMisses: null,
      },
      dbsyncQueues: {},
    }
  } finally {}
}
@@ -1,5 +1,6 @@
const fs = require("fs")
const { uploadImage } = require("./uploadImage.service")
const { makeWebp } = require("../utils/processImage")
const { validateImage } = require("../utils/validateImage")

const userDB = require("../db/user.db")

@@ -16,12 +17,13 @@ async function updateUserAvatar(userId, file) {
  })

  const buffer = fs.readFileSync(file.path)
  const webpBuffer = await makeWebp(buffer, { quality: 80 })

  const imageUrl = await uploadImage({
    bucket: "avatars",
    path: `${userId}_${Date.now()}.jpg`,
    fileBuffer: buffer,
    contentType: file.mimetype,
    path: `${userId}_${Date.now()}.webp`,
    fileBuffer: webpBuffer,
    contentType: "image/webp",
  })

  fs.unlinkSync(file.path)
@ -19,6 +19,9 @@ const { getHotDealIds, getHotRangeDealIds, getDealsByIdsFromRedis } = require(".
|
|||
const { getTrendingDealIds } = require("./redis/trendingDealList.service")
|
||||
const { getNewDealIds } = require("./redis/newDealList.service")
|
||||
const { setUserPublicInRedis } = require("./redis/userPublicCache.service")
|
||||
const { getUserSavedIdsFromRedis, getUserSavedMapForDeals } = require("./redis/userCache.service")
|
||||
const { getMyVotesForDeals } = require("./redis/dealVote.service")
|
||||
const { getBarcodeForUrl } = require("./redis/linkPreviewCache.service")
|
||||
const {
|
||||
buildDealSearchQuery,
|
||||
searchDeals,
|
||||
|
|
@ -44,6 +47,7 @@ const DEAL_CARD_SELECT = {
|
|||
location: true,
|
||||
discountType: true,
|
||||
discountValue: true,
|
||||
barcodeId: true,
|
||||
score: true,
|
||||
commentCount: true,
|
||||
url: true,
|
||||
|
|
@ -74,6 +78,7 @@ const DEAL_DETAIL_SELECT = {
|
|||
location: true,
|
||||
discountType: true,
|
||||
discountValue: true,
|
||||
barcodeId: true,
|
||||
score: true,
|
||||
commentCount: true,
|
||||
status: true,
|
||||
|
|
@@ -374,6 +379,19 @@ async function getDealsFromRedisSearch({
    includeMinMax: pagination.page === 1,
  })

  if (!searchResult) {
    return getDealsFromDbPreset({
      preset: "RAW",
      q,
      page,
      limit,
      viewer,
      filters,
      baseWhere,
      scope: "USER",
    })
  }

  if (searchResult.total === 0 && q) {
    const fuzzyTextQuery = buildFuzzyTextQuery(q)
    if (fuzzyTextQuery) {
@ -386,6 +404,18 @@ async function getDealsFromRedisSearch({
|
|||
sortDir,
|
||||
includeMinMax: pagination.page === 1,
|
||||
})
|
||||
if (!searchResult) {
|
||||
return getDealsFromDbPreset({
|
||||
preset: "RAW",
|
||||
q,
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
filters,
|
||||
baseWhere,
|
||||
scope: "USER",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -598,13 +628,13 @@ async function getHotDealsFromRedis({ page, limit, viewer, hotListId } = {}) {
|
|||
const { hotListId: listId, dealIds } = await getHotDealIds({ hotListId })
|
||||
|
||||
if (!dealIds.length) {
|
||||
return {
|
||||
page: pagination.page,
|
||||
total: 0,
|
||||
totalPages: 0,
|
||||
results: [],
|
||||
hotListId: listId,
|
||||
}
|
||||
const fallback = await getDealsFromDbPreset({
|
||||
preset: "HOT",
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
})
|
||||
return { ...fallback, hotListId: listId }
|
||||
}
|
||||
|
||||
const pageIds = dealIds.slice(pagination.skip, pagination.skip + pagination.limit)
|
||||
|
|
@ -636,13 +666,13 @@ async function getTrendingDealsFromRedis({ page, limit, viewer, trendingListId }
|
|||
const { trendingListId: listId, dealIds } = await getTrendingDealIds({ trendingListId })
|
||||
|
||||
if (!dealIds.length) {
|
||||
return {
|
||||
page: pagination.page,
|
||||
total: 0,
|
||||
totalPages: 0,
|
||||
results: [],
|
||||
trendingListId: listId,
|
||||
}
|
||||
const fallback = await getDealsFromDbPreset({
|
||||
preset: "TRENDING",
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
})
|
||||
return { ...fallback, trendingListId: listId }
|
||||
}
|
||||
|
||||
const pageIds = dealIds.slice(pagination.skip, pagination.skip + pagination.limit)
|
||||
|
|
@ -674,18 +704,57 @@ async function getHotRangeDealsFromRedis({ page, limit, viewer, range } = {}) {
|
|||
const { listId, dealIds } = await getHotRangeDealIds({ range })
|
||||
|
||||
if (!dealIds.length) {
|
||||
return {
|
||||
page: pagination.page,
|
||||
total: 0,
|
||||
totalPages: 0,
|
||||
results: [],
|
||||
hotListId: listId,
|
||||
}
|
||||
const preset =
|
||||
range === "day" ? "HOT_DAY" : range === "week" ? "HOT_WEEK" : range === "month" ? "HOT_MONTH" : "HOT"
|
||||
const fallback = await getDealsFromDbPreset({
|
||||
preset,
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
})
|
||||
return { ...fallback, hotListId: listId }
|
||||
}
|
||||
|
||||
const pageIds = dealIds.slice(pagination.skip, pagination.skip + pagination.limit)
|
||||
const viewerId = viewer?.userId ? Number(viewer.userId) : null
|
||||
const deals = await getDealsByIdsFromRedis(pageIds, viewerId)
|
||||
if (!deals.length) {
|
||||
return getDealsFromDbPreset({
|
||||
preset: "NEW",
|
||||
page,
|
||||
limit: REDIS_SEARCH_LIMIT,
|
||||
viewer,
|
||||
})
|
||||
}
|
||||
if (!deals.length) {
|
||||
const preset =
|
||||
range === "day" ? "HOT_DAY" : range === "week" ? "HOT_WEEK" : range === "month" ? "HOT_MONTH" : "HOT"
|
||||
const fallback = await getDealsFromDbPreset({
|
||||
preset,
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
})
|
||||
return { ...fallback, hotListId: listId }
|
||||
}
|
||||
if (!deals.length) {
|
||||
const fallback = await getDealsFromDbPreset({
|
||||
preset: "TRENDING",
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
})
|
||||
return { ...fallback, trendingListId: listId }
|
||||
}
|
||||
if (!deals.length) {
|
||||
const fallback = await getDealsFromDbPreset({
|
||||
preset: "HOT",
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
})
|
||||
return { ...fallback, hotListId: listId }
|
||||
}
|
||||
|
||||
const enriched = deals.map((deal) => ({
|
||||
...deal,
|
||||
|
|
@@ -707,6 +776,69 @@ async function getHotRangeDealsFromRedis({ page, limit, viewer, range } = {}) {
  }
}

async function getDealsFromDbPreset({
  preset,
  q,
  page,
  limit,
  viewer = null,
  targetUserId = null,
  filters = null,
  baseWhere = null,
  scope = "USER",
} = {}) {
  const pagination = clampPagination({ page, limit })
  const { where: presetWhere, orderBy: presetOrder } = buildPresetCriteria(preset, {
    viewer,
    targetUserId,
  })
  const searchClause = buildSearchClause(q)
  const allowStatus = preset === "MY" || scope === "MOD"
  const filterWhere = buildFilterWhere(filters, { allowStatus })

  const clauses = []
  if (presetWhere && Object.keys(presetWhere).length > 0) clauses.push(presetWhere)
  if (baseWhere && Object.keys(baseWhere).length > 0) clauses.push(baseWhere)
  if (searchClause) clauses.push(searchClause)
  if (filterWhere) clauses.push(filterWhere)

  const finalWhere = clauses.length === 0 ? {} : clauses.length === 1 ? clauses[0] : { AND: clauses }
  const orderBy = presetOrder ?? [{ createdAt: "desc" }]

  const [deals, total] = await Promise.all([
    dealDB.findDeals(finalWhere, {
      skip: pagination.skip,
      take: pagination.limit,
      orderBy,
      select: DEAL_CARD_SELECT,
    }),
    dealDB.countDeals(finalWhere),
  ])

  const dealIds = deals.map((d) => Number(d.id)).filter((id) => Number.isInteger(id) && id > 0)
  const viewerId = viewer?.userId ? Number(viewer.userId) : null
  const [voteMap, savedMap] = await Promise.all([
    viewerId ? getMyVotesForDeals(dealIds, viewerId) : Promise.resolve(new Map()),
    viewerId ? getUserSavedMapForDeals(viewerId, dealIds) : Promise.resolve(new Map()),
  ])

  const enriched = deals.map((deal) => {
    const id = Number(deal.id)
    return {
      ...deal,
      myVote: viewerId ? Number(voteMap.get(id) ?? 0) : 0,
      isSaved: viewerId ? savedMap.get(id) === true : false,
    }
  })

  return {
    page: pagination.page,
    total,
    totalPages: Math.ceil(total / pagination.limit),
    results: enriched,
  }
}
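A minimal sketch of how the Redis-backed list helpers above fall back to this DB path when a cached list is empty (the call shape mirrors the fallbacks in this diff; the wrapper function is illustrative):

// Serve "hot" deals, preferring Redis but tolerating an empty or missing list.
async function getHotDealsWithFallback({ page = 1, limit = 20, viewer = null } = {}) {
  const { hotListId, dealIds } = await getHotDealIds({})
  if (!dealIds.length) {
    const fallback = await getDealsFromDbPreset({ preset: "HOT", page, limit, viewer })
    return { ...fallback, hotListId }
  }
  // ...otherwise hydrate dealIds from Redis, as getHotDealsFromRedis does above.
}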

async function getBestWidgetDeals({ viewer = null, limit = 5 } = {}) {
  const take = Math.max(1, Math.min(Number(limit) || 5, 20))
  const viewerId = viewer?.userId ? Number(viewer.userId) : null
@ -733,9 +865,15 @@ async function getBestWidgetDeals({ viewer = null, limit = 5 } = {}) {
|
|||
}
|
||||
|
||||
const [hotDay, hotWeek, hotMonth] = await Promise.all([
|
||||
pickTop(dayList?.dealIds || []),
|
||||
pickTop(weekList?.dealIds || []),
|
||||
pickTop(monthList?.dealIds || []),
|
||||
dayList?.dealIds?.length
|
||||
? pickTop(dayList.dealIds)
|
||||
: (await getDealsFromDbPreset({ preset: "HOT_DAY", page: 1, limit: take, viewer })).results,
|
||||
weekList?.dealIds?.length
|
||||
? pickTop(weekList.dealIds)
|
||||
: (await getDealsFromDbPreset({ preset: "HOT_WEEK", page: 1, limit: take, viewer })).results,
|
||||
monthList?.dealIds?.length
|
||||
? pickTop(monthList.dealIds)
|
||||
: (await getDealsFromDbPreset({ preset: "HOT_MONTH", page: 1, limit: take, viewer })).results,
|
||||
])
|
||||
|
||||
return { hotDay, hotWeek, hotMonth }
|
||||
|
|
@ -757,6 +895,8 @@ async function getDealSuggestions({ q, limit = 8, viewer } = {}) {
|
|||
sortDir: "DESC",
|
||||
})
|
||||
|
||||
if (!searchResult) return { results: [] }
|
||||
|
||||
if (!searchResult.dealIds.length) return { results: [] }
|
||||
|
||||
const viewerId = viewer?.userId ? Number(viewer.userId) : null
|
||||
|
|
@ -801,12 +941,12 @@ async function getNewDealsFromRedis({ page, viewer, newListId } = {}) {
|
|||
const { newListId: listId, dealIds } = await getNewDealIds({ newListId })
|
||||
|
||||
if (!dealIds.length) {
|
||||
return {
|
||||
page: pagination.page,
|
||||
total: 0,
|
||||
totalPages: 0,
|
||||
results: [],
|
||||
}
|
||||
return getDealsFromDbPreset({
|
||||
preset: "NEW",
|
||||
page,
|
||||
limit: REDIS_SEARCH_LIMIT,
|
||||
viewer,
|
||||
})
|
||||
}
|
||||
|
||||
const pageIds = dealIds.slice(pagination.skip, pagination.skip + pagination.limit)
|
||||
|
|
@ -881,49 +1021,17 @@ async function getDeals({
|
|||
})
|
||||
}
|
||||
|
||||
const { where: presetWhere, orderBy: presetOrder } = buildPresetCriteria(preset, {
|
||||
return getDealsFromDbPreset({
|
||||
preset,
|
||||
q,
|
||||
page,
|
||||
limit,
|
||||
viewer,
|
||||
targetUserId,
|
||||
filters,
|
||||
baseWhere,
|
||||
scope,
|
||||
})
|
||||
const searchClause = buildSearchClause(q)
|
||||
const allowStatus = preset === "MY" || scope === "MOD"
|
||||
const filterWhere = buildFilterWhere(filters, { allowStatus })
|
||||
|
||||
const clauses = []
|
||||
if (presetWhere && Object.keys(presetWhere).length > 0) clauses.push(presetWhere)
|
||||
if (baseWhere && Object.keys(baseWhere).length > 0) clauses.push(baseWhere)
|
||||
if (searchClause) clauses.push(searchClause)
|
||||
if (filterWhere) clauses.push(filterWhere)
|
||||
|
||||
const finalWhere = clauses.length === 0 ? {} : clauses.length === 1 ? clauses[0] : { AND: clauses }
|
||||
const orderBy = presetOrder ?? [{ createdAt: "desc" }]
|
||||
|
||||
const [deals, total] = await Promise.all([
|
||||
dealDB.findDeals(finalWhere, {
|
||||
skip: pagination.skip,
|
||||
take: pagination.limit,
|
||||
orderBy,
|
||||
select: DEAL_CARD_SELECT,
|
||||
}),
|
||||
dealDB.countDeals(finalWhere),
|
||||
])
|
||||
|
||||
const dealIds = deals.map((d) => d.id)
|
||||
const viewerId = viewer?.userId ? Number(viewer.userId) : null
|
||||
|
||||
const enriched = deals.map((deal) => ({
|
||||
...deal,
|
||||
myVote: viewerId
|
||||
? Number(deal.votes?.find((vote) => Number(vote.userId) === viewerId)?.voteType ?? 0)
|
||||
: 0,
|
||||
}))
|
||||
|
||||
return {
|
||||
page: pagination.page,
|
||||
total,
|
||||
totalPages: Math.ceil(total / pagination.limit),
|
||||
results: enriched,
|
||||
}
|
||||
}
|
||||
|
||||
async function getDealById(id, viewer = null) {
|
||||
|
|
@ -939,7 +1047,7 @@ async function getDealById(id, viewer = null) {
|
|||
if (!isOwner && !isMod) return null
|
||||
}
|
||||
|
||||
const [breadcrumb, similarDeals, userStatsAgg, myVote, commentsResp, seller] = await Promise.all([
|
||||
const [breadcrumb, similarDeals, userStatsAgg, myVote, commentsResp, seller, savedCache] = await Promise.all([
|
||||
categoryDB.getCategoryBreadcrumb(deal.categoryId, { includeUndefined: false }),
|
||||
buildSimilarDealsForDetail(
|
||||
{
|
||||
|
|
@ -956,6 +1064,7 @@ async function getDealById(id, viewer = null) {
|
|||
viewer?.userId ? getDealVoteFromRedis(deal.id, viewer.userId) : Promise.resolve(0),
|
||||
getCommentsForDeal({ dealId: deal.id, parentId: null, page: 1, limit: 10, sort: "NEW", viewerId: viewer?.userId ?? null }).catch(() => ({ results: [] })),
|
||||
deal.sellerId ? getSellerById(Number(deal.sellerId)) : Promise.resolve(null),
|
||||
viewer?.userId ? getUserSavedIdsFromRedis(viewer.userId) : Promise.resolve(null),
|
||||
])
|
||||
|
||||
const userStats = {
|
||||
|
|
@ -972,8 +1081,7 @@ async function getDealById(id, viewer = null) {
|
|||
userStats,
|
||||
myVote,
|
||||
isSaved: viewer?.userId
|
||||
? Array.isArray(deal.savedBy) &&
|
||||
deal.savedBy.some((s) => Number(s?.userId) === Number(viewer.userId))
|
||||
? Boolean(savedCache?.savedSet?.has(Number(deal.id)))
|
||||
: false,
|
||||
}
|
||||
}
|
||||
|
|
@@ -989,6 +1097,10 @@ async function createDeal(dealCreateData, files = []) {
      sellerId = seller.id
      dealCreateData.customSeller = null
    }
    const cachedBarcode = await getBarcodeForUrl(dealCreateData.url)
    if (cachedBarcode) {
      dealCreateData.barcodeId = cachedBarcode
    }
  }

  const userId = Number(dealCreateData?.user?.connect?.id)
@@ -1065,6 +1177,7 @@ async function createDeal(dealCreateData, files = []) {
    location: dealCreateData.location ?? null,
    discountType: dealCreateData.discountType ?? null,
    discountValue: dealCreateData.discountValue ?? null,
    barcodeId: dealCreateData.barcodeId ?? null,
    maxNotifiedMilestone: 0,
    userId,
    score: 0,

@@ -1080,7 +1193,6 @@ async function createDeal(dealCreateData, files = []) {
    user,
    images,
    dealTags: [],
    votes: [],
    comments: [],
    aiReview: null,
  }

@@ -1107,6 +1219,7 @@ async function createDeal(dealCreateData, files = []) {
    location: dealPayload.location,
    discountType: dealPayload.discountType,
    discountValue: dealPayload.discountValue,
    barcodeId: dealPayload.barcodeId,
    maxNotifiedMilestone: dealPayload.maxNotifiedMilestone,
    userId,
    status: dealPayload.status,
@@ -62,16 +62,40 @@ async function createDealReport({ dealId, userId, reason, note }) {
  return { reported: true }
}

async function listDealReports({ page = 1, status = null, dealId = null, userId = null } = {}) {
  const safePage = normalizePage(page)
  const skip = (safePage - 1) * PAGE_LIMIT
async function listDealReports({ status = null, dealId = null, userId = null } = {}) {
  const skip = 0

  const where = {}
  const normalizedStatus = normalizeStatus(status)
  if (normalizedStatus) where.status = normalizedStatus
  where.status = normalizedStatus || "OPEN"

  if (Number.isInteger(Number(dealId))) where.dealId = Number(dealId)
  if (Number.isInteger(Number(userId))) where.userId = Number(userId)

  const [total, reports] = await Promise.all([
    dealReportDB.countDealReports(where),
    dealReportDB.listDealReports(where, {
      skip,
      orderBy: { createdAt: "asc" },
      include: {
        deal: { select: { id: true, title: true, status: true } },
        user: { select: { id: true, username: true } },
      },
    }),
  ])

  return {
    total,
    results: reports,
  }
}

async function getPendingReports({ page = 1 } = {}) {
  const safePage = normalizePage(page)
  const skip = (safePage - 1) * PAGE_LIMIT

  const where = { status: "OPEN" }

  const [total, reports] = await Promise.all([
    dealReportDB.countDealReports(where),
    dealReportDB.listDealReports(where, {

@@ -114,5 +138,6 @@ async function updateDealReportStatus({ reportId, status }) {
module.exports = {
  createDealReport,
  listDealReports,
  getPendingReports,
  updateDealReportStatus,
}
@ -31,8 +31,6 @@ const DEAL_CACHE_INCLUDE = {
|
|||
user: { select: { id: true, username: true, avatarUrl: true } },
|
||||
images: { orderBy: { order: "asc" }, select: { id: true, imageUrl: true, order: true } },
|
||||
dealTags: { include: { tag: { select: { id: true, slug: true, name: true } } } },
|
||||
votes: { select: { userId: true, voteType: true } },
|
||||
savedBy: { select: { userId: true, createdAt: true } },
|
||||
comments: {
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
|
|
@ -166,6 +164,18 @@ async function listSavedDeals({ userId, page = 1 }) {
|
|||
{ id: { in: missingIds }, status: { in: Array.from(ALLOWED_STATUSES) } },
|
||||
{ include: DEAL_CACHE_INCLUDE }
|
||||
)
|
||||
const fallbackMap = new Map()
|
||||
missingDeals.forEach((deal) => {
|
||||
const payload = mapDealToRedisJson(deal)
|
||||
const myVote = 0
|
||||
fallbackMap.set(Number(deal.id), {
|
||||
...payload,
|
||||
user: deal.user ?? null,
|
||||
seller: deal.seller ?? null,
|
||||
myVote,
|
||||
isSaved: true,
|
||||
})
|
||||
})
|
||||
await Promise.all(
|
||||
missingDeals.map((deal) => {
|
||||
const payload = mapDealToRedisJson(deal)
|
||||
|
|
@ -174,6 +184,9 @@ async function listSavedDeals({ userId, page = 1 }) {
|
|||
)
|
||||
const hydrated = await getDealsByIdsFromRedis(missingIds, uid)
|
||||
hydrated.forEach((d) => cachedMap.set(Number(d.id), d))
|
||||
if (!hydrated.length && fallbackMap.size) {
|
||||
fallbackMap.forEach((value, key) => cachedMap.set(key, value))
|
||||
}
|
||||
}
|
||||
|
||||
const results = pageIds.map((id) => cachedMap.get(id)).filter(Boolean)
|
||||
|
|
|
|||
services/linkPreviewImage.service.js (new file, 58 lines)

@@ -0,0 +1,58 @@
const { cacheImageFromUrl } = require("./redis/linkPreviewImageCache.service")

function extractImageUrlsFromDescription(description, { max = 5 } = {}) {
  if (!description || typeof description !== "string") return []
  const regex = /<img[^>]+src=["']([^"']+)["'][^>]*>/gi
  const urls = []
  let match
  while ((match = regex.exec(description)) !== null) {
    if (match[1]) urls.push(match[1])
    if (urls.length >= max) break
  }
  return urls
}

function replaceDescriptionImageUrls(description, urlMap, { maxImages = 5 } = {}) {
  if (!description || typeof description !== "string") return description
  if (!urlMap || urlMap.size === 0) return description
  let seen = 0
  return description.replace(/<img[^>]+src=["']([^"']+)["'][^>]*>/gi, (full, src) => {
    seen += 1
    if (seen > maxImages) return ""
    const next = urlMap.get(src)
    if (!next) return full
    return full.replace(src, next)
  })
}

async function cacheLinkPreviewImages({ product, baseUrl } = {}) {
  if (!product || typeof product !== "object") return { product }
  const images = Array.isArray(product.images) ? product.images : []
  const description = product.description || ""
  const descImages = extractImageUrlsFromDescription(description, { max: 5 })
  const combined = [...images, ...descImages].filter(Boolean)
  const unique = Array.from(new Set(combined))

  const urlMap = new Map()
  for (const url of unique) {
    const cached = await cacheImageFromUrl(url, { ttlSeconds: 5 * 60 })
    if (cached?.key) {
      urlMap.set(url, `${baseUrl}/cache/deal_create/${cached.key}`)
    }
  }

  const nextImages = images.map((url) => urlMap.get(url) || url)
  const nextDescription = replaceDescriptionImageUrls(description, urlMap, { maxImages: 5 })

  return {
    product: {
      ...product,
      images: nextImages,
      description: nextDescription,
    },
  }
}

module.exports = {
  cacheLinkPreviewImages,
}
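A minimal sketch of the intended flow, mirroring the /from-link route above (beyond the images and description fields this module reads, the product shape is an assumption):

const { cacheLinkPreviewImages } = require("./linkPreviewImage.service")

// Rewrites remote preview image URLs to short-lived /cache/deal_create/<key> URLs.
async function previewWithProxiedImages(product, req) {
  const baseUrl = `${req.protocol}://${req.get("host")}`
  const { product: rewritten } = await cacheLinkPreviewImages({ product, baseUrl })
  return rewritten
}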
@@ -309,7 +309,11 @@ async function updateDealForMod(dealId, input = {}, viewer = null) {
    updatedAt: updatedAt.toISOString(),
  }).catch((err) => console.error("DB sync deal update failed:", err?.message || err))

  const normalized = updated || existing
  let normalized = updated || existing
  if (!normalized?.user) {
    const refreshed = await getOrCacheDealForModeration(id)
    if (refreshed?.deal) normalized = refreshed.deal
  }
  const enriched = await enrichDealSeller(normalized)
  return normalizeDealForModResponse(enriched)
}
@@ -55,6 +55,8 @@ async function getBadgesFromRedis() {
      }
    })
    return badges
  } catch {
    return []
  } finally {}
}
@@ -1,7 +1,7 @@
const { getRedisClient } = require("./client")
const { getRequestContext } = require("../requestContext")

const MISS_HASH_KEY = "cache:misses"
const MISS_HASH_KEY = "metrics:cache:misses"

function shouldLog() {
  return String(process.env.CACHE_MISS_LOG || "").trim() === "1"
@@ -17,6 +17,8 @@ async function getCategoryById(id) {
      await recordCacheMiss({ key: `${CATEGORIES_KEY}:${cid}`, label: "category" })
    }
    return raw ? JSON.parse(raw) : null
  } catch {
    return null
  } finally {}
}

@@ -68,6 +70,8 @@ async function listCategoriesFromRedis() {
      }
    }
    return list
  } catch {
    return []
  } finally {}
}
@@ -1,6 +1,6 @@
const prisma = require("../../db/client")
const { ensureCounterAtLeast, nextId } = require("./idGenerator.service")
const CATEGORY_ID_KEY = "ids:category"
const CATEGORY_ID_KEY = "data:ids:category"

async function ensureCategoryIdCounter() {
  const latest = await prisma.category.findFirst({

@@ -12,7 +12,15 @@ async function ensureCategoryIdCounter() {
}

async function generateCategoryId() {
  return nextId(CATEGORY_ID_KEY)
  try {
    return await nextId(CATEGORY_ID_KEY)
  } catch {
    const latest = await prisma.category.findFirst({
      select: { id: true },
      orderBy: { id: "desc" },
    })
    return (latest?.id ?? 0) + 1
  }
}

module.exports = { ensureCategoryIdCounter, generateCategoryId }
@ -2,9 +2,9 @@ const { getRedisClient } = require("./client")
|
|||
const { getOrCacheDeal, getDealIdByCommentId, ensureMinDealTtl } = require("./dealCache.service")
|
||||
|
||||
const DEFAULT_TTL_SECONDS = 15 * 60
|
||||
const DEAL_KEY_PREFIX = "data:deals:"
|
||||
const COMMENT_LOOKUP_KEY = "data:comments:lookup"
|
||||
const COMMENT_IDS_KEY = "data:comments:ids"
|
||||
const DEAL_KEY_PREFIX = "deals:cache:"
|
||||
const COMMENT_LOOKUP_KEY = "comments:lookup"
|
||||
const COMMENT_IDS_KEY = "comments:ids"
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
|
|
@ -49,6 +49,8 @@ async function updateDealCommentsInRedis(dealId, comments, commentCount) {
|
|||
pipeline.call("JSON.SET", `${DEAL_KEY_PREFIX}${dealId}`, "$.commentCount", Number(commentCount))
|
||||
}
|
||||
await pipeline.exec()
|
||||
} catch {
|
||||
// ignore cache failures
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -129,6 +131,8 @@ async function addCommentToRedis(comment, { ttlSeconds = DEFAULT_TTL_SECONDS } =
|
|||
try {
|
||||
await redis.hset(COMMENT_LOOKUP_KEY, String(comment.id), String(comment.dealId))
|
||||
await redis.sadd(COMMENT_IDS_KEY, String(comment.id))
|
||||
} catch {
|
||||
// ignore cache failures
|
||||
} finally {}
|
||||
return { added: true }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const prisma = require("../../db/client")
|
||||
const { ensureCounterAtLeast, nextId } = require("./idGenerator.service")
|
||||
const COMMENT_ID_KEY = "ids:comment"
|
||||
const COMMENT_ID_KEY = "data:ids:comment"
|
||||
|
||||
async function ensureCommentIdCounter() {
|
||||
const latest = await prisma.comment.findFirst({
|
||||
|
|
@ -12,7 +12,15 @@ async function ensureCommentIdCounter() {
|
|||
}
|
||||
|
||||
async function generateCommentId() {
|
||||
return nextId(COMMENT_ID_KEY)
|
||||
try {
|
||||
return await nextId(COMMENT_ID_KEY)
|
||||
} catch {
|
||||
const latest = await prisma.comment.findFirst({
|
||||
select: { id: true },
|
||||
orderBy: { id: "desc" },
|
||||
})
|
||||
return (latest?.id ?? 0) + 1
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { ensureCommentIdCounter, generateCommentId }
|
||||
|
|
|
|||
|
|
@@ -1,32 +1,117 @@
const { getRedisClient } = require("./client")
const prisma = require("../../db/client")
const voteDb = require("../../db/vote.db")
const commentLikeDb = require("../../db/commentLike.db")
const dealAnalyticsDb = require("../../db/dealAnalytics.db")
const dealSaveDb = require("../../db/dealSave.db")

const VOTE_HASH_KEY = "dbsync:votes"
const COMMENT_LIKE_HASH_KEY = "dbsync:commentLikes"
const COMMENT_HASH_KEY = "dbsync:comments"
const COMMENT_DELETE_HASH_KEY = "dbsync:commentDeletes"
const DEAL_UPDATE_HASH_KEY = "dbsync:dealUpdates"
const DEAL_CREATE_HASH_KEY = "dbsync:dealCreates"
const DEAL_AI_REVIEW_HASH_KEY = "dbsync:dealAiReviews"
const NOTIFICATION_HASH_KEY = "dbsync:notifications"
const NOTIFICATION_READ_HASH_KEY = "dbsync:notificationReads"
const DEAL_SAVE_HASH_KEY = "dbsync:dealSaves"
const AUDIT_HASH_KEY = "dbsync:audits"
const USER_UPDATE_HASH_KEY = "dbsync:users"
const USER_NOTE_HASH_KEY = "dbsync:userNotes"
const DEAL_REPORT_UPDATE_HASH_KEY = "dbsync:dealReportUpdates"
const CATEGORY_UPSERT_HASH_KEY = "dbsync:categoryUpserts"
const SELLER_UPSERT_HASH_KEY = "dbsync:sellerUpserts"
const SELLER_DOMAIN_UPSERT_HASH_KEY = "dbsync:sellerDomainUpserts"
const VOTE_HASH_KEY = "bull:dbsync:votes"
const COMMENT_LIKE_HASH_KEY = "bull:dbsync:commentLikes"
const COMMENT_HASH_KEY = "bull:dbsync:comments"
const COMMENT_DELETE_HASH_KEY = "bull:dbsync:commentDeletes"
const DEAL_UPDATE_HASH_KEY = "bull:dbsync:dealUpdates"
const DEAL_CREATE_HASH_KEY = "bull:dbsync:dealCreates"
const DEAL_AI_REVIEW_HASH_KEY = "bull:dbsync:dealAiReviews"
const NOTIFICATION_HASH_KEY = "bull:dbsync:notifications"
const NOTIFICATION_READ_HASH_KEY = "bull:dbsync:notificationReads"
const DEAL_SAVE_HASH_KEY = "bull:dbsync:dealSaves"
const AUDIT_HASH_KEY = "bull:dbsync:audits"
const USER_UPDATE_HASH_KEY = "bull:dbsync:users"
const USER_NOTE_HASH_KEY = "bull:dbsync:userNotes"
const DEAL_REPORT_UPDATE_HASH_KEY = "bull:dbsync:dealReportUpdates"
const CATEGORY_UPSERT_HASH_KEY = "bull:dbsync:categoryUpserts"
const SELLER_UPSERT_HASH_KEY = "bull:dbsync:sellerUpserts"
const SELLER_DOMAIN_UPSERT_HASH_KEY = "bull:dbsync:sellerDomainUpserts"
const DEAL_ANALYTICS_TOTAL_HASH_KEY = "bull:dbsync:dealAnalyticsTotals"

function createRedisClient() {
  return getRedisClient()
}

async function tryQueue({ redisAction, fallbackAction, label }) {
  try {
    await redisAction()
    return { queued: true }
  } catch (err) {
    if (fallbackAction) {
      try {
        await fallbackAction()
        return { queued: false, fallback: true }
      } catch (fallbackErr) {
        console.error(`[dbsync-fallback] ${label || "unknown"} failed:`, fallbackErr?.message || fallbackErr)
      }
    }
    console.error(`[dbsync-queue] ${label || "unknown"} failed:`, err?.message || err)
    return { queued: false, fallback: false }
  }
}
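A minimal sketch of the pattern every queue* function below now follows (the hash key, field name, and prisma.example model are hypothetical):

// Prefer queueing into the Redis hash; fall back to a direct Prisma write if Redis is unavailable.
async function queueExampleUpdate({ id, data }) {
  const redis = createRedisClient()
  const payload = JSON.stringify({ id: Number(id), data })
  await tryQueue({
    label: "example-update",
    redisAction: () => redis.hset("bull:dbsync:examples", `example:${id}`, payload),
    fallbackAction: () => prisma.example.update({ where: { id: Number(id) }, data }), // hypothetical model
  })
}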

const DEAL_UPDATE_FIELDS = new Set([
  "title",
  "description",
  "url",
  "price",
  "originalPrice",
  "shippingPrice",
  "couponCode",
  "location",
  "discountType",
  "discountValue",
  "barcodeId",
  "maxNotifiedMilestone",
  "status",
  "saletype",
  "affiliateType",
  "sellerId",
  "customSeller",
  "categoryId",
  "userId",
])

function sanitizeDealUpdate(data) {
  const patch = {}
  if (!data || typeof data !== "object") return patch
  for (const [key, value] of Object.entries(data)) {
    if (!DEAL_UPDATE_FIELDS.has(key)) continue
    patch[key] = value
  }
  return patch
}
|
||||
|
||||
function normalizeDealCreateData(data = {}) {
|
||||
return {
|
||||
id: Number(data.id),
|
||||
title: String(data.title || ""),
|
||||
description: data.description ?? null,
|
||||
url: data.url ?? null,
|
||||
price: data.price ?? null,
|
||||
originalPrice: data.originalPrice ?? null,
|
||||
shippingPrice: data.shippingPrice ?? null,
|
||||
percentOff: data.percentOff ?? null,
|
||||
couponCode: data.couponCode ?? null,
|
||||
location: data.location ?? null,
|
||||
discountType: data.discountType ?? null,
|
||||
discountValue: data.discountValue ?? null,
|
||||
barcodeId: data.barcodeId ?? null,
|
||||
maxNotifiedMilestone: Number.isFinite(Number(data.maxNotifiedMilestone))
|
||||
? Number(data.maxNotifiedMilestone)
|
||||
: 0,
|
||||
userId: Number(data.userId),
|
||||
status: String(data.status || "PENDING"),
|
||||
saletype: String(data.saletype || "ONLINE"),
|
||||
affiliateType: String(data.affiliateType || "NON_AFFILIATE"),
|
||||
sellerId: data.sellerId ? Number(data.sellerId) : null,
|
||||
customSeller: data.customSeller ?? null,
|
||||
categoryId: Number.isInteger(Number(data.categoryId)) ? Number(data.categoryId) : 0,
|
||||
createdAt: data.createdAt ? new Date(data.createdAt) : new Date(),
|
||||
updatedAt: data.updatedAt ? new Date(data.updatedAt) : new Date(),
|
||||
}
|
||||
}
|
||||
|
||||
async function queueVoteUpdate({ dealId, userId, voteType, createdAt }) {
|
||||
if (!dealId || !userId) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `vote:${dealId}:${userId}`
|
||||
const payload = JSON.stringify({
|
||||
dealId: Number(dealId),
|
||||
|
|
@ -34,15 +119,24 @@ async function queueVoteUpdate({ dealId, userId, voteType, createdAt }) {
|
|||
voteType: Number(voteType),
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(VOTE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "vote",
|
||||
redisAction: () => redis.hset(VOTE_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
voteDb.voteDealTx({
|
||||
dealId: Number(dealId),
|
||||
userId: Number(userId),
|
||||
voteType: Number(voteType),
|
||||
createdAt,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueCommentLikeUpdate({ commentId, userId, like, createdAt }) {
|
||||
if (!commentId || !userId) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `commentLike:${commentId}:${userId}`
|
||||
const payload = JSON.stringify({
|
||||
commentId: Number(commentId),
|
||||
|
|
@ -50,15 +144,23 @@ async function queueCommentLikeUpdate({ commentId, userId, like, createdAt }) {
|
|||
like: Boolean(like),
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(COMMENT_LIKE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "comment-like",
|
||||
redisAction: () => redis.hset(COMMENT_LIKE_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
commentLikeDb.setCommentLike({
|
||||
commentId: Number(commentId),
|
||||
userId: Number(userId),
|
||||
like: Boolean(like),
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueCommentCreate({ commentId, dealId, userId, text, parentId, createdAt }) {
|
||||
if (!commentId || !dealId || !userId) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `comment:${commentId}`
|
||||
const payload = JSON.stringify({
|
||||
commentId: Number(commentId),
|
||||
|
|
@ -68,45 +170,91 @@ async function queueCommentCreate({ commentId, dealId, userId, text, parentId, c
|
|||
parentId: parentId ? Number(parentId) : null,
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(COMMENT_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "comment-create",
|
||||
redisAction: () => redis.hset(COMMENT_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
await prisma.$transaction(async (tx) => {
|
||||
await tx.comment.create({
|
||||
data: {
|
||||
id: Number(commentId),
|
||||
dealId: Number(dealId),
|
||||
userId: Number(userId),
|
||||
text: String(text || ""),
|
||||
parentId: parentId ? Number(parentId) : null,
|
||||
createdAt: createdAt ? new Date(createdAt) : new Date(),
|
||||
},
|
||||
})
|
||||
await tx.deal.update({
|
||||
where: { id: Number(dealId) },
|
||||
data: { commentCount: { increment: 1 } },
|
||||
})
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueCommentDelete({ commentId, dealId, createdAt }) {
|
||||
if (!commentId || !dealId) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `commentDelete:${commentId}`
|
||||
const payload = JSON.stringify({
|
||||
commentId: Number(commentId),
|
||||
dealId: Number(dealId),
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(COMMENT_DELETE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "comment-delete",
|
||||
redisAction: () => redis.hset(COMMENT_DELETE_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
await prisma.$transaction(async (tx) => {
|
||||
const result = await tx.comment.updateMany({
|
||||
where: { id: Number(commentId), deletedAt: null },
|
||||
data: { deletedAt: new Date() },
|
||||
})
|
||||
if (result.count > 0) {
|
||||
await tx.deal.update({
|
||||
where: { id: Number(dealId) },
|
||||
data: { commentCount: { decrement: 1 } },
|
||||
})
|
||||
}
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueDealUpdate({ dealId, data, updatedAt }) {
|
||||
if (!dealId || !data || typeof data !== "object") return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `dealUpdate:${dealId}`
|
||||
const payload = JSON.stringify({
|
||||
dealId: Number(dealId),
|
||||
data,
|
||||
updatedAt,
|
||||
})
|
||||
await redis.hset(DEAL_UPDATE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
const patch = sanitizeDealUpdate(data)
|
||||
|
||||
await tryQueue({
|
||||
label: "deal-update",
|
||||
redisAction: () => redis.hset(DEAL_UPDATE_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
if (!Object.keys(patch).length) return
|
||||
await prisma.deal.update({
|
||||
where: { id: Number(dealId) },
|
||||
data: { ...patch, updatedAt: updatedAt ? new Date(updatedAt) : new Date() },
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueDealCreate({ dealId, data, images = [], createdAt }) {
|
||||
if (!dealId || !data || typeof data !== "object") return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `dealCreate:${dealId}`
|
||||
const payload = JSON.stringify({
|
||||
dealId: Number(dealId),
|
||||
|
|
@ -114,30 +262,71 @@ async function queueDealCreate({ dealId, data, images = [], createdAt }) {
|
|||
images: Array.isArray(images) ? images : [],
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(DEAL_CREATE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "deal-create",
|
||||
redisAction: () => redis.hset(DEAL_CREATE_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
const normalized = normalizeDealCreateData({ ...data, id: dealId })
|
||||
await prisma.deal.create({ data: normalized })
|
||||
await dealAnalyticsDb.ensureTotalsForDealIds([Number(dealId)])
|
||||
if (Array.isArray(images) && images.length) {
|
||||
const imagesData = images.map((img) => ({
|
||||
dealId: Number(dealId),
|
||||
imageUrl: String(img.imageUrl || ""),
|
||||
order: Number(img.order || 0),
|
||||
}))
|
||||
await prisma.dealImage.createMany({ data: imagesData })
|
||||
}
|
||||
await prisma.$executeRawUnsafe(
|
||||
'SELECT setval(pg_get_serial_sequence(\'"Deal"\', \'id\'), (SELECT MAX(id) FROM "Deal"))'
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueDealAiReviewUpdate({ dealId, data, updatedAt }) {
|
||||
if (!dealId || !data || typeof data !== "object") return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `dealAiReview:${dealId}`
|
||||
const payload = JSON.stringify({
|
||||
dealId: Number(dealId),
|
||||
data,
|
||||
updatedAt,
|
||||
})
|
||||
await redis.hset(DEAL_AI_REVIEW_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "deal-ai-review",
|
||||
redisAction: () => redis.hset(DEAL_AI_REVIEW_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.dealAiReview.upsert({
|
||||
where: { dealId: Number(dealId) },
|
||||
create: {
|
||||
dealId: Number(dealId),
|
||||
bestCategoryId: Number(data.bestCategoryId) || 0,
|
||||
tags: Array.isArray(data.tags) ? data.tags : [],
|
||||
needsReview: Boolean(data.needsReview),
|
||||
hasIssue: Boolean(data.hasIssue),
|
||||
issueType: String(data.issueType || "NONE"),
|
||||
issueReason: data.issueReason ?? null,
|
||||
},
|
||||
update: {
|
||||
bestCategoryId: Number(data.bestCategoryId) || 0,
|
||||
tags: Array.isArray(data.tags) ? data.tags : [],
|
||||
needsReview: Boolean(data.needsReview),
|
||||
hasIssue: Boolean(data.hasIssue),
|
||||
issueType: String(data.issueType || "NONE"),
|
||||
issueReason: data.issueReason ?? null,
|
||||
},
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueNotificationCreate({ userId, message, type = "INFO", createdAt }) {
|
||||
if (!userId || !message) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `notification:${userId}:${Date.now()}`
|
||||
const payload = JSON.stringify({
|
||||
userId: Number(userId),
|
||||
|
|
@ -145,22 +334,54 @@ async function queueNotificationCreate({ userId, message, type = "INFO", created
|
|||
type: String(type || "INFO"),
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(NOTIFICATION_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "notification-create",
|
||||
redisAction: () => redis.hset(NOTIFICATION_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
await prisma.$transaction(async (tx) => {
|
||||
await tx.notification.create({
|
||||
data: {
|
||||
userId: Number(userId),
|
||||
message: String(message),
|
||||
type: String(type || "INFO"),
|
||||
createdAt: createdAt ? new Date(createdAt) : new Date(),
|
||||
},
|
||||
})
|
||||
await tx.user.update({
|
||||
where: { id: Number(userId) },
|
||||
data: { notificationCount: { increment: 1 } },
|
||||
})
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueNotificationReadAll({ userId, readAt }) {
|
||||
if (!userId) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `notificationRead:${userId}:${Date.now()}`
|
||||
const payload = JSON.stringify({
|
||||
userId: Number(userId),
|
||||
readAt,
|
||||
})
|
||||
await redis.hset(NOTIFICATION_READ_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "notification-read",
|
||||
redisAction: () => redis.hset(NOTIFICATION_READ_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
const readAtDate = readAt ? new Date(readAt) : new Date()
|
||||
await prisma.notification.updateMany({
|
||||
where: {
|
||||
userId: Number(userId),
|
||||
readAt: null,
|
||||
createdAt: { lte: readAtDate },
|
||||
},
|
||||
data: { readAt: readAtDate },
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueDealSaveUpdate({ dealId, userId, action, createdAt }) {
|
||||
|
|
@ -169,7 +390,6 @@ async function queueDealSaveUpdate({ dealId, userId, action, createdAt }) {
|
|||
if (!["SAVE", "UNSAVE"].includes(normalized)) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const field = `dealSave:${dealId}:${userId}`
|
||||
const payload = JSON.stringify({
|
||||
dealId: Number(dealId),
|
||||
|
|
@ -177,14 +397,27 @@ async function queueDealSaveUpdate({ dealId, userId, action, createdAt }) {
|
|||
action: normalized,
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(DEAL_SAVE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "deal-save",
|
||||
redisAction: () => redis.hset(DEAL_SAVE_HASH_KEY, field, payload),
|
||||
fallbackAction: async () => {
|
||||
if (normalized === "SAVE") {
|
||||
await dealSaveDb.upsertDealSave({
|
||||
dealId: Number(dealId),
|
||||
userId: Number(userId),
|
||||
createdAt: createdAt ? new Date(createdAt) : new Date(),
|
||||
})
|
||||
return
|
||||
}
|
||||
await dealSaveDb.deleteDealSave({ dealId: Number(dealId), userId: Number(userId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async function queueAuditEvent({ userId, action, ip, userAgent, meta = null, createdAt }) {
|
||||
if (!action) return
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const field = `audit:${Date.now()}:${Math.random().toString(36).slice(2, 8)}`
|
||||
const payload = JSON.stringify({
|
||||
userId: userId ? Number(userId) : null,
|
||||
|
|
@ -194,28 +427,48 @@ async function queueAuditEvent({ userId, action, ip, userAgent, meta = null, cre
|
|||
meta,
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(AUDIT_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "audit",
|
||||
redisAction: () => redis.hset(AUDIT_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.auditEvent.create({
|
||||
data: {
|
||||
userId: userId ? Number(userId) : null,
|
||||
action: String(action),
|
||||
ip: ip ?? null,
|
||||
userAgent: userAgent ?? null,
|
||||
meta,
|
||||
createdAt: createdAt ? new Date(createdAt) : new Date(),
|
||||
},
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueUserUpdate({ userId, data, updatedAt }) {
|
||||
if (!userId || !data || typeof data !== "object") return
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const field = `userUpdate:${userId}`
|
||||
const payload = JSON.stringify({
|
||||
userId: Number(userId),
|
||||
data,
|
||||
updatedAt,
|
||||
})
|
||||
await redis.hset(USER_UPDATE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "user-update",
|
||||
redisAction: () => redis.hset(USER_UPDATE_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.user.update({
|
||||
where: { id: Number(userId) },
|
||||
data: { ...data, updatedAt: updatedAt ? new Date(updatedAt) : new Date() },
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueUserNoteCreate({ userId, createdById, note, createdAt }) {
|
||||
if (!userId || !createdById || !note) return
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const field = `userNote:${userId}:${Date.now()}`
|
||||
const payload = JSON.stringify({
|
||||
userId: Number(userId),
|
||||
|
|
@ -223,64 +476,112 @@ async function queueUserNoteCreate({ userId, createdById, note, createdAt }) {
|
|||
note: String(note),
|
||||
createdAt,
|
||||
})
|
||||
await redis.hset(USER_NOTE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "user-note",
|
||||
redisAction: () => redis.hset(USER_NOTE_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.userNote.create({
|
||||
data: {
|
||||
userId: Number(userId),
|
||||
createdById: Number(createdById),
|
||||
note: String(note),
|
||||
createdAt: createdAt ? new Date(createdAt) : new Date(),
|
||||
},
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueDealReportStatusUpdate({ reportId, status, updatedAt }) {
|
||||
if (!reportId || !status) return
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const field = `dealReport:${reportId}`
|
||||
const payload = JSON.stringify({
|
||||
reportId: Number(reportId),
|
||||
status: String(status),
|
||||
updatedAt,
|
||||
})
|
||||
await redis.hset(DEAL_REPORT_UPDATE_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "deal-report",
|
||||
redisAction: () => redis.hset(DEAL_REPORT_UPDATE_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.dealReport.update({
|
||||
where: { id: Number(reportId) },
|
||||
data: { status: String(status), updatedAt: updatedAt ? new Date(updatedAt) : new Date() },
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueCategoryUpsert({ categoryId, data, updatedAt }) {
|
||||
if (!categoryId || !data || typeof data !== "object") return
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const field = `category:${categoryId}`
|
||||
const payload = JSON.stringify({
|
||||
categoryId: Number(categoryId),
|
||||
data,
|
||||
updatedAt,
|
||||
})
|
||||
await redis.hset(CATEGORY_UPSERT_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "category-upsert",
|
||||
redisAction: () => redis.hset(CATEGORY_UPSERT_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.category.upsert({
|
||||
where: { id: Number(categoryId) },
|
||||
create: { id: Number(categoryId), ...data },
|
||||
update: data,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueSellerUpsert({ sellerId, data, updatedAt }) {
|
||||
if (!sellerId || !data || typeof data !== "object") return
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const field = `seller:${sellerId}`
|
||||
const payload = JSON.stringify({
|
||||
sellerId: Number(sellerId),
|
||||
data,
|
||||
updatedAt,
|
||||
})
|
||||
await redis.hset(SELLER_UPSERT_HASH_KEY, field, payload)
|
||||
} finally {}
|
||||
|
||||
await tryQueue({
|
||||
label: "seller-upsert",
|
||||
redisAction: () => redis.hset(SELLER_UPSERT_HASH_KEY, field, payload),
|
||||
fallbackAction: () =>
|
||||
prisma.seller.upsert({
|
||||
where: { id: Number(sellerId) },
|
||||
create: { id: Number(sellerId), ...data },
|
||||
update: data,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async function queueSellerDomainUpsert({ sellerId, domain, createdById }) {
  if (!sellerId || !domain || !createdById) return
  const redis = createRedisClient()
  try {
    const field = `sellerDomain:${sellerId}:${String(domain).toLowerCase()}`
    const normalizedDomain = String(domain).toLowerCase()
    const field = `sellerDomain:${sellerId}:${normalizedDomain}`
    const payload = JSON.stringify({
      sellerId: Number(sellerId),
      domain: String(domain).toLowerCase(),
      domain: normalizedDomain,
      createdById: Number(createdById),
    })
    await redis.hset(SELLER_DOMAIN_UPSERT_HASH_KEY, field, payload)
  } finally {}

  await tryQueue({
    label: "seller-domain-upsert",
    redisAction: () => redis.hset(SELLER_DOMAIN_UPSERT_HASH_KEY, field, payload),
    fallbackAction: () =>
      prisma.sellerDomain.upsert({
        where: { domain: normalizedDomain },
        create: {
          domain: normalizedDomain,
          sellerId: Number(sellerId),
          createdById: Number(createdById),
        },
        update: { sellerId: Number(sellerId) },
      }),
  })
}
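These queue helpers all delegate to tryQueue, which is not part of this hunk. A minimal sketch of what such a helper could look like, assuming only the Redis-first / Prisma-fallback contract implied by the call sites above (the name and destructured options come from those calls; everything else is an assumption, not the project's actual implementation):

async function tryQueue({ label, redisAction, fallbackAction }) {
  // Hypothetical sketch: try the Redis write first; if Redis is unreachable,
  // fall back to the direct Prisma write so the data is not lost.
  try {
    await redisAction()
    return { queued: true, label }
  } catch (err) {
    console.error(`${label}: redis queue failed, falling back to DB:`, err?.message || err)
    try {
      await fallbackAction()
      return { queued: false, label }
    } catch (dbErr) {
      console.error(`${label}: DB fallback failed:`, dbErr?.message || dbErr)
      return { queued: false, label, error: dbErr }
    }
  }
}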
|
||||
|
||||
module.exports = {
|
||||
|
|
@ -318,4 +619,5 @@ module.exports = {
|
|||
CATEGORY_UPSERT_HASH_KEY,
|
||||
SELLER_UPSERT_HASH_KEY,
|
||||
SELLER_DOMAIN_UPSERT_HASH_KEY,
|
||||
DEAL_ANALYTICS_TOTAL_HASH_KEY,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,9 +2,8 @@ const { randomUUID } = require("crypto")
|
|||
const { getRedisClient } = require("./client")
|
||||
const dealAnalyticsDb = require("../../db/dealAnalytics.db")
|
||||
const { ensureMinDealTtl } = require("./dealCache.service")
|
||||
|
||||
const DEAL_EVENT_HASH_KEY = "dbsync:dealEvents"
|
||||
const DEAL_ANALYTICS_TOTAL_PREFIX = "data:deals:analytics:total:"
|
||||
const { DEAL_ANALYTICS_TOTAL_HASH_KEY } = require("./dbSync.service")
|
||||
const DEAL_ANALYTICS_TOTAL_PREFIX = "deals:analytics:total:"
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
|
|
@ -29,6 +28,21 @@ function isValidEventType(type) {
|
|||
return ["IMPRESSION", "VIEW", "CLICK"].includes(normalized)
|
||||
}
|
||||
|
||||
function aggregateEventIncrements(events = []) {
  const byDeal = new Map()
  for (const event of events) {
    const dealId = Number(event.dealId)
    if (!Number.isInteger(dealId) || dealId <= 0) continue
    const type = String(event.type || "").toUpperCase()
    const entry = byDeal.get(dealId) || { dealId, impressions: 0, views: 0, clicks: 0 }
    if (type === "IMPRESSION") entry.impressions += 1
    else if (type === "VIEW") entry.views += 1
    else if (type === "CLICK") entry.clicks += 1
    byDeal.set(dealId, entry)
  }
  return Array.from(byDeal.values())
}
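For illustration, a quick check of how the aggregation collapses raw events into one increment row per deal (the sample events below are made up):

// Hypothetical input: three events across two deals.
const sample = [
  { dealId: 7, type: "IMPRESSION" },
  { dealId: 7, type: "CLICK" },
  { dealId: 9, type: "view" }, // type comparison is case-insensitive thanks to toUpperCase()
]
console.log(aggregateEventIncrements(sample))
// [ { dealId: 7, impressions: 1, views: 0, clicks: 1 },
//   { dealId: 9, impressions: 0, views: 1, clicks: 0 } ]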
|
||||
|
||||
async function seedDealAnalyticsTotals({ dealIds = [] } = {}) {
|
||||
const ids = normalizeIds(dealIds)
|
||||
if (!ids.length) return 0
|
||||
|
|
@@ -75,22 +89,26 @@ async function queueDealEvents(events = []) {
  )
  if (!valid.length) return 0

  const increments = aggregateEventIncrements(valid)
  if (!increments.length) return 0

  const redis = createRedisClient()
  try {
    await incrementDealAnalyticsTotalsInRedis(increments)
    const pipeline = redis.pipeline()
    valid.forEach((event) => {
      const field = `dealEvent:${randomUUID()}`
      const payload = JSON.stringify({
        dealId: Number(event.dealId),
        type: String(event.type).toUpperCase(),
        userId: event.userId ? Number(event.userId) : null,
        ip: event.ip ? String(event.ip) : null,
        createdAt: event.createdAt || new Date().toISOString(),
      })
      pipeline.hset(DEAL_EVENT_HASH_KEY, field, payload)
    increments.forEach((entry) => {
      const field = `dealTotals:${entry.dealId}:${randomUUID()}`
      pipeline.hset(DEAL_ANALYTICS_TOTAL_HASH_KEY, field, JSON.stringify(entry))
    })
    await pipeline.exec()
    return valid.length
  } catch {
    try {
      await dealAnalyticsDb.applyDealTotalsBatch(increments)
      return valid.length
    } catch {
      return 0
    }
  } finally {}
}
|
||||
|
||||
|
|
@ -154,6 +172,8 @@ async function incrementDealAnalyticsTotalsInRedis(increments = []) {
|
|||
})
|
||||
await pipeline.exec()
|
||||
return data.length
|
||||
} catch {
|
||||
return 0
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -164,5 +184,5 @@ module.exports = {
|
|||
queueDealView,
|
||||
queueDealClick,
|
||||
incrementDealAnalyticsTotalsInRedis,
|
||||
DEAL_EVENT_HASH_KEY,
|
||||
DEAL_ANALYTICS_TOTAL_HASH_KEY,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,11 +10,10 @@ const {
|
|||
ensureUserMinTtl,
|
||||
} = require("./userPublicCache.service")
|
||||
|
||||
const DEAL_KEY_PREFIX = "data:deals:"
|
||||
const DEAL_VOTE_HASH_PREFIX = "data:deals:votes:"
|
||||
const DEAL_ANALYTICS_TOTAL_PREFIX = "data:deals:analytics:total:"
|
||||
const COMMENT_LOOKUP_KEY = "data:comments:lookup"
|
||||
const COMMENT_IDS_KEY = "data:comments:ids"
|
||||
const DEAL_KEY_PREFIX = "deals:cache:"
|
||||
const DEAL_ANALYTICS_TOTAL_PREFIX = "deals:analytics:total:"
|
||||
const COMMENT_LOOKUP_KEY = "comments:lookup"
|
||||
const COMMENT_IDS_KEY = "comments:ids"
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
|
|
@ -46,19 +45,10 @@ async function getAnalyticsTotalsForDeal(dealId) {
|
|||
}
|
||||
|
||||
async function cacheVotesAndAnalytics(redis, dealId, payload, { ttlSeconds, skipDbEnsure } = {}) {
|
||||
const voteKey = `${DEAL_VOTE_HASH_PREFIX}${dealId}`
|
||||
const analyticsKey = `${DEAL_ANALYTICS_TOTAL_PREFIX}${dealId}`
|
||||
|
||||
const pipeline = redis.pipeline()
|
||||
|
||||
pipeline.del(voteKey)
|
||||
if (Array.isArray(payload?.votes) && payload.votes.length) {
|
||||
payload.votes.forEach((vote) => {
|
||||
if (!vote?.userId) return
|
||||
pipeline.hset(voteKey, String(vote.userId), String(vote.voteType ?? 0))
|
||||
})
|
||||
}
|
||||
|
||||
const totals = skipDbEnsure
|
||||
? { impressions: 0, views: 0, clicks: 0 }
|
||||
: await getAnalyticsTotalsForDeal(dealId)
|
||||
|
|
@ -73,13 +63,14 @@ async function cacheVotesAndAnalytics(redis, dealId, payload, { ttlSeconds, skip
|
|||
)
|
||||
|
||||
if (ttlSeconds) {
|
||||
if (Array.isArray(payload?.votes) && payload.votes.length) {
|
||||
pipeline.expire(voteKey, Number(ttlSeconds))
|
||||
}
|
||||
pipeline.expire(analyticsKey, Number(ttlSeconds))
|
||||
}
|
||||
|
||||
try {
|
||||
await pipeline.exec()
|
||||
} catch {
|
||||
// ignore cache failures
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureMinDealTtl(dealId, { minSeconds = 15 * 60 } = {}) {
|
||||
|
|
@ -87,7 +78,6 @@ async function ensureMinDealTtl(dealId, { minSeconds = 15 * 60 } = {}) {
|
|||
if (!Number.isInteger(id) || id <= 0) return { bumped: false }
|
||||
const redis = createRedisClient()
|
||||
const key = `${DEAL_KEY_PREFIX}${id}`
|
||||
const voteKey = `${DEAL_VOTE_HASH_PREFIX}${id}`
|
||||
const analyticsKey = `${DEAL_ANALYTICS_TOTAL_PREFIX}${id}`
|
||||
const minTtl = Math.max(1, Number(minSeconds) || 15 * 60)
|
||||
|
||||
|
|
@ -98,12 +88,13 @@ async function ensureMinDealTtl(dealId, { minSeconds = 15 * 60 } = {}) {
|
|||
const nextTtl = minTtl
|
||||
const pipeline = redis.pipeline()
|
||||
pipeline.expire(key, nextTtl)
|
||||
pipeline.expire(voteKey, nextTtl)
|
||||
pipeline.expire(analyticsKey, nextTtl)
|
||||
await pipeline.exec()
|
||||
return { bumped: true, ttl: nextTtl }
|
||||
}
|
||||
return { bumped: false, ttl }
|
||||
} catch {
|
||||
return { bumped: false }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -119,27 +110,10 @@ async function updateDealSavesInRedis({ dealId, userId, action, createdAt, minSe
|
|||
const redis = createRedisClient()
|
||||
const key = `${DEAL_KEY_PREFIX}${id}`
|
||||
try {
|
||||
const raw = await redis.call("JSON.GET", key, "$.savedBy")
|
||||
let savedBy = []
|
||||
if (raw) {
|
||||
const parsed = JSON.parse(raw)
|
||||
const arr = Array.isArray(parsed) ? parsed[0] : []
|
||||
savedBy = Array.isArray(arr) ? arr : []
|
||||
}
|
||||
|
||||
const exists = savedBy.some((s) => Number(s?.userId) === uid)
|
||||
if (normalized === "SAVE" && !exists) {
|
||||
savedBy = [
|
||||
{ userId: uid, createdAt: createdAt ? toIso(createdAt) : new Date().toISOString() },
|
||||
...savedBy,
|
||||
]
|
||||
} else if (normalized === "UNSAVE" && exists) {
|
||||
savedBy = savedBy.filter((s) => Number(s?.userId) !== uid)
|
||||
}
|
||||
|
||||
await redis.call("JSON.SET", key, "$.savedBy", JSON.stringify(savedBy))
|
||||
await ensureMinDealTtl(id, { minSeconds })
|
||||
return { updated: true }
|
||||
} catch {
|
||||
return { updated: false }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -184,6 +158,8 @@ async function getDealFromRedis(dealId) {
|
|||
}
|
||||
}
|
||||
return deal
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -208,8 +184,6 @@ async function cacheDealFromDb(dealId, { ttlSeconds = 1800 } = {}) {
|
|||
},
|
||||
images: { orderBy: { order: "asc" }, select: { id: true, imageUrl: true, order: true } },
|
||||
dealTags: { include: { tag: { select: { id: true, slug: true, name: true } } } },
|
||||
votes: { select: { userId: true, voteType: true } },
|
||||
savedBy: { select: { userId: true, createdAt: true } },
|
||||
comments: {
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
|
|
@ -252,6 +226,8 @@ async function cacheDealFromDb(dealId, { ttlSeconds = 1800 } = {}) {
|
|||
}
|
||||
await pipeline.exec()
|
||||
await cacheVotesAndAnalytics(redis, deal.id, payload, { ttlSeconds })
|
||||
} catch {
|
||||
// ignore cache failures
|
||||
} finally {}
|
||||
if (deal.user) {
|
||||
await ensureUserMinTtl(deal.user.id, { minSeconds: ttlSeconds })
|
||||
|
|
@ -267,6 +243,8 @@ async function getDealIdByCommentId(commentId) {
|
|||
await recordCacheMiss({ key: `${COMMENT_LOOKUP_KEY}:${commentId}`, label: "comment-lookup" })
|
||||
}
|
||||
return raw ? Number(raw) : null
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -307,6 +285,8 @@ async function updateDealInRedis(dealId, patch = {}, { updatedAt = new Date() }
|
|||
|
||||
const raw = await redis.call("JSON.GET", key)
|
||||
return raw ? JSON.parse(raw) : null
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -336,6 +316,8 @@ async function setDealInRedis(
|
|||
skipDbEnsure: skipAnalyticsInit,
|
||||
})
|
||||
return payload
|
||||
} catch {
|
||||
return payload
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
const prisma = require("../../db/client")
const { ensureCounterAtLeast, nextId } = require("./idGenerator.service")
const DEAL_ID_KEY = "ids:deal"
const DEAL_ID_KEY = "data:ids:deal"

async function ensureDealIdCounter() {
  const latest = await prisma.deal.findFirst({
@@ -12,7 +12,15 @@ async function ensureDealIdCounter() {
}

async function generateDealId() {
  return nextId(DEAL_ID_KEY)
  try {
    return await nextId(DEAL_ID_KEY)
  } catch {
    const latest = await prisma.deal.findFirst({
      select: { id: true },
      orderBy: { id: "desc" },
    })
    return (latest?.id ?? 0) + 1
  }
}

module.exports = { ensureDealIdCounter, generateDealId }
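A usage sketch of the fallback added here (the caller is made up): when the Redis counter is unreachable, generateDealId now falls back to MAX(id) + 1 read through Prisma. As a design caveat not stated in the diff, that fallback can hand the same id to two concurrent creators while Redis is down, so it is best-effort rather than a strict guarantee.

const prisma = require("../../db/client") // path assumed, matching the require at the top of this file
const { generateDealId } = require("./dealId.service") // module name assumed

async function createDealRow(data) {
  const id = await generateDealId() // Redis counter when available, MAX(id) + 1 otherwise
  return prisma.deal.create({ data: { id, ...data } })
}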
|
||||
|
|
|
|||
|
|
@ -7,13 +7,12 @@ const { setUsersPublicInRedis } = require("./userPublicCache.service")
|
|||
const { setBadgesInRedis } = require("./badgeCache.service")
|
||||
const badgeDb = require("../../db/badge.db")
|
||||
|
||||
const DEAL_KEY_PREFIX = "data:deals:"
|
||||
const DEAL_VOTE_HASH_PREFIX = "data:deals:votes:"
|
||||
const DEAL_ANALYTICS_TOTAL_PREFIX = "data:deals:analytics:total:"
|
||||
const COMMENT_LOOKUP_KEY = "data:comments:lookup"
|
||||
const COMMENT_IDS_KEY = "data:comments:ids"
|
||||
const DEAL_KEY_PREFIX = "deals:cache:"
|
||||
const DEAL_ANALYTICS_TOTAL_PREFIX = "deals:analytics:total:"
|
||||
const COMMENT_LOOKUP_KEY = "comments:lookup"
|
||||
const COMMENT_IDS_KEY = "comments:ids"
|
||||
const SELLERS_KEY = "data:sellers"
|
||||
const SELLER_DOMAINS_KEY = "data:sellerdomains"
|
||||
const SELLER_DOMAINS_KEY = "data:seller:domains"
|
||||
const CATEGORIES_KEY = "data:categories"
|
||||
|
||||
function createRedisClient() {
|
||||
|
|
@ -43,14 +42,6 @@ function mapDealToRedisJson(deal) {
|
|||
}))
|
||||
: []
|
||||
|
||||
const votes =
|
||||
Array.isArray(deal.votes) && deal.votes.length
|
||||
? deal.votes.map((vote) => ({
|
||||
userId: vote.userId,
|
||||
voteType: vote.voteType,
|
||||
}))
|
||||
: []
|
||||
|
||||
const commentsRaw = Array.isArray(deal.comments) ? deal.comments : []
|
||||
const repliesCountByParent = new Map()
|
||||
commentsRaw.forEach((comment) => {
|
||||
|
|
@ -86,14 +77,6 @@ function mapDealToRedisJson(deal) {
|
|||
}))
|
||||
: []
|
||||
|
||||
const savedBy =
|
||||
Array.isArray(deal.savedBy) && deal.savedBy.length
|
||||
? deal.savedBy.map((save) => ({
|
||||
userId: save.userId,
|
||||
createdAt: toIso(save.createdAt),
|
||||
}))
|
||||
: []
|
||||
|
||||
return {
|
||||
id: deal.id,
|
||||
title: deal.title,
|
||||
|
|
@ -108,6 +91,7 @@ function mapDealToRedisJson(deal) {
|
|||
location: deal.location ?? null,
|
||||
discountType: deal.discountType ?? null,
|
||||
discountValue: deal.discountValue ?? null,
|
||||
barcodeId: deal.barcodeId ?? null,
|
||||
maxNotifiedMilestone: Number.isFinite(deal.maxNotifiedMilestone)
|
||||
? deal.maxNotifiedMilestone
|
||||
: 0,
|
||||
|
|
@ -132,8 +116,6 @@ function mapDealToRedisJson(deal) {
|
|||
}))
|
||||
: [],
|
||||
tags,
|
||||
votes,
|
||||
savedBy,
|
||||
comments,
|
||||
aiReview: deal.aiReview
|
||||
? {
|
||||
|
|
@ -175,8 +157,6 @@ async function seedRecentDealsToRedis({ days = 30, ttlDays = 31, batchSize = 200
|
|||
},
|
||||
images: { orderBy: { order: "asc" }, select: { id: true, imageUrl: true, order: true } },
|
||||
dealTags: { include: { tag: { select: { id: true, slug: true, name: true } } } },
|
||||
votes: { select: { userId: true, voteType: true } },
|
||||
savedBy: { select: { userId: true, createdAt: true } },
|
||||
comments: {
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
|
|
@ -262,17 +242,6 @@ async function seedRecentDealsToRedis({ days = 30, ttlDays = 31, batchSize = 200
|
|||
cmdIndex += 1
|
||||
})
|
||||
}
|
||||
if (Array.isArray(deal.votes) && deal.votes.length) {
|
||||
deal.votes.forEach((vote) => {
|
||||
if (!vote?.userId) return
|
||||
pipeline.hset(
|
||||
`${DEAL_VOTE_HASH_PREFIX}${deal.id}`,
|
||||
String(vote.userId),
|
||||
String(vote.voteType ?? 0)
|
||||
)
|
||||
cmdIndex += 1
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Redis seed skip deal:", deal?.id, err?.message || err)
|
||||
}
|
||||
|
|
@ -288,16 +257,11 @@ async function seedRecentDealsToRedis({ days = 30, ttlDays = 31, batchSize = 200
|
|||
const ttlSeconds = Math.ceil(ttlMs / 1000)
|
||||
|
||||
const dealKey = `${DEAL_KEY_PREFIX}${deal.id}`
|
||||
const voteKey = `${DEAL_VOTE_HASH_PREFIX}${deal.id}`
|
||||
const analyticsKey = `${DEAL_ANALYTICS_TOTAL_PREFIX}${deal.id}`
|
||||
const dealTtl = await redis.ttl(dealKey)
|
||||
if (dealTtl === -1) {
|
||||
await redis.expire(dealKey, ttlSeconds)
|
||||
}
|
||||
const voteTtl = await redis.ttl(voteKey)
|
||||
if (voteTtl === -1) {
|
||||
await redis.expire(voteKey, ttlSeconds)
|
||||
}
|
||||
const analyticsTtl = await redis.ttl(analyticsKey)
|
||||
if (analyticsTtl === -1) {
|
||||
await redis.expire(analyticsKey, ttlSeconds)
|
||||
|
|
|
|||
|
|
@ -176,7 +176,7 @@ async function aggregatePriceRange(query) {
|
|||
try {
|
||||
const results = await redis.call(
|
||||
"FT.AGGREGATE",
|
||||
"idx:data:deals",
|
||||
"idx:deals",
|
||||
query || "*",
|
||||
"GROUPBY",
|
||||
"0",
|
||||
|
|
@ -212,6 +212,8 @@ async function aggregatePriceRange(query) {
|
|||
minPrice: Number.isFinite(min) ? min : null,
|
||||
maxPrice: Number.isFinite(max) ? max : null,
|
||||
}
|
||||
} catch {
|
||||
return { minPrice: null, maxPrice: null }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -233,7 +235,7 @@ async function searchDeals({
|
|||
const range = includeMinMax ? await aggregatePriceRange(query) : { minPrice: null, maxPrice: null }
|
||||
const results = await redis.call(
|
||||
"FT.SEARCH",
|
||||
"idx:data:deals",
|
||||
"idx:deals",
|
||||
query || "*",
|
||||
"SORTBY",
|
||||
sort.field,
|
||||
|
|
@ -264,6 +266,8 @@ async function searchDeals({
|
|||
minPrice: range.minPrice,
|
||||
maxPrice: range.maxPrice,
|
||||
}
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,19 +1,20 @@
|
|||
const { getRedisClient } = require("./client")
|
||||
const dealDb = require("../../db/deal.db")
|
||||
const { ensureMinDealTtl } = require("./dealCache.service")
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
}
|
||||
|
||||
const DEAL_VOTE_HASH_PREFIX = "data:deals:votes:"
|
||||
const USER_VOTE_HASH_PREFIX = "users:votes:"
|
||||
const USER_VOTE_TTL_SECONDS = 6 * 60 * 60
|
||||
|
||||
async function updateDealVoteInRedis({ dealId, userId, voteType, score }) {
|
||||
if (!dealId || !userId) return
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const key = `data:deals:${dealId}`
|
||||
const voteKey = `${DEAL_VOTE_HASH_PREFIX}${dealId}`
|
||||
const key = `deals:cache:${dealId}`
|
||||
const raw = await redis.call("JSON.GET", key)
|
||||
if (!raw) return { updated: false, delta: 0, score: null }
|
||||
|
||||
|
|
@ -23,33 +24,28 @@ async function updateDealVoteInRedis({ dealId, userId, voteType, score }) {
|
|||
? Number(deal.maxNotifiedMilestone)
|
||||
: 0
|
||||
const dealUserId = Number(deal?.userId)
|
||||
const rawVotes = deal?.votes ?? []
|
||||
|
||||
let votes = []
|
||||
votes = Array.isArray(rawVotes) ? rawVotes : []
|
||||
const normalizedUserId = Number(userId)
|
||||
const normalizedVoteType = Number(voteType)
|
||||
const idx = votes.findIndex((vote) => Number(vote.userId) === normalizedUserId)
|
||||
const oldVote = idx >= 0 ? Number(votes[idx]?.voteType ?? 0) : 0
|
||||
if (idx >= 0) {
|
||||
votes[idx] = { userId: normalizedUserId, voteType: normalizedVoteType }
|
||||
} else {
|
||||
votes.push({ userId: normalizedUserId, voteType: normalizedVoteType })
|
||||
}
|
||||
|
||||
await redis.call("JSON.SET", key, "$.votes", JSON.stringify(votes))
|
||||
const oldRaw = await redis.hget(
|
||||
`${USER_VOTE_HASH_PREFIX}${normalizedUserId}`,
|
||||
String(dealId)
|
||||
)
|
||||
const oldVote = oldRaw == null ? 0 : Number(oldRaw)
|
||||
const delta = normalizedVoteType - oldVote
|
||||
const nextScore =
|
||||
score !== undefined && score !== null ? Number(score) : currentScore + delta
|
||||
await redis.call("JSON.SET", key, "$.score", nextScore)
|
||||
await redis.hset(voteKey, String(normalizedUserId), String(normalizedVoteType))
|
||||
const dealTtl = await redis.ttl(key)
|
||||
if (Number.isFinite(dealTtl) && dealTtl > 0) {
|
||||
await redis.expire(voteKey, dealTtl)
|
||||
if (normalizedVoteType === 0) {
|
||||
await redis.hdel(`${USER_VOTE_HASH_PREFIX}${normalizedUserId}`, String(dealId))
|
||||
} else {
|
||||
await redis.hset(`${USER_VOTE_HASH_PREFIX}${normalizedUserId}`, String(dealId), String(normalizedVoteType))
|
||||
await redis.expire(`${USER_VOTE_HASH_PREFIX}${normalizedUserId}`, USER_VOTE_TTL_SECONDS)
|
||||
}
|
||||
await ensureMinDealTtl(dealId, { minSeconds: 15 * 60 })
|
||||
|
||||
return { updated: true, delta, score: nextScore, maxNotifiedMilestone, dealUserId }
|
||||
} catch {
|
||||
return { updated: false, delta: 0, score: null }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -59,10 +55,36 @@ async function getDealVoteFromRedis(dealId, userId) {
|
|||
if (!Number.isInteger(id) || !Number.isInteger(uid)) return 0
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const voteKey = `${DEAL_VOTE_HASH_PREFIX}${id}`
|
||||
const raw = await redis.hget(voteKey, String(uid))
|
||||
const value = raw == null ? 0 : Number(raw)
|
||||
const userKey = `${USER_VOTE_HASH_PREFIX}${uid}`
|
||||
const exists = await redis.exists(userKey)
|
||||
if (!exists) {
|
||||
const dbVotes = await dealDb.findVotes(
|
||||
{ userId: Number(uid) },
|
||||
{ select: { dealId: true, voteType: true } }
|
||||
)
|
||||
const pipeline = redis.pipeline()
|
||||
dbVotes.forEach((vote) => {
|
||||
const did = Number(vote.dealId)
|
||||
const v = Number(vote.voteType)
|
||||
if (!Number.isInteger(did)) return
|
||||
if (Number.isFinite(v) && v !== 0) {
|
||||
pipeline.hset(userKey, String(did), String(v))
|
||||
}
|
||||
})
|
||||
pipeline.expire(userKey, USER_VOTE_TTL_SECONDS)
|
||||
await pipeline.exec()
|
||||
const fromDb = dbVotes.find((v) => Number(v.dealId) === id)
|
||||
const dbVote = Number(fromDb?.voteType ?? 0)
|
||||
return Number.isFinite(dbVote) ? dbVote : 0
|
||||
}
|
||||
const raw = await redis.hget(userKey, String(id))
|
||||
if (raw != null) {
|
||||
const value = Number(raw)
|
||||
return Number.isFinite(value) ? value : 0
|
||||
}
|
||||
return 0
|
||||
} catch {
|
||||
return 0
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -76,17 +98,39 @@ async function getMyVotesForDeals(dealIds = [], userId) {
|
|||
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const pipeline = redis.pipeline()
|
||||
ids.forEach((id) => {
|
||||
pipeline.hget(`${DEAL_VOTE_HASH_PREFIX}${id}`, String(uid))
|
||||
})
|
||||
const results = await pipeline.exec()
|
||||
const userKey = `${USER_VOTE_HASH_PREFIX}${uid}`
|
||||
const exists = await redis.exists(userKey)
|
||||
if (exists) {
|
||||
const results = await redis.hmget(userKey, ids.map(String))
|
||||
const map = new Map()
|
||||
results.forEach(([, raw], idx) => {
|
||||
results.forEach((raw, idx) => {
|
||||
const value = raw == null ? 0 : Number(raw)
|
||||
map.set(ids[idx], Number.isFinite(value) ? value : 0)
|
||||
})
|
||||
return map
|
||||
}
|
||||
|
||||
const dbVotes = await dealDb.findVotes(
|
||||
{ dealId: { in: ids }, userId: Number(uid) },
|
||||
{ select: { dealId: true, voteType: true } }
|
||||
)
|
||||
const map = new Map()
|
||||
ids.forEach((id) => map.set(id, 0))
|
||||
const pipeline = redis.pipeline()
|
||||
dbVotes.forEach((vote) => {
|
||||
const did = Number(vote.dealId)
|
||||
const v = Number(vote.voteType)
|
||||
if (!Number.isInteger(did)) return
|
||||
if (Number.isFinite(v) && v !== 0) {
|
||||
map.set(did, v)
|
||||
pipeline.hset(userKey, String(did), String(v))
|
||||
}
|
||||
})
|
||||
pipeline.expire(userKey, USER_VOTE_TTL_SECONDS)
|
||||
await pipeline.exec()
|
||||
return map
|
||||
} catch {
|
||||
return new Map()
|
||||
} finally {}
|
||||
}
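A usage sketch for the per-user vote hash introduced above: the first lookup for a user warms users:votes:<userId> from the database, and later lookups stay in Redis for the 6-hour TTL (the user id and deal ids below are made up):

async function demoMyVotes() {
  const voteMap = await getMyVotesForDeals([101, 102, 103], 42)
  console.log(voteMap.get(101) ?? 0) // 1, -1 or 0; deals the user never voted on stay at 0
}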
|
||||
|
||||
|
|
|
|||
|
|
@ -8,9 +8,27 @@ function createRedisClient() {
|
|||
return getRedisClient()
|
||||
}
|
||||
|
||||
const USER_SAVED_HASH_PREFIX = "users:savedmap:"
|
||||
|
||||
async function getUserSavedMap(redis, dealIds = [], viewerId = null) {
|
||||
const uid = Number(viewerId)
|
||||
if (!Number.isInteger(uid) || !dealIds.length) return new Map()
|
||||
try {
|
||||
const results = await redis.hmget(`${USER_SAVED_HASH_PREFIX}${uid}`, dealIds.map(String))
|
||||
const map = new Map()
|
||||
results.forEach((raw, idx) => {
|
||||
const value = raw == null ? 0 : Number(raw)
|
||||
if (value) map.set(Number(dealIds[idx]), true)
|
||||
})
|
||||
return map
|
||||
} catch {
|
||||
return new Map()
|
||||
}
|
||||
}
|
||||
|
||||
async function getHotDealListId(redis, hotListId) {
|
||||
if (hotListId) return String(hotListId)
|
||||
const latest = await redis.get("lists:hot:latest")
|
||||
const latest = await redis.get("deals:lists:hot:latest")
|
||||
return latest ? String(latest) : null
|
||||
}
|
||||
|
||||
|
|
@ -27,7 +45,7 @@ async function getHotDealIds({ hotListId } = {}) {
|
|||
const listId = await getHotDealListId(redis, hotListId)
|
||||
if (!listId) return { hotListId: null, dealIds: [] }
|
||||
|
||||
const key = `lists:hot:${listId}`
|
||||
const key = `deals:lists:hot:${listId}`
|
||||
const raw = await redis.call("JSON.GET", key, "$.dealIds")
|
||||
if (!raw) return { hotListId: listId, dealIds: [] }
|
||||
|
||||
|
|
@ -38,6 +56,8 @@ async function getHotDealIds({ hotListId } = {}) {
|
|||
hotListId: listId,
|
||||
dealIds: Array.isArray(dealIds) ? dealIds.map((id) => Number(id)) : [],
|
||||
}
|
||||
} catch {
|
||||
return { hotListId: null, dealIds: [] }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -48,7 +68,7 @@ async function getDealsByIdsFromRedis(ids = [], viewerId = null) {
|
|||
try {
|
||||
const pipeline = redis.pipeline()
|
||||
ids.forEach((id) => {
|
||||
pipeline.call("JSON.GET", `data:deals:${id}`)
|
||||
pipeline.call("JSON.GET", `deals:cache:${id}`)
|
||||
})
|
||||
|
||||
const results = await pipeline.exec()
|
||||
|
|
@ -76,6 +96,7 @@ async function getDealsByIdsFromRedis(ids = [], viewerId = null) {
|
|||
.filter((id) => Number.isInteger(id) && id > 0)
|
||||
const sellerMap = sellerIds.length ? await getSellersByIds(sellerIds) : new Map()
|
||||
const voteMap = viewerId ? await getMyVotesForDeals(ordered.map((d) => d.id), viewerId) : new Map()
|
||||
const savedMap = viewerId ? await getUserSavedMap(redis, ordered.map((d) => d.id), viewerId) : new Map()
|
||||
|
||||
const userIds = ordered
|
||||
.map((deal) => Number(deal?.userId))
|
||||
|
|
@ -89,7 +110,7 @@ async function getDealsByIdsFromRedis(ids = [], viewerId = null) {
|
|||
const missingSet = new Set(missingUserIds)
|
||||
const ttlPipeline = redis.pipeline()
|
||||
ordered.forEach((deal) => {
|
||||
ttlPipeline.ttl(`data:deals:${deal.id}`)
|
||||
ttlPipeline.ttl(`deals:cache:${deal.id}`)
|
||||
})
|
||||
const ttlResults = await ttlPipeline.exec()
|
||||
const ttlByDealId = new Map()
|
||||
|
|
@ -139,21 +160,20 @@ async function getDealsByIdsFromRedis(ids = [], viewerId = null) {
|
|||
if (seller) next = { ...next, seller }
|
||||
}
|
||||
const myVote = viewerId ? Number(voteMap.get(Number(next.id)) ?? 0) : 0
|
||||
const isSaved = viewerId
|
||||
? Array.isArray(next.savedBy) &&
|
||||
next.savedBy.some((s) => Number(s?.userId) === Number(viewerId))
|
||||
: false
|
||||
const isSaved = viewerId ? savedMap.get(Number(next.id)) === true : false
|
||||
return { ...next, myVote, isSaved }
|
||||
})
|
||||
|
||||
return enriched
|
||||
} catch {
|
||||
return []
|
||||
} finally {}
|
||||
}
|
||||
|
||||
async function getDealByIdFromRedis(id, viewerId = null) {
|
||||
const redis = createRedisClient()
|
||||
try {
|
||||
const raw = await redis.call("JSON.GET", `data:deals:${id}`)
|
||||
const raw = await redis.call("JSON.GET", `deals:cache:${id}`)
|
||||
if (!raw) return null
|
||||
let deal = JSON.parse(raw)
|
||||
if (deal?.sellerId && !deal?.seller) {
|
||||
|
|
@ -162,12 +182,13 @@ async function getDealByIdFromRedis(id, viewerId = null) {
|
|||
}
|
||||
if (viewerId) {
|
||||
const voteMap = await getMyVotesForDeals([deal.id], viewerId)
|
||||
const isSaved = Array.isArray(deal.savedBy)
|
||||
? deal.savedBy.some((s) => Number(s?.userId) === Number(viewerId))
|
||||
: false
|
||||
const savedMap = await getUserSavedMap(redis, [deal.id], viewerId)
|
||||
const isSaved = savedMap.get(Number(deal.id)) === true
|
||||
deal = { ...deal, myVote: Number(voteMap.get(Number(deal.id)) ?? 0), isSaved }
|
||||
}
|
||||
return deal
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -177,11 +198,11 @@ async function getHotRangeDealIds({ range, listId } = {}) {
|
|||
try {
|
||||
const prefix =
|
||||
range === "day"
|
||||
? "lists:hot_day"
|
||||
? "deals:lists:hot_day"
|
||||
: range === "week"
|
||||
? "lists:hot_week"
|
||||
? "deals:lists:hot_week"
|
||||
: range === "month"
|
||||
? "lists:hot_month"
|
||||
? "deals:lists:hot_month"
|
||||
: null
|
||||
if (!prefix) return { listId: null, dealIds: [] }
|
||||
|
||||
|
|
@ -199,6 +220,8 @@ async function getHotRangeDealIds({ range, listId } = {}) {
|
|||
listId: resolvedId,
|
||||
dealIds: Array.isArray(dealIds) ? dealIds.map((id) => Number(id)) : [],
|
||||
}
|
||||
} catch {
|
||||
return { listId: null, dealIds: [] }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
services/redis/linkPreviewCache.service.js (new file, 53 lines)

@@ -0,0 +1,53 @@
const { getRedisClient } = require("./client")

const BARCODE_KEY_PREFIX = "linkpreview:barcode:"
const DEFAULT_TTL_SECONDS = 15 * 60

function createRedisClient() {
  return getRedisClient()
}

function normalizeUrl(rawUrl) {
  if (!rawUrl || typeof rawUrl !== "string") return null
  try {
    const url = new URL(rawUrl)
    const hostname = url.hostname.toLowerCase()
    const pathname = url.pathname || "/"
    return `${url.protocol}//${hostname}${pathname}`
  } catch {
    return null
  }
}

async function setBarcodeForUrl(url, barcodeId, { ttlSeconds = DEFAULT_TTL_SECONDS } = {}) {
  const normalized = normalizeUrl(url)
  if (!normalized) return { saved: false }
  if (barcodeId === undefined || barcodeId === null || barcodeId === "") return { saved: false }

  const redis = createRedisClient()
  const key = `${BARCODE_KEY_PREFIX}${normalized}`
  try {
    await redis.set(key, String(barcodeId), "EX", Number(ttlSeconds) || DEFAULT_TTL_SECONDS)
    return { saved: true }
  } catch {
    return { saved: false }
  } finally {}
}

async function getBarcodeForUrl(url) {
  const normalized = normalizeUrl(url)
  if (!normalized) return null
  const redis = createRedisClient()
  const key = `${BARCODE_KEY_PREFIX}${normalized}`
  try {
    const raw = await redis.get(key)
    return raw ?? null
  } catch {
    return null
  } finally {}
}

module.exports = {
  setBarcodeForUrl,
  getBarcodeForUrl,
}
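A short usage sketch for the barcode cache above (the URL and barcode value are made up):

const { setBarcodeForUrl, getBarcodeForUrl } = require("./services/redis/linkPreviewCache.service") // path assumed

async function demoBarcodeCache() {
  // normalizeUrl drops the query string and lowercases the host, so both calls resolve to the same key.
  await setBarcodeForUrl("https://Example.com/product/123?ref=mail", "8690000000001", { ttlSeconds: 600 })
  const barcode = await getBarcodeForUrl("https://example.com/product/123")
  console.log(barcode) // "8690000000001" until the TTL expires, otherwise null
}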
|
||||
services/redis/linkPreviewImageCache.service.js (new file, 86 lines)
|
|
@ -0,0 +1,86 @@
|
|||
const crypto = require("crypto")
|
||||
const axios = require("axios")
|
||||
const { getRedisClient } = require("./client")
|
||||
|
||||
const IMAGE_KEY_PREFIX = "cache:deal_create:image:"
|
||||
const DEFAULT_TTL_SECONDS = 5 * 60
|
||||
const MAX_IMAGE_BYTES = 2 * 1024 * 1024
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
}
|
||||
|
||||
function normalizeUrl(rawUrl) {
|
||||
if (!rawUrl || typeof rawUrl !== "string") return null
|
||||
try {
|
||||
const url = new URL(rawUrl)
|
||||
if (!["http:", "https:"].includes(url.protocol)) return null
|
||||
const hostname = url.hostname.toLowerCase()
|
||||
const pathname = url.pathname || "/"
|
||||
const search = url.search || ""
|
||||
return `${url.protocol}//${hostname}${pathname}${search}`
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
function buildKey(normalizedUrl) {
|
||||
const hash = crypto.createHash("sha1").update(normalizedUrl).digest("hex")
|
||||
return `${IMAGE_KEY_PREFIX}${hash}`
|
||||
}
|
||||
|
||||
async function cacheImageFromUrl(rawUrl, { ttlSeconds = DEFAULT_TTL_SECONDS } = {}) {
|
||||
const normalized = normalizeUrl(rawUrl)
|
||||
if (!normalized) return null
|
||||
|
||||
const key = buildKey(normalized)
|
||||
const redis = createRedisClient()
|
||||
|
||||
try {
|
||||
const cached = await redis.get(key)
|
||||
if (cached) return { key: key.replace(IMAGE_KEY_PREFIX, ""), cached: true }
|
||||
|
||||
const response = await axios.get(normalized, {
|
||||
responseType: "arraybuffer",
|
||||
timeout: 15000,
|
||||
maxContentLength: MAX_IMAGE_BYTES,
|
||||
maxBodyLength: MAX_IMAGE_BYTES,
|
||||
validateStatus: (status) => status >= 200 && status < 300,
|
||||
})
|
||||
|
||||
const contentType = String(response.headers?.["content-type"] || "")
|
||||
if (!contentType.startsWith("image/")) return null
|
||||
|
||||
const buffer = Buffer.from(response.data || [])
|
||||
if (!buffer.length || buffer.length > MAX_IMAGE_BYTES) return null
|
||||
|
||||
const payload = JSON.stringify({
|
||||
ct: contentType,
|
||||
b64: buffer.toString("base64"),
|
||||
})
|
||||
await redis.set(key, payload, "EX", Number(ttlSeconds) || DEFAULT_TTL_SECONDS)
|
||||
return { key: key.replace(IMAGE_KEY_PREFIX, ""), cached: false }
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
async function getCachedImageByKey(hashKey) {
|
||||
if (!hashKey || typeof hashKey !== "string") return null
|
||||
const redis = createRedisClient()
|
||||
const key = `${IMAGE_KEY_PREFIX}${hashKey}`
|
||||
try {
|
||||
const raw = await redis.get(key)
|
||||
if (!raw) return null
|
||||
const parsed = JSON.parse(raw)
|
||||
if (!parsed?.ct || !parsed?.b64) return null
|
||||
return { contentType: parsed.ct, buffer: Buffer.from(parsed.b64, "base64") }
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
cacheImageFromUrl,
|
||||
getCachedImageByKey,
|
||||
}
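A usage sketch for the image cache above: cacheImageFromUrl downloads the image, stores it as base64 JSON under a SHA-1 derived key, and returns only that hash, which can later be passed to getCachedImageByKey (the image URL is made up):

const {
  cacheImageFromUrl,
  getCachedImageByKey,
} = require("./services/redis/linkPreviewImageCache.service") // path assumed

async function demoImageCache() {
  const result = await cacheImageFromUrl("https://example.com/images/product.jpg")
  if (!result) return // not an image, larger than 2 MB, or the fetch failed
  const cached = await getCachedImageByKey(result.key)
  if (cached) console.log(cached.contentType, cached.buffer.length)
}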
|
||||
|
|
@ -6,7 +6,7 @@ function createRedisClient() {
|
|||
|
||||
async function getNewDealListId(redis, newListId) {
|
||||
if (newListId) return String(newListId)
|
||||
const latest = await redis.get("lists:new:latest")
|
||||
const latest = await redis.get("deals:lists:new:latest")
|
||||
return latest ? String(latest) : null
|
||||
}
|
||||
|
||||
|
|
@ -17,7 +17,7 @@ async function getNewDealIds({ newListId } = {}) {
|
|||
const listId = await getNewDealListId(redis, newListId)
|
||||
if (!listId) return { newListId: null, dealIds: [] }
|
||||
|
||||
const key = `lists:new:${listId}`
|
||||
const key = `deals:lists:new:${listId}`
|
||||
const raw = await redis.call("JSON.GET", key, "$.dealIds")
|
||||
if (!raw) return { newListId: listId, dealIds: [] }
|
||||
|
||||
|
|
@ -28,6 +28,8 @@ async function getNewDealIds({ newListId } = {}) {
|
|||
newListId: listId,
|
||||
dealIds: Array.isArray(dealIds) ? dealIds.map((id) => Number(id)) : [],
|
||||
}
|
||||
} catch {
|
||||
return { newListId: null, dealIds: [] }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ async function publishNotification(payload) {
|
|||
try {
|
||||
const message = JSON.stringify(payload)
|
||||
return await redis.publish(NOTIFICATIONS_CHANNEL, message)
|
||||
} catch {
|
||||
return 0
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -10,12 +10,12 @@ async function ensureDealSearchIndex() {
|
|||
try {
|
||||
await redis.call(
|
||||
"FT.CREATE",
|
||||
"idx:data:deals",
|
||||
"idx:deals",
|
||||
"ON",
|
||||
"JSON",
|
||||
"PREFIX",
|
||||
"1",
|
||||
"data:deals:",
|
||||
"deals:cache:",
|
||||
"SCHEMA",
|
||||
"$.id",
|
||||
"AS",
|
||||
|
|
@ -158,12 +158,12 @@ async function ensureDealSearchIndex() {
|
|||
"TAG"
|
||||
)
|
||||
|
||||
console.log("✅ Redis search index created: idx:data:deals")
|
||||
console.log("✅ Redis search index created: idx:deals")
|
||||
await ensureDealIndexFields(redis)
|
||||
} catch (err) {
|
||||
const message = err?.message || ""
|
||||
if (message.includes("Index already exists")) {
|
||||
console.log("ℹ️ Redis search index already exists: idx:data:deals")
|
||||
console.log("ℹ️ Redis search index already exists: idx:deals")
|
||||
await ensureDealIndexFields(redis)
|
||||
} else {
|
||||
throw err
|
||||
|
|
@ -204,7 +204,7 @@ async function ensureDealIndexFields(redis) {
|
|||
|
||||
for (const field of fields) {
|
||||
try {
|
||||
await redis.call("FT.ALTER", "idx:data:deals", "SCHEMA", "ADD", ...field)
|
||||
await redis.call("FT.ALTER", "idx:deals", "SCHEMA", "ADD", ...field)
|
||||
console.log(`✅ Redis search index field added: ${field[2]}`)
|
||||
} catch (err) {
|
||||
const message = err?.message || ""
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ const { getRedisClient } = require("./client")
|
|||
const { recordCacheMiss } = require("./cacheMetrics.service")
|
||||
|
||||
const SELLERS_KEY = "data:sellers"
|
||||
const SELLER_DOMAINS_KEY = "data:sellerdomains"
|
||||
const SELLER_DOMAINS_KEY = "data:seller:domains"
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
|
|
@ -28,6 +28,8 @@ async function getSellerById(id) {
|
|||
await recordCacheMiss({ key: `${SELLERS_KEY}:${sellerId}`, label: "seller" })
|
||||
}
|
||||
return raw ? JSON.parse(raw) : null
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -48,6 +50,8 @@ async function getSellersByIds(ids = []) {
|
|||
}
|
||||
})
|
||||
return map
|
||||
} catch {
|
||||
return new Map()
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -62,6 +66,8 @@ async function getSellerIdByDomain(domain) {
|
|||
}
|
||||
const id = raw ? Number(raw) : null
|
||||
return Number.isInteger(id) && id > 0 ? id : null
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -131,6 +137,8 @@ async function listSellersFromRedis() {
|
|||
}
|
||||
}
|
||||
return list
|
||||
} catch {
|
||||
return []
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const prisma = require("../../db/client")
|
||||
const { ensureCounterAtLeast, nextId } = require("./idGenerator.service")
|
||||
const SELLER_ID_KEY = "ids:seller"
|
||||
const SELLER_ID_KEY = "data:ids:seller"
|
||||
|
||||
async function ensureSellerIdCounter() {
|
||||
const latest = await prisma.seller.findFirst({
|
||||
|
|
@ -12,7 +12,15 @@ async function ensureSellerIdCounter() {
|
|||
}
|
||||
|
||||
async function generateSellerId() {
|
||||
return nextId(SELLER_ID_KEY)
|
||||
try {
|
||||
return await nextId(SELLER_ID_KEY)
|
||||
} catch {
|
||||
const latest = await prisma.seller.findFirst({
|
||||
select: { id: true },
|
||||
orderBy: { id: "desc" },
|
||||
})
|
||||
return (latest?.id ?? 0) + 1
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { ensureSellerIdCounter, generateSellerId }
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ function createRedisClient() {
|
|||
|
||||
async function getTrendingDealListId(redis, trendingListId) {
|
||||
if (trendingListId) return String(trendingListId)
|
||||
const latest = await redis.get("lists:trending:latest")
|
||||
const latest = await redis.get("deals:lists:trending:latest")
|
||||
return latest ? String(latest) : null
|
||||
}
|
||||
|
||||
|
|
@ -17,7 +17,7 @@ async function getTrendingDealIds({ trendingListId } = {}) {
|
|||
const listId = await getTrendingDealListId(redis, trendingListId)
|
||||
if (!listId) return { trendingListId: null, dealIds: [] }
|
||||
|
||||
const key = `lists:trending:${listId}`
|
||||
const key = `deals:lists:trending:${listId}`
|
||||
const raw = await redis.call("JSON.GET", key, "$.dealIds")
|
||||
if (!raw) return { trendingListId: listId, dealIds: [] }
|
||||
|
||||
|
|
@ -28,6 +28,8 @@ async function getTrendingDealIds({ trendingListId } = {}) {
|
|||
trendingListId: listId,
|
||||
dealIds: Array.isArray(dealIds) ? dealIds.map((id) => Number(id)) : [],
|
||||
}
|
||||
} catch {
|
||||
return { trendingListId: null, dealIds: [] }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,10 @@
|
|||
const { getRedisClient } = require("./client")
|
||||
const dealSaveDb = require("../../db/dealSave.db")
|
||||
|
||||
const USER_KEY_PREFIX = "data:users:"
|
||||
const USER_SAVED_SET_PREFIX = "data:users:saved:"
|
||||
const USER_UNSAVED_SET_PREFIX = "data:users:unsaved:"
|
||||
const USER_KEY_PREFIX = "users:cache:"
|
||||
const USER_SAVED_SET_PREFIX = "users:saved:"
|
||||
const USER_UNSAVED_SET_PREFIX = "users:unsaved:"
|
||||
const USER_SAVED_HASH_PREFIX = "users:savedmap:"
|
||||
const DEFAULT_USER_TTL_SECONDS = 60 * 60
|
||||
|
||||
function createRedisClient() {
|
||||
|
|
@ -19,6 +21,7 @@ async function ensureUserCache(userId, { ttlSeconds = DEFAULT_USER_TTL_SECONDS }
|
|||
if (!id) return false
|
||||
const redis = createRedisClient()
|
||||
const key = `${USER_KEY_PREFIX}${id}`
|
||||
const savedMapKey = `${USER_SAVED_HASH_PREFIX}${id}`
|
||||
try {
|
||||
const exists = await redis.exists(key)
|
||||
if (!exists) {
|
||||
|
|
@ -31,8 +34,11 @@ async function ensureUserCache(userId, { ttlSeconds = DEFAULT_USER_TTL_SECONDS }
|
|||
}
|
||||
if (ttlSeconds) {
|
||||
await redis.expire(key, Number(ttlSeconds))
|
||||
await redis.expire(savedMapKey, Number(ttlSeconds))
|
||||
}
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -43,6 +49,7 @@ async function getUserSavedIdsFromRedis(userId) {
|
|||
const key = `${USER_KEY_PREFIX}${id}`
|
||||
const setKey = `${USER_SAVED_SET_PREFIX}${id}`
|
||||
const unsavedKey = `${USER_UNSAVED_SET_PREFIX}${id}`
|
||||
const savedMapKey = `${USER_SAVED_HASH_PREFIX}${id}`
|
||||
try {
|
||||
const [raw, setIds, unsavedIds] = await Promise.all([
|
||||
redis.call("JSON.GET", key, "$.savedDeals"),
|
||||
|
|
@ -65,6 +72,50 @@ async function getUserSavedIdsFromRedis(userId) {
|
|||
savedSet: new Set(setList),
|
||||
unsavedSet: new Set(unsavedList),
|
||||
}
|
||||
} catch {
|
||||
return { jsonIds: [], savedSet: new Set(), unsavedSet: new Set() }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
async function getUserSavedMapForDeals(userId, dealIds = []) {
  const uid = normalizeUserId(userId)
  const ids = Array.from(
    new Set((Array.isArray(dealIds) ? dealIds : []).map(Number).filter((id) => Number.isInteger(id) && id > 0))
  )
  if (!uid || !ids.length) return new Map()
  const redis = createRedisClient()
  const key = `${USER_SAVED_HASH_PREFIX}${uid}`
  try {
    const exists = await redis.exists(key)
    if (!exists) {
      const saved = await dealSaveDb.findDealSavesByUser(uid, {
        select: { dealId: true },
        where: { userId: uid },
      })
      const pipeline = redis.pipeline()
      saved.forEach((entry) => {
        if (!entry?.dealId) return
        pipeline.hset(key, String(entry.dealId), "1")
      })
      pipeline.expire(key, DEFAULT_USER_TTL_SECONDS)
      await pipeline.exec()
      const savedSet = new Set(saved.map((s) => Number(s.dealId)))
      const map = new Map()
      ids.forEach((id) => {
        if (savedSet.has(id)) map.set(id, true)
      })
      return map
    }

    const results = await redis.hmget(key, ids.map(String))
    const map = new Map()
    results.forEach((raw, idx) => {
      const value = raw == null ? 0 : Number(raw)
      if (value) map.set(ids[idx], true)
    })
    return map
  } catch {
    return new Map()
  } finally {}
}
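A usage sketch of the saved-deal map above: the first call for a user hydrates users:savedmap:<userId> from the database, later calls answer straight from HMGET (the user id and deal ids below are made up):

async function demoSavedMap() {
  const savedMap = await getUserSavedMapForDeals(42, [101, 102, 103])
  console.log(savedMap.get(101) === true) // true only when deal 101 is saved by user 42
}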
|
||||
|
||||
|
|
@ -73,13 +124,23 @@ async function setUserSavedDeals(userId, ids = [], { ttlSeconds = DEFAULT_USER_T
|
|||
if (!uid) return false
|
||||
const redis = createRedisClient()
|
||||
const key = `${USER_KEY_PREFIX}${uid}`
|
||||
const savedMapKey = `${USER_SAVED_HASH_PREFIX}${uid}`
|
||||
try {
|
||||
const list = Array.isArray(ids) ? ids : []
|
||||
await redis.call("JSON.SET", key, "$.savedDeals", JSON.stringify(list))
|
||||
await redis.del(savedMapKey)
|
||||
if (list.length) {
|
||||
const pipeline = redis.pipeline()
|
||||
list.forEach((id) => pipeline.hset(savedMapKey, String(id), "1"))
|
||||
await pipeline.exec()
|
||||
}
|
||||
if (ttlSeconds) {
|
||||
await redis.expire(key, Number(ttlSeconds))
|
||||
await redis.expire(savedMapKey, Number(ttlSeconds))
|
||||
}
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -91,6 +152,7 @@ async function addUserSavedDeal(userId, dealId, { ttlSeconds = DEFAULT_USER_TTL_
|
|||
const key = `${USER_KEY_PREFIX}${uid}`
|
||||
const setKey = `${USER_SAVED_SET_PREFIX}${uid}`
|
||||
const unsavedKey = `${USER_UNSAVED_SET_PREFIX}${uid}`
|
||||
const savedMapKey = `${USER_SAVED_HASH_PREFIX}${uid}`
|
||||
try {
|
||||
await ensureUserCache(uid, { ttlSeconds })
|
||||
const raw = await redis.call("JSON.GET", key, "$.savedDeals")
|
||||
|
|
@ -109,12 +171,16 @@ async function addUserSavedDeal(userId, dealId, { ttlSeconds = DEFAULT_USER_TTL_
|
|||
}
|
||||
await redis.sadd(setKey, String(did))
|
||||
await redis.srem(unsavedKey, String(did))
|
||||
await redis.hset(savedMapKey, String(did), "1")
|
||||
if (ttlSeconds) {
|
||||
await redis.expire(setKey, Number(ttlSeconds))
|
||||
await redis.expire(unsavedKey, Number(ttlSeconds))
|
||||
await redis.expire(key, Number(ttlSeconds))
|
||||
await redis.expire(savedMapKey, Number(ttlSeconds))
|
||||
}
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -126,6 +192,7 @@ async function removeUserSavedDeal(userId, dealId, { ttlSeconds = DEFAULT_USER_T
|
|||
const key = `${USER_KEY_PREFIX}${uid}`
|
||||
const setKey = `${USER_SAVED_SET_PREFIX}${uid}`
|
||||
const unsavedKey = `${USER_UNSAVED_SET_PREFIX}${uid}`
|
||||
const savedMapKey = `${USER_SAVED_HASH_PREFIX}${uid}`
|
||||
try {
|
||||
const raw = await redis.call("JSON.GET", key, "$.savedDeals")
|
||||
if (raw) {
|
||||
|
|
@ -138,18 +205,23 @@ async function removeUserSavedDeal(userId, dealId, { ttlSeconds = DEFAULT_USER_T
|
|||
}
|
||||
await redis.srem(setKey, String(did))
|
||||
await redis.sadd(unsavedKey, String(did))
|
||||
await redis.hdel(savedMapKey, String(did))
|
||||
if (ttlSeconds) {
|
||||
await redis.expire(setKey, Number(ttlSeconds))
|
||||
await redis.expire(unsavedKey, Number(ttlSeconds))
|
||||
await redis.expire(key, Number(ttlSeconds))
|
||||
await redis.expire(savedMapKey, Number(ttlSeconds))
|
||||
}
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
} finally {}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
ensureUserCache,
|
||||
getUserSavedIdsFromRedis,
|
||||
getUserSavedMapForDeals,
|
||||
setUserSavedDeals,
|
||||
addUserSavedDeal,
|
||||
removeUserSavedDeal,
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ const { getRedisClient } = require("./client")
|
|||
const userDb = require("../../db/user.db")
|
||||
const { recordCacheMiss } = require("./cacheMetrics.service")
|
||||
|
||||
const USER_MOD_KEY_PREFIX = "user:mod:"
|
||||
const USER_MOD_KEY_PREFIX = "users:moderation:"
|
||||
const DEFAULT_TTL_SECONDS = 60 * 60
|
||||
|
||||
function createRedisClient() {
|
||||
|
|
@ -36,6 +36,8 @@ async function getUserModerationFromRedis(userId) {
|
|||
await recordCacheMiss({ key: `${USER_MOD_KEY_PREFIX}${id}`, label: "user-mod" })
|
||||
}
|
||||
return raw ? JSON.parse(raw) : null
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -48,6 +50,8 @@ async function setUserModerationInRedis(user, { ttlSeconds = DEFAULT_TTL_SECONDS
|
|||
await redis.call("JSON.SET", key, "$", JSON.stringify(payload))
|
||||
if (ttlSeconds) await redis.expire(key, Number(ttlSeconds))
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
const { getRedisClient } = require("./client")
|
||||
const { recordCacheMiss } = require("./cacheMetrics.service")
|
||||
|
||||
const PROFILE_KEY_PREFIX = "data:profiles:user:"
|
||||
const DEFAULT_TTL_SECONDS = 60
|
||||
const PROFILE_KEY_PREFIX = "users:profile:"
|
||||
const DEFAULT_TTL_SECONDS = 5 * 60
|
||||
|
||||
function createRedisClient() {
|
||||
return getRedisClient()
|
||||
|
|
@ -24,6 +24,8 @@ async function getUserProfileFromRedis(userName) {
|
|||
return null
|
||||
}
|
||||
return JSON.parse(raw)
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -39,6 +41,8 @@ async function setUserProfileInRedis(userName, payload, { ttlSeconds = DEFAULT_T
|
|||
await redis.expire(key, ttl)
|
||||
}
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
const { getRedisClient } = require("./client")
|
||||
const { recordCacheMiss } = require("./cacheMetrics.service")
|
||||
|
||||
const USER_PUBLIC_ID_KEY_PREFIX = "user:id:"
|
||||
const USER_PUBLIC_NAME_KEY_PREFIX = "user:name:"
|
||||
const USER_PUBLIC_ID_KEY_PREFIX = "users:public:id:"
|
||||
const USER_PUBLIC_NAME_KEY_PREFIX = "users:public:name:"
|
||||
const DEFAULT_USER_TTL_SECONDS = 60 * 60
|
||||
|
||||
function createRedisClient() {
|
||||
|
|
@ -50,6 +50,8 @@ async function getUserPublicFromRedis(userId) {
|
|||
return null
|
||||
}
|
||||
return JSON.parse(raw)
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -77,6 +79,8 @@ async function getUsersPublicByIds(userIds = []) {
|
|||
})
|
||||
|
||||
return map
|
||||
} catch {
|
||||
return new Map()
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -108,14 +112,16 @@ async function setUsersPublicInRedis(users = [], { ttlSecondsById = null } = {})
|
|||
})
|
||||
await pipeline.exec()
|
||||
return payloads.length
|
||||
} catch {
|
||||
return 0
|
||||
} finally {}
|
||||
}
|
||||
|
||||
async function setUserPublicInRedis(user, { ttlSeconds = DEFAULT_USER_TTL_SECONDS } = {}) {
|
||||
const payload = normalizeUserPayload(user)
|
||||
if (!payload) return false
|
||||
await setUsersPublicInRedis([payload], { ttlSecondsById: { [payload.id]: ttlSeconds } })
|
||||
return true
|
||||
const count = await setUsersPublicInRedis([payload], { ttlSecondsById: { [payload.id]: ttlSeconds } })
|
||||
return count > 0
|
||||
}
|
||||
|
||||
async function ensureUserMinTtl(userId, { minSeconds = DEFAULT_USER_TTL_SECONDS } = {}) {
|
||||
|
|
@ -133,6 +139,8 @@ async function ensureUserMinTtl(userId, { minSeconds = DEFAULT_USER_TTL_SECONDS
|
|||
return { bumped: true, ttl: nextTtl }
|
||||
}
|
||||
return { bumped: false, ttl }
|
||||
} catch {
|
||||
return { bumped: false }
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
@ -150,6 +158,8 @@ async function getUserIdByUsername(userName) {
|
|||
}
|
||||
const id = raw ? Number(raw) : null
|
||||
return Number.isInteger(id) && id > 0 ? id : null
|
||||
} catch {
|
||||
return null
|
||||
} finally {}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -113,6 +113,27 @@ async function removeTagsFromDeal(dealId, tags = []) {
|
|||
select: { tag: { select: { id: true, slug: true, name: true } } },
|
||||
})
|
||||
|
||||
const remainingSlugs = new Set(
|
||||
tagsForDeal.map((entry) => entry?.tag?.slug).filter(Boolean)
|
||||
)
|
||||
if (slugs.length) {
|
||||
const aiReview = await tx.dealAiReview.findUnique({
|
||||
where: { dealId: id },
|
||||
select: { tags: true },
|
||||
})
|
||||
if (aiReview && Array.isArray(aiReview.tags) && aiReview.tags.length) {
|
||||
const filtered = aiReview.tags.filter(
|
||||
(tag) => !slugs.includes(String(tag)) || remainingSlugs.has(String(tag))
|
||||
)
|
||||
if (filtered.length !== aiReview.tags.length) {
|
||||
await tx.dealAiReview.update({
|
||||
where: { dealId: id },
|
||||
data: { tags: filtered },
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
tags: tagsForDeal.map((entry) => entry.tag),
|
||||
removed: toRemove.length,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
const dealDb = require("../db/deal.db");
|
||||
const { updateDealVoteInRedis } = require("./redis/dealVote.service");
|
||||
const { queueVoteUpdate, queueDealUpdate, queueNotificationCreate } = require("./redis/dbSync.service");
|
||||
const { updateDealInRedis } = require("./redis/dealCache.service");
|
||||
const { updateDealInRedis, getDealFromRedis } = require("./redis/dealCache.service");
|
||||
const { publishNotification } = require("./redis/notificationPubsub.service");
|
||||
|
||||
async function voteDeal({ dealId, userId, voteType }) {
|
||||
|
|
@ -17,10 +16,7 @@ async function voteDeal({ dealId, userId, voteType }) {
|
|||
throw err;
|
||||
}
|
||||
|
||||
const deal = await dealDb.findDeal(
|
||||
{ id: Number(dealId) },
|
||||
{ select: { status: true } }
|
||||
);
|
||||
const deal = await getDealFromRedis(Number(dealId));
|
||||
if (!deal || !["ACTIVE", "EXPIRED"].includes(deal.status)) {
|
||||
const err = new Error("deal sadece ACTIVE veya EXPIRED iken oylanabilir");
|
||||
err.statusCode = 400;
|
||||
|
|
@ -79,13 +75,9 @@ async function voteDeal({ dealId, userId, voteType }) {
|
|||
let delta = redisResult?.delta ?? 0;
|
||||
let score = redisResult?.score ?? null;
|
||||
if (score === null) {
|
||||
const current = await dealDb.findDeal(
|
||||
{ id: Number(dealId) },
|
||||
{ select: { score: true, votes: { where: { userId: Number(userId) }, select: { voteType: true } } } }
|
||||
);
|
||||
const oldVote = current?.votes?.[0]?.voteType ?? 0;
|
||||
delta = Number(voteType) - Number(oldVote);
|
||||
score = Number(current?.score ?? 0) + delta;
|
||||
const current = await getDealFromRedis(Number(dealId));
|
||||
score = current ? Number(current?.score ?? 0) : null;
|
||||
delta = 0;
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
|
|||
|
|
@@ -16,4 +16,11 @@ async function makeThumbWebp(inputBuffer) {
    .toBuffer();
}

module.exports = { makeDetailWebp, makeThumbWebp };
async function makeWebp(inputBuffer, { quality = 80 } = {}) {
  return sharp(inputBuffer)
    .rotate()
    .webp({ quality })
    .toBuffer();
}

module.exports = { makeDetailWebp, makeThumbWebp, makeWebp };
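A usage sketch for the new makeWebp helper (file paths are made up; the module path is assumed to be wherever makeDetailWebp already lives):

const fs = require("fs/promises")
const { makeWebp } = require("./services/imageProcess.service") // path assumed

async function convertUploadToWebp() {
  const input = await fs.readFile("./uploads/original.png")
  const webp = await makeWebp(input, { quality: 70 }) // EXIF rotation applied, then re-encoded as WebP
  await fs.writeFile("./uploads/original.webp", webp)
}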
|
||||
|
|
|
|||
|
|
@ -21,10 +21,7 @@ const {
|
|||
SELLER_UPSERT_HASH_KEY,
|
||||
SELLER_DOMAIN_UPSERT_HASH_KEY,
|
||||
} = require("../services/redis/dbSync.service")
|
||||
const {
|
||||
DEAL_EVENT_HASH_KEY,
|
||||
incrementDealAnalyticsTotalsInRedis,
|
||||
} = require("../services/redis/dealAnalytics.service")
|
||||
const { DEAL_ANALYTICS_TOTAL_HASH_KEY } = require("../services/redis/dealAnalytics.service")
|
||||
const commentLikeDb = require("../db/commentLike.db")
|
||||
const dealAnalyticsDb = require("../db/dealAnalytics.db")
|
||||
const prisma = require("../db/client")
|
||||
|
|
@@ -493,11 +490,11 @@ async function consumeCommentDeletes(redis) {
  })
}

async function consumeDealEvents(redis) {
async function consumeDealAnalyticsTotals(redis) {
  const data = await redis.eval(
    "local data = redis.call('HGETALL', KEYS[1]); redis.call('DEL', KEYS[1]); return data;",
    1,
    DEAL_EVENT_HASH_KEY
    DEAL_ANALYTICS_TOTAL_HASH_KEY
  )
  if (!data || data.length === 0) return 0

@@ -506,31 +503,29 @@ async function consumeDealEvents(redis) {
    pairs[data[i]] = data[i + 1]
  }

  const events = []
  const increments = []
  for (const payload of Object.values(pairs)) {
    try {
      const parsed = JSON.parse(payload)
      if (!parsed?.dealId || (!parsed?.userId && !parsed?.ip)) continue
      events.push({
      if (!parsed?.dealId) continue
      increments.push({
        dealId: Number(parsed.dealId),
        type: String(parsed.type || "IMPRESSION").toUpperCase(),
        userId: parsed.userId ? Number(parsed.userId) : null,
        ip: parsed.ip ? String(parsed.ip) : null,
        createdAt: parsed.createdAt,
        impressions: Number(parsed.impressions || 0),
        views: Number(parsed.views || 0),
        clicks: Number(parsed.clicks || 0),
      })
    } catch (err) {
      console.error("db-sync dealEvent parse failed:", err?.message || err)
      console.error("db-sync dealAnalyticsTotals parse failed:", err?.message || err)
    }
  }

  if (!events.length) return 0
  if (!increments.length) return 0

  try {
    const result = await dealAnalyticsDb.applyDealEventBatch(events)
    await incrementDealAnalyticsTotalsInRedis(result?.increments || [])
    return result?.inserted ?? 0
    const result = await dealAnalyticsDb.applyDealTotalsBatch(increments)
    return result?.updated ?? 0
  } catch (err) {
    console.error("db-sync dealEvent batch failed:", err?.message || err)
    console.error("db-sync dealAnalyticsTotals batch failed:", err?.message || err)
    return 0
  }
}
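The redis.eval above drains the totals hash atomically: HGETALL and DEL run inside one Lua script, so a producer HSET can never land between the read and the delete and be silently dropped. A standalone illustration of the same pattern, assuming an ioredis client (the key name here is made up):

const Redis = require("ioredis")

async function drainHashOnce(key = "dbsync:exampleHash") {
  const redis = new Redis()
  // One atomic script: read everything, delete the hash, return the flat field/value list.
  const flat = await redis.eval(
    "local data = redis.call('HGETALL', KEYS[1]); redis.call('DEL', KEYS[1]); return data;",
    1,
    key
  )
  const pairs = {}
  for (let i = 0; i < flat.length; i += 2) pairs[flat[i]] = flat[i + 1]
  await redis.quit()
  return pairs
}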
|
||||
|
|
@@ -1003,7 +998,7 @@ async function handler() {
  const commentLikeCount = await consumeCommentLikeUpdates(redis)
  const commentDeleteCount = await consumeCommentDeletes(redis)
  const dealSaveCount = await consumeDealSaveUpdates(redis)
-  const dealEventCount = await consumeDealEvents(redis)
+  const dealEventCount = await consumeDealAnalyticsTotals(redis)
  const dealCreateCount = await consumeDealCreates(redis)
  const dealAiReviewCount = await consumeDealAiReviewUpdates(redis)
  const notificationReadCount = await consumeNotificationReads(redis)
@@ -17,7 +17,7 @@ function parseSearchResults(results = []) {

  // start at i = 1 (results[0] is the total count) and step two at a time
  for (let i = 1; i < results.length; i += 2) {
-    const key = results[i]; // e.g. "data:deals:20"
+    const key = results[i]; // e.g. "deals:cache:20"
    const value = results[i + 1]; // e.g. ["$", "[{\"id\":20,...}]"]

    try {
@@ -26,7 +26,7 @@ function parseSearchResults(results = []) {
      const [deal] = JSON.parse(value[1]);
      ids.push(Number(deal.id));
    } catch {
-      // if the JSON is malformed, safely pull the ID out of the key (data:deals:20)
+      // if the JSON is malformed, safely pull the ID out of the key (deals:cache:20)
      const idFromKey = key.split(":")[2];
      if (idFromKey) ids.push(Number(idFromKey));
    }
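For reference, FT.SEARCH replies with a flat array: the total match count first, then alternating document keys and value arrays, which is why the loop starts at i = 1 and steps by 2. A minimal sketch of the shapes involved under the renamed deals:cache:<id> scheme (field values other than id are illustrative):

// Hypothetical reply matching the comments above (DIALECT 3 returns ["$", "<json>"]):
const results = [
  1,                                                      // results[0]: total matches
  "deals:cache:20",                                       // results[1]: document key
  ["$", JSON.stringify([{ id: 20, status: "ACTIVE" }])],  // results[2]: path + JSON string
];
// parseSearchResults(results) would yield [20]; if the JSON were malformed, the catch
// branch falls back to key.split(":")[2], which is still "20" under deals:cache:<id>.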
@@ -50,11 +50,11 @@ async function buildHotDealListForRange({ windowDays, listKey, latestKey }) {
  */
  const query = `@status:{ACTIVE} @createdAtTs:[${cutoffMs} +inf]`

-  console.log(`🔍 Redis Query: FT.SEARCH idx:data:deals "${query}" SORTBY score DESC DIALECT 3`)
+  console.log(`🔍 Redis Query: FT.SEARCH idx:deals "${query}" SORTBY score DESC DIALECT 3`)

  const results = await redis.call(
    "FT.SEARCH",
-    "idx:data:deals",
+    "idx:deals",
    query,
    "SORTBY", "score", "DESC",
    "LIMIT", "0", String(HOT_DEAL_LIMIT),
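The query pairs a TAG filter on status with a numeric range on the indexed createdAtTs field. The cutoff itself is computed outside this hunk; a sketch of how it would typically be derived from the windowDays parameter (the exact expression in the repository is an assumption):

// Sketch only: millisecond cutoff feeding the @createdAtTs:[cutoffMs +inf] range filter.
const windowDays = 7; // illustrative; the real values come from the HOT_*_WINDOW_DAYS constants
const cutoffMs = Date.now() - windowDays * 24 * 60 * 60 * 1000;
const query = `@status:{ACTIVE} @createdAtTs:[${cutoffMs} +inf]`;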
@@ -94,23 +94,23 @@ async function handler() {
  const results = {}
  results.hot = await buildHotDealListForRange({
    windowDays: HOT_DEAL_WINDOW_DAYS,
-    listKey: "lists:hot",
-    latestKey: "lists:hot:latest",
+    listKey: "deals:lists:hot",
+    latestKey: "deals:lists:hot:latest",
  })
  results.hotDay = await buildHotDealListForRange({
    windowDays: HOT_DAY_WINDOW_DAYS,
-    listKey: "lists:hot_day",
-    latestKey: "lists:hot_day:latest",
+    listKey: "deals:lists:hot_day",
+    latestKey: "deals:lists:hot_day:latest",
  })
  results.hotWeek = await buildHotDealListForRange({
    windowDays: HOT_WEEK_WINDOW_DAYS,
-    listKey: "lists:hot_week",
-    latestKey: "lists:hot_week:latest",
+    listKey: "deals:lists:hot_week",
+    latestKey: "deals:lists:hot_week:latest",
  })
  results.hotMonth = await buildHotDealListForRange({
    windowDays: HOT_MONTH_WINDOW_DAYS,
-    listKey: "lists:hot_month",
-    latestKey: "lists:hot_month:latest",
+    listKey: "deals:lists:hot_month",
+    latestKey: "deals:lists:hot_month:latest",
  })
  return results
}
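With this change every list snapshot lives under the deals:lists:* prefix. A hedged consumer sketch for the hot list; it assumes the hot builder stores each run at `${listKey}:${runId}` with a <listKey>:latest pointer, mirroring the new/trending writers shown further below:

// Sketch: resolve the latest hot-list run via its pointer key, then load the JSON body.
async function readLatestHotList(redis) {
  const runId = await redis.get("deals:lists:hot:latest");
  if (!runId) return null;
  const raw = await redis.call("JSON.GET", `deals:lists:hot:${runId}`, "$");
  return raw ? JSON.parse(raw) : null; // JSON.GET with "$" returns an array-wrapped document
}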
@@ -34,7 +34,7 @@ async function buildNewDealList() {

  const results = await redis.call(
    "FT.SEARCH",
-    "idx:data:deals",
+    "idx:deals",
    query,
    "SORTBY",
    "createdAtTs",
@@ -59,10 +59,10 @@ async function buildNewDealList() {
      dealIds,
    }

-    const key = `lists:new:${runId}`
+    const key = `deals:lists:new:${runId}`
    await redis.call("JSON.SET", key, "$", JSON.stringify(payload))
    await redis.expire(key, NEW_DEAL_TTL_SECONDS)
-    await redis.set("lists:new:latest", runId, "EX", NEW_DEAL_TTL_SECONDS)
+    await redis.set("deals:lists:new:latest", runId, "EX", NEW_DEAL_TTL_SECONDS)

    return { id: runId, total: dealIds.length }
  } catch (error) {
@@ -14,7 +14,7 @@ function parseSearchResults(results = []) {

  // start at i = 1 (results[0] is the total count) and step two at a time
  for (let i = 1; i < results.length; i += 2) {
-    const key = results[i]; // e.g. "data:deals:20"
+    const key = results[i]; // e.g. "deals:cache:20"
    const value = results[i + 1]; // e.g. ["$", "[{\"id\":20,...}]"]

    try {
@@ -23,7 +23,7 @@ function parseSearchResults(results = []) {
      const [deal] = JSON.parse(value[1]);
      ids.push(Number(deal.id));
    } catch {
-      // if the JSON is malformed, safely pull the ID out of the key (data:deals:20)
+      // if the JSON is malformed, safely pull the ID out of the key (deals:cache:20)
      const idFromKey = key.split(":")[2];
      if (idFromKey) ids.push(Number(idFromKey));
    }
@@ -51,11 +51,11 @@ async function buildTrendingDealList() {
  */
  const query = `@status:{ACTIVE} @createdAtTs:[${cutoffMs} +inf]`

-  console.log(`🔍 Redis Query: FT.SEARCH idx:data:deals "${query}" SORTBY score DESC DIALECT 3`)
+  console.log(`🔍 Redis Query: FT.SEARCH idx:deals "${query}" SORTBY score DESC DIALECT 3`)

  const results = await redis.call(
    "FT.SEARCH",
-    "idx:data:deals",
+    "idx:deals",
    query,
    "SORTBY", "score", "DESC",
    "LIMIT", "0", String(TRENDING_DEAL_LIMIT),
@@ -76,10 +76,10 @@ async function buildTrendingDealList() {
      dealIds,
    }

-    const key = `lists:trending:${runId}`
+    const key = `deals:lists:trending:${runId}`
    await redis.call("JSON.SET", key, "$", JSON.stringify(payload))
    await redis.expire(key, TRENDING_DEAL_TTL_SECONDS)
-    await redis.set("lists:trending:latest", runId, "EX", TRENDING_DEAL_TTL_SECONDS)
+    await redis.set("deals:lists:trending:latest", runId, "EX", TRENDING_DEAL_TTL_SECONDS)

    return { id: runId, total: dealIds.length }
  } catch (error) {