Greatly improve prefetching and reduce the number of DB calls for each trade_offer

This commit is contained in:
badblocks 2025-05-18 15:26:34 -07:00
parent b89025a7e0
commit 95d794d8b9
5 changed files with 89 additions and 67 deletions

View file

@ -89,27 +89,30 @@ class HomePageView(TemplateView):
logger.error(f"Error fetching 'All' featured offers: {str(e)}")
featured["All"] = []
try:
# Pull out distinct (rarity_level, rarity_icon) tuples
distinct_rarities = base_offer_qs.values_list("rarity_level", "rarity_icon").distinct()
# *** we only show All Featured Offers for now,
# *** we will add rarity-tabbed featured offers later
# try:
# # Pull out distinct (rarity_level, rarity_icon) tuples
# distinct_rarities = base_offer_qs.values_list("rarity_level", "rarity_icon").distinct()
# Prepare a list that holds tuples of (rarity_level, rarity_icon, offers)
rarity_offers = []
for rarity_level, rarity_icon in distinct_rarities:
offers = base_offer_qs.filter(rarity_level=rarity_level).order_by("created_at")[:6]
rarity_offers.append((rarity_level, rarity_icon, offers))
# # Prepare a list that holds tuples of (rarity_level, rarity_icon, offers)
# rarity_offers = []
# for rarity_level, rarity_icon in distinct_rarities:
# offers = base_offer_qs.filter(rarity_level=rarity_level).order_by("created_at")[:6]
# rarity_offers.append((rarity_level, rarity_icon, offers))
# Sort by rarity_level (from greatest to least)
rarity_offers.sort(key=lambda x: x[0], reverse=True)
# # Sort by rarity_level (from greatest to least)
# rarity_offers.sort(key=lambda x: x[0], reverse=True)
# Add the sorted offers to the OrderedDict
for rarity_level, rarity_icon, offers in rarity_offers:
featured[rarity_icon] = offers
except Exception as e:
logger.error(f"Error processing rarity-based featured offers: {str(e)}")
# # Add the sorted offers to the OrderedDict
# for rarity_level, rarity_icon, offers in rarity_offers:
# featured[rarity_icon] = offers
# except Exception as e:
# logger.error(f"Error processing rarity-based featured offers: {str(e)}")
context["featured_offers"] = featured
# Generate a cache key based on the pks and updated_at timestamps of all featured offers
# *** we will separate cache keys for each featured section later
all_offer_identifiers = []
for section_name,section_offers in featured.items():
# featured_section is a QuerySet. Fetch (pk, updated_at) tuples.