Refactor database models to greatly decrease the number of queries needed and reduce home-page load time from 30 seconds to 5 seconds (the rest will be cached via background tasks to decrease it even further)
This commit is contained in:
parent
f7a9b2f823
commit
86c7eba10a
25 changed files with 1941 additions and 1560 deletions
|
|
@ -1,24 +1,24 @@
|
|||
from collections import defaultdict
|
||||
from collections import defaultdict, OrderedDict
|
||||
from django.views.generic import TemplateView
|
||||
from django.urls import reverse_lazy
|
||||
from django.db.models import Count, Q, Prefetch, Sum, F, IntegerField, Value, BooleanField, Case, When
|
||||
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
|
||||
from trades.models import TradeOffer, TradeAcceptance, TradeOfferHaveCard, TradeOfferWantCard
|
||||
from cards.models import Card, CardSet, Rarity
|
||||
from cards.models import Card
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.cache import cache_page
|
||||
from django.template.response import TemplateResponse
|
||||
from django.http import HttpResponseRedirect
|
||||
from silk.profiling.profiler import silk_profile
|
||||
#from silk.profiling.profiler import silk_profile
|
||||
|
||||
class HomePageView(TemplateView):
|
||||
template_name = "home/home.html"
|
||||
|
||||
@silk_profile(name='Home Page')
|
||||
#@silk_profile(name='Home Page')
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super().get_context_data(**kwargs)
|
||||
|
||||
context["cards"] = Card.objects.all().order_by("name", "rarity__pk")
|
||||
context["cards"] = Card.objects.all().order_by("name")
|
||||
|
||||
# Reuse base trade offer queryset for market stats
|
||||
base_offer_qs = TradeOffer.objects.filter(is_closed=False)
|
||||
|
|
@ -29,46 +29,44 @@ class HomePageView(TemplateView):
|
|||
|
||||
# Most Offered Cards
|
||||
context["most_offered_cards"] = (
|
||||
Card.objects_no_prefetch.filter(tradeofferhavecard__isnull=False)
|
||||
Card.objects.filter(tradeofferhavecard__isnull=False)
|
||||
.annotate(offer_count=Sum("tradeofferhavecard__quantity"))
|
||||
.order_by("-offer_count")[:6]
|
||||
)
|
||||
|
||||
# Most Wanted Cards
|
||||
context["most_wanted_cards"] = (
|
||||
Card.objects_no_prefetch.filter(tradeofferwantcard__isnull=False)
|
||||
Card.objects.filter(tradeofferwantcard__isnull=False)
|
||||
.annotate(offer_count=Sum("tradeofferwantcard__quantity"))
|
||||
.order_by("-offer_count")[:6]
|
||||
)
|
||||
|
||||
# Least Offered Cards
|
||||
context["least_offered_cards"] = (
|
||||
Card.objects_no_prefetch.annotate(offer_count=Sum("tradeofferhavecard__quantity"))
|
||||
Card.objects.annotate(offer_count=Sum("tradeofferhavecard__quantity"))
|
||||
.order_by("offer_count")[:6]
|
||||
)
|
||||
|
||||
featured = {}
|
||||
# Featured "All" offers
|
||||
# Build featured offers with custom ordering
|
||||
featured = OrderedDict()
|
||||
# Featured "All" offers remains fixed at the top
|
||||
featured["All"] = base_offer_qs.order_by("created_at")[:6]
|
||||
|
||||
# Get the normalized ids for rarities with pk<=5.
|
||||
normalized_ids = list(
|
||||
Rarity.objects.filter(pk__lte=5).values_list("normalized_id", flat=True).distinct()
|
||||
)
|
||||
|
||||
rarity_map = {
|
||||
rarity.normalized_id: rarity.icons
|
||||
for rarity in Rarity.objects.filter(pk__lte=5)
|
||||
}
|
||||
|
||||
# For each normalized id (sorted descending), filter base offers that have the matching trade offer rarity.
|
||||
for norm in sorted(normalized_ids, reverse=True):
|
||||
offers_qs = base_offer_qs.filter(
|
||||
rarity__normalized_id=norm # now using trade_offer.rarity
|
||||
).order_by("created_at").distinct()[:6]
|
||||
icon_label = rarity_map.get(norm)
|
||||
if icon_label:
|
||||
featured[icon_label] = offers_qs
|
||||
# Pull out distinct (rarity_level, rarity_icon) tuples
|
||||
distinct_rarities = base_offer_qs.values_list("rarity_level", "rarity_icon").distinct()
|
||||
|
||||
# Prepare a list that holds tuples of (rarity_level, rarity_icon, offers)
|
||||
rarity_offers = []
|
||||
for rarity_level, rarity_icon in distinct_rarities:
|
||||
offers = base_offer_qs.filter(rarity_level=rarity_level).order_by("created_at")[:6]
|
||||
rarity_offers.append((rarity_level, rarity_icon, offers))
|
||||
|
||||
# Sort by rarity_level (from greatest to least)
|
||||
rarity_offers.sort(key=lambda x: x[0], reverse=True)
|
||||
|
||||
# Add the sorted offers to the OrderedDict
|
||||
for rarity_level, rarity_icon, offers in rarity_offers:
|
||||
featured[rarity_icon] = offers
|
||||
|
||||
context["featured_offers"] = featured
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue