Don't use redis cache at all for ticket querysets as this breaks pagination

Timothy Hobbs 2021-07-07 20:40:34 +02:00
parent f840136e1e
commit 63bb949d63
2 changed files with 2 additions and 16 deletions


@@ -1,5 +1,4 @@
 from django.db.models import Q
-from django.core.cache import cache
 from django.urls import reverse
 from django.utils.html import escape
 from django.utils.translation import ugettext as _
@@ -139,21 +138,10 @@ class __Query__:
         # https://stackoverflow.com/questions/30487056/django-queryset-contains-duplicate-entries
         return queryset.distinct()
 
-    def get_cache_key(self):
-        return str(self.huser.user.pk) + ":" + self.base64
-
-    def refresh_query(self):
-        tickets = self.huser.get_tickets_in_queues().select_related()
-        ticket_qs = self.__run__(tickets)
-        cache.set(self.get_cache_key(), ticket_qs, timeout=3600)
-        return ticket_qs
-
     def get(self):
         # Prefilter the allowed tickets
-        objects = cache.get(self.get_cache_key())
-        if objects is not None:
-            return objects
-        return self.refresh_query()
+        tickets = self.huser.get_tickets_in_queues().select_related()
+        return self.__run__(tickets)
 
     def get_datatables_context(self, **kwargs):
         """


@@ -1130,8 +1130,6 @@ def ticket_list(request):
     urlsafe_query = query_to_base64(query_params)
 
-    Query(huser, base64query=urlsafe_query).refresh_query()
-
     user_saved_queries = SavedSearch.objects.filter(Q(user=request.user) | Q(shared__exact=True))
 
     search_message = ''
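
Net effect on the view: ticket_list no longer pre-warms a Redis entry; the query is simply built and evaluated per request. A minimal sketch, assuming the Query and query_to_base64 names from the diff are used this way by the surrounding view code (the ticket_query and tickets names are illustrative):

    urlsafe_query = query_to_base64(query_params)
    # No refresh_query() / cache.set() step: the queryset is rebuilt from the
    # database whenever this request filters or paginates it.
    ticket_query = Query(huser, base64query=urlsafe_query)
    tickets = ticket_query.get()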