如何增加cookiecutter-django的超时

问题描述

我正在处理Redis缓存中的一些数据。但似乎我无法足够快地处理它以适应请求超时。有没有办法增加Nginx或django的超时时间?(我甚至不确定cookiecutter-django是否使用了Nginx)。

# views.py
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework.pagination import PageNumberPagination

class SmallResultsSetPagination(PageNumberPagination):
    """Page-number pagination with a small default page of 5 items.

    Clients may override the page size per-request via the
    ``?page_size=N`` query parameter.
    """

    # Default number of results per page.
    page_size = 5
    # Query-string parameter that lets the client choose a page size.
    page_size_query_param = "page_size"


class FooViewSet(viewsets.ModelViewSet):
    """CRUD API endpoint for Foo objects.

    Results are ordered by primary key, paginated in small pages, and
    filterable on ``bar`` via ``?bar=...``.

    NOTE(review): ``Foo`` and ``FooSerializer`` are not imported in this
    snippet — presumably the real views.py imports them; verify.
    """

    # Deterministic ordering so pagination is stable across requests.
    queryset = Foo.objects.all().order_by("id")
    serializer_class = FooSerializer
    pagination_class = SmallResultsSetPagination
    # django-filter style declarative filtering on the "bar" field.
    filterset_fields = ["bar"]

# serializers.py
from rest_framework import serializers

from .models import Foo


class FooSerializer(serializers.ModelSerializer):
    """Serialize Foo, exposing the primary key twice: as ``id`` and as
    ``DT_RowId`` (the row-id field expected by DataTables clients)."""

    id = serializers.IntegerField(read_only=True)
    DT_RowId = serializers.SerializerMethodField()

    def get_DT_RowId(self, obj):
        """DataTables row identifier — just the object's primary key."""
        return obj.id

    class Meta:
        model = Foo
        fields = (
            "id",
            "DT_RowId",
            "name",
            "baz",
            "api_data",
        )
        # Fields that django-rest-framework-datatables must always emit,
        # even when the client requests a column subset.
        datatables_always_serialize = ("baz", "api_data")

# models.py
import logging
import xml.etree.ElementTree as ElementTree

from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.db import models
from django.utils.functional import cached_property

import requests
from requests.exceptions import ConnectionError,Timeout

logger = logging.getLogger(__name__)


def third_party_api():
    """Fetch bar data from the third-party XML API, memoized in the cache.

    Returns a dict mapping a normalized key (``entry["buzz"]`` stripped and
    lower-cased) to a dict of tag/text pairs parsed from each bar element.
    Results are cached for 5 minutes under the ``"bars"`` key.  On network
    failure, falls back to a local fixture file in DEBUG, otherwise returns
    an empty dict.
    """
    bars = cache.get("bars")
    if bars:
        logger.debug("cache hit")
        return bars

    def bars_to_dict(root):
        # root[1] is assumed to be the container of bar elements — TODO
        # confirm against the real API payload.
        bars = {}
        for bar in root[1]:
            # BUG FIX: original read ``issuer.tag`` here — ``issuer`` was
            # never defined (NameError on first iteration) and the result
            # was unused, so the line is removed.
            entry = {}
            for pair in bar:
                # Strip any XML namespace prefix: "{ns}tag" -> "tag".
                tag = pair.tag.split("}")[-1]
                entry[tag] = pair.text
            key = entry["buzz"].strip().lower()
            bars[key] = entry
        return bars

    try:
        r = requests.get(
            f"{API}",
            timeout=5,
        )
        root = ElementTree.fromstring(r.text)
        bars = bars_to_dict(root)
        cache.set("bars", bars, 60 * 5)
        return bars
    except (ConnectionError, Timeout):
        if settings.DEBUG:
            # Offline development fallback: parse a checked-in fixture.
            tree = ElementTree.parse("scripts/bars.xml")
            root = tree.getroot()
            bars = bars_to_dict(root)
            # BUG FIX: original called cache.set("bars", 60 * 5), which
            # cached the integer 300 as the value instead of the data.
            cache.set("bars", bars, 60 * 5)
            return bars
        else:
            return {}


class Foo(models.Model):
    """Model whose ``api_data`` property is enriched from a third-party API."""

    # Simple local flag; unrelated to the API lookup below.
    baz = models.BooleanField(default=False)

    @cached_property
    def api_data(self):
        # IDIOM FIX: the parameter was named ``foo`` instead of ``self``;
        # renamed (positional, so callers are unaffected).
        """Return True iff the API entry for this object has "biz" == "true".

        Cached per-instance, so the (potentially slow) API/cache lookup in
        third_party_api() runs at most once per object lifetime.
        """
        bars = third_party_api()
        # NOTE(review): third_party_api() keys its dict on
        # entry["buzz"].strip().lower() (a string) while self.id is an
        # integer primary key — confirm this lookup can ever match.
        match = bars.get(self.id)
        if match:
            # match.get default is False, and False == "true" is False,
            # so a missing "biz" tag yields False as before.
            return match.get("biz", False) == "true"
        return False

当我在暂存环境(https://host.com/api/foos/?page_size=7)访问可浏览的API时,只要page_size的值大于7就会得到Bad Gateway。我很确定是因为计算量太大,超出了默认的请求超时时间。

解决方法

该设置位于 settings/base.py 内部:

https://github.com/pydanny/cookiecutter-django/blob/8d5542d6754b520e0698286d8a0e6b6fc1257715/%7B%7Bcookiecutter.project_slug%7D%7D/config/settings/base.py#L289

# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit
CELERY_TASK_TIME_LIMIT = 5 * 60
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
CELERY_TASK_SOFT_TIME_LIMIT = 60

单位是秒。