refactor: helm to kustomize (wip)

This commit is contained in:
Nicolas Giard 2024-05-04 01:13:19 -04:00
parent e35b46eed8
commit 3ea70f2ceb
5 changed files with 656 additions and 0 deletions

190
k8s/datatracker.yaml Normal file
View file

@ -0,0 +1,190 @@
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: deployment
spec:
  replicas: 1
  revisionHistoryLimit: 2
  selector:
    matchLabels:
      app: datatracker
  strategy:
    type: Recreate
  template:
    metadata:
      labels:
        app: datatracker
    spec:
      # -------------------------------------------------------
      # Node Affinity
      # -------------------------------------------------------
      nodeSelector:
        doks.digitalocean.com/node-pool: pool-apollo-hperf
      affinity:
        # Prefer not to schedule next to a mailarchive pod.
        podAntiAffinity:
          preferredDuringSchedulingIgnoredDuringExecution:
            - weight: 100
              podAffinityTerm:
                labelSelector:
                  matchExpressions:
                    - key: app
                      operator: In
                      values:
                        - mailarchive
                topologyKey: "kubernetes.io/hostname"
      # -------------------------------------------------------
      securityContext:
        runAsNonRoot: true
      containers:
        # -----------------------------------------------------
        # ScoutAPM Container
        # -----------------------------------------------------
        - name: scoutapm
          image: "scoutapp/scoutapm:version-1.4.0"
          imagePullPolicy: IfNotPresent
          livenessProbe:
            exec:
              command:
                - "sh"
                - "-c"
                - "./core-agent probe --tcp 0.0.0.0:6590 | grep -q 'Agent found'"
          securityContext:
            readOnlyRootFilesystem: true
            runAsUser: 65534  # "nobody" user by default
            runAsGroup: 65534  # "nogroup" group by default
        # -----------------------------------------------------
        # Datatracker Container
        # -----------------------------------------------------
        - name: datatracker
          # $APP_IMAGE_TAG is substituted before the manifest is applied
          image: "ghcr.io/ietf-tools/datatracker:$APP_IMAGE_TAG"
          imagePullPolicy: Always
          ports:
            - containerPort: 80
              name: http
              protocol: TCP
          volumeMounts:
            - name: dt-vol
              mountPath: /a
            - name: dt-tmp
              mountPath: /tmp
            - name: dt-cfg
              mountPath: /workspace/ietf/settings_local.py
              subPath: settings_local.py
          envFrom:
            - configMapRef:
                name: django-config
          securityContext:
            allowPrivilegeEscalation: false
            capabilities:
              drop:
                - ALL
            readOnlyRootFilesystem: true
            runAsUser: 1000
            runAsGroup: 1000
      volumes:
        - name: dt-vol
          nfs:
            server: 10.108.0.18
            path: "/mnt/datatracker"
            readOnly: false
        - name: dt-tmp
          emptyDir:
            sizeLimit: "2Gi"
        - name: dt-cfg
          configMap:
            name: files-cfgmap
      dnsPolicy: ClusterFirst
      restartPolicy: Always
      terminationGracePeriodSeconds: 30
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: django-config
data:
  # n.b., these are debug values / non-secret secrets
  DATATRACKER_SERVER_MODE: "development" # development for staging, production for production
  DATATRACKER_ADMINS: |-
    Robert Sparks <rjsparks@nostrum.com>
    Ryan Cross <rcross@amsl.com>
    Kesara Rathnayake <kesara@staff.ietf.org>
    Jennifer Richards <jennifer@staff.ietf.org>
    Nicolas Giard <nick@staff.ietf.org>
  DATATRACKER_ALLOWED_HOSTS: ".ietf.org" # newline-separated list also allowed
  # (fixed: was misspelled DATATRACKER_DATATRACKER_DEBUG; settings_local.py reads DATATRACKER_DEBUG)
  # DATATRACKER_DEBUG: "false"
  # DB access details - needs to be filled in
  # DATATRACKER_DBHOST: "db"
  # DATATRACKER_DBPORT: "5432"
  # DATATRACKER_DBNAME: "datatracker"
  # DATATRACKER_DBUSER: "django" # secret
  # DATATRACKER_DBPASS: "RkTkDPFnKpko" # secret
  DATATRACKER_DJANGO_SECRET_KEY: "PDwXboUq!=hPjnrtG2=ge#N$Dwy+wn@uivrugwpic8mxyPfHk" # secret
  # Set this to point testing / staging at the production statics server until we
  # sort that out
  # DATATRACKER_STATIC_URL: "https://static.ietf.org/dt/12.10.0/"
  # DATATRACKER_EMAIL_DEBUG: "true"
  # Outgoing email details
  # DATATRACKER_EMAIL_HOST: "localhost" # defaults to localhost
  # DATATRACKER_EMAIL_PORT: "2025" # defaults to 2025
  # The value here is the default from settings.py (i.e., not actually secret)
  DATATRACKER_NOMCOM_APP_SECRET_B64: "m9pzMezVoFNJfsvU9XSZxGnXnwup6P5ZgCQeEnROOoQ=" # secret
  DATATRACKER_IANA_SYNC_PASSWORD: "this-is-the-iana-sync-password" # secret
  DATATRACKER_RFC_EDITOR_SYNC_PASSWORD: "this-is-the-rfc-editor-sync-password" # secret
  DATATRACKER_YOUTUBE_API_KEY: "this-is-the-youtube-api-key" # secret
  DATATRACKER_GITHUB_BACKUP_API_KEY: "this-is-the-github-backup-api-key" # secret
  # API key configuration
  # NOTE(review): "ES265" looks like a typo for the JWA algorithm "ES256", but this
  # value must match API_KEY_TYPE expected by ietf/settings.py — confirm there
  # before changing it here.
  DATATRACKER_API_KEY_TYPE: "ES265"
  # secret - value here is the default from settings.py (i.e., not actually secret)
  DATATRACKER_API_PUBLIC_KEY_PEM_B64: |-
    Ci0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tCk1Ga3dFd1lIS29aSXpqMENBUVlJS
    29aSXpqMERBUWNEUWdBRXFWb2pzYW9mREpTY3VNSk4rdHNodW15Tk01TUUKZ2Fyel
    ZQcWtWb3ZtRjZ5RTdJSi9kdjRGY1YrUUtDdEovck9TOGUzNlk4WkFFVll1dWtoZXM
    weVoxdz09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo=
  # secret - value here is the default from settings.py (i.e., not actually secret)
  DATATRACKER_API_PRIVATE_KEY_PEM_B64: |-
    Ci0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLQpNSUdIQWdFQU1CTUdCeXFHU000O
    UFnRUdDQ3FHU000OUF3RUhCRzB3YXdJQkFRUWdvSTZMSmtvcEtxOFhySGk5ClFxR1
    F2RTRBODNURllqcUx6KzhnVUxZZWNzcWhSQU5DQUFTcFdpT3hxaDhNbEp5NHdrMzY
    yeUc2Ykkwemt3U0IKcXZOVStxUldpK1lYcklUc2duOTIvZ1Z4WDVBb0swbitzNUx4
    N2ZwanhrQVJWaTY2U0Y2elRKblgKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo=
  # DATATRACKER_MEETECHO_API_BASE: "https://meetings.conf.meetecho.com/api/v1/"
  DATATRACKER_MEETECHO_CLIENT_ID: "this-is-the-meetecho-client-id" # secret
  DATATRACKER_MEETECHO_CLIENT_SECRET: "this-is-the-meetecho-client-secret" # secret
  # DATATRACKER_MATOMO_SITE_ID: "7" # must be present to enable Matomo
  # DATATRACKER_MATOMO_DOMAIN_PATH: "analytics.ietf.org"
  CELERY_PASSWORD: "this-is-a-secret" # secret
  DATATRACKER_APP_API_TOKENS_JSON: "{}" # secret
  # use this to override default - one entry per line
  # DATATRACKER_CSRF_TRUSTED_ORIGINS: |-
  #   https://datatracker.staging.ietf.org
  # Scout configuration
  DATATRACKER_SCOUT_KEY: "this-is-the-scout-key"
  DATATRACKER_SCOUT_NAME: "StagingDatatracker"
---
apiVersion: v1
kind: Service
metadata:
  name: datatracker
spec:
  type: ClusterIP
  ports:
    - port: 80
      targetPort: http  # the named "http" containerPort on the datatracker pod
      protocol: TCP
      name: http
  selector:
    app: datatracker

10
k8s/kustomization.yaml Normal file
View file

@ -0,0 +1,10 @@
# Explicit apiVersion/kind header added: recommended for Kustomization files and
# avoids "missing metadata" deprecation warnings from newer kustomize versions.
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

namespace: datatracker
namePrefix: datatracker-

configMapGenerator:
  - name: files-cfgmap
    files:
      - settings_local.py

resources:
  - datatracker.yaml
  - memcached.yaml
  - rabbitmq.yaml

14
k8s/memcached.yaml Normal file
View file

@ -0,0 +1,14 @@
---
# NOTE(review): only a Service is defined here; no memcached Deployment/Pod
# exposing the named "memcached" port is visible in this file yet (wip) — confirm
# the workload is added before relying on this Service.
apiVersion: v1
kind: Service
metadata:
  name: memcached
spec:
  type: ClusterIP
  ports:
    - port: 11211
      targetPort: memcached
      protocol: TCP
      name: memcached
  selector:
    app: memcached

183
k8s/rabbitmq.yaml Normal file
View file

@ -0,0 +1,183 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: rabbitmq
spec:
  replicas: 1
  revisionHistoryLimit: 2
  serviceName: rabbitmq
  selector:
    matchLabels:
      app: rabbitmq
  template:
    metadata:
      labels:
        app: rabbitmq
    spec:
      # -------------------------------------------------------
      # Node Affinity
      # -------------------------------------------------------
      nodeSelector:
        doks.digitalocean.com/node-pool: pool-apollo-hperf
      affinity:
        # Prefer to schedule next to a datatracker pod.
        podAffinity:
          preferredDuringSchedulingIgnoredDuringExecution:
            - weight: 100
              podAffinityTerm:
                labelSelector:
                  matchExpressions:
                    - key: app
                      operator: In
                      values:
                        - datatracker
                topologyKey: "kubernetes.io/hostname"
      # -------------------------------------------------------
      securityContext:
        runAsNonRoot: true
      initContainers:
        # -----------------------------------------------------
        # Init RabbitMQ data
        # -----------------------------------------------------
        - name: init-rabbitmq
          image: busybox:stable
          command:
            - "sh"
            - "-c"
            - "mkdir -p -m700 /mnt/rabbitmq && chown 100:101 /mnt/rabbitmq"
          securityContext:
            # must run as root to chown the data dir for the rabbitmq uid/gid
            runAsNonRoot: false
            runAsUser: 0
            readOnlyRootFilesystem: true
          volumeMounts:
            - name: "rabbitmq-data"
              mountPath: "/mnt"
      containers:
        # -----------------------------------------------------
        # RabbitMQ Container
        # -----------------------------------------------------
        - image: "ghcr.io/ietf-tools/datatracker-mq:3.12-alpine"
          imagePullPolicy: Always
          name: rabbitmq
          ports:
            - name: amqp
              containerPort: 5672
              protocol: TCP
          volumeMounts:
            - name: rabbitmq-data
              mountPath: /var/lib/rabbitmq
              subPath: "rabbitmq"
            - name: rabbitmq-tmp
              mountPath: /tmp
            - name: rabbitmq-config
              mountPath: "/etc/rabbitmq"
          livenessProbe:
            exec:
              command: ["rabbitmq-diagnostics", "-q", "ping"]
            periodSeconds: 30
            timeoutSeconds: 5
          startupProbe:
            initialDelaySeconds: 15
            periodSeconds: 5
            timeoutSeconds: 5
            successThreshold: 1
            failureThreshold: 60
            exec:
              command: ["rabbitmq-diagnostics", "-q", "ping"]
          securityContext:
            allowPrivilegeEscalation: false
            capabilities:
              drop:
                - ALL
            readOnlyRootFilesystem: true
            # rabbitmq image sets up uid/gid 100/101
            runAsUser: 100
            runAsGroup: 101
      volumes:
        - name: rabbitmq-data
          # TODO: replace with persistent storage (e.g. volumeClaimTemplates /
          # a PersistentVolumeClaim) before production use. The original manifest
          # left this volume without any source, which the API server rejects;
          # emptyDir makes the manifest valid but data is lost on rescheduling.
          emptyDir: {}
        - name: rabbitmq-tmp
          emptyDir:
            sizeLimit: "50Mi"
        - name: rabbitmq-config
          configMap:
            name: "rabbitmq-configmap"
      dnsPolicy: ClusterFirst
      restartPolicy: Always
      terminationGracePeriodSeconds: 30
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: rabbitmq-configmap
data:
  # Seed definitions loaded at broker startup: one vhost "dt" plus the
  # "datatracker" user and its permissions.
  definitions.json: |-
    {
      "permissions": [
        {
          "configure": ".*",
          "read": ".*",
          "user": "datatracker",
          "vhost": "dt",
          "write": ".*"
        }
      ],
      "users": [
        {
          "hashing_algorithm": "rabbit_password_hashing_sha256",
          "limits": {},
          "name": "datatracker",
          "password_hash": "HJxcItcpXtBN+R/CH7dUelfKBOvdUs3AWo82SBw2yLMSguzb",
          "tags": []
        }
      ],
      "vhosts": [
        {
          "limits": [],
          "metadata": {
            "description": "",
            "tags": []
          },
          "name": "dt"
        }
      ]
    }
  rabbitmq.conf: |-
    # prevent guest from logging in over tcp
    loopback_users.guest = true
    # load saved definitions
    load_definitions = /etc/rabbitmq/definitions.json
    # Ensure that enough disk is available to flush to disk. To do this, need to limit the
    # memory available to the container to something reasonable. See
    # https://www.rabbitmq.com/production-checklist.html#monitoring-and-resource-usage
    # for recommendations.
    # 1-1.5 times the memory available to the container is adequate for disk limit
    disk_free_limit.absolute = 6000MB
    # This should be ~40% of the memory available to the container. Use an
    # absolute number because relative will be proprtional to the full machine
    # memory.
    vm_memory_high_watermark.absolute = 1600MB
    # Logging
    log.file = false
    log.console = true
    log.console.level = info
    log.console.formatter = json
---
apiVersion: v1
kind: Service
metadata:
  name: rabbitmq
spec:
  type: ClusterIP
  clusterIP: None # headless service
  ports:
    - port: 5672
      targetPort: amqp  # the named "amqp" containerPort on the rabbitmq pod
      protocol: TCP
      name: amqp
  selector:
    app: rabbitmq

259
k8s/settings_local.py Normal file
View file

@ -0,0 +1,259 @@
# Copyright The IETF Trust 2007-2024, All Rights Reserved
# -*- coding: utf-8 -*-
from base64 import b64decode
from email.utils import parseaddr
import json
import os

from ietf import __release_hash__
from ietf.settings import *  # pyflakes:ignore
def _remove_whitespace_and_b64decode(s):
"""Helper to strip out whitespace and base64 decode"""
return b64decode("".join(s.split()))
def _multiline_to_list(s):
"""Helper to split at newlines and conver to list"""
return [item.strip() for item in s.split("\n")]
# Default to "development". Production _must_ set DATATRACKER_SERVER_MODE="production" in the env!
SERVER_MODE = os.environ.get("DATATRACKER_SERVER_MODE", "development")

# Secrets: each of the following is required; startup fails fast if unset.
_SECRET_KEY = os.environ.get("DATATRACKER_DJANGO_SECRET_KEY", None)
if _SECRET_KEY is not None:
    SECRET_KEY = _SECRET_KEY
else:
    raise RuntimeError("DATATRACKER_DJANGO_SECRET_KEY must be set")

_NOMCOM_APP_SECRET_B64 = os.environ.get("DATATRACKER_NOMCOM_APP_SECRET_B64", None)
if _NOMCOM_APP_SECRET_B64 is not None:
    NOMCOM_APP_SECRET = _remove_whitespace_and_b64decode(_NOMCOM_APP_SECRET_B64)
else:
    raise RuntimeError("DATATRACKER_NOMCOM_APP_SECRET_B64 must be set")

_IANA_SYNC_PASSWORD = os.environ.get("DATATRACKER_IANA_SYNC_PASSWORD", None)
if _IANA_SYNC_PASSWORD is not None:
    IANA_SYNC_PASSWORD = _IANA_SYNC_PASSWORD
else:
    raise RuntimeError("DATATRACKER_IANA_SYNC_PASSWORD must be set")

_RFC_EDITOR_SYNC_PASSWORD = os.environ.get("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD", None)
if _RFC_EDITOR_SYNC_PASSWORD is not None:
    # Fixed: use the value already read above instead of re-reading the
    # environment a second time (consistent with the sibling settings).
    RFC_EDITOR_SYNC_PASSWORD = _RFC_EDITOR_SYNC_PASSWORD
else:
    raise RuntimeError("DATATRACKER_RFC_EDITOR_SYNC_PASSWORD must be set")

_YOUTUBE_API_KEY = os.environ.get("DATATRACKER_YOUTUBE_API_KEY", None)
if _YOUTUBE_API_KEY is not None:
    YOUTUBE_API_KEY = _YOUTUBE_API_KEY
else:
    raise RuntimeError("DATATRACKER_YOUTUBE_API_KEY must be set")

_GITHUB_BACKUP_API_KEY = os.environ.get("DATATRACKER_GITHUB_BACKUP_API_KEY", None)
if _GITHUB_BACKUP_API_KEY is not None:
    GITHUB_BACKUP_API_KEY = _GITHUB_BACKUP_API_KEY
else:
    raise RuntimeError("DATATRACKER_GITHUB_BACKUP_API_KEY must be set")

_API_KEY_TYPE = os.environ.get("DATATRACKER_API_KEY_TYPE", None)
if _API_KEY_TYPE is not None:
    API_KEY_TYPE = _API_KEY_TYPE
else:
    raise RuntimeError("DATATRACKER_API_KEY_TYPE must be set")

_API_PUBLIC_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PUBLIC_KEY_PEM_B64", None)
if _API_PUBLIC_KEY_PEM_B64 is not None:
    API_PUBLIC_KEY_PEM = _remove_whitespace_and_b64decode(_API_PUBLIC_KEY_PEM_B64)
else:
    raise RuntimeError("DATATRACKER_API_PUBLIC_KEY_PEM_B64 must be set")

_API_PRIVATE_KEY_PEM_B64 = os.environ.get("DATATRACKER_API_PRIVATE_KEY_PEM_B64", None)
if _API_PRIVATE_KEY_PEM_B64 is not None:
    API_PRIVATE_KEY_PEM = _remove_whitespace_and_b64decode(_API_PRIVATE_KEY_PEM_B64)
else:
    raise RuntimeError("DATATRACKER_API_PRIVATE_KEY_PEM_B64 must be set")
# Set DEBUG if DATATRACKER_DEBUG env var is the word "true"
DEBUG = os.environ.get("DATATRACKER_DEBUG", "false").lower() == "true"

# DATATRACKER_ALLOWED_HOSTS env var is a newline-separated list of allowed hosts
# (fixed: the old comment said "comma-separated", but the value is parsed with
# _multiline_to_list, which splits on newlines)
_allowed_hosts_str = os.environ.get("DATATRACKER_ALLOWED_HOSTS", None)
if _allowed_hosts_str is not None:
    ALLOWED_HOSTS = _multiline_to_list(_allowed_hosts_str)

# DB connection parameters, all overridable from the environment.
DATABASES = {
    "default": {
        "HOST": os.environ.get("DATATRACKER_DBHOST", "db"),
        "PORT": os.environ.get("DATATRACKER_DBPORT", "5432"),
        "NAME": os.environ.get("DATATRACKER_DBNAME", "datatracker"),
        "ENGINE": "django.db.backends.postgresql",
        "USER": os.environ.get("DATATRACKER_DBUSER", "django"),
        "PASSWORD": os.environ.get("DATATRACKER_DBPASS", ""),
    },
}

# DATATRACKER_ADMINS is a newline-delimited list of addresses parseable by email.utils.parseaddr
_admins_str = os.environ.get("DATATRACKER_ADMINS", None)
if _admins_str is not None:
    ADMINS = [parseaddr(admin) for admin in _multiline_to_list(_admins_str)]
else:
    raise RuntimeError("DATATRACKER_ADMINS must be set")

USING_DEBUG_EMAIL_SERVER = os.environ.get("DATATRACKER_EMAIL_DEBUG", "false").lower() == "true"
EMAIL_HOST = os.environ.get("DATATRACKER_EMAIL_HOST", "localhost")
EMAIL_PORT = int(os.environ.get("DATATRACKER_EMAIL_PORT", "2025"))

# Celery broker: credentials are required; host/queue default to the in-cluster
# rabbitmq service and the "dt" vhost seeded by the rabbitmq configmap.
_celery_password = os.environ.get("CELERY_PASSWORD", None)
if _celery_password is None:
    raise RuntimeError("CELERY_PASSWORD must be set")
CELERY_BROKER_URL = "amqp://datatracker:{password}@{host}/{queue}".format(
    host=os.environ.get("RABBITMQ_HOSTNAME", "rabbitmq"),
    password=_celery_password,
    queue=os.environ.get("RABBITMQ_QUEUE", "dt"),
)
# External sync endpoints and related constants.
IANA_SYNC_USERNAME = "ietfsync"
IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes"
IANA_SYNC_PROTOCOLS_URL = "http://www.iana.org/protocols/"
RFC_EDITOR_NOTIFICATION_URL = "http://www.rfc-editor.org/parser/parser.php"
STATS_REGISTRATION_ATTENDEES_JSON_URL = 'https://registration.ietf.org/{number}/attendees/?apikey=redacted'

#FIRST_CUTOFF_DAYS = 12
#SECOND_CUTOFF_DAYS = 12
#SUBMISSION_CUTOFF_DAYS = 26
#SUBMISSION_CORRECTION_DAYS = 57

MEETING_MATERIALS_SUBMISSION_CUTOFF_DAYS = 26
MEETING_MATERIALS_SUBMISSION_CORRECTION_DAYS = 54

HTPASSWD_COMMAND = "/usr/bin/htpasswd2"

# Meetecho API credentials: both id and secret must be present, otherwise fail fast.
_MEETECHO_CLIENT_ID = os.environ.get("DATATRACKER_MEETECHO_CLIENT_ID", None)
_MEETECHO_CLIENT_SECRET = os.environ.get("DATATRACKER_MEETECHO_CLIENT_SECRET", None)
if _MEETECHO_CLIENT_ID is None or _MEETECHO_CLIENT_SECRET is None:
    raise RuntimeError(
        "DATATRACKER_MEETECHO_CLIENT_ID and DATATRACKER_MEETECHO_CLIENT_SECRET must be set"
    )
MEETECHO_API_CONFIG = {
    "api_base": os.environ.get(
        "DATATRACKER_MEETECHO_API_BASE",
        "https://meetings.conf.meetecho.com/api/v1/",
    ),
    "client_id": _MEETECHO_CLIENT_ID,
    "client_secret": _MEETECHO_CLIENT_SECRET,
    "request_timeout": 3.01,  # python-requests doc recommend slightly > a multiple of 3 seconds
}

# Optional JSON mapping of app API tokens; defaults to no tokens.
_APP_API_TOKENS_JSON = os.environ.get("DATATRACKER_APP_API_TOKENS_JSON", None)
APP_API_TOKENS = json.loads(_APP_API_TOKENS_JSON) if _APP_API_TOKENS_JSON is not None else {}

EMAIL_COPY_TO = ""

# Until we teach the datatracker to look beyond cloudflare for this check
IDSUBMIT_MAX_DAILY_SAME_SUBMITTER = 5000
# Leave DATATRACKER_MATOMO_SITE_ID unset to disable Matomo reporting
if "DATATRACKER_MATOMO_SITE_ID" in os.environ:
    MATOMO_DOMAIN_PATH = os.environ.get("DATATRACKER_MATOMO_DOMAIN_PATH", "analytics.ietf.org")
    MATOMO_SITE_ID = os.environ.get("DATATRACKER_MATOMO_SITE_ID")
    MATOMO_DISABLE_COOKIES = True

# Leave DATATRACKER_SCOUT_KEY unset to disable Scout APM agent
_SCOUT_KEY = os.environ.get("DATATRACKER_SCOUT_KEY", None)
if _SCOUT_KEY is not None:
    # Register the Scout Django app in the slot matching the server mode.
    _scout_apps = ["scout_apm.django"]
    if SERVER_MODE == "production":
        PROD_PRE_APPS = _scout_apps
    else:
        DEV_PRE_APPS = _scout_apps
    SCOUT_MONITOR = True
    SCOUT_KEY = _SCOUT_KEY
    SCOUT_NAME = os.environ.get("DATATRACKER_SCOUT_NAME", "Datatracker")
    SCOUT_ERRORS_ENABLED = True
    SCOUT_SHUTDOWN_MESSAGE_ENABLED = False
    # Talk to the core agent sidecar over TCP rather than a unix socket.
    _scout_agent_host = os.environ.get("DATATRACKER_SCOUT_CORE_AGENT_HOST", "localhost")
    _scout_agent_port = os.environ.get("DATATRACKER_SCOUT_CORE_AGENT_PORT", "6590")
    SCOUT_CORE_AGENT_SOCKET_PATH = f"tcp://{_scout_agent_host}:{_scout_agent_port}"
    SCOUT_CORE_AGENT_DOWNLOAD = False
    SCOUT_CORE_AGENT_LAUNCH = False
    SCOUT_REVISION_SHA = __release_hash__[:7]
# Path to the email alias lists. Used by ietf.utils.aliases
DRAFT_ALIASES_PATH = "/a/postfix/draft-aliases"
DRAFT_VIRTUAL_PATH = "/a/postfix/draft-virtual"
GROUP_ALIASES_PATH = "/a/postfix/group-aliases"
GROUP_VIRTUAL_PATH = "/a/postfix/group-virtual"

# Default the static files URL to the one matching this release when not overridden.
STATIC_URL = os.environ.get("DATATRACKER_STATIC_URL", None)
if STATIC_URL is None:
    from ietf import __version__
    STATIC_URL = f"https://static.ietf.org/dt/{__version__}/"

# Set these to the same as "production" in settings.py, whether production mode or not
MEDIA_ROOT = "/a/www/www6s/lib/dt/media/"
MEDIA_URL = "https://www.ietf.org/lib/dt/media/"
PHOTOS_DIRNAME = "photo"
PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME

# Normally only set for debug, but needed until we have a real FS
DJANGO_VITE_MANIFEST_PATH = os.path.join(BASE_DIR, 'static/dist-neue/manifest.json')

# Binaries that are different in the docker image
DE_GFM_BINARY = "/usr/local/bin/de-gfm"
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"

# Duplicating production cache from settings.py and using it whether we're in production mode or not
MEMCACHED_HOST = os.environ.get("MEMCACHED_SERVICE_HOST", "127.0.0.1")
MEMCACHED_PORT = os.environ.get("MEMCACHED_SERVICE_PORT", "11211")
_memcached_location = f"{MEMCACHED_HOST}:{MEMCACHED_PORT}"
from ietf import __version__
# NOTE(review): sha384 below is not imported explicitly in this file; it is
# presumably exposed via `from ietf.settings import *` — confirm.
CACHES = {
    "default": {
        "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
        "LOCATION": _memcached_location,
        "VERSION": __version__,
        "KEY_PREFIX": "ietf:dt",
        "KEY_FUNCTION": lambda key, key_prefix, version: (
            f"{key_prefix}:{version}:{sha384(str(key).encode('utf8')).hexdigest()}"
        ),
    },
    "sessions": {
        "BACKEND": "ietf.utils.cache.LenientMemcacheCache",
        "LOCATION": _memcached_location,
        # No release-specific VERSION setting.
        "KEY_PREFIX": "ietf:dt",
    },
    "htmlized": {
        "BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
        "LOCATION": "/a/cache/datatracker/htmlized",
        "OPTIONS": {
            "MAX_ENTRIES": 100000,  # 100,000
        },
    },
    "pdfized": {
        "BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
        "LOCATION": "/a/cache/datatracker/pdfized",
        "OPTIONS": {
            "MAX_ENTRIES": 100000,  # 100,000
        },
    },
    "slowpages": {
        "BACKEND": "django.core.cache.backends.filebased.FileBasedCache",
        "LOCATION": "/a/cache/datatracker/slowpages",
        "OPTIONS": {
            "MAX_ENTRIES": 5000,
        },
    },
}

# Optional newline-separated override of CSRF trusted origins.
_csrf_trusted_origins_str = os.environ.get("DATATRACKER_CSRF_TRUSTED_ORIGINS")
if _csrf_trusted_origins_str is not None:
    CSRF_TRUSTED_ORIGINS = _multiline_to_list(_csrf_trusted_origins_str)