ci: merge main to release (#8250)

This commit is contained in:
Robert Sparks 2024-11-21 08:51:12 -06:00 committed by GitHub
commit 03ced83655
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
57 changed files with 1504 additions and 1270 deletions

282
.gitattributes vendored
View file

@ -1,2 +1,280 @@
/.yarn/releases/** binary
/.yarn/plugins/** binary
# Auto detect text files and perform LF normalization
* text=auto
# ---------------------------------------------------
# Python Projects
# ---------------------------------------------------
# Source files
*.pxd text diff=python
*.py text diff=python
*.py3 text diff=python
*.pyw text diff=python
*.pyx text diff=python
*.pyz text diff=python
*.pyi text diff=python
# Binary files
*.db binary
*.p binary
*.pkl binary
*.pickle binary
*.pyc binary export-ignore
*.pyo binary export-ignore
*.pyd binary
# Jupyter notebook
*.ipynb text eol=lf
# ---------------------------------------------------
# Web Projects
# ---------------------------------------------------
# Source code
*.bash text eol=lf
*.bat text eol=crlf
*.cmd text eol=crlf
*.coffee text
*.css text diff=css
*.htm text diff=html
*.html text diff=html
*.inc text
*.ini text
*.js text
*.mjs text
*.cjs text
*.json text
*.jsx text
*.less text
*.ls text
*.map text -diff
*.od text
*.onlydata text
*.php text diff=php
*.pl text
*.ps1 text eol=crlf
*.py text diff=python
*.rb text diff=ruby
*.sass text
*.scm text
*.scss text diff=css
*.sh text eol=lf
.husky/* text eol=lf
*.sql text
*.styl text
*.tag text
*.ts text
*.tsx text
*.xml text
*.xhtml text diff=html
# Docker
Dockerfile text
# Documentation
*.ipynb text eol=lf
*.markdown text diff=markdown
*.md text diff=markdown
*.mdwn text diff=markdown
*.mdown text diff=markdown
*.mkd text diff=markdown
*.mkdn text diff=markdown
*.mdtxt text
*.mdtext text
*.txt text
AUTHORS text
CHANGELOG text
CHANGES text
CONTRIBUTING text
COPYING text
copyright text
*COPYRIGHT* text
INSTALL text
license text
LICENSE text
NEWS text
readme text
*README* text
TODO text
# Templates
*.dot text
*.ejs text
*.erb text
*.haml text
*.handlebars text
*.hbs text
*.hbt text
*.jade text
*.latte text
*.mustache text
*.njk text
*.phtml text
*.pug text
*.svelte text
*.tmpl text
*.tpl text
*.twig text
*.vue text
# Configs
*.cnf text
*.conf text
*.config text
.editorconfig text
.env text
.gitattributes text
.gitconfig text
.htaccess text
*.lock text -diff
package.json text eol=lf
package-lock.json text eol=lf -diff
pnpm-lock.yaml text eol=lf -diff
.prettierrc text
yarn.lock text -diff
*.toml text
*.yaml text
*.yml text
browserslist text
Makefile text
makefile text
# Fixes syntax highlighting on GitHub to allow comments
tsconfig.json linguist-language=JSON-with-Comments
# Heroku
Procfile text
# Graphics
*.ai binary
*.bmp binary
*.eps binary
*.gif binary
*.gifv binary
*.ico binary
*.jng binary
*.jp2 binary
*.jpg binary
*.jpeg binary
*.jpx binary
*.jxr binary
*.pdf binary
*.png binary
*.psb binary
*.psd binary
*.svg text
*.svgz binary
*.tif binary
*.tiff binary
*.wbmp binary
*.webp binary
# Audio
*.kar binary
*.m4a binary
*.mid binary
*.midi binary
*.mp3 binary
*.ogg binary
*.ra binary
# Video
*.3gpp binary
*.3gp binary
*.as binary
*.asf binary
*.asx binary
*.avi binary
*.fla binary
*.flv binary
*.m4v binary
*.mng binary
*.mov binary
*.mp4 binary
*.mpeg binary
*.mpg binary
*.ogv binary
*.swc binary
*.swf binary
*.webm binary
# Archives
*.7z binary
*.gz binary
*.jar binary
*.rar binary
*.tar binary
*.zip binary
# Fonts
*.ttf binary
*.eot binary
*.otf binary
*.woff binary
*.woff2 binary
# Executables
*.exe binary
*.pyc binary
# Prevents massive diffs caused by vendored, minified files
**/.yarn/releases/** binary
**/.yarn/plugins/** binary
# RC files (like .babelrc or .eslintrc)
*.*rc text
# Ignore files (like .npmignore or .gitignore)
*.*ignore text
# Prevents massive diffs from built files
dist/* binary
# ---------------------------------------------------
# Common
# ---------------------------------------------------
# Documents
*.bibtex text diff=bibtex
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
*.md text diff=markdown
*.mdx text diff=markdown
*.tex text diff=tex
*.adoc text
*.textile text
*.mustache text
*.csv text eol=crlf
*.tab text
*.tsv text
*.txt text
*.sql text
*.epub diff=astextplain
# Text files where line endings should be preserved
*.patch -text
# ---------------------------------------------------
# Vzic specific
# ---------------------------------------------------
*.pl text diff=perl
*.pm text diff=perl
# C/C++
*.c text diff=cpp
*.cc text diff=cpp
*.cxx text diff=cpp
*.cpp text diff=cpp
*.cpi text diff=cpp
*.c++ text diff=cpp
*.hpp text diff=cpp
*.h text diff=cpp
*.h++ text diff=cpp
*.hh text diff=cpp

View file

@ -137,6 +137,7 @@ jobs:
uses: ./.github/workflows/tests.yml
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
needs: [prepare]
secrets: inherit
with:
ignoreLowerCoverage: ${{ github.event.inputs.ignoreLowerCoverage == 'true' }}
skipSelenium: true
@ -149,7 +150,8 @@ jobs:
name: Make Release
if: ${{ !failure() && !cancelled() }}
needs: [tests, prepare]
runs-on: ubuntu-latest
runs-on:
group: hperf-8c32r
permissions:
contents: write
packages: write
@ -166,215 +168,107 @@ jobs:
fetch-depth: 1
fetch-tags: false
- name: Launch build VM
id: azlaunch
timeout-minutes: 10
run: |
echo "Authenticating to Azure..."
az login --service-principal -u ${{ secrets.AZ_BUILD_APP_ID }} -p ${{ secrets.AZ_BUILD_PWD }} --tenant ${{ secrets.AZ_BUILD_TENANT_ID }}
echo "Creating VM..."
vminfo=$(az vm create \
--resource-group ghaDatatracker \
--name tmpGhaBuildVM-${{ github.run_number }} \
--image Ubuntu2204 \
--admin-username azureuser \
--generate-ssh-keys \
--priority Spot \
--size Standard_D8ads_v5 \
--max-price -1 \
--ephemeral-os-disk \
--os-disk-size-gb 100 \
--eviction-policy Delete \
--nic-delete-option Delete \
--os-disk-delete-option Delete \
--output tsv \
--query "publicIpAddress")
echo "ipaddr=$vminfo" >> "$GITHUB_OUTPUT"
echo "VM Public IP: $vminfo"
cat ~/.ssh/id_rsa > ${{ github.workspace }}/prvkey.key
echo "Fetching SSH host public keys..."
until ssh-keyscan -t rsa $vminfo 2> /dev/null
do
echo "Will try again in 5 seconds..."
sleep 5
done
ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts
- name: Setup Node.js environment
uses: actions/setup-node@v4
with:
node-version: 18.x
- name: Remote SSH into Build VM
uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Download a Coverage Results
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
uses: actions/download-artifact@v4.1.8
with:
name: coverage
- name: Make Release Build
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_SHA: ${{ github.sha }}
GITHUB_REF_NAME: ${{ github.ref_name }}
GITHUB_RUN_ID: ${{ github.run_id }}
DEBIAN_FRONTEND: noninteractive
BROWSERSLIST_IGNORE_OLD_DATA: 1
run: |
echo "PKG_VERSION: $PKG_VERSION"
echo "GITHUB_SHA: $GITHUB_SHA"
echo "GITHUB_REF_NAME: $GITHUB_REF_NAME"
echo "Running frontend build script..."
echo "Compiling native node packages..."
yarn rebuild
echo "Packaging static assets..."
yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/
yarn legacy:build
echo "Setting version $PKG_VERSION..."
sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py
sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py
sed -i -r -e "s|^__release_branch__ += '.*'$|__release_branch__ = '$GITHUB_REF_NAME'|" ietf/__init__.py
- name: Set Production Flags
if: ${{ env.SHOULD_DEPLOY == 'true' }}
run: |
echo "Setting production flags in settings.py..."
sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" ietf/settings.py
- name: Make Release Tarball
env:
DEBIAN_FRONTEND: noninteractive
run: |
echo "Build release tarball..."
mkdir -p /home/runner/work/release
tar -czf /home/runner/work/release/release.tar.gz -X dev/build/exclude-patterns.txt .
- name: Collect + Push Statics
env:
DEBIAN_FRONTEND: noninteractive
AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_STATIC_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_STATIC_KEY_SECRET }}
AWS_DEFAULT_REGION: auto
AWS_ENDPOINT_URL: ${{ secrets.CF_R2_ENDPOINT }}
PKG_VERSION: ${{ env.PKG_VERSION }}
SHOULD_DEPLOY: ${{ env.SHOULD_DEPLOY }}
SKIP_TESTS: ${{ github.event.inputs.skiptests }}
run: |
echo "Collecting statics..."
echo "Using ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }}"
docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }} sh dev/build/collectstatics.sh
echo "Pushing statics..."
cd static
aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors
- name: Augment dockerignore for docker image build
env:
DEBIAN_FRONTEND: noninteractive
BROWSERSLIST_IGNORE_OLD_DATA: 1
TARGET_BASE: ${{ env.TARGET_BASE }}
run: |
cat >> .dockerignore <<EOL
.devcontainer
.github
.vscode
k8s
playwright
svn-history
docker-compose.yml
EOL
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
host: ${{ steps.azlaunch.outputs.ipaddr }}
port: 22
username: azureuser
command_timeout: 60m
key_path: ${{ github.workspace }}/prvkey.key
envs: GITHUB_TOKEN,GITHUB_ACTOR,GITHUB_SHA,GITHUB_REF_NAME,GITHUB_RUN_ID,AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_DEFAULT_REGION,AWS_ENDPOINT_URL,PKG_VERSION,SHOULD_DEPLOY,SKIP_TESTS,DEBIAN_FRONTEND,BROWSERSLIST_IGNORE_OLD_DATA
script_stop: true
script: |
export DEBIAN_FRONTEND=noninteractive
lsb_release -a
sudo apt-get update
sudo apt-get upgrade -y
sudo apt-get install wget unzip curl -y
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
echo "=========================================================================="
echo "Installing Docker..."
echo "=========================================================================="
curl -fsSL https://get.docker.com -o get-docker.sh
sudo sh get-docker.sh
sudo docker buildx create \
--name container-builder \
--driver docker-container \
--bootstrap --use
- name: Build Images
uses: docker/build-push-action@v6
env:
DOCKER_BUILD_SUMMARY: false
with:
context: .
file: dev/build/Dockerfile
platforms: ${{ github.event.inputs.skiparm == 'true' && 'linux/amd64' || 'linux/amd64,linux/arm64' }}
push: true
tags: ghcr.io/ietf-tools/datatracker:${{ env.PKG_VERSION }}
cache-from: type=gha
cache-to: type=gha,mode=max
echo "=========================================================================="
echo "Login to ghcr.io..."
echo "=========================================================================="
echo $GITHUB_TOKEN | sudo docker login ghcr.io -u $GITHUB_ACTOR --password-stdin
echo "=========================================================================="
echo "Installing GH CLI..."
echo "=========================================================================="
sudo mkdir -p -m 755 /etc/apt/keyrings \
&& wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
&& sudo apt update \
&& sudo apt install gh -y
echo "=========================================================================="
echo "Installing AWS CLI..."
echo "=========================================================================="
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
echo "=========================================================================="
echo "Install Node.js..."
echo "=========================================================================="
curl -fsSL https://deb.nodesource.com/setup_18.x -o nodesource_setup.sh
sudo bash nodesource_setup.sh
sudo apt-get install -y nodejs
sudo corepack enable
echo "=========================================================================="
echo "Install Python 3.x..."
echo "=========================================================================="
sudo apt-get install python3 python3-dev -y
python3 --version
echo "=========================================================================="
echo "Clone project..."
echo "=========================================================================="
sudo mkdir -p /workspace
sudo chown azureuser /workspace
cd /workspace
gh repo clone ietf-tools/datatracker -- --depth=1 --no-tags
cd datatracker
if [ "$SKIP_TESTS" = "false" ] || [ "$GITHUB_REF_NAME" = "release" ] ; then
echo "=========================================================================="
echo "Downloading coverage..."
echo "=========================================================================="
gh run download $GITHUB_RUN_ID -n coverage
fi
echo "=========================================================================="
echo "Building project..."
echo "=========================================================================="
echo "PKG_VERSION: $PKG_VERSION"
echo "GITHUB_SHA: $GITHUB_SHA"
echo "GITHUB_REF_NAME: $GITHUB_REF_NAME"
echo "Running frontend build script..."
echo "Compiling native node packages..."
yarn rebuild
echo "Packaging static assets..."
yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/
yarn legacy:build
echo "Setting version $PKG_VERSION..."
sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py
sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py
sed -i -r -e "s|^__release_branch__ += '.*'$|__release_branch__ = '$GITHUB_REF_NAME'|" ietf/__init__.py
if [ "$SHOULD_DEPLOY" = "true" ] ; then
echo "=========================================================================="
echo "Setting production flags in settings.py..."
echo "=========================================================================="
sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" ietf/settings.py
fi
echo "=========================================================================="
echo "Build release tarball..."
echo "=========================================================================="
mkdir -p /workspace/release
tar -czf /workspace/release.tar.gz -X dev/build/exclude-patterns.txt .
echo "=========================================================================="
echo "Collecting statics..."
echo "=========================================================================="
echo "Using ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }}"
sudo docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }} sh dev/build/collectstatics.sh
echo "Pushing statics..."
cd static
aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors
cd ..
echo "=========================================================================="
echo "Augment dockerignore for docker image build..."
echo "=========================================================================="
cat >> .dockerignore <<EOL
.devcontainer
.github
.vscode
k8s
playwright
svn-history
docker-compose.yml
EOL
echo "=========================================================================="
echo "Building Images..."
echo "=========================================================================="
sudo docker buildx build --file dev/build/Dockerfile --platform linux/amd64,linux/arm64 --tag ghcr.io/ietf-tools/datatracker:$PKG_VERSION --push .
- name: Fetch release tarball
run: |
mkdir -p /home/runner/work/release
chmod 0600 ${{ github.workspace }}/prvkey.key
scp -i ${{ github.workspace }}/prvkey.key azureuser@${{ steps.azlaunch.outputs.ipaddr }}:/workspace/release.tar.gz /home/runner/work/release/release.tar.gz
- name: Destroy Build VM + resources
if: always()
run: |
echo "Terminate VM..."
az vm delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }} --yes --force-deletion true
echo "Delete Public IP..."
az resource delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }}PublicIP --resource-type "Microsoft.Network/publicIPAddresses"
echo "Delete Network Security Group..."
az resource delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }}NSG --resource-type "Microsoft.Network/networkSecurityGroups"
echo "Delete Virtual Network..."
az resource delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }}VNET --resource-type "Microsoft.Network/virtualNetworks"
echo "Logout from Azure..."
az logout
- name: Update CHANGELOG
id: changelog
uses: Requarks/changelog-action@v1
@ -456,50 +350,34 @@ jobs:
steps:
- name: Notify on Slack (Success)
if: ${{ !contains(join(needs.*.result, ','), 'failure') }}
uses: slackapi/slack-github-action@v1.27.0
uses: slackapi/slack-github-action@v2
with:
channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
token: ${{ secrets.SLACK_GH_BOT }}
method: chat.postMessage
payload: |
{
"text": "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>",
"attachments": [
{
"color": "28a745",
"fields": [
{
"title": "Status",
"short": true,
"value": "Completed"
}
]
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }}
channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
text: "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }}"
attachments:
- color: "28a745"
fields:
- title: "Status"
short: true
value: "Completed"
- name: Notify on Slack (Failure)
if: ${{ contains(join(needs.*.result, ','), 'failure') }}
uses: slackapi/slack-github-action@v1.27.0
uses: slackapi/slack-github-action@v2
with:
channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
token: ${{ secrets.SLACK_GH_BOT }}
method: chat.postMessage
payload: |
{
"text": "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>",
"attachments": [
{
"color": "a82929",
"fields": [
{
"title": "Status",
"short": true,
"value": "Failed"
}
]
}
]
}
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }}
channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
text: "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }}"
attachments:
- color: "a82929"
fields:
- title: "Status"
short: true
value: "Failed"
# -----------------------------------------------------------------
# SANDBOX

View file

@ -38,7 +38,7 @@ jobs:
ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts
- name: Remote SSH into VM
uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1
uses: appleboy/ssh-action@7eaf76671a0d7eec5d98ee897acda4f968735a17
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:

View file

@ -73,9 +73,11 @@ jobs:
path: geckodriver.log
- name: Upload Coverage Results to Codecov
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v5
with:
disable_search: true
files: coverage.xml
token: ${{ secrets.CODECOV_TOKEN }}
- name: Convert Coverage Results
if: ${{ always() }}

View file

@ -3,11 +3,13 @@
n-dropdown(
:options='jumpToDayOptions'
size='huge'
:show='isDropdownOpenRef'
:show-arrow='true'
trigger='click'
@select='jumpToDay'
@clickoutside='handleCloseDropdown'
)
button
button(@click='handleOpenDropdown')
i.bi.bi-arrow-down-circle
button(@click='agendaStore.$patch({ filterShown: true })')
i.bi.bi-funnel
@ -28,7 +30,7 @@
</template>
<script setup>
import { computed, h } from 'vue'
import { computed, h, ref } from 'vue'
import {
NBadge,
NDropdown,
@ -61,7 +63,8 @@ function optionToLink(opts){
{
class: 'dropdown-link',
'data-testid': 'mobile-link',
href: `#${key}`
href: `#${key}`,
onClick: () => jumpToDay(key)
},
[
h(
@ -77,6 +80,12 @@ function optionToLink(opts){
}
}
const isDropdownOpenRef = ref(false)
const handleOpenDropdown = () => isDropdownOpenRef.value = true
const handleCloseDropdown = () => isDropdownOpenRef.value = false
const jumpToDayOptions = computed(() => {
const days = []
if (agendaStore.isMeetingLive) {
@ -124,6 +133,7 @@ function jumpToDay (dayId) {
} else {
document.getElementById(dayId)?.scrollIntoView(true)
}
isDropdownOpenRef.value = false
}
function downloadIcs (key) {

View file

@ -83,6 +83,14 @@
template(#trigger)
span.badge.is-bof BoF
span #[a(href='https://www.ietf.org/how/bofs/', target='_blank') Birds of a Feather] sessions (BoFs) are initial discussions about a particular topic of interest to the IETF community.
n-popover(
v-if='item.isProposed'
trigger='hover'
:width='250'
)
template(#trigger)
span.badge.is-proposed Proposed
span #[a(href='https://www.ietf.org/process/wgs/', target='_blank') Proposed WGs] are groups in the process of being chartered. If the charter is not approved by the IESG before the IETF meeting, the session may be canceled.
.agenda-table-note(v-if='item.note')
i.bi.bi-arrow-return-right.me-1
span {{item.note}}
@ -468,6 +476,7 @@ const meetingEvents = computed(() => {
// groupParentName: item.groupParent?.name,
icon,
isBoF: item.isBoF,
isProposed: item.isProposed,
isSessionEvent: item.type === 'regular',
links,
location: item.location,
@ -1012,10 +1021,25 @@ onBeforeUnmount(() => {
word-wrap: break-word;
}
.badge.is-bof {
background-color: $teal-500;
.badge {
margin: 0 8px;
&.is-bof {
background-color: $teal-500;
@at-root .theme-dark & {
background-color: $teal-700;
}
}
&.is-proposed {
background-color: $gray-500;
@at-root .theme-dark & {
background-color: $gray-700;
}
}
@media screen and (max-width: $bs5-break-md) {
width: 30px;
display: block;

View file

@ -1,4 +1,4 @@
FROM ghcr.io/ietf-tools/datatracker-app-base:20241029T1632
FROM ghcr.io/ietf-tools/datatracker-app-base:20241114T1954
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
ENV DEBIAN_FRONTEND=noninteractive

View file

@ -1 +1 @@
20241029T1632
20241114T1954

View file

@ -1,8 +1,8 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
from ietf import __version__
from ietf.settings import * # pyflakes:ignore
STATIC_URL = "https://static.ietf.org/dt/%s/"%__version__
STATIC_ROOT = os.path.abspath(BASE_DIR + "/../static/")
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
from ietf import __version__
from ietf.settings import * # pyflakes:ignore
STATIC_URL = "https://static.ietf.org/dt/%s/"%__version__
STATIC_ROOT = os.path.abspath(BASE_DIR + "/../static/")

View file

@ -8,7 +8,7 @@
"dependencies": {
"dockerode": "^4.0.2",
"fs-extra": "^11.2.0",
"nanoid": "5.0.7",
"nanoid": "5.0.8",
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^7.4.3",
@ -546,9 +546,9 @@
"optional": true
},
"node_modules/nanoid": {
"version": "5.0.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz",
"integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ==",
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
"integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==",
"funding": [
{
"type": "github",
@ -1346,9 +1346,9 @@
"optional": true
},
"nanoid": {
"version": "5.0.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz",
"integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ=="
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
"integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ=="
},
"nanoid-dictionary": {
"version": "5.0.0-beta.1",

View file

@ -4,7 +4,7 @@
"dependencies": {
"dockerode": "^4.0.2",
"fs-extra": "^11.2.0",
"nanoid": "5.0.7",
"nanoid": "5.0.8",
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^7.4.3",

View file

@ -1,32 +1,32 @@
# This docker-compose replicates the test workflow happening on GitHub during a PR / build check.
# To be used from the debug.sh script.
version: '3.8'
services:
app:
image: ghcr.io/ietf-tools/datatracker-app-base:latest
command: -f /dev/null
working_dir: /__w/datatracker/datatracker
entrypoint: tail
hostname: app
volumes:
- /var/run/docker.sock:/var/run/docker.sock
environment:
CI: 'true'
GITHUB_ACTIONS: 'true'
HOME: /github/home
deploy:
resources:
limits:
cpus: '2'
memory: '7GB'
db:
image: ghcr.io/ietf-tools/datatracker-db:latest
restart: unless-stopped
volumes:
- postgresdb-data:/var/lib/postgresql/data
volumes:
postgresdb-data:
# This docker-compose replicates the test workflow happening on GitHub during a PR / build check.
# To be used from the debug.sh script.
version: '3.8'
services:
app:
image: ghcr.io/ietf-tools/datatracker-app-base:latest
command: -f /dev/null
working_dir: /__w/datatracker/datatracker
entrypoint: tail
hostname: app
volumes:
- /var/run/docker.sock:/var/run/docker.sock
environment:
CI: 'true'
GITHUB_ACTIONS: 'true'
HOME: /github/home
deploy:
resources:
limits:
cpus: '2'
memory: '7GB'
db:
image: ghcr.io/ietf-tools/datatracker-db:latest
restart: unless-stopped
volumes:
- postgresdb-data:/var/lib/postgresql/data
volumes:
postgresdb-data:

View file

@ -1,22 +1,22 @@
{
"Servers": {
"1": {
"Name": "Local Dev",
"Group": "Servers",
"Host": "db",
"Port": 5432,
"MaintenanceDB": "postgres",
"Username": "django",
"UseSSHTunnel": 0,
"TunnelPort": "22",
"TunnelAuthentication": 0,
"KerberosAuthentication": false,
"ConnectionParameters": {
"sslmode": "prefer",
"connect_timeout": 10,
"sslcert": "<STORAGE_DIR>/.postgresql/postgresql.crt",
"sslkey": "<STORAGE_DIR>/.postgresql/postgresql.key"
}
}
}
}
{
"Servers": {
"1": {
"Name": "Local Dev",
"Group": "Servers",
"Host": "db",
"Port": 5432,
"MaintenanceDB": "postgres",
"Username": "django",
"UseSSHTunnel": 0,
"TunnelPort": "22",
"TunnelAuthentication": 0,
"KerberosAuthentication": false,
"ConnectionParameters": {
"sslmode": "prefer",
"connect_timeout": 10,
"sslcert": "<STORAGE_DIR>/.postgresql/postgresql.crt",
"sslkey": "<STORAGE_DIR>/.postgresql/postgresql.key"
}
}
}
}

View file

@ -146,6 +146,7 @@ class AdminJsonSerializer(Serializer):
field_value = None
else:
field_value = field
# Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
if isinstance(field_value, QuerySetAny) or isinstance(field_value, list):
self._current[name] = dict([ (rel.pk, self.expand_related(rel, name)) for rel in field_value ])
else:

View file

@ -13,10 +13,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)
import django

View file

@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)
import django

View file

@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)
import django

View file

@ -1,65 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- Python -*-
#
'''
This script merges two Person records into one. It determines which record is the target
based on most current User record (last_login) unless -f (force) option is used to
force SOURCE TARGET as specified on the command line. The order of operations is
important. We must complete all source.save() operations before moving the aliases to
the target, this is to avoid extra "Possible duplicate Person" emails going out, if the
Person is saved without an alias the Person.save() creates another one, which then
conflicts with the moved one.
'''
# Set PYTHONPATH and load environment variables for standalone script -----------------
import os, sys
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
import django
django.setup()
# -------------------------------------------------------------------------------------
import argparse
from django.contrib import admin
from ietf.person.models import Person
from ietf.person.utils import (merge_persons, send_merge_notification, handle_users,
determine_merge_order)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("source_id",type=int)
parser.add_argument("target_id",type=int)
parser.add_argument('-f','--force', help='force merge order',action='store_true')
parser.add_argument('-v','--verbose', help='verbose output',action='store_true')
args = parser.parse_args()
source = Person.objects.get(pk=args.source_id)
target = Person.objects.get(pk=args.target_id)
# set merge order
if not args.force:
source,target = determine_merge_order(source,target)
# confirm
print "Merging person {}({}) to {}({})".format(source.ascii,source.pk,target.ascii,target.pk)
print handle_users(source,target,check_only=True)
response = raw_input('Ok to continue y/n? ')
if response.lower() != 'y':
sys.exit()
# perform merge
success, changes = merge_persons(source, target, verbose=args.verbose)
# send email notification
send_merge_notification(target,changes)
if __name__ == "__main__":
main()

View file

@ -7,10 +7,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
import django
django.setup()

View file

@ -3,7 +3,7 @@
from django.conf import settings
from django.db import models
from django.db import models, transaction
from django.db.models import signals
from django.urls import reverse as urlreverse
@ -117,7 +117,10 @@ def notify_events(sender, instance, **kwargs):
# start a Celery task during tests. To prevent this, don't queue a celery task if we're running
# tests.
if settings.SERVER_MODE != "test":
notify_event_to_subscribers_task.delay(event_id=instance.pk)
# Wrap in on_commit in case a transaction is open
transaction.on_commit(
lambda: notify_event_to_subscribers_task.delay(event_id=instance.pk)
)
signals.post_save.connect(notify_events)

View file

@ -431,8 +431,10 @@ class CommunityListTests(TestCase):
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
# Mock out the on_commit call so we can tell whether the task was actually queued
@mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x())
@mock.patch("ietf.community.models.notify_event_to_subscribers_task")
def test_notification_signal_receiver(self, mock_notify_task):
def test_notification_signal_receiver(self, mock_notify_task, mock_on_commit):
"""Saving a DocEvent should notify subscribers
This implicitly tests that notify_events is hooked up to the post_save signal.

View file

@ -1,190 +1,190 @@
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="1" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/home.rst</field>
<field type="CharField" name="title">Home page of group</field>
<field type="TextField" name="variables"></field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">Home page
=========
This is the home page of the nomcom group.</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="2" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/inexistent_person.txt</field>
<field type="CharField" name="title">Email sent to chair of nomcom and secretariat when Email and Person are created if some of them don't exist</field>
<field type="TextField" name="variables">$email: Newly created email
$fullname: Fullname of the new person
$person_id: Id of the new Person object
$group: Name of the group</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hello,
A new person with name $fullname and email $email has been created. The new Person object has the following id: '$person_id'.
Please, check if there is some more action needed.</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="3" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/new_nominee.txt</field>
<field type="CharField" name="title">Email sent to nominees when they are nominated</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Name of the position
$domain: Server domain
$accept_url: Url hash to accept nominations
$decline_url: Url hash to decline nominations</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
You have been nominated for the position of $position.
The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position.
You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url
If you accept, you will need to fill out a questionnaire. You will receive the questionnaire by email.
Best regards,
</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="4" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/new_nomination.txt</field>
<field type="CharField" name="title">Email sent to nominators and secretariat when the nominators make the nominations</field>
<field type="TextField" name="variables">$nominator: Full name of the nominator
$nominator_email: Email of the nominator
$nominee: Full name of the nominee
$nominee_email: Email of the nominee
$position: Nomination position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">A new nomination has been received.
Nominator: $nominator ($nominator_email)
Nominee: $nominee ($nominee_email)
Position: $position</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="5" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/position/questionnaire.txt</field>
<field type="CharField" name="title">Questionnaire sent to the nominee</field>
<field type="TextField" name="variables">$position: Position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Enter here the questionnaire for the position $position:
Questionnaire</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="6" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/position/requirements</field>
<field type="CharField" name="title">Position requirements</field>
<field type="TextField" name="variables">$position: Position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">These are the requirements for the position $position:
Requirements.</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="7" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/position/header_questionnaire.txt</field>
<field type="CharField" name="title">Header of the email that contains the questionnaire sent to the nominee</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi $nominee, this is the questionnaire for the position $position:
</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="8" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/nomination_accept_reminder.txt</field>
<field type="CharField" name="title">Email sent to nominees asking them to accept (or decline) the nominations.</field>
<field type="TextField" name="variables">$positions: Nomination positions</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
You have been nominated for the position of $position.
The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position.
You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url
If you accept, you will need to fill out a questionnaire.
Best regards,</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="9" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/nomination_receipt.txt</field>
<field type="CharField" name="title">Email sent to nominator to get a confirmation mail containing feedback in cleartext</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Name of the position
$domain: Server domain
$accept_url: Url hash to accept nominations
$decline_url: Url hash to decline nominations</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
Your nomination of $nominee for the position of
$position has been received and registered.
The following comments have also been registered:
--------------------------------------------------------------------------
$comments
--------------------------------------------------------------------------
Thank you,</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="10" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/feedback_receipt.txt</field>
<field type="CharField" name="title">Email sent to feedback author to get a confirmation mail containing feedback in cleartext</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Nomination position
$comments: Comments on this candidate</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
Your input regarding $about has been received and registered.
The following comments have been registered:
--------------------------------------------------------------------------
$comments
--------------------------------------------------------------------------
Thank you,</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="11" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/questionnaire_reminder.txt</field>
<field type="CharField" name="title">Email sent to nominees reminding them to complete a questionnaire</field>
<field type="TextField" name="variables">$positions: Nomination positions</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">
Thank you for accepting your nomination for the position of $position.
Please remember to complete and return the questionnaire for this position at your earliest opportunity.
The questionnaire is repeated below for your convenience.
--------</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="12" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/topic/description</field>
<field type="CharField" name="title">Description of Topic</field>
<field type="TextField" name="variables">$topic: Topic</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">This is a description of the topic "$topic"
Describe the topic and add any information/instructions for the responder here.
</field>
</object>
<object pk="13" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/iesg_requirements</field>
<field type="CharField" name="title">Generic IESG Requirements</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">Generic IESG Requirements Yo!</field>
</object>
</django-objects>
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="1" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/home.rst</field>
<field type="CharField" name="title">Home page of group</field>
<field type="TextField" name="variables"></field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">Home page
=========
This is the home page of the nomcom group.</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="2" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/inexistent_person.txt</field>
<field type="CharField" name="title">Email sent to chair of nomcom and secretariat when Email and Person are created if some of them don't exist</field>
<field type="TextField" name="variables">$email: Newly created email
$fullname: Fullname of the new person
$person_id: Id of the new Person object
$group: Name of the group</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hello,
A new person with name $fullname and email $email has been created. The new Person object has the following id: '$person_id'.
Please, check if there is some more action needed.</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="3" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/new_nominee.txt</field>
<field type="CharField" name="title">Email sent to nominees when they are nominated</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Name of the position
$domain: Server domain
$accept_url: Url hash to accept nominations
$decline_url: Url hash to decline nominations</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
You have been nominated for the position of $position.
The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position.
You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url
If you accept, you will need to fill out a questionnaire. You will receive the questionnaire by email.
Best regards,
</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="4" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/new_nomination.txt</field>
<field type="CharField" name="title">Email sent to nominators and secretariat when the nominators make the nominations</field>
<field type="TextField" name="variables">$nominator: Full name of the nominator
$nominator_email: Email of the nominator
$nominee: Full name of the nominee
$nominee_email: Email of the nominee
$position: Nomination position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">A new nomination has been received.
Nominator: $nominator ($nominator_email)
Nominee: $nominee ($nominee_email)
Position: $position</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="5" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/position/questionnaire.txt</field>
<field type="CharField" name="title">Questionnaire sent to the nominee</field>
<field type="TextField" name="variables">$position: Position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Enter here the questionnaire for the position $position:
Questionnaire</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="6" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/position/requirements</field>
<field type="CharField" name="title">Position requirements</field>
<field type="TextField" name="variables">$position: Position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">These are the requirements for the position $position:
Requirements.</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="7" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/position/header_questionnaire.txt</field>
<field type="CharField" name="title">Header of the email that contains the questionnaire sent to the nominee</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Position</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi $nominee, this is the questionnaire for the position $position:
</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="8" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/nomination_accept_reminder.txt</field>
<field type="CharField" name="title">Email sent to nominees asking them to accept (or decline) the nominations.</field>
<field type="TextField" name="variables">$positions: Nomination positions</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
You have been nominated for the position of $position.
The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position.
You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url
If you accept, you will need to fill out a questionnaire.
Best regards,</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="9" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/nomination_receipt.txt</field>
<field type="CharField" name="title">Email sent to nominator to get a confirmation mail containing feedback in cleartext</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Name of the position
$domain: Server domain
$accept_url: Url hash to accept nominations
$decline_url: Url hash to decline nominations</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
Your nomination of $nominee for the position of
$position has been received and registered.
The following comments have also been registered:
--------------------------------------------------------------------------
$comments
--------------------------------------------------------------------------
Thank you,</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="10" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/feedback_receipt.txt</field>
<field type="CharField" name="title">Email sent to feedback author to get a confirmation mail containing feedback in cleartext</field>
<field type="TextField" name="variables">$nominee: Full name of the nominee
$position: Nomination position
$comments: Comments on this candidate</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">Hi,
Your input regarding $about has been received and registered.
The following comments have been registered:
--------------------------------------------------------------------------
$comments
--------------------------------------------------------------------------
Thank you,</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="11" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/email/questionnaire_reminder.txt</field>
<field type="CharField" name="title">Email sent to nominees reminding them to complete a questionnaire</field>
<field type="TextField" name="variables">$positions: Nomination positions</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">plain</field>
<field type="TextField" name="content">
Thank you for accepting your nomination for the position of $position.
Please remember to complete and return the questionnaire for this position at your earliest opportunity.
The questionnaire is repeated below for your convenience.
--------</field>
<field to="group.group" name="group" rel="ManyToOneRel"><None></None></field>
</object>
<object pk="12" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/topic/description</field>
<field type="CharField" name="title">Description of Topic</field>
<field type="TextField" name="variables">$topic: Topic</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">This is a description of the topic "$topic"
Describe the topic and add any information/instructions for the responder here.
</field>
</object>
<object pk="13" model="dbtemplate.dbtemplate">
<field type="CharField" name="path">/nomcom/defaults/iesg_requirements</field>
<field type="CharField" name="title">Generic IESG Requirements</field>
<field to="name.dbtemplatetypename" name="type" rel="ManyToOneRel">rst</field>
<field type="TextField" name="content">Generic IESG Requirements Yo!</field>
</object>
</django-objects>

View file

@ -5,6 +5,7 @@
import os
import datetime
import io
from django.http import HttpRequest
import lxml
import bibtexparser
import mock
@ -52,6 +53,7 @@ from ietf.doc.utils import (
generate_idnits2_rfcs_obsoleted,
get_doc_email_aliases,
)
from ietf.doc.views_doc import get_diff_revisions
from ietf.group.models import Group, Role
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.ipr.factories import HolderIprDisclosureFactory
@ -71,96 +73,163 @@ from ietf.doc.utils_search import AD_WORKLOAD
class SearchTests(TestCase):
def test_search(self):
def test_search_handles_querystring_parameters(self):
"""Search parameters via querystring should not actually search"""
url = urlreverse("ietf.doc.views_search.search")
r = self.client.get(url + "?name=some-document-name&oldDrafts=on")
# Check that we got a valid response and that the warning about query string parameters is shown.
self.assertContains(
r,
"Searching via the URL query string is no longer supported.",
status_code=200,
)
# Check that the form was filled in correctly (not an exhaustive check, but different from the
# form defaults)
pq = PyQuery(r.content)
self.assertEqual(
pq("form#search_form input#id_name").attr("value"),
"some-document-name",
"The name field should be set in the SearchForm",
)
self.assertEqual(
pq("form#search_form input#id_olddrafts").attr("checked"),
"checked",
"The old drafts checkbox should be selected in the SearchForm",
)
self.assertIsNone(
pq("form#search_form input#id_rfcs").attr("checked"),
"The RFCs checkbox should not be selected in the SearchForm",
)
self.assertIsNone(
pq("form#search_form input#id_activedrafts").attr("checked"),
"The active drafts checkbox should not be selected in the SearchForm",
)
draft = WgDraftFactory(name='draft-ietf-mars-test',group=GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')),authors=[PersonFactory()],ad=PersonFactory())
def test_search(self):
draft = WgDraftFactory(
name="draft-ietf-mars-test",
group=GroupFactory(acronym="mars", parent=Group.objects.get(acronym="farfut")),
authors=[PersonFactory()],
ad=PersonFactory(),
)
rfc = WgRfcFactory()
draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req"))
old_draft = IndividualDraftFactory(name='draft-foo-mars-test',authors=[PersonFactory()],title="Optimizing Martian Network Topologies")
old_draft = IndividualDraftFactory(
name="draft-foo-mars-test",
authors=[PersonFactory()],
title="Optimizing Martian Network Topologies",
)
old_draft.set_state(State.objects.get(used=True, type="draft", slug="expired"))
base_url = urlreverse('ietf.doc.views_search.search')
url = urlreverse("ietf.doc.views_search.search")
# only show form, no search yet
r = self.client.get(base_url)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
# no match
r = self.client.get(base_url + "?activedrafts=on&name=thisisnotadocumentname")
r = self.client.post(url, {"activedrafts": "on", "name": "thisisnotadocumentname"})
self.assertEqual(r.status_code, 200)
self.assertContains(r, "No documents match")
r = self.client.get(base_url + "?rfcs=on&name=xyzzy")
r = self.client.post(url, {"rfcs": "on", "name": "xyzzy"})
self.assertEqual(r.status_code, 200)
self.assertContains(r, "No documents match")
r = self.client.get(base_url + "?olddrafts=on&name=bar")
r = self.client.post(url, {"olddrafts": "on", "name": "bar"})
self.assertEqual(r.status_code, 200)
self.assertContains(r, "No documents match")
r = self.client.get(base_url + "?olddrafts=on&name=foo")
r = self.client.post(url, {"olddrafts": "on", "name": "foo"})
self.assertEqual(r.status_code, 200)
self.assertContains(r, "draft-foo-mars-test")
r = self.client.get(base_url + "?olddrafts=on&name=FoO") # mixed case
r = self.client.post(url, {"olddrafts": "on", "name": "FoO"}) # mixed case
self.assertEqual(r.status_code, 200)
self.assertContains(r, "draft-foo-mars-test")
# find by RFC
r = self.client.get(base_url + "?rfcs=on&name=%s" % rfc.name)
r = self.client.post(url, {"rfcs": "on", "name": rfc.name})
self.assertEqual(r.status_code, 200)
self.assertContains(r, rfc.title)
# find by active/inactive
draft.set_state(State.objects.get(type="draft", slug="active"))
r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.name)
r = self.client.post(url, {"activedrafts": "on", "name": draft.name})
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
draft.set_state(State.objects.get(type="draft", slug="expired"))
r = self.client.get(base_url + "?olddrafts=on&name=%s" % draft.name)
r = self.client.post(url, {"olddrafts": "on", "name": draft.name})
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
draft.set_state(State.objects.get(type="draft", slug="active"))
# find by title
r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.title.split()[0])
r = self.client.post(url, {"activedrafts": "on", "name": draft.title.split()[0]})
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
# find by author
r = self.client.get(base_url + "?activedrafts=on&by=author&author=%s" % draft.documentauthor_set.first().person.name_parts()[1])
r = self.client.post(
url,
{
"activedrafts": "on",
"by": "author",
"author": draft.documentauthor_set.first().person.name_parts()[1],
},
)
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
# find by group
r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym)
r = self.client.post(
url,
{"activedrafts": "on", "by": "group", "group": draft.group.acronym},
)
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym.swapcase())
r = self.client.post(
url,
{"activedrafts": "on", "by": "group", "group": draft.group.acronym.swapcase()},
)
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
# find by area
r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
r = self.client.post(
url,
{"activedrafts": "on", "by": "area", "area": draft.group.parent_id},
)
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
# find by area
r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
r = self.client.post(
url,
{"activedrafts": "on", "by": "area", "area": draft.group.parent_id},
)
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
# find by AD
r = self.client.get(base_url + "?activedrafts=on&by=ad&ad=%s" % draft.ad_id)
r = self.client.post(url, {"activedrafts": "on", "by": "ad", "ad": draft.ad_id})
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
# find by IESG state
r = self.client.get(base_url + "?activedrafts=on&by=state&state=%s&substate=" % draft.get_state("draft-iesg").pk)
r = self.client.post(
url,
{
"activedrafts": "on",
"by": "state",
"state": draft.get_state("draft-iesg").pk,
"substate": "",
},
)
self.assertEqual(r.status_code, 200)
self.assertContains(r, draft.title)
@ -169,15 +238,15 @@ class SearchTests(TestCase):
rfc = WgRfcFactory()
draft.set_state(State.objects.get(type="draft", slug="rfc"))
draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc)
base_url = urlreverse('ietf.doc.views_search.search')
url = urlreverse("ietf.doc.views_search.search")
# find by RFC
r = self.client.get(base_url + f"?rfcs=on&name={rfc.name}")
r = self.client.post(url, {"rfcs": "on", "name": rfc.name})
self.assertEqual(r.status_code, 200)
self.assertContains(r, rfc.title)
# find by draft
r = self.client.get(base_url + f"?activedrafts=on&rfcs=on&name={draft.name}")
r = self.client.post(url, {"activedrafts": "on", "rfcs": "on", "name": draft.name})
self.assertEqual(r.status_code, 200)
self.assertContains(r, rfc.title)
@ -1887,6 +1956,18 @@ class DocTestCase(TestCase):
self.assertContains(r, notes.text)
self.assertContains(r, rfced_note.text)
def test_diff_revisions(self):
ind_doc = IndividualDraftFactory(create_revisions=range(2))
wg_doc = WgDraftFactory(
relations=[("replaces", ind_doc)], create_revisions=range(2)
)
diff_revisions = get_diff_revisions(HttpRequest(), wg_doc.name, wg_doc)
self.assertEqual(len(diff_revisions), 4)
self.assertEqual(
[t[3] for t in diff_revisions],
[f"{n}-{v:02d}" for n in [wg_doc.name, ind_doc.name] for v in [1, 0]],
)
def test_history(self):
doc = IndividualDraftFactory()
@ -2739,60 +2820,6 @@ class DocumentMeetingTests(TestCase):
self.assertIsNone(doc.get_related_meeting(), f'{doc.type.slug} should not be related to meeting')
class ChartTests(ResourceTestCaseMixin, TestCase):
def test_search_chart_conf(self):
"""Chart-config endpoint should return valid JSON for empty, non-matching, and matching queries."""
doc = IndividualDraftFactory()
conf_url = urlreverse('ietf.doc.views_stats.chart_conf_newrevisiondocevent')
# No query arguments; expect an empty json object
r = self.client.get(conf_url)
self.assertValidJSONResponse(r)
self.assertEqual(unicontent(r), '{}')
# No match: config is still produced, using the column chart options
r = self.client.get(conf_url + '?activedrafts=on&name=thisisnotadocumentname')
self.assertValidJSONResponse(r)
d = r.json()
self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])
# Matching query (substring of the draft name past the "draft-" prefix)
r = self.client.get(conf_url + '?activedrafts=on&name=%s'%doc.name[6:12])
self.assertValidJSONResponse(r)
d = r.json()
self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])
# Config carries an empty data series; actual data comes from the data endpoint
self.assertEqual(len(d['series'][0]['data']), 0)
def test_search_chart_data(self):
"""Chart-data endpoint should return an empty list for no/non-matching queries and data points for matches."""
doc = IndividualDraftFactory()
data_url = urlreverse('ietf.doc.views_stats.chart_data_newrevisiondocevent')
# No query arguments; expect an empty json list
r = self.client.get(data_url)
self.assertValidJSONResponse(r)
self.assertEqual(unicontent(r), '[]')
# No match
r = self.client.get(data_url + '?activedrafts=on&name=thisisnotadocumentname')
self.assertValidJSONResponse(r)
# NOTE(review): d is assigned but unused here; the assertion re-reads the raw content
d = r.json()
self.assertEqual(unicontent(r), '[]')
# Matching query: expect a single [timestamp, count]-style data point
r = self.client.get(data_url + '?activedrafts=on&name=%s'%doc.name[6:12])
self.assertValidJSONResponse(r)
d = r.json()
self.assertEqual(len(d), 1)
self.assertEqual(len(d[0]), 2)
def test_search_chart(self):
"""Chart page should render successfully with and without a search query."""
doc = IndividualDraftFactory()
chart_url = urlreverse('ietf.doc.views_stats.chart_newrevisiondocevent')
# Bare page, no query
r = self.client.get(chart_url)
self.assertEqual(r.status_code, 200)
# With a query matching the test draft (substring past the "draft-" prefix)
r = self.client.get(chart_url + '?activedrafts=on&name=%s'%doc.name[6:12])
self.assertEqual(r.status_code, 200)
def test_personal_chart(self):
person = PersonFactory.create()
IndividualDraftFactory.create(

View file

@ -92,10 +92,8 @@ class EditAuthorsTests(IetfSeleniumTestCase):
self.assertEqual(len(author_forms), 1)
# get the "add author" button so we can add blank author forms
add_author_button = self.driver.find_element(By.ID, 'add-author-button')
for index, auth in enumerate(authors):
self.scroll_to_element(add_author_button) # Can only click if it's in view!
add_author_button.click() # Create a new form. Automatically scrolls to it.
self.scroll_and_click((By.ID, 'add-author-button')) # Create new form. Automatically scrolls to it.
author_forms = authors_list.find_elements(By.CLASS_NAME, 'author-panel')
authors_added = index + 1
self.assertEqual(len(author_forms), authors_added + 1) # Started with 1 author, hence +1

View file

@ -68,10 +68,6 @@ urlpatterns = [
),
url(r'^investigate/?$', views_doc.investigate),
url(r'^stats/newrevisiondocevent/?$', views_stats.chart_newrevisiondocevent),
url(r'^stats/newrevisiondocevent/conf/?$', views_stats.chart_conf_newrevisiondocevent),
url(r'^stats/newrevisiondocevent/data/?$', views_stats.chart_data_newrevisiondocevent),
url(r'^stats/person/(?P<id>[0-9]+)/drafts/conf/?$', views_stats.chart_conf_person_drafts),
url(r'^stats/person/(?P<id>[0-9]+)/drafts/data/?$', views_stats.chart_data_person_drafts),

View file

@ -3,9 +3,7 @@
import datetime
import hashlib
import io
import json
import math
import os
import re
@ -348,6 +346,7 @@ def augment_events_with_revision(doc, events):
"""Take a set of events for doc and add a .rev attribute with the
revision they refer to by checking NewRevisionDocEvents."""
# Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
if isinstance(events, QuerySetAny):
qs = events.filter(newrevisiondocevent__isnull=False)
else:
@ -1047,14 +1046,6 @@ def make_rev_history(doc):
return sorted(history, key=lambda x: x['published'])
def get_search_cache_key(params):
from ietf.doc.views_search import SearchForm
fields = set(SearchForm.base_fields) - set(['sort',])
kwargs = dict([ (k,v) for (k,v) in list(params.items()) if k in fields ])
key = "doc:document:search:" + hashlib.sha512(json.dumps(kwargs, sort_keys=True).encode('utf-8')).hexdigest()
return key
def build_file_urls(doc: Union[Document, DocHistory]):
if doc.type_id == "rfc":
base_path = os.path.join(settings.RFC_PATH, doc.name + ".")

View file

@ -1133,10 +1133,10 @@ def get_diff_revisions(request, name, doc):
diff_documents = [doc]
diff_documents.extend(
Document.objects.filter(
relateddocument__source=doc,
relateddocument__relationship="replaces",
)
[
r.target
for r in RelatedDocument.objects.filter(source=doc, relationship="replaces")
]
)
if doc.came_from_draft():
diff_documents.append(doc.came_from_draft())

View file

@ -37,6 +37,8 @@
import re
import datetime
import copy
import hashlib
import json
import operator
from collections import defaultdict
@ -44,16 +46,17 @@ from functools import reduce
from django import forms
from django.conf import settings
from django.contrib import messages
from django.core.cache import cache, caches
from django.urls import reverse as urlreverse
from django.db.models import Q
from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect, QueryDict
from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils import timezone
from django.utils.html import strip_tags
from django.utils.cache import _generate_cache_key # type: ignore
from django.utils.text import slugify
from django_stubs_ext import QuerySetAny
import debug # pyflakes:ignore
@ -62,7 +65,7 @@ from ietf.doc.models import ( Document, DocHistory, State,
IESG_BALLOT_ACTIVE_STATES, IESG_STATCHG_CONFLREV_ACTIVE_STATES,
IESG_CHARTER_ACTIVE_STATES )
from ietf.doc.fields import select2_id_doc_name_json
from ietf.doc.utils import get_search_cache_key, augment_events_with_revision, needed_ballot_positions
from ietf.doc.utils import augment_events_with_revision, needed_ballot_positions
from ietf.group.models import Group
from ietf.idindex.index import active_drafts_index_by_group
from ietf.name.models import DocTagName, DocTypeName, StreamName
@ -145,6 +148,29 @@ class SearchForm(forms.Form):
q['irtfstate'] = None
return q
def cache_key_fragment(self):
"""Hash a bound form to get a value for use in a cache key
Raises a ValueError if the form is not valid.
"""
def _serialize_value(val):
# Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
if isinstance(val, QuerySetAny):
return [item.pk for item in val]
else:
return getattr(val, "pk", val) # use pk if present, else value
if not self.is_valid():
raise ValueError(f"SearchForm invalid: {self.errors}")
contents = {
field_name: _serialize_value(field_value)
for field_name, field_value in self.cleaned_data.items()
if field_name != "sort" and field_value is not None
}
contents_json = json.dumps(contents, sort_keys=True)
return hashlib.sha512(contents_json.encode("utf-8")).hexdigest()
def retrieve_search_results(form, all_types=False):
"""Takes a validated SearchForm and return the results."""
@ -256,45 +282,64 @@ def retrieve_search_results(form, all_types=False):
return docs
def search(request):
if request.GET:
# backwards compatibility
get_params = request.GET.copy()
if 'activeDrafts' in request.GET:
get_params['activedrafts'] = request.GET['activeDrafts']
if 'oldDrafts' in request.GET:
get_params['olddrafts'] = request.GET['oldDrafts']
if 'subState' in request.GET:
get_params['substate'] = request.GET['subState']
"""Search for a draft"""
# defaults for results / meta
results = []
meta = {"by": None, "searching": False}
form = SearchForm(get_params)
if not form.is_valid():
return HttpResponseBadRequest("form not valid: %s" % form.errors)
cache_key = get_search_cache_key(get_params)
cached_val = cache.get(cache_key)
if cached_val:
[results, meta] = cached_val
else:
results = retrieve_search_results(form)
results, meta = prepare_document_table(request, results, get_params)
cache.set(cache_key, [results, meta]) # for settings.CACHE_MIDDLEWARE_SECONDS
log(f"Search results computed for {get_params}")
meta['searching'] = True
if request.method == "POST":
form = SearchForm(data=request.POST)
if form.is_valid():
cache_key = f"doc:document:search:{form.cache_key_fragment()}"
cached_val = cache.get(cache_key)
if cached_val:
[results, meta] = cached_val
else:
results = retrieve_search_results(form)
results, meta = prepare_document_table(
request, results, form.cleaned_data
)
cache.set(
cache_key, [results, meta]
) # for settings.CACHE_MIDDLEWARE_SECONDS
log(f"Search results computed for {form.cleaned_data}")
meta["searching"] = True
else:
form = SearchForm()
results = []
meta = { 'by': None, 'searching': False }
get_params = QueryDict('')
if request.GET:
# backwards compatibility - fill in the form
get_params = request.GET.copy()
if "activeDrafts" in request.GET:
get_params["activedrafts"] = request.GET["activeDrafts"]
if "oldDrafts" in request.GET:
get_params["olddrafts"] = request.GET["oldDrafts"]
if "subState" in request.GET:
get_params["substate"] = request.GET["subState"]
form = SearchForm(data=get_params)
messages.error(
request,
(
"Searching via the URL query string is no longer supported. "
"The form below has been filled in with the parameters from your request. "
'To execute your search, please click "Search".'
),
)
else:
form = SearchForm()
return render(request, 'doc/search/search.html', {
'form':form, 'docs':results, 'meta':meta, 'queryargs':get_params.urlencode() },
return render(
request,
"doc/search/search.html",
context={"form": form, "docs": results, "meta": meta},
)
def frontpage(request):
form = SearchForm()
return render(request, 'doc/frontpage.html', {'form':form})
def search_for_name(request, name):
def find_unique(n):
exact = Document.objects.filter(name__iexact=n).first()

View file

@ -4,20 +4,15 @@ import copy
import datetime
from django.conf import settings
from django.core.cache import cache
from django.urls import reverse as urlreverse
from django.db.models.aggregates import Count
from django.db.models.functions import TruncDate
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import render
from django.http import JsonResponse
from django.views.decorators.cache import cache_page
import debug # pyflakes:ignore
from ietf.doc.models import DocEvent
from ietf.doc.templatetags.ietf_filters import comma_separated_list
from ietf.doc.utils import get_search_cache_key
from ietf.doc.views_search import SearchForm, retrieve_search_results
from ietf.name.models import DocTypeName
from ietf.person.models import Person
from ietf.utils.timezone import date_today
@ -113,49 +108,6 @@ def make_title(queryargs):
title += ' with name matching "%s"' % name
return title
def chart_newrevisiondocevent(request):
return render(request, "doc/stats/highstock.html", {
"title": "Document Statistics",
"confurl": urlreverse("ietf.doc.views_stats.chart_conf_newrevisiondocevent"),
"dataurl": urlreverse("ietf.doc.views_stats.chart_data_newrevisiondocevent"),
"queryargs": request.GET.urlencode(),
}
)
#@cache_page(60*15)
def chart_data_newrevisiondocevent(request):
queryargs = request.GET
if queryargs:
cache_key = get_search_cache_key(queryargs)
results = cache.get(cache_key)
if not results:
form = SearchForm(queryargs)
if not form.is_valid():
return HttpResponseBadRequest("form not valid: %s" % form.errors)
results = retrieve_search_results(form)
if results.exists():
cache.set(cache_key, results)
if results.exists():
data = model_to_timeline_data(DocEvent, doc__in=results, type='new_revision')
else:
data = []
else:
data = []
return JsonResponse(data, safe=False)
@cache_page(60*15)
def chart_conf_newrevisiondocevent(request):
queryargs = request.GET
if queryargs:
conf = copy.deepcopy(settings.CHART_TYPE_COLUMN_OPTIONS)
conf['title']['text'] = make_title(queryargs)
conf['series'][0]['name'] = "Submitted %s" % get_doctypes(queryargs, pluralize=True).lower(),
else:
conf = {}
return JsonResponse(conf)
@cache_page(60*15)
def chart_conf_person_drafts(request, id):
person = Person.objects.filter(id=id).first()

View file

@ -65,6 +65,53 @@ class StreamTests(TestCase):
self.assertTrue(Role.objects.filter(name="delegate", group__acronym=stream_acronym, email__address="ad2@ietf.org"))
class GroupLeadershipTests(TestCase):
def test_leadership_wg(self):
# setup various group states
bof_role = RoleFactory(
group__type_id="wg", group__state_id="bof", name_id="chair"
)
proposed_role = RoleFactory(
group__type_id="wg", group__state_id="proposed", name_id="chair"
)
active_role = RoleFactory(
group__type_id="wg", group__state_id="active", name_id="chair"
)
conclude_role = RoleFactory(
group__type_id="wg", group__state_id="conclude", name_id="chair"
)
url = urlreverse(
"ietf.group.views.group_leadership", kwargs={"group_type": "wg"}
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "Group Leadership")
self.assertContains(r, bof_role.person.last_name())
self.assertContains(r, proposed_role.person.last_name())
self.assertContains(r, active_role.person.last_name())
self.assertNotContains(r, conclude_role.person.last_name())
def test_leadership_wg_csv(self):
url = urlreverse(
"ietf.group.views.group_leadership_csv", kwargs={"group_type": "wg"}
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r["Content-Type"], "text/csv")
self.assertContains(r, "Chairman, Sops")
def test_leadership_rg(self):
role = RoleFactory(group__type_id="rg", name_id="chair")
url = urlreverse(
"ietf.group.views.group_leadership", kwargs={"group_type": "rg"}
)
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "Group Leadership")
self.assertContains(r, role.person.last_name())
self.assertNotContains(r, "Chairman, Sops")
class GroupStatsTests(TestCase):
def setUp(self):
super().setUp()

View file

@ -1431,7 +1431,7 @@ class MilestoneTests(TestCase):
RoleFactory(group=group,name_id='chair',person=PersonFactory(user__username='marschairman'))
draft = WgDraftFactory(group=group)
m1 = GroupMilestone.objects.create(id=1,
m1 = GroupMilestone.objects.create(
group=group,
desc="Test 1",
due=date_today(DEADLINE_TZINFO),
@ -1439,7 +1439,7 @@ class MilestoneTests(TestCase):
state_id="active")
m1.docs.set([draft])
m2 = GroupMilestone.objects.create(id=2,
m2 = GroupMilestone.objects.create(
group=group,
desc="Test 2",
due=date_today(DEADLINE_TZINFO),
@ -1580,13 +1580,14 @@ class MilestoneTests(TestCase):
events_before = group.groupevent_set.count()
# add
r = self.client.post(url, { 'prefix': "m1",
'm1-id': m1.id,
'm1-desc': m1.desc,
'm1-due': m1.due.strftime("%B %Y"),
'm1-resolved': m1.resolved,
'm1-docs': pklist(m1.docs),
'm1-review': "accept",
mstr = f"m{m1.id}"
r = self.client.post(url, { 'prefix': mstr,
f'{mstr}-id': m1.id,
f'{mstr}-desc': m1.desc,
f'{mstr}-due': m1.due.strftime("%B %Y"),
f'{mstr}-resolved': m1.resolved,
f'{mstr}-docs': pklist(m1.docs),
f'{mstr}-review': "accept",
'action': "save",
})
self.assertEqual(r.status_code, 302)
@ -1606,13 +1607,14 @@ class MilestoneTests(TestCase):
events_before = group.groupevent_set.count()
# delete
r = self.client.post(url, { 'prefix': "m1",
'm1-id': m1.id,
'm1-desc': m1.desc,
'm1-due': m1.due.strftime("%B %Y"),
'm1-resolved': "",
'm1-docs': pklist(m1.docs),
'm1-delete': "checked",
mstr = f"m{m1.id}"
r = self.client.post(url, { 'prefix': mstr,
f'{mstr}-id': m1.id,
f'{mstr}-desc': m1.desc,
f'{mstr}-due': m1.due.strftime("%B %Y"),
f'{mstr}-resolved': "",
f'{mstr}-docs': pklist(m1.docs),
f'{mstr}-delete': "checked",
'action': "save",
})
self.assertEqual(r.status_code, 302)
@ -1635,13 +1637,14 @@ class MilestoneTests(TestCase):
due = self.last_day_of_month(date_today(DEADLINE_TZINFO) + datetime.timedelta(days=365))
mstr = f"m{m1.id}"
# faulty post
r = self.client.post(url, { 'prefix': "m1",
'm1-id': m1.id,
'm1-desc': "", # no description
'm1-due': due.strftime("%B %Y"),
'm1-resolved': "",
'm1-docs': doc_pks,
r = self.client.post(url, { 'prefix': mstr,
f'{mstr}-id': m1.id,
f'{mstr}-desc': "", # no description
f'{mstr}-due': due.strftime("%B %Y"),
f'{mstr}-resolved': "",
f'{mstr}-docs': doc_pks,
'action': "save",
})
self.assertEqual(r.status_code, 200)
@ -1653,13 +1656,13 @@ class MilestoneTests(TestCase):
# edit
mailbox_before = len(outbox)
r = self.client.post(url, { 'prefix': "m1",
'm1-id': m1.id,
'm1-desc': "Test 2 - changed",
'm1-due': due.strftime("%B %Y"),
'm1-resolved': "Done",
'm1-resolved_checkbox': "checked",
'm1-docs': doc_pks,
r = self.client.post(url, { 'prefix': mstr,
f'{mstr}-id': m1.id,
f'{mstr}-desc': "Test 2 - changed",
f'{mstr}-due': due.strftime("%B %Y"),
f'{mstr}-resolved': "Done",
f'{mstr}-resolved_checkbox': "checked",
f'{mstr}-docs': doc_pks,
'action': "save",
})
self.assertEqual(r.status_code, 302)

View file

@ -57,7 +57,9 @@ info_detail_urls = [
group_urls = [
url(r'^$', views.active_groups),
url(r'^$', views.active_groups),
url(r'^leadership/(?P<group_type>(wg|rg))/$', views.group_leadership),
url(r'^leadership/(?P<group_type>(wg|rg))/csv/$', views.group_leadership_csv),
url(r'^groupstats.json', views.group_stats_data, None, 'ietf.group.views.group_stats_data'),
url(r'^groupmenu.json', views.group_menu_data, None, 'ietf.group.views.group_menu_data'),
url(r'^chartering/$', views.chartering_groups),

View file

@ -35,6 +35,7 @@
import copy
import csv
import datetime
import itertools
import math
@ -437,6 +438,48 @@ def prepare_group_documents(request, group, clist):
return docs, meta, docs_related, meta_related
def get_leadership(group_type):
people = Person.objects.filter(
role__name__slug="chair",
role__group__type=group_type,
role__group__state__slug__in=("active", "bof", "proposed"),
).distinct()
leaders = []
for person in people:
parts = person.name_parts()
groups = [
r.group.acronym
for r in person.role_set.filter(
name__slug="chair",
group__type=group_type,
group__state__slug__in=("active", "bof", "proposed"),
)
]
entry = {"name": "%s, %s" % (parts[3], parts[1]), "groups": ", ".join(groups)}
leaders.append(entry)
return sorted(leaders, key=lambda a: a["name"])
def group_leadership(request, group_type=None):
context = {}
context["leaders"] = get_leadership(group_type)
context["group_type"] = group_type
return render(request, "group/group_leadership.html", context)
def group_leadership_csv(request, group_type=None):
leaders = get_leadership(group_type)
response = HttpResponse(content_type="text/csv")
response["Content-Disposition"] = (
f'attachment; filename="group_leadership_{group_type}.csv"'
)
writer = csv.writer(response, dialect=csv.excel, delimiter=str(","))
writer.writerow(["Name", "Groups"])
for leader in leaders:
writer.writerow([leader["name"], leader["groups"]])
return response
def group_home(request, acronym, group_type=None):
group = get_group_or_404(acronym, group_type)
kwargs = dict(acronym=group.acronym)

View file

@ -203,6 +203,7 @@ class SearchLiaisonForm(forms.Form):
class CustomModelMultipleChoiceField(ModelMultipleChoiceField):
'''If value is a QuerySet, return it as is (for use in widget.render)'''
def prepare_value(self, value):
# Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
if isinstance(value, QuerySetAny):
return value
if (hasattr(value, '__iter__') and

View file

@ -35,6 +35,7 @@ class ShowAttachmentsWidget(Widget):
html = '<div id="id_%s">' % name
html += '<span class="d-none showAttachmentsEmpty form-control widget">No files attached</span>'
html += '<div class="attachedFiles form-control widget">'
# Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
if value and isinstance(value, QuerySetAny):
for attachment in value:
html += '<a class="initialAttach" href="%s">%s</a>&nbsp' % (conditional_escape(attachment.document.get_href()), conditional_escape(attachment.document.title))

View file

@ -249,7 +249,9 @@ class EditMeetingScheduleTests(IetfSeleniumTestCase):
self.assertTrue(s1_element.is_displayed()) # should still be displayed
self.assertIn('hidden-parent', s1_element.get_attribute('class'),
'Session should be hidden when parent disabled')
s1_element.click() # try to select
self.scroll_and_click((By.CSS_SELECTOR, '#session{}'.format(s1.pk)))
self.assertNotIn('selected', s1_element.get_attribute('class'),
'Session should not be selectable when parent disabled')
@ -299,9 +301,9 @@ class EditMeetingScheduleTests(IetfSeleniumTestCase):
'Session s1 should have moved to second meeting day')
# swap timeslot column - put session in a differently-timed timeslot
self.driver.find_element(By.CSS_SELECTOR,
self.scroll_and_click((By.CSS_SELECTOR,
'.day .swap-timeslot-col[data-timeslot-pk="{}"]'.format(slot1b.pk)
).click() # open modal on the second timeslot for room1
)) # open modal on the second timeslot for room1
self.assertTrue(self.driver.find_element(By.CSS_SELECTOR, "#swap-timeslot-col-modal").is_displayed())
self.driver.find_element(By.CSS_SELECTOR,
'#swap-timeslot-col-modal input[name="target_timeslot"][value="{}"]'.format(slot4.pk)
@ -1373,13 +1375,8 @@ class InterimTests(IetfSeleniumTestCase):
self.assertFalse(modal_div.is_displayed())
# Click the 'materials' button
open_modal_button = self.wait.until(
expected_conditions.element_to_be_clickable(
(By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug)
),
'Modal open button not found or not clickable',
)
open_modal_button.click()
open_modal_button_locator = (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug)
self.scroll_and_click(open_modal_button_locator)
self.wait.until(
expected_conditions.visibility_of(modal_div),
'Modal did not become visible after clicking open button',

View file

@ -407,6 +407,40 @@ class MeetingTests(BaseMeetingTestCase):
r = self.client.get(urlreverse('floor-plan', kwargs=dict(num=meeting.number)))
self.assertEqual(r.status_code, 200)
def test_session_recordings_via_factories(self):
session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180))
self.assertEqual(session.meetecho_recording_name, "")
self.assertEqual(len(session.recordings()), 0)
url = urlreverse("ietf.meeting.views.session_details", kwargs=dict(num=session.meeting.number, acronym=session.group.acronym))
r = self.client.get(url)
q = PyQuery(r.content)
# debug.show("q(f'#notes_and_recordings_{session.pk}')")
self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1)
link = q(f"#notes_and_recordings_{session.pk} tr a")
self.assertEqual(len(link), 1)
self.assertEqual(link[0].attrib['href'], str(session.session_recording_url()))
session.meetecho_recording_name = 'my_test_session_name'
session.save()
r = self.client.get(url)
q = PyQuery(r.content)
self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1)
links = q(f"#notes_and_recordings_{session.pk} tr a")
self.assertEqual(len(links), 1)
self.assertEqual(links[0].attrib['href'], session.session_recording_url())
new_recording_url = "https://www.youtube.com/watch?v=jNQXAC9IVRw"
new_recording_title = "Me at the zoo"
create_recording(session, new_recording_url, new_recording_title)
r = self.client.get(url)
q = PyQuery(r.content)
self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2)
links = q(f"#notes_and_recordings_{session.pk} tr a")
self.assertEqual(len(links), 2)
self.assertEqual(links[0].attrib['href'], new_recording_url)
self.assertIn(new_recording_title, links[0].text_content())
#debug.show("q(f'#notes_and_recordings_{session_pk}')")
def test_agenda_ical_next_meeting_type(self):
# start with no upcoming IETF meetings, just an interim
MeetingFactory(

View file

@ -1786,6 +1786,7 @@ def agenda_extract_schedule (item):
"type": item.session.type.slug,
"purpose": item.session.purpose.slug,
"isBoF": item.session.group_at_the_time().state_id == "bof",
"isProposed": item.session.group_at_the_time().state_id == "proposed",
"filterKeywords": item.filter_keywords,
"groupAcronym": item.session.group_at_the_time().acronym,
"groupName": item.session.group_at_the_time().name,
@ -4104,6 +4105,7 @@ def organize_proceedings_sessions(sessions):
'minutes': _format_materials((s, s.minutes()) for s in ss),
'bluesheets': _format_materials((s, s.bluesheets()) for s in ss),
'recordings': _format_materials((s, s.recordings()) for s in ss),
'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss),
'chatlogs': _format_materials((s, s.chatlogs()) for s in ss),
'slides': _format_materials((s, s.slides()) for s in ss),
'drafts': _format_materials((s, s.drafts()) for s in ss),

View file

@ -1,71 +1,71 @@
/* Following functions based off code written by Arne Brodowski
http://www.arnebrodowski.de/blog/507-Add-and-remove-Django-Admin-Inlines-with-JavaScript.html
2012-02-01 customized for new Rolodex. Email formset doesn't have an id field, rather a "address"
field as primary key. Also for some reason the "active" boolean field doesn't get saved properly
if the checkbox input has an empty "value" argument.
*/
import $ from 'jquery';
function increment_form_ids(el, to, name) {
var from = to-1
$(':input', $(el)).each(function(i,e){
var old_name = $(e).attr('name')
var old_id = $(e).attr('id')
$(e).attr('name', old_name.replace(from, to))
$(e).attr('id', old_id.replace(from, to))
if ($(e).attr('type') != 'checkbox') {
$(e).val('')
}
})
}
function add_inline_form(name) {
if (name=="email") {
var first = $('#id_'+name+'-0-address').parents('.inline-related')
}
else {
var first = $('#id_'+name+'-0-id').parents('.inline-related')
}
// check to see if this is a stacked or tabular inline
if (first.hasClass("tabular")) {
var field_table = first.parent().find('table > tbody')
const children = field_table.children('tr.dynamic-inline')
var count = children.length
const last = $(children[count-1])
var copy = last.clone(true)
copy.removeClass("row1 row2")
copy.find("input[name$='address']").attr("readonly", false)
copy.addClass("row"+((count % 2) ? 2 : 1))
copy.insertAfter(last)
increment_form_ids($(copy), count, name)
}
else {
var last = $(first).parent().children('.last-related')
var copy = $(last).clone(true)
var count = $(first).parent().children('.inline-related').length
$(last).removeClass('last-related')
var header = $('h3', copy)
header.html(header.html().replace("#"+count, "#"+(count+1)))
$(last).after(copy)
increment_form_ids($(first).parents('.inline-group').children('.last-related'), count, name)
}
$('input#id_'+name+'-TOTAL_FORMS').val(count+1)
return false;
}
// Add all the "Add Another" links to the bottom of each inline group
$(function() {
var html_template = '<ul class="tools">'+
'<li>'+
'<a id="addlink-{{prefix}}" class="addlink" href="#">'+
'Add another</a>'+
'</li>'+
'</ul>'
$('.inline-group').each(function(i) {
//prefix is in the name of the input fields before the "-"
var prefix = $("input[type='hidden'][name!='csrfmiddlewaretoken']", this).attr("name").split("-")[0];
$(this).append(html_template.replace("{{prefix}}", prefix));
$('#addlink-' + prefix).on('click', () => add_inline_form(prefix));
})
})
/* Following functions based off code written by Arne Brodowski
http://www.arnebrodowski.de/blog/507-Add-and-remove-Django-Admin-Inlines-with-JavaScript.html
2012-02-01 customized for new Rolodex. Email formset doesn't have an id field, rather a "address"
field as primary key. Also for some reason the "active" boolean field doesn't get saved properly
if the checkbox input has an empty "value" argument.
*/
import $ from 'jquery';
function increment_form_ids(el, to, name) {
var from = to-1
$(':input', $(el)).each(function(i,e){
var old_name = $(e).attr('name')
var old_id = $(e).attr('id')
$(e).attr('name', old_name.replace(from, to))
$(e).attr('id', old_id.replace(from, to))
if ($(e).attr('type') != 'checkbox') {
$(e).val('')
}
})
}
function add_inline_form(name) {
if (name=="email") {
var first = $('#id_'+name+'-0-address').parents('.inline-related')
}
else {
var first = $('#id_'+name+'-0-id').parents('.inline-related')
}
// check to see if this is a stacked or tabular inline
if (first.hasClass("tabular")) {
var field_table = first.parent().find('table > tbody')
const children = field_table.children('tr.dynamic-inline')
var count = children.length
const last = $(children[count-1])
var copy = last.clone(true)
copy.removeClass("row1 row2")
copy.find("input[name$='address']").attr("readonly", false)
copy.addClass("row"+((count % 2) ? 2 : 1))
copy.insertAfter(last)
increment_form_ids($(copy), count, name)
}
else {
var last = $(first).parent().children('.last-related')
var copy = $(last).clone(true)
var count = $(first).parent().children('.inline-related').length
$(last).removeClass('last-related')
var header = $('h3', copy)
header.html(header.html().replace("#"+count, "#"+(count+1)))
$(last).after(copy)
increment_form_ids($(first).parents('.inline-group').children('.last-related'), count, name)
}
$('input#id_'+name+'-TOTAL_FORMS').val(count+1)
return false;
}
// Add all the "Add Another" links to the bottom of each inline group
$(function() {
var html_template = '<ul class="tools">'+
'<li>'+
'<a id="addlink-{{prefix}}" class="addlink" href="#">'+
'Add another</a>'+
'</li>'+
'</ul>'
$('.inline-group').each(function(i) {
//prefix is in the name of the input fields before the "-"
var prefix = $("input[type='hidden'][name!='csrfmiddlewaretoken']", this).attr("name").split("-")[0];
$(this).append(html_template.replace("{{prefix}}", prefix));
$('#addlink-' + prefix).on('click', () => add_inline_form(prefix));
})
})

View file

@ -598,6 +598,7 @@ TEST_CODE_COVERAGE_EXCLUDE_FILES = [
"ietf/review/import_from_review_tool.py",
"ietf/utils/patch.py",
"ietf/utils/test_data.py",
"ietf/utils/jstest.py",
]
# These are code line regex patterns
@ -743,8 +744,6 @@ IANA_SYNC_PASSWORD = "secret"
IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes"
IANA_SYNC_PROTOCOLS_URL = "https://www.iana.org/protocols/"
RFC_TEXT_RSYNC_SOURCE="ftp.rfc-editor.org::rfcs-text-only"
RFC_EDITOR_SYNC_PASSWORD="secret"
RFC_EDITOR_SYNC_NOTIFICATION_URL = "https://www.rfc-editor.org/parser/parser.php"
RFC_EDITOR_GROUP_NOTIFICATION_EMAIL = "webmaster@rfc-editor.org"
@ -971,7 +970,6 @@ OIDC_EXTRA_SCOPE_CLAIMS = 'ietf.ietfauth.utils.OidcExtraScopeClaims'
# ==============================================================================
RSYNC_BINARY = '/usr/bin/rsync'
YANGLINT_BINARY = '/usr/bin/yanglint'
DE_GFM_BINARY = '/usr/bin/de-gfm.ruby2.5'
@ -1013,7 +1011,6 @@ CHAT_URL_PATTERN = 'https://zulip.ietf.org/#narrow/stream/{chat_room_name}'
# CHAT_ARCHIVE_URL_PATTERN = 'https://www.ietf.org/jabber/logs/{chat_room_name}?C=M;O=D'
PYFLAKES_DEFAULT_ARGS= ["ietf", ]
VULTURE_DEFAULT_ARGS= ["ietf", ]
# Automatic Scheduling
#
@ -1061,8 +1058,6 @@ GROUP_ALIAS_DOMAIN = IETF_DOMAIN
TEST_DATA_DIR = os.path.abspath(BASE_DIR + "/../test/data")
POSTCONFIRM_PATH = "/a/postconfirm/wrapper"
USER_PREFERENCE_DEFAULTS = {
"expires_soon" : "14",
"new_enough" : "14",
@ -1077,6 +1072,7 @@ EXCLUDED_PERSONAL_EMAIL_REGEX_PATTERNS = [
"@ietf.org$",
]
# Configuration for django-markup
MARKUP_SETTINGS = {
'restructuredtext': {
'settings_overrides': {
@ -1090,8 +1086,6 @@ MARKUP_SETTINGS = {
}
}
MAILMAN_LIB_DIR = '/usr/lib/mailman'
# This is the number of seconds required between subscribing to an ietf
# mailing list and datatracker account creation being accepted
LIST_ACCOUNT_DELAY = 60*60*25 # 25 hours

View file

@ -8,7 +8,6 @@ from django.db.models import ForeignKey
import debug # pyflakes:ignore
class Status(models.Model):
name = 'Status'
date = models.DateTimeField(default=timezone.now)
slug = models.SlugField(blank=False, null=False, unique=True)

View file

@ -2334,8 +2334,8 @@ class ApprovalsTestCase(BaseSubmitTestCase):
self.assertEqual(len(Preapproval.objects.filter(name=preapproval.name)), 0)
# Transaction.on_commit() requires use of TransactionTestCase, but that has a performance penalty. Replace it
# with a no-op for testing purposes.
# Transaction.on_commit() interacts badly with TestCase's transaction behavior. Replace it
# with a pass-through for testing purposes.
@mock.patch.object(transaction, 'on_commit', lambda x: x())
@override_settings(IDTRACKER_BASE_URL='https://datatracker.example.com')
class ApiSubmissionTests(BaseSubmitTestCase):

View file

@ -90,7 +90,8 @@ def upload_submission(request):
clear_existing_files(form)
save_files(form)
create_submission_event(request, submission, desc="Uploaded submission")
# Wrap in on_commit so the delayed task cannot start until the view is done with the DB
# Wrap in on_commit in case a transaction is open
# (As of 2024-11-08, this only runs in a transaction during tests)
transaction.on_commit(
lambda: process_uploaded_submission_task.delay(submission.pk)
)
@ -166,7 +167,8 @@ def api_submission(request):
save_files(form)
create_submission_event(request, submission, desc="Uploaded submission through API")
# Wrap in on_commit so the delayed task cannot start until the view is done with the DB
# Wrap in on_commit in case a transaction is open
# (As of 2024-11-08, this only runs in a transaction during tests)
transaction.on_commit(
lambda: process_and_accept_uploaded_submission_task.delay(submission.pk)
)

View file

@ -2,12 +2,12 @@
# -*- coding: utf-8 -*-
import datetime
import os
import json
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render
from django.utils import timezone
@ -22,8 +22,6 @@ from ietf.utils.log import log
from ietf.utils.response import permission_denied
SYNC_BIN_PATH = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../bin"))
#@role_required('Secretariat', 'IANA', 'RFC Editor')
def discrepancies(request):
sections = find_discrepancies()
@ -79,16 +77,32 @@ def notify(request, org, notification):
if request.method == "POST":
if notification == "index":
log("Queuing RFC Editor index sync from notify view POST")
tasks.rfc_editor_index_update_task.delay()
# Wrap in on_commit in case a transaction is open
# (As of 2024-11-08, this only runs in a transaction during tests)
transaction.on_commit(
lambda: tasks.rfc_editor_index_update_task.delay()
)
elif notification == "queue":
log("Queuing RFC Editor queue sync from notify view POST")
tasks.rfc_editor_queue_updates_task.delay()
# Wrap in on_commit in case a transaction is open
# (As of 2024-11-08, this only runs in a transaction during tests)
transaction.on_commit(
lambda: tasks.rfc_editor_queue_updates_task.delay()
)
elif notification == "changes":
log("Queuing IANA changes sync from notify view POST")
tasks.iana_changes_update_task.delay()
# Wrap in on_commit in case a transaction is open
# (As of 2024-11-08, this only runs in a transaction during tests)
transaction.on_commit(
lambda: tasks.iana_changes_update_task.delay()
)
elif notification == "protocols":
log("Queuing IANA protocols sync from notify view POST")
tasks.iana_protocols_update_task.delay()
# Wrap in on_commit in case a transaction is open
# (As of 2024-11-08, this only runs in a transaction during tests)
transaction.on_commit(
lambda: tasks.iana_protocols_update_task.delay()
)
return HttpResponse("OK", content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET)

View file

@ -4,8 +4,10 @@
{% load widget_tweaks %}
{% load ietf_filters %}
<form id="search_form"
method="post"
class="form-horizontal"
action="{% url 'ietf.doc.views_search.search' %}">
{% csrf_token %}
<!-- [html-validate-disable-block input-missing-label -- labelled via aria-label] -->
<div class="input-group search_field">
{{ form.name|add_class:"form-control"|attr:"placeholder:Document name/title/RFC number"|attr:"aria-label:Document name/title/RFC number" }}

View file

@ -1,29 +0,0 @@
{% extends "base.html" %}
{# Copyright The IETF Trust 2015, All Rights Reserved #}
{% load origin %}
{% load static %}
{% load ietf_filters %}
{% block pagehead %}
<link rel="stylesheet" href="{% static "ietf/css/highcharts.css" %}">
{% endblock %}
{% block js %}
<script src="{% static 'ietf/js/highstock.js' %}"></script>
<script>
$(function () {
var chart;
$.getJSON('{{ confurl }}?{{ queryargs }}', function (conf) {
chart = Highcharts.stockChart('chart', conf);
chart.showLoading();
$.getJSON('{{ dataurl }}?{{ queryargs }}', function (data) {
chart.series[0].setData(data);
chart.hideLoading();
});
});
});
</script>
{% endblock %}
{% block title %}Document Statistics{% endblock %}
{% block content %}
{% origin %}
<div id="chart"></div>
{% endblock %}

View file

@ -0,0 +1,34 @@
{% extends "base.html" %}
{# Copyright The IETF Trust 2024, All Rights Reserved #}
{% load origin static person_filters ietf_filters %}
{% block pagehead %}
<link rel="stylesheet" href="{% static 'ietf/css/list.css' %}">
{% endblock %}
{% block title %}Group Leadership{% endblock %}
{% block content %}
{% origin %}
<h1>Group Leadership ({{ group_type }})</h1>
{% if user|has_role:"Secretariat" %}
<div class="text-end">
<a class="btn btn-primary" href="{% url 'ietf.group.views.group_leadership_csv' group_type=group_type %}">
<i class="bi bi-file-ruled"></i> Export as CSV
</a>
</div>
{% endif %}
<table class="table table-sm table-striped">
<thead>
<tr>
<th scope="col">Leader</th>
<th scope="col">Groups</th>
</tr>
</thead>
<tbody>
{% for leader in leaders %}
<tr>
<td>{{ leader.name }}</td>
<td>{{ leader.groups }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endblock %}

View file

@ -40,7 +40,7 @@
{% if future %}
<h2 class="mt-5" id="futuremeets">
Future Meetings
<a class="float-end"
<a class="ms-2"
aria-label="icalendar entry for all scheduled future {{ group.acronym }} meetings"
title="icalendar entry for all scheduled future {{ group.acronym }} meetings"
href="{% url 'ietf.meeting.views.upcoming_ical' %}?show={{ group.acronym }}">
@ -174,4 +174,4 @@
ietf_timezone.initialize('local');
});
</script>
{% endblock %}
{% endblock %}

View file

@ -88,12 +88,13 @@
</a>
<br>
{% endfor %}
{% if entry.session.video_stream_url %}
<a href="{{ entry.session.session_recording_url }}">
Session recording
</a>
<br>
{% endif %}
{% for rec in entry.meetecho_recordings %}
<a href="{{ rec.material }}">
Session recording
{% if rec.time %}{{ rec.time|date:"D G:i"}}{% endif %}
</a>
<br>
{% endfor%}
</td>
{# slides #}
<td>

View file

@ -19,7 +19,7 @@
</a>
{% for meeting in meetings %}
{% if meeting.show_important_dates %}
<h2 class="mt-5">
<h2 class="mt-5" id="IETF{{ meeting.number }}">
IETF {{ meeting.number }}
<br>
<small class="text-body-secondary">{{ meeting.date }}, {{ meeting.city }}, {{ meeting.country }}</small>

View file

@ -320,51 +320,50 @@
</tr>
{% endif %}
{# Recordings #}
{% if session.has_recordings %}
{% with session.recordings as recordings %}
{% if recordings %}
{# There's no guaranteed order, so this is a bit messy: #}
{# First, the audio recordings, if any #}
{% for r in recordings %}
{% if r.get_href and 'audio' in r.get_href %}
<tr>
<td>
<a href="{{ r.get_href }}"><i class="bi bi-file-play"></i> {{ r.title }}</a>
</td>
</tr>
{% endif %}
{% endfor %}
{# Then the youtube recordings #}
{% for r in recordings %}
{% if r.get_href and 'youtu' in r.get_href %}
<tr>
<td>
<a href="{{ r.get_href }}"><i class="bi bi-file-slides"></i> {{ r.title }}</a>
</td>
</tr>
{% endif %}
{% endfor %}
{# Finally, any other recordings #}
{% for r in recordings %}
{% if r.get_href and not 'audio' in r.get_href and not 'youtu' in r.get_href %}
<tr>
<td>
<a href="{{ r.get_href }}"><i class="bi bi-file-play"></i> {{ r.title }}</a>
</td>
</tr>
{% endif %}
{% endfor %}
{% endif %}
{% endwith %}
{% if session.video_stream_url %}
<tr>
<td>
<a href="{{ session.session_recording_url }}">
<i class="bi bi-file-slides"></i> Session recording
</a>
</td>
</tr>
{% with session.recordings as recordings %}
{% if recordings %}
{# There's no guaranteed order, so this is a bit messy: #}
{# First, the audio recordings, if any #}
{% for r in recordings %}
{% if r.get_href and 'audio' in r.get_href %}
<tr>
<td>
<a href="{{ r.get_href }}"><i class="bi bi-file-play"></i> {{ r.title }}</a>
</td>
</tr>
{% endif %}
{% endfor %}
{# Then the youtube recordings #}
{% for r in recordings %}
{% if r.get_href and 'youtu' in r.get_href %}
<tr>
<td>
<a href="{{ r.get_href }}"><i class="bi bi-file-slides"></i> {{ r.title }}</a>
</td>
</tr>
{% endif %}
{% endfor %}
{# Finally, any other recordings #}
{% for r in recordings %}
{% if r.get_href and not 'audio' in r.get_href and not 'youtu' in r.get_href %}
<tr>
<td>
<a href="{{ r.get_href }}"><i class="bi bi-file-play"></i> {{ r.title }}</a>
</td>
</tr>
{% endif %}
{% endfor %}
{% endif %}
{% endwith %}
{% if session.session_recording_url %}
<tr>
<td>
<a href="{{ session.session_recording_url }}">
<i class="bi bi-file-slides"></i>
Meetecho session recording
</a>
</td>
</tr>
{% endif %}
</tbody>
</table>

View file

@ -1,82 +0,0 @@
#!/usr/bin/env python
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
# -*- Python -*-
#
# $Id: aliasutil.py $
#
# Author: Markus Stenberg <mstenber@cisco.com>
#
"""
Mailing list alias dumping utilities
"""
from django.conf import settings
from ietf.utils.log import log
import debug # pyflakes:ignore
def rewrite_email_address(email):
""" Prettify the email address (and if it's empty, skip it by
returning None). """
if not email:
return
email = email.strip()
if not email:
return
if email[0]=='<' and email[-1] == '>':
email = email[1:-1]
# If it doesn't look like email, skip
if '@' not in email and '?' not in email:
return
return email
def rewrite_address_list(l):
""" This utility function makes sure there is exactly one instance
of an address within the result list, and preserves order
(although it may not be relevant to start with) """
h = {}
for address in l:
#address = address.strip()
if address in h: continue
h[address] = True
yield address
def dump_sublist(afile, vfile, alias, adomains, vdomain, emails):
if not emails:
return emails
# Nones in the list should be skipped
emails = [_f for _f in emails if _f]
# Make sure emails are sane and eliminate the Nones again for
# non-sane ones
emails = [rewrite_email_address(e) for e in emails]
emails = [_f for _f in emails if _f]
# And we'll eliminate the duplicates too but preserve order
emails = list(rewrite_address_list(emails))
if not emails:
return emails
try:
filtername = 'xfilter-%s' % (alias, ) # in aliases, --> | expandname
expandname = 'expand-%s' % (alias, ) # in virtual, --> email list
for domain in adomains:
aliasaddr = '%s@%s' % (alias, domain) # in virtual, --> filtername
vfile.write('%-64s %s\n' % (aliasaddr, filtername))
afile.write('%-64s "|%s filter %s %s"\n' % (filtername+':', settings.POSTCONFIRM_PATH, expandname, vdomain))
vfile.write('%-64s %s\n' % ("%s@%s"%(expandname, vdomain), ', '.join(emails)))
except UnicodeEncodeError:
# If there's unicode in email address, something is badly
# wrong and we just silently punt
# XXX - is there better approach?
log('Error encoding email address for an %s alias: %s' % (alias, repr(emails)))
return []
return emails

View file

@ -12,6 +12,8 @@ try:
from selenium import webdriver
from selenium.webdriver.firefox.service import Service
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
except ImportError as e:
skip_selenium = True
@ -87,6 +89,48 @@ class IetfSeleniumTestCase(IetfLiveServerTestCase):
# actions = ActionChains(self.driver)
# actions.move_to_element(element).perform()
def scroll_and_click(self, element_locator, timeout_seconds=5):
"""
Selenium has restrictions around clicking elements outside the viewport, so
this wrapper encapsulates the boilerplate of forcing scrolling and clicking.
:param element_locator: A two item tuple of a Selenium locator eg `(By.CSS_SELECTOR, '#something')`
"""
# so that we can restore the state of the webpage after clicking
original_html_scroll_behaviour_to_restore = self.driver.execute_script('return document.documentElement.style.scrollBehavior')
original_html_overflow_to_restore = self.driver.execute_script('return document.documentElement.style.overflow')
original_body_scroll_behaviour_to_restore = self.driver.execute_script('return document.body.style.scrollBehavior')
original_body_overflow_to_restore = self.driver.execute_script('return document.body.style.overflow')
self.driver.execute_script('document.documentElement.style.scrollBehavior = "auto"')
self.driver.execute_script('document.documentElement.style.overflow = "auto"')
self.driver.execute_script('document.body.style.scrollBehavior = "auto"')
self.driver.execute_script('document.body.style.overflow = "auto"')
element = self.driver.find_element(element_locator[0], element_locator[1])
self.scroll_to_element(element)
# Note that Selenium itself seems to have multiple definitions of 'clickable'.
# You might expect that the following wait for the 'element_to_be_clickable'
# would confirm that the following .click() would succeed but it doesn't.
# That's why the preceeding code attempts to force scrolling to bring the
# element into the viewport to allow clicking.
WebDriverWait(self.driver, timeout_seconds).until(expected_conditions.element_to_be_clickable(element_locator))
element.click()
if original_html_scroll_behaviour_to_restore:
self.driver.execute_script(f'document.documentElement.style.scrollBehavior = "{original_html_scroll_behaviour_to_restore}"')
if original_html_overflow_to_restore:
self.driver.execute_script(f'document.documentElement.style.overflow = "{original_html_overflow_to_restore}"')
if original_body_scroll_behaviour_to_restore:
self.driver.execute_script(f'document.body.style.scrollBehavior = "{original_body_scroll_behaviour_to_restore}"')
if original_body_overflow_to_restore:
self.driver.execute_script(f'document.body.style.overflow = "{original_body_overflow_to_restore}"')
class presence_of_element_child_by_css_selector:
"""Wait for presence of a child of a WebElement matching a CSS selector

View file

@ -1,5 +1,5 @@
# Kustomize deployment
## Run locally
# Kustomize deployment
## Run locally
The `secrets.yaml` file is provided as a reference only and must be referenced manually in the `kustomization.yaml` file.

View file

@ -1,16 +1,16 @@
namespace: datatracker
namePrefix: dt-
configMapGenerator:
- name: files-cfgmap
files:
- nginx-logging.conf
- nginx-auth.conf
- nginx-datatracker.conf
- settings_local.py
resources:
- auth.yaml
- beat.yaml
- celery.yaml
- datatracker.yaml
- memcached.yaml
- rabbitmq.yaml
namespace: datatracker
namePrefix: dt-
configMapGenerator:
- name: files-cfgmap
files:
- nginx-logging.conf
- nginx-auth.conf
- nginx-datatracker.conf
- settings_local.py
resources:
- auth.yaml
- beat.yaml
- celery.yaml
- datatracker.yaml
- memcached.yaml
- rabbitmq.yaml

View file

@ -1,80 +1,80 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: memcached
spec:
replicas: 1
revisionHistoryLimit: 2
selector:
matchLabels:
app: memcached
template:
metadata:
labels:
app: memcached
spec:
securityContext:
runAsNonRoot: true
containers:
# -----------------------------------------------------
# Memcached
# -----------------------------------------------------
- image: "memcached:1.6-alpine"
imagePullPolicy: IfNotPresent
args: ["-m", "1024"]
name: memcached
ports:
- name: memcached
containerPort: 11211
protocol: TCP
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
# memcached image sets up uid/gid 11211
runAsUser: 11211
runAsGroup: 11211
# -----------------------------------------------------
# Memcached Exporter for Prometheus
# -----------------------------------------------------
- image: "quay.io/prometheus/memcached-exporter:v0.14.3"
imagePullPolicy: IfNotPresent
name: memcached-exporter
ports:
- name: metrics
containerPort: 9150
protocol: TCP
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
runAsUser: 65534 # nobody
runAsGroup: 65534 # nobody
dnsPolicy: ClusterFirst
restartPolicy: Always
terminationGracePeriodSeconds: 30
---
apiVersion: v1
kind: Service
metadata:
name: memcached
annotations:
k8s.grafana.com/scrape: "true" # this is not a bool
k8s.grafana.com/metrics.portName: "metrics"
spec:
type: ClusterIP
ports:
- port: 11211
targetPort: memcached
protocol: TCP
name: memcached
- port: 9150
targetPort: metrics
protocol: TCP
name: metrics
selector:
app: memcached
apiVersion: apps/v1
kind: Deployment
metadata:
name: memcached
spec:
replicas: 1
revisionHistoryLimit: 2
selector:
matchLabels:
app: memcached
template:
metadata:
labels:
app: memcached
spec:
securityContext:
runAsNonRoot: true
containers:
# -----------------------------------------------------
# Memcached
# -----------------------------------------------------
- image: "memcached:1.6-alpine"
imagePullPolicy: IfNotPresent
args: ["-m", "1024"]
name: memcached
ports:
- name: memcached
containerPort: 11211
protocol: TCP
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
# memcached image sets up uid/gid 11211
runAsUser: 11211
runAsGroup: 11211
# -----------------------------------------------------
# Memcached Exporter for Prometheus
# -----------------------------------------------------
- image: "quay.io/prometheus/memcached-exporter:v0.14.3"
imagePullPolicy: IfNotPresent
name: memcached-exporter
ports:
- name: metrics
containerPort: 9150
protocol: TCP
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
runAsUser: 65534 # nobody
runAsGroup: 65534 # nobody
dnsPolicy: ClusterFirst
restartPolicy: Always
terminationGracePeriodSeconds: 30
---
apiVersion: v1
kind: Service
metadata:
name: memcached
annotations:
k8s.grafana.com/scrape: "true" # this is not a bool
k8s.grafana.com/metrics.portName: "metrics"
spec:
type: ClusterIP
ports:
- port: 11211
targetPort: memcached
protocol: TCP
name: memcached
- port: 9150
targetPort: metrics
protocol: TCP
name: metrics
selector:
app: memcached

View file

@ -1,83 +1,83 @@
apiVersion: v1
kind: Secret
metadata:
name: secrets-env
type: Opaque
stringData:
DATATRACKER_SERVER_MODE: "development" # development for staging, production for production
DATATRACKER_ADMINS: |-
Robert Sparks <rjsparks@nostrum.com>
Ryan Cross <rcross@amsl.com>
Kesara Rathnayake <kesara@staff.ietf.org>
Jennifer Richards <jennifer@staff.ietf.org>
Nicolas Giard <nick@staff.ietf.org>
DATATRACKER_ALLOWED_HOSTS: ".ietf.org" # newline-separated list also allowed
# DATATRACKER_DATATRACKER_DEBUG: "false"
# DB access details - needs to be filled in
# DATATRACKER_DB_HOST: "db"
# DATATRACKER_DB_PORT: "5432"
# DATATRACKER_DB_NAME: "datatracker"
# DATATRACKER_DB_USER: "django" # secret
# DATATRACKER_DB_PASS: "RkTkDPFnKpko" # secret
# DATATRACKER_DB_CONN_MAX_AGE: "0" # connection per request if not set, no limit if set to "None"
# DATATRACKER_DB_CONN_HEALTH_CHECKS: "false"
DATATRACKER_DJANGO_SECRET_KEY: "PDwXboUq!=hPjnrtG2=ge#N$Dwy+wn@uivrugwpic8mxyPfHk" # secret
# Set this to point testing / staging at the production statics server until we
# sort that out
# DATATRACKER_STATIC_URL: "https://static.ietf.org/dt/12.10.0/"
# DATATRACKER_EMAIL_DEBUG: "true"
# Outgoing email details
# DATATRACKER_EMAIL_HOST: "localhost" # defaults to localhost
# DATATRACKER_EMAIL_PORT: "2025" # defaults to 2025
# The value here is the default from settings.py (i.e., not actually secret)
DATATRACKER_NOMCOM_APP_SECRET_B64: "m9pzMezVoFNJfsvU9XSZxGnXnwup6P5ZgCQeEnROOoQ=" # secret
DATATRACKER_IANA_SYNC_PASSWORD: "this-is-the-iana-sync-password" # secret
DATATRACKER_RFC_EDITOR_SYNC_PASSWORD: "this-is-the-rfc-editor-sync-password" # secret
DATATRACKER_YOUTUBE_API_KEY: "this-is-the-youtube-api-key" # secret
DATATRACKER_GITHUB_BACKUP_API_KEY: "this-is-the-github-backup-api-key" # secret
# API key configuration
DATATRACKER_API_KEY_TYPE: "ES265"
# secret - value here is the default from settings.py (i.e., not actually secret)
DATATRACKER_API_PUBLIC_KEY_PEM_B64: |-
Ci0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tCk1Ga3dFd1lIS29aSXpqMENBUVlJS
29aSXpqMERBUWNEUWdBRXFWb2pzYW9mREpTY3VNSk4rdHNodW15Tk01TUUKZ2Fyel
ZQcWtWb3ZtRjZ5RTdJSi9kdjRGY1YrUUtDdEovck9TOGUzNlk4WkFFVll1dWtoZXM
weVoxdz09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo=
# secret - value here is the default from settings.py (i.e., not actually secret)
DATATRACKER_API_PRIVATE_KEY_PEM_B64: |-
Ci0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLQpNSUdIQWdFQU1CTUdCeXFHU000O
UFnRUdDQ3FHU000OUF3RUhCRzB3YXdJQkFRUWdvSTZMSmtvcEtxOFhySGk5ClFxR1
F2RTRBODNURllqcUx6KzhnVUxZZWNzcWhSQU5DQUFTcFdpT3hxaDhNbEp5NHdrMzY
yeUc2Ykkwemt3U0IKcXZOVStxUldpK1lYcklUc2duOTIvZ1Z4WDVBb0swbitzNUx4
N2ZwanhrQVJWaTY2U0Y2elRKblgKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo=
#DATATRACKER_REGISTRATION_API_KEY: "some-key" # secret"
# DATATRACKER_MEETECHO_API_BASE: "https://meetings.conf.meetecho.com/api/v1/"
DATATRACKER_MEETECHO_CLIENT_ID: "this-is-the-meetecho-client-id" # secret
DATATRACKER_MEETECHO_CLIENT_SECRET: "this-is-the-meetecho-client-secret" # secret
# DATATRACKER_MATOMO_SITE_ID: "7" # must be present to enable Matomo
# DATATRACKER_MATOMO_DOMAIN_PATH: "analytics.ietf.org"
CELERY_PASSWORD: "this-is-a-secret" # secret
# Only one of these may be set
# DATATRACKER_APP_API_TOKENS_JSON_B64: "e30K" # secret
# DATATRACKER_APP_API_TOKENS_JSON: "{}" # secret
# use this to override default - one entry per line
# DATATRACKER_CSRF_TRUSTED_ORIGINS: |-
# https://datatracker.staging.ietf.org
# Scout configuration
DATATRACKER_SCOUT_KEY: "this-is-the-scout-key"
apiVersion: v1
kind: Secret
metadata:
name: secrets-env
type: Opaque
stringData:
DATATRACKER_SERVER_MODE: "development" # development for staging, production for production
DATATRACKER_ADMINS: |-
Robert Sparks <rjsparks@nostrum.com>
Ryan Cross <rcross@amsl.com>
Kesara Rathnayake <kesara@staff.ietf.org>
Jennifer Richards <jennifer@staff.ietf.org>
Nicolas Giard <nick@staff.ietf.org>
DATATRACKER_ALLOWED_HOSTS: ".ietf.org" # newline-separated list also allowed
# DATATRACKER_DATATRACKER_DEBUG: "false"
# DB access details - needs to be filled in
# DATATRACKER_DB_HOST: "db"
# DATATRACKER_DB_PORT: "5432"
# DATATRACKER_DB_NAME: "datatracker"
# DATATRACKER_DB_USER: "django" # secret
# DATATRACKER_DB_PASS: "RkTkDPFnKpko" # secret
# DATATRACKER_DB_CONN_MAX_AGE: "0" # connection per request if not set, no limit if set to "None"
# DATATRACKER_DB_CONN_HEALTH_CHECKS: "false"
DATATRACKER_DJANGO_SECRET_KEY: "PDwXboUq!=hPjnrtG2=ge#N$Dwy+wn@uivrugwpic8mxyPfHk" # secret
# Set this to point testing / staging at the production statics server until we
# sort that out
# DATATRACKER_STATIC_URL: "https://static.ietf.org/dt/12.10.0/"
# DATATRACKER_EMAIL_DEBUG: "true"
# Outgoing email details
# DATATRACKER_EMAIL_HOST: "localhost" # defaults to localhost
# DATATRACKER_EMAIL_PORT: "2025" # defaults to 2025
# The value here is the default from settings.py (i.e., not actually secret)
DATATRACKER_NOMCOM_APP_SECRET_B64: "m9pzMezVoFNJfsvU9XSZxGnXnwup6P5ZgCQeEnROOoQ=" # secret
DATATRACKER_IANA_SYNC_PASSWORD: "this-is-the-iana-sync-password" # secret
DATATRACKER_RFC_EDITOR_SYNC_PASSWORD: "this-is-the-rfc-editor-sync-password" # secret
DATATRACKER_YOUTUBE_API_KEY: "this-is-the-youtube-api-key" # secret
DATATRACKER_GITHUB_BACKUP_API_KEY: "this-is-the-github-backup-api-key" # secret
# API key configuration
DATATRACKER_API_KEY_TYPE: "ES265"
# secret - value here is the default from settings.py (i.e., not actually secret)
DATATRACKER_API_PUBLIC_KEY_PEM_B64: |-
Ci0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tCk1Ga3dFd1lIS29aSXpqMENBUVlJS
29aSXpqMERBUWNEUWdBRXFWb2pzYW9mREpTY3VNSk4rdHNodW15Tk01TUUKZ2Fyel
ZQcWtWb3ZtRjZ5RTdJSi9kdjRGY1YrUUtDdEovck9TOGUzNlk4WkFFVll1dWtoZXM
weVoxdz09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo=
# secret - value here is the default from settings.py (i.e., not actually secret)
DATATRACKER_API_PRIVATE_KEY_PEM_B64: |-
Ci0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLQpNSUdIQWdFQU1CTUdCeXFHU000O
UFnRUdDQ3FHU000OUF3RUhCRzB3YXdJQkFRUWdvSTZMSmtvcEtxOFhySGk5ClFxR1
F2RTRBODNURllqcUx6KzhnVUxZZWNzcWhSQU5DQUFTcFdpT3hxaDhNbEp5NHdrMzY
yeUc2Ykkwemt3U0IKcXZOVStxUldpK1lYcklUc2duOTIvZ1Z4WDVBb0swbitzNUx4
N2ZwanhrQVJWaTY2U0Y2elRKblgKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo=
#DATATRACKER_REGISTRATION_API_KEY: "some-key" # secret"
# DATATRACKER_MEETECHO_API_BASE: "https://meetings.conf.meetecho.com/api/v1/"
DATATRACKER_MEETECHO_CLIENT_ID: "this-is-the-meetecho-client-id" # secret
DATATRACKER_MEETECHO_CLIENT_SECRET: "this-is-the-meetecho-client-secret" # secret
# DATATRACKER_MATOMO_SITE_ID: "7" # must be present to enable Matomo
# DATATRACKER_MATOMO_DOMAIN_PATH: "analytics.ietf.org"
CELERY_PASSWORD: "this-is-a-secret" # secret
# Only one of these may be set
# DATATRACKER_APP_API_TOKENS_JSON_B64: "e30K" # secret
# DATATRACKER_APP_API_TOKENS_JSON: "{}" # secret
# use this to override default - one entry per line
# DATATRACKER_CSRF_TRUSTED_ORIGINS: |-
# https://datatracker.staging.ietf.org
# Scout configuration
DATATRACKER_SCOUT_KEY: "this-is-the-scout-key"
DATATRACKER_SCOUT_NAME: "StagingDatatracker"