ci: merge main to release (#8250)

Robert Sparks 2024-11-21 08:51:12 -06:00, committed by GitHub
commit 03ced83655
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
57 changed files with 1504 additions and 1270 deletions

.gitattributes

@@ -1,2 +1,280 @@
-/.yarn/releases/** binary
-/.yarn/plugins/** binary
+# Auto detect text files and perform LF normalization
+* text=auto
# ---------------------------------------------------
# Python Projects
# ---------------------------------------------------
# Source files
*.pxd text diff=python
*.py text diff=python
*.py3 text diff=python
*.pyw text diff=python
*.pyx text diff=python
*.pyz text diff=python
*.pyi text diff=python
# Binary files
*.db binary
*.p binary
*.pkl binary
*.pickle binary
*.pyc binary export-ignore
*.pyo binary export-ignore
*.pyd binary
# Jupyter notebook
*.ipynb text eol=lf
# ---------------------------------------------------
# Web Projects
# ---------------------------------------------------
# Source code
*.bash text eol=lf
*.bat text eol=crlf
*.cmd text eol=crlf
*.coffee text
*.css text diff=css
*.htm text diff=html
*.html text diff=html
*.inc text
*.ini text
*.js text
*.mjs text
*.cjs text
*.json text
*.jsx text
*.less text
*.ls text
*.map text -diff
*.od text
*.onlydata text
*.php text diff=php
*.pl text
*.ps1 text eol=crlf
*.py text diff=python
*.rb text diff=ruby
*.sass text
*.scm text
*.scss text diff=css
*.sh text eol=lf
.husky/* text eol=lf
*.sql text
*.styl text
*.tag text
*.ts text
*.tsx text
*.xml text
*.xhtml text diff=html
# Docker
Dockerfile text
# Documentation
*.ipynb text eol=lf
*.markdown text diff=markdown
*.md text diff=markdown
*.mdwn text diff=markdown
*.mdown text diff=markdown
*.mkd text diff=markdown
*.mkdn text diff=markdown
*.mdtxt text
*.mdtext text
*.txt text
AUTHORS text
CHANGELOG text
CHANGES text
CONTRIBUTING text
COPYING text
copyright text
*COPYRIGHT* text
INSTALL text
license text
LICENSE text
NEWS text
readme text
*README* text
TODO text
# Templates
*.dot text
*.ejs text
*.erb text
*.haml text
*.handlebars text
*.hbs text
*.hbt text
*.jade text
*.latte text
*.mustache text
*.njk text
*.phtml text
*.pug text
*.svelte text
*.tmpl text
*.tpl text
*.twig text
*.vue text
# Configs
*.cnf text
*.conf text
*.config text
.editorconfig text
.env text
.gitattributes text
.gitconfig text
.htaccess text
*.lock text -diff
package.json text eol=lf
package-lock.json text eol=lf -diff
pnpm-lock.yaml text eol=lf -diff
.prettierrc text
yarn.lock text -diff
*.toml text
*.yaml text
*.yml text
browserslist text
Makefile text
makefile text
# Fixes syntax highlighting on GitHub to allow comments
tsconfig.json linguist-language=JSON-with-Comments
# Heroku
Procfile text
# Graphics
*.ai binary
*.bmp binary
*.eps binary
*.gif binary
*.gifv binary
*.ico binary
*.jng binary
*.jp2 binary
*.jpg binary
*.jpeg binary
*.jpx binary
*.jxr binary
*.pdf binary
*.png binary
*.psb binary
*.psd binary
*.svg text
*.svgz binary
*.tif binary
*.tiff binary
*.wbmp binary
*.webp binary
# Audio
*.kar binary
*.m4a binary
*.mid binary
*.midi binary
*.mp3 binary
*.ogg binary
*.ra binary
# Video
*.3gpp binary
*.3gp binary
*.as binary
*.asf binary
*.asx binary
*.avi binary
*.fla binary
*.flv binary
*.m4v binary
*.mng binary
*.mov binary
*.mp4 binary
*.mpeg binary
*.mpg binary
*.ogv binary
*.swc binary
*.swf binary
*.webm binary
# Archives
*.7z binary
*.gz binary
*.jar binary
*.rar binary
*.tar binary
*.zip binary
# Fonts
*.ttf binary
*.eot binary
*.otf binary
*.woff binary
*.woff2 binary
# Executables
*.exe binary
*.pyc binary
# Prevents massive diffs caused by vendored, minified files
**/.yarn/releases/** binary
**/.yarn/plugins/** binary
# RC files (like .babelrc or .eslintrc)
*.*rc text
# Ignore files (like .npmignore or .gitignore)
*.*ignore text
# Prevents massive diffs from built files
dist/* binary
# ---------------------------------------------------
# Common
# ---------------------------------------------------
# Documents
*.bibtex text diff=bibtex
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
*.md text diff=markdown
*.mdx text diff=markdown
*.tex text diff=tex
*.adoc text
*.textile text
*.mustache text
*.csv text eol=crlf
*.tab text
*.tsv text
*.txt text
*.sql text
*.epub diff=astextplain
# Text files where line endings should be preserved
*.patch -text
# ---------------------------------------------------
# Vzic specific
# ---------------------------------------------------
*.pl text diff=perl
*.pm text diff=perl
# C/C++
*.c text diff=cpp
*.cc text diff=cpp
*.cxx text diff=cpp
*.cpp text diff=cpp
*.cpi text diff=cpp
*.c++ text diff=cpp
*.hpp text diff=cpp
*.h text diff=cpp
*.h++ text diff=cpp
*.hh text diff=cpp
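
A quick way to confirm how these rules resolve for a concrete path is `git check-attr`, which applies the same matching logic Git uses during checkout, diff, and export. A minimal sketch in Python; the example paths are hypothetical and it assumes it runs inside a checkout containing this .gitattributes:

```python
import subprocess

def show_attrs(paths):
    """Print the text/eol/diff/binary attributes Git resolves for each path."""
    for path in paths:
        # `git check-attr` reports attribute values exactly as Git will apply
        # them, so it verifies the patterns above without committing anything.
        result = subprocess.run(
            ["git", "check-attr", "text", "eol", "diff", "binary", "--", path],
            capture_output=True, text=True, check=True,
        )
        print(result.stdout, end="")

# Hypothetical paths chosen to exercise different rule groups above.
show_attrs(["ietf/settings.py", "media/logo.png", "dev/deploy/run.bat"])
```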


@@ -137,6 +137,7 @@ jobs:
    uses: ./.github/workflows/tests.yml
    if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
    needs: [prepare]
+   secrets: inherit
    with:
      ignoreLowerCoverage: ${{ github.event.inputs.ignoreLowerCoverage == 'true' }}
      skipSelenium: true
@@ -149,7 +150,8 @@ jobs:
    name: Make Release
    if: ${{ !failure() && !cancelled() }}
    needs: [tests, prepare]
-   runs-on: ubuntu-latest
+   runs-on:
+     group: hperf-8c32r
    permissions:
      contents: write
      packages: write
@@ -166,214 +168,106 @@ jobs:
          fetch-depth: 1
          fetch-tags: false

-      - name: Launch build VM
-        id: azlaunch
-        timeout-minutes: 10
-        run: |
-          echo "Authenticating to Azure..."
-          az login --service-principal -u ${{ secrets.AZ_BUILD_APP_ID }} -p ${{ secrets.AZ_BUILD_PWD }} --tenant ${{ secrets.AZ_BUILD_TENANT_ID }}
-          echo "Creating VM..."
-          vminfo=$(az vm create \
-            --resource-group ghaDatatracker \
-            --name tmpGhaBuildVM-${{ github.run_number }} \
-            --image Ubuntu2204 \
-            --admin-username azureuser \
-            --generate-ssh-keys \
-            --priority Spot \
-            --size Standard_D8ads_v5 \
-            --max-price -1 \
-            --ephemeral-os-disk \
-            --os-disk-size-gb 100 \
-            --eviction-policy Delete \
-            --nic-delete-option Delete \
-            --os-disk-delete-option Delete \
-            --output tsv \
-            --query "publicIpAddress")
-          echo "ipaddr=$vminfo" >> "$GITHUB_OUTPUT"
-          echo "VM Public IP: $vminfo"
-          cat ~/.ssh/id_rsa > ${{ github.workspace }}/prvkey.key
-          echo "Fetching SSH host public keys..."
-          until ssh-keyscan -t rsa $vminfo 2> /dev/null
-          do
-            echo "Will try again in 5 seconds..."
-            sleep 5
-          done
-          ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts
-
-      - name: Remote SSH into Build VM
-        uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          GITHUB_ACTOR: ${{ github.actor }}
-          GITHUB_SHA: ${{ github.sha }}
-          GITHUB_REF_NAME: ${{ github.ref_name }}
-          GITHUB_RUN_ID: ${{ github.run_id }}
-          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_STATIC_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_STATIC_KEY_SECRET }}
-          AWS_DEFAULT_REGION: auto
-          AWS_ENDPOINT_URL: ${{ secrets.CF_R2_ENDPOINT }}
-          PKG_VERSION: ${{ env.PKG_VERSION }}
-          SHOULD_DEPLOY: ${{ env.SHOULD_DEPLOY }}
-          SKIP_TESTS: ${{ github.event.inputs.skiptests }}
-          DEBIAN_FRONTEND: noninteractive
-          BROWSERSLIST_IGNORE_OLD_DATA: 1
-          TARGET_BASE: ${{ env.TARGET_BASE }}
-        with:
-          host: ${{ steps.azlaunch.outputs.ipaddr }}
-          port: 22
-          username: azureuser
-          command_timeout: 60m
-          key_path: ${{ github.workspace }}/prvkey.key
-          envs: GITHUB_TOKEN,GITHUB_ACTOR,GITHUB_SHA,GITHUB_REF_NAME,GITHUB_RUN_ID,AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_DEFAULT_REGION,AWS_ENDPOINT_URL,PKG_VERSION,SHOULD_DEPLOY,SKIP_TESTS,DEBIAN_FRONTEND,BROWSERSLIST_IGNORE_OLD_DATA
-          script_stop: true
-          script: |
-            export DEBIAN_FRONTEND=noninteractive
-            lsb_release -a
-            sudo apt-get update
-            sudo apt-get upgrade -y
-            sudo apt-get install wget unzip curl -y
-            echo "=========================================================================="
-            echo "Installing Docker..."
-            echo "=========================================================================="
-            curl -fsSL https://get.docker.com -o get-docker.sh
-            sudo sh get-docker.sh
-            sudo docker buildx create \
-              --name container-builder \
-              --driver docker-container \
-              --bootstrap --use
-            echo "=========================================================================="
-            echo "Login to ghcr.io..."
-            echo "=========================================================================="
-            echo $GITHUB_TOKEN | sudo docker login ghcr.io -u $GITHUB_ACTOR --password-stdin
-            echo "=========================================================================="
-            echo "Installing GH CLI..."
-            echo "=========================================================================="
-            sudo mkdir -p -m 755 /etc/apt/keyrings \
-              && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
-              && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
-              && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
-              && sudo apt update \
-              && sudo apt install gh -y
-            echo "=========================================================================="
-            echo "Installing AWS CLI..."
-            echo "=========================================================================="
-            curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
-            unzip awscliv2.zip
-            sudo ./aws/install
-            echo "=========================================================================="
-            echo "Install Node.js..."
-            echo "=========================================================================="
-            curl -fsSL https://deb.nodesource.com/setup_18.x -o nodesource_setup.sh
-            sudo bash nodesource_setup.sh
-            sudo apt-get install -y nodejs
-            sudo corepack enable
-            echo "=========================================================================="
-            echo "Install Python 3.x..."
-            echo "=========================================================================="
-            sudo apt-get install python3 python3-dev -y
-            python3 --version
-            echo "=========================================================================="
-            echo "Clone project..."
-            echo "=========================================================================="
-            sudo mkdir -p /workspace
-            sudo chown azureuser /workspace
-            cd /workspace
-            gh repo clone ietf-tools/datatracker -- --depth=1 --no-tags
-            cd datatracker
-            if [ "$SKIP_TESTS" = "false" ] || [ "$GITHUB_REF_NAME" = "release" ] ; then
-              echo "=========================================================================="
-              echo "Downloading coverage..."
-              echo "=========================================================================="
-              gh run download $GITHUB_RUN_ID -n coverage
-            fi
-            echo "=========================================================================="
-            echo "Building project..."
-            echo "=========================================================================="
-            echo "PKG_VERSION: $PKG_VERSION"
-            echo "GITHUB_SHA: $GITHUB_SHA"
-            echo "GITHUB_REF_NAME: $GITHUB_REF_NAME"
-            echo "Running frontend build script..."
-            echo "Compiling native node packages..."
-            yarn rebuild
-            echo "Packaging static assets..."
-            yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/
-            yarn legacy:build
-            echo "Setting version $PKG_VERSION..."
-            sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py
-            sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py
-            sed -i -r -e "s|^__release_branch__ += '.*'$|__release_branch__ = '$GITHUB_REF_NAME'|" ietf/__init__.py
-            if [ "$SHOULD_DEPLOY" = "true" ] ; then
-              echo "=========================================================================="
-              echo "Setting production flags in settings.py..."
-              echo "=========================================================================="
-              sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" ietf/settings.py
-            fi
-            echo "=========================================================================="
-            echo "Build release tarball..."
-            echo "=========================================================================="
-            mkdir -p /workspace/release
-            tar -czf /workspace/release.tar.gz -X dev/build/exclude-patterns.txt .
-            echo "=========================================================================="
-            echo "Collecting statics..."
-            echo "=========================================================================="
-            echo "Using ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }}"
-            sudo docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }} sh dev/build/collectstatics.sh
-            echo "Pushing statics..."
-            cd static
-            aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors
-            cd ..
-            echo "=========================================================================="
-            echo "Augment dockerignore for docker image build..."
-            echo "=========================================================================="
-            cat >> .dockerignore <<EOL
-            .devcontainer
-            .github
-            .vscode
-            k8s
-            playwright
-            svn-history
-            docker-compose.yml
-            EOL
-            echo "=========================================================================="
-            echo "Building Images..."
-            echo "=========================================================================="
-            sudo docker buildx build --file dev/build/Dockerfile --platform linux/amd64,linux/arm64 --tag ghcr.io/ietf-tools/datatracker:$PKG_VERSION --push .
-
-      - name: Fetch release tarball
-        run: |
-          mkdir -p /home/runner/work/release
-          chmod 0600 ${{ github.workspace }}/prvkey.key
-          scp -i ${{ github.workspace }}/prvkey.key azureuser@${{ steps.azlaunch.outputs.ipaddr }}:/workspace/release.tar.gz /home/runner/work/release/release.tar.gz
-
-      - name: Destroy Build VM + resources
-        if: always()
-        run: |
-          echo "Terminate VM..."
-          az vm delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }} --yes --force-deletion true
-          echo "Delete Public IP..."
-          az resource delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }}PublicIP --resource-type "Microsoft.Network/publicIPAddresses"
-          echo "Delete Network Security Group..."
-          az resource delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }}NSG --resource-type "Microsoft.Network/networkSecurityGroups"
-          echo "Delete Virtual Network..."
-          az resource delete -g ghaDatatracker -n tmpGhaBuildVM-${{ github.run_number }}VNET --resource-type "Microsoft.Network/virtualNetworks"
-          echo "Logout from Azure..."
-          az logout
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v4
+        with:
+          node-version: 18.x
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+
+      - name: Download a Coverage Results
+        if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: coverage
+
+      - name: Make Release Build
+        env:
+          DEBIAN_FRONTEND: noninteractive
+          BROWSERSLIST_IGNORE_OLD_DATA: 1
+        run: |
+          echo "PKG_VERSION: $PKG_VERSION"
+          echo "GITHUB_SHA: $GITHUB_SHA"
+          echo "GITHUB_REF_NAME: $GITHUB_REF_NAME"
+          echo "Running frontend build script..."
+          echo "Compiling native node packages..."
+          yarn rebuild
+          echo "Packaging static assets..."
+          yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/
+          yarn legacy:build
+          echo "Setting version $PKG_VERSION..."
+          sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py
+          sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py
+          sed -i -r -e "s|^__release_branch__ += '.*'$|__release_branch__ = '$GITHUB_REF_NAME'|" ietf/__init__.py
+
+      - name: Set Production Flags
+        if: ${{ env.SHOULD_DEPLOY == 'true' }}
+        run: |
+          echo "Setting production flags in settings.py..."
+          sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" ietf/settings.py
+
+      - name: Make Release Tarball
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          echo "Build release tarball..."
+          mkdir -p /home/runner/work/release
+          tar -czf /home/runner/work/release/release.tar.gz -X dev/build/exclude-patterns.txt .
+
+      - name: Collect + Push Statics
+        env:
+          DEBIAN_FRONTEND: noninteractive
+          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_STATIC_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_STATIC_KEY_SECRET }}
+          AWS_DEFAULT_REGION: auto
+          AWS_ENDPOINT_URL: ${{ secrets.CF_R2_ENDPOINT }}
+        run: |
+          echo "Collecting statics..."
+          echo "Using ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }}"
+          docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:${{ env.TARGET_BASE }} sh dev/build/collectstatics.sh
+          echo "Pushing statics..."
+          cd static
+          aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors
+
+      - name: Augment dockerignore for docker image build
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          cat >> .dockerignore <<EOL
+          .devcontainer
+          .github
+          .vscode
+          k8s
+          playwright
+          svn-history
+          docker-compose.yml
+          EOL
+
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build Images
+        uses: docker/build-push-action@v6
+        env:
+          DOCKER_BUILD_SUMMARY: false
+        with:
+          context: .
+          file: dev/build/Dockerfile
+          platforms: ${{ github.event.inputs.skiparm == 'true' && 'linux/amd64' || 'linux/amd64,linux/arm64' }}
+          push: true
+          tags: ghcr.io/ietf-tools/datatracker:${{ env.PKG_VERSION }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max

      - name: Update CHANGELOG
        id: changelog
@@ -456,50 +350,34 @@ jobs:
    steps:
      - name: Notify on Slack (Success)
        if: ${{ !contains(join(needs.*.result, ','), 'failure') }}
-        uses: slackapi/slack-github-action@v1.27.0
+        uses: slackapi/slack-github-action@v2
        with:
-          channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
+          token: ${{ secrets.SLACK_GH_BOT }}
+          method: chat.postMessage
          payload: |
-            {
-              "text": "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>",
-              "attachments": [
-                {
-                  "color": "28a745",
-                  "fields": [
-                    {
-                      "title": "Status",
-                      "short": true,
-                      "value": "Completed"
-                    }
-                  ]
-                }
-              ]
-            }
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }}
+            channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
+            text: "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }}"
+            attachments:
+              - color: "28a745"
+                fields:
+                  - title: "Status"
+                    short: true
+                    value: "Completed"
      - name: Notify on Slack (Failure)
        if: ${{ contains(join(needs.*.result, ','), 'failure') }}
-        uses: slackapi/slack-github-action@v1.27.0
+        uses: slackapi/slack-github-action@v2
        with:
-          channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
+          token: ${{ secrets.SLACK_GH_BOT }}
+          method: chat.postMessage
          payload: |
-            {
-              "text": "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>",
-              "attachments": [
-                {
-                  "color": "a82929",
-                  "fields": [
-                    {
-                      "title": "Status",
-                      "short": true,
-                      "value": "Failed"
-                    }
-                  ]
-                }
-              ]
-            }
-        env:
-          SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }}
+            channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
+            text: "Datatracker Build <https://github.com/ietf-tools/datatracker/actions/runs/${{ github.run_id }}|${{ env.PKG_VERSION }}> by ${{ github.triggering_actor }}"
+            attachments:
+              - color: "a82929"
+                fields:
+                  - title: "Status"
+                    short: true
+                    value: "Failed"

  # -----------------------------------------------------------------
  # SANDBOX


@@ -38,7 +38,7 @@ jobs:
          ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts
      - name: Remote SSH into VM
-        uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1
+        uses: appleboy/ssh-action@7eaf76671a0d7eec5d98ee897acda4f968735a17
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:


@@ -73,9 +73,11 @@
          path: geckodriver.log
      - name: Upload Coverage Results to Codecov
-        uses: codecov/codecov-action@v4.6.0
+        uses: codecov/codecov-action@v5
        with:
+          disable_search: true
          files: coverage.xml
+          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Convert Coverage Results
        if: ${{ always() }}


@@ -3,11 +3,13 @@
  n-dropdown(
    :options='jumpToDayOptions'
    size='huge'
+   :show='isDropdownOpenRef'
    :show-arrow='true'
    trigger='click'
    @select='jumpToDay'
+   @clickoutside='handleCloseDropdown'
  )
-   button
+   button(@click='handleOpenDropdown')
      i.bi.bi-arrow-down-circle
  button(@click='agendaStore.$patch({ filterShown: true })')
    i.bi.bi-funnel
@@ -28,7 +30,7 @@
</template>

<script setup>
-import { computed, h } from 'vue'
+import { computed, h, ref } from 'vue'
import {
  NBadge,
  NDropdown,
@@ -61,7 +63,8 @@ function optionToLink(opts){
    {
      class: 'dropdown-link',
      'data-testid': 'mobile-link',
-      href: `#${key}`
+      href: `#${key}`,
+      onClick: () => jumpToDay(key)
    },
    [
      h(
@@ -77,6 +80,12 @@ function optionToLink(opts){
  }
}

+const isDropdownOpenRef = ref(false)
+const handleOpenDropdown = () => isDropdownOpenRef.value = true
+const handleCloseDropdown = () => isDropdownOpenRef.value = false
+
const jumpToDayOptions = computed(() => {
  const days = []
  if (agendaStore.isMeetingLive) {
@@ -124,6 +133,7 @@ function jumpToDay (dayId) {
  } else {
    document.getElementById(dayId)?.scrollIntoView(true)
  }
+  isDropdownOpenRef.value = false
}

function downloadIcs (key) {


@@ -83,6 +83,14 @@
          template(#trigger)
            span.badge.is-bof BoF
          span #[a(href='https://www.ietf.org/how/bofs/', target='_blank') Birds of a Feather] sessions (BoFs) are initial discussions about a particular topic of interest to the IETF community.
+        n-popover(
+          v-if='item.isProposed'
+          trigger='hover'
+          :width='250'
+        )
+          template(#trigger)
+            span.badge.is-proposed Proposed
+          span #[a(href='https://www.ietf.org/process/wgs/', target='_blank') Proposed WGs] are groups in the process of being chartered. If the charter is not approved by the IESG before the IETF meeting, the session may be canceled.
        .agenda-table-note(v-if='item.note')
          i.bi.bi-arrow-return-right.me-1
          span {{item.note}}
@@ -468,6 +476,7 @@ const meetingEvents = computed(() => {
        // groupParentName: item.groupParent?.name,
        icon,
        isBoF: item.isBoF,
+       isProposed: item.isProposed,
        isSessionEvent: item.type === 'regular',
        links,
        location: item.location,
@@ -1012,10 +1021,25 @@ onBeforeUnmount(() => {
    word-wrap: break-word;
  }

-.badge.is-bof {
-  background-color: $teal-500;
+.badge {
   margin: 0 8px;
+  &.is-bof {
+    background-color: $teal-500;
+    @at-root .theme-dark & {
+      background-color: $teal-700;
+    }
+  }
+  &.is-proposed {
+    background-color: $gray-500;
+    @at-root .theme-dark & {
+      background-color: $gray-700;
+    }
+  }

  @media screen and (max-width: $bs5-break-md) {
    width: 30px;
    display: block;


@@ -1,4 +1,4 @@
-FROM ghcr.io/ietf-tools/datatracker-app-base:20241029T1632
+FROM ghcr.io/ietf-tools/datatracker-app-base:20241114T1954
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"

ENV DEBIAN_FRONTEND=noninteractive


@@ -1 +1 @@
-20241029T1632
+20241114T1954


@@ -8,7 +8,7 @@
  "dependencies": {
    "dockerode": "^4.0.2",
    "fs-extra": "^11.2.0",
-    "nanoid": "5.0.7",
+    "nanoid": "5.0.8",
    "nanoid-dictionary": "5.0.0-beta.1",
    "slugify": "1.6.6",
    "tar": "^7.4.3",
@@ -546,9 +546,9 @@
      "optional": true
    },
    "node_modules/nanoid": {
-      "version": "5.0.7",
-      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz",
-      "integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ==",
+      "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
+      "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==",
      "funding": [
        {
          "type": "github",
@@ -1346,9 +1346,9 @@
      "optional": true
    },
    "nanoid": {
-      "version": "5.0.7",
-      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz",
-      "integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ=="
+      "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
+      "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ=="
    },
    "nanoid-dictionary": {
      "version": "5.0.0-beta.1",


@@ -4,7 +4,7 @@
  "dependencies": {
    "dockerode": "^4.0.2",
    "fs-extra": "^11.2.0",
-    "nanoid": "5.0.7",
+    "nanoid": "5.0.8",
    "nanoid-dictionary": "5.0.0-beta.1",
    "slugify": "1.6.6",
    "tar": "^7.4.3",


@@ -146,6 +146,7 @@ class AdminJsonSerializer(Serializer):
                            field_value = None
                        else:
                            field_value = field
+                        # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
                        if isinstance(field_value, QuerySetAny) or isinstance(field_value, list):
                            self._current[name] = dict([ (rel.pk, self.expand_related(rel, name)) for rel in field_value ])
                        else:
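
The comment added above points at django-stubs: once the stubs make `QuerySet` generic, `django_stubs_ext.QuerySetAny` is the non-generic alias meant for runtime `isinstance()` checks. A hedged sketch of the same branch in isolation; the function name and `expand` callback are illustrative, not the serializer's real API:

```python
from django_stubs_ext import QuerySetAny

def expand_related_values(field_value, expand):
    # Mirrors the serializer branch above: map pk -> expanded object for
    # queryset-like or list values, otherwise return the value unchanged.
    # QuerySetAny keeps the isinstance() check acceptable to django-stubs,
    # which types QuerySet itself as generic.
    if isinstance(field_value, QuerySetAny) or isinstance(field_value, list):
        return {rel.pk: expand(rel) for rel in field_value}
    return field_value
```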


@@ -13,10 +13,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

-virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
-if os.path.exists(virtualenv_activation):
-    execfile(virtualenv_activation, dict(__file__=virtualenv_activation))

syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)

import django


@@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

-virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
-if os.path.exists(virtualenv_activation):
-    execfile(virtualenv_activation, dict(__file__=virtualenv_activation))

syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)

import django


@@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

-virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
-if os.path.exists(virtualenv_activation):
-    execfile(virtualenv_activation, dict(__file__=virtualenv_activation))

syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)

import django


@@ -1,65 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- Python -*-
#
'''
This script merges two Person records into one. It determines which record is the target
based on most current User record (last_login) unless -f (force) option is used to
force SOURCE TARGET as specified on the command line. The order of operations is
important. We must complete all source.save() operations before moving the aliases to
the target, this is to avoid extra "Possible duplicate Person" emails going out, if the
Person is saved without an alias the Person.save() creates another one, which then
conflicts with the moved one.
'''

# Set PYTHONPATH and load environment variables for standalone script -----------------
import os, sys
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
    execfile(virtualenv_activation, dict(__file__=virtualenv_activation))

import django
django.setup()
# -------------------------------------------------------------------------------------

import argparse

from django.contrib import admin

from ietf.person.models import Person
from ietf.person.utils import (merge_persons, send_merge_notification, handle_users,
    determine_merge_order)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("source_id",type=int)
    parser.add_argument("target_id",type=int)
    parser.add_argument('-f','--force', help='force merge order',action='store_true')
    parser.add_argument('-v','--verbose', help='verbose output',action='store_true')
    args = parser.parse_args()

    source = Person.objects.get(pk=args.source_id)
    target = Person.objects.get(pk=args.target_id)

    # set merge order
    if not args.force:
        source,target = determine_merge_order(source,target)

    # confirm
    print "Merging person {}({}) to {}({})".format(source.ascii,source.pk,target.ascii,target.pk)
    print handle_users(source,target,check_only=True)
    response = raw_input('Ok to continue y/n? ')
    if response.lower() != 'y':
        sys.exit()

    # perform merge
    success, changes = merge_persons(source, target, verbose=args.verbose)

    # send email notification
    send_merge_notification(target,changes)

if __name__ == "__main__":
    main()


@@ -7,10 +7,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

-virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
-if os.path.exists(virtualenv_activation):
-    execfile(virtualenv_activation, dict(__file__=virtualenv_activation))

import django
django.setup()


@@ -3,7 +3,7 @@

from django.conf import settings
-from django.db import models
+from django.db import models, transaction
from django.db.models import signals
from django.urls import reverse as urlreverse
@@ -117,7 +117,10 @@ def notify_events(sender, instance, **kwargs):
    # start a Celery task during tests. To prevent this, don't queue a celery task if we're running
    # tests.
    if settings.SERVER_MODE != "test":
-        notify_event_to_subscribers_task.delay(event_id=instance.pk)
+        # Wrap in on_commit in case a transaction is open
+        transaction.on_commit(
+            lambda: notify_event_to_subscribers_task.delay(event_id=instance.pk)
+        )

signals.post_save.connect(notify_events)
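
`transaction.on_commit` registers the callback to run only after the surrounding transaction commits, so the Celery worker can never receive an `event_id` for a row that was rolled back or is not yet visible. A standalone sketch of the pattern; the task import is hypothetical and stands in for `notify_event_to_subscribers_task`:

```python
from django.db import transaction

def create_event_and_notify(event):
    from myapp.tasks import notify_subscribers_task  # hypothetical task

    with transaction.atomic():
        event.save()
        # Registered now, executed only after COMMIT; if the atomic block
        # rolls back, the callback is discarded and no task is queued.
        transaction.on_commit(
            lambda: notify_subscribers_task.delay(event_id=event.pk)
        )
```

When no transaction is open, Django runs the callback immediately, so the non-transactional path behaves as the old direct `.delay()` call did.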


@@ -431,8 +431,10 @@ class CommunityListTests(TestCase):
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)

+    # Mock out the on_commit call so we can tell whether the task was actually queued
+    @mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x())
    @mock.patch("ietf.community.models.notify_event_to_subscribers_task")
-    def test_notification_signal_receiver(self, mock_notify_task):
+    def test_notification_signal_receiver(self, mock_notify_task, mock_on_commit):
        """Saving a DocEvent should notify subscribers

        This implicitly tests that notify_events is hooked up to the post_save signal.
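
Django's `TestCase` wraps each test in a transaction that is rolled back, so `on_commit` callbacks never fire on their own; patching `on_commit` with `side_effect=lambda x: x()` (as above) runs the deferred call synchronously so the mock task can be asserted against. Django 3.2+ also offers a built-in alternative; a sketch, with a hypothetical factory:

```python
def test_notification_task_queued(self):
    # captureOnCommitCallbacks collects the deferred callbacks and, with
    # execute=True, runs them when the block exits, so no patching is required.
    with self.captureOnCommitCallbacks(execute=True) as callbacks:
        DocEventFactory()  # hypothetical: saves a DocEvent, triggering the signal
    self.assertEqual(len(callbacks), 1)
```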


@@ -5,6 +5,7 @@
import os
import datetime
import io

+from django.http import HttpRequest
import lxml
import bibtexparser
import mock
@@ -52,6 +53,7 @@ from ietf.doc.utils import (
    generate_idnits2_rfcs_obsoleted,
    get_doc_email_aliases,
)
+from ietf.doc.views_doc import get_diff_revisions
from ietf.group.models import Group, Role
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.ipr.factories import HolderIprDisclosureFactory
@@ -71,96 +73,163 @@ from ietf.doc.utils_search import AD_WORKLOAD
class SearchTests(TestCase):
-    def test_search(self):
+    def test_search_handles_querystring_parameters(self):
+        """Search parameters via querystring should not actually search"""
+        url = urlreverse("ietf.doc.views_search.search")
+        r = self.client.get(url + "?name=some-document-name&oldDrafts=on")
+        # Check that we got a valid response and that the warning about query string parameters is shown.
+        self.assertContains(
+            r,
+            "Searching via the URL query string is no longer supported.",
+            status_code=200,
+        )
+        # Check that the form was filled in correctly (not an exhaustive check, but different from the
+        # form defaults)
+        pq = PyQuery(r.content)
+        self.assertEqual(
+            pq("form#search_form input#id_name").attr("value"),
+            "some-document-name",
+            "The name field should be set in the SearchForm",
+        )
+        self.assertEqual(
+            pq("form#search_form input#id_olddrafts").attr("checked"),
+            "checked",
+            "The old drafts checkbox should be selected in the SearchForm",
+        )
+        self.assertIsNone(
+            pq("form#search_form input#id_rfcs").attr("checked"),
+            "The RFCs checkbox should not be selected in the SearchForm",
+        )
+        self.assertIsNone(
+            pq("form#search_form input#id_activedrafts").attr("checked"),
+            "The active drafts checkbox should not be selected in the SearchForm",
+        )

-        draft = WgDraftFactory(name='draft-ietf-mars-test',group=GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')),authors=[PersonFactory()],ad=PersonFactory())
+    def test_search(self):
+        draft = WgDraftFactory(
+            name="draft-ietf-mars-test",
+            group=GroupFactory(acronym="mars", parent=Group.objects.get(acronym="farfut")),
+            authors=[PersonFactory()],
+            ad=PersonFactory(),
+        )
        rfc = WgRfcFactory()
        draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req"))
-        old_draft = IndividualDraftFactory(name='draft-foo-mars-test',authors=[PersonFactory()],title="Optimizing Martian Network Topologies")
+        old_draft = IndividualDraftFactory(
+            name="draft-foo-mars-test",
+            authors=[PersonFactory()],
+            title="Optimizing Martian Network Topologies",
+        )
        old_draft.set_state(State.objects.get(used=True, type="draft", slug="expired"))

-        base_url = urlreverse('ietf.doc.views_search.search')
+        url = urlreverse("ietf.doc.views_search.search")

        # only show form, no search yet
-        r = self.client.get(base_url)
+        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)

        # no match
-        r = self.client.get(base_url + "?activedrafts=on&name=thisisnotadocumentname")
+        r = self.client.post(url, {"activedrafts": "on", "name": "thisisnotadocumentname"})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "No documents match")

-        r = self.client.get(base_url + "?rfcs=on&name=xyzzy")
+        r = self.client.post(url, {"rfcs": "on", "name": "xyzzy"})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "No documents match")

-        r = self.client.get(base_url + "?olddrafts=on&name=bar")
+        r = self.client.post(url, {"olddrafts": "on", "name": "bar"})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "No documents match")

-        r = self.client.get(base_url + "?olddrafts=on&name=foo")
+        r = self.client.post(url, {"olddrafts": "on", "name": "foo"})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "draft-foo-mars-test")

-        r = self.client.get(base_url + "?olddrafts=on&name=FoO") # mixed case
+        r = self.client.post(url, {"olddrafts": "on", "name": "FoO"})  # mixed case
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "draft-foo-mars-test")

        # find by RFC
-        r = self.client.get(base_url + "?rfcs=on&name=%s" % rfc.name)
+        r = self.client.post(url, {"rfcs": "on", "name": rfc.name})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, rfc.title)

        # find by active/inactive
        draft.set_state(State.objects.get(type="draft", slug="active"))
-        r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.name)
+        r = self.client.post(url, {"activedrafts": "on", "name": draft.name})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        draft.set_state(State.objects.get(type="draft", slug="expired"))
-        r = self.client.get(base_url + "?olddrafts=on&name=%s" % draft.name)
+        r = self.client.post(url, {"olddrafts": "on", "name": draft.name})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        draft.set_state(State.objects.get(type="draft", slug="active"))

        # find by title
-        r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.title.split()[0])
+        r = self.client.post(url, {"activedrafts": "on", "name": draft.title.split()[0]})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        # find by author
-        r = self.client.get(base_url + "?activedrafts=on&by=author&author=%s" % draft.documentauthor_set.first().person.name_parts()[1])
+        r = self.client.post(
+            url,
+            {
+                "activedrafts": "on",
+                "by": "author",
+                "author": draft.documentauthor_set.first().person.name_parts()[1],
+            },
+        )
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        # find by group
-        r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym)
+        r = self.client.post(
+            url,
+            {"activedrafts": "on", "by": "group", "group": draft.group.acronym},
+        )
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

-        r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym.swapcase())
+        r = self.client.post(
+            url,
+            {"activedrafts": "on", "by": "group", "group": draft.group.acronym.swapcase()},
+        )
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        # find by area
-        r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
+        r = self.client.post(
+            url,
+            {"activedrafts": "on", "by": "area", "area": draft.group.parent_id},
+        )
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        # find by area
-        r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
+        r = self.client.post(
+            url,
+            {"activedrafts": "on", "by": "area", "area": draft.group.parent_id},
+        )
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        # find by AD
-        r = self.client.get(base_url + "?activedrafts=on&by=ad&ad=%s" % draft.ad_id)
+        r = self.client.post(url, {"activedrafts": "on", "by": "ad", "ad": draft.ad_id})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)

        # find by IESG state
-        r = self.client.get(base_url + "?activedrafts=on&by=state&state=%s&substate=" % draft.get_state("draft-iesg").pk)
+        r = self.client.post(
+            url,
+            {
+                "activedrafts": "on",
+                "by": "state",
+                "state": draft.get_state("draft-iesg").pk,
+                "substate": "",
+            },
+        )
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, draft.title)
@@ -169,15 +238,15 @@ class SearchTests(TestCase):
        rfc = WgRfcFactory()
        draft.set_state(State.objects.get(type="draft", slug="rfc"))
        draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc)
-        base_url = urlreverse('ietf.doc.views_search.search')
+        url = urlreverse("ietf.doc.views_search.search")

        # find by RFC
-        r = self.client.get(base_url + f"?rfcs=on&name={rfc.name}")
+        r = self.client.post(url, {"rfcs": "on", "name": rfc.name})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, rfc.title)

        # find by draft
-        r = self.client.get(base_url + f"?activedrafts=on&rfcs=on&name={draft.name}")
+        r = self.client.post(url, {"activedrafts": "on", "rfcs": "on", "name": draft.name})
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, rfc.title)
@@ -1887,6 +1956,18 @@ class DocTestCase(TestCase):
        self.assertContains(r, notes.text)
        self.assertContains(r, rfced_note.text)

+    def test_diff_revisions(self):
+        ind_doc = IndividualDraftFactory(create_revisions=range(2))
+        wg_doc = WgDraftFactory(
+            relations=[("replaces", ind_doc)], create_revisions=range(2)
+        )
+        diff_revisions = get_diff_revisions(HttpRequest(), wg_doc.name, wg_doc)
+        self.assertEqual(len(diff_revisions), 4)
+        self.assertEqual(
+            [t[3] for t in diff_revisions],
+            [f"{n}-{v:02d}" for n in [wg_doc.name, ind_doc.name] for v in [1, 0]],
+        )
+
    def test_history(self):
        doc = IndividualDraftFactory()
@@ -2739,60 +2820,6 @@ class DocumentMeetingTests(TestCase):
        self.assertIsNone(doc.get_related_meeting(), f'{doc.type.slug} should not be related to meeting')

class ChartTests(ResourceTestCaseMixin, TestCase):
-    def test_search_chart_conf(self):
-        doc = IndividualDraftFactory()
-
-        conf_url = urlreverse('ietf.doc.views_stats.chart_conf_newrevisiondocevent')
-
-        # No qurey arguments; expect an empty json object
-        r = self.client.get(conf_url)
-        self.assertValidJSONResponse(r)
-        self.assertEqual(unicontent(r), '{}')
-
-        # No match
-        r = self.client.get(conf_url + '?activedrafts=on&name=thisisnotadocumentname')
-        self.assertValidJSONResponse(r)
-        d = r.json()
-        self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])
-
-        r = self.client.get(conf_url + '?activedrafts=on&name=%s'%doc.name[6:12])
-        self.assertValidJSONResponse(r)
-        d = r.json()
-        self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])
-        self.assertEqual(len(d['series'][0]['data']), 0)
-
-    def test_search_chart_data(self):
-        doc = IndividualDraftFactory()
-
-        data_url = urlreverse('ietf.doc.views_stats.chart_data_newrevisiondocevent')
-
-        # No qurey arguments; expect an empty json list
-        r = self.client.get(data_url)
-        self.assertValidJSONResponse(r)
-        self.assertEqual(unicontent(r), '[]')
-
-        # No match
-        r = self.client.get(data_url + '?activedrafts=on&name=thisisnotadocumentname')
-        self.assertValidJSONResponse(r)
-        d = r.json()
-        self.assertEqual(unicontent(r), '[]')
-
-        r = self.client.get(data_url + '?activedrafts=on&name=%s'%doc.name[6:12])
-        self.assertValidJSONResponse(r)
-        d = r.json()
-        self.assertEqual(len(d), 1)
-        self.assertEqual(len(d[0]), 2)
-
-    def test_search_chart(self):
-        doc = IndividualDraftFactory()
-
-        chart_url = urlreverse('ietf.doc.views_stats.chart_newrevisiondocevent')
-        r = self.client.get(chart_url)
-        self.assertEqual(r.status_code, 200)
-
-        r = self.client.get(chart_url + '?activedrafts=on&name=%s'%doc.name[6:12])
-        self.assertEqual(r.status_code, 200)
-
    def test_personal_chart(self):
        person = PersonFactory.create()
        IndividualDraftFactory.create(


@@ -92,10 +92,8 @@ class EditAuthorsTests(IetfSeleniumTestCase):
        self.assertEqual(len(author_forms), 1)

-        # get the "add author" button so we can add blank author forms
-        add_author_button = self.driver.find_element(By.ID, 'add-author-button')
        for index, auth in enumerate(authors):
-            self.scroll_to_element(add_author_button)  # Can only click if it's in view!
-            add_author_button.click()  # Create a new form. Automatically scrolls to it.
+            self.scroll_and_click((By.ID, 'add-author-button'))  # Create new form. Automatically scrolls to it.
            author_forms = authors_list.find_elements(By.CLASS_NAME, 'author-panel')
            authors_added = index + 1
            self.assertEqual(len(author_forms), authors_added + 1)  # Started with 1 author, hence +1


@@ -68,10 +68,6 @@ urlpatterns = [
    ),
    url(r'^investigate/?$', views_doc.investigate),
-    url(r'^stats/newrevisiondocevent/?$', views_stats.chart_newrevisiondocevent),
-    url(r'^stats/newrevisiondocevent/conf/?$', views_stats.chart_conf_newrevisiondocevent),
-    url(r'^stats/newrevisiondocevent/data/?$', views_stats.chart_data_newrevisiondocevent),
    url(r'^stats/person/(?P<id>[0-9]+)/drafts/conf/?$', views_stats.chart_conf_person_drafts),
    url(r'^stats/person/(?P<id>[0-9]+)/drafts/data/?$', views_stats.chart_data_person_drafts),


@@ -3,9 +3,7 @@

import datetime
-import hashlib
import io
-import json
import math
import os
import re
@@ -348,6 +346,7 @@ def augment_events_with_revision(doc, events):
    """Take a set of events for doc and add a .rev attribute with the
    revision they refer to by checking NewRevisionDocEvents."""

+    # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
    if isinstance(events, QuerySetAny):
        qs = events.filter(newrevisiondocevent__isnull=False)
    else:
@@ -1047,14 +1046,6 @@ def make_rev_history(doc):

    return sorted(history, key=lambda x: x['published'])

-def get_search_cache_key(params):
-    from ietf.doc.views_search import SearchForm
-    fields = set(SearchForm.base_fields) - set(['sort',])
-    kwargs = dict([ (k,v) for (k,v) in list(params.items()) if k in fields ])
-    key = "doc:document:search:" + hashlib.sha512(json.dumps(kwargs, sort_keys=True).encode('utf-8')).hexdigest()
-    return key

def build_file_urls(doc: Union[Document, DocHistory]):
    if doc.type_id == "rfc":
        base_path = os.path.join(settings.RFC_PATH, doc.name + ".")


@@ -1133,10 +1133,10 @@ def get_diff_revisions(request, name, doc):

    diff_documents = [doc]
    diff_documents.extend(
-        Document.objects.filter(
-            relateddocument__source=doc,
-            relateddocument__relationship="replaces",
-        )
+        [
+            r.target
+            for r in RelatedDocument.objects.filter(source=doc, relationship="replaces")
+        ]
    )

    if doc.came_from_draft():
        diff_documents.append(doc.came_from_draft())


@@ -37,6 +37,8 @@

import re
import datetime
import copy
+import hashlib
+import json
import operator

from collections import defaultdict
@@ -44,16 +46,17 @@ from functools import reduce

from django import forms
from django.conf import settings
+from django.contrib import messages
from django.core.cache import cache, caches
from django.urls import reverse as urlreverse
from django.db.models import Q
-from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect, QueryDict
+from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils import timezone
from django.utils.html import strip_tags
from django.utils.cache import _generate_cache_key  # type: ignore
from django.utils.text import slugify
+from django_stubs_ext import QuerySetAny

import debug  # pyflakes:ignore

@@ -62,7 +65,7 @@ from ietf.doc.models import ( Document, DocHistory, State,
    IESG_BALLOT_ACTIVE_STATES, IESG_STATCHG_CONFLREV_ACTIVE_STATES,
    IESG_CHARTER_ACTIVE_STATES )
from ietf.doc.fields import select2_id_doc_name_json
-from ietf.doc.utils import get_search_cache_key, augment_events_with_revision, needed_ballot_positions
+from ietf.doc.utils import augment_events_with_revision, needed_ballot_positions
from ietf.group.models import Group
from ietf.idindex.index import active_drafts_index_by_group
from ietf.name.models import DocTagName, DocTypeName, StreamName
@@ -145,6 +148,29 @@ class SearchForm(forms.Form):
            q['irtfstate'] = None
        return q

+    def cache_key_fragment(self):
+        """Hash a bound form to get a value for use in a cache key
+
+        Raises a ValueError if the form is not valid.
+        """
+        def _serialize_value(val):
+            # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
+            if isinstance(val, QuerySetAny):
+                return [item.pk for item in val]
+            else:
+                return getattr(val, "pk", val)  # use pk if present, else value
+
+        if not self.is_valid():
+            raise ValueError(f"SearchForm invalid: {self.errors}")
+        contents = {
+            field_name: _serialize_value(field_value)
+            for field_name, field_value in self.cleaned_data.items()
+            if field_name != "sort" and field_value is not None
+        }
+        contents_json = json.dumps(contents, sort_keys=True)
+        return hashlib.sha512(contents_json.encode("utf-8")).hexdigest()

def retrieve_search_results(form, all_types=False):
    """Takes a validated SearchForm and return the results."""
@ -256,45 +282,64 @@ def retrieve_search_results(form, all_types=False):
return docs return docs
 def search(request):
-    if request.GET:
-        # backwards compatibility
-        get_params = request.GET.copy()
-        if 'activeDrafts' in request.GET:
-            get_params['activedrafts'] = request.GET['activeDrafts']
-        if 'oldDrafts' in request.GET:
-            get_params['olddrafts'] = request.GET['oldDrafts']
-        if 'subState' in request.GET:
-            get_params['substate'] = request.GET['subState']
-
-        form = SearchForm(get_params)
-        if not form.is_valid():
-            return HttpResponseBadRequest("form not valid: %s" % form.errors)
-
-        cache_key = get_search_cache_key(get_params)
-        cached_val = cache.get(cache_key)
-        if cached_val:
-            [results, meta] = cached_val
-        else:
-            results = retrieve_search_results(form)
-            results, meta = prepare_document_table(request, results, get_params)
-            cache.set(cache_key, [results, meta])  # for settings.CACHE_MIDDLEWARE_SECONDS
-            log(f"Search results computed for {get_params}")
-        meta['searching'] = True
+    """Search for a draft"""
+    # defaults for results / meta
+    results = []
+    meta = {"by": None, "searching": False}
+
+    if request.method == "POST":
+        form = SearchForm(data=request.POST)
+        if form.is_valid():
+            cache_key = f"doc:document:search:{form.cache_key_fragment()}"
+            cached_val = cache.get(cache_key)
+            if cached_val:
+                [results, meta] = cached_val
+            else:
+                results = retrieve_search_results(form)
+                results, meta = prepare_document_table(
+                    request, results, form.cleaned_data
+                )
+                cache.set(
+                    cache_key, [results, meta]
+                )  # for settings.CACHE_MIDDLEWARE_SECONDS
+                log(f"Search results computed for {form.cleaned_data}")
+            meta["searching"] = True
     else:
-        form = SearchForm()
-        results = []
-        meta = { 'by': None, 'searching': False }
-        get_params = QueryDict('')
+        if request.GET:
+            # backwards compatibility - fill in the form
+            get_params = request.GET.copy()
+            if "activeDrafts" in request.GET:
+                get_params["activedrafts"] = request.GET["activeDrafts"]
+            if "oldDrafts" in request.GET:
+                get_params["olddrafts"] = request.GET["oldDrafts"]
+            if "subState" in request.GET:
+                get_params["substate"] = request.GET["subState"]
+            form = SearchForm(data=get_params)
+            messages.error(
+                request,
+                (
+                    "Searching via the URL query string is no longer supported. "
+                    "The form below has been filled in with the parameters from your request. "
+                    'To execute your search, please click "Search".'
+                ),
+            )
+        else:
+            form = SearchForm()
 
-    return render(request, 'doc/search/search.html', {
-        'form':form, 'docs':results, 'meta':meta, 'queryargs':get_params.urlencode() },
-    )
+    return render(
+        request,
+        "doc/search/search.html",
+        context={"form": form, "docs": results, "meta": meta},
+    )
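
In practice the behavioral change looks like the following sketch, using Django's test client (the path is assumed from the URL pattern name above):

    from django.test import Client
    c = Client()
    # GET with legacy query parameters no longer runs the search; it only
    # pre-fills the form and attaches a messages.error() notice.
    c.get("/doc/search/?activeDrafts=on&name=draft-foo")
    # Executing a search is now an explicit POST of the form fields.
    c.post("/doc/search/", {"name": "draft-foo", "activedrafts": "on"})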
def frontpage(request):
    form = SearchForm()
    return render(request, 'doc/frontpage.html', {'form':form})

def search_for_name(request, name):
    def find_unique(n):
        exact = Document.objects.filter(name__iexact=n).first()

View file
@@ -4,20 +4,15 @@ import copy
 import datetime

 from django.conf import settings
-from django.core.cache import cache
-from django.urls import reverse as urlreverse
 from django.db.models.aggregates import Count
 from django.db.models.functions import TruncDate
-from django.http import JsonResponse, HttpResponseBadRequest
+from django.http import JsonResponse
-from django.shortcuts import render
 from django.views.decorators.cache import cache_page

 import debug  # pyflakes:ignore

 from ietf.doc.models import DocEvent
 from ietf.doc.templatetags.ietf_filters import comma_separated_list
-from ietf.doc.utils import get_search_cache_key
-from ietf.doc.views_search import SearchForm, retrieve_search_results
 from ietf.name.models import DocTypeName
 from ietf.person.models import Person
 from ietf.utils.timezone import date_today

@@ -113,49 +108,6 @@ def make_title(queryargs):
         title += ' with name matching "%s"' % name
     return title
-def chart_newrevisiondocevent(request):
-    return render(request, "doc/stats/highstock.html", {
-        "title": "Document Statistics",
-        "confurl": urlreverse("ietf.doc.views_stats.chart_conf_newrevisiondocevent"),
-        "dataurl": urlreverse("ietf.doc.views_stats.chart_data_newrevisiondocevent"),
-        "queryargs": request.GET.urlencode(),
-        }
-    )
-
-#@cache_page(60*15)
-def chart_data_newrevisiondocevent(request):
-    queryargs = request.GET
-    if queryargs:
-        cache_key = get_search_cache_key(queryargs)
-        results = cache.get(cache_key)
-        if not results:
-            form = SearchForm(queryargs)
-            if not form.is_valid():
-                return HttpResponseBadRequest("form not valid: %s" % form.errors)
-            results = retrieve_search_results(form)
-            if results.exists():
-                cache.set(cache_key, results)
-        if results.exists():
-            data = model_to_timeline_data(DocEvent, doc__in=results, type='new_revision')
-        else:
-            data = []
-    else:
-        data = []
-    return JsonResponse(data, safe=False)
-
-@cache_page(60*15)
-def chart_conf_newrevisiondocevent(request):
-    queryargs = request.GET
-    if queryargs:
-        conf = copy.deepcopy(settings.CHART_TYPE_COLUMN_OPTIONS)
-        conf['title']['text'] = make_title(queryargs)
-        conf['series'][0]['name'] = "Submitted %s" % get_doctypes(queryargs, pluralize=True).lower(),
-    else:
-        conf = {}
-    return JsonResponse(conf)

 @cache_page(60*15)
 def chart_conf_person_drafts(request, id):
     person = Person.objects.filter(id=id).first()

View file
@@ -65,6 +65,53 @@ class StreamTests(TestCase):
        self.assertTrue(Role.objects.filter(name="delegate", group__acronym=stream_acronym, email__address="ad2@ietf.org"))

class GroupLeadershipTests(TestCase):
    def test_leadership_wg(self):
        # setup various group states
        bof_role = RoleFactory(
            group__type_id="wg", group__state_id="bof", name_id="chair"
        )
        proposed_role = RoleFactory(
            group__type_id="wg", group__state_id="proposed", name_id="chair"
        )
        active_role = RoleFactory(
            group__type_id="wg", group__state_id="active", name_id="chair"
        )
        conclude_role = RoleFactory(
            group__type_id="wg", group__state_id="conclude", name_id="chair"
        )
        url = urlreverse(
            "ietf.group.views.group_leadership", kwargs={"group_type": "wg"}
        )
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "Group Leadership")
        self.assertContains(r, bof_role.person.last_name())
        self.assertContains(r, proposed_role.person.last_name())
        self.assertContains(r, active_role.person.last_name())
        self.assertNotContains(r, conclude_role.person.last_name())

    def test_leadership_wg_csv(self):
        url = urlreverse(
            "ietf.group.views.group_leadership_csv", kwargs={"group_type": "wg"}
        )
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r["Content-Type"], "text/csv")
        self.assertContains(r, "Chairman, Sops")

    def test_leadership_rg(self):
        role = RoleFactory(group__type_id="rg", name_id="chair")
        url = urlreverse(
            "ietf.group.views.group_leadership", kwargs={"group_type": "rg"}
        )
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "Group Leadership")
        self.assertContains(r, role.person.last_name())
        self.assertNotContains(r, "Chairman, Sops")

class GroupStatsTests(TestCase):
    def setUp(self):
        super().setUp()

View file
@@ -1431,7 +1431,7 @@ class MilestoneTests(TestCase):
         RoleFactory(group=group,name_id='chair',person=PersonFactory(user__username='marschairman'))
         draft = WgDraftFactory(group=group)

-        m1 = GroupMilestone.objects.create(id=1,
+        m1 = GroupMilestone.objects.create(
                                            group=group,
                                            desc="Test 1",
                                            due=date_today(DEADLINE_TZINFO),
@@ -1439,7 +1439,7 @@ class MilestoneTests(TestCase):
                                            state_id="active")
         m1.docs.set([draft])

-        m2 = GroupMilestone.objects.create(id=2,
+        m2 = GroupMilestone.objects.create(
                                            group=group,
                                            desc="Test 2",
                                            due=date_today(DEADLINE_TZINFO),
@@ -1580,13 +1580,14 @@ class MilestoneTests(TestCase):
         events_before = group.groupevent_set.count()

         # add
-        r = self.client.post(url, { 'prefix': "m1",
-                                    'm1-id': m1.id,
-                                    'm1-desc': m1.desc,
-                                    'm1-due': m1.due.strftime("%B %Y"),
-                                    'm1-resolved': m1.resolved,
-                                    'm1-docs': pklist(m1.docs),
-                                    'm1-review': "accept",
+        mstr = f"m{m1.id}"
+        r = self.client.post(url, { 'prefix': mstr,
+                                    f'{mstr}-id': m1.id,
+                                    f'{mstr}-desc': m1.desc,
+                                    f'{mstr}-due': m1.due.strftime("%B %Y"),
+                                    f'{mstr}-resolved': m1.resolved,
+                                    f'{mstr}-docs': pklist(m1.docs),
+                                    f'{mstr}-review': "accept",
                                     'action': "save",
                                     })
         self.assertEqual(r.status_code, 302)
@@ -1606,13 +1607,14 @@ class MilestoneTests(TestCase):
         events_before = group.groupevent_set.count()

         # delete
-        r = self.client.post(url, { 'prefix': "m1",
-                                    'm1-id': m1.id,
-                                    'm1-desc': m1.desc,
-                                    'm1-due': m1.due.strftime("%B %Y"),
-                                    'm1-resolved': "",
-                                    'm1-docs': pklist(m1.docs),
-                                    'm1-delete': "checked",
+        mstr = f"m{m1.id}"
+        r = self.client.post(url, { 'prefix': mstr,
+                                    f'{mstr}-id': m1.id,
+                                    f'{mstr}-desc': m1.desc,
+                                    f'{mstr}-due': m1.due.strftime("%B %Y"),
+                                    f'{mstr}-resolved': "",
+                                    f'{mstr}-docs': pklist(m1.docs),
+                                    f'{mstr}-delete': "checked",
                                     'action': "save",
                                     })
         self.assertEqual(r.status_code, 302)
@@ -1635,13 +1637,14 @@ class MilestoneTests(TestCase):
         due = self.last_day_of_month(date_today(DEADLINE_TZINFO) + datetime.timedelta(days=365))

+        mstr = f"m{m1.id}"
         # faulty post
-        r = self.client.post(url, { 'prefix': "m1",
-                                    'm1-id': m1.id,
-                                    'm1-desc': "",  # no description
-                                    'm1-due': due.strftime("%B %Y"),
-                                    'm1-resolved': "",
-                                    'm1-docs': doc_pks,
+        r = self.client.post(url, { 'prefix': mstr,
+                                    f'{mstr}-id': m1.id,
+                                    f'{mstr}-desc': "",  # no description
+                                    f'{mstr}-due': due.strftime("%B %Y"),
+                                    f'{mstr}-resolved': "",
+                                    f'{mstr}-docs': doc_pks,
                                     'action': "save",
                                     })
         self.assertEqual(r.status_code, 200)
@@ -1653,13 +1656,13 @@ class MilestoneTests(TestCase):
         # edit
         mailbox_before = len(outbox)

-        r = self.client.post(url, { 'prefix': "m1",
-                                    'm1-id': m1.id,
-                                    'm1-desc': "Test 2 - changed",
-                                    'm1-due': due.strftime("%B %Y"),
-                                    'm1-resolved': "Done",
-                                    'm1-resolved_checkbox': "checked",
-                                    'm1-docs': doc_pks,
+        r = self.client.post(url, { 'prefix': mstr,
+                                    f'{mstr}-id': m1.id,
+                                    f'{mstr}-desc': "Test 2 - changed",
+                                    f'{mstr}-due': due.strftime("%B %Y"),
+                                    f'{mstr}-resolved': "Done",
+                                    f'{mstr}-resolved_checkbox': "checked",
+                                    f'{mstr}-docs': doc_pks,
                                     'action': "save",
                                     })
         self.assertEqual(r.status_code, 302)
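
The reworked POSTs rely on Django's form-prefix convention: a form constructed with prefix p names its fields "p-<field>", so the keys must track the milestone's real pk instead of the hard-coded "m1" that only matched pk 1. A minimal sketch (MilestoneForm is hypothetical):

    form = MilestoneForm(prefix=f"m{m1.id}")            # hypothetical form class
    assert form.add_prefix("desc") == f"m{m1.id}-desc"  # Django's naming rule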
View file
@@ -58,6 +58,8 @@ info_detail_urls = [
 group_urls = [
     url(r'^$', views.active_groups),
+    url(r'^leadership/(?P<group_type>(wg|rg))/$', views.group_leadership),
+    url(r'^leadership/(?P<group_type>(wg|rg))/csv/$', views.group_leadership_csv),
     url(r'^groupstats.json', views.group_stats_data, None, 'ietf.group.views.group_stats_data'),
     url(r'^groupmenu.json', views.group_menu_data, None, 'ietf.group.views.group_menu_data'),
     url(r'^chartering/$', views.chartering_groups),

View file
@@ -35,6 +35,7 @@
 import copy
+import csv
 import datetime
 import itertools
 import math

@@ -437,6 +438,48 @@ def prepare_group_documents(request, group, clist):
     return docs, meta, docs_related, meta_related
def get_leadership(group_type):
    people = Person.objects.filter(
        role__name__slug="chair",
        role__group__type=group_type,
        role__group__state__slug__in=("active", "bof", "proposed"),
    ).distinct()
    leaders = []
    for person in people:
        parts = person.name_parts()
        groups = [
            r.group.acronym
            for r in person.role_set.filter(
                name__slug="chair",
                group__type=group_type,
                group__state__slug__in=("active", "bof", "proposed"),
            )
        ]
        entry = {"name": "%s, %s" % (parts[3], parts[1]), "groups": ", ".join(groups)}
        leaders.append(entry)
    return sorted(leaders, key=lambda a: a["name"])


def group_leadership(request, group_type=None):
    context = {}
    context["leaders"] = get_leadership(group_type)
    context["group_type"] = group_type
    return render(request, "group/group_leadership.html", context)


def group_leadership_csv(request, group_type=None):
    leaders = get_leadership(group_type)
    response = HttpResponse(content_type="text/csv")
    response["Content-Disposition"] = (
        f'attachment; filename="group_leadership_{group_type}.csv"'
    )
    writer = csv.writer(response, dialect=csv.excel, delimiter=str(","))
    writer.writerow(["Name", "Groups"])
    for leader in leaders:
        writer.writerow([leader["name"], leader["groups"]])
    return response
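
For reference, the CSV body this view writes looks like the following sketch (names hypothetical; csv.writer quotes any field containing a comma, such as the "Last, First" names built above):

    Name,Groups
    "Chairman, Sops",sops
    "Smith, Jane","foo, bar"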
def group_home(request, acronym, group_type=None):
    group = get_group_or_404(acronym, group_type)
    kwargs = dict(acronym=group.acronym)

View file
@@ -203,6 +203,7 @@ class SearchLiaisonForm(forms.Form):
 class CustomModelMultipleChoiceField(ModelMultipleChoiceField):
     '''If value is a QuerySet, return it as is (for use in widget.render)'''
     def prepare_value(self, value):
+        # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
         if isinstance(value, QuerySetAny):
             return value
         if (hasattr(value, '__iter__') and
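
A note on the recurring QuerySetAny comments: the alias is a runtime-checkable stand-in for QuerySet, presumably imported as the django-stubs documentation suggests (the import source is an assumption, not shown in this diff):

    from django_stubs_ext import QuerySetAny  # assumption: source of the alias
    # isinstance(value, QuerySetAny) works where isinstance(value, QuerySet)
    # confuses the django-stubs type checker before 5.0.1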
View file
@@ -35,6 +35,7 @@ class ShowAttachmentsWidget(Widget):
         html = '<div id="id_%s">' % name
         html += '<span class="d-none showAttachmentsEmpty form-control widget">No files attached</span>'
         html += '<div class="attachedFiles form-control widget">'
+        # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1
         if value and isinstance(value, QuerySetAny):
             for attachment in value:
                 html += '<a class="initialAttach" href="%s">%s</a>&nbsp' % (conditional_escape(attachment.document.get_href()), conditional_escape(attachment.document.title))

View file
@@ -249,7 +249,9 @@ class EditMeetingScheduleTests(IetfSeleniumTestCase):
         self.assertTrue(s1_element.is_displayed())  # should still be displayed
         self.assertIn('hidden-parent', s1_element.get_attribute('class'),
                       'Session should be hidden when parent disabled')
-        s1_element.click()  # try to select
+
+        self.scroll_and_click((By.CSS_SELECTOR, '#session{}'.format(s1.pk)))  # try to select
+
         self.assertNotIn('selected', s1_element.get_attribute('class'),
                          'Session should not be selectable when parent disabled')
@@ -299,9 +301,9 @@ class EditMeetingScheduleTests(IetfSeleniumTestCase):
                          'Session s1 should have moved to second meeting day')

         # swap timeslot column - put session in a differently-timed timeslot
-        self.driver.find_element(By.CSS_SELECTOR,
-                                 '.day .swap-timeslot-col[data-timeslot-pk="{}"]'.format(slot1b.pk)
-                                 ).click()  # open modal on the second timeslot for room1
+        self.scroll_and_click((By.CSS_SELECTOR,
+                               '.day .swap-timeslot-col[data-timeslot-pk="{}"]'.format(slot1b.pk)
+                               ))  # open modal on the second timeslot for room1
         self.assertTrue(self.driver.find_element(By.CSS_SELECTOR, "#swap-timeslot-col-modal").is_displayed())
         self.driver.find_element(By.CSS_SELECTOR,
                                  '#swap-timeslot-col-modal input[name="target_timeslot"][value="{}"]'.format(slot4.pk)
@@ -1373,13 +1375,8 @@ class InterimTests(IetfSeleniumTestCase):
         self.assertFalse(modal_div.is_displayed())

         # Click the 'materials' button
-        open_modal_button = self.wait.until(
-            expected_conditions.element_to_be_clickable(
-                (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug)
-            ),
-            'Modal open button not found or not clickable',
-        )
-        open_modal_button.click()
+        open_modal_button_locator = (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug)
+        self.scroll_and_click(open_modal_button_locator)
         self.wait.until(
             expected_conditions.visibility_of(modal_div),
             'Modal did not become visible after clicking open button',

View file
@@ -407,6 +407,40 @@ class MeetingTests(BaseMeetingTestCase):
        r = self.client.get(urlreverse('floor-plan', kwargs=dict(num=meeting.number)))
        self.assertEqual(r.status_code, 200)

    def test_session_recordings_via_factories(self):
        session = SessionFactory(meeting__type_id="ietf", meeting__date=date_today()-datetime.timedelta(days=180))
        self.assertEqual(session.meetecho_recording_name, "")
        self.assertEqual(len(session.recordings()), 0)
        url = urlreverse("ietf.meeting.views.session_details", kwargs=dict(num=session.meeting.number, acronym=session.group.acronym))
        r = self.client.get(url)
        q = PyQuery(r.content)
        # debug.show("q(f'#notes_and_recordings_{session.pk}')")
        self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1)
        link = q(f"#notes_and_recordings_{session.pk} tr a")
        self.assertEqual(len(link), 1)
        self.assertEqual(link[0].attrib['href'], str(session.session_recording_url()))

        session.meetecho_recording_name = 'my_test_session_name'
        session.save()
        r = self.client.get(url)
        q = PyQuery(r.content)
        self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 1)
        links = q(f"#notes_and_recordings_{session.pk} tr a")
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].attrib['href'], session.session_recording_url())

        new_recording_url = "https://www.youtube.com/watch?v=jNQXAC9IVRw"
        new_recording_title = "Me at the zoo"
        create_recording(session, new_recording_url, new_recording_title)
        r = self.client.get(url)
        q = PyQuery(r.content)
        self.assertEqual(len(q(f"#notes_and_recordings_{session.pk} tr")), 2)
        links = q(f"#notes_and_recordings_{session.pk} tr a")
        self.assertEqual(len(links), 2)
        self.assertEqual(links[0].attrib['href'], new_recording_url)
        self.assertIn(new_recording_title, links[0].text_content())
        #debug.show("q(f'#notes_and_recordings_{session_pk}')")

    def test_agenda_ical_next_meeting_type(self):
        # start with no upcoming IETF meetings, just an interim
        MeetingFactory(

View file
@@ -1786,6 +1786,7 @@ def agenda_extract_schedule (item):
         "type": item.session.type.slug,
         "purpose": item.session.purpose.slug,
         "isBoF": item.session.group_at_the_time().state_id == "bof",
+        "isProposed": item.session.group_at_the_time().state_id == "proposed",
         "filterKeywords": item.filter_keywords,
         "groupAcronym": item.session.group_at_the_time().acronym,
         "groupName": item.session.group_at_the_time().name,
@@ -4104,6 +4105,7 @@ def organize_proceedings_sessions(sessions):
             'minutes': _format_materials((s, s.minutes()) for s in ss),
             'bluesheets': _format_materials((s, s.bluesheets()) for s in ss),
             'recordings': _format_materials((s, s.recordings()) for s in ss),
+            'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss),
             'chatlogs': _format_materials((s, s.chatlogs()) for s in ss),
             'slides': _format_materials((s, s.slides()) for s in ss),
             'drafts': _format_materials((s, s.drafts()) for s in ss),

View file
@@ -598,6 +598,7 @@ TEST_CODE_COVERAGE_EXCLUDE_FILES = [
     "ietf/review/import_from_review_tool.py",
     "ietf/utils/patch.py",
     "ietf/utils/test_data.py",
+    "ietf/utils/jstest.py",
 ]

 # These are code line regex patterns
@@ -743,8 +744,6 @@ IANA_SYNC_PASSWORD = "secret"
 IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes"
 IANA_SYNC_PROTOCOLS_URL = "https://www.iana.org/protocols/"

-RFC_TEXT_RSYNC_SOURCE="ftp.rfc-editor.org::rfcs-text-only"
 RFC_EDITOR_SYNC_PASSWORD="secret"
 RFC_EDITOR_SYNC_NOTIFICATION_URL = "https://www.rfc-editor.org/parser/parser.php"
 RFC_EDITOR_GROUP_NOTIFICATION_EMAIL = "webmaster@rfc-editor.org"
@@ -971,7 +970,6 @@ OIDC_EXTRA_SCOPE_CLAIMS = 'ietf.ietfauth.utils.OidcExtraScopeClaims'
 # ==============================================================================

-RSYNC_BINARY = '/usr/bin/rsync'
 YANGLINT_BINARY = '/usr/bin/yanglint'
 DE_GFM_BINARY = '/usr/bin/de-gfm.ruby2.5'
@@ -1013,7 +1011,6 @@ CHAT_URL_PATTERN = 'https://zulip.ietf.org/#narrow/stream/{chat_room_name}'
 # CHAT_ARCHIVE_URL_PATTERN = 'https://www.ietf.org/jabber/logs/{chat_room_name}?C=M;O=D'

 PYFLAKES_DEFAULT_ARGS= ["ietf", ]
-VULTURE_DEFAULT_ARGS= ["ietf", ]

 # Automatic Scheduling
 #
@@ -1061,8 +1058,6 @@ GROUP_ALIAS_DOMAIN = IETF_DOMAIN

 TEST_DATA_DIR = os.path.abspath(BASE_DIR + "/../test/data")

-POSTCONFIRM_PATH = "/a/postconfirm/wrapper"
-
 USER_PREFERENCE_DEFAULTS = {
     "expires_soon" : "14",
     "new_enough" : "14",
@@ -1077,6 +1072,7 @@ EXCLUDED_PERSONAL_EMAIL_REGEX_PATTERNS = [
     "@ietf.org$",
 ]

+# Configuration for django-markup
 MARKUP_SETTINGS = {
     'restructuredtext': {
         'settings_overrides': {
@@ -1090,8 +1086,6 @@ MARKUP_SETTINGS = {
     }
 }

-MAILMAN_LIB_DIR = '/usr/lib/mailman'
-
 # This is the number of seconds required between subscribing to an ietf
 # mailing list and datatracker account creation being accepted
 LIST_ACCOUNT_DELAY = 60*60*25  # 25 hours

View file
@@ -8,7 +8,6 @@ from django.db.models import ForeignKey
 import debug  # pyflakes:ignore

 class Status(models.Model):
-    name = 'Status'
     date = models.DateTimeField(default=timezone.now)
     slug = models.SlugField(blank=False, null=False, unique=True)

View file
@@ -2334,8 +2334,8 @@ class ApprovalsTestCase(BaseSubmitTestCase):
         self.assertEqual(len(Preapproval.objects.filter(name=preapproval.name)), 0)

-# Transaction.on_commit() requires use of TransactionTestCase, but that has a performance penalty. Replace it
-# with a no-op for testing purposes.
+# Transaction.on_commit() interacts badly with TestCase's transaction behavior. Replace it
+# with a pass-through for testing purposes.
 @mock.patch.object(transaction, 'on_commit', lambda x: x())
 @override_settings(IDTRACKER_BASE_URL='https://datatracker.example.com')
 class ApiSubmissionTests(BaseSubmitTestCase):

View file
@@ -90,7 +90,8 @@ def upload_submission(request):
             clear_existing_files(form)
             save_files(form)
             create_submission_event(request, submission, desc="Uploaded submission")
-            # Wrap in on_commit so the delayed task cannot start until the view is done with the DB
+            # Wrap in on_commit in case a transaction is open
+            # (As of 2024-11-08, this only runs in a transaction during tests)
             transaction.on_commit(
                 lambda: process_uploaded_submission_task.delay(submission.pk)
             )
@@ -166,7 +167,8 @@ def api_submission(request):
             save_files(form)
             create_submission_event(request, submission, desc="Uploaded submission through API")
-            # Wrap in on_commit so the delayed task cannot start until the view is done with the DB
+            # Wrap in on_commit in case a transaction is open
+            # (As of 2024-11-08, this only runs in a transaction during tests)
             transaction.on_commit(
                 lambda: process_and_accept_uploaded_submission_task.delay(submission.pk)
             )
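
For background, a minimal sketch of the on_commit pattern these changes standardize on (Thing and process_thing_task are hypothetical):

    from django.db import transaction

    def create_thing(request):
        thing = Thing.objects.create(name="example")   # hypothetical model
        # queued only after the surrounding transaction (if any) commits;
        # with no transaction open, Django runs the callback immediately
        transaction.on_commit(lambda: process_thing_task.delay(thing.pk))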
View file
@@ -2,12 +2,12 @@
 # -*- coding: utf-8 -*-

 import datetime
-import os
 import json

 from django.conf import settings
 from django.contrib.auth.models import User
 from django.contrib.contenttypes.models import ContentType
+from django.db import transaction
 from django.http import HttpResponse, HttpResponseRedirect, Http404
 from django.shortcuts import render
 from django.utils import timezone
@@ -22,8 +22,6 @@ from ietf.utils.log import log
 from ietf.utils.response import permission_denied

-SYNC_BIN_PATH = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../bin"))

 #@role_required('Secretariat', 'IANA', 'RFC Editor')
 def discrepancies(request):
     sections = find_discrepancies()
@@ -79,16 +77,32 @@ def notify(request, org, notification):
     if request.method == "POST":
         if notification == "index":
             log("Queuing RFC Editor index sync from notify view POST")
-            tasks.rfc_editor_index_update_task.delay()
+            # Wrap in on_commit in case a transaction is open
+            # (As of 2024-11-08, this only runs in a transaction during tests)
+            transaction.on_commit(
+                lambda: tasks.rfc_editor_index_update_task.delay()
+            )
         elif notification == "queue":
             log("Queuing RFC Editor queue sync from notify view POST")
-            tasks.rfc_editor_queue_updates_task.delay()
+            # Wrap in on_commit in case a transaction is open
+            # (As of 2024-11-08, this only runs in a transaction during tests)
+            transaction.on_commit(
+                lambda: tasks.rfc_editor_queue_updates_task.delay()
+            )
         elif notification == "changes":
             log("Queuing IANA changes sync from notify view POST")
-            tasks.iana_changes_update_task.delay()
+            # Wrap in on_commit in case a transaction is open
+            # (As of 2024-11-08, this only runs in a transaction during tests)
+            transaction.on_commit(
+                lambda: tasks.iana_changes_update_task.delay()
+            )
         elif notification == "protocols":
             log("Queuing IANA protocols sync from notify view POST")
-            tasks.iana_protocols_update_task.delay()
+            # Wrap in on_commit in case a transaction is open
+            # (As of 2024-11-08, this only runs in a transaction during tests)
+            transaction.on_commit(
+                lambda: tasks.iana_protocols_update_task.delay()
+            )
     return HttpResponse("OK", content_type="text/plain; charset=%s" % settings.DEFAULT_CHARSET)
View file
@@ -4,8 +4,10 @@
 {% load widget_tweaks %}
 {% load ietf_filters %}
 <form id="search_form"
+      method="post"
       class="form-horizontal"
       action="{% url 'ietf.doc.views_search.search' %}">
+    {% csrf_token %}
     <!-- [html-validate-disable-block input-missing-label -- labelled via aria-label] -->
     <div class="input-group search_field">
         {{ form.name|add_class:"form-control"|attr:"placeholder:Document name/title/RFC number"|attr:"aria-label:Document name/title/RFC number" }}

View file
@@ -1,29 +0,0 @@
{% extends "base.html" %}
{# Copyright The IETF Trust 2015, All Rights Reserved #}
{% load origin %}
{% load static %}
{% load ietf_filters %}
{% block pagehead %}
    <link rel="stylesheet" href="{% static "ietf/css/highcharts.css" %}">
{% endblock %}
{% block js %}
    <script src="{% static 'ietf/js/highstock.js' %}"></script>
    <script>
        $(function () {
            var chart;
            $.getJSON('{{ confurl }}?{{ queryargs }}', function (conf) {
                chart = Highcharts.stockChart('chart', conf);
                chart.showLoading();
                $.getJSON('{{ dataurl }}?{{ queryargs }}', function (data) {
                    chart.series[0].setData(data);
                    chart.hideLoading();
                });
            });
        });
    </script>
{% endblock %}
{% block title %}Document Statistics{% endblock %}
{% block content %}
    {% origin %}
    <div id="chart"></div>
{% endblock %}

View file
@@ -0,0 +1,34 @@
{% extends "base.html" %}
{# Copyright The IETF Trust 2024, All Rights Reserved #}
{% load origin static person_filters ietf_filters %}
{% block pagehead %}
    <link rel="stylesheet" href="{% static 'ietf/css/list.css' %}">
{% endblock %}
{% block title %}Group Leadership{% endblock %}
{% block content %}
    {% origin %}
    <h1>Group Leadership ({{ group_type }})</h1>
    {% if user|has_role:"Secretariat" %}
        <div class="text-end">
            <a class="btn btn-primary" href="{% url 'ietf.group.views.group_leadership_csv' group_type=group_type %}">
                <i class="bi bi-file-ruled"></i> Export as CSV
            </a>
        </div>
    {% endif %}
    <table class="table table-sm table-striped">
        <thead>
            <tr>
                <th scope="col">Leader</th>
                <th scope="col">Groups</th>
            </tr>
        </thead>
        <tbody>
            {% for leader in leaders %}
                <tr>
                    <td>{{ leader.name }}</td>
                    <td>{{ leader.groups }}</td>
                </tr>
            {% endfor %}
        </tbody>
    </table>
{% endblock %}

View file
@@ -40,7 +40,7 @@
 {% if future %}
     <h2 class="mt-5" id="futuremeets">
         Future Meetings
-        <a class="float-end"
+        <a class="ms-2"
            aria-label="icalendar entry for all scheduled future {{ group.acronym }} meetings"
            title="icalendar entry for all scheduled future {{ group.acronym }} meetings"
            href="{% url 'ietf.meeting.views.upcoming_ical' %}?show={{ group.acronym }}">

View file
@@ -88,12 +88,13 @@
                 </a>
                 <br>
             {% endfor %}
-            {% if entry.session.video_stream_url %}
-                <a href="{{ entry.session.session_recording_url }}">
-                    Session recording
-                </a>
-                <br>
-            {% endif %}
+            {% for rec in entry.meetecho_recordings %}
+                <a href="{{ rec.material }}">
+                    Session recording
+                    {% if rec.time %}{{ rec.time|date:"D G:i" }}{% endif %}
+                </a>
+                <br>
+            {% endfor %}
         </td>
         {# slides #}
         <td>

View file
@@ -19,7 +19,7 @@
     </a>
     {% for meeting in meetings %}
         {% if meeting.show_important_dates %}
-            <h2 class="mt-5">
+            <h2 class="mt-5" id="IETF{{ meeting.number }}">
                 IETF {{ meeting.number }}
                 <br>
                 <small class="text-body-secondary">{{ meeting.date }}, {{ meeting.city }}, {{ meeting.country }}</small>

View file
@@ -320,51 +320,50 @@
             </tr>
         {% endif %}
         {# Recordings #}
-        {% if session.has_recordings %}
         {% with session.recordings as recordings %}
             {% if recordings %}
                 {# There's no guaranteed order, so this is a bit messy: #}
                 {# First, the audio recordings, if any #}
                 {% for r in recordings %}
                     {% if r.get_href and 'audio' in r.get_href %}
                         <tr>
                             <td>
                                 <a href="{{ r.get_href }}"><i class="bi bi-file-play"></i> {{ r.title }}</a>
                             </td>
                         </tr>
                     {% endif %}
                 {% endfor %}
                 {# Then the youtube recordings #}
                 {% for r in recordings %}
                     {% if r.get_href and 'youtu' in r.get_href %}
                         <tr>
                             <td>
                                 <a href="{{ r.get_href }}"><i class="bi bi-file-slides"></i> {{ r.title }}</a>
                             </td>
                         </tr>
                     {% endif %}
                 {% endfor %}
                 {# Finally, any other recordings #}
                 {% for r in recordings %}
                     {% if r.get_href and not 'audio' in r.get_href and not 'youtu' in r.get_href %}
                         <tr>
                             <td>
                                 <a href="{{ r.get_href }}"><i class="bi bi-file-play"></i> {{ r.title }}</a>
                             </td>
                         </tr>
                     {% endif %}
                 {% endfor %}
             {% endif %}
         {% endwith %}
-        {% endif %}
-        {% if session.video_stream_url %}
+        {% if session.session_recording_url %}
             <tr>
                 <td>
                     <a href="{{ session.session_recording_url }}">
-                        <i class="bi bi-file-slides"></i> Session recording
+                        <i class="bi bi-file-slides"></i>
+                        Meetecho session recording
                     </a>
                 </td>
             </tr>
         {% endif %}
         </tbody>
     </table>

View file
@@ -1,82 +0,0 @@
#!/usr/bin/env python
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
# -*- Python -*-
#
# $Id: aliasutil.py $
#
# Author: Markus Stenberg <mstenber@cisco.com>
#
"""
Mailing list alias dumping utilities
"""

from django.conf import settings
from ietf.utils.log import log

import debug  # pyflakes:ignore

def rewrite_email_address(email):
    """Prettify the email address (and if it's empty, skip it by
    returning None)."""
    if not email:
        return
    email = email.strip()
    if not email:
        return
    if email[0] == '<' and email[-1] == '>':
        email = email[1:-1]
    # If it doesn't look like email, skip
    if '@' not in email and '?' not in email:
        return
    return email

def rewrite_address_list(l):
    """This utility function makes sure there is exactly one instance
    of an address within the result list, and preserves order
    (although it may not be relevant to start with)."""
    h = {}
    for address in l:
        #address = address.strip()
        if address in h: continue
        h[address] = True
        yield address

def dump_sublist(afile, vfile, alias, adomains, vdomain, emails):
    if not emails:
        return emails
    # Nones in the list should be skipped
    emails = [_f for _f in emails if _f]

    # Make sure emails are sane and eliminate the Nones again for
    # non-sane ones
    emails = [rewrite_email_address(e) for e in emails]
    emails = [_f for _f in emails if _f]

    # And we'll eliminate the duplicates too but preserve order
    emails = list(rewrite_address_list(emails))
    if not emails:
        return emails
    try:
        filtername = 'xfilter-%s' % (alias, )  # in aliases, --> | expandname
        expandname = 'expand-%s' % (alias, )   # in virtual, --> email list
        for domain in adomains:
            aliasaddr = '%s@%s' % (alias, domain)  # in virtual, --> filtername
            vfile.write('%-64s %s\n' % (aliasaddr, filtername))
            afile.write('%-64s "|%s filter %s %s"\n' % (filtername + ':', settings.POSTCONFIRM_PATH, expandname, vdomain))
        vfile.write('%-64s %s\n' % ("%s@%s" % (expandname, vdomain), ', '.join(emails)))
    except UnicodeEncodeError:
        # If there's unicode in email address, something is badly
        # wrong and we just silently punt
        # XXX - is there better approach?
        log('Error encoding email address for an %s alias: %s' % (alias, repr(emails)))
        return []
    return emails

View file
@@ -12,6 +12,8 @@ try:
     from selenium import webdriver
     from selenium.webdriver.firefox.service import Service
     from selenium.webdriver.firefox.options import Options
+    from selenium.webdriver.support.ui import WebDriverWait
+    from selenium.webdriver.support import expected_conditions
     from selenium.webdriver.common.by import By
 except ImportError as e:
     skip_selenium = True
@@ -87,6 +89,48 @@ class IetfSeleniumTestCase(IetfLiveServerTestCase):
     #     actions = ActionChains(self.driver)
     #     actions.move_to_element(element).perform()
    def scroll_and_click(self, element_locator, timeout_seconds=5):
        """
        Selenium has restrictions around clicking elements outside the viewport, so
        this wrapper encapsulates the boilerplate of forcing scrolling and clicking.

        :param element_locator: A two-item tuple of a Selenium locator, eg `(By.CSS_SELECTOR, '#something')`
        """
        # so that we can restore the state of the webpage after clicking
        original_html_scroll_behaviour_to_restore = self.driver.execute_script('return document.documentElement.style.scrollBehavior')
        original_html_overflow_to_restore = self.driver.execute_script('return document.documentElement.style.overflow')
        original_body_scroll_behaviour_to_restore = self.driver.execute_script('return document.body.style.scrollBehavior')
        original_body_overflow_to_restore = self.driver.execute_script('return document.body.style.overflow')

        self.driver.execute_script('document.documentElement.style.scrollBehavior = "auto"')
        self.driver.execute_script('document.documentElement.style.overflow = "auto"')
        self.driver.execute_script('document.body.style.scrollBehavior = "auto"')
        self.driver.execute_script('document.body.style.overflow = "auto"')

        element = self.driver.find_element(element_locator[0], element_locator[1])
        self.scroll_to_element(element)

        # Note that Selenium itself seems to have multiple definitions of 'clickable'.
        # You might expect that the following wait for the 'element_to_be_clickable'
        # would confirm that the following .click() would succeed, but it doesn't.
        # That's why the preceding code attempts to force scrolling to bring the
        # element into the viewport to allow clicking.
        WebDriverWait(self.driver, timeout_seconds).until(expected_conditions.element_to_be_clickable(element_locator))
        element.click()

        if original_html_scroll_behaviour_to_restore:
            self.driver.execute_script(f'document.documentElement.style.scrollBehavior = "{original_html_scroll_behaviour_to_restore}"')
        if original_html_overflow_to_restore:
            self.driver.execute_script(f'document.documentElement.style.overflow = "{original_html_overflow_to_restore}"')
        if original_body_scroll_behaviour_to_restore:
            self.driver.execute_script(f'document.body.style.scrollBehavior = "{original_body_scroll_behaviour_to_restore}"')
        if original_body_overflow_to_restore:
            self.driver.execute_script(f'document.body.style.overflow = "{original_body_overflow_to_restore}"')
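
Usage is a one-liner from any IetfSeleniumTestCase, as the meeting test changes above show:

    self.scroll_and_click((By.CSS_SELECTOR, "#session123"))  # selector hypothetical
    # equivalent to: find_element + scroll_to_element + wait until clickable + click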
class presence_of_element_child_by_css_selector:
    """Wait for presence of a child of a WebElement matching a CSS selector