commit 795fdfe243

.github/workflows/build.yml (vendored, 302 changes)
@@ -156,109 +156,221 @@ jobs:
        with:
          fetch-depth: 1
          fetch-tags: false

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '16'

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.x'

      - name: Download a Coverage Results
        if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
        uses: actions/download-artifact@v4.1.8
        with:
          name: coverage

      - name: Make Release Build
        env:
          DEBIAN_FRONTEND: noninteractive
          BROWSERSLIST_IGNORE_OLD_DATA: 1
      - name: Launch build VM
        id: azlaunch
        run: |
          echo "PKG_VERSION: $PKG_VERSION"
          echo "GITHUB_SHA: $GITHUB_SHA"
          echo "GITHUB_REF_NAME: $GITHUB_REF_NAME"
          echo "Running frontend build script..."
          echo "Compiling native node packages..."
          yarn rebuild
          echo "Packaging static assets..."
          yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/
          yarn legacy:build
          echo "Setting version $PKG_VERSION..."
          sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py
          sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py
          sed -i -r -e "s|^__release_branch__ += '.*'$|__release_branch__ = '$GITHUB_REF_NAME'|" ietf/__init__.py
          echo "Authenticating to Azure..."
          az login --service-principal -u ${{ secrets.AZ_BUILD_APP_ID }} -p ${{ secrets.AZ_BUILD_PWD }} --tenant ${{ secrets.AZ_BUILD_TENANT_ID }}
          echo "Creating VM..."
          vminfo=$(az vm create \
            --resource-group ghaDatatracker \
            --name tmpGhaBuildVM \
            --image Ubuntu2204 \
            --admin-username azureuser \
            --generate-ssh-keys \
            --priority Spot \
            --size Standard_D8ads_v5 \
            --max-price -1 \
            --ephemeral-os-disk \
            --os-disk-size-gb 100 \
            --eviction-policy Delete \
            --nic-delete-option Delete \
            --output tsv \
            --query "publicIpAddress")
          echo "ipaddr=$vminfo" >> "$GITHUB_OUTPUT"
          echo "VM Public IP: $vminfo"
          cat ~/.ssh/id_rsa > ${{ github.workspace }}/prvkey.key
          ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts

      - name: Set Production Flags
        if: ${{ env.SHOULD_DEPLOY == 'true' }}
        run: |
          echo "Setting production flags in settings.py..."
          sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" ietf/settings.py

      - name: Make Release Tarball
      - name: Remote SSH into Build VM
        uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          echo "Build release tarball..."
          mkdir -p /home/runner/work/release
          tar -czf /home/runner/work/release/release.tar.gz -X dev/build/exclude-patterns.txt .

      - name: Collect + Push Statics
        env:
          DEBIAN_FRONTEND: noninteractive
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GITHUB_ACTOR: ${{ github.actor }}
          GITHUB_SHA: ${{ github.sha }}
          GITHUB_REF_NAME: ${{ github.ref_name }}
          GITHUB_RUN_ID: ${{ github.run_id }}
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_STATIC_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_STATIC_KEY_SECRET }}
          AWS_DEFAULT_REGION: auto
          AWS_ENDPOINT_URL: ${{ secrets.CF_R2_ENDPOINT }}
        run: |
          echo "Collecting statics..."
          docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:latest sh dev/build/collectstatics.sh
          echo "Pushing statics..."
          cd static
          aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors

      - name: Augment dockerignore for docker image build
        env:
          PKG_VERSION: ${{ env.PKG_VERSION }}
          SHOULD_DEPLOY: ${{ env.SHOULD_DEPLOY }}
          SKIP_TESTS: ${{ github.event.inputs.skiptests }}
          DEBIAN_FRONTEND: noninteractive
          BROWSERSLIST_IGNORE_OLD_DATA: 1
        with:
          host: ${{ steps.azlaunch.outputs.ipaddr }}
          port: 22
          username: azureuser
          command_timeout: 60m
          key_path: ${{ github.workspace }}/prvkey.key
          envs: GITHUB_TOKEN,GITHUB_ACTOR,GITHUB_SHA,GITHUB_REF_NAME,GITHUB_RUN_ID,AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,AWS_DEFAULT_REGION,AWS_ENDPOINT_URL,PKG_VERSION,SHOULD_DEPLOY,SKIP_TESTS,DEBIAN_FRONTEND,BROWSERSLIST_IGNORE_OLD_DATA
          script_stop: true
          script: |
            export DEBIAN_FRONTEND=noninteractive
            lsb_release -a
            sudo apt-get update
            sudo apt-get upgrade -y
            sudo apt-get install wget unzip curl -y

            echo "=========================================================================="
            echo "Installing Docker..."
            echo "=========================================================================="
            curl -fsSL https://get.docker.com -o get-docker.sh
            sudo sh get-docker.sh
            sudo docker buildx create \
              --name container-builder \
              --driver docker-container \
              --bootstrap --use

            echo "=========================================================================="
            echo "Login to ghcr.io..."
            echo "=========================================================================="
            echo $GITHUB_TOKEN | sudo docker login ghcr.io -u $GITHUB_ACTOR --password-stdin

            echo "=========================================================================="
            echo "Installing GH CLI..."
            echo "=========================================================================="
            sudo mkdir -p -m 755 /etc/apt/keyrings \
              && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
              && sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
              && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
              && sudo apt update \
              && sudo apt install gh -y

            echo "=========================================================================="
            echo "Installing AWS CLI..."
            echo "=========================================================================="
            curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
            unzip awscliv2.zip
            sudo ./aws/install

            echo "=========================================================================="
            echo "Install Node.js..."
            echo "=========================================================================="
            curl -fsSL https://deb.nodesource.com/setup_18.x -o nodesource_setup.sh
            sudo bash nodesource_setup.sh
            sudo apt-get install -y nodejs
            sudo corepack enable

            echo "=========================================================================="
            echo "Install Python 3.x..."
            echo "=========================================================================="
            sudo apt-get install python3 python3-dev -y
            python3 --version

            echo "=========================================================================="
            echo "Clone project..."
            echo "=========================================================================="
            sudo mkdir -p /workspace
            sudo chown azureuser /workspace
            cd /workspace
            gh repo clone ietf-tools/datatracker -- --depth=1 --no-tags
            cd datatracker

            if [ "$SKIP_TESTS" = "false" ] || [ "$GITHUB_REF_NAME" = "release" ] ; then
              echo "=========================================================================="
              echo "Downloading coverage..."
              echo "=========================================================================="
              gh run download $GITHUB_RUN_ID -n coverage
            fi

            echo "=========================================================================="
            echo "Building project..."
            echo "=========================================================================="
            echo "PKG_VERSION: $PKG_VERSION"
            echo "GITHUB_SHA: $GITHUB_SHA"
            echo "GITHUB_REF_NAME: $GITHUB_REF_NAME"
            echo "Running frontend build script..."
            echo "Compiling native node packages..."
            yarn rebuild
            echo "Packaging static assets..."
            yarn build --base=https://static.ietf.org/dt/$PKG_VERSION/
            yarn legacy:build
            echo "Setting version $PKG_VERSION..."
            sed -i -r -e "s|^__version__ += '.*'$|__version__ = '$PKG_VERSION'|" ietf/__init__.py
            sed -i -r -e "s|^__release_hash__ += '.*'$|__release_hash__ = '$GITHUB_SHA'|" ietf/__init__.py
            sed -i -r -e "s|^__release_branch__ += '.*'$|__release_branch__ = '$GITHUB_REF_NAME'|" ietf/__init__.py

            if [ "$SHOULD_DEPLOY" = "true" ] ; then
              echo "=========================================================================="
              echo "Setting production flags in settings.py..."
              echo "=========================================================================="
              sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" ietf/settings.py
            fi

            echo "=========================================================================="
            echo "Build release tarball..."
            echo "=========================================================================="
            mkdir -p /workspace/release
            tar -czf /workspace/release.tar.gz -X dev/build/exclude-patterns.txt .

            echo "=========================================================================="
            echo "Collecting statics..."
            echo "=========================================================================="
            sudo docker run --rm --name collectstatics -v $(pwd):/workspace ghcr.io/ietf-tools/datatracker-app-base:latest sh dev/build/collectstatics.sh
            echo "Pushing statics..."
            cd static
            aws s3 sync . s3://static/dt/$PKG_VERSION --only-show-errors
            cd ..

            echo "=========================================================================="
            echo "Augment dockerignore for docker image build..."
            echo "=========================================================================="
            cat >> .dockerignore <<EOL
            .devcontainer
            .github
            .vscode
            k8s
            playwright
            svn-history
            docker-compose.yml
            EOL

            echo "=========================================================================="
            echo "Building Images..."
            echo "=========================================================================="
            sudo docker buildx build --file dev/build/Dockerfile --platform linux/amd64,linux/arm64 --tag ghcr.io/ietf-tools/datatracker:$PKG_VERSION --push .

      - name: Fetch release tarball
        run: |
          cat >> .dockerignore <<EOL
          .devcontainer
          .github
          .vscode
          k8s
          playwright
          svn-history
          docker-compose.yml
          EOL
          mkdir -p /home/runner/work/release
          chmod 0600 ${{ github.workspace }}/prvkey.key
          scp -i ${{ github.workspace }}/prvkey.key azureuser@${{ steps.azlaunch.outputs.ipaddr }}:/workspace/release.tar.gz /home/runner/work/release/release.tar.gz

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Destroy Build VM + resources
        if: always()
        shell: pwsh
        run: |
          echo "Destroying VM..."
          az vm delete -g ghaDatatracker -n tmpGhaBuildVM --yes --force-deletion true

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          $resourceOrderRemovalOrder = [ordered]@{
            "Microsoft.Compute/virtualMachines" = 0
            "Microsoft.Compute/disks" = 1
            "Microsoft.Network/networkInterfaces" = 2
            "Microsoft.Network/publicIpAddresses" = 3
            "Microsoft.Network/networkSecurityGroups" = 4
            "Microsoft.Network/virtualNetworks" = 5
          }
          echo "Fetching remaining resources..."
          $resources = az resource list --resource-group ghaDatatracker | ConvertFrom-Json

      - name: Build Release Docker Image
        uses: docker/build-push-action@v6
        env:
          DOCKER_BUILD_SUMMARY: false
        with:
          context: .
          file: dev/build/Dockerfile
          platforms: ${{ github.event.inputs.skiparm == 'true' && 'linux/amd64' || 'linux/amd64,linux/arm64' }}
          push: true
          tags: ghcr.io/ietf-tools/datatracker:${{ env.PKG_VERSION }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          $orderedResources = $resources
          | Sort-Object @{
            Expression = {$resourceOrderRemovalOrder[$_.type]}
            Descending = $False
          }

          echo "Deleting remaining resources..."
          $orderedResources | ForEach-Object {
            az resource delete --resource-group ghaDatatracker --ids $_.id --verbose
          }

          echo "Logout from Azure..."
          az logout

      - name: Update CHANGELOG
        id: changelog
@@ -270,6 +382,12 @@ jobs:
          toTag: ${{ env.TO_TAG }}
          writeToFile: false

      - name: Download Coverage Results
        if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
        uses: actions/download-artifact@v4.1.8
        with:
          name: coverage

      - name: Prepare Coverage Action
        if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
        working-directory: ./dev/coverage-action
@@ -335,7 +453,7 @@ jobs:
    steps:
      - name: Notify on Slack (Success)
        if: ${{ !contains(join(needs.*.result, ','), 'failure') }}
        uses: slackapi/slack-github-action@v1.26.0
        uses: slackapi/slack-github-action@v1.27.0
        with:
          channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
          payload: |

@@ -358,7 +476,7 @@ jobs:
          SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }}
      - name: Notify on Slack (Failure)
        if: ${{ contains(join(needs.*.result, ','), 'failure') }}
        uses: slackapi/slack-github-action@v1.26.0
        uses: slackapi/slack-github-action@v1.27.0
        with:
          channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }}
          payload: |
.github/workflows/tests-az.yml (vendored, 2 changes)

@@ -38,7 +38,7 @@ jobs:
          ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts

      - name: Remote SSH into VM
        uses: appleboy/ssh-action@029f5b4aeeeb58fdfe1410a5d17f967dacf36262
        uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
.github/workflows/tests.yml (vendored, 2 changes)

@@ -59,7 +59,7 @@ jobs:
          path: geckodriver.log

      - name: Upload Coverage Results to Codecov
        uses: codecov/codecov-action@v4.5.0
        uses: codecov/codecov-action@v4.6.0
        with:
          files: coverage.xml
LICENSE (2 changes)

@@ -1,6 +1,6 @@
BSD 3-Clause License

Copyright (c) 2008-2023, The IETF Trust
Copyright (c) 2008-2024, The IETF Trust
All rights reserved.

Redistribution and use in source and binary forms, with or without
dev/coverage-action/package-lock.json (generated, 75 changes)

@@ -9,19 +9,27 @@
      "version": "1.0.0",
      "license": "BSD-3-Clause",
      "dependencies": {
        "@actions/core": "1.10.1",
        "@actions/core": "1.11.1",
        "@actions/github": "6.0.0",
        "lodash": "4.17.21",
        "luxon": "3.4.4"
        "luxon": "3.5.0"
      }
    },
    "node_modules/@actions/core": {
      "version": "1.10.1",
      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz",
      "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==",
      "version": "1.11.1",
      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
      "dependencies": {
        "@actions/http-client": "^2.0.1",
        "uuid": "^8.3.2"
        "@actions/exec": "^1.1.1",
        "@actions/http-client": "^2.0.1"
      }
    },
    "node_modules/@actions/exec": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
      "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
      "dependencies": {
        "@actions/io": "^1.0.1"
      }
    },
    "node_modules/@actions/github": {

@@ -44,6 +52,11 @@
        "undici": "^5.25.4"
      }
    },
    "node_modules/@actions/io": {
      "version": "1.1.3",
      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
      "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
    },
    "node_modules/@fastify/busboy": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.0.0.tgz",

@@ -196,9 +209,9 @@
      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
    },
    "node_modules/luxon": {
      "version": "3.4.4",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz",
      "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==",
      "version": "3.5.0",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz",
      "integrity": "sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ==",
      "engines": {
        "node": ">=12"
      }

@@ -235,14 +248,6 @@
      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
      "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
    },
    "node_modules/uuid": {
      "version": "8.3.2",
      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
      "bin": {
        "uuid": "dist/bin/uuid"
      }
    },
    "node_modules/wrappy": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",

@@ -251,12 +256,20 @@
    },
    "dependencies": {
      "@actions/core": {
        "version": "1.10.1",
        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz",
        "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==",
        "version": "1.11.1",
        "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
        "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
        "requires": {
          "@actions/http-client": "^2.0.1",
          "uuid": "^8.3.2"
          "@actions/exec": "^1.1.1",
          "@actions/http-client": "^2.0.1"
        }
      },
      "@actions/exec": {
        "version": "1.1.1",
        "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
        "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
        "requires": {
          "@actions/io": "^1.0.1"
        }
      },
      "@actions/github": {

@@ -279,6 +292,11 @@
          "undici": "^5.25.4"
        }
      },
      "@actions/io": {
        "version": "1.1.3",
        "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
        "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
      },
      "@fastify/busboy": {
        "version": "2.0.0",
        "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.0.0.tgz",

@@ -395,9 +413,9 @@
        "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
      },
      "luxon": {
        "version": "3.4.4",
        "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz",
        "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA=="
        "version": "3.5.0",
        "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz",
        "integrity": "sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ=="
      },
      "once": {
        "version": "1.4.0",

@@ -425,11 +443,6 @@
        "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
        "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
      },
      "uuid": {
        "version": "8.3.2",
        "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
        "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
      },
      "wrappy": {
        "version": "1.0.2",
        "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",

@@ -6,9 +6,9 @@
  "author": "IETF Trust",
  "license": "BSD-3-Clause",
  "dependencies": {
    "@actions/core": "1.10.1",
    "@actions/core": "1.11.1",
    "@actions/github": "6.0.0",
    "lodash": "4.17.21",
    "luxon": "3.4.4"
    "luxon": "3.5.0"
  }
}
dev/deploy-to-container/package-lock.json (generated, 14 changes)

@@ -11,7 +11,7 @@
        "nanoid": "5.0.7",
        "nanoid-dictionary": "5.0.0-beta.1",
        "slugify": "1.6.6",
        "tar": "^7.4.0",
        "tar": "^7.4.3",
        "yargs": "^17.7.2"
      },
      "engines": {

@@ -788,9 +788,9 @@
      }
    },
    "node_modules/tar": {
      "version": "7.4.0",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.0.tgz",
      "integrity": "sha512-XQs0S8fuAkQWuqhDeCdMlJXDX80D7EOVLDPVFkna9yQfzS+PHKgfxcei0jf6/+QAWcjqrnC8uM3fSAnrQl+XYg==",
      "version": "7.4.3",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
      "dependencies": {
        "@isaacs/fs-minipass": "^4.0.0",
        "chownr": "^3.0.0",

@@ -1503,9 +1503,9 @@
      }
    },
    "tar": {
      "version": "7.4.0",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.0.tgz",
      "integrity": "sha512-XQs0S8fuAkQWuqhDeCdMlJXDX80D7EOVLDPVFkna9yQfzS+PHKgfxcei0jf6/+QAWcjqrnC8uM3fSAnrQl+XYg==",
      "version": "7.4.3",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
      "requires": {
        "@isaacs/fs-minipass": "^4.0.0",
        "chownr": "^3.0.0",

@@ -7,7 +7,7 @@
    "nanoid": "5.0.7",
    "nanoid-dictionary": "5.0.0-beta.1",
    "slugify": "1.6.6",
    "tar": "^7.4.0",
    "tar": "^7.4.3",
    "yargs": "^17.7.2"
  },
  "engines": {
dev/diff/package-lock.json (generated, 28 changes)

@@ -15,9 +15,9 @@
        "keypress": "^0.2.1",
        "listr2": "^6.6.1",
        "lodash-es": "^4.17.21",
        "luxon": "^3.4.4",
        "luxon": "^3.5.0",
        "pretty-bytes": "^6.1.1",
        "tar": "^7.4.0",
        "tar": "^7.4.3",
        "yargs": "^17.7.2"
      },
      "engines": {

@@ -1060,9 +1060,9 @@
      }
    },
    "node_modules/luxon": {
      "version": "3.4.4",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz",
      "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==",
      "version": "3.5.0",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz",
      "integrity": "sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ==",
      "engines": {
        "node": ">=12"
      }

@@ -1493,9 +1493,9 @@
      }
    },
    "node_modules/tar": {
      "version": "7.4.0",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.0.tgz",
      "integrity": "sha512-XQs0S8fuAkQWuqhDeCdMlJXDX80D7EOVLDPVFkna9yQfzS+PHKgfxcei0jf6/+QAWcjqrnC8uM3fSAnrQl+XYg==",
      "version": "7.4.3",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
      "dependencies": {
        "@isaacs/fs-minipass": "^4.0.0",
        "chownr": "^3.0.0",

@@ -2410,9 +2410,9 @@
      "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ=="
    },
    "luxon": {
      "version": "3.4.4",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz",
      "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA=="
      "version": "3.5.0",
      "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz",
      "integrity": "sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ=="
    },
    "mimic-fn": {
      "version": "2.1.0",

@@ -2691,9 +2691,9 @@
      }
    },
    "tar": {
      "version": "7.4.0",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.0.tgz",
      "integrity": "sha512-XQs0S8fuAkQWuqhDeCdMlJXDX80D7EOVLDPVFkna9yQfzS+PHKgfxcei0jf6/+QAWcjqrnC8uM3fSAnrQl+XYg==",
      "version": "7.4.3",
      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
      "requires": {
        "@isaacs/fs-minipass": "^4.0.0",
        "chownr": "^3.0.0",

@@ -11,9 +11,9 @@
    "keypress": "^0.2.1",
    "listr2": "^6.6.1",
    "lodash-es": "^4.17.21",
    "luxon": "^3.4.4",
    "luxon": "^3.5.0",
    "pretty-bytes": "^6.1.1",
    "tar": "^7.4.0",
    "tar": "^7.4.3",
    "yargs": "^17.7.2"
  },
  "engines": {
@@ -125,6 +125,15 @@ ENV LC_ALL en_US.UTF-8
ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/
RUN chmod +rx /usr/local/bin/idnits

# Install required fonts
RUN mkdir -p /tmp/fonts && \
    wget -q -O /tmp/fonts.tar.gz https://github.com/ietf-tools/xml2rfc-fonts/archive/refs/tags/3.22.0.tar.gz && \
    tar zxf /tmp/fonts.tar.gz -C /tmp/fonts && \
    mv /tmp/fonts/*/noto/* /usr/local/share/fonts/ && \
    mv /tmp/fonts/*/roboto_mono/* /usr/local/share/fonts/ && \
    rm -rf /tmp/fonts.tar.gz /tmp/fonts/ && \
    fc-cache -f

# Turn off rsyslog kernel logging (doesn't work in Docker)
RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
@@ -1,4 +1,4 @@
# Copyright The IETF Trust 2015-2020, All Rights Reserved
# Copyright The IETF Trust 2015-2024, All Rights Reserved
# -*- coding: utf-8 -*-
import base64
import datetime

@@ -222,6 +222,70 @@ class CustomApiTests(TestCase):
        event = doc.latest_event()
        self.assertEqual(event.by, recman)

    def test_api_set_meetecho_recording_name(self):
        url = urlreverse("ietf.meeting.views.api_set_meetecho_recording_name")
        recmanrole = RoleFactory(group__type_id="ietf", name_id="recman")
        recman = recmanrole.person
        meeting = MeetingFactory(type_id="ietf")
        session = SessionFactory(group__type_id="wg", meeting=meeting)
        apikey = PersonalApiKey.objects.create(endpoint=url, person=recman)
        name = "testname"

        # error cases
        r = self.client.post(url, {})
        self.assertContains(r, "Missing apikey parameter", status_code=400)

        badrole = RoleFactory(group__type_id="ietf", name_id="ad")
        badapikey = PersonalApiKey.objects.create(endpoint=url, person=badrole.person)
        badrole.person.user.last_login = timezone.now()
        badrole.person.user.save()
        r = self.client.post(url, {"apikey": badapikey.hash()})
        self.assertContains(r, "Restricted to role: Recording Manager", status_code=403)

        r = self.client.post(url, {"apikey": apikey.hash()})
        self.assertContains(r, "Too long since last regular login", status_code=400)
        recman.user.last_login = timezone.now()
        recman.user.save()

        r = self.client.get(url, {"apikey": apikey.hash()})
        self.assertContains(r, "Method not allowed", status_code=405)

        r = self.client.post(url, {"apikey": apikey.hash()})
        self.assertContains(r, "Missing session_id parameter", status_code=400)

        r = self.client.post(url, {"apikey": apikey.hash(), "session_id": session.pk})
        self.assertContains(r, "Missing name parameter", status_code=400)

        bad_pk = int(Session.objects.order_by("-pk").first().pk) + 1
        r = self.client.post(
            url,
            {
                "apikey": apikey.hash(),
                "session_id": bad_pk,
                "name": name,
            },
        )
        self.assertContains(r, "Session not found", status_code=400)

        r = self.client.post(
            url,
            {
                "apikey": apikey.hash(),
                "session_id": "foo",
                "name": name,
            },
        )
        self.assertContains(r, "Invalid session_id", status_code=400)

        r = self.client.post(
            url, {"apikey": apikey.hash(), "session_id": session.pk, "name": name}
        )
        self.assertContains(r, "Done", status_code=200)

        session.refresh_from_db()
        self.assertEqual(session.meetecho_recording_name, name)


    def test_api_add_session_attendees_deprecated(self):
        # Deprecated test - should be removed when we stop accepting a simple list of user PKs in
        # the add_session_attendees() view
@@ -1,4 +1,4 @@
# Copyright The IETF Trust 2017, All Rights Reserved
# Copyright The IETF Trust 2017-2024, All Rights Reserved

from django.conf import settings
from django.urls import include

@@ -39,6 +39,8 @@ urlpatterns = [
    url(r'^iesg/position', views_ballot.api_set_position),
    # Let Meetecho set session video URLs
    url(r'^meeting/session/video/url$', meeting_views.api_set_session_video_url),
    # Let Meetecho tell us the name of its recordings
    url(r'^meeting/session/recording-name$', meeting_views.api_set_meetecho_recording_name),
    # Meeting agenda + floorplan data
    url(r'^meeting/(?P<num>[A-Za-z0-9._+-]+)/agenda-data$', meeting_views.api_get_agenda_data),
    # Meeting session materials

@@ -59,7 +61,7 @@ urlpatterns = [
    # Email alias listing
    url(r'^person/email/$', api_views.active_email_list),
    # Draft submission API
    url(r'^submit/?$', submit_views.api_submit),
    url(r'^submit/?$', submit_views.api_submit_tombstone),
    # Draft upload API
    url(r'^submission/?$', submit_views.api_submission),
    # Draft submission state API
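A minimal sketch of a client call against the new recording-name endpoint, assuming the patterns above are mounted under /api/ as for the rest of this file; the parameter names and error responses come from test_api_set_meetecho_recording_name above, and the base URL, key, and values are placeholders:

import requests

def set_recording_name(base_url: str, apikey: str, session_id: int, name: str) -> None:
    """POST a Meetecho recording name for a session; raise on an error response."""
    r = requests.post(
        f"{base_url}/api/meeting/session/recording-name",
        data={"apikey": apikey, "session_id": session_id, "name": name},
        timeout=30,
    )
    r.raise_for_status()  # the 400/403/405 replies carry the error text shown in the tests

# Example with hypothetical values:
# set_recording_name("https://datatracker.ietf.org", "API-KEY-HASH", 12345, "IETF120-MYWG-20240722-1500")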
@@ -95,9 +95,7 @@ class CommunityListTests(TestCase):

        url = urlreverse(ietf.community.views.view_list, kwargs={ "email_or_name": person.plain_name()})
        r = self.client.get(url)
        self.assertEqual(r.status_code, 300)
        self.assertIn("bazquux@example.com", r.content.decode())
        self.assertIn("foobar@example.com", r.content.decode())
        self.assertEqual(r.status_code, 404)

    def complex_person(self, *args, **kwargs):
        person = PersonFactory(*args, **kwargs)
@@ -28,11 +28,14 @@ from ietf.utils.decorators import ignore_view_kwargs
from ietf.utils.http import is_ajax
from ietf.utils.response import permission_denied

class MultiplePersonError(Exception):
    """More than one Person record matches the given email or name"""
    pass

def lookup_community_list(request, email_or_name=None, acronym=None):
    """Finds a CommunityList for a person or group

    Instantiates an unsaved CommunityList if one is not found.

    If the person or group cannot be found and uniquely identified, raises an Http404 exception
    """
    assert email_or_name or acronym

    if acronym:

@@ -44,19 +47,14 @@ def lookup_community_list(request, email_or_name=None, acronym=None):
        if hasattr(request.user, 'person') and request.user.person in persons:
            person = request.user.person
        else:
            raise MultiplePersonError("\r\n".join([p.user.username for p in persons]))
            raise Http404(f"Unable to identify the CommunityList for {email_or_name}")
    else:
        person = persons[0]
    clist = CommunityList.objects.filter(person=person).first() or CommunityList(person=person)

    return clist

def view_list(request, email_or_name=None):
    try:
        clist = lookup_community_list(request, email_or_name)
    except MultiplePersonError as err:
        return HttpResponse(str(err), status=300)

    clist = lookup_community_list(request, email_or_name)  # may raise Http404
    docs = docs_tracked_by_community_list(clist)
    docs, meta = prepare_document_table(request, docs, request.GET)

@@ -76,10 +74,7 @@ def view_list(request, email_or_name=None):
def manage_list(request, email_or_name=None, acronym=None):
    # we need to be a bit careful because clist may not exist in the
    # database so we can't call related stuff on it yet
    try:
        clist = lookup_community_list(request, email_or_name, acronym)
    except MultiplePersonError as err:
        return HttpResponse(str(err), status=300)
    clist = lookup_community_list(request, email_or_name, acronym)  # may raise Http404

    if not can_manage_community_list(request.user, clist):
        permission_denied(request, "You do not have permission to access this view")

@@ -166,10 +161,7 @@ def track_document(request, name, email_or_name=None, acronym=None):
    doc = get_object_or_404(Document, name=name)

    if request.method == "POST":
        try:
            clist = lookup_community_list(request, email_or_name, acronym)
        except MultiplePersonError as err:
            return HttpResponse(str(err), status=300)
        clist = lookup_community_list(request, email_or_name, acronym)  # may raise Http404
        if not can_manage_community_list(request.user, clist):
            permission_denied(request, "You do not have permission to access this view")

@@ -191,10 +183,7 @@ def track_document(request, name, email_or_name=None, acronym=None):
@login_required
def untrack_document(request, name, email_or_name=None, acronym=None):
    doc = get_object_or_404(Document, name=name)
    try:
        clist = lookup_community_list(request, email_or_name, acronym)
    except MultiplePersonError as err:
        return HttpResponse(str(err), status=300)
    clist = lookup_community_list(request, email_or_name, acronym)  # may raise Http404
    if not can_manage_community_list(request.user, clist):
        permission_denied(request, "You do not have permission to access this view")

@@ -214,11 +203,7 @@ def untrack_document(request, name, email_or_name=None, acronym=None):

@ignore_view_kwargs("group_type")
def export_to_csv(request, email_or_name=None, acronym=None):
    try:
        clist = lookup_community_list(request, email_or_name, acronym)
    except MultiplePersonError as err:
        return HttpResponse(str(err), status=300)

    clist = lookup_community_list(request, email_or_name, acronym)  # may raise Http404
    response = HttpResponse(content_type='text/csv')

    if clist.group:

@@ -259,11 +244,7 @@ def export_to_csv(request, email_or_name=None, acronym=None):

@ignore_view_kwargs("group_type")
def feed(request, email_or_name=None, acronym=None):
    try:
        clist = lookup_community_list(request, email_or_name, acronym)
    except MultiplePersonError as err:
        return HttpResponse(str(err), status=300)

    clist = lookup_community_list(request, email_or_name, acronym)  # may raise Http404
    significant = request.GET.get('significant', '') == '1'

    documents = docs_tracked_by_community_list(clist).values_list('pk', flat=True)

@@ -299,12 +280,9 @@ def feed(request, email_or_name=None, acronym=None):
@login_required
@ignore_view_kwargs("group_type")
def subscription(request, email_or_name=None, acronym=None):
    try:
        clist = lookup_community_list(request, email_or_name, acronym)
        if clist.pk is None:
            raise Http404
    except MultiplePersonError as err:
        return HttpResponse(str(err), status=300)
    clist = lookup_community_list(request, email_or_name, acronym)  # may raise Http404
    if clist.pk is None:
        raise Http404

    person = request.user.person
@@ -87,6 +87,12 @@ class GroupPagesTests(TestCase):
            r = self.client.get(url)
            self.assertEqual(r.status_code, 200)
            self.assertContains(r, g.acronym)
            if t == "area":
                q = PyQuery(r.content)
                wg_url = urlreverse("ietf.group.views.active_groups", kwargs=dict(group_type="wg"))
                href = f"{wg_url}#{g.acronym.upper()}"
                self.assertEqual(q(f"h2#id-{g.acronym} a").attr("href"), href)
                self.assertEqual(q(f'h2#id-{g.acronym} a[href="{href}"]').text(), f"({g.acronym.upper()})")

        url = urlreverse('ietf.group.views.active_groups', kwargs=dict())
        r = self.client.get(url)
@@ -87,9 +87,18 @@ class IESGTests(TestCase):
            group=dated_group,
            person=Person.objects.get(user__username='ad'),
        )
        dated_milestones = DatedGroupMilestoneFactory.create_batch(
            2, group=dated_group, state_id="review"
        )
        dated_milestones = [
            DatedGroupMilestoneFactory(
                group=dated_group,
                state_id="review",
                desc="This is the description of one dated group milestone",
            ),
            DatedGroupMilestoneFactory(
                group=dated_group,
                state_id="review",
                desc="This is the description of another dated group milestone",
            ),
        ]
        dated_milestones[0].due -= datetime.timedelta(days=1)  # make this one earlier
        dated_milestones[0].save()

@@ -99,9 +108,18 @@ class IESGTests(TestCase):
            group=dateless_group,
            person=Person.objects.get(user__username='ad'),
        )
        dateless_milestones = DatelessGroupMilestoneFactory.create_batch(
            2, group=dateless_group, state_id="review"
        )
        dateless_milestones = [
            DatelessGroupMilestoneFactory(
                group=dateless_group,
                state_id="review",
                desc="This is the description of one dateless group milestone",
            ),
            DatelessGroupMilestoneFactory(
                group=dateless_group,
                state_id="review",
                desc="This is the description of another dateless group milestone",
            ),
        ]

        url = urlreverse("ietf.iesg.views.milestones_needing_review")
        self.client.login(username="ad", password="ad+password")

@@ -111,17 +129,29 @@ class IESGTests(TestCase):

        # check order-by-date
        dated_tbody = pq(f'td:contains("{dated_milestones[0].desc}")').closest("tbody")
        next_td = dated_tbody.find('td:contains("Next")')
        self.assertEqual(next_td.siblings()[0].text.strip(), dated_milestones[0].desc)
        last_td = dated_tbody.find('td:contains("Last")')
        self.assertEqual(last_td.siblings()[0].text.strip(), dated_milestones[1].desc)
        rows = list(dated_tbody.items("tr"))  # keep as pyquery objects
        self.assertTrue(rows[0].find('td:first:contains("Last")'))  # Last milestone shown first
        self.assertFalse(rows[0].find('td:first:contains("Next")'))
        self.assertTrue(rows[0].find(f'td:contains("{dated_milestones[1].desc}")'))
        self.assertFalse(rows[0].find(f'td:contains("{dated_milestones[0].desc}")'))

        self.assertFalse(rows[1].find('td:first:contains("Last")'))  # Last milestone shown first
        self.assertTrue(rows[1].find('td:first:contains("Next")'))
        self.assertFalse(rows[1].find(f'td:contains("{dated_milestones[1].desc}")'))
        self.assertTrue(rows[1].find(f'td:contains("{dated_milestones[0].desc}")'))

        # check order-by-order
        dateless_tbody = pq(f'td:contains("{dateless_milestones[0].desc}")').closest("tbody")
        next_td = dateless_tbody.find('td:contains("Next")')
        self.assertEqual(next_td.siblings()[0].text.strip(), dateless_milestones[0].desc)
        last_td = dateless_tbody.find('td:contains("Last")')
        self.assertEqual(last_td.siblings()[0].text.strip(), dateless_milestones[1].desc)
        rows = list(dateless_tbody.items("tr"))  # keep as pyquery objects
        self.assertTrue(rows[0].find('td:first:contains("Last")'))  # Last milestone shown first
        self.assertFalse(rows[0].find('td:first:contains("Next")'))
        self.assertTrue(rows[0].find(f'td:contains("{dateless_milestones[1].desc}")'))
        self.assertFalse(rows[0].find(f'td:contains("{dateless_milestones[0].desc}")'))

        self.assertFalse(rows[1].find('td:first:contains("Last")'))  # Last milestone shown first
        self.assertTrue(rows[1].find('td:first:contains("Next")'))
        self.assertFalse(rows[1].find(f'td:contains("{dateless_milestones[1].desc}")'))
        self.assertTrue(rows[1].find(f'td:contains("{dateless_milestones[0].desc}")'))


    def test_review_decisions(self):
@@ -404,6 +434,8 @@ class IESGAgendaTests

        self.assertContains(r, action_items.text)

        q = PyQuery(r.content)

        for k, d in self.telechat_docs.items():
            if d.type_id == "charter":
                self.assertContains(r, d.group.name, msg_prefix="%s '%s' not in response" % (k, d.group.name))

@@ -412,6 +444,18 @@ class IESGAgendaTests
            self.assertContains(r, d.name, msg_prefix="%s '%s' not in response" % (k, d.name))
            self.assertContains(r, d.title, msg_prefix="%s '%s' title not in response" % (k, d.title))

            if d.type_id in ["charter", "draft"]:
                if d.group.parent is None:
                    continue
                wg_url = urlreverse("ietf.group.views.active_groups", kwargs=dict(group_type="wg"))
                href = f"{wg_url}#{d.group.parent.acronym.upper()}"
                texts = [elem.text.strip() for elem in q(f'a[href="{href}"]')]
                self.assertGreater(len(texts), 0)
                if d.type_id == "charter":
                    self.assertTrue(any(t == d.group.parent.acronym.upper() for t in texts))
                elif d.type_id == "draft":
                    self.assertTrue(any(t == f"({d.group.parent.acronym.upper()})" for t in texts))

        for i, mi in enumerate(self.mgmt_items, start=1):
            s = "6." + str(i)
            self.assertContains(r, s, msg_prefix="Section '%s' not in response" % s)
@@ -171,31 +171,44 @@ def message_from_message(message,by=None):
    )
    return msg


class UndeliverableIprResponseError(Exception):
    """Response email could not be delivered and should be treated as an error"""


def process_response_email(msg):
    """Saves an incoming message. msg=string. Message "To" field is expected to
    be in the format ietf-ipr+[identifier]@ietf.org. Expect to find a message with
    a matching value in the reply_to field, associated to an IPR disclosure through
    IprEvent. Create a Message object for the incoming message and associate it to
    the original message via new IprEvent"""
    """Save an incoming IPR response email message

    Message "To" field is expected to be in the format ietf-ipr+[identifier]@ietf.org. If
    the address or identifier is missing, the message will be silently dropped.

    Expect to find a message with a matching value in the reply_to field, associated to an
    IPR disclosure through IprEvent. If it cannot be matched, raises UndeliverableIprResponseError

    Creates a Message object for the incoming message and associates it to
    the original message via new IprEvent
    """
    message = message_from_bytes(force_bytes(msg))
    to = message.get('To', '')

    # exit if this isn't a response we're interested in (with plus addressing)
    local,domain = get_base_ipr_request_address().split('@')
    local, domain = get_base_ipr_request_address().split('@')
    if not re.match(r'^{}\+[a-zA-Z0-9_\-]{}@{}'.format(local,'{16}',domain),to):
        return None
        _from = message.get("From", "<unknown>")
        log(f"Ignoring IPR email without a message identifier from {_from} to {to}")
        return

    try:
        to_message = Message.objects.get(reply_to=to)
    except Message.DoesNotExist:
        log('Error finding matching message ({})'.format(to))
        return None
        raise UndeliverableIprResponseError(f"Unable to find message matching {to}")

    try:
        disclosure = to_message.msgevents.first().disclosure
    except:
        log('Error processing message ({})'.format(to))
        return None
        raise UndeliverableIprResponseError("Error processing message for {to}")

    ietf_message = message_from_message(message)
    IprEvent.objects.create(

@@ -207,4 +220,4 @@ def process_response_email(msg):
    )

    log("Received IPR email from %s" % ietf_message.frm)
    return ietf_message
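For reference, the plus-addressing check above splices the literal '{16}' into str.format so the quantifier survives formatting. A small illustrative sketch, assuming the base request address is ietf-ipr@ietf.org as in the docstring; an f-string with doubled braces builds the same pattern:

import re

local, domain = "ietf-ipr", "ietf.org"
pattern = rf"^{local}\+[a-zA-Z0-9_\-]{{16}}@{domain}"  # 16-character message identifier

assert re.match(pattern, "ietf-ipr+0123456789abcdef@ietf.org")   # matched: processed
assert not re.match(pattern, "ietf-ipr@ietf.org")                # no identifier: silently ignored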
@@ -9,7 +9,7 @@ from textwrap import dedent
from django.core.management import CommandError

from ietf.utils.management.base import EmailOnFailureCommand
from ietf.ipr.mail import process_response_email
from ietf.ipr.mail import process_response_email, UndeliverableIprResponseError

import debug  # pyflakes:ignore

@@ -31,7 +31,7 @@ class Command(EmailOnFailureCommand):
        self.msg_bytes = sys.stdin.buffer.read()
        try:
            process_response_email(self.msg_bytes)
        except ValueError as e:
        except (ValueError, UndeliverableIprResponseError) as e:
            raise CommandError(e)

    failure_subject = 'Error during ipr email processing'
@@ -4,6 +4,7 @@

import datetime
import mock
import re

from pyquery import PyQuery
from urllib.parse import quote, urlparse

@@ -35,9 +36,9 @@ from ietf.ipr.factories import (
)
from ietf.ipr.forms import DraftForm, HolderIprDisclosureForm
from ietf.ipr.mail import (process_response_email, get_reply_to, get_update_submitter_emails,
    get_pseudo_submitter, get_holders, get_update_cc_addrs)
from ietf.ipr.models import (IprDisclosureBase,GenericIprDisclosure,HolderIprDisclosure,
    ThirdPartyIprDisclosure)
    get_pseudo_submitter, get_holders, get_update_cc_addrs, UndeliverableIprResponseError)
from ietf.ipr.models import (IprDisclosureBase, GenericIprDisclosure, HolderIprDisclosure,
    ThirdPartyIprDisclosure, IprEvent)
from ietf.ipr.templatetags.ipr_filters import no_revisions_message
from ietf.ipr.utils import get_genitive, get_ipr_summary, ingest_response_email
from ietf.mailtrigger.utils import gather_address_lists

@@ -712,7 +713,7 @@ I would like to revoke this declaration.
        )
        self.assertIn(f'{settings.IDTRACKER_BASE_URL}{urlreverse("ietf.ipr.views.showlist")}', get_payload_text(outbox[1]).replace('\n',' '))

    def send_ipr_email_helper(self):
    def send_ipr_email_helper(self) -> tuple[str, IprEvent, HolderIprDisclosure]:
        ipr = HolderIprDisclosureFactory()
        url = urlreverse('ietf.ipr.views.email',kwargs={ "id": ipr.id })
        self.client.login(username="secretary", password="secretary+password")

@@ -730,10 +731,11 @@ I would like to revoke this declaration.
        q = Message.objects.filter(reply_to=data['reply_to'])
        self.assertEqual(q.count(),1)
        event = q[0].msgevents.first()
        assert event is not None
        self.assertTrue(event.response_past_due())
        self.assertEqual(len(outbox), 1)
        self.assertTrue('joe@test.com' in outbox[0]['To'])
        return data['reply_to'], event
        return data['reply_to'], event, ipr

    uninteresting_ipr_message_strings = [
        ("To: {to}\nCc: {cc}\nFrom: joe@test.com\nDate: {date}\nSubject: test\n"),

@@ -747,34 +749,46 @@ I would like to revoke this declaration.

    def test_process_response_email(self):
        # first send a mail
        reply_to, event = self.send_ipr_email_helper()
        reply_to, event, _ = self.send_ipr_email_helper()

        # test process response uninteresting messages
        addrs = gather_address_lists('ipr_disclosure_submitted').as_strings()
        for message_string in self.uninteresting_ipr_message_strings:
            result = process_response_email(
            process_response_email(
                message_string.format(
                    to=addrs.to,
                    cc=addrs.cc,
                    date=timezone.now().ctime()
                )
            )
            self.assertIsNone(result)

        # test process response
        message_string = """To: {}
From: joe@test.com
Date: {}
Subject: test
""".format(reply_to, timezone.now().ctime())
        result = process_response_email(message_string)

        self.assertIsInstance(result, Message)
        process_response_email(message_string)
        self.assertFalse(event.response_past_due())

        # test with an unmatchable message identifier
        bad_reply_to = re.sub(
            r"\+.{16}@",
            '+0123456789abcdef@',
            reply_to,
        )
        self.assertNotEqual(reply_to, bad_reply_to)
        message_string = f"""To: {bad_reply_to}
From: joe@test.com
Date: {timezone.now().ctime()}
Subject: test
"""
        with self.assertRaises(UndeliverableIprResponseError):
            process_response_email(message_string)

    def test_process_response_email_with_invalid_encoding(self):
        """Interesting emails with invalid encoding should be handled"""
        reply_to, _ = self.send_ipr_email_helper()
        reply_to, _, disclosure = self.send_ipr_email_helper()
        # test process response
        message_string = """To: {}
From: joe@test.com

@@ -782,8 +796,8 @@ Date: {}
Subject: test
""".format(reply_to, timezone.now().ctime())
        message_bytes = message_string.encode('utf8') + b'\nInvalid stuff: \xfe\xff\n'
        result = process_response_email(message_bytes)
        self.assertIsInstance(result, Message)
        process_response_email(message_bytes)
        result = IprEvent.objects.filter(disclosure=disclosure).first().message  # newest
        # \ufffd is a rhombus character with an inverse ?, used to replace invalid characters
        self.assertEqual(result.body, 'Invalid stuff: \ufffd\ufffd\n\n',  # not sure where the extra \n is from
                         'Invalid characters should be replaced with \ufffd characters')

@@ -798,8 +812,7 @@ Subject: test
            cc=addrs.cc,
            date=timezone.now().ctime(),
        ).encode('utf8') + b'\nInvalid stuff: \xfe\xff\n'
        result = process_response_email(message_bytes)
        self.assertIsNone(result)
        process_response_email(message_bytes)

    @override_settings(ADMINS=(("Some Admin", "admin@example.com"),))
    @mock.patch("ietf.ipr.utils.process_response_email")

@@ -816,8 +829,8 @@ Subject: test
        self.assertEqual(mock_process_response_email.call_args, mock.call(message))
        mock_process_response_email.reset_mock()

        mock_process_response_email.side_effect = None
        mock_process_response_email.return_value = None  # rejected message
        mock_process_response_email.side_effect = UndeliverableIprResponseError
        mock_process_response_email.return_value = None
        with self.assertRaises(EmailIngestionError) as context:
            ingest_response_email(message)
        self.assertIsNone(context.exception.as_emailmessage())  # should not send an email on a clean rejection

@@ -825,6 +838,14 @@ Subject: test
        self.assertEqual(mock_process_response_email.call_args, mock.call(message))
        mock_process_response_email.reset_mock()

        mock_process_response_email.side_effect = None
        mock_process_response_email.return_value = None  # ignored message
        ingest_response_email(message)  # should not raise an exception
        self.assertIsNone(context.exception.as_emailmessage())  # should not send an email on ignored message
        self.assertTrue(mock_process_response_email.called)
        self.assertEqual(mock_process_response_email.call_args, mock.call(message))
        mock_process_response_email.reset_mock()

        # successful operation
        mock_process_response_email.return_value = MessageFactory()
        ingest_response_email(message)
@@ -3,7 +3,7 @@

from textwrap import dedent

from ietf.ipr.mail import process_response_email
from ietf.ipr.mail import process_response_email, UndeliverableIprResponseError
from ietf.ipr.models import IprDocRel

import debug  # pyflakes:ignore

@@ -92,7 +92,11 @@ def generate_draft_recursive_txt():
def ingest_response_email(message: bytes):
    from ietf.api.views import EmailIngestionError  # avoid circular import
    try:
        result = process_response_email(message)
        process_response_email(message)
    except UndeliverableIprResponseError:
        # Message was rejected due to some problem the sender can fix, so bounce but don't send
        # an email to the admins
        raise EmailIngestionError("IPR response rejected", email_body=None)
    except Exception as err:
        # Message was rejected due to an unhandled exception. This is likely something
        # the admins need to address, so send them a copy of the email.

@@ -106,8 +110,3 @@ def ingest_response_email(message: bytes):
            email_original_message=message,
            email_attach_traceback=True,
        ) from err

    if result is None:
        # Message was rejected due to some problem the sender can fix, so bounce but don't send
        # an email to the admins
        raise EmailIngestionError("IPR response rejected", email_body=None)
@@ -0,0 +1,20 @@
# Copyright The IETF Trust 2024, All Rights Reserved

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("meeting", "0008_remove_schedtimesessassignment_notes"),
    ]

    operations = [
        migrations.AddField(
            model_name="session",
            name="meetecho_recording_name",
            field=models.CharField(
                blank=True, help_text="Name of the meetecho recording", max_length=64
            ),
        ),
    ]
@@ -1042,6 +1042,7 @@ class Session(models.Model):
     on_agenda = models.BooleanField(default=True, help_text='Is this session visible on the meeting agenda?')
     has_onsite_tool = models.BooleanField(default=False, help_text="Does this session use the officially supported onsite and remote tooling?")
     chat_room = models.CharField(blank=True, max_length=32, help_text='Name of Zulip stream, if different from group acronym')
+    meetecho_recording_name = models.CharField(blank=True, max_length=64, help_text="Name of the meetecho recording")

     tombstone_for = models.ForeignKey('Session', blank=True, null=True, help_text="This session is the tombstone for a session that was rescheduled", on_delete=models.CASCADE)

@@ -1332,17 +1333,23 @@ class Session(models.Model):
         return None

     def _session_recording_url_label(self):
+        otsa = self.official_timeslotassignment()
+        if otsa is None:
+            return None
         if self.meeting.type.slug == "ietf" and self.has_onsite_tool:
-            session_label = f"IETF{self.meeting.number}-{self.group.acronym.upper()}-{self.official_timeslotassignment().timeslot.time.strftime('%Y%m%d-%H%M')}"
+            session_label = f"IETF{self.meeting.number}-{self.group.acronym.upper()}-{otsa.timeslot.time.strftime('%Y%m%d-%H%M')}"
         else:
-            session_label = f"IETF-{self.group.acronym.upper()}-{self.official_timeslotassignment().timeslot.time.strftime('%Y%m%d-%H%M')}"
+            session_label = f"IETF-{self.group.acronym.upper()}-{otsa.timeslot.time.strftime('%Y%m%d-%H%M')}"
         return session_label

     def session_recording_url(self):
         url_formatter = getattr(settings, "MEETECHO_SESSION_RECORDING_URL", "")
         url = None
-        if url_formatter and self.video_stream_url:
-            url = url_formatter.format(session_label=self._session_recording_url_label())
+        name = self.meetecho_recording_name
+        if name is None or name.strip() == "":
+            name = self._session_recording_url_label()
+        if url_formatter.strip() != "" and name is not None:
+            url = url_formatter.format(session_label=name)
         return url

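A quick sketch of the resulting lookup order (illustrative values; the tests below exercise the same pattern):

    # Assuming settings.MEETECHO_SESSION_RECORDING_URL = "http://player.example.com?{session_label}"
    session.meetecho_recording_name = ""            # blank: fall back to the computed label
    session.session_recording_url()                 # e.g. "http://player.example.com?IETF119-MARS-20240318-0900"
    session.meetecho_recording_name = "actualname"  # an explicitly set recording name wins
    session.session_recording_url()                 # "http://player.example.com?actualname"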
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2021, All Rights Reserved
+# Copyright The IETF Trust 2021-2024, All Rights Reserved
 # -*- coding: utf-8 -*-
 """Tests of models in the Meeting application"""
 import datetime
@@ -172,6 +172,10 @@ class SessionTests(TestCase):
         settings.MEETECHO_SESSION_RECORDING_URL = "http://player.example.com?{session_label}"
         self.assertEqual(session.session_recording_url(), "http://player.example.com?LABEL")

+        session.meetecho_recording_name="actualname"
+        session.save()
+        self.assertEqual(session.session_recording_url(), "http://player.example.com?actualname")
+
     def test_session_recording_url_label_ietf(self):
         session = SessionFactory(
             meeting__type_id='ietf',

@@ -7173,6 +7173,20 @@ class SessionTests(TestCase):
             status_id='schedw',
             add_to_schedule=False,
         )
+        session_with_none_purpose = SessionFactory(
+            meeting=meeting,
+            group__parent=area,
+            purpose_id="none",
+            status_id="schedw",
+            add_to_schedule=False,
+        )
+        tutorial_session = SessionFactory(
+            meeting=meeting,
+            group__parent=area,
+            purpose_id="tutorial",
+            status_id="schedw",
+            add_to_schedule=False,
+        )
         def _sreq_edit_link(sess):
             return urlreverse(
                 'ietf.secr.sreq.views.edit',

@@ -7211,6 +7225,8 @@ class SessionTests(TestCase):
         self.assertContains(r, _sreq_edit_link(proposed_wg_session))  # link to the session request
         self.assertContains(r, rg_session.group.acronym)
         self.assertContains(r, _sreq_edit_link(rg_session))  # link to the session request
+        self.assertContains(r, session_with_none_purpose.group.acronym)
+        self.assertContains(r, tutorial_session.group.acronym)
         # check headings - note that the special types (has_meetings, etc) do not have a group parent
         # so they show up in 'other'
         q = PyQuery(r.content)

@@ -7218,6 +7234,22 @@ class SessionTests(TestCase):
         self.assertEqual(len(q('h2#other-groups')), 1)
         self.assertEqual(len(q('h2#irtf')), 1)  # rg group has irtf group as parent

+        # check rounded pills
+        self.assertNotContains(  # no rounded pill for sessions with regular purpose
+            r,
+            '<span class="badge rounded-pill text-bg-info">Regular</span>',
+            html=True,
+        )
+        self.assertNotContains(  # no rounded pill for session with no purpose specified
+            r,
+            '<span class="badge rounded-pill text-bg-info">None</span>',
+            html=True,
+        )
+        self.assertContains(  # rounded pill for session with non-regular purpose
+            r,
+            '<span class="badge rounded-pill text-bg-info">Tutorial</span>',
+            html=True,
+        )
+
     def test_request_minutes(self):
         meeting = MeetingFactory(type_id='ietf')

@@ -1840,7 +1840,7 @@ def agenda_extract_slide(item):
         "id": item.id,
         "title": item.title,
         "rev": item.rev,
-        "url": item.get_versionless_href(),
+        "url": item.get_href(),
         "ext": item.file_extension(),
     }

@@ -4270,6 +4270,45 @@ class OldUploadRedirect(RedirectView):
     def get_redirect_url(self, **kwargs):
         return reverse_lazy('ietf.meeting.views.session_details', kwargs=self.kwargs)


+@require_api_key
+@role_required("Recording Manager")
+@csrf_exempt
+def api_set_meetecho_recording_name(request):
+    """Set name for meetecho recording
+
+    parameters:
+        apikey: the poster's personal API key
+        session_id: id of the session to update
+        name: the name to use for the recording at meetecho player
+    """
+    def err(code, text):
+        return HttpResponse(text, status=code, content_type='text/plain')
+
+    if request.method != "POST":
+        return HttpResponseNotAllowed(
+            content="Method not allowed", content_type="text/plain", permitted_methods=('POST',)
+        )
+
+    session_id = request.POST.get('session_id', None)
+    if session_id is None:
+        return err(400, 'Missing session_id parameter')
+    name = request.POST.get('name', None)
+    if name is None:
+        return err(400, 'Missing name parameter')
+
+    try:
+        session = Session.objects.get(pk=session_id)
+    except Session.DoesNotExist:
+        return err(400, f"Session not found with session_id '{session_id}'")
+    except ValueError:
+        return err(400, f"Invalid session_id: {session_id}")
+
+    session.meetecho_recording_name = name
+    session.save()
+
+    return HttpResponse("Done", status=200, content_type='text/plain')
+
+
 @require_api_key
 @role_required('Recording Manager')
 @csrf_exempt

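A hedged sketch of how a Recording Manager client might drive the new endpoint from a test (the URL name is an assumption based on the view's location in ietf.meeting.views):

    url = urlreverse("ietf.meeting.views.api_set_meetecho_recording_name")  # assumed URL name
    r = self.client.post(url, {"apikey": apikey, "session_id": session.pk, "name": "actualname"})
    self.assertContains(r, "Done", status_code=200)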
@@ -59,7 +59,7 @@ def name_parts(name):
         last = parts[0]
     if len(parts) >= 2:
         # Handle reverse-order names with uppercase surname correctly
-        if len(first)>1 and re.search("^[A-Z-]+$", first):
+        if len(first)>1 and re.search("^[A-Z-]+$", first) and first != "JP":
             first, last = last, first.capitalize()
     # Handle exception for RFC Editor
     if (prefix, first, middle, last, suffix) == ('', 'Editor', '', 'Rfc', ''):

@@ -173,9 +173,7 @@ class PersonTests(TestCase):

         url = urlreverse("ietf.person.views.photo", kwargs={ "email_or_name": person.plain_name()})
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 300)
-        self.assertIn("bazquux@example.com", r.content.decode())
-        self.assertIn("foobar@example.com", r.content.decode())
+        self.assertEqual(r.status_code, 404)

     def test_name_methods(self):
         person = PersonFactory(name="Dr. Jens F. Möller", )

@@ -76,7 +76,7 @@ def profile(request, email_or_name):
 def photo(request, email_or_name):
     persons = lookup_persons(email_or_name)
     if len(persons) > 1:
-        return HttpResponse(r"\r\n".join([p.user.username for p in persons]), status=300)
+        raise Http404("No photo found")
     person = persons[0]
     if not person.photo:
         raise Http404("No photo found")

@@ -13,9 +13,10 @@ from ietf.group.factories import GroupFactory, RoleFactory
 from ietf.meeting.models import Session, ResourceAssociation, SchedulingEvent, Constraint
 from ietf.meeting.factories import MeetingFactory, SessionFactory
 from ietf.name.models import ConstraintName, TimerangeName
+from ietf.person.factories import PersonFactory
 from ietf.person.models import Person
 from ietf.secr.sreq.forms import SessionForm
-from ietf.utils.mail import outbox, empty_outbox, get_payload_text
+from ietf.utils.mail import outbox, empty_outbox, get_payload_text, send_mail
 from ietf.utils.timezone import date_today

@@ -78,6 +79,32 @@ class SessionRequestTestCase(TestCase):
         self.assertRedirects(r, reverse('ietf.secr.sreq.views.main'))
         self.assertEqual(SchedulingEvent.objects.filter(session=session).order_by('-id')[0].status_id, 'deleted')

+    def test_cancel_notification_msg(self):
+        to = "<iesg-secretary@ietf.org>"
+        subject = "Dummy subject"
+        template = "sreq/session_cancel_notification.txt"
+        meeting = MeetingFactory(type_id="ietf", date=date_today())
+        requester = PersonFactory(name="James O'Rourke", user__username="jimorourke")
+        context = {"meeting": meeting, "requester": requester}
+        cc = "cc.a@example.com, cc.b@example.com"
+        bcc = "bcc@example.com"
+
+        msg = send_mail(
+            None,
+            to,
+            None,
+            subject,
+            template,
+            context,
+            cc=cc,
+            bcc=bcc,
+        )
+        self.assertEqual(requester.name, "James O'Rourke")  # note ' (single quote) in the name
+        self.assertIn(
+            f"A request to cancel a meeting session has just been submitted by {requester.name}.",
+            get_payload_text(msg),
+        )
+
     def test_edit(self):
         meeting = MeetingFactory(type_id='ietf', date=date_today())
         mars = RoleFactory(name_id='chair', person__user__username='marschairman', group__acronym='mars').group

@@ -701,6 +728,33 @@ class SubmitRequestCase(TestCase):
         self.assertNotIn('1 Hour, 1 Hour, 1 Hour', notification_payload)
         self.assertNotIn('The third session requires your approval', notification_payload)

+    def test_request_notification_msg(self):
+        to = "<iesg-secretary@ietf.org>"
+        subject = "Dummy subject"
+        template = "sreq/session_request_notification.txt"
+        header = "A new"
+        meeting = MeetingFactory(type_id="ietf", date=date_today())
+        requester = PersonFactory(name="James O'Rourke", user__username="jimorourke")
+        context = {"header": header, "meeting": meeting, "requester": requester}
+        cc = "cc.a@example.com, cc.b@example.com"
+        bcc = "bcc@example.com"
+
+        msg = send_mail(
+            None,
+            to,
+            None,
+            subject,
+            template,
+            context,
+            cc=cc,
+            bcc=bcc,
+        )
+        self.assertEqual(requester.name, "James O'Rourke")  # note ' (single quote) in the name
+        self.assertIn(
+            f"{header} meeting session request has just been submitted by {requester.name}.",
+            get_payload_text(msg),
+        )
+
     def test_request_notification_third_session(self):
         meeting = MeetingFactory(type_id='ietf', date=date_today())
         ad = Person.objects.get(user__username='ad')

@@ -1,4 +1,3 @@
-{% load ams_filters %}
-
-A request to cancel a meeting session has just been submitted by {{ requester }}.
-
+{% autoescape off %}{% load ams_filters %}
+
+A request to cancel a meeting session has just been submitted by {{ requester }}.{% endautoescape %}

@@ -1,5 +1,5 @@
-{% load ams_filters %}
+{% autoescape off %}{% load ams_filters %}

 {% filter wordwrap:78 %}{{ header }} meeting session request has just been submitted by {{ requester }}.{% endfilter %}

-{% include "includes/session_info.txt" %}
+{% include "includes/session_info.txt" %}{% endautoescape %}

@@ -344,7 +344,7 @@ div:is(.artwork, .sourcecode) pre {
     flex: 0 0 content;
     margin: 0;
     max-width: 72ch;
-    overflow: auto;
+    overflow: auto clip;
 }
 div:is(.artwork, .sourcecode) .pilcrow {
     flex: 0 0 1ch;

@@ -2,19 +2,16 @@
 # -*- coding: utf-8 -*-


 import io
 import os
 import re
 import datetime
 import email
 import sys
 import tempfile
 import xml2rfc
 from contextlib import ExitStack

 from email.utils import formataddr
 from typing import Tuple
 from unidecode import unidecode

 from django import forms
 from django.conf import settings

@@ -37,10 +34,8 @@ from ietf.submit.models import Submission, Preapproval
 from ietf.submit.utils import validate_submission_name, validate_submission_rev, validate_submission_document_date, remote_ip
 from ietf.submit.parsers.plain_parser import PlainParser
 from ietf.submit.parsers.xml_parser import XMLParser
 from ietf.utils import log
 from ietf.utils.draft import PlaintextDraft
 from ietf.utils.fields import ModelMultipleChoiceField
 from ietf.utils.text import normalize_text
 from ietf.utils.timezone import date_today
 from ietf.utils.xmldraft import InvalidXMLError, XMLDraft, XMLParseError

@@ -371,273 +366,6 @@ class SubmissionBaseUploadForm(forms.Form):
         return None


-class DeprecatedSubmissionBaseUploadForm(SubmissionBaseUploadForm):
-    def clean(self):
-        def format_messages(where, e, log):
-            out = log.write_out.getvalue().splitlines()
-            err = log.write_err.getvalue().splitlines()
-            m = str(e)
-            if m:
-                m = [ m ]
-            else:
-                import traceback
-                typ, val, tb = sys.exc_info()
-                m = traceback.format_exception(typ, val, tb)
-                m = [ l.replace('\n ', ':\n ') for l in m ]
-            msgs = [s for s in (["Error from xml2rfc (%s):" % (where,)] + m + out + err) if s]
-            return msgs
-
-        if self.shutdown and not has_role(self.request.user, "Secretariat"):
-            raise forms.ValidationError(self.cutoff_warning)
-
-        for ext in self.formats:
-            f = self.cleaned_data.get(ext, None)
-            if not f:
-                continue
-            self.file_types.append('.%s' % ext)
-        if not ('.txt' in self.file_types or '.xml' in self.file_types):
-            if not self.errors:
-                raise forms.ValidationError('Unexpected submission file types; found %s, but %s is required' % (', '.join(self.file_types), ' or '.join(self.base_formats)))
-
-        #debug.show('self.cleaned_data["xml"]')
-        if self.cleaned_data.get('xml'):
-            #if not self.cleaned_data.get('txt'):
-            xml_file = self.cleaned_data.get('xml')
-            file_name = {}
-            xml2rfc.log.write_out = io.StringIO()  # open(os.devnull, "w")
-            xml2rfc.log.write_err = io.StringIO()  # open(os.devnull, "w")
-            tfn = None
-            with ExitStack() as stack:
-                @stack.callback
-                def cleanup():  # called when context exited, even in case of exception
-                    if tfn is not None:
-                        os.unlink(tfn)
-
-                # We need to write the xml file to disk in order to hand it
-                # over to the xml parser. XXX FIXME: investigate updating
-                # xml2rfc to be able to work with file handles to in-memory
-                # files.
-                name, ext = os.path.splitext(os.path.basename(xml_file.name))
-                with tempfile.NamedTemporaryFile(prefix=name+'-',
-                                                 suffix='.xml',
-                                                 mode='wb+',
-                                                 delete=False) as tf:
-                    tfn = tf.name
-                    for chunk in xml_file.chunks():
-                        tf.write(chunk)
-
-                parser = xml2rfc.XmlRfcParser(str(tfn), quiet=True)
-                # --- Parse the xml ---
-                try:
-                    self.xmltree = parser.parse(remove_comments=False)
-                    # If we have v2, run it through v2v3. Keep track of the submitted version, though.
-                    self.xmlroot = self.xmltree.getroot()
-                    self.xml_version = self.xmlroot.get('version', '2')
-                    if self.xml_version == '2':
-                        v2v3 = xml2rfc.V2v3XmlWriter(self.xmltree)
-                        self.xmltree.tree = v2v3.convert2to3()
-                        self.xmlroot = self.xmltree.getroot()  # update to the new root
-
-                    draftname = self.xmlroot.attrib.get('docName')
-                    if draftname is None:
-                        self.add_error('xml', "No docName attribute found in the xml root element")
-                    name_error = validate_submission_name(draftname)
-                    if name_error:
-                        self.add_error('xml', name_error)  # This is a critical and immediate failure - do not proceed with other validation.
-                    else:
-                        revmatch = re.search("-[0-9][0-9]$", draftname)
-                        if revmatch:
-                            self.revision = draftname[-2:]
-                            self.filename = draftname[:-3]
-                        else:
-                            self.revision = None
-                            self.filename = draftname
-                        self.title = self.xmlroot.findtext('front/title').strip()
-                        if type(self.title) is str:
-                            self.title = unidecode(self.title)
-                        self.title = normalize_text(self.title)
-                        self.abstract = (self.xmlroot.findtext('front/abstract') or '').strip()
-                        if type(self.abstract) is str:
-                            self.abstract = unidecode(self.abstract)
-                        author_info = self.xmlroot.findall('front/author')
-                        for author in author_info:
-                            info = {
-                                "name": author.attrib.get('fullname'),
-                                "email": author.findtext('address/email'),
-                                "affiliation": author.findtext('organization'),
-                            }
-                            elem = author.find('address/postal/country')
-                            if elem != None:
-                                ascii_country = elem.get('ascii', None)
-                                info['country'] = ascii_country if ascii_country else elem.text
-
-                            for item in info:
-                                if info[item]:
-                                    info[item] = info[item].strip()
-                            self.authors.append(info)
-
-                    # --- Prep the xml ---
-                    file_name['xml'] = os.path.join(settings.IDSUBMIT_STAGING_PATH, '%s-%s%s' % (self.filename, self.revision, ext))
-                    try:
-                        prep = xml2rfc.PrepToolWriter(self.xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET])
-                        prep.options.accept_prepped = True
-                        self.xmltree.tree = prep.prep()
-                        if self.xmltree.tree == None:
-                            self.add_error('xml', "Error from xml2rfc (prep): %s" % prep.errors)
-                    except Exception as e:
-                        msgs = format_messages('prep', e, xml2rfc.log)
-                        self.add_error('xml', msgs)
-
-                    # --- Convert to txt ---
-                    if not ('txt' in self.cleaned_data and self.cleaned_data['txt']):
-                        file_name['txt'] = os.path.join(settings.IDSUBMIT_STAGING_PATH, '%s-%s.txt' % (self.filename, self.revision))
-                        try:
-                            writer = xml2rfc.TextWriter(self.xmltree, quiet=True)
-                            writer.options.accept_prepped = True
-                            writer.write(file_name['txt'])
-                            log.log("In %s: xml2rfc %s generated %s from %s (version %s)" %
-                                    ( os.path.dirname(file_name['xml']),
-                                      xml2rfc.__version__,
-                                      os.path.basename(file_name['txt']),
-                                      os.path.basename(file_name['xml']),
-                                      self.xml_version))
-                        except Exception as e:
-                            msgs = format_messages('txt', e, xml2rfc.log)
-                            log.log('\n'.join(msgs))
-                            self.add_error('xml', msgs)
-
-                    # --- Convert to html ---
-                    try:
-                        file_name['html'] = os.path.join(settings.IDSUBMIT_STAGING_PATH, '%s-%s.html' % (self.filename, self.revision))
-                        writer = xml2rfc.HtmlWriter(self.xmltree, quiet=True)
-                        writer.write(file_name['html'])
-                        self.file_types.append('.html')
-                        log.log("In %s: xml2rfc %s generated %s from %s (version %s)" %
-                                ( os.path.dirname(file_name['xml']),
-                                  xml2rfc.__version__,
-                                  os.path.basename(file_name['html']),
-                                  os.path.basename(file_name['xml']),
-                                  self.xml_version))
-                    except Exception as e:
-                        msgs = format_messages('html', e, xml2rfc.log)
-                        self.add_error('xml', msgs)
-
-                except Exception as e:
-                    try:
-                        msgs = format_messages('txt', e, xml2rfc.log)
-                        log.log('\n'.join(msgs))
-                        self.add_error('xml', msgs)
-                    except Exception:
-                        self.add_error('xml', "An exception occurred when trying to process the XML file: %s" % e)
-
-            # The following errors are likely noise if we have previous field
-            # errors:
-            if self.errors:
-                raise forms.ValidationError('')
-
-        if self.cleaned_data.get('txt'):
-            # try to parse it
-            txt_file = self.cleaned_data['txt']
-            txt_file.seek(0)
-            bytes = txt_file.read()
-            txt_file.seek(0)
-            try:
-                text = bytes.decode(PlainParser.encoding)
-                self.parsed_draft = PlaintextDraft(text, txt_file.name)
-                if self.filename == None:
-                    self.filename = self.parsed_draft.filename
-                elif self.filename != self.parsed_draft.filename:
-                    self.add_error('txt', "Inconsistent name information: xml:%s, txt:%s" % (self.filename, self.parsed_draft.filename))
-                if self.revision == None:
-                    self.revision = self.parsed_draft.revision
-                elif self.revision != self.parsed_draft.revision:
-                    self.add_error('txt', "Inconsistent revision information: xml:%s, txt:%s" % (self.revision, self.parsed_draft.revision))
-                if self.title == None:
-                    self.title = self.parsed_draft.get_title()
-                elif self.title != self.parsed_draft.get_title():
-                    self.add_error('txt', "Inconsistent title information: xml:%s, txt:%s" % (self.title, self.parsed_draft.get_title()))
-            except (UnicodeDecodeError, LookupError) as e:
-                self.add_error('txt', 'Failed decoding the uploaded file: "%s"' % str(e))
-
-        rev_error = validate_submission_rev(self.filename, self.revision)
-        if rev_error:
-            raise forms.ValidationError(rev_error)
-
-        # The following errors are likely noise if we have previous field
-        # errors:
-        if self.errors:
-            raise forms.ValidationError('')
-
-        if not self.filename:
-            raise forms.ValidationError("Could not extract a valid Internet-Draft name from the upload. "
-                "To fix this in a text upload, please make sure that the full Internet-Draft name including "
-                "revision number appears centered on its own line below the document title on the "
-                "first page. In an xml upload, please make sure that the top-level <rfc/> "
-                "element has a docName attribute which provides the full Internet-Draft name including "
-                "revision number.")
-
-        if not self.revision:
-            raise forms.ValidationError("Could not extract a valid Internet-Draft revision from the upload. "
-                "To fix this in a text upload, please make sure that the full Internet-Draft name including "
-                "revision number appears centered on its own line below the document title on the "
-                "first page. In an xml upload, please make sure that the top-level <rfc/> "
-                "element has a docName attribute which provides the full Internet-Draft name including "
-                "revision number.")
-
-        if not self.title:
-            raise forms.ValidationError("Could not extract a valid title from the upload")
-
-        if self.cleaned_data.get('txt') or self.cleaned_data.get('xml'):
-            # check group
-            self.group = self.deduce_group(self.filename)
-
-            # check existing
-            existing = Submission.objects.filter(name=self.filename, rev=self.revision).exclude(state__in=("posted", "cancel", "waiting-for-draft"))
-            if existing:
-                raise forms.ValidationError(mark_safe('A submission with same name and revision is currently being processed. <a href="%s">Check the status here.</a>' % urlreverse("ietf.submit.views.submission_status", kwargs={ 'submission_id': existing[0].pk })))
-
-            # cut-off
-            if self.revision == '00' and self.in_first_cut_off:
-                raise forms.ValidationError(mark_safe(self.cutoff_warning))
-
-            # check thresholds
-            today = date_today()
-
-            self.check_submissions_thresholds(
-                "for the Internet-Draft %s" % self.filename,
-                dict(name=self.filename, rev=self.revision, submission_date=today),
-                settings.IDSUBMIT_MAX_DAILY_SAME_DRAFT_NAME, settings.IDSUBMIT_MAX_DAILY_SAME_DRAFT_NAME_SIZE,
-            )
-            self.check_submissions_thresholds(
-                "for the same submitter",
-                dict(remote_ip=self.remote_ip, submission_date=today),
-                settings.IDSUBMIT_MAX_DAILY_SAME_SUBMITTER, settings.IDSUBMIT_MAX_DAILY_SAME_SUBMITTER_SIZE,
-            )
-            if self.group:
-                self.check_submissions_thresholds(
-                    "for the group \"%s\"" % (self.group.acronym),
-                    dict(group=self.group, submission_date=today),
-                    settings.IDSUBMIT_MAX_DAILY_SAME_GROUP, settings.IDSUBMIT_MAX_DAILY_SAME_GROUP_SIZE,
-                )
-            self.check_submissions_thresholds(
-                "across all submitters",
-                dict(submission_date=today),
-                settings.IDSUBMIT_MAX_DAILY_SUBMISSIONS, settings.IDSUBMIT_MAX_DAILY_SUBMISSIONS_SIZE,
-            )
-
-        return super().clean()
-
-
-class DeprecatedSubmissionAutoUploadForm(DeprecatedSubmissionBaseUploadForm):
-    """Full-service upload form, replaced by the asynchronous version"""
-    user = forms.EmailField(required=True)
-
-    def __init__(self, request, *args, **kwargs):
-        super(DeprecatedSubmissionAutoUploadForm, self).__init__(request, *args, **kwargs)
-        self.formats = ['xml', ]
-        self.base_formats = ['xml', ]
-
-
 class SubmissionManualUploadForm(SubmissionBaseUploadForm):
     txt = forms.FileField(label='.txt format', required=False)
     formats = SubmissionBaseUploadForm.formats + ('txt',)

@@ -676,6 +404,7 @@ class SubmissionManualUploadForm(SubmissionBaseUploadForm):
         )
         return txt_file

+
 class SubmissionAutoUploadForm(SubmissionBaseUploadForm):
     user = forms.EmailField(required=True)
     replaces = forms.CharField(required=False, max_length=1000, strip=True)

@@ -930,3 +659,13 @@ class MessageModelForm(forms.ModelForm):
         self.fields['frm'].label='From'
         self.fields['frm'].widget.attrs['readonly'] = True
         self.fields['reply_to'].widget.attrs['readonly'] = True
+
+
+class SubmissionSearchForm(forms.Form):
+    """Form used for search_submission"""
+
+    name = forms.CharField(max_length=255, required=True, label="I-D name")
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.fields["name"].widget.attrs["placeholder"] = "draft-..."

@@ -27,11 +27,6 @@ from django.utils import timezone
 from django.utils.encoding import force_str
 import debug  # pyflakes:ignore

-from ietf.submit.utils import (expirable_submissions, expire_submission, find_submission_filenames,
-    post_submission, validate_submission_name, validate_submission_rev,
-    process_and_accept_uploaded_submission, SubmissionError, process_submission_text,
-    process_submission_xml, process_uploaded_submission,
-    process_and_validate_submission)
 from ietf.doc.factories import (DocumentFactory, WgDraftFactory, IndividualDraftFactory,
     ReviewFactory, WgRfcFactory)
 from ietf.doc.models import ( Document, DocEvent, State,

@@ -44,12 +39,17 @@ from ietf.meeting.models import Meeting
 from ietf.meeting.factories import MeetingFactory
 from ietf.name.models import DraftSubmissionStateName, FormalLanguageName
 from ietf.person.models import Person
-from ietf.person.factories import UserFactory, PersonFactory, EmailFactory
+from ietf.person.factories import UserFactory, PersonFactory
 from ietf.submit.factories import SubmissionFactory, SubmissionExtResourceFactory
 from ietf.submit.forms import SubmissionBaseUploadForm, SubmissionAutoUploadForm
 from ietf.submit.models import Submission, Preapproval, SubmissionExtResource
 from ietf.submit.tasks import cancel_stale_submissions, process_and_accept_uploaded_submission_task
-from ietf.submit.utils import apply_yang_checker_to_draft, run_all_yang_model_checks
+from ietf.submit.utils import (expirable_submissions, expire_submission, find_submission_filenames,
+    post_submission, validate_submission_name, validate_submission_rev,
+    process_and_accept_uploaded_submission, SubmissionError, process_submission_text,
+    process_submission_xml, process_uploaded_submission,
+    process_and_validate_submission, apply_yang_checker_to_draft,
+    run_all_yang_model_checks)
 from ietf.utils import tool_version
 from ietf.utils.accesstoken import generate_access_token
 from ietf.utils.mail import outbox, get_payload_text

@@ -2345,6 +2345,12 @@ class ApiSubmissionTests(BaseSubmitTestCase):
         super().setUp()
         MeetingFactory(type_id='ietf', date=date_today()+datetime.timedelta(days=60))

+    def test_api_submit_tombstone(self):
+        """Tombstone for obsolete API endpoint should return 410 Gone"""
+        url = urlreverse("ietf.submit.views.api_submit_tombstone")
+        self.assertEqual(self.client.get(url).status_code, 410)
+        self.assertEqual(self.client.post(url).status_code, 410)
+
     def test_upload_draft(self):
         """api_submission accepts a submission and queues it for processing"""
         url = urlreverse('ietf.submit.views.api_submission')

@@ -3191,141 +3197,6 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
         self.assertEqual(subm.state_id, "cancel")
         self.assertEqual(subm.submissionevent_set.count(), 2)


-class ApiSubmitTests(BaseSubmitTestCase):
-    def setUp(self):
-        super().setUp()
-        # break early in case of missing configuration
-        self.assertTrue(os.path.exists(settings.IDSUBMIT_IDNITS_BINARY))
-        MeetingFactory(type_id='ietf', date=date_today()+datetime.timedelta(days=60))
-
-    def do_post_submission(self, rev, author=None, name=None, group=None, email=None, title=None, year=None):
-        url = urlreverse('ietf.submit.views.api_submit')
-        if author is None:
-            author = PersonFactory()
-        if name is None:
-            slug = re.sub('[^a-z0-9-]+', '', author.ascii_parts()[3].lower())
-            name = 'draft-%s-foo' % slug
-        if email is None:
-            email = author.user.username
-        # submit
-        data = {}
-        data['xml'], author = submission_file(f'{name}-{rev}', f'{name}-{rev}.xml', group, "test_submission.xml", author=author, email=email, title=title, year=year)
-        data['user'] = email
-        r = self.client.post(url, data)
-        return r, author, name
-
-    def test_api_submit_info(self):
-        url = urlreverse('ietf.submit.views.api_submit')
-        r = self.client.get(url)
-        expected = "A simplified Internet-Draft submission interface, intended for automation"
-        self.assertContains(r, expected, status_code=200)
-
-    def test_api_submit_bad_method(self):
-        url = urlreverse('ietf.submit.views.api_submit')
-        r = self.client.put(url)
-        self.assertEqual(r.status_code, 405)
-
-    def test_api_submit_ok(self):
-        r, author, name = self.do_post_submission('00')
-        expected = "Upload of %s OK, confirmation requests sent to:\n  %s" % (name, author.formatted_email().replace('\n',''))
-        self.assertContains(r, expected, status_code=200)
-
-    def test_api_submit_secondary_email_active(self):
-        person = PersonFactory()
-        email = EmailFactory(person=person)
-        r, author, name = self.do_post_submission('00', author=person, email=email.address)
-        for expected in [
-            "Upload of %s OK, confirmation requests sent to:" % (name, ),
-            author.formatted_email().replace('\n',''),
-        ]:
-            self.assertContains(r, expected, status_code=200)
-
-    def test_api_submit_secondary_email_inactive(self):
-        person = PersonFactory()
-        prim = person.email()
-        prim.primary = True
-        prim.save()
-        email = EmailFactory(person=person, active=False)
-        r, author, name = self.do_post_submission('00', author=person, email=email.address)
-        expected = "No such user: %s" % email.address
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_no_user(self):
-        email='nonexistant.user@example.org'
-        r, author, name = self.do_post_submission('00', email=email)
-        expected = "No such user: %s" % email
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_no_person(self):
-        user = UserFactory()
-        email = user.username
-        r, author, name = self.do_post_submission('00', email=email)
-        expected = "No person with username %s" % email
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_wrong_revision(self):
-        r, author, name = self.do_post_submission('01')
-        expected = "Invalid revision (revision 00 is expected)"
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_update_existing_submissiondocevent_rev(self):
-        draft, _ = create_draft_submission_with_rev_mismatch(rev='01')
-        r, _, __ = self.do_post_submission(rev='01', name=draft.name)
-        expected = "Submission failed"
-        self.assertContains(r, expected, status_code=409)
-
-    def test_api_submit_update_later_submissiondocevent_rev(self):
-        draft, _ = create_draft_submission_with_rev_mismatch(rev='02')
-        r, _, __ = self.do_post_submission(rev='01', name=draft.name)
-        expected = "Submission failed"
-        self.assertContains(r, expected, status_code=409)
-
-    def test_api_submit_pending_submission(self):
-        r, author, name = self.do_post_submission('00')
-        expected = "Upload of"
-        self.assertContains(r, expected, status_code=200)
-        r, author, name = self.do_post_submission('00', author=author, name=name)
-        expected = "A submission with same name and revision is currently being processed"
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_no_title(self):
-        r, author, name = self.do_post_submission('00', title=" ")
-        expected = "Could not extract a valid title from the upload"
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_failed_idnits(self):
-        # `year` on the next line must be leap year or this test will fail every Feb 29
-        r, author, name = self.do_post_submission('00', year="2012")
-        expected = "Document date must be within 3 days of submission date"
-        self.assertContains(r, expected, status_code=400)
-
-    def test_api_submit_keeps_extresources(self):
-        """API submit should not disturb doc external resources
-
-        Tests that the submission inherits the existing doc's docextresource_set.
-        Relies on separate testing that Submission external_resources will be
-        handled appropriately.
-        """
-        draft = WgDraftFactory()
-
-        # add an external resource
-        self.assertEqual(draft.docextresource_set.count(), 0)
-        extres = draft.docextresource_set.create(
-            name_id='faq',
-            display_name='this is a display name',
-            value='https://example.com/faq-for-test.html',
-        )
-
-        r, _, __ = self.do_post_submission('01', name=draft.name)
-        self.assertEqual(r.status_code, 200)
-        # draft = Document.objects.get(pk=draft.pk)  # update the draft
-        sub = Submission.objects.get(name=draft.name)
-        self.assertEqual(
-            [str(r) for r in sub.external_resources.all()],
-            [str(extres)],
-        )
-
-
 class RefsTests(BaseSubmitTestCase):

@@ -3513,3 +3384,29 @@ class YangCheckerTests(TestCase):
         apply_yang_checker_to_draft(checker, draft)
         self.assertEqual(checker.check_file_txt.call_args, mock.call(draft.get_file_name()))

+
+@override_settings(IDSUBMIT_REPOSITORY_PATH="/some/path/", IDSUBMIT_STAGING_PATH="/some/other/path")
+class SubmissionErrorTests(TestCase):
+    def test_sanitize_message(self):
+        sanitized = SubmissionError.sanitize_message(
+            "This refers to /some/path/with-a-file\n"
+            "and also /some/other/path/with-a-different-file isn't that neat?\n"
+            "and has /some/path//////with-slashes"
+        )
+        self.assertEqual(
+            sanitized,
+            "This refers to **/with-a-file\n"
+            "and also **/with-a-different-file isn't that neat?\n"
+            "and has **/with-slashes"
+        )
+
+    @mock.patch.object(SubmissionError, "sanitize_message")
+    def test_submissionerror(self, mock_sanitize_message):
+        SubmissionError()
+        self.assertFalse(mock_sanitize_message.called)
+        SubmissionError("hi", "there")
+        self.assertTrue(mock_sanitize_message.called)
+        self.assertCountEqual(
+            mock_sanitize_message.call_args_list,
+            [mock.call("hi"), mock.call("there")],
+        )

@@ -17,6 +17,7 @@ from pathlib import Path
 from shutil import move
 from typing import Optional, Union  # pyflakes:ignore
 from unidecode import unidecode
+from xml2rfc import RfcWriterError
 from xym import xym

 from django.conf import settings

@@ -918,8 +919,51 @@ def accept_submission_requires_group_approval(submission):


 class SubmissionError(Exception):
-    """Exception for errors during submission processing"""
-    pass
+    """Exception for errors during submission processing
+
+    Sanitizes paths appearing in exception messages.
+    """
+    def __init__(self, *args):
+        if len(args) > 0:
+            args = (self.sanitize_message(arg) for arg in args)
+        super().__init__(*args)
+
+    @staticmethod
+    def sanitize_message(msg):
+        # Paths likely to appear in submission-related errors
+        paths = [
+            p for p in (
+                getattr(settings, "ALL_ID_DOWNLOAD_DIR", None),
+                getattr(settings, "BIBXML_BASE_PATH", None),
+                getattr(settings, "DERIVED_DIR", None),
+                getattr(settings, "FTP_DIR", None),
+                getattr(settings, "IDSUBMIT_REPOSITORY_PATH", None),
+                getattr(settings, "IDSUBMIT_STAGING_PATH", None),
+                getattr(settings, "INTERNET_ALL_DRAFTS_ARCHIVE_DIR", None),
+                getattr(settings, "INTERNET_DRAFT_PATH", None),
+                getattr(settings, "INTERNET_DRAFT_ARCHIVE_DIR", None),
+                getattr(settings, "INTERNET_DRAFT_PDF_PATH", None),
+                getattr(settings, "RFC_PATH", None),
+                getattr(settings, "SUBMIT_YANG_CATALOG_MODEL_DIR", None),
+                getattr(settings, "SUBMIT_YANG_DRAFT_MODEL_DIR", None),
+                getattr(settings, "SUBMIT_YANG_IANA_MODEL_DIR", None),
+                getattr(settings, "SUBMIT_YANG_RFC_MODEL_DIR", None),
+                "/tmp/",
+            ) if p is not None
+        ]
+        return re.sub(fr"({'|'.join(paths)})/*", "**/", msg)
+
+
+class XmlRfcError(SubmissionError):
+    """SubmissionError caused by xml2rfc
+
+    Includes the output from xml2rfc, if any, in xml2rfc_stdout / xml2rfc_stderr
+    """
+    def __init__(self, *args, xml2rfc_stdout: str, xml2rfc_stderr: str):
+        super().__init__(*args)
+        self.xml2rfc_stderr = xml2rfc_stderr
+        self.xml2rfc_stdout = xml2rfc_stdout
+
+
 class InconsistentRevisionError(SubmissionError):
     """SubmissionError caused by an inconsistent revision"""

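A sketch of the sanitization in action, using the same values as the SubmissionErrorTests added above (settings paths per the @override_settings there):

    # With IDSUBMIT_REPOSITORY_PATH = "/some/path/" configured:
    err = SubmissionError("Could not read /some/path/draft-foo-00.txt")
    str(err)  # "Could not read **/draft-foo-00.txt"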
@@ -937,27 +981,55 @@ def render_missing_formats(submission):
     If a txt file already exists, leaves it in place. Overwrites an existing html file
     if there is one.
     """
-    xml2rfc.log.write_out = io.StringIO()  # open(os.devnull, "w")
-    xml2rfc.log.write_err = io.StringIO()  # open(os.devnull, "w")
+    # Capture stdout/stderr from xml2rfc
+    xml2rfc_stdout = io.StringIO()
+    xml2rfc_stderr = io.StringIO()
+    xml2rfc.log.write_out = xml2rfc_stdout
+    xml2rfc.log.write_err = xml2rfc_stderr
     xml_path = staging_path(submission.name, submission.rev, '.xml')
     parser = xml2rfc.XmlRfcParser(str(xml_path), quiet=True)
-    # --- Parse the xml ---
-    xmltree = parser.parse(remove_comments=False)
+    try:
+        # --- Parse the xml ---
+        xmltree = parser.parse(remove_comments=False)
+    except Exception as err:
+        raise XmlRfcError(
+            "Error parsing XML",
+            xml2rfc_stdout=xml2rfc_stdout.getvalue(),
+            xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+        ) from err
     # If we have v2, run it through v2v3. Keep track of the submitted version, though.
     xmlroot = xmltree.getroot()
     xml_version = xmlroot.get('version', '2')
     if xml_version == '2':
         v2v3 = xml2rfc.V2v3XmlWriter(xmltree)
-        xmltree.tree = v2v3.convert2to3()
+        try:
+            xmltree.tree = v2v3.convert2to3()
+        except Exception as err:
+            raise XmlRfcError(
+                "Error converting v2 XML to v3",
+                xml2rfc_stdout=xml2rfc_stdout.getvalue(),
+                xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+            ) from err

     # --- Prep the xml ---
     today = date_today()
     prep = xml2rfc.PrepToolWriter(xmltree, quiet=True, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET])
     prep.options.accept_prepped = True
     prep.options.date = today
-    xmltree.tree = prep.prep()
-    if xmltree.tree == None:
-        raise SubmissionError(f'Error from xml2rfc (prep): {prep.errors}')
+    try:
+        xmltree.tree = prep.prep()
+    except RfcWriterError:
+        raise XmlRfcError(
+            f"Error during xml2rfc prep: {prep.errors}",
+            xml2rfc_stdout=xml2rfc_stdout.getvalue(),
+            xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+        )
+    except Exception as err:
+        raise XmlRfcError(
+            "Unexpected error during xml2rfc prep",
+            xml2rfc_stdout=xml2rfc_stdout.getvalue(),
+            xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+        ) from err

     # --- Convert to txt ---
     txt_path = staging_path(submission.name, submission.rev, '.txt')

@@ -965,7 +1037,14 @@ def render_missing_formats(submission):
         writer = xml2rfc.TextWriter(xmltree, quiet=True)
         writer.options.accept_prepped = True
         writer.options.date = today
-        writer.write(txt_path)
+        try:
+            writer.write(txt_path)
+        except Exception as err:
+            raise XmlRfcError(
+                "Error generating text format from XML",
+                xml2rfc_stdout=xml2rfc_stdout.getvalue(),
+                xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+            ) from err
         log.log(
             'In %s: xml2rfc %s generated %s from %s (version %s)' % (
                 str(xml_path.parent),

@@ -980,7 +1059,14 @@ def render_missing_formats(submission):
     html_path = staging_path(submission.name, submission.rev, '.html')
     writer = xml2rfc.HtmlWriter(xmltree, quiet=True)
     writer.options.date = today
-    writer.write(str(html_path))
+    try:
+        writer.write(str(html_path))
+    except Exception as err:
+        raise XmlRfcError(
+            "Error generating HTML format from XML",
+            xml2rfc_stdout=xml2rfc_stdout.getvalue(),
+            xml2rfc_stderr=xml2rfc_stderr.getvalue(),
+        ) from err
     log.log(
         'In %s: xml2rfc %s generated %s from %s (version %s)' % (
             str(xml_path.parent),

@@ -1263,7 +1349,7 @@ def process_submission_text(filename, revision):
 def process_and_validate_submission(submission):
     """Process and validate a submission

-    Raises SubmissionError if an error is encountered.
+    Raises SubmissionError or a subclass if an error is encountered.
     """
     if len(set(submission.file_types.split(",")).intersection({".xml", ".txt"})) == 0:
         raise SubmissionError("Require XML and/or text format to process an Internet-Draft submission.")

@@ -1273,7 +1359,16 @@ def process_and_validate_submission(submission):
         # Parse XML first, if we have it
         if ".xml" in submission.file_types:
             xml_metadata = process_submission_xml(submission.name, submission.rev)
-            render_missing_formats(submission)  # makes HTML and text, unless text was uploaded
+            try:
+                render_missing_formats(submission)  # makes HTML and text, unless text was uploaded
+            except XmlRfcError as err:
+                # log stdout/stderr
+                log.log(
+                    f"xml2rfc failure when rendering missing formats for {submission.name}-{submission.rev}:\n"
+                    f">> stdout:\n{err.xml2rfc_stdout}\n"
+                    f">> stderr:\n{err.xml2rfc_stderr}"
+                )
+                raise
         # Parse text, whether uploaded or generated from XML
         text_metadata = process_submission_text(submission.name, submission.rev)

@@ -1332,11 +1427,11 @@ def process_and_validate_submission(submission):
             raise SubmissionError('Checks failed: ' + ' / '.join(errors))
     except SubmissionError:
         raise  # pass SubmissionErrors up the stack
-    except Exception:
+    except Exception as err:
         # convert other exceptions into SubmissionErrors
         log.log(f'Unexpected exception while processing submission {submission.pk}.')
         log.log(traceback.format_exc())
-        raise SubmissionError('A system error occurred while processing the submission.')
+        raise SubmissionError('A system error occurred while processing the submission.') from err


 def submitter_is_author(submission):

@@ -1428,6 +1523,7 @@ def process_uploaded_submission(submission):
             create_submission_event(None, submission, desc="Uploaded submission (diverted to manual process)")
             send_manual_post_request(None, submission, errors=dict(consistency=str(consistency_error)))
         except SubmissionError as err:
             # something generic went wrong
+            submission.refresh_from_db()  # guard against incomplete changes in submission validation / processing
             cancel_submission(submission)  # changes Submission.state
             create_submission_event(None, submission, f"Submission rejected: {err}")

@@ -1,7 +1,5 @@
 # Copyright The IETF Trust 2011-2020, All Rights Reserved
 # -*- coding: utf-8 -*-


 import re
 import datetime

@@ -28,19 +26,47 @@ from ietf.group.utils import group_features_group_filter
 from ietf.ietfauth.utils import has_role, role_required
 from ietf.mailtrigger.utils import gather_address_lists
 from ietf.person.models import Email
-from ietf.submit.forms import (SubmissionAutoUploadForm, AuthorForm, SubmitterForm, EditSubmissionForm,
-                               PreapprovalForm, ReplacesForm,
-                               DeprecatedSubmissionAutoUploadForm, SubmissionManualUploadForm)
+from ietf.submit.forms import (
+    SubmissionAutoUploadForm,
+    AuthorForm,
+    SubmitterForm,
+    EditSubmissionForm,
+    PreapprovalForm,
+    ReplacesForm,
+    SubmissionManualUploadForm,
+    SubmissionSearchForm,
+)
 from ietf.submit.mail import send_full_url, send_manual_post_request
-from ietf.submit.models import (Submission, Preapproval, SubmissionExtResource,
-    DraftSubmissionStateName )
-from ietf.submit.tasks import process_uploaded_submission_task, process_and_accept_uploaded_submission_task, poke
-from ietf.submit.utils import ( approvable_submissions_for_user, preapprovals_for_user,
-    recently_approved_by_user, validate_submission, create_submission_event, docevent_from_submission,
-    post_submission, cancel_submission, rename_submission_files, remove_submission_files, get_draft_meta,
-    get_submission, fill_in_submission, apply_checkers, save_files, clear_existing_files,
-    check_submission_revision_consistency, accept_submission, accept_submission_requires_group_approval,
-    accept_submission_requires_prev_auth_approval, update_submission_external_resources)
+from ietf.submit.models import (
+    Submission,
+    Preapproval,
+    SubmissionExtResource,
+    DraftSubmissionStateName,
+)
+from ietf.submit.tasks import (
+    process_uploaded_submission_task,
+    process_and_accept_uploaded_submission_task,
+    poke,
+)
+from ietf.submit.utils import (
+    approvable_submissions_for_user,
+    preapprovals_for_user,
+    recently_approved_by_user,
+    validate_submission,
+    create_submission_event,
+    docevent_from_submission,
+    post_submission,
+    cancel_submission,
+    rename_submission_files,
+    remove_submission_files,
+    get_submission,
+    save_files,
+    clear_existing_files,
+    accept_submission,
+    accept_submission_requires_group_approval,
+    accept_submission_requires_prev_auth_approval,
+    update_submission_external_resources,
+)
 from ietf.stats.utils import clean_country_name
 from ietf.utils.accesstoken import generate_access_token
 from ietf.utils.log import log

@@ -187,119 +213,45 @@ def api_submission_status(request, submission_id):


 @csrf_exempt
-def api_submit(request):
-    "Automated submission entrypoint"
-    submission = None
-    def err(code, text):
-        return HttpResponse(text, status=code, content_type='text/plain')
+def api_submit_tombstone(request):
+    """Tombstone for removed automated submission entrypoint"""
+    return render(
+        request,
+        'submit/api_submit_info.html',
+        status=410,  # Gone
+    )

-    if request.method == 'GET':
-        return render(request, 'submit/api_submit_info.html')
-    elif request.method == 'POST':
-        exception = None
-        try:
-            form = DeprecatedSubmissionAutoUploadForm(request, data=request.POST, files=request.FILES)
-            if form.is_valid():
-                log('got valid submission form for %s' % form.filename)
-                username = form.cleaned_data['user']
-                user = User.objects.filter(username__iexact=username)
-                if user.count() == 0:
-                    # See if a secondary login was being used
-                    email = Email.objects.filter(address=username, active=True)
-                    # The error messages don't talk about 'email', as the field we're
-                    # looking at is still the 'username' field.
-                    if email.count() == 0:
-                        return err(400, "No such user: %s" % username)
-                    elif email.count() > 1:
-                        return err(500, "Multiple matching accounts for %s" % username)
-                    email = email.first()
-                    if not hasattr(email, 'person'):
-                        return err(400, "No person matches %s" % username)
-                    person = email.person
-                    if not hasattr(person, 'user'):
-                        return err(400, "No user matches: %s" % username)
-                    user = person.user
-                elif user.count() > 1:
-                    return err(500, "Multiple matching accounts for %s" % username)
-                else:
-                    user = user.first()
-                if not hasattr(user, 'person'):
-                    return err(400, "No person with username %s" % username)
-
-                saved_files = save_files(form)
-                authors, abstract, file_name, file_size = get_draft_meta(form, saved_files)
-                for a in authors:
-                    if not a['email']:
-                        raise ValidationError("Missing email address for author %s" % a)
-
-                submission = get_submission(form)
-                fill_in_submission(form, submission, authors, abstract, file_size)
-                apply_checkers(submission, file_name)
-
-                create_submission_event(request, submission, desc="Uploaded submission via api_submit")
-
-                errors = validate_submission(submission)
-                if errors:
-                    raise ValidationError(errors)
-
-                # must do this after validate_submission() or data needed for check may be invalid
-                if check_submission_revision_consistency(submission):
-                    return err(409, "Submission failed due to a document revision inconsistency error "
-                               "in the database. Please contact the secretariat for assistance.")
-
-                errors = [ c.message for c in submission.checks.all() if c.passed==False ]
-                if errors:
-                    raise ValidationError(errors)
-
-                if not username.lower() in [ a['email'].lower() for a in authors ]:
-                    raise ValidationError('Submitter %s is not one of the document authors' % user.username)
-
-                submission.submitter = user.person.formatted_email()
-                sent_to = accept_submission(submission, request)
-
-                return HttpResponse(
-                    "Upload of %s OK, confirmation requests sent to:\n  %s" % (submission.name, ',\n  '.join(sent_to)),
-                    content_type="text/plain")
-            else:
-                raise ValidationError(form.errors)
-        except IOError as e:
-            exception = e
-            return err(500, "IO Error: %s" % str(e))
-        except ValidationError as e:
-            exception = e
-            return err(400, "Validation Error: %s" % str(e))
-        except Exception as e:
-            exception = e
-            raise
-            return err(500, "Exception: %s" % str(e))
-        finally:
-            if exception and submission:
-                remove_submission_files(submission)
-                submission.delete()
-    else:
-        return err(405, "Method not allowed")

 def tool_instructions(request):
     return render(request, 'submit/tool_instructions.html', {'selected': 'instructions'})


 def search_submission(request):
-    error = None
-    name = None
-    if request.method == 'POST':
-        name = request.POST.get('name', '')
-        submission = Submission.objects.filter(name=name).order_by('-pk').first()
-        if submission:
-            return redirect(submission_status, submission_id=submission.pk)
-        else:
-            if re.search(r'-\d\d$', name):
-                submission = Submission.objects.filter(name=name[:-3]).order_by('-pk').first()
-                if submission:
-                    return redirect(submission_status, submission_id=submission.pk)
-        error = 'No valid submission found for %s' % name
-    return render(request, 'submit/search_submission.html',
-                  {'selected': 'status',
-                   'error': error,
-                   'name': name})
+    if request.method == "POST":
+        form = SubmissionSearchForm(request.POST)
+        if form.is_valid():
+            name = form.cleaned_data["name"]
+            submission = Submission.objects.filter(name=name).order_by("-pk").first()
+            if submission:
+                return redirect(submission_status, submission_id=submission.pk)
+            else:
+                if re.search(r"-\d\d$", name):
+                    submission = (
+                        Submission.objects.filter(name=name[:-3])
+                        .order_by("-pk")
+                        .first()
+                    )
+                    if submission:
+                        return redirect(submission_status, submission_id=submission.pk)
+            form.add_error(None, f"No valid submission found for {name}")
+    else:
+        form = SubmissionSearchForm()
+    return render(
+        request,
+        "submit/search_submission.html",
+        {"selected": "status", "form": form},
+    )


 def can_edit_submission(user, submission, access_token):
     key_matched = access_token and submission.access_token() == access_token

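From a client's perspective, the retired endpoint now answers every request with 410 Gone (hypothetical host; the response body is the notice template rewritten below):

    $ curl -s -o /dev/null -w "%{http_code}\n" \
        -F "user=user.name@example.com" -F "xml=@draft-user-example.xml" \
        https://datatracker.example.org/api/submit
    410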
@@ -9,7 +9,7 @@
     <p>
         This section describes the autogenerated read-only API towards the database tables. See also
        the
-        <a href="{% url 'ietf.submit.views.api_submit' %}">Internet-Draft submission API description</a>
+        <a href="{% url 'ietf.submit.views.api_submission' %}">Internet-Draft submission API description</a>
        and the
        <a href="#iesg-position-api">IESG ballot position API description</a>
    </p>

@@ -65,7 +65,7 @@
     {% for area in areas %}
         <h2 class="mt-5" id="id-{{ area.acronym|slugify }}">
             {{ area.name }}
-            <a href="{% url 'ietf.group.views.active_groups' group_type='wg' %}#{{ area.acronym }}">({{ area.acronym|upper }})</a>
+            <a href="{% url 'ietf.group.views.active_groups' group_type='wg' %}#{{ area.acronym|upper }}">({{ area.acronym|upper }})</a>
         </h2>
         {% if area.description %}
             <p>

@@ -24,7 +24,7 @@
     <div class="row">
         <div class="col-3 text-end fw-bold">Area</div>
         <div class="col">
-            <a href="{% url 'ietf.group.views.active_groups' group_type='wg' %}#{{ doc.group.parent.acronym }}">
+            <a href="{% url 'ietf.group.views.active_groups' group_type='wg' %}#{{ doc.group.parent.acronym|upper }}">
                {{ doc.group.parent.acronym|upper }}</a>
            ({% person_link doc.ad %})
        </div>

@@ -37,7 +37,7 @@
     <div class="col-3 text-end fw-bold">Token</div>
     <div class="col">
         {% person_link doc.ad %}
-        <a href="{% url 'ietf.group.views.active_groups' group_type='wg' %}#{{ doc.group.parent.acronym }}">
+        <a href="{% url 'ietf.group.views.active_groups' group_type='wg' %}#{{ doc.group.parent.acronym|upper }}">
            ({{ doc.group.parent.acronym|upper }})
        </a>
    </div>

@@ -154,7 +154,7 @@
     <a href="{% url "ietf.secr.sreq.views.edit" num=meeting.number acronym=session.group.acronym %}">
         {{ session.group.acronym }}
     </a>
-    {% if session.purpose_id != "regular" %}
+    {% if session.purpose_id != "regular" and session.purpose_id != "none" %}
         <br><span class="badge rounded-pill text-bg-info">{{session.purpose}}</span>
     {% endif %}
     {% if session.joint_with_groups.count %}joint with {{ session.joint_with_groups_acronyms|join:' ' }}{% endif %}

@@ -1,56 +1,13 @@
 {% extends "base.html" %}
-{# Copyright The IETF Trust 2015-2022, All Rights Reserved #}
+{# Copyright The IETF Trust 2015-2024, All Rights Reserved #}
 {% load origin ietf_filters %}
-{% block title %}I-D submission API instructions{% endblock %}
+{% block title %}Obsolete I-D submission API notice{% endblock %}
 {% block content %}
     {% origin %}
-    <h1 class="mb-3">Internet-Draft submission API instructions</h1>
+    <h1 class="mb-3">Obsolete Internet-Draft submission API notice</h1>
     <p>
-        Note: API endpoint described here is known to have a slow response time or to fail
-        due to timeout for some Internet-Draft submissions, particularly those with large file sizes.
-        It is recommended to use the <a href="{% url 'ietf.submit.views.api_submission' %}">new API endpoint</a>
-        instead for increased reliability.
+        The API endpoint previously available here is obsolete and is no longer supported.
+        Please use the <a href="{% url 'ietf.submit.views.api_submission' %}">new API endpoint</a>
+        instead.
     </p>
-    <p>
-        A simplified Internet-Draft submission interface, intended for automation,
-        is available at <code>{% absurl 'ietf.submit.views.api_submit' %}</code>.
-    </p>
-    <p>
-        The interface accepts only XML uploads that can be processed on the server, and
-        requires the user to have a datatracker account. A successful submit still requires
-        the same email confirmation round-trip as submissions done through the regular
-        <a href="{% url 'ietf.submit.views.upload_submission' %}">submission tool</a>.
-    </p>
-    <p>
-        This interface does not provide all the options which the regular submission tool does.
-        Some limitations:
-    </p>
-    <ul>
-        <li>Only XML-only uploads are supported, not text or combined.</li>
-        <li>Document replacement information cannot be supplied.</li>
-        <li>
-            The server expects <code>multipart/form-data</code>, supported by <code>curl</code> but <b>not</b> by <code>wget</code>.
-        </li>
-    </ul>
-    <p>
-        It takes two parameters:
-    </p>
-    <ul>
-        <li>
-            <code>user</code> which is the user login
-        </li>
-        <li>
-            <code>xml</code>, which is the submitted file
-        </li>
-    </ul>
-    <p>
-        It returns an appropriate http result code, and a brief explanatory text message.
-    </p>
-    <p>
-        Here is an example:
-    </p>
-    <pre class="border p-3">
-$ curl -S -F "user=user.name@example.com" -F "xml=@~/draft-user-example.xml" {% absurl 'ietf.submit.views.api_submit' %}
-Upload of draft-user-example OK, confirmation requests sent to:
-  User Name <user.name@example.com></pre>
 {% endblock %}
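The removed instructions above fully specified the old interface: a multipart/form-data POST with a "user" field and an "xml" file, answered with an HTTP status code and a short message. For comparison only, a rough Python equivalent of the quoted curl call (a sketch; it targets the obsolete endpoint this notice retires, and the URL is illustrative):

import requests  # third-party HTTP client

# Mirrors the curl example quoted above: multipart/form-data with a
# "user" form field and an "xml" file field. Illustrative URL only -
# this is the obsolete endpoint that the notice says is gone.
with open("draft-user-example.xml", "rb") as xml_file:
    response = requests.post(
        "https://datatracker.ietf.org/api/submit",
        data={"user": "user.name@example.com"},
        files={"xml": xml_file},
    )
print(response.status_code, response.text)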
@@ -12,14 +12,8 @@
 <form method="post">
     {% csrf_token %}
     <div class="mb-3">
-        <label class="form-label" for="name">I-D name</label>
-        <input type="text" class="form-control" id="name" name="name" placeholder="draft-..." required>
+        {% bootstrap_form form %}
     </div>
-    {% if error %}
-        <p class="alert alert-danger my-3">
-            {{ error }}
-        </p>
-    {% endif %}
     <button class="btn btn-primary" type="submit">See status</button>
 </form>
 {% endblock %}
@@ -19,11 +19,13 @@ from email.mime.multipart import MIMEMultipart
 from email.header import Header, decode_header
 from email import message_from_bytes, message_from_string
 from email import charset as Charset
+from typing import Optional

 from django.conf import settings
 from django.contrib import messages
 from django.core.exceptions import ImproperlyConfigured, ValidationError
 from django.core.validators import validate_email
+from django.http import HttpRequest
 from django.template.loader import render_to_string
 from django.template import Context,RequestContext
 from django.utils import timezone
@@ -64,6 +66,18 @@ def add_headers(msg):
         msg['From'] = settings.DEFAULT_FROM_EMAIL
     return msg


+def decode_header_value(value: str) -> str:
+    """Decode a header value
+
+    Easier-to-use wrapper around email.header.decode_header()
+    """
+    return "".join(
+        part.decode(charset if charset else "utf-8") if isinstance(part, bytes) else part
+        for part, charset in decode_header(value)
+    )
+
+
 class SMTPSomeRefusedRecipients(smtplib.SMTPException):

     def __init__(self, message, original_msg, refusals):
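For context on why the wrapper helps: the stdlib decode_header() returns a list of (part, charset) tuples, with plain values as str and RFC 2047 encoded words as undecoded bytes, which each caller would otherwise have to decode by hand. A small sketch of that stdlib behavior (not part of the diff):

from email.header import decode_header

# A plain value comes back as a single str part with no charset...
print(decode_header("cake"))                  # [('cake', None)]

# ...while an encoded word comes back as bytes plus its charset.
print(decode_header("=?utf-8?b?8J+Ogg==?="))  # [(b'\xf0\x9f\x8e\x82', 'utf-8')]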
@@ -251,8 +265,7 @@ def parseaddr(addr):

     """

-    addr = ''.join( [ ( s.decode(m) if m else s.decode()) if isinstance(s, bytes) else s for (s,m) in decode_header(addr) ] )
-    name, addr = simple_parseaddr(addr)
+    name, addr = simple_parseaddr(decode_header_value(addr))
     return name, addr

 def excludeaddrs(addrlist, exlist):
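Assuming simple_parseaddr is the stdlib email.utils.parseaddr imported under an alias (the local function shadows the name), the new one-liner first RFC 2047-decodes the header value, then splits it into display name and address. A quick illustration of the stdlib call under that assumption:

from email.utils import parseaddr  # presumably aliased as simple_parseaddr in mail.py

name, addr = parseaddr("User Name <user.name@example.com>")
assert (name, addr) == ("User Name", "user.name@example.com")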
@@ -330,18 +343,45 @@ def condition_message(to, frm, subject, msg, cc, extra):
     msg['Message-ID'] = make_msgid()


-def show_that_mail_was_sent(request,leadline,msg,bcc):
-    if request and request.user:
-        from ietf.ietfauth.utils import has_role
-        if has_role(request.user,['Area Director','Secretariat','IANA','RFC Editor','ISE','IAD','IRTF Chair','WG Chair','RG Chair','WG Secretary','RG Secretary']):
-            info = "%s at %s %s\n" % (leadline,timezone.now().strftime("%Y-%m-%d %H:%M:%S"),settings.TIME_ZONE)
-            info += "Subject: %s\n" % force_str(msg.get('Subject','[no subject]'))
-            info += "To: %s\n" % msg.get('To','[no to]')
-            if msg.get('Cc'):
-                info += "Cc: %s\n" % msg.get('Cc')
-            if bcc:
-                info += "Bcc: %s\n" % bcc
-            messages.info(request,info,extra_tags='preformatted',fail_silently=True)
+def show_that_mail_was_sent(request: HttpRequest, leadline: str, msg: Message, bcc: Optional[str]):
+    if request and request.user:
+        from ietf.ietfauth.utils import has_role
+
+        if has_role(
+            request.user,
+            [
+                "Area Director",
+                "Secretariat",
+                "IANA",
+                "RFC Editor",
+                "ISE",
+                "IAD",
+                "IRTF Chair",
+                "WG Chair",
+                "RG Chair",
+                "WG Secretary",
+                "RG Secretary",
+            ],
+        ):
+            subject = decode_header_value(msg.get("Subject", "[no subject]"))
+            _to = decode_header_value(msg.get("To", "[no to]"))
+            info_lines = [
+                f"{leadline} at {timezone.now():%Y-%m-%d %H:%M:%S %Z}",
+                f"Subject: {subject}",
+                f"To: {_to}",
+            ]
+            cc = msg.get("Cc", None)
+            if cc:
+                info_lines.append(f"Cc: {decode_header_value(cc)}")
+            if bcc:
+                info_lines.append(f"Bcc: {decode_header_value(bcc)}")
+            messages.info(
+                request,
+                "\n".join(info_lines),
+                extra_tags="preformatted",
+                fail_silently=True,
+            )


 def save_as_message(request, msg, bcc):
     by = ((request and request.user and not request.user.is_anonymous and request.user.person)
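One detail worth noting in the rewrite: the format spec inside the f-string, f"{timezone.now():%Y-%m-%d %H:%M:%S %Z}", is handed to datetime.__format__, which applies it like strftime; the %Z directive renders the timezone name, replacing the old explicit settings.TIME_ZONE suffix. A standalone sketch with a plain aware datetime (stdlib only, illustrative):

from datetime import datetime, timezone as dt_timezone

# An aware datetime, like django.utils.timezone.now() returns.
now = datetime.now(dt_timezone.utc)
print(f"sent at {now:%Y-%m-%d %H:%M:%S %Z}")               # e.g. "sent at 2024-03-07 12:34:56 UTC"
print("sent at " + now.strftime("%Y-%m-%d %H:%M:%S %Z"))   # equivalent spelling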
@@ -481,12 +481,10 @@ class ConferenceManager(Manager):
 class SlidesManager(Manager):
     """Interface between Datatracker models and Meetecho API

-    Note: The URL we send comes from get_versionless_href(). This should match what we use as the
-    URL in api_get_session_materials(). Additionally, it _must_ give the right result for a Document
-    instance that has not yet been persisted to the database. This is because upload_session_slides()
-    (as of 2024-03-07) SessionPresentations before saving its updated Documents. This means, for
-    example, using get_absolute_url() will cause bugs. (We should refactor upload_session_slides() to
-    avoid this requirement.)
+    Note: the URL sent for a slide deck comes from DocumentInfo.get_href() and includes the revision
+    of the slides being sent. Be sure that 1) the URL matches what api_get_session_materials() returns
+    for the slides; and 2) the URL is valid if it is fetched immediately - possibly even before the call
+    to SlidesManager.add() or send_update() returns.
     """

     def __init__(self, api_config):
@@ -521,7 +519,7 @@ class SlidesManager(Manager):
             deck={
                 "id": slides.pk,
                 "title": slides.title,
-                "url": slides.get_versionless_href(),  # see above note re: get_versionless_href()
+                "url": slides.get_href(),
                 "rev": slides.rev,
                 "order": order,
             }
@@ -575,7 +573,7 @@ class SlidesManager(Manager):
             {
                 "id": deck.document.pk,
                 "title": deck.document.title,
-                "url": deck.document.get_versionless_href(),  # see note above re: get_versionless_href()
+                "url": deck.document.get_href(),
                 "rev": deck.document.rev,
                 "order": deck.order,
             }
@@ -11,10 +11,11 @@ import pytz
 import shutil
 import types

-from mock import patch
+from mock import call, patch
 from pyquery import PyQuery
 from typing import Dict, List  # pyflakes:ignore

+from email.message import Message
 from email.mime.image import MIMEImage
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
@@ -32,6 +33,7 @@ from django.template import Template  # pyflakes:ignore
 from django.template.defaulttags import URLNode
 from django.template.loader import get_template, render_to_string
 from django.templatetags.static import StaticNode
+from django.test import RequestFactory
 from django.urls import reverse as urlreverse

 import debug  # pyflakes:ignore
@@ -42,7 +44,15 @@ from ietf.submit.tests import submission_file
 from ietf.utils.draft import PlaintextDraft, getmeta
 from ietf.utils.fields import SearchableField
 from ietf.utils.log import unreachable, assertion
-from ietf.utils.mail import send_mail_preformatted, send_mail_text, send_mail_mime, outbox, get_payload_text
+from ietf.utils.mail import (
+    send_mail_preformatted,
+    send_mail_text,
+    send_mail_mime,
+    outbox,
+    get_payload_text,
+    decode_header_value,
+    show_that_mail_was_sent,
+)
 from ietf.utils.test_runner import get_template_paths, set_coverage_checking
 from ietf.utils.test_utils import TestCase, unicontent
 from ietf.utils.text import parse_unicode
@@ -109,6 +119,135 @@ body
         recv = outbox[-1]
         self.assertEqual(recv['Fuzz'], 'bucket, monger')


+class MailUtilsTests(TestCase):
+    def test_decode_header_value(self):
+        self.assertEqual(
+            decode_header_value("cake"),
+            "cake",
+            "decodes simple string value",
+        )
+        self.assertEqual(
+            decode_header_value("=?utf-8?b?8J+Ogg==?="),
+            "\U0001f382",
+            "decodes single utf-8-encoded part",
+        )
+        self.assertEqual(
+            decode_header_value("=?utf-8?b?8J+Ogg==?= = =?macintosh?b?jYxrjg==?="),
+            "\U0001f382 = çåké",
+            "decodes a value with non-utf-8 encodings",
+        )
+
+    # Patch in a side_effect so we can distinguish values that came from decode_header_value.
+    @patch("ietf.utils.mail.decode_header_value", side_effect=lambda s: f"decoded-{s}")
+    @patch("ietf.utils.mail.messages")
+    def test_show_that_mail_was_sent(self, mock_messages, mock_decode_header_value):
+        request = RequestFactory().get("/some/path")
+        request.user = object()  # just needs to exist
+        msg = Message()
+        msg["To"] = "to-value"
+        msg["Subject"] = "subject-value"
+        msg["Cc"] = "cc-value"
+        with patch("ietf.ietfauth.utils.has_role", return_value=True):
+            show_that_mail_was_sent(request, "mail was sent", msg, "bcc-value")
+        self.assertCountEqual(
+            mock_decode_header_value.call_args_list,
+            [call("to-value"), call("subject-value"), call("cc-value"), call("bcc-value")],
+        )
+        self.assertEqual(mock_messages.info.call_args[0][0], request)
+        self.assertIn("mail was sent", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-to-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-cc-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-bcc-value", mock_messages.info.call_args[0][1])
+        mock_messages.reset_mock()
+        mock_decode_header_value.reset_mock()
+
+        # no bcc
+        with patch("ietf.ietfauth.utils.has_role", return_value=True):
+            show_that_mail_was_sent(request, "mail was sent", msg, None)
+        self.assertCountEqual(
+            mock_decode_header_value.call_args_list,
+            [call("to-value"), call("subject-value"), call("cc-value")],
+        )
+        self.assertEqual(mock_messages.info.call_args[0][0], request)
+        self.assertIn("mail was sent", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-to-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-cc-value", mock_messages.info.call_args[0][1])
+        # Note: here and below - when using assertNotIn(), leaving off the "decoded-" prefix
+        # proves that neither the original value nor the decoded value appear.
+        self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1])
+        mock_messages.reset_mock()
+        mock_decode_header_value.reset_mock()
+
+        # no cc
+        del msg["Cc"]
+        with patch("ietf.ietfauth.utils.has_role", return_value=True):
+            show_that_mail_was_sent(request, "mail was sent", msg, None)
+        self.assertCountEqual(
+            mock_decode_header_value.call_args_list,
+            [call("to-value"), call("subject-value")],
+        )
+        self.assertEqual(mock_messages.info.call_args[0][0], request)
+        self.assertIn("mail was sent", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-to-value", mock_messages.info.call_args[0][1])
+        self.assertNotIn("cc-value", mock_messages.info.call_args[0][1])
+        self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1])
+        mock_messages.reset_mock()
+        mock_decode_header_value.reset_mock()
+
+        # no to
+        del msg["To"]
+        with patch("ietf.ietfauth.utils.has_role", return_value=True):
+            show_that_mail_was_sent(request, "mail was sent", msg, None)
+        self.assertCountEqual(
+            mock_decode_header_value.call_args_list,
+            [call("[no to]"), call("subject-value")],
+        )
+        self.assertEqual(mock_messages.info.call_args[0][0], request)
+        self.assertIn("mail was sent", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-subject-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-[no to]", mock_messages.info.call_args[0][1])
+        self.assertNotIn("to-value", mock_messages.info.call_args[0][1])
+        self.assertNotIn("cc-value", mock_messages.info.call_args[0][1])
+        self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1])
+        mock_messages.reset_mock()
+        mock_decode_header_value.reset_mock()
+
+        # no subject
+        del msg["Subject"]
+        with patch("ietf.ietfauth.utils.has_role", return_value=True):
+            show_that_mail_was_sent(request, "mail was sent", msg, None)
+        self.assertCountEqual(
+            mock_decode_header_value.call_args_list,
+            [call("[no to]"), call("[no subject]")],
+        )
+        self.assertEqual(mock_messages.info.call_args[0][0], request)
+        self.assertIn("mail was sent", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-[no subject]", mock_messages.info.call_args[0][1])
+        self.assertNotIn("subject-value", mock_messages.info.call_args[0][1])
+        self.assertIn("decoded-[no to]", mock_messages.info.call_args[0][1])
+        self.assertNotIn("to-value", mock_messages.info.call_args[0][1])
+        self.assertNotIn("cc-value", mock_messages.info.call_args[0][1])
+        self.assertNotIn("bcc-value", mock_messages.info.call_args[0][1])
+        mock_messages.reset_mock()
+        mock_decode_header_value.reset_mock()
+
+        # user does not have role
+        with patch("ietf.ietfauth.utils.has_role", return_value=False):
+            show_that_mail_was_sent(request, "mail was sent", msg, None)
+        self.assertFalse(mock_messages.called)
+
+        # no user
+        request.user = None
+        with patch("ietf.ietfauth.utils.has_role", return_value=True) as mock_has_role:
+            show_that_mail_was_sent(request, "mail was sent", msg, None)
+        self.assertFalse(mock_messages.called)
+        self.assertFalse(mock_has_role.called)
+
+
 class TestSMTPServer(TestCase):

     def test_address_rejected(self):
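A reminder about the stacked @patch decorators in the new test: they are applied bottom-up, so the decorator closest to the function supplies the first mock argument (mock_messages before mock_decode_header_value above). A minimal stdlib illustration of that ordering, using arbitrary targets chosen only for the demo:

from unittest import mock

@mock.patch("os.getcwd")         # outer decorator -> second mock argument
@mock.patch("os.path.exists")    # inner decorator (closest) -> first mock argument
def demo(mock_exists, mock_getcwd):
    # Inside this call, os.path.exists and os.getcwd are replaced by mocks.
    return mock_exists, mock_getcwd

inner, outer = demo()
assert inner is not outer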
@@ -558,7 +558,7 @@ class SlidesManagerTests(TestCase):
             deck={
                 "id": slides_doc.pk,
                 "title": slides_doc.title,
-                "url": slides_doc.get_versionless_href(),
+                "url": slides_doc.get_href(session.meeting),
                 "rev": slides_doc.rev,
                 "order": 13,
             },
@@ -597,7 +597,7 @@ class SlidesManagerTests(TestCase):
             {
                 "id": slides_doc.pk,
                 "title": slides_doc.title,
-                "url": slides_doc.get_versionless_href(),
+                "url": slides_doc.get_href(session.meeting),
                 "rev": slides_doc.rev,
                 "order": 1,
             },
@@ -635,7 +635,7 @@ class SlidesManagerTests(TestCase):
             deck={
                 "id": slides_doc.pk,
                 "title": slides_doc.title,
-                "url": slides_doc.get_versionless_href(),
+                "url": slides_doc.get_href(slides.session.meeting),
                 "rev": slides_doc.rev,
                 "order": 23,
             },
@@ -660,7 +660,7 @@ class SlidesManagerTests(TestCase):
             {
                 "id": slides.document_id,
                 "title": slides.document.title,
-                "url": slides.document.get_versionless_href(),
+                "url": slides.document.get_href(slides.session.meeting),
                 "rev": slides.document.rev,
                 "order": 0,
             }
@@ -84,7 +84,7 @@ spec:
             mountPath: /etc/nginx/conf.d/00logging.conf
             subPath: nginx-logging.conf
           - name: dt-cfg
-            mountPath: /etc/nginx/conf.d/auth.conf
+            mountPath: /etc/nginx/conf.d/default.conf
             subPath: nginx-auth.conf
       # -----------------------------------------------------
      # ScoutAPM Container
@@ -84,7 +84,8 @@ spec:
             mountPath: /etc/nginx/conf.d/00logging.conf
             subPath: nginx-logging.conf
           - name: dt-cfg
-            mountPath: /etc/nginx/conf.d/datatracker.conf
+            # Replaces the original default.conf
+            mountPath: /etc/nginx/conf.d/default.conf
             subPath: nginx-datatracker.conf
       # -----------------------------------------------------
      # ScoutAPM Container
@@ -20,6 +20,10 @@ test.describe('site status', () => {
     by: 'Exile is a cool Amiga game'
   }

+  test.beforeEach(({ browserName }) => {
+    test.skip(browserName === 'firefox', 'bypassing flaky tests on Firefox')
+  })
+
   test('Renders server status as Notification', async ({ page }) => {
     await page.route('/status/latest.json', route => {
       route.fulfill({