ci: merge main to release (#8448)

This commit is contained in:
Robert Sparks 2025-01-21 10:32:03 -06:00 committed by GitHub
commit a4eeae5b27
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
29 changed files with 872 additions and 699 deletions

View file

@ -0,0 +1,47 @@
# Builds and publishes the dev/test MinIO blobstore image to GHCR.
name: Build Dev/Test Blobstore Docker Image

on:
  push:
    branches:
      - 'main'
    paths:
      # Rebuild only when this workflow definition itself changes.
      - '.github/workflows/build-devblobstore.yml'
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

env:
  # MinIO base-image tag; also reused as an image tag below.
  MINIO_VERSION: latest

jobs:
  publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      # packages: write is required to push to ghcr.io.
      packages: write
    steps:
      - uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Docker Build & Push
        uses: docker/build-push-action@v6
        env:
          # Suppresses the build summary attached to the job output.
          DOCKER_BUILD_NO_SUMMARY: true
        with:
          context: .
          file: docker/devblobstore.Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          build-args: MINIO_VERSION=${{ env.MINIO_VERSION }}
          # NOTE(review): while MINIO_VERSION is "latest" both tags resolve to
          # the same value; they only diverge if MINIO_VERSION gets pinned.
          tags: |
            ghcr.io/ietf-tools/datatracker-devblobstore:${{ env.MINIO_VERSION }}
            ghcr.io/ietf-tools/datatracker-devblobstore:latest

View file

@ -97,7 +97,7 @@ jobs:
echo "IS_RELEASE=true" >> $GITHUB_ENV
- name: Create Draft Release
uses: ncipollo/release-action@v1.14.0
uses: ncipollo/release-action@v1.15.0
if: ${{ github.ref_name == 'release' }}
with:
prerelease: true
@ -311,7 +311,7 @@ jobs:
histCoveragePath: historical-coverage.json
- name: Create Release
uses: ncipollo/release-action@v1.14.0
uses: ncipollo/release-action@v1.15.0
if: ${{ env.SHOULD_DEPLOY == 'true' }}
with:
allowUpdates: true
@ -324,7 +324,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Update Baseline Coverage
uses: ncipollo/release-action@v1.14.0
uses: ncipollo/release-action@v1.15.0
if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }}
with:
allowUpdates: true

View file

@ -13,7 +13,34 @@ on:
- 'package.json'
jobs:
# -----------------------------------------------------------------
# PREPARE
# -----------------------------------------------------------------
prepare:
name: Prepare
runs-on: ubuntu-latest
outputs:
base_image_version: ${{ steps.baseimgversion.outputs.base_image_version }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 1
fetch-tags: false
- name: Get Base Image Target Version
id: baseimgversion
run: |
echo "base_image_version=$(sed -n '1p' dev/build/TARGET_BASE)" >> $GITHUB_OUTPUT
# -----------------------------------------------------------------
# TESTS
# -----------------------------------------------------------------
tests:
name: Run Tests
uses: ./.github/workflows/tests.yml
needs: [prepare]
with:
ignoreLowerCoverage: false
ignoreLowerCoverage: false
skipSelenium: true
targetBaseVersion: ${{ needs.prepare.outputs.base_image_version }}

View file

@ -1,4 +1,4 @@
FROM ghcr.io/ietf-tools/datatracker-app-base:20241212T1741
FROM ghcr.io/ietf-tools/datatracker-app-base:20250117T1516
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
ENV DEBIAN_FRONTEND=noninteractive

View file

@ -1 +1 @@
20241212T1741
20250117T1516

View file

@ -6,7 +6,7 @@
"": {
"name": "deploy-to-container",
"dependencies": {
"dockerode": "^4.0.2",
"dockerode": "^4.0.3",
"fs-extra": "^11.2.0",
"nanoid": "5.0.9",
"nanoid-dictionary": "5.0.0-beta.1",
@ -23,6 +23,35 @@
"resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
"integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="
},
"node_modules/@grpc/grpc-js": {
"version": "1.12.5",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz",
"integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==",
"dependencies": {
"@grpc/proto-loader": "^0.7.13",
"@js-sdsl/ordered-map": "^4.4.2"
},
"engines": {
"node": ">=12.10.0"
}
},
"node_modules/@grpc/proto-loader": {
"version": "0.7.13",
"resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz",
"integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==",
"dependencies": {
"lodash.camelcase": "^4.3.0",
"long": "^5.0.0",
"protobufjs": "^7.2.5",
"yargs": "^17.7.2"
},
"bin": {
"proto-loader-gen-types": "build/bin/proto-loader-gen-types.js"
},
"engines": {
"node": ">=6"
}
},
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@ -123,6 +152,15 @@
"node": ">=18.0.0"
}
},
"node_modules/@js-sdsl/ordered-map": {
"version": "4.4.2",
"resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz",
"integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/js-sdsl"
}
},
"node_modules/@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
@ -132,6 +170,68 @@
"node": ">=14"
}
},
"node_modules/@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
"integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="
},
"node_modules/@protobufjs/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
"integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
},
"node_modules/@protobufjs/codegen": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
"integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
},
"node_modules/@protobufjs/eventemitter": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
"integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="
},
"node_modules/@protobufjs/fetch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
"integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
"dependencies": {
"@protobufjs/aspromise": "^1.1.1",
"@protobufjs/inquire": "^1.1.0"
}
},
"node_modules/@protobufjs/float": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
"integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="
},
"node_modules/@protobufjs/inquire": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
"integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="
},
"node_modules/@protobufjs/path": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
"integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="
},
"node_modules/@protobufjs/pool": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
"integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="
},
"node_modules/@protobufjs/utf8": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
},
"node_modules/@types/node": {
"version": "22.10.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz",
"integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==",
"dependencies": {
"undici-types": "~6.20.0"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@ -246,14 +346,14 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/cpu-features": {
"version": "0.0.9",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz",
"integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==",
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
"integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"hasInstallScript": true,
"optional": true,
"dependencies": {
"buildcheck": "~0.0.6",
"nan": "^2.17.0"
"nan": "^2.19.0"
},
"engines": {
"node": ">=10.0.0"
@ -273,11 +373,11 @@
}
},
"node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"dependencies": {
"ms": "2.1.2"
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
@ -289,9 +389,9 @@
}
},
"node_modules/docker-modem": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
"integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.5.tgz",
"integrity": "sha512-Cxw8uEcvNTRmsQuGqzzfiCnfGgf96tVJItLh8taOX0miTcIBALKH5TckCSuZbpbjP7uhAl81dOL9sxfa6HgCIg==",
"dependencies": {
"debug": "^4.1.1",
"readable-stream": "^3.5.0",
@ -303,13 +403,17 @@
}
},
"node_modules/dockerode": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
"integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.3.tgz",
"integrity": "sha512-QSXJFcBQNaGZO6U3qWW4B7p8yRIJn/dWmvL2AQWfO/bjptBBO6QYdVkYSYFz9qoivP2jsOHZfmXMAfrK0BMKyg==",
"dependencies": {
"@balena/dockerignore": "^1.0.2",
"docker-modem": "^5.0.3",
"tar-fs": "~2.0.1"
"@grpc/grpc-js": "^1.11.1",
"@grpc/proto-loader": "^0.7.13",
"docker-modem": "^5.0.5",
"protobufjs": "^7.3.2",
"tar-fs": "~2.0.1",
"uuid": "^10.0.0"
},
"engines": {
"node": ">= 8.0"
@ -473,6 +577,16 @@
"graceful-fs": "^4.1.6"
}
},
"node_modules/lodash.camelcase": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
"integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
},
"node_modules/long": {
"version": "5.2.4",
"resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz",
"integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg=="
},
"node_modules/lru-cache": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz",
@ -535,14 +649,14 @@
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"node_modules/nan": {
"version": "2.18.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz",
"integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==",
"version": "2.22.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
"integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
"optional": true
},
"node_modules/nanoid": {
@ -598,6 +712,29 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/protobufjs": {
"version": "7.4.0",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
"integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==",
"hasInstallScript": true,
"dependencies": {
"@protobufjs/aspromise": "^1.1.2",
"@protobufjs/base64": "^1.1.2",
"@protobufjs/codegen": "^2.0.4",
"@protobufjs/eventemitter": "^1.1.0",
"@protobufjs/fetch": "^1.1.0",
"@protobufjs/float": "^1.0.2",
"@protobufjs/inquire": "^1.1.0",
"@protobufjs/path": "^1.1.2",
"@protobufjs/pool": "^1.1.0",
"@protobufjs/utf8": "^1.1.0",
"@types/node": ">=13.7.0",
"long": "^5.0.0"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
@ -713,9 +850,9 @@
"integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="
},
"node_modules/ssh2": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
"integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
"version": "1.16.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz",
"integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==",
"hasInstallScript": true,
"dependencies": {
"asn1": "^0.2.6",
@ -725,8 +862,8 @@
"node": ">=10.16.0"
},
"optionalDependencies": {
"cpu-features": "~0.0.9",
"nan": "^2.18.0"
"cpu-features": "~0.0.10",
"nan": "^2.20.0"
}
},
"node_modules/string_decoder": {
@ -875,6 +1012,11 @@
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
"node_modules/undici-types": {
"version": "6.20.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz",
"integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
},
"node_modules/universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
@ -888,6 +1030,18 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/uuid": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
"integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==",
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@ -988,6 +1142,26 @@
"resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
"integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="
},
"@grpc/grpc-js": {
"version": "1.12.5",
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz",
"integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==",
"requires": {
"@grpc/proto-loader": "^0.7.13",
"@js-sdsl/ordered-map": "^4.4.2"
}
},
"@grpc/proto-loader": {
"version": "0.7.13",
"resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz",
"integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==",
"requires": {
"lodash.camelcase": "^4.3.0",
"long": "^5.0.0",
"protobufjs": "^7.2.5",
"yargs": "^17.7.2"
}
},
"@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@ -1054,12 +1228,79 @@
"minipass": "^7.0.4"
}
},
"@js-sdsl/ordered-map": {
"version": "4.4.2",
"resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz",
"integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="
},
"@pkgjs/parseargs": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
"integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
"optional": true
},
"@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
"integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="
},
"@protobufjs/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
"integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
},
"@protobufjs/codegen": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
"integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
},
"@protobufjs/eventemitter": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
"integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="
},
"@protobufjs/fetch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
"integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==",
"requires": {
"@protobufjs/aspromise": "^1.1.1",
"@protobufjs/inquire": "^1.1.0"
}
},
"@protobufjs/float": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
"integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="
},
"@protobufjs/inquire": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
"integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="
},
"@protobufjs/path": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
"integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="
},
"@protobufjs/pool": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
"integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="
},
"@protobufjs/utf8": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
"integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="
},
"@types/node": {
"version": "22.10.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.5.tgz",
"integrity": "sha512-F8Q+SeGimwOo86fiovQh8qiXfFEh2/ocYv7tU5pJ3EXMSSxk1Joj5wefpFK2fHTf/N6HKGSxIDBT9f3gCxXPkQ==",
"requires": {
"undici-types": "~6.20.0"
}
},
"ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@ -1142,13 +1383,13 @@
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"cpu-features": {
"version": "0.0.9",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz",
"integrity": "sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ==",
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
"integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"optional": true,
"requires": {
"buildcheck": "~0.0.6",
"nan": "^2.17.0"
"nan": "^2.19.0"
}
},
"cross-spawn": {
@ -1162,17 +1403,17 @@
}
},
"debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"requires": {
"ms": "2.1.2"
"ms": "^2.1.3"
}
},
"docker-modem": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
"integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.5.tgz",
"integrity": "sha512-Cxw8uEcvNTRmsQuGqzzfiCnfGgf96tVJItLh8taOX0miTcIBALKH5TckCSuZbpbjP7uhAl81dOL9sxfa6HgCIg==",
"requires": {
"debug": "^4.1.1",
"readable-stream": "^3.5.0",
@ -1181,13 +1422,17 @@
}
},
"dockerode": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
"integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.3.tgz",
"integrity": "sha512-QSXJFcBQNaGZO6U3qWW4B7p8yRIJn/dWmvL2AQWfO/bjptBBO6QYdVkYSYFz9qoivP2jsOHZfmXMAfrK0BMKyg==",
"requires": {
"@balena/dockerignore": "^1.0.2",
"docker-modem": "^5.0.3",
"tar-fs": "~2.0.1"
"@grpc/grpc-js": "^1.11.1",
"@grpc/proto-loader": "^0.7.13",
"docker-modem": "^5.0.5",
"protobufjs": "^7.3.2",
"tar-fs": "~2.0.1",
"uuid": "^10.0.0"
}
},
"eastasianwidth": {
@ -1297,6 +1542,16 @@
"universalify": "^2.0.0"
}
},
"lodash.camelcase": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
"integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
},
"long": {
"version": "5.2.4",
"resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz",
"integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg=="
},
"lru-cache": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz",
@ -1335,14 +1590,14 @@
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
},
"nan": {
"version": "2.18.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz",
"integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==",
"version": "2.22.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
"integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
"optional": true
},
"nanoid": {
@ -1377,6 +1632,25 @@
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
}
},
"protobufjs": {
"version": "7.4.0",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz",
"integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==",
"requires": {
"@protobufjs/aspromise": "^1.1.2",
"@protobufjs/base64": "^1.1.2",
"@protobufjs/codegen": "^2.0.4",
"@protobufjs/eventemitter": "^1.1.0",
"@protobufjs/fetch": "^1.1.0",
"@protobufjs/float": "^1.0.2",
"@protobufjs/inquire": "^1.1.0",
"@protobufjs/path": "^1.1.2",
"@protobufjs/pool": "^1.1.0",
"@protobufjs/utf8": "^1.1.0",
"@types/node": ">=13.7.0",
"long": "^5.0.0"
}
},
"pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
@ -1448,14 +1722,14 @@
"integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="
},
"ssh2": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.15.0.tgz",
"integrity": "sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw==",
"version": "1.16.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz",
"integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==",
"requires": {
"asn1": "^0.2.6",
"bcrypt-pbkdf": "^1.0.2",
"cpu-features": "~0.0.9",
"nan": "^2.18.0"
"cpu-features": "~0.0.10",
"nan": "^2.20.0"
}
},
"string_decoder": {
@ -1571,6 +1845,11 @@
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
"undici-types": {
"version": "6.20.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz",
"integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="
},
"universalify": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz",
@ -1581,6 +1860,11 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"uuid": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
"integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="
},
"which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",

View file

@ -2,7 +2,7 @@
"name": "deploy-to-container",
"type": "module",
"dependencies": {
"dockerode": "^4.0.2",
"dockerode": "^4.0.3",
"fs-extra": "^11.2.0",
"nanoid": "5.0.9",
"nanoid-dictionary": "5.0.0-beta.1",

View file

@ -0,0 +1,9 @@
# Dev/test blobstore image: MinIO with fixed, well-known credentials.
# For local development and CI only — credentials are hard-coded below.
ARG MINIO_VERSION=latest
FROM quay.io/minio/minio:${MINIO_VERSION}
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
# Root credentials for the dev instance (override at runtime if needed).
ENV MINIO_ROOT_USER=minio_root
ENV MINIO_ROOT_PASSWORD=minio_pass
# NOTE(review): MINIO_DEFAULT_BUCKETS is a Bitnami-image convention; the
# upstream quay.io/minio/minio image may not auto-create buckets from it —
# confirm the consumer relies on this variable.
ENV MINIO_DEFAULT_BUCKETS=defaultbucket
# Serve /data, exposing the web console on :9001 (API defaults to :9000).
CMD ["server", "--console-address", ":9001", "/data"]

View file

@ -554,105 +554,12 @@ class CustomApiTests(TestCase):
newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename)
self.assertEqual(json.loads(content), json.loads(newdoccontent))
def test_deprecated_api_upload_bluesheet(self):
url = urlreverse('ietf.meeting.views.api_upload_bluesheet')
recmanrole = RoleFactory(group__type_id='ietf', name_id='recman')
recman = recmanrole.person
meeting = MeetingFactory(type_id='ietf')
session = SessionFactory(group__type_id='wg', meeting=meeting)
group = session.group
apikey = PersonalApiKeyFactory(endpoint=url, person=recman)
people = [
{"name": "Andrea Andreotti", "affiliation": "Azienda"},
{"name": "Bosse Bernadotte", "affiliation": "Bolag"},
{"name": "Charles Charlemagne", "affiliation": "Compagnie"},
]
for i in range(3):
faker = random_faker()
people.append(dict(name=faker.name(), affiliation=faker.company()))
bluesheet = json.dumps(people)
# error cases
r = self.client.post(url, {})
self.assertContains(r, "Missing apikey parameter", status_code=400)
badrole = RoleFactory(group__type_id='ietf', name_id='ad')
badapikey = PersonalApiKeyFactory(endpoint=url, person=badrole.person)
badrole.person.user.last_login = timezone.now()
badrole.person.user.save()
r = self.client.post(url, {'apikey': badapikey.hash()})
self.assertContains(r, "Restricted to roles: Recording Manager, Secretariat", status_code=403)
r = self.client.post(url, {'apikey': apikey.hash()})
self.assertContains(r, "Too long since last regular login", status_code=400)
recman.user.last_login = timezone.now()
recman.user.save()
r = self.client.get(url, {'apikey': apikey.hash()})
self.assertContains(r, "Method not allowed", status_code=405)
r = self.client.post(url, {'apikey': apikey.hash(), 'group': group.acronym})
self.assertContains(r, "Missing meeting parameter", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, })
self.assertContains(r, "Missing group parameter", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym})
self.assertContains(r, "Missing item parameter", status_code=400)
r = self.client.post(url,
{'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym, 'item': '1'})
self.assertContains(r, "Missing bluesheet parameter", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': '1', 'group': group.acronym,
'item': '1', 'bluesheet': bluesheet, })
self.assertContains(r, "No sessions found for meeting", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': 'bogous',
'item': '1', 'bluesheet': bluesheet, })
self.assertContains(r, "No sessions found in meeting '%s' for group 'bogous'" % meeting.number, status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym,
'item': '1', 'bluesheet': "foobar", })
self.assertContains(r, "Invalid json value: 'foobar'", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym,
'item': '5', 'bluesheet': bluesheet, })
self.assertContains(r, "No item '5' found in list of sessions for group", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym,
'item': 'foo', 'bluesheet': bluesheet, })
self.assertContains(r, "Expected a numeric value for 'item', found 'foo'", status_code=400)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym,
'item': '1', 'bluesheet': bluesheet, })
self.assertContains(r, "Done", status_code=200)
# Submit again, with slightly different content, as an updated version
people[1]['affiliation'] = 'Bolaget AB'
bluesheet = json.dumps(people)
r = self.client.post(url, {'apikey': apikey.hash(), 'meeting': meeting.number, 'group': group.acronym,
'item': '1', 'bluesheet': bluesheet, })
self.assertContains(r, "Done", status_code=200)
bluesheet = session.presentations.filter(document__type__slug='bluesheets').first().document
# We've submitted an update; check that the rev is right
self.assertEqual(bluesheet.rev, '01')
# Check the content
with open(bluesheet.get_file_name()) as file:
text = file.read()
for p in people:
self.assertIn(p['name'], html.unescape(text))
self.assertIn(p['affiliation'], html.unescape(text))
def test_api_upload_bluesheet(self):
url = urlreverse("ietf.meeting.views.api_upload_bluesheet")
recmanrole = RoleFactory(group__type_id="ietf", name_id="recman")
recman = recmanrole.person
meeting = MeetingFactory(type_id="ietf")
session = SessionFactory(group__type_id="wg", meeting=meeting)
group = session.group
apikey = PersonalApiKeyFactory(endpoint=url, person=recman)
people = [
@ -692,18 +599,6 @@ class CustomApiTests(TestCase):
r = self.client.post(url, {"apikey": apikey.hash(), "session_id": session.pk})
self.assertContains(r, "Missing bluesheet parameter", status_code=400)
r = self.client.post(
url,
{
"apikey": apikey.hash(),
"meeting": meeting.number,
"group": group.acronym,
"item": "1",
"bluesheet": "foobar",
},
)
self.assertContains(r, "Invalid json value: 'foobar'", status_code=400)
bad_session_pk = int(Session.objects.order_by("-pk").first().pk) + 1
r = self.client.post(
url,
@ -742,9 +637,7 @@ class CustomApiTests(TestCase):
url,
{
"apikey": apikey.hash(),
"meeting": meeting.number,
"group": group.acronym,
"item": "1",
"session_id": session.pk,
"bluesheet": bluesheet,
},
)

View file

@ -276,6 +276,7 @@ class InvestigateForm(forms.Form):
),
min_length=8,
)
task_id = forms.CharField(required=False, widget=forms.HiddenInput)
def clean_name_fragment(self):
disallowed_characters = ["%", "/", "\\", "*"]

View file

@ -31,6 +31,7 @@ from .utils import (
generate_idnits2_rfcs_obsoleted,
update_or_create_draft_bibxml_file,
ensure_draft_bibxml_path_exists,
investigate_fragment,
)
@ -119,3 +120,11 @@ def generate_draft_bibxml_files_task(days=7, process_all=False):
update_or_create_draft_bibxml_file(event.doc, event.rev)
except Exception as err:
log.log(f"Error generating bibxml for {event.doc.name}-{event.rev}: {err}")
@shared_task(ignore_result=False)
def investigate_fragment_task(name_fragment: str):
return {
"name_fragment": name_fragment,
"results": investigate_fragment(name_fragment),
}

View file

@ -3280,7 +3280,8 @@ class InvestigateTests(TestCase):
"draft-this-should-not-be-possible-00.txt",
)
def test_investigate(self):
def test_investigate_get(self):
"""GET with no querystring should retrieve the investigate UI"""
url = urlreverse("ietf.doc.views_doc.investigate")
login_testing_unauthorized(self, "secretary", url)
r = self.client.get(url)
@ -3288,36 +3289,143 @@ class InvestigateTests(TestCase):
q = PyQuery(r.content)
self.assertEqual(len(q("form#investigate")), 1)
self.assertEqual(len(q("div#results")), 0)
r = self.client.post(url, dict(name_fragment="this-is-not-found"))
@mock.patch("ietf.doc.views_doc.AsyncResult")
def test_investgate_get_task_id(self, mock_asyncresult):
"""GET with querystring should lookup task status"""
url = urlreverse("ietf.doc.views_doc.investigate")
login_testing_unauthorized(self, "secretary", url)
mock_asyncresult.return_value.ready.return_value = True
r = self.client.get(url + "?id=a-task-id")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.json(), {"status": "ready"})
self.assertTrue(mock_asyncresult.called)
self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
mock_asyncresult.reset_mock()
mock_asyncresult.return_value.ready.return_value = False
r = self.client.get(url + "?id=a-task-id")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.json(), {"status": "notready"})
self.assertTrue(mock_asyncresult.called)
self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
@mock.patch("ietf.doc.views_doc.investigate_fragment_task")
def test_investigate_post(self, mock_investigate_fragment_task):
"""POST with a name_fragment and no task_id should start a celery task"""
url = urlreverse("ietf.doc.views_doc.investigate")
login_testing_unauthorized(self, "secretary", url)
# test some invalid cases
r = self.client.post(url, {"name_fragment": "short"}) # limit is >= 8 characters
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
self.assertFalse(mock_investigate_fragment_task.delay.called)
for char in ["*", "%", "/", "\\"]:
r = self.client.post(url, {"name_fragment": f"bad{char}character"})
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
self.assertFalse(mock_investigate_fragment_task.delay.called)
# now a valid one
mock_investigate_fragment_task.delay.return_value.id = "a-task-id"
r = self.client.post(url, {"name_fragment": "this-is-a-valid-fragment"})
self.assertEqual(r.status_code, 200)
self.assertTrue(mock_investigate_fragment_task.delay.called)
self.assertEqual(mock_investigate_fragment_task.delay.call_args, mock.call("this-is-a-valid-fragment"))
self.assertEqual(r.json(), {"id": "a-task-id"})
@mock.patch("ietf.doc.views_doc.AsyncResult")
def test_investigate_post_task_id(self, mock_asyncresult):
"""POST with name_fragment and task_id should retrieve results"""
url = urlreverse("ietf.doc.views_doc.investigate")
login_testing_unauthorized(self, "secretary", url)
# First, test a non-successful result - this could be a failure or non-existent task id
mock_result = mock_asyncresult.return_value
mock_result.successful.return_value = False
r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
self.assertContains(r, "The investigation task failed.", status_code=200)
self.assertTrue(mock_asyncresult.called)
self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
self.assertFalse(mock_result.get.called)
mock_asyncresult.reset_mock()
q = PyQuery(r.content)
self.assertEqual(q("#id_name_fragment").val(), "some-fragment")
self.assertEqual(q("#id_task_id").val(), "a-task-id")
# now the various successful result mixes
mock_result = mock_asyncresult.return_value
mock_result.successful.return_value = True
mock_result.get.return_value = {
"name_fragment": "different-fragment",
"results": {
"can_verify": set(),
"unverifiable_collections": set(),
"unexpected": set(),
}
}
r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
self.assertEqual(r.status_code, 200)
self.assertTrue(mock_asyncresult.called)
self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
mock_asyncresult.reset_mock()
q = PyQuery(r.content)
self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset")
self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared")
self.assertEqual(len(q("div#results")), 1)
self.assertEqual(len(q("table#authenticated")), 0)
self.assertEqual(len(q("table#unverifiable")), 0)
self.assertEqual(len(q("table#unexpected")), 0)
r = self.client.post(url, dict(name_fragment="mixed-provenance"))
# This file was created in setUp. It allows the view to render properly
# but its location / content don't matter for this test otherwise.
a_file_that_exists = Path(settings.INTERNET_DRAFT_PATH) / "draft-this-is-active-00.txt"
mock_result.get.return_value = {
"name_fragment": "different-fragment",
"results": {
"can_verify": {a_file_that_exists},
"unverifiable_collections": {a_file_that_exists},
"unexpected": set(),
}
}
r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
self.assertEqual(r.status_code, 200)
self.assertTrue(mock_asyncresult.called)
self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
mock_asyncresult.reset_mock()
q = PyQuery(r.content)
self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset")
self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared")
self.assertEqual(len(q("div#results")), 1)
self.assertEqual(len(q("table#authenticated")), 1)
self.assertEqual(len(q("table#unverifiable")), 1)
self.assertEqual(len(q("table#unexpected")), 0)
r = self.client.post(url, dict(name_fragment="not-be-possible"))
mock_result.get.return_value = {
"name_fragment": "different-fragment",
"results": {
"can_verify": set(),
"unverifiable_collections": set(),
"unexpected": {a_file_that_exists},
}
}
r = self.client.post(url, {"name_fragment": "some-fragment", "task_id": "a-task-id"})
self.assertEqual(r.status_code, 200)
self.assertTrue(mock_asyncresult.called)
self.assertEqual(mock_asyncresult.call_args, mock.call("a-task-id"))
mock_asyncresult.reset_mock()
q = PyQuery(r.content)
self.assertEqual(q("#id_name_fragment").val(), "different-fragment", "name_fragment should be reset")
self.assertEqual(q("#id_task_id").val(), "", "task_id should be cleared")
self.assertEqual(len(q("div#results")), 1)
self.assertEqual(len(q("table#authenticated")), 0)
self.assertEqual(len(q("table#unverifiable")), 0)
self.assertEqual(len(q("table#unexpected")), 1)
r = self.client.post(url, dict(name_fragment="short"))
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
for char in ["*", "%", "/", "\\"]:
r = self.client.post(url, dict(name_fragment=f"bad{char}character"))
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q("#id_name_fragment.is-invalid")), 1)
class LogIOErrorTests(TestCase):

View file

@ -20,6 +20,7 @@ from .tasks import (
generate_draft_bibxml_files_task,
generate_idnits2_rfcs_obsoleted_task,
generate_idnits2_rfc_status_task,
investigate_fragment_task,
notify_expirations_task,
)
@ -98,6 +99,18 @@ class TaskTests(TestCase):
self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
def test_investigate_fragment_task(self):
investigation_results = object() # singleton
with mock.patch(
"ietf.doc.tasks.investigate_fragment", return_value=investigation_results
) as mock_inv:
retval = investigate_fragment_task("some fragment")
self.assertTrue(mock_inv.called)
self.assertEqual(mock_inv.call_args, mock.call("some fragment"))
self.assertEqual(
retval, {"name_fragment": "some fragment", "results": investigation_results}
)
class Idnits2SupportTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']

View file

@ -41,10 +41,11 @@ import re
from pathlib import Path
from celery.result import AsyncResult
from django.core.cache import caches
from django.core.exceptions import PermissionDenied
from django.db.models import Max
from django.http import HttpResponse, Http404, HttpResponseBadRequest
from django.http import HttpResponse, Http404, HttpResponseBadRequest, JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from django.template.loader import render_to_string
from django.urls import reverse as urlreverse
@ -59,8 +60,9 @@ from ietf.doc.models import ( Document, DocHistory, DocEvent, BallotDocEvent, Ba
ConsensusDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent, IanaExpertDocEvent,
IESG_BALLOT_ACTIVE_STATES, STATUSCHANGE_RELATIONS, DocumentActionHolder, DocumentAuthor,
RelatedDocument, RelatedDocHistory)
from ietf.doc.tasks import investigate_fragment_task
from ietf.doc.utils import (augment_events_with_revision,
can_adopt_draft, can_unadopt_draft, get_chartering_type, get_tags_for_stream_id, investigate_fragment,
can_adopt_draft, can_unadopt_draft, get_chartering_type, get_tags_for_stream_id,
needed_ballot_positions, nice_consensus, update_telechat, has_same_ballot,
get_initial_notify, make_notify_changed_event, make_rev_history, default_consensus,
add_events_message_info, get_unicode_document_content,
@ -2275,16 +2277,67 @@ def idnits2_state(request, name, rev=None):
content_type="text/plain;charset=utf-8",
)
@role_required("Secretariat")
def investigate(request):
"""Investigate a fragment
A plain GET with no querystring returns the UI page.
POST with the task_id field empty starts an async task and returns a JSON response with
the ID needed to monitor the task for results.
GET with a querystring parameter "id" will poll the status of the async task and return "ready"
or "notready".
POST with the task_id field set to the id of a "ready" task will return its results or an error
if the task failed or the id is invalid (expired, never exited, etc).
"""
results = None
# Start an investigation or retrieve a result on a POST
if request.method == "POST":
form = InvestigateForm(request.POST)
if form.is_valid():
name_fragment = form.cleaned_data["name_fragment"]
results = investigate_fragment(name_fragment)
task_id = form.cleaned_data["task_id"]
if task_id:
# Ignore the rest of the form and retrieve the result
task_result = AsyncResult(task_id)
if task_result.successful():
retval = task_result.get()
results = retval["results"]
form.data = form.data.copy()
form.data["name_fragment"] = retval[
"name_fragment"
] # ensure consistency
del form.data["task_id"] # do not request the task result again
else:
form.add_error(
None,
"The investigation task failed. Please try again and ask for help if this recurs.",
)
# Falls through to the render at the end!
else:
name_fragment = form.cleaned_data["name_fragment"]
task_result = investigate_fragment_task.delay(name_fragment)
return JsonResponse({"id": task_result.id})
else:
form = InvestigateForm()
task_id = request.GET.get("id", None)
if task_id is not None:
# Check status if we got the "id" parameter
task_result = AsyncResult(task_id)
return JsonResponse(
{"status": "ready" if task_result.ready() else "notready"}
)
else:
# Serve up an empty form
form = InvestigateForm()
# If we get here, it is just a plain GET - serve the UI
return render(
request, "doc/investigate.html", context=dict(form=form, results=results)
request,
"doc/investigate.html",
context={
"form": form,
"results": results,
},
)

View file

@ -4666,11 +4666,6 @@ def api_upload_bluesheet(request):
content="Method not allowed", content_type="text/plain", permitted_methods=('POST',)
)
# Temporary: fall back to deprecated interface if we have old-style parameters.
# Do away with this once meetecho is using the new pk-based interface.
if any(k in request.POST for k in ['meeting', 'group', 'item']):
return deprecated_api_upload_bluesheet(request)
session_id = request.POST.get('session_id', None)
if session_id is None:
return err(400, 'Missing session_id parameter')
@ -4707,65 +4702,6 @@ def api_upload_bluesheet(request):
return HttpResponse("Done", status=200, content_type='text/plain')
def deprecated_api_upload_bluesheet(request):
def err(code, text):
return HttpResponse(text, status=code, content_type='text/plain')
if request.method == 'POST':
# parameters:
# apikey: the poster's personal API key
# meeting: number as string, i.e., '101', or 'interim-2018-quic-02'
# group: acronym or special, i.e., 'quic' or 'plenary'
# item: '1', '2', '3' (the group's first, second, third etc.
# session during the week)
# bluesheet: json blob with
# [{'name': 'Name', 'affiliation': 'Organization', }, ...]
for item in ['meeting', 'group', 'item', 'bluesheet',]:
value = request.POST.get(item)
if not value:
return err(400, "Missing %s parameter" % item)
number = request.POST.get('meeting')
sessions = Session.objects.filter(meeting__number=number)
if not sessions.exists():
return err(400, "No sessions found for meeting '%s'" % (number, ))
acronym = request.POST.get('group')
sessions = sessions.filter(group__acronym=acronym)
if not sessions.exists():
return err(400, "No sessions found in meeting '%s' for group '%s'" % (number, acronym))
session_times = [ (s.official_timeslotassignment().timeslot.time, s.id, s) for s in sessions if s.official_timeslotassignment() ]
session_times.sort()
item = request.POST.get('item')
if not item.isdigit():
return err(400, "Expected a numeric value for 'item', found '%s'" % (item, ))
n = int(item)-1 # change 1-based to 0-based
try:
time, __, session = session_times[n]
except IndexError:
return err(400, "No item '%s' found in list of sessions for group" % (item, ))
bjson = request.POST.get('bluesheet')
try:
data = json.loads(bjson)
except json.decoder.JSONDecodeError:
return err(400, "Invalid json value: '%s'" % (bjson, ))
text = render_to_string('meeting/bluesheet.txt', {
'data': data,
'session': session,
})
fd, name = tempfile.mkstemp(suffix=".txt", text=True)
os.close(fd)
with open(name, "w") as file:
file.write(text)
with open(name, "br") as file:
save_err = save_bluesheet(request, session, file)
if save_err:
return err(400, save_err)
else:
return err(405, "Method not allowed")
return HttpResponse("Done", status=200, content_type='text/plain')
def important_dates(request, num=None, output_format=None):
assert num is None or num.isdigit()
preview_roles = ['Area Director', 'Secretariat', 'IETF Chair', 'IAD', ]

View file

@ -452,6 +452,7 @@ INSTALLED_APPS = [
'django_vite',
'django_bootstrap5',
'django_celery_beat',
'django_celery_results',
'corsheaders',
'django_markup',
'oidc_provider',
@ -744,8 +745,8 @@ INTERNET_DRAFT_PDF_PATH = '/a/www/ietf-datatracker/pdf/'
RFC_PATH = '/a/www/ietf-ftp/rfc/'
CHARTER_PATH = '/a/ietfdata/doc/charter/'
CHARTER_COPY_PATH = '/a/www/ietf-ftp/ietf' # copy 1wg-charters files here if set
CHARTER_COPY_OTHER_PATH = '/a/www/ftp/ietf'
CHARTER_COPY_THIRD_PATH = '/a/www/ftp/charter'
CHARTER_COPY_OTHER_PATH = '/a/ftp/ietf'
CHARTER_COPY_THIRD_PATH = '/a/ftp/charter'
GROUP_SUMMARY_PATH = '/a/www/ietf-ftp/ietf'
BOFREQ_PATH = '/a/ietfdata/doc/bofreq/'
CONFLICT_REVIEW_PATH = '/a/ietfdata/doc/conflict-review'
@ -1226,7 +1227,9 @@ CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True # the default, but setting it
# https://docs.celeryq.dev/en/stable/userguide/tasks.html#rpc-result-backend-rabbitmq-qpid
# Results can be retrieved only once and only by the caller of the task. Results will be
# lost if the message broker restarts.
CELERY_RESULT_BACKEND = 'rpc://' # sends a msg via the msg broker
CELERY_RESULT_BACKEND = 'django-cache' # use a Django cache for results
CELERY_CACHE_BACKEND = 'celery-results' # which Django cache to use
CELERY_RESULT_EXPIRES = datetime.timedelta(minutes=5) # how long are results valid? (Default is 1 day)
CELERY_TASK_IGNORE_RESULT = True # ignore results unless specifically enabled for a task
# Meetecho API setup: Uncomment this and provide real credentials to enable
@ -1309,6 +1312,11 @@ if "CACHES" not in locals():
"MAX_ENTRIES": 5000,
},
},
"celery-results": {
"BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
"LOCATION": f"{MEMCACHED_HOST}:{MEMCACHED_PORT}",
"KEY_PREFIX": "ietf:celery",
},
}
else:
CACHES = {
@ -1347,6 +1355,11 @@ if "CACHES" not in locals():
"MAX_ENTRIES": 5000,
},
},
"celery-results": {
"BACKEND": "django.core.cache.backends.memcached.PyMemcacheCache",
"LOCATION": "app:11211",
"KEY_PREFIX": "ietf:celery",
},
}
PUBLISH_IPR_STATES = ['posted', 'removed', 'removed_objfalse']

View file

@ -0,0 +1,53 @@
// Copyright The IETF Trust 2025, All Rights Reserved
document.addEventListener('DOMContentLoaded', () => {
const investigateForm = document.forms['investigate']
investigateForm.addEventListener('submit', (event) => {
// Intercept submission unless we've filled in the task_id field
if (!investigateForm.elements['id_task_id'].value) {
event.preventDefault()
runInvestigation()
}
})
const runInvestigation = async () => {
// Submit the request
const response = await fetch('', {
method: investigateForm.method, body: new FormData(investigateForm)
})
if (!response.ok) {
loadResultsFromTask('bogus-task-id') // bad task id will generate an error from Django
}
const taskId = (await response.json()).id
// Poll for completion of the investigation up to 18*10 = 180 seconds
waitForResults(taskId, 18)
}
const waitForResults = async (taskId, retries) => {
// indicate that investigation is in progress
document.getElementById('spinner').classList.remove('d-none')
document.getElementById('investigate-button').disabled = true
investigateForm.elements['id_name_fragment'].disabled = true
const response = await fetch('?' + new URLSearchParams({ id: taskId }))
if (!response.ok) {
loadResultsFromTask('bogus-task-id') // bad task id will generate an error from Django
}
const result = await response.json()
if (result.status !== 'ready' && retries > 0) {
// 10 seconds per retry
setTimeout(waitForResults, 10000, taskId, retries - 1)
} else {
/* Either the response is ready or we timed out waiting. In either case, submit
the task_id via POST and let Django display an error if it's not ready. Before
submitting, re-enable the form fields so the POST is valid. Other in-progress
indicators will be reset when the POST response is loaded. */
loadResultsFromTask(taskId)
}
}
const loadResultsFromTask = (taskId) => {
investigateForm.elements['id_name_fragment'].disabled = false
investigateForm.elements['id_task_id'].value = taskId
investigateForm.submit()
}
})

View file

@ -1598,5 +1598,7 @@ def populate_yang_model_dirs():
except UnicodeDecodeError as e:
log.log(f"Error processing {item.name}: {e}")
ftp_moddir = Path(settings.FTP_DIR) / "yang" / "draftmod"
ftp_moddir = Path(settings.FTP_DIR) / "yang" / "draftmod/"
if not moddir.endswith("/"):
moddir += "/"
subprocess.call(("/usr/bin/rsync", "-aq", "--delete", moddir, ftp_moddir))

View file

@ -373,6 +373,7 @@ def update_docs_from_rfc_index(
"INDEPENDENT": StreamName.objects.get(slug="ise"),
"IRTF": StreamName.objects.get(slug="irtf"),
"IAB": StreamName.objects.get(slug="iab"),
"Editorial": StreamName.objects.get(slug="editorial"),
"Legacy": StreamName.objects.get(slug="legacy"),
}

View file

@ -6,112 +6,122 @@
<link rel="stylesheet" href="{% static "ietf/css/list.css" %}">
{% endblock %}
{% block content %}
{% origin %}
<h1>Investigate</h1>
<form id="investigate" method="post">
{% csrf_token %}
{% bootstrap_form form %}
<button class="btn btn-primary" type="submit">Investigate</button>
</form>
{% if results %}
<div id="results">
{% if results.can_verify %}
<h2>These can be authenticated</h2>
<table id="authenticated" class="table table-sm table-striped tablesorter">
<thead>
<tr>
<th scope="col" data-sort="name">Name</th>
<th scope="col" data-sort="modified">Last Modified On</th>
<th scope="col" data-sort="link">Link</th>
<th scope="col" data-sort="source">Source</th>
</tr>
</thead>
<tbody>
{% for path in results.can_verify %}
{% with url=path|url_for_path %}
<tr>
<td>{{path.name}}</td>
<td>
{% if path|mtime_is_epoch %}
Timestamp has been lost (is Unix Epoch)
{% else %}
{{path|mtime|date:"DATETIME_FORMAT"}}
{% endif %}
</td>
<td><a href="{{url}}">{{url}}</a></td>
<td>{{path}}</td>
</tr>
{% endwith %}
{% endfor %}
</tbody>
</table>
{% else %}
<h2>Nothing with this name fragment can be authenticated</h2>
{% endif %}
<hr>
{% if results.unverifiable_collections %}
<h2>These are in the archive, but cannot be authenticated</h2>
<table id="unverifiable" class="table table-sm table-striped tablesorter">
<thead>
<tr>
<th scope="col" data-sort="name">Name</th>
<th scope="col" data-sort="modified">Last Modified On</th>
<th scope="col" data-sort="link">Link</th>
<th scope="col" data-sort="source">Source</th>
</tr>
</thead>
<tbody>
{% for path in results.unverifiable_collections %}
{% with url=path|url_for_path %}
<tr>
<td>{{path.name}}</td>
<td>
{% if path|mtime_is_epoch %}
Timestamp has been lost (is Unix Epoch)
{% else %}
{{path|mtime|date:"DATETIME_FORMAT"}}
{% endif %}
</td>
<td><a href="{{url}}">{{url}}</a></td>
<td>{{path}}</td>
</tr>
{% endwith %}
{% endfor %}
</tbody>
</table>
{% endif %}
{% if results.unexpected %}
<h2>These are unexpected and we do not know what their origin is. These cannot be authenticated</h2>
<table id="unexpected" class="table table-sm table-striped tablesorter">
<thead>
<tr>
<th scope="col" data-sort="name">Name</th>
<th scope="col" data-sort="modified">Last Modified On</th>
<th scope="col" data-sort="link">Link</th>
</tr>
</thead>
<tbody>
{% for path in results.unexpected %}
{% with url=path|url_for_path %}
<tr>
<td>{{path.name}}</td>
<td>
{% if path|mtime_is_epoch %}
Timestamp has been lost (is Unix Epoch)
{% else %}
{{path|mtime|date:"DATETIME_FORMAT"}}
{% endif %}
</td>
<td><a href="{{url}}">{{url}}</a></td>
</tr>
{% endwith %}
{% endfor %}
</tbody>
</table>
{% endif %}
{% origin %}
<h1>Investigate</h1>
<div class="mb-3">
<form id="investigate" method="post">
{% csrf_token %}
{% bootstrap_form form %}
<button class="btn btn-primary" type="submit" id="investigate-button">
<span id="spinner"
class="spinner-border spinner-border-sm d-none"
role="status"
aria-hidden="true">
</span>
Investigate
</button>
</form>
</div>
{% endif %}
{% if results %}
<div id="results">
{% if results.can_verify %}
<h2>These can be authenticated</h2>
<table id="authenticated" class="table table-sm table-striped tablesorter">
<thead>
<tr>
<th scope="col" data-sort="name">Name</th>
<th scope="col" data-sort="modified">Last Modified On</th>
<th scope="col" data-sort="link">Link</th>
<th scope="col" data-sort="source">Source</th>
</tr>
</thead>
<tbody>
{% for path in results.can_verify %}
{% with url=path|url_for_path %}
<tr>
<td>{{ path.name }}</td>
<td>
{% if path|mtime_is_epoch %}
Timestamp has been lost (is Unix Epoch)
{% else %}
{{ path|mtime|date:"DATETIME_FORMAT" }}
{% endif %}
</td>
<td><a href="{{ url }}">{{ url }}</a></td>
<td>{{ path }}</td>
</tr>
{% endwith %}
{% endfor %}
</tbody>
</table>
{% else %}
<h2>Nothing with this name fragment can be authenticated</h2>
{% endif %}
<hr>
{% if results.unverifiable_collections %}
<h2>These are in the archive, but cannot be authenticated</h2>
<table id="unverifiable" class="table table-sm table-striped tablesorter">
<thead>
<tr>
<th scope="col" data-sort="name">Name</th>
<th scope="col" data-sort="modified">Last Modified On</th>
<th scope="col" data-sort="link">Link</th>
<th scope="col" data-sort="source">Source</th>
</tr>
</thead>
<tbody>
{% for path in results.unverifiable_collections %}
{% with url=path|url_for_path %}
<tr>
<td>{{ path.name }}</td>
<td>
{% if path|mtime_is_epoch %}
Timestamp has been lost (is Unix Epoch)
{% else %}
{{ path|mtime|date:"DATETIME_FORMAT" }}
{% endif %}
</td>
<td><a href="{{ url }}">{{ url }}</a></td>
<td>{{ path }}</td>
</tr>
{% endwith %}
{% endfor %}
</tbody>
</table>
{% endif %}
{% if results.unexpected %}
<h2>These are unexpected and we do not know what their origin is. These cannot be authenticated</h2>
<table id="unexpected" class="table table-sm table-striped tablesorter">
<thead>
<tr>
<th scope="col" data-sort="name">Name</th>
<th scope="col" data-sort="modified">Last Modified On</th>
<th scope="col" data-sort="link">Link</th>
</tr>
</thead>
<tbody>
{% for path in results.unexpected %}
{% with url=path|url_for_path %}
<tr>
<td>{{ path.name }}</td>
<td>
{% if path|mtime_is_epoch %}
Timestamp has been lost (is Unix Epoch)
{% else %}
{{ path|mtime|date:"DATETIME_FORMAT" }}
{% endif %}
</td>
<td><a href="{{ url }}">{{ url }}</a></td>
</tr>
{% endwith %}
{% endfor %}
</tbody>
</table>
{% endif %}
</div>
{% endif %}
{% endblock %}
{% block js %}
<script src="{% static "ietf/js/list.js" %}"></script>
<script src="{% static "ietf/js/investigate.js" %}"></script>
{% endblock %}

View file

@ -1,209 +0,0 @@
# Copyright The IETF Trust 2018-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import io
import warnings
from collections import OrderedDict
from django.apps import apps
from django.contrib.admin.utils import NestedObjects
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import parse_apps_and_model_labels
from django.db import DEFAULT_DB_ALIAS, router
import debug # pyflakes:ignore
debug.debug = True
class ProxyModelWarning(Warning):
pass
class Command(BaseCommand):
help = (
"Output a database object and its related objects as a fixture of the given format "
)
def add_arguments(self, parser):
parser.add_argument(
'args', metavar='app_label.ModelName', nargs=1,
help='Specifies the app_label.ModelName for which to dump objects given by --pks',
)
parser.add_argument(
'--format', default='json', dest='format',
help='Specifies the output serialization format for fixtures.',
)
parser.add_argument(
'--indent', default=None, dest='indent', type=int,
help='Specifies the indent level to use when pretty-printing output.',
)
parser.add_argument(
'--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a specific database to dump fixtures from. '
'Defaults to the "default" database.',
)
parser.add_argument(
'-e', '--exclude', dest='exclude', action='append', default=[],
help='An app_label or app_label.ModelName to exclude '
'(use multiple --exclude to exclude multiple apps/models).',
)
parser.add_argument(
'--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
help='Use natural foreign keys if they are available.',
)
parser.add_argument(
'--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
help='Use natural primary keys if they are available.',
)
parser.add_argument(
'-o', '--output', default=None, dest='output',
help='Specifies file to which the output is written.'
)
parser.add_argument(
'--pks', dest='primary_keys', required=True,
help="Only dump objects with given primary keys. Accepts a comma-separated "
"list of keys. This option only works when you specify one model.",
)
def handle(self, *app_labels, **options):
format = options['format']
indent = options['indent']
using = options['database']
excludes = options['exclude']
output = options['output']
show_traceback = options['traceback']
use_natural_foreign_keys = options['use_natural_foreign_keys']
use_natural_primary_keys = options['use_natural_primary_keys']
pks = options['primary_keys']
if pks:
primary_keys = [pk.strip() for pk in pks.split(',')]
else:
primary_keys = []
excluded_models, excluded_apps = parse_apps_and_model_labels(excludes)
if len(app_labels) == 0:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict(
(app_config, None) for app_config in apps.get_app_configs()
if app_config.models_module is not None and app_config not in excluded_apps
)
else:
if len(app_labels) > 1 and primary_keys:
raise CommandError("You can only use --pks option with one model")
app_list = OrderedDict()
for label in app_labels:
try:
app_label, model_label = label.split('.')
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
try:
model = app_config.get_model(model_label)
except LookupError:
raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
app_list_value = app_list.setdefault(app_config, [])
# We may have previously seen a "all-models" request for
# this app (no model qualifier was given). In this case
# there is no need adding specific models to the list.
if app_list_value is not None:
if model not in app_list_value:
app_list_value.append(model)
except ValueError:
if primary_keys:
raise CommandError("You can only use --pks option with one model")
# This is just an app - no model qualifier
app_label = label
try:
app_config = apps.get_app_config(app_label)
except LookupError as e:
raise CommandError(str(e))
if app_config.models_module is None or app_config in excluded_apps:
continue
app_list[app_config] = None
# Check that the serialization format exists; this is a shortcut to
# avoid collating all the objects and _then_ failing.
if format not in serializers.get_public_serializer_formats():
try:
serializers.get_serializer(format)
except serializers.SerializerDoesNotExist:
pass
raise CommandError("Unknown serialization format: %s" % format)
def flatten(l):
if isinstance(l, list):
for el in l:
if isinstance(el, list):
for sub in flatten(el):
yield sub
else:
yield el
else:
yield l
def get_objects(count_only=False):
"""
Collate the objects to be serialized. If count_only is True, just
count the number of objects to be serialized.
"""
models = serializers.sort_dependencies(list(app_list.items()))
for model in models:
if model in excluded_models:
continue
if model._meta.proxy and model._meta.proxy_for_model not in models:
warnings.warn(
"%s is a proxy model and won't be serialized." % model._meta.label,
category=ProxyModelWarning,
)
if not model._meta.proxy and router.allow_migrate_model(using, model):
objects = model._default_manager
queryset = objects.using(using).order_by(model._meta.pk.name)
if primary_keys:
queryset = queryset.filter(pk__in=primary_keys)
if count_only:
yield queryset.order_by().count()
else:
for obj in queryset.iterator():
collector = NestedObjects(using=using)
collector.collect([obj,])
object_list = list(flatten(collector.nested()))
object_list.reverse()
for o in object_list:
yield o
try:
self.stdout.ending = None
progress_output = None
object_count = 0
# If dumpdata is outputting to stdout, there is no way to display progress
if (output and self.stdout.isatty() and options['verbosity'] > 0):
progress_output = self.stdout
object_count = sum(get_objects(count_only=True))
stream = io.open(output, 'w') if output else None
try:
serializers.serialize(
format, get_objects(), indent=indent,
use_natural_foreign_keys=use_natural_foreign_keys,
use_natural_primary_keys=use_natural_primary_keys,
stream=stream or self.stdout, progress_output=progress_output,
object_count=object_count,
)
finally:
if stream:
stream.close()
except Exception as e:
if show_traceback:
raise
raise CommandError("Unable to serialize database: %s" % e)

View file

@ -1,129 +0,0 @@
# Copyright The IETF Trust 2018-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import gzip
import os
#import sys
import tqdm
import zipfile
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
from django.core.exceptions import ObjectDoesNotExist
from django.core import serializers
from django.db import DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections
from django.db.models.signals import post_save
from django.utils.encoding import force_str
import django.core.management.commands.loaddata as loaddata
import debug # pyflakes:ignore
from ietf.community.signals import notify_of_events_receiver
class Command(loaddata.Command):
    help = ("""
    Load a fixture of related objects to the database. The fixture is expected
    to contain a set of related objects, created with the 'dumprelated' management
    command. It differs from the 'loaddata' command in that it silently ignores
    attempts to load duplicate entries, and continues loading subsequent entries.
    """)

    def add_arguments(self, parser):
        """Register fixture filenames plus loaddata-compatible options."""
        parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture files.')
        parser.add_argument(
            '--database', action='store', dest='database', default=DEFAULT_DB_ALIAS,
            help='Nominates a specific database to load fixtures into. Defaults to the "default" database.',
        )
        parser.add_argument(
            '--ignorenonexistent', '-i', action='store_true', dest='ignore', default=False,
            help='Ignores entries in the serialized data for fields that do not '
            'currently exist on the model.',
        )

    def handle(self, *args, **options):
        """Load each fixture file, skipping entries that already exist.

        Makes two passes over every fixture: the first deserializes it only
        to count objects (for the progress bar), the second saves each
        object, ignoring "Duplicate entry" database errors.
        """
        self.ignore = options['ignore']
        self.using = options['database']
        self.verbosity = options['verbosity']
        #
        # Map the compression suffix parsed from the filename to an opener.
        self.compression_formats = {
            None: (open, 'rb'),
            'gz': (gzip.GzipFile, 'rb'),
            'zip': (SingleZipReader, 'r'),
        }
        if has_bz2:
            self.compression_formats['bz2'] = (bz2.BZ2File, 'r')
        #
        self.serialization_formats = serializers.get_public_serializer_formats()
        #
        # NOTE(review): this calls notify_of_events_receiver() and disconnects
        # the returned object -- confirm the factory returns the same receiver
        # that was originally connected, otherwise this disconnect is a no-op.
        post_save.disconnect(notify_of_events_receiver())
        #
        connection = connections[self.using]
        self.fixture_count = 0
        self.loaded_object_count = 0
        self.fixture_object_count = 0
        #
        for arg in args:
            fixture_file = arg
            self.stdout.write("Loading objects from %s" % fixture_file)
            _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
            open_method, mode = self.compression_formats[cmp_fmt]
            # First pass: count objects so tqdm can show a meaningful total.
            # (The whole fixture is deserialized once just for the count.)
            fixture = open_method(fixture_file, mode)
            try:
                objects_in_fixture = 0
                self.stdout.write("Getting object count...\b\b\b", ending='')
                self.stdout.flush()
                for o in serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,):
                    objects_in_fixture += 1
                self.stdout.write(" %d" % objects_in_fixture)
            finally:
                # Close the counting pass's handle (previously leaked).
                fixture.close()
            #
            # Second pass: actually save the objects.
            fixture = open_method(fixture_file, mode)
            try:
                self.fixture_count += 1
                objects = serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,)
                with connection.constraint_checks_disabled():
                    for obj in tqdm.tqdm(objects, total=objects_in_fixture):
                        try:
                            obj.save(using=self.using)
                            self.loaded_object_count += 1
                        except (DatabaseError, IntegrityError, ObjectDoesNotExist, AttributeError) as e:
                            error_msg = force_str(e)
                            if "Duplicate entry" in error_msg:
                                # Already loaded: skip silently and continue.
                                pass
                            else:
                                self.stderr.write("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                    'app_label': obj.object._meta.app_label,
                                    'object_name': obj.object._meta.object_name,
                                    'pk': obj.object.pk,
                                    'error_msg': error_msg,
                                }, )
            finally:
                # Close the load pass's handle (previously leaked).
                fixture.close()
            self.fixture_object_count += objects_in_fixture

        # Summarize; distinguish full loads from partial (duplicate-skipping).
        if self.verbosity >= 1:
            if self.fixture_object_count == self.loaded_object_count:
                self.stdout.write(
                    "Installed %d object(s) from %d fixture(s)"
                    % (self.loaded_object_count, self.fixture_count)
                )
            else:
                self.stdout.write(
                    "Installed %d object(s) (of %d) from %d fixture(s)"
                    % (self.loaded_object_count, self.fixture_object_count, self.fixture_count)
                )
class SingleZipReader(zipfile.ZipFile):
    """A ZipFile restricted to archives holding exactly one member.

    Used as a fixture "opener": construction rejects multi-file archives,
    and read() (overriding ZipFile.read, which takes a member name) returns
    the bytes of the sole member.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        members = self.namelist()
        if len(members) != 1:
            raise ValueError("Zip-compressed fixtures must contain one file.")

    def read(self):
        sole_member = self.namelist()[0]
        return super().read(sole_member)

View file

@ -15,6 +15,16 @@ spec:
labels:
app: auth
spec:
# Hard co-scheduling rule: place this pod only on a node that already runs a
# pod labeled app=datatracker (matched per node via the hostname topology key).
# NOTE(review): with requiredDuringScheduling* the pod stays Pending when no
# node hosts a datatracker pod -- confirm that is intended.
affinity:
  podAffinity:
    requiredDuringSchedulingIgnoredDuringExecution:
      - labelSelector:
          matchExpressions:
            - key: app
              operator: In
              values:
                - datatracker
        topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:

View file

@ -17,6 +17,16 @@ spec:
labels:
app: beat
spec:
# Hard co-scheduling rule: place this pod only on a node that already runs a
# pod labeled app=datatracker (matched per node via the hostname topology key).
# NOTE(review): with requiredDuringScheduling* the pod stays Pending when no
# node hosts a datatracker pod -- confirm that is intended.
affinity:
  podAffinity:
    requiredDuringSchedulingIgnoredDuringExecution:
      - labelSelector:
          matchExpressions:
            - key: app
              operator: In
              values:
                - datatracker
        topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:

View file

@ -17,6 +17,16 @@ spec:
labels:
app: celery
spec:
# Hard co-scheduling rule: place this pod only on a node that already runs a
# pod labeled app=datatracker (matched per node via the hostname topology key).
# NOTE(review): with requiredDuringScheduling* the pod stays Pending when no
# node hosts a datatracker pod -- confirm that is intended.
affinity:
  podAffinity:
    requiredDuringSchedulingIgnoredDuringExecution:
      - labelSelector:
          matchExpressions:
            - key: app
              operator: In
              values:
                - datatracker
        topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:

View file

@ -13,6 +13,16 @@ spec:
labels:
app: memcached
spec:
# Hard co-scheduling rule: place this pod only on a node that already runs a
# pod labeled app=datatracker (matched per node via the hostname topology key).
# NOTE(review): with requiredDuringScheduling* the pod stays Pending when no
# node hosts a datatracker pod -- confirm that is intended.
affinity:
  podAffinity:
    requiredDuringSchedulingIgnoredDuringExecution:
      - labelSelector:
          matchExpressions:
            - key: app
              operator: In
              values:
                - datatracker
        topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:

View file

@ -13,6 +13,16 @@ spec:
labels:
app: rabbitmq
spec:
# Hard co-scheduling rule: place this pod only on a node that already runs a
# pod labeled app=datatracker (matched per node via the hostname topology key).
# NOTE(review): with requiredDuringScheduling* the pod stays Pending when no
# node hosts a datatracker pod -- confirm that is intended.
affinity:
  podAffinity:
    requiredDuringSchedulingIgnoredDuringExecution:
      - labelSelector:
          matchExpressions:
            - key: app
              operator: In
              values:
                - datatracker
        topologyKey: "kubernetes.io/hostname"
securityContext:
runAsNonRoot: true
containers:

View file

@ -132,6 +132,7 @@
"ietf/static/js/highcharts.js",
"ietf/static/js/highstock.js",
"ietf/static/js/ietf.js",
"ietf/static/js/investigate.js",
"ietf/static/js/ipr-edit.js",
"ietf/static/js/ipr-search.js",
"ietf/static/js/js-cookie.js",

View file

@ -13,6 +13,7 @@ Django>4.2,<5
django-analytical>=3.1.0
django-bootstrap5>=21.3
django-celery-beat>=2.3.0
django-celery-results>=2.5.1
django-csp>=3.7
django-cors-headers>=3.11.0
django-debug-toolbar>=3.2.4