ci: merge main to release
This commit is contained in:
commit
5955154223
4
.github/workflows/build.yml
vendored
4
.github/workflows/build.yml
vendored
|
@ -137,9 +137,9 @@ jobs:
|
|||
echo "Running tests..."
|
||||
if [[ "x${{ github.event.inputs.ignoreLowerCoverage }}" == "xtrue" ]]; then
|
||||
echo "Lower coverage failures will be ignored."
|
||||
./ietf/manage.py test --settings=settings_postgrestest --ignore-lower-coverage
|
||||
./ietf/manage.py test --validate-html-harder --settings=settings_postgrestest --ignore-lower-coverage
|
||||
else
|
||||
./ietf/manage.py test --settings=settings_postgrestest
|
||||
./ietf/manage.py test --validate-html-harder --settings=settings_postgrestest
|
||||
fi
|
||||
coverage xml
|
||||
|
||||
|
|
2
.github/workflows/ci-run-tests.yml
vendored
2
.github/workflows/ci-run-tests.yml
vendored
|
@ -45,7 +45,7 @@ jobs:
|
|||
exit 1
|
||||
fi
|
||||
echo "Running tests..."
|
||||
./ietf/manage.py test --settings=settings_postgrestest
|
||||
./ietf/manage.py test --validate-html-harder --settings=settings_postgrestest
|
||||
coverage xml
|
||||
|
||||
- name: Upload Coverage Results to Codecov
|
||||
|
|
115
.vscode/settings.json
vendored
115
.vscode/settings.json
vendored
|
@ -1,59 +1,60 @@
|
|||
{
|
||||
"taskExplorer.exclude": [
|
||||
"**/.vscode-test/**",
|
||||
"**/bin/**",
|
||||
"**/build/**",
|
||||
"**/CompiledOutput/**",
|
||||
"**/dist/**",
|
||||
"**/doc/**",
|
||||
"**/ext/**",
|
||||
"**/out/**",
|
||||
"**/output/**",
|
||||
"**/packages/**",
|
||||
"**/release/**",
|
||||
"**/releases/**",
|
||||
"**/samples/**",
|
||||
"**/sdks/**",
|
||||
"**/static/**",
|
||||
"**/target/**",
|
||||
"**/test/**",
|
||||
"**/third_party/**",
|
||||
"**/vendor/**",
|
||||
"**/work/**",
|
||||
"/workspace/bootstrap/nuget/MyGet.ps1"
|
||||
],
|
||||
"taskExplorer.enableAnt": false,
|
||||
"taskExplorer.enableAppPublisher": false,
|
||||
"taskExplorer.enablePipenv": false,
|
||||
"taskExplorer.enableBash": false,
|
||||
"taskExplorer.enableBatch": false,
|
||||
"taskExplorer.enableGradle": false,
|
||||
"taskExplorer.enableGrunt": false,
|
||||
"taskExplorer.enableGulp": false,
|
||||
"taskExplorer.enablePerl": false,
|
||||
"taskExplorer.enableMake": false,
|
||||
"taskExplorer.enableMaven": false,
|
||||
"taskExplorer.enableNsis": false,
|
||||
"taskExplorer.enableNpm": false,
|
||||
"taskExplorer.enablePowershell": false,
|
||||
"taskExplorer.enablePython": false,
|
||||
"taskExplorer.enableRuby": false,
|
||||
"taskExplorer.enableTsc": false,
|
||||
"taskExplorer.enableWorkspace": true,
|
||||
"taskExplorer.enableExplorerView": false,
|
||||
"taskExplorer.enableSideBar": true,
|
||||
"search.exclude": {
|
||||
"**/.yarn": true,
|
||||
"**/.pnp.*": true
|
||||
},
|
||||
"eslint.nodePath": ".yarn/sdks",
|
||||
"eslint.validate": [
|
||||
"javascript",
|
||||
"javascriptreact",
|
||||
"vue"
|
||||
],
|
||||
"python.linting.pylintArgs": ["--load-plugins", "pylint_django"],
|
||||
"python.testing.pytestEnabled": false,
|
||||
"python.testing.unittestEnabled": false,
|
||||
"python.linting.enabled": true
|
||||
"taskExplorer.exclude": [
|
||||
"**/.vscode-test/**",
|
||||
"**/bin/**",
|
||||
"**/build/**",
|
||||
"**/CompiledOutput/**",
|
||||
"**/dist/**",
|
||||
"**/doc/**",
|
||||
"**/ext/**",
|
||||
"**/out/**",
|
||||
"**/output/**",
|
||||
"**/packages/**",
|
||||
"**/release/**",
|
||||
"**/releases/**",
|
||||
"**/samples/**",
|
||||
"**/sdks/**",
|
||||
"**/static/**",
|
||||
"**/target/**",
|
||||
"**/test/**",
|
||||
"**/third_party/**",
|
||||
"**/vendor/**",
|
||||
"**/work/**",
|
||||
"/workspace/bootstrap/nuget/MyGet.ps1"
|
||||
],
|
||||
"taskExplorer.enabledTasks": {
|
||||
"ant": false,
|
||||
"bash": false,
|
||||
"batch": false,
|
||||
"composer": false,
|
||||
"gradle": false,
|
||||
"grunt": false,
|
||||
"gulp": false,
|
||||
"make": false,
|
||||
"maven": false,
|
||||
"npm": false,
|
||||
"perl": false,
|
||||
"pipenv": false,
|
||||
"powershell": false,
|
||||
"python": false,
|
||||
"ruby": false,
|
||||
"tsc": false
|
||||
},
|
||||
"taskExplorer.enableExplorerView": false,
|
||||
"taskExplorer.enableSideBar": true,
|
||||
"taskExplorer.showLastTasks": false,
|
||||
"search.exclude": {
|
||||
"**/.yarn": true,
|
||||
"**/.pnp.*": true
|
||||
},
|
||||
"eslint.nodePath": ".yarn/sdks",
|
||||
"eslint.validate": [
|
||||
"javascript",
|
||||
"javascriptreact",
|
||||
"vue"
|
||||
],
|
||||
"python.linting.pylintArgs": ["--load-plugins", "pylint_django"],
|
||||
"python.testing.pytestEnabled": false,
|
||||
"python.testing.unittestEnabled": false,
|
||||
"python.linting.enabled": true
|
||||
}
|
||||
|
|
4
LICENSE
4
LICENSE
|
@ -1,6 +1,6 @@
|
|||
BSD 3-Clause License
|
||||
|
||||
Copyright (c) 2008-2022, The IETF Trust
|
||||
Copyright (c) 2008-2023, The IETF Trust
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
|
@ -26,4 +26,4 @@ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
|
|
@ -58,6 +58,8 @@ Click the <kbd>Fork</kbd> button in the top-right corner of the repository to cr
|
|||
|
||||
As outlined in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide, you will first want to create a fork of the datatracker project in your personal GitHub account before cloning it.
|
||||
|
||||
Windows developers: [Start with WSL2 from the beginning](https://github.com/ietf-tools/.github/blob/main/docs/windows-dev.md).
|
||||
|
||||
Because of the extensive history of this project, cloning the datatracker project locally can take a long time / disk space. You can speed up the cloning process by limiting the history depth, for example *(replace `USERNAME` with your GitHub username)*:
|
||||
|
||||
- To fetch only up to the 10 latest commits:
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
span.meeting-warning(v-if='agendaStore.meeting.warningNote') {{agendaStore.meeting.warningNote}}
|
||||
h4
|
||||
span {{agendaStore.meeting.city}}, {{ meetingDate }}
|
||||
h6.float-end.d-none.d-lg-inline(v-if='meetingUpdated') #[span.text-muted Updated:] {{ meetingUpdated }}
|
||||
h6.float-end.d-none.d-lg-inline(v-if='meetingUpdated') #[span.text-body-secondary Updated:] {{ meetingUpdated }}
|
||||
|
||||
.agenda-topnav.my-3
|
||||
meeting-navigation
|
||||
|
@ -49,7 +49,7 @@
|
|||
n-popover(v-if='!agendaStore.infoNoteShown')
|
||||
template(#trigger)
|
||||
n-button.ms-2(text, @click='toggleInfoNote')
|
||||
i.bi.bi-info-circle.text-muted
|
||||
i.bi.bi-info-circle.text-body-secondary
|
||||
span Show Info Note
|
||||
.col-12.col-sm-auto.d-flex.align-items-center
|
||||
i.bi.bi-globe.me-2
|
||||
|
|
|
@ -58,7 +58,7 @@
|
|||
)
|
||||
i.bi.bi-x-square.me-2
|
||||
span Discard
|
||||
n-divider: small.text-muted Calendar
|
||||
n-divider: small.text-body-secondary Calendar
|
||||
n-button.mt-2(
|
||||
id='agenda-quickaccess-calview-btn'
|
||||
block
|
||||
|
@ -86,7 +86,7 @@
|
|||
i.bi.bi-calendar-check.me-2
|
||||
span {{ shortMode ? '.ics' : 'Add to your calendar...' }}
|
||||
template(v-if='agendaStore.meetingDays.length > 0')
|
||||
n-divider: small.text-muted Jump to...
|
||||
n-divider: small.text-body-secondary Jump to...
|
||||
ul.nav.nav-pills.flex-column.small.agenda-quickaccess-jumpto
|
||||
li.nav-item(v-if='agendaStore.isMeetingLive')
|
||||
a.nav-link(
|
||||
|
|
|
@ -20,7 +20,7 @@ n-modal(v-model:show='modalShown')
|
|||
i.bi.bi-share
|
||||
span Share this view
|
||||
.agenda-share-content
|
||||
.text-muted.pb-2 Use the following URL for sharing the current view #[em (including any active filters)] with other users:
|
||||
.text-body-secondary.pb-2 Use the following URL for sharing the current view #[em (including any active filters)] with other users:
|
||||
n-input-group
|
||||
n-input(
|
||||
ref='filteredUrlIpt'
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
)
|
||||
template(#default)
|
||||
div(v-html='item.text')
|
||||
span.text-muted(v-else)
|
||||
span.text-body-secondary(v-else)
|
||||
em No chat log available.
|
||||
</template>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
:columns='columns'
|
||||
striped
|
||||
)
|
||||
span.text-muted(v-else)
|
||||
span.text-body-secondary(v-else)
|
||||
em No polls available.
|
||||
</template>
|
||||
|
||||
|
|
1535
dev/coverage-action/package-lock.json
generated
1535
dev/coverage-action/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
@ -14,11 +14,11 @@
|
|||
"luxon": "3.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "8.43.0",
|
||||
"eslint": "8.45.0",
|
||||
"eslint-config-standard": "17.1.0",
|
||||
"eslint-plugin-import": "2.27.5",
|
||||
"eslint-plugin-node": "11.1.0",
|
||||
"eslint-plugin-promise": "6.1.1",
|
||||
"npm-check-updates": "16.10.13"
|
||||
"npm-check-updates": "16.10.16"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,6 +27,9 @@ if [ -n "$PGHOST" ]; then
|
|||
psql -U django -h $PGHOST -d datatracker -v ON_ERROR_STOP=1 -c '\x' -c 'ALTER USER django set search_path=datatracker,public;'
|
||||
fi
|
||||
|
||||
echo "Starting memcached..."
|
||||
/usr/bin/memcached -d -u root
|
||||
|
||||
echo "Running Datatracker checks..."
|
||||
./ietf/manage.py check
|
||||
|
||||
|
|
28
dev/diff/package-lock.json
generated
28
dev/diff/package-lock.json
generated
|
@ -6,7 +6,7 @@
|
|||
"": {
|
||||
"name": "diff",
|
||||
"dependencies": {
|
||||
"chalk": "^5.2.0",
|
||||
"chalk": "^5.3.0",
|
||||
"dockerode": "^3.3.5",
|
||||
"enquirer": "^2.3.6",
|
||||
"extract-zip": "^2.0.1",
|
||||
|
@ -16,7 +16,7 @@
|
|||
"listr2": "^6.6.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"luxon": "^3.3.0",
|
||||
"pretty-bytes": "^6.1.0",
|
||||
"pretty-bytes": "^6.1.1",
|
||||
"tar": "^6.1.15",
|
||||
"yargs": "^17.7.2"
|
||||
},
|
||||
|
@ -193,9 +193,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/chalk": {
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.2.0.tgz",
|
||||
"integrity": "sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==",
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz",
|
||||
"integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==",
|
||||
"engines": {
|
||||
"node": "^12.17.0 || ^14.13 || >=16.0.0"
|
||||
},
|
||||
|
@ -981,9 +981,9 @@
|
|||
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="
|
||||
},
|
||||
"node_modules/pretty-bytes": {
|
||||
"version": "6.1.0",
|
||||
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.0.tgz",
|
||||
"integrity": "sha512-Rk753HI8f4uivXi4ZCIYdhmG1V+WKzvRMg/X+M42a6t7D07RcmopXJMDNk6N++7Bl75URRGsb40ruvg7Hcp2wQ==",
|
||||
"version": "6.1.1",
|
||||
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.1.tgz",
|
||||
"integrity": "sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==",
|
||||
"engines": {
|
||||
"node": "^14.13.1 || >=16.0.0"
|
||||
},
|
||||
|
@ -1497,9 +1497,9 @@
|
|||
}
|
||||
},
|
||||
"chalk": {
|
||||
"version": "5.2.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.2.0.tgz",
|
||||
"integrity": "sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA=="
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz",
|
||||
"integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="
|
||||
},
|
||||
"chownr": {
|
||||
"version": "1.1.4",
|
||||
|
@ -2032,9 +2032,9 @@
|
|||
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="
|
||||
},
|
||||
"pretty-bytes": {
|
||||
"version": "6.1.0",
|
||||
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.0.tgz",
|
||||
"integrity": "sha512-Rk753HI8f4uivXi4ZCIYdhmG1V+WKzvRMg/X+M42a6t7D07RcmopXJMDNk6N++7Bl75URRGsb40ruvg7Hcp2wQ=="
|
||||
"version": "6.1.1",
|
||||
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.1.tgz",
|
||||
"integrity": "sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ=="
|
||||
},
|
||||
"pump": {
|
||||
"version": "3.0.0",
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
"name": "diff",
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"chalk": "^5.2.0",
|
||||
"chalk": "^5.3.0",
|
||||
"dockerode": "^3.3.5",
|
||||
"enquirer": "^2.3.6",
|
||||
"extract-zip": "^2.0.1",
|
||||
|
@ -12,7 +12,7 @@
|
|||
"listr2": "^6.6.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"luxon": "^3.3.0",
|
||||
"pretty-bytes": "^6.1.0",
|
||||
"pretty-bytes": "^6.1.1",
|
||||
"tar": "^6.1.15",
|
||||
"yargs": "^17.7.2"
|
||||
},
|
||||
|
|
|
@ -74,7 +74,7 @@ services:
|
|||
CELERY_APP: ietf
|
||||
CELERY_ROLE: worker
|
||||
UPDATE_REQUIREMENTS_FROM: requirements.txt
|
||||
DEV_MODE: yes
|
||||
DEV_MODE: "yes"
|
||||
command:
|
||||
- '--loglevel=INFO'
|
||||
depends_on:
|
||||
|
|
|
@ -29,9 +29,9 @@ This project includes a devcontainer configuration which automates the setup of
|
|||
### Initial Setup
|
||||
|
||||
1. Launch [VS Code](https://code.visualstudio.com/)
|
||||
2. Under the **Extensions** tab, ensure you have the **Remote - Containers** ([ms-vscode-remote.remote-containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)) extension installed.
|
||||
2. Under the **Extensions** tab, ensure you have the **Dev Containers** ([ms-vscode-remote.remote-containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)) extension installed.
|
||||
* On Linux, note that the Snap installation of VS Code is [incompatible with this plugin](https://code.visualstudio.com/docs/devcontainers/containers#_system-requirements:~:text=snap%20package%20is%20not%20supported).
|
||||
* On Windows, you also need the **Remote - WSL** ([ms-vscode-remote.remote-wsl](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl)) extension to take advantage of the WSL 2 *(Windows Subsystem for Linux)* native integration.
|
||||
* On Windows, you also need the **WSL** ([ms-vscode-remote.remote-wsl](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl)) extension to take advantage of the WSL 2 *(Windows Subsystem for Linux)* native integration.
|
||||
2. Open the top-level directory of the datatracker code you fetched above.
|
||||
3. A prompt inviting you to reopen the project in containers will appear in the bottom-right corner. Click the **Reopen in Container** button. If you missed the prompt, you can press `F1`, start typing `reopen in container` task and launch it.
|
||||
4. VS Code will relaunch in the dev environment and create the containers automatically.
|
||||
|
@ -45,7 +45,7 @@ You can also open the datatracker project folder and click the **Reopen in conta
|
|||
|
||||
### Usage
|
||||
|
||||
- Under the **Run and Debug** tab, you can run the server with the debugger attached using **Run Server** (F5). Once the server is ready to accept connections, you'll be prompted to open in a browser. You can also open [http://localhost:8000](http://localhost:8000) in a browser.
|
||||
- Under the **Run and Debug** tab, you can run the server with the debugger attached using **Run Server** (F5). Once the server is ready to accept connections, you'll be prompted to open in a browser. Navigate to [http://localhost:8000](http://localhost:8000) in your preferred browser.
|
||||
|
||||
> An alternate profile **Run Server with Debug Toolbar** is also available from the dropdown menu, which displays various tools
|
||||
on top of the webpage. However, note that this configuration has a significant performance impact.
|
||||
|
@ -64,11 +64,7 @@ You can also open the datatracker project folder and click the **Reopen in conta
|
|||
|
||||

|
||||
|
||||
- Under the **SQL Tools** tab, a connection **Local Dev** is preconfigured to connect to the DB container. Using this tool, you can list tables, view records and execute SQL queries directly from VS Code.
|
||||
|
||||
> The port `3306` is also exposed to the host automatically, should you prefer to use your own SQL tool.
|
||||
|
||||

|
||||
- The pgAdmin web interface, a PostgreSQL DB browser / management UI, is available at [http://localhost:8000/pgadmin/](http://localhost:8000/pgadmin/).
|
||||
|
||||
- Under the **Task Explorer** tab, a list of available preconfigured tasks is displayed. *(You may need to expand the tree to `src > vscode` to see it.)* These are common scritps you can run *(e.g. run tests, fetch assets, etc.)*.
|
||||
|
||||
|
@ -103,7 +99,7 @@ You can also open the datatracker project folder and click the **Reopen in conta
|
|||
2. Wait for the containers to initialize. Upon completion, you will be dropped into a shell from which you can start the datatracker and execute related commands as usual, for example
|
||||
|
||||
```
|
||||
ietf/manage.py runserver 0.0.0.0:8000
|
||||
ietf/manage.py runserver 0.0.0.0:8001
|
||||
```
|
||||
|
||||
to start the datatracker.
|
||||
|
@ -161,11 +157,11 @@ docker compose down -v --rmi all
|
|||
docker image prune
|
||||
```
|
||||
|
||||
### Accessing MariaDB Port
|
||||
### Accessing PostgreSQL Port
|
||||
|
||||
The port is exposed but not mapped to `3306` to avoid potential conflicts with the host. To get the mapped port, run the command *(from the project `/docker` directory)*:
|
||||
The port is exposed but not automatically mapped to `5432` to avoid potential conflicts with the host. To get the mapped port, run the command *(from the project `/docker` directory)*:
|
||||
```sh
|
||||
docker compose port db 3306
|
||||
docker compose port db 5432
|
||||
```
|
||||
|
||||
## Notes / Troubleshooting
|
||||
|
|
|
@ -40,7 +40,7 @@ INTERNAL_IPS = [".".join(ip.split(".")[:-1] + ["1"]) for ip in ips] + ['127.0.0.
|
|||
# 'ietf.context_processors.sql_debug',
|
||||
# ]
|
||||
|
||||
DOCUMENT_PATH_PATTERN = '/assets/ietf-ftp/{doc.type_id}/'
|
||||
DOCUMENT_PATH_PATTERN = '/assets/ietfdata/doc/{doc.type_id}/'
|
||||
INTERNET_DRAFT_PATH = '/assets/ietf-ftp/internet-drafts/'
|
||||
RFC_PATH = '/assets/ietf-ftp/rfc/'
|
||||
CHARTER_PATH = '/assets/ietf-ftp/charter/'
|
||||
|
|
|
@ -725,6 +725,15 @@ class CustomApiTests(TestCase):
|
|||
self.assertEqual(r.status_code, 200)
|
||||
jsondata = r.json()
|
||||
self.assertEqual(jsondata['success'], True)
|
||||
|
||||
def test_api_get_session_matherials_no_agenda_meeting_url(self):
|
||||
meeting = MeetingFactory(type_id='ietf')
|
||||
session = SessionFactory(meeting=meeting)
|
||||
url = urlreverse('ietf.meeting.views.api_get_session_materials', kwargs={'session_id': session.pk})
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
|
||||
|
||||
class DirectAuthApiTests(TestCase):
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ def view_list(request, username=None):
|
|||
docs = docs_tracked_by_community_list(clist)
|
||||
docs, meta = prepare_document_table(request, docs, request.GET)
|
||||
|
||||
subscribed = request.user.is_authenticated and EmailSubscription.objects.filter(community_list=clist, email__person__user=request.user)
|
||||
subscribed = request.user.is_authenticated and (EmailSubscription.objects.none() if clist.pk is None else EmailSubscription.objects.filter(community_list=clist, email__person__user=request.user))
|
||||
|
||||
return render(request, 'community/view_list.html', {
|
||||
'clist': clist,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2016-2020, All Rights Reserved
|
||||
# Copyright The IETF Trust 2016-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
@ -23,7 +23,6 @@ from ietf.utils.text import xslugify
|
|||
from ietf.utils.timezone import date_today
|
||||
|
||||
|
||||
|
||||
def draft_name_generator(type_id,group,n):
|
||||
return '%s-%s-%s-%s%d'%(
|
||||
type_id,
|
||||
|
@ -577,4 +576,31 @@ class EditorialRfcFactory(RgDraftFactory):
|
|||
def reset_canonical_name(obj, create, extracted, **kwargs):
|
||||
if hasattr(obj, '_canonical_name'):
|
||||
del obj._canonical_name
|
||||
return None
|
||||
return None
|
||||
|
||||
class StatementFactory(BaseDocumentFactory):
|
||||
type_id = "statement"
|
||||
title = factory.Faker("sentence")
|
||||
group = factory.SubFactory("ietf.group.factories.GroupFactory", acronym="iab")
|
||||
|
||||
name = factory.LazyAttribute(
|
||||
lambda o: "statement-%s-%s" % (xslugify(o.group.acronym), xslugify(o.title))
|
||||
)
|
||||
uploaded_filename = factory.LazyAttribute(lambda o: f"{o.name}-{o.rev}.md")
|
||||
|
||||
published_statement_event = factory.RelatedFactory(
|
||||
"ietf.doc.factories.DocEventFactory",
|
||||
"doc",
|
||||
type="published_statement",
|
||||
time=timezone.now() - datetime.timedelta(days=1),
|
||||
)
|
||||
|
||||
@factory.post_generation
|
||||
def states(obj, create, extracted, **kwargs):
|
||||
if not create:
|
||||
return
|
||||
if extracted:
|
||||
for state_type_id, state_slug in extracted:
|
||||
obj.set_state(State.objects.get(type_id=state_type_id, slug=state_slug))
|
||||
else:
|
||||
obj.set_state(State.objects.get(type_id="statement", slug="active"))
|
||||
|
|
320
ietf/doc/management/commands/import_iab_statements.py
Normal file
320
ietf/doc/management/commands/import_iab_statements.py
Normal file
|
@ -0,0 +1,320 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
import debug # pyflakes:ignore
|
||||
|
||||
import csv
|
||||
import datetime
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from ietf.doc.models import Document, DocAlias, DocEvent, State
|
||||
from ietf.utils.text import xslugify
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Performs a one-time import of IAB statements"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if Document.objects.filter(type="statement", group__acronym="iab").exists():
|
||||
print("IAB statement documents already exist - exiting")
|
||||
exit(-1)
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
process = subprocess.Popen(
|
||||
["git", "clone", "https://github.com/kesara/iab-scraper.git", tmpdir],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
stdout, stderr = process.communicate()
|
||||
if not Path(tmpdir).joinpath("iab_minutes", "2022-12-14.md").exists():
|
||||
print("Git clone of the iab-scraper directory did not go as expected")
|
||||
print("stdout:", stdout)
|
||||
print("stderr:", stderr)
|
||||
print(f"Clean up {tmpdir} manually")
|
||||
exit(-1)
|
||||
|
||||
spreadsheet_rows = load_spreadsheet()
|
||||
with open("iab_statement_redirects.csv", "w") as redirect_file:
|
||||
redirect_writer = csv.writer(redirect_file)
|
||||
for index, (file_fix, date_string, title, url, _) in enumerate(
|
||||
spreadsheet_rows
|
||||
):
|
||||
name = url.split("/")[6].lower()
|
||||
if name.startswith("iabs"):
|
||||
name = name[5:]
|
||||
elif name.startswith("iab"):
|
||||
name = name[4:]
|
||||
if index == 1:
|
||||
name += "-archive" # https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-identifiers-and-unicode-7-0-0/archive/
|
||||
if index == 100:
|
||||
name = (
|
||||
"2010-" + name
|
||||
) # https://www.iab.org/documents/correspondence-reports-documents/docs2010/iab-statement-on-the-rpki/
|
||||
if index == 152:
|
||||
name = (
|
||||
"2018-" + name
|
||||
) # https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-statement-on-the-rpki/
|
||||
docname = f"statement-iab-{xslugify(name)}"
|
||||
ext = None
|
||||
base_sourcename = (
|
||||
f"{date_string}-{file_fix}" if file_fix != "" else date_string
|
||||
)
|
||||
if (
|
||||
Path(tmpdir)
|
||||
.joinpath("iab_statements", f"{base_sourcename}.md")
|
||||
.exists()
|
||||
):
|
||||
ext = "md"
|
||||
elif (
|
||||
Path(tmpdir)
|
||||
.joinpath("iab_statements", f"{base_sourcename}.pdf")
|
||||
.exists()
|
||||
):
|
||||
ext = "pdf"
|
||||
if ext is None:
|
||||
debug.show(
|
||||
'f"Could not find {Path(tmpdir).joinpath("iab_statements", f"{base_path}.md")}"'
|
||||
)
|
||||
continue
|
||||
filename = f"{docname}-00.{ext}"
|
||||
# Create Document
|
||||
doc = Document.objects.create(
|
||||
name=docname,
|
||||
type_id="statement",
|
||||
title=title,
|
||||
group_id=7, # The IAB group
|
||||
rev="00",
|
||||
uploaded_filename=filename,
|
||||
)
|
||||
doc.set_state(State.objects.get(type_id="statement", slug="active"))
|
||||
DocAlias.objects.create(name=doc.name).docs.add(doc)
|
||||
year, month, day = [int(part) for part in date_string.split("-")]
|
||||
e1 = DocEvent.objects.create(
|
||||
time=datetime.datetime(
|
||||
year, month, day, 12, 00, tzinfo=datetime.timezone.utc
|
||||
),
|
||||
type="published_statement",
|
||||
doc=doc,
|
||||
rev="00",
|
||||
by_id=1,
|
||||
desc="Statement published (note: The 1200Z time of day is inaccurate - the actual time of day is not known)",
|
||||
)
|
||||
e2 = DocEvent.objects.create(
|
||||
type="added_comment",
|
||||
doc=doc,
|
||||
rev="00",
|
||||
by_id=1, # The "(System)" person
|
||||
desc="Statement moved into datatracker from iab wordpress website",
|
||||
)
|
||||
doc.save_with_history([e1, e2])
|
||||
|
||||
# Put file in place
|
||||
source = Path(tmpdir).joinpath(
|
||||
"iab_statements", f"{base_sourcename}.{ext}"
|
||||
)
|
||||
dest = Path(settings.DOCUMENT_PATH_PATTERN.format(doc=doc)).joinpath(
|
||||
filename
|
||||
)
|
||||
if dest.exists():
|
||||
print(f"WARNING: {dest} already exists - not overwriting it.")
|
||||
else:
|
||||
os.makedirs(dest.parent, exist_ok=True)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
redirect_writer.writerow(
|
||||
[
|
||||
url,
|
||||
f"https://datatracker.ietf.org/doc/{docname}",
|
||||
]
|
||||
)
|
||||
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def load_spreadsheet():
    """Parse the embedded CSV dump of legacy IAB statements from iab.org.

    Returns:
        list[list]: one entry per statement, shaped
        ``[counter, date, title, url, extra]`` where ``extra`` is the fourth
        CSV column ("deprecated", a source-document URL, or "").
        ``counter`` is ``""`` for the first statement seen on a given date
        and the integer ordinal (1, 2, ...) for subsequent statements
        sharing that date.  The mixed str/int type of the first column is
        deliberate and preserved for downstream consumers — TODO(review):
        confirm callers rely on "" (not 0) for the first occurrence.
    """
    csv_dump = '''2002-03-01,IAB RFC Publication Process Description(txt) March 2003,https://www.iab.org/documents/correspondence-reports-documents/docs2003/iab-rfc-publication-process/,deprecated
2015-01-27,IAB Statement on Identifiers and Unicode 7.0.0 (archive),https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-identifiers-and-unicode-7-0-0/archive/,deprecated
2010-02-05,Response to the EC’s RFI on Forums and Consortiums,https://www.iab.org/documents/correspondence-reports-documents/docs2010/response-to-the-ecs-rfi-on-forums-and-consortiums/,https://www.iab.org/wp-content/IAB-uploads/2011/03/2010-02-05-IAB-Response-Euro-ICT-Questionnaire.pdf
2011-03-30,IAB responds to NTIA Request for Comments on the IANA Functions,https://www.iab.org/documents/correspondence-reports-documents/2011-2/iab-responds-to-ntia-request-for-comments-on-the-iana-functions/,https://www.iab.org/wp-content/IAB-uploads/2011/04/2011-03-30-iab-iana-noi-response.pdf
2011-07-28,IAB's response to the NTIA FNOI on IANA,https://www.iab.org/documents/correspondence-reports-documents/2011-2/iabs-response-to-the-ntia-fnoi-on-iana/,https://www.iab.org/wp-content/IAB-uploads/2011/07/IANA-IAB-FNOI-2011.pdf
2011-12-16,"Questionnaire in support of the ICANN bid for the IANA function [Dec 16, 2011]",https://www.iab.org/documents/correspondence-reports-documents/2011-2/questionnaire-in-support-of-the-icann-bid-for-the-iana-function/,https://www.iab.org/wp-content/IAB-uploads/2011/12/IAB-Past-Performance-Questionnaire.pdf
2012-04-03,IETF Oversight of the IANA Protocol Parameter Function,https://www.iab.org/documents/correspondence-reports-documents/2012-2/ietf-oversight-of-the-iana-protocol-parameter-function/,https://www.iab.org/wp-content/IAB-uploads/2012/04/IETF-IANA-Oversight.pdf
2012-04-29,IETF and IAB comment on OMB Circular A-119,https://www.iab.org/documents/correspondence-reports-documents/2012-2/ietf-and-iab-comment-on-omb-circular-a-119/,https://www.iab.org/wp-content/IAB-uploads/2012/04/OMB-119.pdf
2012-05-24,IAB submits updated ICANN performance evaluation,https://www.iab.org/documents/correspondence-reports-documents/2012-2/iab-submits-updated-icann-performance-evaluation/,https://www.iab.org/wp-content/IAB-uploads/2012/05/IAB-Past-Performance-Questionnaire-FINAL.pdf
2013-07-02,Open letter to the European Commission and the European Parliament in the matter of the Transatlantic Trade and Investment Partnership (TTIP),https://www.iab.org/documents/correspondence-reports-documents/2013-2/open-letter-to-the-ec/,https://www.iab.org/wp-content/IAB-uploads/2013/07/TTIP_market_driven_standards_EU_letter.pdf
2013-05-10,Comments In the matter of Transatlantic Trade and Investment Partnership (TTIP) (USTR-2013-0019),https://www.iab.org/documents/correspondence-reports-documents/2013-2/comments-in-the-matter-of-transatlantic-trade-and-investment-partnership-ttip-ustr-2013-0019/,https://www.iab.org/wp-content/IAB-uploads/2013/07/TTIP_market_driven_standards_FINAL.pdf
2013-10-23,IAB Comments on Recommendation for Random Number Generation Using Deterministic Random Bit Generators,https://www.iab.org/documents/correspondence-reports-documents/2013-2/nist-sp-800-90a/,https://www.iab.org/wp-content/IAB-uploads/2013/10/IAB-NIST-FINAL.pdf
2014-04-07,IAB Comments on NISTIR 7977,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-comments-on-nistir-7977/,https://www.iab.org/wp-content/IAB-uploads/2014/04/IAB-NIST7977-20140407.pdf
2014-04-29,Comments to ICANN on the Transition of NTIA’s Stewardship of the IANA Functions,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-response-to-icann-iana-transition-proposal/,https://www.iab.org/wp-content/IAB-uploads/2014/04/iab-response-to-20140408-20140428a.pdf
2016-05-27,"IAB Comments to US NTIA Request for Comments, ""The Benefits, Challenges, and Potential Roles for the Government in Fostering the Advancement of the Internet of Things""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comments-to-ntia-request-for-comments-the-benefits-challenges-and-potential-roles-for-the-government/,https://www.iab.org/wp-content/IAB-uploads/2016/05/ntia-iot-20160525.pdf
2016-05-24,"IAB Chair Testifies before the United States Senate Committee on Commerce, Science, and Transportation on ""Examining the Multistakeholder Plan for Transitioning the Internet Assigned Number Authority""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-chair-statement-before-us-senate-committee-on-iana-transition/,https://www.iab.org/wp-content/IAB-uploads/2016/05/sullivan-to-senate-commerce-20160524.pdf
2018-07-16,IAB Response to NTIA Notice of Inquiry on International Internet Policy Priorities,https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-response-to-ntia-notice-of-inquiry-on-international-internet-policy-priorities-response/,https://www.iab.org/wp-content/IAB-uploads/2018/07/IAB-response-to-the-2018-NTIA-Notice-of-Inquiry.pdf
2018-09-09,Internet Architecture Board Comments on the Australian Assistance and Access Bill 2018,https://www.iab.org/documents/correspondence-reports-documents/2018-2/internet-architecture-board-comments-on-the-australian-assistance-and-access-bill-2018/,https://www.iab.org/wp-content/IAB-uploads/2018/09/IAB-Comments-on-Australian-Assistance-and-Access-Bill-2018.pdf
2023-03-03,IAB Response to the Office of the High Commissioner for Human Rights Call for Input on “The relationship between human rights and technical standard-setting processes for new and emerging digital technologies”,https://www.iab.org/documents/correspondence-reports-documents/2023-2/iab-response-to-the-ohchr-call-for-input-on-the-relationship-between-human-rights-and-technical-standard/,https://www.iab.org/wp-content/IAB-uploads/2023/03/IAB-Response-to-OHCHR-consultation.pdf
1998-12-09,"IAB Request to IANA for Delegating IPv6 Address Space, Mail Message, December 1998",https://www.iab.org/documents/correspondence-reports-documents/docs98/iab-request-to-iana-for-delegating-ipv6-address-space-mail-message-december-1998/,
1998-12-18,"1998 Statements on Cryptography, Mail Message, December 1998.",https://www.iab.org/documents/correspondence-reports-documents/docs98/1998-statements-on-cryptography/,
1999-02-22,Correspondence between Bradner and Dyson on Protocol Parameter Parameters,https://www.iab.org/documents/correspondence-reports-documents/docs99/correspondence-between-bradner-and-dyson-on-protocol-parameter-parameters/,
1999-08-13,Comment on ICANN ASO membership,https://www.iab.org/documents/correspondence-reports-documents/docs99/comment-on-icann-aso-membership/,
1999-10-19,Ad Hoc Group on Numbering,https://www.iab.org/documents/correspondence-reports-documents/docs99/ad-hoc-group-on-numbering/,
2000-05-01,"IAB Statement on Infrastructure Domain and Subdomains, May 2000.",https://www.iab.org/documents/correspondence-reports-documents/docs2000/iab-statement-on-infrastructure-domain-and-subdomains-may-2000/,
2002-05-01,"IETF and ITU-T Cooperation Arrangements, May 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/ietf-and-itu-t-cooperation-arrangements-may-2002/,
2002-05-03,"IAB replyto ENUM liaison statement, May 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/enum-response/,
2002-05-24,"Interim Approval for Internet Telephone Numbering System (ENUM) Provisioning, 24 May 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/enum-pr/,
2002-06-01,"IAB response to ICANN Evolution and Reform, June 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/icann-response/,
2002-09-01,"IAB response to ICANN Evolution and Reform Committee's Second Interim Report, September 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/icann-response-2/,
2002-10-01,"IAB response to ICANN Evolution and Reform Committee's Final Implementation Report, October 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/icann-response-3/,
2002-12-10,"IAB Response to RIRs request regarding 6bone address entries in ip6.arpa, December 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/3ffe/,
2003-01-03,"IETF Notice of Withdrawal from the Protocol Support Organization, January 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/icann-pso-notice/,
2003-01-25,"IAB Response to Verisign GRS IDN Announcement, January 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/icann-vgrs-response/,
2003-07-10,"Note: Unified Notification Protocol Considerations, July 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-07-10-iab-notification/,
2003-08-01,Open Architectural Issues in the Development of the Internet,https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-08-architectural-issues/,
2003-08-28,RFC Document editing/ queueing suggestion,https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-08-28-klensin-rfc-editor/,
2003-09-02,"IAB Chair's announcement of an Advisory Committee, September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-02-adv-committee/,
2003-09-19,"IAB Commentary: Architectural Concerns on the Use of DNS Wildcards, September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-20-dns-wildcards/,
2003-09-24,"IAB to ICANN: IAB input related to the .cs code in ISO 3166, 24 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-icann-cs-code/,
2003-09-24,"IAB to ISO: IAB comment on stability of ISO 3166 and other infrastructure standards, 24 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-iso-cs-code/,
2003-09-25,"Correspondance to ISO concerning .cs code, and advice to ICANN, 25 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-icann-cs-code-2/,
2003-09-25,"Correspondance to ISO concerning .cs code, and advice to ICANN, 25 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-iso-cs-code-2/,
2003-09-26,ISO Codes,https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-23-isocodes/,
2003-10-02,"IESG to IAB: Checking data for validity before usage in a protocol, 2 October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-02-iesg-dns-validity-check-query/,
2003-10-14,"Survey of Current Security Work in the IETF, October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-14-security-survey/,
2003-10-17,"IAB to ICANN SESAC:Wildcard entries in DNS domains, 17 October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-17-crocker-wildcards-2/,
2003-10-17,"IAB note to Steve Crocker, Chair, ICANN Security and Stability Advisory Committee, October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-17-crocker-wildcards/,
2003-10-18,"IAB concerns against permanent deployment of edge-based port filtering, October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-18-edge-filters/,
2003-11-08,"IAB Response to IESG architectural query: Checking data for validity before usage in protocol, November 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-11-08-iesg-dns-validity-check-query-response/,
2004-01-19,"Number Resource Organisation (NRO) formation, 19 January 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-01-19-nro/,
2004-01-22,"IAB to RIPE NCC:ENUM Administration, 22 January 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-01-22-enum-subcodes/,
2004-02-09,"IAB to IANA: Instructions to IANA -Delegation of 2.0.0.2.ip6.arpa, 9 February, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-02-09-6to4-rev-delegation/,
2004-02-26,The IETF -- what is it?,https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-02-26-ietf-defn/,
2004-04-15,"IAB to ICANN: Validity checks for names, 15 April, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-04-15-icann-dns-validity-check/,
2004-05-07,"IAB to IANA: IPv6 Allocation Policy , 7 May, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-05-07-iana-v6alloc/,
2004-05-24,"IAB to IANA: Instructions to IANA -Delegation of 3.f.f.e.ip6.arpa, 24 May, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-05-24-3ffe-rev-delegation/,
2004-05-27,"IAB to ICANN:Concerns regarding IANA Report, 27 May, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-05-27-iana-report/,
2004-07-16,"Upcoming clarifications to RIPE NCC instructions for e164.arpa operation, 16 July 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-07-15-enum-instructions/,
2004-07-16,"IANA Delegation Requests, 16 July 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-07-16-iana-delegation/,
2004-08-06,OMA-IETF Standardization Collaboration,https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-08-draft-iab-oma-liaison-00/,
2004-08-12,"IAB to RIPE NCC:Notice of revision of instructions concerning the ENUM Registry, 12 August, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-08-12-enum-instructions/,
2004-08-12,"Response to your letter of August 4, 12 August 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-08-12-icann-wildcard/,
2004-09-27,"IAB to ICANN:Report of Concerns over IANA Performance , 27 September, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-09-27-iana-concerns/,
2004-09-27,"IAB Report of IETF IANA Functions , 27 September 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-09-27-iana-report/,
2004-11-03,"IAB to IESG:Comments on Teredo , 3 November, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-11-03-teredo-comments/,
2004-11-12,"IAB to ICANN:Response to ICANN Request for assistance with new TLD Policy , 12 November, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-11-12-icann-new-tld-policy/,
2004-11-29,"The IETF and IPv6 Address Allocation , 29 November 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-11-29-ipv6-allocs/,
2004-12-15,"IAB Comment to Internet AD:Comments on IANA Considerations in IPv6 ULA draft, 15 December, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-12-15-ipv6-ula-iana-considerations/,
2005-02-16,"IAB review of Structure of the IETF Administrative Support Activity, 16 February 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-02-16-iasa/,
2005-08-26,"SiteFinder returns, 26 August 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-08-26-ssac-note/,
2005-09-01,"Re: SiteFinder returns, 1 September 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-09-01-ssac-response/,
2005-10-14,"IAB to ICANN: IAB comments on ICANN IDN Guidelines, 14 October 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-10-14-idn-guidelines/,
2005-11-07,"IAB to ICANN – Nameserver change for e164.arpa, 7 November 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-11-07-nameserver-change/,
2005-11-22,"IETF to ICANN – IANA structural status, 22 November 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-11-22-iana-structure/,
2005-11-29,"IAB to IANA – Teredo prefix assignment, 29 November 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-11-29-teredo-prefix/,
2005-12-22,"IAB to ICANN – dot arpa TLD management, 22 December 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-12-22-dot-arpa/,
2006-03-06,"IAB Position on the IETF IANA Technical Parameter Function, 6 March 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/iab-iana-position/,
2006-03-28,"IAB to ICANN – Name server changes for ip6.arpa, 28 March 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-03-28-nameserver-change/,
2006-04-20,"IAB to IANA – Administrative contact information change for arpa, 20 April 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-04-20-update-to-administrative-contact-information-for-arpa-iana/,
2006-04-20,"IAB to ITU TSB – FYI re contact info changes for e164.arpa, 20 April 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-04-20-update-to-contact-information-for-e164-arpa-hill/,
2006-04-20,"IAB to IANA – Contact information changes for e164.arpa, 20 April 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-04-20-update-to-contact-information-for-e164-arpa-iana/,
2006-05-15,"IAB to IANA – Request to IANA for status update on deployment of DNSSEC on IANA managed zones, 15 May 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-05-15-iab-request-to-iana-to-sign-dnssec-zones/,
2006-06-07,"The IAB announces the mailing list for the discussion of the independent submissions process, 7 June 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-06-07-independent-submissions/,
2006-06-19,"Procedural issues with liaison on nextsteps, 19 June 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-06-16-response-to-idn-liaison-issues/,
2006-10-12,"The IAB sends a note to the Registry Services Technical Evaluation Panel on the use of wildcards in the .travel TLD, 12 October 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-10-12-rstep-note/,
2006-10-19,"The IAB sends a note to the OIF Technical Committee Chair on IETF Protocol Extensions, 19 October 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-10-19-oifnote/,
2007-05-21,"The IAB responds to ITU Consultation on Resolution 102, 21 May 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/2007-05-21-itu-resolution-102/,
2007-07-05,"Correspondence from the RIPE NCC regarding deployment of DNSSEC in the E164.ARPA zone, 5 July 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/2007-07-05-ripe-ncc-dnssec-e164/,
2007-07-24,"Correspondence from the IAB to the ITU-TSB Director regarding deployment of DNSSEC in the E164.ARPA zone, 24 July 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/2007-07-24-iab-itu-dnssec-e164/,
2007-10-10,"Follow-up work on NAT-PT, 10 October 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/follow-up-work-on-nat-pt/,
2008-02-15,"Correspondence from the IAB to the National Telecommunications and Information Administration, US Department of Commerce regarding the ICANN/DoC Joint Project Agreement, 15 February 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-02-15-midterm-view-icann-doc-jpa/,
2008-03-07,"The IAB’s response to ICANN’s solicitation on DNS stability, 7 March 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-03-07-icann-new-gtlds/,
2008-06-04,Proposed RFC Editor Structure,https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-06-04-rfc-editor-model/,
2008-08-16,"The IAB’s response to Geoff Huston’s request concerning 32-bit AS numbers, 16 August 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-08-16-32bit-as-huston/,
2008-09-05,Proposed RFC Editor Structure,https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-09-05-rfc-editor-model/,
2008-11-18,"The IAB’s correspondence with NTIA on DNSSEC deployment at the root, 18 November 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-11-18-dnssec-deployment-at-the-root/,
2008-12-04,"IAB correspondence with Geoff Huston on TAs, IANA, RIRs et al.., 4 December 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-12-04-huston-tas-iana-rirs/,
2009-06-02,"IAB correspondence with IANA on the Signing of .ARPA, 2 June 2009",https://www.iab.org/documents/correspondence-reports-documents/docs2009/2009-06-02-roseman-signing-by-iana-of-arpa/,
2009-10-14,"IAB correspondence with ICANN on their “Scaling the Root” study., 14 October 2009",https://www.iab.org/documents/correspondence-reports-documents/docs2009/2009-10-14-icann-scaling-the-root/,
2010-01-27,IAB statement on the RPKI,https://www.iab.org/documents/correspondence-reports-documents/docs2010/iab-statement-on-the-rpki/,
2010-07-30,Transition of IN-ADDR.ARPA generation,https://www.iab.org/documents/correspondence-reports-documents/docs2010/transition-of-in-addr-arpa-generation/,
2011-06-22,Response to ARIN's request for guidance regarding Draft Policy ARIN-2011-5,https://www.iab.org/documents/correspondence-reports-documents/2011-2/response-to-arins-request-for-guidance-regarding-draft-policy-arin-2011-5/,
2011-07-25,"IAB Response to ""Some IESG Thoughts on Liaisons""",https://www.iab.org/documents/correspondence-reports-documents/2011-2/iab-response-to-some-iesg-thoughts-on-liaisons/,
2011-09-16,Letter to the European Commission on Global Interoperability in Emergency Services,https://www.iab.org/documents/correspondence-reports-documents/2011-2/letter-to-the-european-commission-on-global-interoperability-in-emergency-services/,
2012-02-08,"IAB Statement: ""The interpretation of rules in the ICANN gTLD Applicant Guidebook""",https://www.iab.org/documents/correspondence-reports-documents/2012-2/iab-statement-the-interpretation-of-rules-in-the-icann-gtld-applicant-guidebook/,
2012-03-26,"Response to ICANN questions concerning ""The interpretation of rules in the ICANN gTLD Applicant Guidebook""",https://www.iab.org/documents/correspondence-reports-documents/2012-2/response-to-icann-questions-concerning-the-interpretation-of-rules-in-the-icann-gtld-applicant-guidebook/,
2012-03-30,IAB Member Roles in Evaluating New Work Proposals,https://www.iab.org/documents/correspondence-reports-documents/2012-2/iab-member-roles-in-evaluating-new-work-proposals/,
2012-08-29,Leading Global Standards Organizations Endorse ‘OpenStand’ Principles that Drive Innovation and Borderless Commerce,https://www.iab.org/documents/correspondence-reports-documents/2012-2/leading-global-standards-organizations-endorse-%e2%80%98openstand/,
2013-03-28,IAB Response to RSSAC restructure document (28 March 2013),https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-response-to-rssac-restructure-document-28-march-2013/,
2013-05-28,Consultation on Root Zone KSK Rollover from the IAB,https://www.iab.org/documents/correspondence-reports-documents/2013-2/consultation-on-root-zone-ksk-rollover-from-the-iab/,
2013-07-10,IAB Statement: Dotless Domains Considered Harmful,https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-statement-dotless-domains-considered-harmful/,
2013-07-16,IAB Response to ICANN Consultation on the Source of Policies & User Instructions for Internet Number Resource Requests,https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-response-to-iana-policies-user-instructions-25jun13/,
2013-10-03,Statement from the IAB on the Strengths of the OpenStand Principles,https://www.iab.org/documents/correspondence-reports-documents/2013-2/statement-from-openstand-on-the-strengths-of-the-openstand-principles/,
2013-10-07,Montevideo Statement on the Future of Internet Cooperation,https://www.iab.org/documents/correspondence-reports-documents/2013-2/montevideo-statement-on-the-future-of-internet-cooperation/,
2013-11-27,IAB Statement on draft-farrell-perpass-attack-00,https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-statement-on-draft-farrell-perpass-attack-00/,
2014-01-23,IAB Comments Regarding the IRTF CFRG chair,https://www.iab.org/documents/correspondence-reports-documents/2014-2/0123-iab-comments-regarding-the-irtf-cfrg-chair/,
2014-02-14,"Statement from the I* Leaders Coordination Meeting, Santa Monica, 14 February 2014",https://www.iab.org/documents/correspondence-reports-documents/2014-2/statement-from-the-i-leaders-coordination-meeting-santa-monica-14-february-2014/,
2014-03-11,Re: Guiding the Evolution of the IANA Protocol Parameter Registries,https://www.iab.org/documents/correspondence-reports-documents/2014-2/re-guiding-the-evolution-of-the-iana-protocol-parameter-registries/,
2014-03-14,Internet Technical Leaders Welcome IANA Globalization Progress,https://www.iab.org/documents/correspondence-reports-documents/2014-2/internet-technical-leaders-welcome-iana-globalization-progress/,
2014-05-13,I* Post-NETmundial Meeting Statement,https://www.iab.org/documents/correspondence-reports-documents/2014-2/i-post-netmundial-meeting-statement/,
2014-06-05,Comments on ICANN Board Member Compensation from the IAB,https://www.iab.org/documents/correspondence-reports-documents/2014-2/comments-on-icann-board-member-compensation/,
2014-11-13,IAB Statement on Internet Confidentiality,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-statement-on-internet-confidentiality/,
2014-12-04,IAB statement on the NETmundial Initiative,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-statement-on-the-netmundial-initiative/,
2014-12-17,IAB Comments on CSTD Report Mapping International Internet Public Policy Issues,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-comments-on-cstd-report-mapping-international-public-policy-issues/,
2015-02-11,IAB liaison to ICANN Root Server System Advisory Committee (RSSAC),https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-liaison-to-icann-root-server-system-advisory-council-rssac/,
2015-02-11,IAB Statement on Identifiers and Unicode 7.0.0,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-identifiers-and-unicode-7-0-0/,
2015-03-02,IAB Statement on Liaison Compensation,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-liaison-compensation/,
2015-04-09,IAB Comments on The HTTPS-Only Standard,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-the-https-only-standard/,
2015-06-03,IAB comments on CCWG-Accountability Draft Report,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-ccwg-accountability-draft-report/,
2015-06-12,IAB Statement on the Trade in Security Technologies,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-the-trade-in-security-technologies/,
2015-06-24,"IAB Correspondence to U.S. Bureau of Industry and Security, re RIN 0694-AG49",https://www.iab.org/documents/correspondence-reports-documents/2015-2/rin-0694-ag49/,
2015-09-07,Internet Architecture Board comments on the ICG Proposal,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-icg-proposal/,
2015-09-09,IAB comments on the CCWG accountability 2d draft report,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-ccwg-accountability/,
2015-10-07,IAB Comments to FCC on Rules regarding Authorization of Radiofrequency Equipment,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-fcc-15-92/,
2015-12-16,IAB comments on the CCWG accountability 3d draft report,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-the-ccwg-accountability-3d-draft-report/,
2016-01-13,"Comments from the Internet Architecture Board (IAB) on ""Registration Data Access Protocol (RDAP) Operational Profile for gTLD Registries and Registrars""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/comments-from-the-internet-architecture-board-iab-on-registration-data-access-protocol-rdap-operational-profile-for-gtld-registries-and-registrars/,
2016-05-04,IAB comments on Draft New ICANN Bylaws,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comments-on-draft-new-icann-bylaws/,
2016-05-11,IAB Comments on Proposed Changes to Internet Society Bylaws,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comments-on-proposed-changes-to-internet-society-bylaws/,
2016-07-17,Comments from the IAB on LGRs for second level,https://www.iab.org/documents/correspondence-reports-documents/2016-2/comments-from-the-iab-on-lgrs-for-second-level/,
2016-09-01,IAB statement on IANA Intellectual Property Rights,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-statement-on-iana-intellectual-property-rights/,
2016-09-14,IAB Statement on the IANA Stewardship Transition,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-statement-on-the-iana-stewardship-transition/,
2016-11-07,IAB Statement on IPv6,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-statement-on-ipv6/,
2016-12-07,"IAB comment on ""Revised Proposed Implementation of GNSO Thick Whois Consensus Policy Requiring Consistent Labeling and Display of RDDS (Whois) Output for All gTLDs""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comment-on-revised-proposed-implementation-of-gnso-thick-whois-consensus-policy-requiring-consistent-labeling-and-display-of-rdds-whois-output-for-all-gtlds/,
2017-01-04,IAB comments on Identifier Technology Health Indicators: Definition,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-comments-on-identifier-technology-health-indicators-definition/,
2017-02-01,IAB Statement on OCSP Stapling,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-statement-on-ocsp-stapling/,
2017-02-16,Follow up on barriers to entry blog post,https://www.iab.org/documents/correspondence-reports-documents/2017-2/follow-up-on-barriers-to-entry-blog-post/,
2017-03-02,IAB Comments to United States NTIA on the Green Paper: Fostering the Advancement of the Internet of Things,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-comments-to-ntia-on-fostering-the-advancement-of-iot/,
2017-03-30,Internet Architecture Board statement on the registration of special use names in the ARPA domain,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-statement-on-the-registration-of-special-use-names-in-the-arpa-domain/,
2017-05-01,Comments from the IAB on IDN Implementation Guidelines,https://www.iab.org/documents/correspondence-reports-documents/2017-2/comments-from-the-iab-on-idn-implementation-guidelines/,
2017-07-31,IAB Response to FCC-17-89,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-response-to-fcc-17-89/,
2018-03-15,IAB Statement on Identifiers and Unicode,https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-statement-on-identifiers-and-unicode/,
2018-04-03,IAB Statement on the RPKI,https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-statement-on-the-rpki/,
2019-05-02,Revised Operating Instructions for e164.arpa (ENUM),https://www.iab.org/documents/correspondence-reports-documents/2019-2/revised-operating-instructions-for-e164-arpa-enum/,
2019-06-26,Comments on Evolving the Governance of the Root Server System,https://www.iab.org/documents/correspondence-reports-documents/2019-2/comments-on-evolving-the-governance-of-the-root-server-system/,
2019-09-04,Avoiding Unintended Harm to Internet Infrastructure,https://www.iab.org/documents/correspondence-reports-documents/2019-2/avoiding-unintended-harm-to-internet-infrastructure/,
2020-07-01,"IAB correspondence with the National Telecommunications and Information Administration (NTIA) on DNSSEC deployment for the Root Zone [Docket No. 100603240-0240-01], 1 July 2010",https://www.iab.org/documents/correspondence-reports-documents/docs2010/2010-07-01-alexander-dnssec-deployment-for-the-root-zone/,
2020-09-29,IAB Comments on the Draft Final Report on the new gTLD Subsequent Procedures Policy Development Process,https://www.iab.org/documents/correspondence-reports-documents/2020-2/iab-comments-on-new-gtld-subsequent-procedures/,
2021-07-14,IAB Statement on Inclusive Language in IAB Stream Documents,https://www.iab.org/documents/correspondence-reports-documents/2021-2/iab-statement-on-inclusive-language-in-iab-stream-documents/,
2022-04-08,IAB comment on Mandated Browser Root Certificates in the European Union’s eIDAS Regulation on the Internet,https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-comment-on-mandated-browser-root-certificates-in-the-european-unions-eidas-regulation-on-the-internet/,
2022-04-08,"IAB Comments on A Notice by the Federal Communications Commission on Secure Internet Routing, issued 03/11/2022",https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-comments-on-a-notice-by-the-federal-communications-commission-on-secure-internet-routing-issued-03-11-2022/,
2022-07-08,IAB Statement to OSTP on Privacy-Enhancing Technologies,https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-statement-to-ostp-on-privacy-enhancing-technologies/,
2022-11-21,IAB Comments on a notice by the Federal Trade Commission on “Trade Regulation Rule on Commercial Surveillance and Data Security” (16 CFR Part 464),https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-comments-on-a-notice-by-the-federal-trade-commission-on-trade-regulation-rule-on-commercial-surveillance-and-data-security-16-cfr-part-464/,
'''

    rows = []
    # How many statements have already been seen for each date; used to
    # disambiguate multiple statements published on the same day.
    date_count = defaultdict(int)  # idiomatic form of defaultdict(lambda: 0)
    with io.StringIO(csv_dump) as csv_file:
        for row in csv.reader(csv_file):
            date = row[0]
            # First occurrence of a date gets an empty-string marker; later
            # occurrences get their integer ordinal (mixed type on purpose).
            row.insert(0, date_count[date] if date_count[date] else "")
            date_count[date] += 1
            rows.append(row)
    return rows
|
86
ietf/doc/migrations/0005_alter_docevent_type.py
Normal file
86
ietf/doc/migrations/0005_alter_docevent_type.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("doc", "0004_alter_dochistory_ad_alter_dochistory_shepherd_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="docevent",
|
||||
name="type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("new_revision", "Added new revision"),
|
||||
("new_submission", "Uploaded new revision"),
|
||||
("changed_document", "Changed document metadata"),
|
||||
("added_comment", "Added comment"),
|
||||
("added_message", "Added message"),
|
||||
("edited_authors", "Edited the documents author list"),
|
||||
("deleted", "Deleted document"),
|
||||
("changed_state", "Changed state"),
|
||||
("changed_stream", "Changed document stream"),
|
||||
("expired_document", "Expired document"),
|
||||
("extended_expiry", "Extended expiry of document"),
|
||||
("requested_resurrect", "Requested resurrect"),
|
||||
("completed_resurrect", "Completed resurrect"),
|
||||
("changed_consensus", "Changed consensus"),
|
||||
("published_rfc", "Published RFC"),
|
||||
(
|
||||
"added_suggested_replaces",
|
||||
"Added suggested replacement relationships",
|
||||
),
|
||||
(
|
||||
"reviewed_suggested_replaces",
|
||||
"Reviewed suggested replacement relationships",
|
||||
),
|
||||
("changed_action_holders", "Changed action holders for document"),
|
||||
("changed_group", "Changed group"),
|
||||
("changed_protocol_writeup", "Changed protocol writeup"),
|
||||
("changed_charter_milestone", "Changed charter milestone"),
|
||||
("initial_review", "Set initial review time"),
|
||||
("changed_review_announcement", "Changed WG Review text"),
|
||||
("changed_action_announcement", "Changed WG Action text"),
|
||||
("started_iesg_process", "Started IESG process on document"),
|
||||
("created_ballot", "Created ballot"),
|
||||
("closed_ballot", "Closed ballot"),
|
||||
("sent_ballot_announcement", "Sent ballot announcement"),
|
||||
("changed_ballot_position", "Changed ballot position"),
|
||||
("changed_ballot_approval_text", "Changed ballot approval text"),
|
||||
("changed_ballot_writeup_text", "Changed ballot writeup text"),
|
||||
("changed_rfc_editor_note_text", "Changed RFC Editor Note text"),
|
||||
("changed_last_call_text", "Changed last call text"),
|
||||
("requested_last_call", "Requested last call"),
|
||||
("sent_last_call", "Sent last call"),
|
||||
("scheduled_for_telechat", "Scheduled for telechat"),
|
||||
("iesg_approved", "IESG approved document (no problem)"),
|
||||
("iesg_disapproved", "IESG disapproved document (do not publish)"),
|
||||
("approved_in_minute", "Approved in minute"),
|
||||
("iana_review", "IANA review comment"),
|
||||
("rfc_in_iana_registry", "RFC is in IANA registry"),
|
||||
(
|
||||
"rfc_editor_received_announcement",
|
||||
"Announcement was received by RFC Editor",
|
||||
),
|
||||
("requested_publication", "Publication at RFC Editor requested"),
|
||||
(
|
||||
"sync_from_rfc_editor",
|
||||
"Received updated information from RFC Editor",
|
||||
),
|
||||
("requested_review", "Requested review"),
|
||||
("assigned_review_request", "Assigned review request"),
|
||||
("closed_review_request", "Closed review request"),
|
||||
("closed_review_assignment", "Closed review assignment"),
|
||||
("downref_approved", "Downref approved"),
|
||||
("posted_related_ipr", "Posted related IPR"),
|
||||
("removed_related_ipr", "Removed related IPR"),
|
||||
("changed_editors", "Changed BOF Request editors"),
|
||||
("published_statement", "Published statement"),
|
||||
],
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
43
ietf/doc/migrations/0006_statements.py
Normal file
43
ietf/doc/migrations/0006_statements.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def forward(apps, schema_editor):
|
||||
StateType = apps.get_model("doc", "StateType")
|
||||
State = apps.get_model("doc", "State")
|
||||
|
||||
StateType.objects.create(slug="statement", label="Statement State")
|
||||
State.objects.create(
|
||||
slug="active",
|
||||
type_id="statement",
|
||||
name="Active",
|
||||
order=0,
|
||||
desc="The statement is active",
|
||||
)
|
||||
State.objects.create(
|
||||
slug="replaced",
|
||||
type_id="statement",
|
||||
name="Replaced",
|
||||
order=0,
|
||||
desc="The statement has been replaced",
|
||||
)
|
||||
|
||||
|
||||
def reverse(apps, schema_editor):
|
||||
StateType = apps.get_model("doc", "StateType")
|
||||
State = apps.get_model("doc", "State")
|
||||
|
||||
State.objects.filter(type_id="statement").delete()
|
||||
StateType.objects.filter(slug="statement").delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("doc", "0005_alter_docevent_type"),
|
||||
("name", "0004_statements"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forward, reverse),
|
||||
]
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2010-2020, All Rights Reserved
|
||||
# Copyright The IETF Trust 2010-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
@ -162,7 +162,7 @@ class DocumentInfo(models.Model):
|
|||
self._cached_file_path = settings.CONFLICT_REVIEW_PATH
|
||||
elif self.type_id == "statchg":
|
||||
self._cached_file_path = settings.STATUS_CHANGE_PATH
|
||||
elif self.type_id == "bofreq":
|
||||
elif self.type_id == "bofreq": # TODO: This is probably unneeded, as is the separate path setting
|
||||
self._cached_file_path = settings.BOFREQ_PATH
|
||||
else:
|
||||
self._cached_file_path = settings.DOCUMENT_PATH_PATTERN.format(doc=self)
|
||||
|
@ -186,7 +186,7 @@ class DocumentInfo(models.Model):
|
|||
elif self.type_id == 'review':
|
||||
# TODO: This will be wrong if a review is updated on the same day it was created (or updated more than once on the same day)
|
||||
self._cached_base_name = "%s.txt" % self.name
|
||||
elif self.type_id == 'bofreq':
|
||||
elif self.type_id in ['bofreq', 'statement']:
|
||||
self._cached_base_name = "%s-%s.md" % (self.name, self.rev)
|
||||
else:
|
||||
if self.rev:
|
||||
|
@ -1290,7 +1290,11 @@ EVENT_TYPES = [
|
|||
("removed_related_ipr", "Removed related IPR"),
|
||||
|
||||
# Bofreq Editor events
|
||||
("changed_editors", "Changed BOF Request editors")
|
||||
("changed_editors", "Changed BOF Request editors"),
|
||||
|
||||
# Statement events
|
||||
("published_statement", "Published statement"),
|
||||
|
||||
]
|
||||
|
||||
class DocEvent(models.Model):
|
||||
|
|
|
@ -261,6 +261,17 @@ class SearchTests(TestCase):
|
|||
parsed = urlparse(r["Location"])
|
||||
self.assertEqual(parsed.path, urlreverse('ietf.doc.views_search.search'))
|
||||
self.assertEqual(parse_qs(parsed.query)["name"][0], "draft-ietf-doesnotexist-42")
|
||||
|
||||
def test_search_rfc(self):
|
||||
rfc = WgRfcFactory(name="rfc0000")
|
||||
|
||||
# search for existing RFC should redirect directly to the RFC page
|
||||
r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name=rfc.name)))
|
||||
self.assertRedirects(r, f'/doc/{rfc.name}/', status_code=302, target_status_code=200)
|
||||
|
||||
# search for existing RFC with revision number should redirect to the RFC page
|
||||
r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name=rfc.name + "-99")), follow=True)
|
||||
self.assertRedirects(r, f'/doc/{rfc.name}/', status_code=302, target_status_code=200)
|
||||
|
||||
def test_frontpage(self):
|
||||
r = self.client.get("/")
|
||||
|
@ -1617,6 +1628,10 @@ class DocTestCase(TestCase):
|
|||
CharterFactory(name='charter-ietf-mars')
|
||||
r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name="charter-ietf-mars")))
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
||||
def test_incorrect_rfc_url(self):
|
||||
r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name="rfc8989", rev="00")))
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
def test_document_conflict_review(self):
|
||||
ConflictReviewFactory(name='conflict-review-imaginary-irtf-submission')
|
||||
|
@ -1993,6 +2008,12 @@ class DocTestCase(TestCase):
|
|||
#
|
||||
self.assertNotIn('day', entry)
|
||||
|
||||
# test for incorrect case - revision for RFC
|
||||
rfc = WgRfcFactory(name="rfc0000")
|
||||
url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=rfc.name, rev='00'))
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
april1 = IndividualRfcFactory.create(
|
||||
stream_id = 'ise',
|
||||
states = [('draft','rfc'),('draft-iesg','pub')],
|
||||
|
@ -2585,6 +2606,7 @@ class ChartTests(ResourceTestCaseMixin, TestCase):
|
|||
d = r.json()
|
||||
self.assertEqual(len(d), 1)
|
||||
self.assertEqual(len(d[0]), 2)
|
||||
self.assertEqual(d[0][1], 1)
|
||||
|
||||
page_url = urlreverse('ietf.person.views.profile', kwargs=dict(email_or_name=person.name))
|
||||
r = self.client.get(page_url)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2011-2020, All Rights Reserved
|
||||
# Copyright The IETF Trust 2011-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
@ -311,6 +311,24 @@ class ChangeStateTests(TestCase):
|
|||
# action holders
|
||||
self.assertCountEqual(draft.action_holders.all(), [ad])
|
||||
|
||||
def test_iesg_state_edit_button(self):
|
||||
ad = Person.objects.get(user__username="ad")
|
||||
draft = WgDraftFactory(ad=ad,states=[('draft','active'),('draft-iesg','ad-eval')])
|
||||
|
||||
url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name))
|
||||
self.client.login(username="ad", password="ad+password")
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
self.assertIn("Edit", q('tr:contains("IESG state")').text())
|
||||
|
||||
draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="dead"))
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
self.assertNotIn("Edit", q('tr:contains("IESG state")').text())
|
||||
|
||||
|
||||
class EditInfoTests(TestCase):
|
||||
def test_edit_info(self):
|
||||
|
|
359
ietf/doc/tests_statement.py
Normal file
359
ietf/doc/tests_statement.py
Normal file
|
@ -0,0 +1,359 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
import debug # pyflakes:ignore
|
||||
|
||||
from pyquery import PyQuery
|
||||
|
||||
from pathlib import Path
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls import reverse as urlreverse
|
||||
|
||||
from ietf.doc.factories import StatementFactory, DocEventFactory
|
||||
from ietf.doc.models import Document, DocAlias, State, NewRevisionDocEvent
|
||||
from ietf.group.models import Group
|
||||
from ietf.person.factories import PersonFactory
|
||||
from ietf.utils.mail import outbox, empty_outbox
|
||||
from ietf.utils.test_utils import (
|
||||
TestCase,
|
||||
reload_db_objects,
|
||||
login_testing_unauthorized,
|
||||
)
|
||||
|
||||
|
||||
class StatementsTestCase(TestCase):
|
||||
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [
|
||||
"DOCUMENT_PATH_PATTERN"
|
||||
]
|
||||
|
||||
def extract_content(self, response):
|
||||
if not hasattr(response, "_cached_extraction"):
|
||||
response._cached_extraction = list(response.streaming_content)[0].decode(
|
||||
"utf-8"
|
||||
)
|
||||
return response._cached_extraction
|
||||
|
||||
def write_statement_markdown_file(self, statement):
|
||||
(
|
||||
Path(settings.DOCUMENT_PATH_PATTERN.format(doc=statement))
|
||||
/ ("%s-%s.md" % (statement.name, statement.rev))
|
||||
).write_text(
|
||||
"""# This is a test statement.
|
||||
Version: {statement.rev}
|
||||
|
||||
## A section
|
||||
|
||||
This test section has some text.
|
||||
"""
|
||||
)
|
||||
|
||||
def write_statement_pdf_file(self, statement):
|
||||
(
|
||||
Path(settings.DOCUMENT_PATH_PATTERN.format(doc=statement))
|
||||
/ ("%s-%s.pdf" % (statement.name, statement.rev))
|
||||
).write_text(
|
||||
f"{statement.rev} This is not valid PDF, but the test does not need it to be"
|
||||
)
|
||||
|
||||
def test_statement_doc_view(self):
|
||||
doc = StatementFactory()
|
||||
self.write_statement_markdown_file(doc)
|
||||
url = urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name))
|
||||
response = self.client.get(url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
q = PyQuery(response.content)
|
||||
self.assertEqual(q("#statement-state").text(), "Active")
|
||||
self.assertEqual(q("#statement-type").text(), "IAB Statement")
|
||||
self.assertIn("has some text", q(".card-body").text())
|
||||
published = doc.docevent_set.filter(type="published_statement").last().time
|
||||
self.assertIn(
|
||||
published.astimezone(ZoneInfo(settings.TIME_ZONE)).date().isoformat(),
|
||||
q("#published").text(),
|
||||
)
|
||||
|
||||
doc.set_state(State.objects.get(type_id="statement", slug="replaced"))
|
||||
doc2 = StatementFactory()
|
||||
doc2.relateddocument_set.create(
|
||||
relationship_id="replaces", target=doc.docalias.first()
|
||||
)
|
||||
response = self.client.get(url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
q = PyQuery(response.content)
|
||||
self.assertEqual(q("#statement-state").text(), "Replaced")
|
||||
self.assertEqual(q("#statement-type").text(), "Replaced IAB Statement")
|
||||
self.assertEqual(q("#statement-type").next().text(), f"Replaced by {doc2.name}")
|
||||
|
||||
url = urlreverse(
|
||||
"ietf.doc.views_doc.document_main", kwargs=dict(name=doc2.name)
|
||||
)
|
||||
response = self.client.get(url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
q = PyQuery(response.content)
|
||||
self.assertEqual(q("#statement-type").text(), "IAB Statement")
|
||||
self.assertEqual(q("#statement-type").next().text(), f"Replaces {doc.name}")
|
||||
|
||||
def test_serve_pdf(self):
|
||||
url = urlreverse(
|
||||
"ietf.doc.views_statement.serve_pdf",
|
||||
kwargs=dict(name="statement-does-not-exist"),
|
||||
)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 404)
|
||||
|
||||
doc = StatementFactory()
|
||||
url = urlreverse(
|
||||
"ietf.doc.views_statement.serve_pdf", kwargs=dict(name=doc.name)
|
||||
)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 404) # File not found
|
||||
|
||||
self.write_statement_pdf_file(doc)
|
||||
doc.rev = "01"
|
||||
e = DocEventFactory(type="published_statement", doc=doc, rev=doc.rev)
|
||||
doc.save_with_history([e])
|
||||
self.write_statement_pdf_file(doc)
|
||||
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(r.get("Content-Type"), "application/pdf")
|
||||
self.assertTrue(
|
||||
self.extract_content(r).startswith(doc.rev)
|
||||
) # relies on test doc not actually being pdf
|
||||
|
||||
url = urlreverse(
|
||||
"ietf.doc.views_statement.serve_pdf", kwargs=dict(name=doc.name, rev="00")
|
||||
)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(self.extract_content(r).startswith("00 "))
|
||||
url = urlreverse(
|
||||
"ietf.doc.views_statement.serve_pdf", kwargs=dict(name=doc.name, rev="01")
|
||||
)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertTrue(self.extract_content(r).startswith("01 "))
|
||||
|
||||
def test_submit(self):
|
||||
doc = StatementFactory()
|
||||
url = urlreverse("ietf.doc.views_statement.submit", kwargs=dict(name=doc.name))
|
||||
|
||||
rev = doc.rev
|
||||
r = self.client.post(
|
||||
url, {"statement_submission": "enter", "statement_content": "# oiwefrase"}
|
||||
)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
doc = reload_db_objects(doc)
|
||||
self.assertEqual(rev, doc.rev)
|
||||
|
||||
nobody = PersonFactory()
|
||||
self.client.login(
|
||||
username=nobody.user.username, password=nobody.user.username + "+password"
|
||||
)
|
||||
r = self.client.post(
|
||||
url, {"statement_submission": "enter", "statement_content": "# oiwefrase"}
|
||||
)
|
||||
self.assertEqual(r.status_code, 403)
|
||||
doc = reload_db_objects(doc)
|
||||
self.assertEqual(rev, doc.rev)
|
||||
self.client.logout()
|
||||
|
||||
for username in ["secretary"]: # There is potential for expanding this list
|
||||
self.client.login(username=username, password=username + "+password")
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
file = SimpleUploadedFile(
|
||||
"random.pdf",
|
||||
b"not valid pdf",
|
||||
content_type="application/pdf",
|
||||
)
|
||||
for postdict in [
|
||||
{
|
||||
"statement_submission": "enter",
|
||||
"statement_content": f"# {username}",
|
||||
},
|
||||
{
|
||||
"statement_submission": "upload",
|
||||
"statement_file": file,
|
||||
},
|
||||
]:
|
||||
docevent_count = doc.docevent_set.count()
|
||||
empty_outbox()
|
||||
r = self.client.post(url, postdict)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
doc = reload_db_objects(doc)
|
||||
self.assertEqual("%02d" % (int(rev) + 1), doc.rev)
|
||||
if postdict["statement_submission"] == "enter":
|
||||
self.assertEqual(f"# {username}", doc.text())
|
||||
else:
|
||||
self.assertEqual("not valid pdf", doc.text())
|
||||
self.assertEqual(docevent_count + 1, doc.docevent_set.count())
|
||||
self.assertEqual(0, len(outbox))
|
||||
rev = doc.rev
|
||||
self.client.logout()
|
||||
|
||||
def test_start_new_statement(self):
|
||||
url = urlreverse("ietf.doc.views_statement.new_statement")
|
||||
login_testing_unauthorized(self, "secretary", url)
|
||||
r = self.client.get(url)
|
||||
self.assertContains(
|
||||
r,
|
||||
"Replace this with the content of the statement in markdown source",
|
||||
status_code=200,
|
||||
)
|
||||
group = Group.objects.get(acronym="iab")
|
||||
r = self.client.post(
|
||||
url,
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="default",
|
||||
statement_submission="enter",
|
||||
statement_content=render_to_string(
|
||||
"doc/statement/statement_template.md", {"settings": settings}
|
||||
),
|
||||
),
|
||||
)
|
||||
self.assertContains(r, "The example content may not be saved.", status_code=200)
|
||||
|
||||
file = SimpleUploadedFile(
|
||||
"random.pdf",
|
||||
b"not valid pdf",
|
||||
content_type="application/pdf",
|
||||
)
|
||||
group = Group.objects.get(acronym="iab")
|
||||
for postdict in [
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="title one",
|
||||
statement_submission="enter",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="title two",
|
||||
statement_submission="upload",
|
||||
statement_file=file,
|
||||
),
|
||||
]:
|
||||
empty_outbox()
|
||||
r = self.client.post(url, postdict)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
name = f"statement-{group.acronym}-{postdict['title']}".replace(
|
||||
" ", "-"
|
||||
) # cheap slugification
|
||||
statement = Document.objects.filter(
|
||||
name=name, type_id="statement"
|
||||
).first()
|
||||
self.assertIsNotNone(statement)
|
||||
self.assertIsNotNone(DocAlias.objects.filter(name=name).first())
|
||||
self.assertEqual(statement.title, postdict["title"])
|
||||
self.assertEqual(statement.rev, "00")
|
||||
self.assertEqual(statement.get_state_slug(), "active")
|
||||
self.assertEqual(
|
||||
statement.latest_event(NewRevisionDocEvent).rev, "00"
|
||||
)
|
||||
self.assertIsNotNone(statement.latest_event(type="published_statement"))
|
||||
if postdict["statement_submission"] == "enter":
|
||||
self.assertEqual(statement.text_or_error(), "some stuff")
|
||||
else:
|
||||
self.assertTrue(statement.uploaded_filename.endswith("pdf"))
|
||||
self.assertEqual(len(outbox), 0)
|
||||
|
||||
existing_statement = StatementFactory()
|
||||
for postdict in [
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="",
|
||||
statement_submission="enter",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="a title",
|
||||
statement_submission="enter",
|
||||
statement_content="",
|
||||
),
|
||||
dict(
|
||||
group=group.pk,
|
||||
title=existing_statement.title,
|
||||
statement_submission="enter",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="森川",
|
||||
statement_submission="enter",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
dict(
|
||||
group=group.pk,
|
||||
title="a title",
|
||||
statement_submission="",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
dict(
|
||||
group="",
|
||||
title="a title",
|
||||
statement_submission="enter",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
dict(
|
||||
group=0,
|
||||
title="a title",
|
||||
statement_submission="enter",
|
||||
statement_content="some stuff",
|
||||
),
|
||||
]:
|
||||
r = self.client.post(url, postdict)
|
||||
self.assertEqual(r.status_code, 200, f"Wrong status_code for {postdict}")
|
||||
q = PyQuery(r.content)
|
||||
self.assertTrue(
|
||||
q("form div.is-invalid"), f"Expected an error for {postdict}"
|
||||
)
|
||||
|
||||
def test_submit_non_markdown_formats(self):
|
||||
doc = StatementFactory()
|
||||
|
||||
file = SimpleUploadedFile(
|
||||
"random.pdf",
|
||||
b"01 This is not valid PDF, but the test does not need it to be",
|
||||
content_type="application/pdf",
|
||||
)
|
||||
|
||||
url = urlreverse("ietf.doc.views_statement.submit", kwargs=dict(name=doc.name))
|
||||
login_testing_unauthorized(self, "secretary", url)
|
||||
|
||||
r = self.client.post(
|
||||
url,
|
||||
{
|
||||
"statement_submission": "upload",
|
||||
"statement_file": file,
|
||||
},
|
||||
)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
self.assertEqual(
|
||||
r["Location"],
|
||||
urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)),
|
||||
)
|
||||
|
||||
doc = reload_db_objects(doc)
|
||||
self.assertEqual(doc.rev, "01")
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
self.assertEqual(
|
||||
q("#id_statement_content").text().strip(),
|
||||
"The current revision of this statement is in pdf format",
|
||||
)
|
||||
|
||||
file = SimpleUploadedFile(
|
||||
"random.mp4", b"29ucdvn2o09hano5", content_type="video/mp4"
|
||||
)
|
||||
r = self.client.post(
|
||||
url, {"statement_submission": "upload", "statement_file": file}
|
||||
)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
self.assertTrue("Unexpected content" in q("#id_statement_file").next().text())
|
|
@ -14,7 +14,7 @@ from textwrap import wrap
|
|||
from django.conf import settings
|
||||
from django.urls import reverse as urlreverse
|
||||
|
||||
from ietf.doc.factories import DocumentFactory, IndividualRfcFactory, WgRfcFactory
|
||||
from ietf.doc.factories import DocumentFactory, IndividualRfcFactory, WgRfcFactory, DocEventFactory
|
||||
from ietf.doc.models import ( Document, DocAlias, State, DocEvent,
|
||||
BallotPositionDocEvent, NewRevisionDocEvent, TelechatDocEvent, WriteupDocEvent )
|
||||
from ietf.doc.utils import create_ballot_if_not_open
|
||||
|
@ -86,6 +86,16 @@ class StatusChangeTests(TestCase):
|
|||
status_change = Document.objects.get(name='status-change-imaginary-new2')
|
||||
self.assertIsNone(status_change.ad)
|
||||
|
||||
# Verify that the right thing happens if a control along the way uppercases RFC
|
||||
r = self.client.post(url,dict(
|
||||
document_name="imaginary-new3",title="A new imaginary status change",
|
||||
create_in_state=state_strpk,notify='ipu@ietf.org',new_relation_row_blah="RFC9999",
|
||||
statchg_relation_row_blah="tois")
|
||||
)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
status_change = Document.objects.get(name='status-change-imaginary-new3')
|
||||
self.assertTrue(status_change.relateddocument_set.filter(relationship__slug='tois',target__name='rfc9999'))
|
||||
|
||||
|
||||
def test_change_state(self):
|
||||
|
||||
|
@ -289,7 +299,19 @@ class StatusChangeTests(TestCase):
|
|||
self.assertEqual(r.status_code,200)
|
||||
self.assertContains(r, 'RFC9999 from Proposed Standard to Internet Standard')
|
||||
self.assertContains(r, 'RFC9998 from Informational to Historic')
|
||||
|
||||
q = PyQuery(r.content)
|
||||
self.assertEqual(len(q("button[name='send_last_call_request']")), 1)
|
||||
|
||||
# Make sure request LC isn't offered with no responsible AD.
|
||||
doc.ad = None
|
||||
doc.save_with_history([DocEventFactory(doc=doc)])
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code,200)
|
||||
q = PyQuery(r.content)
|
||||
self.assertEqual(len(q("button[name='send_last_call_request']")), 0)
|
||||
doc.ad = Person.objects.get(name='Ad No2')
|
||||
doc.save_with_history([DocEventFactory(doc=doc)])
|
||||
|
||||
# request last call
|
||||
messages_before = len(outbox)
|
||||
r = self.client.post(url,dict(last_call_text='stuff',send_last_call_request='Save+and+Request+Last+Call'))
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2009-2020, All Rights Reserved
|
||||
# Copyright The IETF Trust 2009-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
|
||||
# All rights reserved. Contact: Pasi Eronen <pasi.eronen@nokia.com>
|
||||
|
@ -37,7 +37,7 @@ from django.conf import settings
|
|||
from django.urls import include
|
||||
from django.views.generic import RedirectView
|
||||
|
||||
from ietf.doc import views_search, views_draft, views_ballot, views_status_change, views_doc, views_downref, views_stats, views_help, views_bofreq
|
||||
from ietf.doc import views_search, views_draft, views_ballot, views_status_change, views_doc, views_downref, views_stats, views_help, views_bofreq, views_statement
|
||||
from ietf.utils.urls import url
|
||||
|
||||
session_patterns = [
|
||||
|
@ -57,6 +57,7 @@ urlpatterns = [
|
|||
url(r'^start-rfc-status-change/(?:%(name)s/)?$' % settings.URL_REGEXPS, views_status_change.start_rfc_status_change),
|
||||
url(r'^bof-requests/?$', views_bofreq.bof_requests),
|
||||
url(r'^bof-requests/new/$', views_bofreq.new_bof_request),
|
||||
url(r'^statement/new/$', views_statement.new_statement),
|
||||
url(r'^iesg/?$', views_search.drafts_in_iesg_process),
|
||||
url(r'^email-aliases/?$', views_doc.email_aliases),
|
||||
url(r'^downref/?$', views_downref.downref_registry),
|
||||
|
@ -169,6 +170,7 @@ urlpatterns = [
|
|||
|
||||
url(r'^%(charter)s/' % settings.URL_REGEXPS, include('ietf.doc.urls_charter')),
|
||||
url(r'^%(bofreq)s/' % settings.URL_REGEXPS, include('ietf.doc.urls_bofreq')),
|
||||
url(r'^%(statement)s/' % settings.URL_REGEXPS, include('ietf.doc.urls_statement')),
|
||||
url(r'^%(name)s/conflict-review/' % settings.URL_REGEXPS, include('ietf.doc.urls_conflict_review')),
|
||||
url(r'^%(name)s/status-change/' % settings.URL_REGEXPS, include('ietf.doc.urls_status_change')),
|
||||
url(r'^%(name)s/material/' % settings.URL_REGEXPS, include('ietf.doc.urls_material')),
|
||||
|
|
10
ietf/doc/urls_statement.py
Normal file
10
ietf/doc/urls_statement.py
Normal file
|
@ -0,0 +1,10 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.conf import settings
|
||||
from ietf.doc import views_statement
|
||||
from ietf.utils.urls import url
|
||||
|
||||
urlpatterns = [
|
||||
url(r"^(?:%(rev)s/)?pdf/$" % settings.URL_REGEXPS, views_statement.serve_pdf),
|
||||
url(r"^submit/$", views_statement.submit),
|
||||
]
|
|
@ -721,7 +721,7 @@ def ballot_rfceditornote(request, name):
|
|||
e = WriteupDocEvent(doc=doc, rev=doc.rev, by=login)
|
||||
e.by = login
|
||||
e.type = "changed_rfc_editor_note_text"
|
||||
e.desc = "RFC Editor Note was changed"
|
||||
e.desc = f"RFC Editor Note was changed to \n{t}"
|
||||
e.text = t.rstrip()
|
||||
e.save()
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2009-2022, All Rights Reserved
|
||||
# Copyright The IETF Trust 2009-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Parts Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies).
|
||||
|
@ -79,7 +79,7 @@ from ietf.mailtrigger.utils import gather_relevant_expansions
|
|||
from ietf.meeting.models import Session
|
||||
from ietf.meeting.utils import group_sessions, get_upcoming_manageable_sessions, sort_sessions, add_event_info_to_session_qs
|
||||
from ietf.review.models import ReviewAssignment
|
||||
from ietf.review.utils import can_request_review_of_doc, review_assignments_to_list_for_docs
|
||||
from ietf.review.utils import can_request_review_of_doc, review_assignments_to_list_for_docs, review_requests_to_list_for_docs
|
||||
from ietf.review.utils import no_review_from_teams_on_doc
|
||||
from ietf.utils import markup_txt, log, markdown
|
||||
from ietf.utils.draft import PlaintextDraft
|
||||
|
@ -191,6 +191,9 @@ def interesting_doc_relations(doc):
|
|||
return interesting_relations_that, interesting_relations_that_doc
|
||||
|
||||
def document_main(request, name, rev=None, document_html=False):
|
||||
if name.startswith("rfc") and rev is not None:
|
||||
raise Http404()
|
||||
|
||||
doc = get_object_or_404(Document.objects.select_related(), docalias__name=name)
|
||||
|
||||
# take care of possible redirections
|
||||
|
@ -498,6 +501,7 @@ def document_main(request, name, rev=None, document_html=False):
|
|||
started_iesg_process = doc.latest_event(type="started_iesg_process")
|
||||
|
||||
review_assignments = review_assignments_to_list_for_docs([doc]).get(doc.name, [])
|
||||
review_requests = review_requests_to_list_for_docs([doc]).get(doc.name, [])
|
||||
no_review_from_teams = no_review_from_teams_on_doc(doc, rev or doc.rev)
|
||||
|
||||
exp_comment = doc.latest_event(IanaExpertDocEvent,type="comment")
|
||||
|
@ -512,11 +516,13 @@ def document_main(request, name, rev=None, document_html=False):
|
|||
# Do not show the Auth48 URL in the "Additional URLs" section
|
||||
additional_urls = doc.documenturl_set.exclude(tag_id='auth48')
|
||||
|
||||
# Stream description passing test
|
||||
# Stream description and name passing test
|
||||
if doc.stream != None:
|
||||
stream_desc = doc.stream.desc
|
||||
stream = "draft-stream-" + doc.stream.slug
|
||||
else:
|
||||
stream_desc = "(None)"
|
||||
stream = "(None)"
|
||||
|
||||
html = None
|
||||
js = None
|
||||
|
@ -553,6 +559,7 @@ def document_main(request, name, rev=None, document_html=False):
|
|||
split_content=split_content,
|
||||
revisions=simple_diff_revisions if document_html else revisions,
|
||||
snapshot=snapshot,
|
||||
stream=stream,
|
||||
stream_desc=stream_desc,
|
||||
latest_revision=latest_revision,
|
||||
latest_rev=latest_rev,
|
||||
|
@ -613,6 +620,7 @@ def document_main(request, name, rev=None, document_html=False):
|
|||
actions=actions,
|
||||
presentations=presentations,
|
||||
review_assignments=review_assignments,
|
||||
review_requests=review_requests,
|
||||
no_review_from_teams=no_review_from_teams,
|
||||
due_date=due_date,
|
||||
diff_revisions=diff_revisions
|
||||
|
@ -842,7 +850,40 @@ def document_main(request, name, rev=None, document_html=False):
|
|||
)
|
||||
)
|
||||
|
||||
if doc.type_id == "statement":
|
||||
if doc.uploaded_filename:
|
||||
basename = doc.uploaded_filename.split(".")[0] # strip extension
|
||||
else:
|
||||
basename = f"{doc.name}-{doc.rev}"
|
||||
variants = set([match.name.split(".")[1] for match in Path(doc.get_file_path()).glob(f"{basename}.*")])
|
||||
inlineable = any([ext in variants for ext in ["md", "txt"]])
|
||||
if inlineable:
|
||||
content = markdown.markdown(doc.text_or_error())
|
||||
else:
|
||||
content = "No format available to display inline"
|
||||
if "pdf" in variants:
|
||||
pdf_url = urlreverse(
|
||||
"ietf.doc.views_statement.serve_pdf",
|
||||
kwargs=dict(name=doc.name, rev=doc.rev),
|
||||
)
|
||||
content += f" - Download [pdf]({pdf_url})"
|
||||
content = markdown.markdown(content)
|
||||
can_manage = has_role(request.user,["Secretariat"]) # Add IAB or IESG as appropriate
|
||||
interesting_relations_that, interesting_relations_that_doc = interesting_doc_relations(doc)
|
||||
published = doc.latest_event(type="published_statement").time
|
||||
|
||||
return render(request, "doc/document_statement.html",
|
||||
dict(doc=doc,
|
||||
top=top,
|
||||
revisions=revisions,
|
||||
latest_rev=latest_rev,
|
||||
published=published,
|
||||
content=content,
|
||||
snapshot=snapshot,
|
||||
replaces=interesting_relations_that_doc.filter(relationship="replaces"),
|
||||
replaced_by=interesting_relations_that.filter(relationship="replaces"),
|
||||
can_manage=can_manage,
|
||||
))
|
||||
|
||||
raise Http404("Document not found: %s" % (name + ("-%s"%rev if rev else "")))
|
||||
|
||||
|
@ -1080,6 +1121,9 @@ def document_history(request, name):
|
|||
|
||||
|
||||
def document_bibtex(request, name, rev=None):
|
||||
if name.startswith('rfc') and rev is not None:
|
||||
raise Http404()
|
||||
|
||||
# Make sure URL_REGEXPS did not grab too much for the rev number
|
||||
if rev != None and len(rev) != 2:
|
||||
mo = re.search(r"^(?P<m>[0-9]{1,2})-(?P<n>[0-9]{2})$", rev)
|
||||
|
@ -1112,6 +1156,11 @@ def document_bibtex(request, name, rev=None):
|
|||
else:
|
||||
doi = None
|
||||
|
||||
if doc.is_dochistory():
|
||||
latest_event = doc.latest_event(type='new_revision', rev=rev)
|
||||
if latest_event:
|
||||
doc.pub_date = latest_event.time
|
||||
|
||||
return render(request, "doc/document_bibtex.bib",
|
||||
dict(doc=doc,
|
||||
replaced_by=replaced_by,
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
# Copyright The IETF Trust 2013-2023, All Rights Reserved
|
||||
|
||||
from django.shortcuts import render, get_object_or_404
|
||||
from django.http import Http404
|
||||
|
||||
|
@ -5,11 +7,13 @@ from ietf.doc.models import State, StateType, IESG_SUBSTATE_TAGS
|
|||
from ietf.name.models import DocRelationshipName, DocTagName
|
||||
from ietf.doc.utils import get_tags_for_stream_id
|
||||
|
||||
def state_help(request, type):
|
||||
def state_help(request, type=None):
|
||||
slug, title = {
|
||||
"draft-iesg": ("draft-iesg", "IESG States for Internet-Drafts"),
|
||||
"draft-rfceditor": ("draft-rfceditor", "RFC Editor States for Internet-Drafts"),
|
||||
"draft-iana-action": ("draft-iana-action", "IANA Action States for Internet-Drafts"),
|
||||
"draft-iana-review": ("draft-iana-review", "IANA Review States for Internet-Drafts"),
|
||||
"draft-iana-experts": ("draft-iana-experts", "IANA Expert Review States for Internet-Drafts"),
|
||||
"draft-stream-ietf": ("draft-stream-ietf", "IETF Stream States for Internet-Drafts"),
|
||||
"draft-stream-irtf": ("draft-stream-irtf", "IRTF Stream States for Internet-Drafts"),
|
||||
"draft-stream-ise": ("draft-stream-ise", "ISE Stream States for Internet-Drafts"),
|
||||
|
@ -19,10 +23,11 @@ def state_help(request, type):
|
|||
"status-change": ("statchg", "RFC Status Change States"),
|
||||
"bofreq": ("bofreq", "BOF Request States"),
|
||||
"procmaterials": ("procmaterials", "Proceedings Materials States"),
|
||||
"statement": {"statement", "Statement States"}
|
||||
}.get(type, (None, None))
|
||||
state_type = get_object_or_404(StateType, slug=slug)
|
||||
|
||||
states = State.objects.filter(type=state_type).order_by("order")
|
||||
states = State.objects.filter(used=True, type=state_type).order_by("order")
|
||||
|
||||
has_next_states = False
|
||||
for state in states:
|
||||
|
|
|
@ -389,9 +389,6 @@ def reject_reviewer_assignment(request, name, assignment_id):
|
|||
state=review_assignment.state,
|
||||
)
|
||||
|
||||
policy = get_reviewer_queue_policy(review_assignment.review_request.team)
|
||||
policy.return_reviewer_to_rotation_top(review_assignment.reviewer.person, form.cleaned_data['wants_to_be_next'])
|
||||
|
||||
msg = render_to_string("review/reviewer_assignment_rejected.txt", {
|
||||
"by": request.user.person,
|
||||
"message_to_secretary": form.cleaned_data.get("message_to_secretary"),
|
||||
|
@ -441,7 +438,7 @@ def withdraw_reviewer_assignment(request, name, assignment_id):
|
|||
)
|
||||
|
||||
policy = get_reviewer_queue_policy(review_assignment.review_request.team)
|
||||
policy.return_reviewer_to_rotation_top(review_assignment.reviewer.person, True)
|
||||
policy.set_wants_to_be_next(review_assignment.reviewer.person)
|
||||
|
||||
msg = "Review assignment withdrawn by %s"%request.user.person
|
||||
|
||||
|
|
|
@ -58,7 +58,7 @@ from ietf.doc.models import ( Document, DocHistory, DocAlias, State,
|
|||
IESG_BALLOT_ACTIVE_STATES, IESG_STATCHG_CONFLREV_ACTIVE_STATES,
|
||||
IESG_CHARTER_ACTIVE_STATES )
|
||||
from ietf.doc.fields import select2_id_doc_name_json
|
||||
from ietf.doc.utils import get_search_cache_key, augment_events_with_revision
|
||||
from ietf.doc.utils import get_search_cache_key, augment_events_with_revision, needed_ballot_positions
|
||||
from ietf.group.models import Group
|
||||
from ietf.idindex.index import active_drafts_index_by_group
|
||||
from ietf.name.models import DocTagName, DocTypeName, StreamName
|
||||
|
@ -290,8 +290,8 @@ def search_for_name(request, name):
|
|||
redirect_to = find_unique(rev_split.group(1))
|
||||
if redirect_to:
|
||||
rev = rev_split.group(2)
|
||||
# check if we can redirect directly to the rev
|
||||
if DocHistory.objects.filter(doc__docalias__name=redirect_to, rev=rev).exists():
|
||||
# check if we can redirect directly to the rev if it's draft, if rfc - always redirect to main page
|
||||
if not redirect_to.startswith('rfc') and DocHistory.objects.filter(doc__docalias__name=redirect_to, rev=rev).exists():
|
||||
return cached_redirect(cache_key, urlreverse("ietf.doc.views_doc.document_main", kwargs={ "name": redirect_to, "rev": rev }))
|
||||
else:
|
||||
return cached_redirect(cache_key, urlreverse("ietf.doc.views_doc.document_main", kwargs={ "name": redirect_to }))
|
||||
|
@ -497,6 +497,7 @@ def ad_workload(request):
|
|||
[
|
||||
("Publication Requested Internet-Draft", False),
|
||||
("AD Evaluation Internet-Draft", False),
|
||||
("Last Call Requested Internet-Draft", True),
|
||||
("In Last Call Internet-Draft", True),
|
||||
("Waiting for Writeup Internet-Draft", False),
|
||||
("IESG Evaluation - Defer Internet-Draft", False),
|
||||
|
@ -532,6 +533,7 @@ def ad_workload(request):
|
|||
[
|
||||
("Publication Requested Status Change", False),
|
||||
("AD Evaluation Status Change", False),
|
||||
("Last Call Requested Status Change", True),
|
||||
("In Last Call Status Change", True),
|
||||
("Waiting for Writeup Status Change", False),
|
||||
("IESG Evaluation Status Change", True),
|
||||
|
@ -705,18 +707,20 @@ def docs_for_ad(request, name):
|
|||
|
||||
for d in results:
|
||||
d.search_heading = ad_dashboard_group(d)
|
||||
#
|
||||
# Additional content showing docs with blocking positions by this ad
|
||||
|
||||
# Additional content showing docs with blocking positions by this AD,
|
||||
# and docs that the AD hasn't balloted on that are lacking ballot positions to progress
|
||||
blocked_docs = []
|
||||
not_balloted_docs = []
|
||||
if ad in get_active_ads():
|
||||
possible_docs = Document.objects.filter(Q(states__type="draft-iesg",
|
||||
states__slug__in=IESG_BALLOT_ACTIVE_STATES) |
|
||||
Q(states__type="charter",
|
||||
states__slug__in=IESG_CHARTER_ACTIVE_STATES) |
|
||||
Q(states__type__in=("statchg", "conflrev"),
|
||||
states__slug__in=IESG_STATCHG_CONFLREV_ACTIVE_STATES),
|
||||
docevent__ballotpositiondocevent__pos__blocking=True,
|
||||
docevent__ballotpositiondocevent__balloter=ad).distinct()
|
||||
iesg_docs = Document.objects.filter(Q(states__type="draft-iesg",
|
||||
states__slug__in=IESG_BALLOT_ACTIVE_STATES) |
|
||||
Q(states__type="charter",
|
||||
states__slug__in=IESG_CHARTER_ACTIVE_STATES) |
|
||||
Q(states__type__in=("statchg", "conflrev"),
|
||||
states__slug__in=IESG_STATCHG_CONFLREV_ACTIVE_STATES)).distinct()
|
||||
possible_docs = iesg_docs.filter(docevent__ballotpositiondocevent__pos__blocking=True,
|
||||
docevent__ballotpositiondocevent__balloter=ad)
|
||||
for doc in possible_docs:
|
||||
ballot = doc.active_ballot()
|
||||
if not ballot:
|
||||
|
@ -737,12 +741,26 @@ def docs_for_ad(request, name):
|
|||
if blocked_docs:
|
||||
blocked_docs.sort(key=lambda d: min(p.time for p in d.blocking_positions if p.balloter==ad), reverse=True)
|
||||
|
||||
for d in blocked_docs:
|
||||
if d.get_base_name() == 'charter-ietf-shmoo-01-04.txt':
|
||||
print('Is in list')
|
||||
possible_docs = iesg_docs.exclude(
|
||||
Q(docevent__ballotpositiondocevent__balloter=ad)
|
||||
)
|
||||
for doc in possible_docs:
|
||||
ballot = doc.active_ballot()
|
||||
if (
|
||||
not ballot
|
||||
or doc.get_state_slug("draft") == "repl"
|
||||
or (doc.telechat_date() and doc.telechat_date() > timezone.now().date())
|
||||
):
|
||||
continue
|
||||
|
||||
iesg_ballot_summary = needed_ballot_positions(
|
||||
doc, list(ballot.active_balloter_positions().values())
|
||||
)
|
||||
if re.search(r"\bNeeds\s+\d+", iesg_ballot_summary):
|
||||
not_balloted_docs.append(doc)
|
||||
|
||||
return render(request, 'doc/drafts_for_ad.html', {
|
||||
'form':form, 'docs':results, 'meta':meta, 'ad_name': ad.plain_name(), 'blocked_docs': blocked_docs
|
||||
'form':form, 'docs':results, 'meta':meta, 'ad_name': ad.plain_name(), 'blocked_docs': blocked_docs, 'not_balloted_docs': not_balloted_docs
|
||||
})
|
||||
def drafts_in_last_call(request):
|
||||
lc_state = State.objects.get(type="draft-iesg", slug="lc").pk
|
||||
|
|
274
ietf/doc/views_statement.py
Normal file
274
ietf/doc/views_statement.py
Normal file
|
@ -0,0 +1,274 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
import debug # pyflakes: ignore
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.http import FileResponse, Http404
|
||||
from django.views.decorators.cache import cache_control
|
||||
from django.shortcuts import get_object_or_404, render, redirect
|
||||
from django.template.loader import render_to_string
|
||||
from ietf.utils import markdown
|
||||
from django.utils.html import escape
|
||||
|
||||
from ietf.doc.models import Document, DocAlias, DocEvent, NewRevisionDocEvent, State
|
||||
from ietf.group.models import Group
|
||||
from ietf.ietfauth.utils import role_required
|
||||
from ietf.utils.text import xslugify
|
||||
from ietf.utils.textupload import get_cleaned_text_file_content
|
||||
|
||||
CONST_PDF_REV_NOTICE = "The current revision of this statement is in pdf format"
|
||||
|
||||
|
||||
@cache_control(max_age=3600)
|
||||
def serve_pdf(self, name, rev=None):
|
||||
doc = get_object_or_404(Document, name=name)
|
||||
if rev is None:
|
||||
rev = doc.rev
|
||||
p = Path(doc.get_file_path()).joinpath(f"{doc.name}-{rev}.pdf")
|
||||
if not p.exists():
|
||||
raise Http404
|
||||
else:
|
||||
return FileResponse(p.open(mode="rb"), content_type="application/pdf")
|
||||
|
||||
|
||||
class StatementUploadForm(forms.Form):
|
||||
ACTIONS = [
|
||||
("enter", "Enter content directly"),
|
||||
("upload", "Upload content from file"),
|
||||
]
|
||||
statement_submission = forms.ChoiceField(choices=ACTIONS, widget=forms.RadioSelect)
|
||||
statement_file = forms.FileField(
|
||||
label="Markdown or PDF source file to upload", required=False
|
||||
)
|
||||
statement_content = forms.CharField(
|
||||
widget=forms.Textarea(attrs={"rows": 30}), required=False, strip=False
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
def require_field(f):
|
||||
if not self.cleaned_data.get(f):
|
||||
self.add_error(f, forms.ValidationError("You must fill in this field."))
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
submission_method = self.cleaned_data.get("statement_submission")
|
||||
markdown_content = ""
|
||||
if submission_method == "enter":
|
||||
if require_field("statement_content"):
|
||||
markdown_content = self.cleaned_data["statement_content"].replace(
|
||||
"\r", ""
|
||||
)
|
||||
default_content = render_to_string(
|
||||
"doc/statement/statement_template.md", {}
|
||||
)
|
||||
if markdown_content == default_content:
|
||||
raise forms.ValidationError(
|
||||
"The example content may not be saved. Edit it to contain the next revision statement content."
|
||||
)
|
||||
if markdown_content == CONST_PDF_REV_NOTICE:
|
||||
raise forms.ValidationError(
|
||||
"Not proceeding with the text noting that the current version is pdf. Did you mean to upload a new PDF?"
|
||||
)
|
||||
elif submission_method == "upload":
|
||||
if require_field("statement_file"):
|
||||
content_type = self.cleaned_data["statement_file"].content_type
|
||||
acceptable_types = (
|
||||
"application/pdf",
|
||||
) + settings.DOC_TEXT_FILE_VALID_UPLOAD_MIME_TYPES
|
||||
if not content_type.startswith(
|
||||
acceptable_types
|
||||
): # dances around decoration of types with encoding etc.
|
||||
self.add_error(
|
||||
"statement_file",
|
||||
forms.ValidationError(
|
||||
f"Unexpected content type: Expected one of {', '.join(acceptable_types)}"
|
||||
),
|
||||
)
|
||||
elif content_type != "application/pdf":
|
||||
markdown_content = get_cleaned_text_file_content(
|
||||
self.cleaned_data["statement_file"]
|
||||
)
|
||||
if markdown_content != "":
|
||||
try:
|
||||
_ = markdown.markdown(markdown_content)
|
||||
except Exception as e:
|
||||
raise forms.ValidationError(f"Markdown processing failed: {e}")
|
||||
|
||||
|
||||
@role_required("Secretariat")
|
||||
def submit(request, name):
|
||||
statement = get_object_or_404(Document, type="statement", name=name)
|
||||
|
||||
if request.method == "POST":
|
||||
form = StatementUploadForm(request.POST, request.FILES)
|
||||
if form.is_valid():
|
||||
statement_submission = form.cleaned_data["statement_submission"]
|
||||
writing_pdf = (
|
||||
statement_submission == "upload"
|
||||
and form.cleaned_data["statement_file"].content_type
|
||||
== "application/pdf"
|
||||
)
|
||||
|
||||
statement.rev = "%02d" % (int(statement.rev) + 1)
|
||||
statement.uploaded_filename = (
|
||||
f"{statement.name}-{statement.rev}.{'pdf' if writing_pdf else 'md'}"
|
||||
)
|
||||
e = NewRevisionDocEvent.objects.create(
|
||||
type="new_revision",
|
||||
doc=statement,
|
||||
by=request.user.person,
|
||||
rev=statement.rev,
|
||||
desc="New revision available",
|
||||
)
|
||||
statement.save_with_history([e])
|
||||
markdown_content = ""
|
||||
if statement_submission == "upload":
|
||||
if not writing_pdf:
|
||||
markdown_content = get_cleaned_text_file_content(
|
||||
form.cleaned_data["statement_file"]
|
||||
)
|
||||
else:
|
||||
markdown_content = form.cleaned_data["statement_content"]
|
||||
with Path(statement.get_file_name()).open(
|
||||
mode="wb" if writing_pdf else "w"
|
||||
) as destination:
|
||||
if writing_pdf:
|
||||
for chunk in form.cleaned_data["statement_file"].chunks():
|
||||
destination.write(chunk)
|
||||
else:
|
||||
destination.write(markdown_content)
|
||||
return redirect("ietf.doc.views_doc.document_main", name=statement.name)
|
||||
|
||||
else:
|
||||
if statement.uploaded_filename.endswith("pdf"):
|
||||
text = CONST_PDF_REV_NOTICE
|
||||
else:
|
||||
text = statement.text_or_error()
|
||||
init = {
|
||||
"statement_content": text,
|
||||
"statement_submission": "enter",
|
||||
}
|
||||
form = StatementUploadForm(initial=init)
|
||||
return render(
|
||||
request, "doc/statement/upload_content.html", {"form": form, "doc": statement}
|
||||
)
|
||||
|
||||
|
||||
class NewStatementForm(StatementUploadForm):
|
||||
group = forms.ModelChoiceField(
|
||||
queryset=Group.objects.filter(acronym__in=["iab", "iesg"])
|
||||
)
|
||||
title = forms.CharField(max_length=255)
|
||||
field_order = [
|
||||
"group",
|
||||
"title",
|
||||
"statement_submission",
|
||||
"statement_file",
|
||||
"statement_content",
|
||||
]
|
||||
|
||||
def name_from_title_and_group(self, title, group):
|
||||
title_slug = xslugify(title)
|
||||
if title_slug.startswith(f"{group.acronym}-"):
|
||||
title_slug = title_slug[len(f"{group.acronym}-") :]
|
||||
name = f"statement-{group.acronym}-{title_slug[:240]}"
|
||||
return name.replace("_", "-")
|
||||
|
||||
def clean(self):
|
||||
if all([field in self.cleaned_data for field in ["title", "group"]]):
|
||||
title = self.cleaned_data["title"]
|
||||
group = self.cleaned_data["group"]
|
||||
name = self.name_from_title_and_group(title, group)
|
||||
if name == self.name_from_title_and_group("", group):
|
||||
self.add_error(
|
||||
"title",
|
||||
forms.ValidationError(
|
||||
"The filename derived from this title is empty. Please include a few descriptive words using ascii or numeric characters"
|
||||
),
|
||||
)
|
||||
if Document.objects.filter(name=name).exists():
|
||||
self.add_error(
|
||||
"title",
|
||||
forms.ValidationError(
|
||||
"This title produces a filename already used by an existing statement"
|
||||
),
|
||||
)
|
||||
return super().clean()
|
||||
|
||||
|
||||
@role_required("Secretariat")
|
||||
def new_statement(request):
|
||||
if request.method == "POST":
|
||||
form = NewStatementForm(request.POST, request.FILES)
|
||||
if form.is_valid():
|
||||
statement_submission = form.cleaned_data["statement_submission"]
|
||||
writing_pdf = (
|
||||
statement_submission == "upload"
|
||||
and form.cleaned_data["statement_file"].content_type
|
||||
== "application/pdf"
|
||||
)
|
||||
|
||||
group = form.cleaned_data["group"]
|
||||
title = form.cleaned_data["title"]
|
||||
name = form.name_from_title_and_group(title, group)
|
||||
statement = Document.objects.create(
|
||||
type_id="statement",
|
||||
group=group,
|
||||
name=name,
|
||||
title=title,
|
||||
abstract="",
|
||||
rev="00",
|
||||
uploaded_filename=f"{name}-00.{'pdf' if writing_pdf else 'md'}",
|
||||
)
|
||||
statement.set_state(State.objects.get(type_id="statement", slug="active"))
|
||||
e1 = NewRevisionDocEvent.objects.create(
|
||||
type="new_revision",
|
||||
doc=statement,
|
||||
by=request.user.person,
|
||||
rev=statement.rev,
|
||||
desc="New revision available",
|
||||
time=statement.time,
|
||||
)
|
||||
e2 = DocEvent.objects.create(
|
||||
type="published_statement",
|
||||
doc=statement,
|
||||
rev=statement.rev,
|
||||
by=request.user.person,
|
||||
desc="Statement published",
|
||||
)
|
||||
statement.save_with_history([e1, e2])
|
||||
alias = DocAlias.objects.create(name=name)
|
||||
alias.docs.set([statement])
|
||||
markdown_content = ""
|
||||
if statement_submission == "upload":
|
||||
if not writing_pdf:
|
||||
markdown_content = get_cleaned_text_file_content(
|
||||
form.cleaned_data["statement_file"]
|
||||
)
|
||||
else:
|
||||
markdown_content = form.cleaned_data["statement_content"]
|
||||
with Path(statement.get_file_name()).open(
|
||||
mode="wb" if writing_pdf else "w"
|
||||
) as destination:
|
||||
if writing_pdf:
|
||||
for chunk in form.cleaned_data["statement_file"].chunks():
|
||||
destination.write(chunk)
|
||||
else:
|
||||
destination.write(markdown_content)
|
||||
return redirect("ietf.doc.views_doc.document_main", name=statement.name)
|
||||
|
||||
else:
|
||||
init = {
|
||||
"statement_content": escape(
|
||||
render_to_string(
|
||||
"doc/statement/statement_template.md", {"settings": settings}
|
||||
)
|
||||
),
|
||||
"statement_submission": "enter",
|
||||
}
|
||||
form = NewStatementForm(initial=init)
|
||||
return render(request, "doc/statement/new_statement.html", {"form": form})
|
|
@ -40,7 +40,7 @@ def model_to_timeline_data(model, field='time', **kwargs):
|
|||
assert field in [ f.name for f in model._meta.get_fields() ]
|
||||
|
||||
objects = ( model.objects.filter(**kwargs)
|
||||
.annotate(date=TruncDate(field))
|
||||
.annotate(date=TruncDate(field, tzinfo=datetime.timezone.utc))
|
||||
.order_by('date')
|
||||
.values('date')
|
||||
.annotate(count=Count('id')))
|
||||
|
|
|
@ -418,7 +418,7 @@ def clean_helper(form, formtype):
|
|||
rfc_fields = {}
|
||||
status_fields={}
|
||||
for k in sorted(form.data.keys()):
|
||||
v = form.data[k]
|
||||
v = form.data[k].lower()
|
||||
if k.startswith('new_relation_row'):
|
||||
if re.match(r'\d{1,4}',v):
|
||||
v = 'rfc'+v
|
||||
|
@ -685,7 +685,7 @@ def last_call(request, name):
|
|||
form = LastCallTextForm(initial=dict(last_call_text=escape(last_call_event.text)))
|
||||
|
||||
if request.method == 'POST':
|
||||
if "save_last_call_text" in request.POST or "send_last_call_request" in request.POST:
|
||||
if "save_last_call_text" in request.POST or ("send_last_call_request" in request.POST and status_change.ad is not None):
|
||||
form = LastCallTextForm(request.POST)
|
||||
if form.is_valid():
|
||||
events = []
|
||||
|
|
|
@ -283,12 +283,6 @@ class GroupPagesTests(TestCase):
|
|||
self.assertContains(r, milestone.desc)
|
||||
self.assertContains(r, milestone.docs.all()[0].name)
|
||||
|
||||
def test_about_rendertest(self):
|
||||
group = CharterFactory().group
|
||||
url = urlreverse('ietf.group.views.group_about_rendertest', kwargs=dict(acronym=group.acronym))
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code,200)
|
||||
|
||||
|
||||
def test_group_about(self):
|
||||
|
||||
|
|
|
@ -41,7 +41,7 @@ class ReviewTests(TestCase):
|
|||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertContains(r, review_req.doc.name)
|
||||
self.assertContains(r, assignment.reviewer.person.name)
|
||||
self.assertContains(r, escape(assignment.reviewer.person.name))
|
||||
|
||||
url = urlreverse(ietf.group.views.review_requests, kwargs={ 'acronym': group.acronym })
|
||||
|
||||
|
@ -101,8 +101,20 @@ class ReviewTests(TestCase):
|
|||
|
||||
self.assertEqual(list(suggested_review_requests_for_team(team)), [])
|
||||
|
||||
# blocked by an already existing request (don't suggest it again)
|
||||
review_req.state_id = "requested"
|
||||
review_req.save()
|
||||
self.assertEqual(list(suggested_review_requests_for_team(team)), [])
|
||||
|
||||
# ... but not for a previous version
|
||||
review_req.requested_rev = prev_rev
|
||||
review_req.save()
|
||||
self.assertEqual(len(suggested_review_requests_for_team(team)), 1)
|
||||
|
||||
|
||||
# blocked by completion
|
||||
review_req.state = ReviewRequestStateName.objects.get(slug="assigned")
|
||||
review_req.requested_rev = ""
|
||||
review_req.save()
|
||||
assignment.state = ReviewAssignmentStateName.objects.get(slug="completed")
|
||||
assignment.reviewed_rev = review_req.doc.rev
|
||||
|
@ -116,6 +128,7 @@ class ReviewTests(TestCase):
|
|||
|
||||
self.assertEqual(len(suggested_review_requests_for_team(team)), 1)
|
||||
|
||||
|
||||
def test_suggested_review_requests_on_lc_and_telechat(self):
|
||||
review_req = ReviewRequestFactory(state_id='assigned')
|
||||
doc = review_req.doc
|
||||
|
@ -183,7 +196,7 @@ class ReviewTests(TestCase):
|
|||
urlreverse(ietf.group.views.reviewer_overview, kwargs={ 'acronym': group.acronym, 'group_type': group.type_id })]:
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
self.assertContains(r, reviewer.name)
|
||||
self.assertContains(r, escape(reviewer.name))
|
||||
self.assertContains(r, review_req1.doc.name)
|
||||
# without a login, reason for being unavailable should not be seen
|
||||
self.assertNotContains(r, "Availability")
|
||||
|
@ -199,13 +212,13 @@ class ReviewTests(TestCase):
|
|||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
# review team members can see reason for being unavailable
|
||||
self.assertContains(r, "Availability")
|
||||
self.assertContains(r, "Available")
|
||||
|
||||
self.client.login(username="secretary", password="secretary+password")
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
# secretariat can see reason for being unavailable
|
||||
self.assertContains(r, "Availability")
|
||||
self.assertContains(r, "Available")
|
||||
|
||||
# add one closed review with no response and see it is visible
|
||||
review_req2 = ReviewRequestFactory(state_id='completed',team=team)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2013-2020, All Rights Reserved
|
||||
# Copyright The IETF Trust 2013-2023, All Rights Reserved
|
||||
|
||||
from django.conf import settings
|
||||
from django.urls import include
|
||||
|
@ -20,7 +20,6 @@ info_detail_urls = [
|
|||
url(r'^documents/subscription/$', community_views.subscription),
|
||||
url(r'^charter/$', views.group_about),
|
||||
url(r'^about/$', views.group_about),
|
||||
url(r'^about/rendertest/$', views.group_about_rendertest),
|
||||
url(r'^about/status/$', views.group_about_status),
|
||||
url(r'^about/status/edit/$', views.group_about_status_edit),
|
||||
url(r'^about/status/meeting/(?P<num>\d+)/$', views.group_about_status_meeting),
|
||||
|
@ -48,6 +47,7 @@ info_detail_urls = [
|
|||
url(r'^secretarysettings/$', views.change_review_secretary_settings),
|
||||
url(r'^reset_next_reviewer/$', views.reset_next_reviewer),
|
||||
url(r'^email-aliases/$', RedirectView.as_view(pattern_name=views.email,permanent=False),name='ietf.group.urls_info_details.redirect.email'),
|
||||
url(r'^statements/$', views.statements),
|
||||
]
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2012-2021, All Rights Reserved
|
||||
# Copyright The IETF Trust 2012-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
@ -233,6 +233,8 @@ def construct_group_menu_context(request, group, selected, group_type, others):
|
|||
|
||||
if group.features.has_meetings:
|
||||
entries.append(("Meetings", urlreverse("ietf.group.views.meetings", kwargs=kwargs)))
|
||||
if group.acronym in ["iab", "iesg"]:
|
||||
entries.append(("Statements", urlreverse("ietf.group.views.statements", kwargs=kwargs)))
|
||||
entries.append(("History", urlreverse("ietf.group.views.history", kwargs=kwargs)))
|
||||
entries.append(("Photos", urlreverse("ietf.group.views.group_photos", kwargs=kwargs)))
|
||||
entries.append(("Email expansions", urlreverse("ietf.group.views.email", kwargs=kwargs)))
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright The IETF Trust 2009-2022, All Rights Reserved
|
||||
# Copyright The IETF Trust 2009-2023, All Rights Reserved
|
||||
#
|
||||
# Portion Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
|
||||
# All rights reserved. Contact: Pasi Eronen <pasi.eronen@nokia.com>
|
||||
|
@ -48,7 +48,7 @@ from simple_history.utils import update_change_reason
|
|||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.db.models import Q, Count
|
||||
from django.db.models import Q, Count, OuterRef, Subquery
|
||||
from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse
|
||||
from django.shortcuts import render, redirect, get_object_or_404
|
||||
from django.template.loader import render_to_string
|
||||
|
@ -61,7 +61,7 @@ import debug # pyflakes:ignore
|
|||
|
||||
from ietf.community.models import CommunityList, EmailSubscription
|
||||
from ietf.community.utils import docs_tracked_by_community_list
|
||||
from ietf.doc.models import DocTagName, State, DocAlias, RelatedDocument, Document
|
||||
from ietf.doc.models import DocTagName, State, DocAlias, RelatedDocument, Document, DocEvent
|
||||
from ietf.doc.templatetags.ietf_filters import clean_whitespace
|
||||
from ietf.doc.utils import get_chartering_type, get_tags_for_stream_id
|
||||
from ietf.doc.utils_charter import charter_name_for_group, replace_charter_of_replaced_group
|
||||
|
@ -604,17 +604,6 @@ def all_status(request):
|
|||
}
|
||||
)
|
||||
|
||||
def group_about_rendertest(request, acronym, group_type=None):
|
||||
group = get_group_or_404(acronym, group_type)
|
||||
charter = None
|
||||
if group.charter:
|
||||
charter = get_charter_text(group)
|
||||
try:
|
||||
rendered = markdown.markdown(charter)
|
||||
except Exception as e:
|
||||
rendered = f'Markdown rendering failed: {e}'
|
||||
return render(request, 'group/group_about_rendertest.html', {'group':group, 'charter':charter, 'rendered':rendered})
|
||||
|
||||
def group_about_status(request, acronym, group_type=None):
|
||||
group = get_group_or_404(acronym, group_type)
|
||||
status_update = group.latest_event(type='status_update')
|
||||
|
@ -2093,7 +2082,32 @@ def reset_next_reviewer(request, acronym, group_type=None):
|
|||
|
||||
return render(request, 'group/reset_next_reviewer.html', { 'group':group, 'form': form,})
|
||||
|
||||
|
||||
def statements(request, acronym, group_type=None):
|
||||
if not acronym in ["iab", "iesg"]:
|
||||
raise Http404
|
||||
group = get_group_or_404(acronym, group_type)
|
||||
statements = group.document_set.filter(type_id="statement").annotate(
|
||||
published=Subquery(
|
||||
DocEvent.objects.filter(
|
||||
doc=OuterRef("pk"),
|
||||
type="published_statement"
|
||||
).order_by("-time").values("time")[:1]
|
||||
)
|
||||
).order_by("-published")
|
||||
return render(
|
||||
request,
|
||||
"group/statements.html",
|
||||
construct_group_menu_context(
|
||||
request,
|
||||
group,
|
||||
"statements",
|
||||
group_type,
|
||||
{
|
||||
"group": group,
|
||||
"statements": statements,
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -223,7 +223,7 @@ def is_bofreq_editor(user, doc):
|
|||
def openid_userinfo(claims, user):
|
||||
# Populate claims dict.
|
||||
person = get_object_or_404(Person, user=user)
|
||||
email = person.email()
|
||||
email = person.email_allowing_inactive()
|
||||
if person.photo:
|
||||
photo_url = person.cdn_photo_url()
|
||||
else:
|
||||
|
|
|
@ -41,7 +41,7 @@ class PasswordStrengthInput(PasswordInput):
|
|||
<div class="progress" style="margin-bottom: 10px;">
|
||||
<div class="progress-bar progress-bar-warning password_strength_bar" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="5" style="width: 0%%"></div>
|
||||
</div>
|
||||
<p class="text-muted password_strength_info hidden">
|
||||
<p class="text-body-secondary password_strength_info hidden">
|
||||
<span class="label label-danger">
|
||||
%s
|
||||
</span>
|
||||
|
@ -89,7 +89,7 @@ class PasswordConfirmationInput(PasswordInput):
|
|||
|
||||
confirmation_markup = """
|
||||
<div style="margin-top: 10px;" class="hidden password_strength_info">
|
||||
<p class="text-muted">
|
||||
<p class="text-body-secondary">
|
||||
<span class="label label-danger">
|
||||
%s
|
||||
</span>
|
||||
|
|
31
ietf/mailtrigger/migrations/0002_slidesubmitter.py
Normal file
31
ietf/mailtrigger/migrations/0002_slidesubmitter.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
def forward(apps, schema_editor):
|
||||
MailTrigger = apps.get_model("mailtrigger", "MailTrigger")
|
||||
Recipient = apps.get_model("mailtrigger", "Recipient")
|
||||
r = Recipient.objects.create(
|
||||
slug="slides_proposer",
|
||||
desc="Person who proposed slides",
|
||||
template="{{ proposer.email }}"
|
||||
)
|
||||
mt = MailTrigger.objects.get(slug="slides_proposed")
|
||||
mt.cc.add(r)
|
||||
|
||||
def reverse(apps, schema_editor):
|
||||
MailTrigger = apps.get_model("mailtrigger", "MailTrigger")
|
||||
Recipient = apps.get_model("mailtrigger", "Recipient")
|
||||
mt = MailTrigger.objects.get(slug="slides_proposed")
|
||||
r = Recipient.objects.get(slug="slides_proposer")
|
||||
mt.cc.remove(r)
|
||||
r.delete()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("mailtrigger", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forward, reverse)
|
||||
]
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2015-2019, All Rights Reserved
|
||||
# Copyright The IETF Trust 2015-2023, All Rights Reserved
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
|
@ -70,7 +70,7 @@ def gather_relevant_expansions(**kwargs):
|
|||
|
||||
relevant.add('doc_state_edited')
|
||||
|
||||
if not doc.type_id in ['bofreq',]:
|
||||
if not doc.type_id in ['bofreq', 'statement']:
|
||||
relevant.update(['doc_telechat_details_changed','ballot_deferred','iesg_ballot_saved'])
|
||||
|
||||
if doc.type_id in ['draft','statchg']:
|
||||
|
|
|
@ -648,6 +648,20 @@ class MeetingTests(BaseMeetingTestCase):
|
|||
self.assertFalse(row.find("a:contains(\"Bad Slideshow\")"))
|
||||
|
||||
# test with no meeting number in url
|
||||
# Add various group sessions
|
||||
groups = []
|
||||
parent_groups = [
|
||||
GroupFactory.create(type_id="area", acronym="gen"),
|
||||
GroupFactory.create(acronym="iab"),
|
||||
GroupFactory.create(acronym="irtf"),
|
||||
]
|
||||
for parent in parent_groups:
|
||||
groups.append(GroupFactory.create(parent=parent))
|
||||
for acronym in ["rsab", "edu"]:
|
||||
groups.append(GroupFactory.create(acronym=acronym))
|
||||
for group in groups:
|
||||
SessionFactory(meeting=meeting, group=group)
|
||||
self.write_materials_files(meeting, session)
|
||||
url = urlreverse("ietf.meeting.views.materials", kwargs=dict())
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
|
@ -657,6 +671,10 @@ class MeetingTests(BaseMeetingTestCase):
|
|||
self.assertTrue(row.find('a:contains("Minutes")'))
|
||||
self.assertTrue(row.find('a:contains("Slideshow")'))
|
||||
self.assertFalse(row.find("a:contains(\"Bad Slideshow\")"))
|
||||
# test for different sections
|
||||
sections = ["plenaries", "gen", "iab", "editorial", "irtf", "training"]
|
||||
for section in sections:
|
||||
self.assertEqual(len(q(f"#{section}")), 1, f"{section} section should exists in proceedings")
|
||||
|
||||
# test with a loggged-in wg chair
|
||||
self.client.login(username="marschairman", password="marschairman+password")
|
||||
|
@ -7632,6 +7650,13 @@ class ProceedingsTests(BaseMeetingTestCase):
|
|||
'Correct title and link for each ProceedingsMaterial should appear in the correct order'
|
||||
)
|
||||
|
||||
def _assertGroupSessions(self, response, meeting):
|
||||
"""Checks that group/sessions are present"""
|
||||
pq = PyQuery(response.content)
|
||||
sections = ["plenaries", "gen", "iab", "editorial", "irtf", "training"]
|
||||
for section in sections:
|
||||
self.assertEqual(len(pq(f"#{section}")), 1, f"{section} section should exists in proceedings")
|
||||
|
||||
def test_proceedings(self):
|
||||
"""Proceedings should be displayed correctly
|
||||
|
||||
|
@ -7645,6 +7670,20 @@ class ProceedingsTests(BaseMeetingTestCase):
|
|||
SessionPresentationFactory(document__type_id='recording',session=session)
|
||||
SessionPresentationFactory(document__type_id='recording',session=session,document__title="Audio recording for tests")
|
||||
|
||||
# Add various group sessions
|
||||
groups = []
|
||||
parent_groups = [
|
||||
GroupFactory.create(type_id="area", acronym="gen"),
|
||||
GroupFactory.create(acronym="iab"),
|
||||
GroupFactory.create(acronym="irtf"),
|
||||
]
|
||||
for parent in parent_groups:
|
||||
groups.append(GroupFactory.create(parent=parent))
|
||||
for acronym in ["rsab", "edu"]:
|
||||
groups.append(GroupFactory.create(acronym=acronym))
|
||||
for group in groups:
|
||||
SessionFactory(meeting=meeting, group=group)
|
||||
|
||||
self.write_materials_files(meeting, session)
|
||||
self._create_proceedings_materials(meeting)
|
||||
|
||||
|
@ -7691,6 +7730,7 @@ class ProceedingsTests(BaseMeetingTestCase):
|
|||
# configurable contents
|
||||
self._assertMeetingHostsDisplayed(r, meeting)
|
||||
self._assertProceedingsMaterialsDisplayed(r, meeting)
|
||||
self._assertGroupSessions(r, meeting)
|
||||
|
||||
def test_named_session(self):
|
||||
"""Session with a name should appear separately in the proceedings"""
|
||||
|
|
|
@ -159,18 +159,19 @@ def materials(request, num=None):
|
|||
irtf = sessions.filter(group__parent__acronym = 'irtf')
|
||||
training = sessions.filter(group__acronym__in=['edu','iaoc'], type_id__in=['regular', 'other', ])
|
||||
iab = sessions.filter(group__parent__acronym = 'iab')
|
||||
editorial = sessions.filter(group__acronym__in=['rsab','rswg'])
|
||||
|
||||
session_pks = [s.pk for ss in [plenaries, ietf, irtf, training, iab] for s in ss]
|
||||
session_pks = [s.pk for ss in [plenaries, ietf, irtf, training, iab, editorial] for s in ss]
|
||||
other = sessions.filter(type__in=['regular'], group__type__features__has_meetings=True).exclude(pk__in=session_pks)
|
||||
|
||||
for topic in [plenaries, ietf, training, irtf, iab]:
|
||||
for topic in [plenaries, ietf, training, irtf, iab, editorial]:
|
||||
for event in topic:
|
||||
date_list = []
|
||||
for slide_event in event.all_meeting_slides(): date_list.append(slide_event.time)
|
||||
for agenda_event in event.all_meeting_agendas(): date_list.append(agenda_event.time)
|
||||
if date_list: setattr(event, 'last_update', sorted(date_list, reverse=True)[0])
|
||||
|
||||
for session_list in [plenaries, ietf, training, irtf, iab, other]:
|
||||
for session_list in [plenaries, ietf, training, irtf, iab, editorial, other]:
|
||||
for session in session_list:
|
||||
session.past_cutoff_date = past_cutoff_date
|
||||
|
||||
|
@ -183,6 +184,7 @@ def materials(request, num=None):
|
|||
irtf, _ = organize_proceedings_sessions(irtf)
|
||||
training, _ = organize_proceedings_sessions(training)
|
||||
iab, _ = organize_proceedings_sessions(iab)
|
||||
editorial, _ = organize_proceedings_sessions(editorial)
|
||||
other, _ = organize_proceedings_sessions(other)
|
||||
|
||||
ietf_areas = []
|
||||
|
@ -202,6 +204,7 @@ def materials(request, num=None):
|
|||
'training': training,
|
||||
'irtf': irtf,
|
||||
'iab': iab,
|
||||
'editorial': editorial,
|
||||
'other': other,
|
||||
'cut_off_date': cut_off_date,
|
||||
'cor_cut_off_date': cor_cut_off_date,
|
||||
|
@ -1704,9 +1707,11 @@ def api_get_session_materials (request, session_id=None):
|
|||
})
|
||||
else:
|
||||
pass # no action available if it's past cutoff
|
||||
|
||||
|
||||
agenda = session.agenda()
|
||||
agenda_url = agenda.get_href() if agenda is not None else None
|
||||
return JsonResponse({
|
||||
"url": session.agenda().get_href(),
|
||||
"url": agenda_url,
|
||||
"slides": {
|
||||
"decks": list(map(agenda_extract_slide, session.slides())),
|
||||
"actions": slides_actions,
|
||||
|
@ -2885,7 +2890,7 @@ def propose_session_slides(request, session_id, num):
|
|||
submission.filename = filename
|
||||
submission.save()
|
||||
|
||||
(to, cc) = gather_address_lists('slides_proposed', group=session.group).as_strings()
|
||||
(to, cc) = gather_address_lists('slides_proposed', group=session.group, proposer=request.user.person).as_strings()
|
||||
msg_txt = render_to_string("meeting/slides_proposed.txt", {
|
||||
"to": to,
|
||||
"cc": cc,
|
||||
|
@ -3777,6 +3782,10 @@ def proceedings(request, num=None):
|
|||
sessions.filter(group__parent__acronym = 'iab')
|
||||
.exclude(current_status='notmeet')
|
||||
)
|
||||
editorial, _ = organize_proceedings_sessions(
|
||||
sessions.filter(group__acronym__in=['rsab','rswg'])
|
||||
.exclude(current_status='notmeet')
|
||||
)
|
||||
|
||||
ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym='edu').order_by('group__parent__acronym', 'group__acronym')
|
||||
ietf_areas = []
|
||||
|
@ -3796,6 +3805,7 @@ def proceedings(request, num=None):
|
|||
'training': training,
|
||||
'irtf': irtf,
|
||||
'iab': iab,
|
||||
'editorial': editorial,
|
||||
'ietf_areas': ietf_areas,
|
||||
'cut_off_date': cut_off_date,
|
||||
'cor_cut_off_date': cor_cut_off_date,
|
||||
|
|
|
@ -2539,6 +2539,32 @@
|
|||
"model": "doc.state",
|
||||
"pk": 174
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "The statement is active",
|
||||
"name": "Active",
|
||||
"next_states": [],
|
||||
"order": 0,
|
||||
"slug": "active",
|
||||
"type": "statement",
|
||||
"used": true
|
||||
},
|
||||
"model": "doc.state",
|
||||
"pk": 175
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "The statement has been replaced",
|
||||
"name": "Replaced",
|
||||
"next_states": [],
|
||||
"order": 0,
|
||||
"slug": "replaced",
|
||||
"type": "statement",
|
||||
"used": true
|
||||
},
|
||||
"model": "doc.state",
|
||||
"pk": 176
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"label": "State"
|
||||
|
@ -2742,6 +2768,13 @@
|
|||
"model": "doc.statetype",
|
||||
"pk": "statchg"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"label": "Statement State"
|
||||
},
|
||||
"model": "doc.statetype",
|
||||
"pk": "statement"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"about_page": "ietf.group.views.group_about",
|
||||
|
@ -5557,7 +5590,9 @@
|
|||
},
|
||||
{
|
||||
"fields": {
|
||||
"cc": [],
|
||||
"cc": [
|
||||
"slides_proposer"
|
||||
],
|
||||
"desc": "Recipients when slides are proposed for a given session",
|
||||
"to": [
|
||||
"group_chairs",
|
||||
|
@ -6348,6 +6383,14 @@
|
|||
"model": "mailtrigger.recipient",
|
||||
"pk": "session_requests"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "Person who proposed slides",
|
||||
"template": "{{ proposer.email }}"
|
||||
},
|
||||
"model": "mailtrigger.recipient",
|
||||
"pk": "slides_proposer"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "The managers of any related streams",
|
||||
|
@ -10593,6 +10636,17 @@
|
|||
"model": "name.doctypename",
|
||||
"pk": "statchg"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"name": "Statement",
|
||||
"order": 0,
|
||||
"prefix": "statement",
|
||||
"used": true
|
||||
},
|
||||
"model": "name.doctypename",
|
||||
"pk": "statement"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
|
@ -11059,8 +11113,9 @@
|
|||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"legend": "C",
|
||||
"name": "Comment",
|
||||
"order": 0,
|
||||
"order": 1,
|
||||
"used": true
|
||||
},
|
||||
"model": "name.feedbacktypename",
|
||||
|
@ -11069,8 +11124,9 @@
|
|||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"legend": "J",
|
||||
"name": "Junk",
|
||||
"order": 0,
|
||||
"order": 5,
|
||||
"used": true
|
||||
},
|
||||
"model": "name.feedbacktypename",
|
||||
|
@ -11079,8 +11135,9 @@
|
|||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"legend": "N",
|
||||
"name": "Nomination",
|
||||
"order": 0,
|
||||
"order": 2,
|
||||
"used": true
|
||||
},
|
||||
"model": "name.feedbacktypename",
|
||||
|
@ -11089,8 +11146,20 @@
|
|||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"legend": "O",
|
||||
"name": "Overcome by events",
|
||||
"order": 4,
|
||||
"used": true
|
||||
},
|
||||
"model": "name.feedbacktypename",
|
||||
"pk": "obe"
|
||||
},
|
||||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"legend": "Q",
|
||||
"name": "Questionnaire response",
|
||||
"order": 0,
|
||||
"order": 3,
|
||||
"used": true
|
||||
},
|
||||
"model": "name.feedbacktypename",
|
||||
|
@ -11099,8 +11168,9 @@
|
|||
{
|
||||
"fields": {
|
||||
"desc": "",
|
||||
"legend": "R",
|
||||
"name": "Read",
|
||||
"order": 0,
|
||||
"order": 6,
|
||||
"used": true
|
||||
},
|
||||
"model": "name.feedbacktypename",
|
||||
|
@ -13180,7 +13250,7 @@
|
|||
"desc": "Flipchars",
|
||||
"name": "Flipcharts",
|
||||
"order": 0,
|
||||
"used": true
|
||||
"used": false
|
||||
},
|
||||
"model": "name.roomresourcename",
|
||||
"pk": "flipcharts"
|
||||
|
@ -13230,7 +13300,7 @@
|
|||
"desc": "Experimental Room Setup (U-Shape and classroom, subject to availability)",
|
||||
"name": "Experimental Room Setup (U-Shape and classroom)",
|
||||
"order": 0,
|
||||
"used": true
|
||||
"used": false
|
||||
},
|
||||
"model": "name.roomresourcename",
|
||||
"pk": "u-shape"
|
||||
|
@ -16385,7 +16455,7 @@
|
|||
"fields": {
|
||||
"command": "xym",
|
||||
"switch": "--version",
|
||||
"time": "2023-05-14T07:09:32.713Z",
|
||||
"time": "2023-07-17T07:09:47.664Z",
|
||||
"used": true,
|
||||
"version": "xym 0.7.0"
|
||||
},
|
||||
|
@ -16396,7 +16466,7 @@
|
|||
"fields": {
|
||||
"command": "pyang",
|
||||
"switch": "--version",
|
||||
"time": "2023-05-14T07:09:33.045Z",
|
||||
"time": "2023-07-17T07:09:48.075Z",
|
||||
"used": true,
|
||||
"version": "pyang 2.5.3"
|
||||
},
|
||||
|
@ -16407,7 +16477,7 @@
|
|||
"fields": {
|
||||
"command": "yanglint",
|
||||
"switch": "--version",
|
||||
"time": "2023-05-14T07:09:33.065Z",
|
||||
"time": "2023-07-17T07:09:48.104Z",
|
||||
"used": true,
|
||||
"version": "yanglint SO 1.9.2"
|
||||
},
|
||||
|
@ -16418,9 +16488,9 @@
|
|||
"fields": {
|
||||
"command": "xml2rfc",
|
||||
"switch": "--version",
|
||||
"time": "2023-05-14T07:09:33.970Z",
|
||||
"time": "2023-07-17T07:09:49.075Z",
|
||||
"used": true,
|
||||
"version": "xml2rfc 3.17.1"
|
||||
"version": "xml2rfc 3.17.4"
|
||||
},
|
||||
"model": "utils.versioninfo",
|
||||
"pk": 4
|
||||
|
|
21
ietf/name/migrations/0004_statements.py
Normal file
21
ietf/name/migrations/0004_statements.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
def forward(apps, schema_editor):
|
||||
DocTypeName = apps.get_model("name", "DocTypeName")
|
||||
DocTypeName.objects.create(slug="statement", name="Statement", prefix="statement", desc="", used=True)
|
||||
|
||||
|
||||
def reverse(apps, schema_editor):
|
||||
DocTypeName = apps.get_model("name", "DocTypeName")
|
||||
DocTypeName.objects.filter(slug="statement").delete()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("name", "0003_populate_telechatagendasectionname"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forward, reverse),
|
||||
]
|
20
ietf/name/migrations/0005_feedbacktypename_schema.py
Normal file
20
ietf/name/migrations/0005_feedbacktypename_schema.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("name", "0004_statements"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="FeedbackTypeName",
|
||||
name="legend",
|
||||
field=models.CharField(
|
||||
default="",
|
||||
help_text="One-character legend for feedback classification form",
|
||||
max_length=1,
|
||||
),
|
||||
),
|
||||
]
|
36
ietf/name/migrations/0006_feedbacktypename_data.py
Normal file
36
ietf/name/migrations/0006_feedbacktypename_data.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
def forward(apps, schema_editor):
|
||||
FeedbackTypeName = apps.get_model("name", "FeedbackTypeName")
|
||||
FeedbackTypeName.objects.create(slug="obe", name="Overcome by events")
|
||||
for slug, legend, order in (
|
||||
('comment', 'C', 1),
|
||||
('nomina', 'N', 2),
|
||||
('questio', 'Q', 3),
|
||||
('obe', 'O', 4),
|
||||
('junk', 'J', 5),
|
||||
('read', 'R', 6),
|
||||
):
|
||||
ft = FeedbackTypeName.objects.get(slug=slug)
|
||||
ft.legend = legend
|
||||
ft.order = order
|
||||
ft.save()
|
||||
|
||||
def reverse(apps, schema_editor):
|
||||
FeedbackTypeName = apps.get_model("name", "FeedbackTypeName")
|
||||
FeedbackTypeName.objects.filter(slug="obe").delete()
|
||||
for ft in FeedbackTypeName.objects.all():
|
||||
ft.legend = ""
|
||||
ft.order = 0
|
||||
ft.save()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("name", "0005_feedbacktypename_schema"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(forward, reverse),
|
||||
]
|
|
@ -94,6 +94,7 @@ class NomineePositionStateName(NameModel):
|
|||
"""Status of a candidate for a position: None, Accepted, Declined"""
|
||||
class FeedbackTypeName(NameModel):
|
||||
"""Type of feedback: questionnaires, nominations, comments"""
|
||||
legend = models.CharField(max_length=1, default="", help_text="One-character legend for feedback classification form")
|
||||
class DBTemplateTypeName(NameModel):
|
||||
"""reStructuredText, Plain, Django"""
|
||||
class DraftSubmissionStateName(NameModel):
|
||||
|
|
|
@ -653,7 +653,7 @@ class PrivateKeyForm(forms.Form):
|
|||
|
||||
class PendingFeedbackForm(forms.ModelForm):
|
||||
|
||||
type = forms.ModelChoiceField(queryset=FeedbackTypeName.objects.all().order_by('pk'), widget=forms.RadioSelect, empty_label='Unclassified', required=False)
|
||||
type = forms.ModelChoiceField(queryset=FeedbackTypeName.objects.all(), widget=forms.RadioSelect, empty_label='Unclassified', required=False)
|
||||
|
||||
class Meta:
|
||||
model = Feedback
|
||||
|
|
|
@ -11,7 +11,7 @@ import debug # pyflakes:ignore
|
|||
from ietf.nomcom.factories import nomcom_kwargs_for_year, NomComFactory, NomineePositionFactory, key
|
||||
from ietf.person.factories import EmailFactory
|
||||
from ietf.group.models import Group
|
||||
from ietf.person.models import User
|
||||
from ietf.person.models import Person, User
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = ("Create (or delete) a nomcom for test and development purposes.")
|
||||
|
@ -27,7 +27,9 @@ class Command(BaseCommand):
|
|||
if opt_delete:
|
||||
if Group.objects.filter(acronym='nomcom7437').exists():
|
||||
Group.objects.filter(acronym='nomcom7437').delete()
|
||||
User.objects.filter(username__in=['testchair','testmember','testcandidate']).delete()
|
||||
users_to_delete = ['testchair','testmember','testcandidate']
|
||||
Person.objects.filter(user__username__in=users_to_delete).delete()
|
||||
User.objects.filter(username__in=users_to_delete).delete()
|
||||
self.stdout.write("Deleted test group 'nomcom7437' and its related objects.")
|
||||
else:
|
||||
self.stderr.write("test nomcom 'nomcom7437' does not exist; nothing to do.\n")
|
||||
|
@ -57,6 +59,6 @@ class Command(BaseCommand):
|
|||
position__nomcom=nc, position__name='Test Area Director', position__is_iesg_position=True,
|
||||
)
|
||||
|
||||
self.stdout.write("%s\n" % key)
|
||||
self.stdout.write("%s\n" % key.decode())
|
||||
self.stdout.write("Nomcom 7437 created. The private key can also be found at any time\nin ietf/nomcom/factories.py. Note that it is NOT a secure key.\n")
|
||||
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
# Copyright The IETF Trust 2013-2019, All Rights Reserved
|
||||
# Copyright The IETF Trust 2013-2023, All Rights Reserved
|
||||
import os
|
||||
import tempfile
|
||||
import re
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
from django import template
|
||||
from django.conf import settings
|
||||
from django.template.defaultfilters import linebreaksbr, force_escape
|
||||
|
@ -84,3 +86,11 @@ def decrypt(string, request, year, plain=False):
|
|||
if not plain:
|
||||
return force_escape(linebreaksbr(out))
|
||||
return mark_safe(force_escape(out))
|
||||
|
||||
@register.filter
|
||||
def feedback_totals(staterank_list):
|
||||
totals = defaultdict(lambda: 0)
|
||||
for fb_dict in staterank_list:
|
||||
for fbtype_name, fbtype_count, _ in fb_dict['feedback']:
|
||||
totals[fbtype_name] += fbtype_count
|
||||
return totals.values()
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2012-2022, All Rights Reserved
|
||||
# Copyright The IETF Trust 2012-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
@ -1423,6 +1423,35 @@ class InactiveNomcomTests(TestCase):
|
|||
q = PyQuery(response.content)
|
||||
self.assertFalse( q('#templateform') )
|
||||
|
||||
class FeedbackIndexTests(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
setup_test_public_keys_dir(self)
|
||||
self.nc = NomComFactory.create(**nomcom_kwargs_for_year())
|
||||
self.author = PersonFactory.create().email_set.first().address
|
||||
self.member = self.nc.group.role_set.filter(name='member').first().person
|
||||
self.nominee = self.nc.nominee_set.order_by('pk').first()
|
||||
self.position = self.nc.position_set.first()
|
||||
for type_id in ['comment','nomina','questio']:
|
||||
f = FeedbackFactory.create(author=self.author,nomcom=self.nc,type_id=type_id)
|
||||
f.positions.add(self.position)
|
||||
f.nominees.add(self.nominee)
|
||||
|
||||
def tearDown(self):
|
||||
teardown_test_public_keys_dir(self)
|
||||
super().tearDown()
|
||||
|
||||
def test_feedback_index_totals(self):
|
||||
url = reverse('ietf.nomcom.views.view_feedback',kwargs={'year':self.nc.year()})
|
||||
login_testing_unauthorized(self, self.member.user.username, url)
|
||||
provide_private_key_to_test_client(self)
|
||||
response = self.client.get(url)
|
||||
self.assertEqual(response.status_code,200)
|
||||
q = PyQuery(response.content)
|
||||
r = q('tfoot').eq(0).find('td').contents()
|
||||
self.assertEqual([a.strip() for a in r], ['1', '1', '1', '0'])
|
||||
|
||||
class FeedbackLastSeenTests(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -2834,3 +2863,92 @@ class VolunteerDecoratorUnitTests(TestCase):
|
|||
self.assertEqual(v.qualifications,'path_2')
|
||||
if v.person == author_person:
|
||||
self.assertEqual(v.qualifications,'path_3')
|
||||
|
||||
class ReclassifyFeedbackTests(TestCase):
|
||||
"""Tests for feedback reclassification"""
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
setup_test_public_keys_dir(self)
|
||||
nomcom_test_data()
|
||||
self.nc = NomComFactory.create(**nomcom_kwargs_for_year())
|
||||
self.chair = self.nc.group.role_set.filter(name='chair').first().person
|
||||
self.member = self.nc.group.role_set.filter(name='member').first().person
|
||||
self.nominee = self.nc.nominee_set.order_by('pk').first()
|
||||
self.position = self.nc.position_set.first()
|
||||
self.topic = self.nc.topic_set.first()
|
||||
|
||||
def tearDown(self):
|
||||
teardown_test_public_keys_dir(self)
|
||||
super().tearDown()
|
||||
|
||||
def test_reclassify_feedback_nominee(self):
|
||||
fb = FeedbackFactory.create(nomcom=self.nc,type_id='comment')
|
||||
fb.positions.add(self.position)
|
||||
fb.nominees.add(self.nominee)
|
||||
fb.save()
|
||||
self.assertEqual(Feedback.objects.comments().count(), 1)
|
||||
|
||||
url = reverse('ietf.nomcom.views.view_feedback_nominee', kwargs={'year':self.nc.year(), 'nominee_id':self.nominee.id})
|
||||
login_testing_unauthorized(self,self.member.user.username,url)
|
||||
provide_private_key_to_test_client(self)
|
||||
response = self.client.post(url, {'feedback_id': fb.id, 'type': 'obe'})
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
self.client.logout()
|
||||
self.client.login(username=self.chair.user.username, password=self.chair.user.username + "+password")
|
||||
provide_private_key_to_test_client(self)
|
||||
|
||||
response = self.client.post(url, {'feedback_id': fb.id, 'type': 'obe'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
fb = Feedback.objects.get(id=fb.id)
|
||||
self.assertEqual(fb.type_id,'obe')
|
||||
self.assertEqual(Feedback.objects.comments().count(), 0)
|
||||
self.assertEqual(Feedback.objects.filter(type='obe').count(), 1)
|
||||
|
||||
def test_reclassify_feedback_topic(self):
|
||||
fb = FeedbackFactory.create(nomcom=self.nc,type_id='comment')
|
||||
fb.topics.add(self.topic)
|
||||
fb.save()
|
||||
self.assertEqual(Feedback.objects.comments().count(), 1)
|
||||
|
||||
url = reverse('ietf.nomcom.views.view_feedback_topic', kwargs={'year':self.nc.year(), 'topic_id':self.topic.id})
|
||||
login_testing_unauthorized(self,self.member.user.username,url)
|
||||
provide_private_key_to_test_client(self)
|
||||
response = self.client.post(url, {'feedback_id': fb.id, 'type': 'unclassified'})
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
self.client.logout()
|
||||
self.client.login(username=self.chair.user.username, password=self.chair.user.username + "+password")
|
||||
provide_private_key_to_test_client(self)
|
||||
|
||||
response = self.client.post(url, {'feedback_id': fb.id, 'type': 'unclassified'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
fb = Feedback.objects.get(id=fb.id)
|
||||
self.assertEqual(fb.type_id,None)
|
||||
self.assertEqual(Feedback.objects.comments().count(), 0)
|
||||
self.assertEqual(Feedback.objects.filter(type=None).count(), 1)
|
||||
|
||||
def test_reclassify_feedback_unrelated(self):
|
||||
fb = FeedbackFactory(nomcom=self.nc, type_id='read')
|
||||
self.assertEqual(Feedback.objects.filter(type='read').count(), 1)
|
||||
|
||||
url = reverse('ietf.nomcom.views.view_feedback_unrelated', kwargs={'year':self.nc.year()})
|
||||
login_testing_unauthorized(self,self.member.user.username,url)
|
||||
provide_private_key_to_test_client(self)
|
||||
response = self.client.post(url, {'feedback_id': fb.id, 'type': 'junk'})
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
self.client.logout()
|
||||
self.client.login(username=self.chair.user.username, password=self.chair.user.username + "+password")
|
||||
provide_private_key_to_test_client(self)
|
||||
|
||||
response = self.client.post(url, {'feedback_id': fb.id, 'type': 'junk'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
fb = Feedback.objects.get(id=fb.id)
|
||||
self.assertEqual(fb.type_id, 'junk')
|
||||
self.assertEqual(Feedback.objects.filter(type='read').count(), 0)
|
||||
self.assertEqual(Feedback.objects.filter(type='junk').count(), 1)
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
# Copyright The IETF Trust 2012-2020, All Rights Reserved
|
||||
# Copyright The IETF Trust 2012-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
import datetime
|
||||
import re
|
||||
from collections import OrderedDict, Counter
|
||||
from collections import Counter
|
||||
import csv
|
||||
import hmac
|
||||
|
||||
|
@ -14,7 +14,7 @@ from django.contrib.auth.decorators import login_required
|
|||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
|
||||
from django.forms.models import modelformset_factory, inlineformset_factory
|
||||
from django.http import Http404, HttpResponseRedirect, HttpResponse
|
||||
from django.http import Http404, HttpResponseRedirect, HttpResponse, HttpResponseForbidden
|
||||
from django.shortcuts import render, get_object_or_404, redirect
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls import reverse
|
||||
|
@ -190,6 +190,7 @@ def private_index(request, year):
|
|||
nomcom = get_nomcom_by_year(year)
|
||||
all_nominee_positions = NomineePosition.objects.get_by_nomcom(nomcom).not_duplicated()
|
||||
is_chair = nomcom.group.has_role(request.user, "chair")
|
||||
mailto = None
|
||||
if is_chair and request.method == 'POST':
|
||||
if nomcom.group.state_id != 'active':
|
||||
messages.warning(request, "This nomcom is not active. Request administrative assistance if Nominee state needs to change.")
|
||||
|
@ -207,6 +208,8 @@ def private_index(request, year):
|
|||
elif action == "set_as_pending":
|
||||
nominations.update(state='pending')
|
||||
messages.success(request,'The selected nominations have been set as pending')
|
||||
elif action == 'email':
|
||||
mailto = ','.join([np.nominee.email.email_address() for np in nominations])
|
||||
else:
|
||||
messages.warning(request, "Please, select some nominations to work with")
|
||||
|
||||
|
@ -233,7 +236,7 @@ def private_index(request, year):
|
|||
'position__id':p.pk,
|
||||
'position': p,
|
||||
} for p in positions]
|
||||
states = list(NomineePositionStateName.objects.values('slug', 'name')) + [{'slug': questionnaire_state, 'name': 'Questionnaire'}]
|
||||
states = [{'slug': questionnaire_state, 'name': 'Accepted and sent Questionnaire'}] + list(NomineePositionStateName.objects.values('slug', 'name'))
|
||||
positions = set([ n.position for n in all_nominee_positions.order_by('position__name') ])
|
||||
for s in stats:
|
||||
for state in states:
|
||||
|
@ -278,6 +281,7 @@ def private_index(request, year):
|
|||
'selected_position': selected_position and int(selected_position) or None,
|
||||
'selected': 'index',
|
||||
'is_chair': is_chair,
|
||||
'mailto': mailto,
|
||||
})
|
||||
|
||||
|
||||
|
@ -763,7 +767,6 @@ def process_nomination_status(request, year, nominee_position_id, state, date, h
|
|||
'selected': 'feedback',
|
||||
'form': form })
|
||||
|
||||
|
||||
@role_required("Nomcom")
|
||||
@nomcom_private_key_required
|
||||
def view_feedback(request, year):
|
||||
|
@ -771,7 +774,7 @@ def view_feedback(request, year):
|
|||
nominees = Nominee.objects.get_by_nomcom(nomcom).not_duplicated().distinct()
|
||||
independent_feedback_types = []
|
||||
nominee_feedback_types = []
|
||||
for ft in FeedbackTypeName.objects.all():
|
||||
for ft in FeedbackTypeName.objects.filter(used=True):
|
||||
if ft.slug in settings.NOMINEE_FEEDBACK_TYPES:
|
||||
nominee_feedback_types.append(ft)
|
||||
else:
|
||||
|
@ -834,7 +837,8 @@ def view_feedback(request, year):
|
|||
'topics_feedback': topics_feedback,
|
||||
'independent_feedback': independent_feedback,
|
||||
'nominees_feedback': nominees_feedback,
|
||||
'nomcom': nomcom})
|
||||
'nomcom': nomcom,
|
||||
})
|
||||
|
||||
|
||||
@role_required("Nomcom Chair", "Nomcom Advisor")
|
||||
|
@ -920,23 +924,13 @@ def view_feedback_pending(request, year):
|
|||
formset = FeedbackFormSet(queryset=feedback_page.object_list)
|
||||
for form in formset.forms:
|
||||
form.set_nomcom(nomcom, request.user)
|
||||
type_dict = OrderedDict()
|
||||
for t in FeedbackTypeName.objects.all().order_by('pk'):
|
||||
rest = t.name
|
||||
slug = rest[0]
|
||||
rest = rest[1:]
|
||||
while slug in type_dict and rest:
|
||||
slug = rest[0]
|
||||
rest = rest[1]
|
||||
type_dict[slug] = t
|
||||
return render(request, 'nomcom/view_feedback_pending.html',
|
||||
{'year': year,
|
||||
'selected': 'feedback_pending',
|
||||
'formset': formset,
|
||||
'extra_step': extra_step,
|
||||
'type_dict': type_dict,
|
||||
'extra_ids': extra_ids,
|
||||
'types': FeedbackTypeName.objects.all().order_by('pk'),
|
||||
'types': FeedbackTypeName.objects.filter(used=True),
|
||||
'nomcom': nomcom,
|
||||
'is_chair_task' : True,
|
||||
'page': feedback_page,
|
||||
|
@ -947,22 +941,59 @@ def view_feedback_pending(request, year):
|
|||
@nomcom_private_key_required
|
||||
def view_feedback_unrelated(request, year):
|
||||
nomcom = get_nomcom_by_year(year)
|
||||
|
||||
if request.method == 'POST':
|
||||
if not nomcom.group.has_role(request.user, ['chair','advisor']):
|
||||
return HttpResponseForbidden('Restricted to roles: Nomcom Chair, Nomcom Advisor')
|
||||
feedback_id = request.POST.get('feedback_id', None)
|
||||
feedback = get_object_or_404(Feedback, id=feedback_id)
|
||||
type = request.POST.get('type', None)
|
||||
if type:
|
||||
if type == 'unclassified':
|
||||
feedback.type = None
|
||||
messages.success(request, 'The selected feedback has been de-classified. Please reclassify it in the Pending emails tab.')
|
||||
else:
|
||||
feedback.type = FeedbackTypeName.objects.get(slug=type)
|
||||
messages.success(request, f'The selected feedback has been reclassified as {feedback.type.name}.')
|
||||
feedback.save()
|
||||
else:
|
||||
return render(request, 'nomcom/view_feedback_unrelated.html',
|
||||
{'year': year,
|
||||
'nomcom': nomcom,
|
||||
'feedback_types': FeedbackTypeName.objects.filter(used=True).exclude(slug__in=settings.NOMINEE_FEEDBACK_TYPES),
|
||||
'reclassify_feedback': feedback,
|
||||
'is_chair_task' : True,
|
||||
})
|
||||
|
||||
feedback_types = []
|
||||
for ft in FeedbackTypeName.objects.exclude(slug__in=settings.NOMINEE_FEEDBACK_TYPES):
|
||||
for ft in FeedbackTypeName.objects.filter(used=True).exclude(slug__in=settings.NOMINEE_FEEDBACK_TYPES):
|
||||
feedback_types.append({'ft': ft,
|
||||
'feedback': ft.feedback_set.get_by_nomcom(nomcom)})
|
||||
|
||||
return render(request, 'nomcom/view_feedback_unrelated.html',
|
||||
{'year': year,
|
||||
'selected': 'view_feedback',
|
||||
'feedback_types': feedback_types,
|
||||
'nomcom': nomcom})
|
||||
'nomcom': nomcom,
|
||||
})
|
||||
|
||||
@role_required("Nomcom")
|
||||
@nomcom_private_key_required
|
||||
def view_feedback_topic(request, year, topic_id):
|
||||
nomcom = get_nomcom_by_year(year)
|
||||
# At present, the only feedback type for topics is 'comment'.
|
||||
# Reclassifying from 'comment' to 'comment' is a no-op,
|
||||
# so the only meaningful action is to de-classify it.
|
||||
if request.method == 'POST':
|
||||
nomcom = get_nomcom_by_year(year)
|
||||
if not nomcom.group.has_role(request.user, ['chair','advisor']):
|
||||
return HttpResponseForbidden('Restricted to roles: Nomcom Chair, Nomcom Advisor')
|
||||
feedback_id = request.POST.get('feedback_id', None)
|
||||
feedback = get_object_or_404(Feedback, id=feedback_id)
|
||||
feedback.type = None
|
||||
feedback.topics.clear()
|
||||
feedback.save()
|
||||
messages.success(request, 'The selected feedback has been de-classified. Please reclassify it in the Pending emails tab.')
|
||||
|
||||
topic = get_object_or_404(Topic, id=topic_id)
|
||||
nomcom = get_nomcom_by_year(year)
|
||||
feedback_types = FeedbackTypeName.objects.filter(slug__in=['comment',])
|
||||
|
||||
last_seen = TopicFeedbackLastSeen.objects.filter(reviewer=request.user.person,topic=topic).first()
|
||||
|
@ -974,18 +1005,42 @@ def view_feedback_topic(request, year, topic_id):
|
|||
|
||||
return render(request, 'nomcom/view_feedback_topic.html',
|
||||
{'year': year,
|
||||
'selected': 'view_feedback',
|
||||
'topic': topic,
|
||||
'feedback_types': feedback_types,
|
||||
'last_seen_time' : last_seen_time,
|
||||
'nomcom': nomcom})
|
||||
'nomcom': nomcom,
|
||||
})
|
||||
|
||||
@role_required("Nomcom")
|
||||
@nomcom_private_key_required
|
||||
def view_feedback_nominee(request, year, nominee_id):
|
||||
nomcom = get_nomcom_by_year(year)
|
||||
nominee = get_object_or_404(Nominee, id=nominee_id)
|
||||
feedback_types = FeedbackTypeName.objects.filter(slug__in=settings.NOMINEE_FEEDBACK_TYPES)
|
||||
feedback_types = FeedbackTypeName.objects.filter(used=True, slug__in=settings.NOMINEE_FEEDBACK_TYPES)
|
||||
|
||||
if request.method == 'POST':
|
||||
if not nomcom.group.has_role(request.user, ['chair','advisor']):
|
||||
return HttpResponseForbidden('Restricted to roles: Nomcom Chair, Nomcom Advisor')
|
||||
feedback_id = request.POST.get('feedback_id', None)
|
||||
feedback = get_object_or_404(Feedback, id=feedback_id)
|
||||
type = request.POST.get('type', None)
|
||||
if type:
|
||||
if type == 'unclassified':
|
||||
feedback.type = None
|
||||
feedback.nominees.clear()
|
||||
messages.success(request, 'The selected feedback has been de-classified. Please reclassify it in the Pending emails tab.')
|
||||
else:
|
||||
feedback.type = FeedbackTypeName.objects.get(slug=type)
|
||||
messages.success(request, f'The selected feedback has been reclassified as {feedback.type.name}.')
|
||||
feedback.save()
|
||||
else:
|
||||
return render(request, 'nomcom/view_feedback_nominee.html',
|
||||
{'year': year,
|
||||
'nomcom': nomcom,
|
||||
'feedback_types': feedback_types,
|
||||
'reclassify_feedback': feedback,
|
||||
'is_chair_task': True,
|
||||
})
|
||||
|
||||
last_seen = FeedbackLastSeen.objects.filter(reviewer=request.user.person,nominee=nominee).first()
|
||||
last_seen_time = (last_seen and last_seen.time) or datetime.datetime(year=1, month=1, day=1, tzinfo=datetime.timezone.utc)
|
||||
|
@ -996,11 +1051,11 @@ def view_feedback_nominee(request, year, nominee_id):
|
|||
|
||||
return render(request, 'nomcom/view_feedback_nominee.html',
|
||||
{'year': year,
|
||||
'selected': 'view_feedback',
|
||||
'nominee': nominee,
|
||||
'feedback_types': feedback_types,
|
||||
'last_seen_time' : last_seen_time,
|
||||
'nomcom': nomcom})
|
||||
'nomcom': nomcom,
|
||||
})
|
||||
|
||||
|
||||
@role_required("Nomcom Chair", "Nomcom Advisor")
|
||||
|
|
|
@ -145,6 +145,14 @@ class Person(models.Model):
|
|||
e = self.email_set.filter(active=True).order_by("-time").first()
|
||||
self._cached_email = e
|
||||
return self._cached_email
|
||||
def email_allowing_inactive(self):
|
||||
if not hasattr(self, "_cached_email_allowing_inactive"):
|
||||
e = self.email()
|
||||
if not e:
|
||||
e = self.email_set.order_by("-time").first()
|
||||
log.assertion(statement="e is not None", note=f"Person {self.pk} has no Email objects")
|
||||
self._cached_email_allowing_inactive = e
|
||||
return self._cached_email_allowing_inactive
|
||||
def email_address(self):
|
||||
e = self.email()
|
||||
if e:
|
||||
|
|
|
@ -112,6 +112,14 @@ class PersonTests(TestCase):
|
|||
r = self.client.get(url)
|
||||
self.assertContains(r, person.name, status_code=200)
|
||||
|
||||
def test_case_insensitive(self):
|
||||
# Case insensitive seach
|
||||
person = PersonFactory(name="Test Person")
|
||||
url = urlreverse("ietf.person.views.profile", kwargs={ "email_or_name": "test person"})
|
||||
r = self.client.get(url)
|
||||
self.assertContains(r, person.name, status_code=200)
|
||||
self.assertNotIn('More than one person', r.content.decode())
|
||||
|
||||
def test_person_profile_duplicates(self):
|
||||
# same Person name and email - should not show on the profile as multiple Person records
|
||||
person = PersonFactory(name="bazquux@example.com", user__email="bazquux@example.com")
|
||||
|
|
|
@ -69,11 +69,11 @@ def ajax_select2_search(request, model_name):
|
|||
|
||||
|
||||
def profile(request, email_or_name):
|
||||
aliases = Alias.objects.filter(name=email_or_name)
|
||||
aliases = Alias.objects.filter(name__iexact=email_or_name)
|
||||
persons = set(a.person for a in aliases)
|
||||
|
||||
if '@' in email_or_name:
|
||||
emails = Email.objects.filter(address=email_or_name)
|
||||
emails = Email.objects.filter(address__iexact=email_or_name)
|
||||
persons.update(e.person for e in emails)
|
||||
|
||||
persons = [p for p in persons if p and p.id]
|
||||
|
|
|
@ -84,7 +84,7 @@ class AbstractReviewerQueuePolicy:
|
|||
rotation_list = self._filter_unavailable_reviewers(rotation_list)
|
||||
return rotation_list
|
||||
|
||||
def return_reviewer_to_rotation_top(self, reviewer_person, wants_to_be_next):
|
||||
def set_wants_to_be_next(self, reviewer_person):
|
||||
"""
|
||||
Return a reviewer to the top of the rotation, e.g. because they rejected a review,
|
||||
and should retroactively not have been rotated over.
|
||||
|
@ -475,14 +475,13 @@ class RotateAlphabeticallyReviewerQueuePolicy(AbstractReviewerQueuePolicy):
|
|||
|
||||
return reviewers[next_reviewer_index:] + reviewers[:next_reviewer_index]
|
||||
|
||||
def return_reviewer_to_rotation_top(self, reviewer_person, wants_to_be_next):
|
||||
def set_wants_to_be_next(self, reviewer_person):
|
||||
# As RotateAlphabetically does not keep a full rotation list,
|
||||
# returning someone to a particular order is complex.
|
||||
# Instead, the "assign me next" flag is set.
|
||||
if wants_to_be_next:
|
||||
settings = self._reviewer_settings_for(reviewer_person)
|
||||
settings.request_assignment_next = wants_to_be_next
|
||||
settings.save()
|
||||
settings = self._reviewer_settings_for(reviewer_person)
|
||||
settings.request_assignment_next = True
|
||||
settings.save()
|
||||
|
||||
def _update_skip_next(self, rotation_pks, assignee_person):
|
||||
"""Decrement skip_next for all users skipped
|
||||
|
@ -570,14 +569,13 @@ class LeastRecentlyUsedReviewerQueuePolicy(AbstractReviewerQueuePolicy):
|
|||
rotation_list += reviewers_with_assignment
|
||||
return rotation_list
|
||||
|
||||
def return_reviewer_to_rotation_top(self, reviewer_person, wants_to_be_next):
|
||||
def set_wants_to_be_next(self, reviewer_person):
|
||||
# Reviewer rotation for this policy ignores rejected/withdrawn
|
||||
# reviews, so it automatically adjusts the position of someone
|
||||
# who rejected a review and no further action is needed.
|
||||
if wants_to_be_next:
|
||||
settings = self._reviewer_settings_for(reviewer_person)
|
||||
settings.request_assignment_next = wants_to_be_next
|
||||
settings.save()
|
||||
settings = self._reviewer_settings_for(reviewer_person)
|
||||
settings.request_assignment_next = True
|
||||
settings.save()
|
||||
|
||||
|
||||
QUEUE_POLICY_NAME_MAPPING = {
|
||||
|
|
|
@ -115,7 +115,7 @@ class _Wrapper(TestCase):
|
|||
return (ReviewerSettings.objects.filter(team=self.team, person=person).first()
|
||||
or ReviewerSettings(team=self.team, person=person))
|
||||
|
||||
def test_return_reviewer_to_rotation_top(self):
|
||||
def test_set_wants_to_be_next(self):
|
||||
# Subclass must implement this
|
||||
raise NotImplementedError
|
||||
|
||||
|
@ -507,11 +507,9 @@ class RotateAlphabeticallyReviewerQueuePolicyTest(_Wrapper.ReviewerQueuePolicyTe
|
|||
rotation = self.policy.default_reviewer_rotation_list()
|
||||
self.assertEqual(rotation, available_reviewers[2:] + available_reviewers[:1])
|
||||
|
||||
def test_return_reviewer_to_rotation_top(self):
|
||||
def test_set_wants_to_be_next(self):
|
||||
reviewer = self.append_reviewer()
|
||||
self.policy.return_reviewer_to_rotation_top(reviewer, False)
|
||||
self.assertFalse(self.reviewer_settings_for(reviewer).request_assignment_next)
|
||||
self.policy.return_reviewer_to_rotation_top(reviewer, True)
|
||||
self.policy.set_wants_to_be_next(reviewer)
|
||||
self.assertTrue(self.reviewer_settings_for(reviewer).request_assignment_next)
|
||||
|
||||
def test_update_policy_state_for_assignment(self):
|
||||
|
@ -725,11 +723,9 @@ class LeastRecentlyUsedReviewerQueuePolicyTest(_Wrapper.ReviewerQueuePolicyTestC
|
|||
self.assertEqual(self.policy.default_reviewer_rotation_list(),
|
||||
available_reviewers[2:] + [first_reviewer, second_reviewer])
|
||||
|
||||
def test_return_reviewer_to_rotation_top(self):
|
||||
def test_set_wants_to_be_next(self):
|
||||
reviewer = self.append_reviewer()
|
||||
self.policy.return_reviewer_to_rotation_top(reviewer, False)
|
||||
self.assertFalse(self.reviewer_settings_for(reviewer).request_assignment_next)
|
||||
self.policy.return_reviewer_to_rotation_top(reviewer, True)
|
||||
self.policy.set_wants_to_be_next(reviewer)
|
||||
self.assertTrue(self.reviewer_settings_for(reviewer).request_assignment_next)
|
||||
|
||||
def test_assign_reviewer_updates_skip_next_without_add_skip(self):
|
||||
|
|
|
@ -79,6 +79,11 @@ def review_assignments_to_list_for_docs(docs):
|
|||
|
||||
return extract_revision_ordered_review_assignments_for_documents_and_replaced(assignment_qs, doc_names)
|
||||
|
||||
def review_requests_to_list_for_docs(docs):
|
||||
review_requests_qs = ReviewRequest.objects.filter(Q(state_id='requested'))
|
||||
doc_names = [d.name for d in docs]
|
||||
return extract_revision_ordered_review_requests_for_documents_and_replaced(review_requests_qs, doc_names)
|
||||
|
||||
def augment_review_requests_with_events(review_reqs):
|
||||
req_dict = { r.pk: r for r in review_reqs }
|
||||
for e in ReviewRequestDocEvent.objects.filter(review_request__in=review_reqs, type__in=["assigned_review_request", "closed_review_request"]).order_by("time"):
|
||||
|
@ -589,10 +594,12 @@ def suggested_review_requests_for_team(team):
|
|||
and existing.reviewassignment_set.filter(state_id__in=("assigned", "accepted")).exists()
|
||||
and (not existing.requested_rev or existing.requested_rev == request.doc.rev))
|
||||
request_closed = existing.state_id not in ('requested','assigned')
|
||||
# Is there a review request for this document already in system
|
||||
requested = existing.state_id in ('requested') and (not existing.requested_rev or existing.requested_rev == request.doc.rev)
|
||||
# at least one assignment was completed for the requested version or the current doc version if no specific version was requested:
|
||||
some_assignment_completed = existing.reviewassignment_set.filter(reviewed_rev=existing.requested_rev or existing.doc.rev, state_id='completed').exists()
|
||||
|
||||
return any([no_review_document, no_review_rev, pending, request_closed, some_assignment_completed])
|
||||
return any([no_review_document, no_review_rev, pending, request_closed, requested, some_assignment_completed])
|
||||
|
||||
res = [r for r in requests.values()
|
||||
if not any(blocks(e, r) for e in existing_requests[r.doc_id])]
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright The IETF Trust 2007-2022, All Rights Reserved
|
||||
# Copyright The IETF Trust 2007-2023, All Rights Reserved
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
@ -10,6 +10,7 @@ import os
|
|||
import sys
|
||||
import datetime
|
||||
import warnings
|
||||
from hashlib import sha384
|
||||
from typing import Any, Dict, List, Tuple # pyflakes:ignore
|
||||
|
||||
warnings.simplefilter("always", DeprecationWarning)
|
||||
|
@ -657,6 +658,7 @@ URL_REGEXPS = {
|
|||
"acronym": r"(?P<acronym>[-a-z0-9]+)",
|
||||
"bofreq": r"(?P<name>bofreq-[-a-z0-9]+)",
|
||||
"charter": r"(?P<name>charter-[-a-z0-9]+)",
|
||||
"statement": r"(?P<name>statement-[-a-z0-9]+)",
|
||||
"date": r"(?P<date>\d{4}-\d{2}-\d{2})",
|
||||
"name": r"(?P<name>[A-Za-z0-9._+-]+?)",
|
||||
"document": r"(?P<document>[a-z][-a-z0-9]+)", # regular document names
|
||||
|
@ -668,7 +670,6 @@ URL_REGEXPS = {
|
|||
# Override this in settings_local.py if needed
|
||||
# *_PATH variables ends with a slash/ .
|
||||
|
||||
#DOCUMENT_PATH_PATTERN = '/a/www/ietf-ftp/{doc.type_id}/'
|
||||
DOCUMENT_PATH_PATTERN = '/a/ietfdata/doc/{doc.type_id}/'
|
||||
INTERNET_DRAFT_PATH = '/a/ietfdata/doc/draft/repository'
|
||||
INTERNET_DRAFT_PDF_PATH = '/a/www/ietf-datatracker/pdf/'
|
||||
|
@ -733,6 +734,9 @@ CACHES = {
|
|||
'LOCATION': '127.0.0.1:11211',
|
||||
'VERSION': __version__,
|
||||
'KEY_PREFIX': 'ietf:dt',
|
||||
'KEY_FUNCTION': lambda key, key_prefix, version: (
|
||||
f"{key_prefix}:{version}:{sha384(key.encode('utf8')).hexdigest()}"
|
||||
),
|
||||
},
|
||||
'sessions': {
|
||||
'BACKEND': 'ietf.utils.cache.LenientMemcacheCache',
|
||||
|
@ -802,7 +806,7 @@ NOMCOM_PUBLIC_KEYS_DIR = '/a/www/nomcom/public_keys/'
|
|||
NOMCOM_FROM_EMAIL = 'nomcom-chair-{year}@ietf.org'
|
||||
OPENSSL_COMMAND = '/usr/bin/openssl'
|
||||
DAYS_TO_EXPIRE_NOMINATION_LINK = ''
|
||||
NOMINEE_FEEDBACK_TYPES = ['comment', 'questio', 'nomina']
|
||||
NOMINEE_FEEDBACK_TYPES = ['comment', 'questio', 'nomina', 'obe']
|
||||
|
||||
# SlideSubmission settings
|
||||
SLIDE_STAGING_PATH = '/a/www/www6s/staging/'
|
||||
|
@ -1295,6 +1299,6 @@ if SERVER_MODE != 'production':
|
|||
# Cannot have this set to True if we're using http: from the dev-server:
|
||||
CSRF_COOKIE_SECURE = False
|
||||
CSRF_COOKIE_SAMESITE = 'Lax'
|
||||
CSRF_TRUSTED_ORIGINS += ['http://localhost:8000']
|
||||
CSRF_TRUSTED_ORIGINS += ['http://localhost:8000', 'http://127.0.0.1:8000', 'http://[::1]:8000']
|
||||
SESSION_COOKIE_SECURE = False
|
||||
SESSION_COOKIE_SAMESITE = 'Lax'
|
||||
|
|
|
@ -1,11 +1,5 @@
|
|||
@use "sass:map";
|
||||
|
||||
// FIXME: It's not clear why these three variables remain unset by bs5, but just
|
||||
// set them to placeholder values so the CSS embedded in the HTML validates.
|
||||
$btn-font-family: inherit !default;
|
||||
$nav-link-font-weight: inherit !default;
|
||||
$tooltip-margin: inherit !default;
|
||||
|
||||
$font-family-sans-serif: "Inter", system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", "Noto Sans", "Liberation Sans", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
|
||||
$font-family-monospace: "Noto Sans Mono", SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
// Based on https://github.com/martinthomson/rfc-txt-html/blob/db4be92247979541cd26a32b7b2bf0e044ca098b/txt.css
|
||||
// (Version hash to make future merging easier.)
|
||||
// Based on https://github.com/martinthomson/rfc-txt-html/blob/main/txt.css
|
||||
|
||||
:root {
|
||||
--line: 1.2em;
|
||||
--line: 1.3em;
|
||||
--block: 0 0 0 3ch;
|
||||
--paragraph: var(--line) 0 var(--line) 3ch;
|
||||
}
|
||||
|
@ -425,8 +424,8 @@ sup, sub {
|
|||
/* Authors */
|
||||
address, address.vcard {
|
||||
font-style: normal;
|
||||
// margin: 2em 0 var(--line) 3ch;
|
||||
margin-top: 2em;
|
||||
// margin: var(--line) 0 var(--line) 3ch
|
||||
margin-top: var(--line);
|
||||
margin-right: 0;
|
||||
margin-bottom: var(--line);
|
||||
margin-left: 3ch;
|
||||
|
|
|
@ -84,6 +84,34 @@ html {
|
|||
scroll-padding-top: 60px;
|
||||
}
|
||||
|
||||
// Make submenus open on hover.
|
||||
@include media-breakpoint-up(lg) {
|
||||
.dropdown-menu>li>ul {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.dropdown-menu>li:hover>ul {
|
||||
display: block;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@include media-breakpoint-up(md) {
|
||||
.leftmenu .nav>li>ul {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.leftmenu .nav>li:hover>ul {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
:is(.dropdown-menu, .leftmenu .nav) .dropdown-menu {
|
||||
top: 0;
|
||||
left: 100%;
|
||||
right: auto;
|
||||
}
|
||||
|
||||
// Make textareas in forms use a monospace font
|
||||
textarea.form-control {
|
||||
font-family: $font-family-code;
|
||||
|
@ -272,7 +300,7 @@ th,
|
|||
}
|
||||
|
||||
// Styles for d3.js graphical SVG timelines
|
||||
#timeline {
|
||||
#doc-timeline {
|
||||
font-size: small;
|
||||
|
||||
.axis path,
|
||||
|
|
|
@ -86,7 +86,7 @@ function scale_x() {
|
|||
}
|
||||
|
||||
function update_x_axis() {
|
||||
d3.select("#timeline svg .x.axis")
|
||||
d3.select("#doc-timeline svg .x.axis")
|
||||
.call(x_axis)
|
||||
.selectAll("text")
|
||||
.style("text-anchor", "end")
|
||||
|
@ -96,7 +96,7 @@ function update_x_axis() {
|
|||
function update_timeline() {
|
||||
bar_y = {};
|
||||
scale_x();
|
||||
var chart = d3.select("#timeline svg")
|
||||
var chart = d3.select("#doc-timeline svg")
|
||||
.attr("width", width);
|
||||
// enter data (skip the last pseudo entry)
|
||||
var bar = chart.selectAll("g")
|
||||
|
@ -111,12 +111,12 @@ function draw_timeline() {
|
|||
bar_height = parseFloat($("body")
|
||||
.css("line-height"));
|
||||
|
||||
var div = $("#timeline");
|
||||
var div = $("#doc-timeline");
|
||||
div.addClass("my-3");
|
||||
if (div.is(":empty")) {
|
||||
div.append("<svg></svg>");
|
||||
}
|
||||
var chart = d3.select("#timeline svg")
|
||||
var chart = d3.select("#doc-timeline svg")
|
||||
.attr("width", width);
|
||||
|
||||
var defs = chart.append("defs");
|
||||
|
@ -249,7 +249,7 @@ d3.json("doc.json")
|
|||
published: expiration_date(data[data.length - 1])
|
||||
});
|
||||
|
||||
width = $("#timeline")
|
||||
width = $("#doc-timeline")
|
||||
.width();
|
||||
draw_timeline();
|
||||
}
|
||||
|
@ -258,11 +258,11 @@ d3.json("doc.json")
|
|||
$(window)
|
||||
.on({
|
||||
resize: function () {
|
||||
var g = $("#timeline svg");
|
||||
var g = $("#doc-timeline svg");
|
||||
g.remove();
|
||||
width = $("#timeline")
|
||||
width = $("#doc-timeline")
|
||||
.width();
|
||||
$("#timeline")
|
||||
$("#doc-timeline")
|
||||
.append(g);
|
||||
update_timeline();
|
||||
}
|
||||
|
|
|
@ -487,13 +487,13 @@ $(function () {
|
|||
|
||||
// Disable a particular swap modal radio input
|
||||
let updateSwapRadios = function (labels, radios, disableValue, datePrecision) {
|
||||
labels.removeClass('text-muted');
|
||||
labels.removeClass('text-body-secondary');
|
||||
radios.prop('disabled', false);
|
||||
radios.prop('checked', false);
|
||||
// disable the input requested by value
|
||||
let disableInput = radios.filter('[value="' + disableValue + '"]');
|
||||
if (disableInput) {
|
||||
disableInput.parent().addClass('text-muted');
|
||||
disableInput.parent().addClass('text-body-secondary');
|
||||
disableInput.prop('disabled', true);
|
||||
}
|
||||
if (officialSchedule) {
|
||||
|
@ -502,7 +502,7 @@ $(function () {
|
|||
const past_radios = radios.filter(
|
||||
(_, radio) => parseISOTimestamp(radio.closest('*[data-start]').dataset.start).isSameOrBefore(now, datePrecision)
|
||||
);
|
||||
past_radios.parent().addClass('text-muted');
|
||||
past_radios.parent().addClass('text-body-secondary');
|
||||
past_radios.prop('disabled', true);
|
||||
}
|
||||
return disableInput; // return the input that was specifically disabled, if any
|
||||
|
@ -859,10 +859,10 @@ $(function () {
|
|||
.not('.hidden')
|
||||
.length === 0) {
|
||||
purpose_input.setAttribute('disabled', 'disabled');
|
||||
purpose_input.closest('.session-purpose-toggle').classList.add('text-muted');
|
||||
purpose_input.closest('.session-purpose-toggle').classList.add('text-body-secondary');
|
||||
} else {
|
||||
purpose_input.removeAttribute('disabled');
|
||||
purpose_input.closest('.session-purpose-toggle').classList.remove('text-muted');
|
||||
purpose_input.closest('.session-purpose-toggle').classList.remove('text-body-secondary');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -93,18 +93,6 @@ $(document)
|
|||
|
||||
$(document)
|
||||
.ready(function () {
|
||||
|
||||
function dropdown_hover(e) {
|
||||
var navbar = $(this)
|
||||
.closest(".navbar");
|
||||
if (navbar.length === 0 || navbar.find(".navbar-toggler")
|
||||
.is(":hidden")) {
|
||||
$(this)
|
||||
.children(".dropdown-toggle")
|
||||
.dropdown(e.type == "mouseenter" ? "show" : "hide");
|
||||
}
|
||||
}
|
||||
|
||||
// load data for the menu
|
||||
$.ajax({
|
||||
url: $(document.body)
|
||||
|
@ -140,9 +128,6 @@ $(document)
|
|||
}
|
||||
attachTo.append(menu.join(""));
|
||||
}
|
||||
|
||||
$("ul.nav li.dropdown, ul.nav li.dropend")
|
||||
.on("mouseenter mouseleave", dropdown_hover);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
@ -184,7 +169,7 @@ $(function () {
|
|||
.after($(`
|
||||
<div class="col-xl-2 ps-0 small">
|
||||
<div id="righthand-panel" class="position-fixed col-xl-2 bg-light d-flex flex-column justify-content-between align-items-start">
|
||||
<nav id="righthand-nav" class="navbar navbar-light w-100 overflow-auto align-items-start flex-fill"></nav>
|
||||
<nav id="righthand-nav" class="navbar w-100 overflow-auto align-items-start flex-fill"></nav>
|
||||
</div>
|
||||
</div>
|
||||
`));
|
||||
|
|
|
@ -34,9 +34,9 @@
|
|||
.removeClass("label-danger");
|
||||
|
||||
widget
|
||||
.find(".text-muted")
|
||||
.find(".text-body-secondary")
|
||||
.addClass("form-text")
|
||||
.removeClass("text-muted");
|
||||
.removeClass("text-body-secondary");
|
||||
|
||||
self.initListeners();
|
||||
},
|
||||
|
|
29
ietf/static/js/upload_statement.js
Normal file
29
ietf/static/js/upload_statement.js
Normal file
|
@ -0,0 +1,29 @@
|
|||
$(document)
|
||||
.ready(function () {
|
||||
var form = $("form.upload-content");
|
||||
// review submission selection
|
||||
form.find("[name=statement_submission]")
|
||||
.on("click change", function () {
|
||||
var val = form.find("[name=statement_submission]:checked")
|
||||
.val();
|
||||
|
||||
var shouldBeVisible = {
|
||||
enter: ['[name="statement_content"]'],
|
||||
upload: ['[name="statement_file"]'],
|
||||
};
|
||||
|
||||
for (var v in shouldBeVisible) {
|
||||
for (var i in shouldBeVisible[v]) {
|
||||
var selector = shouldBeVisible[v][i];
|
||||
var row = form.find(selector);
|
||||
if (!row.is(".row"))
|
||||
row = row.closest(".row");
|
||||
if ($.inArray(selector, shouldBeVisible[val]) != -1)
|
||||
row.show();
|
||||
else
|
||||
row.hide();
|
||||
}
|
||||
}
|
||||
})
|
||||
.trigger("change");
|
||||
});
|
|
@ -2878,7 +2878,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{'id': str(s.pk), 'state': 'validating'},
|
||||
{'id': str(s.pk), 'state': 'validating', 'state_desc': s.state.name},
|
||||
)
|
||||
|
||||
s.state_id = 'uploaded'
|
||||
|
@ -2887,7 +2887,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
self.assertEqual(r.status_code, 200)
|
||||
self.assertEqual(
|
||||
r.json(),
|
||||
{'id': str(s.pk), 'state': 'uploaded'},
|
||||
{'id': str(s.pk), 'state': 'uploaded', 'state_desc': s.state.name},
|
||||
)
|
||||
|
||||
# try an invalid one
|
||||
|
|
|
@ -183,6 +183,7 @@ def api_submission_status(request, submission_id):
|
|||
{
|
||||
'id': str(submission.pk),
|
||||
'state': submission.state.slug,
|
||||
'state_desc': submission.state.name,
|
||||
}
|
||||
)
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@
|
|||
data-group-menu-data-url="{% url 'ietf.group.views.group_menu_data' %}">
|
||||
{% analytical_body_top %}
|
||||
<a class="visually-hidden visually-hidden-focusable" href="#content">Skip to main content</a>
|
||||
<nav class="navbar navbar-expand-lg {% if server_mode and server_mode != "production" %} navbar-light bg-warning {% else %} navbar-dark bg-secondary {% endif %} {% if navbar_mode %} {{ navbar_mode }} {% else %} fixed-top {% endif %}">
|
||||
<nav class="navbar navbar-expand-lg {% if server_mode and server_mode != "production" %} bg-warning {% else %} bg-secondary {% endif %} {% if navbar_mode %} {{ navbar_mode }} {% else %} fixed-top {% endif %}">
|
||||
<div class="container-fluid">
|
||||
<a class="navbar-brand" href="/">
|
||||
<img alt="IETF Logo" class="me-2"
|
||||
|
@ -126,7 +126,7 @@
|
|||
<a href="https://www.rfc-editor.org/" class="p-3 text-nowrap">RFC Editor</a>
|
||||
<a href="https://www.iana.org/" class="p-3">IANA</a>
|
||||
<a href="https://www.ietf.org/privacy-statement/" class="p-3 text-nowrap">Privacy Statement</a>
|
||||
<div class="small text-muted py-3">
|
||||
<div class="small text-body-secondary py-3">
|
||||
{% if version_num %}
|
||||
<a class="mx-2" href="/release/about">About IETF Datatracker</a>
|
||||
<span class="mx-2">
|
||||
|
|
|
@ -10,7 +10,6 @@
|
|||
<a href="#"
|
||||
class="nav-link dropdown-toggle"
|
||||
role="button"
|
||||
data-bs-auto-close="false"
|
||||
data-bs-toggle="dropdown"
|
||||
aria-expanded="false">
|
||||
Groups
|
||||
|
|
|
@ -32,7 +32,7 @@
|
|||
</p>
|
||||
{% endif %}
|
||||
<h2>Add new subscription</h2>
|
||||
<p class="text-muted">
|
||||
<p class="text-body-secondary">
|
||||
The email addresses you can choose between are those registered in
|
||||
<a href="{% url "ietf.ietfauth.views.profile" %}">your profile</a>.
|
||||
</p>
|
||||
|
|
|
@ -103,7 +103,7 @@
|
|||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<small class="text-center text-muted">
|
||||
<small class="text-center text-body-secondary">
|
||||
Add <code>ietf.context_processors.sql_debug</code> to <code>settings.DEV_TEMPLATE_CONTEXT_PROCESSORS</code> to turn on the SQL statement table.
|
||||
</small>
|
||||
{% endif %}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
{% load ietf_filters %}
|
||||
|
||||
{% if prev or count %}
|
||||
<span{% if count == 0 %} class="text-muted"{% endif %}>{{ count }}</span>
|
||||
<span{% if count == 0 %} class="text-body-secondary"{% endif %}>{{ count }}</span>
|
||||
{% if user|has_role:"Area Director,Secretariat" %}
|
||||
<i data-bs-toggle="popover"
|
||||
{% if count != prev %}
|
||||
|
@ -21,11 +21,11 @@
|
|||
{% endif %}
|
||||
{% with label.2 as up_is_good %}
|
||||
{% if prev < count %}
|
||||
class="bi bi-arrow-up-right-circle{% if count %}-fill{% endif %} {{ up_is_good|yesno:'text-success,text-danger,text-muted' }}"
|
||||
class="bi bi-arrow-up-right-circle{% if count %}-fill{% endif %} {{ up_is_good|yesno:'text-success,text-danger,text-body-secondary' }}"
|
||||
{% elif prev > count %}
|
||||
class="bi bi-arrow-down-right-circle{% if count %}-fill{% endif %} {{ up_is_good|yesno:'text-danger,text-success,text-muted' }}"
|
||||
class="bi bi-arrow-down-right-circle{% if count %}-fill{% endif %} {{ up_is_good|yesno:'text-danger,text-success,text-body-secondary' }}"
|
||||
{% else %}
|
||||
class="bi bi-arrow-right-circle text-muted"
|
||||
class="bi bi-arrow-right-circle text-body-secondary"
|
||||
{% endif %}
|
||||
></i>
|
||||
{% endwith %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Add comment
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Add document to session
|
||||
<br>
|
||||
<small class="text-muted">{{ doc.name }}
|
||||
<small class="text-body-secondary">{{ doc.name }}
|
||||
<br>
|
||||
{{ doc.title }}</small>
|
||||
</h1>
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<h1>
|
||||
Approval announcement writeup
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Approve ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Approve downward references
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<p>
|
||||
The ballot for
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
<h1>
|
||||
Ballot issued
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<p>
|
||||
Ballot for
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Clear ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Defer ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Change position for {{ balloter.plain_name }}
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
{% if ballot.ballot_type.question %}
|
||||
<div class="alert alert-warning my-3">
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
<h1>
|
||||
Issue ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<p class="mt-3">
|
||||
{{ question }}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Close ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<p>
|
||||
{{ question }}
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<h1 class="mb-3">
|
||||
Last call text
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1 class="mb-3">
|
||||
RFC Editor Note
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
{% bootstrap_messages %}
|
||||
<form method="post">
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<h1>
|
||||
Issue ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<p class="mt-3">
|
||||
{{ question }}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Close ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<p>
|
||||
{{ question }}
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
<h1>
|
||||
Send ballot position for {{ balloter }}
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form class="mt-3" method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Undefer ballot
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form class="undefer mt-3" method="post">
|
||||
{% csrf_token %}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<h1>
|
||||
Ballot writeup and notes
|
||||
<br>
|
||||
<small class="text-muted">{{ doc }}</small>
|
||||
<small class="text-body-secondary">{{ doc }}</small>
|
||||
</h1>
|
||||
<form method="post">
|
||||
{% csrf_token %}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue