ci: merge main to release
commit 554e73657e

.github/workflows/build-base-app.yml (2 changes, vendored)

@@ -17,7 +17,7 @@ jobs:
packages: write

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Set up QEMU
uses: docker/setup-qemu-action@v2
.github/workflows/build-celery-worker.yml (2 changes, vendored)

@@ -19,7 +19,7 @@ jobs:
packages: write

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Set up QEMU
uses: docker/setup-qemu-action@v2
.github/workflows/build-mq-broker.yml (2 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
packages: write

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Set up QEMU
uses: docker/setup-qemu-action@v2
.github/workflows/build.yml (171 changes, vendored)

@@ -52,7 +52,7 @@ jobs:
to_tag: ${{ steps.semver.outputs.current }}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -100,171 +100,22 @@ jobs:
# -----------------------------------------------------------------
# TESTS
# -----------------------------------------------------------------
tests-python:
name: Run Tests (Python)

tests:
name: Run Tests
uses: ./.github/workflows/tests.yml
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
needs: [prepare]
runs-on: ubuntu-latest
container: ghcr.io/ietf-tools/datatracker-app-base:latest

services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest

steps:
- uses: actions/checkout@v3

- name: Prepare for tests
run: |
chmod +x ./dev/tests/prepare.sh
sh ./dev/tests/prepare.sh

- name: Ensure DB is ready
run: |
/usr/local/bin/wait-for db:5432 -- echo "DB ready"

- name: Run all tests
shell: bash
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate --fake-initial
echo "Validating migrations..."
if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then
echo "Model changes without migrations found."
exit 1
fi
echo "Running tests..."
if [[ "x${{ github.event.inputs.ignoreLowerCoverage }}" == "xtrue" ]]; then
echo "Lower coverage failures will be ignored."
./ietf/manage.py test --validate-html-harder --settings=settings_test --ignore-lower-coverage
else
./ietf/manage.py test --validate-html-harder --settings=settings_test
fi
coverage xml
with:
ignoreLowerCoverage: ${{ github.event.inputs.ignoreLowerCoverage == 'true' }}

- name: Upload Coverage Results to Codecov
uses: codecov/codecov-action@v3.1.4
with:
files: coverage.xml

- name: Convert Coverage Results
if: ${{ always() }}
run: |
mv latest-coverage.json coverage.json

- name: Upload Coverage Results as Build Artifact
uses: actions/upload-artifact@v3
if: ${{ always() }}
with:
name: coverage
path: coverage.json

tests-playwright:
name: Run Tests (Playwright)
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
needs: [prepare]
runs-on: macos-latest
strategy:
fail-fast: false
matrix:
project: [chromium, firefox]

steps:
- uses: actions/checkout@v3

- uses: actions/setup-node@v3
with:
node-version: '18'

- name: Run all tests
run: |
echo "Installing dependencies..."
yarn
echo "Installing Playwright..."
cd playwright
mkdir test-results
npm ci
npx playwright install --with-deps ${{ matrix.project }}
echo "Running tests..."
npx playwright test --project=${{ matrix.project }}

- name: Upload Report
uses: actions/upload-artifact@v3
if: ${{ always() }}
continue-on-error: true
with:
name: playwright-results-${{ matrix.project }}
path: playwright/test-results/
if-no-files-found: ignore

tests-playwright-legacy:
name: Run Tests (Playwright Legacy)
if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
needs: [prepare]
runs-on: ubuntu-latest
container: ghcr.io/ietf-tools/datatracker-app-base:latest
strategy:
fail-fast: false
matrix:
project: [chromium, firefox]

services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest

steps:
- uses: actions/checkout@v3

- name: Prepare for tests
run: |
chmod +x ./dev/tests/prepare.sh
sh ./dev/tests/prepare.sh

- name: Ensure DB is ready
run: |
/usr/local/bin/wait-for db:5432 -- echo "DB ready"

- name: Start Datatracker
run: |
echo "Running checks..."
./ietf/manage.py check
echo "Starting datatracker..."
./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local &
echo "Waiting for datatracker to be ready..."
/usr/local/bin/wait-for localhost:8000 -- echo "Datatracker ready"

- name: Run all tests
env:
# Required to get firefox to run as root:
HOME: ""
run: |
echo "Installing dependencies..."
yarn
echo "Installing Playwright..."
cd playwright
mkdir test-results
npm ci
npx playwright install --with-deps ${{ matrix.project }}
echo "Running tests..."
npx playwright test --project=${{ matrix.project }} -c playwright-legacy.config.js

- name: Upload Report
uses: actions/upload-artifact@v3
if: ${{ always() }}
continue-on-error: true
with:
name: playwright-legacy-results-${{ matrix.project }}
path: playwright/test-results/
if-no-files-found: ignore

# -----------------------------------------------------------------
# RELEASE
# -----------------------------------------------------------------
release:
name: Make Release
if: ${{ !failure() && !cancelled() }}
needs: [tests-python, tests-playwright, tests-playwright-legacy, prepare]
needs: [tests, prepare]
runs-on: ubuntu-latest
env:
SHOULD_DEPLOY: ${{needs.prepare.outputs.should_deploy}}

@@ -273,7 +124,7 @@ jobs:
TO_TAG: ${{needs.prepare.outputs.to_tag}}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -389,7 +240,7 @@ jobs:
notify:
name: Notify
if: ${{ always() }}
needs: [prepare, tests-python, tests-playwright, tests-playwright-legacy, release]
needs: [prepare, tests, release]
runs-on: ubuntu-latest
env:
PKG_VERSION: ${{needs.prepare.outputs.pkg_version}}

@@ -456,7 +307,7 @@ jobs:
PKG_VERSION: ${{needs.prepare.outputs.pkg_version}}

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Download a Release Artifact
uses: actions/download-artifact@v3.0.2
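For context, the hunk above replaces build.yml's three inline test jobs with a single job that calls the new reusable workflow and passes its input through with:. The general caller pattern is that the job points uses: at a workflow file instead of defining its own runs-on and steps. A minimal sketch of such a caller follows; only the tests job mirrors this diff, the surrounding workflow is illustrative and not the committed file:

# caller workflow - minimal sketch of invoking a reusable workflow
name: Build
on:
  workflow_dispatch:
jobs:
  tests:
    # runs every job defined in the called workflow file
    uses: ./.github/workflows/tests.yml
    with:
      ignoreLowerCoverage: false  # must match an input declared under workflow_call in tests.yml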
.github/workflows/ci-run-tests.yml (153 changes, vendored)

@@ -1,4 +1,4 @@
name: Run All Tests
name: PR - Run All Tests

on:
pull_request:

@@ -12,150 +12,7 @@ on:
- 'package.json'

jobs:
tests-python:
name: Run Tests (Python)
runs-on: ubuntu-latest
container: ghcr.io/ietf-tools/datatracker-app-base:latest

services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest

steps:
- uses: actions/checkout@v3

- name: Prepare for tests
run: |
chmod +x ./dev/tests/prepare.sh
sh ./dev/tests/prepare.sh

- name: Ensure DB is ready
run: |
/usr/local/bin/wait-for db:5432 -- echo "DB ready"

- name: Run all tests
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate --fake-initial
echo "Validating migrations..."
if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then
echo "Model changes without migrations found."
echo ${MSG}
exit 1
fi
echo "Running tests..."
./ietf/manage.py test --validate-html-harder --settings=settings_test
coverage xml

- name: Upload Coverage Results to Codecov
uses: codecov/codecov-action@v3.1.4
with:
files: coverage.xml

- name: Convert Coverage Results
if: ${{ always() }}
run: |
mv latest-coverage.json coverage.json

- name: Upload Coverage Results as Build Artifact
uses: actions/upload-artifact@v3.0.0
if: ${{ always() }}
with:
name: coverage
path: coverage.json

tests-playwright:
name: Run Tests (Playwright)
runs-on: macos-latest
strategy:
fail-fast: false
matrix:
project: [chromium, firefox]

steps:
- uses: actions/checkout@v3

- uses: actions/setup-node@v3
with:
node-version: '18'

- name: Run all tests
run: |
echo "Installing dependencies..."
yarn
echo "Installing Playwright..."
cd playwright
mkdir test-results
npm ci
npx playwright install --with-deps ${{ matrix.project }}
echo "Running tests..."
npx playwright test --project=${{ matrix.project }}

- name: Upload Report
uses: actions/upload-artifact@v3.0.0
if: ${{ always() }}
continue-on-error: true
with:
name: playwright-results-${{ matrix.project }}
path: playwright/test-results/
if-no-files-found: ignore

tests-playwright-legacy:
name: Run Tests (Playwright Legacy)
runs-on: ubuntu-latest
container: ghcr.io/ietf-tools/datatracker-app-base:latest
strategy:
fail-fast: false
matrix:
project: [chromium, firefox]

services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest

steps:
- uses: actions/checkout@v3

- name: Prepare for tests
run: |
chmod +x ./dev/tests/prepare.sh
sh ./dev/tests/prepare.sh

- name: Ensure DB is ready
run: |
/usr/local/bin/wait-for db:5432 -- echo "DB ready"

- name: Start Datatracker
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate --fake-initial
echo "Starting datatracker..."
./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local &
echo "Waiting for datatracker to be ready..."
/usr/local/bin/wait-for localhost:8000 -- echo "Datatracker ready"

- name: Run all tests
env:
# Required to get firefox to run as root:
HOME: ""
run: |
echo "Installing dependencies..."
yarn
echo "Installing Playwright..."
cd playwright
mkdir test-results
npm ci
npx playwright install --with-deps ${{ matrix.project }}
echo "Running tests..."
npx playwright test --project=${{ matrix.project }} -c playwright-legacy.config.js

- name: Upload Report
uses: actions/upload-artifact@v3
if: ${{ always() }}
continue-on-error: true
with:
name: playwright-legacy-results-${{ matrix.project }}
path: playwright/test-results/
if-no-files-found: ignore
tests:
uses: ./.github/workflows/tests.yml
with:
ignoreLowerCoverage: false
.github/workflows/codeql-analysis.yml (2 changes, vendored)

@@ -26,7 +26,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Initialize CodeQL
uses: github/codeql-action/init@v2
.github/workflows/dependency-review.yml (2 changes, vendored)

@@ -15,6 +15,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: 'Dependency Review'
uses: actions/dependency-review-action@v3

@@ -29,7 +29,7 @@ jobs:
contents: read
packages: write
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Login to GitHub Container Registry
uses: docker/login-action@v2
.github/workflows/sandbox-refresh.yml (2 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
contents: read

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Refresh DBs
env:
.github/workflows/tests.yml (164 changes, vendored, new file)

@@ -0,0 +1,164 @@
name: Reusable Tests Workflow

on:
workflow_call:
inputs:
ignoreLowerCoverage:
description: 'Ignore Lower Coverage'
default: false
required: true
type: boolean

jobs:
tests-python:
name: Python Tests
runs-on: ubuntu-latest
container: ghcr.io/ietf-tools/datatracker-app-base:latest

services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest

steps:
- uses: actions/checkout@v4

- name: Prepare for tests
run: |
chmod +x ./dev/tests/prepare.sh
sh ./dev/tests/prepare.sh

- name: Ensure DB is ready
run: |
/usr/local/bin/wait-for db:5432 -- echo "DB ready"

- name: Run all tests
shell: bash
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate --fake-initial
echo "Validating migrations..."
if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then
echo "Model changes without migrations found."
exit 1
fi
echo "Running tests..."
if [[ "x${{ github.event.inputs.ignoreLowerCoverage }}" == "xtrue" ]]; then
echo "Lower coverage failures will be ignored."
./ietf/manage.py test --validate-html-harder --settings=settings_test --ignore-lower-coverage
else
./ietf/manage.py test --validate-html-harder --settings=settings_test
fi
coverage xml

- name: Upload Coverage Results to Codecov
uses: codecov/codecov-action@v3.1.4
with:
files: coverage.xml

- name: Convert Coverage Results
if: ${{ always() }}
run: |
mv latest-coverage.json coverage.json

- name: Upload Coverage Results as Build Artifact
uses: actions/upload-artifact@v3
if: ${{ always() }}
with:
name: coverage
path: coverage.json

tests-playwright:
name: Playwright Tests
runs-on: macos-latest
strategy:
fail-fast: false
matrix:
project: [chromium, firefox]

steps:
- uses: actions/checkout@v4

- uses: actions/setup-node@v3
with:
node-version: '18'

- name: Run all tests
run: |
echo "Installing dependencies..."
yarn
echo "Installing Playwright..."
cd playwright
mkdir test-results
npm ci
npx playwright install --with-deps ${{ matrix.project }}
echo "Running tests..."
npx playwright test --project=${{ matrix.project }}

- name: Upload Report
uses: actions/upload-artifact@v3
if: ${{ always() }}
continue-on-error: true
with:
name: playwright-results-${{ matrix.project }}
path: playwright/test-results/
if-no-files-found: ignore

tests-playwright-legacy:
name: Playwright Legacy Tests
runs-on: ubuntu-latest
container: ghcr.io/ietf-tools/datatracker-app-base:latest
strategy:
fail-fast: false
matrix:
project: [chromium, firefox]

services:
db:
image: ghcr.io/ietf-tools/datatracker-db:latest

steps:
- uses: actions/checkout@v4

- name: Prepare for tests
run: |
chmod +x ./dev/tests/prepare.sh
sh ./dev/tests/prepare.sh

- name: Ensure DB is ready
run: |
/usr/local/bin/wait-for db:5432 -- echo "DB ready"

- name: Start Datatracker
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate --fake-initial
echo "Starting datatracker..."
./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local &
echo "Waiting for datatracker to be ready..."
/usr/local/bin/wait-for localhost:8000 -- echo "Datatracker ready"

- name: Run all tests
env:
# Required to get firefox to run as root:
HOME: ""
run: |
echo "Installing dependencies..."
yarn
echo "Installing Playwright..."
cd playwright
mkdir test-results
npm ci
npx playwright install --with-deps ${{ matrix.project }}
echo "Running tests..."
npx playwright test --project=${{ matrix.project }} -c playwright-legacy.config.js

- name: Upload Report
uses: actions/upload-artifact@v3
if: ${{ always() }}
continue-on-error: true
with:
name: playwright-legacy-results-${{ matrix.project }}
path: playwright/test-results/
if-no-files-found: ignore
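The new tests.yml above is the callee side of that pattern: its input is declared under on.workflow_call, and a called workflow reads such inputs through the inputs context. A minimal sketch of the workflow_call shape follows, with an illustrative job body rather than the committed steps:

# reusable workflow - minimal sketch of the workflow_call shape (illustrative job body)
name: Reusable Tests Workflow
on:
  workflow_call:
    inputs:
      ignoreLowerCoverage:
        description: 'Ignore Lower Coverage'
        type: boolean
        required: true
        default: false
jobs:
  tests:
    runs-on: ubuntu-latest
    steps:
      # the inputs context carries the values passed by the caller's with: block
      - run: echo "ignoreLowerCoverage=${{ inputs.ignoreLowerCoverage }}"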
.pnp.cjs (179 changes, generated)
|
@ -46,29 +46,29 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["@parcel/transformer-inline-string", "npm:2.9.3"],\
|
||||
["@parcel/transformer-sass", "npm:2.9.3"],\
|
||||
["@popperjs/core", "npm:2.11.8"],\
|
||||
["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.3"],\
|
||||
["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.4"],\
|
||||
["@twuni/emojify", "npm:1.0.2"],\
|
||||
["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.3.3"],\
|
||||
["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.0"],\
|
||||
["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.1"],\
|
||||
["bootstrap-icons", "npm:1.10.5"],\
|
||||
["browser-fs-access", "npm:0.34.1"],\
|
||||
["browserlist", "npm:1.0.1"],\
|
||||
["c8", "npm:8.0.1"],\
|
||||
["caniuse-lite", "npm:1.0.30001519"],\
|
||||
["d3", "npm:7.8.5"],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-config-standard", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:17.1.0"],\
|
||||
["eslint-plugin-cypress", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.14.0"],\
|
||||
["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.1"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.2"],\
|
||||
["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\
|
||||
["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\
|
||||
["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0"],\
|
||||
["file-saver", "npm:2.0.5"],\
|
||||
["highcharts", "npm:11.1.0"],\
|
||||
["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.2.0"],\
|
||||
["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.3.0"],\
|
||||
["ical.js", "npm:1.5.0"],\
|
||||
["jquery", "npm:3.7.0"],\
|
||||
["jquery", "npm:3.7.1"],\
|
||||
["jquery-migrate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.1"],\
|
||||
["js-cookie", "npm:3.0.5"],\
|
||||
["list.js", "npm:2.3.1"],\
|
||||
|
@ -89,7 +89,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["select2", "npm:4.1.0-rc.0"],\
|
||||
["select2-bootstrap-5-theme", "npm:1.3.0"],\
|
||||
["send", "npm:0.18.0"],\
|
||||
["shepherd.js", "npm:11.1.1"],\
|
||||
["shepherd.js", "npm:11.2.0"],\
|
||||
["slugify", "npm:1.6.6"],\
|
||||
["sortablejs", "npm:1.15.0"],\
|
||||
["vanillajs-datepicker", "npm:1.3.4"],\
|
||||
|
@ -438,12 +438,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:84c6b60ea80c2e474ae2eb1949a4b42a725b5ad125a348fd9ccd31d528ef15de82d28192a86b98baf21067fd8c90bd02753cac42e9ee96bf5f8084788455b3e4#npm:4.4.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/@eslint-community-eslint-utils-virtual-5ceedd2a81/0/cache/@eslint-community-eslint-utils-npm-4.4.0-d1791bd5a3-cdfe3ae42b.zip/node_modules/@eslint-community/eslint-utils/",\
|
||||
["virtual:0dd1c3662912d25464a284caa5dbde8cc315ca056be4ded44d6f67e20c4895461cf49fc7bff27c35f254bdb0924477031e3e50d50a333908daaff17dcf43b01d#npm:4.4.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/@eslint-community-eslint-utils-virtual-01223f6a8e/0/cache/@eslint-community-eslint-utils-npm-4.4.0-d1791bd5a3-cdfe3ae42b.zip/node_modules/@eslint-community/eslint-utils/",\
|
||||
"packageDependencies": [\
|
||||
["@eslint-community/eslint-utils", "virtual:84c6b60ea80c2e474ae2eb1949a4b42a725b5ad125a348fd9ccd31d528ef15de82d28192a86b98baf21067fd8c90bd02753cac42e9ee96bf5f8084788455b3e4#npm:4.4.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:0dd1c3662912d25464a284caa5dbde8cc315ca056be4ded44d6f67e20c4895461cf49fc7bff27c35f254bdb0924477031e3e50d50a333908daaff17dcf43b01d#npm:4.4.0"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-visitor-keys", "npm:3.3.0"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
|
@ -497,20 +497,31 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["@floating-ui/core", [\
|
||||
["npm:1.2.6", {\
|
||||
"packageLocation": "./.yarn/cache/@floating-ui-core-npm-1.2.6-083bec342c-e4aa96c435.zip/node_modules/@floating-ui/core/",\
|
||||
["npm:1.4.1", {\
|
||||
"packageLocation": "./.yarn/cache/@floating-ui-core-npm-1.4.1-fe89c45d92-be4ab864fe.zip/node_modules/@floating-ui/core/",\
|
||||
"packageDependencies": [\
|
||||
["@floating-ui/core", "npm:1.2.6"]\
|
||||
["@floating-ui/core", "npm:1.4.1"],\
|
||||
["@floating-ui/utils", "npm:0.1.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@floating-ui/dom", [\
|
||||
["npm:1.2.6", {\
|
||||
"packageLocation": "./.yarn/cache/@floating-ui-dom-npm-1.2.6-9d4be07ec3-2226c6c244.zip/node_modules/@floating-ui/dom/",\
|
||||
["npm:1.5.2", {\
|
||||
"packageLocation": "./.yarn/cache/@floating-ui-dom-npm-1.5.2-f1b8ca0c30-3c71eed50b.zip/node_modules/@floating-ui/dom/",\
|
||||
"packageDependencies": [\
|
||||
["@floating-ui/dom", "npm:1.2.6"],\
|
||||
["@floating-ui/core", "npm:1.2.6"]\
|
||||
["@floating-ui/dom", "npm:1.5.2"],\
|
||||
["@floating-ui/core", "npm:1.4.1"],\
|
||||
["@floating-ui/utils", "npm:0.1.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@floating-ui/utils", [\
|
||||
["npm:0.1.2", {\
|
||||
"packageLocation": "./.yarn/cache/@floating-ui-utils-npm-0.1.2-22eefe56f0-3e29fd3c69.zip/node_modules/@floating-ui/utils/",\
|
||||
"packageDependencies": [\
|
||||
["@floating-ui/utils", "npm:0.1.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
|
@ -2346,17 +2357,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["@rollup/pluginutils", [\
|
||||
["npm:5.0.3", {\
|
||||
"packageLocation": "./.yarn/cache/@rollup-pluginutils-npm-5.0.3-33f9e7f020-8efbdeac53.zip/node_modules/@rollup/pluginutils/",\
|
||||
["npm:5.0.4", {\
|
||||
"packageLocation": "./.yarn/cache/@rollup-pluginutils-npm-5.0.4-344c94a032-893d5805ac.zip/node_modules/@rollup/pluginutils/",\
|
||||
"packageDependencies": [\
|
||||
["@rollup/pluginutils", "npm:5.0.3"]\
|
||||
["@rollup/pluginutils", "npm:5.0.4"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.3", {\
|
||||
"packageLocation": "./.yarn/__virtual__/@rollup-pluginutils-virtual-e8c80fae3a/0/cache/@rollup-pluginutils-npm-5.0.3-33f9e7f020-8efbdeac53.zip/node_modules/@rollup/pluginutils/",\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.4", {\
|
||||
"packageLocation": "./.yarn/__virtual__/@rollup-pluginutils-virtual-72dfe81051/0/cache/@rollup-pluginutils-npm-5.0.4-344c94a032-893d5805ac.zip/node_modules/@rollup/pluginutils/",\
|
||||
"packageDependencies": [\
|
||||
["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.3"],\
|
||||
["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.4"],\
|
||||
["@types/estree", "npm:1.0.0"],\
|
||||
["@types/rollup", null],\
|
||||
["estree-walker", "npm:2.0.2"],\
|
||||
|
@ -2378,10 +2389,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:d54cf140bc899b791890b50d03f9737577eb8c2e0b480e2b1bc40b168f05a300b97a0338d73f013a8f7410236526aba6ee56a063db404e7ae64ba5f1e4e85cb8#npm:2.0.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/@sidvind-better-ajv-errors-virtual-23ff750c09/0/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip/node_modules/@sidvind/better-ajv-errors/",\
|
||||
["virtual:0f17270113a645b9ccd471681c6953a9ecf2cc875b79eb96d26d7cb579b1f042c2aaab59d6799ee85bf4e9b312f464a118c211e37c33fa47b3e11095c49e32d7#npm:2.0.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/@sidvind-better-ajv-errors-virtual-148105bc23/0/cache/@sidvind-better-ajv-errors-npm-2.0.0-3531bddef9-12b0d87855.zip/node_modules/@sidvind/better-ajv-errors/",\
|
||||
"packageDependencies": [\
|
||||
["@sidvind/better-ajv-errors", "virtual:d54cf140bc899b791890b50d03f9737577eb8c2e0b480e2b1bc40b168f05a300b97a0338d73f013a8f7410236526aba6ee56a063db404e7ae64ba5f1e4e85cb8#npm:2.0.0"],\
|
||||
["@sidvind/better-ajv-errors", "virtual:0f17270113a645b9ccd471681c6953a9ecf2cc875b79eb96d26d7cb579b1f042c2aaab59d6799ee85bf4e9b312f464a118c211e37c33fa47b3e11095c49e32d7#npm:2.0.0"],\
|
||||
["@babel/code-frame", "npm:7.16.7"],\
|
||||
["@types/ajv", null],\
|
||||
["ajv", "npm:8.11.0"],\
|
||||
|
@ -3180,10 +3191,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["npm:5.3.0", {\
|
||||
"packageLocation": "./.yarn/cache/bootstrap-npm-5.3.0-240c38a3b2-29a83cc8ca.zip/node_modules/bootstrap/",\
|
||||
["npm:5.3.1", {\
|
||||
"packageLocation": "./.yarn/cache/bootstrap-npm-5.3.1-9ad45c2765-f8176376aa.zip/node_modules/bootstrap/",\
|
||||
"packageDependencies": [\
|
||||
["bootstrap", "npm:5.3.0"]\
|
||||
["bootstrap", "npm:5.3.1"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
|
@ -3200,10 +3211,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
],\
|
||||
"linkType": "HARD"\
|
||||
}],\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/bootstrap-virtual-3c63ba6f80/0/cache/bootstrap-npm-5.3.0-240c38a3b2-29a83cc8ca.zip/node_modules/bootstrap/",\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.1", {\
|
||||
"packageLocation": "./.yarn/__virtual__/bootstrap-virtual-94a6bfc3bc/0/cache/bootstrap-npm-5.3.1-9ad45c2765-f8176376aa.zip/node_modules/bootstrap/",\
|
||||
"packageDependencies": [\
|
||||
["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.0"],\
|
||||
["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.1"],\
|
||||
["@popperjs/core", "npm:2.11.8"],\
|
||||
["@types/popperjs__core", null]\
|
||||
],\
|
||||
|
@ -4619,11 +4630,11 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["eslint", [\
|
||||
["npm:8.47.0", {\
|
||||
"packageLocation": "./.yarn/cache/eslint-npm-8.47.0-84c6b60ea8-1988617f70.zip/node_modules/eslint/",\
|
||||
["npm:8.48.0", {\
|
||||
"packageLocation": "./.yarn/cache/eslint-npm-8.48.0-0dd1c36629-f20b359a4f.zip/node_modules/eslint/",\
|
||||
"packageDependencies": [\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:84c6b60ea80c2e474ae2eb1949a4b42a725b5ad125a348fd9ccd31d528ef15de82d28192a86b98baf21067fd8c90bd02753cac42e9ee96bf5f8084788455b3e4#npm:4.4.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:0dd1c3662912d25464a284caa5dbde8cc315ca056be4ded44d6f67e20c4895461cf49fc7bff27c35f254bdb0924477031e3e50d50a333908daaff17dcf43b01d#npm:4.4.0"],\
|
||||
["@eslint-community/regexpp", "npm:4.8.0"],\
|
||||
["@eslint/eslintrc", "npm:2.1.2"],\
|
||||
["@eslint/js", "npm:8.48.0"],\
|
||||
|
@ -4680,9 +4691,9 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["@types/eslint-plugin-import", null],\
|
||||
["@types/eslint-plugin-n", null],\
|
||||
["@types/eslint-plugin-promise", null],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.1"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.2"],\
|
||||
["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
|
@ -4729,7 +4740,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["@types/typescript-eslint__parser", null],\
|
||||
["@typescript-eslint/parser", null],\
|
||||
["debug", "virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7"],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-import-resolver-node", "npm:0.3.7"],\
|
||||
["eslint-import-resolver-typescript", null],\
|
||||
["eslint-import-resolver-webpack", null]\
|
||||
|
@ -4762,7 +4773,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
"packageDependencies": [\
|
||||
["eslint-plugin-cypress", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.14.0"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["globals", "npm:13.21.0"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
|
@ -4785,7 +4796,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
"packageDependencies": [\
|
||||
["eslint-plugin-es", "virtual:5cccaf00e87dfff96dbbb5eaf7a3055373358b8114d6a1adfb32f54ed6b40ba06068d3aa1fdd8062899a0cad040f68c17cc6b72bac2cdbe9700f3d6330d112f3#npm:3.0.1"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-utils", "npm:2.1.0"],\
|
||||
["regexpp", "npm:3.2.0"]\
|
||||
],\
|
||||
|
@ -4804,14 +4815,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:adc54309e8e54b60324bd0d3562e4cdf4588bb7e8e9bf0e8567ae0b912e220b364ab900a1f69ea824481e4ed94aa6d687c737b8f554fa53b86231581c20d170a#npm:7.1.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/eslint-plugin-es-x-virtual-7882922717/0/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip/node_modules/eslint-plugin-es-x/",\
|
||||
["virtual:40d6f5c942a7ef0ae65f54bca96af56e7db0d52fb7321d7f8d1da62ed519e1f8c80fdfb1299383ab8a4a5e7182ecc1d4bae33d806b79817d62ed4ad091e77615#npm:7.1.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/eslint-plugin-es-x-virtual-3346953c48/0/cache/eslint-plugin-es-x-npm-7.1.0-35735e8bbc-a19924313c.zip/node_modules/eslint-plugin-es-x/",\
|
||||
"packageDependencies": [\
|
||||
["eslint-plugin-es-x", "virtual:adc54309e8e54b60324bd0d3562e4cdf4588bb7e8e9bf0e8567ae0b912e220b364ab900a1f69ea824481e4ed94aa6d687c737b8f554fa53b86231581c20d170a#npm:7.1.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:84c6b60ea80c2e474ae2eb1949a4b42a725b5ad125a348fd9ccd31d528ef15de82d28192a86b98baf21067fd8c90bd02753cac42e9ee96bf5f8084788455b3e4#npm:4.4.0"],\
|
||||
["eslint-plugin-es-x", "virtual:40d6f5c942a7ef0ae65f54bca96af56e7db0d52fb7321d7f8d1da62ed519e1f8c80fdfb1299383ab8a4a5e7182ecc1d4bae33d806b79817d62ed4ad091e77615#npm:7.1.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:0dd1c3662912d25464a284caa5dbde8cc315ca056be4ded44d6f67e20c4895461cf49fc7bff27c35f254bdb0924477031e3e50d50a333908daaff17dcf43b01d#npm:4.4.0"],\
|
||||
["@eslint-community/regexpp", "npm:4.5.1"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"]\
|
||||
["eslint", "npm:8.48.0"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
"@types/eslint",\
|
||||
|
@ -4841,7 +4852,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["array.prototype.flatmap", "npm:1.3.1"],\
|
||||
["debug", "virtual:65bed195431eadffc59e2238eb20cc12d9a1665bc7458ce780a9320ff795091b03cb5c4c2094938315ddd967b5b02c0f1df67b3ed435c69b7457092b7cc06ed8#npm:3.2.7"],\
|
||||
["doctrine", "npm:2.1.0"],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-import-resolver-node", "npm:0.3.7"],\
|
||||
["eslint-module-utils", "virtual:ef2ff17f0affe5aeeb05f2e27f2212e975bb78d898c026b74cc62e05a17de36abb35a54f0831f2ff5fced26e6128bfc2c0cf332f7c60149823619b008d0ea480#npm:2.8.0"],\
|
||||
["has", "npm:1.0.3"],\
|
||||
|
@ -4864,22 +4875,22 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["eslint-plugin-n", [\
|
||||
["npm:16.0.1", {\
|
||||
"packageLocation": "./.yarn/cache/eslint-plugin-n-npm-16.0.1-6a07bf1b46-407002bb06.zip/node_modules/eslint-plugin-n/",\
|
||||
["npm:16.0.2", {\
|
||||
"packageLocation": "./.yarn/cache/eslint-plugin-n-npm-16.0.2-6a256d6ab7-44cffe32a3.zip/node_modules/eslint-plugin-n/",\
|
||||
"packageDependencies": [\
|
||||
["eslint-plugin-n", "npm:16.0.1"]\
|
||||
["eslint-plugin-n", "npm:16.0.2"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.1", {\
|
||||
"packageLocation": "./.yarn/__virtual__/eslint-plugin-n-virtual-adc54309e8/0/cache/eslint-plugin-n-npm-16.0.1-6a07bf1b46-407002bb06.zip/node_modules/eslint-plugin-n/",\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.2", {\
|
||||
"packageLocation": "./.yarn/__virtual__/eslint-plugin-n-virtual-40d6f5c942/0/cache/eslint-plugin-n-npm-16.0.2-6a256d6ab7-44cffe32a3.zip/node_modules/eslint-plugin-n/",\
|
||||
"packageDependencies": [\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.1"],\
|
||||
["@eslint-community/eslint-utils", "virtual:84c6b60ea80c2e474ae2eb1949a4b42a725b5ad125a348fd9ccd31d528ef15de82d28192a86b98baf21067fd8c90bd02753cac42e9ee96bf5f8084788455b3e4#npm:4.4.0"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.2"],\
|
||||
["@eslint-community/eslint-utils", "virtual:0dd1c3662912d25464a284caa5dbde8cc315ca056be4ded44d6f67e20c4895461cf49fc7bff27c35f254bdb0924477031e3e50d50a333908daaff17dcf43b01d#npm:4.4.0"],\
|
||||
["@types/eslint", null],\
|
||||
["builtins", "npm:5.0.1"],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint-plugin-es-x", "virtual:adc54309e8e54b60324bd0d3562e4cdf4588bb7e8e9bf0e8567ae0b912e220b364ab900a1f69ea824481e4ed94aa6d687c737b8f554fa53b86231581c20d170a#npm:7.1.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-plugin-es-x", "virtual:40d6f5c942a7ef0ae65f54bca96af56e7db0d52fb7321d7f8d1da62ed519e1f8c80fdfb1299383ab8a4a5e7182ecc1d4bae33d806b79817d62ed4ad091e77615#npm:7.1.0"],\
|
||||
["ignore", "npm:5.2.4"],\
|
||||
["is-core-module", "npm:2.12.1"],\
|
||||
["minimatch", "npm:3.1.2"],\
|
||||
|
@ -4906,7 +4917,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
"packageDependencies": [\
|
||||
["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-plugin-es", "virtual:5cccaf00e87dfff96dbbb5eaf7a3055373358b8114d6a1adfb32f54ed6b40ba06068d3aa1fdd8062899a0cad040f68c17cc6b72bac2cdbe9700f3d6330d112f3#npm:3.0.1"],\
|
||||
["eslint-utils", "npm:2.1.0"],\
|
||||
["ignore", "npm:5.2.0"],\
|
||||
|
@ -4934,7 +4945,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
"packageDependencies": [\
|
||||
["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"]\
|
||||
["eslint", "npm:8.48.0"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
"@types/eslint",\
|
||||
|
@ -4955,9 +4966,9 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
"packageLocation": "./.yarn/__virtual__/eslint-plugin-vue-virtual-e39e5d6bef/0/cache/eslint-plugin-vue-npm-9.17.0-c32115eab8-2ef53a0387.zip/node_modules/eslint-plugin-vue/",\
|
||||
"packageDependencies": [\
|
||||
["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:84c6b60ea80c2e474ae2eb1949a4b42a725b5ad125a348fd9ccd31d528ef15de82d28192a86b98baf21067fd8c90bd02753cac42e9ee96bf5f8084788455b3e4#npm:4.4.0"],\
|
||||
["@eslint-community/eslint-utils", "virtual:0dd1c3662912d25464a284caa5dbde8cc315ca056be4ded44d6f67e20c4895461cf49fc7bff27c35f254bdb0924477031e3e50d50a333908daaff17dcf43b01d#npm:4.4.0"],\
|
||||
["@types/eslint", null],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["natural-compare", "npm:1.4.0"],\
|
||||
["nth-check", "npm:2.1.1"],\
|
||||
["postcss-selector-parser", "npm:6.0.13"],\
|
||||
|
@ -5640,20 +5651,20 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["html-validate", [\
|
||||
["npm:8.2.0", {\
|
||||
"packageLocation": "./.yarn/cache/html-validate-npm-8.2.0-51da0ed7e6-793287a454.zip/node_modules/html-validate/",\
|
||||
["npm:8.3.0", {\
|
||||
"packageLocation": "./.yarn/cache/html-validate-npm-8.3.0-71b7ba49e2-fd96a96fa7.zip/node_modules/html-validate/",\
|
||||
"packageDependencies": [\
|
||||
["html-validate", "npm:8.2.0"]\
|
||||
["html-validate", "npm:8.3.0"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.2.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/html-validate-virtual-d54cf140bc/0/cache/html-validate-npm-8.2.0-51da0ed7e6-793287a454.zip/node_modules/html-validate/",\
|
||||
["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.3.0", {\
|
||||
"packageLocation": "./.yarn/__virtual__/html-validate-virtual-0f17270113/0/cache/html-validate-npm-8.3.0-71b7ba49e2-fd96a96fa7.zip/node_modules/html-validate/",\
|
||||
"packageDependencies": [\
|
||||
["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.2.0"],\
|
||||
["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.3.0"],\
|
||||
["@babel/code-frame", "npm:7.16.7"],\
|
||||
["@html-validate/stylish", "npm:4.1.0"],\
|
||||
["@sidvind/better-ajv-errors", "virtual:d54cf140bc899b791890b50d03f9737577eb8c2e0b480e2b1bc40b168f05a300b97a0338d73f013a8f7410236526aba6ee56a063db404e7ae64ba5f1e4e85cb8#npm:2.0.0"],\
|
||||
["@sidvind/better-ajv-errors", "virtual:0f17270113a645b9ccd471681c6953a9ecf2cc875b79eb96d26d7cb579b1f042c2aaab59d6799ee85bf4e9b312f464a118c211e37c33fa47b3e11095c49e32d7#npm:2.0.0"],\
|
||||
["@types/jest", null],\
|
||||
["@types/jest-diff", null],\
|
||||
["@types/jest-snapshot", null],\
|
||||
|
@ -6323,10 +6334,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["jquery", [\
|
||||
["npm:3.7.0", {\
|
||||
"packageLocation": "./.yarn/cache/jquery-npm-3.7.0-a02a382bf4-907785e133.zip/node_modules/jquery/",\
|
||||
["npm:3.7.1", {\
|
||||
"packageLocation": "./.yarn/cache/jquery-npm-3.7.1-eeeac0f21e-4370b8139d.zip/node_modules/jquery/",\
|
||||
"packageDependencies": [\
|
||||
["jquery", "npm:3.7.0"]\
|
||||
["jquery", "npm:3.7.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
|
@ -6344,7 +6355,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
"packageDependencies": [\
|
||||
["jquery-migrate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.1"],\
|
||||
["@types/jquery", null],\
|
||||
["jquery", "npm:3.7.0"]\
|
||||
["jquery", "npm:3.7.1"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
"@types/jquery",\
|
||||
|
@ -8097,29 +8108,29 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["@parcel/transformer-inline-string", "npm:2.9.3"],\
|
||||
["@parcel/transformer-sass", "npm:2.9.3"],\
|
||||
["@popperjs/core", "npm:2.11.8"],\
|
||||
["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.3"],\
|
||||
["@rollup/pluginutils", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.0.4"],\
|
||||
["@twuni/emojify", "npm:1.0.2"],\
|
||||
["@vitejs/plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.3.3"],\
|
||||
["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.0"],\
|
||||
["bootstrap", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:5.3.1"],\
|
||||
["bootstrap-icons", "npm:1.10.5"],\
|
||||
["browser-fs-access", "npm:0.34.1"],\
|
||||
["browserlist", "npm:1.0.1"],\
|
||||
["c8", "npm:8.0.1"],\
|
||||
["caniuse-lite", "npm:1.0.30001519"],\
|
||||
["d3", "npm:7.8.5"],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-config-standard", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:17.1.0"],\
|
||||
["eslint-plugin-cypress", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.14.0"],\
|
||||
["eslint-plugin-import", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.28.1"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.1"],\
|
||||
["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.0.2"],\
|
||||
["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\
|
||||
["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\
|
||||
["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.17.0"],\
|
||||
["file-saver", "npm:2.0.5"],\
|
||||
["highcharts", "npm:11.1.0"],\
|
||||
["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.2.0"],\
|
||||
["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.3.0"],\
|
||||
["ical.js", "npm:1.5.0"],\
|
||||
["jquery", "npm:3.7.0"],\
|
||||
["jquery", "npm:3.7.1"],\
|
||||
["jquery-migrate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.1"],\
|
||||
["js-cookie", "npm:3.0.5"],\
|
||||
["list.js", "npm:2.3.1"],\
|
||||
|
@ -8140,7 +8151,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["select2", "npm:4.1.0-rc.0"],\
|
||||
["select2-bootstrap-5-theme", "npm:1.3.0"],\
|
||||
["send", "npm:0.18.0"],\
|
||||
["shepherd.js", "npm:11.1.1"],\
|
||||
["shepherd.js", "npm:11.2.0"],\
|
||||
["slugify", "npm:1.6.6"],\
|
||||
["sortablejs", "npm:1.15.0"],\
|
||||
["vanillajs-datepicker", "npm:1.3.4"],\
|
||||
|
@ -8388,11 +8399,11 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
}]\
|
||||
]],\
|
||||
["shepherd.js", [\
|
||||
["npm:11.1.1", {\
|
||||
"packageLocation": "./.yarn/cache/shepherd.js-npm-11.1.1-c87ab2ed73-1d2a0563b6.zip/node_modules/shepherd.js/",\
|
||||
["npm:11.2.0", {\
|
||||
"packageLocation": "./.yarn/cache/shepherd.js-npm-11.2.0-94b9af1487-0e71e63e51.zip/node_modules/shepherd.js/",\
|
||||
"packageDependencies": [\
|
||||
["shepherd.js", "npm:11.1.1"],\
|
||||
["@floating-ui/dom", "npm:1.2.6"],\
|
||||
["shepherd.js", "npm:11.2.0"],\
|
||||
["@floating-ui/dom", "npm:1.5.2"],\
|
||||
["deepmerge", "npm:4.3.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
|
@ -9151,7 +9162,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
|
|||
["vue-eslint-parser", "virtual:e39e5d6bef7a93bd3b21c5c9ba6ef825c92fc73c8d9c9e01699e1dc11e40fd3bc150ba16509e2cf59495cb098c32b2e4a85c0c21802fddeffc3208b01f4f5a16#npm:9.3.1"],\
|
||||
["@types/eslint", null],\
|
||||
["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\
|
||||
["eslint", "npm:8.47.0"],\
|
||||
["eslint", "npm:8.48.0"],\
|
||||
["eslint-scope", "npm:7.1.1"],\
|
||||
["eslint-visitor-keys", "npm:3.3.0"],\
|
||||
["espree", "npm:9.3.2"],\
|
||||
|
|
Binary files (contents not shown):

BIN .yarn/cache/@floating-ui-core-npm-1.4.1-fe89c45d92-be4ab864fe.zip (vendored, normal file)
BIN .yarn/cache/@floating-ui-dom-npm-1.5.2-f1b8ca0c30-3c71eed50b.zip (vendored, normal file)
BIN .yarn/cache/@floating-ui-utils-npm-0.1.2-22eefe56f0-3e29fd3c69.zip (vendored, normal file)
BIN .yarn/cache/bootstrap-npm-5.3.1-9ad45c2765-f8176376aa.zip (vendored, normal file)
BIN .yarn/cache/html-validate-npm-8.3.0-71b7ba49e2-fd96a96fa7.zip (vendored, normal file)
BIN .yarn/cache/jquery-npm-3.7.1-eeeac0f21e-4370b8139d.zip (vendored, normal file)
BIN .yarn/cache/shepherd.js-npm-11.2.0-94b9af1487-0e71e63e51.zip (vendored, normal file)
bin/add-old-drafts-from-archive.py (274 changes, executable file → normal file)

@@ -1,151 +1,157 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright The IETF Trust 2017-2019, All Rights Reserved
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from contextlib import closing
|
||||
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
print("This is only here as documention - please read the file")
|
||||
sys.exit(0)
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
# #!/usr/bin/env python
|
||||
# # Copyright The IETF Trust 2017-2019, All Rights Reserved
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.validators import validate_email, ValidationError
|
||||
from ietf.utils.draft import PlaintextDraft
|
||||
from ietf.submit.utils import update_authors
|
||||
from ietf.utils.timezone import date_today
|
||||
# import datetime
|
||||
# import os
|
||||
# import sys
|
||||
# One-off script: import draft revisions from the id-archive into the datatracker.

import datetime
import os
import sys
from pathlib import Path
# from contextlib import closing

os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

import django
django.setup()

from django.conf import settings
from django.core.validators import validate_email, ValidationError

import debug                            # pyflakes:ignore

from ietf.doc.models import Document, DocAlias, NewRevisionDocEvent, DocEvent, State
from ietf.person.models import Person
from ietf.utils.draft import PlaintextDraft
from ietf.submit.utils import update_authors
from ietf.utils.timezone import date_today

system = Person.objects.get(name="(System)")
expired = State.objects.get(type='draft', slug='expired')


def turn_into_unicode(s):
    # Best-effort conversion of author fields to text: try utf-8, then
    # latin-1, and give up with an empty string.
    if s is None:
        return ""
    if isinstance(s, str):
        return s
    else:
        try:
            return s.decode("utf-8")
        except UnicodeDecodeError:
            try:
                return s.decode("latin-1")
            except UnicodeDecodeError:
                return ""


names = set()
print('collecting draft names ...')
versions = 0
for p in Path(settings.INTERNET_DRAFT_PATH).glob('draft*.txt'):
    n = str(p).split('/')[-1].split('-')
    if n[-1][:2].isdigit():
        name = '-'.join(n[:-1])
        if '--' in name or '.txt' in name or '[' in name or '=' in name or '&' in name:
            continue
        if name.startswith('draft-draft-'):
            continue
        if name == 'draft-ietf-trade-iotp-v1_0-dsig':
            continue
        if len(n[-1]) != 6:
            continue
        if name.startswith('draft-mlee-'):
            continue
        names.add('-'.join(n[:-1]))

count = 0
print('iterating through names ...')
for name in sorted(names):
    if not Document.objects.filter(name=name).exists():
        paths = list(Path(settings.INTERNET_DRAFT_PATH).glob('%s-??.txt' % name))
        paths.sort()
        doc = None
        for p in paths:
            n = str(p).split('/')[-1].split('-')
            rev = n[-1][:2]
            with open(str(p), 'rb') as txt_file:
                raw = txt_file.read()
            try:
                text = raw.decode('utf8')
            except UnicodeDecodeError:
                text = raw.decode('latin1')
            try:
                draft = PlaintextDraft(text, txt_file.name, name_from_source=True)
            except Exception as e:
                print(name, rev, "Can't parse", p, ":", e)
                continue
            if draft.errors and list(draft.errors.keys()) != ['draftname']:
                print("Errors - could not process", name, rev,
                      datetime.datetime.fromtimestamp(p.stat().st_mtime, datetime.timezone.utc),
                      draft.errors, draft.get_title())
            else:
                time = datetime.datetime.fromtimestamp(p.stat().st_mtime, datetime.timezone.utc)
                if not doc:
                    doc = Document.objects.create(
                        name=name,
                        time=time,
                        type_id='draft',
                        title=draft.get_title(),
                        abstract=draft.get_abstract(),
                        rev=rev,
                        pages=draft.get_pagecount(),
                        words=draft.get_wordcount(),
                        expires=time + datetime.timedelta(settings.INTERNET_DRAFT_DAYS_TO_EXPIRE),
                    )
                    DocAlias.objects.create(name=doc.name).docs.add(doc)
                    doc.states.add(expired)

                # update authors
                authors = []
                for author in draft.get_author_list():
                    full_name, first_name, middle_initial, last_name, name_suffix, email, country, company = author
                    author_name = full_name.replace("\n", "").replace("\r", "").replace("<", "").replace(">", "").strip()
                    if email:
                        try:
                            validate_email(email)
                        except ValidationError:
                            email = ""
                    author_name = turn_into_unicode(author_name)
                    email = turn_into_unicode(email)
                    company = turn_into_unicode(company)
                    authors.append({
                        "name": author_name,
                        "email": email,
                        "affiliation": company,
                        "country": country
                    })
                # update_authors() expects a submission-like object with an
                # .authors attribute; build an empty object and attach the list.
                dummysubmission = type('', (), {})()  # https://stackoverflow.com/questions/19476816/creating-an-empty-object-in-python
                dummysubmission.authors = authors
                update_authors(doc, dummysubmission)

                # add a docevent with words explaining where this came from
                events = []
                e = NewRevisionDocEvent.objects.create(
                    type="new_revision",
                    doc=doc,
                    rev=rev,
                    by=system,
                    desc="New version available: <b>%s-%s.txt</b>" % (doc.name, doc.rev),
                    time=time,
                )
                events.append(e)
                e = DocEvent.objects.create(
                    type="comment",
                    doc=doc,
                    rev=rev,
                    by=system,
                    desc="Revision added from id-archive on %s by %s" % (date_today(), sys.argv[0]),
                    time=time,
                )
                events.append(e)
                doc.time = time
                doc.rev = rev
                doc.save_with_history(events)
                print("Added", name, rev)
|
54
dev/coverage-action/package-lock.json
generated
|
@@ -17,12 +17,12 @@
|
|||
"luxon": "3.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "8.47.0",
|
||||
"eslint": "8.48.0",
|
||||
"eslint-config-standard": "17.1.0",
|
||||
"eslint-plugin-import": "2.28.1",
|
||||
"eslint-plugin-node": "11.1.0",
|
||||
"eslint-plugin-promise": "6.1.1",
|
||||
"npm-check-updates": "16.10.16"
|
||||
"npm-check-updates": "16.13.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@aashutoshrathi/word-wrap": {
|
||||
|
@@ -1387,9 +1387,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/commander": {
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.0.tgz",
|
||||
"integrity": "sha512-zS5PnTI22FIRM6ylNW8G4Ap0IEOyk62fhLSD0+uHRT9McRCLGpkVNvao4bjimpK/GShynyQkFFxHhwMcETmduA==",
|
||||
"version": "10.0.1",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
|
||||
"integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
|
@@ -1765,15 +1765,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/eslint": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz",
|
||||
"integrity": "sha512-spUQWrdPt+pRVP1TTJLmfRNJJHHZryFmptzcafwSvHsceV81djHOdnEeDmkdotZyLNjDhrOasNK8nikkoG1O8Q==",
|
||||
"version": "8.48.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.48.0.tgz",
|
||||
"integrity": "sha512-sb6DLeIuRXxeM1YljSe1KEx9/YYeZFQWcV8Rq9HfigmdDEugjLEVEa1ozDjL6YDjBpQHPJxJzze+alxi4T3OLg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.2.0",
|
||||
"@eslint-community/regexpp": "^4.6.1",
|
||||
"@eslint/eslintrc": "^2.1.2",
|
||||
"@eslint/js": "^8.47.0",
|
||||
"@eslint/js": "8.48.0",
|
||||
"@humanwhocodes/config-array": "^0.11.10",
|
||||
"@humanwhocodes/module-importer": "^1.0.1",
|
||||
"@nodelib/fs.walk": "^1.2.8",
|
||||
|
@@ -3997,14 +3997,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/npm-check-updates": {
|
||||
"version": "16.10.16",
|
||||
"resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.10.16.tgz",
|
||||
"integrity": "sha512-d8mNYce/l8o5RHPE5ZUp2P1zj9poI7KWQCh5AsTIP3EhicONEhc63mLQQv4/nkCsMb3wCrikx6YOo4BOwN4+1w==",
|
||||
"version": "16.13.2",
|
||||
"resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.13.2.tgz",
|
||||
"integrity": "sha512-0pQI+k1y0JVwenB2gBc69tXFYfkckSVrNrlcn7TIrZfis4LnfdzakY/LYzZKt/lx37edN2isk3d2Zw4csptu/w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"chalk": "^5.3.0",
|
||||
"cli-table3": "^0.6.3",
|
||||
"commander": "^10.0.0",
|
||||
"commander": "^10.0.1",
|
||||
"fast-memoize": "^2.5.2",
|
||||
"find-up": "5.0.0",
|
||||
"fp-and-or": "^0.1.3",
|
||||
|
@@ -4016,6 +4016,7 @@
|
|||
"json-parse-helpfulerror": "^1.0.3",
|
||||
"jsonlines": "^0.1.1",
|
||||
"lodash": "^4.17.21",
|
||||
"make-fetch-happen": "^11.1.1",
|
||||
"minimatch": "^9.0.3",
|
||||
"p-map": "^4.0.0",
|
||||
"pacote": "15.2.0",
|
||||
|
@@ -4025,7 +4026,7 @@
|
|||
"rc-config-loader": "^4.1.3",
|
||||
"remote-git-tags": "^3.0.0",
|
||||
"rimraf": "^5.0.1",
|
||||
"semver": "^7.5.3",
|
||||
"semver": "^7.5.4",
|
||||
"semver-utils": "^1.1.4",
|
||||
"source-map-support": "^0.5.21",
|
||||
"spawn-please": "^2.0.1",
|
||||
|
@@ -7088,9 +7089,9 @@
|
|||
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg=="
|
||||
},
|
||||
"commander": {
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.0.tgz",
|
||||
"integrity": "sha512-zS5PnTI22FIRM6ylNW8G4Ap0IEOyk62fhLSD0+uHRT9McRCLGpkVNvao4bjimpK/GShynyQkFFxHhwMcETmduA==",
|
||||
"version": "10.0.1",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
|
||||
"integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==",
|
||||
"dev": true
|
||||
},
|
||||
"concat-map": {
|
||||
|
@@ -7372,15 +7373,15 @@
|
|||
"dev": true
|
||||
},
|
||||
"eslint": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.47.0.tgz",
|
||||
"integrity": "sha512-spUQWrdPt+pRVP1TTJLmfRNJJHHZryFmptzcafwSvHsceV81djHOdnEeDmkdotZyLNjDhrOasNK8nikkoG1O8Q==",
|
||||
"version": "8.48.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.48.0.tgz",
|
||||
"integrity": "sha512-sb6DLeIuRXxeM1YljSe1KEx9/YYeZFQWcV8Rq9HfigmdDEugjLEVEa1ozDjL6YDjBpQHPJxJzze+alxi4T3OLg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/eslint-utils": "^4.2.0",
|
||||
"@eslint-community/regexpp": "^4.6.1",
|
||||
"@eslint/eslintrc": "^2.1.2",
|
||||
"@eslint/js": "^8.47.0",
|
||||
"@eslint/js": "8.48.0",
|
||||
"@humanwhocodes/config-array": "^0.11.10",
|
||||
"@humanwhocodes/module-importer": "^1.0.1",
|
||||
"@nodelib/fs.walk": "^1.2.8",
|
||||
|
@@ -8987,14 +8988,14 @@
|
|||
}
|
||||
},
|
||||
"npm-check-updates": {
|
||||
"version": "16.10.16",
|
||||
"resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.10.16.tgz",
|
||||
"integrity": "sha512-d8mNYce/l8o5RHPE5ZUp2P1zj9poI7KWQCh5AsTIP3EhicONEhc63mLQQv4/nkCsMb3wCrikx6YOo4BOwN4+1w==",
|
||||
"version": "16.13.2",
|
||||
"resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.13.2.tgz",
|
||||
"integrity": "sha512-0pQI+k1y0JVwenB2gBc69tXFYfkckSVrNrlcn7TIrZfis4LnfdzakY/LYzZKt/lx37edN2isk3d2Zw4csptu/w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"chalk": "^5.3.0",
|
||||
"cli-table3": "^0.6.3",
|
||||
"commander": "^10.0.0",
|
||||
"commander": "^10.0.1",
|
||||
"fast-memoize": "^2.5.2",
|
||||
"find-up": "5.0.0",
|
||||
"fp-and-or": "^0.1.3",
|
||||
|
@@ -9006,6 +9007,7 @@
|
|||
"json-parse-helpfulerror": "^1.0.3",
|
||||
"jsonlines": "^0.1.1",
|
||||
"lodash": "^4.17.21",
|
||||
"make-fetch-happen": "^11.1.1",
|
||||
"minimatch": "^9.0.3",
|
||||
"p-map": "^4.0.0",
|
||||
"pacote": "15.2.0",
|
||||
|
@@ -9015,7 +9017,7 @@
|
|||
"rc-config-loader": "^4.1.3",
|
||||
"remote-git-tags": "^3.0.0",
|
||||
"rimraf": "^5.0.1",
|
||||
"semver": "^7.5.3",
|
||||
"semver": "^7.5.4",
|
||||
"semver-utils": "^1.1.4",
|
||||
"source-map-support": "^0.5.21",
|
||||
"spawn-please": "^2.0.1",
|
||||
|
|
|
@@ -14,11 +14,11 @@
|
|||
"luxon": "3.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "8.47.0",
|
||||
"eslint": "8.48.0",
|
||||
"eslint-config-standard": "17.1.0",
|
||||
"eslint-plugin-import": "2.28.1",
|
||||
"eslint-plugin-node": "11.1.0",
|
||||
"eslint-plugin-promise": "6.1.1",
|
||||
"npm-check-updates": "16.10.16"
|
||||
"npm-check-updates": "16.13.2"
|
||||
}
|
||||
}
|
||||
|
|
14
dev/del-old-packages/package-lock.json
generated
|
@@ -10,7 +10,7 @@
|
|||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@octokit/core": "^4.2.4",
|
||||
"luxon": "^3.4.2"
|
||||
"luxon": "^3.4.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/auth-token": {
|
||||
|
@@ -141,9 +141,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/luxon": {
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.2.tgz",
|
||||
"integrity": "sha512-uBoAVCVcajsrqy3pv7eo5jEUz1oeLmCcnMv8n4AJpT5hbpN9lUssAXibNElpbLce3Mhm9dyBzwYLs9zctM/0tA==",
|
||||
"version": "3.4.3",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz",
|
||||
"integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
|
@@ -315,9 +315,9 @@
|
|||
"integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q=="
|
||||
},
|
||||
"luxon": {
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.2.tgz",
|
||||
"integrity": "sha512-uBoAVCVcajsrqy3pv7eo5jEUz1oeLmCcnMv8n4AJpT5hbpN9lUssAXibNElpbLce3Mhm9dyBzwYLs9zctM/0tA=="
|
||||
"version": "3.4.3",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.3.tgz",
|
||||
"integrity": "sha512-tFWBiv3h7z+T/tDaoxA8rqTxy1CHV6gHS//QdaH4pulbq/JuBSGgQspQQqcgnwdAx6pNI7cmvz5Sv/addzHmUg=="
|
||||
},
|
||||
"node-fetch": {
|
||||
"version": "2.6.7",
|
||||
|
|
|
@@ -11,6 +11,6 @@
|
|||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@octokit/core": "^4.2.4",
|
||||
"luxon": "^3.4.2"
|
||||
"luxon": "^3.4.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -581,6 +581,7 @@ class CustomApiTests(TestCase):
|
|||
url = urlreverse('ietf.api.views.PersonalInformationExportView')
|
||||
login_testing_unauthorized(self, person.user.username, url)
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
jsondata = r.json()
|
||||
data = jsondata['person.person'][str(person.id)]
|
||||
self.assertEqual(data['name'], person.name)
|
||||
|
|
2
ietf/bin/.gitignore
vendored
|
@@ -1,2 +0,0 @@
|
|||
/*.pyc
|
||||
/settings_local.py
|
|
@@ -1,296 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import os, re, sys, shutil, pathlib
|
||||
from collections import namedtuple
|
||||
from PIL import Image
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.text import slugify
|
||||
|
||||
import debug
|
||||
|
||||
from ietf.group.models import Role, Person
|
||||
from ietf.person.name import name_parts
|
||||
|
||||
old_images_dir = ''
|
||||
new_images_dir = settings.PHOTOS_DIR
|
||||
|
||||
if not os.path.exists(new_images_dir):
|
||||
print("New images directory does not exist: %s" % new_images_dir)
|
||||
sys.exit(1)
|
||||
|
||||
old_image_files = []
|
||||
for dir in settings.OLD_PHOTO_DIRS:
|
||||
if not os.path.exists(dir):
|
||||
print("Old images directory does not exist: %s" % dir)
|
||||
sys.exit(1)
|
||||
old_image_files += [ f for f in pathlib.Path(dir).iterdir() if f.is_file() and f.suffix.lower() in ['.jpg', '.jpeg', '.png'] ]
|
||||
|
||||
photo = namedtuple('photo', ['path', 'name', 'ext', 'width', 'height', 'time', 'file'])
|
||||
|
||||
old_images = []
|
||||
for f in old_image_files:
|
||||
path = str(f)
|
||||
img = Image.open(path)
|
||||
old_images.append(photo(path, f.stem.decode('utf8'), f.suffix, img.size[0], img.size[1], f.stat().st_mtime, f))
|
||||
|
||||
# Fix up some names:
|
||||
|
||||
def fix_missing_surnames(images):
|
||||
replacement = {
|
||||
"alissa": "alissa-cooper",
|
||||
"alissa1": "alissa-cooper",
|
||||
"andrei": "andrei-robachevsky",
|
||||
"bernard": "bernard-aboba",
|
||||
"danny": "danny-mcpherson",
|
||||
"danny1": "danny-mcpherson",
|
||||
"dthaler": "dave-thaler",
|
||||
"eliot-mug": "eliot-lear",
|
||||
"erik.nordmark-300": "erik-nordmark",
|
||||
"hannes": "hannes-tschofenig",
|
||||
"hildebrand": "joe-hildebrand",
|
||||
"housley": "russ-housley",
|
||||
"jariarkko": "jari-arkko",
|
||||
"joel": "joel-jaeggli",
|
||||
"joel1": "joel-jaeggli",
|
||||
"joel2": "joel-jaeggli",
|
||||
"jon": "jon-peterson",
|
||||
"kessens": "david-kessens",
|
||||
"klensin": "john-klensin",
|
||||
"lars": "lars-eggert",
|
||||
"lars1": "lars-eggert",
|
||||
"marc_blanchet": "marc-blanchet",
|
||||
"marcelo": "marcelo-bagnulo",
|
||||
"olaf": "olaf-kolkman",
|
||||
"olaf1": "olaf-kolkman",
|
||||
"ross": "ross-callon",
|
||||
"spencer": "spencer-dawkins",
|
||||
"spencer1": "spencer-dawkins",
|
||||
"vijay": "vijay-gurbani",
|
||||
"xing": "xing-li",
|
||||
}
|
||||
|
||||
for i in range(len(images)):
|
||||
img = images[i]
|
||||
name = re.sub('-[0-9]+x[0-9]+', '', img.name)
|
||||
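# Strip a trailing "-<width>x<height>" size suffix (e.g. "-200x200") so
# differently sized copies of the same photo share one lookup key.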
if '/iab/' in img.path and name in replacement:
|
||||
name = replacement[name]
|
||||
images[i] = photo(img.path, name, img.ext, img.width, img.height, img.time, img.file)
|
||||
|
||||
|
||||
fix_missing_surnames(old_images)
|
||||
|
||||
interesting_persons = set(Person.objects.all())
|
||||
|
||||
name_alias = {
|
||||
u"andy": [u"andrew", ],
|
||||
u"ben": [u"benjamin", ],
|
||||
u"bill": [u"william", ],
|
||||
u"bob": [u"robert", ],
|
||||
u"chris": [u"christopher", u"christian"],
|
||||
u"dan": [u"daniel", ],
|
||||
u"dave": [u"david", ],
|
||||
u"dick": [u"richard", ],
|
||||
u"fred": [u"alfred", ],
|
||||
u"geoff": [u"geoffrey", ],
|
||||
u"jake": [u"jacob", ],
|
||||
u"jerry": [u"gerald", ],
|
||||
u"jim": [u"james", ],
|
||||
u"joe": [u"joseph", ],
|
||||
u"jon": [u"jonathan", ],
|
||||
u"mike": [u"michael", ],
|
||||
u"ned": [u"edward", ],
|
||||
u"pete": [u"peter", ],
|
||||
u"ron": [u"ronald", ],
|
||||
u"russ": [u"russel", ],
|
||||
u"steve": [u"stephen", ],
|
||||
u"ted": [u"edward", ],
|
||||
u"terry": [u"terence", ],
|
||||
u"tom": [u"thomas", ],
|
||||
u"wes": [u"wesley", ],
|
||||
u"will": [u"william", ],
|
||||
|
||||
u"beth": [u"elizabeth", ],
|
||||
u"liz": [u"elizabeth", ],
|
||||
u"lynn": [u"carolyn", ],
|
||||
u"pat": [u"patricia", u"patrick", ],
|
||||
u"sue": [u"susan", ],
|
||||
}
|
||||
# Add lookups from long to short, from the initial set
|
||||
for key,value in name_alias.items():
|
||||
for item in value:
|
||||
if item in name_alias:
|
||||
name_alias[item] += [ key ];
|
||||
else:
|
||||
name_alias[item] = [ key ];
|
||||
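# After this the alias table maps in both directions, e.g. "william" also
# points back to "bill" and "will".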
|
||||
exceptions = {
|
||||
'Aboba' : 'aboba-bernard',
|
||||
'Bernardos' : 'cano-carlos',
|
||||
'Bormann' : 'bormann-carsten',
|
||||
'Hinden' : 'hinden-bob',
|
||||
'Hutton' : 'hutton-andy',
|
||||
'Narten' : 'narten-thomas', # but there's no picture of him
|
||||
'O\'Donoghue' : 'odonoghue-karen',
|
||||
'Przygienda' : 'przygienda-antoni',
|
||||
'Salowey' : 'salowey-joe',
|
||||
'Gunter Van de Velde' : 'vandevelde-gunter',
|
||||
'Eric Vyncke' : 'vynke-eric',
|
||||
'Zuniga' : 'zuniga-carlos-juan',
|
||||
'Zhen Cao' : 'zhen-cao',
|
||||
'Jamal Hadi Salim': 'hadi-salim-jamal',
|
||||
}
|
||||
|
||||
# Manually copied Bo Burman and Thubert Pascal from wg/photos/
|
||||
# Manually copied Victor Pascual (main image, not thumb) from wg/
|
||||
# Manually copied Eric Vync?ke (main image, not thumb) from wg/photos/
|
||||
# Manually copied Danial King (main image, not thumb) from wg/photos/
|
||||
# Manually copied the thumb (not labelled as such) for Tianran Zhou as both the main and thumb image from wg/photos/
|
||||
|
||||
processed_files = []
|
||||
|
||||
for person in sorted(list(interesting_persons),key=lambda x:x.last_name()+x.ascii):
|
||||
substr_pattern = None
|
||||
for exception in exceptions:
|
||||
if exception in person.ascii:
|
||||
substr_pattern = exceptions[exception]
|
||||
break
|
||||
if not person.ascii.strip():
|
||||
print(" Setting person.ascii for %s" % person.name)
|
||||
person.ascii = person.name.encode('ascii', errors='replace').decode('ascii')
|
||||
|
||||
_, first, _, last, _ = person.ascii_parts()
|
||||
first = first.lower()
|
||||
last = last.lower()
|
||||
if not substr_pattern:
|
||||
substr_pattern = slugify("%s %s" % (last, first))
|
||||
|
||||
if first in ['', '<>'] or last in ['', '<>']:
|
||||
continue
|
||||
|
||||
#debug.show('1, substr_pattern')
|
||||
|
||||
candidates = [x for x in old_images if x.name.lower().startswith(substr_pattern)]
|
||||
# Also check the reverse the name order (necessary for Deng Hui, for instance)
|
||||
substr_pattern = slugify("%s %s" % (first, last))
|
||||
#debug.show('2, substr_pattern')
|
||||
prev_len = len(candidates)
|
||||
candidates += [x for x in old_images if x.name.lower().startswith(substr_pattern)]
|
||||
if prev_len < len(candidates) :
|
||||
print(" Found match with '%s %s' for '%s %s'" % (last, first, first, last, ))
|
||||
# If no joy, try a short name
|
||||
if first in name_alias:
|
||||
prev_len = len(candidates)
|
||||
for alias in name_alias[first]:
|
||||
substr_pattern = slugify("%s %s" % (last, alias))
|
||||
#debug.show('3, substr_pattern')
|
||||
candidates += [x for x in old_images if x.name.lower().startswith(substr_pattern)]
|
||||
if prev_len < len(candidates):
|
||||
print(" Found match with '%s %s' for '%s %s'" % (alias, last, first, last, ))
|
||||
|
||||
|
||||
# # If still no joy, try with Person.plain_name() (necessary for Donald Eastlake)
|
||||
# if not candidates:
|
||||
# prefix, first, middle, last, suffix = person.name_parts()
|
||||
# name_parts = person.plain_name().lower().split()
|
||||
#
|
||||
# substr_pattern = u'-'.join(name_parts[-1:]+name_parts[0:1])
|
||||
# candidates = [x for x in old_images if x.name.lower().startswith(substr_pattern)]
|
||||
# # If no joy, try a short name
|
||||
# if not candidates and first in name_alias:
|
||||
# prev_len = len(candidates)
|
||||
# for alias in name_alias[first]:
|
||||
# substr_pattern = u'-'.join(name_parts[-1:]+[alias])
|
||||
# candidates += [x for x in old_images if x.name.lower().startswith(substr_pattern)]
|
||||
# if prev_len < len(candidates) :
|
||||
# print(" Used '%s %s' instead of '%s %s'" % (alias, last, first, last, ))
|
||||
|
||||
# # Fixup for other exceptional cases
|
||||
# if person.ascii=="David Oran":
|
||||
# candidates = ['oran-dave-th.jpg','oran-david.jpg']
|
||||
#
|
||||
# if person.ascii=="Susan Hares":
|
||||
# candidates = ['hares-sue-th.jpg','hares-susan.JPG']
|
||||
#
|
||||
# if person.ascii=="Mahesh Jethanandani":
|
||||
# candidates = ['Mahesh-Jethanandani-th.jpg','Jethanandani-Mahesh.jpg']
|
||||
|
||||
processed_files += [ c.path for c in candidates ]
|
||||
|
||||
# We now have a list of candidate photos.
|
||||
# * Consider anything less than 200x200 a thumbnail
|
||||
# * For the full photo, sort by size (width) and time
|
||||
# * For the thumbnail:
|
||||
# - first look for a square photo less than 200x200
|
||||
# - if none found, then for the first in the sorted list less than 200x200
|
||||
# - if none found, then the smallest photo
|
||||
if candidates:
|
||||
candidates.sort(key=lambda x: "%04d-%d" % (x.width, x.time))
|
||||
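# The sort key zero-pads the width so the lexicographic sort orders candidates
# by width (and then roughly by mtime); candidates[-1] is the largest photo.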
iesg_cand = [ c for c in candidates if '/iesg/' in c.path ]
|
||||
iab_cand = [ c for c in candidates if '/iab/' in c.path ]
|
||||
if iesg_cand:
|
||||
full = iesg_cand[-1]
|
||||
thumb = iesg_cand[-1]
|
||||
elif iab_cand:
|
||||
full = iab_cand[-1]
|
||||
thumb = iab_cand[0]
|
||||
else:
|
||||
full = candidates[-1]
|
||||
thumbs = [ c for c in candidates if c.width==c.height and c.width <= 200 ]
|
||||
if not thumbs:
|
||||
thumbs = [ c for c in candidates if c.width==c.height ]
|
||||
if not thumbs:
|
||||
thumbs = [ c for c in candidates if c.width <= 200 ]
|
||||
if not thumbs:
|
||||
thumbs = candidates[:1]
|
||||
thumb = thumbs[-1]
|
||||
candidates = [ thumb, full ]
|
||||
|
||||
# At this point we either have no candidates or two. If two, the first will be the thumb
|
||||
|
||||
def copy(old, new):
|
||||
if not os.path.exists(new):
|
||||
print("Copying "+old+" to "+new)
|
||||
shutil.copy(old, new)
|
||||
shutil.copystat(old, new)
|
||||
|
||||
assert(len(candidates) in [0,2])
|
||||
if len(candidates)==2:
|
||||
thumb, full = candidates
|
||||
|
||||
new_name = person.photo_name(thumb=False)+full.ext.lower()
|
||||
new_thumb_name = person.photo_name(thumb=True)+thumb.ext.lower()
|
||||
|
||||
copy( full.path, os.path.join(new_images_dir,new_name) )
|
||||
|
||||
#
|
||||
copy( thumb.path, os.path.join(new_images_dir,new_thumb_name) )
|
||||
|
||||
|
||||
print("")
|
||||
not_processed = 0
|
||||
for file in old_image_files:
|
||||
if ( file.is_file()
|
||||
and not file.suffix.lower() in ['.txt', '.lck', '.html',]
|
||||
and not file.name.startswith('index.')
|
||||
and not file.name.startswith('milestoneupdate')
|
||||
and not file.name.startswith('nopicture')
|
||||
and not file.name.startswith('robots.txt')
|
||||
):
|
||||
if not str(file).decode('utf8') in processed_files:
|
||||
not_processed += 1
|
||||
print(u"Not processed: "+str(file).decode('utf8'))
|
||||
print("")
|
||||
print("Not processed: %s files" % not_processed)
|
|
@@ -1,48 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import sys, os, sys
|
||||
import datetime
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from django.core import management
|
||||
from django.template.loader import render_to_string
|
||||
|
||||
from ietf import settings
|
||||
from ietf.utils.mail import send_mail_preformatted
|
||||
from ietf.utils.mail import send_mail
|
||||
|
||||
target_date=datetime.date(year=2014,month=1,day=24)
|
||||
|
||||
send_mail(request = None,
|
||||
to = "IETF-Announce <ietf-announce@ietf.org>",
|
||||
frm = "The IESG <iesg-secretary@ietf.org>",
|
||||
subject = "Upcoming change to announcement email header fields (using old header)",
|
||||
template = "utils/header_change_content.txt",
|
||||
context = dict(oldornew='old', target_date=target_date),
|
||||
extra = {'Reply-To' : 'ietf@ietf.org',
|
||||
'Sender' : '<iesg-secretary@ietf.org>',
|
||||
}
|
||||
)
|
||||
|
||||
send_mail(request = None,
|
||||
to = "IETF-Announce:;",
|
||||
frm = "The IESG <noreply@ietf.org>",
|
||||
subject = "Upcoming change to announcement email header fields (using new header)",
|
||||
template = "utils/header_change_content.txt",
|
||||
context = dict(oldornew='new', target_date=target_date),
|
||||
extra = {'Reply-To' : 'IETF Discussion List <ietf@ietf.org>',
|
||||
'Sender' : '<iesg-secretary@ietf.org>',
|
||||
},
|
||||
bcc = '<ietf-announce@ietf.org>',
|
||||
)
|
|
@@ -1,43 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# -*- Python -*-
|
||||
#
|
||||
|
||||
import os, sys
|
||||
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from ietf.group.models import Group
|
||||
from ietf.person.models import Person
|
||||
from ietf.name.models import SessionStatusName
|
||||
from ietf.meeting.models import Meeting, Session, SchedTimeSessAssignment
|
||||
|
||||
secretariat = Group.objects.get(acronym='secretariat')
|
||||
system = Person.objects.get(id=1, name='(System)')
|
||||
scheduled = SessionStatusName.objects.get(slug='sched')
|
||||
|
||||
for meeting in Meeting.objects.filter(type="ietf").order_by("date"):
|
||||
print "Checking %s schedules ..." % meeting
|
||||
brk, __ = Session.objects.get_or_create(meeting=meeting, group=secretariat, requested_by=system, status=scheduled, name='Break', type_id='break',)
|
||||
reg, __ = Session.objects.get_or_create(meeting=meeting, group=secretariat, requested_by=system, status=scheduled, name='Registration', type_id='reg',)
|
||||
|
||||
for schedule in meeting.schedule_set.all():
|
||||
print " Checking for missing Break and Reg sessions in %s" % schedule
|
||||
for timeslot in meeting.timeslot_set.all():
|
||||
if timeslot.type_id == 'break' and not (schedule.base and SchedTimeSessAssignment.objects.filter(timeslot=timeslot, session=brk, schedule=schedule.base).exists()):
|
||||
assignment, created = SchedTimeSessAssignment.objects.get_or_create(timeslot=timeslot, session=brk, schedule=schedule)
|
||||
if created:
|
||||
print " Added %s break assignment" % timeslot
|
||||
if timeslot.type_id == 'reg' and not (schedule.base and SchedTimeSessAssignment.objects.filter(timeslot=timeslot, session=reg, schedule=schedule.base).exists()):
|
||||
assignment, created = SchedTimeSessAssignment.objects.get_or_create(timeslot=timeslot, session=reg, schedule=schedule)
|
||||
if created:
|
||||
print " Added %s registration assignment" % timeslot
|
|
@@ -1,72 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
version = "0.10"
|
||||
program = os.path.basename(sys.argv[0])
|
||||
progdir = os.path.dirname(sys.argv[0])
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
def note(string):
|
||||
sys.stdout.write("%s\n" % (string))
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
def warn(string):
|
||||
sys.stderr.write(" * %s\n" % (string))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
import datetime
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from ietf.utils.path import path as Path
|
||||
from ietf.doc.models import Document, NewRevisionDocEvent
|
||||
from ietf.person.models import Person
|
||||
|
||||
system_entity = Person.objects.get(name="(System)")
|
||||
|
||||
charterdir = Path(settings.CHARTER_PATH)
|
||||
for file in charterdir.files("charter-ietf-*.txt"):
|
||||
fname = file.name
|
||||
ftime = datetime.datetime.fromtimestamp(file.mtime, datetime.timezone.utc)
|
||||
match = re.search("^(?P<name>[a-z0-9-]+)-(?P<rev>\d\d-\d\d)\.txt$", fname)
|
||||
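# Charter files carry either a two-part revision (e.g. 00-01) or a plain
# two-digit revision; try the two-part form first, then fall back below.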
if match:
|
||||
name = match.group("name")
|
||||
rev = match.group("rev")
|
||||
else:
|
||||
match = re.search("^(?P<name>[a-z0-9-]+)-(?P<rev>\d\d)\.txt$", fname)
|
||||
if match:
|
||||
name = match.group("name")
|
||||
rev = match.group("rev")
|
||||
else:
|
||||
warn("Failed extracting revision from filename: '%s'" % fname)
|
||||
try:
|
||||
doc = Document.objects.get(type="charter", name=name)
|
||||
try:
|
||||
event = NewRevisionDocEvent.objects.get(doc=doc, type='new_revision', rev=rev)
|
||||
note(".")
|
||||
except NewRevisionDocEvent.MultipleObjectsReturned as e:
|
||||
warn("Multiple NewRevisionDocEvent exists for '%s'" % fname)
|
||||
except NewRevisionDocEvent.DoesNotExist:
|
||||
event = NewRevisionDocEvent(doc=doc, type='new_revision', rev=rev, by=system_entity, time=ftime, desc="")
|
||||
event.save()
|
||||
note("Created new NewRevisionDocEvent for %s-%s" % (name, rev))
|
||||
except Document.DoesNotExist:
|
||||
warn("Document not found: '%s'; no NewRevisionDocEvent created for '%s'" % (name, fname))
|
||||
|
|
@@ -1,41 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
version = "0.10"
|
||||
program = os.path.basename(sys.argv[0])
|
||||
progdir = os.path.dirname(sys.argv[0])
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from django.template import Template, Context
|
||||
|
||||
from ietf.doc.models import Document
|
||||
from ietf.person.models import Person
|
||||
|
||||
drafts = Document.objects.filter(type="draft")
|
||||
|
||||
ads = {}
|
||||
for p in Person.objects.filter(ad_document_set__type="draft").distinct():
|
||||
ads[p.id] = p.role_email("ad")
|
||||
|
||||
for d in drafts:
|
||||
d.ad_email = ads.get(d.ad_id)
|
||||
|
||||
templ_text = """{% for draft in drafts %}{% if draft.notify or draft.ad_email %}{{ draft.name }}{% if draft.notify %} docnotify='{{ draft.notify|cut:"<"|cut:">" }}'{% endif %}{% if draft.ad_email %} docsponsor='{{ draft.ad_email }}'{% endif %}
|
||||
{% endif %}{% endfor %}"""
|
||||
template = Template(templ_text)
|
||||
context = Context({ 'drafts':drafts })
|
||||
|
||||
print template.render(context).encode('utf-8')
|
|
@@ -1,37 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import sys, os, sys
|
||||
import syslog
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
parser = OptionParser()
|
||||
parser.add_option("-t", "--to", dest="to",
|
||||
help="Email address to send report to", metavar="EMAIL")
|
||||
|
||||
options, args = parser.parse_args()
|
||||
|
||||
syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from ietf.sync.mails import email_discrepancies
|
||||
|
||||
receivers = ["iesg-secretary@ietf.org"]
|
||||
|
||||
if options.to:
|
||||
receivers = [options.to]
|
||||
|
||||
email_discrepancies(receivers)
|
||||
|
||||
syslog.syslog("Emailed sync discrepancies to %s" % receivers)
|
|
@@ -1,106 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
|
||||
version = "0.10"
|
||||
program = os.path.basename(sys.argv[0])
|
||||
progdir = os.path.dirname(sys.argv[0])
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
def note(string):
|
||||
sys.stdout.write("%s\n" % (string))
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
def warn(string):
|
||||
sys.stderr.write(" * %s\n" % (string))
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
from datetime import datetime as Datetime
|
||||
import time
|
||||
import warnings
|
||||
warnings.filterwarnings('ignore', message='the sets module is deprecated', append=True)
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from ietf.utils.path import path as Path
|
||||
|
||||
from ietf.submit.models import Submission
|
||||
from ietf.doc.models import Document
|
||||
|
||||
|
||||
|
||||
args = sys.argv[1:]
|
||||
if len(args) < 3:
|
||||
warn("Expected '$ %s DRAFTNAME USER.LOG POSTFIX.LOG', but found no arguments -- exiting" % program)
|
||||
sys.exit(1)
|
||||
|
||||
draft = args[0]
|
||||
if re.search("\.txt$", draft):
|
||||
draft = draft[:-4]
|
||||
if re.search("-\d\d$", draft):
|
||||
draft = draft[:-3]
|
||||
|
||||
if len(args) == 1:
|
||||
logfiles = [ args[1] ]
|
||||
else:
|
||||
logfiles = args[1:]
|
||||
|
||||
from_email = settings.IDSUBMIT_FROM_EMAIL
|
||||
if "<" in from_email:
|
||||
from_email = from_email.split("<")[1].split(">")[0]
|
||||
|
||||
submission = Submission.objects.filter(name=draft).latest('submission_date')
|
||||
document = Document.objects.get(name=draft)
|
||||
emails = [ author.email.address for author in document.documentauthor_set.all() if author.email ]
|
||||
|
||||
timestrings = []
|
||||
for file in [ Path(settings.INTERNET_DRAFT_PATH) / ("%s-%s.txt"%(draft, submission.rev)),
|
||||
Path(settings.IDSUBMIT_STAGING_PATH) / ("%s-%s.txt"%(draft, submission.rev)) ]:
|
||||
if os.path.exists(file):
|
||||
upload_time = time.localtime(file.mtime)
|
||||
ts = time.strftime("%b %d %H:%M", upload_time)
|
||||
timestrings += [ ts ]
|
||||
timestrings += [ ts[:-1] + chr(((ord(ts[-1])-ord('0')+1)%10)+ord('0')) ]
|
||||
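# Also look for log lines stamped in the following minute, since the mail may
# have been queued just after the file's mtime; the chr() expression bumps the
# final digit of the "%b %d %H:%M" timestamp by one (mod 10).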
print "Looking for mail log lines timestamped %s, also checking %s ..." % (timestrings[0], timestrings[1])
|
||||
|
||||
for log in logfiles:
|
||||
print "\n Checking %s ...\n" % log
|
||||
if log.endswith('.gz'):
|
||||
import gzip
|
||||
logfile = gzip.open(log)
|
||||
else:
|
||||
logfile = io.open(log)
|
||||
queue_ids = []
|
||||
for line in logfile:
|
||||
if from_email in line and "Confirmation for Auto-Post of I-D "+draft in line:
|
||||
ts = line[:12]
|
||||
timestrings += [ ts ]
|
||||
print "Found a mention of %s, adding timestamp %s: \n %s" % (draft, ts, line)
|
||||
for ts in timestrings:
|
||||
if line.startswith(ts):
|
||||
if from_email in line:
|
||||
for to_email in emails:
|
||||
if to_email in line:
|
||||
sys.stdout.write(line)
|
||||
if "queued_as:" in line:
|
||||
queue_ids += [ line.split("queued_as:")[1].split(",")[0] ]
|
||||
elif queue_ids:
|
||||
for qi in queue_ids:
|
||||
if qi in line:
|
||||
sys.stdout.write(line)
|
|
@@ -1,27 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# -*- Python -*-
|
||||
#
|
||||
'''
|
||||
This script calls ietf.meeting.helpers.check_interim_minutes() which sends
|
||||
a reminder email for interim meetings that occurred 10 days ago but still
|
||||
don't have minutes.
|
||||
'''
|
||||
|
||||
# Set PYTHONPATH and load environment variables for standalone script -----------------
|
||||
import os, sys
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
# -------------------------------------------------------------------------------------
|
||||
|
||||
from ietf.meeting.helpers import check_interim_minutes
|
||||
|
||||
check_interim_minutes()
|
|
@@ -1,36 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
|
||||
import os, sys
|
||||
import syslog
|
||||
|
||||
# boilerplate
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
|
||||
from django.utils.encoding import force_str
|
||||
from ietf.group.models import Role
|
||||
|
||||
addresses = set()
|
||||
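# Collect the active, non-placeholder addresses of everyone who holds a role
# in an active group of the listed types.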
for role in Role.objects.filter(
|
||||
group__state__slug='active',
|
||||
group__type__in=['ag','area','dir','iab','ietf','irtf','nomcom','rg','team','wg','rag']):
|
||||
#sys.stderr.write(str(role)+'\n')
|
||||
for e in role.person.email_set.all():
|
||||
if e.active and not e.address.startswith('unknown-email-'):
|
||||
addresses.add(e.address)
|
||||
|
||||
addresses = list(addresses)
|
||||
addresses.sort()
|
||||
for a in addresses:
|
||||
print(force_str(a))
|
|
@@ -1,5 +0,0 @@
|
|||
#!/bin/sh
|
||||
python manage.py dumpdata --format=xml "$@" | sed -e 's/<\/*object/\
|
||||
&/g' -e 's/<field/\
|
||||
&/g' -e 's/<\/django-objects/\
|
||||
&/g'
|
File diff suppressed because it is too large
|
@@ -1,9 +0,0 @@
|
|||
#!/bin/sh
|
||||
#
|
||||
# Copyright The IETF Trust 2007, All Rights Reserved
|
||||
#
|
||||
#python manage.py dumpdata --format=xml redirects | xmllint --format -
|
||||
python manage.py dumpdata --format=xml redirects | sed -e 's/<\/*object/\
|
||||
&/g' -e 's/<field/\
|
||||
&/g' -e 's/<\/django-objects/\
|
||||
&/g'
|
|
@@ -1,38 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Run some non-modifying tests on top of the real database, to
|
||||
# exercise the code with real data.
|
||||
#
|
||||
|
||||
import os, subprocess, datetime
|
||||
|
||||
base_dir = os.path.relpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
|
||||
|
||||
path = os.path.abspath(os.path.join(base_dir, ".."))
|
||||
if os.environ.get("PYTHONPATH"):
|
||||
path += ":" + os.environ.get("PYTHONPATH")
|
||||
os.environ["PYTHONPATH"] = path
|
||||
|
||||
|
||||
|
||||
def run_script(script, *args):
|
||||
script_base = os.path.splitext(os.path.basename(script))[0]
|
||||
script_path = os.path.join(base_dir, script)
|
||||
output_path = os.path.join(base_dir, script_base)
|
||||
arg_str = " " + " ".join(args) if args else ""
|
||||
cmd_line = "%s%s > %s.output" % (script_path, arg_str, output_path)
|
||||
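# Each script's stdout is redirected to <script-basename>.output under
# base_dir so the results can be inspected after the run.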
print "Running %s" % cmd_line
|
||||
before = datetime.datetime.now()
|
||||
returncode = subprocess.call(cmd_line, shell=True)
|
||||
print " (took %.3f seconds)" % (datetime.datetime.now() - before).total_seconds()
|
||||
return returncode
|
||||
|
||||
# idindex
|
||||
run_script("idindex/generate_id_abstracts_txt.py")
|
||||
run_script("idindex/generate_id_index_txt.py")
|
||||
run_script("idindex/generate_all_id_txt.py")
|
||||
run_script("idindex/generate_all_id2_txt.py")
|
||||
|
||||
# test crawler
|
||||
crawl_input = os.path.join(base_dir, "utils/crawlurls.txt")
|
||||
run_script("bin/test-crawl", "--urls %s" % crawl_input)
|
|
@@ -1,87 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# -*- Python -*-
|
||||
#
|
||||
'''
|
||||
This script configures Django Admin permissions
|
||||
'''
|
||||
|
||||
# Set PYTHONPATH and load environment variables for standalone script -----------------
|
||||
import os, sys
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
|
||||
|
||||
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
|
||||
if os.path.exists(virtualenv_activation):
|
||||
execfile(virtualenv_activation, dict(__file__=virtualenv_activation))
|
||||
|
||||
import django
|
||||
django.setup()
|
||||
# -------------------------------------------------------------------------------------
|
||||
|
||||
from django.contrib.auth.models import Group as AuthGroup
|
||||
from django.contrib.auth.models import Permission
|
||||
from ietf.group.models import Group
|
||||
|
||||
|
||||
def permission_names_to_objects(names):
|
||||
"""
|
||||
Given an iterable of permission names (e.g. 'app_label.add_model'),
|
||||
return an iterable of Permission objects for them. The permission
|
||||
must already exist, because a permission name is not enough information
|
||||
to create a new permission.
|
||||
"""
|
||||
result = []
|
||||
for name in names:
|
||||
app_label, codename = name.split(".", 1)
|
||||
try:
|
||||
result.append(Permission.objects.get(content_type__app_label=app_label,
|
||||
codename=codename))
|
||||
except Permission.DoesNotExist:
|
||||
print ("NO SUCH PERMISSION: %s, %s" % (app_label, codename))
|
||||
raise
|
||||
|
||||
return result
|
||||
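# Example usage (assuming the named permissions exist in the database):
#   perms = permission_names_to_objects(["group.add_group", "group.change_group"])
#   auth_group.permissions.set(perms)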
|
||||
|
||||
def main():
|
||||
secretariat = Group.objects.get(acronym='secretariat')
|
||||
users = [ r.person.user for r in secretariat.role_set.filter(name='secr') ]
|
||||
|
||||
# Set Auth Group members
|
||||
auth_group, _ = AuthGroup.objects.get_or_create(name='secretariat')
|
||||
auth_group.user_set.set(users)
|
||||
|
||||
# Set Auth Group Admin Permissions
|
||||
names = ['auth.add_user','auth.change_user','auth.delete_user',
|
||||
'dbtemplate.change_dbtemplate',
|
||||
'group.add_group','group.change_group','group.delete_group',
|
||||
'group.add_role','group.change_role','group.delete_role',
|
||||
'group.add_groupevent','group.change_groupevent','group.delete_groupevent',
|
||||
'iesg.add_telechatagendaitem','iesg.change_telechatagendaitem','iesg.delete_telechatagendaitem',
|
||||
'iesg.add_telechatdate','iesg.change_telechatdate','iesg.delete_telechatdate',
|
||||
'liaisons.add_liaisonstatementgroupcontacts', 'liaisons.change_liaisonstatementgroupcontacts', 'liaisons.delete_liaisonstatementgroupcontacts',
|
||||
'mailinglists.add_list','mailinglists.change_list','mailinglists.delete_list',
|
||||
'mailtrigger.add_mailtrigger','mailtrigger.change_mailtrigger','mailtrigger.delete_mailtrigger',
|
||||
'mailtrigger.add_recipient','mailtrigger.change_recipient','mailtrigger.delete_recipient',
|
||||
'meeting.add_floorplan','meeting.change_floorplan','meeting.delete_floorplan',
|
||||
'meeting.add_importantdate','meeting.change_importantdate','meeting.delete_importantdate',
|
||||
'meeting.add_meeting','meeting.change_meeting','meeting.delete_meeting',
|
||||
'meeting.add_room','meeting.change_room','meeting.delete_room',
|
||||
'meeting.add_urlresource','meeting.change_urlresource','meeting.delete_urlresource',
|
||||
'message.add_announcementfrom','message.change_announcementfrom','message.delete_announcementfrom',
|
||||
'nomcom.add_nomcom','nomcom.change_nomcom','nomcom.delete_nomcom',
|
||||
'nomcom.add_volunteer','nomcom.change_volunteer','nomcom.delete_volunteer',
|
||||
'person.add_person','person.change_person','person.delete_person',
|
||||
'person.add_alias','person.change_alias','person.delete_alias',
|
||||
'person.add_email','person.change_email','person.delete_email',
|
||||
'submit.add_submission','submit.change_submission','submit.delete_submission',
|
||||
]
|
||||
|
||||
permissions = permission_names_to_objects(names)
|
||||
auth_group.permissions.set(permissions)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
1
ietf/cookies/.gitignore
vendored
|
@@ -1 +0,0 @@
|
|||
/*.pyc
|
2
ietf/database-notes/.gitignore
vendored
|
@@ -1,2 +0,0 @@
|
|||
/*.pyc
|
||||
/settings_local.py
|
|
@@ -35,6 +35,7 @@ def draft_name_generator(type_id,group,n):
|
|||
class BaseDocumentFactory(factory.django.DjangoModelFactory):
|
||||
class Meta:
|
||||
model = Document
|
||||
skip_postgeneration_save = True
|
||||
|
||||
title = factory.Faker('sentence',nb_words=5)
|
||||
abstract = factory.Faker('paragraph', nb_sentences=5)
|
||||
|
@@ -329,6 +330,7 @@ class ReviewFactory(BaseDocumentFactory):
|
|||
class DocAliasFactory(factory.django.DjangoModelFactory):
|
||||
class Meta:
|
||||
model = DocAlias
|
||||
skip_postgeneration_save = True
|
||||
|
||||
@factory.post_generation
|
||||
def document(self, create, extracted, **kwargs):
|
||||
|
@@ -378,6 +380,7 @@ class NewRevisionDocEventFactory(DocEventFactory):
|
|||
class StateDocEventFactory(DocEventFactory):
|
||||
class Meta:
|
||||
model = StateDocEvent
|
||||
skip_postgeneration_save = True
|
||||
|
||||
type = 'changed_state'
|
||||
state_type_id = 'draft-iesg'
|
||||
|
@@ -451,6 +454,7 @@ class WgDocumentAuthorFactory(DocumentAuthorFactory):
|
|||
class BofreqEditorDocEventFactory(DocEventFactory):
|
||||
class Meta:
|
||||
model = BofreqEditorDocEvent
|
||||
skip_postgeneration_save = True
|
||||
|
||||
type = "changed_editors"
|
||||
doc = factory.SubFactory('ietf.doc.factories.BofreqFactory')
|
||||
|
@@ -465,10 +469,12 @@ class BofreqEditorDocEventFactory(DocEventFactory):
|
|||
else:
|
||||
obj.editors.set(PersonFactory.create_batch(3))
|
||||
obj.desc = f'Changed editors to {", ".join(obj.editors.values_list("name",flat=True)) or "(None)"}'
|
||||
obj.save()
|
||||
|
||||
class BofreqResponsibleDocEventFactory(DocEventFactory):
|
||||
class Meta:
|
||||
model = BofreqResponsibleDocEvent
|
||||
skip_postgeneration_save = True
|
||||
|
||||
type = "changed_responsible"
|
||||
doc = factory.SubFactory('ietf.doc.factories.BofreqFactory')
|
||||
|
@@ -483,7 +489,8 @@ class BofreqResponsibleDocEventFactory(DocEventFactory):
|
|||
else:
|
||||
ad = RoleFactory(group__type_id='area',name_id='ad').person
|
||||
obj.responsible.set([ad])
|
||||
obj.desc = f'Changed responsible leadership to {", ".join(obj.responsible.values_list("name",flat=True)) or "(None)"}'
|
||||
obj.desc = f'Changed responsible leadership to {", ".join(obj.responsible.values_list("name",flat=True)) or "(None)"}'
|
||||
obj.save()
|
||||
|
||||
class BofreqFactory(BaseDocumentFactory):
|
||||
type_id = 'bofreq'
|
||||
|
|
|
@@ -1,320 +0,0 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
import debug # pyflakes:ignore
|
||||
|
||||
import csv
|
||||
import datetime
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from ietf.doc.models import Document, DocAlias, DocEvent, State
|
||||
from ietf.utils.text import xslugify
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Performs a one-time import of IAB statements"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if Document.objects.filter(type="statement", group__acronym="iab").exists():
|
||||
print("IAB statement documents already exist - exiting")
|
||||
exit(-1)
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
process = subprocess.Popen(
|
||||
["git", "clone", "https://github.com/kesara/iab-scraper.git", tmpdir],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
stdout, stderr = process.communicate()
|
||||
if not Path(tmpdir).joinpath("iab_minutes", "2022-12-14.md").exists():
|
||||
print("Git clone of the iab-scraper directory did not go as expected")
|
||||
print("stdout:", stdout)
|
||||
print("stderr:", stderr)
|
||||
print(f"Clean up {tmpdir} manually")
|
||||
exit(-1)
|
||||
|
||||
spreadsheet_rows = load_spreadsheet()
|
||||
with open("iab_statement_redirects.csv", "w") as redirect_file:
|
||||
redirect_writer = csv.writer(redirect_file)
|
||||
for index, (file_fix, date_string, title, url, _) in enumerate(
|
||||
spreadsheet_rows
|
||||
):
|
||||
name = url.split("/")[6].lower()
|
||||
if name.startswith("iabs"):
|
||||
name = name[5:]
|
||||
elif name.startswith("iab"):
|
||||
name = name[4:]
|
||||
if index == 1:
|
||||
name += "-archive" # https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-identifiers-and-unicode-7-0-0/archive/
|
||||
if index == 100:
|
||||
name = (
|
||||
"2010-" + name
|
||||
) # https://www.iab.org/documents/correspondence-reports-documents/docs2010/iab-statement-on-the-rpki/
|
||||
if index == 152:
|
||||
name = (
|
||||
"2018-" + name
|
||||
) # https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-statement-on-the-rpki/
|
||||
docname = f"statement-iab-{xslugify(name)}"
|
||||
ext = None
|
||||
base_sourcename = (
|
||||
f"{date_string}-{file_fix}" if file_fix != "" else date_string
|
||||
)
|
||||
if (
|
||||
Path(tmpdir)
|
||||
.joinpath("iab_statements", f"{base_sourcename}.md")
|
||||
.exists()
|
||||
):
|
||||
ext = "md"
|
||||
elif (
|
||||
Path(tmpdir)
|
||||
.joinpath("iab_statements", f"{base_sourcename}.pdf")
|
||||
.exists()
|
||||
):
|
||||
ext = "pdf"
|
||||
if ext is None:
|
||||
debug.show(
|
||||
'f"Could not find {Path(tmpdir).joinpath("iab_statements", f"{base_path}.md")}"'
|
||||
)
|
||||
continue
|
||||
filename = f"{docname}-00.{ext}"
|
||||
# Create Document
|
||||
doc = Document.objects.create(
|
||||
name=docname,
|
||||
type_id="statement",
|
||||
title=title,
|
||||
group_id=7, # The IAB group
|
||||
rev="00",
|
||||
uploaded_filename=filename,
|
||||
)
|
||||
doc.set_state(State.objects.get(type_id="statement", slug="active"))
|
||||
DocAlias.objects.create(name=doc.name).docs.add(doc)
|
||||
year, month, day = [int(part) for part in date_string.split("-")]
|
||||
e1 = DocEvent.objects.create(
|
||||
time=datetime.datetime(
|
||||
year, month, day, 12, 00, tzinfo=datetime.timezone.utc
|
||||
),
|
||||
type="published_statement",
|
||||
doc=doc,
|
||||
rev="00",
|
||||
by_id=1,
|
||||
desc="Statement published (note: The 1200Z time of day is inaccurate - the actual time of day is not known)",
|
||||
)
|
||||
e2 = DocEvent.objects.create(
|
||||
type="added_comment",
|
||||
doc=doc,
|
||||
rev="00",
|
||||
by_id=1, # The "(System)" person
|
||||
desc="Statement moved into datatracker from iab wordpress website",
|
||||
)
|
||||
doc.save_with_history([e1, e2])
|
||||
|
||||
# Put file in place
|
||||
source = Path(tmpdir).joinpath(
|
||||
"iab_statements", f"{base_sourcename}.{ext}"
|
||||
)
|
||||
dest = Path(settings.DOCUMENT_PATH_PATTERN.format(doc=doc)).joinpath(
|
||||
filename
|
||||
)
|
||||
if dest.exists():
|
||||
print(f"WARNING: {dest} already exists - not overwriting it.")
|
||||
else:
|
||||
os.makedirs(dest.parent, exist_ok=True)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
redirect_writer.writerow(
|
||||
[
|
||||
url,
|
||||
f"https://datatracker.ietf.org/doc/{docname}",
|
||||
]
|
||||
)
|
||||
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def load_spreadsheet():
|
||||
csv_dump = '''2002-03-01,IAB RFC Publication Process Description(txt) March 2003,https://www.iab.org/documents/correspondence-reports-documents/docs2003/iab-rfc-publication-process/,deprecated
|
||||
2015-01-27,IAB Statement on Identifiers and Unicode 7.0.0 (archive),https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-identifiers-and-unicode-7-0-0/archive/,deprecated
|
||||
2010-02-05,Response to the EC’s RFI on Forums and Consortiums,https://www.iab.org/documents/correspondence-reports-documents/docs2010/response-to-the-ecs-rfi-on-forums-and-consortiums/,https://www.iab.org/wp-content/IAB-uploads/2011/03/2010-02-05-IAB-Response-Euro-ICT-Questionnaire.pdf
|
||||
2011-03-30,IAB responds to NTIA Request for Comments on the IANA Functions,https://www.iab.org/documents/correspondence-reports-documents/2011-2/iab-responds-to-ntia-request-for-comments-on-the-iana-functions/,https://www.iab.org/wp-content/IAB-uploads/2011/04/2011-03-30-iab-iana-noi-response.pdf
|
||||
2011-07-28,IAB's response to the NTIA FNOI on IANA,https://www.iab.org/documents/correspondence-reports-documents/2011-2/iabs-response-to-the-ntia-fnoi-on-iana/,https://www.iab.org/wp-content/IAB-uploads/2011/07/IANA-IAB-FNOI-2011.pdf
|
||||
2011-12-16,"Questionnaire in support of the ICANN bid for the IANA function [Dec 16, 2011]",https://www.iab.org/documents/correspondence-reports-documents/2011-2/questionnaire-in-support-of-the-icann-bid-for-the-iana-function/,https://www.iab.org/wp-content/IAB-uploads/2011/12/IAB-Past-Performance-Questionnaire.pdf
|
||||
2012-04-03,IETF Oversight of the IANA Protocol Parameter Function,https://www.iab.org/documents/correspondence-reports-documents/2012-2/ietf-oversight-of-the-iana-protocol-parameter-function/,https://www.iab.org/wp-content/IAB-uploads/2012/04/IETF-IANA-Oversight.pdf
|
||||
2012-04-29,IETF and IAB comment on OMB Circular A-119,https://www.iab.org/documents/correspondence-reports-documents/2012-2/ietf-and-iab-comment-on-omb-circular-a-119/,https://www.iab.org/wp-content/IAB-uploads/2012/04/OMB-119.pdf
|
||||
2012-05-24,IAB submits updated ICANN performance evaluation,https://www.iab.org/documents/correspondence-reports-documents/2012-2/iab-submits-updated-icann-performance-evaluation/,https://www.iab.org/wp-content/IAB-uploads/2012/05/IAB-Past-Performance-Questionnaire-FINAL.pdf
|
||||
2013-07-02,Open letter to the European Commission and the European Parliament in the matter of the Transatlantic Trade and Investment Partnership (TTIP),https://www.iab.org/documents/correspondence-reports-documents/2013-2/open-letter-to-the-ec/,https://www.iab.org/wp-content/IAB-uploads/2013/07/TTIP_market_driven_standards_EU_letter.pdf
|
||||
2013-05-10,Comments In the matter of Transatlantic Trade and Investment Partnership (TTIP) (USTR-2013-0019),https://www.iab.org/documents/correspondence-reports-documents/2013-2/comments-in-the-matter-of-transatlantic-trade-and-investment-partnership-ttip-ustr-2013-0019/,https://www.iab.org/wp-content/IAB-uploads/2013/07/TTIP_market_driven_standards_FINAL.pdf
|
||||
2013-10-23,IAB Comments on Recommendation for Random Number Generation Using Deterministic Random Bit Generators,https://www.iab.org/documents/correspondence-reports-documents/2013-2/nist-sp-800-90a/,https://www.iab.org/wp-content/IAB-uploads/2013/10/IAB-NIST-FINAL.pdf
|
||||
2014-04-07,IAB Comments on NISTIR 7977,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-comments-on-nistir-7977/,https://www.iab.org/wp-content/IAB-uploads/2014/04/IAB-NIST7977-20140407.pdf
|
||||
2014-04-29,Comments to ICANN on the Transition of NTIA’s Stewardship of the IANA Functions,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-response-to-icann-iana-transition-proposal/,https://www.iab.org/wp-content/IAB-uploads/2014/04/iab-response-to-20140408-20140428a.pdf
|
||||
2016-05-27,"IAB Comments to US NTIA Request for Comments, ""The Benefits, Challenges, and Potential Roles for the Government in Fostering the Advancement of the Internet of Things""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comments-to-ntia-request-for-comments-the-benefits-challenges-and-potential-roles-for-the-government/,https://www.iab.org/wp-content/IAB-uploads/2016/05/ntia-iot-20160525.pdf
|
||||
2016-05-24,"IAB Chair Testifies before the United States Senate Committee on Commerce, Science, and Transportation on ""Examining the Multistakeholder Plan for Transitioning the Internet Assigned Number Authority""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-chair-statement-before-us-senate-committee-on-iana-transition/,https://www.iab.org/wp-content/IAB-uploads/2016/05/sullivan-to-senate-commerce-20160524.pdf
|
||||
2018-07-16,IAB Response to NTIA Notice of Inquiry on International Internet Policy Priorities,https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-response-to-ntia-notice-of-inquiry-on-international-internet-policy-priorities-response/,https://www.iab.org/wp-content/IAB-uploads/2018/07/IAB-response-to-the-2018-NTIA-Notice-of-Inquiry.pdf
|
||||
2018-09-09,Internet Architecture Board Comments on the Australian Assistance and Access Bill 2018,https://www.iab.org/documents/correspondence-reports-documents/2018-2/internet-architecture-board-comments-on-the-australian-assistance-and-access-bill-2018/,https://www.iab.org/wp-content/IAB-uploads/2018/09/IAB-Comments-on-Australian-Assistance-and-Access-Bill-2018.pdf
|
||||
2023-03-03,IAB Response to the Office of the High Commissioner for Human Rights Call for Input on “The relationship between human rights and technical standard-setting processes for new and emerging digital technologies”,https://www.iab.org/documents/correspondence-reports-documents/2023-2/iab-response-to-the-ohchr-call-for-input-on-the-relationship-between-human-rights-and-technical-standard/,https://www.iab.org/wp-content/IAB-uploads/2023/03/IAB-Response-to-OHCHR-consultation.pdf
|
||||
1998-12-09,"IAB Request to IANA for Delegating IPv6 Address Space, Mail Message, December 1998",https://www.iab.org/documents/correspondence-reports-documents/docs98/iab-request-to-iana-for-delegating-ipv6-address-space-mail-message-december-1998/,
|
||||
1998-12-18,"1998 Statements on Cryptography, Mail Message, December 1998.",https://www.iab.org/documents/correspondence-reports-documents/docs98/1998-statements-on-cryptography/,
|
||||
1999-02-22,Correspondence between Bradner and Dyson on Protocol Parameter Parameters,https://www.iab.org/documents/correspondence-reports-documents/docs99/correspondence-between-bradner-and-dyson-on-protocol-parameter-parameters/,
|
||||
1999-08-13,Comment on ICANN ASO membership,https://www.iab.org/documents/correspondence-reports-documents/docs99/comment-on-icann-aso-membership/,
|
||||
1999-10-19,Ad Hoc Group on Numbering,https://www.iab.org/documents/correspondence-reports-documents/docs99/ad-hoc-group-on-numbering/,
|
||||
2000-05-01,"IAB Statement on Infrastructure Domain and Subdomains, May 2000.",https://www.iab.org/documents/correspondence-reports-documents/docs2000/iab-statement-on-infrastructure-domain-and-subdomains-may-2000/,
|
||||
2002-05-01,"IETF and ITU-T Cooperation Arrangements, May 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/ietf-and-itu-t-cooperation-arrangements-may-2002/,
|
||||
2002-05-03,"IAB replyto ENUM liaison statement, May 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/enum-response/,
|
||||
2002-05-24,"Interim Approval for Internet Telephone Numbering System (ENUM) Provisioning, 24 May 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/enum-pr/,
|
||||
2002-06-01,"IAB response to ICANN Evolution and Reform, June 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/icann-response/,
|
||||
2002-09-01,"IAB response to ICANN Evolution and Reform Committee's Second Interim Report, September 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/icann-response-2/,
|
||||
2002-10-01,"IAB response to ICANN Evolution and Reform Committee's Final Implementation Report, October 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/icann-response-3/,
|
||||
2002-12-10,"IAB Response to RIRs request regarding 6bone address entries in ip6.arpa, December 2002",https://www.iab.org/documents/correspondence-reports-documents/docs2002/3ffe/,
|
||||
2003-01-03,"IETF Notice of Withdrawal from the Protocol Support Organization, January 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/icann-pso-notice/,
|
||||
2003-01-25,"IAB Response to Verisign GRS IDN Announcement, January 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/icann-vgrs-response/,
|
||||
2003-07-10,"Note: Unified Notification Protocol Considerations, July 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-07-10-iab-notification/,
|
||||
2003-08-01,Open Architectural Issues in the Development of the Internet,https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-08-architectural-issues/,
|
||||
2003-08-28,RFC Document editing/ queueing suggestion,https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-08-28-klensin-rfc-editor/,
|
||||
2003-09-02,"IAB Chair's announcement of an Advisory Committee, September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-02-adv-committee/,
|
||||
2003-09-19,"IAB Commentary: Architectural Concerns on the Use of DNS Wildcards, September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-20-dns-wildcards/,
|
||||
2003-09-24,"IAB to ICANN: IAB input related to the .cs code in ISO 3166, 24 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-icann-cs-code/,
|
||||
2003-09-24,"IAB to ISO: IAB comment on stability of ISO 3166 and other infrastructure standards, 24 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-iso-cs-code/,
|
||||
2003-09-25,"Correspondance to ISO concerning .cs code, and advice to ICANN, 25 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-icann-cs-code-2/,
|
||||
2003-09-25,"Correspondance to ISO concerning .cs code, and advice to ICANN, 25 September 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-25-iso-cs-code-2/,
|
||||
2003-09-26,ISO Codes,https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-09-23-isocodes/,
|
||||
2003-10-02,"IESG to IAB: Checking data for validity before usage in a protocol, 2 October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-02-iesg-dns-validity-check-query/,
|
||||
2003-10-14,"Survey of Current Security Work in the IETF, October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-14-security-survey/,
|
||||
2003-10-17,"IAB to ICANN SESAC:Wildcard entries in DNS domains, 17 October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-17-crocker-wildcards-2/,
|
||||
2003-10-17,"IAB note to Steve Crocker, Chair, ICANN Security and Stability Advisory Committee, October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-17-crocker-wildcards/,
|
||||
2003-10-18,"IAB concerns against permanent deployment of edge-based port filtering, October 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-10-18-edge-filters/,
|
||||
2003-11-08,"IAB Response to IESG architectural query: Checking data for validity before usage in protocol, November 2003",https://www.iab.org/documents/correspondence-reports-documents/docs2003/2003-11-08-iesg-dns-validity-check-query-response/,
|
||||
2004-01-19,"Number Resource Organisation (NRO) formation, 19 January 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-01-19-nro/,
|
||||
2004-01-22,"IAB to RIPE NCC:ENUM Administration, 22 January 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-01-22-enum-subcodes/,
|
||||
2004-02-09,"IAB to IANA: Instructions to IANA -Delegation of 2.0.0.2.ip6.arpa, 9 February, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-02-09-6to4-rev-delegation/,
|
||||
2004-02-26,The IETF -- what is it?,https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-02-26-ietf-defn/,
|
||||
2004-04-15,"IAB to ICANN: Validity checks for names, 15 April, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-04-15-icann-dns-validity-check/,
|
||||
2004-05-07,"IAB to IANA: IPv6 Allocation Policy , 7 May, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-05-07-iana-v6alloc/,
|
||||
2004-05-24,"IAB to IANA: Instructions to IANA -Delegation of 3.f.f.e.ip6.arpa, 24 May, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-05-24-3ffe-rev-delegation/,
|
||||
2004-05-27,"IAB to ICANN:Concerns regarding IANA Report, 27 May, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-05-27-iana-report/,
|
||||
2004-07-16,"Upcoming clarifications to RIPE NCC instructions for e164.arpa operation, 16 July 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-07-15-enum-instructions/,
|
||||
2004-07-16,"IANA Delegation Requests, 16 July 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-07-16-iana-delegation/,
|
||||
2004-08-06,OMA-IETF Standardization Collaboration,https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-08-draft-iab-oma-liaison-00/,
|
||||
2004-08-12,"IAB to RIPE NCC:Notice of revision of instructions concerning the ENUM Registry, 12 August, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-08-12-enum-instructions/,
|
||||
2004-08-12,"Response to your letter of August 4, 12 August 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-08-12-icann-wildcard/,
|
||||
2004-09-27,"IAB to ICANN:Report of Concerns over IANA Performance , 27 September, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-09-27-iana-concerns/,
|
||||
2004-09-27,"IAB Report of IETF IANA Functions , 27 September 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-09-27-iana-report/,
|
||||
2004-11-03,"IAB to IESG:Comments on Teredo , 3 November, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-11-03-teredo-comments/,
|
||||
2004-11-12,"IAB to ICANN:Response to ICANN Request for assistance with new TLD Policy , 12 November, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-11-12-icann-new-tld-policy/,
|
||||
2004-11-29,"The IETF and IPv6 Address Allocation , 29 November 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-11-29-ipv6-allocs/,
|
||||
2004-12-15,"IAB Comment to Internet AD:Comments on IANA Considerations in IPv6 ULA draft, 15 December, 2004",https://www.iab.org/documents/correspondence-reports-documents/docs2004/2004-12-15-ipv6-ula-iana-considerations/,
|
||||
2005-02-16,"IAB review of Structure of the IETF Administrative Support Activity, 16 February 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-02-16-iasa/,
|
||||
2005-08-26,"SiteFinder returns, 26 August 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-08-26-ssac-note/,
|
||||
2005-09-01,"Re: SiteFinder returns, 1 September 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-09-01-ssac-response/,
|
||||
2005-10-14,"IAB to ICANN: IAB comments on ICANN IDN Guidelines, 14 October 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-10-14-idn-guidelines/,
|
||||
2005-11-07,"IAB to ICANN – Nameserver change for e164.arpa, 7 November 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-11-07-nameserver-change/,
|
||||
2005-11-22,"IETF to ICANN – IANA structural status, 22 November 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-11-22-iana-structure/,
|
||||
2005-11-29,"IAB to IANA – Teredo prefix assignment, 29 November 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-11-29-teredo-prefix/,
|
||||
2005-12-22,"IAB to ICANN – dot arpa TLD management, 22 December 2005",https://www.iab.org/documents/correspondence-reports-documents/docs2005/2005-12-22-dot-arpa/,
|
||||
2006-03-06,"IAB Position on the IETF IANA Technical Parameter Function, 6 March 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/iab-iana-position/,
|
||||
2006-03-28,"IAB to ICANN – Name server changes for ip6.arpa, 28 March 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-03-28-nameserver-change/,
|
||||
2006-04-20,"IAB to IANA – Administrative contact information change for arpa, 20 April 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-04-20-update-to-administrative-contact-information-for-arpa-iana/,
|
||||
2006-04-20,"IAB to ITU TSB – FYI re contact info changes for e164.arpa, 20 April 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-04-20-update-to-contact-information-for-e164-arpa-hill/,
|
||||
2006-04-20,"IAB to IANA – Contact information changes for e164.arpa, 20 April 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-04-20-update-to-contact-information-for-e164-arpa-iana/,
|
||||
2006-05-15,"IAB to IANA – Request to IANA for status update on deployment of DNSSEC on IANA managed zones, 15 May 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-05-15-iab-request-to-iana-to-sign-dnssec-zones/,
|
||||
2006-06-07,"The IAB announces the mailing list for the discussion of the independent submissions process, 7 June 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-06-07-independent-submissions/,
|
||||
2006-06-19,"Procedural issues with liaison on nextsteps, 19 June 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-06-16-response-to-idn-liaison-issues/,
|
||||
2006-10-12,"The IAB sends a note to the Registry Services Technical Evaluation Panel on the use of wildcards in the .travel TLD, 12 October 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-10-12-rstep-note/,
|
||||
2006-10-19,"The IAB sends a note to the OIF Technical Committee Chair on IETF Protocol Extensions, 19 October 2006",https://www.iab.org/documents/correspondence-reports-documents/docs2006/2006-10-19-oifnote/,
|
||||
2007-05-21,"The IAB responds to ITU Consultation on Resolution 102, 21 May 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/2007-05-21-itu-resolution-102/,
|
||||
2007-07-05,"Correspondence from the RIPE NCC regarding deployment of DNSSEC in the E164.ARPA zone, 5 July 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/2007-07-05-ripe-ncc-dnssec-e164/,
|
||||
2007-07-24,"Correspondence from the IAB to the ITU-TSB Director regarding deployment of DNSSEC in the E164.ARPA zone, 24 July 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/2007-07-24-iab-itu-dnssec-e164/,
|
||||
2007-10-10,"Follow-up work on NAT-PT, 10 October 2007",https://www.iab.org/documents/correspondence-reports-documents/docs2007/follow-up-work-on-nat-pt/,
|
||||
2008-02-15,"Correspondence from the IAB to the National Telecommunications and Information Administration, US Department of Commerce regarding the ICANN/DoC Joint Project Agreement, 15 February 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-02-15-midterm-view-icann-doc-jpa/,
|
||||
2008-03-07,"The IAB’s response to ICANN’s solicitation on DNS stability, 7 March 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-03-07-icann-new-gtlds/,
|
||||
2008-06-04,Proposed RFC Editor Structure,https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-06-04-rfc-editor-model/,
|
||||
2008-08-16,"The IAB’s response to Geoff Huston’s request concerning 32-bit AS numbers, 16 August 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-08-16-32bit-as-huston/,
|
||||
2008-09-05,Proposed RFC Editor Structure,https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-09-05-rfc-editor-model/,
|
||||
2008-11-18,"The IAB’s correspondence with NTIA on DNSSEC deployment at the root, 18 November 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-11-18-dnssec-deployment-at-the-root/,
|
||||
2008-12-04,"IAB correspondence with Geoff Huston on TAs, IANA, RIRs et al.., 4 December 2008",https://www.iab.org/documents/correspondence-reports-documents/docs2008/2008-12-04-huston-tas-iana-rirs/,
|
||||
2009-06-02,"IAB correspondence with IANA on the Signing of .ARPA, 2 June 2009",https://www.iab.org/documents/correspondence-reports-documents/docs2009/2009-06-02-roseman-signing-by-iana-of-arpa/,
|
||||
2009-10-14,"IAB correspondence with ICANN on their “Scaling the Root” study., 14 October 2009",https://www.iab.org/documents/correspondence-reports-documents/docs2009/2009-10-14-icann-scaling-the-root/,
|
||||
2010-01-27,IAB statement on the RPKI,https://www.iab.org/documents/correspondence-reports-documents/docs2010/iab-statement-on-the-rpki/,
|
||||
2010-07-30,Transition of IN-ADDR.ARPA generation,https://www.iab.org/documents/correspondence-reports-documents/docs2010/transition-of-in-addr-arpa-generation/,
|
||||
2011-06-22,Response to ARIN's request for guidance regarding Draft Policy ARIN-2011-5,https://www.iab.org/documents/correspondence-reports-documents/2011-2/response-to-arins-request-for-guidance-regarding-draft-policy-arin-2011-5/,
|
||||
2011-07-25,"IAB Response to ""Some IESG Thoughts on Liaisons""",https://www.iab.org/documents/correspondence-reports-documents/2011-2/iab-response-to-some-iesg-thoughts-on-liaisons/,
|
||||
2011-09-16,Letter to the European Commission on Global Interoperability in Emergency Services,https://www.iab.org/documents/correspondence-reports-documents/2011-2/letter-to-the-european-commission-on-global-interoperability-in-emergency-services/,
|
||||
2012-02-08,"IAB Statement: ""The interpretation of rules in the ICANN gTLD Applicant Guidebook""",https://www.iab.org/documents/correspondence-reports-documents/2012-2/iab-statement-the-interpretation-of-rules-in-the-icann-gtld-applicant-guidebook/,
|
||||
2012-03-26,"Response to ICANN questions concerning ""The interpretation of rules in the ICANN gTLD Applicant Guidebook""",https://www.iab.org/documents/correspondence-reports-documents/2012-2/response-to-icann-questions-concerning-the-interpretation-of-rules-in-the-icann-gtld-applicant-guidebook/,
|
||||
2012-03-30,IAB Member Roles in Evaluating New Work Proposals,https://www.iab.org/documents/correspondence-reports-documents/2012-2/iab-member-roles-in-evaluating-new-work-proposals/,
|
||||
2012-08-29,Leading Global Standards Organizations Endorse ‘OpenStand’ Principles that Drive Innovation and Borderless Commerce,https://www.iab.org/documents/correspondence-reports-documents/2012-2/leading-global-standards-organizations-endorse-%e2%80%98openstand/,
|
||||
2013-03-28,IAB Response to RSSAC restructure document (28 March 2013),https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-response-to-rssac-restructure-document-28-march-2013/,
|
||||
2013-05-28,Consultation on Root Zone KSK Rollover from the IAB,https://www.iab.org/documents/correspondence-reports-documents/2013-2/consultation-on-root-zone-ksk-rollover-from-the-iab/,
|
||||
2013-07-10,IAB Statement: Dotless Domains Considered Harmful,https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-statement-dotless-domains-considered-harmful/,
|
||||
2013-07-16,IAB Response to ICANN Consultation on the Source of Policies & User Instructions for Internet Number Resource Requests,https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-response-to-iana-policies-user-instructions-25jun13/,
|
||||
2013-10-03,Statement from the IAB on the Strengths of the OpenStand Principles,https://www.iab.org/documents/correspondence-reports-documents/2013-2/statement-from-openstand-on-the-strengths-of-the-openstand-principles/,
|
||||
2013-10-07,Montevideo Statement on the Future of Internet Cooperation,https://www.iab.org/documents/correspondence-reports-documents/2013-2/montevideo-statement-on-the-future-of-internet-cooperation/,
|
||||
2013-11-27,IAB Statement on draft-farrell-perpass-attack-00,https://www.iab.org/documents/correspondence-reports-documents/2013-2/iab-statement-on-draft-farrell-perpass-attack-00/,
|
||||
2014-01-23,IAB Comments Regarding the IRTF CFRG chair,https://www.iab.org/documents/correspondence-reports-documents/2014-2/0123-iab-comments-regarding-the-irtf-cfrg-chair/,
|
||||
2014-02-14,"Statement from the I* Leaders Coordination Meeting, Santa Monica, 14 February 2014",https://www.iab.org/documents/correspondence-reports-documents/2014-2/statement-from-the-i-leaders-coordination-meeting-santa-monica-14-february-2014/,
|
||||
2014-03-11,Re: Guiding the Evolution of the IANA Protocol Parameter Registries,https://www.iab.org/documents/correspondence-reports-documents/2014-2/re-guiding-the-evolution-of-the-iana-protocol-parameter-registries/,
|
||||
2014-03-14,Internet Technical Leaders Welcome IANA Globalization Progress,https://www.iab.org/documents/correspondence-reports-documents/2014-2/internet-technical-leaders-welcome-iana-globalization-progress/,
|
||||
2014-05-13,I* Post-NETmundial Meeting Statement,https://www.iab.org/documents/correspondence-reports-documents/2014-2/i-post-netmundial-meeting-statement/,
|
||||
2014-06-05,Comments on ICANN Board Member Compensation from the IAB,https://www.iab.org/documents/correspondence-reports-documents/2014-2/comments-on-icann-board-member-compensation/,
|
||||
2014-11-13,IAB Statement on Internet Confidentiality,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-statement-on-internet-confidentiality/,
|
||||
2014-12-04,IAB statement on the NETmundial Initiative,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-statement-on-the-netmundial-initiative/,
|
||||
2014-12-17,IAB Comments on CSTD Report Mapping International Internet Public Policy Issues,https://www.iab.org/documents/correspondence-reports-documents/2014-2/iab-comments-on-cstd-report-mapping-international-public-policy-issues/,
|
||||
2015-02-11,IAB liaison to ICANN Root Server System Advisory Committee (RSSAC),https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-liaison-to-icann-root-server-system-advisory-council-rssac/,
|
||||
2015-02-11,IAB Statement on Identifiers and Unicode 7.0.0,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-identifiers-and-unicode-7-0-0/,
|
||||
2015-03-02,IAB Statement on Liaison Compensation,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-liaison-compensation/,
|
||||
2015-04-09,IAB Comments on The HTTPS-Only Standard,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-the-https-only-standard/,
|
||||
2015-06-03,IAB comments on CCWG-Accountability Draft Report,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-ccwg-accountability-draft-report/,
|
||||
2015-06-12,IAB Statement on the Trade in Security Technologies,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-statement-on-the-trade-in-security-technologies/,
|
||||
2015-06-24,"IAB Correspondence to U.S. Bureau of Industry and Security, re RIN 0694-AG49",https://www.iab.org/documents/correspondence-reports-documents/2015-2/rin-0694-ag49/,
|
||||
2015-09-07,Internet Architecture Board comments on the ICG Proposal,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-icg-proposal/,
|
||||
2015-09-09,IAB comments on the CCWG accountability 2d draft report,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-ccwg-accountability/,
|
||||
2015-10-07,IAB Comments to FCC on Rules regarding Authorization of Radiofrequency Equipment,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-fcc-15-92/,
|
||||
2015-12-16,IAB comments on the CCWG accountability 3d draft report,https://www.iab.org/documents/correspondence-reports-documents/2015-2/iab-comments-on-the-ccwg-accountability-3d-draft-report/,
|
||||
2016-01-13,"Comments from the Internet Architecture Board (IAB) on ""Registration Data Access Protocol (RDAP) Operational Profile for gTLD Registries and Registrars""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/comments-from-the-internet-architecture-board-iab-on-registration-data-access-protocol-rdap-operational-profile-for-gtld-registries-and-registrars/,
|
||||
2016-05-04,IAB comments on Draft New ICANN Bylaws,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comments-on-draft-new-icann-bylaws/,
|
||||
2016-05-11,IAB Comments on Proposed Changes to Internet Society Bylaws,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comments-on-proposed-changes-to-internet-society-bylaws/,
|
||||
2016-07-17,Comments from the IAB on LGRs for second level,https://www.iab.org/documents/correspondence-reports-documents/2016-2/comments-from-the-iab-on-lgrs-for-second-level/,
|
||||
2016-09-01,IAB statement on IANA Intellectual Property Rights,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-statement-on-iana-intellectual-property-rights/,
|
||||
2016-09-14,IAB Statement on the IANA Stewardship Transition,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-statement-on-the-iana-stewardship-transition/,
|
||||
2016-11-07,IAB Statement on IPv6,https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-statement-on-ipv6/,
|
||||
2016-12-07,"IAB comment on ""Revised Proposed Implementation of GNSO Thick Whois Consensus Policy Requiring Consistent Labeling and Display of RDDS (Whois) Output for All gTLDs""",https://www.iab.org/documents/correspondence-reports-documents/2016-2/iab-comment-on-revised-proposed-implementation-of-gnso-thick-whois-consensus-policy-requiring-consistent-labeling-and-display-of-rdds-whois-output-for-all-gtlds/,
|
||||
2017-01-04,IAB comments on Identifier Technology Health Indicators: Definition,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-comments-on-identifier-technology-health-indicators-definition/,
|
||||
2017-02-01,IAB Statement on OCSP Stapling,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-statement-on-ocsp-stapling/,
|
||||
2017-02-16,Follow up on barriers to entry blog post,https://www.iab.org/documents/correspondence-reports-documents/2017-2/follow-up-on-barriers-to-entry-blog-post/,
|
||||
2017-03-02,IAB Comments to United States NTIA on the Green Paper: Fostering the Advancement of the Internet of Things,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-comments-to-ntia-on-fostering-the-advancement-of-iot/,
|
||||
2017-03-30,Internet Architecture Board statement on the registration of special use names in the ARPA domain,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-statement-on-the-registration-of-special-use-names-in-the-arpa-domain/,
|
||||
2017-05-01,Comments from the IAB on IDN Implementation Guidelines,https://www.iab.org/documents/correspondence-reports-documents/2017-2/comments-from-the-iab-on-idn-implementation-guidelines/,
|
||||
2017-07-31,IAB Response to FCC-17-89,https://www.iab.org/documents/correspondence-reports-documents/2017-2/iab-response-to-fcc-17-89/,
|
||||
2018-03-15,IAB Statement on Identifiers and Unicode,https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-statement-on-identifiers-and-unicode/,
|
||||
2018-04-03,IAB Statement on the RPKI,https://www.iab.org/documents/correspondence-reports-documents/2018-2/iab-statement-on-the-rpki/,
|
||||
2019-05-02,Revised Operating Instructions for e164.arpa (ENUM),https://www.iab.org/documents/correspondence-reports-documents/2019-2/revised-operating-instructions-for-e164-arpa-enum/,
|
||||
2019-06-26,Comments on Evolving the Governance of the Root Server System,https://www.iab.org/documents/correspondence-reports-documents/2019-2/comments-on-evolving-the-governance-of-the-root-server-system/,
|
||||
2019-09-04,Avoiding Unintended Harm to Internet Infrastructure,https://www.iab.org/documents/correspondence-reports-documents/2019-2/avoiding-unintended-harm-to-internet-infrastructure/,
|
||||
2020-07-01,"IAB correspondence with the National Telecommunications and Information Administration (NTIA) on DNSSEC deployment for the Root Zone [Docket No. 100603240-0240-01], 1 July 2010",https://www.iab.org/documents/correspondence-reports-documents/docs2010/2010-07-01-alexander-dnssec-deployment-for-the-root-zone/,
|
||||
2020-09-29,IAB Comments on the Draft Final Report on the new gTLD Subsequent Procedures Policy Development Process,https://www.iab.org/documents/correspondence-reports-documents/2020-2/iab-comments-on-new-gtld-subsequent-procedures/,
|
||||
2021-07-14,IAB Statement on Inclusive Language in IAB Stream Documents,https://www.iab.org/documents/correspondence-reports-documents/2021-2/iab-statement-on-inclusive-language-in-iab-stream-documents/,
|
||||
2022-04-08,IAB comment on Mandated Browser Root Certificates in the European Union’s eIDAS Regulation on the Internet,https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-comment-on-mandated-browser-root-certificates-in-the-european-unions-eidas-regulation-on-the-internet/,
|
||||
2022-04-08,"IAB Comments on A Notice by the Federal Communications Commission on Secure Internet Routing, issued 03/11/2022",https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-comments-on-a-notice-by-the-federal-communications-commission-on-secure-internet-routing-issued-03-11-2022/,
|
||||
2022-07-08,IAB Statement to OSTP on Privacy-Enhancing Technologies,https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-statement-to-ostp-on-privacy-enhancing-technologies/,
|
||||
2022-11-21,IAB Comments on a notice by the Federal Trade Commission on “Trade Regulation Rule on Commercial Surveillance and Data Security” (16 CFR Part 464),https://www.iab.org/documents/correspondence-reports-documents/2022-2/iab-comments-on-a-notice-by-the-federal-trade-commission-on-trade-regulation-rule-on-commercial-surveillance-and-data-security-16-cfr-part-464/,
|
||||
'''

    rows = []
    date_count = defaultdict(lambda: 0)
    with io.StringIO(csv_dump) as csv_file:
        reader = csv.reader(csv_file)
        for row in reader:
            date = row[0]
            if date_count[date] == 0:
                row.insert(0, "")
            else:
                row.insert(0, date_count[date])
            date_count[date] += 1
            rows.append(row)
    return rows
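The leading column that load_spreadsheet() prepends is what lets the import loop earlier in this command tell apart statements published on the same day: the first row for a given date gets an empty string, later rows get 1, 2, and so on, and that value becomes the file_fix suffix in the scraped file name. A minimal sketch of that consumption, with the command's surrounding logic elided and the exact column unpacking assumed from the CSV layout above:

    # Illustrative only; names follow the loop above, the unpacking is an assumption.
    for index, row in enumerate(load_spreadsheet()):
        file_fix, date_string, title, url, extra = row
        base_sourcename = f"{date_string}-{file_fix}" if file_fix != "" else date_string
        # Two statements dated 2004-09-27 would resolve to
        # iab_statements/2004-09-27.md and iab_statements/2004-09-27-1.md (assumed layout).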
90
ietf/doc/migrations/0007_alter_docevent_type.py
Normal file
90
ietf/doc/migrations/0007_alter_docevent_type.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
# Generated by Django 4.2.4 on 2023-08-23 21:38
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("doc", "0006_statements"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="docevent",
|
||||
name="type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("new_revision", "Added new revision"),
|
||||
("new_submission", "Uploaded new revision"),
|
||||
("changed_document", "Changed document metadata"),
|
||||
("added_comment", "Added comment"),
|
||||
("added_message", "Added message"),
|
||||
("edited_authors", "Edited the documents author list"),
|
||||
("deleted", "Deleted document"),
|
||||
("changed_state", "Changed state"),
|
||||
("changed_stream", "Changed document stream"),
|
||||
("expired_document", "Expired document"),
|
||||
("extended_expiry", "Extended expiry of document"),
|
||||
("requested_resurrect", "Requested resurrect"),
|
||||
("completed_resurrect", "Completed resurrect"),
|
||||
("changed_consensus", "Changed consensus"),
|
||||
("published_rfc", "Published RFC"),
|
||||
(
|
||||
"added_suggested_replaces",
|
||||
"Added suggested replacement relationships",
|
||||
),
|
||||
(
|
||||
"reviewed_suggested_replaces",
|
||||
"Reviewed suggested replacement relationships",
|
||||
),
|
||||
("changed_action_holders", "Changed action holders for document"),
|
||||
("changed_group", "Changed group"),
|
||||
("changed_protocol_writeup", "Changed protocol writeup"),
|
||||
("changed_charter_milestone", "Changed charter milestone"),
|
||||
("initial_review", "Set initial review time"),
|
||||
("changed_review_announcement", "Changed WG Review text"),
|
||||
("changed_action_announcement", "Changed WG Action text"),
|
||||
("started_iesg_process", "Started IESG process on document"),
|
||||
("created_ballot", "Created ballot"),
|
||||
("closed_ballot", "Closed ballot"),
|
||||
("sent_ballot_announcement", "Sent ballot announcement"),
|
||||
("changed_ballot_position", "Changed ballot position"),
|
||||
("changed_ballot_approval_text", "Changed ballot approval text"),
|
||||
("changed_ballot_writeup_text", "Changed ballot writeup text"),
|
||||
("changed_rfc_editor_note_text", "Changed RFC Editor Note text"),
|
||||
("changed_last_call_text", "Changed last call text"),
|
||||
("requested_last_call", "Requested last call"),
|
||||
("sent_last_call", "Sent last call"),
|
||||
("scheduled_for_telechat", "Scheduled for telechat"),
|
||||
("iesg_approved", "IESG approved document (no problem)"),
|
||||
("iesg_disapproved", "IESG disapproved document (do not publish)"),
|
||||
("approved_in_minute", "Approved in minute"),
|
||||
("iana_review", "IANA review comment"),
|
||||
("rfc_in_iana_registry", "RFC is in IANA registry"),
|
||||
(
|
||||
"rfc_editor_received_announcement",
|
||||
"Announcement was received by RFC Editor",
|
||||
),
|
||||
("requested_publication", "Publication at RFC Editor requested"),
|
||||
(
|
||||
"sync_from_rfc_editor",
|
||||
"Received updated information from RFC Editor",
|
||||
),
|
||||
("requested_review", "Requested review"),
|
||||
("assigned_review_request", "Assigned review request"),
|
||||
("closed_review_request", "Closed review request"),
|
||||
("closed_review_assignment", "Closed review assignment"),
|
||||
("downref_approved", "Downref approved"),
|
||||
("posted_related_ipr", "Posted related IPR"),
|
||||
("removed_related_ipr", "Removed related IPR"),
|
||||
(
|
||||
"removed_objfalse_related_ipr",
|
||||
"Removed Objectively False related IPR",
|
||||
),
|
||||
("changed_editors", "Changed BOF Request editors"),
|
||||
("published_statement", "Published statement"),
|
||||
],
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
]
|
|
@ -963,7 +963,7 @@ class Document(DocumentInfo):
    def displayname_with_link(self):
        return mark_safe('<a href="%s">%s-%s</a>' % (self.get_absolute_url(), self.name , self.rev))

    def ipr(self,states=('posted','removed')):
    def ipr(self,states=settings.PUBLISH_IPR_STATES):
        """Returns the IPR disclosures against this document (as a queryset over IprDocRel)."""
        from ietf.ipr.models import IprDocRel
        return IprDocRel.objects.filter(document__docs=self, disclosure__state__in=states)

@ -973,7 +973,7 @@ class Document(DocumentInfo):
        document directly or indirectly obsoletes or replaces
        """
        from ietf.ipr.models import IprDocRel
        iprs = IprDocRel.objects.filter(document__in=list(self.docalias.all())+self.all_related_that_doc(('obs','replaces'))).filter(disclosure__state__in=('posted','removed')).values_list('disclosure', flat=True).distinct()
        iprs = IprDocRel.objects.filter(document__in=list(self.docalias.all())+self.all_related_that_doc(('obs','replaces'))).filter(disclosure__state__in=settings.PUBLISH_IPR_STATES).values_list('disclosure', flat=True).distinct()
        return iprs

    def future_presentations(self):

@ -1288,6 +1288,7 @@ EVENT_TYPES = [
    # IPR events
    ("posted_related_ipr", "Posted related IPR"),
    ("removed_related_ipr", "Removed related IPR"),
    ("removed_objfalse_related_ipr", "Removed Objectively False related IPR"),

    # Bofreq Editor events
    ("changed_editors", "Changed BOF Request editors"),

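Both ipr() hunks above swap the hard-coded ('posted','removed') pair for settings.PUBLISH_IPR_STATES, so the set of disclosure states that count as published is defined once in settings. A short sketch of the effect, assuming the setting defaults to the same two slugs (that default value is an assumption here, not confirmed by the diff):

    # settings.py (assumed default value)
    PUBLISH_IPR_STATES = ["posted", "removed"]

    # Callers keep working unchanged, but the policy now lives in settings.
    doc = Document.objects.get(name="draft-example-foo")  # hypothetical document name
    published = doc.ipr()                                 # driven by settings.PUBLISH_IPR_STATES
    posted_only = doc.ipr(states=["posted"])              # per-call override still possible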
@ -23,7 +23,7 @@ import ietf.review.mailarch
|
|||
|
||||
from ietf.doc.factories import ( NewRevisionDocEventFactory, IndividualDraftFactory, WgDraftFactory,
|
||||
WgRfcFactory, ReviewFactory, DocumentFactory)
|
||||
from ietf.doc.models import ( Document, DocumentAuthor, RelatedDocument, DocEvent, ReviewRequestDocEvent,
|
||||
from ietf.doc.models import ( DocumentAuthor, RelatedDocument, DocEvent, ReviewRequestDocEvent,
|
||||
ReviewAssignmentDocEvent, )
|
||||
from ietf.group.factories import RoleFactory, ReviewTeamFactory
|
||||
from ietf.group.models import Group
|
||||
|
@ -914,7 +914,8 @@ class ReviewTests(TestCase):
|
|||
date_today().isoformat(),
|
||||
]
|
||||
review_name = "-".join(c for c in name_components if c).lower()
|
||||
Document.objects.create(name=review_name,type_id='review',group=assignment.review_request.team)
|
||||
|
||||
ReviewFactory(name=review_name,type_id='review',group=assignment.review_request.team)
|
||||
|
||||
r = self.client.post(url, data={
|
||||
"result": ReviewResultName.objects.get(reviewteamsettings_review_results_set__group=assignment.review_request.team, slug="ready").pk,
|
||||
|
@ -930,10 +931,9 @@ class ReviewTests(TestCase):
|
|||
})
|
||||
self.assertEqual(r.status_code, 302)
|
||||
r2 = self.client.get(r.url)
|
||||
# FIXME-LARS: this fails when the tests are run with --debug-mode, i.e., DEBUG is set:
|
||||
if not settings.DEBUG:
|
||||
self.assertEqual(len(r2.context['messages']),1)
|
||||
self.assertIn('Attempt to save review failed', list(r2.context['messages'])[0].message)
|
||||
self.assertEqual(r2.status_code, 200)
|
||||
self.assertEqual(len(r2.context['messages']),1)
|
||||
self.assertIn('Attempt to save review failed', list(r2.context['messages'])[0].message)
|
||||
|
||||
def test_partially_complete_review(self):
|
||||
assignment, url = self.setup_complete_review_test()
|
||||
|
|
|
@ -154,10 +154,24 @@ class ActionHoldersTests(TestCase):
|
|||
self.assertGreaterEqual(doc.documentactionholder_set.get(person=self.ad).time_added, right_now)
|
||||
|
||||
def test_update_action_holders_add_tag_need_rev(self):
|
||||
"""Adding need-rev tag adds authors as action holders"""
|
||||
"""Adding need-rev tag drops AD and adds authors as action holders"""
|
||||
doc = self.doc_in_iesg_state('pub-req')
|
||||
first_author = self.authors[0]
|
||||
doc.action_holders.add(first_author)
|
||||
doc.action_holders.add(doc.ad)
|
||||
self.assertCountEqual(doc.action_holders.all(), [first_author, doc.ad])
|
||||
self.update_doc_state(doc,
|
||||
doc.get_state('draft-iesg'),
|
||||
add_tags=['need-rev'],
|
||||
remove_tags=None)
|
||||
self.assertCountEqual(doc.action_holders.all(), self.authors)
|
||||
self.assertNotIn(self.ad, doc.action_holders.all())
|
||||
|
||||
# Check case where an author is ad
|
||||
doc = self.doc_in_iesg_state('pub-req')
|
||||
doc.ad = first_author
|
||||
doc.save()
|
||||
doc.action_holders.add(first_author)
|
||||
self.assertCountEqual(doc.action_holders.all(), [first_author])
|
||||
self.update_doc_state(doc,
|
||||
doc.get_state('draft-iesg'),
|
||||
|
@ -175,6 +189,12 @@ class ActionHoldersTests(TestCase):
|
|||
remove_tags=None)
|
||||
self.assertCountEqual(doc.action_holders.all(), self.authors)
|
||||
|
||||
def test_update_action_holders_add_tag_ad_f_up(self):
|
||||
doc = self.doc_in_iesg_state('pub-req')
|
||||
self.assertEqual(doc.action_holders.count(), 0)
|
||||
self.update_doc_state(doc, doc.get_state('draft-iesg'), add_tags=['ad-f-up'])
|
||||
self.assertCountEqual(doc.action_holders.all(), [self.ad])
|
||||
|
||||
def test_update_action_holders_remove_tag_need_rev(self):
|
||||
"""Removing need-rev tag drops authors as action holders"""
|
||||
doc = self.doc_in_iesg_state('pub-req')
|
||||
|
@ -189,13 +209,14 @@ class ActionHoldersTests(TestCase):
|
|||
def test_update_action_holders_add_tag_need_rev_ignores_non_authors(self):
|
||||
"""Adding need-rev tag does not affect existing action holders"""
|
||||
doc = self.doc_in_iesg_state('pub-req')
|
||||
doc.action_holders.add(self.ad)
|
||||
self.assertCountEqual(doc.action_holders.all(),[self.ad])
|
||||
other_person = PersonFactory()
|
||||
doc.action_holders.add(other_person)
|
||||
self.assertCountEqual(doc.action_holders.all(),[other_person])
|
||||
self.update_doc_state(doc,
|
||||
doc.get_state('draft-iesg'),
|
||||
add_tags=['need-rev'],
|
||||
remove_tags=None)
|
||||
self.assertCountEqual(doc.action_holders.all(), [self.ad] + self.authors)
|
||||
self.assertCountEqual(doc.action_holders.all(), [other_person] + self.authors)
|
||||
|
||||
def test_update_action_holders_remove_tag_need_rev_ignores_non_authors(self):
|
||||
"""Removing need-rev tag does not affect non-author action holders"""
|
||||
|
|
|
@ -12,6 +12,7 @@ import re
import textwrap

from collections import defaultdict, namedtuple, Counter
from dataclasses import dataclass
from typing import Union
from zoneinfo import ZoneInfo

@ -460,6 +461,21 @@ def add_action_holder_change_event(doc, by, prev_set, reason=None):
    )


@dataclass
class TagSetComparer:
    before: set[str]
    after: set[str]

    def changed(self):
        return self.before != self.after

    def added(self, tag):
        return tag in self.after and tag not in self.before

    def removed(self, tag):
        return tag in self.before and tag not in self.after


def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None, new_tags=None):
    """Update the action holders for doc based on state transition

@ -473,34 +489,45 @@ def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None,
    if prev_state and new_state:
        assert prev_state.type_id == new_state.type_id

    # Convert tags to sets of slugs
    prev_tag_slugs = {t.slug for t in (prev_tags or [])}
    new_tag_slugs = {t.slug for t in (new_tags or [])}
    # Convert tags to sets of slugs
    tags = TagSetComparer(
        before={t.slug for t in (prev_tags or [])},
        after={t.slug for t in (new_tags or [])},
    )

    # Do nothing if state / tag have not changed
    if (prev_state == new_state) and (prev_tag_slugs == new_tag_slugs):
    if (prev_state == new_state) and not tags.changed():
        return None

    # Remember original list of action holders to later check if it changed
    prev_set = list(doc.action_holders.all())
    # Only draft-iesg states are of interest (for now)
    if (prev_state != new_state) and (getattr(new_state, 'type_id') == 'draft-iesg'):

        # Update the action holders. To get this right for people with more
        # than one relationship to the document, do removals first, then adds.
        # Remove outdated action holders
    iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg")
    if iesg_state_changed:
        # Clear the action_holders list on a state change. This will reset the age of any that get added back.
        doc.action_holders.clear()
        if doc.ad and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES:
            # Default to responsible AD for states other than these
    if tags.removed("need-rev"):
        # Removed the 'need-rev' tag - drop authors from the action holders list
        DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete()
    elif tags.added("need-rev"):
        # Remove the AD if we're asking for a new revision
        DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete()

    # Add new action holders
    if doc.ad:
        # AD is an action holder unless specified otherwise for the new state
        if iesg_state_changed and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES:
            doc.action_holders.add(doc.ad)

    if prev_tag_slugs != new_tag_slugs:
        # If we have added or removed the need-rev tag, add or remove authors as action holders
        if ('need-rev' in prev_tag_slugs) and ('need-rev' not in new_tag_slugs):
            # Removed the 'need-rev' tag - drop authors from the action holders list
            DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete()
        elif ('need-rev' not in prev_tag_slugs) and ('need-rev' in new_tag_slugs):
            # Added the 'need-rev' tag - add authors to the action holders list
            for auth in doc.authors():
                if not doc.action_holders.filter(pk=auth.pk).exists():
                    doc.action_holders.add(auth)
        # If AD follow-up is needed, make sure they are an action holder
        if tags.added("ad-f-up"):
            doc.action_holders.add(doc.ad)
        # Authors get the action if a revision is needed
        if tags.added("need-rev"):
            for auth in doc.authors():
                doc.action_holders.add(auth)

    # Now create an event if we changed the set
    return add_action_holder_change_event(

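TagSetComparer, added above, just wraps the before/after slug sets so the tag-transition checks read as added()/removed() instead of repeated set membership tests. A self-contained sketch mirroring the class as shown in the hunk, with plain strings standing in for the DocTagName slugs:

    from dataclasses import dataclass

    @dataclass
    class TagSetComparer:  # mirrors the dataclass in the hunk above
        before: set[str]
        after: set[str]

        def changed(self):
            return self.before != self.after

        def added(self, tag):
            return tag in self.after and tag not in self.before

        def removed(self, tag):
            return tag in self.before and tag not in self.after

    # A transition that adds 'need-rev' and drops 'ad-f-up':
    tags = TagSetComparer(before={"ad-f-up"}, after={"need-rev"})
    assert tags.changed()
    assert tags.added("need-rev") and tags.removed("ad-f-up")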
@ -749,6 +749,7 @@ def docs_for_ad(request, name):
        if (
            not ballot
            or doc.get_state_slug("draft") == "repl"
            or doc.get_state_slug("draft-iesg") == "defer"
            or (doc.telechat_date() and doc.telechat_date() > timezone.now().date())
        ):
            continue

@ -801,7 +802,7 @@ def drafts_in_iesg_process(request):

def recent_drafts(request, days=7):
    slowcache = caches['slowpages']
    cache_key = f'recentdraftsview{days}'
    cache_key = f'recentdraftsview{days}'
    cached_val = slowcache.get(cache_key)
    if not cached_val:
        since = timezone.now()-datetime.timedelta(days=days)

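recent_drafts() above follows the usual check-then-fill pattern against the 'slowpages' cache alias. A generic sketch of that pattern; the query helper and the timeout are illustrative assumptions, not the view's actual values:

    from django.core.cache import caches

    def recent_drafts_data(days=7):
        slowcache = caches["slowpages"]
        cache_key = f"recentdraftsview{days}"
        cached_val = slowcache.get(cache_key)
        if not cached_val:
            cached_val = compute_recent_drafts(days)       # hypothetical helper
            slowcache.set(cache_key, cached_val, 15 * 60)  # timeout chosen for illustration
        return cached_val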
@ -25,6 +25,7 @@ class GroupFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Group
        django_get_or_create = ('acronym',)
        skip_postgeneration_save = True

    name = factory.Faker('sentence',nb_words=6)
    acronym = factory.Sequence(lambda n: 'acronym%d' %n)

@ -87,6 +88,7 @@ class DatelessGroupMilestoneFactory(BaseGroupMilestoneFactory):
class GroupHistoryFactory(factory.django.DjangoModelFactory):
    class Meta:
        model=GroupHistory
        skip_postgeneration_save = True

    time = lambda: timezone.now()
    group = factory.SubFactory(GroupFactory, state_id='active')

@ -138,6 +140,7 @@ class AppealFactory(factory.django.DjangoModelFactory):
class AppealArtifactFactory(factory.django.DjangoModelFactory):
    class Meta:
        model=AppealArtifact
        skip_postgeneration_save = True

    appeal = factory.SubFactory(AppealFactory)
    artifact_type = factory.SubFactory("ietf.name.factories.AppealArtifactTypeNameFactory", slug="appeal")

@ -158,4 +161,5 @@ class AppealArtifactFactory(factory.django.DjangoModelFactory):
        if isinstance(using, str):
            using = using.encode("utf-8")
        obj.bits = memoryview(using)
        obj.save()

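The factories above all gain Meta.skip_postgeneration_save = True. In recent factory_boy releases that option stops the factory from re-saving the object after post-generation hooks run, so a hook that saves explicitly (like the obj.save() just above) performs the only extra write. A minimal sketch under that assumption; the factory name and the post-generation hook are hypothetical:

    import factory
    from ietf.group.models import Group

    class QuietGroupFactory(factory.django.DjangoModelFactory):
        class Meta:
            model = Group
            skip_postgeneration_save = True  # no automatic re-save after hooks

        acronym = factory.Sequence(lambda n: f"acronym{n}")

        @factory.post_generation
        def rename(obj, create, extracted, **kwargs):
            if create and extracted:
                obj.name = extracted
                obj.save()  # explicit save is now the only post-generation write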
@ -1,205 +0,0 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
import debug # pyflakes: ignore
|
||||
|
||||
import datetime
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from ietf.group.models import Appeal, AppealArtifact
|
||||
|
||||
from ietf.name.models import AppealArtifactTypeName
|
||||
|
||||
|
||||
PDF_FILES = [
|
||||
"2006-01-04-appeal.pdf",
|
||||
"2006-08-24-appeal.pdf",
|
||||
"2006-09-11-appeal.pdf",
|
||||
"2008-11-29-appeal.pdf",
|
||||
"2010-06-07-appeal.pdf",
|
||||
"2010-06-07-response.pdf",
|
||||
"2013-07-08-appeal.pdf",
|
||||
"2015-06-22-appeal.pdf",
|
||||
"2019-01-31-appeal.pdf",
|
||||
"2019-01-31-response.pdf",
|
||||
]
|
||||
|
||||
NAME_PART_MAP = {
|
||||
"appeal": "appeal",
|
||||
"response": "response",
|
||||
"appeal_with_response": "response",
|
||||
"reply_to_response": "reply",
|
||||
}
|
||||
|
||||
|
||||
def bits_name(date, part):
|
||||
part_type = part["type"]
|
||||
name_fragment = NAME_PART_MAP[part_type]
|
||||
prefix = f"{date:%Y-%m-%d}-{name_fragment}"
|
||||
if f"{prefix}.pdf" in PDF_FILES:
|
||||
ext = "pdf"
|
||||
else:
|
||||
ext = "md"
|
||||
return f"{prefix}.{ext}"
|
||||
|
||||
|
||||
def date_from_string(datestring):
|
||||
year, month, day = [int(part) for part in datestring.split("-")]
|
||||
return datetime.date(year, month, day)
|
||||
|
||||
|
||||
def work_to_do():
|
||||
# Taken from https://www.iab.org/appeals/ on 2023-08-24 - some lines carved out below as exceptions
|
||||
input = """
|
||||
2020-07-31 IAB appeal for arpa assignment (Timothy McSweeney) IAB Response (2020-08-26)
|
||||
2019-01-31 An appeal to make the procedure related to Independent Submission Stream more transparent (Shyam Bandyopadhyay) IAB Response (2019-03-06)
|
||||
2015-06-22 Appeal to the IAB concerning the IESG response to his appeal concerning the IESG approval of the “draft-ietf-ianaplan-icg-response” (JFC Morfin) IAB Response (2015-07-08)
2013-07-08 Appeal to the IAB irt. RFC 6852 (JFC Morfin) IAB Response (2013-07-17)
2010-06-07 Appeal over the IESG Publication of the IDNA2008 Document Set Without Appropriate Explanation to the Internet Community (JFC Morfin) IAB Response (2010-08-20)
2008-11-29 Appeal to the IAB Concerning the Way Users Are Not Permitted To Adequately Contribute to the IETF (JFC Morfin) IAB Response (2009-01-28)
2006-10-10 Complaints about suspension from the ietf@ietf.org mailing list (Todd Glassey) IAB Response (2006-10-31)
2006-09-11 Appeal to the IAB over IESG dismissed appeals from J-F C. Morfin (JFC Morfin) IAB Response (2006-12-05)
2006-09-10 Appeal of IESG Decision of July 10, 2006 from Dean Anderson (Dean Anderson) IAB Response (2006-09-27)
2006-08-24 Appeal Against the decision to consider expediting an RFC Publication from J-F C. Morfin (JFC Morfin) IAB Response (2006-09-07)
2006-04-18 Appeal Against IESG PR-Action from Dean Anderson (Dean Anderson) IAB Response (2006-07-13)
2006-02-08 Appeal Against IESG Decision by Julian Mehnle (Julian Mehnle) IAB Response (2006-03-02)
2006-01-04 Appeal Against IESG Decision by Jefsey Morfin (JFC Morfin) IAB Response (2006-01-31)
2003-01-04 Appeal against IESG decision (Robert Elz) IAB Response (includes original appeal)(2003-02-15)
2000-11-15 Appeal Against IESG Action by Mr. D J Bernstein (D J Bernstein) IAB Response (2001-02-26)
1999-10-23 Appeal against IESG Inaction by W.A. Simpson (William Allen Simpson) IAB Response (2000-01-11)
1999-05-01 Appeal against IESG action (William Allen Simpson) IAB Response (1999-10-05)
1996-03-06 Appeal SNMPv2 SMI Appeal by Mr. David T. Perkins, IAB consideration (David Perkins) IAB Response (includes original appeal) (1996-03-06)
"""

    work = []

    for line in input.split("\n"):
        line = line.strip()
        if line == "":
            continue
        appeal_date = line[:10]
        response_date = line[-11:-1]
        title = line[11:-12].strip().split(")")[0] + ")"
        item = dict(title=title, date=appeal_date, parts=[])
        if appeal_date in [
            "2006-10-10",
            "2000-11-15",
            "1999-10-23",
            "1999-05-01",
            "1996-03-06",
        ]:
            item["parts"].append(dict(type="appeal_with_response", date=response_date))
        else:
            item["parts"].append(dict(type="appeal", date=appeal_date))
            item["parts"].append(dict(type="response", date=response_date))
        work.append(item)

    # Hand building the items for the following
    # exceptions="""
    # 2003-10-09 Appeal to the IAB on the site-local issue (Tony Hain)
    #            IAB Response (2003-11-12)
    #            Tony Hain reply to IAB Response (2003-11-18)
    # 1995-02-18 (etc.) Appeal Against IESG Inaction by Mr. Dave Cocker, Mr W. Simpson (Dave Crocker, William Allen Simpson) IAB Response (1995-04-04 and 1995-04-05)
    # """
    item = dict(
        title="Appeal to the IAB on the site-local issue (Tony Hain)",
        date="2003-10-09",
        parts=[],
    )
    item["parts"].append(
        dict(
            type="appeal",
            date="2003-10-09",
        )
    )
    item["parts"].append(
        dict(
            type="response",
            date="2003-11-12",
        )
    )
    item["parts"].append(
        dict(
            type="reply_to_response",
            date="2003-11-18",
        )
    )
    work.append(item)

    item = dict(
        title="Appeal Against IESG Inaction by Mr. Dave Cocker, Mr W. Simpson (Dave Crocker, William Allen Simpson)",
        date="1995-02-18",
        parts=[],
    )
    item["parts"].append(
        dict(
            type="appeal",
            date="1995-02-18",
        )
    )
    item["parts"].append(
        dict(
            type="response",
            date="1995-04-05",
        )
    )
    work.append(item)

    for item in work:
        item["date"] = date_from_string(item["date"])
        for part in item["parts"]:
            part["date"] = date_from_string(part["date"])

    work.sort(key=lambda o: o["date"])

    return work


class Command(BaseCommand):
    help = "Performs a one-time import of IAB appeals"

    def handle(self, *args, **options):
        tmpdir = tempfile.mkdtemp()
        process = subprocess.Popen(
            ["git", "clone", "https://github.com/kesara/iab-scraper.git", tmpdir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()
        if not Path(tmpdir).joinpath("iab_appeals", "1995-02-18-appeal.md").exists():
            print("Git clone of the iab-scraper directory did not go as expected")
            print("stdout:", stdout)
            print("stderr:", stderr)
            print(f"Clean up {tmpdir} manually")
            exit(-1)

        work = work_to_do()

        for item in work:
            # IAB is group 7
            appeal = Appeal.objects.create(name=item["title"], date=item["date"], group_id=7)
            for part in item["parts"]:
                bits_file_name = bits_name(item["date"], part)
                if bits_file_name.endswith(".pdf"):
                    content_type = "application/pdf"
                else:
                    content_type = "text/markdown;charset=utf-8"
                with Path(tmpdir).joinpath("iab_appeals", bits_file_name).open(
                    "rb"
                ) as source_file:
                    bits = source_file.read()
                artifact_type = AppealArtifactTypeName.objects.get(slug=part["type"])
                AppealArtifact.objects.create(
                    appeal=appeal,
                    artifact_type=artifact_type,
                    date=part["date"],
                    content_type=content_type,
                    bits=bits,
                )

        shutil.rmtree(tmpdir)
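Note: the command above calls two helpers, date_from_string() and bits_name(), that are defined earlier in this file and are not part of the hunk shown here. The sketch below is only an illustration of what they are assumed to do, inferred from the calls above and from the "1995-02-18-appeal.md" existence check; the file-naming scheme in particular is an assumption, not the committed code.

# Hypothetical sketch only; the real helpers live earlier in this management command.
import datetime

def date_from_string(datestring):
    # "2006-10-10" -> datetime.date(2006, 10, 10)
    return datetime.datetime.strptime(datestring, "%Y-%m-%d").date()

def bits_name(appeal_date, part):
    # Assumed naming convention for files in the scraped iab_appeals directory,
    # e.g. "1995-02-18-appeal.md". Some scraped artifacts may be PDFs instead,
    # which is why the caller checks for a ".pdf" suffix; the real helper may
    # also use part["date"] for response and reply files.
    suffix = {
        "appeal": "appeal",
        "appeal_with_response": "appeal",
        "response": "response",
        "reply_to_response": "reply",
    }[part["type"]]
    return f"{appeal_date:%Y-%m-%d}-{suffix}.md"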
2 ietf/idindex/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

2 ietf/iesg/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

2 ietf/ietfauth/.gitignore vendored
@@ -1,2 +0,0 @@
/*.swp
/*.pyc

@@ -14,6 +14,7 @@ from ietf.person.factories import UserFactory, PersonFactory
class OidClientRecordFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = OidClientRecord
        skip_postgeneration_save = True

    name = factory.Faker('company')
    owner = factory.SubFactory(UserFactory)
2 ietf/ipr/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

@@ -26,6 +26,7 @@ def _fake_patent_info():
class IprDisclosureBaseFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = IprDisclosureBase
        skip_postgeneration_save = True

    by = factory.SubFactory('ietf.person.factories.PersonFactory')
    compliant = True
@@ -1,7 +1,8 @@
# Copyright The IETF Trust 2007-2020, All Rights Reserved
# Copyright The IETF Trust 2007-2023, All Rights Reserved
# -*- coding: utf-8 -*-

from django.conf import settings
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from django.urls import reverse_lazy

@@ -19,7 +20,7 @@ class LatestIprDisclosuresFeed(Feed):
    feed_url = "/feed/ipr/"

    def items(self):
        return IprDisclosureBase.objects.filter(state__in=('posted','removed')).order_by('-time')[:30]
        return IprDisclosureBase.objects.filter(state__in=settings.PUBLISH_IPR_STATES).order_by('-time')[:30]

    def item_title(self, item):
        return mark_safe(item.title)
@@ -1,4 +1,4 @@
# Copyright The IETF Trust 2007-2020, All Rights Reserved
# Copyright The IETF Trust 2007-2023, All Rights Reserved
# -*- coding: utf-8 -*-

@@ -231,6 +231,7 @@ class IprEvent(models.Model):
        event_type_map = {
            'posted': 'posted_related_ipr',
            'removed': 'removed_related_ipr',
            'removed_objfalse': 'removed_objfalse_related_ipr',
        }
        if self.type_id in event_type_map:
            related_docs = set()  # related docs, no duplicates
@@ -1,11 +1,12 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# Copyright The IETF Trust 2007-2023, All Rights Reserved
#
from django.conf import settings
from django.contrib.sitemaps import GenericSitemap
from ietf.ipr.models import IprDisclosureBase

# changefreq is "never except when it gets updated or withdrawn"
# so skip giving one

queryset = IprDisclosureBase.objects.filter(state__in=('posted','removed'))
queryset = IprDisclosureBase.objects.filter(state__in=settings.PUBLISH_IPR_STATES)
archive = {'queryset':queryset, 'date_field': 'time', 'allow_empty':True }
IPRMap = GenericSitemap(archive)  # type: ignore
@@ -126,6 +126,11 @@ class IprTests(TestCase):
        r = self.client.get(urlreverse("ietf.ipr.views.show", kwargs=dict(id=ipr.pk)))
        self.assertContains(r, 'This IPR disclosure was removed')

    def test_show_removed_objfalse(self):
        ipr = HolderIprDisclosureFactory(state_id='removed_objfalse')
        r = self.client.get(urlreverse("ietf.ipr.views.show", kwargs=dict(id=ipr.pk)))
        self.assertContains(r, 'This IPR disclosure was removed as objectively false')

    def test_ipr_history(self):
        ipr = HolderIprDisclosureFactory()
        r = self.client.get(urlreverse("ietf.ipr.views.history", kwargs=dict(id=ipr.pk)))

@@ -204,6 +209,24 @@ class IprTests(TestCase):
        r = self.client.get(url + "?submit=iprtitle&iprtitle=%s" % quote(ipr.title))
        self.assertContains(r, ipr.title)

    def test_search_null_characters(self):
        """IPR search gracefully rejects null characters in parameters"""
        # Not a combinatorially exhaustive set, but tries to exercise all the parameters
        bad_params = [
            "option=document_search&document_search=draft-\x00stuff"
            "submit=dra\x00ft",
            "submit=draft&id=some\x00id",
            "submit=draft&id_document_tag=some\x00id",
            "submit=draft&id=someid&state=re\x00moved",
            "submit=draft&id=someid&state=posted&state=re\x00moved",
            "submit=draft&id=someid&state=removed&draft=draft-no\x00tvalid",
            "submit=rfc&rfc=rfc\x00123",
        ]
        url = urlreverse("ietf.ipr.views.search")
        for query_params in bad_params:
            r = self.client.get(f"{url}?{query_params}")
            self.assertEqual(r.status_code, 400, f"querystring '{query_params}' should be rejected")

    def test_feed(self):
        ipr = HolderIprDisclosureFactory()
        r = self.client.get("/feed/ipr/")

@@ -576,7 +599,7 @@ I would like to revoke this declaration.
        self.client.login(username="secretary", password="secretary+password")

        # test for presence of pending ipr
        num = IprDisclosureBase.objects.filter(state__in=('removed','rejected')).count()
        num = IprDisclosureBase.objects.filter(state__in=('removed','removed_objfalse','rejected')).count()

        r = self.client.get(url)
        self.assertEqual(r.status_code,200)

@@ -785,18 +808,28 @@ Subject: test
                         'New Document already has a "posted_related_ipr" DocEvent')
        self.assertEqual(0, doc.docevent_set.filter(type='removed_related_ipr').count(),
                         'New Document already has a "removed_related_ipr" DocEvent')
        self.assertEqual(0, doc.docevent_set.filter(type='removed_objfalse_related_ipr').count(),
                         'New Document already has a "removed_objfalse_related_ipr" DocEvent')
        # A 'posted' IprEvent must create a corresponding DocEvent
        IprEventFactory(type_id='posted', disclosure=ipr)
        self.assertEqual(1, doc.docevent_set.filter(type='posted_related_ipr').count(),
                         'Creating "posted" IprEvent did not create a "posted_related_ipr" DocEvent')
        self.assertEqual(0, doc.docevent_set.filter(type='removed_related_ipr').count(),
                         'Creating "posted" IprEvent created a "removed_related_ipr" DocEvent')
        self.assertEqual(0, doc.docevent_set.filter(type='removed_objfalse_related_ipr').count(),
                         'Creating "posted" IprEvent created a "removed_objfalse_related_ipr" DocEvent')
        # A 'removed' IprEvent must create a corresponding DocEvent
        IprEventFactory(type_id='removed', disclosure=ipr)
        self.assertEqual(1, doc.docevent_set.filter(type='posted_related_ipr').count(),
                         'Creating "removed" IprEvent created a "posted_related_ipr" DocEvent')
        self.assertEqual(1, doc.docevent_set.filter(type='removed_related_ipr').count(),
                         'Creating "removed" IprEvent did not create a "removed_related_ipr" DocEvent')
        # A 'removed_objfalse' IprEvent must create a corresponding DocEvent
        IprEventFactory(type_id='removed_objfalse', disclosure=ipr)
        self.assertEqual(1, doc.docevent_set.filter(type='posted_related_ipr').count(),
                         'Creating "removed_objfalse" IprEvent created a "posted_related_ipr" DocEvent')
        self.assertEqual(1, doc.docevent_set.filter(type='removed_objfalse_related_ipr').count(),
                         'Creating "removed_objfalse" IprEvent did not create a "removed_objfalse_related_ipr" DocEvent')
        # The DocEvent descriptions must refer to the IprEvents
        posted_docevent = doc.docevent_set.filter(type='posted_related_ipr').first()
        self.assertIn(ipr.title, posted_docevent.desc,

@@ -804,6 +837,9 @@ Subject: test
        removed_docevent = doc.docevent_set.filter(type='removed_related_ipr').first()
        self.assertIn(ipr.title, removed_docevent.desc,
                      'IprDisclosure title does not appear in DocEvent desc when removed')
        removed_objfalse_docevent = doc.docevent_set.filter(type='removed_objfalse_related_ipr').first()
        self.assertIn(ipr.title, removed_objfalse_docevent.desc,
                      'IprDisclosure title does not appear in DocEvent desc when removed as objectively false')

    def test_no_revisions_message(self):
        draft = WgDraftFactory(rev="02")
@@ -1,4 +1,4 @@
# Copyright The IETF Trust 2007-2022, All Rights Reserved
# Copyright The IETF Trust 2007-2023, All Rights Reserved
# -*- coding: utf-8 -*-

@@ -10,7 +10,7 @@ from django.contrib import messages
from django.db.models import Q
from django.forms.models import inlineformset_factory, model_to_dict
from django.forms.formsets import formset_factory
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.http import HttpResponse, Http404, HttpResponseRedirect, HttpResponseBadRequest
from django.shortcuts import render, get_object_or_404, redirect
from django.template.loader import render_to_string
from django.urls import reverse as urlreverse

@@ -269,7 +269,7 @@ def add_email(request, id):
@role_required('Secretariat',)
def admin(request, state):
    """Administrative disclosure listing. For non-posted disclosures"""
    states = IprDisclosureStateName.objects.filter(slug__in=[state, "rejected"] if state == "removed" else [state])
    states = IprDisclosureStateName.objects.filter(slug__in=[state, "rejected", "removed_objfalse"] if state == "removed" else [state])
    if not states:
        raise Http404

@@ -629,11 +629,16 @@ def post(request, id):

def search(request):
    search_type = request.GET.get("submit")
    if search_type and "\x00" in search_type:
        return HttpResponseBadRequest("Null characters are not allowed")

    # query field
    q = ''
    # legacy support
    if not search_type and request.GET.get("option", None) == "document_search":
        docname = request.GET.get("document_search", "")
        if docname and "\x00" in docname:
            return HttpResponseBadRequest("Null characters are not allowed")
        if docname.startswith("draft-"):
            search_type = "draft"
            q = docname

@@ -643,18 +648,24 @@ def search(request):
    if search_type:
        form = SearchForm(request.GET)
        docid = request.GET.get("id") or request.GET.get("id_document_tag") or ""
        if docid and "\x00" in docid:
            return HttpResponseBadRequest("Null characters are not allowed")
        docs = doc = None
        iprs = []
        related_iprs = []

        # set states
        states = request.GET.getlist('state',('posted','removed'))
        states = request.GET.getlist('state',settings.PUBLISH_IPR_STATES)
        if any("\x00" in state for state in states if state):
            return HttpResponseBadRequest("Null characters are not allowed")
        if states == ['all']:
            states = IprDisclosureStateName.objects.values_list('slug',flat=True)

        # get query field
        if request.GET.get(search_type):
            q = request.GET.get(search_type)
            if q and "\x00" in q:
                return HttpResponseBadRequest("Null characters are not allowed")

        if q or docid:
            # Search by RFC number or draft-identifier

@@ -664,13 +675,12 @@ def search(request):

            if docid:
                start = DocAlias.objects.filter(name__iexact=docid)
            else:
                if search_type == "draft":
                    q = normalize_draftname(q)
                    start = DocAlias.objects.filter(name__icontains=q, name__startswith="draft")
                elif search_type == "rfc":
                    start = DocAlias.objects.filter(name="rfc%s" % q.lstrip("0"))

            elif search_type == "draft":
                q = normalize_draftname(q)
                start = DocAlias.objects.filter(name__icontains=q, name__startswith="draft")
            else:  # search_type == "rfc"
                start = DocAlias.objects.filter(name="rfc%s" % q.lstrip("0"))

            # one match
            if len(start) == 1:
                first = start[0]

@@ -778,7 +788,7 @@ def show(request, id):
    """View of individual declaration"""
    ipr = get_object_or_404(IprDisclosureBase, id=id).get_child()
    if not has_role(request.user, 'Secretariat'):
        if ipr.state.slug == 'removed':
        if ipr.state.slug in ['removed', 'removed_objfalse']:
            return render(request, "ipr/removed.html", {
                'ipr': ipr
            })

@@ -801,10 +811,10 @@ def show(request, id):

def showlist(request):
    """List all disclosures by type, posted only"""
    generic = GenericIprDisclosure.objects.filter(state__in=('posted','removed')).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    specific = HolderIprDisclosure.objects.filter(state__in=('posted','removed')).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    thirdpty = ThirdPartyIprDisclosure.objects.filter(state__in=('posted','removed')).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    nondocspecific = NonDocSpecificIprDisclosure.objects.filter(state__in=('posted','removed')).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    generic = GenericIprDisclosure.objects.filter(state__in=settings.PUBLISH_IPR_STATES).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    specific = HolderIprDisclosure.objects.filter(state__in=settings.PUBLISH_IPR_STATES).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    thirdpty = ThirdPartyIprDisclosure.objects.filter(state__in=settings.PUBLISH_IPR_STATES).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')
    nondocspecific = NonDocSpecificIprDisclosure.objects.filter(state__in=settings.PUBLISH_IPR_STATES).prefetch_related('relatedipr_source_set__target','relatedipr_target_set__source').order_by('-time')

    # combine nondocspecific with generic and re-sort
    generic = itertools.chain(generic,nondocspecific)
2 ietf/liaisons/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

@@ -6,6 +6,7 @@ from ietf.liaisons.models import LiaisonStatement, LiaisonStatementEvent, Liaiso
class LiaisonStatementFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = LiaisonStatement
        skip_postgeneration_save = True

    title = factory.Faker('sentence')
    from_contact = factory.SubFactory('ietf.person.factories.EmailFactory')

1 ietf/liaisons/management/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc

2 ietf/mailinglists/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

2 ietf/meeting/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/*.swp
@@ -23,6 +23,7 @@ from ietf.utils.text import xslugify
class MeetingFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Meeting
        skip_postgeneration_save = True

    type_id = factory.Iterator(['ietf','interim'])

@@ -103,6 +104,7 @@ class MeetingFactory(factory.django.DjangoModelFactory):
class SessionFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Session
        skip_postgeneration_save = True

    meeting = factory.SubFactory(MeetingFactory)
    purpose_id = 'regular'

@@ -156,6 +158,7 @@ class ScheduleFactory(factory.django.DjangoModelFactory):
class RoomFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Room
        skip_postgeneration_save = True

    meeting = factory.SubFactory(MeetingFactory)
    name = factory.Faker('name')

@@ -172,6 +175,7 @@ class RoomFactory(factory.django.DjangoModelFactory):
class TimeSlotFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = TimeSlot
        skip_postgeneration_save = True

    meeting = factory.SubFactory(MeetingFactory)
    type_id = 'regular'

@@ -225,6 +229,7 @@ class FloorPlanFactory(factory.django.DjangoModelFactory):
class SlideSubmissionFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = SlideSubmission
        skip_postgeneration_save = True

    session = factory.SubFactory(SessionFactory)
    title = factory.Faker('sentence')

@@ -238,6 +243,7 @@ class SlideSubmissionFactory(factory.django.DjangoModelFactory):
class ConstraintFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Constraint
        skip_postgeneration_save = True

    meeting = factory.SubFactory(MeetingFactory)
    source = factory.SubFactory(GroupFactory)
@@ -1,240 +0,0 @@
# Copyright The IETF Trust 2023, All Rights Reserved

import datetime
import os
import shutil
import subprocess
import tempfile

from pathlib import Path

from django.core.management.base import BaseCommand
from django.conf import settings

from ietf.doc.models import Document, DocAlias, DocEvent
from ietf.meeting.models import Meeting, Session


def agendas_to_import():
    return [
        "2018-09-05.md",
        "2018-09-12.md",
        "2018-09-26.md",
        "2018-10-03.md",
        "2018-10-10.md",
        "2018-10-24.md",
        "2018-11-04.md",
        "2018-11-05.md",
        "2018-11-08.md",
        "2018-11-21.md",
        "2018-11-28.md",
        "2018-12-05.md",
        "2018-12-19.md",
        "2019-01-09.md",
        "2019-01-16.md",
        "2019-01-23.md",
        "2019-02-06.md",
        "2019-02-13.md",
        "2019-02-27.md",
        "2019-03-06.md",
        "2019-03-13.md",
        "2019-03-24.md",
        "2019-03-25.md",
        "2019-03-28.md",
        "2019-04-10.md",
        "2019-04-17.md",
        "2019-05-01.md",
        "2019-05-08.md",
        "2019-05-29.md",
        "2019-06-12.md",
        "2019-06-26.md",
        "2019-07-10.md",
        "2019-07-21.md",
        "2019-07-25.md",
        "2019-08-07.md",
        "2019-08-21.md",
        "2019-08-28.md",
        "2019-09-04.md",
        "2019-09-18.md",
        "2019-10-02.md",
        "2019-10-16.md",
        "2019-10-30.md",
        "2019-11-17.md",
        "2019-11-18.md",
        "2019-11-21.md",
        "2019-12-04.md",
        "2019-12-11.md",
        "2019-12-18.md",
        "2020-01-08.md",
        "2020-01-15.md",
        "2020-01-22.md",
        "2020-02-05.md",
        "2020-02-12.md",
        "2020-02-19.md",
        "2020-03-04.md",
        "2020-03-11.md",
        "2020-03-18.md",
        "2020-04-01.md",
        "2020-04-08.md",
        "2020-04-15.md",
        "2020-04-29.md",
        "2020-05-13.md",
        "2020-05-20.md",
        "2020-05-27.md",
        "2020-06-10.md",
        "2020-06-17.md",
        "2020-07-01.md",
        "2020-07-15.md",
        "2020-08-12.md",
        "2020-08-26.md",
        "2020-09-09.md",
        "2020-09-23.md",
        "2020-10-07.md",
        "2020-10-14.md",
        "2020-10-21.md",
        "2020-11-04.md",
        "2020-12-02.md",
        "2020-12-16.md",
        "2021-01-06.md",
        "2021-01-13.md",
        "2021-01-20.md",
        "2021-01-27.md",
        "2021-02-03.md",
        "2021-02-10.md",
        "2021-02-17.md",
        "2021-02-24.md",
        "2021-03-03.md",
        "2021-03-24.md",
        "2021-03-31.md",
        "2021-04-07.md",
        "2021-04-14.md",
        "2021-04-21.md",
        "2021-05-05.md",
        "2021-05-12.md",
        "2021-05-19.md",
        "2021-05-26.md",
        "2021-06-02.md",
        "2021-06-16.md",
        "2021-06-23.md",
        "2021-06-30.md",
        "2021-07-14.md",
        "2021-07-21.md",
        "2021-08-11.md",
        "2021-08-25.md",
        "2021-09-01.md",
        "2021-09-08.md",
        "2021-09-22.md",
        "2021-10-06.md",
        "2021-10-20.md",
        "2021-10-27.md",
        "2021-11-17.md",
        "2021-12-01.md",
        "2021-12-08.md",
        "2021-12-15.md",
        "2022-01-12.md",
        "2022-01-19.md",
        "2022-02-02.md",
        "2022-02-16.md",
        "2022-02-23.md",
        "2022-03-02.md",
        "2022-03-09.md",
        "2022-03-20.md",
        "2022-04-06.md",
        "2022-04-13.md",
        "2022-04-20.md",
        "2022-04-27.md",
        "2022-05-04.md",
        "2022-05-11.md",
        "2022-06-01.md",
        "2022-06-15.md",
        "2022-06-22.md",
        "2022-06-29.md",
        "2022-07-06.md",
        "2022-07-24.md",
        "2022-07-26.md",
        "2022-08-10.md",
        "2022-08-24.md",
        "2022-09-07.md",
        "2022-09-21.md",
        "2022-09-28.md",
        "2022-10-05.md",
        "2022-10-12.md",
        "2022-10-26.md",
        "2022-11-06.md",
        "2022-11-08.md",
        "2022-11-10.md",
        "2022-11-23.md",
        "2022-12-07.md",
        "2022-12-14.md",
    ]


class Command(BaseCommand):
    help = "Performs a one-time import of older IAB agendas"

    def handle(self, *args, **options):
        if Document.objects.filter(name="agenda-interim-2018-iab-26-20180905").exists():
            print("Command has already been run - exiting")
            exit(0)

        tmpdir = tempfile.mkdtemp()
        process = subprocess.Popen(
            ["git", "clone", "https://github.com/kesara/iab-scraper.git", tmpdir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()
        if not Path(tmpdir).joinpath("iab_agendas", "2018-09-05.md").exists():
            print("Git clone of the iab-scraper directory did not go as expected")
            print("stdout:", stdout)
            print("stderr:", stderr)
            print(f"Clean up {tmpdir} manually")
            exit(-1)

        agendas = agendas_to_import()
        for agenda in agendas:
            [year, month, day] = [int(part) for part in agenda[:10].split("-")]
            agenda_date = datetime.date(year, month, day)
            meeting = Meeting.objects.get(
                date=agenda_date, type_id="interim", session__group__acronym="iab"
            )
            counter = int(meeting.number.split("-")[3])
            agenda_docname = (
                f"agenda-interim-{year}-iab-{counter:02d}-{agenda_date:%Y%m%d}"
            )
            agenda_filename = f"{agenda_docname}-00.md"
            # Create Document
            doc = Document.objects.create(
                name=agenda_docname,
                type_id="agenda",
                title=f"Agenda {meeting.number} {agenda_date:%Y-%m-%d}",
                group_id=7,  # The IAB group
                rev="00",
                uploaded_filename=agenda_filename,
            )
            DocAlias.objects.create(name=doc.name).docs.add(doc)
            e = DocEvent.objects.create(
                type="comment",
                doc=doc,
                rev="00",
                by_id=1,  # The "(System)" person
                desc="Agenda moved into datatracker from iab wordpress website",
            )
            doc.save_with_history([e])

            session = Session.objects.get(meeting=meeting)
            # Add Document to Session
            session.sessionpresentation_set.create(document=doc, rev=doc.rev)

            # Put file in place
            source = Path(tmpdir).joinpath("iab_agendas", agenda)
            dest = Path(settings.AGENDA_PATH).joinpath(
                meeting.number, "agenda", agenda_filename
            )
            if dest.exists():
                print(f"WARNING: {dest} already exists - not overwriting it.")
            else:
                os.makedirs(dest.parent, exist_ok=True)
                shutil.copy(source, dest)

        shutil.rmtree(tmpdir)
@@ -12097,6 +12097,16 @@
        "model": "name.iprdisclosurestatename",
        "pk": "removed"
    },
    {
        "fields": {
            "desc": "",
            "name": "Removed Objectively False",
            "order": 5,
            "used": true
        },
        "model": "name.iprdisclosurestatename",
        "pk": "removed_objfalse"
    },
    {
        "fields": {
            "desc": "",

@@ -12207,6 +12217,16 @@
        "model": "name.ipreventtypename",
        "pk": "removed"
    },
    {
        "fields": {
            "desc": "",
            "name": "Removed Objectively False",
            "order": 0,
            "used": true
        },
        "model": "name.ipreventtypename",
        "pk": "removed_objfalse"
    },
    {
        "fields": {
            "desc": "",
24 ietf/name/migrations/0008_removed_objfalse.py Normal file
@@ -0,0 +1,24 @@
# Copyright The IETF Trust 2023, All Rights Reserved

from django.db import migrations


def forward(apps, schema_editor):
    IprDisclosureStateName = apps.get_model("name", "IprDisclosureStateName")
    IprDisclosureStateName.objects.create(slug="removed_objfalse", name="Removed Objectively False", order=5)
    IprEventTypeName = apps.get_model("name", "IprEventTypeName")
    IprEventTypeName.objects.create(slug="removed_objfalse", name="Removed Objectively False")


def reverse(apps, schema_editor):
    IprDisclosureStateName = apps.get_model("name", "IprDisclosureStateName")
    IprDisclosureStateName.objects.filter(slug="removed_objfalse").delete()
    IprEventTypeName = apps.get_model("name", "IprEventTypeName")
    IprEventTypeName.objects.filter(slug="removed_objfalse").delete()


class Migration(migrations.Migration):
    dependencies = [
        ("name", "0007_appeal_artifact_typename"),
    ]

    operations = [
        migrations.RunPython(forward, reverse),
    ]
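This data migration is what makes the new "removed_objfalse" names available to the IPR views, tests, and fixtures shown above. A quick sanity check of the kind one might run in a Django shell after migrating; this snippet is illustrative only and is not part of the commit, though the model paths are the real ietf.name models referenced by the migration.

# Illustrative check, assuming the migration above has been applied.
from ietf.name.models import IprDisclosureStateName, IprEventTypeName

assert IprDisclosureStateName.objects.filter(slug="removed_objfalse").exists()
assert IprEventTypeName.objects.filter(slug="removed_objfalse").exists()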
@@ -84,6 +84,7 @@ def nomcom_kwargs_for_year(year=None, *args, **kwargs):
class NomComFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = NomCom
        skip_postgeneration_save = True

    group = factory.SubFactory(GroupFactory,type_id='nomcom')

@@ -167,6 +168,7 @@ class NomineePositionFactory(factory.django.DjangoModelFactory):
class FeedbackFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Feedback
        skip_postgeneration_save = True

    nomcom = factory.SubFactory(NomComFactory)
    subject = factory.Faker('sentence')

@@ -176,6 +178,7 @@ class FeedbackFactory(factory.django.DjangoModelFactory):
    def comments(obj, create, extracted, **kwargs):
        comment_text = Faker().paragraph()
        obj.comments = obj.nomcom.encrypt(comment_text)
        obj.save()

class TopicFactory(factory.django.DjangoModelFactory):
    class Meta:

@@ -46,6 +46,7 @@ class UserFactory(factory.django.DjangoModelFactory):
        model = User
        django_get_or_create = ('username',)
        exclude = ['faker', ]
        skip_postgeneration_save = True

    faker = factory.LazyFunction(random_faker)
    # normalize these i18n Unicode strings in the same way the database does

@@ -55,13 +56,16 @@ class UserFactory(factory.django.DjangoModelFactory):
                                               slugify(unidecode(u.last_name)), n, fake.domain_name()))  # type: ignore
    username = factory.LazyAttribute(lambda u: u.email)

    # Consider using PostGenerationMethodCall instead
    @factory.post_generation
    def set_password(obj, create, extracted, **kwargs):  # pylint: disable=no-self-argument
        obj.set_password( '%s+password' % obj.username )  # pylint: disable=no-value-for-parameter
        obj.save()

class PersonFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Person
        skip_postgeneration_save = True

    user = factory.SubFactory(UserFactory)
    name = factory.LazyAttribute(lambda p: normalize_name('%s %s'%(p.user.first_name, p.user.last_name)))

@@ -106,6 +110,7 @@ class PersonFactory(factory.django.DjangoModelFactory):
        def delete_file(file):
            os.unlink(file)
        atexit.register(delete_file, photodst)
        obj.save()

class AliasFactory(factory.django.DjangoModelFactory):
    class Meta:
2 ietf/redirects/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

2 ietf/redirects/fixtures/.gitignore vendored
@@ -1,2 +0,0 @@
/*.pyc
/settings_local.py

@@ -11,6 +11,7 @@ from ietf.name.models import ReviewTypeName, ReviewResultName
class ReviewTeamSettingsFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = ReviewTeamSettings
        skip_postgeneration_save = True

    group = factory.SubFactory('ietf.group.factories.GroupFactory',type_id='review')
    reviewer_queue_policy_id = 'RotateAlphabetically'
@@ -1270,6 +1270,8 @@ if 'CACHES' not in locals():
        },
    }

PUBLISH_IPR_STATES = ['posted', 'removed', 'removed_objfalse']

# We provide a secret key only for test and development modes. It's
# absolutely vital that django fails to start in production mode unless a
# secret key has been provided elsewhere, not in this file which is
@@ -20,7 +20,7 @@ function replace_with_internal(table, internal_table, i) {
        .replaceWith(internal_table[i]
            .children("table")
            .children("tbody")
            .clone());
            .clone(true));
}

function field_magic(i, e, fields) {

@@ -160,10 +160,10 @@ $(document)
            // create the internal table and add list.js to them
            var thead = $(this)
                .siblings("thead:first")
                .clone();
                .clone(true);

            var tbody = $(this)
                .clone();
                .clone(true);

            var tbody_rows = $(tbody)
                .find("tr")

@@ -178,7 +178,7 @@ $(document)

            var parent = $(table)
                .parent()
                .clone();
                .clone(true);

            $(parent)
                .children("table")

@@ -251,6 +251,7 @@ $(document)
            $.each(list_instance, (i, e) => {
                e.on("sortComplete", function () {
                    replace_with_internal(table, internal_table, i);
                    $(table).find("[data-bs-original-title]").tooltip();
                    if (i == list_instance.length - 1) {
                        $(table)
                            .find("thead:first tr")
1 ietf/submit/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc

1 ietf/submit/parsers/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc

1 ietf/submit/templatetags/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc

1 ietf/templates/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc
@@ -1,17 +1,14 @@
{# Copyright The IETF Trust 2015, All Rights Reserved #}
{% extends "base.html" %}
{% load origin %}
{% origin %}
{% load django_bootstrap5 %}
{% block title %}Remove tracking of document {{ name }}{% endblock %}
{% bootstrap_messages %}
<form method="post">
    {% csrf_token %}
    <p>
        Remove {{ name }} from the list?
    </p>
    <button type="submit"
            class="btn btn-primary"
            value="Remove tracking of document">
        Remove tracking of document
    </button>
</form>
{% block content %}
    <form method="post">
        {% csrf_token %}
        <p>
            Remove {{ name }} from the list?
        </p>
        <button type="submit" class="btn btn-primary" value="Remove tracking of document">Remove tracking of document</button>
    </form>
{% endblock %}
@@ -279,7 +279,7 @@
            </a>
        </th>
        <td class="edit">
            {% if iesg_state.slug != 'idexists' and iesg_state.slug != 'dead' and can_edit %}
            {% if iesg_state.slug != 'idexists' and iesg_state.slug != 'dead' and can_edit or user|has_role:"Secretariat" %}
                <a class="btn btn-primary btn-sm"
                   href="{% url 'ietf.doc.views_draft.change_state' name=doc.name %}">
                    Edit

@@ -684,7 +684,7 @@
    <ul class="dropdown-menu" role="menu">
        <li role="presentation">
            <a class="dropdown-item"
               href="https://mailarchive.ietf.org/arch/search?q=%22{{ doc.name }}%22"
               href="https://mailarchive.ietf.org/arch/search/?q=%22{{ doc.name }}%22"
               rel="nofollow"
               target="_blank">
                IETF Mail Archive
@@ -1,5 +1,5 @@
{% extends "base.html" %}
{# Copyright The IETF Trust 2015, All Rights Reserved #}
{# Copyright The IETF Trust 2015-2023, All Rights Reserved #}
{% load origin %}
{% load django_bootstrap5 %}
{% block title %}Change state for {{ doc }}{% endblock %}

@@ -10,6 +10,13 @@
        <br>
        <small class="text-body-secondary">{{ doc }}</small>
    </h1>
    {% if state.slug == "dead" %}
        <p class="alert alert-warning my-3">
            This document is in IESG state "Dead". It is unusual to change
            this to anything other than "AD is watching", and this should
            never be used as a replacement for Begin IESG Processing.
        </p>
    {% endif %}
    <a class="btn btn-info my-3"
       href="{% url 'ietf.doc.views_help.state_help' type="draft-iesg" %}">Help on states</a>
    <form class="mt-3" method="post">

@@ -180,5 +180,5 @@
    </form>
    <p class="text-center pt-5">
        <a class="btn btn-outline-primary btn-sm" href="https://www.ietf.org/search">Search page for www.ietf.org website</a>
        <a class="btn btn-outline-primary btn-sm" href="https://mailarchive.ietf.org">Search page for IETF mail list archives</a>
        <a class="btn btn-outline-primary btn-sm" href="https://mailarchive.ietf.org/arch/">Search page for IETF mail list archives</a>
    </p>
1 ietf/templates/idindex/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc

1 ietf/templates/iesg/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc

1 ietf/templates/ipr/.gitignore vendored
@@ -1 +0,0 @@
/*.pyc
@@ -103,6 +103,8 @@
            IPR Disclosure ID #{{ item.source.id }},
            {% if item.source.state.slug == "removed" %}
                "{{ item.source.title }}" (which was removed at the request of the submitter)
            {% elif item.source.state.slug == "removed_objfalse" %}
                "{{ item.source.title }}" (which was removed as objectively false)
            {% else %}
                "<a href="{% url "ietf.ipr.views.show" id=item.source.id %}">{{ item.source.title }}</a>"
            {% endif %}

@@ -122,6 +124,8 @@
            IPR Disclosure ID #{{ item.target.id }},
            {% if item.target.state.slug == "removed" %}
                "{{ item.target.title }}" (which was removed at the request of the submitter)
            {% elif item.source.state.slug == "removed_objfalse" %}
                "{{ item.source.title }}" (which was removed as objectively false)
            {% elif item.target.state.slug == "rejected" %}
                "{{ item.target.title }}" (which was rejected)
            {% elif item.target.state.slug == "parked" %}
@@ -1,11 +1,15 @@
{% extends "base.html" %}
{# Copyright The IETF Trust 2015, All Rights Reserved #}
{# Copyright The IETF Trust 2015-2023, All Rights Reserved #}
{% load origin %}
{% block title %}{{ ipr.title }}{% endblock %}
{% block content %}
    {% origin %}
    <h1>{{ ipr.title }}</h1>
    <p class="alert alert-info my-3">
        This IPR disclosure was removed at the submitter's request.
        {% if ipr.state.slug == "removed" %}
            This IPR disclosure was removed at the submitter's request.
        {% elif ipr.state.slug == "removed_objfalse" %}
            This IPR disclosure was removed as objectively false.
        {% endif %}
    </p>
{% endblock %}
Some files were not shown because too many files have changed in this diff.