Merge remote-tracking branch 'ietf-tools/main' into feat/postgres

commit 88a78cf1ab
Author: Robert Sparks
Date:   2023-03-20 13:46:32 -05:00
GPG key ID: 6E2A6A5775F91318 (no known key found for this signature in database)

140 changed files with 8368 additions and 4720 deletions

.github/dependabot.yml (new file, 19 lines)

@ -0,0 +1,19 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
  - package-ecosystem: "docker"
    directory: "/docker"
    schedule:
      interval: "weekly"
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"


@ -33,7 +33,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: .
file: docker/base.Dockerfile


@ -35,7 +35,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: .
file: dev/celery/Dockerfile


@ -34,7 +34,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: .
file: dev/mq/Dockerfile


@ -144,7 +144,7 @@ jobs:
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate
./ietf/manage.py migrate --fake-initial
echo "Validating migrations..."
if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then
echo "Model changes without migrations found."


@ -38,7 +38,7 @@ jobs:
run: |
echo "Running checks..."
./ietf/manage.py check
./ietf/manage.py migrate
./ietf/manage.py migrate --fake-initial
echo "Validating migrations..."
if ! ( ietf/manage.py makemigrations --dry-run --check --verbosity 3 ) ; then
echo "Model changes without migrations found."


@ -17,4 +17,4 @@ jobs:
- name: 'Checkout Repository'
uses: actions/checkout@v3
- name: 'Dependency Review'
uses: actions/dependency-review-action@v2
uses: actions/dependency-review-action@v3


@ -39,7 +39,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: .
file: dev/shared-assets-sync/Dockerfile


@ -67,7 +67,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: .
file: docker/db.Dockerfile
@ -202,7 +202,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Docker Build & Push
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
context: .
file: docker/db-pg.Dockerfile

.pnp.cjs (generated; 771 lines changed) and a number of binary files also changed in this commit; their diffs are not shown here.

@ -6,18 +6,19 @@
"author": "IETF Trust",
"license": "BSD-3-Clause",
"dependencies": {
"@actions/core": "1.9.1",
"@actions/github": "5.0.0",
"chart.js": "3.7.1",
"@actions/core": "1.10.0",
"@actions/github": "5.1.1",
"chart.js": "3.5.1",
"chartjs-node-canvas": "4.1.6",
"lodash": "4.17.21",
"luxon": "2.5.2"
"luxon": "3.3.0"
},
"devDependencies": {
"eslint": "7.32.0",
"eslint-config-standard": "16.0.3",
"eslint-plugin-import": "2.25.4",
"eslint": "8.36.0",
"eslint-config-standard": "17.0.0",
"eslint-plugin-import": "2.27.5",
"eslint-plugin-node": "11.1.0",
"eslint-plugin-promise": "5.2.0"
"eslint-plugin-promise": "6.1.1",
"npm-check-updates": "16.7.12"
}
}


@ -9,7 +9,7 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@octokit/core": "^4.1.0",
"@octokit/core": "^4.2.0",
"luxon": "^3.2.1"
}
},
@ -25,15 +25,15 @@
}
},
"node_modules/@octokit/core": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.1.0.tgz",
"integrity": "sha512-Czz/59VefU+kKDy+ZfDwtOIYIkFjExOKf+HA92aiTZJ6EfWpFzYQWw0l54ji8bVmyhc+mGaLUbSUmXazG7z5OQ==",
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.0.tgz",
"integrity": "sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg==",
"dependencies": {
"@octokit/auth-token": "^3.0.0",
"@octokit/graphql": "^5.0.0",
"@octokit/request": "^6.0.0",
"@octokit/request-error": "^3.0.0",
"@octokit/types": "^8.0.0",
"@octokit/types": "^9.0.0",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
},
@ -41,6 +41,19 @@
"node": ">= 14"
}
},
"node_modules/@octokit/core/node_modules/@octokit/openapi-types": {
"version": "16.0.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-16.0.0.tgz",
"integrity": "sha512-JbFWOqTJVLHZSUUoF4FzAZKYtqdxWu9Z5m2QQnOyEa04fOFljvyh7D3GYKbfuaSWisqehImiVIMG4eyJeP5VEA=="
},
"node_modules/@octokit/core/node_modules/@octokit/types": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.0.0.tgz",
"integrity": "sha512-LUewfj94xCMH2rbD5YJ+6AQ4AVjFYTgpp6rboWM5T7N3IsIF65SBEOVcYMGAEzO/kKNiNaW4LoWtoThOhH06gw==",
"dependencies": {
"@octokit/openapi-types": "^16.0.0"
}
},
"node_modules/@octokit/endpoint": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.3.tgz",
@ -202,17 +215,32 @@
}
},
"@octokit/core": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.1.0.tgz",
"integrity": "sha512-Czz/59VefU+kKDy+ZfDwtOIYIkFjExOKf+HA92aiTZJ6EfWpFzYQWw0l54ji8bVmyhc+mGaLUbSUmXazG7z5OQ==",
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.0.tgz",
"integrity": "sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg==",
"requires": {
"@octokit/auth-token": "^3.0.0",
"@octokit/graphql": "^5.0.0",
"@octokit/request": "^6.0.0",
"@octokit/request-error": "^3.0.0",
"@octokit/types": "^8.0.0",
"@octokit/types": "^9.0.0",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
},
"dependencies": {
"@octokit/openapi-types": {
"version": "16.0.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-16.0.0.tgz",
"integrity": "sha512-JbFWOqTJVLHZSUUoF4FzAZKYtqdxWu9Z5m2QQnOyEa04fOFljvyh7D3GYKbfuaSWisqehImiVIMG4eyJeP5VEA=="
},
"@octokit/types": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.0.0.tgz",
"integrity": "sha512-LUewfj94xCMH2rbD5YJ+6AQ4AVjFYTgpp6rboWM5T7N3IsIF65SBEOVcYMGAEzO/kKNiNaW4LoWtoThOhH06gw==",
"requires": {
"@octokit/openapi-types": "^16.0.0"
}
}
}
},
"@octokit/endpoint": {


@ -10,7 +10,7 @@
"author": "",
"license": "ISC",
"dependencies": {
"@octokit/core": "^4.1.0",
"@octokit/core": "^4.2.0",
"luxon": "^3.2.1"
}
}


@ -12,12 +12,34 @@
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.5",
"tar": "^6.1.11",
"yargs": "^17.5.1"
"yargs": "^17.7.1"
},
"engines": {
"node": ">=16"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"engines": {
"node": ">=8"
}
},
"node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/asn1": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
@ -68,80 +90,16 @@
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
},
"node_modules/cliui": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"strip-ansi": "^6.0.1",
"wrap-ansi": "^7.0.0"
}
},
"node_modules/cliui/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/cliui/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"node_modules/cliui/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/cliui/node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
"node": ">=12"
}
},
"node_modules/color-convert": {
@ -216,6 +174,11 @@
"node": ">= 8.0"
}
},
"node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"node_modules/end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
@ -483,6 +446,30 @@
"safe-buffer": "~5.2.0"
}
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/tar": {
"version": "6.1.11",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz",
@ -584,6 +571,22 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
@ -603,17 +606,17 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/yargs": {
"version": "17.5.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz",
"integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==",
"version": "17.7.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz",
"integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==",
"dependencies": {
"cliui": "^7.0.2",
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.0.0"
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=12"
@ -626,46 +629,22 @@
"engines": {
"node": ">=12"
}
},
"node_modules/yargs/node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"engines": {
"node": ">=8"
}
},
"node_modules/yargs/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"node_modules/yargs/node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/yargs/node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
}
},
"dependencies": {
"ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
},
"ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"requires": {
"color-convert": "^2.0.1"
}
},
"asn1": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
@ -699,61 +678,13 @@
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
},
"cliui": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
"requires": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"strip-ansi": "^6.0.1",
"wrap-ansi": "^7.0.0"
},
"dependencies": {
"ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
},
"ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"requires": {
"color-convert": "^2.0.1"
}
},
"emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"requires": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
}
},
"strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"requires": {
"ansi-regex": "^5.0.1"
}
},
"wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"requires": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
}
}
}
},
"color-convert": {
@ -807,6 +738,11 @@
"tar-fs": "~2.0.1"
}
},
"emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
@ -996,6 +932,24 @@
"safe-buffer": "~5.2.0"
}
},
"string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"requires": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
}
},
"strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"requires": {
"ansi-regex": "^5.0.1"
}
},
"tar": {
"version": "6.1.11",
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz",
@ -1075,6 +1029,16 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"requires": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
}
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
@ -1091,47 +1055,17 @@
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"yargs": {
"version": "17.5.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz",
"integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==",
"version": "17.7.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz",
"integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==",
"requires": {
"cliui": "^7.0.2",
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.0.0"
},
"dependencies": {
"ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
},
"emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"requires": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
}
},
"strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"requires": {
"ansi-regex": "^5.0.1"
}
}
"yargs-parser": "^21.1.1"
}
},
"yargs-parser": {


@ -8,7 +8,7 @@
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.5",
"tar": "^6.1.11",
"yargs": "^17.5.1"
"yargs": "^17.7.1"
},
"engines": {
"node": ">=16"


@ -33,7 +33,7 @@ echo "Running Datatracker checks..."
# Migrate, adjusting to what the current state of the underlying database might be:
echo "Running Datatracker migrations..."
/usr/local/bin/python ./ietf/manage.py migrate --settings=settings_local
/usr/local/bin/python ./ietf/manage.py migrate --fake-initial --settings=settings_local
echo "Starting Datatracker..."
./ietf/manage.py runserver 0.0.0.0:8000 --settings=settings_local


@ -16,7 +16,7 @@
"listr2": "^5.0.2",
"lodash-es": "^4.17.21",
"luxon": "^3.2.1",
"pretty-bytes": "^6.0.0",
"pretty-bytes": "^6.1.0",
"tar": "^6.1.11",
"yargs": "^17.5.1"
},
@ -1141,9 +1141,9 @@
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="
},
"node_modules/pretty-bytes": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.0.0.tgz",
"integrity": "sha512-6UqkYefdogmzqAZWzJ7laYeJnaXDy2/J+ZqiiMtS7t7OfpXWTlaeGMwX8U6EFvPV/YWWEKRkS8hKS4k60WHTOg==",
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.0.tgz",
"integrity": "sha512-Rk753HI8f4uivXi4ZCIYdhmG1V+WKzvRMg/X+M42a6t7D07RcmopXJMDNk6N++7Bl75URRGsb40ruvg7Hcp2wQ==",
"engines": {
"node": "^14.13.1 || >=16.0.0"
},
@ -2311,9 +2311,9 @@
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="
},
"pretty-bytes": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.0.0.tgz",
"integrity": "sha512-6UqkYefdogmzqAZWzJ7laYeJnaXDy2/J+ZqiiMtS7t7OfpXWTlaeGMwX8U6EFvPV/YWWEKRkS8hKS4k60WHTOg=="
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-6.1.0.tgz",
"integrity": "sha512-Rk753HI8f4uivXi4ZCIYdhmG1V+WKzvRMg/X+M42a6t7D07RcmopXJMDNk6N++7Bl75URRGsb40ruvg7Hcp2wQ=="
},
"pump": {
"version": "3.0.0",


@ -12,7 +12,7 @@
"listr2": "^5.0.2",
"lodash-es": "^4.17.21",
"luxon": "^3.2.1",
"pretty-bytes": "^6.0.0",
"pretty-bytes": "^6.1.0",
"tar": "^6.1.11",
"yargs": "^17.5.1"
},


@ -14,5 +14,5 @@ chmod +x ./docker/scripts/app-create-dirs.sh
./docker/scripts/app-create-dirs.sh
./ietf/manage.py check
./ietf/manage.py migrate
./ietf/manage.py migrate --fake-initial


@ -113,7 +113,7 @@ echo "Running initial checks..."
# Migrate, adjusting to what the current state of the underlying database might be:
/usr/local/bin/python $WORKSPACEDIR/ietf/manage.py migrate --settings=settings_local
/usr/local/bin/python $WORKSPACEDIR/ietf/manage.py migrate --fake-initial --settings=settings_local
echo "-----------------------------------------------------------------"
echo "Done!"

ietf/api/ietf_utils.py (new file, 15 lines)

@ -0,0 +1,15 @@
# Copyright The IETF Trust 2023, All Rights Reserved

# This is not utils.py because Tastypie implicitly consumes ietf.api.utils.
# See ietf.api.__init__.py for details.

from django.conf import settings


def is_valid_token(endpoint, token):
    # This is where we would consider integration with vault
    # Settings implementation for now.
    if hasattr(settings, "APP_API_TOKENS"):
        token_store = settings.APP_API_TOKENS
        if endpoint in token_store and token in token_store[endpoint]:
            return True
    return False

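For orientation, here is a minimal sketch of how the new token store is meant to be consulted. The APP_API_TOKENS fragment is an assumption modeled on the override_settings values in the tests added later in this commit; it is not something this diff adds to settings.

    # Hypothetical settings_local.py fragment (shape matches the
    # override_settings(APP_API_TOKENS=...) used by the tests in this commit):
    #
    #     APP_API_TOKENS = {"ietf.api.views.directauth": "nSZJDerbau6WZwbEAYuQ"}

    from ietf.api.ietf_utils import is_valid_token

    # True only when APP_API_TOKENS is configured and the named endpoint
    # accepts this token; False in every other case, including when the
    # setting is absent.
    allowed = is_valid_token("ietf.api.views.directauth", "nSZJDerbau6WZwbEAYuQ")

Because the membership test is `token in token_store[endpoint]`, an endpoint entry can be a single token string or a list of accepted tokens.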

@ -8,12 +8,12 @@ import os
import sys
from importlib import import_module
from mock import patch
from pathlib import Path
from django.apps import apps
from django.conf import settings
from django.test import Client
from django.test.utils import override_settings
from django.urls import reverse as urlreverse
from django.utils import timezone
@ -27,7 +27,6 @@ from ietf.doc.models import RelatedDocument, State
from ietf.doc.factories import IndividualDraftFactory, WgDraftFactory
from ietf.group.factories import RoleFactory
from ietf.meeting.factories import MeetingFactory, SessionFactory
from ietf.meeting.test_data import make_meeting_test_data
from ietf.meeting.models import Session
from ietf.person.factories import PersonFactory, random_faker
from ietf.person.models import User
@ -46,20 +45,6 @@ OMITTED_APPS = (
class CustomApiTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['AGENDA_PATH']
# Using mock to patch the import functions in ietf.meeting.views, where
# api_import_recordings() are using them:
@patch('ietf.meeting.views.import_audio_files')
def test_notify_meeting_import_audio_files(self, mock_import_audio):
meeting = make_meeting_test_data()
client = Client(Accept='application/json')
# try invalid method GET
url = urlreverse('ietf.meeting.views.api_import_recordings', kwargs={'number':meeting.number})
r = client.get(url)
self.assertEqual(r.status_code, 405)
# try valid method POST
r = client.post(url)
self.assertEqual(r.status_code, 201)
def test_api_help_page(self):
url = urlreverse('ietf.api.views.api_help')
r = self.client.get(url)
@ -546,6 +531,101 @@ class CustomApiTests(TestCase):
jsondata = r.json()
self.assertEqual(jsondata['success'], True)
class DirectAuthApiTests(TestCase):

    def setUp(self):
        super().setUp()
        self.valid_token = "nSZJDerbau6WZwbEAYuQ"
        self.invalid_token = self.valid_token
        while self.invalid_token == self.valid_token:
            self.invalid_token = User.objects.make_random_password(20)
        self.url = urlreverse("ietf.api.views.directauth")
        self.valid_person = PersonFactory()
        self.valid_password = self.valid_person.user.username+"+password"
        self.invalid_password = self.valid_password
        while self.invalid_password == self.valid_password:
            self.invalid_password = User.objects.make_random_password(20)

        self.valid_body_with_good_password = self.post_dict(authtoken=self.valid_token, username=self.valid_person.user.username, password=self.valid_password)
        self.valid_body_with_bad_password = self.post_dict(authtoken=self.valid_token, username=self.valid_person.user.username, password=self.invalid_password)
        self.valid_body_with_unknown_user = self.post_dict(authtoken=self.valid_token, username="notauser@nowhere.nada", password=self.valid_password)

    def post_dict(self, authtoken, username, password):
        data = dict()
        if authtoken is not None:
            data["authtoken"] = authtoken
        if username is not None:
            data["username"] = username
        if password is not None:
            data["password"] = password
        return dict(data = json.dumps(data))

    def response_data(self, response):
        try:
            data = json.loads(response.content)
        except json.decoder.JSONDecodeError:
            data = None
        self.assertIsNotNone(data)
        return data

    def test_bad_methods(self):
        for method in (self.client.get, self.client.put, self.client.head, self.client.delete, self.client.patch):
            r = method(self.url)
            self.assertEqual(r.status_code, 405)

    def test_bad_post(self):
        for bad in [
            self.post_dict(authtoken=None, username=self.valid_person.user.username, password=self.valid_password),
            self.post_dict(authtoken=self.valid_token, username=None, password=self.valid_password),
            self.post_dict(authtoken=self.valid_token, username=self.valid_person.user.username, password=None),
            self.post_dict(authtoken=None, username=None, password=self.valid_password),
            self.post_dict(authtoken=self.valid_token, username=None, password=None),
            self.post_dict(authtoken=None, username=self.valid_person.user.username, password=None),
            self.post_dict(authtoken=None, username=None, password=None),
        ]:
            r = self.client.post(self.url, bad)
            self.assertEqual(r.status_code, 200)
            data = self.response_data(r)
            self.assertEqual(data["result"], "failure")
            self.assertEqual(data["reason"], "invalid post")

        bad = dict(authtoken=self.valid_token, username=self.valid_person.user.username, password=self.valid_password)
        r = self.client.post(self.url, bad)
        self.assertEqual(r.status_code, 200)
        data = self.response_data(r)
        self.assertEqual(data["result"], "failure")
        self.assertEqual(data["reason"], "invalid post")

    def test_notokenstore(self):
        self.assertFalse(hasattr(settings, "APP_API_TOKENS"))
        r = self.client.post(self.url,self.valid_body_with_good_password)
        self.assertEqual(r.status_code, 200)
        data = self.response_data(r)
        self.assertEqual(data["result"], "failure")
        self.assertEqual(data["reason"], "invalid authtoken")

    @override_settings(APP_API_TOKENS={"ietf.api.views.directauth":"nSZJDerbau6WZwbEAYuQ"})
    def test_bad_username(self):
        r = self.client.post(self.url, self.valid_body_with_unknown_user)
        self.assertEqual(r.status_code, 200)
        data = self.response_data(r)
        self.assertEqual(data["result"], "failure")
        self.assertEqual(data["reason"], "authentication failed")

    @override_settings(APP_API_TOKENS={"ietf.api.views.directauth":"nSZJDerbau6WZwbEAYuQ"})
    def test_bad_password(self):
        r = self.client.post(self.url, self.valid_body_with_bad_password)
        self.assertEqual(r.status_code, 200)
        data = self.response_data(r)
        self.assertEqual(data["result"], "failure")
        self.assertEqual(data["reason"], "authentication failed")

    @override_settings(APP_API_TOKENS={"ietf.api.views.directauth":"nSZJDerbau6WZwbEAYuQ"})
    def test_good_password(self):
        r = self.client.post(self.url, self.valid_body_with_good_password)
        self.assertEqual(r.status_code, 200)
        data = self.response_data(r)
        self.assertEqual(data["result"], "success")
class TastypieApiTestCase(ResourceTestCaseMixin, TestCase):
def __init__(self, *args, **kwargs):


@ -32,8 +32,6 @@ urlpatterns = [
url(r'^meeting/(?P<num>[A-Za-z0-9._+-]+)/agenda-data$', meeting_views.api_get_agenda_data),
# Meeting session materials
url(r'^meeting/session/(?P<session_id>[A-Za-z0-9._+-]+)/materials$', meeting_views.api_get_session_materials),
# Let Meetecho trigger recording imports
url(r'^notify/meeting/import_recordings/(?P<number>[a-z0-9-]+)/?$', meeting_views.api_import_recordings),
# Let MeetEcho upload bluesheets
url(r'^notify/meeting/bluesheet/?$', meeting_views.api_upload_bluesheet),
# Let MeetEcho tell us about session attendees
@ -60,6 +58,8 @@ urlpatterns = [
# latest versions
url(r'^rfcdiff-latest-json/%(name)s(?:-%(rev)s)?(\.txt|\.html)?/?$' % settings.URL_REGEXPS, api_views.rfcdiff_latest_json),
url(r'^rfcdiff-latest-json/(?P<name>[Rr][Ff][Cc] [0-9]+?)(\.txt|\.html)?/?$', api_views.rfcdiff_latest_json),
# direct authentication
url(r'^directauth/?$', api_views.directauth),
]
# Additional (standard) Tastypie endpoints


@ -9,6 +9,7 @@ import re
from jwcrypto.jwk import JWK
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
@ -32,6 +33,7 @@ import ietf
from ietf.person.models import Person, Email
from ietf.api import _api_list
from ietf.api.serializer import JsonExportMixin
from ietf.api.ietf_utils import is_valid_token
from ietf.doc.utils import fuzzy_find_documents
from ietf.ietfauth.views import send_account_creation_email
from ietf.ietfauth.utils import role_required
@ -388,3 +390,42 @@ def rfcdiff_latest_json(request, name, rev=None):
if not response:
raise Http404
return HttpResponse(json.dumps(response), content_type='application/json')
@csrf_exempt
def directauth(request):
    if request.method == "POST":
        raw_data = request.POST.get("data", None)
        if raw_data:
            try:
                data = json.loads(raw_data)
            except json.decoder.JSONDecodeError:
                data = None

        if raw_data is None or data is None:
            return HttpResponse(json.dumps(dict(result="failure",reason="invalid post")), content_type='application/json')

        authtoken = data.get('authtoken', None)
        username = data.get('username', None)
        password = data.get('password', None)

        if any([item is None for item in (authtoken, username, password)]):
            return HttpResponse(json.dumps(dict(result="failure",reason="invalid post")), content_type='application/json')

        if not is_valid_token("ietf.api.views.directauth", authtoken):
            return HttpResponse(json.dumps(dict(result="failure",reason="invalid authtoken")), content_type='application/json')

        user_query = User.objects.filter(username__iexact=username)

        # Matching email would be consistent with auth everywhere else in the app, but until we can map users well
        # in the imap server, people's annotations are associated with a very specific login.
        # If we get a second user of this API, add an "allow_any_email" argument.
        # Note well that we are using user.username, not what was passed to the API.
        if user_query.count() == 1 and authenticate(username = user_query.first().username, password = password):
            return HttpResponse(json.dumps(dict(result="success")), content_type='application/json')

        return HttpResponse(json.dumps(dict(result="failure", reason="authentication failed")), content_type='application/json')

    else:
        return HttpResponse(status=405)

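As a usage illustration, here is a minimal client sketch for the new endpoint. The host name, username, password, and token are placeholders, and the /api/directauth/ path assumes the urls.py entry added in this commit is mounted under the existing /api/ prefix.

    # Minimal client sketch (requests is only used here for illustration; any
    # HTTP client that can POST form data works the same way).
    import json
    import requests

    payload = {
        "authtoken": "nSZJDerbau6WZwbEAYuQ",  # must match APP_API_TOKENS for this endpoint
        "username": "user@example.org",
        "password": "secret",
    }
    r = requests.post(
        "https://datatracker.example.org/api/directauth/",  # hypothetical host
        data={"data": json.dumps(payload)},                 # JSON goes in the "data" form field
    )
    print(r.json())  # {"result": "success"} or {"result": "failure", "reason": "..."}

Note that the view always answers a POST with status 200 and a JSON body, so callers must inspect "result" and "reason" rather than the HTTP status; non-POST methods get a 405.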

@ -12,7 +12,7 @@ from ietf.doc.models import DocEvent
from ietf.meeting.models import Meeting, SessionPresentation
from ietf.person.models import Person
from ietf.secr.proceedings.proc_utils import is_powerpoint, post_process
from ietf.meeting.utils import is_powerpoint, post_process
class Command(BaseCommand):
help = ('Fix uploaded_filename and generate pdf from pptx')


@ -0,0 +1,51 @@
# Generated by Django 2.2.28 on 2023-03-14 16:10

from typing import List, Tuple

from django.db import migrations, models

import ietf.iesg.models


class Migration(migrations.Migration):

    initial = True

    dependencies: List[Tuple[str]] = [
    ]

    operations = [
        migrations.CreateModel(
            name='Telechat',
            fields=[
                ('telechat_id', models.IntegerField(primary_key=True, serialize=False)),
                ('telechat_date', models.DateField(blank=True, null=True)),
                ('minute_approved', models.IntegerField(blank=True, null=True)),
                ('wg_news_txt', models.TextField(blank=True)),
                ('iab_news_txt', models.TextField(blank=True)),
                ('management_issue', models.TextField(blank=True)),
                ('frozen', models.IntegerField(blank=True, null=True)),
                ('mi_frozen', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'telechat',
            },
        ),
        migrations.CreateModel(
            name='TelechatAgendaItem',
            fields=[
                ('id', models.AutoField(db_column='template_id', primary_key=True, serialize=False)),
                ('text', models.TextField(blank=True, db_column='template_text')),
                ('type', models.IntegerField(choices=[(1, 'Any Other Business (WG News, New Proposals, etc.)'), (2, 'IAB News'), (3, 'Management Item')], db_column='template_type', default=3)),
                ('title', models.CharField(db_column='template_title', max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='TelechatDate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(default=ietf.iesg.models.next_telechat_date)),
            ],
            options={
                'ordering': ['-date'],
            },
        ),
    ]


@ -0,0 +1,17 @@
# Generated by Django 2.2.28 on 2023-03-14 16:12

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('iesg', '0001_initial'),
    ]

    operations = [
        migrations.AddIndex(
            model_name='telechatdate',
            index=models.Index(fields=['-date'], name='iesg_telech_date_a0e0ed_idx'),
        ),
    ]


@ -63,7 +63,7 @@ from ietf.iesg.models import TelechatDate
from ietf.iesg.utils import telechat_page_count
from ietf.ietfauth.utils import has_role, role_required, user_is_person
from ietf.person.models import Person
from ietf.secr.proceedings.proc_utils import get_activity_stats
from ietf.meeting.utils import get_activity_stats
from ietf.doc.utils_search import fill_in_document_table_attributes, fill_in_telechat_date
from ietf.utils.timezone import date_today, datetime_from_date


@ -47,6 +47,7 @@ from ietf.meeting.models import Session, TimeSlot, Meeting, SchedTimeSessAssignm
from ietf.meeting.test_data import make_meeting_test_data, make_interim_meeting, make_interim_test_data
from ietf.meeting.utils import finalize, condition_slide_order
from ietf.meeting.utils import add_event_info_to_session_qs
from ietf.meeting.utils import create_recording, get_next_sequence
from ietf.meeting.views import session_draft_list, parse_agenda_filter_params, sessions_post_save, agenda_extract_schedule
from ietf.name.models import SessionStatusName, ImportantDateName, RoleName, ProceedingsMaterialTypeName
from ietf.utils.decorators import skip_coverage
@ -8100,3 +8101,20 @@ class ProceedingsTests(BaseMeetingTestCase):
pm = meeting.proceedings_materials.get(pk=pm.pk)
self.assertEqual(str(pm), 'This Is Not the Default Name')
self.assertEqual(pm.document.rev, orig_rev, 'Renaming should not change document revision')
def test_create_recording(self):
session = SessionFactory(meeting__type_id='ietf', meeting__number=72, group__acronym='mars')
filename = 'ietf42-testroomt-20000101-0800.mp3'
url = settings.IETF_AUDIO_URL + 'ietf{}/{}'.format(session.meeting.number, filename)
doc = create_recording(session, url)
self.assertEqual(doc.name,'recording-72-mars-1')
self.assertEqual(doc.group,session.group)
self.assertEqual(doc.external_url,url)
self.assertTrue(doc in session.materials.all())
def test_get_next_sequence(self):
session = SessionFactory(meeting__type_id='ietf', meeting__number=72, group__acronym='mars')
meeting = session.meeting
group = session.group
sequence = get_next_sequence(group,meeting,'recording')
self.assertEqual(sequence,1)


@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import datetime
import itertools
import os
import pytz
import requests
import subprocess
@ -19,13 +20,14 @@ from django.utils.encoding import smart_text
import debug # pyflakes:ignore
from ietf.dbtemplate.models import DBTemplate
from ietf.meeting.models import Session, SchedulingEvent, TimeSlot, Constraint, SchedTimeSessAssignment
from ietf.meeting.models import (Session, SchedulingEvent, TimeSlot,
Constraint, SchedTimeSessAssignment, SessionPresentation)
from ietf.doc.models import Document, DocAlias, State, NewRevisionDocEvent
from ietf.doc.models import DocEvent
from ietf.group.models import Group
from ietf.group.utils import can_manage_materials
from ietf.name.models import SessionStatusName, ConstraintName, DocTypeName
from ietf.person.models import Person
from ietf.secr.proceedings.proc_utils import import_audio_files
from ietf.utils.html import sanitize_document
from ietf.utils.log import log
from ietf.utils.timezone import date_today
@ -180,7 +182,6 @@ def finalize(meeting):
sp.rev = '00'
sp.save()
import_audio_files(meeting)
create_proceedings_templates(meeting)
meeting.proceedings_final = True
meeting.save()
@ -756,3 +757,156 @@ def write_doc_for_session(session, type_id, filename, contents):
with open(path / filename, "wb") as file:
file.write(contents.encode('utf-8'))
return
def create_recording(session, url, title=None, user=None):
'''
Creates the Document type=recording, setting external_url and creating
NewRevisionDocEvent
'''
sequence = get_next_sequence(session.group,session.meeting,'recording')
name = 'recording-{}-{}-{}'.format(session.meeting.number,session.group.acronym,sequence)
time = session.official_timeslotassignment().timeslot.time.strftime('%Y-%m-%d %H:%M')
if not title:
if url.endswith('mp3'):
title = 'Audio recording for {}'.format(time)
else:
title = 'Video recording for {}'.format(time)
doc = Document.objects.create(name=name,
title=title,
external_url=url,
group=session.group,
rev='00',
type_id='recording')
doc.set_state(State.objects.get(type='recording', slug='active'))
DocAlias.objects.create(name=doc.name).docs.add(doc)
# create DocEvent
NewRevisionDocEvent.objects.create(type='new_revision',
by=user or Person.objects.get(name='(System)'),
doc=doc,
rev=doc.rev,
desc='New revision available',
time=doc.time)
pres = SessionPresentation.objects.create(session=session,document=doc,rev=doc.rev)
session.sessionpresentation_set.add(pres)
return doc
def get_next_sequence(group, meeting, type):
'''
Returns the next sequence number to use for a document of type = type.
Takes a group=Group object, meeting=Meeting object, type = string
'''
aliases = DocAlias.objects.filter(name__startswith='{}-{}-{}-'.format(type, meeting.number, group.acronym))
if not aliases:
return 1
aliases = aliases.order_by('name')
sequence = int(aliases.last().name.split('-')[-1]) + 1
return sequence
def get_activity_stats(sdate, edate):
'''
This function takes a date range and produces a dictionary of statistics / objects for
use in an activity report. Generally the end date will be the date of the last meeting
and the start date will be the date of the meeting before that.
Data between midnight UTC on the specified dates are included in the stats.
'''
sdatetime = pytz.utc.localize(datetime.datetime.combine(sdate, datetime.time()))
edatetime = pytz.utc.localize(datetime.datetime.combine(edate, datetime.time()))
data = {}
data['sdate'] = sdate
data['edate'] = edate
events = DocEvent.objects.filter(doc__type='draft', time__gte=sdatetime, time__lt=edatetime)
data['actions_count'] = events.filter(type='iesg_approved').count()
data['last_calls_count'] = events.filter(type='sent_last_call').count()
new_draft_events = events.filter(newrevisiondocevent__rev='00')
new_drafts = list(set([e.doc_id for e in new_draft_events]))
data['new_docs'] = list(set([e.doc for e in new_draft_events]))
data['new_drafts_count'] = len(new_drafts)
data['new_drafts_updated_count'] = events.filter(doc__id__in=new_drafts,newrevisiondocevent__rev='01').count()
data['new_drafts_updated_more_count'] = events.filter(doc__id__in=new_drafts,newrevisiondocevent__rev='02').count()
update_events = events.filter(type='new_revision').exclude(doc__id__in=new_drafts)
data['updated_drafts_count'] = len(set([e.doc_id for e in update_events]))
# Calculate Final Four Weeks stats (ffw)
ffwdate = edatetime - datetime.timedelta(days=28)
ffw_new_count = events.filter(time__gte=ffwdate, newrevisiondocevent__rev='00').count()
try:
ffw_new_percent = format(ffw_new_count / float(data['new_drafts_count']), '.0%')
except ZeroDivisionError:
ffw_new_percent = 0
data['ffw_new_count'] = ffw_new_count
data['ffw_new_percent'] = ffw_new_percent
ffw_update_events = events.filter(time__gte=ffwdate, type='new_revision').exclude(doc__id__in=new_drafts)
ffw_update_count = len(set([e.doc_id for e in ffw_update_events]))
try:
ffw_update_percent = format(ffw_update_count / float(data['updated_drafts_count']),'.0%')
except ZeroDivisionError:
ffw_update_percent = 0
data['ffw_update_count'] = ffw_update_count
data['ffw_update_percent'] = ffw_update_percent
rfcs = events.filter(type='published_rfc')
data['rfcs'] = rfcs.select_related('doc').select_related('doc__group').select_related('doc__intended_std_level')
data['counts'] = {'std': rfcs.filter(doc__intended_std_level__in=('ps', 'ds', 'std')).count(),
'bcp': rfcs.filter(doc__intended_std_level='bcp').count(),
'exp': rfcs.filter(doc__intended_std_level='exp').count(),
'inf': rfcs.filter(doc__intended_std_level='inf').count()}
data['new_groups'] = Group.objects.filter(
type='wg',
groupevent__changestategroupevent__state='active',
groupevent__time__gte=sdatetime,
groupevent__time__lt=edatetime)
data['concluded_groups'] = Group.objects.filter(
type='wg',
groupevent__changestategroupevent__state='conclude',
groupevent__time__gte=sdatetime,
groupevent__time__lt=edatetime)
return data
def is_powerpoint(doc):
'''
Returns true if document is a Powerpoint presentation
'''
return doc.file_extension() in ('ppt', 'pptx')
def post_process(doc):
'''
Does post processing on uploaded file.
- Convert PPT to PDF
'''
if is_powerpoint(doc) and hasattr(settings, 'SECR_PPT2PDF_COMMAND'):
try:
cmd = list(settings.SECR_PPT2PDF_COMMAND) # Don't operate on the list actually in settings
cmd.append(doc.get_file_path()) # outdir
cmd.append(os.path.join(doc.get_file_path(), doc.uploaded_filename)) # filename
subprocess.check_call(cmd)
except (subprocess.CalledProcessError, OSError) as error:
log("Error converting PPT: %s" % (error))
return
# change extension
base, ext = os.path.splitext(doc.uploaded_filename)
doc.uploaded_filename = base + '.pdf'
e = DocEvent.objects.create(
type='changed_document',
by=Person.objects.get(name="(System)"),
doc=doc,
rev=doc.rev,
desc='Converted document to PDF',
)
doc.save_with_history([e])


@ -82,10 +82,9 @@ from ietf.meeting.utils import diff_meeting_schedules, prefetch_schedule_diff_ob
from ietf.meeting.utils import swap_meeting_schedule_timeslot_assignments, bulk_create_timeslots
from ietf.meeting.utils import preprocess_meeting_important_dates
from ietf.meeting.utils import new_doc_for_session, write_doc_for_session
from ietf.meeting.utils import get_activity_stats, post_process, create_recording
from ietf.message.utils import infer_message
from ietf.name.models import SlideSubmissionStatusName, ProceedingsMaterialTypeName, SessionPurposeName
from ietf.secr.proceedings.proc_utils import (get_activity_stats, post_process, import_audio_files,
create_recording)
from ietf.utils import markdown
from ietf.utils.decorators import require_api_key
from ietf.utils.hedgedoc import Note, NoteError
@ -3798,16 +3797,6 @@ class OldUploadRedirect(RedirectView):
def get_redirect_url(self, **kwargs):
return reverse_lazy('ietf.meeting.views.session_details',kwargs=self.kwargs)
@csrf_exempt
def api_import_recordings(request, number):
'''REST API to check for recording files and import'''
if request.method == 'POST':
meeting = get_meeting(number)
import_audio_files(meeting)
return HttpResponse(status=201)
else:
return HttpResponse(status=405)
@require_api_key
@role_required('Recording Manager')
@csrf_exempt


@ -1,117 +0,0 @@
import re
from django import forms
from django.db.models import Count
from ietf.group.models import Group, Role
from ietf.name.models import GroupStateName, GroupTypeName, RoleName
from ietf.person.models import Person, Email
# ---------------------------------------------
# Select Choices
# ---------------------------------------------
SEARCH_MEETING_CHOICES = (('',''),('NO','NO'),('YES','YES'))
# ---------------------------------------------
# Functions
# ---------------------------------------------
def get_person(name):
'''
This function takes a string which is in the name autocomplete format "name - (id)"
and returns a person object
'''
match = re.search(r'\((\d+)\)', name)
if not match:
return None
id = match.group(1)
try:
person = Person.objects.get(id=id)
except (Person.ObjectDoesNoExist, Person.MultipleObjectsReturned):
return None
return person
def get_parent_group_choices():
area_choices = [(g.id, g.name) for g in Group.objects.filter(type='area',state='active')]
other_parents = Group.objects.annotate(children=Count('group')).filter(children__gt=0).order_by('name').exclude(type='area')
other_choices = [(g.id, g.name) for g in other_parents]
choices = (('Working Group Areas',area_choices),('Other',other_choices))
return choices
# ---------------------------------------------
# Forms
# ---------------------------------------------
class DescriptionForm (forms.Form):
description = forms.CharField(widget=forms.Textarea(attrs={'rows':'20'}),required=True, strip=False)
class RoleForm(forms.Form):
name = forms.ModelChoiceField(RoleName.objects.filter(slug__in=('chair','editor','secr','techadv')),empty_label=None)
person = forms.CharField(max_length=50,widget=forms.TextInput(attrs={'class':'name-autocomplete'}),help_text="To see a list of people type the first name, or last name, or both.")
email = forms.CharField(widget=forms.Select(),help_text="Select an email")
group_acronym = forms.CharField(widget=forms.HiddenInput(),required=False)
def __init__(self, *args, **kwargs):
self.group = kwargs.pop('group')
super(RoleForm, self).__init__(*args,**kwargs)
# this form is re-used in roles app, use different roles in select
if self.group.features.custom_group_roles:
self.fields['name'].queryset = RoleName.objects.all()
# check for id within parenthesis to ensure name was selected from the list
def clean_person(self):
person = self.cleaned_data.get('person', '')
m = re.search(r'(\d+)', person)
if person and not m:
raise forms.ValidationError("You must select an entry from the list!")
# return person object
return get_person(person)
# check that email exists and return the Email object
def clean_email(self):
email = self.cleaned_data['email']
try:
obj = Email.objects.get(address=email)
except Email.ObjectDoesNoExist:
raise forms.ValidationError("Email address not found!")
# return email object
return obj
def clean(self):
# here we abort if there are any errors with individual fields
# One predictable problem is that the user types a name rather then
# selecting one from the list, as instructed to do. We need to abort
# so the error is displayed before trying to call get_person()
if any(self.errors):
# Don't bother validating the formset unless each form is valid on its own
return
super(RoleForm, self).clean()
cleaned_data = self.cleaned_data
person = cleaned_data['person']
email = cleaned_data['email']
name = cleaned_data['name']
group_acronym = cleaned_data['group_acronym']
if email.person != person:
raise forms.ValidationError('ERROR: The person associated with the chosen email address is different from the chosen person')
if Role.objects.filter(name=name,group=self.group,person=person,email=email):
raise forms.ValidationError('ERROR: This is a duplicate entry')
if not group_acronym:
raise forms.ValidationError('You must select a group.')
return cleaned_data
class SearchForm(forms.Form):
group_acronym = forms.CharField(max_length=12,required=False)
group_name = forms.CharField(max_length=80,required=False)
primary_area = forms.ModelChoiceField(queryset=Group.objects.filter(type='area',state='active'),required=False)
type = forms.ModelChoiceField(queryset=GroupTypeName.objects.all(),required=False)
meeting_scheduled = forms.CharField(widget=forms.Select(choices=SEARCH_MEETING_CHOICES),required=False)
state = forms.ModelChoiceField(queryset=GroupStateName.objects.exclude(slug__in=('dormant','unknown')),required=False)


@ -1,69 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
from django.urls import reverse
from ietf.utils.test_utils import TestCase
from ietf.group.models import Group
from ietf.secr.groups.forms import get_parent_group_choices
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.meeting.factories import MeetingFactory
from ietf.person.factories import PersonFactory
import debug # pyflakes:ignore
class GroupsTest(TestCase):
def test_get_parent_group_choices(self):
GroupFactory(type_id='area')
choices = get_parent_group_choices()
area = Group.objects.filter(type='area',state='active').first()
# This is opaque. Can it be rewritten to be more self-documenting?
self.assertEqual(choices[0][1][0][0],area.id)
# ------- Test Search -------- #
def test_search(self):
"Test Search"
MeetingFactory(type_id='ietf')
group = GroupFactory()
url = reverse('ietf.secr.groups.views.search')
post_data = {'group_acronym':group.acronym,'submit':'Search'}
self.client.login(username="secretary", password="secretary+password")
response = self.client.post(url,post_data,follow=True)
self.assertContains(response, group.acronym)
# ------- Test View -------- #
def test_view(self):
MeetingFactory(type_id='ietf')
group = GroupFactory()
url = reverse('ietf.secr.groups.views.view', kwargs={'acronym':group.acronym})
self.client.login(username="secretary", password="secretary+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
# ------- Test People -------- #
def test_people_delete(self):
role = RoleFactory(name_id='member')
group = role.group
id = role.id
url = reverse('ietf.secr.groups.views.delete_role', kwargs={'acronym':group.acronym,'id':role.id})
target = reverse('ietf.secr.groups.views.people', kwargs={'acronym':group.acronym})
self.client.login(username="secretary", password="secretary+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url, {'post':'yes'})
self.assertRedirects(response, target)
self.assertFalse(group.role_set.filter(id=id))
def test_people_add(self):
person = PersonFactory()
group = GroupFactory()
url = reverse('ietf.secr.groups.views.people', kwargs={'acronym':group.acronym})
post_data = {'group_acronym':group.acronym,
'name':'chair',
'person':'Joe Smith - (%s)' % person.id,
'email':person.email_set.all()[0].address,
'submit':'Add'}
self.client.login(username="secretary", password="secretary+password")
response = self.client.post(url,post_data,follow=True)
self.assertRedirects(response, url)
self.assertContains(response, 'added successfully')


@ -1,14 +0,0 @@
from django.conf import settings
from ietf.secr.groups import views
from ietf.utils.urls import url
urlpatterns = [
url(r'^$', views.search),
url(r'^blue-dot-report/$', views.blue_dot),
#(r'^ajax/get_ads/$', views.get_ads),
url(r'^%(acronym)s/$' % settings.URL_REGEXPS, views.view),
url(r'^%(acronym)s/delete/(?P<id>\d{1,6})/$' % settings.URL_REGEXPS, views.delete_role),
url(r'^%(acronym)s/charter/$' % settings.URL_REGEXPS, views.charter),
url(r'^%(acronym)s/people/$' % settings.URL_REGEXPS, views.people),
]


@ -1,301 +0,0 @@
from django.contrib import messages
from django.conf import settings
from django.shortcuts import render, get_object_or_404, redirect
from ietf.group.models import Group, GroupEvent, Role
from ietf.group.utils import save_group_in_history, get_charter_text
from ietf.ietfauth.utils import role_required
from ietf.person.models import Person
from ietf.secr.groups.forms import RoleForm, SearchForm
from ietf.secr.utils.meeting import get_current_meeting
from ietf.liaisons.views import contacts_from_roles
# -------------------------------------------------
# Helper Functions
# -------------------------------------------------
def add_legacy_fields(group):
'''
This function takes a Group object as input and adds legacy attributes:
start_date,proposed_date,concluded_date,meeting_scheduled
'''
# it's possible there could be multiple records of a certain type in which case
# we just return the latest record
query = GroupEvent.objects.filter(group=group, type="changed_state").order_by('time')
proposed = query.filter(changestategroupevent__state="proposed")
meeting = get_current_meeting()
if proposed:
group.proposed_date = proposed[0].time
active = query.filter(changestategroupevent__state="active")
if active:
group.start_date = active[0].time
concluded = query.filter(changestategroupevent__state="conclude")
if concluded:
group.concluded_date = concluded[0].time
if group.session_set.filter(meeting__number=meeting.number):
group.meeting_scheduled = 'YES'
else:
group.meeting_scheduled = 'NO'
group.chairs = group.role_set.filter(name="chair")
group.techadvisors = group.role_set.filter(name="techadv")
group.editors = group.role_set.filter(name="editor")
group.secretaries = group.role_set.filter(name="secr")
# Note: liaison_contacts is now a dict instead of a model instance with fields. In
# templates, the dict can still be accessed using '.contacts' and '.cc_contacts', though.
group.liaison_contacts = dict(
contacts=contacts_from_roles(group.role_set.filter(name='liaison_contact')),
cc_contacts=contacts_from_roles(group.role_set.filter(name='liaison_cc_contact')),
)
#fill_in_charter_info(group)
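# Illustrative usage sketch: how a caller such as search() below consumes this helper.
# The 'mars' acronym is a hypothetical example group, not a value used by this module.
def _example_add_legacy_fields(acronym='mars'):
    group = Group.objects.get(acronym=acronym)
    add_legacy_fields(group)
    # meeting_scheduled and chairs are always set; the date attributes only exist
    # when a matching state-change event was found, hence the getattr() guard
    return group.meeting_scheduled, getattr(group, 'proposed_date', None), group.chairs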
#--------------------------------------------------
# AJAX Functions
# -------------------------------------------------
'''
def get_ads(request):
""" AJAX function which takes a URL parameter, "area" and returns the area directors
in the form of a list of dictionaries with "id" and "value" keys(in json format).
Used to populate select options.
"""
results=[]
area = request.GET.get('area','')
qs = AreaDirector.objects.filter(area=area)
for item in qs:
d = {'id': item.id, 'value': item.person.first_name + ' ' + item.person.last_name}
results.append(d)
return HttpResponse(json.dumps(results), content_type='application/javascript')
'''
# -------------------------------------------------
# Standard View Functions
# -------------------------------------------------
@role_required('Secretariat')
def blue_dot(request):
'''
This is a report view. It returns a text/plain listing of working group chairs.
'''
people = Person.objects.filter(role__name__slug='chair',
role__group__type='wg',
role__group__state__slug__in=('active','bof','proposed')).distinct()
chairs = []
for person in people:
parts = person.name_parts()
groups = [ r.group.acronym for r in person.role_set.filter(name__slug='chair',
group__type='wg',
group__state__slug__in=('active','bof','proposed')) ]
entry = {'name':'%s, %s' % (parts[3], parts[1]),
'groups': ', '.join(groups)}
chairs.append(entry)
# sort the list
sorted_chairs = sorted(chairs, key = lambda a: a['name'])
return render(request, 'groups/blue_dot_report.txt', { 'chairs':sorted_chairs },
content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET,
)
@role_required('Secretariat')
def charter(request, acronym):
"""
View Group Charter
**Templates:**
* ``groups/charter.html``
**Template Variables:**
* group, charter_text
"""
group = get_object_or_404(Group, acronym=acronym)
# TODO: get_charter_text() should be updated to return None
if group.charter:
charter_text = get_charter_text(group)
else:
charter_text = ''
return render(request, 'groups/charter.html', {
'group': group,
'charter_text': charter_text},
)
@role_required('Secretariat')
def delete_role(request, acronym, id):
"""
Handle deleting roles for groups (chair, editor, advisor, secretary)
**Templates:**
* none
Redirects to people page on success.
"""
group = get_object_or_404(Group, acronym=acronym)
role = get_object_or_404(Role, id=id)
if request.method == 'POST' and request.POST['post'] == 'yes':
# save group
save_group_in_history(group)
role.delete()
messages.success(request, 'The entry was deleted successfully')
return redirect('ietf.secr.groups.views.people', acronym=acronym)
return render(request, 'confirm_delete.html', {'object': role})
@role_required('Secretariat')
def people(request, acronym):
"""
Edit Group Roles (Chairs, Secretary, etc)
**Templates:**
* ``groups/people.html``
**Template Variables:**
* form, group
"""
group = get_object_or_404(Group, acronym=acronym)
if request.method == 'POST':
# we need to pass group for form validation
form = RoleForm(request.POST,group=group)
if form.is_valid():
name = form.cleaned_data['name']
person = form.cleaned_data['person']
email = form.cleaned_data['email']
# save group
save_group_in_history(group)
Role.objects.create(name=name,
person=person,
email=email,
group=group)
if not email.origin or email.origin == person.user.username:
email.origin = "role: %s %s" % (group.acronym, name.slug)
email.save()
messages.success(request, 'New %s added successfully!' % name)
return redirect('ietf.secr.groups.views.people', acronym=group.acronym)
else:
form = RoleForm(initial={'name':'chair', 'group_acronym':group.acronym}, group=group)
return render(request, 'groups/people.html', {
'form':form,
'group':group},
)
@role_required('Secretariat')
def search(request):
"""
Search IETF Groups
**Templates:**
* ``groups/search.html``
**Template Variables:**
* form, results
"""
results = []
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
kwargs = {}
group_acronym = form.cleaned_data['group_acronym']
group_name = form.cleaned_data['group_name']
primary_area = form.cleaned_data['primary_area']
meeting_scheduled = form.cleaned_data['meeting_scheduled']
state = form.cleaned_data['state']
type = form.cleaned_data['type']
meeting = get_current_meeting()
# construct search query
if group_acronym:
kwargs['acronym__istartswith'] = group_acronym
if group_name:
kwargs['name__istartswith'] = group_name
if primary_area:
kwargs['parent'] = primary_area
if state:
kwargs['state'] = state
if type:
kwargs['type'] = type
#else:
# kwargs['type__in'] = ('wg','rg','ietf','ag','sdo','team')
if meeting_scheduled == 'YES':
kwargs['session__meeting__number'] = meeting.number
# perform query
if kwargs:
if meeting_scheduled == 'NO':
qs = Group.objects.filter(**kwargs).exclude(session__meeting__number=meeting.number).distinct()
else:
qs = Group.objects.filter(**kwargs).distinct()
else:
qs = Group.objects.all()
results = qs.order_by('acronym')
# if there's just one result go straight to view
if len(results) == 1:
return redirect('ietf.secr.groups.views.view', acronym=results[0].acronym)
# process GET argument to support link from area app
elif 'primary_area' in request.GET:
area = request.GET.get('primary_area','')
results = Group.objects.filter(parent__id=area,type='wg',state__in=('bof','active','proposed')).order_by('name')
form = SearchForm({'primary_area':area,'state':'','type':'wg'})
else:
form = SearchForm(initial={'state':'active'})
# loop through results and tack on meeting_scheduled because it is no longer an
# attribute of the meeting model
for result in results:
add_legacy_fields(result)
return render(request, 'groups/search.html', {
'results': results,
'form': form},
)
@role_required('Secretariat')
def view(request, acronym):
"""
View IETF Group details
**Templates:**
* ``groups/view.html``
**Template Variables:**
* group
"""
group = get_object_or_404(Group, acronym=acronym)
add_legacy_fields(group)
return render(request, 'groups/view.html', { 'group': group } )

View file

@ -1,44 +0,0 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
from django import forms
from ietf.doc.models import Document
from ietf.meeting.models import Session
from ietf.meeting.utils import add_event_info_to_session_qs
# ---------------------------------------------
# Globals
# ---------------------------------------------
VALID_SLIDE_EXTENSIONS = ('.doc','.docx','.pdf','.ppt','.pptx','.txt','.zip')
VALID_MINUTES_EXTENSIONS = ('.txt','.html','.htm','.pdf')
VALID_AGENDA_EXTENSIONS = ('.txt','.html','.htm')
VALID_BLUESHEET_EXTENSIONS = ('.pdf','.jpg','.jpeg')
#----------------------------------------------------------
# Forms
#----------------------------------------------------------
class RecordingForm(forms.Form):
external_url = forms.URLField(label='Url')
session = forms.ModelChoiceField(queryset=Session.objects)
session.widget.attrs['class'] = "select2-field"
session.widget.attrs['data-minimum-input-length'] = 0
def __init__(self, *args, **kwargs):
self.meeting = kwargs.pop('meeting')
super(RecordingForm, self).__init__(*args,**kwargs)
self.fields['session'].queryset = add_event_info_to_session_qs(
Session.objects.filter(meeting=self.meeting, type__in=['regular','plenary','other'])
).filter(current_status='sched').order_by('group__acronym')
class RecordingEditForm(forms.ModelForm):
class Meta:
model = Document
fields = ['external_url']
def __init__(self, *args, **kwargs):
super(RecordingEditForm, self).__init__(*args, **kwargs)
self.fields['external_url'].label='Url'

View file

@ -1,28 +0,0 @@
# Copyright The IETF Trust 2018-2020, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 10:52
from django.db import migrations
class Migration(migrations.Migration):
initial = True
dependencies = [
('meeting', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='InterimMeeting',
fields=[
],
options={
'proxy': True,
'indexes': [],
},
bases=('meeting.meeting',),
),
]

View file

@ -1,62 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import os
from django.conf import settings
from django.db import models
from ietf.meeting.models import Meeting
class InterimManager(models.Manager):
'''A custom manager to limit objects to type=interim'''
def get_queryset(self):
return super(InterimManager, self).get_queryset().filter(type='interim')
class InterimMeeting(Meeting):
'''
This class is a proxy of Meeting. Its purpose is to provide extra methods that are
useful for an interim meeting, to help in templates. Most information is derived from
the session associated with this meeting. We are assuming there is only one.
'''
class Meta:
proxy = True
objects = InterimManager()
def group(self):
return self.session_set.all()[0].group
def agenda(self): # pylint: disable=method-hidden
session = self.session_set.all()[0]
agendas = session.materials.exclude(states__slug='deleted').filter(type='agenda')
if agendas:
return agendas[0]
else:
return None
def minutes(self):
session = self.session_set.all()[0]
minutes = session.materials.exclude(states__slug='deleted').filter(type='minutes')
if minutes:
return minutes[0]
else:
return None
def get_proceedings_path(self, group=None):
return os.path.join(self.get_materials_path(),'proceedings.html')
def get_proceedings_url(self, group=None):
'''
If the proceedings file doesn't exist, return an empty string. For use in templates.
'''
if os.path.exists(self.get_proceedings_path()):
url = "%sproceedings/%s/proceedings.html" % (
settings.IETF_HOST_URL,
self.number)
return url
else:
return ''

View file

@ -1,50 +0,0 @@
CREATE TABLE `interim_slides` (
`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
`meeting_num` integer NOT NULL,
`group_acronym_id` integer,
`slide_num` integer,
`slide_type_id` integer NOT NULL,
`slide_name` varchar(255) NOT NULL,
`irtf` integer NOT NULL,
`interim` bool NOT NULL,
`order_num` integer,
`in_q` integer
)
;
CREATE TABLE `interim_minutes` (
`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
`meeting_num` integer NOT NULL,
`group_acronym_id` integer NOT NULL,
`filename` varchar(255) NOT NULL,
`irtf` integer NOT NULL,
`interim` bool NOT NULL
)
;
CREATE TABLE `interim_agenda` (
`id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
`meeting_num` integer NOT NULL,
`group_acronym_id` integer NOT NULL,
`filename` varchar(255) NOT NULL,
`irtf` integer NOT NULL,
`interim` bool NOT NULL
)
;
CREATE TABLE `interim_meetings` (
`meeting_num` integer NOT NULL PRIMARY KEY AUTO_INCREMENT,
`start_date` date ,
`end_date` date ,
`city` varchar(255) ,
`state` varchar(255) ,
`country` varchar(255) ,
`time_zone` integer,
`ack` longtext ,
`agenda_html` longtext ,
`agenda_text` longtext ,
`future_meeting` longtext ,
`overview1` longtext ,
`overview2` longtext ,
`group_acronym_id` integer
)
;
alter table interim_meetings auto_increment=201;

View file

@ -1,305 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
'''
proc_utils.py
This module contains all the functions for generating static proceedings pages
'''
import datetime
import os
import pytz
import re
import subprocess
from urllib.parse import urlencode
import debug # pyflakes:ignore
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from ietf.doc.models import Document, DocAlias, DocEvent, NewRevisionDocEvent, State
from ietf.group.models import Group
from ietf.meeting.models import Meeting, SessionPresentation, TimeSlot, SchedTimeSessAssignment, Session
from ietf.person.models import Person
from ietf.utils.log import log
from ietf.utils.mail import send_mail
from ietf.utils.timezone import make_aware
AUDIO_FILE_RE = re.compile(r'ietf(?P<number>[\d]+)-(?P<room>.*)-(?P<time>[\d]{8}-[\d]{4})')
VIDEO_TITLE_RE = re.compile(r'IETF(?P<number>[\d]+)-(?P<name>.*)-(?P<date>\d{8})-(?P<time>\d{4})')
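# Illustrative sketch of how AUDIO_FILE_RE decomposes an audio file basename of the
# form described in import_audio_files() below; the basename used here is the example
# filename from that docstring.
def _example_parse_audio_basename(basename='ietf90-salonb-20140721-1710'):
    match = AUDIO_FILE_RE.match(basename)
    # expected groups here: number='90', room='salonb', time='20140721-1710'
    return match.groupdict() if match else None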
def _get_session(number,name,date,time):
'''Lookup session using data from video title'''
meeting = Meeting.objects.get(number=number)
timeslot_time = make_aware(datetime.datetime.strptime(date + time,'%Y%m%d%H%M'), meeting.tz())
try:
assignment = SchedTimeSessAssignment.objects.get(
schedule__in = [meeting.schedule, meeting.schedule.base],
session__group__acronym = name.lower(),
timeslot__time = timeslot_time,
)
except (SchedTimeSessAssignment.DoesNotExist, SchedTimeSessAssignment.MultipleObjectsReturned):
return None
return assignment.session
def _get_urls_from_json(doc):
'''Returns list of dictionary title,url from search results'''
urls = []
for item in doc['items']:
title = item['snippet']['title']
#params = dict(v=item['snippet']['resourceId']['videoId'], list=item['snippet']['playlistId'])
params = [('v',item['snippet']['resourceId']['videoId']), ('list',item['snippet']['playlistId'])]
url = settings.YOUTUBE_BASE_URL + '?' + urlencode(params)
urls.append(dict(title=title, url=url))
return urls
def import_audio_files(meeting):
'''
Checks for audio files and creates corresponding materials (docs) for the Session
Expects audio files in the format ietf[meeting num]-[room]-YYYYMMDD-HHMM.*,
Example: ietf90-salonb-20140721-1710.mp3
'''
unmatched_files = []
path = os.path.join(settings.MEETING_RECORDINGS_DIR, meeting.type.slug + meeting.number)
if not os.path.exists(path):
return None
for filename in os.listdir(path):
timeslot = get_timeslot_for_filename(filename)
if timeslot:
sessions = Session.objects.with_current_status().filter(
timeslotassignments__timeslot=timeslot,
timeslotassignments__schedule=timeslot.meeting.schedule_id,
).filter(
current_status='sched',
).order_by('timeslotassignments__timeslot__time')
if not sessions:
continue
url = settings.IETF_AUDIO_URL + 'ietf{}/{}'.format(meeting.number, filename)
doc = get_or_create_recording_document(url, sessions[0])
attach_recording(doc, sessions)
else:
# use for reconciliation email
unmatched_files.append(filename)
if unmatched_files:
send_audio_import_warning(unmatched_files)
def get_timeslot_for_filename(filename):
'''Returns a timeslot matching the filename given.
NOTE: currently only works with ietfNN prefix (regular meetings)
'''
from ietf.meeting.utils import add_event_info_to_session_qs
basename, _ = os.path.splitext(filename)
match = AUDIO_FILE_RE.match(basename)
if match:
try:
meeting = Meeting.objects.get(number=match.groupdict()['number'])
room_mapping = {normalize_room_name(room.name): room.name for room in meeting.room_set.all()}
time = make_aware(datetime.datetime.strptime(match.groupdict()['time'],'%Y%m%d-%H%M'), meeting.tz())
slots = TimeSlot.objects.filter(
meeting=meeting,
location__name=room_mapping[match.groupdict()['room']],
time=time,
sessionassignments__schedule__in=[meeting.schedule, meeting.schedule.base if meeting.schedule else None],
).distinct()
uncancelled_slots = [t for t in slots if not add_event_info_to_session_qs(t.sessions.all()).filter(current_status='canceled').exists()]
return uncancelled_slots[0]
except (ObjectDoesNotExist, KeyError, IndexError):
return None
def attach_recording(doc, sessions):
'''Associate recording document with sessions'''
for session in sessions:
if doc not in session.materials.all():
# add document to session
presentation = SessionPresentation.objects.create(
session=session,
document=doc,
rev=doc.rev)
session.sessionpresentation_set.add(presentation)
if not doc.docalias.filter(name__startswith='recording-{}-{}'.format(session.meeting.number,session.group.acronym)):
sequence = get_next_sequence(session.group,session.meeting,'recording')
name = 'recording-{}-{}-{}'.format(session.meeting.number,session.group.acronym,sequence)
DocAlias.objects.create(name=name).docs.add(doc)
def normalize_room_name(name):
'''Returns room name converted to be used as portion of filename'''
return name.lower().replace(' ','').replace('/','_')
def get_or_create_recording_document(url,session):
try:
return Document.objects.get(external_url=url)
except ObjectDoesNotExist:
return create_recording(session,url)
def create_recording(session, url, title=None, user=None):
'''
Creates the Document type=recording, setting external_url and creating
NewRevisionDocEvent
'''
sequence = get_next_sequence(session.group,session.meeting,'recording')
name = 'recording-{}-{}-{}'.format(session.meeting.number,session.group.acronym,sequence)
time = session.official_timeslotassignment().timeslot.time.strftime('%Y-%m-%d %H:%M')
if not title:
if url.endswith('mp3'):
title = 'Audio recording for {}'.format(time)
else:
title = 'Video recording for {}'.format(time)
doc = Document.objects.create(name=name,
title=title,
external_url=url,
group=session.group,
rev='00',
type_id='recording')
doc.set_state(State.objects.get(type='recording', slug='active'))
DocAlias.objects.create(name=doc.name).docs.add(doc)
# create DocEvent
NewRevisionDocEvent.objects.create(type='new_revision',
by=user or Person.objects.get(name='(System)'),
doc=doc,
rev=doc.rev,
desc='New revision available',
time=doc.time)
pres = SessionPresentation.objects.create(session=session,document=doc,rev=doc.rev)
session.sessionpresentation_set.add(pres)
return doc
def get_next_sequence(group,meeting,type):
'''
Returns the next sequence number to use for a document of the given type.
Takes a Group object, a Meeting object, and a type string.
'''
aliases = DocAlias.objects.filter(name__startswith='{}-{}-{}-'.format(type,meeting.number,group.acronym))
if not aliases:
return 1
aliases = aliases.order_by('name')
sequence = int(aliases.last().name.split('-')[-1]) + 1
return sequence
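# Illustrative sketch: create_recording() above combines this sequence helper with the
# meeting number and group acronym to produce names like "recording-72-mars-1" (the
# same shape exercised by the unit tests).
def _example_next_recording_name(session):
    sequence = get_next_sequence(session.group, session.meeting, 'recording')
    return 'recording-{}-{}-{}'.format(session.meeting.number, session.group.acronym, sequence)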
def send_audio_import_warning(unmatched_files):
'''Send email to interested parties that some audio files weren't matched to timeslots'''
send_mail(request = None,
to = settings.AUDIO_IMPORT_EMAIL,
frm = "IETF Secretariat <ietf-secretariat@ietf.org>",
subject = "Audio file import warning",
template = "proceedings/audio_import_warning.txt",
context = dict(unmatched_files=unmatched_files),
extra = {})
# -------------------------------------------------
# End Recording Functions
# -------------------------------------------------
def get_activity_stats(sdate, edate):
'''
This function takes a date range and produces a dictionary of statistics / objects for
use in an activity report. Generally the end date will be the date of the last meeting
and the start date will be the date of the meeting before that.
Data between midnight UTC on the specified dates are included in the stats.
'''
sdatetime = pytz.utc.localize(datetime.datetime.combine(sdate, datetime.time()))
edatetime = pytz.utc.localize(datetime.datetime.combine(edate, datetime.time()))
data = {}
data['sdate'] = sdate
data['edate'] = edate
events = DocEvent.objects.filter(doc__type='draft', time__gte=sdatetime, time__lt=edatetime)
data['actions_count'] = events.filter(type='iesg_approved').count()
data['last_calls_count'] = events.filter(type='sent_last_call').count()
new_draft_events = events.filter(newrevisiondocevent__rev='00')
new_drafts = list(set([ e.doc_id for e in new_draft_events ]))
data['new_docs'] = list(set([ e.doc for e in new_draft_events ]))
data['new_drafts_count'] = len(new_drafts)
data['new_drafts_updated_count'] = events.filter(doc__id__in=new_drafts,newrevisiondocevent__rev='01').count()
data['new_drafts_updated_more_count'] = events.filter(doc__id__in=new_drafts,newrevisiondocevent__rev='02').count()
update_events = events.filter(type='new_revision').exclude(doc__id__in=new_drafts)
data['updated_drafts_count'] = len(set([ e.doc_id for e in update_events ]))
# Calculate Final Four Weeks stats (ffw)
ffwdate = edatetime - datetime.timedelta(days=28)
ffw_new_count = events.filter(time__gte=ffwdate,newrevisiondocevent__rev='00').count()
try:
ffw_new_percent = format(ffw_new_count / float(data['new_drafts_count']),'.0%')
except ZeroDivisionError:
ffw_new_percent = 0
data['ffw_new_count'] = ffw_new_count
data['ffw_new_percent'] = ffw_new_percent
ffw_update_events = events.filter(time__gte=ffwdate,type='new_revision').exclude(doc__id__in=new_drafts)
ffw_update_count = len(set([ e.doc_id for e in ffw_update_events ]))
try:
ffw_update_percent = format(ffw_update_count / float(data['updated_drafts_count']),'.0%')
except ZeroDivisionError:
ffw_update_percent = 0
data['ffw_update_count'] = ffw_update_count
data['ffw_update_percent'] = ffw_update_percent
rfcs = events.filter(type='published_rfc')
data['rfcs'] = rfcs.select_related('doc').select_related('doc__group').select_related('doc__intended_std_level')
data['counts'] = {'std':rfcs.filter(doc__intended_std_level__in=('ps','ds','std')).count(),
'bcp':rfcs.filter(doc__intended_std_level='bcp').count(),
'exp':rfcs.filter(doc__intended_std_level='exp').count(),
'inf':rfcs.filter(doc__intended_std_level='inf').count()}
data['new_groups'] = Group.objects.filter(
type='wg',
groupevent__changestategroupevent__state='active',
groupevent__time__gte=sdatetime,
groupevent__time__lt=edatetime)
data['concluded_groups'] = Group.objects.filter(
type='wg',
groupevent__changestategroupevent__state='conclude',
groupevent__time__gte=sdatetime,
groupevent__time__lt=edatetime)
return data
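# Illustrative usage sketch for an activity report; the two dates are placeholders
# standing in for the dates of two consecutive IETF meetings.
def _example_activity_stats():
    stats = get_activity_stats(datetime.date(2020, 3, 21), datetime.date(2020, 7, 25))
    # a few of the keys populated above
    return stats['new_drafts_count'], stats['updated_drafts_count'], stats['counts']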
def is_powerpoint(doc):
'''
Returns true if document is a Powerpoint presentation
'''
return doc.file_extension() in ('ppt','pptx')
def post_process(doc):
'''
Does post processing on uploaded file.
- Convert PPT to PDF
'''
if is_powerpoint(doc) and hasattr(settings,'SECR_PPT2PDF_COMMAND'):
try:
cmd = list(settings.SECR_PPT2PDF_COMMAND) # Don't operate on the list actually in settings
cmd.append(doc.get_file_path()) # outdir
cmd.append(os.path.join(doc.get_file_path(),doc.uploaded_filename)) # filename
subprocess.check_call(cmd)
except (subprocess.CalledProcessError, OSError) as error:
log("Error converting PPT: %s" % (error))
return
# change extension
base,ext = os.path.splitext(doc.uploaded_filename)
doc.uploaded_filename = base + '.pdf'
e = DocEvent.objects.create(
type='changed_document',
by=Person.objects.get(name="(System)"),
doc=doc,
rev=doc.rev,
desc='Converted document to PDF',
)
doc.save_with_history([e])

View file

@ -1,61 +0,0 @@
from django import template
from ietf.person.models import Person
register = template.Library()
@register.filter
def abbr_status(value):
"""
Converts RFC Status to a short abbreviation
"""
d = {'Proposed Standard':'PS',
'Draft Standard':'DS',
'Standard':'S',
'Historic':'H',
'Informational':'I',
'Experimental':'E',
'Best Current Practice':'BCP',
'Internet Standard':'IS'}
return d.get(value,value)
@register.filter(name='display_duration')
def display_duration(value):
"""
Maps a session requested duration from select index to
label."""
map = {'0':'None',
'1800':'30 Minutes',
'3600':'1 Hour',
'5400':'1.5 Hours',
'7200':'2 Hours',
'9000':'2.5 Hours'}
if value in map:
return map[value]
else:
x=int(value)
return "%d Hours %d Minutes %d Seconds"%(x//3600,(x%3600)//60,x%60)
@register.filter
def is_ppt(value):
'''
Checks if the value ends in ppt or pptx
'''
if value.endswith('ppt') or value.endswith('pptx'):
return True
else:
return False
@register.filter
def smart_login(user):
'''
Expects a Person object. If the person is Secretariat, returns "on behalf of the"; otherwise "a chair of the".
'''
if not isinstance (user, Person):
return user
if user.role_set.filter(name='secr',group__acronym='secretariat'):
return '%s, on behalf of the' % user
else:
return '%s, a chair of the' % user

View file

@ -1,192 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import debug # pyflakes:ignore
import io
import json
import os
from django.conf import settings
from django.urls import reverse
from ietf.doc.models import Document
from ietf.group.factories import RoleFactory
from ietf.meeting.models import SchedTimeSessAssignment, SchedulingEvent
from ietf.meeting.factories import MeetingFactory, SessionFactory
from ietf.person.models import Person
from ietf.name.models import SessionStatusName
from ietf.utils.test_utils import TestCase
from ietf.utils.mail import outbox
from ietf.secr.proceedings.proc_utils import (import_audio_files,
get_timeslot_for_filename, normalize_room_name, send_audio_import_warning,
get_or_create_recording_document, create_recording, get_next_sequence,
_get_session, _get_urls_from_json)
SECR_USER='secretary'
class ProceedingsTestCase(TestCase):
def test_main(self):
"Main Test"
MeetingFactory(type_id='ietf')
RoleFactory(name_id='chair',person__user__username='marschairman')
url = reverse('ietf.secr.proceedings.views.main')
self.client.login(username="secretary", password="secretary+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
# test chair access
self.client.logout()
self.client.login(username="marschairman", password="marschairman+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
class VideoRecordingTestCase(TestCase):
def test_get_session(self):
session = SessionFactory()
meeting = session.meeting
number = meeting.number
name = session.group.acronym
ts_time = session.official_timeslotassignment().timeslot.local_start_time()
date = ts_time.strftime('%Y%m%d')
time = ts_time.strftime('%H%M')
self.assertEqual(_get_session(number,name,date,time),session)
def test_get_urls_from_json(self):
path = os.path.join(settings.BASE_DIR, "../test/data/youtube-playlistitems.json")
with io.open(path) as f:
doc = json.load(f)
urls = _get_urls_from_json(doc)
self.assertEqual(len(urls),2)
self.assertEqual(urls[0]['title'],'IETF98 Wrap Up')
self.assertEqual(urls[0]['url'],'https://www.youtube.com/watch?v=lhYWB5FFkg4&list=PLC86T-6ZTP5jo6kIuqdyeYYhsKv9sUwG1')
class RecordingTestCase(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['MEETING_RECORDINGS_DIR']
def test_page(self):
meeting = MeetingFactory(type_id='ietf')
url = reverse('ietf.secr.proceedings.views.recording', kwargs={'meeting_num':meeting.number})
self.client.login(username="secretary", password="secretary+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_post(self):
session = SessionFactory(status_id='sched',meeting__type_id='ietf')
meeting = session.meeting
group = session.group
url = reverse('ietf.secr.proceedings.views.recording', kwargs={'meeting_num':meeting.number})
data = dict(group=group.acronym,external_url='http://youtube.com/xyz',session=session.pk)
self.client.login(username="secretary", password="secretary+password")
response = self.client.post(url,data,follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, group.acronym)
# now test edit
doc = session.materials.filter(type='recording').first()
external_url = 'http://youtube.com/aaa'
url = reverse('ietf.secr.proceedings.views.recording_edit', kwargs={'meeting_num':meeting.number,'name':doc.name})
response = self.client.post(url,dict(external_url=external_url),follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, external_url)
def test_import_audio_files(self):
session = SessionFactory(status_id='sched',meeting__type_id='ietf')
meeting = session.meeting
timeslot = session.official_timeslotassignment().timeslot
self.create_audio_file_for_timeslot(timeslot)
import_audio_files(meeting)
self.assertEqual(session.materials.filter(type='recording').count(),1)
def create_audio_file_for_timeslot(self, timeslot):
filename = self.get_filename_for_timeslot(timeslot)
path = os.path.join(settings.MEETING_RECORDINGS_DIR,'ietf' + timeslot.meeting.number,filename)
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
with io.open(path, "w") as f:
f.write('dummy')
def get_filename_for_timeslot(self, timeslot):
'''Returns the filename of a session recording given timeslot'''
return "{prefix}-{room}-{date}.mp3".format(
prefix=timeslot.meeting.type.slug + timeslot.meeting.number,
room=normalize_room_name(timeslot.location.name),
date=timeslot.local_start_time().strftime('%Y%m%d-%H%M'))
def test_import_audio_files_shared_timeslot(self):
meeting = MeetingFactory(type_id='ietf',number='72')
mars_session = SessionFactory(meeting=meeting,status_id='sched',group__acronym='mars')
ames_session = SessionFactory(meeting=meeting,status_id='sched',group__acronym='ames')
scheduled = SessionStatusName.objects.get(slug='sched')
SchedulingEvent.objects.create(
session=mars_session,
status=scheduled,
by=Person.objects.get(name='(System)')
)
SchedulingEvent.objects.create(
session=ames_session,
status=scheduled,
by=Person.objects.get(name='(System)')
)
timeslot = mars_session.official_timeslotassignment().timeslot
SchedTimeSessAssignment.objects.create(timeslot=timeslot,session=ames_session,schedule=meeting.schedule)
self.create_audio_file_for_timeslot(timeslot)
import_audio_files(meeting)
doc = mars_session.materials.filter(type='recording').first()
self.assertTrue(doc in ames_session.materials.all())
self.assertTrue(doc.docalias.filter(name='recording-72-mars-1'))
self.assertTrue(doc.docalias.filter(name='recording-72-ames-1'))
def test_normalize_room_name(self):
self.assertEqual(normalize_room_name('Test Room'),'testroom')
self.assertEqual(normalize_room_name('Rome/Venice'), 'rome_venice')
def test_get_timeslot_for_filename(self):
session = SessionFactory(meeting__type_id='ietf')
timeslot = session.timeslotassignments.first().timeslot
name = self.get_filename_for_timeslot(timeslot)
self.assertEqual(get_timeslot_for_filename(name),timeslot)
def test_get_or_create_recording_document(self):
session = SessionFactory(meeting__type_id='ietf', meeting__number=72, group__acronym='mars')
# test create
filename = 'ietf42-testroom-20000101-0800.mp3'
docs_before = Document.objects.filter(type='recording').count()
doc = get_or_create_recording_document(filename,session)
docs_after = Document.objects.filter(type='recording').count()
self.assertEqual(docs_after,docs_before + 1)
self.assertTrue(doc.external_url.endswith(filename))
# test get
docs_before = docs_after
doc2 = get_or_create_recording_document(filename,session)
docs_after = Document.objects.filter(type='recording').count()
self.assertEqual(docs_after,docs_before)
self.assertEqual(doc,doc2)
def test_create_recording(self):
session = SessionFactory(meeting__type_id='ietf', meeting__number=72, group__acronym='mars')
filename = 'ietf42-testroomt-20000101-0800.mp3'
url = settings.IETF_AUDIO_URL + 'ietf{}/{}'.format(session.meeting.number, filename)
doc = create_recording(session, url)
self.assertEqual(doc.name,'recording-72-mars-1')
self.assertEqual(doc.group,session.group)
self.assertEqual(doc.external_url,url)
self.assertTrue(doc in session.materials.all())
def test_get_next_sequence(self):
session = SessionFactory(meeting__type_id='ietf', meeting__number=72, group__acronym='mars')
meeting = session.meeting
group = session.group
sequence = get_next_sequence(group,meeting,'recording')
self.assertEqual(sequence,1)
def test_send_audio_import_warning(self):
length_before = len(outbox)
send_audio_import_warning(['recording-43-badroom-20000101-0800.mp3'])
self.assertEqual(len(outbox), length_before + 1)
self.assertTrue('Audio file import' in outbox[-1]['Subject'])

View file

@ -1,16 +0,0 @@
from django.conf import settings
from ietf.meeting.views import OldUploadRedirect
from ietf.utils.urls import url
from ietf.secr.proceedings import views
urlpatterns = [
url(r'^$', views.main),
# special offline URL for testing proceedings build
url(r'^process-pdfs/(?P<meeting_num>\d{1,3})/$', views.process_pdfs),
url(r'^(?P<meeting_num>\d{1,3})/$', views.select),
url(r'^(?P<meeting_num>\d{1,3})/recording/$', views.recording),
url(r'^(?P<meeting_num>\d{1,3})/recording/edit/(?P<name>[A-Za-z0-9_\-\+]+)$', views.recording_edit),
url(r'^(?P<num>\d{1,3}|interim-\d{4}-[A-Za-z0-9_\-\+]+)/%(acronym)s/$' % settings.URL_REGEXPS,
OldUploadRedirect.as_view(permanent=True)),
]

View file

@ -1,324 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import datetime
import glob
import itertools
import os
import debug # pyflakes:ignore
from django.conf import settings
from django.contrib import messages
from django.urls import reverse
from django.db.models import Max
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404, redirect
from ietf.secr.utils.decorators import sec_only
from ietf.secr.utils.group import get_my_groups
from ietf.secr.utils.meeting import get_timeslot, get_proceedings_url
from ietf.doc.models import Document, DocEvent
from ietf.person.models import Person
from ietf.ietfauth.utils import has_role, role_required
from ietf.meeting.models import Meeting, Session
from ietf.meeting.utils import add_event_info_to_session_qs
from ietf.secr.proceedings.forms import RecordingForm, RecordingEditForm
from ietf.secr.proceedings.proc_utils import (create_recording)
from ietf.utils.timezone import date_today
# -------------------------------------------------
# Globals
# -------------------------------------------------
AUTHORIZED_ROLES=('WG Chair','WG Secretary','RG Chair','RG Secretary', 'AG Secretary', 'RAG Secretary', 'IRTF Chair','IETF Trust Chair','IAB Group Chair','IAOC Chair','IAD','Area Director','Secretariat','Team Chair')
# -------------------------------------------------
# Helper Functions
# -------------------------------------------------
def build_choices(queryset):
'''
This function takes a queryset (or list) of Groups and builds a list of tuples for use
as choices in a select widget, using the acronym for both value and label.
'''
choices = [ (g.acronym,g.acronym) for g in queryset ]
return sorted(choices, key=lambda choice: choice[1])
def find_index(slide_id, qs):
'''
This function looks up a slide in a queryset of slides,
returning the index.
'''
for i in range(0,qs.count()):
if str(qs[i].pk) == slide_id:
return i
def get_doc_filename(doc):
'''
This function takes a Document of type slides, minutes, or agenda and returns
the full path to the file on disk.
'''
session = doc.session_set.all()[0]
meeting = session.meeting
if doc.external_url:
return os.path.join(meeting.get_materials_path(),doc.type.slug,doc.uploaded_filename)
else:
path = os.path.join(meeting.get_materials_path(),doc.type.slug,doc.name)
files = glob.glob(path + '.*')
# TODO we might want to choose from among multiple files using some logic
return files[0]
def get_unmatched_recordings(meeting):
'''
Returns a list of recording filenames that haven't been matched to a session
'''
unmatched_recordings = []
path = os.path.join(settings.MEETING_RECORDINGS_DIR,'ietf{}'.format(meeting.number))
try:
files = os.listdir(path)
except OSError:
files = []
url = settings.IETF_AUDIO_URL + 'ietf%s' % meeting.number
recordings = Document.objects.filter(type='recording',external_url__startswith=url)
filenames = [ d.external_url.split('/')[-1] for d in recordings ]
for file in files:
if file not in filenames:
unmatched_recordings.append(file)
return unmatched_recordings
def get_extras(meeting):
'''
Gather "extras" which are one off groups. ie iab-wcit(86)
'''
groups = []
sessions = Session.objects.filter(meeting=meeting).exclude(group__parent__type__in=('area','irtf'))
for session in sessions:
timeslot = get_timeslot(session)
if timeslot and timeslot.type_id == 'regular' and session.materials.all():
groups.append(session.group)
return groups
def get_next_slide_num(session):
'''
This function takes a session object and returns the
next slide number to use for a newly added slide as a string.
'''
if session.meeting.type_id == 'ietf':
pattern = 'slides-%s-%s' % (session.meeting.number,session.group.acronym)
elif session.meeting.type_id == 'interim':
pattern = 'slides-%s' % (session.meeting.number)
slides = Document.objects.filter(type='slides',name__startswith=pattern)
if slides:
nums = [ s.name.split('-')[-1] for s in slides ]
nums.sort(key=int)
return str(int(nums[-1]) + 1)
else:
return '0'
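# Illustrative sketch: for an IETF meeting the helper above scans existing documents
# named "slides-<meeting>-<acronym>-<n>" and returns the next numeric suffix as a
# string; composing the full name this way is an assumption about the caller.
def _example_next_slide_name(session):
    num = get_next_slide_num(session)
    return 'slides-%s-%s-%s' % (session.meeting.number, session.group.acronym, num)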
def get_next_order_num(session):
'''
This function takes a session object and returns the
next slide order number to use for a newly added slide as an integer.
'''
max_order = session.materials.aggregate(Max('order'))['order__max']
return max_order + 1 if max_order else 1
def parsedate(d):
'''
This function takes a date object and returns a tuple of year,month,day
'''
return (d.strftime('%Y'),d.strftime('%m'),d.strftime('%d'))
# --------------------------------------------------
# STANDARD VIEW FUNCTIONS
# --------------------------------------------------
@role_required(*AUTHORIZED_ROLES)
def main(request):
'''
List IETF Meetings. If the user is Secretariat the list includes all meetings; otherwise
show only those meetings whose corrections submission date has not passed.
**Templates:**
* ``proceedings/main.html``
**Template Variables:**
* meetings, interim_meetings, today
'''
if has_role(request.user,'Secretariat'):
meetings = Meeting.objects.filter(type='ietf').order_by('-number')
else:
# select meetings still within the cutoff period
today = date_today()
meetings = [m for m in Meeting.objects.filter(type='ietf').order_by('-number') if m.get_submission_correction_date()>=today]
groups = get_my_groups(request.user)
interim_sessions = add_event_info_to_session_qs(Session.objects.filter(group__in=groups, meeting__type='interim')).filter(current_status='sched').select_related('meeting')
interim_meetings = sorted({s.meeting for s in interim_sessions}, key=lambda m: m.date, reverse=True)
# tack on group for use in templates
for m in interim_meetings:
m.group = m.session_set.first().group
# use today's date to see if we're past the submission cutoff
today = date_today()
return render(request, 'proceedings/main.html',{
'meetings': meetings,
'interim_meetings': interim_meetings,
'today': today},
)
@sec_only
def process_pdfs(request, meeting_num):
'''
This function is used to update the database once meeting materials in PPT format
are converted to PDF format and uploaded to the server. It basically finds every PowerPoint
slide document for the given meeting and checks to see if there is a PDF version. If there
is, external_url is changed so that when proceedings are generated the URL will refer to the
PDF document.
'''
warn_count = 0
count = 0
meeting = get_object_or_404(Meeting, number=meeting_num)
ppt = Document.objects.filter(session__meeting=meeting,type='slides',uploaded_filename__endswith='.ppt').exclude(states__slug='deleted')
pptx = Document.objects.filter(session__meeting=meeting,type='slides',uploaded_filename__endswith='.pptx').exclude(states__slug='deleted')
for doc in itertools.chain(ppt,pptx):
base,ext = os.path.splitext(doc.uploaded_filename)
pdf_file = base + '.pdf'
path = os.path.join(settings.SECR_PROCEEDINGS_DIR,meeting_num,'slides',pdf_file)
if os.path.exists(path):
doc.uploaded_filename = pdf_file
e = DocEvent.objects.create(
type='changed_document',
by=Person.objects.get(name="(System)"),
doc=doc,
rev=doc.rev,
desc='Set URL to PDF version',
)
doc.save_with_history([e])
count += 1
else:
warn_count += 1
if warn_count:
messages.warning(request, '%s PDF files processed. %s PowerPoint files still not converted.' % (count, warn_count))
else:
messages.success(request, '%s PDF files processed' % count)
url = reverse('ietf.secr.proceedings.views.select', kwargs={'meeting_num':meeting_num})
return HttpResponseRedirect(url)
@role_required('Secretariat')
def recording(request, meeting_num):
'''
Enter Session recording info. Creates Document and associates it with Session.
For auditing purposes, lists all scheduled sessions and associated recordings, if
any. Also lists those audio recording files which haven't been matched to a
session.
'''
meeting = get_object_or_404(Meeting, number=meeting_num)
sessions = Session.objects.filter(
timeslotassignments__schedule__in=[meeting.schedule, meeting.schedule.base if meeting.schedule else None]
).exclude(
type__in=['reg','break']
).order_by('group__acronym')
if request.method == 'POST':
form = RecordingForm(request.POST,meeting=meeting)
if form.is_valid():
external_url = form.cleaned_data['external_url']
session = form.cleaned_data['session']
if Document.objects.filter(type='recording',external_url=external_url):
messages.error(request, "Recording already exists")
return redirect('ietf.secr.proceedings.views.recording', meeting_num=meeting_num)
else:
create_recording(session,external_url)
messages.success(request,'Recording added')
return redirect('ietf.secr.proceedings.views.recording', meeting_num=meeting_num)
else:
form = RecordingForm(meeting=meeting)
return render(request, 'proceedings/recording.html',{
'meeting':meeting,
'form':form,
'sessions':sessions,
'unmatched_recordings': get_unmatched_recordings(meeting)},
)
@role_required('Secretariat')
def recording_edit(request, meeting_num, name):
'''
Edit recording Document
'''
recording = get_object_or_404(Document, name=name)
meeting = get_object_or_404(Meeting, number=meeting_num)
if request.method == 'POST':
button_text = request.POST.get('submit', '')
if button_text == 'Cancel':
return redirect('ietf.secr.proceedings.views.recording', meeting_num=meeting_num)
form = RecordingEditForm(request.POST, instance=recording)
if form.is_valid():
# save record and rebuild proceedings
form.save(commit=False)
e = DocEvent.objects.create(
type='changed_document',
by=request.user.person,
doc=recording,
rev=recording.rev,
desc='Changed URL to %s' % recording.external_url,
)
recording.save_with_history([e])
messages.success(request,'Recording saved')
return redirect('ietf.secr.proceedings.views.recording', meeting_num=meeting_num)
else:
form = RecordingEditForm(instance=recording)
return render(request, 'proceedings/recording_edit.html',{
'meeting':meeting,
'form':form,
'recording':recording},
)
# TODO - should probably rename this since it's not selecting groups anymore
def select(request, meeting_num):
'''
Provide the secretariat only functions related to meeting materials management
'''
if not has_role(request.user,'Secretariat'):
return HttpResponseRedirect(reverse('ietf.meeting.views.materials_editable_groups', kwargs={'num':meeting_num}))
meeting = get_object_or_404(Meeting, number=meeting_num)
proceedings_url = get_proceedings_url(meeting)
# get the time proceedings were generated
path = os.path.join(settings.SECR_PROCEEDINGS_DIR,meeting.number,'index.html')
if os.path.exists(path):
last_run = datetime.datetime.fromtimestamp(os.path.getmtime(path), datetime.timezone.utc)
else:
last_run = None
# count PowerPoint files waiting to be converted
# TODO : This should look at SessionPresentation instead
ppt = Document.objects.filter(session__meeting=meeting,type='slides',uploaded_filename__endswith='.ppt').exclude(states__slug='deleted')
pptx = Document.objects.filter(session__meeting=meeting,type='slides',uploaded_filename__endswith='.pptx').exclude(states__slug='deleted')
ppt_count = ppt.count() + pptx.count()
return render(request, 'proceedings/select.html', {
'meeting': meeting,
'last_run': last_run,
'proceedings_url': proceedings_url,
'ppt_count': ppt_count},
)

View file

@ -1,67 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
from django.urls import reverse
from ietf.utils.test_utils import TestCase
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.person.models import Person
import debug # pyflakes:ignore
SECR_USER='secretary'
class SecrRolesMainTestCase(TestCase):
def setUp(self):
super().setUp()
GroupFactory(type_id='sdo') # need this for the RoleForm initialization
def test_main(self):
"Main Test"
url = reverse('ietf.secr.roles.views.main')
self.client.login(username="secretary", password="secretary+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_roles_delete(self):
role = RoleFactory(name_id='chair',group__acronym='mars')
group = role.group
id = role.id
url = reverse('ietf.secr.roles.views.delete_role', kwargs={'acronym':group.acronym,'id':role.id})
target = reverse('ietf.secr.roles.views.main')
self.client.login(username="secretary", password="secretary+password")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url, {'post':'yes'})
self.assertRedirects(response, target)
self.assertFalse(group.role_set.filter(id=id))
def test_roles_add(self):
person = Person.objects.get(name='Areað Irector')
group = GroupFactory()
url = reverse('ietf.secr.roles.views.main')
target = reverse('ietf.secr.roles.views.main') + '?group=%s' % group.acronym
post_data = {'group_acronym':group.acronym,
'name':'chair',
'person':'Joe Smith - (%s)' % person.id,
'email':person.email_set.all()[0].address,
'submit':'Add'}
self.client.login(username="secretary", password="secretary+password")
response = self.client.post(url,post_data,follow=True)
self.assertRedirects(response, target)
self.assertContains(response, 'added successfully')
def test_roles_add_no_group(self):
person = Person.objects.get(name='Areað Irector')
url = reverse('ietf.secr.roles.views.main')
post_data = {'group_acronym':'',
'name':'chair',
'person':'Joe Smith - (%s)' % person.id,
'email':person.email_set.all()[0].address,
'submit':'Add'}
self.client.login(username="secretary", password="secretary+password")
response = self.client.post(url,post_data,follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'You must select a group')

Some files were not shown because too many files have changed in this diff