From 064432c2dd698e7c1ebb8adcc3fbdb834aa2633c Mon Sep 17 00:00:00 2001
From: Nicolas Giard
Date: Thu, 10 Mar 2022 13:57:09 -0500
Subject: [PATCH 01/41] chore: bring docker dev init script up to date with bs5 branch (#3611)

---
 .devcontainer/docker-compose.extend.yml | 1 +
 docker/docker-compose.extend.yml | 3 +-
 docker/scripts/app-init.sh | 54 +-
 ietf/.gitignore | 2 +
 package-lock.json | 1112 +++++------------------
 5 files changed, 272 insertions(+), 900 deletions(-)

diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml
index 3493a18ff..cacd44df3 100644
--- a/.devcontainer/docker-compose.extend.yml
+++ b/.devcontainer/docker-compose.extend.yml
@@ -7,5 +7,6 @@ services:
       DJANGO_SETTINGS_MODULE: settings_local_sqlitetest
     volumes:
       - ..:/root/src
+      - /root/src/node_modules
     # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
     network_mode: service:db
\ No newline at end of file
diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml
index 9aba74173..979da02ef 100644
--- a/docker/docker-compose.extend.yml
+++ b/docker/docker-compose.extend.yml
@@ -5,7 +5,8 @@ services:
     ports:
       - '8000:8000'
     volumes:
-      - ..:/root/src
+      - ..:/root/src
+      - /root/src/node_modules
   db:
     ports:
       - '3306'
\ No newline at end of file
diff --git a/docker/scripts/app-init.sh b/docker/scripts/app-init.sh
index 9e11854f5..80d58b60f 100755
--- a/docker/scripts/app-init.sh
+++ b/docker/scripts/app-init.sh
@@ -4,6 +4,17 @@ WORKSPACEDIR="/root/src"
 service rsyslog start
+# Generate static assets
+
+npm install
+echo "Building static assets... (this could take a minute or two)"
+cd bootstrap
+npm install -g grunt-cli
+npm install
+grunt dist
+cp -r dist/. ../ietf/static/ietf/bootstrap/
+cd ..
+
 # Copy config files if needed
 if [ ! -f "$WORKSPACEDIR/ietf/settings_local.py" ]; then
@@ -41,33 +52,34 @@ fi
 # Create assets directories
-for sub in \
+for sub in \
     test/id \
     test/staging \
     test/archive \
     test/rfc \
     test/media \
     test/wiki/ietf \
-    data/nomcom_keys/public_keys \
-    data/developers/ietf-ftp \
-    data/developers/ietf-ftp/bofreq \
-    data/developers/ietf-ftp/charter \
-    data/developers/ietf-ftp/conflict-reviews \
-    data/developers/ietf-ftp/internet-drafts \
-    data/developers/ietf-ftp/rfc \
-    data/developers/ietf-ftp/status-changes \
-    data/developers/ietf-ftp/yang/catalogmod \
-    data/developers/ietf-ftp/yang/draftmod \
-    data/developers/ietf-ftp/yang/ianamod \
-    data/developers/ietf-ftp/yang/invalmod \
-    data/developers/ietf-ftp/yang/rfcmod \
-    data/developers/www6s \
-    data/developers/www6s/staging \
-    data/developers/www6s/wg-descriptions \
-    data/developers/www6s/proceedings \
-    data/developers/www6/ \
-    data/developers/www6/iesg \
-    data/developers/www6/iesg/evaluation \
+    data/nomcom_keys/public_keys \
+    data/developers/ietf-ftp \
+    data/developers/ietf-ftp/bofreq \
+    data/developers/ietf-ftp/charter \
+    data/developers/ietf-ftp/conflict-reviews \
+    data/developers/ietf-ftp/internet-drafts \
+    data/developers/ietf-ftp/rfc \
+    data/developers/ietf-ftp/status-changes \
+    data/developers/ietf-ftp/yang/catalogmod \
+    data/developers/ietf-ftp/yang/draftmod \
+    data/developers/ietf-ftp/yang/ianamod \
+    data/developers/ietf-ftp/yang/invalmod \
+    data/developers/ietf-ftp/yang/rfcmod \
+    data/developers/www6s \
+    data/developers/www6s/staging \
+    data/developers/www6s/wg-descriptions \
+    data/developers/www6s/proceedings \
+    data/developers/www6/ \
+    data/developers/www6/iesg \
+    data/developers/www6/iesg/evaluation \
+    data/developers/media/photo \
     ; do
     dir="/root/src/$sub"
     if [ !
-d "$dir" ]; then diff --git a/ietf/.gitignore b/ietf/.gitignore index 2cc1317c1..c82387cae 100644 --- a/ietf/.gitignore +++ b/ietf/.gitignore @@ -1,3 +1,5 @@ /*.pyc /settings_local.py +/settings_local_debug.py +/settings_local_sqlitetest.py /ietfdb.sql.gz diff --git a/package-lock.json b/package-lock.json index 45b1ebe0c..f907d9bc8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,12 +5,12 @@ "packages": { "": { "dependencies": { - "caniuse-lite": "1.0.30001282" + "caniuse-lite": "1.0.30001314" }, "devDependencies": { "browserlist": "latest", - "cypress": "9.0.0", - "cypress-real-events": "1.5.1", + "cypress": "9.5.1", + "cypress-real-events": "1.7.0", "npm-check-updates": "12.5.2" } }, @@ -68,9 +68,9 @@ } }, "node_modules/@cypress/request": { - "version": "2.88.7", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.7.tgz", - "integrity": "sha512-FTULIP2rnDJvZDT9t6B4nSfYR40ue19tVmv3wUcY05R9/FPCoMl1nAPJkzWzBCo7ltVn5ThQTbxiMoGBN7k0ig==", + "version": "2.88.10", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", + "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", "dev": true, "dependencies": { "aws-sign2": "~0.7.0", @@ -80,8 +80,7 @@ "extend": "~3.0.2", "forever-agent": "~0.6.1", "form-data": "~2.3.2", - "har-validator": "~5.1.3", - "http-signature": "~1.2.0", + "http-signature": "~1.3.6", "is-typedarray": "~1.0.0", "isstream": "~0.1.2", "json-stringify-safe": "~5.0.1", @@ -167,39 +166,6 @@ "semver": "^7.3.5" } }, - "node_modules/@npmcli/fs/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@npmcli/fs/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@npmcli/fs/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@npmcli/git": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-3.0.0.tgz", @@ -229,39 +195,6 @@ "node": ">=12" } }, - "node_modules/@npmcli/git/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@npmcli/git/node_modules/semver/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - 
"node": ">=10" - } - }, - "node_modules/@npmcli/git/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@npmcli/installed-package-contents": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-1.0.7.tgz", @@ -358,9 +291,9 @@ "dev": true }, "node_modules/@types/sinonjs__fake-timers": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.4.tgz", - "integrity": "sha512-IFQTJARgMUBF+xVd2b+hIgXWrZEjND3vJtRCvIelcFB5SIXfjV4bOHbHJ0eXKh+0COrBRc8MqteKAz/j88rE0A==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", "dev": true }, "node_modules/@types/sizzle": { @@ -412,22 +345,6 @@ "node": ">=8" } }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, "node_modules/ansi-align": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", @@ -616,6 +533,26 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -767,6 +704,30 @@ "node": ">=4" } }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -817,24 +778,6 @@ "node": ">= 10" } }, - "node_modules/cacache/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/cacache/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/cacheable-request": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", @@ -872,9 +815,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001282", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001282.tgz", - "integrity": "sha512-YhF/hG6nqBEllymSIjLtR2iWDDnChvhnVJqp+vloyt2tEHFG1yBR+ac2B/rOw0qOK0m0lEXU2dv4E/sMk5P9Kg==", + "version": "1.0.30001314", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001314.tgz", + "integrity": "sha512-0zaSO+TnCHtHJIbpLroX7nsD+vYuOVjl3uzFbJO1wMVbuveJA0RK2WcQA9ZUIOiO0/ArMiMgHJLxfEZhQiC0kw==", "funding": { "type": "opencollective", "url": "https://opencollective.com/browserslist" @@ -999,19 +942,18 @@ } }, "node_modules/cli-table3": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.0.tgz", - "integrity": "sha512-gnB85c3MGC7Nm9I/FkiasNBOKjOiO1RNuXXarQms37q4QMpWdlbBgD/VnOStA2faG1dpXMv31RFApjX1/QdgWQ==", + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.1.tgz", + "integrity": "sha512-w0q/enDHhPLq44ovMGdQeeDLvwxwavsJX7oQGYt/LrBlYsyaxyDnp6z3QzFut/6kLLKnlcUVJLrpB7KBfgG/RA==", "dev": true, "dependencies": { - "object-assign": "^4.1.0", "string-width": "^4.2.0" }, "engines": { "node": "10.* || >= 12.*" }, "optionalDependencies": { - "colors": "^1.1.2" + "colors": "1.4.0" } }, "node_modules/cli-truncate": { @@ -1204,25 +1146,26 @@ } }, "node_modules/cypress": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.0.0.tgz", - "integrity": "sha512-/93SWBZTw7BjFZ+I9S8SqkFYZx7VhedDjTtRBmXO0VzTeDbmxgK/snMJm/VFjrqk/caWbI+XY4Qr80myDMQvYg==", + "version": "9.5.1", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.5.1.tgz", + "integrity": "sha512-H7lUWB3Svr44gz1rNnj941xmdsCljXoJa2cDneAltjI9leKLMQLm30x6jLlpQ730tiVtIbW5HdUmBzPzwzfUQg==", "dev": true, "hasInstallScript": true, "dependencies": { - "@cypress/request": "^2.88.7", + "@cypress/request": "^2.88.10", "@cypress/xvfb": "^1.2.4", "@types/node": "^14.14.31", - "@types/sinonjs__fake-timers": "^6.0.2", + "@types/sinonjs__fake-timers": "8.1.1", "@types/sizzle": "^2.3.2", "arch": "^2.2.0", "blob-util": "^2.0.2", "bluebird": "^3.7.2", + "buffer": "^5.6.0", "cachedir": "^2.3.0", "chalk": "^4.1.0", "check-more-types": "^2.24.0", "cli-cursor": "^3.1.0", - "cli-table3": "~0.6.0", + "cli-table3": "~0.6.1", "commander": "^5.1.0", "common-tags": "^1.8.0", "dayjs": "^1.10.4", @@ -1246,10 +1189,10 @@ "pretty-bytes": "^5.6.0", "proxy-from-env": "1.0.0", "request-progress": "^3.0.0", + "semver": "^7.3.2", "supports-color": "^8.1.1", "tmp": "~0.2.1", "untildify": "^4.0.0", - "url": "^0.11.0", "yauzl": "^2.10.0" }, "bin": { @@ -1260,12 +1203,12 @@ } }, "node_modules/cypress-real-events": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/cypress-real-events/-/cypress-real-events-1.5.1.tgz", - "integrity": 
"sha512-Jwi/IJePcZrKyhdtVddaf+mqJrj3y1vpREMDgtWwz+oxvj5FbBpeU0ASu9zpB3bMbsMo7g//buopZIe4jx3iSA==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/cypress-real-events/-/cypress-real-events-1.7.0.tgz", + "integrity": "sha512-iyXp07j0V9sG3YClVDcvHN2DAQDgr+EjTID82uWDw6OZBlU3pXEBqTMNYqroz3bxlb0k+F74U81aZwzMNaKyew==", "dev": true, "peerDependencies": { - "cypress": "^4.x || ^5.x || ^6.x || ^7.x || ^8.x" + "cypress": "^4.x || ^5.x || ^6.x || ^7.x || ^8.x || ^9.x" } }, "node_modules/dashdash": { @@ -1541,18 +1484,6 @@ "node >=0.6.0" ] }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, "node_modules/fast-memoize": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/fast-memoize/-/fast-memoize-2.5.2.tgz", @@ -1797,29 +1728,6 @@ "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", "dev": true }, - "node_modules/har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/har-validator": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", - "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", - "deprecated": "this library is no longer supported", - "dev": true, - "dependencies": { - "ajv": "^6.12.3", - "har-schema": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -1889,18 +1797,17 @@ } }, "node_modules/http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", "dev": true, "dependencies": { "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" }, "engines": { - "node": ">=0.8", - "npm": ">=1.3.7" + "node": ">=0.10" } }, "node_modules/human-signals": { @@ -1921,6 +1828,26 @@ "ms": "^2.0.0" } }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/ignore-walk": { "version": "4.0.1", "resolved": 
"https://registry.npmjs.org/ignore-walk/-/ignore-walk-4.0.1.tgz", @@ -2176,15 +2103,9 @@ } }, "node_modules/json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "dev": true - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", "dev": true }, "node_modules/json-stringify-safe": { @@ -2236,9 +2157,9 @@ ] }, "node_modules/jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", "dev": true, "engines": [ "node >=0.6.0" @@ -2246,7 +2167,7 @@ "dependencies": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, @@ -2475,6 +2396,18 @@ "node": ">=0.10.0" } }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/make-fetch-happen": { "version": "10.0.5", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.5.tgz", @@ -2695,12 +2628,6 @@ "node": ">=8" } }, - "node_modules/minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/minizlib": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", @@ -2714,12 +2641,6 @@ "node": ">= 8" } }, - "node_modules/minizlib/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", @@ -2771,18 +2692,6 @@ "node": "^12.22 || ^14.13 || >=16" } }, - "node_modules/node-gyp/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/node-gyp/node_modules/nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", @@ -2798,27 +2707,6 @@ "node": ">=6" } }, - "node_modules/node-gyp/node_modules/semver": { - "version": "7.3.5", - "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-gyp/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/normalize-url": { "version": "4.5.1", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", @@ -3074,18 +2962,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/npm-check-updates/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/npm-check-updates/node_modules/micromatch": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", @@ -3159,21 +3035,6 @@ "node": ">=8" } }, - "node_modules/npm-check-updates/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/npm-check-updates/node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -3214,12 +3075,6 @@ "node": ">=8.0" } }, - "node_modules/npm-check-updates/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/npm-install-checks": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-4.0.0.tgz", @@ -3232,39 +3087,6 @@ "node": ">=10" } }, - "node_modules/npm-install-checks/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm-install-checks/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm-install-checks/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, 
"node_modules/npm-normalize-package-bin": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", @@ -3297,39 +3119,6 @@ "node": ">=10" } }, - "node_modules/npm-package-arg/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm-package-arg/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm-package-arg/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/npm-packlist": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-3.0.0.tgz", @@ -3363,39 +3152,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/npm-pick-manifest/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm-pick-manifest/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm-pick-manifest/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/npm-registry-fetch": { "version": "13.0.1", "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-13.0.1.tgz", @@ -3441,15 +3197,6 @@ "node": "^12.13.0 || ^14.15.0 || >=16" } }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -3750,24 +3497,14 @@ } }, "node_modules/qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": 
"sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", "dev": true, "engines": { "node": ">=0.6" } }, - "node_modules/querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", - "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", - "dev": true, - "engines": { - "node": ">=0.4.x" - } - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -3888,18 +3625,6 @@ "node": ">=10" } }, - "node_modules/read-package-json/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/read-package-json/node_modules/normalize-package-data": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", @@ -3915,27 +3640,6 @@ "node": ">=10" } }, - "node_modules/read-package-json/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/read-package-json/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/registry-auth-token": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", @@ -4101,6 +3805,21 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true }, + "node_modules/semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/semver-diff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", @@ -4273,9 +3992,9 @@ "dev": true }, "node_modules/sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", "dev": true, "dependencies": { "asn1": "~0.2.3", @@ -4376,12 +4095,6 @@ "node": ">= 10" } }, - "node_modules/tar/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/throttleit": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", @@ -4576,58 +4289,6 @@ "is-ci": "bin.js" } }, - "node_modules/update-notifier/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/update-notifier/node_modules/semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/update-notifier/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/url": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", - "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", - "dev": true, - "dependencies": { - "punycode": "1.3.2", - "querystring": "0.2.0" - } - }, "node_modules/url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", @@ -4640,12 +4301,6 @@ "node": ">=4" } }, - "node_modules/url/node_modules/punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", - "dev": true - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -4774,6 +4429,12 @@ "node": ">=8" } }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/yaml": { "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", @@ -4808,9 +4469,9 @@ }, "dependencies": { "@cypress/request": { - "version": "2.88.7", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.7.tgz", - "integrity": "sha512-FTULIP2rnDJvZDT9t6B4nSfYR40ue19tVmv3wUcY05R9/FPCoMl1nAPJkzWzBCo7ltVn5ThQTbxiMoGBN7k0ig==", + "version": "2.88.10", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-2.88.10.tgz", + "integrity": "sha512-Zp7F+R93N0yZyG34GutyTNr+okam7s/Fzc1+i3kcqOP8vk6OuajuE9qZJ6Rs+10/1JFtXFYMdyarnU1rZuJesg==", "dev": true, "requires": { "aws-sign2": "~0.7.0", @@ -4820,8 +4481,7 @@ "extend": "~3.0.2", "forever-agent": "~0.6.1", "form-data": "~2.3.2", - "har-validator": "~5.1.3", - "http-signature": "~1.2.0", + 
"http-signature": "~1.3.6", "is-typedarray": "~1.0.0", "isstream": "~0.1.2", "json-stringify-safe": "~5.0.1", @@ -4897,32 +4557,6 @@ "requires": { "@gar/promisify": "^1.0.1", "semver": "^7.3.5" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "@npmcli/git": { @@ -4947,32 +4581,6 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.4.2.tgz", "integrity": "sha512-Xs3+hFPDSKQmL05Gs6NhvAADol1u9TmLoNoE03ZjszX6a5iYIO3rPUM4jIjoBUJeTaWEBMozjjmV70gvdRfIdw==", "dev": true - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - } - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true } } }, @@ -5051,9 +4659,9 @@ "dev": true }, "@types/sinonjs__fake-timers": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.4.tgz", - "integrity": "sha512-IFQTJARgMUBF+xVd2b+hIgXWrZEjND3vJtRCvIelcFB5SIXfjV4bOHbHJ0eXKh+0COrBRc8MqteKAz/j88rE0A==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", "dev": true }, "@types/sizzle": { @@ -5099,18 +4707,6 @@ "indent-string": "^4.0.0" } }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, "ansi-align": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", @@ -5251,6 +4847,12 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": 
"sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true + }, "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -5373,6 +4975,16 @@ } } }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, "buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", @@ -5415,23 +5027,6 @@ "ssri": "^8.0.1", "tar": "^6.0.2", "unique-filename": "^1.1.1" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "cacheable-request": { @@ -5464,9 +5059,9 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001282", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001282.tgz", - "integrity": "sha512-YhF/hG6nqBEllymSIjLtR2iWDDnChvhnVJqp+vloyt2tEHFG1yBR+ac2B/rOw0qOK0m0lEXU2dv4E/sMk5P9Kg==" + "version": "1.0.30001314", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001314.tgz", + "integrity": "sha512-0zaSO+TnCHtHJIbpLroX7nsD+vYuOVjl3uzFbJO1wMVbuveJA0RK2WcQA9ZUIOiO0/ArMiMgHJLxfEZhQiC0kw==" }, "caseless": { "version": "0.12.0", @@ -5558,13 +5153,12 @@ } }, "cli-table3": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.0.tgz", - "integrity": "sha512-gnB85c3MGC7Nm9I/FkiasNBOKjOiO1RNuXXarQms37q4QMpWdlbBgD/VnOStA2faG1dpXMv31RFApjX1/QdgWQ==", + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.1.tgz", + "integrity": "sha512-w0q/enDHhPLq44ovMGdQeeDLvwxwavsJX7oQGYt/LrBlYsyaxyDnp6z3QzFut/6kLLKnlcUVJLrpB7KBfgG/RA==", "dev": true, "requires": { - "colors": "^1.1.2", - "object-assign": "^4.1.0", + "colors": "1.4.0", "string-width": "^4.2.0" } }, @@ -5715,24 +5309,25 @@ "dev": true }, "cypress": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.0.0.tgz", - "integrity": "sha512-/93SWBZTw7BjFZ+I9S8SqkFYZx7VhedDjTtRBmXO0VzTeDbmxgK/snMJm/VFjrqk/caWbI+XY4Qr80myDMQvYg==", + "version": "9.5.1", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-9.5.1.tgz", + "integrity": "sha512-H7lUWB3Svr44gz1rNnj941xmdsCljXoJa2cDneAltjI9leKLMQLm30x6jLlpQ730tiVtIbW5HdUmBzPzwzfUQg==", "dev": true, "requires": { - "@cypress/request": "^2.88.7", + "@cypress/request": "^2.88.10", "@cypress/xvfb": "^1.2.4", "@types/node": "^14.14.31", - "@types/sinonjs__fake-timers": "^6.0.2", + "@types/sinonjs__fake-timers": "8.1.1", "@types/sizzle": "^2.3.2", "arch": "^2.2.0", "blob-util": "^2.0.2", "bluebird": "^3.7.2", + "buffer": "^5.6.0", "cachedir": "^2.3.0", "chalk": "^4.1.0", "check-more-types": "^2.24.0", "cli-cursor": "^3.1.0", - "cli-table3": "~0.6.0", + "cli-table3": "~0.6.1", "commander": "^5.1.0", "common-tags": "^1.8.0", "dayjs": "^1.10.4", @@ 
-5756,17 +5351,17 @@ "pretty-bytes": "^5.6.0", "proxy-from-env": "1.0.0", "request-progress": "^3.0.0", + "semver": "^7.3.2", "supports-color": "^8.1.1", "tmp": "~0.2.1", "untildify": "^4.0.0", - "url": "^0.11.0", "yauzl": "^2.10.0" } }, "cypress-real-events": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/cypress-real-events/-/cypress-real-events-1.5.1.tgz", - "integrity": "sha512-Jwi/IJePcZrKyhdtVddaf+mqJrj3y1vpREMDgtWwz+oxvj5FbBpeU0ASu9zpB3bMbsMo7g//buopZIe4jx3iSA==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/cypress-real-events/-/cypress-real-events-1.7.0.tgz", + "integrity": "sha512-iyXp07j0V9sG3YClVDcvHN2DAQDgr+EjTID82uWDw6OZBlU3pXEBqTMNYqroz3bxlb0k+F74U81aZwzMNaKyew==", "dev": true, "requires": {} }, @@ -5984,18 +5579,6 @@ "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", "dev": true }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, "fast-memoize": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/fast-memoize/-/fast-memoize-2.5.2.tgz", @@ -6194,22 +5777,6 @@ "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", "dev": true }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true - }, - "har-validator": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", - "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", - "dev": true, - "requires": { - "ajv": "^6.12.3", - "har-schema": "^2.0.0" - } - }, "has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -6266,14 +5833,14 @@ } }, "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", "dev": true, "requires": { "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" } }, "human-signals": { @@ -6291,6 +5858,12 @@ "ms": "^2.0.0" } }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true + }, "ignore-walk": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-4.0.1.tgz", @@ -6489,15 +6062,9 @@ } }, "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - 
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", "dev": true }, "json-stringify-safe": { @@ -6538,14 +6105,14 @@ "dev": true }, "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", "dev": true, "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, @@ -6721,6 +6288,15 @@ "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", "dev": true }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, "make-fetch-happen": { "version": "10.0.5", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.5.tgz", @@ -6833,14 +6409,6 @@ "dev": true, "requires": { "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "minipass-collect": { @@ -6909,14 +6477,6 @@ "requires": { "minipass": "^3.0.0", "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "mkdirp": { @@ -6955,15 +6515,6 @@ "which": "^2.0.2" }, "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, "nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", @@ -6972,21 +6523,6 @@ "requires": { "abbrev": "1" } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true } } }, @@ -7178,15 +6714,6 @@ "p-locate": "^5.0.0" } }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, "micromatch": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", @@ -7236,15 +6763,6 @@ "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "dev": true }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, "slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -7275,12 +6793,6 @@ "requires": { "is-number": "^7.0.0" } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true } } }, @@ -7291,32 +6803,6 @@ "dev": true, "requires": { "semver": "^7.1.1" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "npm-normalize-package-bin": { @@ -7344,30 +6830,6 @@ "requires": { "lru-cache": "^6.0.0" } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true } } }, @@ -7393,32 +6855,6 @@ "npm-normalize-package-bin": "^1.0.1", "npm-package-arg": "^9.0.0", "semver": "^7.3.5" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": 
"sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "npm-registry-fetch": { @@ -7457,12 +6893,6 @@ "set-blocking": "^2.0.0" } }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true - }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -7687,15 +7117,9 @@ } }, "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "dev": true - }, - "querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", "dev": true }, "queue-microtask": { @@ -7780,15 +7204,6 @@ "lru-cache": "^6.0.0" } }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, "normalize-package-data": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", @@ -7800,21 +7215,6 @@ "semver": "^7.3.4", "validate-npm-package-license": "^3.0.1" } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true } } }, @@ -7937,6 +7337,15 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true }, + "semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, "semver-diff": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", @@ -8081,9 +7490,9 @@ "dev": true }, "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", 
"dev": true, "requires": { "asn1": "~0.2.3", @@ -8153,14 +7562,6 @@ "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } } }, "throttleit": { @@ -8317,57 +7718,6 @@ "requires": { "ci-info": "^2.0.0" } - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } - } - }, - "uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "url": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", - "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", - "dev": true, - "requires": { - "punycode": "1.3.2", - "querystring": "0.2.0" - }, - "dependencies": { - "punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", - "dev": true } } }, @@ -8484,6 +7834,12 @@ "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", "dev": true }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "yaml": { "version": "1.10.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", From 2493fd063da56233a6263c34f5e4bd18f29edb12 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 10 Mar 2022 14:17:47 -0500 Subject: [PATCH 02/41] ci: only run codeql on schedule --- .github/workflows/codeql-analysis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 0b7bb0566..2ed7034d6 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -3,8 +3,8 @@ name: CodeQL Analysis on: # push: # branches: [ main ] - pull_request: - branches: [ main ] + # pull_request: + # branches: [ main ] schedule: - cron: '23 1 * * 1' From bd8638e6df5589637168fcf9fbc0cfcb06adcf86 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 10 Mar 2022 15:21:26 -0400 Subject: [PATCH 03/41] feat: shorten long agenda filter button labels (#3602) Sessions with long names lead to poor rendering of the agenda filter buttons on the 'customize the agenda view' page. 
This truncates those labels at 3 words. --- ietf/templates/meeting/agenda_filter.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ietf/templates/meeting/agenda_filter.html b/ietf/templates/meeting/agenda_filter.html index 8c06aedd4..16a1d8a63 100644 --- a/ietf/templates/meeting/agenda_filter.html +++ b/ietf/templates/meeting/agenda_filter.html @@ -69,9 +69,9 @@ Optional parameters: {% if button.toggled_by %}data-filter-keywords="{{ button.toggled_by|join:"," }}"{% endif %} data-filter-item="{{ button.keyword }}"> {% if button.is_bof %} - {{ button.label }} + {{ button.label|truncatewords:"3" }} {% else %} - {{ button.label }} + {{ button.label|truncatewords:"3" }} {% endif %} From c251b95e4293599aaaaf404b5c51cdb9cea0e56b Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Thu, 10 Mar 2022 14:23:48 -0600 Subject: [PATCH 04/41] docs: Remove stale and incomplete information from the README (#3612) --- README.md | 101 +++--------------------------------------------------- 1 file changed, 4 insertions(+), 97 deletions(-) diff --git a/README.md b/README.md index 4a9acca5d..b2b9cb6e6 100644 --- a/README.md +++ b/README.md @@ -18,13 +18,8 @@ - [Changelog](https://github.com/ietf-tools/datatracker/blob/main/CHANGELOG.md) - [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) - [Getting Started](#getting-started) - - [Prerequisites](#prerequisites) - [Git Cloning Tips](#git-cloning-tips) - - [Code Tree Overview](#code-tree-overview) - - [Adding a New Web Page](#adding-a-new-web-page) - - [Testing your work](#testing-your-work) -- [Docker Dev Environment](docker/README.md) -- [Continuous Integration](#continuous-integration) + - [Docker Dev Environment](docker/README.md) - [Database & Assets](#database--assets) - [Old Datatracker Branches](https://github.com/ietf-tools/old-datatracker-branches/branches/all) @@ -38,15 +33,6 @@ This project is following the standard **Git Feature Workflow** development mode You can submit bug reports, enhancement and new feature requests in the [discussions](https://github.com/ietf-tools/datatracker/discussions) area. Accepted tickets will be converted to issues. -#### Prerequisites - -- Python 3.6 -- Django 2.x -- Node.js 16.x -- MariaDB 10 - -> See the [Docker Dev Environment](docker/README.md) section for a preconfigured docker environment. - #### Git Cloning Tips Because of the extensive history of this project, cloning the datatracker project locally can take a long time / disk space. You can speed up the cloning process by limiting the history depth, for example: @@ -60,90 +46,15 @@ Because of the extensive history of this project, cloning the datatracker projec git clone --shallow-since=DATE https://github.com/ietf-tools/datatracker.git ``` -#### Code Tree Overview +#### Overview of the datatracker models -The `ietf/templates/` directory contains Django templates used to generate web pages for the datatracker, mailing list, wgcharter and other things. - -Most of the other `ietf` sub-directories, such as `meeting`, contain the python/Django model and view information that go with the related templates. In these directories, the key files are: - -| File | Description | -|--|--| -| urls.py | binds a URL to a view, possibly selecting some data from the model. | -| models.py | has the data models for the tool area. | -| views.py | has the views for this tool area, and is where views are bound to the template. 
| - -#### Adding a New Web Page - -To add a new page to the tools, first explore the `models.py` to see if the model you need already exists. Within `models.py` are classes such as: - -```python -class IETFWG(models.Model): - ACTIVE = 1 - group_acronym = models.ForeignKey(Acronym, primary_key=True, unique=True, editable=False) - group_type = models.ForeignKey(WGType) - proposed_date = models.DateField(null=True, blank=True) - start_date = models.DateField(null=True, blank=True) - dormant_date = models.DateField(null=True, blank=True) - ... -``` - -In this example, the `IETFWG` class can be used to reference various fields of the database including `group_type`. Of note here is that `group_acronym` is the `Acronym` model so fields in that model can be accessed (e.g., `group_acronym.name`). - -Next, add a template for the new page in the proper sub-directory of the `ietf/templates` directory. For a simple page that iterates over one type of object, the key part of the template will look something like this: - -```html -{% for wg in object_list %} - -{{ wg }} -{{ wg.group_acronym.name }} - -{% endfor %} -``` -In this case, we're expecting `object_list` to be passed to the template from the view and expecting it to contain objects with the `IETFWG` model. - -Then add a view for the template to `views.py`. A simple view might look like: - -```python -def list_wgwebmail(request): - wgs = IETFWG.objects.all(); - return render_to_response('mailinglists/wgwebmail_list.html', {'object_list': wgs}) -``` -The selects the IETFWG objects from the database and renders the template with them in object_list. The model you're using has to be explicitly imported at the top of views.py in the imports statement. - -Finally, add a URL to display the view to `urls.py`. For this example, the reference to `list_wgwebmail` view is called: - -```python -urlpatterns += patterns('', - ... - (r'^wg/$', views.list_wgwebmail), -) -``` - -#### Testing your work - -Assuming you have the database settings configured already, you can run the server locally with: - -```sh - $ ietf/manage.py runserver localhost: - ``` -where `` is arbitrary. Then connect your web browser to `localhost:` and provide the URL to see your work. - -When you believe you are ready to commit your work, you should run the test suite to make sure that no tests break. You do this by running - -```sh - $ ietf/manage.py test --settings=settings_sqlitetest -``` - -### Docker Dev Environment +A beginning of a [walkthrough of the datatracker models](https://notes.ietf.org/iab-aid-datatracker-database-overview) was prepared for the AIB AID workshop. +#### Docker Dev Environment In order to simplify and reduce the time required for setup, a preconfigured docker environment is available. Read the [Docker Dev Environment](docker/README.md) guide to get started. -### Continuous Integration - -*TODO* - ### Database & Assets Nightly database dumps of the datatracker are available at @@ -151,7 +62,3 @@ https://www.ietf.org/lib/dt/sprint/ietf_utf8.sql.gz > Note that this link is provided as reference only. To update the database in your dev environment to the latest version, you should instead run the `docker/cleandb` script! -Additional data files used by the datatracker (e.g. instance drafts, charters, rfcs, agendas, minutes, etc.) are available at -https://www.ietf.org/standards/ids/internet-draft-mirror-sites/ - -> A script is available at `docker/scripts/app-rsync-extras.sh` to automatically fetch these resources via rsync. 
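(A side note on the label truncation introduced in PATCH 03 above: Django's `truncatewords:"3"` filter keeps the first three words of a session name and marks the cut with an ellipsis. The sketch below is an illustration only, with an invented sample label, and is not Django's actual implementation.)

```python
# Rough sketch of what the truncatewords filter does to a long button label.
def truncate_words(label: str, num: int = 3) -> str:
    words = label.split()
    if len(words) <= num:
        return label
    return " ".join(words[:num]) + " …"

# Hypothetical long session name, purely to show the effect:
print(truncate_words("Stay Home Meet Only Online BoF"))
# -> "Stay Home Meet …"
```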
From ccae60153c553f2907ff670cded75c4cadf3bcee Mon Sep 17 00:00:00 2001 From: Jim Fenton Date: Thu, 10 Mar 2022 16:01:15 -0800 Subject: [PATCH 05/41] docs: Suggest cloning a fork rather than ietf-tools repo (#3614) --- README.md | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index b2b9cb6e6..09abf4d32 100644 --- a/README.md +++ b/README.md @@ -35,20 +35,25 @@ You can submit bug reports, enhancement and new feature requests in the [discuss #### Git Cloning Tips +As outlined in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide, you will first want to create a fork of the datatracker project in your personal GitHub account before cloning it. + Because of the extensive history of this project, cloning the datatracker project locally can take a long time / disk space. You can speed up the cloning process by limiting the history depth, for example: - To fetch only up to the 10 latest commits: ```sh - git clone --depth=10 https://github.com/ietf-tools/datatracker.git + git clone --depth=10 https://github.com/jdoe/datatracker.git ``` - To fetch only up to a specific date: ```sh - git clone --shallow-since=DATE https://github.com/ietf-tools/datatracker.git + git clone --shallow-since=DATE https://github.com/jdoe/datatracker.git ``` +But substitute your GitHub username in place of *jdoe*. + #### Overview of the datatracker models -A beginning of a [walkthrough of the datatracker models](https://notes.ietf.org/iab-aid-datatracker-database-overview) was prepared for the AIB AID workshop. +A beginning of a [walkthrough of the datatracker models](https://notes.ietf.org/iab-aid-datatracker-database-overview) was prepared for the IAB AID workshop. + #### Docker Dev Environment In order to simplify and reduce the time required for setup, a preconfigured docker environment is available. From 387b4f42e418dd5ee648b56118ea224ff6bf4eb6 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 10 Mar 2022 19:45:17 -0500 Subject: [PATCH 06/41] docs: update README --- README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 09abf4d32..9cfc03adf 100644 --- a/README.md +++ b/README.md @@ -37,19 +37,17 @@ You can submit bug reports, enhancement and new feature requests in the [discuss As outlined in the [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) guide, you will first want to create a fork of the datatracker project in your personal GitHub account before cloning it. -Because of the extensive history of this project, cloning the datatracker project locally can take a long time / disk space. You can speed up the cloning process by limiting the history depth, for example: +Because of the extensive history of this project, cloning the datatracker project locally can take a long time / disk space. You can speed up the cloning process by limiting the history depth, for example *(replace `USERNAME` with your GitHub username)*: - To fetch only up to the 10 latest commits: ```sh - git clone --depth=10 https://github.com/jdoe/datatracker.git + git clone --depth=10 https://github.com/USERNAME/datatracker.git ``` - To fetch only up to a specific date: ```sh - git clone --shallow-since=DATE https://github.com/jdoe/datatracker.git + git clone --shallow-since=DATE https://github.com/USERNAME/datatracker.git ``` -But substitute your GitHub username in place of *jdoe*. 
- #### Overview of the datatracker models A beginning of a [walkthrough of the datatracker models](https://notes.ietf.org/iab-aid-datatracker-database-overview) was prepared for the IAB AID workshop. From 1efb3987999dba4b9e4ae111d6fe0aee3694c764 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 12:24:13 -0500 Subject: [PATCH 07/41] chore: add issue templates --- .github/ISSUE_TEMPLATE/bug_report.md | 38 +++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature-request.md | 20 ++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 20 ++++++++++++ .github/ISSUE_TEMPLATE/report-a-bug.md | 38 +++++++++++++++++++++++ 4 files changed, 116 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature-request.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/report-a-bug.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..f3d5c415e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md new file mode 100644 index 000000000..1e25a1ef0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -0,0 +1,20 @@ +--- +name: Feature Request +about: Submit ideas for new features or improvements. +title: '' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..11fc491ef --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. 
+ +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.md b/.github/ISSUE_TEMPLATE/report-a-bug.md new file mode 100644 index 000000000..2d49070a5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/report-a-bug.md @@ -0,0 +1,38 @@ +--- +name: Report a Bug +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. From 9c605a6d8d6cca4bcc2a231f64ddf1e778a7e24b Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 12:29:45 -0500 Subject: [PATCH 08/41] chore: update issue templates config --- .github/ISSUE_TEMPLATE/bug_report.md | 38 ----------------------- .github/ISSUE_TEMPLATE/config.yml | 8 +++++ .github/ISSUE_TEMPLATE/feature_request.md | 20 ------------ 3 files changed, 8 insertions(+), 58 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/config.yml delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index f3d5c415e..000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: bug -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..ce71e70c2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: false +contact_links: + - name: Help / Questions + url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions + about: Need help? Have a question on setting up the project or its usage? 
+ - name: Report a Security Issue + url: https://www.ietf.org/about/administration/policies-procedures/vulnerability-disclosure/ + about: Privately report security issues so they can be addressed quickly. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 11fc491ef..000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: enhancement -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. From b636a1e4af346116bfb7bf846cb6dc21074288de Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 12:30:35 -0500 Subject: [PATCH 09/41] chore: remove duplicate security issue link --- .github/ISSUE_TEMPLATE/config.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index ce71e70c2..869a4838c 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -3,6 +3,3 @@ contact_links: - name: Help / Questions url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions about: Need help? Have a question on setting up the project or its usage? - - name: Report a Security Issue - url: https://www.ietf.org/about/administration/policies-procedures/vulnerability-disclosure/ - about: Privately report security issues so they can be addressed quickly. \ No newline at end of file From e001bba28f58384fac2bb77d3a3410d1b6eb84f4 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 12:41:53 -0500 Subject: [PATCH 10/41] docs: point feature requests to discussions --- .github/ISSUE_TEMPLATE/config.yml | 3 +++ .github/ISSUE_TEMPLATE/feature-request.md | 20 -------------------- 2 files changed, 3 insertions(+), 20 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/feature-request.md diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 869a4838c..c0f7ffdca 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -3,3 +3,6 @@ contact_links: - name: Help / Questions url: https://github.com/ietf-tools/datatracker/discussions/categories/help-questions about: Need help? Have a question on setting up the project or its usage? + - name: New Feature / Enhancement Request + url: https://github.com/ietf-tools/datatracker/discussions/categories/ideas + about: Submit ideas for new features or improvements. diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md deleted file mode 100644 index 1e25a1ef0..000000000 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature Request -about: Submit ideas for new features or improvements. -title: '' -labels: enhancement -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 
- -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. From 91a341dac6d2af510f72dbf74f14a43c1bd60197 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 13:01:06 -0500 Subject: [PATCH 11/41] chore: add bug report form --- .github/ISSUE_TEMPLATE/report-a-bug.yml | 56 +++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/report-a-bug.yml diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.yml b/.github/ISSUE_TEMPLATE/report-a-bug.yml new file mode 100644 index 000000000..32af9f484 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/report-a-bug.yml @@ -0,0 +1,56 @@ +name: Report a Bug +description: Something isn't right? File a bug report +title: "" +labels: ["bug"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: textarea + id: what-happened + attributes: + label: What happened? + description: Also tell us, what did you expect to happen? + placeholder: Tell us what you see! + validations: + required: true + - type: input + id: version + attributes: + label: Version + description: What version of Datatracker are you running? + placeholder: e.g. 7.45.0 + - type: dropdown + id: os + attributes: + label: What operating system are you using? + options: + - Linux + - MacOS (arm) + - MacOS (x64) + - Windows + - Other + - type: dropdown + id: browsers + attributes: + label: What browsers are you seeing the problem on? + multiple: true + options: + - Chrome / Edge + - Firefox + - Safari + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. + render: shell + - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/ietf-tools/.github/blob/main/CODE_OF_CONDUCT.md) + options: + - label: I agree to follow the IETF's Code of Conduct + required: true From ec99aa846d8f37636b23a493250e076059fb6f68 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 13:01:29 -0500 Subject: [PATCH 12/41] chore: remove bug report issue template --- .github/ISSUE_TEMPLATE/report-a-bug.md | 38 -------------------------- 1 file changed, 38 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/report-a-bug.md diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.md b/.github/ISSUE_TEMPLATE/report-a-bug.md deleted file mode 100644 index 2d49070a5..000000000 --- a/.github/ISSUE_TEMPLATE/report-a-bug.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -name: Report a Bug -about: Create a report to help us improve -title: '' -labels: bug -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 
22] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. From b6d740a124232d8630ac89c6921640223765c26b Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 13:02:14 -0500 Subject: [PATCH 13/41] chore: fix bug report form --- .github/ISSUE_TEMPLATE/report-a-bug.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.yml b/.github/ISSUE_TEMPLATE/report-a-bug.yml index 32af9f484..b37d6e654 100644 --- a/.github/ISSUE_TEMPLATE/report-a-bug.yml +++ b/.github/ISSUE_TEMPLATE/report-a-bug.yml @@ -1,6 +1,5 @@ name: Report a Bug description: Something isn't right? File a bug report -title: "" labels: ["bug"] body: - type: markdown From ee686837ee1eef8da1c717624b50184a7cb5342f Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 11 Mar 2022 12:59:21 -0600 Subject: [PATCH 14/41] fix: point the api index page into Github instead of trac/svn --- ietf/templates/api/index.html | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/ietf/templates/api/index.html b/ietf/templates/api/index.html index 41a5a0bd6..f30c153c1 100644 --- a/ietf/templates/api/index.html +++ b/ietf/templates/api/index.html @@ -19,21 +19,11 @@ to generate an API which mirrors the Django ORM (Object Relational Mapping) for the database. Each Django model class maps down to the SQL database tables and up to the API. The Django models classes are defined in the - models.py files of the datatracker: -

- -

- https://svn.ietf.org/svn/tools/ietfdb/trunk/ietf/doc/models.py -
- https://svn.ietf.org/svn/tools/ietfdb/trunk/ietf/group/models.py -
- http://svn.ietf.org/svn/tools/ietfdb/trunk/ietf/iesg/models.py -
- … - + models.py files of the datatracker. Browse the code tree + for the models file in each app (such as ietf/doc/models.py, + ietf/group/models.py, …).

- The API top endpoint is at https://datatracker.ietf.org/api/v1/. The top endpoint lists inferior endpoints, and thus permits some autodiscovery, but there's really no substitute for looking at the actual ORM model classes. @@ -45,7 +35,7 @@ https://datatracker.ietf.org/api/v1/group/group/
- https://trac.ietf.org/trac/ietfdb/browser/trunk/ietf/group/models.py + ietf/group/models.py
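The JSON interface described above can be exercised directly. The sketch below is a minimal example; it assumes the standard Tastypie query parameters `format` and `limit` and the usual Tastypie response envelope (`meta` for paging, `objects` for the records), none of which are spelled out in this patch.

```python
# Minimal sketch: list a couple of groups via the datatracker API.
import json
from urllib.request import urlopen

url = "https://datatracker.ietf.org/api/v1/group/group/?format=json&limit=2"
with urlopen(url) as resp:
    data = json.load(resp)

# Tastypie wraps results in "meta" (paging info) and "objects" (the records).
for group in data["objects"]:
    print(group.get("acronym"), "-", group.get("name"))
```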

From 007e693d6474be3568ff616d3df3bb728bf6a92f Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 14:03:17 -0500 Subject: [PATCH 15/41] chore: remove os + version + log fields from bug report form --- .github/ISSUE_TEMPLATE/report-a-bug.yml | 24 +----------------------- 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.yml b/.github/ISSUE_TEMPLATE/report-a-bug.yml index b37d6e654..cad0d0307 100644 --- a/.github/ISSUE_TEMPLATE/report-a-bug.yml +++ b/.github/ISSUE_TEMPLATE/report-a-bug.yml @@ -14,37 +14,15 @@ body: placeholder: Tell us what you see! validations: required: true - - type: input - id: version - attributes: - label: Version - description: What version of Datatracker are you running? - placeholder: e.g. 7.45.0 - - type: dropdown - id: os - attributes: - label: What operating system are you using? - options: - - Linux - - MacOS (arm) - - MacOS (x64) - - Windows - - Other - type: dropdown id: browsers attributes: - label: What browsers are you seeing the problem on? + label: What browser(s) are you seeing the problem on? multiple: true options: - Chrome / Edge - Firefox - Safari - - type: textarea - id: logs - attributes: - label: Relevant log output - description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. - render: shell - type: checkboxes id: terms attributes: From 7b3ec180abd8901aa40d4acc424547bf9353e102 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 11 Mar 2022 13:44:35 -0600 Subject: [PATCH 16/41] fix: point to GitHub for bug reports (#3625) * fix: point to GitHub for bug reports * fix: point report bug link to template chooser * fix: point report bug link to template chooser (2) Co-authored-by: Nicolas Giard --- ietf/templates/500.html | 8 ++++---- ietf/templates/base.html | 8 ++++---- ietf/templates/base/menu.html | 6 +++--- ietf/templates/release/about.html | 9 ++++----- 4 files changed, 15 insertions(+), 16 deletions(-) diff --git a/ietf/templates/500.html b/ietf/templates/500.html index df81215d9..80c18a735 100644 --- a/ietf/templates/500.html +++ b/ietf/templates/500.html @@ -1,4 +1,4 @@ -{# Copyright The IETF Trust 2007, All Rights Reserved #} +{# Copyright The IETF Trust 2007-2022, All Rights Reserved #} {% extends "base.html" %} {% load static %} {% block title %}500 Internal Server Error{% endblock %} @@ -17,9 +17,9 @@

A failure report with details about what happened has been sent to the - server administrators. It would be helpful if you would file a bug - report with additional information at the - IETF database issue tracker, too. + server administrators. It would be helpful if you would create an issue + providing additional information at + GitHub, too.

{% endblock %} diff --git a/ietf/templates/base.html b/ietf/templates/base.html index 8890e6619..446c87e2d 100644 --- a/ietf/templates/base.html +++ b/ietf/templates/base.html @@ -1,5 +1,5 @@ {% load ietf_filters static %} -{# Copyright The IETF Trust 2015-2021, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2022, All Rights Reserved #} {% load origin %}{% origin %} {% load bootstrap3 %} @@ -133,12 +133,12 @@

{% if version_num %} About | - IETF Datatracker | + IETF Datatracker | Version {{ version_num }} | {{revision_date}} | {% endif %} Report a bug: - Tracker: + GitHub: {% if bugreport_email %} Email: {% endif %} @@ -170,4 +170,4 @@ {% endif %} - \ No newline at end of file + diff --git a/ietf/templates/base/menu.html b/ietf/templates/base/menu.html index 7f8886efa..b40e1daa1 100644 --- a/ietf/templates/base/menu.html +++ b/ietf/templates/base/menu.html @@ -1,4 +1,4 @@ -{# Copyright The IETF Trust 2015-2019, All Rights Reserved #}{% load origin %}{% origin %} +{# Copyright The IETF Trust 2015-2022, All Rights Reserved #}{% load origin %}{% origin %} {% load ietf_filters managed_groups wg_menu active_groups_menu group_filters cache %} {% if flavor != "top" %} @@ -157,10 +157,10 @@

  • API Help
  • Release notes
  • {% if flavor == "top" %}{% endif %} -
  • Report a bug
  • +
  • Report a bug
  • {% if flavor == "top" %}{% endif %} {% if flavor == "top" %} {% include "base/menu_user.html" %} -{% endif %} \ No newline at end of file +{% endif %} diff --git a/ietf/templates/release/about.html b/ietf/templates/release/about.html index ca306c1aa..481e375b3 100644 --- a/ietf/templates/release/about.html +++ b/ietf/templates/release/about.html @@ -1,5 +1,5 @@ {% extends "base.html" %} -{# Copyright The IETF Trust 2015, All Rights Reserved #} +{# Copyright The IETF Trust 2015-2022, All Rights Reserved #} {% load origin %} {% block title %}About the Datatracker{% endblock %} @@ -23,10 +23,9 @@

    All the Datatracker code is publicly avaliable from the - IETF tools SVN repository. - Bug tickets and wiki notes are available from the - Issue Tracker, and - there are also release notes available since version 2.00. + IETF tools GitHub repository. + Bug tickets should be reported as issues at GitHub. + There are also release notes available since version 2.00.

    From 9ffb74c4c38ff9ae9508105496fb71ab5cecd81a Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 11 Mar 2022 15:02:50 -0600 Subject: [PATCH 17/41] fix: remove stale explanation of status reports (#3628) --- ietf/templates/group/group_about_status_edit.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/ietf/templates/group/group_about_status_edit.html b/ietf/templates/group/group_about_status_edit.html index 749723d2e..ff20d107d 100644 --- a/ietf/templates/group/group_about_status_edit.html +++ b/ietf/templates/group/group_about_status_edit.html @@ -24,9 +24,4 @@ {% endbuttons %} -

    About Status Updates

    -

    Capturing group status updates in the datatracker allows including them in meeting proceedings. This capability was -added to address the IESG request at ticket 1773. -Not all groups are expected to provide status updates. Those that do have historically sent messages by email or have placed them on a wiki. For example, see the Kitten report sent to SAAG for IETF94 or the Routing area high level summaries for IETF94.

    - {% endblock %} From 85d4ce748bd324d46901b3ec69a3ed8a1b3c554f Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Fri, 11 Mar 2022 15:22:44 -0600 Subject: [PATCH 18/41] fix: adjust about page to reflect move to GitHub. Normalize information level on page. (#3629) --- ietf/release/urls.py | 2 +- ietf/templates/release/about.html | 67 ++++++++----------------------- 2 files changed, 18 insertions(+), 51 deletions(-) diff --git a/ietf/release/urls.py b/ietf/release/urls.py index 0053d9f2c..994f41190 100644 --- a/ietf/release/urls.py +++ b/ietf/release/urls.py @@ -10,7 +10,7 @@ from ietf.utils.urls import url urlpatterns = [ url(r'^$', views.release), url(r'^(?P[0-9.]+.*)/$', views.release), - url(r'^about/?$', TemplateView.as_view(template_name='release/about.html')), + url(r'^about/?$', TemplateView.as_view(template_name='release/about.html'), name='releaseabout'), url(r'^stats/?$', views.stats), ] diff --git a/ietf/templates/release/about.html b/ietf/templates/release/about.html index 481e375b3..6f1abb209 100644 --- a/ietf/templates/release/about.html +++ b/ietf/templates/release/about.html @@ -22,11 +22,10 @@

    - All the Datatracker code is publicly avaliable from the - IETF tools GitHub repository. - Bug tickets should be reported as issues at GitHub. - There are also release notes available since version 2.00. - + The Datatracker is an open-source project, using GitHub. +

    +

    + There are release notes available since version 2.00.

    @@ -36,25 +35,14 @@

    -

    Version 6.x Work

    -

    - - Between the release of 6.0.0 in April 2015 and the latest release - there has been numerous releases (75, as of 11 Feb 2017) which has extended the - functionality of the datatracker substantially. The release list - gives more information. - -

    - -

    Version 6.0.0: Facelift using Bootstrap

    +

    Version 7.0.0: Django 2

    + +

    Version 6.0.0: Facelift using Bootstrap 3

    During more than a year, from July 2013 to late 2014, Lars Eggert worked intensively on a major facelift to the datatracker, porting the GUI to Bootstrap. The work - took - - 287 separate commits - , and comprised changes to 1016 different files. + took 287 separate commits, and comprised changes to 1016 different files.

    @@ -74,13 +62,8 @@

    - Additional - - page conversion work - has been done by Ole Laursen, with - - final style tweaks, bug-fixes and adaptations - + Additional page conversion work was performed by Ole Laursen, with + final style tweaks, bug-fixes and adaptations by Henrik Levkowetz, giving it a distinct colour palette (with the addition of complementing green and red colours for success and error indications), @@ -94,37 +77,21 @@ optical size font to go with PT Sans, it works well for headers when paired with PT Serif.)

    -

    Version 5.x Work

    -

    - - Between the release of 5.0.0 in January 2014 and the last release in the 5.x series in - April 2015, there were 42 releases containing bug fixes and features. Worth mentioning - were 4 code sprint releases, added support for the secretariat's agenda scheduling work, - the addition of pages for Research Groups and Teams, a JSON interface to the - database for tool builders, improved IPR support, a move to Django 1.7, - and many - improvements in testing support. - -

    - +

    Version 5.0.0: Shim Removal

    -
    - To be written. -
    +

    At this point, the views and templates were completely adapted to the new models introduced at 4.0.0

    Version 4.00: New Database Schema

    -
    - To be written. -
    +

    This release was a complete redesign of the underlying Django models. It introduced a set of facades, referred to as a "Shim Layer", + which allowed the refactor to focus only on the models, leaving the views and templates for later adaptation. +

    Version 3.00: Django Port of the IESG Datatracker Pages

    -
    - To be written. -
    +

    This release added the IESG only portions of the previous IESG tracker to the public Datatracker. +

    Version 2.00: Django Port of the Public Datatracker Pages

    From ebc2e1a26240d01cbc16e8cc01bddaf3f6d053bf Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 11 Mar 2022 17:36:44 -0400 Subject: [PATCH 19/41] fix: remove anchor tag with empty href (#3630) --- ietf/templates/release/about.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/templates/release/about.html b/ietf/templates/release/about.html index 6f1abb209..41082408a 100644 --- a/ietf/templates/release/about.html +++ b/ietf/templates/release/about.html @@ -56,7 +56,7 @@ The work relies heavily on the capabilities of Bootstrap, and continues to use the Django framework which the datatracker has been build on since version - 2.00. It also uses icons from FontAwesome, and functions from + 2.00. It also uses icons from FontAwesome, and functions from django-bootstrap3.

    From e12ea0c64c6398f8fd91639e3db0a5866a1ee000 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Fri, 11 Mar 2022 17:52:38 -0400 Subject: [PATCH 20/41] fix: use same github issue URL on 500 error page as elsewhere (#3631) --- ietf/templates/500.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/templates/500.html b/ietf/templates/500.html index 80c18a735..c8bbf2190 100644 --- a/ietf/templates/500.html +++ b/ietf/templates/500.html @@ -19,7 +19,7 @@ A failure report with details about what happened has been sent to the server administrators. It would be helpful if you would create an issue providing additional information at - GitHub, too. + GitHub, too.

    {% endblock %} From 369a5b9f1a16c5c1451d77b7aa42658d50b0ba56 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 11 Mar 2022 17:23:22 -0500 Subject: [PATCH 21/41] chore: update report-a-bug.yml form --- .github/ISSUE_TEMPLATE/report-a-bug.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/ISSUE_TEMPLATE/report-a-bug.yml b/.github/ISSUE_TEMPLATE/report-a-bug.yml index cad0d0307..d911d8e2e 100644 --- a/.github/ISSUE_TEMPLATE/report-a-bug.yml +++ b/.github/ISSUE_TEMPLATE/report-a-bug.yml @@ -23,6 +23,7 @@ body: - Chrome / Edge - Firefox - Safari + - Not Applicable - type: checkboxes id: terms attributes: From 86a3895ab94b921383985f285b4369bd8898da04 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Mon, 14 Mar 2022 18:07:45 -0400 Subject: [PATCH 22/41] ci: fix __init__.py version quotes --- ietf/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ietf/__init__.py b/ietf/__init__.py index 4704fff11..5e55e8c20 100644 --- a/ietf/__init__.py +++ b/ietf/__init__.py @@ -4,8 +4,9 @@ from . import checks # pyflakes:ignore +# Version must stay in single quotes for automatic CI replace # Don't add patch number here: -__version__ = "7.46.1.dev0" +__version__ = '7.46.1.dev0' # set this to ".p1", ".p2", etc. after patching __patch__ = "" From aa718e7e989078d2b231cfd33584eded2ca03f1f Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 14 Mar 2022 17:17:31 -0500 Subject: [PATCH 23/41] chore: bring docker-compose to root level (#3642) (#3643) Authored-by: Nicolas Giard --- .devcontainer/devcontainer.json | 2 +- .devcontainer/docker-compose.extend.yml | 2 +- docker/docker-compose.yml => docker-compose.yml | 2 +- docker/cleanall | 2 ++ docker/cleandb | 2 ++ docker/docker-compose.extend.yml | 2 +- docker/run | 4 +++- 7 files changed, 11 insertions(+), 5 deletions(-) rename docker/docker-compose.yml => docker-compose.yml (98%) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 846389b7b..c9df9e3e6 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,7 @@ // https://github.com/microsoft/vscode-dev-containers/tree/v0.202.5/containers/python-3 { "name": "IETF Datatracker", - "dockerComposeFile": ["../docker/docker-compose.yml", "docker-compose.extend.yml"], + "dockerComposeFile": ["../docker-compose.yml", "docker-compose.extend.yml"], "service": "app", "workspaceFolder": "/root/src", "shutdownAction": "stopCompose", diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index cacd44df3..9510a4e93 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -6,7 +6,7 @@ services: EDITOR_VSCODE: 1 DJANGO_SETTINGS_MODULE: settings_local_sqlitetest volumes: - - ..:/root/src + - .:/root/src - /root/src/node_modules # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. network_mode: service:db \ No newline at end of file diff --git a/docker/docker-compose.yml b/docker-compose.yml similarity index 98% rename from docker/docker-compose.yml rename to docker-compose.yml index 2d0fc01c9..f9eca07aa 100644 --- a/docker/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,7 @@ version: '3.8' services: app: build: - context: .. + context: . 
dockerfile: docker/app.Dockerfile args: # Update 'VARIANT' to pick a version of Python: 3, 3.10, 3.9, 3.8, 3.7, 3.6 diff --git a/docker/cleanall b/docker/cleanall index dd6f67669..feb786a23 100755 --- a/docker/cleanall +++ b/docker/cleanall @@ -1,7 +1,9 @@ #!/bin/bash +cd .. echo "Shutting down any instance still running and purge images..." docker-compose down -v --rmi all echo "Purging dangling images..." docker image prune +cd docker echo "Done!" diff --git a/docker/cleandb b/docker/cleandb index ae2ece3bc..d94958261 100755 --- a/docker/cleandb +++ b/docker/cleandb @@ -1,8 +1,10 @@ #!/bin/bash echo "Shutting down any instance still running..." +cd .. docker-compose down -v echo "Rebuilding the DB image..." docker-compose pull db docker-compose build --no-cache db +cd docker echo "Done!" \ No newline at end of file diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml index 979da02ef..91fb9da90 100644 --- a/docker/docker-compose.extend.yml +++ b/docker/docker-compose.extend.yml @@ -5,7 +5,7 @@ services: ports: - '8000:8000' volumes: - - ..:/root/src + - .:/root/src - /root/src/node_modules db: ports: diff --git a/docker/run b/docker/run index 84264b397..c17816ec2 100755 --- a/docker/run +++ b/docker/run @@ -1,5 +1,7 @@ #!/bin/bash -docker-compose -f docker-compose.yml -f docker-compose.extend.yml up -d +cd .. +docker-compose -f docker-compose.yml -f docker/docker-compose.extend.yml up -d docker-compose exec app /bin/sh /docker-init.sh docker-compose down +cd docker From 868afd287c6f16b9fb41eb8d95f1637ca105c6cc Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 16 Mar 2022 09:57:51 -0400 Subject: [PATCH 24/41] chore: improve dev docker for offline + other fixes (#3652) --- .devcontainer/devcontainer.json | 7 +++++-- .devcontainer/docker-compose.extend.yml | 4 ++-- .npmrc | 4 +++- .vscode/settings.json | 2 +- docker/app.Dockerfile | 17 +++++++++++++++-- docker/docker-compose.extend.yml | 4 ++-- docker/run | 2 +- docker/scripts/app-cypress.sh | 2 +- docker/scripts/app-init.sh | 12 ++++++------ docker/scripts/app-win32-timezone-fix.sh | 2 +- 10 files changed, 37 insertions(+), 19 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index c9df9e3e6..5a91b21b6 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -4,7 +4,7 @@ "name": "IETF Datatracker", "dockerComposeFile": ["../docker-compose.yml", "docker-compose.extend.yml"], "service": "app", - "workspaceFolder": "/root/src", + "workspaceFolder": "/workspace", "shutdownAction": "stopCompose", "postCreateCommand": "/docker-init.sh", "containerEnv": { @@ -69,7 +69,10 @@ "spmeesseman.vscode-taskexplorer", "mtxr.sqltools", "mtxr.sqltools-driver-mysql", - "mrmlnc.vscode-duplicate" + "mrmlnc.vscode-duplicate", + "eamodio.gitlens", + "oderwat.indent-rainbow", + "johnsoncodehk.volar" ], // Use 'forwardPorts' to make a list of ports inside the container available locally. diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index 9510a4e93..794ac0c5b 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -6,7 +6,7 @@ services: EDITOR_VSCODE: 1 DJANGO_SETTINGS_MODULE: settings_local_sqlitetest volumes: - - .:/root/src - - /root/src/node_modules + - .:/workspace + - /workspace/node_modules # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. 
network_mode: service:db \ No newline at end of file diff --git a/.npmrc b/.npmrc index 92eb5e67e..20daf4621 100644 --- a/.npmrc +++ b/.npmrc @@ -1,2 +1,4 @@ save-exact = true -save-prefix = "" \ No newline at end of file +save-prefix = "" +fund = false +loglevel = "warn" \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 7eefd1893..7dcffaa3f 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -20,7 +20,7 @@ "**/third_party/**", "**/vendor/**", "**/work/**", - "/root/src/bootstrap/nuget/MyGet.ps1" + "/workspace/bootstrap/nuget/MyGet.ps1" ], "taskExplorer.enableAnt": false, "taskExplorer.enableAppPublisher": false, diff --git a/docker/app.Dockerfile b/docker/app.Dockerfile index 525f8f252..d95a1c4bc 100644 --- a/docker/app.Dockerfile +++ b/docker/app.Dockerfile @@ -88,6 +88,8 @@ ENV DBUS_SESSION_BUS_ADDRESS=/dev/null ENV npm_config_loglevel warn # allow installing when the main user is root ENV npm_config_unsafe_perm true +# disable NPM funding messages +ENV npm_config_fund false # Set locale to en_US.UTF-8 RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ @@ -99,7 +101,7 @@ RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ ENV LC_ALL en_US.UTF-8 # Install bower -RUN npm install -g bower +RUN npm install -g bower grunt-cli # Install idnits ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/ @@ -116,10 +118,21 @@ RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf # Colorize the bash shell RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc +ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/ +RUN chmod +rx /usr/local/bin/wait-for + # Copy the startup file COPY docker/scripts/app-init.sh /docker-init.sh RUN sed -i 's/\r$//' /docker-init.sh && \ chmod +x /docker-init.sh -WORKDIR /root/src +# Create workspace +RUN mkdir -p /workspace +WORKDIR /workspace + +# Install NPM modules +COPY package.json package.json +RUN npm install --no-audit +RUN rm -f package.json package-lock.json + # ENTRYPOINT [ "/docker-init.sh" ] diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml index 91fb9da90..542ff2faa 100644 --- a/docker/docker-compose.extend.yml +++ b/docker/docker-compose.extend.yml @@ -5,8 +5,8 @@ services: ports: - '8000:8000' volumes: - - .:/root/src - - /root/src/node_modules + - .:/workspace + - /workspace/node_modules db: ports: - '3306' \ No newline at end of file diff --git a/docker/run b/docker/run index c17816ec2..319d07166 100755 --- a/docker/run +++ b/docker/run @@ -3,5 +3,5 @@ cd .. docker-compose -f docker-compose.yml -f docker/docker-compose.extend.yml up -d docker-compose exec app /bin/sh /docker-init.sh -docker-compose down +docker-compose stop cd docker diff --git a/docker/scripts/app-cypress.sh b/docker/scripts/app-cypress.sh index 79e451914..91b1e0d9c 100755 --- a/docker/scripts/app-cypress.sh +++ b/docker/scripts/app-cypress.sh @@ -1,6 +1,6 @@ #!/bin/bash -WORKSPACEDIR="/root/src" +WORKSPACEDIR="/workspace" pushd . cd $WORKSPACEDIR diff --git a/docker/scripts/app-init.sh b/docker/scripts/app-init.sh index 80d58b60f..708ce837b 100755 --- a/docker/scripts/app-init.sh +++ b/docker/scripts/app-init.sh @@ -1,16 +1,16 @@ #!/bin/bash -WORKSPACEDIR="/root/src" +WORKSPACEDIR="/workspace" service rsyslog start # Generate static assets -npm install +npm install --prefer-offline --no-audit echo "Building static assets... 
(this could take a minute or two)" cd bootstrap -npm install -g grunt-cli -npm install +npm install -g grunt-cli --prefer-offline --no-audit +npm install --prefer-offline --no-audit grunt dist cp -r dist/. ../ietf/static/ietf/bootstrap/ cd .. @@ -81,7 +81,7 @@ for sub in \ data/developers/www6/iesg/evaluation \ data/developers/media/photo \ ; do - dir="/root/src/$sub" + dir="/workspace/$sub" if [ ! -d "$dir" ]; then echo "Creating dir $dir" mkdir -p "$dir"; @@ -91,7 +91,7 @@ done # Wait for DB container if [ -n "$EDITOR_VSCODE" ]; then echo "Waiting for DB container to come online ..." - wget -qO- https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for | sh -s -- localhost:3306 -- echo "DB ready" + /usr/local/bin/wait-for localhost:3306 -- echo "DB ready" fi # Initial checks diff --git a/docker/scripts/app-win32-timezone-fix.sh b/docker/scripts/app-win32-timezone-fix.sh index 031f54fa4..f2b1229dc 100755 --- a/docker/scripts/app-win32-timezone-fix.sh +++ b/docker/scripts/app-win32-timezone-fix.sh @@ -1,6 +1,6 @@ #!/bin/bash -WORKSPACEDIR="/root/src" +WORKSPACEDIR="/workspace" ICSFILES=$(/usr/bin/find $WORKSPACEDIR/vzic/zoneinfo/ -name '*.ics' -print) for ICSFILE in $ICSFILES From a60c31e45d80e8749d7e183e26ea198983e4433e Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 16 Mar 2022 11:08:47 -0500 Subject: [PATCH 25/41] chore: remove svn/trac infrastructure (#3649) * chore: Remove svn/trac related infrastructure * chore: Remove more svn/trac infrastructure * fix: remove commented out Trac requirement --- bin/commitlog | 33 - bin/mergedevbranch | 259 ------- bin/mergeready | 414 ---------- bin/mkdevbranch | 282 ------- bin/mkpatch | 132 ---- bin/mkrelease | 348 --------- bin/sprintcoders.py | 21 - .../buildbot_passwords.py.template | 21 - buildbot/masters/datatracker/custom_steps.py | 183 ----- .../masters/datatracker/master.buildbot27.cfg | 426 ----------- buildbot/masters/datatracker/master.cfg | 716 ------------------ buildbot/restart-masters | 3 - buildbot/restart-workers | 3 - buildbot/setup-buildbot-workers | 180 ----- .../workers/datatracker_lin_py36_1/info/admin | 1 - .../workers/datatracker_lin_py36_1/info/host | 1 - .../workers/datatracker_lin_py36_2/info/admin | 1 - .../workers/datatracker_lin_py36_2/info/host | 1 - .../workers/datatracker_lin_py36_3/info/admin | 1 - .../workers/datatracker_lin_py36_3/info/host | 1 - .../workers/datatracker_lin_py36_4/info/admin | 1 - .../workers/datatracker_lin_py36_4/info/host | 1 - docker/configs/settings_local.py | 3 - docker/configs/settings_local_sqlitetest.py | 3 - ietf/settings.py | 55 -- .../templates/utils/wiki/IetfSpecificFeatures | 38 - ietf/templates/utils/wiki/InterMapTxt | 72 -- ietf/templates/utils/wiki/SvnTracHooks | 73 -- .../templates/utils/wiki/ThisTracInstallation | 94 --- ietf/templates/utils/wiki/TrainingMaterials | 10 - ietf/templates/utils/wiki/WikiStart | 29 - .../management/commands/create_group_wikis.py | 413 ---------- ietf/utils/tests.py | 82 -- requirements.txt | 2 - 34 files changed, 3903 deletions(-) delete mode 100755 bin/commitlog delete mode 100755 bin/mergedevbranch delete mode 100755 bin/mergeready delete mode 100755 bin/mkdevbranch delete mode 100755 bin/mkpatch delete mode 100755 bin/mkrelease delete mode 100644 bin/sprintcoders.py delete mode 100644 buildbot/masters/datatracker/buildbot_passwords.py.template delete mode 100644 buildbot/masters/datatracker/custom_steps.py delete mode 100644 buildbot/masters/datatracker/master.buildbot27.cfg delete mode 100644 
buildbot/masters/datatracker/master.cfg delete mode 100755 buildbot/restart-masters delete mode 100755 buildbot/restart-workers delete mode 100755 buildbot/setup-buildbot-workers delete mode 100644 buildbot/workers/datatracker_lin_py36_1/info/admin delete mode 100644 buildbot/workers/datatracker_lin_py36_1/info/host delete mode 100644 buildbot/workers/datatracker_lin_py36_2/info/admin delete mode 100644 buildbot/workers/datatracker_lin_py36_2/info/host delete mode 100644 buildbot/workers/datatracker_lin_py36_3/info/admin delete mode 100644 buildbot/workers/datatracker_lin_py36_3/info/host delete mode 100644 buildbot/workers/datatracker_lin_py36_4/info/admin delete mode 100644 buildbot/workers/datatracker_lin_py36_4/info/host delete mode 100644 ietf/templates/utils/wiki/IetfSpecificFeatures delete mode 100644 ietf/templates/utils/wiki/InterMapTxt delete mode 100644 ietf/templates/utils/wiki/SvnTracHooks delete mode 100644 ietf/templates/utils/wiki/ThisTracInstallation delete mode 100644 ietf/templates/utils/wiki/TrainingMaterials delete mode 100644 ietf/templates/utils/wiki/WikiStart delete mode 100644 ietf/utils/management/commands/create_group_wikis.py diff --git a/bin/commitlog b/bin/commitlog deleted file mode 100755 index 7afada720..000000000 --- a/bin/commitlog +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -cur=$(svn info | awk '/^Revision:/ { print $2 }') -# List recent commits, extract revision numbers, stop at release, and grab the last revision -# - get svn log, looking backwards from HEAD -# - stop on 'Set version info ...' -# - filter out revision lines -# - grab the first field -# - grab the last line -# - remove the leading 'r' -rev=$(svn log $PWD -r ${2:-HEAD}:${1:-$((cur-100))} \ - | sed -r -n -e '1,/^Set version info( and settings)?( back)? to (development|release)/p' \ - | egrep '^r[0-9]+ \|' \ - | cut -d ' ' -f 1 \ - | tail -n 1 \ - | sed 's/^r//') - -# Grab commit log lines from just after the start rev, going forwards, and reformat -# - Get svn log entries, starting with the earliest -# - Filter out dividing lines and revision/committer/date lines, keeping the messages -# - Insert leading asterisks: ' * ' at the front of the first line in each text block -# - Unwrap lines that start with unindented text -# - Do line folding at column 76 -# - Indent any unindented lines 4 spaces -# - Add blank lines in front of log entries -svn log $PWD -r $((rev+2)):${2:-HEAD} \ - | sed -r 's/^(----------|r[0-9]+).*$/\n/' \ - | sed '1,/./s/^/ * /;/^$/,/./s/^/ * /;/^ \* $/d' \ - | sed -e :a -e '$!N;s/\n([A-Za-z0-9])/ \\1/;ta' -e 'P;D' \ - | fold -sw76 \ - | sed -r 's/^([^ ].*)$/ &/' \ - | sed -r 's/^ \* /\n * /' -echo "" -TZ=UTC date +" -- Robert Sparks %d %b %Y %H:%M:%S %z" diff --git a/bin/mergedevbranch b/bin/mergedevbranch deleted file mode 100755 index 517b89c1c..000000000 --- a/bin/mergedevbranch +++ /dev/null @@ -1,259 +0,0 @@ -#!/bin/bash - -version=0.20 -program=${0##*/} -progdir=${0%/*} -if [ "$progdir" = "$program" ]; then progdir="."; fi - -# ---------------------------------------------------------------------- -function usage() { - cat < - -COPYRIGHT - Copyright 2010 Henrik Levkowetz. - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or (at - your option) any later version. There is NO WARRANTY; not even the - implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. 
See the GNU General Public License for more details. - -EOF - -} - -# ---------------------------------------------------------------------- -function die() { - echo -e "\n$program: error: $*" > /dev/stderr - exit 1 -} - -function note() { - if [ -n "$VERBOSE" ]; then echo -e "$*"; fi -} - -# ---------------------------------------------------------------------- -function version() { - echo -e "$program $version" -} - -# ---------------------------------------------------------------------- -trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR - - -# ---------------------------------------------------------------------- -# Option parsing - -# Options -shortopts=cnhvV -longopts=commit,no-commit,help,verbose,version - -# Default values -ARG_COMMIT=1 - -if [ "$(uname)" = "Linux" ]; then - args=$(getopt -o "$shortopts" --long "$longopts" -n '$program' -- $SV "$@") - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - eval set -- "$args" - sed="sed -r" -else - # Darwin, BSDs - args=$(getopt -o$shortopts $SV $*) - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - set -- $args - sed="sed -E" -fi - -while true ; do - case "$1" in - -c| --commit) ARG_COMMIT=1;; # Run commit in addition to merge - -n| --no-commit) ARG_COMMIT=0;; # Don't commit after merge - -h| --help) usage; exit;; # Show this help, then exit - -v| --verbose) VERBOSE=1;; # Be more talkative - -V| --version) version; exit;; # Show program version, then exit - --) shift; break;; - *) die "Internal error, inconsistent option specification: '$1'";; - esac - shift -done - -# ---------------------------------------------------------------------- -# The program itself - -# Argument validation -[[ $1 =~ @ ]] && set ${1/@/ } -[ $# -ge 2 ] || die "Expected branch and repository revision on the command line" -[ ${PWD##*/} = trunk ] || die "Expected this script to be run in trunk" - -# Global settings -cwd=${PWD##*/} -branch=$1 -rev=$2 -fix=$3 - -# remove leading 'r' from rev, if present -rev=${rev#r} - -repo=$(echo -n "$(svn info | grep "^Repository Root: " | sed 's/Repository Root: //')") - -[ -z "$by" ] && by=${RELEASER_REAL_NAME} -[ -z "$by" ] && by=$(getent passwd $(whoami) | cut -d ':' -f 5 | tr -d ',') -[ -z "$by" ] && die "Can't determine the real name of the user running this script" - -python -c 'import django' || die "Can't find django - can't run tests" - -note "Identify the branch:" -if svn info $branch > /dev/null 2>&1; then - branch="${branch#^/}" -elif svn info ${repo}/personal/$branch > /dev/null 2>&1; then - branch="personal/$branch" -elif svn info ${repo}/branch/$branch > /dev/null 2>&1; then - branch="branch/$branch" -elif svn info ${repo}/$branch > /dev/null 2>&1; then - true -else - die "Could not find a branch matching '$branch'" -fi - -note "Svn update, to make sure we don't have a mixed revision working copy" -svn update -q - -mergelog=$(mktemp) -svn propget svn:mergeinfo . > $mergelog - -if grep "@$rev $branch" $mergelog; then die "Changeset $branch@$rev is already in the merge log. Skipping it."; exit 0; fi - -note "Will attempt merge from $branch@$rev" - -# "Check there's no uncommitted changes ..." -echo "" -$do svn st | grep "^[AMGRD] " && { - echo "" - read -p "There are uncommitted changes. Really do merge? 
[y/N] " - [ "$REPLY" = "Y" -o "$REPLY" = "y" ] || exit -} - -note "Extract who and what:" -info=$(svn log ${repo}/ -r $rev --incremental) -set $(echo "$info" | tail -n +2 | head -n 1 | tr "|" "\t") -who=$2; echo -e "$who" -comment=$(echo "$info" | tail -n +3); echo -e "$comment\n" -comment=$(echo "$comment" | sed -r -e 's/(commit )?ready (for|to) merge\.?//i' -e '/^$/d') -files=$(svn diff ${repo}/ -c $rev --summarize | awk '{$1=""; print;}' | while read file; do echo "${file/$repo\/$branch\//}"; done) - -echo -e "Files: \n$files\n" - -read -p "Continue with diff? [Y/n] " -[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || exit - -note "Diff:" -note "svn diff -c $rev $repo/$branch" -svn diff -c $rev $repo/$branch | less - -echo "" -read -p "Additional descriptive text (hit return for none): " -if [ "$REPLY" != "" ]; then - comment="$REPLY - - $comment" -fi - -echo "" -read -p "Continue with the merge? [Y/n] " -[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || exit - -note "Do the merge:" -if [[ $rev =~ : ]]; then - svn merge -r $rev ${repo}/$branch . || die "Merge of $branch @$rev failed. The merge command was: - svn merge -r $rev ${repo}/$branch ." -else - svn merge -c $rev ${repo}/$branch . || die "Merge of $branch @$rev failed. The merge command was: - svn merge -c $rev ${repo}/$branch ." -fi - -note "Writing commit script" -echo -e "#!/bin/bash\n\nsvn commit -m \"Merged in [$rev] from $who:\n ${comment//\"/\'} ${fix//\"/\'}\"" > ../cicmd/commit-${rev}-merge.sh -chmod +x ../cicmd/commit-${rev}-merge.sh - -M=$(svn st | cut -c 1-7 | grep -oh 'M' | head -n 1) -C=$(svn st | cut -c 1-7 | grep -oh 'C' | head -n 1) -G=$(svn st | cut -c 1-7 | grep -oh 'G' | head -n 1) - -##cd ../ -##rsync -a $cwd/ merged@$rev/ -##cp cicmd/commit-${rev}-merge.sh merged@$rev/commit -##cd - - -# Potentially run flake8 at this point -# read -p "Run flake8? [y/N] " -# if [ "$REPLY" = "Y" -o "$REPLY" = "y"]; then -# mod=$(svn st | cut -c 9- | grep '\.py$') -# flake8 $mod | less -# fi - -read -p "Continue with tests? [Y/n] " -[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || exit - -echo -e "\nRunning tests" -time { ietf/manage.py test --settings=settings_sqlitetest \ - || die "Tests failed.\nThe commit script is ../cicmd/commit-${rev}-merge.sh"; } 3>&1 1>&2 2>&3 | bin/count -echo "" - -note "Sending email to changeset author: <$who>" -SEND_ARGS="" -[ "${RELEASER_EMAIL}" ] && SEND_ARGS="-r ${RELEASER_EMAIL}" -mail "${SEND_ARGS}" -s "Merged datatracker branch personal/$branch@$rev to trunk" $who -c rjsparks@nostrum.com <<-EOF -Hi, - -This is an automatic merge info message. Your code in personal/$branch@$rev -has been merged to trunk, and will be part of the next release if nothing -goes wrong during final testing. - -Regards, - - $by - (via the mergesprintbranch script) -EOF - -echo "" -read -p "Continue with the commit? [Y/n] " -[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || ARG_COMMIT=0 - -if [ "$ARG_COMMIT" != 0 ]; then - echo "Svn update:" - svn update -q - echo "Committing the merge:" - echo "" - svn commit -m "Merged in [$rev] from $who: - ${comment//\"/\'} ${fix//\"/\'}" -else - echo "This merge has not been committed yet." 
- echo "To commit it, run this commit command: ../cicmd/commit-$rev-merge.sh" -fi - -echo -e "\n------------------------------------------------------------------------\n\n" - - diff --git a/bin/mergeready b/bin/mergeready deleted file mode 100755 index f4047725e..000000000 --- a/bin/mergeready +++ /dev/null @@ -1,414 +0,0 @@ -#!/usr/bin/env python -# -*- python -*- -""" -NAME - %(program)s - look for SVN commits that are ready to merge - -SYNOPSIS - %(program)s [OPTIONS] ARGS - -DESCRIPTION - %(program)s looks in the SVN log for commits which are marked with the - phrase 'Commit ready for merge', and compares the resulting list with - the 'svn:mergeinfo' property on the current directory, in order to - work out which (if any) commits are ready to merge, but not yet - merged. The command requires (and checks) that it's running in a - directory named 'trunk', and requires that to be an SVN working copy. - - The files (in the top directory of the working copy) 'ready-for-merge' - and 'hold-for-merge' are also consulted for additions and exceptions to - the merge list. - - A list of commit date, committer, and branch@revision for each commit - which is marked ready for merge, but not yet merged, is then written - to standard out. - -%(options)s - -AUTHOR - Written by Henrik Levkowetz, - -COPYRIGHT - Copyright 2014 Henrik Levkowetz - - This program is free software; you can redistribute it and/or modify - it under the terms of the Simplified BSD license as published by the - Open Source Initiative at http://opensource.org/licenses/BSD-2-Clause. - -""" -from __future__ import print_function, unicode_literals - -import sys -import os - -path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -if not path in sys.path: - sys.path.insert(0, path) - -import getopt -import re -import pytz -import tzparse -import debug - -version = "0.20" -program = os.path.basename(sys.argv[0]) -progdir = os.path.dirname(sys.argv[0]) - -# ---------------------------------------------------------------------- -# Parse options - -options = "" -for line in re.findall("\n +(if|elif) +opt in \[(.+)\]:\s+#(.+)\n", open(sys.argv[0]).read()): - if not options: - options += "OPTIONS\n" - options += " %-16s %s\n" % (line[1].replace('"', ''), line[2]) -options = options.strip() - -# with ' < 1:' on the next line, this is a no-op: -if len(sys.argv) < 1: - print(__doc__ % locals()) - sys.exit(1) - -try: - opts, files = getopt.gnu_getopt(sys.argv[1:], "hvV", ["help", "version","verbose",]) -except Exception as e: - print( "%s: %s" % (program, e)) - sys.exit(1) - -# ---------------------------------------------------------------------- -# Handle options - -# set default values, if any -opt_verbose = 0 - -# handle individual options -for opt, value in opts: - if opt in ["-h", "--help"]: # Output this help, then exit - print( __doc__ % locals() ) - sys.exit(1) - elif opt in ["-V", "--version"]: # Output version information, then exit - print( program, version ) - sys.exit(0) - elif opt in ["-v", "--verbose"]: # Output version information, then exit - opt_verbose += 1 - -# ---------------------------------------------------------------------- -def say(s): - sys.stderr.write("%s\n" % (s)) - - -# ---------------------------------------------------------------------- -def note(s): - if opt_verbose: - sys.stderr.write("%s\n" % (s)) - -# ---------------------------------------------------------------------- -def die(s, error=1): - sys.stderr.write("\n%s: Error: %s\n\n" % (program, s)) - sys.exit(error) - -# 
---------------------------------------------------------------------- -# The program itself - -import os -import json - -cwd = os.getcwd() - -if cwd.split(os.path.sep)[-1] != 'trunk': - die("Expected to run this operation in trunk, but the current\ndirectory is '%s'" % cwd) - -# ---------------------------------------------------------------------- -# Some utility functions - -def pipe(cmd, inp=None): - import shlex - from subprocess import Popen, PIPE - args = shlex.split(cmd) - bufsize = 4096 - stdin = PIPE if inp else None - pipe = Popen(args, stdin=stdin, stdout=PIPE, stderr=PIPE, bufsize=bufsize) - out, err = pipe.communicate(inp) - code = pipe.returncode - if code != 0: - raise OSError(err) - return out.decode('utf-8') - -def split_loginfo(line): - try: - parts = line.split() - rev = parts[0][1:] - who = parts[2] - date = parts[4] - time = parts[5] - tz = parts[6] - when = tzparse.tzparse(" ".join(parts[4:7]), "%Y-%m-%d %H:%M:%S %Z") - when = when.astimezone(pytz.utc) - except ValueError as e: - sys.stderr.write("Bad log line format: %s\n %s\n" % (line, e)) - - return rev, who, when - -# ---------------------------------------------------------------------- - -# Get repository information -svn_info = {} -for line in pipe('svn info .').splitlines(): - if line: - key, value = line.strip().split(':', 1) - svn_info[key] = value.strip() - -repo = svn_info["Repository Root"] -head = int(svn_info['Revision']) - -# Get current mergeinfo from cache and svn -cachefn = os.path.join(os.environ.get('HOME', '.'), '.mergeinfo') - -if os.path.exists(cachefn): - note("Reading mergeinfo cache file %s" % cachefn) - with open(cachefn, "r") as file: - cache = json.load(file) -else: - sys.stderr.write("No merge info cache file found -- will have to extract all information from SVN.\n"+ - "This may take some time.\n\n") - opt_verbose = True - cache = {} -mergeinfo = cache[repo] if repo in cache else {} - -merged_revs = {} -write_cache = False -loginfo_format = r'^r[0-9]+ \| [^@]+@[^@]+ \| \d\d\d\d-\d\d-\d\d ' -note("Getting svn:mergeinfo for current branch") -for line in pipe('svn propget svn:mergeinfo .').splitlines(): - if opt_verbose: - sys.stderr.write('.') - if line in mergeinfo: - merged = mergeinfo[line] - else: - merged = {} - branch, revs = line.strip().split(':',1) - for part in revs.split(','): - if '-' in part: - beg, end = part.split('-') - try: - commit_log = pipe('svn log -v -r %s:%s %s%s' % (beg, end, repo, branch)) - for logline in commit_log.splitlines(): - if re.search(loginfo_format, logline): - rev, who, when = split_loginfo(logline) - merged[rev] = branch[1:] - write_cache = True - except OSError: - pass - else: - merged[part] = branch[1:] - write_cache = True - mergeinfo[line] = merged - merged_revs.update(merged) -note('') - -if write_cache: - cache[repo] = mergeinfo - with open(cachefn, "w") as file: - json.dump(cache, file, indent=2, sort_keys=True) - -def get_changeset_list_from_file(repo, filename): - """ - This is used to read changesets to hold or merge from the ready-for-merge - and hold-for-merge files. 
- """ - list = [] - if os.path.exists(filename): - note("Reading list from '%s'" % filename) - else: - note("File doesn't exist: '%s'" % filename) - return list - with open(filename) as file: - for line in file: - line = line.strip() - if line.startswith('#') or line == "": - continue - try: - #note(" '%s'" % line) - parts = line.split() - if len(parts) >1 and parts[1] == '@': - branch, rev = parts[0], parts[2] - changeset = "%s@%s" % (branch, rev) - else: - changeset = parts[0] - branch, rev = changeset.split('@') - if branch.startswith('^'): - branch = branch[1:] - if branch.startswith('/'): - branch = branch[1:] - if not (rev in merged_revs and branch == merged_revs[rev]): - list += [(rev, repo, branch),] - #elif rev in merged_revs and not branch == merged_revs[rev]: - # sys.stderr.write('Rev %s: %s != %s' % (rev, branch, merged_revs[rev])) - else: - #sys.stderr.write('Already merged: merged_revs[%s]: %s\n' % (rev, merged_revs[rev])) - pass - except ValueError as e: - sys.stderr.write("Bad changeset specification in %s: '%s': %s\n" % (file.name, changeset, e)) - return list - -def get_ready_commits(repo, tree): - list = [] - note("Getting ready commits from '%s'" % tree) - cmd = 'svn log -v -r %s:HEAD %s/%s/' % ((head-200), repo, tree) - if opt_verbose > 1: - note("Running '%s' ..." % cmd) - commit_log = pipe(cmd) - for line in commit_log.splitlines(): - if re.search(loginfo_format, line): - rev, who, when = split_loginfo(line) - branch = None - continue - if (line.startswith(' M') or line.startswith(' A') or line.startswith(' D')) and branch == None: - type, path = line[:4], line[5:] - if ' (from ' in path: - i = path.index(' (from ') - path = path[:i] - branch = '/'.join(path.split('/')[1:4]) - elif re.search("(?i)((commit|branch) ready (for|to) merge)", line): - if not rev in merged_revs: - note(" %s %s: %s@%s" % (when.strftime("%Y-%m-%d %H:%MZ"), who, branch, rev)) - list += [(rev, repo, branch),] - elif rev in merged_revs and not branch == merged_revs[rev]: - sys.stderr.write('Rev %s: %s != %s\n' % (rev, branch, merged_revs[rev])) - else: - pass - else: - pass - - return list - -ready = get_changeset_list_from_file(repo, 'ready-for-merge') -ready += get_changeset_list_from_file(repo, '../ready-for-merge') -hold = get_changeset_list_from_file(repo, 'hold-for-merge') -hold += get_changeset_list_from_file(repo, '../hold-for-merge') -ready += get_ready_commits(repo, 'personal') -ready += get_ready_commits(repo, 'branch/iola') -ready += get_ready_commits(repo, 'branch/dash') - -ready_commits = {} -all_commits = {} -not_passed = {} -branches = set() -for entry in ready: - rev, repo, branch = entry - branches.add(branch) - # Get the time, committer, and commit message - cmd = 'svn log -v -r %s %s/%s/' % (rev, repo, branch) - if opt_verbose > 1: - note("Running '%s' ..." % cmd) - try: - loginfo = pipe(cmd).splitlines() - except OSError: - continue - try: - rev, who, when = split_loginfo(loginfo[1]) - except IndexError: - die("Wrong changeset version in %s@%s ?" 
% (branch, rev)) - for line in loginfo[3:]: - type, path = line[:4], line[5:] - if 'M' in type or 'A' in type or 'D' in type: - if ' (from ' in path: - i = path.index(' (from ') - path = path[:i] - break - # Get the test status - try: - cmd = 'svn propget --revprop -r %s "test:unittest"' % rev - unittest = pipe(cmd).strip() - except OSError as e: - if "E200017" in str(e): - unittest = "" - pass - else: - raise - # - dirs = path.split(os.path.sep) - dirs = dirs[:dirs.index('ietf')] if 'ietf' in dirs else dirs[:4] - merge_path = os.path.join(*dirs) - if not (rev, repo, merge_path) in hold: - output_line = "%s %-24s ^/%s@%s" % (when.strftime("%Y-%m-%d_%H:%MZ"), who+":", merge_path, rev) - all_commits[when] = (rev, repo, branch, who, merge_path) - if unittest == 'passed': - ready_commits[when] = output_line - else: - not_passed[when] = output_line - -hold_revs = {} -for rev, repo, branch in hold: - hold_revs[rev] = branch - -unmerged_branch_commits = {} -for branch in branches: - try: - cmd = 'svn ls %s/%s --depth empty' % (repo, branch) - _ = pipe(cmd) - except OSError: - note("Skipping nonexistent branch %s" % branch) - continue - note("Fetching commit information for branch %s" % branch) - commits = [] - cmd = 'svn log -v -r 0:HEAD --stop-on-copy %s/%s/' % (repo, branch) - commit_log = pipe(cmd) - rev = None - mod = False - for line in commit_log.splitlines(): - if re.search(loginfo_format, line): - rev, who, when = split_loginfo(line) - elif re.search('^ [AMD]', line): - if not ' (from ' in line and not mod: - mod = True - elif re.search('^-{72}$', line) and rev and mod: - if not rev in merged_revs and not rev in hold_revs: - commits.append(rev) - rev = None - mod = False - commits.sort() - unmerged_branch_commits[branch] = commits - -keys = list(all_commits.keys()) -keys.sort() -# Check that we don't have holes in the commit list -- commits not mentioned -# as ready for merge, and not already merged, earlier than a waiting commit. -unmerged = False -for key in keys: - (rev, repo, branch, who, merge_path) = all_commits[key] - try: - i = unmerged_branch_commits[branch].index(rev) - except: - say("Unexpected state. 
Mismatch between branch name and revision in hold-for-merge or ready-for-merge?") - raise - if not i == 0: - unmerged = True - sys.stderr.write("There are unmerged commits ahead of r%s on branch ^/%s:\n" % (rev, branch)) - for j in range(0,i): - commit = unmerged_branch_commits[branch][j] - if commit != rev: - sys.stderr.write(" %s:\n" % commit) - commit_comment = pipe("svn log -c %s ^/" % commit).splitlines()[3:-1] - for l in commit_comment: - sys.stderr.write(" %s\n" % l) - unmerged_branch_commits[branch] = unmerged_branch_commits[branch][i:] - sys.stderr.write("\n") - del unmerged_branch_commits[branch][0] - -keys = list(not_passed.keys()) -keys.sort() -if len(keys) > 0: - print("") - print("Commits marked ready which haven't passed the test suite:\n") - for key in keys: - print(not_passed[key]) - print('') - -keys = list(ready_commits.keys()) -keys.sort() -for key in keys: - print(ready_commits[key]) - -print("\n%s pending merges" % len(keys)) diff --git a/bin/mkdevbranch b/bin/mkdevbranch deleted file mode 100755 index 7a2ff5853..000000000 --- a/bin/mkdevbranch +++ /dev/null @@ -1,282 +0,0 @@ -#!/bin/bash - -version=0.24 -program=${0##*/} -progdir=${0%/*} -svn_url_base="https://svn.ietf.org/svn/tools/ietfdb" -if [ "$progdir" = "$program" ]; then progdir="."; fi - -# ---------------------------------------------------------------------- -function usage() { - cat < - -COPYRIGHT - Copyright 2016 Henrik Levkowetz. - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or (at - your option) any later version. There is NO WARRANTY; not even the - implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the GNU General Public License for more details. - -EOF - -} - -# ---------------------------------------------------------------------- -function die() { - echo -e "\n$program: error: $*" >/dev/stderr - exit 1 -} - -function warn() { - echo "$program: Warning: $*" 1>&2 -} - -function note() { - if [ -n "$VERBOSE" ]; then echo -e "$*"; fi -} - -# ---------------------------------------------------------------------- -function version() { - echo -e "$program $version" -} - -# ---------------------------------------------------------------------- - -function check_svn_path_exists() { - local __resultvar=$1 - local __path=$2 - local myresult - - svn info "${__path}" >/dev/null 2>&1 - myresult=$? - - # shellcheck disable=SC2086 - eval $__resultvar="'$myresult'" -} - -function mksvndir() { - who=$1 - if [ "$2" ]; then dir=$2; else dir=$who; fi - check_svn_path_exists exists "${svn_url_base}/personal/$dir" - # shellcheck disable=SC2154 - if [ "$exists" != "0" ]; then - $do echo "Creating personal directory area for IETF datatracker coding: /personal/$dir" - $do svn mkdir "${svn_url_base}/personal/$dir" -m "Personal SVN dir for $who, for IETF datatracker code" - else - echo "Repository area personal/$dir is already in place." - fi -} - -function mksvntarget() { - local who=$1 - local target=$2 - local source=$3 - local email=$4 - local name=$5 - - check_svn_path_exists exists "${svn_url_base}/personal/$who/$target" - if [ "$exists" != "0" ]; then - $do echo " creating $target branch for $who ($name)." 
- $do svn cp "${svn_url_base}/$source" \ - "${svn_url_base}/personal/$who/$target/" \ - -m "New branch for $target" - $do echo "New branch: ^/personal/$who/$target" - if [ -n "$email" ]; then - notify_user "$who" "$target" "$email" "$name" - fi - else - $do echo " branch personal/$who/$target already exists." - fi -} - -function notify_user() { - local login=$1 - local target=$2 - local email=$3 - local name=$4 - - $do mail "$name <$email>" -s "A new SVN branch for you for IETF datatracker coding${rev:+, based on $rev}." -b rjsparks@nostrum.com <<-EOF - Hi, - $msg - This mail has been automatically generated by the $program script. - - A new SVN branch has been set up for you for IETF datatracker coding, at - ${svn_url_base}/personal/$login/$target - ${rev:+This branch is based on $rev. }You can check it out by doing - svn co ${svn_url_base}/personal/$login/$target - - There's also a database dump available at - https://www.ietf.org/lib/dt/sprint/ietf_utf8.sql.gz -- this dump is served - via CDN, and should hopefully be swifter to download than the alternatives. - - Please read the instructions about sprint coder setup at - https://trac.ietf.org/tools/ietfdb/wiki/SprintCoderSetup - -- both the workflow description and the details of setting up your - environment. - - - Best regards, - - The IETF tools team (via the $program script) - - EOF -} - -function get_sprinters_info() { - local n=$1 - - curl -L -s "https://trac.ietf.org/trac/ietfdb/wiki/IETF${n}SprintSignUp?format=txt" | \ - grep -E "^\|\|" | \ - grep -Ev "^\|\|\s+\|\|\s+" | \ - tail -n +2 | \ - python3 sprintcoders.py | \ - update "$progdir/sprint${n}.txt" - -} - -# ---------------------------------------------------------------------- -trap 'echo "$program($LINENO): Command failed with error code $? ([$$] "$0" $*)"; exit 1' ERR - -# ---------------------------------------------------------------------- -# Option parsing - -# Options -short_options=hm:M:nsvV -long_options=help,meeting=,message=,dry-run,sprint,verbose,version - -# Default values -num="" -msg="" -do="" - -if [ "$(uname)" = "Linux" ]; then - # shellcheck disable=SC2086 - # shellcheck disable=SC2048 - if ! args=$(getopt -o $short_options --long $long_options -n $program -- $SV $*); then - die "Terminating..." >&2 - exit 1 - fi - # shellcheck disable=SC2086 - eval set -- $args - sed="sed -r" -else - # Darwin, BSDs - # shellcheck disable=SC2086 - # shellcheck disable=SC2048 - if ! args=$(getopt -o$short_options $SV $*); then - die "Terminating..." 
>&2 - exit 1 - fi - # shellcheck disable=SC2086 - set -- $args - sed="sed -E" -fi - -while true; do - case "$1" in - -h | --help) - usage - exit - ;; # Show this help, then exit - -m | --meeting) - num=$2 - shift - ;; # Specify the IETF meeting number - -M | --message) - msg=$2 - shift - ;; # Specify extra message text - -n | --dry-run) do="echo -- ==>" ;; # Only show what would be done - -s | --sprint) SPRINT=1 ;; # Make branches for sprint sign-ups - -v | --verbose) VERBOSE=1 ;; # Be more talkative - -V | --version) - version - exit - ;; # Show program version, then exit - --) - shift - break - ;; - *) die "Internal error, inconsistent option specification: '$1'" ;; - esac - shift -done - -# ---------------------------------------------------------------------- -# The program itself - -who="" -tag=$(svn log -v ${svn_url_base}/tags/dev/ --limit 1 | grep '/tags/' | awk '{print $2}') - -source="${tag:1}" -target="${tag##*/}" -rev="dev tag $target" - -[ "$1" ] && who="$1" -[ "$2" ] && target="${target%.dev*}-$2" - -if [ -z "${who}${SPRINT}" ]; then die "Specify either individual developer name or --sprint"; fi - -cd $progdir || exit - -if [ "$who" ]; then - mksvndir "$who" - mksvntarget "$who" "$target" "$source" -fi -if [ "$SPRINT" ]; then - [ "$msg" ] && msg=" -$msg -" - [ "$num" ] || num=$(curl -L -s "https://tools.ietf.org/meta/current-ietf-number.txt") - for n in $(seq $((num - 3)) "$num"); do - get_sprinters_info "$n" - done - - # shellcheck disable=SC2046 - # shellcheck disable=SC2012 - # shellcheck disable=SC2162 - # sed -E (regexp extended) breaks this usage on MacOS 10.15, so back to regular sed. - cat $(ls $progdir/sprint*.txt | tail -n 2) $progdir/extras.txt | \ - sed -e 's/[ \t]*$//' -e 's/[ \t]+/ /g' | \ - sort | uniq | \ - while read login email name; do - echo "" - echo "$login ($name <$email>):" - mksvndir "$login" - mksvntarget "$login" "$target" "$source" "$email" "$name" - done -fi diff --git a/bin/mkpatch b/bin/mkpatch deleted file mode 100755 index 4379fdb5e..000000000 --- a/bin/mkpatch +++ /dev/null @@ -1,132 +0,0 @@ -#!/bin/bash -# -*- indent-with-tabs: 1 -*- - -version=0.10 -program=${0##*/} -progdir=${0%/*} -if [ "$progdir" = "$program" ]; then progdir="."; fi - -# ---------------------------------------------------------------------- -function usage() { - cat < - -COPYRIGHT - Copyright 2013 Henrik Levkowetz. - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or (at - your option) any later version. There is NO WARRANTY; not even the - implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR - PURPOSE. See the GNU General Public License for more details. - -EOF - -} - -# ---------------------------------------------------------------------- -function die() { - echo -e "\n$program: error: $*" > /dev/stderr - exit 1 -} - -function note() { - if [ -n "$VERBOSE" ]; then echo -e "$*"; fi -} - -# ---------------------------------------------------------------------- -function version() { - echo -e "$program $version" -} - -# ---------------------------------------------------------------------- -trap 'echo "$program($LINENO): Command failed with error code $? 
([$$] $0 $*)"; exit 1' ERR - - -# ---------------------------------------------------------------------- -# Option parsing - -# Options -shortopts=c:n:or:hvV -longopts=change=,name=,overwrite,revision=,help,verbose,version - -# Default values - -if [ "$(uname)" = "Linux" ]; then - args=$(getopt -o "$shortopts" --long "$longopts" -n "$program" -- $SV "$@") - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - eval set -- "$args" - sed="sed -r" - date="date -d" -else - # Darwin, BSDs - args=$(getopt -o$shortopts $SV $*) - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - set -- $args - sed="sed -E" - date="date -j -f %Y-%m-%d" -fi - -while true ; do - case "$1" in - -c| --change) CHG="$2"; shift;; # Use the change made by revision ARG - -n| --name) NAME="$2"; shift;; # Patch name - -o| --overwrite) OVER=1;; # Overwrite any existing patch file - -h| --help) usage; exit;; # Show this help, then exit - -v| --verbose) VERBOSE=1;; # Be more talkative - -V| --version) version; exit;; # Show program version, then exit - --) shift; break;; - *) die "Internal error, inconsistent option specification: '$1'";; - esac - shift -done - -# ---------------------------------------------------------------------- -# The program itself - -if [ "$CHG" ]; then - if [ "$NAME" ]; then - name="${NAME//_/-}-c$CHG" - else - name=$(echo $(svn log -c $CHG | sed -r -e '/^---/d' -e '/^r[0-9]+/d' -e '/^$/d' -e 's/Merged in \[[0-9]+\] from [^:]+..//' ) | sed -r -e 's/(.*)/\L\1/' -e 's/[^[:alnum:]]/-/g' -e 's/-+/-/g' -e's/-$//' | cut -c 1-40) - name="$name-c$CHG" - fi -else - if [ "$NAME" ]; then - if [ $# -lt 1 ]; then die "Expected file list on the command line."; fi - name="${NAME//_/-}" - else - die "Please use the -n switch to provide a patch name" - fi -fi - -patchfile=$progdir/../../patches/$(date +%Y-%m-%d)-$name.patch -if [ -e $patchfile -a ! -n "$OVER" ]; then die "Patchfile $patchfile already exists"; fi -svn diff ${CHG:+ -c $CHG} ${REV:+ -r $REV} "$@" > $patchfile -less $patchfile -echo "" -echo "" -echo "Patch is in $patchfile" diff --git a/bin/mkrelease b/bin/mkrelease deleted file mode 100755 index 19f2bc7f0..000000000 --- a/bin/mkrelease +++ /dev/null @@ -1,348 +0,0 @@ -#!/bin/bash - -version=0.10 -program=${0##*/} -progdir=${0%/*} -if [ "$progdir" = "$program" ]; then progdir="."; fi -PATH="$PATH:$progdir" - -# ---------------------------------------------------------------------- -function usage() { - cat < - -COPYRIGHT - Copyright 2007 The IETF Trust. - -EOF - -} - -# ---------------------------------------------------------------------- -function die() { - echo -e "\n$program: error: $*" > /dev/stderr - exit 1 -} - -function say() { - if [ -n "$VERBOSE" ]; then echo -e "$*"; fi -} - -function note() { - if [ -n "$VERBOSE" ]; then echo -e "\n$*"; fi -} - -function check() { - [ "$(which $1)" ] || die "could not find the '$1' command. $2" -} - -# ---------------------------------------------------------------------- -function version() { - echo -e "$program $version" -} - -# ---------------------------------------------------------------------- -trap 'echo "$program($LINENO): Command failed with error code $? 
([$$] $0 $*)"; exit 1' ERR - - -# ---------------------------------------------------------------------- -# Option parsing - -# Options -shortopts=hmnipvV -longopts=help,message,dry-run,ignore-resources,permit-migr-mix,verbose,version - -# Default values -MSG="" -PROJ=ietfdb -VERFILE=ietf/__init__.py -SETTINGS=ietf/settings.py -PERMIT_MIGR_MIX="" -IGNORE_RESOURCES="" -do="" - -if [ "$(uname)" = "Linux" ]; then - args=$(getopt -o "$shortopts" --long "$longopts" -n '$program' -- $SV "$@") - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - eval set -- "$args" - sed="sed -r" -else - # Darwin, BSDs - args=$(getopt -o$shortopts $SV $*) - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - set -- $args - sed="sed -E" -fi - -while true ; do - case "$1" in - -h| --help) usage; exit;; # Show this help, then exit - -m| --message) MSG=$2; shift;; # Specify a commit message - -n| --dry-run) do="echo ==>";; # Show what would be done - -i| --ignore-resources) IGNORE_RESOURCES=1;; # Don't try to update resources - -p| --permit-migr-mix) PERMIT_MIGR_MIX=1;; # Permit mixed schema and data migrations - -v| --verbose) VERBOSE=1;; # Be more talkative - -V| --version) version; exit;; # Show program version, then exit - --) shift; break;; - *) die "Internal error, inconsistent option specification: '$1'";; - esac - shift -done - -# ---------------------------------------------------------------------- -# Check some requirements - -check bower "It is required to update web resources. Install with npm." - -# ---------------------------------------------------------------------- -# The program itself - -ARGMIN=1 - -if [ $# -lt $ARGMIN ]; then - usage - die "$# arguments found, $ARGMIN required" -fi - -[ -z "$by" ] && by=${RELEASER_REAL_NAME} -[ -z "$by" ] && by=$(getent passwd $(whoami) | cut -d ':' -f 5 | tr -d ',') -[ -z "$by" ] && die "Can't determine the real name of the user running this script" - -VER=$1 - -REPO=$(svn info | grep "^Repository Root:" | awk '{ print $3 }') -RDATE=$(svn info | grep "^Last Changed Date:" | awk '{ print $4 "T" $5 $6 }') -RURL=$(svn info | grep "^URL:" | awk '{ print $2 }') -RDIR=${RURL#$REPO} - -DIR=${RDIR#/} -if [ -z "$DIR" ]; then - die "Couldn't find anything to release here" -elif [ "${DIR%%/*}" = "trunk" ]; then - SRC="trunk" -elif [ "${DIR%%/*}" = "branch" ]; then - tmp=${DIR#*/} # get rid of 'branch/' - SRC="branch/${tmp%%/*}" # keep first subdir under branch/ -fi - - -note "Releasing from $SRC" - -note "Locating the root of the working copy ..." -while [ "${#DIR}" -gt "${#SRC}" ]; do - [ "$DIR" = "$prev" ] && die "Internal error" - cd .. - #note " now at $PWD" - prev=$DIR - DIR=${DIR%/*} -done -if [ "$DIR" != "$SRC" ]; then - die "Couldn't find the root of your '$SRC' working copy" -fi -say " $DIR" - -REPO=${REPO%/} # remove trailing slash -SRC=${SRC#/} # remove leading slash - -MAJOR=${VER%%.*} -REST=${VER#*.} -MINOR=${REST%%.*} -MAINT=${REST#*.} -VER="$(printf %d.%d.%d $MAJOR $MINOR $MAINT)" -NEXT=$(( $MAINT + 1 )) -DEV="$(printf %d.%d.%d.dev0 $MAJOR $MINOR $NEXT)" - -#cd $DIR ?? - -note "Checking that changelog information is available ..." -changes=$( sed -n "/^$PROJ ($VER.*)/,/^ -- /p" changelog ) -[ "$changes" ] || die "No changelog information for $VER found" -#note "$changes" - -note "Checking for migrations not in SVN" -$do svn st | grep "^[?] 
.*/migrations/[0-9].*\.py$" && die "There seems to be new migrations which aren't in SVN" - -if [ -z "$PERMIT_MIGR_MIX" ]; then - note "Checking that we don't have both schema and data migrations ..." - cur=$(svn info | awk '/^Revision:/ { print $2 }') - migrations=$(svn log $PWD -v -r HEAD:$((cur-100)) | sed -n -e '1,/^Set version info and settings back to development mode/p' | grep '^...A /.*/migrations/0.*.py' | cut -c6- | awk '{ print $1 }' | sed -re 's|/trunk/||') - if [ -n "$migrations" ]; then - datamigr=$(for m in "$migrations"; do egrep -sl 'migrations\.RunPython' $m; done || true) - schemamigr=$(for m in "$migrations"; do egrep -sl 'migrations\.(Add|Alter|Create|Delete|Remove|Rename)(Field|Model|UniqueTogether)' $m; done || true) - if [ -n "$datamigr" -a -n "$schemamigr" ]; then - echo -e "\n Schema migrations:" - for m in $schemamigr; do - echo " $m" - done - echo -e "\n Data migrations:" - for m in $datamigr; do - echo " $m" - done - die "\n Found both data migrations and schema migrations in this release.\n This is likely to cause delay between schema changes and deployment,\n which means the old code will run on the new schema longer than necessary." - fi - fi -fi - -if [ -z "$IGNORE_RESOURCES" ]; then - note "Updating bower assets ..." - $do ietf/manage.py bower_install > .mkrelease-bower-install.log - $do rm .mkrelease-bower-install.log # This happens at once unless the previous command returns error - $do svn st ietf/externals/static | grep -v '^\?' || true - $do svn commit ietf/externals/static -m "Updated bower-managed static web assets" - # Get rid of bower-installed files which we don't use: - $do rm -rf ietf/externals/static/datatracker/ - $do rm -rf ietf/externals/static/jquery.cookie/ - $do rm -f $(svn st ietf/externals/ | grep '^\?' | awk '{print $2}') -fi - -note "Collecting static files ..." -$do ietf/manage.py collectstatic --noinput --ignore=bower.json --ignore='README.*' --ignore=rev | grep -v "Found another file with the destination path" -#$do svn commit static/lib/ -m "Updated static files under static/lib/" - -# note "Checking that there's a recent test-crawler log" -# touch -d $RDATE .svn/.latest-commit -# TCLOG=$(ls -t ../test-crawl-*.log | head -n 1) -# [ $TCLOG -nt .svn/.latest-commit ] || die "Looked for ../test-crawl-*.log, but didn't find one newer than the latest repository commit ($RDATE)" - -note "Upgrading the python library modules before checking migrations and running tests ..." -$do pip install --upgrade -r requirements.txt -$do pip check - -note "Checking that all model changes have been captured in migrations ..." -$do ietf/manage.py makemigrations | tee /dev/stderr | $do grep -q "^No changes detected$" || die "Model changes without migrations found." - -note "Making sure all migrations have been run ..." -$do ietf/manage.py migrate - -note "Running the tests suite and writing release coverage data ..." -$do ietf/manage.py test --settings=ietf.settings_releasetest --save-version-coverage=$VER ${PERMIT_MIGR_MIX:+"--permit-mixed-migrations"} - -note "Committing the release coverage data ..." -$do svn commit release-coverage.json.gz -m "Code coverage data for release $VER" - -if [ -d ../coverage ]; then - cp .coverage ../coverage/$VER.coverage - rsync -a static/coverage/ ../coverage/$VER/ -fi - -contributors=$(echo "$changes" | gawk '/^ \* Merged in \[[0-9]+\] from [^: ]+/ {sub(":",""); print $6;}' | sort | uniq) -note "Contributors: -$contributors" - -note "Setting the current time on the release notes in the changelog file ..." 
-$do sed -r -i -e "1,/^ -- /s/([A-Za-z-]+ <[a-z0-9.-]+@[a-z0-9.-]+> ).*$/\1$(TZ=UTC date +'%d %b %Y %H:%M:%S %z')/" changelog -say " $(grep -m1 "^ -- " changelog)" - -note "Verifying that version $VER doesn't already exist ..." -$do svn info $REPO/tags/$VER 2>&1 | $do egrep -q "(Not a valid URL|URL .* non-existent)" || die "The tag '$VER' already exists (or there was an error testing for it)." -say " Ok" - -note "Committing the changelog ..." -$do svn commit changelog -m "Changelog entry for $VER" - -note "Verifying there's no uncommitted changes ..." -$do svn st | grep "^[AMGRD] " && die "There seems to be uncommitted changes in this working copy" - -note "Updating the version info in $VERFILE and making sure'\$Rev\$' is Ok ..." -$do sed -i -r -e "/^__version__/s/\"[.0-9]+(dev[0-9]+)?\"/\"$VER\"/" \ - -e "/^__rev__/s/\".*\"/\"\$Rev:\$\"/" \ - $VERFILE - -note "Updating the deployment settings in settings.py" -$do sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' \ - -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" \ - $SETTINGS - -note "Committing version information for version $VER: $MSG ..." -$do svn commit $VERFILE $SETTINGS -m "Set version info to release version $VER before branching. $MSG" - -note "Creating new tag 'tags/$VER' from $SRC ..." -$do svn cp $REPO/$SRC $REPO/tags/$VER -m "Creating new tag 'tags/$VER' from $SRC" - -note "Updating version and revision info to indicate that the source and branch aren't releases ..." -$do sed -i -r -e "/^__version__/s/\"[0-9.]*\"/\"$DEV\"/" \ - -e "/^__rev__/s/\"\\\$Rev: (.*) \\\$\"/\"\$Rev:\$ (dev) Latest release: Rev. \1 \"/" \ - $VERFILE - -note "Updating the deployment settings in settings.py to development mode ..." -$do sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = True/' \ - -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'development'/" \ - $SETTINGS - -note "Committing the updated version and deployment settings ..." -$do svn commit $VERFILE $SETTINGS -m "Set version info and settings back to development mode" - -note "Creating new tag 'tags/dev/$DEV' from $SRC ..." -$do svn cp $REPO/$SRC $REPO/tags/dev/$DEV -m "Creating new tag 'tags/dev/$DEV' from $SRC" - -$do svn update -q - -[ -d ~/src/db/mail ] || mkdir -p ~/src/db/mail -echo " -Hi, - -This is an automatic notification about a new datatracker release, -v$VER, generated when running the mkrelease script. 
- -Release notes: - -$changes - -The new version is available for installation through SVN checkout, with - 'svn checkout https://svn.ietf.org/svn/tools/$PROJ/tags/$VER' - -For development, copy the new development version instead: - 'svn copy https://svn.ietf.org/svn/tools/$PROJ/tags/dev/$DEV' - -Regards, - - $by - (via the mkrelease script) -" > ~/src/db/mail/release-mail-v$VER.txt - -SEND_ARGS="" -[ "${RELEASER_EMAIL}" ] && SEND_ARGS="-r ${RELEASER_EMAIL}" - -cat ~/src/db/mail/release-mail-v$VER.txt | $do mail "${SEND_ARGS}" -s "New datatracker release: v$VER" housley@vigilsec.com rjsparks@nostrum.com krathnayake@ietf.org -c jay@ietf.org -c glen@amsl.com -c maddy@amsl.com -c cmorgan@amsl.com -c avezza@amsl.com -c amorris@amsl.com -c smccammon@amsl.com -c kmoreland@amsl.com $contributors - -cat ~/src/db/mail/release-mail-v$VER.txt | $do mail "${SEND_ARGS}" -s "New datatracker release: v$VER" iesg@ietf.org wgchairs@ietf.org codesprints@ietf.org -b rjsparks@nostrum.com - - -# Removed at Henrik's suggestion -#$do toolsfeed control changelog /www/tools.ietf.org/tools/atomfeed.xml -#$do toolpush /www/tools.ietf.org/tools/atomfeed.xml - diff --git a/bin/sprintcoders.py b/bin/sprintcoders.py deleted file mode 100644 index f86bdb3c4..000000000 --- a/bin/sprintcoders.py +++ /dev/null @@ -1,21 +0,0 @@ -import sys, re - -with open("aliases") as afile: - try: - aliases = dict([line.strip().split(None, 1) for line in afile.read().splitlines() if line.strip()]) - except ValueError: - sys.stderr.write([line.strip().split(None, 1) for line in afile.read().splitlines() if line.strip()]) - raise - -for line in sys.stdin: - try: - blank, name, email, rest = line.strip().split("||", 3) - email = email.strip() - except ValueError: - sys.stderr.write(line + "\n") - raise - - login, dummy = re.split("[@.]", email, 1) - if email in aliases: - login = aliases[email] - print("\t".join((login.strip().lower(), email.strip().lower(), name.strip()))) diff --git a/buildbot/masters/datatracker/buildbot_passwords.py.template b/buildbot/masters/datatracker/buildbot_passwords.py.template deleted file mode 100644 index 7033b203f..000000000 --- a/buildbot/masters/datatracker/buildbot_passwords.py.template +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved -# -*- python; coding: utf-8 -*- - -# Uncomment and set passwords below to match those set for the workers, then -# save as "buildbot_passwords.py" - -# datatracker_lin_py27_1_pw = "" -# datatracker_lin_py27_2_pw = "" -# datatracker_lin_py27_3_pw = "" -# datatracker_osx_py27_4_pw = "" -# datatracker_lin_py27_5_pw = "" -# datatracker_lin_py27_6_pw = "" -# ietfdb_svn_hook_pw = "" -# -# datatracker_lin_py36_1_pw = "" -# datatracker_lin_py36_2_pw = "" -# datatracker_lin_py36_3_pw = "" -# datatracker_lin_py36_4_pw = "" -# datatracker_lin_py36_5_pw = "" -# datatracker_lin_py36_6_pw = "" - diff --git a/buildbot/masters/datatracker/custom_steps.py b/buildbot/masters/datatracker/custom_steps.py deleted file mode 100644 index 593d29b2d..000000000 --- a/buildbot/masters/datatracker/custom_steps.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved -# -*- coding: utf-8 -*- - -import re - -from buildbot.plugins import steps - -class TestCrawlerShellCommand(steps.WarningCountingShellCommand): - name = "testcrawl" - haltOnFailure = 1 - flunkOnFailure = 1 - descriptionDone = ["test crawler"] - command=["bin/test-crawl"] - - warningPatterns = { - "exceptions": "^(Traceback| File| |.*Error|.*Exception)", - 
"failed": " FAIL ", - "warnings": " WARN", - "slow": " SLOW", - "invalid_html": " invalid html:", - } - - logline = "^ *(?P\d+:\d+:\d+) +(?P\d+) +(?P\d+) +(?P\d+) +(?P\d+.\d+)s +(?P.+)" - - def setTestResults(self, **kwargs): - """ - Called by subclasses to set the relevant statistics; this actually - adds to any statistics already present - """ - for kw in kwargs: - value = kwargs[kw] - if value.isdigit(): - # Counter - value = int(value) - value += self.step_status.getStatistic(kw, 0) - elif re.search("^[0-9]+\.[0-9]+$", value): - # Runtime - value = float(value) - value += self.step_status.getStatistic(kw, 0) - else: - # This is a percentage, and we can't add them - pass - self.step_status.setStatistic(kw, value) - - def createSummary(self, log): - """ - Match log lines against warningPattern. - - Warnings are collected into another log for this step, and the - build-wide 'warnings-count' is updated.""" - - warnings = {} - wregex = {} - - regex_class = re.compile("").__class__ - - if not isinstance(self.logline, regex_class): - self.logline = re.compile(self.logline) - - for key in self.warningPatterns: - warnings[key] = [] - pattern = self.warningPatterns[key] - if not isinstance(pattern, regex_class): - wregex[key] = re.compile(pattern) - else: - wregex[key] = pattern - - # Count matches to the various warning patterns - last_line = None - for line in log.getText().split("\n"): - for key in wregex: - match = re.search(wregex[key], line) - if match: - warnings[key].append(line) - if re.search(self.logline, line): - last_line = line - - # If there were any warnings, make the log if lines with warnings - # available - for key in warnings: - if len(warnings[key]) > 0: - self.addCompleteLog("%s (%d)" % (key, len(warnings[key])), - "\n".join(warnings[key]) + "\n") - self.step_status.setStatistic(key, len(warnings[key])) - self.setProperty(key, len(warnings[key]), "TestCrawlerShellCommand") - - if last_line: - match = re.search(self.logline, last_line) - for key in ['elapsed', 'pages']: - info = match.group(key) - self.step_status.setStatistic(key, info) - self.setProperty(key, info, "TestCrawlerShellCommand") - - def describe(self, done=False): - description = steps.WarningCountingShellCommand.describe(self, done) - if done: - description = description[:] # make a private copy - for name in ["time", "elapsed", "pages", "failed", "warnings", "slow", "invalid_html", ]: - if name in self.step_status.statistics: - value = self.step_status.getStatistic(name) - displayName = name.replace('_', ' ') - # special case. Mph. 
- if type(value) is float: # this is run-time - description.append('%s: %.2fs' % (displayName, value)) - elif type(value) is int: - description.append('%s: %d' % (displayName, value)) - else: - description.append('%s: %s' % (displayName, value)) - return description - - -class DjangoTest(steps.WarningCountingShellCommand): - - name = "test" - warnOnFailure = 1 - description = ["testing"] - descriptionDone = ["test"] - command = ["manage.py", "test", ] - - regexPatterns = { - "tests": "Ran (\d+) tests in [0-9.]+s", - "time": "Ran \d+ tests in ([0-9.]+)s", - "skipped": "(?:OK|FAILED).*skipped=(\d+)", - "failed": "FAILED.*failures=(\d+)", - "errors": "FAILED.*errors=(\d+)", - "template_coverage":" +Template coverage: +([0-9.]+%)", - "url_coverage": " +Url coverage: +([0-9.]+%)", - "code_coverage": " +Code coverage: +([0-9.]+%)", - } - - def setTestResults(self, **kwargs): - """ - Called by subclasses to set the relevant statistics; this actually - adds to any statistics already present - """ - for kw in kwargs: - value = kwargs[kw] - if value.isdigit(): - # Counter - value = int(value) - value += self.step_status.getStatistic(kw, 0) - elif re.search("^[0-9]+\.[0-9]+$", value): - # Runtime - value = float(value) - value += self.step_status.getStatistic(kw, 0) - else: - # This is a percentage, and we can't add them - pass - self.step_status.setStatistic(kw, value) - - def createSummary(self, log): - info = {} - for line in log.getText().split("\n"): - for key in self.regexPatterns: - regex = self.regexPatterns[key] - match = re.search(regex, line) - if match: - info[key] = match.group(1) - self.setTestResults(**info) - - def describe(self, done=False): - description = steps.WarningCountingShellCommand.describe(self, done) - if done: - description = description[:] # make a private copy - self.step_status.statistics["passed"] = ( - self.step_status.getStatistic("tests",0) - - self.step_status.getStatistic("skipped",0) - - self.step_status.getStatistic("failed",0) - - self.step_status.getStatistic("errors",0)) - for name in ["time", "tests", "passed", "skipped", "failed", "errors", "template_coverage", "url_coverage", "code_coverage", ]: - if name in self.step_status.statistics: - value = self.step_status.getStatistic(name) - displayName = name.replace('_', ' ') - # special case. Mph. - if displayName == 'template coverage': - displayName = 'templ. coverage' - if type(value) is float: # this is run-time - description.append('%s: %.2fs' % (displayName, value)) - elif type(value) is int: - description.append('%s: %d' % (displayName, value)) - else: - description.append('%s: %s' % (displayName, value)) - return description diff --git a/buildbot/masters/datatracker/master.buildbot27.cfg b/buildbot/masters/datatracker/master.buildbot27.cfg deleted file mode 100644 index e4b1535b7..000000000 --- a/buildbot/masters/datatracker/master.buildbot27.cfg +++ /dev/null @@ -1,426 +0,0 @@ -# -*- python -*- -# ex: set filetype=python: - -from os.path import expanduser as expandtilde -from buildbot.plugins import worker, changes, schedulers, util, steps -import buildbot_passwords -import custom_steps - -# This is a sample buildmaster config file. It must be installed as -# 'master.cfg' in your buildmaster's base directory. - -# This is the dictionary that the buildmaster pays attention to. We also use -# a shorter alias to save typing. -c = BuildmasterConfig = {} - -####### SETTINGS - -# For miscellaneous settings, see MISC. 
SETTINGS at the bottom of the file - -####### WORKERS - -# The 'workers' list defines the set of recognized workers. Each element is -# a Worker object, specifying a unique worker name and password. The same -# worker name and password must be configured on the worker. -c['workers'] = [ - worker.Worker("datatracker_lin_py36_1", buildbot_passwords.datatracker_lin_py36_1_pw), - worker.Worker("datatracker_lin_py36_2", buildbot_passwords.datatracker_lin_py36_2_pw), - worker.Worker("datatracker_lin_py36_3", buildbot_passwords.datatracker_lin_py36_3_pw), -# worker.Worker("datatracker_lin_py36_4", buildbot_passwords.datatracker_lin_py36_4_pw), - worker.Worker("datatracker_lin_py36_5", buildbot_passwords.datatracker_lin_py36_5_pw), - worker.Worker("datatracker_lin_py36_6", buildbot_passwords.datatracker_lin_py36_6_pw), -] - -# 'protocols' contains information about protocols which master will use for -# communicating with workers. You must define at least 'port' option that workers -# could connect to your master with this protocol. -# 'port' must match the value configured into the workers (with their -# --master option) -c['protocols'] = {'pb': {'port': 9989}} - -####### CHANGESOURCES - -# the 'change_source' setting tells the buildmaster how it should find out -# about source code changes. Here we point to the buildbot version of a python hello-world project. - -c['change_source'] = [ - changes.PBChangeSource(user='ietfdb', passwd=buildbot_passwords.ietfdb_svn_hook_pw), -] - -####### SCHEDULERS - -# Configure the Schedulers, which decide how to react to incoming changes. In this -# case, just kick off a 'runtests' build - -c['schedulers'] = [ - # Branch schedulers - schedulers.SingleBranchScheduler(name="pyflakes", treeStableTimer=10, builderNames=["Check PyFlakes"], - change_filter=util.ChangeFilter(branch='trunk')), - schedulers.SingleBranchScheduler(name="lin_test", treeStableTimer=60*5, builderNames=["Test Suite"], - change_filter=util.ChangeFilter(branch='trunk')), -# schedulers.SingleBranchScheduler(name="osx_test", treeStableTimer=60*5, builderNames=["Test Suite (OS X)"], -# change_filter=util.ChangeFilter(branch='trunk')), - # - schedulers.AnyBranchScheduler(name="pyflakes_branch", treeStableTimer=10, builderNames=["[branch] Check PyFlakes"], - change_filter=util.ChangeFilter(branch_re='branch/.*')), -# schedulers.AnyBranchScheduler(name="lin_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite"], -# change_filter=util.ChangeFilter(branch_re='branch/.*')), -# schedulers.AnyBranchScheduler(name="osx_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite (OS X)"], -# change_filter=util.ChangeFilter(branch_re='branch/.*')), - # - schedulers.AnyBranchScheduler(name="pyflakes_personal",treeStableTimer=10, builderNames=["[personal] Check PyFlakes"], - change_filter=util.ChangeFilter(branch_re='personal/.*')), - schedulers.AnyBranchScheduler(name="lin_test_personal",treeStableTimer=60*5, builderNames=["[personal] Test Suite"], - change_filter=util.ChangeFilter(branch_re='personal/.*')), - # Periodic Schedulers - schedulers.Nightly(name="lin_test_old_libs", hour=16, minute=12, branch="trunk", builderNames=["Verify Minimum Libs"],), - schedulers.Nightly(name="lin_test_libs", hour=16, minute=42, branch="trunk", builderNames=["Verify Latest Libs"],), - schedulers.Nightly(name="crawler", hour=9, minute=00, branch="trunk", onlyIfChanged=True, builderNames=["Test-Crawler"],), - - # schedulers.Force schedulers - schedulers.ForceScheduler(name="force_pyflakes", 
builderNames=["Check PyFlakes"]), - schedulers.ForceScheduler(name="force_lin_test", builderNames=["Test Suite"]), -# schedulers.ForceScheduler(name="force_osx_test", builderNames=["Test Suite (OS X)"]), - schedulers.ForceScheduler(name="force_test_crawler", builderNames=["Test-Crawler"]), - # - schedulers.ForceScheduler(name="force_pyflakes_branch", builderNames=["[branch] Check PyFlakes"]), - schedulers.ForceScheduler(name="force_lin_test_branch", builderNames=["[branch] Test Suite"]), -# schedulers.ForceScheduler(name="force_osx_test_branch", builderNames=["[branch] Test Suite (OS X)"]), - # - schedulers.ForceScheduler(name="force_pyflakes_personal", builderNames=["[personal] Check PyFlakes"]), - schedulers.ForceScheduler(name="force_lin_test_personal", builderNames=["[personal] Test Suite"]), - -] - -####### BUILDERS - -# The 'builders' list defines the Builders, which tell Buildbot how to perform a build: -# what steps, and which workers can execute them. Note that any particular build will -# only take place on one worker. - - -#### Custom subclassed builder - -factory = util.BuildFactory() -# check out the source -factory.addStep(steps.Git(repourl='git://github.com/buildbot/hello-world.git', mode='incremental')) -# run the tests (note that this will require that 'trial' is installed) -factory.addStep(steps.ShellCommand(command=["trial", "hello"], - env={"PYTHONPATH": "."})) - -c['builders'] = [] - -# -*- section Builder_Run_pyflakes -*- - -factory = util.BuildFactory() -factory.addStep(steps.SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="seting up settings_local.py", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["cp", expandtilde("~/settings_local.py"), "./ietf/"], - )) -factory.addStep(steps.PyFlakes( - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["ietf/manage.py", "pyflakes", "--verbosity=0"], - )) -# This should be the last action -factory.addStep(steps.ShellCommand( - descriptionDone="mark as passed", - workdir=util.Interpolate('build/%(src::branch)s'), - command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive", - "propset", "--revprop", "-r", util.Property('got_revision'), "test:pyflakes", "passed" ], - )) - -c['builders'].append(util.BuilderConfig(name="Check PyFlakes", factory=factory, category="1. trunk", - workernames=["datatracker_lin_py36_1", ])) -c['builders'].append(util.BuilderConfig(name="[branch] Check PyFlakes", factory=factory, category="2. branch", - workernames=["datatracker_lin_py36_2", ])) -c['builders'].append(util.BuilderConfig(name="[personal] Check PyFlakes", factory=factory, category="3. 
personal", - workernames=["datatracker_lin_py36_3", ])) - -# -*- section Builder_TestSuite -*- - -factory = util.BuildFactory() -factory.addStep(steps.SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s'))) -factory.addStep(steps.ShellCommand( - descriptionDone="remove tmp-* dirs", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["rm", "-rf", "tmp-*/"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="install requirements", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["pip", "install", "-r", "requirements.txt"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="copy settings_local.py", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["cp", expandtilde("~/settings_local.py"), "./ietf/"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="collect static files", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - command=["ietf/manage.py", "collectstatic", "--noinput", ], - )) -factory.addStep(custom_steps.DjangoTest( - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - want_stderr=True, - command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ], - )) -# This should come after tests -factory.addStep(steps.ShellCommand( - descriptionDone="mark as passed", - workdir=util.Interpolate('build/%(src::branch)s'), - command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive", - "propset", "--revprop", "-r", util.Property('got_revision'), "test:unittest", "passed" ], - )) - -c['builders'].append(util.BuilderConfig(name="Test Suite", factory=factory, category="1. trunk", - workernames=["datatracker_lin_py36_1", ])) -c['builders'].append(util.BuilderConfig(name="[branch] Test Suite", factory=factory, category="2. branch", - workernames=["datatracker_lin_py36_2", ])) -c['builders'].append(util.BuilderConfig(name="[personal] Test Suite", factory=factory, category="3. 
personal", - workernames=["datatracker_lin_py36_3", ])) - - -# -*- section Builder_TestCrawler -*- - -factory = util.BuildFactory() -factory.addStep(steps.SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s'))) -factory.addStep(steps.ShellCommand( - descriptionDone="install requirements", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["pip", "install", "-r", "requirements.txt"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="copy settings_local.py", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["cp", expandtilde("~/settings_local.py"), "./ietf/"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="run migrations", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["ietf/manage.py", "migrate"], - )) -factory.addStep(custom_steps.TestCrawlerShellCommand( - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - want_stderr=True, - command=["bin/test-crawl", "--settings=ietf.settings_testcrawl"], - )) -# This should be the last action -factory.addStep(steps.ShellCommand( - descriptionDone="mark as passed", - workdir=util.Interpolate('build/%(src::branch)s'), - command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive", - "propset", "--revprop", "-r", util.Property('got_revision'), "test:crawler", "passed" ], - )) - -c['builders'].append(util.BuilderConfig(name="Test-Crawler", factory=factory, category="1. trunk", - workernames=["datatracker_lin_py36_6", ])) - - -# -*- section Builder_Verify_Minimum_Libs -*- - -# This build runs pip install --upgrade, to make sure that we install the earliest version of -# all dependencies, in order to get an indication if/when an incompatibility turns up with a new -# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not -# change the external test conditions and produce spurious errors because of version changes in -# dependencies. 
- -factory = util.BuildFactory() -factory.addStep(steps.ShellCommand( - descriptionDone="remove tweaked requirements", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - command=["rm", "requirements.txt"], - )) -factory.addStep(steps.SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=util.Interpolate('build/%(src::branch)s'), - alwaysUseLatest=True, - haltOnFailure=True, - repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s'))) -factory.addStep(steps.ShellCommand( - descriptionDone="edit requirements", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["sed", "-i", "-e", "s/>=/==/", "requirements.txt"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="install/upgrade requirements", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["pip", "install", "--upgrade", "-r", "requirements.txt"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="seting up settings_local.py", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["cp", expandtilde("~/settings_local.py"), "./ietf/"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="list installed pyton modules", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["pip", "freeze"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="collect static files", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - command=["ietf/manage.py", "collectstatic", "--noinput", ], - )) -factory.addStep(custom_steps.DjangoTest( - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ], - )) -c['builders'].append(util.BuilderConfig(name="Verify Minimum Libs", factory=factory, category="1. trunk", - workernames=["datatracker_lin_py36_5", ])) - - -# -*- section Builder_Veryfy_Latest_Libs -*- - -# This build runs pip install --upgrade, to make sure that we install the latest version of all -# dependencies, in order to get an indication if/when an incompatibility turns up with a new -# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not -# change the external test conditions and produce spurious errors because of version changes in -# dependencies. 
- -factory = util.BuildFactory() -factory.addStep(steps.SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=util.Interpolate('build/%(src::branch)s'), - alwaysUseLatest=True, - haltOnFailure=True, - repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s'))) -factory.addStep(steps.ShellCommand( - descriptionDone="install/upgrade requirements", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["pip", "install", "--upgrade", "-r", "requirements.txt"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="seting up settings_local.py", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["cp", expandtilde("~/settings_local.py"), "./ietf/"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="list installed pyton modules", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["pip", "freeze"], - )) -factory.addStep(steps.ShellCommand( - descriptionDone="collect static files", - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - command=["ietf/manage.py", "collectstatic", "--noinput", ], - )) -factory.addStep(custom_steps.DjangoTest( - workdir=util.Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ], - )) - -c['builders'].append(util.BuilderConfig(name="Verify Latest Libs", factory=factory, category="1. trunk", - workernames=["datatracker_lin_py36_5", ])) - -####### BUILDBOT SERVICES - -# 'services' is a list of BuildbotService items like reporter targets. The -# status of each build will be pushed to these targets. buildbot/reporters/*.py -# has a variety to choose from, like IRC bots. - -c['services'] = [] - -####### PROJECT IDENTITY - -# the 'title' string will appear at the top of this buildbot installation's -# home pages (linked to the 'titleURL'). - -c['title'] = "Buildbot: IETF Datatracker" -c['titleURL'] = "https://datatracker.ietf.org/" - -# the 'buildbotURL' string should point to the location where the buildbot's -# internal web server is visible. This typically uses the port number set in -# the 'www' entry below, but with an externally-visible host name which the -# buildbot cannot figure out without some help. - -c['buildbotURL'] = "http://dunkelfelder.tools.ietf.org:8010/" - -# minimalistic config to activate new web UI -c['www'] = { - 'port': 8010, - 'plugins': { - 'waterfall_view': True, - 'console_view': True, - 'grid_view': True, - }, - 'default_page': 'waterfall_view', - 'debug': True, - 'auth': util.UserPasswordAuth({"ietfdb": "ietfdb"}), -} - - -####### DB URL - -c['db'] = { - # This specifies what database buildbot uses to store its state. - # It's easy to start with sqlite, but it's recommended to switch to a dedicated - # database, such as PostgreSQL or MySQL, for use in production environments. - # http://docs.buildbot.net/current/manual/configuration/global.html#database-specification - 'db_url' : "sqlite:///state.sqlite", -} - -####### MISC. 
SETTINGS - -c['buildbotNetUsageData'] = 'full' - diff --git a/buildbot/masters/datatracker/master.cfg b/buildbot/masters/datatracker/master.cfg deleted file mode 100644 index 8bb3020e4..000000000 --- a/buildbot/masters/datatracker/master.cfg +++ /dev/null @@ -1,716 +0,0 @@ -# Copyright The IETF Trust 2015-2020, All Rights Reserved -# -*- python; coding: utf-8 -*- - -# This is a buildbot config file for buildbot 0.8.14.p1 (patched to work with -# workers of version 2.7 in addition to 0.8 workers). - - -import re -from buildbot_passwords import * - - -# This is the dictionary that the buildmaster pays attention to. We also use -# a shorter alias to save typing. -c = BuildmasterConfig = {} - -# -*- section BuildSlaves -*- - -# The 'slaves' list defines the set of recognized buildslaves. Each element is -# a BuildSlave object, specifying a unique slave name and password. The same -# slave name and password must be configured on the slave. -from buildbot.buildslave import BuildSlave -c['slaves'] = [ -# - BuildSlave("dunkelfelder_lin_py36_1", dunkelfelder_lin_py36_1_pw), - BuildSlave("dunkelfelder_lin_py36_2", dunkelfelder_lin_py36_2_pw), - BuildSlave("dunkelfelder_lin_py36_3", dunkelfelder_lin_py36_3_pw), - BuildSlave("dunkelfelder_lin_py36_4", dunkelfelder_lin_py36_4_pw), - - BuildSlave("dornfelder_lin_py36_1", dornfelder_lin_py36_1_pw), - BuildSlave("dornfelder_lin_py36_2", dornfelder_lin_py36_2_pw), - BuildSlave("dornfelder_lin_py36_3", dornfelder_lin_py36_3_pw), - BuildSlave("dornfelder_lin_py36_4", dornfelder_lin_py36_4_pw), -] - -# 'protocols' contains information about protocols which master will use for -# communicating with slaves. -# You must define at least 'port' option that slaves could connect to your master -# with this protocol. -# 'port' must match the value configured into the buildslaves (with their -# --master option) -c['protocols'] = {'pb': {'host':'zinfandel.tools.ietf.org', 'port': 9989}} - -####### CHANGESOURCES -# -*- section ChangeSources -*- - -# the 'change_source' setting tells the buildmaster how it should find out -# about source code changes. - -from buildbot.changes.pb import PBChangeSource -# c['change_source'] = [] -# with open("users") as file: -# userinfo = json.read(file) -# for user in userinfo: -# prefix = userinfo[user]["prefix"] -# c.['change_source'].append(PBChangeSource(user=user, passwd="BRiR6XcT7x3$", prefix=prefix)) -c['change_source'] = [ - PBChangeSource(user="ietfdb", passwd=ietfdb_svn_hook_pw), -] - -####### SCHEDULERS -# -*- section Schedulers -*- - -# Configure the Schedulers, which decide how to react to incoming changes. 
In this -# case, just kick off a 'runtests' build - -from buildbot.schedulers.basic import SingleBranchScheduler, AnyBranchScheduler -from buildbot.schedulers.forcesched import ForceScheduler -from buildbot.schedulers.timed import Nightly -from buildbot.changes import filter -c['schedulers'] = [ - # Branch schedulers - SingleBranchScheduler(name="pyflakes", treeStableTimer=10, builderNames=["Check PyFlakes"], - change_filter=filter.ChangeFilter(branch='trunk')), - SingleBranchScheduler(name="lin_test", treeStableTimer=60*5, builderNames=["Test Suite"], - change_filter=filter.ChangeFilter(branch='trunk')), -# SingleBranchScheduler(name="osx_test", treeStableTimer=60*5, builderNames=["Test Suite (OS X)"], -# change_filter=filter.ChangeFilter(branch='trunk')), - # - AnyBranchScheduler(name="pyflakes_branch", treeStableTimer=10, builderNames=["[branch] Check PyFlakes"], - change_filter=filter.ChangeFilter(branch_re='branch/.*')), -# AnyBranchScheduler(name="lin_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite"], -# change_filter=filter.ChangeFilter(branch_re='branch/.*')), -# AnyBranchScheduler(name="osx_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite (OS X)"], -# change_filter=filter.ChangeFilter(branch_re='branch/.*')), - # - AnyBranchScheduler(name="pyflakes_personal",treeStableTimer=10, builderNames=["[personal] Check PyFlakes"], - change_filter=filter.ChangeFilter(branch_re='personal/.*')), - AnyBranchScheduler(name="lin_test_personal",treeStableTimer=60*5, builderNames=["[personal] Test Suite"], - change_filter=filter.ChangeFilter(branch_re='personal/.*')), - # Periodic Schedulers - Nightly(name="lin_test_old_libs", hour=16, minute=12, branch="trunk", builderNames=["Verify Minimum Libs"],), - Nightly(name="lin_test_libs", hour=16, minute=42, branch="trunk", builderNames=["Verify Latest Libs"],), - Nightly(name="crawler", hour=23, minute=00, branch="trunk", onlyIfChanged=True, builderNames=["Test-Crawler"],), - - # Force schedulers - ForceScheduler(name="force_pyflakes", builderNames=["Check PyFlakes"]), - ForceScheduler(name="force_lin_test", builderNames=["Test Suite"]), -# ForceScheduler(name="force_osx_test", builderNames=["Test Suite (OS X)"]), - ForceScheduler(name="force_test_crawler", builderNames=["Test-Crawler"]), - # - ForceScheduler(name="force_pyflakes_branch", builderNames=["[branch] Check PyFlakes"]), - ForceScheduler(name="force_lin_test_branch", builderNames=["[branch] Test Suite"]), -# ForceScheduler(name="force_osx_test_branch", builderNames=["[branch] Test Suite (OS X)"]), - # - ForceScheduler(name="force_pyflakes_personal", builderNames=["[personal] Check PyFlakes"]), - ForceScheduler(name="force_lin_test_personal", builderNames=["[personal] Test Suite"]), -] - -####### BUILDERS -# -*- section Builders -*- - -# The 'builders' list defines the Builders, which tell Buildbot how to perform a build: -# what steps, and which slaves can execute them. Note that any particular build will -# only take place on one slave. 
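To make the long builder definitions below easier to follow (this sketch is not part of the deleted file): every builder is assembled the same way — create a BuildFactory, append steps, and register a BuilderConfig naming the slaves allowed to run it. A minimal example against the buildbot 0.8-style API this file uses; the builder and slave names are placeholders:

    from buildbot.process.factory import BuildFactory
    from buildbot.steps.shell import ShellCommand
    from buildbot.config import BuilderConfig

    factory = BuildFactory()
    factory.addStep(ShellCommand(
        descriptionDone="say hello",      # label shown once the step completes
        command=["echo", "hello"],
        ))
    example = BuilderConfig(name="Example Builder", factory=factory,
                            slavenames=["example_slave_1"])
    # In master.cfg this object would be appended to c['builders'].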
- -from buildbot.process.factory import BuildFactory -from buildbot.steps.source.svn import SVN -from buildbot.steps.shell import ShellCommand, WarningCountingShellCommand -from buildbot.steps.python import PyFlakes -from buildbot.steps.python_twisted import RemovePYCs -from buildbot.steps.slave import SetPropertiesFromEnv -# -from buildbot.process.properties import Property, Interpolate -from buildbot.config import BuilderConfig - -#### Custom subclassed builder - -class TestCrawlerShellCommand(WarningCountingShellCommand): - name = "testcrawl" - haltOnFailure = 1 - flunkOnFailure = 1 - descriptionDone = ["test crawler"] - command=["bin/test-crawl"] - - warningPatterns = { - "exceptions": "^(Traceback| File| |.*Error|.*Exception)", - "failed": " FAIL ", - "warnings": " WARN", - "slow": " SLOW", - "invalid_html": " invalid html:", - } - - logline = "^ *(?P\d+:\d+:\d+) +(?P\d+) +(?P\d+) +(?P\d+) +(?P\d+.\d+)s +(?P.+)" - - def setTestResults(self, **kwargs): - """ - Called by subclasses to set the relevant statistics; this actually - adds to any statistics already present - """ - for kw in kwargs: - value = kwargs[kw] - if value.isdigit(): - # Counter - value = int(value) - value += self.step_status.getStatistic(kw, 0) - elif re.search("^[0-9]+\.[0-9]+$", value): - # Runtime - value = float(value) - value += self.step_status.getStatistic(kw, 0) - else: - # This is a percentage, and we can't add them - pass - self.step_status.setStatistic(kw, value) - - def createSummary(self, log): - """ - Match log lines against warningPattern. - - Warnings are collected into another log for this step, and the - build-wide 'warnings-count' is updated.""" - - warnings = {} - wregex = {} - - regex_class = re.compile("").__class__ - - if not isinstance(self.logline, regex_class): - self.logline = re.compile(self.logline) - - for key in self.warningPatterns: - warnings[key] = [] - pattern = self.warningPatterns[key] - if not isinstance(pattern, regex_class): - wregex[key] = re.compile(pattern) - else: - wregex[key] = pattern - - # Count matches to the various warning patterns - for line in log.getText().split("\n"): - for key in wregex: - match = re.search(wregex[key], line) - if match: - warnings[key].append(line) - if re.search(self.logline, line): - last_line = line - - # If there were any warnings, make the log if lines with warnings - # available - for key in warnings: - if len(warnings[key]) > 0: - self.addCompleteLog("%s (%d)" % (key, len(warnings[key])), - "\n".join(warnings[key]) + "\n") - self.step_status.setStatistic(key, len(warnings[key])) - self.setProperty(key, len(warnings[key]), "TestCrawlerShellCommand") - - match = re.search(self.logline, last_line) - for key in ['elapsed', 'pages']: - info = match.group(key) - self.step_status.setStatistic(key, info) - self.setProperty(key, info, "TestCrawlerShellCommand") - - def describe(self, done=False): - description = WarningCountingShellCommand.describe(self, done) - if done: - description = description[:] # make a private copy - for name in ["time", "elapsed", "pages", "failed", "warnings", "slow", "invalid_html", ]: - if name in self.step_status.statistics: - value = self.step_status.getStatistic(name) - displayName = name.replace('_', ' ') - # special case. Mph. 
- if type(value) is float: # this is run-time - description.append('%s: %.2fs' % (displayName, value)) - elif type(value) is int: - description.append('%s: %d' % (displayName, value)) - else: - description.append('%s: %s' % (displayName, value)) - return description - - -class UnitTest(WarningCountingShellCommand): - - name = "test" - warnOnFailure = 1 - description = ["testing"] - descriptionDone = ["test"] - command = ["python", "-m", "unittest", "discover"] - - regexPatterns = { - "tests": "Ran (\d+) tests in [0-9.]+s", - "time": "Ran \d+ tests in ([0-9.]+)s", - "skipped": "(?:OK|FAILED).*skipped=(\d+)", - "failed": "FAILED.*failures=(\d+)", - "errors": "FAILED.*errors=(\d+)", - "template_coverage":" +Template coverage: +([0-9.]+%)", - "url_coverage": " +Url coverage: +([0-9.]+%)", - "code_coverage": " +Code coverage: +([0-9.]+%)", - } - - def setTestResults(self, **kwargs): - """ - Called by subclasses to set the relevant statistics; this actually - adds to any statistics already present - """ - for kw in kwargs: - value = kwargs[kw] - if value.isdigit(): - # Counter - value = int(value) - value += self.step_status.getStatistic(kw, 0) - elif re.search("^[0-9]+\.[0-9]+$", value): - # Runtime - value = float(value) - value += self.step_status.getStatistic(kw, 0) - else: - # This is a percentage, and we can't add them - pass - self.step_status.setStatistic(kw, value) - - def createSummary(self, log): - info = {} - for line in log.getText().split("\n"): - for key in self.regexPatterns: - regex = self.regexPatterns[key] - match = re.search(regex, line) - if match: - info[key] = match.group(1) - self.setTestResults(**info) - - def describe(self, done=False): - description = WarningCountingShellCommand.describe(self, done) - if done: - description = description[:] # make a private copy - self.step_status.statistics["passed"] = ( - self.step_status.getStatistic("tests",0) - - self.step_status.getStatistic("skipped",0) - - self.step_status.getStatistic("failed",0) - - self.step_status.getStatistic("errors",0)) - for name in ["time", "tests", "passed", "skipped", "failed", "errors", "template_coverage", "url_coverage", "code_coverage", ]: - if name in self.step_status.statistics: - value = self.step_status.getStatistic(name) - displayName = name.replace('_', ' ') - # special case. Mph. - if displayName == 'template coverage': - displayName = 'templ. 
coverage' - if type(value) is float: # this is run-time - description.append('%s: %.2fs' % (displayName, value)) - elif type(value) is int: - description.append('%s: %d' % (displayName, value)) - else: - description.append('%s: %s' % (displayName, value)) - return description - - -## Set up builders - -c['builders'] = [] - -# -*- section Builder_Run_pyflakes -*- - -factory = BuildFactory() -factory.addStep(SetPropertiesFromEnv(variables=['HOME',])) -factory.addStep(SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(ShellCommand( - descriptionDone="install requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "install", "-r", "requirements.txt"], - )) -factory.addStep(ShellCommand( - descriptionDone="seting up settings_local.py", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"], - )) -factory.addStep(PyFlakes( - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["ietf/manage.py", "pyflakes", "--verbosity=0"], - )) -# This should be the last action -factory.addStep(ShellCommand( - descriptionDone="mark as passed", - workdir=Interpolate('build/%(src::branch)s'), - flunkOnFailure=False, - usePTY=False, - command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive", - "propset", "--revprop", "-r", Property('got_revision'), "test:pyflakes", "passed" ], - )) - -c['builders'].append(BuilderConfig(name="Check PyFlakes", factory=factory, category="1. trunk", - slavenames=["dunkelfelder_lin_py36_1", "dornfelder_lin_py36_1", ])) -c['builders'].append(BuilderConfig(name="[branch] Check PyFlakes", factory=factory, category="2. branch", - slavenames=["dunkelfelder_lin_py36_2", "dornfelder_lin_py36_2", ])) -c['builders'].append(BuilderConfig(name="[personal] Check PyFlakes", factory=factory, category="3. 
personal", - slavenames=["dunkelfelder_lin_py36_2",])) - -# -*- section Builder_TestSuite -*- - -factory = BuildFactory() -factory.addStep(SetPropertiesFromEnv(variables=['HOME',])) -factory.addStep(SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False)) -factory.addStep(ShellCommand( - descriptionDone="remove tmp-* dirs", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["rm", "-rf", "tmp-*/"], - )) -factory.addStep(ShellCommand( - descriptionDone="install requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "install", "-r", "requirements.txt"], - )) -factory.addStep(ShellCommand( - descriptionDone="copy settings_local.py", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"], - )) -factory.addStep(ShellCommand( - descriptionDone="collect static files", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - usePTY=False, - command=["ietf/manage.py", "collectstatic", "--noinput", ], - )) -factory.addStep(UnitTest( - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ], - )) -# This should come after tests -factory.addStep(ShellCommand( - descriptionDone="mark as passed", - workdir=Interpolate('build/%(src::branch)s'), - flunkOnFailure=False, - usePTY=False, - command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive", - "propset", "--revprop", "-r", Property('got_revision'), "test:unittest", "passed" ], - )) - -c['builders'].append(BuilderConfig(name="Test Suite", factory=factory, category="1. trunk", - slavenames=["dunkelfelder_lin_py36_1", "dornfelder_lin_py36_1", ])) -c['builders'].append(BuilderConfig(name="[branch] Test Suite", factory=factory, category="2. branch", - slavenames=["dunkelfelder_lin_py36_2", "dornfelder_lin_py36_2", ])) -c['builders'].append(BuilderConfig(name="[personal] Test Suite", factory=factory, category="3. 
personal", - slavenames=["dunkelfelder_lin_py36_2", "dornfelder_lin_py36_2", ])) - - -# -*- section Builder_TestCrawler -*- - -factory = BuildFactory() -factory.addStep(SetPropertiesFromEnv(variables=['HOME',])) -factory.addStep(ShellCommand( - descriptionDone="update database", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - timeout=3600, # 1 hour - command=["docker/updatedb", "-q"], - )) -factory.addStep(SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False)) -factory.addStep(ShellCommand( - descriptionDone="install requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "install", "-r", "requirements.txt"], - )) -factory.addStep(ShellCommand( - descriptionDone="copy settings_local.py", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"], - )) -factory.addStep(ShellCommand( - descriptionDone="run migrations", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["ietf/manage.py", "migrate"], - )) -# This will not only do a prelimnary sanity check, but also patch libs as needed: -factory.addStep(ShellCommand( - descriptionDone="run django checks", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["ietf/manage.py", "check"], - )) -factory.addStep(TestCrawlerShellCommand( - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["bin/test-crawl", "--settings=ietf.settings_testcrawl"], - )) -# This should be the last action -factory.addStep(ShellCommand( - descriptionDone="mark as passed", - workdir=Interpolate('build/%(src::branch)s'), - flunkOnFailure=False, - usePTY=False, - command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive", - "propset", "--revprop", "-r", Property('got_revision'), "test:crawler", "passed" ], - )) - -c['builders'].append(BuilderConfig(name="Test-Crawler", factory=factory, category="1. trunk", - slavenames=["dunkelfelder_lin_py36_4", ])) - - -# -*- section Builder_Verify_Old_Libs -*- - -# This build runs pip install --upgrade, to make sure that we install the earliest version of -# all dependencies, in order to get an indication if/when an incompatibility turns up with a new -# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not -# change the external test conditions and produce spurious errors because of version changes in -# dependencies. 
- -factory = BuildFactory() -factory.addStep(SetPropertiesFromEnv(variables=['HOME',])) -factory.addStep(ShellCommand( - descriptionDone="remove tweaked requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - usePTY=False, - command=["rm", "requirements.txt"], - )) -factory.addStep(SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=Interpolate('build/%(src::branch)s'), - alwaysUseLatest=True, - haltOnFailure=True, - usePTY=False, - repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False)) -factory.addStep(ShellCommand( - descriptionDone="edit requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["sed", "-i", "-e", "s/>=/==/", "requirements.txt"], - )) -factory.addStep(ShellCommand( - descriptionDone="install/upgrade requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "install", "--upgrade", "-r", "requirements.txt"], - )) -factory.addStep(ShellCommand( - descriptionDone="seting up settings_local.py", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"], - )) -factory.addStep(ShellCommand( - descriptionDone="list installed pyton modules", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "freeze"], - )) -factory.addStep(ShellCommand( - descriptionDone="collect static files", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - usePTY=False, - command=["ietf/manage.py", "collectstatic", "--noinput", ], - )) -factory.addStep(UnitTest( - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ], - )) -c['builders'].append(BuilderConfig(name="Verify Minimum Libs", factory=factory, category="1. trunk", - slavenames=["dornfelder_lin_py36_3", ])) - - -# -*- section Verify_Latest_Libs -*- - -# This build runs pip install --upgrade, to make sure that we install the latest version of all -# dependencies, in order to get an indication if/when an incompatibility turns up with a new -# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not -# change the external test conditions and produce spurious errors because of version changes in -# dependencies. 
- -factory = BuildFactory() -factory.addStep(SetPropertiesFromEnv(variables=['HOME',])) -factory.addStep(SVN( - username='buildbot@tools.ietf.org', - descriptionDone="svn update", - workdir=Interpolate('build/%(src::branch)s'), - alwaysUseLatest=True, - haltOnFailure=True, - usePTY=False, - repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'), - descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')], - )) -factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False)) -factory.addStep(ShellCommand( - descriptionDone="install/upgrade requirements", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "install", "--upgrade", "-r", "requirements.txt"], - )) -factory.addStep(ShellCommand( - descriptionDone="seting up settings_local.py", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"], - )) -factory.addStep(ShellCommand( - descriptionDone="list installed pyton modules", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["pip", "freeze"], - )) -factory.addStep(ShellCommand( - descriptionDone="collect static files", - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=False, - flunkOnFailure=False, - usePTY=False, - command=["ietf/manage.py", "collectstatic", "--noinput", ], - )) -factory.addStep(UnitTest( - workdir=Interpolate('build/%(src::branch)s'), - haltOnFailure=True, - usePTY=False, - command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ], - )) - -c['builders'].append(BuilderConfig(name="Verify Latest Libs", factory=factory, category="1. trunk", - slavenames=["dornfelder_lin_py36_3", ])) - - -####### STATUS TARGETS -# -*- section StatusTargets -*- - -# 'status' is a list of Status Targets. The results of each build will be -# pushed to these targets. buildbot/status/*.py has a variety to choose from, -# including web pages, email senders, and IRC bots. - -c['status'] = [] - -from buildbot.status import html, mail -from buildbot.status.web import authz, auth - -authz_cfg=authz.Authz( - # change any of these to True to enable; see the manual for more - # options - auth=auth.BasicAuth([("ietfdb","ietfdb")]), - gracefulShutdown = False, - forceBuild = 'auth', # use this to test your slave once it is set up - forceAllBuilds = False, - pingBuilder = False, - stopBuild = 'auth', - stopAllBuilds = False, - cancelPendingBuild = 'auth', -) -c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg)) - -# A second web status with slightly different rendering -from twisted.python import log -def changelinkfilter(html, project): - log.msg(" * changelinkfilter(html='%s', project='%s')" % (html, project)) - return html - -import jinja2, os -trac_template_loaders = [jinja2.FileSystemLoader(os.path.join(os.getcwd(), 'trac_view'))] -c['status'].append(html.WebStatus(http_port=8011, jinja_loaders=trac_template_loaders, - authz=authz_cfg)) - -# Email notifications -from zope.interface import implements -from buildbot import interfaces - -class UsernameIsEmailAddress(): - "This IEmailLookup provider assumes that the svn username is a valid email address." 
- implements(interfaces.IEmailLookup) - def getAddress(self, name): - return name - -c['status'].append(mail.MailNotifier( - fromaddr='buildbot@tools.ietf.org', - sendToInterestedUsers=False, - extraRecipients=['rjsparks@nostrum.com',], - mode="problem", - )) - -c['status'].append(mail.MailNotifier( - fromaddr='buildbot@tools.ietf.org', - lookup=UsernameIsEmailAddress(), - mode="problem", - )) - -####### PROJECT IDENTITY -# -*- section Project -*- - -# the 'title' string will appear at the top of this buildbot -# installation's html.WebStatus home page (linked to the -# 'titleURL') and is embedded in the title of the waterfall HTML page. - -c['title'] = "Buildbot: IETF Datatracker" -c['titleURL'] = "https://datatracker.ietf.org/" - -# the 'buildbotURL' string should point to the location where the buildbot's -# internal web server (usually the html.WebStatus page) is visible. This -# typically uses the port number set in the Waterfall 'status' entry, but -# with an externally-visible host name which the buildbot cannot figure out -# without some help. - -c['buildbotURL'] = "http://zinfandel.tools.ietf.org:8010/" - -####### DB URL - -c['db'] = { - # This specifies what database buildbot uses to store its state. You can leave - # this at its default for all but the largest installations. - 'db_url' : "sqlite:///state.sqlite", -} diff --git a/buildbot/restart-masters b/buildbot/restart-masters deleted file mode 100755 index be827cf40..000000000 --- a/buildbot/restart-masters +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -for d in masters/*; do ( cd $d; . env/bin/activate; buildbot restart; ); done diff --git a/buildbot/restart-workers b/buildbot/restart-workers deleted file mode 100755 index 7e62828ce..000000000 --- a/buildbot/restart-workers +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -for d in ${1:-workers/*}; do ( cd $d; . env/bin/activate; buildbot-worker restart; ); done diff --git a/buildbot/setup-buildbot-workers b/buildbot/setup-buildbot-workers deleted file mode 100755 index 026ce5e25..000000000 --- a/buildbot/setup-buildbot-workers +++ /dev/null @@ -1,180 +0,0 @@ -#!/bin/bash -# -*- indent-with-tabs: 0 -*- - -version=0.10 -program=${0##*/} -progdir=${0%/*} -if [ "$progdir" = "$program" ]; then progdir="."; fi - -# ---------------------------------------------------------------------- -function usage() { -# Possible sections: -# NAME -# SYNOPSIS -# CONFIGURATION [Normally only in Section 4] -# DESCRIPTION -# OPTIONS [Normally only in Sections 1, 8] -# EXIT STATUS [Normally only in Sections 1, 8] -# RETURN VALUE [Normally only in Sections 2, 3] -# ERRORS [Typically only in Sections 2, 3] -# ENVIRONMENT -# FILES -# VERSIONS [Normally only in Sections 2, 3] -# CONFORMING TO -# NOTES -# BUGS -# EXAMPLE -# SEE ALSO - - cat <. Repository: - https://svn.tools.ietf.org/svn/tools/ietfdb/trunk/buildbot - -COPYRIGHT - Copyright 2020 the IETF Trust. All rights reserved. - - Redistribution and use in source and binary forms, with or - without modification, are permitted provided that the conditions - laid out in the 3-clause BSD license is followed. 
- - License text: https://opensource.org/licenses/BSD-3-Clause - -EOF - -} - -# ---------------------------------------------------------------------- - -function die() { - echo -e "\n$program: error: $*" >&2 - exit 1 -} - -function err() { - echo -e "${red}$*${reset}" >&2 -} - -function note() { - if [ -n "$VERBOSE" ]; then echo -e "\n$*"; fi -} - -# ---------------------------------------------------------------------- -function version() { - echo -e "$program $version" -} - -# ---------------------------------------------------------------------- -trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR - - -# ---------------------------------------------------------------------- -# Option parsing - -# Options -shortopts=a:hp:s:vV -longopts=admin:,help,server:,passwd:,python:,verbose,version - -# Default values -read -d ' ' <<< $(who -m) -user=$REPLY -name=$(getent passwd $user | cut -d ':' -f 5 | cut -d ',' -f 1) -server='zinfandel.tools.ietf.org' -pass='' # must be set on the command line -python=python3.6 - -if [ "$(uname)" = "Linux" ]; then - args=$(getopt -o "$shortopts" --long "$longopts" -n '$program' -- $SV "$@") - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - files="$@" - eval set -- "$args" - sed="sed -r" -else - # Darwin, BSDs - args=$(getopt -o$shortopts $SV $*) - if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi - files="$@" - set -- $args - sed="sed -E" -fi - -while true ; do - case "$1" in - -a| --admin) admin="$2"; shift;; # "Name " of buildbot admin - -h| --help) usage; exit;; # Show this help, then exit - -p| --passwd) pass=$2; shift;; # Worker password - --python) python=$2; shift;; # Python version to use (e.g., 'python3.6') - -s| --server) server=$2; shift;; # Set the server fqdn - -v| --verbose) VERBOSE=1;; # Be more talkative - -V| --version) version; exit;; # Show program version, then exit - --) shift; break;; - *) die "Internal error, inconsistent option specification: '$1'";; - esac - shift -done - -# ---------------------------------------------------------------------- -# The program itself - -dir=$(dirname $(realpath $0)) -if [ -d "$dir/slaves" ]; then - path="$dir/slaves" -else - path="$dir/workers" -fi - -for worker in $path/*; do - ( - cd $worker; - pwd - if [ ! -d ./env ]; then - echo "Setting up virtual environment" - # Change python version to match deployment version - python3.6 -m venv env - fi - . env/bin/activate - pip install buildbot-worker - if [ ! -f ./buildbot.tac ]; then - pwfile=$dir/${worker##*/}_pw - echo "Looking for pwfile: $pwfile" - [ -f "$pwfile" ] && pass=$(< $pwfile) - [ -z "$pass" ] && die "Missing parameter: worker password" - buildbot-worker create-worker $PWD $server ${PWD##*/} $pass - fi - if ! grep -q "$name" ./info/admin; then - read -p "Expected $name in $PWD/info/admin, but found $(<./info/admin) -- change it [Y/n]?" 
- if [ "$REPLY" = "Y" -o "$REPLY" = "y" ]; then - if [ -z "$admin" ]; then - read -p "Admin (Your Name ): " - admin=$REPLY - fi - echo "Setting up ./info/admin" - echo "$admin" > ./info/admin - echo "Setting up ./info/host" - echo "$(uname -s -n -r) $(python --version)" > ./info/host - fi - fi - buildbot-worker stop - buildbot-worker start - ) -done \ No newline at end of file diff --git a/buildbot/workers/datatracker_lin_py36_1/info/admin b/buildbot/workers/datatracker_lin_py36_1/info/admin deleted file mode 100644 index 064f019af..000000000 --- a/buildbot/workers/datatracker_lin_py36_1/info/admin +++ /dev/null @@ -1 +0,0 @@ -Henrik Levkowetz diff --git a/buildbot/workers/datatracker_lin_py36_1/info/host b/buildbot/workers/datatracker_lin_py36_1/info/host deleted file mode 100644 index 0d0764050..000000000 --- a/buildbot/workers/datatracker_lin_py36_1/info/host +++ /dev/null @@ -1 +0,0 @@ -Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9 diff --git a/buildbot/workers/datatracker_lin_py36_2/info/admin b/buildbot/workers/datatracker_lin_py36_2/info/admin deleted file mode 100644 index 064f019af..000000000 --- a/buildbot/workers/datatracker_lin_py36_2/info/admin +++ /dev/null @@ -1 +0,0 @@ -Henrik Levkowetz diff --git a/buildbot/workers/datatracker_lin_py36_2/info/host b/buildbot/workers/datatracker_lin_py36_2/info/host deleted file mode 100644 index 0d0764050..000000000 --- a/buildbot/workers/datatracker_lin_py36_2/info/host +++ /dev/null @@ -1 +0,0 @@ -Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9 diff --git a/buildbot/workers/datatracker_lin_py36_3/info/admin b/buildbot/workers/datatracker_lin_py36_3/info/admin deleted file mode 100644 index 064f019af..000000000 --- a/buildbot/workers/datatracker_lin_py36_3/info/admin +++ /dev/null @@ -1 +0,0 @@ -Henrik Levkowetz diff --git a/buildbot/workers/datatracker_lin_py36_3/info/host b/buildbot/workers/datatracker_lin_py36_3/info/host deleted file mode 100644 index 0d0764050..000000000 --- a/buildbot/workers/datatracker_lin_py36_3/info/host +++ /dev/null @@ -1 +0,0 @@ -Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9 diff --git a/buildbot/workers/datatracker_lin_py36_4/info/admin b/buildbot/workers/datatracker_lin_py36_4/info/admin deleted file mode 100644 index 064f019af..000000000 --- a/buildbot/workers/datatracker_lin_py36_4/info/admin +++ /dev/null @@ -1 +0,0 @@ -Henrik Levkowetz diff --git a/buildbot/workers/datatracker_lin_py36_4/info/host b/buildbot/workers/datatracker_lin_py36_4/info/host deleted file mode 100644 index 0d0764050..000000000 --- a/buildbot/workers/datatracker_lin_py36_4/info/host +++ /dev/null @@ -1 +0,0 @@ -Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9 diff --git a/docker/configs/settings_local.py b/docker/configs/settings_local.py index 6c4339581..7224f3329 100644 --- a/docker/configs/settings_local.py +++ b/docker/configs/settings_local.py @@ -38,9 +38,6 @@ USING_DEBUG_EMAIL_SERVER=True EMAIL_HOST='localhost' EMAIL_PORT=2025 -TRAC_WIKI_DIR_PATTERN = "test/wiki/%s" -TRAC_SVN_DIR_PATTERN = "test/svn/%s" - MEDIA_BASE_DIR = 'test' MEDIA_ROOT = MEDIA_BASE_DIR + '/media/' MEDIA_URL = '/media/' diff --git a/docker/configs/settings_local_sqlitetest.py b/docker/configs/settings_local_sqlitetest.py index 2d9277784..b852ba4bd 100644 --- a/docker/configs/settings_local_sqlitetest.py +++ b/docker/configs/settings_local_sqlitetest.py @@ -63,9 +63,6 @@ USING_DEBUG_EMAIL_SERVER=True EMAIL_HOST='localhost' EMAIL_PORT=2025 -TRAC_WIKI_DIR_PATTERN = "test/wiki/%s" -TRAC_SVN_DIR_PATTERN = "test/svn/%s" - MEDIA_BASE_DIR = 'test' MEDIA_ROOT = 
MEDIA_BASE_DIR + '/media/' MEDIA_URL = '/media/' diff --git a/ietf/settings.py b/ietf/settings.py index db6f0eb28..981fa73a0 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -1107,61 +1107,6 @@ USER_PREFERENCE_DEFAULTS = { "left_menu" : "off", } -TRAC_MASTER_DIR = "/a/www/trac-setup/" -TRAC_WIKI_DIR_PATTERN = "/a/www/www6s/trac/%s" -TRAC_WIKI_URL_PATTERN = "https://trac.ietf.org/trac/%s/wiki" -TRAC_ISSUE_URL_PATTERN = "https://trac.ietf.org/trac/%s/report/1" -TRAC_SVN_DIR_PATTERN = "/a/svn/group/%s" -#TRAC_SVN_URL_PATTERN = "https://svn.ietf.org/svn/group/%s/" - -# The group types setting was replaced by a group feature entry 10 Jan 2019 -#TRAC_CREATE_GROUP_TYPES = ['wg', 'rg', 'area', 'team', 'dir', 'review', 'ag', 'nomcom', ] -TRAC_CREATE_GROUP_STATES = ['bof', 'active', ] -TRAC_CREATE_GROUP_ACRONYMS = ['iesg', 'iaoc', 'ietf', ] - -# This is overridden in production's settings-local. Make sure to update it. -TRAC_CREATE_ADHOC_WIKIS = [ - # admin group acronym, name, sub-path - # A trailing fileglob wildcard is supported on group acronyms - ('iesg', 'Meeting', "ietf/meeting"), - ('nomcom*', 'NomCom', 'nomcom'), -] - -SVN_PACKAGES = [ - "/usr/lib/python/dist-packages/svn", - "/usr/lib/python3.6/dist-packages/libsvn", -] - -TRAC_ENV_OPTIONS = [ - ('project', 'name', "{name} Wiki"), - ('trac', 'database', 'sqlite:db/trac.db' ), - ('trac', 'repository_type', 'svn'), - ('trac', 'repository_dir', "{svn_dir}"), - ('inherit', 'file', "/a/www/trac-setup/conf/trac.ini"), - ('components', 'tracopt.versioncontrol.svn.*', 'enabled'), -] - -TRAC_WIKI_PAGES_TEMPLATES = [ - "utils/wiki/IetfSpecificFeatures", - "utils/wiki/InterMapTxt", - "utils/wiki/SvnTracHooks", - "utils/wiki/ThisTracInstallation", - "utils/wiki/TrainingMaterials", - "utils/wiki/WikiStart", -] - -TRAC_ISSUE_SEVERITY_ADD = [ - "-", - "Candidate WG Document", - "Active WG Document", - "Waiting for Expert Review", - "In WG Last Call", - "Waiting for Shepherd Writeup", - "Submitted WG Document", - "Dead WG Document", -] - -SVN_ADMIN_COMMAND = "/usr/bin/svnadmin" # Email addresses people attempt to set for their account will be checked # against the following list of regex expressions with re.search(pat, addr): diff --git a/ietf/templates/utils/wiki/IetfSpecificFeatures b/ietf/templates/utils/wiki/IetfSpecificFeatures deleted file mode 100644 index b9cc8c397..000000000 --- a/ietf/templates/utils/wiki/IetfSpecificFeatures +++ /dev/null @@ -1,38 +0,0 @@ -= IETF-Specific Information = - -== Editing the Wiki and Issues == - -In order to create and edit wiki pages and issues, you need to log in. Click on the -small 'Login' link above the main horizontal menubar. You log in with the same -username (your email address) and password as the datatracker. If you don't have a -login/passwd or need to reset your passwd, go to https://datatracker.ietf.org/accounts/create/. - -The login and password is also used for commits to the SVN repository. See more about -the repository further down. - -== IETF-Specific Features == - -This Trac installation has a few IETF-specific features which are not generally found -in Trac: - - * Occurences of RFC numbers or draft names in Wiki text will generate links to the - RFC or draft in question. Unless you want to point to an RFC or draft in a - specific location which is different from the automatically generated link, you - don't need to explicitly add links for RFCs and drafts. Examples: RFC 2026, - draft-ietf-poised95-std-proc-3 - - * Each issue in the issue tracker can be indicated to concern a 'component'. 
This is - a standard Trac feature; however, the list of available components is automatically - updated to include all the active working group drafts. This makes it easier to - associate issues with drafts for the WG participants, without the Chairs needing to - go in as admin users and add a new component each time there's a new WG draft. - - * Everywhere you can use wiki markup (on the wiki pages, roadmap descriptions, - etc.) you may embed a macro which shows a ticket statistics graph. Full - information about the macro is available at [http://trac-hacks.org/wiki/TicketStatsMacro]. - Briefly, the macro syntax is: - {{{ - [[TicketStats( height=250,daterange=12m,res_days=30)]] - }}} - which gives this result: [[TicketStats( height=250,daterange=12m,res_days=30)]] - diff --git a/ietf/templates/utils/wiki/InterMapTxt b/ietf/templates/utils/wiki/InterMapTxt deleted file mode 100644 index 442741a87..000000000 --- a/ietf/templates/utils/wiki/InterMapTxt +++ /dev/null @@ -1,72 +0,0 @@ -= InterMapTxt = - -== This is the place for defining InterWiki prefixes == - -This page was modelled after the MeatBall:InterMapTxt page. -In addition, an optional comment is allowed after the mapping. - - -This page is interpreted in a special way by Trac, in order to support -!InterWiki links in a flexible and dynamic way. - -The code block after the first line separator in this page -will be interpreted as a list of !InterWiki specifications: -{{{ -prefix URL [ # comment] -}}} - -By using `$1`, `$2`, etc. within the URL, it is possible to create -InterWiki links which support multiple arguments, e.g. Trac:ticket:40. -The URL itself can be optionally followed by a comment, -which will subsequently be used for decorating the links -using that prefix. - -New !InterWiki links can be created by adding to that list, in real time. -Note however that ''deletions'' are also taken into account immediately, -so it may be better to use comments for disabling prefixes. - -Also note that !InterWiki prefixes are case insensitive. - - -== List of Active Prefixes == - -[[InterWiki]] - - ----- - -== Prefix Definitions == - -{{{ -PEP http://www.python.org/peps/pep-$1.html # Python Enhancement Proposal -Trac-ML http://thread.gmane.org/gmane.comp.version-control.subversion.trac.general/ # Message $1 in Trac Mailing List -trac-dev http://thread.gmane.org/gmane.comp.version-control.subversion.trac.devel/ # Message $1 in Trac Development Mailing List - -Mercurial http://www.selenic.com/mercurial/wiki/index.cgi/ # the wiki for the Mercurial distributed SCM - -RFC http://datatracker.ietf.org/doc/html/rfc$1.html # IETF's RFC $1 -DataTracker https://datatracker.ietf.org/doc/ -dt https://datatracker.ietf.org/doc/ - -# -# A arbitrary pick of InterWiki prefixes... -# -Acronym http://www.acronymfinder.com/af-query.asp?String=exact&Acronym= -C2find http://c2.com/cgi/wiki?FindPage&value= -Cache http://www.google.com/search?q=cache: -CPAN http://search.cpan.org/perldoc? -DebianBug http://bugs.debian.org/ -DebianPackage http://packages.debian.org/ -Dictionary http://www.dict.org/bin/Dict?Database=*&Form=Dict1&Strategy=*&Query= -Google http://www.google.com/search?q= -GoogleGroups http://groups.google.com/group/$1/msg/$2 # Message $2 in $1 Google Group -JargonFile http://downlode.org/perl/jargon-redirect.cgi?term= -MeatBall http://www.usemod.com/cgi-bin/mb.pl? -MetaWiki http://sunir.org/apps/meta.pl? 
-MetaWikiPedia http://meta.wikipedia.org/wiki/ -MoinMoin http://moinmoin.wikiwikiweb.de/ -WhoIs http://www.whois.sc/ -Why http://clublet.com/c/c/why? -c2Wiki http://c2.com/cgi/wiki? -WikiPedia http://en.wikipedia.org/wiki/ -}}} diff --git a/ietf/templates/utils/wiki/SvnTracHooks b/ietf/templates/utils/wiki/SvnTracHooks deleted file mode 100644 index 1bdd15ec8..000000000 --- a/ietf/templates/utils/wiki/SvnTracHooks +++ /dev/null @@ -1,73 +0,0 @@ -= SVN Trac Hooks = - -If the Trac Hooks for SVN has been installed for the svn repository -coupled to this Trac instance, the Key Phrases documented below may -be used in SVN commit messages to cause automatic updates and annotations -of Trac issues. - -== The trac-post-commit-hook == - -This script looks at the commit message associated with an SVN commit, -and based on the presence of a number of key phrases will add annotations -to tickets and also possibly change ticket status, for instance closing -it. - -=== Key Phrases === - -The key phrases available are: -{{{ - Fix - Fixes - Fix for - Close - Closes - - Addresses - References - Relates to - Related to - See -}}} - -=== Ticket specification === - -The specification of the ticket to act on may specify one or more -tickets, using any of the following forms: -{{{ - - , {, } - , and -}}} -and variations thereof. - -=== Ticket identification === - -The individual ticket specification -can take any of the following forms: -{{{ - # - ticket - ticket: - issue - issue: - bug - bug: -}}} - -=== Examples === - -{{{ - Clarify header normalization vs matching request headers (see #147) - - Resolve #27: fix definition of idempotent - - Note change for issue 157 (related to #157) - - Define http and https URI schemes: addresses #58, #128, #159 - - Define http and https URI schemes: addresses #58, #128, #159; - fixes #157: removed reference to RFC1900 use of IP addresses in URI. - - Resolve #140: rephrase note so that it becomes clear that the described ... -}}} - diff --git a/ietf/templates/utils/wiki/ThisTracInstallation b/ietf/templates/utils/wiki/ThisTracInstallation deleted file mode 100644 index 454c3f7c8..000000000 --- a/ietf/templates/utils/wiki/ThisTracInstallation +++ /dev/null @@ -1,94 +0,0 @@ -{{{ -#!rst - -Trac Installation on tools.ietf.org -=================================== - -Background ----------- - -The Track installation used on the tools.ietf.org site is different from the -installation examples provided with Trac and on http://trac.edgewall.com. The -reason is mainly that the multi-project examples all assume that Trac -constitutes the whole of the deployed environment, rather than being part of a -greater set. This means that the examples assume that accessing the -individual projects through URLs of the form "/$some_path/trac/$projname" -makes sense, while in our case, we would like the URLs to look like -"/$some_path/$projname/trac". In the multi-project configuration, this would -make Trac always believe that the project name is 'trac' - the last path -component. - -Explored Alternatives ---------------------- - -Make Apache set ``TRAC_ENV`` dynamically -........................................ - -Tell Apache to dynamically set Trac's environment variable ``TRAC_ENV`` to the -particular value for the accessed project: - -``/etc/apache2/sites-available/tools.ietf.org``: - -:: - - ScriptAliasMatch "^/wg/[^/]+/trac(/.*)?" 
/usr/share/trac/cgi-bin/trac.cgi$1 - - SetEnv TRAC_ENV "/www/tools.ietf.org/tools/trac/wg/$1" - - -This doesn't work because Apache doesn't support $n replacements based on -earlier LocationMatch matches. - -Use .htaccess with default ScriptAlias -...................................... - -Maybe we could use individual .htaccess files in each WG directory to set the -``TRAC_ENV`` variable to the required value? - -``/etc/apache2/sites-available/tools.ietf.org``: - -:: - - ScriptAliasMatch "^/wg/[^/]+/trac(/.*)?" /usr/share/trac/cgi-bin/trac.cgi$1 - - -``/www/tools.ietf.org/wg/examplewg/.htaccess``: - -:: - - SetEnv TRAC_ENV "/www/tools.ietf.org/wg/examplewg/trac" - -This doesn't work because this .htaccess isn't read when ScriptAlias points to -another directory. - - -Use .htaccess with a local CGI script -..................................... - -Suppose we let ScriptAlias point to a script which is placed so that the -.htaccess file actually gets read? - -``/etc/apache2/sites-available/tools.ietf.org``: - -:: - - ScriptAliasMatch "^/wg/([^/]+)/trac(/.*)?" /www/tools.ietf.org/wg/$1/trac/index.cgi$2 - - -``/www/tools.ietf.org/wg/examplewg/.htaccess``: - -:: - - SetEnv TRAC_ENV "/www/tools.ietf.org/wg/examplewg/trac" - -This *does* work, but it is not easily adapted to a Fast-CGI solution. It is -the set-up which is currently in use, but an alternative which will permit -fast-cgi usage would be preferred - the current solution is anything but -snappy... - - - - - - -}}} diff --git a/ietf/templates/utils/wiki/TrainingMaterials b/ietf/templates/utils/wiki/TrainingMaterials deleted file mode 100644 index 5c1622933..000000000 --- a/ietf/templates/utils/wiki/TrainingMaterials +++ /dev/null @@ -1,10 +0,0 @@ -= Training Materials = - -WG Traingin Materials go here. - -If you want to embed video, you can use the ![[Movie(,width=,height=)]] -macro to embed moves from [http://youtube.com/ YouTube]. Suggested width and height parameters: width=640,height=385. - -Example which doesn't point to an actual video: - -[[Movie(http://www.youtube.com/watch?v=g_exampleid,width=640px,height=385px)]] diff --git a/ietf/templates/utils/wiki/WikiStart b/ietf/templates/utils/wiki/WikiStart deleted file mode 100644 index 44bdc9ff4..000000000 --- a/ietf/templates/utils/wiki/WikiStart +++ /dev/null @@ -1,29 +0,0 @@ -= Welcome to this IETF WG Trac installation = - -Trac is a '''minimalistic''' approach to '''web-based''' project management, -suitable for software and documentation projects and similar. Its goal is to -simplify effective tracking and handling of project issues, enhancements and -overall progress. - -As all Wiki pages, this page is editable, this means that you can modify the -contents of this page simply by using your web-browser. Simply click on the -"Edit this page" link at the bottom of the page. WikiFormatting will give you -a detailed description of available Wiki formatting commands. - -There is nothing in this page which isn't also covered in one of the other -wiki pages, so the first adjustment you make of this Trac installation could -be to edit the content of this page, replacing this initial text with content -appropriate to your Working Group. - -There are some aspects of this Trac installation which are specific to the -IETF environment. Those are described in IetfSpecificFeatures. 
- - -== Starting Points == - - * TracGuide -- Built-in Documentation - * [http://trac.edgewall.org/ The Trac project] -- Trac Open Source Project - * [http://trac.edgewall.org/wiki/TracFaq Trac FAQ] -- Frequently Asked Questions - * TracSupport -- Trac Support - -For a complete list of local wiki pages, see TitleIndex. diff --git a/ietf/utils/management/commands/create_group_wikis.py b/ietf/utils/management/commands/create_group_wikis.py deleted file mode 100644 index bbcfadf78..000000000 --- a/ietf/utils/management/commands/create_group_wikis.py +++ /dev/null @@ -1,413 +0,0 @@ -# Copyright The IETF Trust 2016-2020, All Rights Reserved -# -*- coding: utf-8 -*- - - -import os -import copy -import io -import pkg_resources -import syslog - -from trac.core import TracError -from trac.env import Environment -from trac.perm import PermissionSystem -from trac.ticket.model import Component, Milestone, Severity -from trac.util.text import unicode_unquote -from trac.wiki.model import WikiPage - -from django.conf import settings -from django.core.management.base import BaseCommand, CommandError -from django.db.models import Q -from django.template.loader import render_to_string - -import debug # pyflakes:ignore - -from ietf.group.models import Group, GroupFeatures -from ietf.utils.pipe import pipe - -logtag = __name__.split('.')[-1] -logname = "user.log" -syslog.openlog(str(logtag), syslog.LOG_PID, syslog.LOG_USER) - -class Command(BaseCommand): - help = "Create group wikis for WGs, RGs and Areas which don't have one." - - def add_arguments(self, parser): - parser.add_argument('--wiki-dir-pattern', dest='wiki_dir_pattern', - default=settings.TRAC_WIKI_DIR_PATTERN, - help='A pattern with %s placeholder for group wiki path') - parser.add_argument('--svn-dir-pattern', dest='svn_dir_pattern', - default=settings.TRAC_SVN_DIR_PATTERN, - help='A pattern with %s placeholder for group svn path') - parser.add_argument('--group-list', '-g', dest='group_list', help='Limit processing to groups with the given acronyms (a comma-separated list)') - parser.add_argument('--dummy-run', '-n', default=False, action='store_true', dest='dummy_run', help='Make no changes, just show what would be done') - - secretariat = Group.objects.get(acronym='secretariat') - - def note(self, msg): - if self.verbosity > 1: - self.stdout.write(msg) - - def log(self, msg): - syslog.syslog(msg) - self.stdout.write(msg) - self.stderr.write(msg) - - # --- svn --- - - def do_cmd(self, cmd, *args): - quoted_args = [ '"%s"'%a if ' ' in a else a for a in args ] - if self.dummy_run: - self.note("Would run %s %s ..." % (os.path.basename(cmd), " ".join(quoted_args))) - else: - self.note("Running %s %s ..." 
% (os.path.basename(cmd), " ".join(quoted_args))) - command = [ cmd, ] + list(args) - command = ' '.join(command).encode('utf-8') - code, out, err = pipe(command) - out = out.decode('utf-8') - err = err.decode('utf-8') - msg = None - if code != 0: - msg = "Error %s: %s when executing '%s'" % (code, err, " ".join(command)) - self.log(msg) - return msg, out - - def svn_admin_cmd(self, *args): - return self.do_cmd(settings.SVN_ADMIN_COMMAND, *args) - - def create_svn(self, svn): - if self.dummy_run: - self.note(" Would create svn repository: %s" % svn) - return "Dummy run, no svn repo created" - else: - self.note(" Creating svn repository: %s" % svn) - if not os.path.exists(os.path.dirname(svn)): - msg = "Intended to create '%s', but parent directory is missing" % svn - self.log(msg) - return msg - err, out= self.svn_admin_cmd("create", svn ) - if err: - self.log(err) - return err - return "" - - # --- trac --- - - def remove_demo_components(self, env): - for component in Component.select(env): - if component.name.startswith('component'): - component.delete() - - def remove_demo_milestones(self, env): - for milestone in Milestone.select(env): - if milestone.name.startswith('milestone'): - milestone.delete() - - def symlink_to_master_assets(self, path, env): - master_dir = settings.TRAC_MASTER_DIR - master_htdocs = os.path.join(master_dir, "htdocs") - group_htdocs = os.path.join(path, "htdocs") - self.note(" Symlinking %s to %s" % (master_htdocs, group_htdocs)) - os.removedirs(group_htdocs) - os.symlink(master_htdocs, group_htdocs) - - def add_wg_draft_states(self, group, env): - for state in settings.TRAC_ISSUE_SEVERITY_ADD: - self.note(" Adding severity %s" % state) - severity = Severity(env) - severity.name = state - severity.insert() - - def add_wiki_page(self, env, name, text): - page = WikiPage(env, name) - if page.time: - self.note(" ** Page %s already exists, not adding it." 
% name) - return - page.text = text - page.save(author="(System)", comment="Initial page import") - - def add_default_wiki_pages(self, env): - dir = pkg_resources.resource_filename('trac.wiki', 'default-pages') - #WikiAdmin(env).load_pages(dir) - with env.db_transaction: - for name in os.listdir(dir): - filename = os.path.join(dir, name) - name = unicode_unquote(name.encode('utf-8')) - if os.path.isfile(filename): - self.note(" Adding page %s" % name) - with io.open(filename, encoding='utf-8') as file: - text = file.read() - self.add_wiki_page(env, name, text) - - def add_custom_wiki_pages(self, group, env): - for templ in settings.TRAC_WIKI_PAGES_TEMPLATES: - _, name = os.path.split(templ) - text = render_to_string(templ, {"group": group}) - self.note(" Adding page %s" % name) - self.add_wiki_page(env, name, text) - - def sync_default_repository(self, group, env): - repository = env.get_repository('') - if repository: - self.note(" Indexing default repository") - repository.sync() - else: - self.log("Trac environment '%s' does not have any repository" % env) - - def create_adhoc_trac(self, name, path): - if not os.path.exists(os.path.dirname(path)): - msg = "Intended to create '%s', but parent directory is missing" % path - self.log(msg) - return None, msg - options = copy.deepcopy(settings.TRAC_ENV_OPTIONS) - # Interpolate group field names to values in the option settings: - - remove = [] - for i in range(len(options)): - sect, key, val = options[i] - if key in ['repository_type', 'repository_dir', ]: - remove = [i] + remove - else: - val = val.format(name=name) - options[i] = sect, key, val - for i in remove: - del options[i] - - # Try to creat ethe environment, remove unwanted defaults, and add - # custom pages and settings. - if self.dummy_run: - self.note("Would create Trac for '%s' at %s" % (name, path)) - return None, "Dummy run, no trac created" - else: - try: - self.note("Creating Trac for '%s' at %s" % (name, path)) - env = Environment(path, create=True, options=options) - self.remove_demo_components(env) - self.remove_demo_milestones(env) - # Use custom assets (if any) from the master setup - self.symlink_to_master_assets(path, env) - self.add_custom_wiki_pages(name, env) - self.add_default_wiki_pages(env) - # Permissions will be handled during permission update later. - return env, "" - except TracError as e: - msg = "While creating Trac instance for %s: %s" % (name, e) - self.log(msg) - return None, msg - - def create_group_trac(self, group): - if not os.path.exists(os.path.dirname(group.trac_dir)): - msg = "Intended to create '%s', but parent directory is missing" % group.trac_dir - self.log(msg) - return None, msg - options = copy.deepcopy(settings.TRAC_ENV_OPTIONS) - # Interpolate group field names to values in the option settings: - for i in range(len(options)): - sect, key, val = options[i] - val = val.format(**group.__dict__) - options[i] = sect, key, val - # Try to create the environment, remove unwanted defaults, and add - # custom pages and settings. 
- if self.dummy_run: - self.note("Would create Trac for group '%s' at %s" % (group.acronym, group.trac_dir)) - return None, "Dummy run, no trac created" - else: - try: - self.note("Creating Trac for group '%s' at %s" % (group.acronym, group.trac_dir)) - env = Environment(group.trac_dir, create=True, options=options) - self.remove_demo_components(env) - self.remove_demo_milestones(env) - self.maybe_add_group_url(group, 'wiki', settings.TRAC_WIKI_URL_PATTERN % group.acronym) - self.maybe_add_group_url(group, 'tracker', settings.TRAC_ISSUE_URL_PATTERN % group.acronym) - # Use custom assets (if any) from the master setup - self.symlink_to_master_assets(group.trac_dir, env) - if group.features.acts_like_wg: - self.add_wg_draft_states(group, env) - self.add_custom_wiki_pages(group, env) - self.add_default_wiki_pages(env) - self.sync_default_repository(group, env) - # Components (i.e., drafts) will be handled during components - # update later - # Permissions will be handled during permission update later. - return env, "" - except (TracError, IOError) as e: - msg = "While creating Trac instance for %s: %s" % (group, e) - self.log(msg) - return None, msg - - def update_trac_permissions(self, name, group, env): - if self.dummy_run: - self.note("Would update Trac permissions for '%s' from group %s" % (name, group.acronym)) - else: - self.note("Updating Trac permissions for '%s' from group %s" % (name, group.acronym)) - mgr = PermissionSystem(env) - permission_list = mgr.get_all_permissions() - permission_list = [ (u,a) for (u,a) in permission_list if not u in ['anonymous', 'authenticated']] - permissions = {} - for user, action in permission_list: - if not user in permissions: - permissions[user] = [] - permissions[user].append(action) - roles = ( list( group.role_set.filter(name_id__in=set(['chair', 'secr', 'ad', 'trac-admin', ]+group.features.admin_roles))) - + list(self.secretariat.role_set.filter(name_id__in=['trac-admin', ]) )) - users = [] - for role in roles: - user = role.email.address.lower() - users.append(user) - if not user in permissions: - try: - self.note(" Granting admin permission for %s" % user) - mgr.grant_permission(user, 'TRAC_ADMIN') - if not user in permissions: - permissions[user] = [] - permissions[user].append('TRAC_ADMIN') - except TracError as e: - self.log("While adding admin permission for %s: %s" (user, e)) - for user in permissions: - if not user in users: - if 'TRAC_ADMIN' in permissions[user]: - try: - self.note(" Revoking admin permission for %s" % user) - mgr.revoke_permission(user, 'TRAC_ADMIN') - except TracError as e: - self.log("While revoking admin permission for %s: %s" (user, e)) - - def update_trac_components(self, group, env): - if self.dummy_run: - self.note("Would update Trac components for group '%s'" % group.acronym) - else: - self.note("Updating Trac components for group '%s'" % group.acronym) - components = Component.select(env) - comp_names = [ c.name for c in components ] - group_docs = group.document_set.filter(states__slug='active', type_id='draft').distinct() - group_comp = [] - for doc in group_docs: - if not doc.name.startswith('draft-'): - self.log("While adding components: unexpectd %s group doc name: %s" % (group.acronym, doc.name)) - continue - name = doc.name[len('draft-'):] - if name.startswith('ietf-'): - name = name[len('ietf-'):] - elif name.startswith('irtf-'): - name = name[len('ietf-'):] - if name.startswith(group.acronym+'-'): - name = name[len(group.acronym+'-'):] - group_comp.append(name) - if not name in comp_names and not 
doc.name in comp_names: - self.note(" Group draft: %s" % doc.name) - self.note(" Adding component %s" % name) - comp = Component(env) - comp.name = name - comp.owner = "%s@ietf.org" % doc.name - comp.insert() - - def maybe_add_group_url(self, group, slug, url): - if not group.groupextresource_set.filter(name__slug=slug).exists(): - self.note(" adding %s %s URL ..." % (group.acronym, slug)) - group.groupextresource_set.create(name_id=slug,value=url) - - def add_custom_pages(self, group, env): - for template_name in settings.TRAC_WIKI_PAGES_TEMPLATES: - pass - - def add_custom_group_states(self, group, env): - for state_name in settings.TRAC_ISSUE_SEVERITY_ADD: - pass - - # -------------------------------------------------------------------- - - def handle(self, *filenames, **options): - self.verbosity = options['verbosity'] - self.errors = [] - self.wiki_dir_pattern = options.get('wiki_dir_pattern', settings.TRAC_WIKI_DIR_PATTERN) - self.svn_dir_pattern = options.get('svn_dir_pattern', settings.TRAC_SVN_DIR_PATTERN) - self.group_list = options.get('group_list', None) - self.dummy_run = options.get('dummy_run', False) - self.wiki_dir_pattern = os.path.join(str(settings.BASE_DIR), str('..'), self.wiki_dir_pattern) - self.svn_dir_pattern = os.path.join(settings.BASE_DIR, '..', self.svn_dir_pattern) - - if not self.group_list is None: - self.group_list = self.group_list.split('.') - - if isinstance(self.verbosity, str) and self.verbosity.isdigit(): - self.verbosity = int(self.verbosity) - - if self.dummy_run and self.verbosity < 2: - self.verbosity = 2 - - if not os.path.exists(os.path.dirname(self.wiki_dir_pattern)): - raise CommandError('The Wiki base direcory specified for the wiki directories (%s) does not exist.' % os.path.dirname(self.wiki_dir_pattern)) - - if not os.path.exists(os.path.dirname(self.svn_dir_pattern)): - raise CommandError('The SVN base direcory specified for the SVN directories (%s) does not exist.' 
% os.path.dirname(self.svn_dir_pattern)) - - gtypes = [ f.type for f in GroupFeatures.objects.filter(create_wiki=True) ] - gfilter = Q(type__in=gtypes, state__slug__in=settings.TRAC_CREATE_GROUP_STATES) - gfilter |= Q(acronym__in=settings.TRAC_CREATE_GROUP_ACRONYMS) - - groups = Group.objects.filter(gfilter).order_by('acronym') - if self.group_list: - - groups = groups.filter(acronym__in=self.group_list) - - for group in groups: - try: - self.note("Processing group '%s'" % group.acronym) - group.trac_dir = self.wiki_dir_pattern % group.acronym - group.svn_dir = self.svn_dir_pattern % group.acronym - - if not os.path.exists(group.svn_dir): - err = self.create_svn(group.svn_dir) - if err: - self.errors.append(err) - - if not os.path.exists(group.trac_dir): - trac_env, msg = self.create_group_trac(group) - if not trac_env: - self.errors.append(msg) - else: - trac_env = Environment(group.trac_dir) - - if not trac_env and not self.dummy_run: - continue - - self.update_trac_permissions(group.acronym, group, trac_env) - self.update_trac_components(group, trac_env) - - except Exception as e: - self.errors.append(e) - self.log("While processing %s: %s" % (group.acronym, e)) - raise - - for acronym, name, path in settings.TRAC_CREATE_ADHOC_WIKIS: - try: - self.note("Processing wiki '%s'" % name) - if not os.path.isabs(path): - path = self.wiki_dir_pattern % path - - if not os.path.exists(path): - trac_env, msg = self.create_adhoc_trac(name, path) - if not trac_env: - self.errors.append(msg) - else: - trac_env = Environment(path) - - if not trac_env and not self.dummy_run: - continue - - if acronym.endswith('*'): - groups = Group.objects.filter(acronym__startswith=acronym[:-1], state_id='active') - for group in groups: - self.update_trac_permissions(name, group, trac_env) - else: - group = Group.objects.get(acronym=acronym) - self.update_trac_permissions(name, group, trac_env) - - except Exception as e: - self.errors.append(e) - self.log("While processing %s: %s" % (name, e)) - raise - - - if self.errors: - raise CommandError("There were %s failures in WG Trac creation:\n %s" % (len(self.errors), "\n ".join(self.errors))) diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index 40943cfca..5e9a6e7d0 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -15,15 +15,12 @@ from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from fnmatch import fnmatch from importlib import import_module -from .pipe import pipe from textwrap import dedent -from unittest import skipIf from tempfile import mkdtemp from django.apps import apps from django.contrib.auth.models import User from django.conf import settings -from django.core.management import call_command from django.template import Context from django.template import Template # pyflakes:ignore from django.template.defaulttags import URLNode @@ -33,8 +30,6 @@ from django.urls import reverse as urlreverse import debug # pyflakes:ignore -from ietf.group.factories import GroupFactory -from ietf.group.models import Group from ietf.person.name import name_parts, unidecode_name from ietf.submit.tests import submission_file from ietf.utils.bower_storage import BowerStorageFinder @@ -46,22 +41,6 @@ from ietf.utils.test_utils import TestCase from ietf.utils.text import parse_unicode from ietf.utils.xmldraft import XMLDraft - -skip_wiki_glue_testing = False -skip_message_svn = "" -skip_message_trac = "" -try: - import svn # pyflakes:ignore -except ImportError as e: - skip_wiki_glue_testing = True - skip_message_svn = "Skipping 
trac tests: %s" % e - print(" "+skip_message_svn) - -if sys.version_info.major==3: - skip_version_trac = True - skip_message_trac = "Skipping trac tests: Trac not available for Python3 as of 14 Jul 2019, 04 Jul 2020" - print(" "+skip_message_trac) - class SendingMail(TestCase): def test_send_mail_preformatted(self): @@ -292,67 +271,6 @@ class TemplateChecksTestCase(TestCase): r = self.client.get(url) self.assertTemplateUsed(r, '500.html') -@skipIf(skip_version_trac, skip_message_trac) -@skipIf(skip_wiki_glue_testing, skip_message_svn) -class TestWikiGlueManagementCommand(TestCase): - - def setUp(self): - super().setUp() - # We create temporary wiki and svn directories, and provide them to the management - # command through command line switches. We have to do it this way because the - # management command reads in its own copy of settings.py in its own python - # environment, so we can't modify it here. - set_coverage_checking(False) - self.wiki_dir_pattern = os.path.abspath('tmp-wiki-dir-root/%s') - if not os.path.exists(os.path.dirname(self.wiki_dir_pattern)): - os.mkdir(os.path.dirname(self.wiki_dir_pattern)) - self.svn_dir_pattern = os.path.abspath('tmp-svn-dir-root/%s') - if not os.path.exists(os.path.dirname(self.svn_dir_pattern)): - os.mkdir(os.path.dirname(self.svn_dir_pattern)) - - def tearDown(self): - shutil.rmtree(os.path.dirname(self.wiki_dir_pattern)) - shutil.rmtree(os.path.dirname(self.svn_dir_pattern)) - set_coverage_checking(True) - super().tearDown() - - def test_wiki_create_output(self): - for group_type in ['wg','rg','ag','area','rag']: - GroupFactory(type_id=group_type) - groups = Group.objects.filter( - type__slug__in=['wg','rg','ag','area','rag'], - state__slug='active' - ).order_by('acronym') - out = io.StringIO() - err = io.StringIO() - call_command('create_group_wikis', stdout=out, stderr=err, verbosity=2, - wiki_dir_pattern=self.wiki_dir_pattern, - svn_dir_pattern=self.svn_dir_pattern, - ) - command_output = out.getvalue() - command_errors = err.getvalue() - self.assertEqual("", command_errors) - for group in groups: - self.assertIn("Processing group '%s'" % group.acronym, command_output) - # Do a bit of verification using trac-admin, too - admin_code, admin_output, admin_error = pipe( - 'trac-admin %s permission list' % (self.wiki_dir_pattern % group.acronym)) - self.assertEqual(admin_code, 0) - roles = group.role_set.filter(name_id__in=['chair', 'secr', 'ad']) - for role in roles: - user = role.email.address.lower() - self.assertIn("Granting admin permission for %s" % user, command_output) - self.assertIn(user, admin_output) - docs = group.document_set.filter(states__slug='active', type_id='draft') - for doc in docs: - name = doc.name - name = name.replace('draft-','') - name = name.replace(doc.stream_id+'-', '') - name = name.replace(group.acronym+'-', '') - self.assertIn("Adding component %s"%name, command_output) - for page in settings.TRAC_WIKI_PAGES_TEMPLATES: - self.assertIn("Adding page %s" % os.path.basename(page), command_output) - self.assertIn("Indexing default repository", command_output) OMITTED_APPS = [ 'ietf.secr.meetings', diff --git a/requirements.txt b/requirements.txt index 57ea6376d..2513ffd52 100644 --- a/requirements.txt +++ b/requirements.txt @@ -66,10 +66,8 @@ rfc2html>=2.0.1 scout-apm>=2.23.0 selenium>=3.141.0,<4.0 six>=1.10.0 -svn>=1.0.1 tblib>=1.3.0 tqdm>=3.7.0 -#Trac>=1.0.10,<1.2 Unidecode>=0.4.18,<1.2.0 #wsgiref>=0.1.2 weasyprint>=52.5,<53 From 9e06efeabaaede65bd2f60b549d8aa009948b0f7 Mon Sep 17 00:00:00 2001 From: Robert Sparks 
Date: Wed, 16 Mar 2022 11:54:00 -0500 Subject: [PATCH 26/41] feat: update the shepherd writeup template (#3650) * feat: update the shepherd writeup template Fixes #3580. * fix: correct the version date --- ietf/templates/doc/shepherd_writeup.txt | 122 ++++++------------------ 1 file changed, 29 insertions(+), 93 deletions(-) diff --git a/ietf/templates/doc/shepherd_writeup.txt b/ietf/templates/doc/shepherd_writeup.txt index eb484c6d7..b703de60c 100644 --- a/ietf/templates/doc/shepherd_writeup.txt +++ b/ietf/templates/doc/shepherd_writeup.txt @@ -1,119 +1,55 @@ -{% if doc.stream %}{% if doc.stream.slug == 'ietf' %}{% if doc.group.type.slug == 'individ' %}As required by RFC 4858, this is the current template for the Document -Shepherd Write-Up. Changes are expected over time. +{% if doc.stream %}{% if doc.stream.slug == 'ietf' %}As required by RFC 4858, this is the current template for the Document Shepherd Write-Up. Changes are expected over time. +This version is dated 2 March 2022. +{% if doc.group.type.slug == 'individ' %} {# The odd placement of the else/endif are to control newlines in the text rendering #} +(1) Was the document considered in any WG, and if so, why was it not adopted as a work item there? -This version is dated 1 November 2019. +(2) Was there controversy about particular points that caused the WG to not adopt the document?{% else %} +(1) Does the working group (WG) consensus represent the strong concurrence of a few individuals, with others being silent, or did it reach broad agreement? -(1) What type of RFC is being requested (BCP, Proposed Standard, Internet Standard, Informational, Experimental, or Historic)? Why is this the proper type of RFC? Is this type of RFC indicated in the title page header? +(2) Was there controversy about particular points, or were there decisions where the consensus was particularly rough?{% endif %} -(2) The IESG approval announcement includes a Document Announcement Write-Up. Please provide such a Document Announcement Write-Up. Recent examples can be found in the "Action" announcements for approved documents. The approval announcement contains the following sections: +(3) Has anyone threatened an appeal or otherwise indicated extreme discontent? If so, please summarize the areas of conflict in separate email messages to the responsible Area Director. (It should be in a separate email because this questionnaire is publicly available.) -Technical Summary +(4) For protocol documents, are there existing implementations of the contents of the document? Have a significant number of potential implementers indicated plans to implement? Are any existing implementations reported somewhere, either in the document itself (as RFC 7942 recommends) or elsewhere (where)? -Relevant content can frequently be found in the abstract and/or introduction of the document. If not, this may be an indication that there are deficiencies in the abstract or introduction. +Additional Reviews -Working Group Summary +(5) Does this document need review from other IETF working groups or external organizations? Have those reviews occurred? -Was the document considered in any WG, and if so, why was it not adopted as a work item there? Was there controversy about particular points that caused the WG to not adopt the document? +(6) Describe how the document meets any required formal expert review criteria, such as the MIB Doctor, YANG Doctor, media type, and URI type reviews. 
-Document Quality +(7) If the document contains a YANG module, has the final version of the module been checked with any of the recommended validation tools (https://trac.ietf.org/trac/ops/wiki/yang-review-tools) for syntax and formatting validation? If there are any resulting errors or warnings, what is the justification for not fixing them at this time? Does the YANG module comply with the Network Management Datastore Architecture (NMDA) as specified in RFC 8342? -Are there existing implementations of the protocol? Have a significant number of vendors indicated their plan to implement the specification? Are there any reviewers that merit special mention as having done a thorough review, e.g., one that resulted in important changes or a conclusion that the document had no substantive issues? If there was a MIB Doctor, YANG Doctor, Media Type or other expert review, what was its course (briefly)? In the case of a Media Type review, on what date was the request posted? Personnel Who is the Document Shepherd? Who is the Responsible Area Director? +(8) Describe reviews and automated checks performed to validate sections of the final version of the document written in a formal language, such as XML code, BNF rules, MIB definitions, CBOR’s CDDL, etc. -(3) Briefly describe the review of this document that was performed by the Document Shepherd. If this version of the document is not ready for publication, please explain why the document is being forwarded to the IESG. +Document Shepherd Checks -(4) Does the document Shepherd have any concerns about the depth or breadth of the reviews that have been performed? +(9) Based on the shepherd’s review of the document, is it their opinion that this document is needed, clearly written, complete, correctly designed, and ready to be handed off to the responsible Area Director? -(5) Do portions of the document need review from a particular or from broader perspective, e.g., security, operational complexity, AAA, DNS, DHCP, XML, or internationalization? If so, describe the review that took place. +(10) Several IETF Areas have assembled lists of common issues that their reviewers encounter. These are linked from https://trac.ietf.org/trac/iesg/wiki/ExpertTopics. Do any such issues remain that would merit specific attention from subsequent reviews? -(6) Describe any specific concerns or issues that the Document Shepherd has with this document that the Responsible Area Director and/or the IESG should be aware of? For example, perhaps he or she is uncomfortable with certain parts of the document, or has concerns whether there really is a need for it. In any event, if the interested community has discussed those issues and has indicated that it still wishes to advance the document, detail those concerns here. +(11) What type of RFC publication is being requested on the IETF stream (Best Current Practice, Proposed Standard, Internet Standard, Informational, Experimental, or Historic)? Why is this the proper type of RFC? Do all datatracker state attributes correctly reflect this intent? -(7) Has each author confirmed that any and all appropriate IPR disclosures required for full conformance with the provisions of BCP 78 and BCP 79 have already been filed. If not, explain why. +(12) Has the interested community confirmed that any and all appropriate IPR disclosures required by BCP 78 and BCP 79 have been filed? If not, explain why. 
If yes, summarize any discussion and conclusion regarding the intellectual property rights (IPR) disclosures, including links to relevant emails. -(8) Has an IPR disclosure been filed that references this document? If so, summarize any discussion and conclusion regarding the IPR disclosures. +(13) Has each Author or Contributor confirmed their willingness to be listed as such? If the number of Authors/Editors on the front page is greater than 5, please provide a justification. -(9) How solid is the consensus of the interested community behind this document? Does it represent the strong concurrence of a few individuals, with others being silent, or does the interested community as a whole understand and agree with it? +(14) Identify any remaining I-D nits in this document. (See http://www.ietf.org/tools/idnits/ and the checkbox items found in Guidelines to Authors of Internet-Drafts). Simply running the idnits tool is not enough; please review the entire guidelines document. -(10) Has anyone threatened an appeal or otherwise indicated extreme discontent? If so, please summarise the areas of conflict in separate email messages to the Responsible Area Director. (It should be in a separate email because this questionnaire is publicly available.) +(15) Should any informative references be normative or vice-versa? -(11) Identify any ID nits the Document Shepherd has found in this document. (See http://www.ietf.org/tools/idnits/ and the Internet-Drafts Checklist). Boilerplate checks are not enough; this check needs to be thorough. +(16) List any normative references that are not freely available to anyone. Did the community have sufficient access to review any such normative references? -(12) Describe how the document meets any required formal review criteria, such as the MIB Doctor, YANG Doctor, media type, and URI type reviews. +(17) Are there any normative downward references (see RFC 3967, BCP 97)? If so, list them. -(13) Have all references within this document been identified as either normative or informative? +(18) Are there normative references to documents that are not ready for advancement or are otherwise in an unclear state? If they exist, what is the plan for their completion? -(14) Are there normative references to documents that are not ready for advancement or are otherwise in an unclear state? If such normative references exist, what is the plan for their completion? +(19) Will publication of this document change the status of any existing RFCs? If so, does the Datatracker metadata correctly reflect this and are those RFCs listed on the title page, in the abstract, and discussed in the introduction? If not, explain why and point to the part of the document where the relationship of this document to these other RFCs is discussed. -(15) Are there downward normative references (see RFC 3967)? If so, list these downward references to support the Area Director in the Last Call procedure. +(20) Describe the document shepherd's review of the IANA considerations section, especially with regard to its consistency with the body of the document. Confirm that all aspects of the document requiring IANA assignments are associated with the appropriate reservations in IANA registries. Confirm that any referenced IANA registries have been clearly identified. Confirm that each newly created IANA registry specifies its initial contents, allocations procedures, and a reasonable name (see RFC 8126). -(16) Will publication of this document change the status of any existing RFCs? 
Are those RFCs listed on the title page header, listed in the abstract, and discussed in the introduction? If the RFCs are not listed in the Abstract and Introduction, explain why, and point to the part of the document where the relationship of this document to the other RFCs is discussed. If this information is not in the document, explain why the interested community considers it unnecessary. +(21) List any new IANA registries that require Designated Expert Review for future allocations. Are the instructions to the Designated Expert clear? Please include suggestions of designated experts, if appropriate. -(17) Describe the Document Shepherd's review of the IANA considerations section, especially with regard to its consistency with the body of the document. Confirm that all protocol extensions that the document makes are associated with the appropriate reservations in IANA registries. Confirm that any referenced IANA registries have been clearly identified. Confirm that newly created IANA registries include a detailed specification of the initial contents for the registry, that allocations procedures for future registrations are defined, and a reasonable name for the new registry has been suggested (see RFC 8126). - -(18) List any new IANA registries that require Expert Review for future allocations. Provide any public guidance that the IESG would find useful in selecting the IANA Experts for these new registries. - -(19) Describe reviews and automated checks performed by to validate sections of the document written in a formal language, such as XML code, BNF rules, MIB definitions, YANG modules, etc. - -(20) If the document contains a YANG module, has the module been checked with any of the recommended validation tools (https://trac.ietf.org/trac/ops/wiki/yang-review-tools) for syntax and formatting validation? If there are any resulting errors or warnings, what is the justification for not fixing them at this time? Does the YANG module comply with the Network Management Datastore Architecture (NMDA) as specified in RFC8342? -{% else %}As required by RFC 4858, this is the current template for the Document -Shepherd Write-Up. Changes are expected over time. - -This version is dated 1 November 2019. - -(1) What type of RFC is being requested (BCP, Proposed Standard, Internet Standard, Informational, Experimental, or Historic)? Why is this the proper type of RFC? Is this type of RFC indicated in the title page header? - -(2) The IESG approval announcement includes a Document Announcement Write-Up. Please provide such a Document Announcement Write-Up. Recent examples can be found in the "Action" announcements for approved documents. The approval announcement contains the following sections: - -Technical Summary: - -Relevant content can frequently be found in the abstract and/or introduction of the document. If not, this may be an indication that there are deficiencies in the abstract or introduction. - -Working Group Summary: - -Was there anything in WG process that is worth noting? For example, was there controversy about particular points or were there decisions where the consensus was particularly rough? - -Document Quality: - -Are there existing implementations of the protocol? Have a significant number of vendors indicated their plan to implement the specification? Are there any reviewers that merit special mention as having done a thorough review, e.g., one that resulted in important changes or a conclusion that the document had no substantive issues? 
If there was a MIB Doctor, YANG Doctor, Media Type or other expert review, what was its course (briefly)? In the case of a Media Type review, on what date was the request posted? - -Personnel: - -Who is the Document Shepherd? Who is the Responsible Area Director? - -(3) Briefly describe the review of this document that was performed by the Document Shepherd. If this version of the document is not ready for publication, please explain why the document is being forwarded to the IESG. - -(4) Does the document Shepherd have any concerns about the depth or breadth of the reviews that have been performed? - -(5) Do portions of the document need review from a particular or from broader perspective, e.g., security, operational complexity, AAA, DNS, DHCP, XML, or internationalization? If so, describe the review that took place. - -(6) Describe any specific concerns or issues that the Document Shepherd has with this document that the Responsible Area Director and/or the IESG should be aware of? For example, perhaps he or she is uncomfortable with certain parts of the document, or has concerns whether there really is a need for it. In any event, if the WG has discussed those issues and has indicated that it still wishes to advance the document, detail those concerns here. - -(7) Has each author confirmed that any and all appropriate IPR disclosures required for full conformance with the provisions of BCP 78 and BCP 79 have already been filed. If not, explain why? - -(8) Has an IPR disclosure been filed that references this document? If so, summarize any WG discussion and conclusion regarding the IPR disclosures. - -(9) How solid is the WG consensus behind this document? Does it represent the strong concurrence of a few individuals, with others being silent, or does the WG as a whole understand and agree with it? - -(10) Has anyone threatened an appeal or otherwise indicated extreme discontent? If so, please summarise the areas of conflict in separate email messages to the Responsible Area Director. (It should be in a separate email because this questionnaire is publicly available.) - -(11) Identify any ID nits the Document Shepherd has found in this document. (See http://www.ietf.org/tools/idnits/ and the Internet-Drafts Checklist). Boilerplate checks are not enough; this check needs to be thorough. - -(12) Describe how the document meets any required formal review criteria, such as the MIB Doctor, YANG Doctor, media type, and URI type reviews. - -(13) Have all references within this document been identified as either normative or informative? - -(14) Are there normative references to documents that are not ready for advancement or are otherwise in an unclear state? If such normative references exist, what is the plan for their completion? - -(15) Are there downward normative references (see RFC 3967)? If so, list these downward references to support the Area Director in the Last Call procedure. - -(16) Will publication of this document change the status of any existing RFCs? Are those RFCs listed on the title page header, listed in the abstract, and discussed in the introduction? If the RFCs are not listed in the Abstract and Introduction, explain why, and point to the part of the document where the relationship of this document to the other RFCs is discussed. If this information is not in the document, explain why the WG considers it unnecessary. 
- -(17) Describe the Document Shepherd's review of the IANA considerations section, especially with regard to its consistency with the body of the document. Confirm that all protocol extensions that the document makes are associated with the appropriate reservations in IANA registries. Confirm that any referenced IANA registries have been clearly identified. Confirm that newly created IANA registries include a detailed specification of the initial contents for the registry, that allocations procedures for future registrations are defined, and a reasonable name for the new registry has been suggested (see RFC 8126). - -(18) List any new IANA registries that require Expert Review for future allocations. Provide any public guidance that the IESG would find useful in selecting the IANA Experts for these new registries. - -(19) Describe reviews and automated checks performed by the Document Shepherd to validate sections of the document written in a formal language, such as XML code, BNF rules, MIB definitions, YANG modules, etc. - -(20) If the document contains a YANG module, has the module been checked with any of the recommended validation tools (https://trac.ietf.org/trac/ops/wiki/yang-review-tools) for syntax and formatting validation? If there are any resulting errors or warnings, what is the justification for not fixing them at this time? Does the YANG module comply with the Network Management Datastore Architecture (NMDA) as specified in RFC8342? -{% endif %}{% else %}There is no default shepherd writeup template for the {{doc.stream}} stream +{% else %}There is no default shepherd writeup template for the {{doc.stream}} stream {% endif %}{% else %}There is no stream set for this document (thus, no default shepherd writeup template) {% endif %} From a043bd5f8e873b2ee06223787e32223c7444799e Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 16 Mar 2022 13:22:10 -0400 Subject: [PATCH 27/41] chore: move hold-for-merge + ready-for-merge into dev/legacy --- hold-for-merge => dev/legacy/hold-for-merge | 0 ready-for-merge => dev/legacy/ready-for-merge | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename hold-for-merge => dev/legacy/hold-for-merge (100%) rename ready-for-merge => dev/legacy/ready-for-merge (100%) diff --git a/hold-for-merge b/dev/legacy/hold-for-merge similarity index 100% rename from hold-for-merge rename to dev/legacy/hold-for-merge diff --git a/ready-for-merge b/dev/legacy/ready-for-merge similarity index 100% rename from ready-for-merge rename to dev/legacy/ready-for-merge From efc87dc35b16b25210c64d703528c26669ab313d Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 16 Mar 2022 13:15:05 -0500 Subject: [PATCH 28/41] test: match changes to release/about * chore: bring docker-compose to root level (#3642) * test: match changes to release/about Co-authored-by: Nicolas Giard --- ietf/release/tests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ietf/release/tests.py b/ietf/release/tests.py index bb2d95100..d2e90ab43 100644 --- a/ietf/release/tests.py +++ b/ietf/release/tests.py @@ -29,7 +29,6 @@ class ReleasePagesTest(TestCase): text = q('#content').text() for word in ["About", "2.00", "3.00", "4.00", "5.0.0"]: self.assertIn(word, text) - self.assertGreater(len(q('#content a')), 16) def test_stats(self): url = reverse('ietf.release.views.stats') From 65799c9d1cbf24e13cc0e9663284120ae33d44bc Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 16 Mar 2022 21:23:01 -0400 Subject: [PATCH 29/41] ci: build base test docker image --- 
.github/workflows/build-test-base.yml | 36 ++++++++ dev/docker-test-base/Dockerfile | 120 ++++++++++++++++++++++++++ 2 files changed, 156 insertions(+) create mode 100644 .github/workflows/build-test-base.yml create mode 100644 dev/docker-test-base/Dockerfile diff --git a/.github/workflows/build-test-base.yml b/.github/workflows/build-test-base.yml new file mode 100644 index 000000000..e8c24c03d --- /dev/null +++ b/.github/workflows/build-test-base.yml @@ -0,0 +1,36 @@ +name: Build Base Test Docker Image + +on: + push: + branches: + # TEMPORARY: Replace with main once bs5 is merged + - 'feat/bs5' + paths: + - 'package.json' + - 'requirements.txt' + + workflow_dispatch: + +env: + REGISTRY: ghcr.io + IMAGE_NAME: datatracker-test-base + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - uses: actions/checkout@v2 + + - name: Docker Build & Push Action + uses: mr-smithers-excellent/docker-build-push@v5.6 + with: + image: ${{ env.IMAGE_NAME }} + tags: latest + registry: ${{ env.REGISTRY }} + dockerfile: dev/docker-test-base/Dockerfile + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} diff --git a/dev/docker-test-base/Dockerfile b/dev/docker-test-base/Dockerfile new file mode 100644 index 000000000..ca3e164a7 --- /dev/null +++ b/dev/docker-test-base/Dockerfile @@ -0,0 +1,120 @@ +FROM python:3.6-bullseye +LABEL maintainer="IETF Tools Team " + +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update +RUN apt-get -qy upgrade + +# Add Node.js Source +RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - + +# Install the packages we need +RUN apt-get install -qy \ + apache2-utils \ + apt-file \ + apt-utils \ + bash \ + build-essential \ + curl \ + enscript \ + gawk \ + gcc \ + ghostscript \ + git \ + gnupg \ + graphviz \ + jq \ + less \ + libcairo2-dev \ + libgtk2.0-0 \ + libgtk-3-0 \ + libnotify-dev \ + libgconf-2-4 \ + libgbm-dev \ + libnss3 \ + libxss1 \ + libasound2 \ + libxtst6 \ + libmagic-dev \ + libmariadb-dev \ + libtidy-dev \ + locales \ + mariadb-client \ + netcat \ + nodejs \ + pigz \ + pv \ + python3-ipython \ + ripgrep \ + rsync \ + rsyslog \ + ruby \ + ruby-rubygems \ + unzip \ + wget \ + xauth \ + xvfb \ + yang-tools + +# Install kramdown-rfc2629 (ruby) +RUN gem install kramdown-rfc2629 + +# Install chromedriver +COPY docker/scripts/app-install-chromedriver.sh /tmp/app-install-chromedriver.sh +RUN sed -i 's/\r$//' /tmp/app-install-chromedriver.sh && \ + chmod +x /tmp/app-install-chromedriver.sh +RUN /tmp/app-install-chromedriver.sh + +# Get rid of installation files we don't need in the image, to reduce size +RUN apt-get clean && rm -rf /var/lib/apt/lists/* + +# "fake" dbus address to prevent errors +# https://github.com/SeleniumHQ/docker-selenium/issues/87 +ENV DBUS_SESSION_BUS_ADDRESS=/dev/null + +# avoid million NPM install messages +ENV npm_config_loglevel warn +# allow installing when the main user is root +ENV npm_config_unsafe_perm true +# disable NPM funding messages +ENV npm_config_fund false + +# Set locale to en_US.UTF-8 +RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \ + echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \ + echo "LANG=en_US.UTF-8" > /etc/locale.conf && \ + dpkg-reconfigure locales && \ + locale-gen en_US.UTF-8 && \ + update-locale LC_ALL en_US.UTF-8 +ENV LC_ALL en_US.UTF-8 + +# Install idnits +ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/ +RUN chmod +rx /usr/local/bin/idnits + +# Install current datatracker python 
dependencies +COPY requirements.txt /tmp/pip-tmp/ +RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ + && rm -rf /tmp/pip-tmp + +# Turn off rsyslog kernel logging (doesn't work in Docker) +RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf + +# Fetch wait-for utility +ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/ +RUN chmod +rx /usr/local/bin/wait-for + +# Copy the startup file +COPY docker/scripts/app-init.sh /docker-init.sh +RUN sed -i 's/\r$//' /docker-init.sh && \ + chmod +x /docker-init.sh + +# Create workspace +RUN mkdir -p /workspace +WORKDIR /workspace + +# Install NPM modules +COPY package.json package.json +RUN npm install --no-audit +RUN rm -f package.json package-lock.json \ No newline at end of file From 4daeb3c6b61225ca8b754cab86836d1c5de92492 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 16 Mar 2022 21:23:51 -0400 Subject: [PATCH 30/41] ci: fix base test docker image --- .github/workflows/build-test-base.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-test-base.yml b/.github/workflows/build-test-base.yml index e8c24c03d..4feb6911e 100644 --- a/.github/workflows/build-test-base.yml +++ b/.github/workflows/build-test-base.yml @@ -26,11 +26,11 @@ jobs: - uses: actions/checkout@v2 - name: Docker Build & Push Action - uses: mr-smithers-excellent/docker-build-push@v5.6 - with: - image: ${{ env.IMAGE_NAME }} - tags: latest - registry: ${{ env.REGISTRY }} - dockerfile: dev/docker-test-base/Dockerfile - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + uses: mr-smithers-excellent/docker-build-push@v5.6 + with: + image: ${{ env.IMAGE_NAME }} + tags: latest + registry: ${{ env.REGISTRY }} + dockerfile: dev/docker-test-base/Dockerfile + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} From ec9f3ba57e8f30e3938613c6adb4d2e837adfeeb Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 17 Mar 2022 14:51:57 -0400 Subject: [PATCH 31/41] ci: add default db commands to dockerfile --- docker/db.Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/db.Dockerfile b/docker/db.Dockerfile index b9cc773ec..ec27bcd4a 100644 --- a/docker/db.Dockerfile +++ b/docker/db.Dockerfile @@ -58,3 +58,5 @@ LABEL maintainer="IETF Tools Team " # Copy the mysql data folder from the import stage COPY --from=importStage /var/lib/mysql /var/lib/mysql + +CMD ['--character-set-server=utf8', '--collation-server=utf8_unicode_ci', '--innodb-buffer-pool-size=1G', '--innodb-log-buffer-size=128M', '--innodb-log-file-size=256M', '--innodb-write-io-threads=8', '--innodb-flush-log-at-trx-commit=0', '--performance-schema=1'] From cee45d608bf7838f83f1066e6fc9ae09f01a8045 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 17 Mar 2022 17:03:37 -0400 Subject: [PATCH 32/41] ci: fix db docker image start command --- .github/workflows/dev-db-nightly.yml | 8 ++++++++ docker/db.Dockerfile | 3 ++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/dev-db-nightly.yml b/.github/workflows/dev-db-nightly.yml index c620fd7b3..e6e348f37 100644 --- a/.github/workflows/dev-db-nightly.yml +++ b/.github/workflows/dev-db-nightly.yml @@ -7,9 +7,17 @@ name: Nightly Dev DB Image # Controls when the workflow will run on: + # Run every night schedule: - cron: '0 0 * * *' + # Run on db.Dockerfile changes + push: + branches: + - main + paths: + - 'docker/db.Dockerfile' + # Allows you to run this workflow manually 
from the Actions tab workflow_dispatch: diff --git a/docker/db.Dockerfile b/docker/db.Dockerfile index ec27bcd4a..ed9db845d 100644 --- a/docker/db.Dockerfile +++ b/docker/db.Dockerfile @@ -59,4 +59,5 @@ LABEL maintainer="IETF Tools Team " # Copy the mysql data folder from the import stage COPY --from=importStage /var/lib/mysql /var/lib/mysql -CMD ['--character-set-server=utf8', '--collation-server=utf8_unicode_ci', '--innodb-buffer-pool-size=1G', '--innodb-log-buffer-size=128M', '--innodb-log-file-size=256M', '--innodb-write-io-threads=8', '--innodb-flush-log-at-trx-commit=0', '--performance-schema=1'] +ENTRYPOINT ['docker-entrypoint.sh'] +CMD ['mariadbd', '--character-set-server=utf8', '--collation-server=utf8_unicode_ci', '--innodb-buffer-pool-size=1G', '--innodb-log-buffer-size=128M', '--innodb-log-file-size=256M', '--innodb-write-io-threads=8', '--innodb-flush-log-at-trx-commit=0', '--performance-schema=1'] From 3c91773e3202c4d68de98e463dfc041f0df9bb9f Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 17 Mar 2022 18:33:26 -0300 Subject: [PATCH 33/41] test: remove unused import that causes test failure (#3667) --- ietf/utils/tests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py index 5e9a6e7d0..952f5ed85 100644 --- a/ietf/utils/tests.py +++ b/ietf/utils/tests.py @@ -5,7 +5,6 @@ import io import os.path import shutil -import sys import types from typing import Dict, List # pyflakes:ignore From 7fab7416468c09cb1883bba632ab88d047831fe6 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Thu, 17 Mar 2022 18:09:15 -0400 Subject: [PATCH 34/41] ci: revert db dockerfile --- docker/db.Dockerfile | 3 --- 1 file changed, 3 deletions(-) diff --git a/docker/db.Dockerfile b/docker/db.Dockerfile index ed9db845d..b9cc773ec 100644 --- a/docker/db.Dockerfile +++ b/docker/db.Dockerfile @@ -58,6 +58,3 @@ LABEL maintainer="IETF Tools Team " # Copy the mysql data folder from the import stage COPY --from=importStage /var/lib/mysql /var/lib/mysql - -ENTRYPOINT ['docker-entrypoint.sh'] -CMD ['mariadbd', '--character-set-server=utf8', '--collation-server=utf8_unicode_ci', '--innodb-buffer-pool-size=1G', '--innodb-log-buffer-size=128M', '--innodb-log-file-size=256M', '--innodb-write-io-threads=8', '--innodb-flush-log-at-trx-commit=0', '--performance-schema=1'] From b2febd788fe6d0eba50b0a0f774d1e0d1c0c755c Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Fri, 18 Mar 2022 16:34:18 -0400 Subject: [PATCH 35/41] chore: remove django settings module from devcontainer config (#3673) --- .devcontainer/docker-compose.extend.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.devcontainer/docker-compose.extend.yml b/.devcontainer/docker-compose.extend.yml index 794ac0c5b..310872634 100644 --- a/.devcontainer/docker-compose.extend.yml +++ b/.devcontainer/docker-compose.extend.yml @@ -4,9 +4,8 @@ services: app: environment: EDITOR_VSCODE: 1 - DJANGO_SETTINGS_MODULE: settings_local_sqlitetest volumes: - .:/workspace - /workspace/node_modules # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. 
- network_mode: service:db \ No newline at end of file + network_mode: service:db From 916a3a07e3b70336de237bdac74b90854ef6288a Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Sat, 19 Mar 2022 06:26:23 -0400 Subject: [PATCH 36/41] ci: add docker-cli to base test image --- dev/docker-test-base/Dockerfile | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/dev/docker-test-base/Dockerfile b/dev/docker-test-base/Dockerfile index ca3e164a7..39c49220c 100644 --- a/dev/docker-test-base/Dockerfile +++ b/dev/docker-test-base/Dockerfile @@ -3,13 +3,20 @@ LABEL maintainer="IETF Tools Team " ENV DEBIAN_FRONTEND=noninteractive + RUN apt-get update RUN apt-get -qy upgrade # Add Node.js Source RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash - +# Add Docker Source +RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg +RUN echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \ + $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null + # Install the packages we need +RUN apt-get update RUN apt-get install -qy \ apache2-utils \ apt-file \ @@ -17,6 +24,7 @@ RUN apt-get install -qy \ bash \ build-essential \ curl \ + docker-ce-cli \ enscript \ gawk \ gcc \ @@ -105,11 +113,6 @@ RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/ RUN chmod +rx /usr/local/bin/wait-for -# Copy the startup file -COPY docker/scripts/app-init.sh /docker-init.sh -RUN sed -i 's/\r$//' /docker-init.sh && \ - chmod +x /docker-init.sh - # Create workspace RUN mkdir -p /workspace WORKDIR /workspace @@ -117,4 +120,4 @@ WORKDIR /workspace # Install NPM modules COPY package.json package.json RUN npm install --no-audit -RUN rm -f package.json package-lock.json \ No newline at end of file +RUN rm -f package.json package-lock.json From 4dc024797698faab8ae21f634228dd3aefda50f1 Mon Sep 17 00:00:00 2001 From: Peter Yee Date: Sun, 20 Mar 2022 05:54:15 -0700 Subject: [PATCH 37/41] fix: use approver's identity instead of System (#3701) Fixes issue #3545 Conditionally tests if the approver is AnonymousUser, in which case System is still used --- ietf/submit/utils.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index 495759dd6..55801ae48 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -18,6 +18,7 @@ from django.db import transaction from django.http import HttpRequest # pyflakes:ignore from django.utils.module_loading import import_string from django.template.loader import render_to_string +from django.contrib.auth.models import AnonymousUser import debug # pyflakes:ignore @@ -330,11 +331,15 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc): # Add all the previous submission events as docevents events += post_rev00_submission_events(draft, submission, submitter) + if isinstance(request.user, AnonymousUser): + doer=system + else: + doer=request.user.person # Add an approval docevent e = SubmissionDocEvent.objects.create( type="new_submission", doc=draft, - by=system, + by=doer, desc=approved_doc_desc, submission=submission, rev=submission.rev, From d637b9025e5a51178767285b8b264faa6b4d1116 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Mon, 21 Mar 2022 14:26:32 -0400 Subject: [PATCH 38/41] docs: update README to point to 
release page --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9cfc03adf..6c61fd9af 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ - [**Production Website**](https://datatracker.ietf.org) -- [Changelog](https://github.com/ietf-tools/datatracker/blob/main/CHANGELOG.md) +- [Changelog](https://github.com/ietf-tools/datatracker/releases) - [Contributing](https://github.com/ietf-tools/.github/blob/main/CONTRIBUTING.md) - [Getting Started](#getting-started) - [Git Cloning Tips](#git-cloning-tips) From ffb6904b09458bb74b6263e725df21caa491dd8f Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Mon, 21 Mar 2022 14:44:58 -0400 Subject: [PATCH 39/41] ci: inject release hash + branch during build --- .github/workflows/build.yml | 2 ++ ietf/__init__.py | 8 +++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fc79952e9..885d33701 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -89,6 +89,8 @@ jobs: sh ./dev/deploy/build.sh echo "Setting version $PKG_VERSION_STRICT..." sed -i -r -e "s/^__version__ += '.*'$/__version__ = '$PKG_VERSION_STRICT'/" ietf/__init__.py + sed -i -r -e "s/^__release_hash__ += '.*'$/__release_hash__ = '$GITHUB_SHA'/" ietf/__init__.py + sed -i -r -e "s/^__release_branch__ += '.*'$/__release_branch__ = '$GITHUB_REF_NAME'/" ietf/__init__.py echo "Build release tarball..." mkdir -p /home/runner/work/release tar -czf /home/runner/work/release/release.tar.gz -X dev/deploy/exclude-patterns.txt . diff --git a/ietf/__init__.py b/ietf/__init__.py index 5e55e8c20..69d0844c1 100644 --- a/ietf/__init__.py +++ b/ietf/__init__.py @@ -6,7 +6,13 @@ from . import checks # pyflakes:ignore # Version must stay in single quotes for automatic CI replace # Don't add patch number here: -__version__ = '7.46.1.dev0' +__version__ = '7.0.0-dev' + +# Release hash must stay in single quotes for automatic CI replace +__release_hash__ = '' + +# Release branch must stay in single quotes for automatic CI replace +__release_branch__ = '' # set this to ".p1", ".p2", etc. after patching __patch__ = "" From a19802260c1c31555b2688bb15446d58f0616f59 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 22 Mar 2022 08:30:15 -0400 Subject: [PATCH 40/41] chore: allow custom port for dev docker run (#3722) --- .gitignore | 3 ++- docker/docker-compose.extend.yml | 2 +- docker/run | 37 +++++++++++++++++++++++++++++++- 3 files changed, 39 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 3c76eff85..7de6c6582 100644 --- a/.gitignore +++ b/.gitignore @@ -53,4 +53,5 @@ *.pyc __pycache__ node_modules -ietf/static/ietf/bootstrap \ No newline at end of file +ietf/static/ietf/bootstrap +/docker/docker-compose.extend-custom.yml \ No newline at end of file diff --git a/docker/docker-compose.extend.yml b/docker/docker-compose.extend.yml index 542ff2faa..489947f97 100644 --- a/docker/docker-compose.extend.yml +++ b/docker/docker-compose.extend.yml @@ -3,7 +3,7 @@ version: '3.8' services: app: ports: - - '8000:8000' + - 'CUSTOM_PORT:8000' volumes: - .:/workspace - /workspace/node_modules diff --git a/docker/run b/docker/run index 319d07166..9dc80ece5 100755 --- a/docker/run +++ b/docker/run @@ -1,7 +1,42 @@ #!/bin/bash +# Usage info +show_help() { + cat << EOF +Usage: ${0##*/} [-h] [-p PORT] +Run datatracker in dev containers using docker-compose. 
+ + -h display this help and exit + -p PORT use custom HTTP port for datatracker + +EOF +} + +CUSTOM_PORT=8000 + +while getopts hp: opt; do + case $opt in + h) + show_help + exit 0 + ;; + p) + CUSTOM_PORT=$OPTARG + echo "Using custom port $CUSTOM_PORT..." + ;; + *) + CUSTOM_PORT=8000 + echo "Using port 8000..." + ;; + esac +done + +cp docker-compose.extend.yml docker-compose.extend-custom.yml +sed -i -r -e "s/CUSTOM_PORT/$CUSTOM_PORT/" docker-compose.extend-custom.yml cd .. -docker-compose -f docker-compose.yml -f docker/docker-compose.extend.yml up -d +docker-compose -f docker-compose.yml -f docker/docker-compose.extend-custom.yml up -d +docker-compose port db 3306 docker-compose exec app /bin/sh /docker-init.sh docker-compose stop cd docker +rm -f docker-compose.extend-custom.yml From 902e37d24d45c72ad401e761d4ef44af5cf20d03 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 22 Mar 2022 13:04:50 -0500 Subject: [PATCH 41/41] fix: improve looking into submission request for remote ip. (#3720) --- ietf/submit/forms.py | 4 ++-- ietf/submit/utils.py | 9 +++++++++ ietf/submit/views.py | 4 ++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/ietf/submit/forms.py b/ietf/submit/forms.py index e935a8072..a00faab3c 100644 --- a/ietf/submit/forms.py +++ b/ietf/submit/forms.py @@ -33,7 +33,7 @@ from ietf.meeting.models import Meeting from ietf.message.models import Message from ietf.name.models import FormalLanguageName, GroupTypeName from ietf.submit.models import Submission, Preapproval -from ietf.submit.utils import validate_submission_name, validate_submission_rev, validate_submission_document_date +from ietf.submit.utils import validate_submission_name, validate_submission_rev, validate_submission_document_date, remote_ip from ietf.submit.parsers.pdf_parser import PDFParser from ietf.submit.parsers.plain_parser import PlainParser from ietf.submit.parsers.xml_parser import XMLParser @@ -47,7 +47,7 @@ class SubmissionBaseUploadForm(forms.Form): def __init__(self, request, *args, **kwargs): super(SubmissionBaseUploadForm, self).__init__(*args, **kwargs) - self.remote_ip = request.META.get('REMOTE_ADDR', None) + self.remote_ip = remote_ip(request) self.request = request self.in_first_cut_off = False diff --git a/ietf/submit/utils.py b/ietf/submit/utils.py index 55801ae48..f1cbe35e1 100644 --- a/ietf/submit/utils.py +++ b/ietf/submit/utils.py @@ -1019,3 +1019,12 @@ def update_submission_external_resources(submission, new_resources): for new_res in new_resources: new_res.submission = submission new_res.save() + +def remote_ip(request): + if 'CF-Connecting-IP' in request.META: + remote_ip = request.META.get('CF-Connecting-IP') + elif 'X-Forwarded-For' in request.META: + remote_ip = request.META.get('X-Forwarded-For').split(',')[0] + else: + remote_ip = request.META.get('REMOTE_ADDR', None) + return remote_ip diff --git a/ietf/submit/views.py b/ietf/submit/views.py index 66c7eadf3..9eb303eb2 100644 --- a/ietf/submit/views.py +++ b/ietf/submit/views.py @@ -39,7 +39,7 @@ from ietf.submit.utils import ( approvable_submissions_for_user, preapprovals_fo post_submission, cancel_submission, rename_submission_files, remove_submission_files, get_draft_meta, get_submission, fill_in_submission, apply_checkers, save_files, check_submission_revision_consistency, accept_submission, accept_submission_requires_group_approval, - accept_submission_requires_prev_auth_approval, update_submission_external_resources ) + accept_submission_requires_prev_auth_approval, update_submission_external_resources, 
remote_ip ) from ietf.stats.utils import clean_country_name from ietf.utils.accesstoken import generate_access_token from ietf.utils.log import log @@ -729,7 +729,7 @@ def add_manualpost_email(request, submission_id=None, access_token=None): submission, submission_email_event = ( add_submission_email(request=request, - remote_ip=request.META.get('REMOTE_ADDR', None), + remote_ip=remote_ip(request), name = form.draft_name, rev=form.revision, submission_pk = submission_pk,
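The remote_ip() helper added above prefers Cloudflare's CF-Connecting-IP header, then the first (client-most) address in X-Forwarded-For, and only falls back to REMOTE_ADDR. A minimal sketch of that precedence, assuming a configured datatracker environment; the stand-in request object and the example addresses are illustrative only, since the helper consults nothing but request.META:

    from types import SimpleNamespace
    from ietf.submit.utils import remote_ip   # helper added by this patch

    req = SimpleNamespace(META={'REMOTE_ADDR': '10.0.0.1'})
    assert remote_ip(req) == '10.0.0.1'                    # direct connection
    req.META['X-Forwarded-For'] = '203.0.113.7, 10.0.0.1'
    assert remote_ip(req) == '203.0.113.7'                 # first proxy-reported hop
    req.META['CF-Connecting-IP'] = '198.51.100.9'
    assert remote_ip(req) == '198.51.100.9'                # Cloudflare header wins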