diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index fca50fc4b..83d61fde8 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -77,7 +77,7 @@ jobs:
         echo "NEXT_VERSION=$nextStrict" >> $GITHUB_ENV
         
     - name: Create Draft Release
-      uses: ncipollo/release-action@v1.13.0
+      uses: ncipollo/release-action@v1.14.0
       if: ${{ github.ref_name == 'release' }}
       with:
         prerelease: true
@@ -150,7 +150,7 @@ jobs:
         
     - name: Download a Coverage Results
       if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }}
-      uses: actions/download-artifact@v4.1.1
+      uses: actions/download-artifact@v4.1.2
       with:
         name: coverage
 
@@ -267,7 +267,7 @@ jobs:
         histCoveragePath: historical-coverage.json
         
     - name: Create Release
-      uses: ncipollo/release-action@v1.13.0
+      uses: ncipollo/release-action@v1.14.0
       if: ${{ env.SHOULD_DEPLOY == 'true' }}
       with:
         allowUpdates: true
@@ -280,7 +280,7 @@ jobs:
         token: ${{ secrets.GITHUB_TOKEN }}
 
     - name: Update Baseline Coverage
-      uses: ncipollo/release-action@v1.13.0
+      uses: ncipollo/release-action@v1.14.0
       if: ${{ github.event.inputs.updateCoverage == 'true' || github.ref_name == 'release' }}
       with:
         allowUpdates: true
@@ -374,7 +374,7 @@ jobs:
     - uses: actions/checkout@v4
         
     - name: Download a Release Artifact
-      uses: actions/download-artifact@v4.1.1
+      uses: actions/download-artifact@v4.1.2
       with:
         name: release-${{ env.PKG_VERSION }}
         
@@ -410,7 +410,7 @@ jobs:
       
     steps:
     - name: Download a Release Artifact
-      uses: actions/download-artifact@v4.1.1
+      uses: actions/download-artifact@v4.1.2
       with:
         name: release-${{ env.PKG_VERSION }}
         path: /a/www/ietf-datatracker/main.dev.${{ github.run_number }}
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 6aa69ca52..ca8e8f614 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -59,7 +59,7 @@ jobs:
         path: geckodriver.log
 
     - name: Upload Coverage Results to Codecov
-      uses: codecov/codecov-action@v3.1.5
+      uses: codecov/codecov-action@v4.0.1
       with:
         files: coverage.xml
         
diff --git a/.pnp.cjs b/.pnp.cjs
index 5e79c7894..3e42bea09 100644
--- a/.pnp.cjs
+++ b/.pnp.cjs
@@ -54,7 +54,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["browser-fs-access", "npm:0.35.0"],\
             ["browserlist", "npm:1.0.1"],\
             ["c8", "npm:9.1.0"],\
-            ["caniuse-lite", "npm:1.0.30001581"],\
+            ["caniuse-lite", "npm:1.0.30001588"],\
             ["d3", "npm:7.8.5"],\
             ["eslint", "npm:8.56.0"],\
             ["eslint-config-standard", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:17.1.0"],\
@@ -63,7 +63,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2"],\
             ["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\
             ["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\
-            ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.20.1"],\
+            ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.21.1"],\
             ["file-saver", "npm:2.0.5"],\
             ["highcharts", "npm:11.3.0"],\
             ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.9.1"],\
@@ -76,7 +76,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["lodash-es", "npm:4.17.21"],\
             ["luxon", "npm:3.4.4"],\
             ["moment", "npm:2.30.1"],\
-            ["moment-timezone", "npm:0.5.44"],\
+            ["moment-timezone", "npm:0.5.45"],\
             ["ms", "npm:2.1.3"],\
             ["murmurhash-js", "npm:1.0.0"],\
             ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.37.3"],\
@@ -84,7 +84,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["pinia", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.1.7"],\
             ["pinia-plugin-persist", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:1.0.0"],\
             ["pug", "npm:3.0.2"],\
-            ["sass", "npm:1.70.0"],\
+            ["sass", "npm:1.71.0"],\
             ["seedrandom", "npm:3.0.5"],\
             ["select2", "npm:4.1.0-rc.0"],\
             ["select2-bootstrap-5-theme", "npm:1.3.0"],\
@@ -94,7 +94,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["sortablejs", "npm:1.15.2"],\
             ["vanillajs-datepicker", "npm:1.3.4"],\
             ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.2"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"],\
             ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5"],\
             ["zxcvbn", "npm:4.4.2"]\
           ],\
@@ -150,10 +150,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
           ],\
           "linkType": "HARD"\
         }],\
-        ["npm:7.23.6", {\
-          "packageLocation": "./.yarn/cache/@babel-parser-npm-7.23.6-2fad283d6e-140801c437.zip/node_modules/@babel/parser/",\
+        ["npm:7.23.9", {\
+          "packageLocation": "./.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip/node_modules/@babel/parser/",\
           "packageDependencies": [\
-            ["@babel/parser", "npm:7.23.6"],\
+            ["@babel/parser", "npm:7.23.9"],\
             ["@babel/types", "npm:7.18.4"]\
           ],\
           "linkType": "HARD"\
@@ -231,7 +231,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
           "packageDependencies": [\
             ["@css-render/vue3-ssr", "virtual:535ce3a5bf8429bbdd476b0f4bedb68cb91a1d57eac35720679464b7eeafc062414751fda54be317bf7e7886eec3b33992730a480671dc4d6974fd45406b1082#npm:0.15.10"],\
             ["@types/vue", null],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -244,7 +244,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
           "packageDependencies": [\
             ["@css-render/vue3-ssr", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.15.12"],\
             ["@types/vue", null],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -744,7 +744,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["@fullcalendar/core", "npm:6.1.10"],\
             ["@types/fullcalendar__core", null],\
             ["@types/vue", null],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@fullcalendar/core",\
@@ -2708,7 +2708,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["@types/vite", null],\
             ["@types/vue", null],\
             ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.2"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vite",\
@@ -2720,12 +2720,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["@vue/compiler-core", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-compiler-core-npm-3.4.15-4f131dda24-1610f715b8.zip/node_modules/@vue/compiler-core/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-compiler-core-npm-3.4.19-d8490343cd-92fbcc52c0.zip/node_modules/@vue/compiler-core/",\
           "packageDependencies": [\
-            ["@vue/compiler-core", "npm:3.4.15"],\
-            ["@babel/parser", "npm:7.23.6"],\
-            ["@vue/shared", "npm:3.4.15"],\
+            ["@vue/compiler-core", "npm:3.4.19"],\
+            ["@babel/parser", "npm:7.23.9"],\
+            ["@vue/shared", "npm:3.4.19"],\
             ["entities", "npm:4.5.0"],\
             ["estree-walker", "npm:2.0.2"],\
             ["source-map-js", "npm:1.0.2"]\
@@ -2734,28 +2734,28 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["@vue/compiler-dom", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-compiler-dom-npm-3.4.15-8299b45d96-373968c2c6.zip/node_modules/@vue/compiler-dom/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-compiler-dom-npm-3.4.19-4dd285f289-b74c620c40.zip/node_modules/@vue/compiler-dom/",\
           "packageDependencies": [\
-            ["@vue/compiler-dom", "npm:3.4.15"],\
-            ["@vue/compiler-core", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"]\
+            ["@vue/compiler-dom", "npm:3.4.19"],\
+            ["@vue/compiler-core", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"]\
           ],\
           "linkType": "HARD"\
         }]\
       ]],\
       ["@vue/compiler-sfc", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-compiler-sfc-npm-3.4.15-3d3ce9fc16-4a707346c3.zip/node_modules/@vue/compiler-sfc/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-compiler-sfc-npm-3.4.19-7ebf5c3b28-d622207fdb.zip/node_modules/@vue/compiler-sfc/",\
           "packageDependencies": [\
-            ["@vue/compiler-sfc", "npm:3.4.15"],\
-            ["@babel/parser", "npm:7.23.6"],\
-            ["@vue/compiler-core", "npm:3.4.15"],\
-            ["@vue/compiler-dom", "npm:3.4.15"],\
-            ["@vue/compiler-ssr", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"],\
+            ["@vue/compiler-sfc", "npm:3.4.19"],\
+            ["@babel/parser", "npm:7.23.9"],\
+            ["@vue/compiler-core", "npm:3.4.19"],\
+            ["@vue/compiler-dom", "npm:3.4.19"],\
+            ["@vue/compiler-ssr", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"],\
             ["estree-walker", "npm:2.0.2"],\
-            ["magic-string", "npm:0.30.5"],\
+            ["magic-string", "npm:0.30.7"],\
             ["postcss", "npm:8.4.33"],\
             ["source-map-js", "npm:1.0.2"]\
           ],\
@@ -2763,12 +2763,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["@vue/compiler-ssr", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-compiler-ssr-npm-3.4.15-05dd3d13a5-45a12ae2dd.zip/node_modules/@vue/compiler-ssr/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-compiler-ssr-npm-3.4.19-5732449e1f-b4599560fd.zip/node_modules/@vue/compiler-ssr/",\
           "packageDependencies": [\
-            ["@vue/compiler-ssr", "npm:3.4.15"],\
-            ["@vue/compiler-dom", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"]\
+            ["@vue/compiler-ssr", "npm:3.4.19"],\
+            ["@vue/compiler-dom", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"]\
           ],\
           "linkType": "HARD"\
         }]\
@@ -2783,54 +2783,54 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["@vue/reactivity", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-reactivity-npm-3.4.15-fde29aa046-e1f8ef7ec3.zip/node_modules/@vue/reactivity/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-reactivity-npm-3.4.19-6434952da4-67f6264792.zip/node_modules/@vue/reactivity/",\
           "packageDependencies": [\
-            ["@vue/reactivity", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"]\
+            ["@vue/reactivity", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"]\
           ],\
           "linkType": "HARD"\
         }]\
       ]],\
       ["@vue/runtime-core", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-runtime-core-npm-3.4.15-b9057fef14-6ab6721410.zip/node_modules/@vue/runtime-core/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-runtime-core-npm-3.4.19-2318784b1f-7303ec2585.zip/node_modules/@vue/runtime-core/",\
           "packageDependencies": [\
-            ["@vue/runtime-core", "npm:3.4.15"],\
-            ["@vue/reactivity", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"]\
+            ["@vue/runtime-core", "npm:3.4.19"],\
+            ["@vue/reactivity", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"]\
           ],\
           "linkType": "HARD"\
         }]\
       ]],\
       ["@vue/runtime-dom", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-runtime-dom-npm-3.4.15-7dfc9b71f4-4f2e79d956.zip/node_modules/@vue/runtime-dom/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-runtime-dom-npm-3.4.19-e17ab14a02-0afdb7b383.zip/node_modules/@vue/runtime-dom/",\
           "packageDependencies": [\
-            ["@vue/runtime-dom", "npm:3.4.15"],\
-            ["@vue/runtime-core", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"],\
+            ["@vue/runtime-dom", "npm:3.4.19"],\
+            ["@vue/runtime-core", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"],\
             ["csstype", "npm:3.1.3"]\
           ],\
           "linkType": "HARD"\
         }]\
       ]],\
       ["@vue/server-renderer", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-server-renderer-npm-3.4.15-fd81b21d4f-de93ccffe7.zip/node_modules/@vue/server-renderer/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-server-renderer-npm-3.4.19-cd67ced293-ae270a7269.zip/node_modules/@vue/server-renderer/",\
           "packageDependencies": [\
-            ["@vue/server-renderer", "npm:3.4.15"]\
+            ["@vue/server-renderer", "npm:3.4.19"]\
           ],\
           "linkType": "SOFT"\
         }],\
-        ["virtual:22db5c00fc66102c519417539d30aa289c17b3734eaa2f7ecaa126181d222b35d01ed6523b175bd3a8e0b244322f685e795d810e50df8db08ab29d82533296a8#npm:3.4.15", {\
-          "packageLocation": "./.yarn/__virtual__/@vue-server-renderer-virtual-66d8b02a0a/0/cache/@vue-server-renderer-npm-3.4.15-fd81b21d4f-de93ccffe7.zip/node_modules/@vue/server-renderer/",\
+        ["virtual:4a00eaa92881d899c4bfd888f27f491cbd205f41f1b80ddbe244d9ec5f873ecb404c076848bc49c7c1cccfc64cd782632063852fa7617c2e3becac25e6bea510#npm:3.4.19", {\
+          "packageLocation": "./.yarn/__virtual__/@vue-server-renderer-virtual-8668139ea7/0/cache/@vue-server-renderer-npm-3.4.19-cd67ced293-ae270a7269.zip/node_modules/@vue/server-renderer/",\
           "packageDependencies": [\
-            ["@vue/server-renderer", "virtual:22db5c00fc66102c519417539d30aa289c17b3734eaa2f7ecaa126181d222b35d01ed6523b175bd3a8e0b244322f685e795d810e50df8db08ab29d82533296a8#npm:3.4.15"],\
+            ["@vue/server-renderer", "virtual:4a00eaa92881d899c4bfd888f27f491cbd205f41f1b80ddbe244d9ec5f873ecb404c076848bc49c7c1cccfc64cd782632063852fa7617c2e3becac25e6bea510#npm:3.4.19"],\
             ["@types/vue", null],\
-            ["@vue/compiler-ssr", "npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["@vue/compiler-ssr", "npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -2840,10 +2840,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["@vue/shared", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/@vue-shared-npm-3.4.15-638dcb7e89-237db3a880.zip/node_modules/@vue/shared/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/@vue-shared-npm-3.4.19-adf9fa6e28-676c2ec007.zip/node_modules/@vue/shared/",\
           "packageDependencies": [\
-            ["@vue/shared", "npm:3.4.15"]\
+            ["@vue/shared", "npm:3.4.19"]\
           ],\
           "linkType": "HARD"\
         }]\
@@ -3443,10 +3443,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
           ],\
           "linkType": "HARD"\
         }],\
-        ["npm:1.0.30001581", {\
-          "packageLocation": "./.yarn/cache/caniuse-lite-npm-1.0.30001581-7909cc6e66-ca4e2cd9d0.zip/node_modules/caniuse-lite/",\
+        ["npm:1.0.30001588", {\
+          "packageLocation": "./.yarn/cache/caniuse-lite-npm-1.0.30001588-4c3a70951f-2ab5fcec8f.zip/node_modules/caniuse-lite/",\
           "packageDependencies": [\
-            ["caniuse-lite", "npm:1.0.30001581"]\
+            ["caniuse-lite", "npm:1.0.30001588"]\
           ],\
           "linkType": "HARD"\
         }]\
@@ -5042,17 +5042,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["eslint-plugin-vue", [\
-        ["npm:9.20.1", {\
-          "packageLocation": "./.yarn/cache/eslint-plugin-vue-npm-9.20.1-ec1d3386bd-fe50f4b842.zip/node_modules/eslint-plugin-vue/",\
+        ["npm:9.21.1", {\
+          "packageLocation": "./.yarn/cache/eslint-plugin-vue-npm-9.21.1-7f8f9cf843-3fe29a7062.zip/node_modules/eslint-plugin-vue/",\
           "packageDependencies": [\
-            ["eslint-plugin-vue", "npm:9.20.1"]\
+            ["eslint-plugin-vue", "npm:9.21.1"]\
           ],\
           "linkType": "SOFT"\
         }],\
-        ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.20.1", {\
-          "packageLocation": "./.yarn/__virtual__/eslint-plugin-vue-virtual-2b4f27d06a/0/cache/eslint-plugin-vue-npm-9.20.1-ec1d3386bd-fe50f4b842.zip/node_modules/eslint-plugin-vue/",\
+        ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.21.1", {\
+          "packageLocation": "./.yarn/__virtual__/eslint-plugin-vue-virtual-b75de6babf/0/cache/eslint-plugin-vue-npm-9.21.1-7f8f9cf843-3fe29a7062.zip/node_modules/eslint-plugin-vue/",\
           "packageDependencies": [\
-            ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.20.1"],\
+            ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.21.1"],\
             ["@eslint-community/eslint-utils", "virtual:6eec398a4132b5372ea5ffc0bc36d4c81602b7e444a89685d0d958016d8fd53df5c0c97c6a8bf99951469e2c6c06135dd192e9309f6e39b1a4c85e0faabe1f6b#npm:4.4.0"],\
             ["@types/eslint", null],\
             ["eslint", "npm:8.56.0"],\
@@ -5060,7 +5060,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["nth-check", "npm:2.1.1"],\
             ["postcss-selector-parser", "npm:6.0.13"],\
             ["semver", "npm:7.5.4"],\
-            ["vue-eslint-parser", "virtual:2b4f27d06a7cfda3abc993d9bc9ad4ae1e096241fb2f2e268ef190ab6fde2cae6795f9fa1e60e98bb9033ee6da1528cb45e9f336be633f59db8da1d4778c6a5c#npm:9.4.0"],\
+            ["vue-eslint-parser", "virtual:b75de6babfc82866444f6e4bb38628e1f23fbdc3141dadeeae372874c6f2a8214269c8e41ea4e063c99eb4bd451512d2674515cea245e6541b2bf98231a3a9e5#npm:9.4.2"],\
             ["xml-name-validator", "npm:4.0.0"]\
           ],\
           "packagePeers": [\
@@ -6845,10 +6845,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["magic-string", [\
-        ["npm:0.30.5", {\
-          "packageLocation": "./.yarn/cache/magic-string-npm-0.30.5-dffb7e6a73-da10fecff0.zip/node_modules/magic-string/",\
+        ["npm:0.30.7", {\
+          "packageLocation": "./.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip/node_modules/magic-string/",\
           "packageDependencies": [\
-            ["magic-string", "npm:0.30.5"],\
+            ["magic-string", "npm:0.30.7"],\
             ["@jridgewell/sourcemap-codec", "npm:1.4.15"]\
           ],\
           "linkType": "HARD"\
@@ -7064,10 +7064,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["moment-timezone", [\
-        ["npm:0.5.44", {\
-          "packageLocation": "./.yarn/cache/moment-timezone-npm-0.5.44-16af7889fb-2f1de58f14.zip/node_modules/moment-timezone/",\
+        ["npm:0.5.45", {\
+          "packageLocation": "./.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip/node_modules/moment-timezone/",\
           "packageDependencies": [\
-            ["moment-timezone", "npm:0.5.44"],\
+            ["moment-timezone", "npm:0.5.45"],\
             ["moment", "npm:2.29.4"]\
           ],\
           "linkType": "HARD"\
@@ -7194,7 +7194,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["treemate", "npm:0.3.11"],\
             ["vdirs", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.1.8"],\
             ["vooks", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.2.12"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"],\
             ["vueuc", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.4.58"]\
           ],\
           "packagePeers": [\
@@ -7690,7 +7690,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["@vue/composition-api", null],\
             ["@vue/devtools-api", "npm:6.5.0"],\
             ["typescript", null],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"],\
             ["vue-demi", "virtual:cf6f7439ee76dfd2e7f8f2565ae847d76901434fc49c65702190cdf3d1c61e61c701a5c45b514c4bdeacb8f4bcac9c8a98bd4db3d0bc8e403d9e8db2cf14372a#npm:0.14.5"]\
           ],\
           "packagePeers": [\
@@ -7721,7 +7721,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["@types/vue__composition-api", null],\
             ["@vue/composition-api", null],\
             ["pinia", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.1.7"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"],\
             ["vue-demi", "virtual:f56fcf19bbebc2ada1b28955da8cc216b1e9a569a1a7337d2d1926c1ebd1bc7a5bd91aedae1d05c15c8562f33caf7c59bd3020a667340f6bdc6a7b13fc2ba847#npm:0.12.5"]\
           ],\
           "packagePeers": [\
@@ -8265,7 +8265,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["browser-fs-access", "npm:0.35.0"],\
             ["browserlist", "npm:1.0.1"],\
             ["c8", "npm:9.1.0"],\
-            ["caniuse-lite", "npm:1.0.30001581"],\
+            ["caniuse-lite", "npm:1.0.30001588"],\
             ["d3", "npm:7.8.5"],\
             ["eslint", "npm:8.56.0"],\
             ["eslint-config-standard", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:17.1.0"],\
@@ -8274,7 +8274,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["eslint-plugin-n", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:16.6.2"],\
             ["eslint-plugin-node", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:11.1.0"],\
             ["eslint-plugin-promise", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:6.1.1"],\
-            ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.20.1"],\
+            ["eslint-plugin-vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:9.21.1"],\
             ["file-saver", "npm:2.0.5"],\
             ["highcharts", "npm:11.3.0"],\
             ["html-validate", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:8.9.1"],\
@@ -8287,7 +8287,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["lodash-es", "npm:4.17.21"],\
             ["luxon", "npm:3.4.4"],\
             ["moment", "npm:2.30.1"],\
-            ["moment-timezone", "npm:0.5.44"],\
+            ["moment-timezone", "npm:0.5.45"],\
             ["ms", "npm:2.1.3"],\
             ["murmurhash-js", "npm:1.0.0"],\
             ["naive-ui", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.37.3"],\
@@ -8295,7 +8295,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["pinia", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:2.1.7"],\
             ["pinia-plugin-persist", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:1.0.0"],\
             ["pug", "npm:3.0.2"],\
-            ["sass", "npm:1.70.0"],\
+            ["sass", "npm:1.71.0"],\
             ["seedrandom", "npm:3.0.5"],\
             ["select2", "npm:4.1.0-rc.0"],\
             ["select2-bootstrap-5-theme", "npm:1.3.0"],\
@@ -8305,7 +8305,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["sortablejs", "npm:1.15.2"],\
             ["vanillajs-datepicker", "npm:1.3.4"],\
             ["vite", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.5.2"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"],\
             ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5"],\
             ["zxcvbn", "npm:4.4.2"]\
           ],\
@@ -8392,10 +8392,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
           ],\
           "linkType": "HARD"\
         }],\
-        ["npm:1.70.0", {\
-          "packageLocation": "./.yarn/cache/sass-npm-1.70.0-153257249c-fd1b622cf9.zip/node_modules/sass/",\
+        ["npm:1.71.0", {\
+          "packageLocation": "./.yarn/cache/sass-npm-1.71.0-38088726c4-5ba6b4b994.zip/node_modules/sass/",\
           "packageDependencies": [\
-            ["sass", "npm:1.70.0"],\
+            ["sass", "npm:1.71.0"],\
             ["chokidar", "npm:3.5.3"],\
             ["immutable", "npm:4.0.0"],\
             ["source-map-js", "npm:1.0.2"]\
@@ -9176,7 +9176,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["vdirs", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.1.8"],\
             ["@types/vue", null],\
             ["evtd", "npm:0.2.3"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -9210,7 +9210,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["lightningcss", null],\
             ["postcss", "npm:8.4.33"],\
             ["rollup", "npm:3.29.4"],\
-            ["sass", "npm:1.70.0"],\
+            ["sass", "npm:1.71.0"],\
             ["stylus", null],\
             ["sugarss", null],\
             ["terser", null]\
@@ -9256,7 +9256,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["vooks", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.2.12"],\
             ["@types/vue", null],\
             ["evtd", "npm:0.2.3"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -9266,23 +9266,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["vue", [\
-        ["npm:3.4.15", {\
-          "packageLocation": "./.yarn/cache/vue-npm-3.4.15-11fe9fcc84-6e9ff02c9b.zip/node_modules/vue/",\
+        ["npm:3.4.19", {\
+          "packageLocation": "./.yarn/cache/vue-npm-3.4.19-79f858aa3d-8c83a8097d.zip/node_modules/vue/",\
           "packageDependencies": [\
-            ["vue", "npm:3.4.15"]\
+            ["vue", "npm:3.4.19"]\
           ],\
           "linkType": "SOFT"\
         }],\
-        ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15", {\
-          "packageLocation": "./.yarn/__virtual__/vue-virtual-22db5c00fc/0/cache/vue-npm-3.4.15-11fe9fcc84-6e9ff02c9b.zip/node_modules/vue/",\
+        ["virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19", {\
+          "packageLocation": "./.yarn/__virtual__/vue-virtual-4a00eaa928/0/cache/vue-npm-3.4.19-79f858aa3d-8c83a8097d.zip/node_modules/vue/",\
           "packageDependencies": [\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"],\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"],\
             ["@types/typescript", null],\
-            ["@vue/compiler-dom", "npm:3.4.15"],\
-            ["@vue/compiler-sfc", "npm:3.4.15"],\
-            ["@vue/runtime-dom", "npm:3.4.15"],\
-            ["@vue/server-renderer", "virtual:22db5c00fc66102c519417539d30aa289c17b3734eaa2f7ecaa126181d222b35d01ed6523b175bd3a8e0b244322f685e795d810e50df8db08ab29d82533296a8#npm:3.4.15"],\
-            ["@vue/shared", "npm:3.4.15"],\
+            ["@vue/compiler-dom", "npm:3.4.19"],\
+            ["@vue/compiler-sfc", "npm:3.4.19"],\
+            ["@vue/runtime-dom", "npm:3.4.19"],\
+            ["@vue/server-renderer", "virtual:4a00eaa92881d899c4bfd888f27f491cbd205f41f1b80ddbe244d9ec5f873ecb404c076848bc49c7c1cccfc64cd782632063852fa7617c2e3becac25e6bea510#npm:3.4.19"],\
+            ["@vue/shared", "npm:3.4.19"],\
             ["typescript", null]\
           ],\
           "packagePeers": [\
@@ -9314,7 +9314,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["@types/vue", null],\
             ["@types/vue__composition-api", null],\
             ["@vue/composition-api", null],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -9331,7 +9331,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["@types/vue", null],\
             ["@types/vue__composition-api", null],\
             ["@vue/composition-api", null],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -9343,17 +9343,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
         }]\
       ]],\
       ["vue-eslint-parser", [\
-        ["npm:9.4.0", {\
-          "packageLocation": "./.yarn/cache/vue-eslint-parser-npm-9.4.0-9f9ee7131a-b53d05d3ba.zip/node_modules/vue-eslint-parser/",\
+        ["npm:9.4.2", {\
+          "packageLocation": "./.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip/node_modules/vue-eslint-parser/",\
           "packageDependencies": [\
-            ["vue-eslint-parser", "npm:9.4.0"]\
+            ["vue-eslint-parser", "npm:9.4.2"]\
           ],\
           "linkType": "SOFT"\
         }],\
-        ["virtual:2b4f27d06a7cfda3abc993d9bc9ad4ae1e096241fb2f2e268ef190ab6fde2cae6795f9fa1e60e98bb9033ee6da1528cb45e9f336be633f59db8da1d4778c6a5c#npm:9.4.0", {\
-          "packageLocation": "./.yarn/__virtual__/vue-eslint-parser-virtual-6df112c4ac/0/cache/vue-eslint-parser-npm-9.4.0-9f9ee7131a-b53d05d3ba.zip/node_modules/vue-eslint-parser/",\
+        ["virtual:b75de6babfc82866444f6e4bb38628e1f23fbdc3141dadeeae372874c6f2a8214269c8e41ea4e063c99eb4bd451512d2674515cea245e6541b2bf98231a3a9e5#npm:9.4.2", {\
+          "packageLocation": "./.yarn/__virtual__/vue-eslint-parser-virtual-dcc4b805bd/0/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip/node_modules/vue-eslint-parser/",\
           "packageDependencies": [\
-            ["vue-eslint-parser", "virtual:2b4f27d06a7cfda3abc993d9bc9ad4ae1e096241fb2f2e268ef190ab6fde2cae6795f9fa1e60e98bb9033ee6da1528cb45e9f336be633f59db8da1d4778c6a5c#npm:9.4.0"],\
+            ["vue-eslint-parser", "virtual:b75de6babfc82866444f6e4bb38628e1f23fbdc3141dadeeae372874c6f2a8214269c8e41ea4e063c99eb4bd451512d2674515cea245e6541b2bf98231a3a9e5#npm:9.4.2"],\
             ["@types/eslint", null],\
             ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\
             ["eslint", "npm:8.56.0"],\
@@ -9385,7 +9385,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["vue-router", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:4.2.5"],\
             ["@types/vue", null],\
             ["@vue/devtools-api", "npm:6.5.0"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
@@ -9414,7 +9414,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
             ["seemly", "npm:0.3.6"],\
             ["vdirs", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.1.8"],\
             ["vooks", "virtual:9083f0b60f7ff3c9457189a27c2996ceed17cab3520ae1c32ab5e5244b992c3c8baaf999ad3c2b19ef13e1964e3197201ef68b1b3153ac72686293207b8892cf#npm:0.2.12"],\
-            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.15"]\
+            ["vue", "virtual:dc3fc578bfa5e06182a4d2be39ede0bc5b74940b1ffe0d70c26892ab140a4699787750fba175dc306292e80b4aa2c8c5f68c2a821e69b2c37e360c0dff36ff66#npm:3.4.19"]\
           ],\
           "packagePeers": [\
             "@types/vue",\
diff --git a/.yarn/cache/@babel-parser-npm-7.23.6-2fad283d6e-140801c437.zip b/.yarn/cache/@babel-parser-npm-7.23.6-2fad283d6e-140801c437.zip
deleted file mode 100644
index 5537b3ffb..000000000
Binary files a/.yarn/cache/@babel-parser-npm-7.23.6-2fad283d6e-140801c437.zip and /dev/null differ
diff --git a/.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip b/.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip
new file mode 100644
index 000000000..7b6c44fc3
Binary files /dev/null and b/.yarn/cache/@babel-parser-npm-7.23.9-720a0b56cb-e7cd4960ac.zip differ
diff --git a/.yarn/cache/@vue-compiler-core-npm-3.4.15-4f131dda24-1610f715b8.zip b/.yarn/cache/@vue-compiler-core-npm-3.4.15-4f131dda24-1610f715b8.zip
deleted file mode 100644
index 0752549fd..000000000
Binary files a/.yarn/cache/@vue-compiler-core-npm-3.4.15-4f131dda24-1610f715b8.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-compiler-core-npm-3.4.19-d8490343cd-92fbcc52c0.zip b/.yarn/cache/@vue-compiler-core-npm-3.4.19-d8490343cd-92fbcc52c0.zip
new file mode 100644
index 000000000..07bbfbba9
Binary files /dev/null and b/.yarn/cache/@vue-compiler-core-npm-3.4.19-d8490343cd-92fbcc52c0.zip differ
diff --git a/.yarn/cache/@vue-compiler-dom-npm-3.4.15-8299b45d96-373968c2c6.zip b/.yarn/cache/@vue-compiler-dom-npm-3.4.15-8299b45d96-373968c2c6.zip
deleted file mode 100644
index 99390043d..000000000
Binary files a/.yarn/cache/@vue-compiler-dom-npm-3.4.15-8299b45d96-373968c2c6.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-compiler-dom-npm-3.4.19-4dd285f289-b74c620c40.zip b/.yarn/cache/@vue-compiler-dom-npm-3.4.19-4dd285f289-b74c620c40.zip
new file mode 100644
index 000000000..98e8fbd28
Binary files /dev/null and b/.yarn/cache/@vue-compiler-dom-npm-3.4.19-4dd285f289-b74c620c40.zip differ
diff --git a/.yarn/cache/@vue-compiler-sfc-npm-3.4.15-3d3ce9fc16-4a707346c3.zip b/.yarn/cache/@vue-compiler-sfc-npm-3.4.15-3d3ce9fc16-4a707346c3.zip
deleted file mode 100644
index a77adea96..000000000
Binary files a/.yarn/cache/@vue-compiler-sfc-npm-3.4.15-3d3ce9fc16-4a707346c3.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-compiler-sfc-npm-3.4.19-7ebf5c3b28-d622207fdb.zip b/.yarn/cache/@vue-compiler-sfc-npm-3.4.19-7ebf5c3b28-d622207fdb.zip
new file mode 100644
index 000000000..13b88fb14
Binary files /dev/null and b/.yarn/cache/@vue-compiler-sfc-npm-3.4.19-7ebf5c3b28-d622207fdb.zip differ
diff --git a/.yarn/cache/@vue-compiler-ssr-npm-3.4.15-05dd3d13a5-45a12ae2dd.zip b/.yarn/cache/@vue-compiler-ssr-npm-3.4.15-05dd3d13a5-45a12ae2dd.zip
deleted file mode 100644
index 9078a17ce..000000000
Binary files a/.yarn/cache/@vue-compiler-ssr-npm-3.4.15-05dd3d13a5-45a12ae2dd.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-compiler-ssr-npm-3.4.19-5732449e1f-b4599560fd.zip b/.yarn/cache/@vue-compiler-ssr-npm-3.4.19-5732449e1f-b4599560fd.zip
new file mode 100644
index 000000000..9e39b456b
Binary files /dev/null and b/.yarn/cache/@vue-compiler-ssr-npm-3.4.19-5732449e1f-b4599560fd.zip differ
diff --git a/.yarn/cache/@vue-reactivity-npm-3.4.15-fde29aa046-e1f8ef7ec3.zip b/.yarn/cache/@vue-reactivity-npm-3.4.15-fde29aa046-e1f8ef7ec3.zip
deleted file mode 100644
index ce90ec701..000000000
Binary files a/.yarn/cache/@vue-reactivity-npm-3.4.15-fde29aa046-e1f8ef7ec3.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-reactivity-npm-3.4.19-6434952da4-67f6264792.zip b/.yarn/cache/@vue-reactivity-npm-3.4.19-6434952da4-67f6264792.zip
new file mode 100644
index 000000000..c2d22e94b
Binary files /dev/null and b/.yarn/cache/@vue-reactivity-npm-3.4.19-6434952da4-67f6264792.zip differ
diff --git a/.yarn/cache/@vue-runtime-core-npm-3.4.15-b9057fef14-6ab6721410.zip b/.yarn/cache/@vue-runtime-core-npm-3.4.15-b9057fef14-6ab6721410.zip
deleted file mode 100644
index 20d0947df..000000000
Binary files a/.yarn/cache/@vue-runtime-core-npm-3.4.15-b9057fef14-6ab6721410.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-runtime-core-npm-3.4.19-2318784b1f-7303ec2585.zip b/.yarn/cache/@vue-runtime-core-npm-3.4.19-2318784b1f-7303ec2585.zip
new file mode 100644
index 000000000..7fd252bb7
Binary files /dev/null and b/.yarn/cache/@vue-runtime-core-npm-3.4.19-2318784b1f-7303ec2585.zip differ
diff --git a/.yarn/cache/@vue-runtime-dom-npm-3.4.15-7dfc9b71f4-4f2e79d956.zip b/.yarn/cache/@vue-runtime-dom-npm-3.4.15-7dfc9b71f4-4f2e79d956.zip
deleted file mode 100644
index 563d95199..000000000
Binary files a/.yarn/cache/@vue-runtime-dom-npm-3.4.15-7dfc9b71f4-4f2e79d956.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-runtime-dom-npm-3.4.19-e17ab14a02-0afdb7b383.zip b/.yarn/cache/@vue-runtime-dom-npm-3.4.19-e17ab14a02-0afdb7b383.zip
new file mode 100644
index 000000000..d65d766dd
Binary files /dev/null and b/.yarn/cache/@vue-runtime-dom-npm-3.4.19-e17ab14a02-0afdb7b383.zip differ
diff --git a/.yarn/cache/@vue-server-renderer-npm-3.4.15-fd81b21d4f-de93ccffe7.zip b/.yarn/cache/@vue-server-renderer-npm-3.4.15-fd81b21d4f-de93ccffe7.zip
deleted file mode 100644
index be363ebd5..000000000
Binary files a/.yarn/cache/@vue-server-renderer-npm-3.4.15-fd81b21d4f-de93ccffe7.zip and /dev/null differ
diff --git a/.yarn/cache/@vue-server-renderer-npm-3.4.19-cd67ced293-ae270a7269.zip b/.yarn/cache/@vue-server-renderer-npm-3.4.19-cd67ced293-ae270a7269.zip
new file mode 100644
index 000000000..54c11e6bd
Binary files /dev/null and b/.yarn/cache/@vue-server-renderer-npm-3.4.19-cd67ced293-ae270a7269.zip differ
diff --git a/.yarn/cache/@vue-shared-npm-3.4.15-638dcb7e89-237db3a880.zip b/.yarn/cache/@vue-shared-npm-3.4.19-adf9fa6e28-676c2ec007.zip
similarity index 92%
rename from .yarn/cache/@vue-shared-npm-3.4.15-638dcb7e89-237db3a880.zip
rename to .yarn/cache/@vue-shared-npm-3.4.19-adf9fa6e28-676c2ec007.zip
index 8b5c31b99..6d76b26e0 100644
Binary files a/.yarn/cache/@vue-shared-npm-3.4.15-638dcb7e89-237db3a880.zip and b/.yarn/cache/@vue-shared-npm-3.4.19-adf9fa6e28-676c2ec007.zip differ
diff --git a/.yarn/cache/caniuse-lite-npm-1.0.30001581-7909cc6e66-ca4e2cd9d0.zip b/.yarn/cache/caniuse-lite-npm-1.0.30001581-7909cc6e66-ca4e2cd9d0.zip
deleted file mode 100644
index 817fa0ab3..000000000
Binary files a/.yarn/cache/caniuse-lite-npm-1.0.30001581-7909cc6e66-ca4e2cd9d0.zip and /dev/null differ
diff --git a/.yarn/cache/caniuse-lite-npm-1.0.30001588-4c3a70951f-2ab5fcec8f.zip b/.yarn/cache/caniuse-lite-npm-1.0.30001588-4c3a70951f-2ab5fcec8f.zip
new file mode 100644
index 000000000..f3d38b0b8
Binary files /dev/null and b/.yarn/cache/caniuse-lite-npm-1.0.30001588-4c3a70951f-2ab5fcec8f.zip differ
diff --git a/.yarn/cache/eslint-plugin-vue-npm-9.20.1-ec1d3386bd-fe50f4b842.zip b/.yarn/cache/eslint-plugin-vue-npm-9.21.1-7f8f9cf843-3fe29a7062.zip
similarity index 88%
rename from .yarn/cache/eslint-plugin-vue-npm-9.20.1-ec1d3386bd-fe50f4b842.zip
rename to .yarn/cache/eslint-plugin-vue-npm-9.21.1-7f8f9cf843-3fe29a7062.zip
index 72337f34c..432b49799 100644
Binary files a/.yarn/cache/eslint-plugin-vue-npm-9.20.1-ec1d3386bd-fe50f4b842.zip and b/.yarn/cache/eslint-plugin-vue-npm-9.21.1-7f8f9cf843-3fe29a7062.zip differ
diff --git a/.yarn/cache/magic-string-npm-0.30.5-dffb7e6a73-da10fecff0.zip b/.yarn/cache/magic-string-npm-0.30.5-dffb7e6a73-da10fecff0.zip
deleted file mode 100644
index 649679c9b..000000000
Binary files a/.yarn/cache/magic-string-npm-0.30.5-dffb7e6a73-da10fecff0.zip and /dev/null differ
diff --git a/.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip b/.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip
new file mode 100644
index 000000000..7d9e6ff1d
Binary files /dev/null and b/.yarn/cache/magic-string-npm-0.30.7-0bb5819095-bdf102e36a.zip differ
diff --git a/.yarn/cache/moment-timezone-npm-0.5.44-16af7889fb-2f1de58f14.zip b/.yarn/cache/moment-timezone-npm-0.5.44-16af7889fb-2f1de58f14.zip
deleted file mode 100644
index 01896c17d..000000000
Binary files a/.yarn/cache/moment-timezone-npm-0.5.44-16af7889fb-2f1de58f14.zip and /dev/null differ
diff --git a/.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip b/.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip
new file mode 100644
index 000000000..4cd7864ca
Binary files /dev/null and b/.yarn/cache/moment-timezone-npm-0.5.45-2df3ad72a4-a22e9f983f.zip differ
diff --git a/.yarn/cache/sass-npm-1.70.0-153257249c-fd1b622cf9.zip b/.yarn/cache/sass-npm-1.70.0-153257249c-fd1b622cf9.zip
deleted file mode 100644
index 1240945fb..000000000
Binary files a/.yarn/cache/sass-npm-1.70.0-153257249c-fd1b622cf9.zip and /dev/null differ
diff --git a/.yarn/cache/sass-npm-1.71.0-38088726c4-5ba6b4b994.zip b/.yarn/cache/sass-npm-1.71.0-38088726c4-5ba6b4b994.zip
new file mode 100644
index 000000000..8a7412b25
Binary files /dev/null and b/.yarn/cache/sass-npm-1.71.0-38088726c4-5ba6b4b994.zip differ
diff --git a/.yarn/cache/vue-eslint-parser-npm-9.4.0-9f9ee7131a-b53d05d3ba.zip b/.yarn/cache/vue-eslint-parser-npm-9.4.0-9f9ee7131a-b53d05d3ba.zip
deleted file mode 100644
index ab19b7c6f..000000000
Binary files a/.yarn/cache/vue-eslint-parser-npm-9.4.0-9f9ee7131a-b53d05d3ba.zip and /dev/null differ
diff --git a/.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip b/.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip
new file mode 100644
index 000000000..9ec85e189
Binary files /dev/null and b/.yarn/cache/vue-eslint-parser-npm-9.4.2-3e4e696025-67f14c8ea1.zip differ
diff --git a/.yarn/cache/vue-npm-3.4.15-11fe9fcc84-6e9ff02c9b.zip b/.yarn/cache/vue-npm-3.4.15-11fe9fcc84-6e9ff02c9b.zip
deleted file mode 100644
index a8ec8dc6d..000000000
Binary files a/.yarn/cache/vue-npm-3.4.15-11fe9fcc84-6e9ff02c9b.zip and /dev/null differ
diff --git a/.yarn/cache/vue-npm-3.4.19-79f858aa3d-8c83a8097d.zip b/.yarn/cache/vue-npm-3.4.19-79f858aa3d-8c83a8097d.zip
new file mode 100644
index 000000000..5721e24c9
Binary files /dev/null and b/.yarn/cache/vue-npm-3.4.19-79f858aa3d-8c83a8097d.zip differ
diff --git a/bin/mm_hourly b/bin/mm_hourly
index 0d1da2e57..e371fd611 100755
--- a/bin/mm_hourly
+++ b/bin/mm_hourly
@@ -21,4 +21,3 @@ source $DTDIR/env/bin/activate
 
 logger -p user.info -t cron "Running $DTDIR/bin/mm_hourly"
 
-$DTDIR/ietf/manage.py import_mailman_listinfo
diff --git a/dev/coverage-action/package-lock.json b/dev/coverage-action/package-lock.json
index 36661ba14..ebc5272f8 100644
--- a/dev/coverage-action/package-lock.json
+++ b/dev/coverage-action/package-lock.json
@@ -22,7 +22,7 @@
         "eslint-plugin-import": "2.29.1",
         "eslint-plugin-node": "11.1.0",
         "eslint-plugin-promise": "6.1.1",
-        "npm-check-updates": "16.14.14"
+        "npm-check-updates": "16.14.15"
       }
     },
     "node_modules/@aashutoshrathi/word-wrap": {
@@ -4079,9 +4079,9 @@
       }
     },
     "node_modules/npm-check-updates": {
-      "version": "16.14.14",
-      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.14.tgz",
-      "integrity": "sha512-Y3ajS/Ep40jM489rLBdz9jehn/BMil5s9fA4PSr2ZJxxSmtLWCSmRqsI2IEZ9Nb3MTMu8a3s7kBs0l+JbjdkTA==",
+      "version": "16.14.15",
+      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.15.tgz",
+      "integrity": "sha512-WH0wJ9j6CP7Azl+LLCxWAYqroT2IX02kRIzgK/fg0rPpMbETgHITWBdOPtrv521xmA3JMgeNsQ62zvVtS/nCmQ==",
       "dev": true,
       "dependencies": {
         "chalk": "^5.3.0",
@@ -9249,9 +9249,9 @@
       }
     },
     "npm-check-updates": {
-      "version": "16.14.14",
-      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.14.tgz",
-      "integrity": "sha512-Y3ajS/Ep40jM489rLBdz9jehn/BMil5s9fA4PSr2ZJxxSmtLWCSmRqsI2IEZ9Nb3MTMu8a3s7kBs0l+JbjdkTA==",
+      "version": "16.14.15",
+      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.15.tgz",
+      "integrity": "sha512-WH0wJ9j6CP7Azl+LLCxWAYqroT2IX02kRIzgK/fg0rPpMbETgHITWBdOPtrv521xmA3JMgeNsQ62zvVtS/nCmQ==",
       "dev": true,
       "requires": {
         "chalk": "^5.3.0",
diff --git a/dev/coverage-action/package.json b/dev/coverage-action/package.json
index e1bbb3d3f..f9e10c1e2 100644
--- a/dev/coverage-action/package.json
+++ b/dev/coverage-action/package.json
@@ -19,6 +19,6 @@
     "eslint-plugin-import": "2.29.1",
     "eslint-plugin-node": "11.1.0",
     "eslint-plugin-promise": "6.1.1",
-    "npm-check-updates": "16.14.14"
+    "npm-check-updates": "16.14.15"
   }
 }
diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json
index 6ef15a2b7..6ef000fc9 100644
--- a/dev/deploy-to-container/package-lock.json
+++ b/dev/deploy-to-container/package-lock.json
@@ -8,7 +8,7 @@
       "dependencies": {
         "dockerode": "^4.0.2",
         "fs-extra": "^11.2.0",
-        "nanoid": "5.0.4",
+        "nanoid": "5.0.5",
         "nanoid-dictionary": "5.0.0-beta.1",
         "slugify": "1.6.6",
         "tar": "^6.2.0",
@@ -337,9 +337,9 @@
       "optional": true
     },
     "node_modules/nanoid": {
-      "version": "5.0.4",
-      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.4.tgz",
-      "integrity": "sha512-vAjmBf13gsmhXSgBrtIclinISzFFy22WwCYoyilZlsrRXNIHSwgFQ1bEdjRwMT3aoadeIF6HMuDRlOxzfXV8ig==",
+      "version": "5.0.5",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.5.tgz",
+      "integrity": "sha512-/Veqm+QKsyMY3kqi4faWplnY1u+VuKO3dD2binyPIybP31DRO29bPF+1mszgLnrR2KqSLceFLBNw0zmvDzN1QQ==",
       "funding": [
         {
           "type": "github",
@@ -878,9 +878,9 @@
       "optional": true
     },
     "nanoid": {
-      "version": "5.0.4",
-      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.4.tgz",
-      "integrity": "sha512-vAjmBf13gsmhXSgBrtIclinISzFFy22WwCYoyilZlsrRXNIHSwgFQ1bEdjRwMT3aoadeIF6HMuDRlOxzfXV8ig=="
+      "version": "5.0.5",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.5.tgz",
+      "integrity": "sha512-/Veqm+QKsyMY3kqi4faWplnY1u+VuKO3dD2binyPIybP31DRO29bPF+1mszgLnrR2KqSLceFLBNw0zmvDzN1QQ=="
     },
     "nanoid-dictionary": {
       "version": "5.0.0-beta.1",
diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json
index 575268bbf..400138a9b 100644
--- a/dev/deploy-to-container/package.json
+++ b/dev/deploy-to-container/package.json
@@ -4,7 +4,7 @@
   "dependencies": {
     "dockerode": "^4.0.2",
     "fs-extra": "^11.2.0",
-    "nanoid": "5.0.4",
+    "nanoid": "5.0.5",
     "nanoid-dictionary": "5.0.0-beta.1",
     "slugify": "1.6.6",
     "tar": "^6.2.0",
diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile
index 12d03d74d..4a307c6c6 100644
--- a/docker/base.Dockerfile
+++ b/docker/base.Dockerfile
@@ -1,146 +1,146 @@
-FROM python:3.9-bullseye
-LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
-
-ENV DEBIAN_FRONTEND=noninteractive
-ENV NODE_MAJOR=16
-
-# Update system packages
-RUN apt-get update \
-    && apt-get -qy upgrade \
-    && apt-get -y install --no-install-recommends apt-utils dialog 2>&1
-
-# Add Node.js Source
-RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \
-    && mkdir -p /etc/apt/keyrings\
-    && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
-RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
-
-# Add Docker Source
-RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
-RUN echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \
-    $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
-
-# Add PostgreSQL Source 
-RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(. /etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
-RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
-
-# Install the packages we need
-RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends \
-	apache2-utils \
-	apt-file \
-	bash \
-	build-essential \
-	curl \
-	default-jdk \
-	docker-ce-cli \
-	enscript \
-	firefox-esr \
-	gawk \
-	g++ \
-	gcc \
-	ghostscript \
-	git \
-	gnupg \
-	jq \
-	less \
-	libcairo2-dev \
-	libgtk2.0-0 \
-	libgtk-3-0 \
-	libnotify-dev \
-	libgconf-2-4 \
-	libgbm-dev \
-	libnss3 \
-	libxss1 \
-	libasound2 \
-	libxtst6 \
-	libmagic-dev \
-	libmariadb-dev \
-	libmemcached-tools \
-	locales \
-	make \
-	mariadb-client \
-	memcached \
-	nano \
-	netcat \
-	nodejs \
-	pgloader \
-	pigz \
-	postgresql-client-14 \
-	pv \
-	python3-ipython \
-	ripgrep \
-	rsync \
-	rsyslog \
-	ruby \
-	ruby-rubygems \
-	unzip \
-	wget \
-	xauth \
-	xvfb \
-	yang-tools \
-	zsh
-
-# Install kramdown-rfc2629 (ruby)
-RUN gem install kramdown-rfc2629
-
-# GeckoDriver
-ARG GECKODRIVER_VERSION=latest
-RUN GK_VERSION=$(if [ ${GECKODRIVER_VERSION:-latest} = "latest" ]; then echo "0.33.0"; else echo $GECKODRIVER_VERSION; fi) \
-  && echo "Using GeckoDriver version: "$GK_VERSION \
-  && wget --no-verbose -O /tmp/geckodriver.tar.gz https://github.com/mozilla/geckodriver/releases/download/v$GK_VERSION/geckodriver-v$GK_VERSION-linux64.tar.gz \
-  && rm -rf /opt/geckodriver \
-  && tar -C /opt -zxf /tmp/geckodriver.tar.gz \
-  && rm /tmp/geckodriver.tar.gz \
-  && mv /opt/geckodriver /opt/geckodriver-$GK_VERSION \
-  && chmod 755 /opt/geckodriver-$GK_VERSION \
-  && ln -fs /opt/geckodriver-$GK_VERSION /usr/bin/geckodriver
-
-# Activate Yarn
-RUN corepack enable
-
-# Get rid of installation files we don't need in the image, to reduce size
-RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /var/cache/apt/*
-
-# "fake" dbus address to prevent errors
-# https://github.com/SeleniumHQ/docker-selenium/issues/87
-ENV DBUS_SESSION_BUS_ADDRESS=/dev/null
-
-# avoid million NPM install messages
-ENV npm_config_loglevel warn
-# allow installing when the main user is root
-ENV npm_config_unsafe_perm true
-# disable NPM funding messages
-ENV npm_config_fund false
-
-# Set locale to en_US.UTF-8
-RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
-    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \
-    echo "LANG=en_US.UTF-8" > /etc/locale.conf && \
-    dpkg-reconfigure locales && \
-    locale-gen en_US.UTF-8 && \
-    update-locale LC_ALL en_US.UTF-8
-ENV LC_ALL en_US.UTF-8
-
-# Install idnits
-ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/
-RUN chmod +rx /usr/local/bin/idnits
-
-# Turn off rsyslog kernel logging (doesn't work in Docker)
-RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
-
-# Colorize the bash shell
-RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc
-
-# Turn off rsyslog kernel logging (doesn't work in Docker)
-RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
-
-# Fetch wait-for utility
-ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/
-RUN chmod +rx /usr/local/bin/wait-for
-
-# Create assets directory
-RUN mkdir -p /assets
-
-# Create workspace
-RUN mkdir -p /workspace
-WORKDIR /workspace
+FROM python:3.9-bullseye
+LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
+
+ENV DEBIAN_FRONTEND=noninteractive
+ENV NODE_MAJOR=16
+
+# Update system packages
+RUN apt-get update \
+    && apt-get -qy upgrade \
+    && apt-get -y install --no-install-recommends apt-utils dialog 2>&1
+
+# Add Node.js Source
+RUN apt-get install -y --no-install-recommends ca-certificates curl gnupg \
+    && mkdir -p /etc/apt/keyrings\
+    && curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
+RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+
+# Add Docker Source
+RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+RUN echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \
+    $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+# Add PostgreSQL Source 
+RUN echo "deb http://apt.postgresql.org/pub/repos/apt $(. /etc/os-release && echo "$VERSION_CODENAME")-pgdg main" | tee /etc/apt/sources.list.d/pgdg.list
+RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
+
+# Install the packages we need
+RUN apt-get update --fix-missing && apt-get install -qy --no-install-recommends \
+	apache2-utils \
+	apt-file \
+	bash \
+	build-essential \
+	curl \
+	default-jdk \
+	docker-ce-cli \
+	enscript \
+	firefox-esr \
+	gawk \
+	g++ \
+	gcc \
+	ghostscript \
+	git \
+	gnupg \
+	jq \
+	less \
+	libcairo2-dev \
+	libgtk2.0-0 \
+	libgtk-3-0 \
+	libnotify-dev \
+	libgconf-2-4 \
+	libgbm-dev \
+	libnss3 \
+	libxss1 \
+	libasound2 \
+	libxtst6 \
+	libmagic-dev \
+	libmariadb-dev \
+	libmemcached-tools \
+	locales \
+	make \
+	mariadb-client \
+	memcached \
+	nano \
+	netcat \
+	nodejs \
+	pgloader \
+	pigz \
+	postgresql-client-14 \
+	pv \
+	python3-ipython \
+	ripgrep \
+	rsync \
+	rsyslog \
+	ruby \
+	ruby-rubygems \
+	unzip \
+	wget \
+	xauth \
+	xvfb \
+	yang-tools \
+	zsh
+
+# Install kramdown-rfc2629 (ruby)
+RUN gem install kramdown-rfc2629
+
+# GeckoDriver
+ARG GECKODRIVER_VERSION=latest
+RUN GK_VERSION=$(if [ ${GECKODRIVER_VERSION:-latest} = "latest" ]; then echo "0.34.0"; else echo $GECKODRIVER_VERSION; fi) \
+  && echo "Using GeckoDriver version: "$GK_VERSION \
+  && wget --no-verbose -O /tmp/geckodriver.tar.gz https://github.com/mozilla/geckodriver/releases/download/v$GK_VERSION/geckodriver-v$GK_VERSION-linux64.tar.gz \
+  && rm -rf /opt/geckodriver \
+  && tar -C /opt -zxf /tmp/geckodriver.tar.gz \
+  && rm /tmp/geckodriver.tar.gz \
+  && mv /opt/geckodriver /opt/geckodriver-$GK_VERSION \
+  && chmod 755 /opt/geckodriver-$GK_VERSION \
+  && ln -fs /opt/geckodriver-$GK_VERSION /usr/bin/geckodriver
+
+# Activate Yarn
+RUN corepack enable
+
+# Get rid of installation files we don't need in the image, to reduce size
+RUN apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /var/cache/apt/*
+
+# "fake" dbus address to prevent errors
+# https://github.com/SeleniumHQ/docker-selenium/issues/87
+ENV DBUS_SESSION_BUS_ADDRESS=/dev/null
+
+# avoid million NPM install messages
+ENV npm_config_loglevel warn
+# allow installing when the main user is root
+ENV npm_config_unsafe_perm true
+# disable NPM funding messages
+ENV npm_config_fund false
+
+# Set locale to en_US.UTF-8
+RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
+    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \
+    echo "LANG=en_US.UTF-8" > /etc/locale.conf && \
+    dpkg-reconfigure locales && \
+    locale-gen en_US.UTF-8 && \
+    update-locale LC_ALL en_US.UTF-8
+ENV LC_ALL en_US.UTF-8
+
+# Install idnits
+ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/
+RUN chmod +rx /usr/local/bin/idnits
+
+# Turn off rsyslog kernel logging (doesn't work in Docker)
+RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
+
+# Colorize the bash shell
+RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc
+
+# Turn off rsyslog kernel logging (doesn't work in Docker)
+RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
+
+# Fetch wait-for utility
+ADD https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for /usr/local/bin/
+RUN chmod +rx /usr/local/bin/wait-for
+
+# Create assets directory
+RUN mkdir -p /assets
+
+# Create workspace
+RUN mkdir -p /workspace
+WORKDIR /workspace
diff --git a/ietf/api/tests.py b/ietf/api/tests.py
index a495accc3..4fc1d46cd 100644
--- a/ietf/api/tests.py
+++ b/ietf/api/tests.py
@@ -32,8 +32,8 @@ from ietf.meeting.factories import MeetingFactory, SessionFactory
 from ietf.meeting.models import Session
 from ietf.nomcom.models import Volunteer, NomCom
 from ietf.nomcom.factories import NomComFactory, nomcom_kwargs_for_year
-from ietf.person.factories import PersonFactory, random_faker
-from ietf.person.models import User
+from ietf.person.factories import PersonFactory, random_faker, EmailFactory
+from ietf.person.models import Email, User
 from ietf.person.models import PersonalApiKey
 from ietf.stats.models import MeetingRegistration
 from ietf.utils.mail import outbox, get_payload_text
@@ -368,7 +368,7 @@ class CustomApiTests(TestCase):
             r = self.client.post(url,{'apikey':apikey.hash(),'apidata': f'{{"session_id":{session.pk}, "{type_id}":{content}}}'})
             self.assertEqual(r.status_code, 200)
 
-            newdoc = session.sessionpresentation_set.get(document__type_id=type_id).document
+            newdoc = session.presentations.get(document__type_id=type_id).document
             newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename)
             self.assertEqual(json.loads(content), json.loads(newdoccontent))
 
@@ -454,7 +454,7 @@ class CustomApiTests(TestCase):
                                    'item': '1', 'bluesheet': bluesheet, })
         self.assertContains(r, "Done", status_code=200)
 
-        bluesheet = session.sessionpresentation_set.filter(document__type__slug='bluesheets').first().document
+        bluesheet = session.presentations.filter(document__type__slug='bluesheets').first().document
         # We've submitted an update; check that the rev is right
         self.assertEqual(bluesheet.rev, '01')
         # Check the content
@@ -569,7 +569,7 @@ class CustomApiTests(TestCase):
         self.assertContains(r, "Done", status_code=200)
 
         bluesheet = (
-            session.sessionpresentation_set.filter(document__type__slug="bluesheets")
+            session.presentations.filter(document__type__slug="bluesheets")
             .first()
             .document
         )
@@ -853,6 +853,28 @@ class CustomApiTests(TestCase):
             405,
         )
 
+    @override_settings(APP_API_TOKENS={"ietf.api.views.active_email_list": ["valid-token"]})
+    def test_active_email_list(self):
+        EmailFactory(active=True)  # make sure there's at least one active email...
+        EmailFactory(active=False)  # ... and at least one non-active email
+        url = urlreverse("ietf.api.views.active_email_list")
+        r = self.client.post(url, headers={})
+        self.assertEqual(r.status_code, 403)
+        r = self.client.get(url, headers={})
+        self.assertEqual(r.status_code, 403)
+        r = self.client.get(url, headers={"X-Api-Key": "not-the-valid-token"})
+        self.assertEqual(r.status_code, 403)
+        r = self.client.post(url, headers={"X-Api-Key": "not-the-valid-token"})
+        self.assertEqual(r.status_code, 403)
+        r = self.client.post(url, headers={"X-Api-Key": "valid-token"})
+        self.assertEqual(r.status_code, 405)
+        r = self.client.get(url, headers={"X-Api-Key": "valid-token"})
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.headers["Content-Type"], "application/json")
+        result = json.loads(r.content)
+        self.assertCountEqual(result.keys(), ["addresses"])
+        self.assertCountEqual(result["addresses"], Email.objects.filter(active=True).values_list("address", flat=True))
+
 
 class DirectAuthApiTests(TestCase):
 
diff --git a/ietf/api/urls.py b/ietf/api/urls.py
index 107bd398d..1adc02a03 100644
--- a/ietf/api/urls.py
+++ b/ietf/api/urls.py
@@ -49,6 +49,8 @@ urlpatterns = [
     # OpenID authentication provider
     url(r'^openid/$', TemplateView.as_view(template_name='api/openid-issuer.html'), name='ietf.api.urls.oidc_issuer'),
     url(r'^openid/', include('oidc_provider.urls', namespace='oidc_provider')),
+    # Email alias listing
+    url(r'^person/email/$', api_views.active_email_list),
     # Draft submission API
     url(r'^submit/?$', submit_views.api_submit),
     # Draft upload API
diff --git a/ietf/api/views.py b/ietf/api/views.py
index 4205aa315..744c6548a 100644
--- a/ietf/api/views.py
+++ b/ietf/api/views.py
@@ -488,3 +488,15 @@ def group_aliases(request):
             }
         )
     return HttpResponse(status=405)
+
+
+@requires_api_token
+@csrf_exempt
+def active_email_list(request):
+    if request.method == "GET":
+        return JsonResponse(
+            {
+                "addresses": list(Email.objects.filter(active=True).values_list("address", flat=True)),
+            }
+        )
+    return HttpResponse(status=405)
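
Note on the new endpoint: active_email_list is read-only, wrapped in @requires_api_token, and CSRF-exempt, so a caller must send the configured token in the X-Api-Key header and use GET (POST returns 405). A minimal client sketch, assuming the `requests` library and a deployment-specific host and token (both hypothetical here):

    import requests

    # Hypothetical host and token; the real values depend on where ietf/api/urls.py
    # is mounted and on the APP_API_TOKENS setting for this view.
    resp = requests.get(
        "https://datatracker.example.org/api/person/email/",
        headers={"X-Api-Key": "valid-token"},
    )
    resp.raise_for_status()
    addresses = resp.json()["addresses"]  # list of active email address strings
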
diff --git a/ietf/community/views.py b/ietf/community/views.py
index a9d3bd584..4a28a391f 100644
--- a/ietf/community/views.py
+++ b/ietf/community/views.py
@@ -24,6 +24,7 @@ from ietf.group.models import Group
 from ietf.doc.models import DocEvent, Document
 from ietf.doc.utils_search import prepare_document_table
 from ietf.person.utils import lookup_persons
+from ietf.utils.decorators import ignore_view_kwargs
 from ietf.utils.http import is_ajax
 from ietf.utils.response import permission_denied
 
@@ -70,6 +71,7 @@ def view_list(request, email_or_name=None):
     })
 
 @login_required
+@ignore_view_kwargs("group_type")
 def manage_list(request, email_or_name=None, acronym=None):
     # we need to be a bit careful because clist may not exist in the
     # database so we can't call related stuff on it yet
@@ -209,6 +211,7 @@ def untrack_document(request, name, email_or_name=None, acronym=None):
     })
 
 
+@ignore_view_kwargs("group_type")
 def export_to_csv(request, email_or_name=None, acronym=None):
     try:
         clist = lookup_community_list(request, email_or_name, acronym)
@@ -253,6 +256,7 @@ def export_to_csv(request, email_or_name=None, acronym=None):
 
     return response
 
+@ignore_view_kwargs("group_type")
 def feed(request, email_or_name=None, acronym=None):
     try:
         clist = lookup_community_list(request, email_or_name, acronym)
@@ -292,6 +296,7 @@ def feed(request, email_or_name=None, acronym=None):
 
 
 @login_required
+@ignore_view_kwargs("group_type")
 def subscription(request, email_or_name=None, acronym=None):
     try:
         clist = lookup_community_list(request, email_or_name, acronym)
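
The @ignore_view_kwargs("group_type") decorations above let these community views be reached from URL patterns that still capture a group_type segment, without the view signatures growing an unused parameter. The decorator itself lives in ietf.utils.decorators and is not shown in this diff; the following is only a sketch of its assumed behaviour (drop the named keyword arguments before calling the wrapped view), not the actual implementation:

    from functools import wraps

    def ignore_view_kwargs(*names_to_drop):
        # Assumed behaviour: discard the listed kwargs that the URL resolver passes in.
        def decorator(view):
            @wraps(view)
            def wrapped(request, *args, **kwargs):
                for name in names_to_drop:
                    kwargs.pop(name, None)
                return view(request, *args, **kwargs)
            return wrapped
        return decorator
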
diff --git a/ietf/doc/migrations/0021_narrativeminutes.py b/ietf/doc/migrations/0021_narrativeminutes.py
new file mode 100644
index 000000000..0f330bd05
--- /dev/null
+++ b/ietf/doc/migrations/0021_narrativeminutes.py
@@ -0,0 +1,39 @@
+# Copyright The IETF Trust 2023, All Rights Reserved
+
+from django.db import migrations
+
+
+def forward(apps, schema_editor):
+    StateType = apps.get_model("doc", "StateType")
+    State = apps.get_model("doc", "State")
+
+    StateType.objects.create(
+        slug="narrativeminutes",
+        label="State",
+    )
+    for order, slug in enumerate(["active", "deleted"]):
+        State.objects.create(
+            slug=slug,
+            type_id="narrativeminutes",
+            name=slug.capitalize(),
+            order=order,
+            desc="",
+            used=True,
+        )
+
+
+def reverse(apps, schema_editor):
+    StateType = apps.get_model("doc", "StateType")
+    State = apps.get_model("doc", "State")
+
+    State.objects.filter(type_id="narrativeminutes").delete()
+    StateType.objects.filter(slug="narrativeminutes").delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("doc", "0020_move_errata_tags"),
+        ("name", "0013_narrativeminutes"),
+    ]
+
+    operations = [migrations.RunPython(forward, reverse)]
diff --git a/ietf/doc/models.py b/ietf/doc/models.py
index af4843fc4..d97e8238e 100644
--- a/ietf/doc/models.py
+++ b/ietf/doc/models.py
@@ -148,7 +148,7 @@ class DocumentInfo(models.Model):
                     else:
                         self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR
             elif self.meeting_related() and self.type_id in (
-                    "agenda", "minutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls"
+                    "agenda", "minutes", "narrativeminutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls"
             ):
                 meeting = self.get_related_meeting()
                 if meeting is not None:
@@ -438,7 +438,7 @@ class DocumentInfo(models.Model):
         return e != None and (e.text != "")
 
     def meeting_related(self):
-        if self.type_id in ("agenda","minutes","bluesheets","slides","recording","procmaterials","chatlog","polls"):
+        if self.type_id in ("agenda","minutes", "narrativeminutes", "bluesheets","slides","recording","procmaterials","chatlog","polls"):
              return self.type_id != "slides" or self.get_state_slug('reuse_policy')=='single'
         return False
 
@@ -1028,7 +1028,7 @@ class Document(DocumentInfo):
     def future_presentations(self):
         """ returns related SessionPresentation objects for meetings that
             have not yet ended. This implementation allows for 2 week meetings """
-        candidate_presentations = self.sessionpresentation_set.filter(
+        candidate_presentations = self.presentations.filter(
             session__meeting__date__gte=date_today() - datetime.timedelta(days=15)
         )
         return sorted(
@@ -1041,11 +1041,11 @@ class Document(DocumentInfo):
         """ returns related SessionPresentation objects for the most recent meeting in the past"""
         # Assumes no two meetings have the same start date - if the assumption is violated, one will be chosen arbitrarily
         today = date_today()
-        candidate_presentations = self.sessionpresentation_set.filter(session__meeting__date__lte=today)
+        candidate_presentations = self.presentations.filter(session__meeting__date__lte=today)
         candidate_meetings = set([p.session.meeting for p in candidate_presentations if p.session.meeting.end_date()<today])
         if candidate_meetings:
             mtg = sorted(list(candidate_meetings),key=lambda x:x.date,reverse=True)[0]
-            return self.sessionpresentation_set.filter(session__meeting=mtg)
+            return self.presentations.filter(session__meeting=mtg)
         else:
             return None
 
diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py
index 6d4902542..45d07dc05 100644
--- a/ietf/doc/tests.py
+++ b/ietf/doc/tests.py
@@ -2529,8 +2529,8 @@ class DocumentMeetingTests(TestCase):
 
     def test_view_document_meetings(self):
         doc = IndividualDraftFactory.create()
-        doc.sessionpresentation_set.create(session=self.inprog,rev=None)
-        doc.sessionpresentation_set.create(session=self.interim,rev=None)
+        doc.presentations.create(session=self.inprog,rev=None)
+        doc.presentations.create(session=self.interim,rev=None)
 
         url = urlreverse('ietf.doc.views_doc.all_presentations', kwargs=dict(name=doc.name))
         response = self.client.get(url)
@@ -2541,8 +2541,8 @@ class DocumentMeetingTests(TestCase):
         self.assertFalse(q('#addsessionsbutton'))
         self.assertFalse(q("a.btn:contains('Remove document')"))
 
-        doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None)
-        doc.sessionpresentation_set.create(session=self.past,rev=None)
+        doc.presentations.create(session=self.past_cutoff,rev=None)
+        doc.presentations.create(session=self.past,rev=None)
 
         self.client.login(username="secretary", password="secretary+password")
         response = self.client.get(url)
@@ -2577,7 +2577,7 @@ class DocumentMeetingTests(TestCase):
 
     def test_edit_document_session(self):
         doc = IndividualDraftFactory.create()
-        sp = doc.sessionpresentation_set.create(session=self.future,rev=None)
+        sp = doc.presentations.create(session=self.future,rev=None)
 
         url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name='no-such-doc',session_id=sp.session_id))
         response = self.client.get(url)
@@ -2604,12 +2604,12 @@ class DocumentMeetingTests(TestCase):
         self.assertEqual(1,doc.docevent_set.count())
         response = self.client.post(url,{'version':'00','save':''})
         self.assertEqual(response.status_code, 302)
-        self.assertEqual(doc.sessionpresentation_set.get(pk=sp.pk).rev,'00')
+        self.assertEqual(doc.presentations.get(pk=sp.pk).rev,'00')
         self.assertEqual(2,doc.docevent_set.count())
 
     def test_edit_document_session_after_proceedings_closed(self):
         doc = IndividualDraftFactory.create()
-        sp = doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None)
+        sp = doc.presentations.create(session=self.past_cutoff,rev=None)
 
         url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id))
         self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username)
@@ -2624,7 +2624,7 @@ class DocumentMeetingTests(TestCase):
 
     def test_remove_document_session(self):
         doc = IndividualDraftFactory.create()
-        sp = doc.sessionpresentation_set.create(session=self.future,rev=None)
+        sp = doc.presentations.create(session=self.future,rev=None)
 
         url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name='no-such-doc',session_id=sp.session_id))
         response = self.client.get(url)
@@ -2649,12 +2649,12 @@ class DocumentMeetingTests(TestCase):
         self.assertEqual(1,doc.docevent_set.count())
         response = self.client.post(url,{'remove_session':''})
         self.assertEqual(response.status_code, 302)
-        self.assertFalse(doc.sessionpresentation_set.filter(pk=sp.pk).exists())
+        self.assertFalse(doc.presentations.filter(pk=sp.pk).exists())
         self.assertEqual(2,doc.docevent_set.count())
 
     def test_remove_document_session_after_proceedings_closed(self):
         doc = IndividualDraftFactory.create()
-        sp = doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None)
+        sp = doc.presentations.create(session=self.past_cutoff,rev=None)
 
         url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id))
         self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username)
diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py
index 64f9de4f5..907f1b200 100644
--- a/ietf/doc/views_doc.py
+++ b/ietf/doc/views_doc.py
@@ -832,7 +832,7 @@ def document_main(request, name, rev=None, document_html=False):
                                        sorted_relations=sorted_relations,
                                        ))
 
-    elif doc.type_id in ("slides", "agenda", "minutes", "bluesheets", "procmaterials",):
+    elif doc.type_id in ("slides", "agenda", "minutes", "narrativeminutes", "bluesheets", "procmaterials",):
         can_manage_material = can_manage_materials(request.user, doc.group)
         presentations = doc.future_presentations()
         if doc.uploaded_filename:
@@ -916,9 +916,9 @@ def document_main(request, name, rev=None, document_html=False):
 
     elif doc.type_id in ("chatlog", "polls"):
         if isinstance(doc,DocHistory):
-            session = doc.doc.sessionpresentation_set.last().session
+            session = doc.doc.presentations.last().session
         else:
-            session = doc.sessionpresentation_set.last().session
+            session = doc.presentations.last().session
         pathname = Path(session.meeting.get_materials_path()) / doc.type_id / doc.uploaded_filename
         content = get_unicode_document_content(doc.name, str(pathname))
         return render(
@@ -943,7 +943,7 @@ def document_main(request, name, rev=None, document_html=False):
         variants = set([match.name.split(".")[1] for match in Path(doc.get_file_path()).glob(f"{basename}.*")])
         inlineable = any([ext in variants for ext in ["md", "txt"]])
         if inlineable:
-            content = markdown.markdown(doc.text_or_error())
+            content = markdown.liberal_markdown(doc.text_or_error())
         else:
             content = "No format available to display inline"
             if "pdf" in variants:
@@ -2057,7 +2057,7 @@ class VersionForm(forms.Form):
 
 def edit_sessionpresentation(request,name,session_id):
     doc = get_object_or_404(Document, name=name)
-    sp = get_object_or_404(doc.sessionpresentation_set, session_id=session_id)
+    sp = get_object_or_404(doc.presentations, session_id=session_id)
 
     if not sp.session.can_manage_materials(request.user):
         raise Http404
@@ -2074,7 +2074,7 @@ def edit_sessionpresentation(request,name,session_id):
         if form.is_valid():
             new_selection = form.cleaned_data['version']
             if initial['version'] != new_selection:
-                doc.sessionpresentation_set.filter(pk=sp.pk).update(rev=None if new_selection=='current' else new_selection)
+                doc.presentations.filter(pk=sp.pk).update(rev=None if new_selection=='current' else new_selection)
                 c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person)
                 c.desc = "Revision for session %s changed to  %s" % (sp.session,new_selection)
                 c.save()
@@ -2086,7 +2086,7 @@ def edit_sessionpresentation(request,name,session_id):
 
 def remove_sessionpresentation(request,name,session_id):
     doc = get_object_or_404(Document, name=name)
-    sp = get_object_or_404(doc.sessionpresentation_set, session_id=session_id)
+    sp = get_object_or_404(doc.presentations, session_id=session_id)
 
     if not sp.session.can_manage_materials(request.user):
         raise Http404
@@ -2095,7 +2095,7 @@ def remove_sessionpresentation(request,name,session_id):
         raise Http404
 
     if request.method == 'POST':
-        doc.sessionpresentation_set.filter(pk=sp.pk).delete()
+        doc.presentations.filter(pk=sp.pk).delete()
         c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person)
         c.desc = "Removed from session: %s" % (sp.session)
         c.save()
@@ -2119,7 +2119,7 @@ def add_sessionpresentation(request,name):
     version_choices.insert(0,('current','Current at the time of the session'))
 
     sessions = get_upcoming_manageable_sessions(request.user)
-    sessions = sort_sessions([s for s in sessions if not s.sessionpresentation_set.filter(document=doc).exists()])
+    sessions = sort_sessions([s for s in sessions if not s.presentations.filter(document=doc).exists()])
     if doc.group:
         sessions = sorted(sessions,key=lambda x:0 if x.group==doc.group else 1)
 
@@ -2132,7 +2132,7 @@ def add_sessionpresentation(request,name):
             session_id = session_form.cleaned_data['session']
             version = version_form.cleaned_data['version']
             rev = None if version=='current' else version
-            doc.sessionpresentation_set.create(session_id=session_id,rev=rev)
+            doc.presentations.create(session_id=session_id,rev=rev)
             c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person)
             c.desc = "%s to session: %s" % ('Added -%s'%rev if rev else 'Added', Session.objects.get(pk=session_id))
             c.save()
diff --git a/ietf/doc/views_material.py b/ietf/doc/views_material.py
index 5b16c247b..19bc02cfd 100644
--- a/ietf/doc/views_material.py
+++ b/ietf/doc/views_material.py
@@ -21,9 +21,11 @@ from ietf.doc.models import NewRevisionDocEvent
 from ietf.doc.utils import add_state_change_event, check_common_doc_name_rules
 from ietf.group.models import Group
 from ietf.group.utils import can_manage_materials
+from ietf.utils.decorators import ignore_view_kwargs
 from ietf.utils.response import permission_denied
 
 @login_required
+@ignore_view_kwargs("group_type")
 def choose_material_type(request, acronym):
     group = get_object_or_404(Group, acronym=acronym)
     if not group.features.has_nonsession_materials:
@@ -91,6 +93,7 @@ class UploadMaterialForm(forms.Form):
         return name
 
 @login_required
+@ignore_view_kwargs("group_type")
 def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
     # the materials process is not very developed, so at the moment we
     # handle everything through the same view/form
@@ -110,6 +113,8 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
     valid_doctypes = ['procmaterials']
     if group is not None:
         valid_doctypes.extend(['minutes','agenda','bluesheets'])
+        if group.acronym=="iesg":
+            valid_doctypes.append("narrativeminutes")
         valid_doctypes.extend(group.features.material_types)
 
     if document_type.slug not in valid_doctypes:
diff --git a/ietf/doc/views_statement.py b/ietf/doc/views_statement.py
index 04adb5d1d..bf9f47ddf 100644
--- a/ietf/doc/views_statement.py
+++ b/ietf/doc/views_statement.py
@@ -94,7 +94,7 @@ class StatementUploadForm(forms.Form):
                     )
         if markdown_content != "":
             try:
-                _ = markdown.markdown(markdown_content)
+                _ = markdown.liberal_markdown(markdown_content)
             except Exception as e:
                 raise forms.ValidationError(f"Markdown processing failed: {e}")
 
diff --git a/ietf/group/admin.py b/ietf/group/admin.py
index 5095b6b24..fedec49d8 100644
--- a/ietf/group/admin.py
+++ b/ietf/group/admin.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2010-2020, All Rights Reserved
+# Copyright The IETF Trust 2010-2024, All Rights Reserved
 # -*- coding: utf-8 -*-
 
 import re
@@ -72,6 +72,12 @@ class GroupForm(forms.ModelForm):
                     'Acronym is invalid. For groups that create documents, the acronym must be at least '
                     'two characters and only contain lowercase letters and numbers starting with a letter.'
                 )
+            elif self.cleaned_data['type'].pk == 'sdo':
+                valid_re = r'^[a-z0-9][a-z0-9-]*[a-z0-9]$'
+                error_msg = (
+                    'Acronym is invalid. It must be at least two characters and only contain lowercase '
+                    'letters and numbers. It may contain hyphens, but that is discouraged.'
+                )
             else:
                 valid_re = r'^[a-z][a-z0-9-]*[a-z0-9]$'
                 error_msg = (
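
For reference, the new 'sdo' branch accepts acronyms of at least two characters made of lowercase letters, digits, and (discouraged) interior hyphens, and they may begin or end with a digit. A quick check of the pattern introduced in this hunk:

    import re

    valid_re = r'^[a-z0-9][a-z0-9-]*[a-z0-9]$'
    assert re.match(valid_re, "3gpp")        # may start with a digit
    assert re.match(valid_re, "etsi-msg")    # interior hyphen allowed (discouraged)
    assert not re.match(valid_re, "-oasis")  # must not start with a hyphen
    assert not re.match(valid_re, "x")       # must be at least two characters
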
diff --git a/ietf/group/management/commands/import_iesg_appeals.py b/ietf/group/management/commands/import_iesg_appeals.py
new file mode 100644
index 000000000..525239e6b
--- /dev/null
+++ b/ietf/group/management/commands/import_iesg_appeals.py
@@ -0,0 +1,292 @@
+# Copyright The IETF Trust 2023, All Rights Reserved
+
+import datetime
+import re
+import shutil
+import subprocess
+import tempfile
+
+from pathlib import Path
+import dateutil
+
+from django.conf import settings
+from django.core.management import BaseCommand
+
+from ietf.group.models import Appeal, AppealArtifact
+
+
+class Command(BaseCommand):
+    help = "Performs a one-time import of IESG appeals"
+
+    def handle(self, *args, **options):
+        old_appeals_root = (
+            "/a/www/www6/iesg/appeal"
+            if settings.SERVER_MODE == "production"
+            else "/assets/www6/iesg/appeal"
+        )
+        tmpdir = tempfile.mkdtemp()
+        process = subprocess.Popen(
+            ["git", "clone", "https://github.com/kesara/iesg-scraper.git", tmpdir],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        sub_stdout, sub_stderr = process.communicate()
+        if not (Path(tmpdir) / "iesg_appeals" / "anderson-2006-03-08.md").exists():
+            self.stdout.write(
+                "Git clone of the iesg-scraper directory did not go as expected"
+            )
+            self.stdout.write("stdout:", sub_stdout)
+            self.stdout.write("stderr:", sub_stderr)
+            self.stdout.write(f"Clean up {tmpdir} manually")
+            exit(-1)
+        titles = [
+            "Appeal: IESG Statement on Guidance on In-Person and Online Interim Meetings (John Klensin, 2023-08-15)",
+            "Appeal of current Guidance on in-Person and Online meetings (Ted Hardie, Alan Frindell, 2023-07-19)",
+            "Appeal re: URI Scheme Application and draft-mcsweeney-drop-scheme (Tim McSweeney, 2020-07-08)",
+            "Appeal to the IESG re WGLC of draft-ietf-spring-srv6-network-programming (Fernando Gont, Andrew Alston, and Sander Steffann, 2020-04-22)",
+            "Appeal re Protocol Action: 'URI Design and Ownership' to Best \nCurrent Practice (draft-nottingham-rfc7320bis-03.txt) (John Klensin; 2020-02-04)",
+            "Appeal of IESG Conflict Review process and decision on draft-mavrogiannopoulos-pkcs8-validated-parameters-02 (John Klensin; 2018-07-07)",
+            "Appeal of IESG decision to defer action and request that ISE publish draft-klensin-dns-function-considerations (John Klensin; 2017-11-29)",
+            'Appeal to the IESG concerning its approval of the "draft-ietf-ianaplan-icg-response" (PDF file) (JFC Morfin; 2015-03-11)',
+            "Appeal re tzdist mailing list moderation (Tobias Conradi; 2014-08-28) / Withdrawn by Submitter",
+            "Appeal re draft-masotta-tftpexts-windowsize-opt (Patrick Masotta; 2013-11-14)",
+            "Appeal re draft-ietf-manet-nhdp-sec-threats (Abdussalam Baryun; 2013-06-19)",
+            "Appeal of decision to advance RFC6376 (Douglas Otis; 2013-05-30)",
+            "Appeal to the IESG in regards to RFC 6852 (PDF file) (JFC Morfin; 2013-04-05)",
+            "Appeal to the IESG concerning the approbation of the IDNA2008 document set (PDF file) (JFC Morfin; 2010-03-10)",
+            "Authentication-Results Header Field Appeal (Douglas Otis, David Rand; 2009-02-16) / Withdrawn by Submitter",
+            "Appeal to the IAB of IESG rejection of Appeal to Last Call draft-ietf-grow-anycast (Dean Anderson; 2008-11-14)",
+            "Appeal to the IESG Concerning the Way At Large Internet Lead Users Are Not Permitted To Adequately Contribute to the IETF Deliverables (JFC Morfin; 2008-09-10)",
+            "Appeal over suspension of posting rights for Todd Glassey (Todd Glassey; 2008-07-28)",
+            "Appeal against IESG blocking DISCUSS on draft-klensin-rfc2821bis (John C Klensin; 2008-06-13)",
+            "Appeal: Continued Abuse of Process by IPR-WG Chair (Dean Anderson; 2007-12-26)",
+            "Appeal to the IESG from Todd Glassey (Todd Glassey; 2007-11-26)",
+            "Appeal Against the Removal of the Co-Chairs of the GEOPRIV Working Group (PDF file) (Randall Gellens, Allison Mankin, and Andrew Newton; 2007-06-22)",
+            "Appeal concerning the WG-LTRU rechartering (JFC Morfin; 2006-10-24)",
+            "Appeal against decision within July 10 IESG appeal dismissal (JFC Morfin; 2006-09-09)",
+            "Appeal: Mandatory to implement HTTP authentication mechanism in the Atom Publishing Protocol (Robert Sayre; 2006-08-29)",
+            "Appeal Against IESG Decisions Regarding the draft-ietf-ltru-matching (PDF file) (JFC Morfin; 2006-08-16)",
+            "Amended Appeal Re: grow: Last Call: 'Operation of Anycast Services' to BCP (draft-ietf-grow-anycast) (Dean Anderson; 2006-06-14)",
+            "Appeal Against an IESG Decision Denying Me IANA Language Registration Process by way of PR-Action (PDF file) (JFC Morfin; 2006-05-17)",
+            "Appeal to the IESG of PR-Action against Dean Anderson (Dean Anderson; 2006-03-08)",
+            "Appeal to IESG against AD decision: one must clear the confusion opposing the RFC 3066 Bis consensus (JFC Morfin; 2006-02-20)",
+            "Appeal to the IESG of an IESG decision (JFC Morfin; 2006-02-17)",
+            "Appeal to the IESG in reference to the ietf-languages@alvestrand.no mailing list (JFC Morfin; 2006-02-07)",
+            "Appeal to the IESG against an IESG decision concerning RFC 3066 Bis Draft (JFC Morfin; 2006-01-14)",
+            "Appeal over a key change in a poor RFC 3066 bis example (JFC Morfin; 2005-10-19)",
+            "Additional appeal against publication of draft-lyon-senderid-* in regards to its recommended use of Resent- header fields in the way that is inconsistant with RFC2822(William Leibzon; 2005-08-29)",
+            "Appeal: Publication of draft-lyon-senderid-core-01 in conflict with referenced draft-schlitt-spf-classic-02 (Julian Mehnle; 2005-08-25)",
+            'Appeal of decision to standardize "Mapping Between the Multimedia Messaging Service (MMS) and Internet Mail" (John C Klensin; 2005-06-10)',
+            "Appeal regarding IESG decision on the GROW WG (David Meyer; 2003-11-15)",
+            "Appeal: Official notice of appeal on suspension rights (Todd Glassey; 2003-08-06)",
+            "Appeal: AD response to Site-Local Appeal (Tony Hain; 2003-07-31)",
+            "Appeal against IESG decision for draft-chiba-radius-dynamic-authorization-05.txt (Glen Zorn; 2003-01-15)",
+            "Appeal Against moving draft-ietf-ipngwg-addr-arch-v3 to Draft Standard (Robert Elz; 2002-11-05)",
+        ]
+        date_re = re.compile(r"\d{4}-\d{2}-\d{2}")
+        dates = [
+            datetime.datetime.strptime(date_re.search(t).group(), "%Y-%m-%d").date()
+            for t in titles
+        ]
+
+        parts = [
+            ["klensin-2023-08-15.txt", "response-to-klensin-2023-08-15.txt"],
+            [
+                "hardie-frindell-2023-07-19.txt",
+                "response-to-hardie-frindell-2023-07-19.txt",
+            ],
+            ["mcsweeney-2020-07-08.txt", "response-to-mcsweeney-2020-07-08.pdf"],
+            ["gont-2020-04-22.txt", "response-to-gont-2020-06-02.txt"],
+            ["klensin-2020-02-04.txt", "response-to-klensin-2020-02-04.txt"],
+            ["klensin-2018-07-07.txt", "response-to-klensin-2018-07-07.txt"],
+            ["klensin-2017-11-29.txt", "response-to-klensin-2017-11-29.md"],
+            ["morfin-2015-03-11.pdf", "response-to-morfin-2015-03-11.md"],
+            ["conradi-2014-08-28.txt"],
+            ["masotta-2013-11-14.txt", "response-to-masotta-2013-11-14.md"],
+            ["baryun-2013-06-19.txt", "response-to-baryun-2013-06-19.md"],
+            ["otis-2013-05-30.txt", "response-to-otis-2013-05-30.md"],
+            ["morfin-2013-04-05.pdf", "response-to-morfin-2013-04-05.md"],
+            ["morfin-2010-03-10.pdf", "response-to-morfin-2010-03-10.txt"],
+            ["otis-2009-02-16.txt"],
+            ["anderson-2008-11-14.md", "response-to-anderson-2008-11-14.txt"],
+            ["morfin-2008-09-10.txt", "response-to-morfin-2008-09-10.txt"],
+            ["glassey-2008-07-28.txt", "response-to-glassey-2008-07-28.txt"],
+            ["klensin-2008-06-13.txt", "response-to-klensin-2008-06-13.txt"],
+            ["anderson-2007-12-26.txt", "response-to-anderson-2007-12-26.txt"],
+            ["glassey-2007-11-26.txt", "response-to-glassey-2007-11-26.txt"],
+            ["gellens-2007-06-22.pdf", "response-to-gellens-2007-06-22.txt"],
+            ["morfin-2006-10-24.txt", "response-to-morfin-2006-10-24.txt"],
+            ["morfin-2006-09-09.txt", "response-to-morfin-2006-09-09.txt"],
+            ["sayre-2006-08-29.txt", "response-to-sayre-2006-08-29.txt"],
+            [
+                "morfin-2006-08-16.pdf",
+                "response-to-morfin-2006-08-17.txt",
+                "response-to-morfin-2006-08-17-part2.txt",
+            ],
+            ["anderson-2006-06-13.txt", "response-to-anderson-2006-06-14.txt"],
+            ["morfin-2006-05-17.pdf", "response-to-morfin-2006-05-17.txt"],
+            ["anderson-2006-03-08.md", "response-to-anderson-2006-03-08.txt"],
+            ["morfin-2006-02-20.txt", "response-to-morfin-2006-02-20.txt"],
+            ["morfin-2006-02-17.txt", "response-to-morfin-2006-02-17.txt"],
+            ["morfin-2006-02-07.txt", "response-to-morfin-2006-02-07.txt"],
+            ["morfin-2006-01-14.txt", "response-to-morfin-2006-01-14.txt"],
+            ["morfin-2005-10-19.txt", "response-to-morfin-2005-10-19.txt"],
+            ["leibzon-2005-08-29.txt", "response-to-leibzon-2005-08-29.txt"],
+            ["mehnle-2005-08-25.txt", "response-to-mehnle-2005-08-25.txt"],
+            ["klensin-2005-06-10.txt", "response-to-klensin-2005-06-10.txt"],
+            ["meyer-2003-11-15.txt", "response-to-meyer-2003-11-15.txt"],
+            ["glassey-2003-08-06.txt", "response-to-glassey-2003-08-06.txt"],
+            ["hain-2003-07-31.txt", "response-to-hain-2003-07-31.txt"],
+            ["zorn-2003-01-15.txt", "response-to-zorn-2003-01-15.txt"],
+            ["elz-2002-11-05.txt", "response-to-elz-2002-11-05.txt"],
+        ]
+
+        assert len(titles) == len(dates)
+        assert len(titles) == len(parts)
+
+        part_times = dict()
+        part_times["klensin-2023-08-15.txt"] = "2023-08-15 15:03:55 -0400"
+        part_times["response-to-klensin-2023-08-15.txt"] = "2023-08-24 18:54:13 +0300"
+        part_times["hardie-frindell-2023-07-19.txt"] = "2023-07-19 07:17:16PDT"
+        part_times[
+            "response-to-hardie-frindell-2023-07-19.txt"
+        ] = "2023-08-15 11:58:26PDT"
+        part_times["mcsweeney-2020-07-08.txt"] = "2020-07-08 14:45:00 -0400"
+        part_times["response-to-mcsweeney-2020-07-08.pdf"] = "2020-07-28 12:54:04 -0000"
+        part_times["gont-2020-04-22.txt"] = "2020-04-22 22:26:20 -0400"
+        part_times["response-to-gont-2020-06-02.txt"] = "2020-06-02 20:44:29 -0400"
+        part_times["klensin-2020-02-04.txt"] = "2020-02-04 13:54:46 -0500"
+        # part_times["response-to-klensin-2020-02-04.txt"]="2020-03-24 11:49:31EDT"
+        part_times["response-to-klensin-2020-02-04.txt"] = "2020-03-24 11:49:31 -0400"
+        part_times["klensin-2018-07-07.txt"] = "2018-07-07 12:40:43PDT"
+        # part_times["response-to-klensin-2018-07-07.txt"]="2018-08-16 10:46:45EDT"
+        part_times["response-to-klensin-2018-07-07.txt"] = "2018-08-16 10:46:45 -0400"
+        part_times["klensin-2017-11-29.txt"] = "2017-11-29 09:35:02 -0500"
+        part_times["response-to-klensin-2017-11-29.md"] = "2017-11-30 11:33:04 -0500"
+        part_times["morfin-2015-03-11.pdf"] = "2015-03-11 18:03:44 -0000"
+        part_times["response-to-morfin-2015-03-11.md"] = "2015-04-16 15:18:09 -0000"
+        part_times["conradi-2014-08-28.txt"] = "2014-08-28 22:28:06 +0300"
+        part_times["masotta-2013-11-14.txt"] = "2013-11-14 15:35:19 +0200"
+        part_times["response-to-masotta-2013-11-14.md"] = "2014-01-27 07:39:32 -0800"
+        part_times["baryun-2013-06-19.txt"] = "2013-06-19 06:29:51PDT"
+        part_times["response-to-baryun-2013-06-19.md"] = "2013-07-02 15:24:42 -0700"
+        part_times["otis-2013-05-30.txt"] = "2013-05-30 19:35:18 +0000"
+        part_times["response-to-otis-2013-05-30.md"] = "2013-06-27 11:56:48 -0700"
+        part_times["morfin-2013-04-05.pdf"] = "2013-04-05 17:31:19 -0700"
+        part_times["response-to-morfin-2013-04-05.md"] = "2013-04-17 08:17:29 -0700"
+        part_times["morfin-2010-03-10.pdf"] = "2010-03-10 21:40:58 +0100"
+        part_times["response-to-morfin-2010-03-10.txt"] = "2010-04-07 14:26:06 -0700"
+        part_times["otis-2009-02-16.txt"] = "2009-02-16 15:47:15 -0800"
+        part_times["anderson-2008-11-14.md"] = "2008-11-14 00:16:58 -0500"
+        part_times["response-to-anderson-2008-11-14.txt"] = "2008-12-15 11:00:02 -0800"
+        part_times["morfin-2008-09-10.txt"] = "2008-09-10 04:10:13 +0200"
+        part_times["response-to-morfin-2008-09-10.txt"] = "2008-09-28 10:00:01PDT"
+        part_times["glassey-2008-07-28.txt"] = "2008-07-28 08:34:52 -0700"
+        part_times["response-to-glassey-2008-07-28.txt"] = "2008-09-02 11:00:01PDT"
+        part_times["klensin-2008-06-13.txt"] = "2008-06-13 21:14:38 -0400"
+        part_times["response-to-klensin-2008-06-13.txt"] = "2008-07-07 10:00:01 PDT"
+        # part_times["anderson-2007-12-26.txt"]="2007-12-26 17:19:34EST"
+        part_times["anderson-2007-12-26.txt"] = "2007-12-26 17:19:34 -0500"
+        part_times["response-to-anderson-2007-12-26.txt"] = "2008-01-15 17:21:05 -0500"
+        part_times["glassey-2007-11-26.txt"] = "2007-11-26 08:13:22 -0800"
+        part_times["response-to-glassey-2007-11-26.txt"] = "2008-01-23 17:38:43 -0500"
+        part_times["gellens-2007-06-22.pdf"] = "2007-06-22 21:45:41 -0400"
+        part_times["response-to-gellens-2007-06-22.txt"] = "2007-09-20 14:01:27 -0400"
+        part_times["morfin-2006-10-24.txt"] = "2006-10-24 05:03:17 +0200"
+        part_times["response-to-morfin-2006-10-24.txt"] = "2006-11-07 12:56:02 -0500"
+        part_times["morfin-2006-09-09.txt"] = "2006-09-09 02:54:55 +0200"
+        part_times["response-to-morfin-2006-09-09.txt"] = "2006-09-15 12:56:31 -0400"
+        part_times["sayre-2006-08-29.txt"] = "2006-08-29 17:05:03 -0400"
+        part_times["response-to-sayre-2006-08-29.txt"] = "2006-10-16 13:07:18 -0400"
+        part_times["morfin-2006-08-16.pdf"] = "2006-08-16 18:28:19 -0400"
+        part_times["response-to-morfin-2006-08-17.txt"] = "2006-08-22 12:05:42 -0400"
+        part_times[
+            "response-to-morfin-2006-08-17-part2.txt"
+        ] = "2006-11-07 13:00:58 -0500"
+        # part_times["anderson-2006-06-13.txt"]="2006-06-13 21:51:18EDT"
+        part_times["anderson-2006-06-13.txt"] = "2006-06-13 21:51:18 -0400"
+        part_times["response-to-anderson-2006-06-14.txt"] = "2006-07-10 14:31:08 -0400"
+        part_times["morfin-2006-05-17.pdf"] = "2006-05-17 06:46:18 +0200"
+        part_times["response-to-morfin-2006-05-17.txt"] = "2006-07-10 14:18:10 -0400"
+        part_times["anderson-2006-03-08.md"] = "2006-03-08 09:42:44 +0100"
+        part_times["response-to-anderson-2006-03-08.txt"] = "2006-03-20 14:55:38 -0500"
+        part_times["morfin-2006-02-20.txt"] = "2006-02-20 19:18:24 +0100"
+        part_times["response-to-morfin-2006-02-20.txt"] = "2006-03-06 13:08:39 -0500"
+        part_times["morfin-2006-02-17.txt"] = "2006-02-17 18:59:38 +0100"
+        part_times["response-to-morfin-2006-02-17.txt"] = "2006-07-10 14:05:15 -0400"
+        part_times["morfin-2006-02-07.txt"] = "2006-02-07 19:38:57 -0500"
+        part_times["response-to-morfin-2006-02-07.txt"] = "2006-02-21 19:09:26 -0500"
+        part_times["morfin-2006-01-14.txt"] = "2006-01-14 15:05:24 +0100"
+        part_times["response-to-morfin-2006-01-14.txt"] = "2006-02-21 12:23:38 -0500"
+        part_times["morfin-2005-10-19.txt"] = "2005-10-19 17:12:11 +0200"
+        part_times["response-to-morfin-2005-10-19.txt"] = "2005-11-15 11:42:30 -0500"
+        part_times["leibzon-2005-08-29.txt"] = "2005-08-29 08:28:52PDT"
+        part_times["response-to-leibzon-2005-08-29.txt"] = "2005-12-08 14:04:47 -0500"
+        part_times["mehnle-2005-08-25.txt"] = "2005-08-25 00:45:26 +0200"
+        part_times["response-to-mehnle-2005-08-25.txt"] = "2005-12-08 13:37:38 -0500"
+        part_times["klensin-2005-06-10.txt"] = "2005-06-10 14:49:17 -0400"
+        part_times["response-to-klensin-2005-06-10.txt"] = "2005-07-22 18:14:06 -0400"
+        part_times["meyer-2003-11-15.txt"] = "2003-11-15 09:47:11 -0800"
+        part_times["response-to-meyer-2003-11-15.txt"] = "2003-11-25 10:56:06 -0500"
+        part_times["glassey-2003-08-06.txt"] = "2003-08-06 02:14:24 +0000"
+        part_times["response-to-glassey-2003-08-06.txt"] = "2003-09-24 09:54:51 -0400"
+        part_times["hain-2003-07-31.txt"] = "2003-07-31 16:44:19 -0700"
+        part_times["response-to-hain-2003-07-31.txt"] = "2003-09-30 14:44:30 -0400"
+        part_times["zorn-2003-01-15.txt"] = "2003-01-15 01:22:28 -0800"
+        part_times["elz-2002-11-05.txt"] = "2002-11-05 10:51:13 +0700"
+        # No time could be found for this one:
+        part_times["response-to-zorn-2003-01-15.txt"] = "2003-02-08"
+        # This one was issued sometime between 2002-12-27 (when IESG minutes note that the
+        # appeal response was approved) and 2003-01-04 (when the appeal was escalated to
+        # the IAB) - we're using the earlier end of the window
+        part_times["response-to-elz-2002-11-05.txt"] = "2002-12-27"
+        for name in part_times:
+            part_times[name] = dateutil.parser.parse(part_times[name]).astimezone(
+                datetime.timezone.utc
+            )
+
+        redirects = []
+        for index, title in enumerate(titles):
+            # IESG is group 2
+            appeal = Appeal.objects.create(
+                name=titles[index], date=dates[index], group_id=2
+            )
+            for part in parts[index]:
+                if part.endswith(".pdf"):
+                    content_type = "application/pdf"
+                else:
+                    content_type = "text/markdown;charset=utf-8"
+                if part.endswith(".md"):
+                    source_path = Path(tmpdir) / "iesg_appeals" / part
+                else:
+                    source_path = Path(old_appeals_root) / part
+                with source_path.open("rb") as source_file:
+                    bits = source_file.read()
+                    if part == "morfin-2008-09-10.txt":
+                        bits = bits.decode("macintosh")
+                        bits = bits.replace("\r", "\n")
+                        bits = bits.encode("utf8")
+                    elif part in ["morfin-2006-02-07.txt", "morfin-2006-01-14.txt"]:
+                        bits = bits.decode("windows-1252").encode("utf8")
+                    artifact_type_id = (
+                        "response" if part.startswith("response") else "appeal"
+                    )
+                    artifact = AppealArtifact.objects.create(
+                        appeal=appeal,
+                        artifact_type_id=artifact_type_id,
+                        date=part_times[part].date(),
+                        content_type=content_type,
+                        bits=bits,
+                    )
+                    redirects.append(
+                        (
+                            part.replace(".md", ".html")
+                            if part.endswith(".md")
+                            else part,
+                            artifact.pk,
+                        )
+                    )
+
+        shutil.rmtree(tmpdir)
+        with open("iesg_appeal_redirects.txt", "w") as f:
+            f.write(str(redirects))
diff --git a/ietf/group/management/commands/import_iesg_statements.py b/ietf/group/management/commands/import_iesg_statements.py
new file mode 100644
index 000000000..523715396
--- /dev/null
+++ b/ietf/group/management/commands/import_iesg_statements.py
@@ -0,0 +1,189 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+import debug  # pyflakes:ignore
+
+import datetime
+import os
+import shutil
+import subprocess
+import tempfile
+
+from collections import namedtuple, Counter
+from pathlib import Path
+
+from django.conf import settings
+from django.core.management.base import BaseCommand
+
+from ietf.doc.models import Document, DocEvent, State
+from ietf.utils.text import xslugify
+
+
+class Command(BaseCommand):
+    help = "Performs a one-time import of IESG statements"
+
+    def handle(self, *args, **options):
+        if Document.objects.filter(type="statement", group__acronym="iesg").exists():
+            self.stdout.write("IESG statement documents already exist - exiting")
+            exit(-1)
+        tmpdir = tempfile.mkdtemp()
+        process = subprocess.Popen(
+            ["git", "clone", "https://github.com/kesara/iesg-scraper.git", tmpdir],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        sub_stdout, sub_stderr = process.communicate()
+        if not Path(tmpdir).joinpath("iesg_statements", "2000-08-29-0.md").exists():
+            self.stdout.write(
+                "Git clone of the iesg-scraper directory did not go as expected"
+            )
+            self.stdout.write("stdout:", sub_stdout)
+            self.stdout.write("stderr:", sub_stderr)
+            self.stdout.write(f"Clean up {tmpdir} manually")
+            exit(-1)
+
+        for item in self.get_work_items():
+            replaced = item.title.endswith(" SUPERSEDED") or item.doc_time.date() == datetime.date(2007,7,30)
+            title = item.title
+            if title.endswith(" - SUPERSEDED"):
+                title = title[: -len(" - SUPERSEDED")]
+            name = f"statement-iesg-{xslugify(title)}-{item.doc_time:%Y%m%d}"
+            dest_filename = f"{name}-00.md"
+            # Create Document
+            doc = Document.objects.create(
+                name=name,
+                type_id="statement",
+                title=title,
+                group_id=2,  # The IESG group
+                rev="00",
+                uploaded_filename=dest_filename,
+            )
+            doc.set_state(
+                State.objects.get(
+                    type_id="statement",
+                    slug="replaced" if replaced else "active",
+                )
+            )
+            e1 = DocEvent.objects.create(
+                time=item.doc_time,
+                type="published_statement",
+                doc=doc,
+                rev="00",
+                by_id=1,  # (System)
+                desc="Statement published (note: The exact time of day is inaccurate - the actual time of day is not known)",
+            )
+            e2 = DocEvent.objects.create(
+                type="added_comment",
+                doc=doc,
+                rev="00",
+                by_id=1,  # (System)
+                desc="Statement moved into datatracker from www.ietf.org",
+            )
+            doc.save_with_history([e1, e2])
+
+            # Put file in place
+            source = Path(tmpdir).joinpath("iesg_statements", item.source_filename)
+            dest = Path(settings.DOCUMENT_PATH_PATTERN.format(doc=doc)).joinpath(
+                dest_filename
+            )
+            if dest.exists():
+                self.stdout.write(
+                    f"WARNING: {dest} already exists - not overwriting it."
+                )
+            else:
+                os.makedirs(dest.parent, exist_ok=True)
+                shutil.copy(source, dest)
+
+        shutil.rmtree(tmpdir)
+
+    def get_work_items(self):
+        Item = namedtuple("Item", "doc_time source_filename title")
+        items = []
+        dressed_rows = " ".join(
+            self.cut_paste_from_www().expandtabs(1).split(" ")
+        ).split("\n")
+        # Rube-Goldberg-esque dance to deal with conflicting directions of the scrape and
+        # what order we want the result to sort to
+        dressed_rows.reverse()
+        total_times_date_seen = Counter([row.split(" ")[0] for row in dressed_rows])
+        count_date_seen_so_far = Counter()
+        for row in dressed_rows:
+            date_part = row.split(" ")[0]
+            title_part = row[len(date_part) + 1 :]
+            datetime_args = list(map(int, date_part.replace("-0", "-").split("-")))
+            # Use the minutes in timestamps to preserve order of statements
+            # on the same day as they currently appear at www.ietf.org
+            datetime_args.extend([12, count_date_seen_so_far[date_part]])
+            count_date_seen_so_far[date_part] += 1
+            doc_time = datetime.datetime(*datetime_args, tzinfo=datetime.timezone.utc)
+            items.append(
+                Item(
+                    doc_time,
+                    f"{date_part}-{total_times_date_seen[date_part] - count_date_seen_so_far[date_part]}.md",
+                    title_part,
+                )
+            )
+        return items
+
+    def cut_paste_from_www(self):
+        return """2023-08-24	Support Documents in IETF Working Groups
+2023-08-14	Guidance on In-Person and Online Interim Meetings
+2023-05-01	IESG Statement on EtherTypes
+2023-03-15	Second Report on the RFC 8989 Experiment
+2023-01-27	Guidance on In-Person and Online Interim Meetings - SUPERSEDED
+2022-10-31	Statement on Restricting Access to IETF IT Systems
+2022-01-21	Handling Ballot Positions
+2021-09-01	Report on the RFC 8989 experiment
+2021-07-21	IESG Statement on Allocation of Email Addresses in the ietf.org Domain
+2021-05-11	IESG Statement on Inclusive Language
+2021-05-10	IESG Statement on Internet-Draft Authorship
+2021-05-07	IESG Processing of RFC Errata for the IETF Stream
+2021-04-16	Last Call Guidance to the Community
+2020-07-23	IESG Statement On Oppressive or Exclusionary Language
+2020-05-01	Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
+2018-03-16	IETF Meeting Photography Policy
+2018-01-11	Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
+2017-02-09	License File for Open Source Repositories
+2016-11-13	Support Documents in IETF Working Groups - SUPERSEDED
+2016-02-05	Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
+2016-01-11	Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
+2015-08-20	IESG Statement on Maximizing Encrypted Access To IETF Information
+2015-06-11	IESG Statement on Internet-Draft Authorship - SUPERSEDED
+2014-07-20	IESG Statement on Designating RFCs as Historic
+2014-05-07	DISCUSS Criteria in IESG Review
+2014-03-02	Writable MIB Module IESG Statement
+2013-11-03	IETF Anti-Harassment Policy
+2012-10-25	IESG Statement on Ethertypes - SUPERSEDED
+2012-10-25	IESG Statement on Removal of an Internet-Draft from the IETF Web Site
+2011-10-20	IESG Statement on Designating RFCs as Historic - SUPERSEDED
+2011-06-27	IESG Statement on Designating RFCs as Historic - SUPERSEDED
+2011-06-13	IESG Statement on IESG Processing of RFC Errata concerning RFC Metadata
+2010-10-11	IESG Statement on Document Shepherds
+2010-05-24	IESG Statement on the Usage of Assignable Codepoints, Addresses and Names in Specification Examples
+2010-05-24	IESG Statement on NomCom Eligibility and Day Passes
+2009-09-08	IESG Statement on Copyright
+2009-01-20	IESG Statement on Proposed Status for IETF Documents Reserving Resources for Example Purposes
+2008-09-02	Guidance on Interim Meetings, Conference Calls and Jabber Sessions - SUPERSEDED
+2008-07-30	IESG Processing of RFC Errata for the IETF Stream
+2008-04-14	IESG Statement on Spam Control on IETF Mailing Lists
+2008-03-03	IESG Statement on Registration Requests for URIs Containing Telephone Numbers
+2008-02-27	IESG Statement on RFC3406 and URN Namespaces Registry Review
+2008-01-23	Advice for WG Chairs Dealing with Off-Topic Postings
+2007-10-04	On Appeals of IESG and Area Director Actions and Decisions
+2007-07-05	Experimental Specification of New Congestion Control Algorithms
+2007-03-20	Guidance on Area Director Sponsoring of Documents
+2007-01-15	Last Call Guidance to the Community - SUPERSEDED
+2006-04-19	IESG Statement: Normative and Informative References
+2006-02-17	IESG Statement on Disruptive Posting
+2006-01-09	Guidance for Spam Control on IETF Mailing Lists - SUPERSEDED
+2006-01-05	IESG Statement on AUTH48 State
+2005-05-12	Syntax for Format Definitions
+2003-02-11	IESG Statement on IDN
+2002-11-27	Copyright Statement in MIB and PIB Modules
+2002-03-13	Guidance for Spam Control on IETF Mailing Lists - SUPERSEDED
+2001-12-21	On Design Teams
+2001-10-01	Guidelines for the Use of Formal Languages in IETF Specifications
+2001-03-21	Establishment of Temporary Sub-IP Area
+2000-12-06	Plans to Organize "Sub-IP" Technologies in the IETF
+2000-11-20	A New IETF Work Area
+2000-08-29	Guidance on Interim IETF Working Group Meetings and Conference Calls - SUPERSEDED
+2000-08-29	IESG Guidance on the Moderation of IETF Working Group Mailing Lists"""
diff --git a/ietf/group/milestones.py b/ietf/group/milestones.py
index 039fdb44c..52f2eaebe 100644
--- a/ietf/group/milestones.py
+++ b/ietf/group/milestones.py
@@ -399,7 +399,7 @@ def edit_milestones(request, acronym, group_type=None, milestone_set="current"):
                        can_change_uses_milestone_dates=can_change_uses_milestone_dates))
 
 @login_required
-def reset_charter_milestones(request, group_type, acronym):
+def reset_charter_milestones(request, acronym, group_type=None):
     """Reset charter milestones to the currently in-use milestones."""
     group = get_group_or_404(acronym, group_type)
     if not group.features.has_milestones:
diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py
index 4c353f110..6ecac7d34 100644
--- a/ietf/group/tests_info.py
+++ b/ietf/group/tests_info.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2009-2023, All Rights Reserved
+# Copyright The IETF Trust 2009-2024, All Rights Reserved
 # -*- coding: utf-8 -*-
 
 
@@ -2044,8 +2044,17 @@ class AcronymValidationTests(TestCase):
         self.assertTrue(form.is_valid())
         form = AdminGroupForm({'acronym':'shouldfail-','name':'should fail','type':'wg','state':'active','used_roles':'[]','time':now})
         self.assertIn('acronym',form.errors)
+        form = AdminGroupForm({'acronym':'shouldfail-','name':'should fail','type':'sdo','state':'active','used_roles':'[]','time':now})
+        self.assertIn('acronym',form.errors)
         form = AdminGroupForm({'acronym':'-shouldfail','name':'should fail','type':'wg','state':'active','used_roles':'[]','time':now})
         self.assertIn('acronym',form.errors)
+        form = AdminGroupForm({'acronym':'-shouldfail','name':'should fail','type':'sdo','state':'active','used_roles':'[]','time':now})
+        self.assertIn('acronym',form.errors)
+        # SDO groups (and only SDO groups) can have a leading number
+        form = AdminGroupForm({'acronym':'3gpp-should-pass','name':'should pass','type':'sdo','state':'active','used_roles':'[]','time':now})
+        self.assertTrue(form.is_valid())
+        form = AdminGroupForm({'acronym':'123shouldfail','name':'should fail','type':'wg','state':'active','used_roles':'[]','time':now})
+        self.assertIn('acronym',form.errors)
 
         wg = GroupFactory(acronym='bad-idea', type_id='wg') # There are some existing wg and programs with hyphens in their acronyms.
         form = AdminGroupForm({'acronym':wg.acronym,'name':wg.name,'type':wg.type_id,'state':wg.state_id,'used_roles':str(wg.used_roles),'time':now},instance=wg)
diff --git a/ietf/group/views.py b/ietf/group/views.py
index 698963678..374cf80df 100644
--- a/ietf/group/views.py
+++ b/ietf/group/views.py
@@ -43,12 +43,14 @@ import re
 import json
 
 from collections import OrderedDict, defaultdict
+import types
 from simple_history.utils import update_change_reason
 
 from django import forms
 from django.conf import settings
 from django.contrib.auth.decorators import login_required
-from django.db.models import Q, Count, OuterRef, Subquery
+from django.db.models import Count, F, OuterRef, Prefetch, Q, Subquery, TextField, Value
+from django.db.models.functions import Coalesce
 from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse
 from django.shortcuts import render, redirect, get_object_or_404
 from django.template.loader import render_to_string
@@ -83,6 +85,7 @@ from ietf.group.utils import (get_charter_text, can_manage_all_groups_of_type,
 from ietf.ietfauth.utils import has_role, is_authorized_in_group
 from ietf.mailtrigger.utils import gather_relevant_expansions
 from ietf.meeting.helpers import get_meeting
+from ietf.meeting.models import ImportantDate, SchedTimeSessAssignment, SchedulingEvent
 from ietf.meeting.utils import group_sessions
 from ietf.name.models import GroupTypeName, StreamName
 from ietf.person.models import Email, Person
@@ -116,6 +119,7 @@ from ietf.dbtemplate.models import DBTemplate
 from ietf.mailtrigger.utils import gather_address_lists
 from ietf.mailtrigger.models import Recipient
 from ietf.settings import MAILING_LIST_INFO_URL
+from ietf.utils.decorators import ignore_view_kwargs
 from ietf.utils.response import permission_denied
 from ietf.utils.text import strip_suffix
 from ietf.utils import markdown
@@ -834,21 +838,70 @@ def meetings(request, acronym, group_type=None):
 
     four_years_ago = timezone.now() - datetime.timedelta(days=4 * 365)
 
-    sessions = (
-        group.session_set.with_current_status()
-        .filter(
-            meeting__date__gt=four_years_ago
-            if group.acronym != "iab"
-            else datetime.date(1970, 1, 1),
-            type__in=["regular", "plenary", "other"],
-        )
-        .filter(
-            current_status__in=["sched", "schedw", "appr", "canceled"],
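+    # Gather the group's sessions via their assignments on each meeting's official
+    # schedule, annotating the latest scheduling status inline rather than using
+    # session_set.with_current_status()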
+    stsas = SchedTimeSessAssignment.objects.filter(
+        session__type__in=["regular", "plenary", "other"],
+        session__group=group)
+    if group.acronym not in ["iab", "iesg"]:
+        stsas = stsas.filter(session__meeting__date__gt=four_years_ago)
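+    # Annotate each assignment with its session's most recent SchedulingEvent status
+    # (empty string if the session has no scheduling events)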
+    stsas = stsas.annotate(sessionstatus=Coalesce(
+                Subquery(
+                    SchedulingEvent.objects.filter(
+                        session=OuterRef("session__pk")
+                    ).order_by(
+                        '-time', '-id'
+                    ).values('status')[:1]),
+                Value(''),
+                output_field=TextField())
+    ).filter(
+        sessionstatus__in=["sched", "schedw", "appr", "canceled"],
+        session__meeting__schedule=F("schedule")
+    ).distinct().select_related(
+        "session", "session__group", "session__group__parent", "session__meeting__type", "timeslot"
+    ).prefetch_related(
+        "session__materials",
+        "session__materials__states",
+        Prefetch("session__materials",
+            queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'),
+            to_attr="prefetched_active_materials"
+        ),
+    )
+
+    stsas = list(stsas)
+
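+    # Attach the official assignment and its status to each session so that
+    # official_timeslotassignment() and current_status can be used below and in
+    # templates without further queries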
+    for stsa in stsas:
+        stsa.session._otsa = stsa
+        stsa.session.official_timeslotassignment = types.MethodType(lambda self:self._otsa, stsa.session)
+        stsa.session.current_status = stsa.sessionstatus
+
+    sessions = sorted(
+        set([stsa.session for stsa in stsas]),
+        key=lambda x: (
+            x._otsa.timeslot.time,
+            x._otsa.timeslot.type_id,
+            x._otsa.session.group.parent.name if x._otsa.session.group.parent else None,
+            x._otsa.session.name
         )
     )
-    sessions = list(sessions)
+
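+    # Number each session within its meeting, in schedule order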
+    meeting_seen = None
+    for s in sessions:
+        if s.meeting != meeting_seen:
+            meeting_seen = s.meeting
+            order = 1
+        s._oim = order
+        s.order_in_meeting = types.MethodType(lambda self:self._oim, s)
+        order += 1
+
+
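+    # Fetch each meeting's "revsub" important date in one query; it is the cutoff
+    # used for cached_is_cutoff below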
+    revsub_dates_by_meeting = dict(ImportantDate.objects.filter(name_id="revsub", meeting__session__in=sessions).distinct().values_list("meeting_id","date"))
+
     for s in sessions:
         s.order_number = s.order_in_meeting()
+        if s.meeting.pk in revsub_dates_by_meeting:
+            cutoff_date = revsub_dates_by_meeting[s.meeting.pk]
+        else:
+            cutoff_date = s.meeting.date + datetime.timedelta(days=s.meeting.submission_correction_day_offset)
+        s.cached_is_cutoff = date_today(datetime.timezone.utc) > cutoff_date
 
     future, in_progress, recent, past = group_sessions(sessions)
 
@@ -856,7 +909,7 @@ def meetings(request, acronym, group_type=None):
     can_always_edit = has_role(request.user, ["Secretariat", "Area Director"])
 
     far_past = []
-    if group.acronym == "iab":
+    if group.acronym in ["iab", "iesg"]:
         recent_past = []
         for s in past:
             if s.time >= four_years_ago:
@@ -1346,16 +1399,36 @@ def stream_edit(request, acronym):
                 )
 
 
-@cache_control(public=True, max_age=30*60)
+@cache_control(public=True, max_age=30 * 60)
 @cache_page(30 * 60)
 def group_menu_data(request):
-    groups = Group.objects.filter(state="active", parent__state="active").filter(Q(type__features__acts_like_wg=True)|Q(type_id__in=['program','iabasg','iabworkshop'])|Q(parent__acronym='ietfadminllc')|Q(parent__acronym='rfceditor')).order_by("-type_id","acronym")
+    groups = (
+        Group.objects.filter(state="active", parent__state="active")
+        .filter(
+            Q(type__features__acts_like_wg=True)
+            | Q(type_id__in=["program", "iabasg", "iabworkshop"])
+            | Q(parent__acronym="ietfadminllc")
+            | Q(parent__acronym="rfceditor")
+        )
+        .order_by("-type_id", "acronym")
+        .select_related("type")
+    )
 
     groups_by_parent = defaultdict(list)
     for g in groups:
-        url = urlreverse("ietf.group.views.group_home", kwargs={ 'group_type': g.type_id, 'acronym': g.acronym })
-#        groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url })
-        groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'type': escape(g.type.verbose_name or g.type.name), 'url': url })
+        url = urlreverse(
+            "ietf.group.views.group_home",
+            kwargs={"group_type": g.type_id, "acronym": g.acronym},
+        )
+        #        groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url })
+        groups_by_parent[g.parent_id].append(
+            {
+                "acronym": g.acronym,
+                "name": escape(g.name),
+                "type": escape(g.type.verbose_name or g.type.name),
+                "url": url,
+            }
+        )
 
     iab = Group.objects.get(acronym="iab")
     groups_by_parent[iab.pk].insert(
@@ -1364,12 +1437,15 @@ def group_menu_data(request):
             "acronym": iab.acronym,
             "name": iab.name,
             "type": "Top Level Group",
-            "url": urlreverse("ietf.group.views.group_home", kwargs={"acronym": iab.acronym})
-        }
+            "url": urlreverse(
+                "ietf.group.views.group_home", kwargs={"acronym": iab.acronym}
+            ),
+        },
     )
     return JsonResponse(groups_by_parent)
 
 
 @cache_control(public=True, max_age=30 * 60)
 @cache_page(30 * 60)
 def group_stats_data(request, years="3", only_active=True):
@@ -2115,14 +2191,25 @@ def statements(request, acronym, group_type=None):
     if not acronym in ["iab", "iesg"]:
         raise Http404
     group = get_group_or_404(acronym, group_type)
-    statements = group.document_set.filter(type_id="statement").annotate(
-        published=Subquery(
-            DocEvent.objects.filter(
-                doc=OuterRef("pk"),
-                type="published_statement"
-            ).order_by("-time").values("time")[:1]
+    statements = (
+        group.document_set.filter(type_id="statement")
+        .annotate(
+            published=Subquery(
+                DocEvent.objects.filter(doc=OuterRef("pk"), type="published_statement")
+                .order_by("-time")
+                .values("time")[:1]
+            )
         )
-    ).order_by("-published")
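+        # also annotate the document's current statement state slug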
+        .annotate(
+            status=Subquery(
+                Document.states.through.objects.filter(
+                    document_id=OuterRef("pk"), state__type="statement"
+                ).values_list("state__slug", flat=True)[:1]
+            )
+        )
+        .order_by("-published")
+    )
     return render(
         request,
         "group/statements.html",
@@ -2158,7 +2245,8 @@ def appeals(request, acronym, group_type=None):
         ),
     )
 
-def appeal_artifact(request, acronym, artifact_id, group_type=None):
+@ignore_view_kwargs("group_type")
+def appeal_artifact(request, acronym, artifact_id):
     artifact = get_object_or_404(AppealArtifact, pk=artifact_id)
     if artifact.is_markdown():
         artifact_html = markdown.markdown(artifact.bits.tobytes().decode("utf-8"))
@@ -2177,7 +2265,8 @@ def appeal_artifact(request, acronym, artifact_id, group_type=None):
         )
     
 @role_required("Secretariat")
-def appeal_artifact_markdown(request, acronym, artifact_id, group_type=None):
+@ignore_view_kwargs("group_type")
+def appeal_artifact_markdown(request, acronym, artifact_id):
     artifact = get_object_or_404(AppealArtifact, pk=artifact_id)
     if artifact.is_markdown():
         return HttpResponse(artifact.bits, content_type=artifact.content_type)
diff --git a/ietf/meeting/forms.py b/ietf/meeting/forms.py
index ef6a2721e..164f0fd3b 100644
--- a/ietf/meeting/forms.py
+++ b/ietf/meeting/forms.py
@@ -341,7 +341,7 @@ class InterimSessionModelForm(forms.ModelForm):
                 # FIXME: What about agendas in html or markdown format?
                 uploaded_filename='{}-00.txt'.format(filename))
             doc.set_state(State.objects.get(type__slug=doc.type.slug, slug='active'))
-            self.instance.sessionpresentation_set.create(document=doc, rev=doc.rev)
+            self.instance.presentations.create(document=doc, rev=doc.rev)
             NewRevisionDocEvent.objects.create(
                 type='new_revision',
                 by=self.user.person,
@@ -380,7 +380,8 @@ class InterimAnnounceForm(forms.ModelForm):
 class InterimCancelForm(forms.Form):
     group = forms.CharField(max_length=255, required=False)
     date = forms.DateField(required=False)
-    comments = forms.CharField(required=False, widget=forms.Textarea(attrs={'placeholder': 'enter optional comments here'}), strip=False)
+    # max_length must match Session.agenda_note
+    comments = forms.CharField(max_length=512, required=False, widget=forms.Textarea(attrs={'placeholder': 'enter optional comments here'}), strip=False)
 
     def __init__(self, *args, **kwargs):
         super(InterimCancelForm, self).__init__(*args, **kwargs)
diff --git a/ietf/meeting/helpers.py b/ietf/meeting/helpers.py
index 14478787f..c0e250cdc 100644
--- a/ietf/meeting/helpers.py
+++ b/ietf/meeting/helpers.py
@@ -104,7 +104,7 @@ def preprocess_assignments_for_agenda(assignments_queryset, meeting, extra_prefe
                 queryset=add_event_info_to_session_qs(Session.objects.all().prefetch_related(
                     'group', 'group__charter', 'group__charter__group',
                     Prefetch('materials',
-                             queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('sessionpresentation__order').prefetch_related('states'),
+                             queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'),
                              to_attr='prefetched_active_materials'
                     )
                 ))
@@ -890,7 +890,7 @@ def make_materials_directories(meeting):
     # was merged with the regular datatracker code; then in secr/proceedings/views.py
     # in make_directories())
     saved_umask = os.umask(0)   
-    for leaf in ('slides','agenda','minutes','id','rfc','bluesheets'):
+    for leaf in ('slides','agenda','minutes','narrativeminutes','id','rfc','bluesheets'):
         target = os.path.join(path,leaf)
         if not os.path.exists(target):
             os.makedirs(target)
diff --git a/ietf/meeting/management/commands/import_iesg_minutes.py b/ietf/meeting/management/commands/import_iesg_minutes.py
new file mode 100644
index 000000000..92abbe92d
--- /dev/null
+++ b/ietf/meeting/management/commands/import_iesg_minutes.py
@@ -0,0 +1,343 @@
+# Copyright The IETF Trust 2023, All Rights Reserved
+
+from collections import namedtuple
+import datetime
+import os
+import re
+import shutil
+
+from django.conf import settings
+from django.core.management import BaseCommand
+
+from pathlib import Path
+from zoneinfo import ZoneInfo
+from ietf.doc.models import DocEvent, Document
+
+from ietf.meeting.models import (
+    Meeting,
+    SchedTimeSessAssignment,
+    Schedule,
+    SchedulingEvent,
+    Session,
+    TimeSlot,
+)
+from ietf.name.models import DocTypeName
+
+
+def add_time_of_day(bare_datetime):
+    """Add a time for the iesg meeting based on a date and make it tzaware
+
+    From the secretariat - the telechats happened at these times:
+    2015-04-09 to present: 0700 PT America/Los Angeles
+    1993-02-01 to 2015-03-12: 1130 ET America/New York
+    1991-07-30 to 1993-01-25: 1200 ET America/New York
+    """
+    dt = None
+    if bare_datetime.year > 2015:
+        dt = bare_datetime.replace(hour=7).replace(
+            tzinfo=ZoneInfo("America/Los_Angeles")
+        )
+    elif bare_datetime.year == 2015:
+        if bare_datetime.month >= 4:
+            dt = bare_datetime.replace(hour=7).replace(
+                tzinfo=ZoneInfo("America/Los_Angeles")
+            )
+        else:
+            dt = bare_datetime.replace(hour=11, minute=30).replace(
+                tzinfo=ZoneInfo("America/New_York")
+            )
+    elif bare_datetime.year > 1993:
+        dt = bare_datetime.replace(hour=11, minute=30).replace(
+            tzinfo=ZoneInfo("America/New_York")
+        )
+    elif bare_datetime.year == 1993:
+        if bare_datetime.month >= 2:
+            dt = bare_datetime.replace(hour=11, minute=30).replace(
+                tzinfo=ZoneInfo("America/New_York")
+            )
+        else:
+            dt = bare_datetime.replace(hour=12).replace(
+                tzinfo=ZoneInfo("America/New_York")
+            )
+    else:
+        dt = bare_datetime.replace(hour=12).replace(tzinfo=ZoneInfo("America/New_York"))
+
+    return dt.astimezone(datetime.timezone.utc)
+
+
+def build_bof_coord_data():
+    CoordTuple = namedtuple("CoordTuple", "meeting_number source_name")
+
+    def utc_from_la_time(time):
+        return time.replace(tzinfo=ZoneInfo("America/Los_Angeles")).astimezone(
+            datetime.timezone.utc
+        )
+
+    data = dict()
+    data[utc_from_la_time(datetime.datetime(2016, 6, 10, 7, 0))] = CoordTuple(
+        96, "2015/bof-minutes-ietf-96.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2016, 10, 6, 7, 0))] = CoordTuple(
+        97, "2016/BoF-Minutes-2016-10-06.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2017, 2, 15, 8, 0))] = CoordTuple(
+        98, "2017/bof-minutes-ietf-98.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2017, 6, 7, 8, 0))] = CoordTuple(
+        99, "2017/bof-minutes-ietf-99.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2017, 10, 5, 7, 0))] = CoordTuple(
+        100, "2017/bof-minutes-ietf-100.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2018, 2, 5, 11, 0))] = CoordTuple(
+        101, "2018/bof-minutes-ietf-101.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2018, 6, 5, 8, 0))] = CoordTuple(
+        102, "2018/bof-minutes-ietf-102.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2018, 9, 26, 7, 0))] = CoordTuple(
+        103, "2018/bof-minutes-ietf-103.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2019, 2, 15, 9, 0))] = CoordTuple(
+        104, "2019/bof-minutes-ietf-104.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2019, 6, 11, 7, 30))] = CoordTuple(
+        105, "2019/bof-minutes-ietf-105.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2019, 10, 9, 6, 30))] = CoordTuple(
+        106, "2019/bof-minutes-ietf-106.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2020, 2, 13, 8, 0))] = CoordTuple(
+        107, "2020/bof-minutes-ietf-107.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2020, 6, 15, 8, 0))] = CoordTuple(
+        108, "2020/bof-minutes-ietf-108.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2020, 10, 9, 7, 0))] = CoordTuple(
+        109, "2020/bof-minutes-ietf-109.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2021, 1, 14, 13, 30))] = CoordTuple(
+        110, "2021/bof-minutes-ietf-110.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2021, 6, 1, 8, 0))] = CoordTuple(
+        111, "2021/bof-minutes-ietf-111.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2021, 9, 15, 9, 0))] = CoordTuple(
+        112, "2021/bof-minutes-ietf-112.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2022, 1, 28, 7, 0))] = CoordTuple(
+        113, "2022/bof-minutes-ietf-113.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2022, 6, 2, 10, 0))] = CoordTuple(
+        114, "2022/bof-minutes-ietf-114.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2022, 9, 13, 9, 0))] = CoordTuple(
+        115, "2022/bof-minutes-ietf-115.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2023, 2, 1, 9, 0))] = CoordTuple(
+        116, "2023/bof-minutes-ietf-116.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2023, 6, 1, 7, 0))] = CoordTuple(
+        117, "2023/bof-minutes-ietf-117.txt"
+    )
+    data[utc_from_la_time(datetime.datetime(2023, 9, 15, 8, 0))] = CoordTuple(
+        118, "2023/bof-minutes-ietf-118.txt"
+    )
+    return data
+
+
+class Command(BaseCommand):
+    help = "Performs a one-time import of IESG minutes, creating Meetings to attach them to"
+
+    def handle(self, *args, **options):
+        old_minutes_root = (
+            "/a/www/www6/iesg/minutes"
+            if settings.SERVER_MODE == "production"
+            else "/assets/www6/iesg/minutes"
+        )
+        minutes_dir = Path(old_minutes_root)
+        date_re = re.compile(r"\d{4}-\d{2}-\d{2}")
+        meeting_times = set()
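+        # Scan the per-year directories for minutes and narrative-minutes files
+        # (txt or html) and collect telechat datetimes from the dates in their filenames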
+        for file_prefix in ["minutes", "narrative"]:
+            paths = list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.txt"))
+            paths.extend(
+                list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.html"))
+            )
+            for path in paths:
+                s = date_re.search(path.name)
+                if s:
+                    meeting_times.add(
+                        add_time_of_day(
+                            datetime.datetime.strptime(s.group(), "%Y-%m-%d")
+                        )
+                    )
+        bof_coord_data = build_bof_coord_data()
+        bof_times = set(bof_coord_data.keys())
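+        # BOF coordination call times must not collide with the telechat times found above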
+        assert len(bof_times.intersection(meeting_times)) == 0
+        meeting_times.update(bof_times)
+        year_seen = None
+        for dt in sorted(meeting_times):
+            if dt.year != year_seen:
+                counter = 1
+                year_seen = dt.year
+            meeting_name = f"interim-{dt.year}-iesg-{counter:02d}"
+            meeting = Meeting.objects.create(
+                number=meeting_name,
+                type_id="interim",
+                date=dt.date(),
+                days=1,
+                time_zone=dt.tzname(),
+            )
+            schedule = Schedule.objects.create(
+                meeting=meeting,
+                owner_id=1,  # the "(System)" person
+                visible=True,
+                public=True,
+            )
+            meeting.schedule = schedule
+            meeting.save()
+            session = Session.objects.create(
+                meeting=meeting,
+                group_id=2,  # The IESG group
+                type_id="regular",
+                purpose_id="regular",
+                name=(
+                    f"IETF {bof_coord_data[dt].meeting_number} BOF Coordination Call"
+                    if dt in bof_times
+                    else "Formal Telechat"
+                ),
+            )
+            SchedulingEvent.objects.create(
+                session=session,
+                status_id="sched",
+                by_id=1,  # (System)
+            )
+            timeslot = TimeSlot.objects.create(
+                meeting=meeting,
+                type_id="regular",
+                time=dt,
+                duration=datetime.timedelta(seconds=2 * 60 * 60),
+            )
+            SchedTimeSessAssignment.objects.create(
+                timeslot=timeslot, session=session, schedule=schedule
+            )
+
+            if dt in bof_times:
+                source = minutes_dir / bof_coord_data[dt].source_name
+                if source.exists():
+                    doc_name = (
+                        f"minutes-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}"
+                    )
+                    doc_filename = f"{doc_name}-00.txt"
+                    doc = Document.objects.create(
+                        name=doc_name,
+                        type_id="minutes",
+                        title=f"Minutes IETF {bof_coord_data[dt].meeting_number} BOF coordination {meeting_name} {dt:%Y-%m-%d %H:%M}",
+                        group_id=2,  # the IESG group
+                        rev="00",
+                        uploaded_filename=doc_filename,
+                    )
+                    e = DocEvent.objects.create(
+                        type="comment",
+                        doc=doc,
+                        rev="00",
+                        by_id=1,  # "(System)"
+                        desc="Minutes moved into datatracker",
+                    )
+                    doc.save_with_history([e])
+                    session.presentations.create(document=doc, rev=doc.rev)
+                    dest = (
+                        Path(settings.AGENDA_PATH)
+                        / meeting_name
+                        / "minutes"
+                        / doc_filename
+                    )
+                    if dest.exists():
+                        self.stdout.write(
+                            f"WARNING: {dest} already exists - not overwriting it."
+                        )
+                    else:
+                        os.makedirs(dest.parent, exist_ok=True)
+                        shutil.copy(source, dest)
+            else:
+                for type_id in ["minutes", "narrativeminutes"]:
+                    source_file_prefix = (
+                        "minutes" if type_id == "minutes" else "narrative-minutes"
+                    )
+                    txt_source = (
+                        minutes_dir
+                        / f"{dt.year}"
+                        / f"{source_file_prefix}-{dt:%Y-%m-%d}.txt"
+                    )
+                    html_source = (
+                        minutes_dir
+                        / f"{dt.year}"
+                        / f"{source_file_prefix}-{dt:%Y-%m-%d}.html"
+                    )
+                    if txt_source.exists() and html_source.exists():
+                        self.stdout.write(
+                            f"WARNING: Both {txt_source} and {html_source} exist."
+                        )
+                    if txt_source.exists() or html_source.exists():
+                        prefix = DocTypeName.objects.get(slug=type_id).prefix
+                        doc_name = f"{prefix}-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}"
+                        suffix = "html" if html_source.exists() else "txt"
+                        doc_filename = f"{doc_name}-00.{suffix}"
+                        verbose_type = (
+                            "Minutes" if type_id == "minutes" else "Narrative Minutes"
+                        )
+                        doc = Document.objects.create(
+                            name=doc_name,
+                            type_id=type_id,
+                            title=f"{verbose_type} {meeting_name} {dt:%Y-%m-%d %H:%M}",
+                            group_id=2,  # the IESG group
+                            rev="00",
+                            uploaded_filename=doc_filename,
+                        )
+                        e = DocEvent.objects.create(
+                            type="comment",
+                            doc=doc,
+                            rev="00",
+                            by_id=1,  # "(System)"
+                            desc=f"{verbose_type} moved into datatracker",
+                        )
+                        doc.save_with_history([e])
+                        session.presentations.create(document=doc, rev=doc.rev)
+                        dest = (
+                            Path(settings.AGENDA_PATH)
+                            / meeting_name
+                            / type_id
+                            / doc_filename
+                        )
+                        if dest.exists():
+                            self.stdout.write(
+                                f"WARNING: {dest} already exists - not overwriting it."
+                            )
+                        else:
+                            os.makedirs(dest.parent, exist_ok=True)
+                            if html_source.exists():
+                                html_content = html_source.read_text(encoding="utf-8")
+                                html_content = html_content.replace(
+                                    f'href="IESGnarrative-{dt:%Y-%m-%d}.html#',
+                                    'href="#',
+                                )
+                                html_content = re.sub(
+                                    r'<a href="file:///[^"]*"><span[^>]*>([^<]*)</span></a>',
+                                    r"\1",
+                                    html_content,
+                                )
+                                html_content = re.sub(
+                                    r'<a href="file:///[^"]*">([^<]*)</a>',
+                                    r"\1",
+                                    html_content,
+                                )
+                                html_content = re.sub(
+                                    '<a href="http://validator.w3.org/[^>]*> *<img[^>]*></a>',
+                                    "",
+                                    html_content
+                                )
+                                dest.write_text(html_content, encoding="utf-8")
+                            else:
+                                shutil.copy(txt_source, dest)
+
+            counter += 1
diff --git a/ietf/meeting/migrations/0005_alter_session_agenda_note.py b/ietf/meeting/migrations/0005_alter_session_agenda_note.py
new file mode 100644
index 000000000..59daeea45
--- /dev/null
+++ b/ietf/meeting/migrations/0005_alter_session_agenda_note.py
@@ -0,0 +1,18 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("meeting", "0004_session_chat_room"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="session",
+            name="agenda_note",
+            field=models.CharField(blank=True, max_length=512),
+        ),
+    ]
diff --git a/ietf/meeting/migrations/0006_alter_sessionpresentation_document_and_session.py b/ietf/meeting/migrations/0006_alter_sessionpresentation_document_and_session.py
new file mode 100644
index 000000000..e8d6a663f
--- /dev/null
+++ b/ietf/meeting/migrations/0006_alter_sessionpresentation_document_and_session.py
@@ -0,0 +1,33 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.db import migrations
+import django.db.models.deletion
+import ietf.utils.models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("doc", "0021_narrativeminutes"),
+        ("meeting", "0005_alter_session_agenda_note"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="sessionpresentation",
+            name="document",
+            field=ietf.utils.models.ForeignKey(
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="presentations",
+                to="doc.document",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="sessionpresentation",
+            name="session",
+            field=ietf.utils.models.ForeignKey(
+                on_delete=django.db.models.deletion.CASCADE,
+                related_name="presentations",
+                to="meeting.session",
+            ),
+        ),
+    ]
diff --git a/ietf/meeting/models.py b/ietf/meeting/models.py
index de9613de1..4689495e0 100644
--- a/ietf/meeting/models.py
+++ b/ietf/meeting/models.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2007-2022, All Rights Reserved
+# Copyright The IETF Trust 2007-2024, All Rights Reserved
 # -*- coding: utf-8 -*-
 
 
@@ -451,8 +451,9 @@ class Room(models.Model):
     # end floorplan-related stuff
 
     def __str__(self):
-        return u"%s size: %s" % (self.name, self.capacity)
-
+        if len(self.functional_name) > 0 and self.functional_name != self.name:
+            return f"{self.name} [{self.functional_name}] (size: {self.capacity})"    
+        return f"{self.name} (size: {self.capacity})"    
 
     def dom_id(self):
         return "room%u" % (self.pk)
@@ -904,8 +905,8 @@ class Constraint(models.Model):
 
 
 class SessionPresentation(models.Model):
-    session = ForeignKey('Session')
-    document = ForeignKey(Document)
+    session = ForeignKey('Session', related_name="presentations")
+    document = ForeignKey(Document, related_name="presentations")
     rev = models.CharField(verbose_name="revision", max_length=16, null=True, blank=True)
     order = models.PositiveSmallIntegerField(default=0)
 
@@ -1017,7 +1018,7 @@ class Session(models.Model):
     group = ForeignKey(Group)    # The group type historically determined the session type.  BOFs also need to be added as a group. Note that not all meeting requests have a natural group to associate with.
     joint_with_groups = models.ManyToManyField(Group, related_name='sessions_joint_in',blank=True)
     attendees = models.IntegerField(null=True, blank=True)
-    agenda_note = models.CharField(blank=True, max_length=255)
+    agenda_note = models.CharField(blank=True, max_length=512)
     requested_duration = models.DurationField(default=datetime.timedelta(0))
     comments = models.TextField(blank=True)
     scheduled = models.DateTimeField(null=True, blank=True)
@@ -1044,7 +1045,7 @@ class Session(models.Model):
             for d in l:
                 d.meeting_related = lambda: True
         else:
-            l = self.materials.filter(type=material_type).exclude(states__type=material_type, states__slug='deleted').order_by('sessionpresentation__order')
+            l = self.materials.filter(type=material_type).exclude(states__type=material_type, states__slug='deleted').order_by('presentations__order')
 
         if only_one:
             if l:
@@ -1064,16 +1065,25 @@ class Session(models.Model):
             self._cached_minutes = self.get_material("minutes", only_one=True)
         return self._cached_minutes
 
+    def narrative_minutes(self):
+        if not hasattr(self, '_cached_narrative_minutes'):
+            self._cached_minutes = self.get_material("narrativeminutes", only_one=True)
+        return self._cached_minutes
+
     def recordings(self):
         return list(self.get_material("recording", only_one=False))
 
     def bluesheets(self):
         return list(self.get_material("bluesheets", only_one=False))
 
+    def chatlogs(self):
+        return list(self.get_material("chatlog", only_one=False))
+
     def slides(self):
         if not hasattr(self, "_slides_cache"):
             self._slides_cache = list(self.get_material("slides", only_one=False))
         return self._slides_cache
 
     def drafts(self):
         return list(self.materials.filter(type='draft'))
@@ -1237,10 +1248,21 @@ class Session(models.Model):
         return settings.CHAT_URL_PATTERN.format(chat_room_name=self.chat_room_name())
 
     def chat_archive_url(self):
-        chatlog = self.sessionpresentation_set.filter(document__type__slug='chatlog').first()
-        if chatlog is not None:
-            return chatlog.document.get_href()
-        elif self.meeting.date <= datetime.date(2022, 7, 15):
+
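+        # Prefer prefetched materials (when a view set "prefetched_active_materials")
+        # to avoid a per-session query for the chat log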
+        if hasattr(self,"prefetched_active_materials"):
+            chatlog_doc = None
+            for doc in self.prefetched_active_materials:
+                if doc.type_id=="chatlog":
+                    chatlog_doc = doc
+                    break
+            if chatlog_doc is not None:
+                return chatlog_doc.get_href()
+        else:
+            chatlog = self.presentations.filter(document__type__slug='chatlog').first()
+            if chatlog is not None:
+                return chatlog.document.get_href()
+
+        if self.meeting.date <= datetime.date(2022, 7, 15):
             # datatracker 8.8.0 released on 2022 July 15; before that, fall back to old log URL
             return f'https://www.ietf.org/jabber/logs/{ self.chat_room_name() }?C=M;O=D'
         elif hasattr(settings,'CHAT_ARCHIVE_URL_PATTERN'):
@@ -1267,29 +1289,27 @@ class Session(models.Model):
             return self.meeting.group_at_the_time(self.group_at_the_time().parent)
 
     def audio_stream_url(self):
-        if (
-            self.meeting.type.slug == "ietf"
-            and self.has_onsite_tool
-            and (url := getattr(settings, "MEETECHO_AUDIO_STREAM_URL", ""))
-        ):
+        url = getattr(settings, "MEETECHO_AUDIO_STREAM_URL", "")
+        if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url:
             return url.format(session=self)
         return None
 
     def video_stream_url(self):
-        if (
-            self.meeting.type.slug == "ietf"
-            and self.has_onsite_tool
-            and (url := getattr(settings, "MEETECHO_VIDEO_STREAM_URL", ""))
-        ):
+        url = getattr(settings, "MEETECHO_VIDEO_STREAM_URL", "")
+        if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url:
             return url.format(session=self)
         return None
 
     def onsite_tool_url(self):
-        if (
-            self.meeting.type.slug == "ietf"
-            and self.has_onsite_tool
-            and (url := getattr(settings, "MEETECHO_ONSITE_TOOL_URL", ""))
-        ):
+        url = getattr(settings, "MEETECHO_ONSITE_TOOL_URL", "")
+        if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url:
+            return url.format(session=self)
+        return None
+
+    def session_recording_url(self):
+        url = getattr(settings, "MEETECHO_SESSION_RECORDING_URL", "")
+        if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url:
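+            # make the upper-cased acronym available to the URL pattern as {session.group.acronym_upper}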
+            self.group.acronym_upper = self.group.acronym.upper()
             return url.format(session=self)
         return None
 
diff --git a/ietf/meeting/templatetags/proceedings_filters.py b/ietf/meeting/templatetags/proceedings_filters.py
index f5fe0e1f1..a2a4932e7 100644
--- a/ietf/meeting/templatetags/proceedings_filters.py
+++ b/ietf/meeting/templatetags/proceedings_filters.py
@@ -11,7 +11,7 @@ def hack_recording_title(recording,add_timestamp=False):
    if recording.title.startswith('Audio recording for') or recording.title.startswith('Video recording for'):
        hacked_title = recording.title[:15]
        if add_timestamp:
-           hacked_title += ' '+recording.sessionpresentation_set.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M")
+           hacked_title += ' '+recording.presentations.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M")
        return hacked_title
    else:
        return recording.title
diff --git a/ietf/meeting/templatetags/session_filters.py b/ietf/meeting/templatetags/session_filters.py
index 4fe377a81..3846dab49 100644
--- a/ietf/meeting/templatetags/session_filters.py
+++ b/ietf/meeting/templatetags/session_filters.py
@@ -8,7 +8,7 @@ register = template.Library()
 
 @register.filter
 def presented_versions(session, doc):
-    sp = session.sessionpresentation_set.filter(document=doc)
+    sp = session.presentations.filter(document=doc)
     if not sp:
         return "Document not in session"
     else:
diff --git a/ietf/meeting/test_data.py b/ietf/meeting/test_data.py
index 5ecb494df..8be55b47a 100644
--- a/ietf/meeting/test_data.py
+++ b/ietf/meeting/test_data.py
@@ -51,7 +51,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'):
     doc = DocumentFactory.create(name=name, type_id='agenda', title="Agenda",
         uploaded_filename=file, group=group, rev=rev, states=[('draft','active')])
     pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
     # minutes
     name = "minutes-%s-%s" % (meeting.number, time.strftime("%Y%m%d%H%M"))
     rev = '00'
@@ -59,7 +59,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'):
     doc = DocumentFactory.create(name=name, type_id='minutes', title="Minutes",
         uploaded_filename=file, group=group, rev=rev, states=[('draft','active')])
     pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
     # slides
     title = "Slideshow"
 
@@ -70,7 +70,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'):
         uploaded_filename=file, group=group, rev=rev,
         states=[('slides','active'), ('reuse_policy', 'single')])
     pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
     #
     return meeting
 
@@ -198,24 +198,24 @@ def make_meeting_test_data(meeting=None, create_interims=False):
     doc = DocumentFactory.create(name='agenda-72-mars', type_id='agenda', title="Agenda",
         uploaded_filename="agenda-72-mars.txt", group=mars, rev='00', states=[('agenda','active')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres) # 
+    mars_session.presentations.add(pres) # 
 
     doc = DocumentFactory.create(name='minutes-72-mars', type_id='minutes', title="Minutes",
         uploaded_filename="minutes-72-mars.md", group=mars, rev='00', states=[('minutes','active')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres)
+    mars_session.presentations.add(pres)
 
     doc = DocumentFactory.create(name='slides-72-mars-1-active', type_id='slides', title="Slideshow",
         uploaded_filename="slides-72-mars.txt", group=mars, rev='00',
         states=[('slides','active'), ('reuse_policy', 'single')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres)
+    mars_session.presentations.add(pres)
 
     doc = DocumentFactory.create(name='slides-72-mars-2-deleted', type_id='slides',
         title="Bad Slideshow", uploaded_filename="slides-72-mars-2-deleted.txt", group=mars, rev='00',
         states=[('slides','deleted'), ('reuse_policy', 'single')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres)
+    mars_session.presentations.add(pres)
     
     # Future Interim Meetings
     date = date_today() + datetime.timedelta(days=365)
diff --git a/ietf/meeting/tests_js.py b/ietf/meeting/tests_js.py
index 517836f87..6199ed7eb 100644
--- a/ietf/meeting/tests_js.py
+++ b/ietf/meeting/tests_js.py
@@ -884,9 +884,9 @@ class SlideReorderTests(IetfSeleniumTestCase):
     def setUp(self):
         super(SlideReorderTests, self).setUp()
         self.session = SessionFactory(meeting__type_id='ietf', status_id='sched')
-        self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='one'),order=1)
-        self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='two'),order=2)
-        self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='three'),order=3)
+        self.session.presentations.create(document=DocumentFactory(type_id='slides',name='one'),order=1)
+        self.session.presentations.create(document=DocumentFactory(type_id='slides',name='two'),order=2)
+        self.session.presentations.create(document=DocumentFactory(type_id='slides',name='three'),order=3)
 
     def secr_login(self):
         self.login('secretary')
@@ -906,7 +906,7 @@ class SlideReorderTests(IetfSeleniumTestCase):
         ActionChains(self.driver).drag_and_drop(second,third).perform()
 
         time.sleep(0.1) # The API that modifies the database runs async
-        names=self.session.sessionpresentation_set.values_list('document__name',flat=True) 
+        names=self.session.presentations.values_list('document__name',flat=True) 
         self.assertEqual(list(names),['one','three','two'])
 
 @ifSeleniumEnabled
diff --git a/ietf/meeting/tests_schedule_forms.py b/ietf/meeting/tests_schedule_forms.py
index 58c1332bd..426d26dc2 100644
--- a/ietf/meeting/tests_schedule_forms.py
+++ b/ietf/meeting/tests_schedule_forms.py
@@ -140,13 +140,13 @@ class TimeSlotEditFormTests(TestCase):
         rendered = str(TimeSlotEditForm(instance=ts)['location'])
         # noinspection PyTypeChecker
         self.assertInHTML(
-            f'<option value="{ts.location.pk}" selected>{ts.location.name} size: None</option>',
+            f'<option value="{ts.location.pk}" selected>{ts.location}</option>',
             rendered,
         )
         for room in rooms:
             # noinspection PyTypeChecker
             self.assertInHTML(
-                f'<option value="{room.pk}">{room.name} size: {room.capacity}</option>',
+                f'<option value="{room.pk}">{room}</option>',
                 rendered,
             )
 
diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py
index a57fcf63c..e2abcede8 100644
--- a/ietf/meeting/tests_views.py
+++ b/ietf/meeting/tests_views.py
@@ -468,16 +468,16 @@ class MeetingTests(BaseMeetingTestCase):
         doc = DocumentFactory.create(name='agenda-172-mars', type_id='agenda', title="Agenda",
             uploaded_filename="agenda-172-mars.txt", group=session107.group, rev='00', states=[('agenda','active')])
         pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev)
-        session107.sessionpresentation_set.add(pres) # 
+        session107.presentations.add(pres) # 
         doc = DocumentFactory.create(name='minutes-172-mars', type_id='minutes', title="Minutes",
             uploaded_filename="minutes-172-mars.md", group=session107.group, rev='00', states=[('minutes','active')])
         pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev)
-        session107.sessionpresentation_set.add(pres)
+        session107.presentations.add(pres)
         doc = DocumentFactory.create(name='slides-172-mars-1-active', type_id='slides', title="Slideshow",
             uploaded_filename="slides-172-mars.txt", group=session107.group, rev='00',
             states=[('slides','active'), ('reuse_policy', 'single')])
         pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev)
-        session107.sessionpresentation_set.add(pres)
+        session107.presentations.add(pres)
 
         for session in (
             Session.objects.filter(meeting=meeting, group__acronym="mars").first(),
@@ -548,7 +548,7 @@ class MeetingTests(BaseMeetingTestCase):
         named_row = named_label.closest('tr')
         self.assertTrue(named_row)
 
-        for material in (sp.document for sp in plain_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in plain_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',
@@ -559,7 +559,7 @@ class MeetingTests(BaseMeetingTestCase):
             self.assertTrue(plain_row.find(f'a[href="{expected_url}"]'))
             self.assertFalse(named_row.find(f'a[href="{expected_url}"]'))
 
-        for material in (sp.document for sp in named_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in named_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',
@@ -955,10 +955,10 @@ class MeetingTests(BaseMeetingTestCase):
         # but lists a different on in its agenda. The expectation is that the pdf and tgz views will return both.
         session = SessionFactory(group__type_id='wg',meeting__type_id='ietf')
         draft1 = WgDraftFactory(group=session.group)
-        session.sessionpresentation_set.create(document=draft1)
+        session.presentations.create(document=draft1)
         draft2 = WgDraftFactory(group=session.group)
         agenda = DocumentFactory(type_id='agenda',group=session.group, uploaded_filename='agenda-%s-%s' % (session.meeting.number,session.group.acronym), states=[('agenda','active')])
-        session.sessionpresentation_set.create(document=agenda)
+        session.presentations.create(document=agenda)
         self.write_materials_file(session.meeting, session.materials.get(type="agenda"),
                                   "1. WG status (15 minutes)\n\n2. Status of %s\n\n" % draft2.name)
         filenames = []
@@ -3083,18 +3083,18 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'order':1, 'name':slides.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session.sessionpresentation_set.count(),1)
+            self.assertEqual(session.presentations.count(),1)
 
             # Ignore a request to add slides that are already in a session
             r = self.client.post(url, {'order':1, 'name':slides.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session.sessionpresentation_set.count(),1)
+            self.assertEqual(session.presentations.count(),1)
 
 
             session2 = SessionFactory(group=session.group, meeting=session.meeting)
             SessionPresentationFactory.create_batch(3, document__type_id='slides', session=session2)
-            for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1):
+            for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1):
                 sp.order = num
                 sp.save()
 
@@ -3106,22 +3106,22 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'order':1, 'name':more_slides[0].name})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[0]).order,1)
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5)))
+            self.assertEqual(session2.presentations.get(document=more_slides[0]).order,1)
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5)))
 
             # Insert at end
             r = self.client.post(url, {'order':5, 'name':more_slides[1].name})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[1]).order,5)
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,6)))
+            self.assertEqual(session2.presentations.get(document=more_slides[1]).order,5)
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,6)))
 
             # Insert in middle
             r = self.client.post(url, {'order':3, 'name':more_slides[2].name})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[2]).order,3)
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,7)))
+            self.assertEqual(session2.presentations.get(document=more_slides[2]).order,3)
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,7)))
 
     def test_remove_slides_from_session(self):
         for type_id in ['ietf','interim']:
@@ -3172,7 +3172,7 @@ class ReorderSlidesTests(TestCase):
             self.assertEqual(r.json()['success'],False)
             self.assertIn('index is not valid',r.json()['error'])
 
-            session.sessionpresentation_set.create(document=slides, rev=slides.rev, order=1)
+            session.presentations.create(document=slides, rev=slides.rev, order=1)
 
             # Bad names
             r = self.client.post(url, {'oldIndex':1})
@@ -3193,7 +3193,7 @@ class ReorderSlidesTests(TestCase):
             self.assertEqual(r.json()['success'],False)
             self.assertIn('SessionPresentation not found',r.json()['error'])
 
-            session.sessionpresentation_set.create(document=slides2, rev=slides2.rev, order=2)
+            session.presentations.create(document=slides2, rev=slides2.rev, order=2)
             r = self.client.post(url, {'oldIndex':1, 'name':slides2.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],False)
@@ -3203,11 +3203,11 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'name':slides.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session.sessionpresentation_set.count(),1)
+            self.assertEqual(session.presentations.count(),1)
 
             session2 = SessionFactory(group=session.group, meeting=session.meeting)
             sp_list = SessionPresentationFactory.create_batch(5, document__type_id='slides', session=session2)
-            for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1):
+            for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1):
                 sp.order = num
                 sp.save()
 
@@ -3217,22 +3217,22 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'name':sp_list[0].document.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[0].pk).exists())
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5)))
+            self.assertFalse(session2.presentations.filter(pk=sp_list[0].pk).exists())
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5)))
 
             # delete in middle of list
             r = self.client.post(url, {'oldIndex':4, 'name':sp_list[4].document.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[4].pk).exists())
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,4)))
+            self.assertFalse(session2.presentations.filter(pk=sp_list[4].pk).exists())
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,4)))
 
             # delete at end of list
             r = self.client.post(url, {'oldIndex':2, 'name':sp_list[2].document.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[2].pk).exists())
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,3)))
+            self.assertFalse(session2.presentations.filter(pk=sp_list[2].pk).exists())
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,3)))
 
 
 
@@ -3290,45 +3290,45 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'newIndex':3})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5]))
 
             # Move to beginning
             r = self.client.post(url, {'oldIndex':3, 'newIndex':1})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
             
             # Move from end
             r = self.client.post(url, {'oldIndex':5, 'newIndex':3})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4]))
 
             # Move to end
             r = self.client.post(url, {'oldIndex':3, 'newIndex':5})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
 
             # Move beginning to end
             r = self.client.post(url, {'oldIndex':1, 'newIndex':5})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1]))
 
             # Move middle to middle 
             r = self.client.post(url, {'oldIndex':3, 'newIndex':4})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1]))
 
             r = self.client.post(url, {'oldIndex':3, 'newIndex':2})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1]))
 
             # Reset for next iteration in the loop
-            session.sessionpresentation_set.update(order=F('pk'))
+            session.presentations.update(order=F('pk'))
             self.client.logout()
 
 
@@ -3345,7 +3345,7 @@ class ReorderSlidesTests(TestCase):
         except AssertionError:
             pass
 
-        self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('order',flat=True)),list(range(1,6)))
+        self.assertEqual(list(session.presentations.order_by('order').values_list('order',flat=True)),list(range(1,6)))
 
 
 class EditTests(TestCase):
@@ -4334,7 +4334,7 @@ class SessionDetailsTests(TestCase):
         group.role_set.create(name_id='chair',person = group_chair, email = group_chair.email())
         session = SessionFactory.create(meeting__type_id='ietf',group=group, meeting__date=date_today() + datetime.timedelta(days=90))
         SessionPresentationFactory.create(session=session,document__type_id='draft',rev=None)
-        old_draft = session.sessionpresentation_set.filter(document__type='draft').first().document
+        old_draft = session.presentations.filter(document__type='draft').first().document
         new_draft = DocumentFactory(type_id='draft')
 
         url = urlreverse('ietf.meeting.views.add_session_drafts', kwargs=dict(num=session.meeting.number, session_id=session.pk))
@@ -4355,10 +4355,10 @@ class SessionDetailsTests(TestCase):
         q = PyQuery(r.content)
         self.assertIn("Already linked:", q('form .text-danger').text())
 
-        self.assertEqual(1,session.sessionpresentation_set.count())
+        self.assertEqual(1,session.presentations.count())
         r = self.client.post(url,dict(drafts=[new_draft.pk,]))
         self.assertTrue(r.status_code, 302)
-        self.assertEqual(2,session.sessionpresentation_set.count())
+        self.assertEqual(2,session.presentations.count())
 
         session.meeting.date -= datetime.timedelta(days=180)
         session.meeting.save()
@@ -5595,8 +5595,17 @@ class InterimTests(TestCase):
         self.assertEqual(r.status_code, 403)
         self.assertFalse(mock.called, 'Should not cancel sessions if request rejected')
 
-        # test cancelling before announcement
+        # test with overly-long comments
+        comments += '0123456789abcdef'*32
         self.client.login(username="marschairman", password="marschairman+password")
+        r = self.client.post(url, {'comments': comments})
+        self.assertEqual(r.status_code, 200)
+        q = PyQuery(r.content)
+        self.assertTrue(q('form .is-invalid'))
+        # truncate to max_length
+        comments = comments[:512]
+
+        # test cancelling before announcement
         length_before = len(outbox)
         r = self.client.post(url, {'comments': comments})
         self.assertRedirects(r, urlreverse('ietf.meeting.views.upcoming'))
@@ -5618,6 +5627,7 @@ class InterimTests(TestCase):
             self.assertEqual(session.agenda_note, comments)
         self.assertEqual(len(outbox), length_before + 1)
         self.assertIn('Interim Meeting Cancelled', outbox[-1]['Subject'])
+        self.assertIn(comments, get_payload_text(outbox[-1]))
         self.assertTrue(mock.called, 'Should cancel sessions if request handled')
         self.assertCountEqual(mock.call_args[0][1], meeting.session_set.all())
 
@@ -5972,7 +5982,7 @@ class FinalizeProceedingsTests(TestCase):
     def test_finalize_proceedings(self):
         make_meeting_test_data()
         meeting = Meeting.objects.filter(type_id='ietf').order_by('id').last()
-        meeting.session_set.filter(group__acronym='mars').first().sessionpresentation_set.create(document=Document.objects.filter(type='draft').first(),rev=None)
+        meeting.session_set.filter(group__acronym='mars').first().presentations.create(document=Document.objects.filter(type='draft').first(),rev=None)
 
         url = urlreverse('ietf.meeting.views.finalize_proceedings',kwargs={'num':meeting.number})
         login_testing_unauthorized(self,"secretary",url)
@@ -5980,12 +5990,12 @@ class FinalizeProceedingsTests(TestCase):
         self.assertEqual(r.status_code, 200)
 
         self.assertEqual(meeting.proceedings_final,False)
-        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,None)
+        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,None)
         r = self.client.post(url,{'finalize':1})
         self.assertEqual(r.status_code, 302)
         meeting = Meeting.objects.get(pk=meeting.pk)
         self.assertEqual(meeting.proceedings_final,True)
-        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,'00')
+        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,'00')
  
 class MaterialsTests(TestCase):
     settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [
@@ -6027,12 +6037,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
         test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test')
         test_file.name = "not_really.pdf"
         r = self.client.post(url,dict(file=test_file))
         self.assertEqual(r.status_code, 302)
-        bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document
+        bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document
         self.assertEqual(bs_doc.rev,'00')
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
@@ -6062,12 +6072,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
         test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test')
         test_file.name = "not_really.pdf"
         r = self.client.post(url,dict(file=test_file))
         self.assertEqual(r.status_code, 302)
-        bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document
+        bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document
         self.assertEqual(bs_doc.rev,'00')
 
     def test_upload_bluesheets_interim_chair_access(self):
@@ -6095,7 +6105,7 @@ class MaterialsTests(TestCase):
             self.assertEqual(r.status_code, 200)
             q = PyQuery(r.content)
             self.assertIn('Upload', str(q("Title")))
-            self.assertFalse(session.sessionpresentation_set.exists())
+            self.assertFalse(session.presentations.exists())
             self.assertFalse(q('form input[type="checkbox"]'))
     
             session2 = SessionFactory(meeting=session.meeting,group=session.group)
@@ -6130,7 +6140,7 @@ class MaterialsTests(TestCase):
             test_file.name = "some.html"
             r = self.client.post(url,dict(submission_method="upload",file=test_file))
             self.assertEqual(r.status_code, 302)
-            doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document
+            doc = session.presentations.filter(document__type_id=doctype).first().document
             self.assertEqual(doc.rev,'00')
             text = doc.text()
             self.assertIn('Some text', text)
@@ -6142,9 +6152,9 @@ class MaterialsTests(TestCase):
             test_file.name = "some.txt"
             r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False))
             self.assertEqual(r.status_code, 302)
-            doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document
+            doc = session.presentations.filter(document__type_id=doctype).first().document
             self.assertEqual(doc.rev,'01')
-            self.assertFalse(session2.sessionpresentation_set.filter(document__type_id=doctype))
+            self.assertFalse(session2.presentations.filter(document__type_id=doctype))
     
             r = self.client.get(url)
             self.assertEqual(r.status_code, 200)
@@ -6156,7 +6166,7 @@ class MaterialsTests(TestCase):
             self.assertEqual(r.status_code, 302)
             doc = Document.objects.get(pk=doc.pk)
             self.assertEqual(doc.rev,'02')
-            self.assertTrue(session2.sessionpresentation_set.filter(document__type_id=doctype))
+            self.assertTrue(session2.presentations.filter(document__type_id=doctype))
 
             # Test bad encoding
             test_file = BytesIO('<html><h1>Title</h1><section>Some\x93text</section></html>'.encode('latin1'))
@@ -6186,7 +6196,7 @@ class MaterialsTests(TestCase):
             self.assertEqual(r.status_code, 200)
             q = PyQuery(r.content)
             self.assertIn('Upload', str(q("Title")))
-            self.assertFalse(session.sessionpresentation_set.exists())
+            self.assertFalse(session.presentations.exists())
             self.assertFalse(q('form input[type="checkbox"]'))
 
             test_file = BytesIO(b'this is some text for a test')
@@ -6208,12 +6218,12 @@ class MaterialsTests(TestCase):
             self.assertEqual(r.status_code, 200)
             q = PyQuery(r.content)
             self.assertIn('Upload', str(q("title")))
-            self.assertFalse(session.sessionpresentation_set.filter(document__type_id=doctype))
+            self.assertFalse(session.presentations.filter(document__type_id=doctype))
             test_file = BytesIO(b'this is some text for a test')
             test_file.name = "not_really.txt"
             r = self.client.post(url,dict(submission_method="upload",file=test_file))
             self.assertEqual(r.status_code, 302)
-            doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document
+            doc = session.presentations.filter(document__type_id=doctype).first().document
             self.assertEqual(doc.rev,'00')
 
             # Verify that we don't have dead links
@@ -6232,12 +6242,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("Title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
 
         test_text = 'Enter agenda from scratch'
         r = self.client.post(url,dict(submission_method="enter",content=test_text))
         self.assertRedirects(r, redirect_url)
-        doc = session.sessionpresentation_set.filter(document__type_id='agenda').first().document
+        doc = session.presentations.filter(document__type_id='agenda').first().document
         self.assertEqual(doc.rev,'00')
 
         r = self.client.get(url)
@@ -6273,14 +6283,14 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides'))
+        self.assertFalse(session1.presentations.filter(document__type_id='slides'))
         test_file = BytesIO(b'this is not really a slide')
         test_file.name = 'not_really.txt'
         r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1) 
-        self.assertEqual(session2.sessionpresentation_set.count(),1) 
-        sp = session2.sessionpresentation_set.first()
+        self.assertEqual(session1.presentations.count(),1) 
+        self.assertEqual(session2.presentations.count(),1) 
+        sp = session2.presentations.first()
         self.assertEqual(sp.document.name, 'slides-%s-%s-a-test-slide-file' % (session1.meeting.number,session1.group.acronym ) )
         self.assertEqual(sp.order,1)
 
@@ -6289,14 +6299,14 @@ class MaterialsTests(TestCase):
         test_file.name = 'also_not_really.txt'
         r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),2)
-        sp = session2.sessionpresentation_set.get(document__name__endswith='-a-different-slide-file')
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),2)
+        sp = session2.presentations.get(document__name__endswith='-a-different-slide-file')
         self.assertEqual(sp.order,2)
         self.assertEqual(sp.rev,'00')
         self.assertEqual(sp.document.rev,'00')
 
-        url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.sessionpresentation_set.get(order=2).document.name})
+        url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.presentations.get(order=2).document.name})
         r = self.client.get(url)
         self.assertTrue(r.status_code, 200)
         q = PyQuery(r.content)
@@ -6305,9 +6315,9 @@ class MaterialsTests(TestCase):
         test_file.name = 'doesnotmatter.txt'
         r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),2)
-        sp = session2.sessionpresentation_set.get(order=2)
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),2)
+        sp = session2.presentations.get(order=2)
         self.assertEqual(sp.rev,'01')
         self.assertEqual(sp.document.rev,'01')
  
@@ -6319,7 +6329,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides'))
+        self.assertFalse(session1.presentations.filter(document__type_id='slides'))
         test_file = BytesIO(b'this is not really a slide')
         test_file.name = 'not_really.txt'
         r = self.client.post(url,dict(file=test_file,title='title with bad character \U0001fabc '))
@@ -6331,7 +6341,7 @@ class MaterialsTests(TestCase):
     def test_remove_sessionpresentation(self):
         session = SessionFactory(meeting__type_id='ietf')
         doc = DocumentFactory(type_id='slides')
-        session.sessionpresentation_set.create(document=doc)
+        session.presentations.create(document=doc)
 
         url = urlreverse('ietf.meeting.views.remove_sessionpresentation',kwargs={'num':session.meeting.number,'session_id':session.id,'name':'no-such-doc'})
         response = self.client.get(url)
@@ -6346,10 +6356,10 @@ class MaterialsTests(TestCase):
         response = self.client.get(url)
         self.assertEqual(response.status_code, 200)
 
-        self.assertEqual(1,session.sessionpresentation_set.count())
+        self.assertEqual(1,session.presentations.count())
         response = self.client.post(url,{'remove_session':''})
         self.assertEqual(response.status_code, 302)
-        self.assertEqual(0,session.sessionpresentation_set.count())
+        self.assertEqual(0,session.presentations.count())
         self.assertEqual(2,doc.docevent_set.count())
 
     def test_propose_session_slides(self):
@@ -6438,8 +6448,8 @@ class MaterialsTests(TestCase):
         submission = SlideSubmission.objects.get(id = submission.id)
         self.assertEqual(submission.status_id, 'approved')
         self.assertIsNotNone(submission.doc)
-        self.assertEqual(session.sessionpresentation_set.count(),1)
-        self.assertEqual(session.sessionpresentation_set.first().document.title,'different title')
+        self.assertEqual(session.presentations.count(),1)
+        self.assertEqual(session.presentations.first().document.title,'different title')
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
         self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+approved")
@@ -6461,8 +6471,8 @@ class MaterialsTests(TestCase):
         self.assertTrue(q('#id_apply_to_all'))
         r = self.client.post(url,dict(title='yet another title',approve='approve'))
         self.assertEqual(r.status_code,302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),0)
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),0)
 
     def test_approve_proposed_slides_multisession_apply_all(self):
         submission = SlideSubmissionFactory(session__meeting__type_id='ietf')
@@ -6476,8 +6486,8 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code,200)
         r = self.client.post(url,dict(title='yet another title',apply_to_all=1,approve='approve'))
         self.assertEqual(r.status_code,302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),1)
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),1)
 
     def test_submit_and_approve_multiple_versions(self):
         session = SessionFactory(meeting__type_id='ietf')
@@ -6502,7 +6512,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code,302)
         self.client.logout()
 
-        self.assertEqual(session.sessionpresentation_set.first().document.rev,'00')
+        self.assertEqual(session.presentations.first().document.rev,'00')
 
         login_testing_unauthorized(self,newperson.user.username,propose_url)
         test_file = BytesIO(b'this is not really a slide, but it is another version of it')
@@ -6530,9 +6540,9 @@ class MaterialsTests(TestCase):
 
         self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(),0)
         self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(),1)
-        self.assertEqual(session.sessionpresentation_set.first().document.rev,'01')
+        self.assertEqual(session.presentations.first().document.rev,'01')
         path = os.path.join(submission.session.meeting.get_materials_path(),'slides')
-        filename = os.path.join(path,session.sessionpresentation_set.first().document.name+'-01.txt')
+        filename = os.path.join(path,session.presentations.first().document.name+'-01.txt')
         self.assertTrue(os.path.exists(filename))
         fd = io.open(filename, 'r')
         contents = fd.read()
@@ -6649,7 +6659,7 @@ class ImportNotesTests(TestCase):
         self.client.login(username='secretary', password='secretary+password')
         r = self.client.post(url, {'markdown_text': 'replaced below'})  # create a rev
         with open(
-                self.session.sessionpresentation_set.filter(document__type="minutes").first().document.get_file_name(),
+                self.session.presentations.filter(document__type="minutes").first().document.get_file_name(),
                 'wb'
         ) as f:
             # Replace existing content with an invalid Unicode byte string. The particular invalid
@@ -6674,7 +6684,7 @@ class ImportNotesTests(TestCase):
         self.client.login(username='secretary', password='secretary+password')
         r = self.client.post(url, {'markdown_text': 'original markdown text'})  # create a rev
         # remove the file uploaded for the first rev
-        minutes_docs = self.session.sessionpresentation_set.filter(document__type='minutes')
+        minutes_docs = self.session.presentations.filter(document__type='minutes')
         self.assertEqual(minutes_docs.count(), 1)
         Path(minutes_docs.first().document.get_file_name()).unlink()
 
@@ -7809,7 +7819,7 @@ class ProceedingsTests(BaseMeetingTestCase):
         named_row = named_label.closest('tr')
         self.assertTrue(named_row)
 
-        for material in (sp.document for sp in plain_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in plain_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',
@@ -7820,7 +7830,7 @@ class ProceedingsTests(BaseMeetingTestCase):
             self.assertTrue(plain_row.find(f'a[href="{expected_url}"]'))
             self.assertFalse(named_row.find(f'a[href="{expected_url}"]'))
 
-        for material in (sp.document for sp in named_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in named_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',
diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py
index 416e9c61f..9fb062b02 100644
--- a/ietf/meeting/utils.py
+++ b/ietf/meeting/utils.py
@@ -32,7 +32,11 @@ from ietf.utils.timezone import date_today
 
 
 def session_time_for_sorting(session, use_meeting_date):
-    official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first()
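+    # Use the prefetched official timeslot assignment when the caller has annotated it as "_otsa" (assumed); otherwise fall back to a query.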
+    if hasattr(session, "_otsa"):
+        official_timeslot = session._otsa.timeslot
+    else:
+        official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first()
     if official_timeslot:
         return official_timeslot.time
     elif use_meeting_date and session.meeting.date:
@@ -75,13 +78,15 @@ def group_sessions(sessions):
     in_progress = []
     recent = []
     past = []
+
     for s in sessions:
         today = date_today(s.meeting.tz())
         if s.meeting.date > today:
             future.append(s)
         elif s.meeting.end_date() >= today:
             in_progress.append(s)
-        elif not s.is_material_submission_cutoff():
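+        # Prefer the precomputed "cached_is_cutoff" flag when present (assumed to be annotated by the caller) instead of recomputing the cutoff per session.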
+        elif not (s.cached_is_cutoff if hasattr(s, "cached_is_cutoff") else s.is_material_submission_cutoff()):
             recent.append(s)
         else:
             past.append(s)
@@ -91,6 +95,7 @@ def group_sessions(sessions):
     recent.reverse()
     past.reverse()
 
+
     return future, in_progress, recent, past
 
 def get_upcoming_manageable_sessions(user):
@@ -148,7 +153,7 @@ def finalize(meeting):
         )
     ).astimezone(pytz.utc) + datetime.timedelta(days=1)
     for session in meeting.session_set.all():
-        for sp in session.sessionpresentation_set.filter(document__type='draft',rev=None):
+        for sp in session.presentations.filter(document__type='draft',rev=None):
             rev_before_end = [e for e in sp.document.docevent_set.filter(newrevisiondocevent__isnull=False).order_by('-time') if e.time <= end_time ]
             if rev_before_end:
                 sp.rev = rev_before_end[-1].newrevisiondocevent.rev
@@ -180,7 +185,7 @@ def sort_accept_tuple(accept):
     return tup
 
 def condition_slide_order(session):
-    qs = session.sessionpresentation_set.filter(document__type_id='slides').order_by('order')
+    qs = session.presentations.filter(document__type_id='slides').order_by('order')
     order_list = qs.values_list('order',flat=True)
     if list(order_list) != list(range(1,qs.count()+1)):
         for num, sp in enumerate(qs, start=1):
@@ -563,7 +568,7 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap
     Returns (Document, [DocEvents]), which should be passed to doc.save_with_history()
     if the file contents are stored successfully.
     """
-    minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first()
+    minutes_sp = session.presentations.filter(document__type='minutes').first()
     if minutes_sp:
         doc = minutes_sp.document
         doc.rev = '%02d' % (int(doc.rev)+1)
@@ -597,17 +602,17 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap
                 rev = '00',
             )
         doc.states.add(State.objects.get(type_id='minutes',slug='active'))
-        if session.sessionpresentation_set.filter(document=doc).exists():
-            sp = session.sessionpresentation_set.get(document=doc)
+        if session.presentations.filter(document=doc).exists():
+            sp = session.presentations.get(document=doc)
             sp.rev = doc.rev
             sp.save()
         else:
-            session.sessionpresentation_set.create(document=doc,rev=doc.rev)
+            session.presentations.create(document=doc,rev=doc.rev)
     if apply_to_all:
         for other_session in get_meeting_sessions(session.meeting.number, session.group.acronym):
             if other_session != session:
-                other_session.sessionpresentation_set.filter(document__type='minutes').delete()
-                other_session.sessionpresentation_set.create(document=doc,rev=doc.rev)
+                other_session.presentations.filter(document__type='minutes').delete()
+                other_session.presentations.create(document=doc,rev=doc.rev)
     filename = f'{doc.name}-{doc.rev}{ext}'
     doc.uploaded_filename = filename
     e = NewRevisionDocEvent.objects.create(
@@ -719,7 +724,7 @@ def new_doc_for_session(type_id, session):
                 rev = '00',
             )
     doc.states.add(State.objects.get(type_id=type_id, slug='active'))
-    session.sessionpresentation_set.create(document=doc,rev='00')
+    session.presentations.create(document=doc,rev='00')
     return doc
 
 def write_doc_for_session(session, type_id, filename, contents):
@@ -760,7 +765,7 @@ def create_recording(session, url, title=None, user=None):
                                        desc='New revision available',
                                        time=doc.time)
     pres = SessionPresentation.objects.create(session=session,document=doc,rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
 
     return doc
 
diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py
index eae9b145e..1171f7b0b 100644
--- a/ietf/meeting/views.py
+++ b/ietf/meeting/views.py
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2007-2023, All Rights Reserved
+# Copyright The IETF Trust 2007-2024, All Rights Reserved
 # -*- coding: utf-8 -*-
 
 
@@ -2157,7 +2157,7 @@ def agenda_json(request, num=None):
     # time of the meeting
     assignments = preprocess_assignments_for_agenda(assignments, meeting, extra_prefetches=[
         "session__materials__docevent_set",
-        "session__sessionpresentation_set",
+        "session__presentations",
         "timeslot__meeting"
     ])
     for asgn in assignments:
@@ -2427,12 +2427,12 @@ def session_details(request, num, acronym):
             session.cancelled = session.current_status in Session.CANCELED_STATUSES
             session.status = status_names.get(session.current_status, session.current_status)
 
-        session.filtered_artifacts = list(session.sessionpresentation_set.filter(document__type__slug__in=['agenda','minutes','bluesheets']))
-        session.filtered_artifacts.sort(key=lambda d:['agenda','minutes','bluesheets'].index(d.document.type.slug))
-        session.filtered_slides    = session.sessionpresentation_set.filter(document__type__slug='slides').order_by('order')
-        session.filtered_drafts    = session.sessionpresentation_set.filter(document__type__slug='draft')
-        session.filtered_chatlog_and_polls = session.sessionpresentation_set.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug')
-        # TODO FIXME Deleted materials shouldn't be in the sessionpresentation_set
+        session.filtered_artifacts = list(session.presentations.filter(document__type__slug__in=['agenda','minutes','narrativeminutes', 'bluesheets']))
+        session.filtered_artifacts.sort(key=lambda d:['agenda','minutes', 'narrativeminutes', 'bluesheets'].index(d.document.type.slug))
+        session.filtered_slides    = session.presentations.filter(document__type__slug='slides').order_by('order')
+        session.filtered_drafts    = session.presentations.filter(document__type__slug='draft')
+        session.filtered_chatlog_and_polls = session.presentations.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug')
+        # TODO FIXME Deleted materials shouldn't be in the presentations
         for qs in [session.filtered_artifacts,session.filtered_slides,session.filtered_drafts]:
             qs = [p for p in qs if p.document.get_state_slug(p.document.type_id)!='deleted']
             session.type_counter.update([p.document.type.slug for p in qs])
@@ -2490,7 +2490,7 @@ def add_session_drafts(request, session_id, num):
     if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"):
         raise Http404
 
-    already_linked = [sp.document for sp in session.sessionpresentation_set.filter(document__type_id='draft')]
+    already_linked = [sp.document for sp in session.presentations.filter(document__type_id='draft')]
 
     session_number = None
     sessions = get_sessions(session.meeting.number,session.group.acronym)
@@ -2501,7 +2501,7 @@ def add_session_drafts(request, session_id, num):
         form = SessionDraftsForm(request.POST,already_linked=already_linked)
         if form.is_valid():
             for draft in form.cleaned_data['drafts']:
-                session.sessionpresentation_set.create(document=draft,rev=None)
+                session.presentations.create(document=draft,rev=None)
                 c = DocEvent(type="added_comment", doc=draft, rev=draft.rev, by=request.user.person)
                 c.desc = "Added to session: %s" % session
                 c.save()
@@ -2512,7 +2512,7 @@ def add_session_drafts(request, session_id, num):
     return render(request, "meeting/add_session_drafts.html",
                   { 'session': session,
                     'session_number': session_number,
-                    'already_linked': session.sessionpresentation_set.filter(document__type_id='draft'),
+                    'already_linked': session.presentations.filter(document__type_id='draft'),
                     'form': form,
                   })
 
@@ -2554,7 +2554,7 @@ def upload_session_bluesheets(request, session_id, num):
     else: 
         form = UploadBlueSheetForm()
 
-    bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first()
+    bluesheet_sp = session.presentations.filter(document__type='bluesheets').first()
 
     return render(request, "meeting/upload_session_bluesheets.html", 
                   {'session': session,
@@ -2565,7 +2565,7 @@ def upload_session_bluesheets(request, session_id, num):
 
 
 def save_bluesheet(request, session, file, encoding='utf-8'):
-    bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first()
+    bluesheet_sp = session.presentations.filter(document__type='bluesheets').first()
     _, ext = os.path.splitext(file.name)
 
     if bluesheet_sp:
@@ -2595,7 +2595,7 @@ def save_bluesheet(request, session, file, encoding='utf-8'):
                   rev = '00',
               )
         doc.states.add(State.objects.get(type_id='bluesheets',slug='active'))
-        session.sessionpresentation_set.create(document=doc,rev='00')
+        session.presentations.create(document=doc,rev='00')
     filename = '%s-%s%s'% ( doc.name, doc.rev, ext)
     doc.uploaded_filename = filename
     e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev)
@@ -2620,7 +2620,7 @@ def upload_session_minutes(request, session_id, num):
     if len(sessions) > 1:
        session_number = 1 + sessions.index(session)
 
-    minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first()
+    minutes_sp = session.presentations.filter(document__type='minutes').first()
     
     if request.method == 'POST':
         form = UploadMinutesForm(show_apply_to_all_checkbox,request.POST,request.FILES)
@@ -2712,7 +2712,7 @@ def upload_session_agenda(request, session_id, num):
     if len(sessions) > 1:
        session_number = 1 + sessions.index(session)
 
-    agenda_sp = session.sessionpresentation_set.filter(document__type='agenda').first()
+    agenda_sp = session.presentations.filter(document__type='agenda').first()
     
     if request.method == 'POST':
         form = UploadOrEnterAgendaForm(show_apply_to_all_checkbox,request.POST,request.FILES)
@@ -2771,17 +2771,17 @@ def upload_session_agenda(request, session_id, num):
                               rev = '00',
                           )
                 doc.states.add(State.objects.get(type_id='agenda',slug='active'))
-            if session.sessionpresentation_set.filter(document=doc).exists():
-                sp = session.sessionpresentation_set.get(document=doc)
+            if session.presentations.filter(document=doc).exists():
+                sp = session.presentations.get(document=doc)
                 sp.rev = doc.rev
                 sp.save()
             else:
-                session.sessionpresentation_set.create(document=doc,rev=doc.rev)
+                session.presentations.create(document=doc,rev=doc.rev)
             if apply_to_all:
                 for other_session in sessions:
                     if other_session != session:
-                        other_session.sessionpresentation_set.filter(document__type='agenda').delete()
-                        other_session.sessionpresentation_set.create(document=doc,rev=doc.rev)
+                        other_session.presentations.filter(document__type='agenda').delete()
+                        other_session.presentations.create(document=doc,rev=doc.rev)
             filename = '%s-%s%s'% ( doc.name, doc.rev, ext)
             doc.uploaded_filename = filename
             e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev)
@@ -2832,7 +2832,7 @@ def upload_session_slides(request, session_id, num, name=None):
         slides = Document.objects.filter(name=name).first()
         if not (slides and slides.type_id=='slides'):
             raise Http404
-        slides_sp = session.sessionpresentation_set.filter(document=slides).first()
+        slides_sp = session.presentations.filter(document=slides).first()
     
     if request.method == 'POST':
         form = UploadSlidesForm(session, show_apply_to_all_checkbox,request.POST,request.FILES)
@@ -2872,18 +2872,18 @@ def upload_session_slides(request, session_id, num, name=None):
                           )
                 doc.states.add(State.objects.get(type_id='slides',slug='active'))
                 doc.states.add(State.objects.get(type_id='reuse_policy',slug='single'))
-            if session.sessionpresentation_set.filter(document=doc).exists():
-                sp = session.sessionpresentation_set.get(document=doc)
+            if session.presentations.filter(document=doc).exists():
+                sp = session.presentations.get(document=doc)
                 sp.rev = doc.rev
                 sp.save()
             else:
-                max_order = session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-                session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+                max_order = session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+                session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
             if apply_to_all:
                 for other_session in sessions:
-                    if other_session != session and not other_session.sessionpresentation_set.filter(document=doc).exists():
-                        max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-                        other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+                    if other_session != session and not other_session.presentations.filter(document=doc).exists():
+                        max_order = other_session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+                        other_session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
             filename = '%s-%s%s'% ( doc.name, doc.rev, ext)
             doc.uploaded_filename = filename
             e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev)
@@ -2983,7 +2983,7 @@ def remove_sessionpresentation(request, session_id, num, name):
     if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"):
         permission_denied(request, "The materials cutoff for this session has passed. Contact the secretariat for further action.")
     if request.method == 'POST':
-        session.sessionpresentation_set.filter(pk=sp.pk).delete()
+        session.presentations.filter(pk=sp.pk).delete()
         c = DocEvent(type="added_comment", doc=sp.document, rev=sp.document.rev, by=request.user.person)
         c.desc = "Removed from session: %s" % (session)
         c.save()
@@ -3008,7 +3008,7 @@ def ajax_add_slides_to_session(request, session_id, num):
         order = int(order_str)
     except (ValueError, TypeError):
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json')
-    if order < 1 or order > session.sessionpresentation_set.filter(document__type_id='slides').count() + 1 :
+    if order < 1 or order > session.presentations.filter(document__type_id='slides').count() + 1 :
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json')
 
     name = request.POST.get('name', None)
@@ -3016,10 +3016,10 @@ def ajax_add_slides_to_session(request, session_id, num):
     if not doc:
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json')
 
-    if not session.sessionpresentation_set.filter(document=doc).exists():
+    if not session.presentations.filter(document=doc).exists():
         condition_slide_order(session)
-        session.sessionpresentation_set.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1)
-        session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=order)
+        session.presentations.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1)
+        session.presentations.create(document=doc,rev=doc.rev,order=order)
         DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Added to session: %s" % session)
 
     return HttpResponse(json.dumps({'success':True}), content_type='application/json')
@@ -3041,7 +3041,7 @@ def ajax_remove_slides_from_session(request, session_id, num):
         oldIndex = int(oldIndex_str)
     except (ValueError, TypeError):
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json')
-    if oldIndex < 1 or oldIndex > session.sessionpresentation_set.filter(document__type_id='slides').count() :
+    if oldIndex < 1 or oldIndex > session.presentations.filter(document__type_id='slides').count() :
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json')
 
     name = request.POST.get('name', None)
@@ -3050,11 +3050,11 @@ def ajax_remove_slides_from_session(request, session_id, num):
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json')
 
     condition_slide_order(session)
-    affected_presentations = session.sessionpresentation_set.filter(document=doc).first()
+    affected_presentations = session.presentations.filter(document=doc).first()
     if affected_presentations:
         if affected_presentations.order == oldIndex:
             affected_presentations.delete()
-            session.sessionpresentation_set.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1)    
+            session.presentations.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1)    
             DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Removed from session: %s" % session)
             return HttpResponse(json.dumps({'success':True}), content_type='application/json')
         else:
@@ -3074,7 +3074,7 @@ def ajax_reorder_slides_in_session(request, session_id, num):
     if request.method != 'POST' or not request.POST:
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'No data submitted or not POST' }),content_type='application/json')  
 
-    num_slides_in_session = session.sessionpresentation_set.filter(document__type_id='slides').count()
+    num_slides_in_session = session.presentations.filter(document__type_id='slides').count()
     oldIndex_str = request.POST.get('oldIndex', None)
     try:
         oldIndex = int(oldIndex_str)
@@ -3095,11 +3095,11 @@ def ajax_reorder_slides_in_session(request, session_id, num):
         return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json')
 
     condition_slide_order(session)
-    sp = session.sessionpresentation_set.get(order=oldIndex)
+    sp = session.presentations.get(order=oldIndex)
     if oldIndex < newIndex:
-        session.sessionpresentation_set.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1)
+        session.presentations.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1)
     else:
-        session.sessionpresentation_set.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1)
+        session.presentations.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1)
     sp.order = newIndex
     sp.save()
 
@@ -3749,7 +3749,7 @@ def organize_proceedings_sessions(sessions):
             if s.current_status != 'canceled':
                 all_canceled = False
             by_name.setdefault(s.name, [])
-            if s.current_status != 'notmeet' or s.sessionpresentation_set.exists():
+            if s.current_status != 'notmeet' or s.presentations.exists():
                 by_name[s.name].append(s)  # for notmeet, only include sessions with materials
         for sess_name, ss in by_name.items():
             session = ss[0] if ss else None
@@ -3781,11 +3781,12 @@ def organize_proceedings_sessions(sessions):
                 'name': sess_name,
                 'session': session,
                 'canceled': all_canceled,
-                'has_materials': s.sessionpresentation_set.exists(),
+                'has_materials': s.presentations.exists(),
                 'agendas': _format_materials((s, s.agenda()) for s in ss),
                 'minutes': _format_materials((s, s.minutes()) for s in ss),
                 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss),
                 'recordings': _format_materials((s, s.recordings()) for s in ss),
+                'chatlogs': _format_materials((s, s.chatlogs()) for s in ss),
                 'slides': _format_materials((s, s.slides()) for s in ss),
                 'drafts': _format_materials((s, s.drafts()) for s in ss),
                 'last_update': session.last_update if hasattr(session, 'last_update') else None
@@ -4148,7 +4149,7 @@ def api_upload_chatlog(request):
     session = Session.objects.filter(pk=session_id).first()
     if not session:
         return err(400, "Invalid session")
-    chatlog_sp = session.sessionpresentation_set.filter(document__type='chatlog').first()
+    chatlog_sp = session.presentations.filter(document__type='chatlog').first()
     if chatlog_sp:
         doc = chatlog_sp.document
         doc.rev = f"{(int(doc.rev)+1):02d}"
@@ -4188,7 +4189,7 @@ def api_upload_polls(request):
     session = Session.objects.filter(pk=session_id).first()
     if not session:
         return err(400, "Invalid session")
-    polls_sp = session.sessionpresentation_set.filter(document__type='polls').first()
+    polls_sp = session.presentations.filter(document__type='polls').first()
     if polls_sp:
         doc = polls_sp.document
         doc.rev = f"{(int(doc.rev)+1):02d}"
@@ -4605,18 +4606,18 @@ def approve_proposed_slides(request, slidesubmission_id, num):
                           )
                 doc.states.add(State.objects.get(type_id='slides',slug='active'))
                 doc.states.add(State.objects.get(type_id='reuse_policy',slug='single'))
-                if submission.session.sessionpresentation_set.filter(document=doc).exists():
-                    sp = submission.session.sessionpresentation_set.get(document=doc)
+                if submission.session.presentations.filter(document=doc).exists():
+                    sp = submission.session.presentations.get(document=doc)
                     sp.rev = doc.rev
                     sp.save()
                 else:
-                    max_order = submission.session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-                    submission.session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+                    max_order = submission.session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+                    submission.session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
                 if apply_to_all:
                     for other_session in sessions:
-                        if other_session != submission.session and not other_session.sessionpresentation_set.filter(document=doc).exists():
-                            max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-                            other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+                        if other_session != submission.session and not other_session.presentations.filter(document=doc).exists():
+                            max_order = other_session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+                            other_session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
                 sub_name, sub_ext = os.path.splitext(submission.filename)
                 target_filename = '%s-%s%s' % (sub_name[:sub_name.rfind('-ss')],doc.rev,sub_ext)
                 doc.uploaded_filename = target_filename
diff --git a/ietf/name/migrations/0013_narrativeminutes.py b/ietf/name/migrations/0013_narrativeminutes.py
new file mode 100644
index 000000000..89aa75a37
--- /dev/null
+++ b/ietf/name/migrations/0013_narrativeminutes.py
@@ -0,0 +1,36 @@
+# Copyright The IETF Trust 2023, All Rights Reserved
+
+from django.db import migrations, models
+
+
+def forward(apps, schema_editor):
+    DocTypeName = apps.get_model("name", "DocTypeName")
+    DocTypeName.objects.create(
+        slug="narrativeminutes",
+        name="Narrative Minutes",
+        desc="",
+        used=True,
+        order=0,
+        prefix="narrative-minutes",
+    )
+
+
+def reverse(apps, schema_editor):
+    DocTypeName = apps.get_model("name", "DocTypeName")
+    DocTypeName.objects.filter(slug="narrativeminutes").delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("name", "0012_adjust_important_dates"),
+    ]
+
+    operations = [
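+        # Widen "prefix" before inserting the new name: "narrative-minutes" is 17 characters, which would not fit the previous max_length of 16.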
+        migrations.AlterField(
+            model_name="doctypename",
+            name="prefix",
+            field=models.CharField(default="", max_length=32),
+        ),
+        migrations.RunPython(forward, reverse),
+    ]
diff --git a/ietf/name/models.py b/ietf/name/models.py
index d4d53def8..8c2520a48 100644
--- a/ietf/name/models.py
+++ b/ietf/name/models.py
@@ -43,7 +43,7 @@ class DocRelationshipName(NameModel):
 class DocTypeName(NameModel):
     """Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email,
     Review, Issue, Wiki, RFC"""
-    prefix =  models.CharField(max_length=16, default="")
+    prefix =  models.CharField(max_length=32, default="")
 class DocTagName(NameModel):
     """Waiting for Reference, IANA Coordination, Revised ID Needed,
     External Party, AD Followup, Point Raised - Writeup Needed, ..."""
diff --git a/ietf/settings.py b/ietf/settings.py
index 17ed58d6f..15302fee6 100644
--- a/ietf/settings.py
+++ b/ietf/settings.py
@@ -677,7 +677,6 @@ STATUS_CHANGE_PATH = '/a/ietfdata/doc/status-change'
 AGENDA_PATH = '/a/www/www6s/proceedings/'
 MEETINGHOST_LOGO_PATH = AGENDA_PATH  # put these in the same place as other proceedings files
 IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/'
-IESG_WG_EVALUATION_DIR = "/a/www/www6/iesg/evaluation"
 # Move drafts to this directory when they expire
 INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/'
 # The following directory contains linked copies of all drafts, but don't
@@ -856,6 +855,7 @@ MEETING_MATERIALS_SERVE_LOCALLY = True
 MEETING_DOC_LOCAL_HREFS = {
     "agenda": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "minutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
+    "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "slides": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "chatlog": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "polls": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
@@ -867,6 +867,7 @@ MEETING_DOC_LOCAL_HREFS = {
 MEETING_DOC_CDN_HREFS = {
     "agenda": "https://www.ietf.org/proceedings/{meeting.number}/agenda/{doc.name}-{doc.rev}",
     "minutes": "https://www.ietf.org/proceedings/{meeting.number}/minutes/{doc.name}-{doc.rev}",
+    "narrativeminutes": "https://www.ietf.org/proceedings/{meeting.number}/narrative-minutes/{doc.name}-{doc.rev}",
     "slides": "https://www.ietf.org/proceedings/{meeting.number}/slides/{doc.name}-{doc.rev}",
     "recording": "{doc.external_url}",
     "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}",
@@ -878,6 +879,7 @@ MEETING_DOC_HREFS = MEETING_DOC_LOCAL_HREFS if MEETING_MATERIALS_SERVE_LOCALLY e
 MEETING_DOC_OLD_HREFS = {
     "agenda": "/meeting/{meeting.number}/materials/{doc.name}",
     "minutes": "/meeting/{meeting.number}/materials/{doc.name}",
+    "narrativeminutes" : "/meeting/{meeting.number}/materials/{doc.name}",
     "slides": "/meeting/{meeting.number}/materials/{doc.name}",
     "recording": "{doc.external_url}",
     "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}",
@@ -887,6 +889,7 @@ MEETING_DOC_OLD_HREFS = {
 MEETING_DOC_GREFS = {
     "agenda": "/meeting/{meeting.number}/materials/{doc.name}",
     "minutes": "/meeting/{meeting.number}/materials/{doc.name}",
+    "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}",
     "slides": "/meeting/{meeting.number}/materials/{doc.name}",
     "recording": "{doc.external_url}",
     "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}",
@@ -900,6 +903,7 @@ MEETING_MATERIALS_DEFAULT_SUBMISSION_CORRECTION_DAYS = 50
 MEETING_VALID_UPLOAD_EXTENSIONS = {
     'agenda':       ['.txt','.html','.htm', '.md', ],
     'minutes':      ['.txt','.html','.htm', '.md', '.pdf', ],
+    'narrativeminutes': ['.txt','.html','.htm', '.md', '.pdf', ],
     'slides':       ['.doc','.docx','.pdf','.ppt','.pptx','.txt', ], # Note the removal of .zip
     'bluesheets':   ['.pdf', '.txt', ],
     'procmaterials':['.pdf', ],
@@ -909,6 +913,7 @@ MEETING_VALID_UPLOAD_EXTENSIONS = {
 MEETING_VALID_UPLOAD_MIME_TYPES = {
     'agenda':       ['text/plain', 'text/html', 'text/markdown', 'text/x-markdown', ],
     'minutes':      ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ],
+    'narrativeminutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ],
     'slides':       [],
     'bluesheets':   ['application/pdf', 'text/plain', ],
     'procmaterials':['application/pdf', ],
@@ -1174,6 +1179,7 @@ CELERY_TASK_IGNORE_RESULT = True  # ignore results unless specifically enabled f
 MEETECHO_ONSITE_TOOL_URL = "https://meetings.conf.meetecho.com/onsite{session.meeting.number}/?session={session.pk}"
 MEETECHO_VIDEO_STREAM_URL = "https://meetings.conf.meetecho.com/ietf{session.meeting.number}/?session={session.pk}"
 MEETECHO_AUDIO_STREAM_URL = "https://mp3.conf.meetecho.com/ietf{session.meeting.number}/{session.pk}.m3u"
+MEETECHO_SESSION_RECORDING_URL = "https://www.meetecho.com/ietf{session.meeting.number}/recordings#{session.group.acronym_upper}"
 
 # Put the production SECRET_KEY in settings_local.py, and also any other
 # sensitive or site-specific changes.  DO NOT commit settings_local.py to svn.
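Note: MEETECHO_SESSION_RECORDING_URL is a str.format template keyed on the session object, and the template changes further below replace hard-coded Meetecho links with session.session_recording_url. That property is not shown in this diff; the following is only a rough sketch of how such an expansion could work, not the patch's actual implementation:

    from django.conf import settings

    def session_recording_url(session):
        # Attribute access in the format template resolves placeholders such as
        # {session.meeting.number} and {session.group.acronym_upper}.
        return settings.MEETECHO_SESSION_RECORDING_URL.format(session=session)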
diff --git a/ietf/templates/doc/bofreq/bofreq_template.md b/ietf/templates/doc/bofreq/bofreq_template.md
index 7039e4679..78949ba7b 100644
--- a/ietf/templates/doc/bofreq/bofreq_template.md
+++ b/ietf/templates/doc/bofreq/bofreq_template.md
@@ -8,7 +8,6 @@ Fill in the details below. Keep items in the order they appear here.
 - Status: (not) WG Forming
 - Responsible AD: name
 - BOF proponents: name <email>, name <email> (1-3 people - who are requesting and coordinating discussion for proposal) 
-- BOF chairs: TBD
 - Number of people expected to attend: 100
 - Length of session (1 or 2 hours): 2 hours
 - Conflicts (whole Areas and/or WGs)
diff --git a/ietf/templates/doc/document_statement.html b/ietf/templates/doc/document_statement.html
index 79ea305cd..7b9759c3e 100644
--- a/ietf/templates/doc/document_statement.html
+++ b/ietf/templates/doc/document_statement.html
@@ -52,7 +52,7 @@
                 <td class="edit"></td>
                 <td id="statement-state">
                     {% if doc.get_state %}
-                        <span title="{{ doc.get_state.desc }}" class="{% if doc.get_state.name|slugify == 'active' %}text-success{% else %}text-danger{% endif %}">{{ doc.get_state.name }}</span>
+                        <span title="{{ doc.get_state.desc }}" class="badge rounded-pill {% if doc.get_state.name|slugify == 'active' %}text-bg-success{% else %}text-bg-warning{% endif %}">{{ doc.get_state.name }}</span>
                     {% else %}
                         No document state
                     {% endif %}
diff --git a/ietf/templates/group/meetings-row.html b/ietf/templates/group/meetings-row.html
index 65ba435ba..57c727eea 100644
--- a/ietf/templates/group/meetings-row.html
+++ b/ietf/templates/group/meetings-row.html
@@ -60,6 +60,12 @@
                {% if s.minutes %}href="{{ s.minutes.get_absolute_url }}"{% endif %}>
                 Minutes
             </a>
+            {% if group.acronym == "iesg" %}
+            <a class="btn btn-sm {% if not s.narrative_minutes %}btn-secondary disabled{% else %}btn-primary{% endif %}"
+               {% if s.narrative_minutes %}href="{{ s.narrative_minutes.get_absolute_url }}"{% endif %}>
+                Narrative Minutes
+            </a>
+            {% endif %}
             <a class="btn btn-primary btn-sm"
                 href="{% url 'ietf.meeting.views.session_details' num=s.meeting.number acronym=s.group.acronym %}">
                 {% if can_always_edit or can_edit_materials %}
diff --git a/ietf/templates/group/meetings.html b/ietf/templates/group/meetings.html
index 19f39d6d9..8acc688cc 100644
--- a/ietf/templates/group/meetings.html
+++ b/ietf/templates/group/meetings.html
@@ -85,7 +85,7 @@
         </table>
     {% endif %}
     {# The following is a temporary performance workaround, not long term design #}
-    {% if group.acronym != "iab" %}
+    {% if group.acronym != "iab" and group.acronym != "iesg" %}
         <p class="alert alert-info my-3">
             This page shows meetings within the last four years. For earlier meetings, please see the
             <a href="https://www.ietf.org/how/meetings/past/">proceedings</a>.
@@ -139,6 +139,12 @@
                                 {% if s.minutes %}href="{{ s.minutes.get_absolute_url }}"{% endif %}>
                                     Minutes
                                 </a>
+                                {% if group.acronym == "iesg" %}
+                                <a class="btn btn-sm {% if not s.narrative_minutes %}btn-secondary disabled{% else %}btn-primary{% endif %}"
+                                {% if s.narrative_minutes %}href="{{ s.narrative_minutes.get_absolute_url }}"{% endif %}>
+                                    Narrative Minutes
+                                </a>
+                                {% endif %}
                                 <a class="btn btn-primary btn-sm"
                                     href="{% url 'ietf.meeting.views.session_details' num=s.meeting.number acronym=s.group.acronym %}">
                                     {% if can_always_edit or can_edit_materials %}
diff --git a/ietf/templates/group/statements.html b/ietf/templates/group/statements.html
index 4e0fc6153..035c3bc96 100644
--- a/ietf/templates/group/statements.html
+++ b/ietf/templates/group/statements.html
@@ -29,7 +29,9 @@
             {% for statement in statements %}
             <tr>
                 <td title="{{ statement.published|date:'Y-m-d H:i:s O' }}">{{ statement.published|date:"Y-m-d" }}</td>
-                <td><a href="{% url 'ietf.doc.views_doc.document_main' name=statement.name %}">{{statement.title}}</a></td>
+                <td><a href="{% url 'ietf.doc.views_doc.document_main' name=statement.name %}">{{statement.title}}</a>
+                    {% if statement.status == "replaced" %}<span class="badge rounded-pill text-bg-warning">Replaced</span>{% endif %}
+                </td>
             </tr>
             {% endfor %}
         </tbody>
diff --git a/ietf/templates/meeting/group_proceedings.html b/ietf/templates/meeting/group_proceedings.html
index 618c28164..666685000 100644
--- a/ietf/templates/meeting/group_proceedings.html
+++ b/ietf/templates/meeting/group_proceedings.html
@@ -1,4 +1,4 @@
-{# Copyright The IETF Trust 2015, All Rights Reserved #}
+{# Copyright The IETF Trust 2015-2024, All Rights Reserved #}
 {% load origin %}
 {% origin %}
 {% load ietf_filters %}
@@ -54,6 +54,18 @@
                 </a>
                 <br>
             {% endfor %}
+            {% for chatlog in entry.chatlogs %}
+                <a href="{{ chatlog.material|meeting_href:meeting }}">
+                    Chatlog
+                    {% if chatlog.time %}{{chatlog.time|date:"D G:i"}}{% endif %}
+                </a>
+                <br>
+            {% empty %}
+                <a href="{{ entry.session.chat_archive_url }}">
+                    Chatlog
+                </a>
+                <br>
+            {% endfor %}
         </td>
         {# recordings #}
         <td>
@@ -64,6 +76,12 @@
               </a>
               <br>
           {% endfor %}
+            {% if entry.session.video_stream_url %}
+                <a href="{{ entry.session.session_recording_url }}">
+                    Session recording
+                </a>
+                <br>
+            {% endif %}
         </td>
         {# slides #}
         <td>
diff --git a/ietf/templates/meeting/interim_meeting_cancellation_notice.txt b/ietf/templates/meeting/interim_meeting_cancellation_notice.txt
index d4774c168..83f29bf1e 100644
--- a/ietf/templates/meeting/interim_meeting_cancellation_notice.txt
+++ b/ietf/templates/meeting/interim_meeting_cancellation_notice.txt
@@ -3,6 +3,6 @@ The {{ group.name }} ({{ group.acronym }}) {% if not meeting.city %}virtual {% e
 interim meeting for {{ meeting.date|date:"Y-m-d" }} from {{ start_time|time:"H:i" }} to {{ end_time|time:"H:i" }} {{ meeting.time_zone }}
 has been cancelled.
 
-{{ meeting.session_set.0.agenda_note }}
+{{ meeting.session_set.first.agenda_note }}
 {% endtimezone %}
 
diff --git a/ietf/templates/meeting/interim_session_buttons.html b/ietf/templates/meeting/interim_session_buttons.html
index 2334ec9e8..2f0951338 100644
--- a/ietf/templates/meeting/interim_session_buttons.html
+++ b/ietf/templates/meeting/interim_session_buttons.html
@@ -148,7 +148,7 @@
                     {% endfor %}
                 {% elif session.video_stream_url %}
                     <a class="btn btn-outline-primary"
-                       href="http://www.meetecho.com/ietf{{ meeting.number }}/recordings#{{ acronym.upper }}"
+                       href="{{ session.session_recording_url }}"
                        aria-label="Meetecho session recording"
                        title="Meetecho session recording">
                         <i class="bi bi-file-slides"></i>
diff --git a/ietf/templates/meeting/session_buttons_include.html b/ietf/templates/meeting/session_buttons_include.html
index b69413238..3dc9c45dc 100644
--- a/ietf/templates/meeting/session_buttons_include.html
+++ b/ietf/templates/meeting/session_buttons_include.html
@@ -181,7 +181,7 @@
                         {% if session.video_stream_url %}
                             <a class="btn btn-outline-primary"
                                 role="button"
-                                href="https://www.meetecho.com/ietf{{ meeting.number }}/recordings#{{ acronym.upper }}"
+                                href="{{ session.session_recording_url }}"
                                 aria-label="Session recording"
                                 title="Session recording">
                                 <i class="bi bi-file-slides"></i>
@@ -351,7 +351,7 @@
                             {% if session.video_stream_url %}
                                 <li>
                                     <a class="dropdown-item"
-                                        href="https://www.meetecho.com/ietf{{ meeting.number }}/recordings#{{ acronym.upper }}">
+                                        href="{{ session.session_recording_url }}">
                                         <i class="bi bi-file-slides"></i> Session recording
                                     </a>
                                 </li>
diff --git a/ietf/templates/meeting/session_details_panel.html b/ietf/templates/meeting/session_details_panel.html
index 3ff09fc33..0e3005018 100644
--- a/ietf/templates/meeting/session_details_panel.html
+++ b/ietf/templates/meeting/session_details_panel.html
@@ -344,7 +344,7 @@
                 {% if session.video_stream_url %}
                     <tr>
                         <td>
-                            <a href="https://www.meetecho.com/ietf{{ meeting.number }}/recordings#{{ group.acronym.upper }}">
+                            <a href="{{ session.session_recording_url }}">
                                 <i class="bi bi-file-slides"></i> Session recording
                             </a>
                         </td>
diff --git a/ietf/templates/meeting/timeslot_edit.html b/ietf/templates/meeting/timeslot_edit.html
index 46b4eb946..11691ba6d 100644
--- a/ietf/templates/meeting/timeslot_edit.html
+++ b/ietf/templates/meeting/timeslot_edit.html
@@ -121,6 +121,7 @@ a.new-timeslot-link { color: lightgray; font-size: large;}
                             <tr>
                                 <th scope="row">
                                     <span class="room-heading">{{ room.name }}
+                                        {% if room.functional_name and room.name != room.functional_name %} - {{ room.functional_name }}{% endif %}
                                         {% if room.capacity %}<span class="capacity">({{ room.capacity }})</span>{% endif %}
                                     </span>
                                 </th>
diff --git a/ietf/utils/decorators.py b/ietf/utils/decorators.py
index 254854a6c..7c8e3fbc5 100644
--- a/ietf/utils/decorators.py
+++ b/ietf/utils/decorators.py
@@ -114,3 +114,30 @@ def memoize(func):
     # we cannot set up the cache here.
     return decorate(func, _memoize)
 
+
+def ignore_view_kwargs(*args):
+    """Ignore the specified kwargs if they are present
+
+    Usage: 
+      @ignore_view_kwargs("ignore_arg1", "ignore_arg2")
+      def my_view(request, good_arg):
+        ...
+
+      This will allow my_view() to be used in url() paths that have zero, one, or both of
+      ignore_arg1 and ignore_arg2 captured. These will be ignored, while good_arg will still
+      be captured as usual.
+    """
+    kwargs_to_ignore = args
+
+    def decorate(view):
+        @wraps(view)
+        def wrapped(*args, **kwargs):
+            for kwarg in kwargs_to_ignore:
+                kwargs.pop(kwarg, None)
+            return view(*args, **kwargs)
+
+        return wrapped
+
+    return decorate
+
+
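Note: a minimal usage sketch for the new ignore_view_kwargs decorator, matching its docstring (illustration only, not part of this patch; the view and URL patterns are hypothetical):

    from django.urls import re_path
    from ietf.utils.decorators import ignore_view_kwargs

    @ignore_view_kwargs("num")
    def materials_view(request, document):
        ...

    urlpatterns = [
        # Both patterns resolve to the same view; the extra "num" kwarg captured
        # by the second pattern is dropped by the decorator before the view runs.
        re_path(r"^materials/(?P<document>[^/]+)/$", materials_view),
        re_path(r"^meeting/(?P<num>\d+)/materials/(?P<document>[^/]+)/$", materials_view),
    ]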
diff --git a/ietf/utils/markdown.py b/ietf/utils/markdown.py
index 63d1c7a70..446d34895 100644
--- a/ietf/utils/markdown.py
+++ b/ietf/utils/markdown.py
@@ -12,7 +12,7 @@ from markdown.postprocessors import Postprocessor
 from django.utils.safestring import mark_safe
 
 from ietf.doc.templatetags.ietf_filters import urlize_ietf_docs
-from ietf.utils.text import bleach_cleaner, bleach_linker
+from ietf.utils.text import bleach_cleaner, liberal_bleach_cleaner, bleach_linker
 
 
 class LinkifyExtension(Extension):
@@ -49,3 +49,19 @@ def markdown(text):
             )
         )
     )
+
+def liberal_markdown(text):
+    return mark_safe(
+        liberal_bleach_cleaner.clean(
+            python_markdown.markdown(
+                text,
+                extensions=[
+                    "extra",
+                    "nl2br",
+                    "sane_lists",
+                    "toc",
+                    LinkifyExtension(),
+                ],
+            )
+        )
+    )
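Note: only the cleaner differs between markdown() and the new liberal_markdown(); per the ietf/utils/text.py change below, the liberal cleaner additionally allows img, figure, and figcaption (with src/alt on img). A rough sketch of the expected difference, assuming img is not in the default tag allowlist:

    from ietf.utils.markdown import markdown, liberal_markdown

    text = "![diagram](figure.png)"
    markdown(text)          # the <img> produced by Markdown is stripped by bleach_cleaner
    liberal_markdown(text)  # <img alt="diagram" src="figure.png"> survives liberal_bleach_cleaner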
diff --git a/ietf/utils/tests.py b/ietf/utils/tests.py
index 499b87488..4ac2732ed 100644
--- a/ietf/utils/tests.py
+++ b/ietf/utils/tests.py
@@ -5,6 +5,7 @@
 import datetime
 import io
 import json
+import lxml.etree
 import os.path
 import pytz
 import shutil
@@ -450,6 +451,41 @@ class XMLDraftTests(TestCase):
                 datetime.date(today.year, 1 if today.month != 1 else 2, 15),
             )
 
+    def test_parse_docname(self):
+        with self.assertRaises(ValueError) as cm:
+            XMLDraft.parse_docname(lxml.etree.Element("xml"))  # no docName
+        self.assertIn("Missing docName attribute", str(cm.exception))
+
+        # There ought to be more invalid docNames, but we use XMLDraft in places where we
+        # don't actually care about the validation, so for now just test what has long been
+        # the implementation.
+        with self.assertRaises(ValueError) as cm:
+            XMLDraft.parse_docname(lxml.etree.Element("xml", docName=""))  # not a valid docName
+        self.assertIn("Unable to parse docName", str(cm.exception))
+
+        self.assertEqual(
+            XMLDraft.parse_docname(lxml.etree.Element("xml", docName="draft-foo-bar-baz-01")),
+            ("draft-foo-bar-baz", "01"),
+        )
+
+        self.assertEqual(
+            XMLDraft.parse_docname(lxml.etree.Element("xml", docName="draft-foo-bar-baz")),
+            ("draft-foo-bar-baz", None),
+        )
+
+        self.assertEqual(
+            XMLDraft.parse_docname(lxml.etree.Element("xml", docName="draft-foo-bar-baz-")),
+            ("draft-foo-bar-baz-", None),
+        )
+
+        # This is awful, but it is how we've been running for some time. The missing rev will
+        # trigger validation errors for submissions, so we're at least somewhat guarded against
+        # this case.
+        self.assertEqual(
+            XMLDraft.parse_docname(lxml.etree.Element("xml", docName="-01")),
+            ("-01", None),
+        )
+
 
 class NameTests(TestCase):
 
diff --git a/ietf/utils/text.py b/ietf/utils/text.py
index 48f5538cb..2fba113d0 100644
--- a/ietf/utils/text.py
+++ b/ietf/utils/text.py
@@ -46,6 +46,15 @@ bleach_cleaner = bleach.sanitizer.Cleaner(
     tags=tags, attributes=attributes, protocols=protocols, strip=True
 )
 
+liberal_tags = copy.copy(tags)
+liberal_attributes = copy.copy(attributes)
+liberal_tags.update(["img","figure","figcaption"])
+liberal_attributes["img"] = ["src","alt"]
+
+liberal_bleach_cleaner = bleach.sanitizer.Cleaner(
+    tags=liberal_tags, attributes=liberal_attributes, protocols=protocols, strip=True
+)
+
 validate_url = URLValidator()
 
 
diff --git a/ietf/utils/xmldraft.py b/ietf/utils/xmldraft.py
index 5a0abb613..3a9ac02b1 100644
--- a/ietf/utils/xmldraft.py
+++ b/ietf/utils/xmldraft.py
@@ -29,7 +29,7 @@ class XMLDraft(Draft):
         # cast xml_file to str so, e.g., this will work with a Path
         self.xmltree, self.xml_version = self.parse_xml(str(xml_file))
         self.xmlroot = self.xmltree.getroot()
-        self.filename, self.revision = self._parse_docname()
+        self.filename, self.revision = self.parse_docname(self.xmlroot)
 
     @staticmethod
     def parse_xml(filename):
@@ -125,8 +125,11 @@ class XMLDraft(Draft):
             section_name = section_elt.get('title')  # fall back to title if we have it
         return section_name
 
-    def _parse_docname(self):
-        docname = self.xmlroot.attrib.get('docName')
+    @staticmethod
+    def parse_docname(xmlroot):
+        docname = xmlroot.attrib.get('docName')
+        if docname is None:
+            raise ValueError("Missing docName attribute in the XML root element")
         revmatch = re.match(
             r'^(?P<filename>.+?)(?:-(?P<rev>[0-9][0-9]))?$',
             docname,
diff --git a/package.json b/package.json
index 00d4ef97c..dc08219de 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,7 @@
     "bootstrap": "5.3.2",
     "bootstrap-icons": "1.11.3",
     "browser-fs-access": "0.35.0",
-    "caniuse-lite": "1.0.30001581",
+    "caniuse-lite": "1.0.30001588",
     "d3": "7.8.5",
     "file-saver": "2.0.5",
     "highcharts": "11.3.0",
@@ -33,7 +33,7 @@
     "lodash-es": "4.17.21",
     "luxon": "3.4.4",
     "moment": "2.30.1",
-    "moment-timezone": "0.5.44",
+    "moment-timezone": "0.5.45",
     "ms": "2.1.3",
     "murmurhash-js": "1.0.0",
     "naive-ui": "2.37.3",
@@ -46,7 +46,7 @@
     "slugify": "1.6.6",
     "sortablejs": "1.15.2",
     "vanillajs-datepicker": "1.3.4",
-    "vue": "3.4.15",
+    "vue": "3.4.19",
     "vue-router": "4.2.5",
     "zxcvbn": "4.4.2"
   },
@@ -65,12 +65,12 @@
     "eslint-plugin-n": "16.6.2",
     "eslint-plugin-node": "11.1.0",
     "eslint-plugin-promise": "6.1.1",
-    "eslint-plugin-vue": "9.20.1",
+    "eslint-plugin-vue": "9.21.1",
     "html-validate": "8.9.1",
     "jquery-migrate": "3.4.1",
     "parcel": "2.11.0",
     "pug": "3.0.2",
-    "sass": "1.70.0",
+    "sass": "1.71.0",
     "seedrandom": "3.0.5",
     "vite": "4.5.2"
   },
diff --git a/playwright/package-lock.json b/playwright/package-lock.json
index baa6f31c5..1168717d2 100644
--- a/playwright/package-lock.json
+++ b/playwright/package-lock.json
@@ -6,7 +6,7 @@
   "packages": {
     "": {
       "dependencies": {
-        "@faker-js/faker": "8.4.0",
+        "@faker-js/faker": "8.4.1",
         "lodash": "4.17.21",
         "lodash-es": "4.17.21",
         "luxon": "3.4.4",
@@ -22,7 +22,7 @@
         "eslint-plugin-n": "16.6.2",
         "eslint-plugin-node": "11.1.0",
         "eslint-plugin-promise": "6.1.1",
-        "npm-check-updates": "16.14.14"
+        "npm-check-updates": "16.14.15"
       }
     },
     "node_modules/@aashutoshrathi/word-wrap": {
@@ -101,9 +101,9 @@
       }
     },
     "node_modules/@faker-js/faker": {
-      "version": "8.4.0",
-      "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.4.0.tgz",
-      "integrity": "sha512-htW87352wzUCdX1jyUQocUcmAaFqcR/w082EC8iP/gtkF0K+aKcBp0hR5Arb7dzR8tQ1TrhE9DNa5EbJELm84w==",
+      "version": "8.4.1",
+      "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.4.1.tgz",
+      "integrity": "sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg==",
       "funding": [
         {
           "type": "opencollective",
@@ -3804,9 +3804,9 @@
       }
     },
     "node_modules/npm-check-updates": {
-      "version": "16.14.14",
-      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.14.tgz",
-      "integrity": "sha512-Y3ajS/Ep40jM489rLBdz9jehn/BMil5s9fA4PSr2ZJxxSmtLWCSmRqsI2IEZ9Nb3MTMu8a3s7kBs0l+JbjdkTA==",
+      "version": "16.14.15",
+      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.15.tgz",
+      "integrity": "sha512-WH0wJ9j6CP7Azl+LLCxWAYqroT2IX02kRIzgK/fg0rPpMbETgHITWBdOPtrv521xmA3JMgeNsQ62zvVtS/nCmQ==",
       "dev": true,
       "dependencies": {
         "chalk": "^5.3.0",
@@ -6015,9 +6015,9 @@
       "dev": true
     },
     "@faker-js/faker": {
-      "version": "8.4.0",
-      "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.4.0.tgz",
-      "integrity": "sha512-htW87352wzUCdX1jyUQocUcmAaFqcR/w082EC8iP/gtkF0K+aKcBp0hR5Arb7dzR8tQ1TrhE9DNa5EbJELm84w=="
+      "version": "8.4.1",
+      "resolved": "https://registry.npmjs.org/@faker-js/faker/-/faker-8.4.1.tgz",
+      "integrity": "sha512-XQ3cU+Q8Uqmrbf2e0cIC/QN43sTBSC8KF12u29Mb47tWrt2hAgBXSgpZMj4Ao8Uk0iJcU99QsOCaIL8934obCg=="
     },
     "@humanwhocodes/config-array": {
       "version": "0.11.13",
@@ -8721,9 +8721,9 @@
       }
     },
     "npm-check-updates": {
-      "version": "16.14.14",
-      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.14.tgz",
-      "integrity": "sha512-Y3ajS/Ep40jM489rLBdz9jehn/BMil5s9fA4PSr2ZJxxSmtLWCSmRqsI2IEZ9Nb3MTMu8a3s7kBs0l+JbjdkTA==",
+      "version": "16.14.15",
+      "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-16.14.15.tgz",
+      "integrity": "sha512-WH0wJ9j6CP7Azl+LLCxWAYqroT2IX02kRIzgK/fg0rPpMbETgHITWBdOPtrv521xmA3JMgeNsQ62zvVtS/nCmQ==",
       "dev": true,
       "requires": {
         "chalk": "^5.3.0",
diff --git a/playwright/package.json b/playwright/package.json
index ecd55ca3c..0ec3357ea 100644
--- a/playwright/package.json
+++ b/playwright/package.json
@@ -14,10 +14,10 @@
     "eslint-plugin-n": "16.6.2",
     "eslint-plugin-node": "11.1.0",
     "eslint-plugin-promise": "6.1.1",
-    "npm-check-updates": "16.14.14"
+    "npm-check-updates": "16.14.15"
   },
   "dependencies": {
-    "@faker-js/faker": "8.4.0",
+    "@faker-js/faker": "8.4.1",
     "lodash": "4.17.21",
     "lodash-es": "4.17.21",
     "luxon": "3.4.4",
diff --git a/yarn.lock b/yarn.lock
index 83ececbb9..d0a4b971c 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -39,12 +39,12 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@babel/parser@npm:^7.23.6":
-  version: 7.23.6
-  resolution: "@babel/parser@npm:7.23.6"
+"@babel/parser@npm:^7.23.9":
+  version: 7.23.9
+  resolution: "@babel/parser@npm:7.23.9"
   bin:
     parser: ./bin/babel-parser.js
-  checksum: 140801c43731a6c41fd193f5c02bc71fd647a0360ca616b23d2db8be4b9739b9f951a03fc7c2db4f9b9214f4b27c1074db0f18bc3fa653783082d5af7c8860d5
+  checksum: e7cd4960ac8671774e13803349da88d512f9292d7baa952173260d3e8f15620a28a3701f14f709d769209022f9e7b79965256b8be204fc550cfe783cdcabe7c7
   languageName: node
   linkType: hard
 
@@ -2018,53 +2018,53 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@vue/compiler-core@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/compiler-core@npm:3.4.15"
+"@vue/compiler-core@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/compiler-core@npm:3.4.19"
   dependencies:
-    "@babel/parser": ^7.23.6
-    "@vue/shared": 3.4.15
+    "@babel/parser": ^7.23.9
+    "@vue/shared": 3.4.19
     entities: ^4.5.0
     estree-walker: ^2.0.2
     source-map-js: ^1.0.2
-  checksum: 1610f715b8ab6de95aa9f904d484ed275cf39e947d3fbb92a8ff7d7178360b71cfeae2710ef819dbeb738e1f94bf191298449719a2ecc860389338bcdef220f5
+  checksum: 92fbcc52c0e0b44c88a5af84c9beb3aab80c85f9fc81bdb00ea64b6c0e524843670f576d6734c7fe385c116f71ae189bc6e9dc0674fd4898c3163b32c00aaebc
   languageName: node
   linkType: hard
 
-"@vue/compiler-dom@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/compiler-dom@npm:3.4.15"
+"@vue/compiler-dom@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/compiler-dom@npm:3.4.19"
   dependencies:
-    "@vue/compiler-core": 3.4.15
-    "@vue/shared": 3.4.15
-  checksum: 373968c2c603f4eb9ebbf5f31ca2dc89991c4c1b0cee0213e613ad8b4ee632a33174e92bd91e0f8ff65f55188b46b742b91269a098c1e421d8f8bc919d5adc25
+    "@vue/compiler-core": 3.4.19
+    "@vue/shared": 3.4.19
+  checksum: b74c620c40b1bb9c06726fc61320291155bca44cf06ee55a7f030df90cd009af603ffeeacabebcca83a006d2f589997c2f32801f885a899ddb75818fc060d05c
   languageName: node
   linkType: hard
 
-"@vue/compiler-sfc@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/compiler-sfc@npm:3.4.15"
+"@vue/compiler-sfc@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/compiler-sfc@npm:3.4.19"
   dependencies:
-    "@babel/parser": ^7.23.6
-    "@vue/compiler-core": 3.4.15
-    "@vue/compiler-dom": 3.4.15
-    "@vue/compiler-ssr": 3.4.15
-    "@vue/shared": 3.4.15
+    "@babel/parser": ^7.23.9
+    "@vue/compiler-core": 3.4.19
+    "@vue/compiler-dom": 3.4.19
+    "@vue/compiler-ssr": 3.4.19
+    "@vue/shared": 3.4.19
     estree-walker: ^2.0.2
-    magic-string: ^0.30.5
+    magic-string: ^0.30.6
     postcss: ^8.4.33
     source-map-js: ^1.0.2
-  checksum: 4a707346c32b6deaec47c4bb1fddaaa6ec881e286db59de8922960f52a617ff7bebfcbe19e80c98a0fd91d0f575d962787f77c16ac10a7eaac7d938c48bfb4c7
+  checksum: d622207fdb2030320d3612226da077914018cdf9deb06db0368bbb5dd4ee796aa5f83717287cd5834157d67596142957e7d955d16b5345eafa3e13cb48d3a79a
   languageName: node
   linkType: hard
 
-"@vue/compiler-ssr@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/compiler-ssr@npm:3.4.15"
+"@vue/compiler-ssr@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/compiler-ssr@npm:3.4.19"
   dependencies:
-    "@vue/compiler-dom": 3.4.15
-    "@vue/shared": 3.4.15
-  checksum: 45a12ae2dd2e645db53d43b3c27df1d8fbf0584199d6e5581c96b4566d889376f5da411f8e453e113e3dcae0f2cc80b6f6fb36110f3f42f5cc260e48a99dd37f
+    "@vue/compiler-dom": 3.4.19
+    "@vue/shared": 3.4.19
+  checksum: b4599560fdad327f30b0a8fc72427bf2c17c44620924e948a3e87c3c35f2e98c080152e0540350b27b4dec832b74752bc94e1334ca8d114c741a4ae1ae67f6f7
   languageName: node
   linkType: hard
 
@@ -2075,52 +2075,52 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@vue/reactivity@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/reactivity@npm:3.4.15"
+"@vue/reactivity@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/reactivity@npm:3.4.19"
   dependencies:
-    "@vue/shared": 3.4.15
-  checksum: e1f8ef7ec3e933b5dd5e3aa3e281c38d1fd2834772016ea5193058d80342704afbed0e7728cf31eb5762c2705785eec98b3d154ae22005691bee5b35125a4d7c
+    "@vue/shared": 3.4.19
+  checksum: 67f6264792fc655734c54c7a0bc5209f4900a3bec1ab5e56367d73f1fd7e29c9a2c7483919c5750471c9730e76a718ccade793a997b9f58e8c1560bf89a750b6
   languageName: node
   linkType: hard
 
-"@vue/runtime-core@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/runtime-core@npm:3.4.15"
+"@vue/runtime-core@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/runtime-core@npm:3.4.19"
   dependencies:
-    "@vue/reactivity": 3.4.15
-    "@vue/shared": 3.4.15
-  checksum: 6ab6721410ce5379d3a0de8632527be5cae26adda33854bd32117cf395713d41980f47b3774ba4dfbe7242377397d61a5728aa14b6a0fbd9e8f77049ef1ca4a4
+    "@vue/reactivity": 3.4.19
+    "@vue/shared": 3.4.19
+  checksum: 7303ec2585c8ba906baaad2660243f160650624e8cb90765d693fc577271403cfeffcfd01edfb09204d907a5ba9810e49a2e34f6311488bd55f5d6c38d9232c0
   languageName: node
   linkType: hard
 
-"@vue/runtime-dom@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/runtime-dom@npm:3.4.15"
+"@vue/runtime-dom@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/runtime-dom@npm:3.4.19"
   dependencies:
-    "@vue/runtime-core": 3.4.15
-    "@vue/shared": 3.4.15
+    "@vue/runtime-core": 3.4.19
+    "@vue/shared": 3.4.19
     csstype: ^3.1.3
-  checksum: 4f2e79d95688dc110629d4879ce6cc9bdaf284a29636c28ea9bc5cb420649eaac7d1a545e11d54516311b0cfdc507a2979aaaf89e9eddd386d41ee36d29db60e
+  checksum: 0afdb7b3837b4e5cc3f5ec63578e387b4b07ba569d5a8a4545874ddd518b66d6554258da1aa6b4cc4a60a189ff5e7863d868cf5a5fc81d663e70d62033048ab3
   languageName: node
   linkType: hard
 
-"@vue/server-renderer@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/server-renderer@npm:3.4.15"
+"@vue/server-renderer@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/server-renderer@npm:3.4.19"
   dependencies:
-    "@vue/compiler-ssr": 3.4.15
-    "@vue/shared": 3.4.15
+    "@vue/compiler-ssr": 3.4.19
+    "@vue/shared": 3.4.19
   peerDependencies:
-    vue: 3.4.15
-  checksum: de93ccffe7008a12974d6f82024238f7b7b25817aae6846dabdcfb8534a6ce01528f7b13447b2561394112e4b6fd1bd125c3391c0ac9d849c6de167bf44f4e55
+    vue: 3.4.19
+  checksum: ae270a72697872f9a43b5c25586927e6e05188bf9ef2fff8a16dae479c0d0048156ddb5728224c399e3d13305e0d8898deab7b45cccd82dec06195bae7fd783d
   languageName: node
   linkType: hard
 
-"@vue/shared@npm:3.4.15":
-  version: 3.4.15
-  resolution: "@vue/shared@npm:3.4.15"
-  checksum: 237db3a880692c69358c46679562cee85d8495090a3c8ed44a4d4daa7c4a61d74e330b9bd1f3cec7362a2ae443f46186be8a86b44bff7604d5bd72ad994b8021
+"@vue/shared@npm:3.4.19":
+  version: 3.4.19
+  resolution: "@vue/shared@npm:3.4.19"
+  checksum: 676c2ec007efc5963a37811e1991f7a114ea603d52721feb59e6c1ac119127d1bdf80c57b09b32a53bb803922edc50e3753d847e800e16018a80fc5f9b84fcf5
   languageName: node
   linkType: hard
 
@@ -2629,10 +2629,10 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"caniuse-lite@npm:1.0.30001581":
-  version: 1.0.30001581
-  resolution: "caniuse-lite@npm:1.0.30001581"
-  checksum: ca4e2cd9d0acf5e3c71fa2e7cd65561e4532d32b640145f634c333792074bb63de1239b35abfb6b6d372f97caf26f8d97faac7ba51ef190717ad2d3ae9c0d7a2
+"caniuse-lite@npm:1.0.30001588":
+  version: 1.0.30001588
+  resolution: "caniuse-lite@npm:1.0.30001588"
+  checksum: 2ab5fcec8fd3ee5d817a44bf1fb69804a6924d190e476863fb519692cd3e85a3a775bf4a2b6ba793f8db592ca61255b7f77f3d773ff7d42b452216f180bcdd2f
   languageName: node
   linkType: hard
 
@@ -3939,20 +3939,20 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"eslint-plugin-vue@npm:9.20.1":
-  version: 9.20.1
-  resolution: "eslint-plugin-vue@npm:9.20.1"
+"eslint-plugin-vue@npm:9.21.1":
+  version: 9.21.1
+  resolution: "eslint-plugin-vue@npm:9.21.1"
   dependencies:
     "@eslint-community/eslint-utils": ^4.4.0
     natural-compare: ^1.4.0
     nth-check: ^2.1.1
     postcss-selector-parser: ^6.0.13
     semver: ^7.5.4
-    vue-eslint-parser: ^9.4.0
+    vue-eslint-parser: ^9.4.2
     xml-name-validator: ^4.0.0
   peerDependencies:
     eslint: ^6.2.0 || ^7.0.0 || ^8.0.0
-  checksum: fe50f4b842469297c6bd272a489f4c7aff72926d3e91b702bb72d2c8698932910e398a5ff74b2419c04fa32a179f6ce78140010bdf977d5c9f02a568a5675c74
+  checksum: 3fe29a7062b7d88ad6232f1fc81ee45761aafea7454f6abdf38b41ed8ace01c068b1755b57af8e5530bc2f4db907959b53838604aced776bd13fbb0e575a17b0
   languageName: node
   linkType: hard
 
@@ -5633,12 +5633,12 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"magic-string@npm:^0.30.5":
-  version: 0.30.5
-  resolution: "magic-string@npm:0.30.5"
+"magic-string@npm:^0.30.6":
+  version: 0.30.7
+  resolution: "magic-string@npm:0.30.7"
   dependencies:
     "@jridgewell/sourcemap-codec": ^1.4.15
-  checksum: da10fecff0c0a7d3faf756913ce62bd6d5e7b0402be48c3b27bfd651b90e29677e279069a63b764bcdc1b8ecdcdb898f29a5c5ec510f2323e8d62ee057a6eb18
+  checksum: bdf102e36a44d1728ec61b69d655caba3f66ca58898e292f6debe57dc30896bd37908bfe3464a7464a435831a9e44aa905cebd681e21c2f44bbe4dddf225619f
   languageName: node
   linkType: hard
 
@@ -5829,12 +5829,12 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"moment-timezone@npm:0.5.44":
-  version: 0.5.44
-  resolution: "moment-timezone@npm:0.5.44"
+"moment-timezone@npm:0.5.45":
+  version: 0.5.45
+  resolution: "moment-timezone@npm:0.5.45"
   dependencies:
     moment: ^2.29.4
-  checksum: 2f1de58f145bb87896ca06afaebaea0904f24542a900208d6a56a54f2fb50b38d9d2b61c46039ed36d0ec575140ff9ba6a5824877551763dbf3db7bda0333781
+  checksum: a22e9f983fbe1a01757ce30685bce92e3f6efa692eb682afd47b82da3ff960b3c8c2c3883ec6715c124bc985a342b57cba1f6ba25a1c8b4c7ad766db3cd5e1d0
   languageName: node
   linkType: hard
 
@@ -6977,7 +6977,7 @@ browserlist@latest:
     browser-fs-access: 0.35.0
     browserlist: latest
     c8: 9.1.0
-    caniuse-lite: 1.0.30001581
+    caniuse-lite: 1.0.30001588
     d3: 7.8.5
     eslint: 8.56.0
     eslint-config-standard: 17.1.0
@@ -6986,7 +6986,7 @@ browserlist@latest:
     eslint-plugin-n: 16.6.2
     eslint-plugin-node: 11.1.0
     eslint-plugin-promise: 6.1.1
-    eslint-plugin-vue: 9.20.1
+    eslint-plugin-vue: 9.21.1
     file-saver: 2.0.5
     highcharts: 11.3.0
     html-validate: 8.9.1
@@ -6999,7 +6999,7 @@ browserlist@latest:
     lodash-es: 4.17.21
     luxon: 3.4.4
     moment: 2.30.1
-    moment-timezone: 0.5.44
+    moment-timezone: 0.5.45
     ms: 2.1.3
     murmurhash-js: 1.0.0
     naive-ui: 2.37.3
@@ -7007,7 +7007,7 @@ browserlist@latest:
     pinia: 2.1.7
     pinia-plugin-persist: 1.0.0
     pug: 3.0.2
-    sass: 1.70.0
+    sass: 1.71.0
     seedrandom: 3.0.5
     select2: 4.1.0-rc.0
     select2-bootstrap-5-theme: 1.3.0
@@ -7017,7 +7017,7 @@ browserlist@latest:
     sortablejs: 1.15.2
     vanillajs-datepicker: 1.3.4
     vite: 4.5.2
-    vue: 3.4.15
+    vue: 3.4.19
     vue-router: 4.2.5
     zxcvbn: 4.4.2
   languageName: unknown
@@ -7083,16 +7083,16 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"sass@npm:1.70.0":
-  version: 1.70.0
-  resolution: "sass@npm:1.70.0"
+"sass@npm:1.71.0":
+  version: 1.71.0
+  resolution: "sass@npm:1.71.0"
   dependencies:
     chokidar: ">=3.0.0 <4.0.0"
     immutable: ^4.0.0
     source-map-js: ">=0.6.2 <2.0.0"
   bin:
     sass: sass.js
-  checksum: fd1b622cf9b7fa699a03ec634611997552ece45eb98ac365fef22f42bdcb8ed63b326b64173379c966830c8551ae801e44e4a00d2de16fdadda2dc8f35400bbb
+  checksum: 5ba6b4b994c7ae94286919d8be8a3692038c27175b4ad3a3ac43f51a91188590d3ddd3a0ef1022135503b9d70d0732efdbedfd56f3ee1a4651549f5b13f8942f
   languageName: node
   linkType: hard
 
@@ -7890,9 +7890,9 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"vue-eslint-parser@npm:^9.4.0":
-  version: 9.4.0
-  resolution: "vue-eslint-parser@npm:9.4.0"
+"vue-eslint-parser@npm:^9.4.2":
+  version: 9.4.2
+  resolution: "vue-eslint-parser@npm:9.4.2"
   dependencies:
     debug: ^4.3.4
     eslint-scope: ^7.1.1
@@ -7903,7 +7903,7 @@ browserlist@latest:
     semver: ^7.3.6
   peerDependencies:
     eslint: ">=6.0.0"
-  checksum: b53d05d3ba5828aafc14486d862350ccc9784642f6274495ddac89eaa7508f7ffeb85a707bf7faf0ed72d6a361f82b374ce0b3462a01c91cf4577ec3cf4ea9c6
+  checksum: 67f14c8ea19b578077a878864a5ec438ab4c597381923c9814fac39b3772da8654ac2a543467b5880607f694131f8ff34b87bd24c10bbc5f99fa2fcac49ff2e6
   languageName: node
   linkType: hard
 
@@ -7918,21 +7918,21 @@ browserlist@latest:
   languageName: node
   linkType: hard
 
-"vue@npm:3.4.15":
-  version: 3.4.15
-  resolution: "vue@npm:3.4.15"
+"vue@npm:3.4.19":
+  version: 3.4.19
+  resolution: "vue@npm:3.4.19"
   dependencies:
-    "@vue/compiler-dom": 3.4.15
-    "@vue/compiler-sfc": 3.4.15
-    "@vue/runtime-dom": 3.4.15
-    "@vue/server-renderer": 3.4.15
-    "@vue/shared": 3.4.15
+    "@vue/compiler-dom": 3.4.19
+    "@vue/compiler-sfc": 3.4.19
+    "@vue/runtime-dom": 3.4.19
+    "@vue/server-renderer": 3.4.19
+    "@vue/shared": 3.4.19
   peerDependencies:
     typescript: "*"
   peerDependenciesMeta:
     typescript:
       optional: true
-  checksum: 6e9ff02c9bd46cb47ff2225e7b51b75b00343b7f52076a56c2a90ce15de88c1de1aaa6b176ac39ca324479ee208b7f7e7992f54a353b0ee6b303081ac5ab30b0
+  checksum: 8c83a8097dfe00a4da05a80358d9d4ebd8fd51ba2eebd00fb53d3253963dcd14b1a5ea6edcbef5e10145b534844720d1b5af608966208753e88cc24253758688
   languageName: node
   linkType: hard