chore: Merge main into feat/django4

commit 477a2decb5
245  .pnp.cjs (generated)

Generated Yarn PnP state file, regenerated for the dependency updates in this
merge. The package resolutions change as follows:

  @fullcalendar/bootstrap5, core, daygrid, icalendar, interaction, list,
    luxon2, timegrid, vue3: 6.1.5 → 6.1.6
  caniuse-lite: 1.0.30001469 → 1.0.30001481
  eslint: 8.37.0 → 8.39.0, with @eslint/js 8.37.0 → 8.39.0 and a new
    eslint-scope 7.2.0 entry alongside the existing 7.1.1
  js-cookie: 3.0.1 → 3.0.5

The corresponding virtual-package entries, cache paths, and resolution hashes
in $$SETUP_STATE are updated to match.
Vendored .yarn/cache archives (binary files not shown):

BIN  .yarn/cache/@fullcalendar-bootstrap5-npm-6.1.6-3eb2f2a80e-09f2bdf7dc.zip  (vendored, Normal file)
BIN  .yarn/cache/@fullcalendar-core-npm-6.1.6-a28815a826-72ec698bd2.zip  (vendored, Normal file)
BIN  .yarn/cache/@fullcalendar-daygrid-npm-6.1.6-13b72a08b0-e7b60e359b.zip  (vendored, Normal file)
BIN  .yarn/cache/@fullcalendar-list-npm-6.1.6-ada94f2f8a-e5beb01c62.zip  (vendored, Normal file)
BIN  .yarn/cache/caniuse-lite-npm-1.0.30001481-cd8272ecaa-8200a043c1.zip  (vendored, Normal file)
BIN  .yarn/cache/eslint-scope-npm-7.2.0-88784f5a38-64591a2d8b.zip  (vendored, Normal file)
BIN  .yarn/cache/js-cookie-npm-3.0.5-8fc8fcc9b4-2dbd2809c6.zip  (vendored, Normal file)

Additional changed cache archives appear only as "Binary file not shown."
14  dev/deploy-to-container/package-lock.json (generated)

Generated npm lockfile, regenerated for the yargs update: the dependency range
moves from "^17.7.1" to "^17.7.2", and the yargs entry (version, resolved
tarball URL, integrity hash) is bumped from 17.7.1 to 17.7.2 in both the
"node_modules/yargs" and legacy "yargs" records.

@@ -8,7 +8,7 @@
"nanoid-dictionary": "5.0.0-beta.1",
"slugify": "1.6.6",
"tar": "^6.1.13",
"yargs": "^17.7.1"
"yargs": "^17.7.2"
},
"engines": {
"node": ">=16"

@@ -23,9 +23,6 @@ CELERY_BROKER_URL = '__MQCONNSTR__'
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "/test/id/"
IDSUBMIT_STAGING_PATH = "/test/staging/"
INTERNET_DRAFT_ARCHIVE_DIR = "/test/archive/"
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = "/test/archive/"
RFC_PATH = "/test/rfc/"

AGENDA_PATH = '/assets/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH

@@ -65,6 +62,7 @@ CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'

NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = '/test/staging/'

@@ -20,9 +20,6 @@ DATABASES = {
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "test/id/"
IDSUBMIT_STAGING_PATH = "test/staging/"
INTERNET_DRAFT_ARCHIVE_DIR = "test/archive/"
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = "test/archive/"
RFC_PATH = "test/rfc/"

AGENDA_PATH = '/assets/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH

@@ -62,6 +59,7 @@ CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'

NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'

@@ -19,9 +19,6 @@ DATABASES = {
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "test/id/"
IDSUBMIT_STAGING_PATH = "test/staging/"
INTERNET_DRAFT_ARCHIVE_DIR = "test/archive/"
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = "test/archive/"
RFC_PATH = "test/rfc/"

AGENDA_PATH = '/assets/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH

@@ -61,6 +58,7 @@ CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'

NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'

@@ -10,9 +10,6 @@ from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "test/id/"
IDSUBMIT_STAGING_PATH = "test/staging/"
INTERNET_DRAFT_ARCHIVE_DIR = "test/archive/"
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = "test/archive/"
RFC_PATH = "test/rfc/"

AGENDA_PATH = '/assets/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH

@@ -52,6 +49,7 @@ CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/archive/id'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'

NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'

@@ -21,6 +21,10 @@ for sub in \
/assets/ietf-ftp/yang/ianamod \
/assets/ietf-ftp/yang/invalmod \
/assets/ietf-ftp/yang/rfcmod \
/assets/ietfdata \
/assets/ietfdata/derived \
/assets/ietfdata/derived/bibxml \
/assets/ietfdata/derived/bibxml/bibxml-ids \
/assets/www6s \
/assets/www6s/staging \
/assets/www6s/wg-descriptions \

@@ -4,6 +4,9 @@ WORKSPACEDIR="/workspace"

sudo service rsyslog start &>/dev/null

# Add /workspace as a safe git directory
git config --global --add safe.directory /workspace

# Turn off git info in zsh prompt (causes slowdowns)
git config oh-my-zsh.hide-info 1

@@ -320,22 +320,26 @@ This test section has some text.
file = NamedTemporaryFile(delete=False,mode="w+",encoding='utf-8')
file.write(f'# {username}')
file.close()
for postdict in [
{'bofreq_submission':'enter','bofreq_content':f'# {username}'},
{'bofreq_submission':'upload','bofreq_file':open(file.name,'rb')},
]:
docevent_count = doc.docevent_set.count()
empty_outbox()
r = self.client.post(url, postdict)
self.assertEqual(r.status_code, 302)
doc = reload_db_objects(doc)
self.assertEqual('%02d'%(int(rev)+1) ,doc.rev)
self.assertEqual(f'# {username}', doc.text())
self.assertEqual(docevent_count+1, doc.docevent_set.count())
self.assertEqual(1, len(outbox))
rev = doc.rev
try:
with open(file.name, 'rb') as bofreq_fd:
for postdict in [
{'bofreq_submission':'enter','bofreq_content':f'# {username}'},
{'bofreq_submission':'upload','bofreq_file':bofreq_fd},
]:
docevent_count = doc.docevent_set.count()
empty_outbox()
r = self.client.post(url, postdict)
self.assertEqual(r.status_code, 302)
doc = reload_db_objects(doc)
self.assertEqual('%02d'%(int(rev)+1) ,doc.rev)
self.assertEqual(f'# {username}', doc.text())
self.assertEqual(docevent_count+1, doc.docevent_set.count())
self.assertEqual(1, len(outbox))
rev = doc.rev
finally:
os.unlink(file.name)

self.client.logout()
os.unlink(file.name)

def test_start_new_bofreq(self):
url = urlreverse('ietf.doc.views_bofreq.new_bof_request')

@@ -350,25 +354,28 @@ This test section has some text.
file = NamedTemporaryFile(delete=False,mode="w+",encoding='utf-8')
file.write('some stuff')
file.close()
for postdict in [
dict(title='title one', bofreq_submission='enter', bofreq_content='some stuff'),
dict(title='title two', bofreq_submission='upload', bofreq_file=open(file.name,'rb')),
]:
empty_outbox()
r = self.client.post(url, postdict)
self.assertEqual(r.status_code,302)
name = f"bofreq-{xslugify(nobody.last_name())[:64]}-{postdict['title']}".replace(' ','-')
bofreq = Document.objects.filter(name=name,type_id='bofreq').first()
self.assertIsNotNone(bofreq)
self.assertIsNotNone(DocAlias.objects.filter(name=name).first())
self.assertEqual(bofreq.title, postdict['title'])
self.assertEqual(bofreq.rev, '00')
self.assertEqual(bofreq.get_state_slug(), 'proposed')
self.assertEqual(list(bofreq_editors(bofreq)), [nobody])
self.assertEqual(bofreq.latest_event(NewRevisionDocEvent).rev, '00')
self.assertEqual(bofreq.text_or_error(), 'some stuff')
self.assertEqual(len(outbox),1)
os.unlink(file.name)
try:
with open(file.name,'rb') as bofreq_fd:
for postdict in [
dict(title='title one', bofreq_submission='enter', bofreq_content='some stuff'),
dict(title='title two', bofreq_submission='upload', bofreq_file=bofreq_fd),
]:
empty_outbox()
r = self.client.post(url, postdict)
self.assertEqual(r.status_code,302)
name = f"bofreq-{xslugify(nobody.last_name())[:64]}-{postdict['title']}".replace(' ','-')
bofreq = Document.objects.filter(name=name,type_id='bofreq').first()
self.assertIsNotNone(bofreq)
self.assertIsNotNone(DocAlias.objects.filter(name=name).first())
self.assertEqual(bofreq.title, postdict['title'])
self.assertEqual(bofreq.rev, '00')
self.assertEqual(bofreq.get_state_slug(), 'proposed')
self.assertEqual(list(bofreq_editors(bofreq)), [nobody])
self.assertEqual(bofreq.latest_event(NewRevisionDocEvent).rev, '00')
self.assertEqual(bofreq.text_or_error(), 'some stuff')
self.assertEqual(len(outbox),1)
finally:
os.unlink(file.name)
existing_bofreq = BofreqFactory(requester_lastname=nobody.last_name())
for postdict in [
dict(title='', bofreq_submission='enter', bofreq_content='some stuff'),

@@ -499,6 +499,8 @@ class ReviewTests(TestCase):

tar.add(os.path.relpath(tmp.name))

mbox.close()

return mbox_path

def test_search_mail_archive(self):

@@ -23,8 +23,12 @@ class Command(EmailOnFailureCommand):

def handle(self, *args, **options):
email = options.get('email', None)
binary_input = io.open(email, 'rb') if email else sys.stdin.buffer
self.msg_bytes = binary_input.read()
if email:
binary_input = io.open(email, 'rb')
self.msg_bytes = binary_input.read()
binary_input.close()
else:
self.msg_bytes = sys.stdin.buffer.read()
try:
process_response_email(self.msg_bytes)
except ValueError as e:

@@ -44,4 +48,4 @@ class Command(EmailOnFailureCommand):
'application', 'octet-stream', # mime type
filename='original-message',
)
return msg
return msg

@@ -894,23 +894,27 @@ class MeetingTests(BaseMeetingTestCase):

def test_session_draft_tarfile(self):
session, filenames = self.build_session_setup()
url = urlreverse('ietf.meeting.views.session_draft_tarfile', kwargs={'num':session.meeting.number,'acronym':session.group.acronym})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get('Content-Type'), 'application/octet-stream')
for filename in filenames:
os.unlink(filename)
try:
url = urlreverse('ietf.meeting.views.session_draft_tarfile', kwargs={'num':session.meeting.number,'acronym':session.group.acronym})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get('Content-Type'), 'application/octet-stream')
finally:
for filename in filenames:
os.unlink(filename)

@skipIf(skip_pdf_tests, skip_message)
@skip_coverage
def test_session_draft_pdf(self):
session, filenames = self.build_session_setup()
url = urlreverse('ietf.meeting.views.session_draft_pdf', kwargs={'num':session.meeting.number,'acronym':session.group.acronym})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get('Content-Type'), 'application/pdf')
for filename in filenames:
os.unlink(filename)
try:
url = urlreverse('ietf.meeting.views.session_draft_pdf', kwargs={'num':session.meeting.number,'acronym':session.group.acronym})
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get('Content-Type'), 'application/pdf')
finally:
for filename in filenames:
os.unlink(filename)

def test_current_materials(self):
url = urlreverse('ietf.meeting.views.current_materials')

@@ -6411,7 +6415,9 @@ class MaterialsTests(TestCase):
path = os.path.join(submission.session.meeting.get_materials_path(),'slides')
filename = os.path.join(path,session.sessionpresentation_set.first().document.name+'-01.txt')
self.assertTrue(os.path.exists(filename))
contents = io.open(filename,'r').read()
fd = io.open(filename, 'r')
contents = fd.read()
fd.close()
self.assertIn('third version', contents)

@@ -7946,12 +7952,13 @@ class ProceedingsTests(BaseMeetingTestCase):
"""Upload proceedings materials document"""
meeting = self._procmat_test_meeting()
for mat_type in ProceedingsMaterialTypeName.objects.filter(used=True):
mat = self.upload_proceedings_material_test(
meeting,
mat_type,
{'file': self._proceedings_file(), 'external_url': ''},
)
self.assertEqual(mat.get_href(), f'{mat.document.name}:00')
with self._proceedings_file() as fd:
mat = self.upload_proceedings_material_test(
meeting,
mat_type,
{'file': fd, 'external_url': ''},
)
self.assertEqual(mat.get_href(), f'{mat.document.name}:00')

def test_add_proceedings_material_doc_invalid_ext(self):
"""Upload proceedings materials document with disallowed extension"""

@@ -8038,12 +8045,13 @@ class ProceedingsTests(BaseMeetingTestCase):
kwargs=dict(num=meeting.number, material_type=pm_doc.type.slug),
)
self.client.login(username='secretary', password='secretary+password')
r = self.client.post(pm_doc_url, {'file': self._proceedings_file(), 'external_url': ''})
self.assertRedirects(r, success_url)
self.assertEqual(meeting.proceedings_materials.count(), 2)
pm_doc = meeting.proceedings_materials.get(pk=pm_doc.pk) # refresh from DB
self.assertEqual(pm_doc.document.rev, '01')
self.assertEqual(pm_doc.get_href(), f'{pm_doc.document.name}:01')
with self._proceedings_file() as fd:
r = self.client.post(pm_doc_url, {'file': fd, 'external_url': ''})
self.assertRedirects(r, success_url)
self.assertEqual(meeting.proceedings_materials.count(), 2)
pm_doc = meeting.proceedings_materials.get(pk=pm_doc.pk) # refresh from DB
self.assertEqual(pm_doc.document.rev, '01')
self.assertEqual(pm_doc.get_href(), f'{pm_doc.document.name}:01')

# Replace the uploaded document with a URL
r = self.client.post(pm_doc_url, {'use_url': 'on', 'external_url': 'https://example.com/second'})

@ -8066,12 +8074,13 @@ class ProceedingsTests(BaseMeetingTestCase):
|
|||
self.assertEqual(pm_url.get_href(), 'https://example.com/third')
|
||||
|
||||
# Now replace the URL doc with an uploaded file
|
||||
r = self.client.post(pm_url_url, {'file': self._proceedings_file(), 'external_url': ''})
|
||||
self.assertRedirects(r, success_url)
|
||||
self.assertEqual(meeting.proceedings_materials.count(), 2)
|
||||
pm_url = meeting.proceedings_materials.get(pk=pm_url.pk) # refresh from DB
|
||||
self.assertEqual(pm_url.document.rev, '02')
|
||||
self.assertEqual(pm_url.get_href(), f'{pm_url.document.name}:02')
|
||||
with self._proceedings_file() as fd:
|
||||
r = self.client.post(pm_url_url, {'file': fd, 'external_url': ''})
|
||||
self.assertRedirects(r, success_url)
|
||||
self.assertEqual(meeting.proceedings_materials.count(), 2)
|
||||
pm_url = meeting.proceedings_materials.get(pk=pm_url.pk) # refresh from DB
|
||||
self.assertEqual(pm_url.document.rev, '02')
|
||||
self.assertEqual(pm_url.get_href(), f'{pm_url.document.name}:02')
|
||||
|
||||
def test_remove_proceedings_material(self):
|
||||
"""Proceedings material can be removed"""
|
||||
|
|
|
@@ -42,8 +42,11 @@ class Command(EmailOnFailureCommand):
except NomCom.DoesNotExist:
raise CommandError("NomCom %s does not exist or it isn't active" % year)

binary_input = io.open(email, 'rb') if email else sys.stdin.buffer
self.msg = binary_input.read()
if email:
with io.open(email, 'rb') as binary_input:
self.msg = binary_input.read()
else:
self.msg = sys.stdin.buffer.read()

try:
feedback = create_feedback_email(self.nomcom, self.msg)

@@ -94,7 +94,8 @@ def check_comments(encryped, plain, privatekey_file):

decrypted_file.close()
encrypted_file.close()
decrypted_comments = io.open(decrypted_file.name, 'rb').read().decode('utf-8')
with io.open(decrypted_file.name, 'rb') as fd:
decrypted_comments = fd.read().decode('utf-8')
os.unlink(encrypted_file.name)
os.unlink(decrypted_file.name)

@@ -116,7 +117,8 @@ def nomcom_test_data():
nomcom_test_cert_file, privatekey_file = generate_cert()

nomcom.public_key.storage = FileSystemStorage(location=settings.NOMCOM_PUBLIC_KEYS_DIR)
nomcom.public_key.save('cert', File(io.open(nomcom_test_cert_file.name, 'r')))
with io.open(nomcom_test_cert_file.name, 'r') as fd:
nomcom.public_key.save('cert', File(fd))

# chair and member
create_person(group, "chair", username=CHAIR_USER, email_address='%s%s'%(CHAIR_USER,EMAIL_DOMAIN))

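The command change above reads the raw message either from a named file or from stdin, closing the file promptly. A hedged sketch of that input-selection pattern follows; the function name and wiring are illustrative, not the real management command.

# Sketch of the "file argument or stdin" input handling shown above.
import io
import sys

def read_message(email_path=None):
    if email_path:
        # Open inside a with-block so the handle is released immediately.
        with io.open(email_path, 'rb') as binary_input:
            return binary_input.read()
    # No path supplied: consume the raw bytes piped into the process.
    return sys.stdin.buffer.read()
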
@@ -715,7 +715,8 @@ class NomcomViewsTest(TestCase):

# save the cert file in tmp
#nomcom.public_key.storage.location = tempfile.gettempdir()
nomcom.public_key.save('cert', File(io.open(self.cert_file.name, 'r')))
with io.open(self.cert_file.name, 'r') as fd:
nomcom.public_key.save('cert', File(fd))

response = self.client.get(nominate_url)
self.assertEqual(response.status_code, 200)

@@ -781,7 +782,8 @@ class NomcomViewsTest(TestCase):

# save the cert file in tmp
#nomcom.public_key.storage.location = tempfile.gettempdir()
nomcom.public_key.save('cert', File(io.open(self.cert_file.name, 'r')))
with io.open(self.cert_file.name, 'r') as fd:
nomcom.public_key.save('cert', File(fd))

response = self.client.get(nominate_url)
self.assertEqual(response.status_code, 200)

@@ -863,7 +865,8 @@ class NomcomViewsTest(TestCase):

# save the cert file in tmp
#nomcom.public_key.storage.location = tempfile.gettempdir()
nomcom.public_key.save('cert', File(io.open(self.cert_file.name, 'r')))
with io.open(self.cert_file.name, 'r') as fd:
nomcom.public_key.save('cert', File(fd))

response = self.client.get(self.add_questionnaire_url)
self.assertEqual(response.status_code, 200)

@@ -942,7 +945,8 @@ class NomcomViewsTest(TestCase):

# save the cert file in tmp
#nomcom.public_key.storage.location = tempfile.gettempdir()
nomcom.public_key.save('cert', File(io.open(self.cert_file.name, 'r')))
with io.open(self.cert_file.name, 'r') as fd:
nomcom.public_key.save('cert', File(fd))

response = self.client.get(feedback_url)
self.assertEqual(response.status_code, 200)

@@ -1066,7 +1070,8 @@ class FeedbackTest(TestCase):

# save the cert file in tmp
#nomcom.public_key.storage.location = tempfile.gettempdir()
nomcom.public_key.save('cert', File(io.open(self.cert_file.name, 'r')))
with io.open(self.cert_file.name, 'r') as fd:
nomcom.public_key.save('cert', File(fd))

comment_text = 'Plain text. Comments with accents äöåÄÖÅ éáíóú âêîôû ü àèìòù.'
comments = nomcom.encrypt(comment_text)

@@ -1089,7 +1094,8 @@ class ReminderTest(TestCase):
self.nomcom = get_nomcom_by_year(NOMCOM_YEAR)
self.cert_file, self.privatekey_file = get_cert_files()
#self.nomcom.public_key.storage.location = tempfile.gettempdir()
self.nomcom.public_key.save('cert', File(io.open(self.cert_file.name, 'r')))
with io.open(self.cert_file.name, 'r') as fd:
self.nomcom.public_key.save('cert', File(fd))

gen = Position.objects.get(nomcom=self.nomcom,name='GEN')
rai = Position.objects.get(nomcom=self.nomcom,name='RAI')

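Each of the nomcom test hunks above switches the cert save to a context-managed handle wrapped in django.core.files.File. A minimal sketch of that idiom, assuming only a model instance with a FileField named public_key (not the real NomCom model):

# Sketch of the cert-saving idiom used in the hunks above.
import io
from django.core.files import File

def store_cert(holder, cert_path):
    # `holder` stands in for any model instance with a FileField `public_key`.
    with io.open(cert_path, 'r') as fd:
        # FieldFile.save() copies the contents into the field's storage;
        # the with-block then closes the local handle.
        holder.public_key.save('cert', File(fd))
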
@@ -100,14 +100,14 @@ def photo(request, email_or_name):
if not size.isdigit():
return HttpResponse("Size must be integer", status=400)
size = int(size)
img = Image.open(person.photo)
img = img.resize((size, img.height*size//img.width))
bytes = BytesIO()
try:
img.save(bytes, format='JPEG')
return HttpResponse(bytes.getvalue(), content_type='image/jpg')
except OSError:
raise Http404
with Image.open(person.photo) as img:
img = img.resize((size, img.height*size//img.width))
bytes = BytesIO()
try:
img.save(bytes, format='JPEG')
return HttpResponse(bytes.getvalue(), content_type='image/jpg')
except OSError:
raise Http404

@role_required("Secretariat")

@@ -103,6 +103,7 @@ def retrieve_messages_from_mbox(mbox_fileobj):
"date": msg["Date"],
"utcdate": (utcdate.date().isoformat(), utcdate.time().isoformat()) if utcdate else ("", ""),
})
mbox.close()

return res

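The mbox hunk above adds an explicit mbox.close() once the messages have been collected. A short sketch of the same cleanup, using a path-based mailbox.mbox for brevity (the real helper works on a file object and extracts more headers than shown here):

# Sketch only: close the mailbox after iteration, as the added line does.
import mailbox

def dates_from_mbox(path):
    mbox = mailbox.mbox(path)
    try:
        return [msg['Date'] for msg in mbox]
    finally:
        # Matches the added mbox.close(): release the handle even if iteration raises.
        mbox.close()
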
@ -1,69 +1,93 @@
|
|||
$(document)
|
||||
.ready(function () {
|
||||
// fill in submitter info when an author button is clicked
|
||||
$("form.idsubmit button.author")
|
||||
.on("click", function () {
|
||||
var name = $(this)
|
||||
.data("name");
|
||||
var email = $(this)
|
||||
.data("email");
|
||||
$(function () {
|
||||
// fill in submitter info when an author button is clicked
|
||||
$("form.idsubmit button.author")
|
||||
.on("click", function () {
|
||||
var name = $(this)
|
||||
.data("name");
|
||||
var email = $(this)
|
||||
.data("email");
|
||||
|
||||
$(this)
|
||||
.parents("form")
|
||||
.find("input[name=submitter-name]")
|
||||
.val(name || "");
|
||||
$(this)
|
||||
.parents("form")
|
||||
.find("input[name=submitter-email]")
|
||||
.val(email || "");
|
||||
});
|
||||
$(this)
|
||||
.parents("form")
|
||||
.find("input[name=submitter-name]")
|
||||
.val(name || "");
|
||||
$(this)
|
||||
.parents("form")
|
||||
.find("input[name=submitter-email]")
|
||||
.val(email || "");
|
||||
});
|
||||
|
||||
$("form.idsubmit")
|
||||
.on("submit", function () {
|
||||
if (this.submittedAlready)
|
||||
return false;
|
||||
else {
|
||||
this.submittedAlready = true;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
$("form.idsubmit")
|
||||
.on("submit", function () {
|
||||
if (this.submittedAlready)
|
||||
return false;
|
||||
else {
|
||||
this.submittedAlready = true;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
|
||||
$("form.idsubmit #add-author")
|
||||
.on("click", function () {
|
||||
// clone the last author block and make it empty
|
||||
var cloner = $("#cloner");
|
||||
var next = cloner.clone();
|
||||
next.find('input:not([type=hidden])')
|
||||
.val('');
|
||||
$("form.idsubmit #add-author")
|
||||
.on("click", function () {
|
||||
// clone the last author block and make it empty
|
||||
var cloner = $("#cloner");
|
||||
var next = cloner.clone();
|
||||
next.find('input:not([type=hidden])')
|
||||
.val('');
|
||||
|
||||
// find the author number
|
||||
var t = next.children('h3')
|
||||
.text();
|
||||
var n = parseInt(t.replace(/\D/g, ''));
|
||||
// find the author number
|
||||
var t = next.children('h3')
|
||||
.text();
|
||||
var n = parseInt(t.replace(/\D/g, ''));
|
||||
|
||||
// change the number in attributes and text
|
||||
next.find('*')
|
||||
.each(function () {
|
||||
var e = this;
|
||||
$.each(['id', 'for', 'name', 'value'], function (i, v) {
|
||||
if ($(e)
|
||||
.attr(v)) {
|
||||
$(e)
|
||||
.attr(v, $(e)
|
||||
.attr(v)
|
||||
.replace(n - 1, n));
|
||||
}
|
||||
});
|
||||
// change the number in attributes and text
|
||||
next.find('*')
|
||||
.each(function () {
|
||||
var e = this;
|
||||
$.each(['id', 'for', 'name', 'value'], function (i, v) {
|
||||
if ($(e)
|
||||
.attr(v)) {
|
||||
$(e)
|
||||
.attr(v, $(e)
|
||||
.attr(v)
|
||||
.replace(n - 1, n));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
t = t.replace(n, n + 1);
|
||||
next.children('h3')
|
||||
.text(t);
|
||||
t = t.replace(n, n + 1);
|
||||
next.children('h3')
|
||||
.text(t);
|
||||
|
||||
// move the cloner id to next and insert next into the DOM
|
||||
cloner.removeAttr('id');
|
||||
next.attr('id', 'cloner');
|
||||
next.insertAfter(cloner);
|
||||
// move the cloner id to next and insert next into the DOM
|
||||
cloner.removeAttr('id');
|
||||
next.attr('id', 'cloner');
|
||||
next.insertAfter(cloner);
|
||||
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Reload page periodically if the enableAutoReload checkbox is present and checked
|
||||
const autoReloadSwitch = document.getElementById("enableAutoReload");
|
||||
const timeSinceDisplay = document.getElementById("time-since-uploaded");
|
||||
if (autoReloadSwitch) {
|
||||
const autoReloadTime = 30000; // ms
|
||||
let autoReloadTimeoutId;
|
||||
autoReloadSwitch.parentElement.classList.remove("d-none");
|
||||
timeSinceDisplay.classList.remove("d-none");
|
||||
autoReloadTimeoutId = setTimeout(() => location.reload(), autoReloadTime);
|
||||
autoReloadSwitch.addEventListener("change", (e) => {
|
||||
if (e.currentTarget.checked) {
|
||||
if (!autoReloadTimeoutId) {
|
||||
autoReloadTimeoutId = setTimeout(() => location.reload(), autoReloadTime);
|
||||
timeSinceDisplay.classList.remove("d-none");
|
||||
}
|
||||
} else {
|
||||
if (autoReloadTimeoutId) {
|
||||
clearTimeout(autoReloadTimeoutId);
|
||||
autoReloadTimeoutId = null;
|
||||
timeSinceDisplay.classList.add("d-none");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
|
|
@@ -13,8 +13,8 @@ import xml2rfc
from contextlib import ExitStack

from email.utils import formataddr
from typing import Tuple
from unidecode import unidecode
from urllib.parse import urljoin

from django import forms
from django.conf import settings

@@ -36,7 +36,6 @@ from ietf.message.models import Message
from ietf.name.models import FormalLanguageName, GroupTypeName
from ietf.submit.models import Submission, Preapproval
from ietf.submit.utils import validate_submission_name, validate_submission_rev, validate_submission_document_date, remote_ip
from ietf.submit.parsers.pdf_parser import PDFParser
from ietf.submit.parsers.plain_parser import PlainParser
from ietf.submit.parsers.xml_parser import XMLParser
from ietf.utils import log

@@ -49,6 +48,9 @@ from ietf.utils.xmldraft import XMLDraft, XMLParseError
class SubmissionBaseUploadForm(forms.Form):
xml = forms.FileField(label='.xml format', required=True)

formats: Tuple[str, ...] = ('xml',) # allowed formats
base_formats: Tuple[str, ...] = ('xml',) # at least one of these is required

def __init__(self, request, *args, **kwargs):
super(SubmissionBaseUploadForm, self).__init__(*args, **kwargs)

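The refactored base form above declares its accepted formats as typed class attributes; later in this diff SubmissionManualUploadForm extends the tuples rather than reassigning them in __init__. A stripped-down sketch of that pattern (field definitions omitted, class names are stand-ins):

# Sketch of the class-attribute pattern used by the refactored upload forms.
from typing import Tuple
from django import forms

class BaseUploadForm(forms.Form):
    formats: Tuple[str, ...] = ('xml',)       # all allowed upload formats
    base_formats: Tuple[str, ...] = ('xml',)  # at least one of these is required

class ManualUploadForm(BaseUploadForm):
    # Subclasses extend the tuples instead of mutating instance state.
    formats = BaseUploadForm.formats + ('txt',)
    base_formats = BaseUploadForm.base_formats + ('txt',)
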
|
@ -66,18 +68,11 @@ class SubmissionBaseUploadForm(forms.Form):
|
|||
self.title = None
|
||||
self.abstract = None
|
||||
self.authors = []
|
||||
self.parsed_draft = None
|
||||
self.file_types = []
|
||||
self.file_info = {} # indexed by file field name, e.g., 'txt', 'xml', ...
|
||||
self.xml_version = None
|
||||
# No code currently (14 Sep 2017) uses this class directly; it is
|
||||
# only used through its subclasses. The two assignments below are
|
||||
# set to trigger an exception if it is used directly only to make
|
||||
# sure that adequate consideration is made if it is decided to use it
|
||||
# directly in the future. Feel free to set these appropriately to
|
||||
# avoid the exceptions in that case:
|
||||
self.formats = None # None will raise an exception in clean() if this isn't changed in a subclass
|
||||
self.base_formats = None # None will raise an exception in clean() if this isn't changed in a subclass
|
||||
|
||||
self._extracted_filenames_and_revisions = {}
|
||||
|
||||
def set_cutoff_warnings(self):
|
||||
now = timezone.now()
|
||||
|
@ -126,20 +121,17 @@ class SubmissionBaseUploadForm(forms.Form):
|
|||
'The I-D submission tool will be reopened after %s (IETF-meeting local time).' % (cutoff_01_str, reopen_str))
|
||||
self.shutdown = True
|
||||
|
||||
def clean_file(self, field_name, parser_class):
|
||||
def _clean_file(self, field_name, parser_class):
|
||||
f = self.cleaned_data[field_name]
|
||||
if not f:
|
||||
return f
|
||||
|
||||
self.file_info[field_name] = parser_class(f).critical_parse()
|
||||
if self.file_info[field_name].errors:
|
||||
raise forms.ValidationError(self.file_info[field_name].errors)
|
||||
raise forms.ValidationError(self.file_info[field_name].errors, code="critical_error")
|
||||
return f
|
||||
|
||||
def clean_xml(self):
|
||||
return self.clean_file("xml", XMLParser)
|
||||
|
||||
def clean(self):
|
||||
def format_messages(where, e, log_msgs):
|
||||
m = str(e)
|
||||
if m:
|
||||
|
@ -148,38 +140,12 @@ class SubmissionBaseUploadForm(forms.Form):
|
|||
import traceback
|
||||
typ, val, tb = sys.exc_info()
|
||||
m = traceback.format_exception(typ, val, tb)
|
||||
m = [ l.replace('\n ', ':\n ') for l in m ]
|
||||
msgs = [s for s in (["Error from xml2rfc (%s):" % (where,)] + m + log_msgs) if s]
|
||||
m = [l.replace('\n ', ':\n ') for l in m]
|
||||
msgs = [s for s in ([f"Error from xml2rfc ({where}):"] + m + log_msgs) if s]
|
||||
return msgs
|
||||
|
||||
if self.shutdown and not has_role(self.request.user, "Secretariat"):
|
||||
raise forms.ValidationError('The submission tool is currently shut down')
|
||||
|
||||
# check general submission rate thresholds before doing any more work
|
||||
today = date_today()
|
||||
self.check_submissions_thresholds(
|
||||
"for the same submitter",
|
||||
dict(remote_ip=self.remote_ip, submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SAME_SUBMITTER, settings.IDSUBMIT_MAX_DAILY_SAME_SUBMITTER_SIZE,
|
||||
)
|
||||
self.check_submissions_thresholds(
|
||||
"across all submitters",
|
||||
dict(submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SUBMISSIONS, settings.IDSUBMIT_MAX_DAILY_SUBMISSIONS_SIZE,
|
||||
)
|
||||
|
||||
for ext in self.formats:
|
||||
f = self.cleaned_data.get(ext, None)
|
||||
if not f:
|
||||
continue
|
||||
self.file_types.append('.%s' % ext)
|
||||
if not ('.txt' in self.file_types or '.xml' in self.file_types):
|
||||
if not self.errors:
|
||||
raise forms.ValidationError('Unexpected submission file types; found %s, but %s is required' % (', '.join(self.file_types), ' or '.join(self.base_formats)))
|
||||
|
||||
# Determine the draft name and revision. Try XML first.
|
||||
if self.cleaned_data.get('xml'):
|
||||
xml_file = self.cleaned_data.get('xml')
|
||||
xml_file = self._clean_file("xml", XMLParser)
|
||||
if xml_file:
|
||||
tfn = None
|
||||
with ExitStack() as stack:
|
||||
@stack.callback
|
||||
|
@ -204,86 +170,131 @@ class SubmissionBaseUploadForm(forms.Form):
|
|||
xml_draft = XMLDraft(tfn)
|
||||
except XMLParseError as e:
|
||||
msgs = format_messages('xml', e, e.parser_msgs())
|
||||
self.add_error('xml', msgs)
|
||||
return
|
||||
raise forms.ValidationError(msgs, code="xml_parse_error")
|
||||
except Exception as e:
|
||||
self.add_error('xml', f'Error parsing XML Internet-Draft: {e}')
|
||||
return
|
||||
raise forms.ValidationError(f"Error parsing XML Internet-Draft: {e}", code="parse_exception")
|
||||
if not xml_draft.filename:
|
||||
raise forms.ValidationError(
|
||||
"Could not extract a valid Internet-Draft name from the XML. "
|
||||
"Please make sure that the top-level <rfc/> "
|
||||
"element has a docName attribute which provides the full Internet-Draft name including "
|
||||
"revision number.",
|
||||
code="parse_error_filename",
|
||||
)
|
||||
if not xml_draft.revision:
|
||||
raise forms.ValidationError(
|
||||
"Could not extract a valid Internet-Draft revision from the XML. "
|
||||
"Please make sure that the top-level <rfc/> "
|
||||
"element has a docName attribute which provides the full Internet-Draft name including "
|
||||
"revision number.",
|
||||
code="parse_error_revision",
|
||||
)
|
||||
self._extracted_filenames_and_revisions['xml'] = (xml_draft.filename, xml_draft.revision)
|
||||
return xml_file
|
||||
|
||||
self.filename = xml_draft.filename
|
||||
self.revision = xml_draft.revision
|
||||
elif self.cleaned_data.get('txt'):
|
||||
# no XML available, extract from the text if we have it
|
||||
# n.b., this code path is unused until a subclass with a 'txt' field is created.
|
||||
txt_file = self.cleaned_data['txt']
|
||||
txt_file.seek(0)
|
||||
bytes = txt_file.read()
|
||||
try:
|
||||
text = bytes.decode(self.file_info['txt'].charset)
|
||||
self.parsed_draft = PlaintextDraft(text, txt_file.name)
|
||||
self.filename = self.parsed_draft.filename
|
||||
self.revision = self.parsed_draft.revision
|
||||
except (UnicodeDecodeError, LookupError) as e:
|
||||
self.add_error('txt', 'Failed decoding the uploaded file: "%s"' % str(e))
|
||||
def clean(self):
|
||||
if self.shutdown and not has_role(self.request.user, "Secretariat"):
|
||||
raise forms.ValidationError('The submission tool is currently shut down')
|
||||
|
||||
rev_error = validate_submission_rev(self.filename, self.revision)
|
||||
if rev_error:
|
||||
raise forms.ValidationError(rev_error)
|
||||
# check general submission rate thresholds before doing any more work
|
||||
today = date_today()
|
||||
self.check_submissions_thresholds(
|
||||
"for the same submitter",
|
||||
dict(remote_ip=self.remote_ip, submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SAME_SUBMITTER, settings.IDSUBMIT_MAX_DAILY_SAME_SUBMITTER_SIZE,
|
||||
)
|
||||
self.check_submissions_thresholds(
|
||||
"across all submitters",
|
||||
dict(submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SUBMISSIONS, settings.IDSUBMIT_MAX_DAILY_SUBMISSIONS_SIZE,
|
||||
)
|
||||
|
||||
for ext in self.formats:
|
||||
f = self.cleaned_data.get(ext, None)
|
||||
if not f:
|
||||
continue
|
||||
self.file_types.append('.%s' % ext)
|
||||
if not any(f".{bt}" in self.file_types for bt in self.base_formats):
|
||||
if not self.errors:
|
||||
raise forms.ValidationError(
|
||||
"Unexpected submission file types; found {}, but {} is required".format(
|
||||
", ".join(ft.lstrip(".") for ft in self.file_types),
|
||||
" or ".join(self.base_formats),
|
||||
)
|
||||
)
|
||||
|
||||
# The following errors are likely noise if we have previous field
|
||||
# errors:
|
||||
if self.errors:
|
||||
raise forms.ValidationError('')
|
||||
|
||||
# Check that all formats agree on draft name/rev
|
||||
filename_from = None
|
||||
for fmt, (extracted_name, extracted_rev) in self._extracted_filenames_and_revisions.items():
|
||||
if self.filename is None:
|
||||
filename_from = fmt
|
||||
self.filename = extracted_name
|
||||
self.revision = extracted_rev
|
||||
elif self.filename != extracted_name:
|
||||
raise forms.ValidationError(
|
||||
{fmt: f"Extracted filename '{extracted_name}' does not match filename '{self.filename}' from {filename_from} format"},
|
||||
code="filename_mismatch",
|
||||
)
|
||||
elif self.revision != extracted_rev:
|
||||
raise forms.ValidationError(
|
||||
{fmt: f"Extracted revision ({extracted_rev}) does not match revision from {filename_from} format ({self.revision})"},
|
||||
code="revision_mismatch",
|
||||
)
|
||||
# Not expected to encounter missing filename/revision here because
|
||||
# the individual fields should fail validation, but just in case
|
||||
if not self.filename:
|
||||
raise forms.ValidationError("Could not extract a valid Internet-Draft name from the upload. "
|
||||
"To fix this in a text upload, please make sure that the full Internet-Draft name including "
|
||||
"revision number appears centered on its own line below the document title on the "
|
||||
"first page. In an xml upload, please make sure that the top-level <rfc/> "
|
||||
"element has a docName attribute which provides the full Internet-Draft name including "
|
||||
"revision number.")
|
||||
|
||||
raise forms.ValidationError(
|
||||
"Unable to extract a filename from any uploaded format.",
|
||||
code="no_filename",
|
||||
)
|
||||
if not self.revision:
|
||||
raise forms.ValidationError("Could not extract a valid Internet-Draft revision from the upload. "
|
||||
"To fix this in a text upload, please make sure that the full Internet-Draft name including "
|
||||
"revision number appears centered on its own line below the document title on the "
|
||||
"first page. In an xml upload, please make sure that the top-level <rfc/> "
|
||||
"element has a docName attribute which provides the full Internet-Draft name including "
|
||||
"revision number.")
|
||||
raise forms.ValidationError(
|
||||
"Unable to extract a revision from any uploaded format.",
|
||||
code="no_revision",
|
||||
)
|
||||
|
||||
name_error = validate_submission_name(self.filename)
|
||||
if name_error:
|
||||
raise forms.ValidationError(name_error)
|
||||
|
||||
rev_error = validate_submission_rev(self.filename, self.revision)
|
||||
if rev_error:
|
||||
raise forms.ValidationError(rev_error)
|
||||
|
||||
self.check_for_old_uppercase_collisions(self.filename)
|
||||
|
||||
if self.cleaned_data.get('txt') or self.cleaned_data.get('xml'):
|
||||
# check group
|
||||
self.group = self.deduce_group(self.filename)
|
||||
# check existing
|
||||
existing = Submission.objects.filter(name=self.filename, rev=self.revision).exclude(state__in=("posted", "cancel", "waiting-for-draft"))
|
||||
if existing:
|
||||
raise forms.ValidationError(
|
||||
format_html(
|
||||
'A submission with same name and revision is currently being processed. <a href="{}">Check the status here.</a>',
|
||||
urljoin(
|
||||
settings.IDTRACKER_BASE_URL,
|
||||
urlreverse("ietf.submit.views.submission_status", kwargs={'submission_id': existing[0].pk}),
|
||||
)
|
||||
)
|
||||
# check group
|
||||
self.group = self.deduce_group(self.filename)
|
||||
# check existing
|
||||
existing = Submission.objects.filter(name=self.filename, rev=self.revision).exclude(state__in=("posted", "cancel", "waiting-for-draft"))
|
||||
if existing:
|
||||
raise forms.ValidationError(
|
||||
format_html(
|
||||
'A submission with same name and revision is currently being processed. <a href="{}">Check the status here.</a>',
|
||||
urlreverse("ietf.submit.views.submission_status", kwargs={'submission_id': existing[0].pk}),
|
||||
)
|
||||
|
||||
# cut-off
|
||||
if self.revision == '00' and self.in_first_cut_off:
|
||||
raise forms.ValidationError(mark_safe(self.cutoff_warning))
|
||||
# check thresholds that depend on the draft / group
|
||||
self.check_submissions_thresholds(
|
||||
"for the Internet-Draft %s" % self.filename,
|
||||
dict(name=self.filename, rev=self.revision, submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SAME_DRAFT_NAME, settings.IDSUBMIT_MAX_DAILY_SAME_DRAFT_NAME_SIZE,
|
||||
)
|
||||
if self.group:
|
||||
self.check_submissions_thresholds(
|
||||
"for the group \"%s\"" % (self.group.acronym),
|
||||
dict(group=self.group, submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SAME_GROUP, settings.IDSUBMIT_MAX_DAILY_SAME_GROUP_SIZE,
|
||||
)
|
||||
|
||||
# cut-off
|
||||
if self.revision == '00' and self.in_first_cut_off:
|
||||
raise forms.ValidationError(mark_safe(self.cutoff_warning))
|
||||
# check thresholds that depend on the draft / group
|
||||
self.check_submissions_thresholds(
|
||||
"for the Internet-Draft %s" % self.filename,
|
||||
dict(name=self.filename, rev=self.revision, submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SAME_DRAFT_NAME, settings.IDSUBMIT_MAX_DAILY_SAME_DRAFT_NAME_SIZE,
|
||||
)
|
||||
if self.group:
|
||||
self.check_submissions_thresholds(
|
||||
"for the group \"%s\"" % (self.group.acronym),
|
||||
dict(group=self.group, submission_date=today),
|
||||
settings.IDSUBMIT_MAX_DAILY_SAME_GROUP, settings.IDSUBMIT_MAX_DAILY_SAME_GROUP_SIZE,
|
||||
)
|
||||
return super().clean()
|
||||
|
||||
@staticmethod
|
||||
|
@ -613,26 +624,6 @@ class DeprecatedSubmissionBaseUploadForm(SubmissionBaseUploadForm):
|
|||
return super().clean()
|
||||
|
||||
|
||||
class SubmissionManualUploadForm(DeprecatedSubmissionBaseUploadForm):
|
||||
xml = forms.FileField(label='.xml format', required=False) # xml field with required=False instead of True
|
||||
txt = forms.FileField(label='.txt format', required=False)
|
||||
# We won't permit html upload until we can verify that the content
|
||||
# reasonably matches the text and/or xml upload. Till then, we generate
|
||||
# html for version 3 xml submissions.
|
||||
# html = forms.FileField(label='.html format', required=False)
|
||||
pdf = forms.FileField(label='.pdf format', required=False)
|
||||
|
||||
def __init__(self, request, *args, **kwargs):
|
||||
super(SubmissionManualUploadForm, self).__init__(request, *args, **kwargs)
|
||||
self.formats = settings.IDSUBMIT_FILE_TYPES
|
||||
self.base_formats = ['txt', 'xml', ]
|
||||
|
||||
def clean_txt(self):
|
||||
return self.clean_file("txt", PlainParser)
|
||||
|
||||
def clean_pdf(self):
|
||||
return self.clean_file("pdf", PDFParser)
|
||||
|
||||
class DeprecatedSubmissionAutoUploadForm(DeprecatedSubmissionBaseUploadForm):
|
||||
"""Full-service upload form, replaced by the asynchronous version"""
|
||||
user = forms.EmailField(required=True)
|
||||
|
@ -642,17 +633,50 @@ class DeprecatedSubmissionAutoUploadForm(DeprecatedSubmissionBaseUploadForm):
|
|||
self.formats = ['xml', ]
|
||||
self.base_formats = ['xml', ]
|
||||
|
||||
|
||||
class SubmissionManualUploadForm(SubmissionBaseUploadForm):
|
||||
txt = forms.FileField(label='.txt format', required=False)
|
||||
formats = SubmissionBaseUploadForm.formats + ('txt',)
|
||||
base_formats = SubmissionBaseUploadForm.base_formats + ('txt',)
|
||||
|
||||
def __init__(self, request, *args, **kwargs):
|
||||
super().__init__(request, *args, **kwargs)
|
||||
self.fields['xml'].required = False
|
||||
|
||||
def clean_txt(self):
|
||||
txt_file = self._clean_file("txt", PlainParser)
|
||||
if txt_file is not None:
|
||||
bytes = txt_file.read()
|
||||
try:
|
||||
text = bytes.decode(self.file_info["txt"].charset)
|
||||
parsed_draft = PlaintextDraft(text, txt_file.name)
|
||||
self._extracted_filenames_and_revisions["txt"] = (parsed_draft.filename, parsed_draft.revision)
|
||||
except (UnicodeDecodeError, LookupError) as e:
|
||||
raise forms.ValidationError(f'Failed decoding the uploaded file: "{str(e)}"', code="decode_failed")
|
||||
if not parsed_draft.filename:
|
||||
raise forms.ValidationError(
|
||||
"Could not extract a valid Internet-Draft name from the plaintext. "
|
||||
"Please make sure that the full Internet-Draft name including "
|
||||
"revision number appears centered on its own line below the document title on the "
|
||||
"first page.",
|
||||
code="parse_error_filename",
|
||||
)
|
||||
if not parsed_draft.revision:
|
||||
raise forms.ValidationError(
|
||||
"Could not extract a valid Internet-Draft revision from the plaintext. "
|
||||
"Please make sure that the full Internet-Draft name including "
|
||||
"revision number appears centered on its own line below the document title on the "
|
||||
"first page.",
|
||||
code="parse_error_revision",
|
||||
)
|
||||
return txt_file
|
||||
|
||||
class SubmissionAutoUploadForm(SubmissionBaseUploadForm):
|
||||
user = forms.EmailField(required=True)
|
||||
replaces = forms.CharField(required=False, max_length=1000, strip=True)
|
||||
|
||||
def __init__(self, request, *args, **kwargs):
|
||||
super().__init__(request, *args, **kwargs)
|
||||
self.formats = ['xml', ]
|
||||
self.base_formats = ['xml', ]
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
cleaned_data = super().clean()
|
||||
|
||||
# Clean the replaces field after the rest of the cleaning so we know the name of the
|
||||
# uploaded draft via self.filename
|
||||
|
@ -692,6 +716,7 @@ class SubmissionAutoUploadForm(SubmissionBaseUploadForm):
|
|||
alias.name + " is approved by the IESG and cannot be replaced"
|
||||
),
|
||||
)
|
||||
return cleaned_data
|
||||
|
||||
|
||||
class NameEmailForm(forms.Form):
|
||||
|
|
|
@@ -9,7 +9,8 @@ from django.conf import settings
from django.utils import timezone

from ietf.submit.models import Submission
from ietf.submit.utils import cancel_submission, create_submission_event, process_uploaded_submission
from ietf.submit.utils import (cancel_submission, create_submission_event, process_uploaded_submission,
process_and_accept_uploaded_submission)
from ietf.utils import log


@@ -23,6 +24,16 @@ def process_uploaded_submission_task(submission_id):
process_uploaded_submission(submission)


@shared_task
def process_and_accept_uploaded_submission_task(submission_id):
try:
submission = Submission.objects.get(pk=submission_id)
except Submission.DoesNotExist:
log.log(f'process_uploaded_submission_task called for missing submission_id={submission_id}')
else:
process_and_accept_uploaded_submission(submission)


@shared_task
def cancel_stale_submissions():
now = timezone.now()

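The new process_and_accept_uploaded_submission_task mirrors the existing task. The view wiring is not part of this hunk, but the tests later in this diff mock both the task and transaction.on_commit, which suggests dispatch along these lines; treat the helper below as an assumption, not the actual view code.

# Hedged sketch: how a view *might* queue the task defined above. The helper
# name and its call site are assumptions; only the task itself is in this diff.
from django.db import transaction

from ietf.submit.tasks import process_and_accept_uploaded_submission_task

def queue_submission_processing(submission):
    # Defer dispatch until the transaction commits so the worker can always
    # load the Submission row it is handed.
    transaction.on_commit(
        lambda: process_and_accept_uploaded_submission_task.delay(submission.pk)
    )
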
@@ -2,7 +2,7 @@



Network Working Group A. Name
Network Working Group %(firstpagename)37s
Internet-Draft Test Centre Inc.
Intended status: Informational %(month)s %(year)s
Expires: %(expiration)s

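The template line above swaps the hardcoded author for a placeholder with a width: %(firstpagename)37s right-justifies the substituted value in a 37-character field, which keeps the right-hand column of the first-page header lined up. A one-line illustration:

# Illustration of the width specifier used in the test template above.
header = 'Network Working Group %(firstpagename)37s' % {'firstpagename': 'A. Name'}
# 'A. Name' ends up right-justified within 37 columns at the end of the line.
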
@ -5,13 +5,14 @@
|
|||
import datetime
|
||||
import email
|
||||
import io
|
||||
import mock
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import mock
|
||||
|
||||
from io import StringIO
|
||||
from pyquery import PyQuery
|
||||
from typing import Tuple
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
|
@ -28,7 +29,9 @@ import debug # pyflakes:ignore
|
|||
|
||||
from ietf.submit.utils import (expirable_submissions, expire_submission, find_submission_filenames,
|
||||
post_submission, validate_submission_name, validate_submission_rev,
|
||||
process_uploaded_submission, SubmissionError, process_submission_text)
|
||||
process_and_accept_uploaded_submission, SubmissionError, process_submission_text,
|
||||
process_submission_xml, process_uploaded_submission,
|
||||
process_and_validate_submission)
|
||||
from ietf.doc.factories import (DocumentFactory, WgDraftFactory, IndividualDraftFactory, IndividualRfcFactory,
|
||||
ReviewFactory, WgRfcFactory)
|
||||
from ietf.doc.models import ( Document, DocAlias, DocEvent, State,
|
||||
|
@ -47,7 +50,7 @@ from ietf.submit.factories import SubmissionFactory, SubmissionExtResourceFactor
|
|||
from ietf.submit.forms import SubmissionBaseUploadForm, SubmissionAutoUploadForm
|
||||
from ietf.submit.models import Submission, Preapproval, SubmissionExtResource
|
||||
from ietf.submit.mail import add_submission_email, process_response_email
|
||||
from ietf.submit.tasks import cancel_stale_submissions, process_uploaded_submission_task
|
||||
from ietf.submit.tasks import cancel_stale_submissions, process_and_accept_uploaded_submission_task
|
||||
from ietf.utils.accesstoken import generate_access_token
|
||||
from ietf.utils.mail import outbox, empty_outbox, get_payload_text
|
||||
from ietf.utils.models import VersionInfo
|
||||
|
@ -91,7 +94,28 @@ class BaseSubmitTestCase(TestCase):
|
|||
def archive_dir(self):
|
||||
return settings.INTERNET_DRAFT_ARCHIVE_DIR
|
||||
|
||||
def submission_file(name_in_doc, name_in_post, group, templatename, author=None, email=None, title=None, year=None, ascii=True):
|
||||
def post_to_upload_submission(self, *args, **kwargs):
|
||||
"""POST to the upload_submission endpoint
|
||||
|
||||
Use this instead of directly POSTing to be sure that the appropriate celery
|
||||
tasks would be queued (but are not actually queued during testing)
|
||||
"""
|
||||
# Mock task so we can check that it's called without actually submitting a celery task.
|
||||
# Also mock on_commit() because otherwise the test transaction prevents the call from
|
||||
# ever being made.
|
||||
with mock.patch("ietf.submit.views.process_uploaded_submission_task") as mocked_task:
|
||||
with mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x()):
|
||||
response = self.client.post(*args, **kwargs)
|
||||
if response.status_code == 302:
|
||||
# A 302 indicates we're being redirected to the status page, meaning the upload
|
||||
# was accepted. Check that the task would have been queued.
|
||||
self.assertTrue(mocked_task.delay.called)
|
||||
else:
|
||||
self.assertFalse(mocked_task.delay.called)
|
||||
return response
|
||||
|
||||
|
||||
def submission_file_contents(name_in_doc, group, templatename, author=None, email=None, title=None, year=None, ascii=True):
|
||||
_today = date_today()
|
||||
# construct appropriate text draft
|
||||
f = io.open(os.path.join(settings.BASE_DIR, "submit", templatename))
|
||||
|
@ -128,10 +152,18 @@ def submission_file(name_in_doc, name_in_post, group, templatename, author=None,
|
|||
email=email,
|
||||
title=title,
|
||||
)
|
||||
return submission_text, author
|
||||
|
||||
|
||||
def submission_file(name_in_doc, name_in_post, group, templatename, author=None, email=None, title=None, year=None, ascii=True):
|
||||
submission_text, author = submission_file_contents(
|
||||
name_in_doc, group, templatename, author, email, title, year, ascii
|
||||
)
|
||||
file = StringIO(submission_text)
|
||||
file.name = name_in_post
|
||||
return file, author
|
||||
|
||||
|
||||
def create_draft_submission_with_rev_mismatch(rev='01'):
|
||||
"""Create a draft and submission with mismatched version
|
||||
|
||||
|
@ -172,31 +204,44 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
# Submit views assume there is a "next" IETF to look for cutoff dates against
|
||||
MeetingFactory(type_id='ietf', date=date_today()+datetime.timedelta(days=180))
|
||||
|
||||
def create_and_post_submission(self, name, rev, author, group=None, formats=("txt",), base_filename=None):
|
||||
def create_and_post_submission(self, name, rev, author, group=None, formats=("txt",), base_filename=None, ascii=True):
|
||||
"""Helper to create and post a submission
|
||||
|
||||
If base_filename is None, defaults to 'test_submission'
|
||||
If base_filename is None, defaults to 'test_submission'.
|
||||
"""
|
||||
url = urlreverse('ietf.submit.views.upload_submission')
|
||||
files = dict()
|
||||
|
||||
for format in formats:
|
||||
fn = '.'.join((base_filename or 'test_submission', format))
|
||||
files[format], __ = submission_file(f'{name}-{rev}', f'{name}-{rev}.{format}', group, fn, author=author)
|
||||
files[format], __ = submission_file(f'{name}-{rev}', f'{name}-{rev}.{format}', group, fn, author=author, ascii=ascii)
|
||||
|
||||
r = self.client.post(url, files)
|
||||
if r.status_code != 302:
|
||||
r = self.post_to_upload_submission(url, files)
|
||||
if r.status_code == 302:
|
||||
# A redirect means the upload was accepted and queued for processing
|
||||
process_submission = True
|
||||
last_submission = Submission.objects.order_by("-pk").first()
|
||||
self.assertEqual(last_submission.state_id, "validating")
|
||||
else:
|
||||
process_submission = False
|
||||
q = PyQuery(r.content)
|
||||
print(q('div.invalid-feedback').text())
|
||||
self.assertNoFormPostErrors(r, ".invalid-feedback,.alert-danger")
|
||||
|
||||
for format in formats:
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.%s" % (name, rev, format))))
|
||||
if format == 'xml':
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.%s" % (name, rev, 'html'))))
|
||||
|
||||
# Now process the submission like the task would do
|
||||
if process_submission:
|
||||
process_uploaded_submission(Submission.objects.order_by('-pk').first())
|
||||
for format in formats:
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.%s" % (name, rev, format))))
|
||||
if format == 'xml':
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.%s" % (name, rev, 'html'))))
|
||||
return r
|
||||
|
||||
def do_submission(self, name, rev, group=None, formats=["txt",], author=None):
|
||||
def do_submission(self, name, rev, group=None, formats: Tuple[str, ...]=("txt",), author=None, base_filename=None, ascii=True):
|
||||
"""Simulate uploading a draft and waiting for validation results
|
||||
|
||||
Returns the "full access" status URL and the author associated with the submitted draft.
|
||||
"""
|
||||
# break early in case of missing configuration
|
||||
self.assertTrue(os.path.exists(settings.IDSUBMIT_IDNITS_BINARY))
|
||||
|
||||
|
@ -211,7 +256,9 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
# submit
|
||||
if author is None:
|
||||
author = PersonFactory()
|
||||
r = self.create_and_post_submission(name, rev, author, group, formats)
|
||||
r = self.create_and_post_submission(
|
||||
name=name, rev=rev, author=author, group=group, formats=formats, base_filename=base_filename, ascii=ascii
|
||||
)
|
||||
status_url = r["Location"]
|
||||
|
||||
self.assertEqual(Submission.objects.filter(name=name).count(), 1)
|
||||
|
@ -223,8 +270,10 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
sys.stderr.write("Author initials: %s\n" % author.initials())
|
||||
self.assertEqual(len(submission.authors), 1)
|
||||
a = submission.authors[0]
|
||||
self.assertEqual(a["name"], author.ascii_name())
|
||||
self.assertEqual(a["email"], author.email().address.lower())
|
||||
if ascii:
|
||||
self.assertEqual(a["name"], author.ascii_name())
|
||||
if author.email():
|
||||
self.assertEqual(a["email"], author.email().address.lower())
|
||||
self.assertEqual(a["affiliation"], "Test Centre Inc.")
|
||||
self.assertEqual(a["country"], "UK")
|
||||
|
||||
|
@ -1262,11 +1311,8 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
|
||||
def test_submit_new_wg_with_dash(self):
|
||||
group = Group.objects.create(acronym="mars-special", name="Mars Special", type_id="wg", state_id="active")
|
||||
|
||||
name = "draft-ietf-%s-testing-tests" % group.acronym
|
||||
|
||||
self.do_submission(name, "00")
|
||||
|
||||
self.create_and_post_submission(name=name, rev="00", author=PersonFactory())
|
||||
self.assertEqual(Submission.objects.get(name=name).group.acronym, group.acronym)
|
||||
|
||||
def test_submit_new_wg_v2_country_only(self):
|
||||
|
@ -1292,19 +1338,15 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
|
||||
def test_submit_new_irtf(self):
|
||||
group = Group.objects.create(acronym="saturnrg", name="Saturn", type_id="rg", state_id="active")
|
||||
|
||||
name = "draft-irtf-%s-testing-tests" % group.acronym
|
||||
|
||||
self.do_submission(name, "00")
|
||||
|
||||
self.assertEqual(Submission.objects.get(name=name).group.acronym, group.acronym)
|
||||
self.assertEqual(Submission.objects.get(name=name).group.type_id, group.type_id)
|
||||
self.create_and_post_submission(name=name, rev="00", author=PersonFactory())
|
||||
submission = Submission.objects.get(name=name)
|
||||
self.assertEqual(submission.group.acronym, group.acronym)
|
||||
self.assertEqual(submission.group.type_id, group.type_id)
|
||||
|
||||
def test_submit_new_iab(self):
|
||||
name = "draft-iab-testing-tests"
|
||||
|
||||
self.do_submission(name, "00")
|
||||
|
||||
self.create_and_post_submission(name=name, rev="00", author=PersonFactory())
|
||||
self.assertEqual(Submission.objects.get(name=name).group.acronym, "iab")
|
||||
|
||||
def test_cancel_submission(self):
|
||||
|
@ -1514,17 +1556,21 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
rev = "00"
|
||||
group = "mars"
|
||||
|
||||
self.do_submission(name, rev, group, ["txt", "xml", "pdf"])
|
||||
self.do_submission(name, rev, group, ["txt", "xml"])
|
||||
|
||||
self.assertEqual(Submission.objects.filter(name=name).count(), 1)
|
||||
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))))
|
||||
self.assertTrue(name in io.open(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev))).read())
|
||||
fd = io.open(os.path.join(self.staging_dir, "%s-%s.txt" % (name, rev)))
|
||||
txt_contents = fd.read()
|
||||
fd.close()
|
||||
self.assertTrue(name in txt_contents)
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev))))
|
||||
self.assertTrue(name in io.open(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev))).read())
|
||||
self.assertTrue('<?xml version="1.0" encoding="UTF-8"?>' in io.open(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev))).read())
|
||||
self.assertTrue(os.path.exists(os.path.join(self.staging_dir, "%s-%s.pdf" % (name, rev))))
|
||||
self.assertTrue('This is PDF' in io.open(os.path.join(self.staging_dir, "%s-%s.pdf" % (name, rev))).read())
|
||||
fd = io.open(os.path.join(self.staging_dir, "%s-%s.xml" % (name, rev)))
|
||||
xml_contents = fd.read()
|
||||
fd.close()
|
||||
self.assertTrue(name in xml_contents)
|
||||
self.assertTrue('<?xml version="1.0" encoding="UTF-8"?>' in xml_contents)
|
||||
|
||||
def test_expire_submissions(self):
|
||||
s = Submission.objects.create(name="draft-ietf-mars-foo",
|
||||
|
@ -1630,7 +1676,7 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
for format in formats:
|
||||
files[format], author = submission_file(f'{name}-{rev}', f'{name}-{rev}.bad', group, "test_submission.bad")
|
||||
|
||||
r = self.client.post(url, files)
|
||||
r = self.post_to_upload_submission(url, files)
|
||||
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
|
@ -1649,14 +1695,9 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
files[format], author = submission_file(name_in_doc, name_in_post, group, "test_submission.%s" % format)
|
||||
files[format].name = name_in_post
|
||||
|
||||
r = self.client.post(url, files)
|
||||
|
||||
r = self.post_to_upload_submission(url, files)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
self.assertTrue(len(q("form .invalid-feedback")) > 0)
|
||||
m = q('div.invalid-feedback').text()
|
||||
|
||||
return r, q, m
|
||||
return r
|
||||
|
||||
def test_submit_bad_file_txt(self):
|
||||
r, q, m = self.submit_bad_file("some name", ["txt"])
|
||||
|
@ -1666,15 +1707,15 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
self.assertIn('document does not contain a legitimate name', m)
|
||||
|
||||
def test_submit_bad_doc_name(self):
|
||||
r, q, m = self.submit_bad_doc_name_with_ext(name_in_doc="draft-foo.dot-bar", name_in_post="draft-foo.dot-bar", formats=["txt"])
|
||||
self.assertIn('contains a disallowed character with byte code: 46', m)
|
||||
r = self.submit_bad_doc_name_with_ext(name_in_doc="draft-foo.dot-bar", name_in_post="draft-foo.dot-bar", formats=["txt"])
|
||||
self.assertContains(r, "contains a disallowed character with byte code: 46")
|
||||
# This actually is allowed by the existing code. A significant rework of the validation mechanics is needed.
|
||||
# r, q, m = self.submit_bad_doc_name_with_ext(name_in_doc="draft-foo-bar-00.txt", name_in_post="draft-foo-bar-00.txt", formats=["txt"])
|
||||
# self.assertIn('Did you include a filename extension in the name by mistake?', m)
|
||||
r, q, m = self.submit_bad_doc_name_with_ext(name_in_doc="draft-foo-bar-00.xml", name_in_post="draft-foo-bar-00.xml", formats=["xml"])
|
||||
self.assertIn('Did you include a filename extension in the name by mistake?', m)
|
||||
r, q, m = self.submit_bad_doc_name_with_ext(name_in_doc="../malicious-name-in-content-00", name_in_post="../malicious-name-in-post-00.xml", formats=["xml"])
|
||||
self.assertIn('Did you include a filename extension in the name by mistake?', m)
|
||||
r = self.submit_bad_doc_name_with_ext(name_in_doc="draft-foo-bar-00.xml", name_in_post="draft-foo-bar-00.xml", formats=["xml"])
|
||||
self.assertContains(r, "Could not extract a valid Internet-Draft revision from the XML")
|
||||
r = self.submit_bad_doc_name_with_ext(name_in_doc="../malicious-name-in-content-00", name_in_post="../malicious-name-in-post-00.xml", formats=["xml"])
|
||||
self.assertContains(r, "Did you include a filename extension in the name by mistake?")
|
||||
|
||||
def test_submit_bad_file_xml(self):
|
||||
r, q, m = self.submit_bad_file("some name", ["xml"])
|
||||
|
@ -1682,12 +1723,6 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
self.assertIn('Expected the XML file to have extension ".xml"', m)
|
||||
self.assertIn('Expected an XML file of type "application/xml"', m)
|
||||
|
||||
def test_submit_bad_file_pdf(self):
|
||||
r, q, m = self.submit_bad_file("some name", ["pdf"])
|
||||
self.assertIn('Invalid characters were found in the name', m)
|
||||
self.assertIn('Expected the PDF file to have extension ".pdf"', m)
|
||||
self.assertIn('Expected an PDF file of type "application/pdf"', m)
|
||||
|
||||
def test_submit_file_in_archive(self):
|
||||
name = "draft-authorname-testing-file-exists"
|
||||
rev = '00'
|
||||
|
@ -1711,7 +1746,7 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
with io.open(fn, 'w') as f:
|
||||
f.write("a" * 2000)
|
||||
files[format], author = submission_file(f'{name}-{rev}', f'{name}-{rev}.{format}', group, "test_submission.%s" % format)
|
||||
r = self.client.post(url, files)
|
||||
r = self.post_to_upload_submission(url, files)
|
||||
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
|
@ -1722,25 +1757,11 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
def test_submit_nonascii_name(self):
|
||||
name = "draft-authorname-testing-nonascii"
|
||||
rev = "00"
|
||||
group = None
|
||||
|
||||
# get
|
||||
url = urlreverse('ietf.submit.views.upload_submission')
|
||||
r = self.client.get(url)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
q = PyQuery(r.content)
|
||||
|
||||
# submit
|
||||
#author = PersonFactory(name=u"Jörgen Nilsson".encode('latin1'))
|
||||
user = UserFactory(first_name="Jörgen", last_name="Nilsson")
|
||||
author = PersonFactory(user=user)
|
||||
|
||||
file, __ = submission_file(f'{name}-{rev}', f'{name}-{rev}.txt', group, "test_submission.nonascii", author=author, ascii=False)
|
||||
files = {"txt": file }
|
||||
|
||||
r = self.client.post(url, files)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
status_url = r["Location"]
|
||||
status_url, _ = self.do_submission(name=name, rev=rev, author=author, base_filename="test_submission.nonascii", ascii=False)
|
||||
r = self.client.get(status_url)
|
||||
q = PyQuery(r.content)
|
||||
m = q('p.alert-warning').text()
|
||||
|
@ -1750,19 +1771,12 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
def test_submit_missing_author_email(self):
|
||||
name = "draft-authorname-testing-noemail"
|
||||
rev = "00"
|
||||
group = None
|
||||
|
||||
author = PersonFactory()
|
||||
for e in author.email_set.all():
|
||||
e.delete()
|
||||
|
||||
files = {"txt": submission_file(f'{name}-{rev}', f'{name}-{rev}.txt', group, "test_submission.txt", author=author, ascii=True)[0] }
|
||||
|
||||
# submit
|
||||
url = urlreverse('ietf.submit.views.upload_submission')
|
||||
r = self.client.post(url, files)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
status_url = r["Location"]
|
||||
status_url, _ = self.do_submission(name=name, rev=rev, author=author)
|
||||
r = self.client.get(status_url)
|
||||
q = PyQuery(r.content)
|
||||
m = q('p.text-danger').text()
|
||||
|
@ -1773,20 +1787,13 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
def test_submit_bad_author_email(self):
|
||||
name = "draft-authorname-testing-bademail"
|
||||
rev = "00"
|
||||
group = None
|
||||
|
||||
author = PersonFactory()
|
||||
email = author.email_set.first()
|
||||
email.address = '@bad.email'
|
||||
email.save()
|
||||
|
||||
files = {"xml": submission_file(f'{name}-{rev}',f'{name}-{rev}.xml', group, "test_submission.xml", author=author, ascii=False)[0] }
|
||||
|
||||
# submit
|
||||
url = urlreverse('ietf.submit.views.upload_submission')
|
||||
r = self.client.post(url, files)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
status_url = r["Location"]
|
||||
status_url, _ = self.do_submission(name=name, rev=rev, author=author, formats=('xml',))
|
||||
r = self.client.get(status_url)
|
||||
q = PyQuery(r.content)
|
||||
m = q('p.text-danger').text()
|
||||
|
@ -1797,15 +1804,8 @@ class SubmitTests(BaseSubmitTestCase):
|
|||
def test_submit_invalid_yang(self):
|
||||
name = "draft-yang-testing-invalid"
|
||||
rev = "00"
|
||||
group = None
|
||||
|
||||
# submit
|
||||
files = {"txt": submission_file(f'{name}-{rev}', f'{name}-{rev}.txt', group, "test_submission_invalid_yang.txt")[0] }
|
||||
|
||||
url = urlreverse('ietf.submit.views.upload_submission')
|
||||
r = self.client.post(url, files)
|
||||
self.assertEqual(r.status_code, 302)
|
||||
status_url = r["Location"]
|
||||
status_url, _ = self.do_submission(name=name, rev=rev, base_filename="test_submission_invalid_yang")
|
||||
r = self.client.get(status_url)
|
||||
q = PyQuery(r.content)
|
||||
#
|
||||
|
@ -2693,7 +2693,7 @@ Subject: test
|
|||
for format in formats:
|
||||
files[format], author = submission_file(f'{name}-{rev}', f'{name}-{rev}.{format}', group, "test_submission.%s" % format)
|
||||
|
||||
r = self.client.post(url, files)
|
||||
r = self.post_to_upload_submission(url, files)
|
||||
if r.status_code != 302:
|
||||
q = PyQuery(r.content)
|
||||
print(q('div.invalid-feedback span.form-text div').text())
|
||||
|
@ -2742,6 +2742,8 @@ Subject: test
|
|||
@mock.patch.object(transaction, 'on_commit', lambda x: x())
|
||||
@override_settings(IDTRACKER_BASE_URL='https://datatracker.example.com')
|
||||
class ApiSubmissionTests(BaseSubmitTestCase):
|
||||
TASK_TO_MOCK = "ietf.submit.views.process_and_accept_uploaded_submission_task"
|
||||
|
||||
def test_upload_draft(self):
|
||||
"""api_submission accepts a submission and queues it for processing"""
|
||||
url = urlreverse('ietf.submit.views.api_submission')
|
||||
|
@ -2750,7 +2752,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
'xml': xml,
|
||||
'user': author.user.username,
|
||||
}
|
||||
with mock.patch('ietf.submit.views.process_uploaded_submission_task') as mock_task:
|
||||
with mock.patch(self.TASK_TO_MOCK) as mock_task:
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
response = r.json()
|
||||
|
@ -2788,7 +2790,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
'replaces': existing_draft.name,
|
||||
}
|
||||
# mock out the task so we don't call to celery during testing!
|
||||
with mock.patch('ietf.submit.views.process_uploaded_submission_task'):
|
||||
with mock.patch(self.TASK_TO_MOCK):
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 200)
|
||||
submission = Submission.objects.last()
|
||||
|
@ -2806,7 +2808,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
'xml': xml,
|
||||
'user': 'i.dont.exist@nowhere.example.com',
|
||||
}
|
||||
with mock.patch('ietf.submit.views.process_uploaded_submission_task') as mock_task:
|
||||
with mock.patch(self.TASK_TO_MOCK) as mock_task:
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
response = r.json()
|
||||
|
@ -2820,7 +2822,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
'xml': xml,
|
||||
'user': author.user.username,
|
||||
}
|
||||
with mock.patch('ietf.submit.views.process_uploaded_submission_task') as mock_task:
|
||||
with mock.patch(self.TASK_TO_MOCK) as mock_task:
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
response = r.json()
|
||||
|
@ -2834,7 +2836,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
'xml': xml,
|
||||
'user': author.user.username,
|
||||
}
|
||||
with mock.patch('ietf.submit.views.process_uploaded_submission_task') as mock_task:
|
||||
with mock.patch(self.TASK_TO_MOCK) as mock_task:
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
response = r.json()
|
||||
|
@ -2850,7 +2852,7 @@ class ApiSubmissionTests(BaseSubmitTestCase):
|
|||
'xml': xml,
|
||||
'user': author.user.username,
|
||||
}
|
||||
with mock.patch('ietf.submit.views.process_uploaded_submission_task') as mock_task:
|
||||
with mock.patch(self.TASK_TO_MOCK) as mock_task:
|
||||
r = self.client.post(url, data)
|
||||
self.assertEqual(r.status_code, 400)
|
||||
response = r.json()
|
||||
|
@ -3105,8 +3107,8 @@ class SubmissionUploadFormTests(BaseSubmitTestCase):
|
|||
|
||||
class AsyncSubmissionTests(BaseSubmitTestCase):
|
||||
"""Tests of async submission-related tasks"""
|
||||
def test_process_uploaded_submission(self):
|
||||
"""process_uploaded_submission should properly process a submission"""
|
||||
def test_process_and_accept_uploaded_submission(self):
|
||||
"""process_and_accept_uploaded_submission should properly process a submission"""
|
||||
_today = date_today()
|
||||
xml, author = submission_file('draft-somebody-test-00', 'draft-somebody-test-00.xml', None, 'test_submission.xml')
|
||||
xml_data = xml.read()
|
||||
|
@ -3126,7 +3128,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
self.assertFalse(txt_path.exists())
|
||||
html_path = xml_path.with_suffix('.html')
|
||||
self.assertFalse(html_path.exists())
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'auth', 'accepted submission should be in auth state')
|
||||
|
@ -3144,8 +3146,8 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
self.assertEqual(submission.file_size, os.stat(txt_path).st_size)
|
||||
self.assertIn('Completed submission validation checks', submission.submissionevent_set.last().desc)
|
||||
|
||||
def test_process_uploaded_submission_invalid(self):
|
||||
"""process_uploaded_submission should properly process an invalid submission"""
|
||||
def test_process_and_accept_uploaded_submission_invalid(self):
|
||||
"""process_and_accept_uploaded_submission should properly process an invalid submission"""
|
||||
xml, author = submission_file('draft-somebody-test-00', 'draft-somebody-test-00.xml', None, 'test_submission.xml')
|
||||
xml_data = xml.read()
|
||||
xml.close()
|
||||
|
@ -3166,7 +3168,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
|
||||
with xml_path.open('w') as f:
|
||||
f.write(xml_data)
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('not one of the document authors', submission.submissionevent_set.last().desc)
|
||||
|
@ -3182,10 +3184,10 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
|
||||
with xml_path.open('w') as f:
|
||||
f.write(re.sub(r'<email>.*</email>', '', xml_data))
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('Missing email address', submission.submissionevent_set.last().desc)
|
||||
self.assertIn('Email address not found for all authors', submission.submissionevent_set.last().desc)
|
||||
|
||||
# no title
|
||||
submission = SubmissionFactory(
|
||||
|
@ -3198,7 +3200,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.xml'
|
||||
with xml_path.open('w') as f:
|
||||
f.write(re.sub(r'<title>.*</title>', '<title></title>', xml_data))
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('Could not extract a valid title', submission.submissionevent_set.last().desc)
|
||||
|
@ -3214,10 +3216,10 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-different-name-00.xml'
|
||||
with xml_path.open('w') as f:
|
||||
f.write(xml_data)
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('Internet-Draft filename disagrees', submission.submissionevent_set.last().desc)
|
||||
self.assertIn('Submission rejected: XML Internet-Draft filename', submission.submissionevent_set.last().desc)
|
||||
|
||||
# rev mismatch
|
||||
submission = SubmissionFactory(
|
||||
|
@ -3230,10 +3232,10 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-01.xml'
|
||||
with xml_path.open('w') as f:
|
||||
f.write(xml_data)
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('revision disagrees', submission.submissionevent_set.last().desc)
|
||||
self.assertIn('Submission rejected: XML Internet-Draft revision', submission.submissionevent_set.last().desc)
|
||||
|
||||
# not xml
|
||||
submission = SubmissionFactory(
|
||||
|
@ -3246,7 +3248,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.txt'
|
||||
with txt_path.open('w') as f:
|
||||
f.write(txt_data)
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('Only XML Internet-Draft submissions', submission.submissionevent_set.last().desc)
|
||||
|
@ -3263,7 +3265,7 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
with xml_path.open('w') as f:
|
||||
f.write(xml_data)
|
||||
with mock.patch('ietf.submit.utils.process_submission_xml') as mock_proc_xml:
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertFalse(mock_proc_xml.called, 'Should not process submission not in "validating" state')
|
||||
self.assertEqual(submission.state_id, 'uploaded', 'State should not be changed')
|
||||
|
@ -3291,80 +3293,196 @@ class AsyncSubmissionTests(BaseSubmitTestCase):
|
|||
symbol='x',
|
||||
)
|
||||
):
|
||||
process_uploaded_submission(submission)
|
||||
process_and_accept_uploaded_submission(submission)
|
||||
submission = Submission.objects.get(pk=submission.pk) # refresh
|
||||
self.assertEqual(submission.state_id, 'cancel')
|
||||
self.assertIn('fake failure', submission.submissionevent_set.last().desc)
|
||||
|
||||
|
||||
@mock.patch('ietf.submit.tasks.process_uploaded_submission')
|
||||
def test_process_uploaded_submission_task(self, mock_method):
|
||||
"""process_uploaded_submission_task task should properly call its method"""
|
||||
@mock.patch('ietf.submit.tasks.process_and_accept_uploaded_submission')
|
||||
def test_process_and_accept_uploaded_submission_task(self, mock_method):
|
||||
"""process_and_accept_uploaded_submission_task task should properly call its method"""
|
||||
s = SubmissionFactory()
|
||||
process_uploaded_submission_task(s.pk)
|
||||
process_and_accept_uploaded_submission_task(s.pk)
|
||||
self.assertEqual(mock_method.call_count, 1)
|
||||
self.assertEqual(mock_method.call_args.args, (s,))
|
||||
|
||||
@mock.patch('ietf.submit.tasks.process_uploaded_submission')
|
||||
def test_process_uploaded_submission_task_ignores_invalid_id(self, mock_method):
|
||||
"""process_uploaded_submission_task should ignore an invalid submission_id"""
|
||||
@mock.patch('ietf.submit.tasks.process_and_accept_uploaded_submission')
|
||||
def test_process_and_accept_uploaded_submission_task_ignores_invalid_id(self, mock_method):
|
||||
"""process_and_accept_uploaded_submission_task should ignore an invalid submission_id"""
|
||||
SubmissionFactory() # be sure there is a Submission
|
||||
bad_pk = 9876
|
||||
self.assertEqual(Submission.objects.filter(pk=bad_pk).count(), 0)
|
||||
process_uploaded_submission_task(bad_pk)
|
||||
process_and_accept_uploaded_submission_task(bad_pk)
|
||||
self.assertEqual(mock_method.call_count, 0)
|
||||
|
||||
def test_process_submission_text_consistency_checks(self):
|
||||
"""process_submission_text should check draft metadata against submission"""
|
||||
submission = SubmissionFactory(
|
||||
name='draft-somebody-test',
|
||||
rev='00',
|
||||
title='Correct Draft Title',
|
||||
def test_process_submission_xml(self):
|
||||
xml_path = Path(settings.IDSUBMIT_STAGING_PATH) / "draft-somebody-test-00.xml"
|
||||
xml, _ = submission_file(
|
||||
"draft-somebody-test-00",
|
||||
"draft-somebody-test-00.xml",
|
||||
None,
|
||||
"test_submission.xml",
|
||||
title="Correct Draft Title",
|
||||
)
|
||||
txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / 'draft-somebody-test-00.txt'
|
||||
xml_path.write_text(xml.read())
|
||||
output = process_submission_xml("draft-somebody-test", "00")
|
||||
self.assertEqual(output["filename"], "draft-somebody-test")
|
||||
self.assertEqual(output["rev"], "00")
|
||||
self.assertEqual(output["title"], "Correct Draft Title")
|
||||
self.assertIsNone(output["abstract"])
|
||||
self.assertEqual(len(output["authors"]), 1) # not checking in detail, parsing is unreliable
|
||||
self.assertIsNone(output["document_date"])
|
||||
self.assertIsNone(output["pages"])
|
||||
self.assertIsNone(output["words"])
|
||||
self.assertIsNone(output["first_two_pages"])
|
||||
self.assertIsNone(output["file_size"])
|
||||
self.assertIsNone(output["formal_languages"])
|
||||
self.assertEqual(output["xml_version"], "3")
|
||||
|
||||
# name mismatch
|
||||
xml, _ = submission_file(
|
||||
"draft-somebody-wrong-name-00", # name that appears in the file
|
||||
"draft-somebody-test-00.xml",
|
||||
None,
|
||||
"test_submission.xml",
|
||||
title="Correct Draft Title",
|
||||
)
|
||||
xml_path.write_text(xml.read())
|
||||
with self.assertRaisesMessage(SubmissionError, "disagrees with submission filename"):
|
||||
process_submission_xml("draft-somebody-test", "00")
|
||||
|
||||
# rev mismatch
|
||||
xml, _ = submission_file(
|
||||
"draft-somebody-test-01", # name that appears in the file
|
||||
"draft-somebody-test-00.xml",
|
||||
None,
|
||||
"test_submission.xml",
|
||||
title="Correct Draft Title",
|
||||
)
|
||||
xml_path.write_text(xml.read())
|
||||
with self.assertRaisesMessage(SubmissionError, "disagrees with submission revision"):
|
||||
process_submission_xml("draft-somebody-test", "00")
|
||||
|
||||
# missing title
|
||||
xml, _ = submission_file(
|
||||
"draft-somebody-test-00", # name that appears in the file
|
||||
"draft-somebody-test-00.xml",
|
||||
None,
|
||||
"test_submission.xml",
|
||||
title="",
|
||||
)
|
||||
xml_path.write_text(xml.read())
|
||||
with self.assertRaisesMessage(SubmissionError, "Could not extract a valid title"):
|
||||
process_submission_xml("draft-somebody-test", "00")
|
||||
|
||||
def test_process_submission_text(self):
|
||||
txt_path = Path(settings.IDSUBMIT_STAGING_PATH) / "draft-somebody-test-00.txt"
|
||||
txt, _ = submission_file(
|
||||
"draft-somebody-test-00",
|
||||
"draft-somebody-test-00.txt",
|
||||
None,
|
||||
"test_submission.txt",
|
||||
title="Correct Draft Title",
|
||||
)
|
||||
txt_path.write_text(txt.read())
|
||||
output = process_submission_text("draft-somebody-test", "00")
|
||||
self.assertEqual(output["filename"], "draft-somebody-test")
|
||||
self.assertEqual(output["rev"], "00")
|
||||
self.assertEqual(output["title"], "Correct Draft Title")
|
||||
self.assertEqual(output["abstract"].strip(), "This document describes how to test tests.")
|
||||
self.assertEqual(len(output["authors"]), 1) # not checking in detail, parsing is unreliable
|
||||
self.assertLessEqual(output["document_date"] - date_today(), datetime.timedelta(days=1))
|
||||
self.assertEqual(output["pages"], 2)
|
||||
self.assertGreater(output["words"], 0) # make sure it got something
|
||||
self.assertGreater(len(output["first_two_pages"]), 0) # make sure it got something
|
||||
self.assertGreater(output["file_size"], 0) # make sure it got something
|
||||
self.assertEqual(output["formal_languages"].count(), 1)
|
||||
self.assertIsNone(output["xml_version"])
|
||||
|
||||
# name mismatch
|
||||
txt, _ = submission_file(
|
||||
'draft-somebody-wrong-name-00', # name that appears in the file
|
||||
'draft-somebody-test-00.xml',
|
||||
"draft-somebody-wrong-name-00", # name that appears in the file
|
||||
"draft-somebody-test-00.txt",
|
||||
None,
|
||||
'test_submission.txt',
|
||||
title='Correct Draft Title',
|
||||
"test_submission.txt",
|
||||
title="Correct Draft Title",
|
||||
)
|
||||
txt_path.open('w').write(txt.read())
|
||||
with txt_path.open('w') as fd:
|
||||
fd.write(txt.read())
|
||||
txt.close()
|
||||
with self.assertRaisesMessage(SubmissionError, 'disagrees with submission filename'):
|
||||
process_submission_text(submission)
|
||||
process_submission_text("draft-somebody-test", "00")
|
||||
|
||||
# rev mismatch
|
||||
txt, _ = submission_file(
|
||||
'draft-somebody-test-01', # name that appears in the file
|
||||
'draft-somebody-test-00.xml',
|
||||
"draft-somebody-test-01", # name that appears in the file
|
||||
"draft-somebody-test-00.txt",
|
||||
None,
|
||||
'test_submission.txt',
|
||||
title='Correct Draft Title',
|
||||
"test_submission.txt",
|
||||
title="Correct Draft Title",
|
||||
)
|
||||
txt_path.open('w').write(txt.read())
|
||||
with txt_path.open('w') as fd:
|
||||
fd.write(txt.read())
|
||||
txt.close()
|
||||
with self.assertRaisesMessage(SubmissionError, 'disagrees with submission revision'):
|
||||
process_submission_text(submission)
|
||||
process_submission_text("draft-somebody-test", "00")
|
||||
|
||||
# title mismatch
|
||||
txt, _ = submission_file(
|
||||
'draft-somebody-test-00', # name that appears in the file
|
||||
'draft-somebody-test-00.xml',
|
||||
None,
|
||||
'test_submission.txt',
|
||||
title='Not Correct Draft Title',
|
||||
def test_process_and_validate_submission(self):
|
||||
xml_data = {
|
||||
"title": "The Title",
|
||||
"authors": [{
|
||||
"name": "Jane Doe",
|
||||
"email": "jdoe@example.com",
|
||||
"affiliation": "Test Centre",
|
||||
"country": "UK",
|
||||
}],
|
||||
"xml_version": "3",
|
||||
}
|
||||
text_data = {
|
||||
"title": "The Title",
|
||||
"abstract": "This is an abstract.",
|
||||
"authors": [{
|
||||
"name": "John Doh",
|
||||
"email": "ignored@example.com",
|
||||
"affiliation": "Ignored",
|
||||
"country": "CA",
|
||||
}],
|
||||
"document_date": date_today(),
|
||||
"pages": 25,
|
||||
"words": 1234,
|
||||
"first_two_pages": "Pages One and Two",
|
||||
"file_size": 4321,
|
||||
"formal_languages": FormalLanguageName.objects.none(),
|
||||
}
|
||||
submission = SubmissionFactory(
|
||||
state_id="validating",
|
||||
file_types=".xml,.txt",
|
||||
)
|
||||
txt_path.open('w').write(txt.read())
|
||||
with self.assertRaisesMessage(SubmissionError, 'disagrees with submission title'):
|
||||
process_submission_text(submission)
|
||||
with mock.patch("ietf.submit.utils.process_submission_xml", return_value=xml_data):
|
||||
with mock.patch("ietf.submit.utils.process_submission_text", return_value=text_data):
|
||||
with mock.patch("ietf.submit.utils.render_missing_formats") as mock_render:
|
||||
with mock.patch("ietf.submit.utils.apply_checkers") as mock_checkers:
|
||||
process_and_validate_submission(submission)
|
||||
self.assertTrue(mock_render.called)
|
||||
self.assertTrue(mock_checkers.called)
|
||||
submission = Submission.objects.get(pk=submission.pk)
|
||||
self.assertEqual(submission.title, text_data["title"])
|
||||
self.assertEqual(submission.abstract, text_data["abstract"])
|
||||
self.assertEqual(submission.authors, xml_data["authors"])
|
||||
self.assertEqual(submission.document_date, text_data["document_date"])
|
||||
self.assertEqual(submission.pages, text_data["pages"])
|
||||
self.assertEqual(submission.words, text_data["words"])
|
||||
self.assertEqual(submission.first_two_pages, text_data["first_two_pages"])
|
||||
self.assertEqual(submission.file_size, text_data["file_size"])
|
||||
self.assertEqual(submission.xml_version, xml_data["xml_version"])
|
||||
|
||||
def test_status_of_validating_submission(self):
|
||||
s = SubmissionFactory(state_id='validating')
|
||||
url = urlreverse('ietf.submit.views.submission_status', kwargs={'submission_id': s.pk})
|
||||
r = self.client.get(url)
|
||||
self.assertContains(r, s.name)
|
||||
self.assertContains(r, 'still being processed and validated', status_code=200)
|
||||
self.assertContains(r, 'This submission is being processed and validated.', status_code=200)
|
||||
|
||||
@override_settings(IDSUBMIT_MAX_VALIDATION_TIME=datetime.timedelta(minutes=30))
|
||||
def test_cancel_stale_submissions(self):
|
||||
|
@ -3648,5 +3766,5 @@ class TestOldNamesAreProtected(BaseSubmitTestCase):
url = urlreverse("ietf.submit.views.upload_submission")
files = {}
files["xml"], _ = submission_file("draft-something-hascapitalletters-00", "draft-something-hascapitalletters-00.xml", None, "test_submission.xml")
r = self.client.post(url, files)
r = self.post_to_upload_submission(url, files)
self.assertContains(r,"Case-conflicting draft name found",status_code=200)

@ -11,7 +11,7 @@ import time
import traceback
import xml2rfc

from typing import Optional                      # pyflakes:ignore
from typing import Optional, Union               # pyflakes:ignore
from unidecode import unidecode

from django.conf import settings

@ -40,7 +40,7 @@ from ietf.name.models import StreamName, FormalLanguageName
from ietf.person.models import Person, Email
from ietf.community.utils import update_name_contains_indexes_with_new_doc
from ietf.submit.mail import ( announce_to_lists, announce_new_version, announce_to_authors,
send_approval_request, send_submission_confirmation, announce_new_wg_00 )
send_approval_request, send_submission_confirmation, announce_new_wg_00, send_manual_post_request )
from ietf.submit.models import ( Submission, SubmissionEvent, Preapproval, DraftSubmissionStateName,
SubmissionCheck, SubmissionExtResource )
from ietf.utils import log

@ -911,6 +911,9 @@ class SubmissionError(Exception):
"""Exception for errors during submission processing"""
pass

class InconsistentRevisionError(SubmissionError):
"""SubmissionError caused by an inconsistent revision"""


def staging_path(filename, revision, ext):
if len(ext) > 0 and ext[0] != '.':
@ -1128,102 +1131,169 @@ def _normalize_title(title):
return normalize_text(title)  # normalize whitespace


def process_submission_xml(submission):
def process_submission_xml(filename, revision):
"""Validate and extract info from an uploaded submission"""
xml_path = staging_path(submission.name, submission.rev, '.xml')
xml_path = staging_path(filename, revision, '.xml')
xml_draft = XMLDraft(xml_path)

if submission.name != xml_draft.filename:
raise SubmissionError('XML Internet-Draft filename disagrees with submission filename')
if submission.rev != xml_draft.revision:
raise SubmissionError('XML Internet-Draft revision disagrees with submission revision')

authors = xml_draft.get_author_list()
for a in authors:
if not a['email']:
raise SubmissionError(f'Missing email address for author {a}')

author_emails = [a['email'].lower() for a in authors]
submitter = get_person_from_name_email(**submission.submitter_parsed())  # the ** expands dict into kwargs
if not any(
email.address.lower() in author_emails
for email in submitter.email_set.filter(active=True)
):
raise SubmissionError(f'Submitter ({submitter}) is not one of the document authors')

# Fill in the submission data
submission.title = _normalize_title(xml_draft.get_title())
if not submission.title:
raise SubmissionError('Could not extract a valid title from the XML')
submission.authors = [
{key: auth[key] for key in ('name', 'email', 'affiliation', 'country')}
for auth in authors
]
submission.xml_version = xml_draft.xml_version
submission.save()
if filename != xml_draft.filename:
raise SubmissionError(
f"XML Internet-Draft filename ({xml_draft.filename}) "
f"disagrees with submission filename ({filename})"
)
if revision != xml_draft.revision:
raise SubmissionError(
f"XML Internet-Draft revision ({xml_draft.revision}) "
f"disagrees with submission revision ({revision})"
)
title = _normalize_title(xml_draft.get_title())
if not title:
raise SubmissionError("Could not extract a valid title from the XML")

return {
"filename": xml_draft.filename,
"rev": xml_draft.revision,
"title": title,
"authors": [
{key: auth[key] for key in ('name', 'email', 'affiliation', 'country')}
for auth in xml_draft.get_author_list()
],
"abstract": None,  # not supported from XML
"document_date": None,  # not supported from XML
"pages": None,  # not supported from XML
"words": None,  # not supported from XML
"first_two_pages": None,  # not supported from XML
"file_size": None,  # not supported from XML
"formal_languages": None,  # not supported from XML
"xml_version": xml_draft.xml_version,
}

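For reference, the refactored helper now returns a plain metadata dict instead of mutating the Submission object. A minimal illustrative call, with the draft name borrowed from the tests above rather than from this hunk, might look like this:

# Illustrative sketch only: parse the staged XML for a draft and inspect the result.
# Assumes draft-somebody-test-00.xml is already present in settings.IDSUBMIT_STAGING_PATH.
metadata = process_submission_xml("draft-somebody-test", "00")
print(metadata["title"], metadata["rev"], metadata["xml_version"])
# Fields that cannot be derived from XML (abstract, pages, words, file_size, ...) come back as None.
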
def process_submission_text(submission):
"""Validate/extract data from the text version of a submitted draft

This assumes the draft was uploaded as XML and extracts data that is not
currently available directly from the XML. Additional processing, e.g. from
get_draft_meta(), would need to be added in order to support direct text
draft uploads.
def _turn_into_unicode(s: Optional[Union[str, bytes]]):
"""Decode a possibly null string-like item as a string

Copied from ietf.submit.utils.get_draft_meta(), would be nice to
ditch this.
"""
text_path = staging_path(submission.name, submission.rev, '.txt')
if s is None:
return ""

if isinstance(s, str):
return s
else:
try:
return s.decode("utf-8")
except UnicodeDecodeError:
try:
return s.decode("latin-1")
except UnicodeDecodeError:
return ""


def _is_valid_email(addr):
try:
validate_email(addr)
except ValidationError:
return False
return True


def process_submission_text(filename, revision):
"""Validate/extract data from the text version of a submitted draft"""
text_path = staging_path(filename, revision, '.txt')
text_draft = PlaintextDraft.from_file(text_path)

if submission.name != text_draft.filename:
if filename != text_draft.filename:
raise SubmissionError(
f'Text Internet-Draft filename ({text_draft.filename}) disagrees with submission filename ({submission.name})'
f"Text Internet-Draft filename ({text_draft.filename}) "
f"disagrees with submission filename ({filename})"
)
if submission.rev != text_draft.revision:
if revision != text_draft.revision:
raise SubmissionError(
f'Text Internet-Draft revision ({text_draft.revision}) disagrees with submission revision ({submission.rev})')
text_title = _normalize_title(text_draft.get_title())
if not text_title:
raise SubmissionError('Could not extract a valid title from the text')
if text_title != submission.title:
raise SubmissionError(
f'Text Internet-Draft title ({text_title}) disagrees with submission title ({submission.title})')
f"Text Internet-Draft revision ({text_draft.revision}) "
f"disagrees with submission revision ({revision})"
)
title = _normalize_title(text_draft.get_title())
if not title:
# This test doesn't work well - the text_draft parser tends to grab "Abstract" as
# the title if there's an empty title.
raise SubmissionError("Could not extract a title from the text")

submission.abstract = text_draft.get_abstract()
submission.document_date = text_draft.get_creation_date()
submission.pages = text_draft.get_pagecount()
submission.words = text_draft.get_wordcount()
submission.first_two_pages = ''.join(text_draft.pages[:2])
submission.file_size = os.stat(text_path).st_size
submission.save()

submission.formal_languages.set(
FormalLanguageName.objects.filter(
# Drops \r, \n, <, >. Based on get_draft_meta() behavior
trans_table = str.maketrans("", "", "\r\n<>")
authors = [
{
"name": fullname.translate(trans_table).strip(),
"email": _turn_into_unicode(email if _is_valid_email(email) else ""),
"affiliation": _turn_into_unicode(company),
"country": _turn_into_unicode(country),
}
for (fullname, _, _, _, _, email, country, company) in text_draft.get_author_list()
]
return {
"filename": text_draft.filename,
"rev": text_draft.revision,
"title": _normalize_title(text_draft.get_title()),
"authors": authors,
"abstract": text_draft.get_abstract(),
"document_date": text_draft.get_creation_date(),
"pages": text_draft.get_pagecount(),
"words": text_draft.get_wordcount(),
"first_two_pages": ''.join(text_draft.pages[:2]),
"file_size": os.stat(text_path).st_size,
"formal_languages": FormalLanguageName.objects.filter(
slug__in=text_draft.get_formal_languages()
)
)
),
"xml_version": None,  # not supported from text
}

def process_uploaded_submission(submission):
def abort_submission(error):
cancel_submission(submission)
create_submission_event(None, submission, f'Submission rejected: {error}')
def process_and_validate_submission(submission):
"""Process and validate a submission

if submission.state_id != 'validating':
log.log(f'Submission {submission.pk} is not in "validating" state, skipping.')
return  # do nothing

if submission.file_types != '.xml':
abort_submission('Only XML Internet-Draft submissions can be processed.')
Raises SubmissionError if an error is encountered.
"""
if len(set(submission.file_types.split(",")).intersection({".xml", ".txt"})) == 0:
raise SubmissionError("Require XML and/or text format to process an Internet-Draft submission.")

try:
process_submission_xml(submission)
if check_submission_revision_consistency(submission):
xml_metadata = None
# Parse XML first, if we have it
if ".xml" in submission.file_types:
xml_metadata = process_submission_xml(submission.name, submission.rev)
render_missing_formats(submission)  # makes HTML and text, unless text was uploaded
# Parse text, whether uploaded or generated from XML
text_metadata = process_submission_text(submission.name, submission.rev)

if xml_metadata and xml_metadata["title"] != text_metadata["title"]:
raise SubmissionError(
'Document revision inconsistency error in the database. '
'Please contact the secretariat for assistance.'
f"Text Internet-Draft title ({text_metadata['title']}) "
f"disagrees with XML Internet-Draft title ({xml_metadata['title']})"
)
render_missing_formats(submission)
process_submission_text(submission)

# Fill in the submission from the parsed XML/text metadata
if xml_metadata is not None:
# Items preferred / only available from XML
submission.xml_version = xml_metadata["xml_version"]
submission.authors = xml_metadata["authors"]
else:
# Items to get from text only if XML not available
submission.authors = text_metadata["authors"]

# Items always to get from text, even when XML is available
submission.title = text_metadata["title"]  # verified above this agrees with XML, if present
submission.abstract = text_metadata["abstract"]
submission.document_date = text_metadata["document_date"]
submission.pages = text_metadata["pages"]
submission.words = text_metadata["words"]
submission.first_two_pages = text_metadata["first_two_pages"]
submission.file_size = text_metadata["file_size"]
submission.save()
submission.formal_languages.set(text_metadata["formal_languages"])

consistency_error = check_submission_revision_consistency(submission)
if consistency_error:
raise InconsistentRevisionError(consistency_error)
set_extresources_from_existing_draft(submission)
apply_checkers(
submission,
@ -1235,16 +1305,105 @@ def process_uploaded_submission(submission):
errors = [c.message for c in submission.checks.filter(passed__isnull=False) if not c.passed]
if len(errors) > 0:
raise SubmissionError('Checks failed: ' + ' / '.join(errors))
except SubmissionError as err:
abort_submission(err)
except SubmissionError:
raise  # pass SubmissionErrors up the stack
except Exception:
# convert other exceptions into SubmissionErrors
log.log(f'Unexpected exception while processing submission {submission.pk}.')
log.log(traceback.format_exc())
abort_submission('A system error occurred while processing the submission.')
raise SubmissionError('A system error occurred while processing the submission.')

# if we get here and are still "validating", accept the draft
if submission.state_id == 'validating':
submission.state_id = 'uploaded'

def submitter_is_author(submission):
submitter = get_person_from_name_email(**submission.submitter_parsed())
if submitter:
author_emails = [
author["email"].strip().lower()
for author in submission.authors
if "email" in author
]
return any(
email.address.lower() in author_emails
for email in submitter.email_set.filter(active=True)
)
return False


def all_authors_have_emails(submission):
return all(a["email"] for a in submission.authors)


def process_and_accept_uploaded_submission(submission):
"""Process, validate, and, if valid, accept an uploaded submission

Requires that the submitter already be set and is an author of the submitted draft.
The submission must be in the "validating" state. On success, it will be in the
"posted" state. On error, it wil be in the "cancel" state.
"""
if submission.state_id != "validating":
log.log(f'Submission {submission.pk} is not in "validating" state, skipping.')
return  # do nothing

if submission.file_types != '.xml':
# permit only XML uploads for automatic acceptance
cancel_submission(submission)
create_submission_event(
None,
submission,
"Only XML Internet-Draft submissions can be processed.",
)
return

try:
process_and_validate_submission(submission)
except SubmissionError as err:
cancel_submission(submission)  # changes Submission.state
create_submission_event(None, submission, f"Submission rejected: {err}")
return

if not all_authors_have_emails(submission):
cancel_submission(submission)  # changes Submission.state
create_submission_event(
None,
submission,
"Submission rejected: Email address not found for all authors"
)
return

if not submitter_is_author(submission):
cancel_submission(submission)  # changes Submission.state
create_submission_event(
None,
submission,
f"Submission rejected: Submitter ({submission.submitter}) is not one of the document authors",
)
return

create_submission_event(None, submission, desc="Completed submission validation checks")
accept_submission(submission)


def process_uploaded_submission(submission):
"""Process and validate an uploaded submission

The submission must be in the "validating" state. On success, it will be in the "uploaded"
state. On error, it will be in the "cancel" state.
"""
if submission.state_id != "validating":
log.log(f'Submission {submission.pk} is not in "validating" state, skipping.')
return  # do nothing

try:
process_and_validate_submission(submission)
except InconsistentRevisionError as consistency_error:
submission.state_id = "manual"
submission.save()
create_submission_event(None, submission, desc="Uploaded submission (diverted to manual process)")
send_manual_post_request(None, submission, errors=dict(consistency=str(consistency_error)))
except SubmissionError as err:
cancel_submission(submission)  # changes Submission.state
create_submission_event(None, submission, f"Submission rejected: {err}")
else:
submission.state_id = "uploaded"
submission.save()
create_submission_event(None, submission, desc="Completed submission validation checks")
accept_submission(submission)

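The Celery wrappers in ietf/submit/tasks.py are exercised by the tests above but are not part of this hunk. A plausible minimal sketch of what test_process_and_accept_uploaded_submission_task implies, assuming Celery's shared_task decorator and the imports used elsewhere in this file (the real tasks.py may differ), is:

from celery import shared_task

@shared_task
def process_and_accept_uploaded_submission_task(submission_id):
    # Look up the Submission and hand it to the processing function;
    # an unknown id is logged and ignored, as the tests expect.
    try:
        submission = Submission.objects.get(pk=submission_id)
    except Submission.DoesNotExist:
        log.log(f"process_and_accept_uploaded_submission_task called with missing submission_id={submission_id}")
    else:
        process_and_accept_uploaded_submission(submission)
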
@ -12,7 +12,7 @@ from urllib.parse import urljoin
|
|||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.models import User
|
||||
from django.db import DataError, transaction
|
||||
from django.db import transaction
|
||||
from django.urls import reverse as urlreverse
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.http import HttpResponseRedirect, Http404, HttpResponseForbidden, HttpResponse, JsonResponse
|
||||
|
@ -30,13 +30,13 @@ from ietf.ietfauth.utils import has_role, role_required
|
|||
from ietf.mailtrigger.utils import gather_address_lists
|
||||
from ietf.message.models import Message, MessageAttachment
|
||||
from ietf.person.models import Email
|
||||
from ietf.submit.forms import ( SubmissionManualUploadForm, SubmissionAutoUploadForm, AuthorForm,
|
||||
SubmitterForm, EditSubmissionForm, PreapprovalForm, ReplacesForm, SubmissionEmailForm, MessageModelForm,
|
||||
DeprecatedSubmissionAutoUploadForm )
|
||||
from ietf.submit.forms import (SubmissionAutoUploadForm, AuthorForm, SubmitterForm, EditSubmissionForm,
|
||||
PreapprovalForm, ReplacesForm, SubmissionEmailForm, MessageModelForm,
|
||||
DeprecatedSubmissionAutoUploadForm, SubmissionManualUploadForm)
|
||||
from ietf.submit.mail import send_full_url, send_manual_post_request, add_submission_email, get_reply_to
|
||||
from ietf.submit.models import (Submission, Preapproval, SubmissionExtResource,
|
||||
DraftSubmissionStateName, SubmissionEmailEvent )
|
||||
from ietf.submit.tasks import process_uploaded_submission_task, poke
|
||||
from ietf.submit.tasks import process_uploaded_submission_task, process_and_accept_uploaded_submission_task, poke
|
||||
from ietf.submit.utils import ( approvable_submissions_for_user, preapprovals_for_user,
|
||||
recently_approved_by_user, validate_submission, create_submission_event, docevent_from_submission,
|
||||
post_submission, cancel_submission, rename_submission_files, remove_submission_files, get_draft_meta,
|
||||
|
@ -52,64 +52,35 @@ from ietf.utils.timezone import date_today
|
|||
|
||||
|
||||
def upload_submission(request):
|
||||
if request.method == 'POST':
|
||||
try:
|
||||
form = SubmissionManualUploadForm(request, data=request.POST, files=request.FILES)
|
||||
if form.is_valid():
|
||||
log('got valid submission form for %s' % form.filename)
|
||||
saved_files = save_files(form)
|
||||
authors, abstract, file_name, file_size = get_draft_meta(form, saved_files)
|
||||
|
||||
submission = get_submission(form)
|
||||
try:
|
||||
fill_in_submission(form, submission, authors, abstract, file_size)
|
||||
except Exception as e:
|
||||
log("Exception: %s\n" % e)
|
||||
if submission and submission.id:
|
||||
submission.delete()
|
||||
raise
|
||||
|
||||
apply_checkers(submission, file_name)
|
||||
|
||||
consistency_error = check_submission_revision_consistency(submission)
|
||||
if consistency_error:
|
||||
# A data consistency problem diverted this to manual processing - send notification
|
||||
submission.state = DraftSubmissionStateName.objects.get(slug="manual")
|
||||
submission.save()
|
||||
create_submission_event(request, submission, desc="Uploaded submission (diverted to manual process)")
|
||||
send_manual_post_request(request, submission, errors=dict(consistency=consistency_error))
|
||||
else:
|
||||
# This is the usual case
|
||||
create_submission_event(request, submission, desc="Uploaded submission")
|
||||
|
||||
# Don't add an "Uploaded new revision doevent yet, in case of cancellation
|
||||
|
||||
return redirect("ietf.submit.views.submission_status", submission_id=submission.pk, access_token=submission.access_token())
|
||||
except IOError as e:
|
||||
if "read error" in str(e): # The server got an IOError when trying to read POST data
|
||||
form = SubmissionManualUploadForm(request=request)
|
||||
form._errors = {}
|
||||
form._errors["__all__"] = form.error_class(["There was a failure receiving the complete form data -- please try again."])
|
||||
else:
|
||||
raise
|
||||
except ValidationError as e:
|
||||
form = SubmissionManualUploadForm(request=request)
|
||||
form._errors = {}
|
||||
form._errors["__all__"] = form.error_class(["There was a failure converting the xml file to text -- please verify that your xml file is valid. (%s)" % e.message])
|
||||
if debug.debug:
|
||||
raise
|
||||
except DataError as e:
|
||||
form = SubmissionManualUploadForm(request=request)
|
||||
form._errors = {}
|
||||
form._errors["__all__"] = form.error_class(["There was a failure processing your upload -- please verify that your Internet-Draft passes idnits. (%s)" % e.message])
|
||||
if debug.debug:
|
||||
raise
if request.method == "POST":
form = SubmissionManualUploadForm(
request, data=request.POST, files=request.FILES
)
if form.is_valid():
submission = get_submission(form)
submission.state = DraftSubmissionStateName.objects.get(slug="validating")
submission.remote_ip = form.remote_ip
submission.file_types = ",".join(form.file_types)
submission.submission_date = date_today()
submission.save()
clear_existing_files(form)
save_files(form)
create_submission_event(request, submission, desc="Uploaded submission")
# Wrap in on_commit so the delayed task cannot start until the view is done with the DB
transaction.on_commit(
lambda: process_uploaded_submission_task.delay(submission.pk)
)
return redirect(
"ietf.submit.views.submission_status",
submission_id=submission.pk,
access_token=submission.access_token(),
)
else:
form = SubmissionManualUploadForm(request=request)

return render(request, 'submit/upload_submission.html',
{'selected': 'index',
'form': form})
return render(
request, "submit/upload_submission.html", {"selected": "index", "form": form}
)

@csrf_exempt
def api_submission(request):
@ -173,7 +144,7 @@ def api_submission(request):
|
|||
|
||||
# Wrap in on_commit so the delayed task cannot start until the view is done with the DB
|
||||
transaction.on_commit(
|
||||
lambda: process_uploaded_submission_task.delay(submission.pk)
|
||||
lambda: process_and_accept_uploaded_submission_task.delay(submission.pk)
|
||||
)
|
||||
return JsonResponse(
|
||||
{
|
||||
|
@ -222,7 +193,7 @@ def api_submit(request):
|
|||
submission = None
|
||||
def err(code, text):
|
||||
return HttpResponse(text, status=code, content_type='text/plain')
|
||||
|
||||
|
||||
if request.method == 'GET':
|
||||
return render(request, 'submit/api_submit_info.html')
|
||||
elif request.method == 'POST':
|
||||
|
@ -301,7 +272,7 @@ def api_submit(request):
|
|||
except Exception as e:
|
||||
exception = e
|
||||
raise
|
||||
return err(500, "Exception: %s" % str(e))
|
||||
return err(500, "Exception: %s" % str(e))
|
||||
finally:
|
||||
if exception and submission:
|
||||
remove_submission_files(submission)
|
||||
|
@ -470,7 +441,7 @@ def submission_status(request, submission_id, access_token=None):
|
|||
update_submission_external_resources(submission, extresources)
|
||||
|
||||
approvals_received = submitter_form.cleaned_data['approvals_received']
|
||||
|
||||
|
||||
if submission.rev == '00' and submission.group and not submission.group.is_active:
|
||||
permission_denied(request, 'Posting a new Internet-Draft for an inactive group is not permitted.')
|
||||
|
||||
|
@ -710,7 +681,7 @@ def confirm_submission(request, submission_id, auth_token):
|
|||
messages.error(request, 'The submission is not in a state where it can be cancelled.')
|
||||
|
||||
return redirect("ietf.submit.views.submission_status", submission_id=submission_id)
|
||||
|
||||
|
||||
else:
|
||||
raise RuntimeError("Unexpected state in confirm_submission()")
|
||||
|
||||
|
@ -783,7 +754,7 @@ def manualpost(request):
|
|||
'''
|
||||
|
||||
manual = Submission.objects.filter(state_id = "manual").distinct()
|
||||
|
||||
|
||||
for s in manual:
|
||||
s.passes_checks = all([ c.passed!=False for c in s.checks.all() ])
|
||||
s.errors = validate_submission(s)
|
||||
|
@ -799,7 +770,7 @@ def manualpost(request):
|
|||
def cancel_waiting_for_draft(request):
|
||||
if request.method == 'POST':
|
||||
can_cancel = has_role(request.user, "Secretariat")
|
||||
|
||||
|
||||
if not can_cancel:
|
||||
permission_denied(request, 'You do not have permission to perform this action.')
|
||||
|
||||
|
@ -808,12 +779,12 @@ def cancel_waiting_for_draft(request):
|
|||
|
||||
submission = get_submission_or_404(submission_id, access_token = access_token)
|
||||
cancel_submission(submission)
|
||||
|
||||
|
||||
create_submission_event(request, submission, "Cancelled submission")
|
||||
if (submission.rev != "00"):
|
||||
# Add a doc event
|
||||
docevent_from_submission(submission, "Cancelled submission for rev {}".format(submission.rev))
|
||||
|
||||
|
||||
return redirect("ietf.submit.views.manualpost")
|
||||
|
||||
|
||||
|
@ -826,19 +797,19 @@ def add_manualpost_email(request, submission_id=None, access_token=None):
|
|||
button_text = request.POST.get('submit', '')
|
||||
if button_text == 'Cancel':
|
||||
return redirect("submit/manual_post.html")
|
||||
|
||||
|
||||
form = SubmissionEmailForm(request.POST)
|
||||
if form.is_valid():
|
||||
submission_pk = form.cleaned_data['submission_pk']
|
||||
message = form.cleaned_data['message']
|
||||
#in_reply_to = form.cleaned_data['in_reply_to']
|
||||
# create Message
|
||||
|
||||
|
||||
if form.cleaned_data['direction'] == 'incoming':
|
||||
msgtype = 'msgin'
|
||||
else:
|
||||
msgtype = 'msgout'
|
||||
|
||||
|
||||
submission, submission_email_event = (
|
||||
add_submission_email(request=request,
|
||||
remote_ip=remote_ip(request),
|
||||
|
@ -848,15 +819,15 @@ def add_manualpost_email(request, submission_id=None, access_token=None):
|
|||
message = message,
|
||||
by = request.user.person,
|
||||
msgtype = msgtype) )
|
||||
|
||||
|
||||
messages.success(request, 'Email added.')
|
||||
|
||||
|
||||
try:
|
||||
draft = Document.objects.get(name=submission.name)
|
||||
except Document.DoesNotExist:
|
||||
# Assume this is revision 00 - we'll do this later
|
||||
draft = None
|
||||
|
||||
|
||||
if (draft != None):
|
||||
e = AddedMessageEvent(type="added_message", doc=draft)
|
||||
e.message = submission_email_event.submissionemailevent.message
|
||||
|
@ -866,7 +837,7 @@ def add_manualpost_email(request, submission_id=None, access_token=None):
|
|||
e.desc = submission_email_event.desc
|
||||
e.time = submission_email_event.time
|
||||
e.save()
|
||||
|
||||
|
||||
return redirect("ietf.submit.views.manualpost")
|
||||
except ValidationError as e:
|
||||
form = SubmissionEmailForm(request.POST)
|
||||
|
@ -883,7 +854,7 @@ def add_manualpost_email(request, submission_id=None, access_token=None):
|
|||
initial['submission_pk'] = submission.pk
|
||||
else:
|
||||
initial['direction'] = 'incoming'
|
||||
|
||||
|
||||
form = SubmissionEmailForm(initial=initial)
|
||||
|
||||
return render(request, 'submit/add_submit_email.html',dict(form=form))
|
||||
|
@ -914,20 +885,20 @@ def send_submission_email(request, submission_id, message_id=None):
|
|||
reply_to = form.cleaned_data['reply_to'],
|
||||
body = form.cleaned_data['body']
|
||||
)
|
||||
|
||||
|
||||
in_reply_to_id = form.cleaned_data['in_reply_to_id']
|
||||
in_reply_to = None
|
||||
rp = ""
|
||||
|
||||
|
||||
if in_reply_to_id:
|
||||
rp = " reply"
|
||||
try:
|
||||
in_reply_to = Message.objects.get(id=in_reply_to_id)
|
||||
except Message.DoesNotExist:
|
||||
log("Unable to retrieve in_reply_to message: %s" % in_reply_to_id)
|
||||
|
||||
|
||||
desc = "Sent message {} - manual post - {}-{}".format(rp,
|
||||
submission.name,
|
||||
submission.name,
|
||||
submission.rev)
|
||||
SubmissionEmailEvent.objects.create(
|
||||
submission = submission,
|
||||
|
@ -941,14 +912,14 @@ def send_submission_email(request, submission_id, message_id=None):
|
|||
send_mail_message(None,msg)
|
||||
|
||||
messages.success(request, 'Email sent.')
|
||||
return redirect('ietf.submit.views.submission_status',
|
||||
return redirect('ietf.submit.views.submission_status',
|
||||
submission_id=submission.id,
|
||||
access_token=submission.access_token())
|
||||
|
||||
else:
|
||||
reply_to = get_reply_to()
|
||||
msg = None
|
||||
|
||||
|
||||
if not message_id:
|
||||
addrs = gather_address_lists('sub_confirmation_requested',submission=submission).as_strings(compact=False)
|
||||
to_email = addrs.to
|
||||
|
@ -958,7 +929,7 @@ def send_submission_email(request, submission_id, message_id=None):
|
|||
try:
|
||||
submitEmail = SubmissionEmailEvent.objects.get(id=message_id)
|
||||
msg = submitEmail.message
|
||||
|
||||
|
||||
if msg:
|
||||
to_email = msg.frm
|
||||
cc = msg.cc
|
||||
|
@ -979,24 +950,24 @@ def send_submission_email(request, submission_id, message_id=None):
|
|||
'subject': subject,
|
||||
'reply_to': reply_to,
|
||||
}
|
||||
|
||||
|
||||
if msg:
|
||||
initial['in_reply_to_id'] = msg.id
|
||||
|
||||
|
||||
form = MessageModelForm(initial=initial)
|
||||
|
||||
return render(request, "submit/email.html", {
|
||||
'submission': submission,
|
||||
'access_token': submission.access_token(),
|
||||
'form':form})
|
||||
|
||||
|
||||
|
||||
def show_submission_email_message(request, submission_id, message_id, access_token=None):
|
||||
submission = get_submission_or_404(submission_id, access_token)
|
||||
|
||||
submitEmail = get_object_or_404(SubmissionEmailEvent, pk=message_id)
|
||||
submitEmail = get_object_or_404(SubmissionEmailEvent, pk=message_id)
|
||||
attachments = submitEmail.message.messageattachment_set.all()
|
||||
|
||||
|
||||
return render(request, 'submit/submission_email.html',
|
||||
{'submission': submission,
|
||||
'message': submitEmail,
|
||||
|
@ -1007,25 +978,25 @@ def show_submission_email_attachment(request, submission_id, message_id, filenam
|
|||
|
||||
message = get_object_or_404(SubmissionEmailEvent, pk=message_id)
|
||||
|
||||
attach = get_object_or_404(MessageAttachment,
|
||||
message=message.message,
|
||||
attach = get_object_or_404(MessageAttachment,
|
||||
message=message.message,
|
||||
filename=filename)
|
||||
|
||||
|
||||
if attach.encoding == "base64":
|
||||
body = base64.b64decode(attach.body)
|
||||
else:
|
||||
body = attach.body.encode('utf-8')
|
||||
|
||||
|
||||
if attach.content_type is None:
|
||||
content_type='text/plain'
|
||||
else:
|
||||
content_type=attach.content_type
|
||||
|
||||
|
||||
response = HttpResponse(body, content_type=content_type)
|
||||
response['Content-Disposition'] = 'attachment; filename=%s' % attach.filename
|
||||
response['Content-Length'] = len(body)
|
||||
return response
|
||||
|
||||
|
||||
|
||||
def get_submission_or_404(submission_id, access_token=None):
|
||||
submission = get_object_or_404(Submission, pk=submission_id)
|
||||
|
|
|
@ -10,7 +10,10 @@
|
|||
{% block nomcom_content %}
|
||||
{% origin %}
|
||||
<h2>Volunteers for {{ nomcom.group }}</h2>
|
||||
{% if not public %}<a href="{% url 'ietf.nomcom.views.private_volunteers_csv' year=year %}">Download as csv</a>{% endif %}
|
||||
{% if not public %}
|
||||
<a class="btn btn-primary" role="button" href="{% url 'ietf.nomcom.views.private_volunteers_csv' year=year %}">Download as csv</a>
|
||||
<a class="btn btn-primary" role="button" href="{% url 'ietf.nomcom.views.qualified_volunteer_list_for_announcement' year=year %}">View simplified list of only qualified volunteers</a>
|
||||
{% endif %}
|
||||
{% regroup volunteers by eligible as volunteers_by_eligibility %}
|
||||
{% for eligibility_group in volunteers_by_eligibility %}
|
||||
<h3 class="mt-3">{{ eligibility_group.grouper|yesno:"Eligible, Not Eligible" }}</h3>
|
||||
|
|
|
@ -133,17 +133,31 @@
|
|||
This submission is awaiting the first Internet-Draft upload.
|
||||
</p>
|
||||
{% elif submission.state_id == 'validating' %}
|
||||
<p class="alert alert-warning my-3">
|
||||
This submission is still being processed and validated. This normally takes a few minutes after
|
||||
<div class="alert alert-danger my-3">
|
||||
Notice: The Internet-Draft submission process has changed as of Datatracker version 10.3.0.
|
||||
Your Internet-Draft is currently being processed and validated asynchronously. Results will be
|
||||
displayed at this URL when they are available. If JavaScript is enabled in your
|
||||
browser, this page will refreshed automatically. If JavaScript is not enabled, or if you
|
||||
disable the automatic refresh with the toggle below, please reload this page in a few
|
||||
minutes to see the results.
|
||||
</div>
|
||||
<div class="alert alert-warning my-3">
|
||||
This submission is being processed and validated. This normally takes a few minutes after
|
||||
submission.
|
||||
{% with earliest_event=submission.submissionevent_set.last %}
|
||||
{% if earliest_event %}
|
||||
It has been {{ earliest_event.time|timesince }} since submission.
|
||||
{% endif %}
|
||||
{% if earliest_event %}
|
||||
Your draft was uploaded at {{ earliest_event.time }}<span id="time-since-uploaded" class="d-none">
|
||||
({{ earliest_event.time|timesince }} ago)</span>.
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
Please contact the secretariat for assistance if it has been more than an hour.
|
||||
</p>
|
||||
{% else %}
|
||||
|
||||
<div class="form-check form-switch mt-3 d-none">{# hide with d-none unless javascript makes it visible #}
|
||||
<input class="form-check-input" type="checkbox" id="enableAutoReload" checked>
|
||||
<label class="form-check-label" for="enableAutoReload"> Refresh automatically </label>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<h2 class="mt-5">Meta-data from the submission</h2>
|
||||
{% if errors %}
|
||||
<div class="alert alert-danger my-3">
|
||||
|
|
|
@ -47,7 +47,8 @@
|
|||
aria-controls="other-formats">
|
||||
<input class="form-check-input"
|
||||
id="checkbox"
|
||||
type="checkbox">
|
||||
type="checkbox"
|
||||
{% if form.errors.txt %}checked {% endif %}>
|
||||
Submit other formats
|
||||
</label>
|
||||
</div>
|
||||
|
@ -60,14 +61,8 @@
|
|||
However, if you cannot for some reason submit XML, you must
|
||||
submit a plaintext rendering of your I-D.
|
||||
</p>
|
||||
{% bootstrap_label '<i class="bi bi-file-pdf"></i> PDF rendering of the I-D' label_class="form-label fw-bold" %}
|
||||
{% bootstrap_field form.pdf show_label=False %}
|
||||
<p class="form-text">
|
||||
Optional to submit, will be auto-generated based
|
||||
on the submitted XML.
|
||||
</p>
|
||||
</div>
|
||||
{% bootstrap_form_errors form %}
|
||||
{% bootstrap_form_errors form type="non_fields" %}
|
||||
{% bootstrap_button button_type="submit" name="upload" content="Upload" %}
|
||||
</form>
|
||||
{% include "submit/problem-reports-footer.html" %}
|
||||
|
|
|
@ -73,6 +73,7 @@ class Command(BaseCommand):
|
|||
data = json.load(file)
|
||||
except ValueError as e:
|
||||
raise CommandError("Failure to read json data from %s: %s" % (filename, e))
|
||||
file.close()
|
||||
version = version or data["version"]
|
||||
if not version in data:
|
||||
raise CommandError("There is no data for version %s available in %s" % (version, filename))
|
||||
|
|
|
@ -12,23 +12,23 @@ def pipe(cmd, str=None):
if str and len(str) > 4096:  # XXX: Hardcoded Linux 2.4, 2.6 pipe buffer size
bufsize = len(str)

pipe = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=bufsize, shell=True)
if not str is None:
pipe.stdin.write(str)
pipe.stdin.close()
with Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=bufsize, shell=True) as pipe:
if not str is None:
pipe.stdin.write(str)
pipe.stdin.close()

out = b""
err = b""
while True:
str = pipe.stdout.read()
if str:
out += str
code = pipe.poll()
if code != None:
err = pipe.stderr.read()
break
if len(out) >= MAX:
err = "Output exceeds %s bytes and has been truncated" % MAX
break
out = b""
err = b""
while True:
str = pipe.stdout.read()
if str:
out += str
code = pipe.poll()
if code != None:
err = pipe.stderr.read()
break
if len(out) >= MAX:
err = "Output exceeds %s bytes and has been truncated" % MAX
break

return (code, out, err)

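For context, pipe() keeps its existing contract and only gains context-managed handling of the Popen object: it still returns a (code, out, err) triple with bytes output. A minimal illustrative call, not part of this diff, might be:

# Illustrative only: run a shell command through the refactored helper.
code, out, err = pipe("echo hello")
assert code == 0
assert out.strip() == b"hello"
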
@ -210,6 +210,7 @@ class TestCase(django.test.TestCase):
|
|||
'INTERNET_ALL_DRAFTS_ARCHIVE_DIR',
|
||||
'INTERNET_DRAFT_ARCHIVE_DIR',
|
||||
'INTERNET_DRAFT_PATH',
|
||||
'BIBXML_BASE_PATH',
|
||||
]
|
||||
|
||||
parser = html5lib.HTMLParser(strict=True)
|
||||
|
@ -314,4 +315,4 @@ class TestCase(django.test.TestCase):
|
|||
for dir in self._ietf_temp_dirs.values():
|
||||
shutil.rmtree(dir)
|
||||
self.requests_mock.stop()
|
||||
super().tearDown()
|
||||
super().tearDown()
|
||||
|
|
|
@ -73,7 +73,7 @@ def validate_file_size(file, missing_ok=False):

def validate_mime_type(file, valid, missing_ok=False):
try:
file.open()
file.open()  # Callers expect this to remain open. Consider refactoring.
except FileNotFoundError:
if missing_ok:
return None, None

22  package.json
@ -7,27 +7,27 @@
|
|||
"legacy:build": "parcel build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@fullcalendar/bootstrap5": "6.1.5",
|
||||
"@fullcalendar/core": "6.1.5",
|
||||
"@fullcalendar/bootstrap5": "6.1.6",
|
||||
"@fullcalendar/core": "6.1.6",
|
||||
"@fullcalendar/daygrid": "6.1.5",
|
||||
"@fullcalendar/icalendar": "6.1.5",
|
||||
"@fullcalendar/interaction": "6.1.5",
|
||||
"@fullcalendar/list": "6.1.5",
|
||||
"@fullcalendar/luxon2": "6.1.5",
|
||||
"@fullcalendar/timegrid": "6.1.5",
|
||||
"@fullcalendar/vue3": "6.1.5",
|
||||
"@fullcalendar/icalendar": "6.1.6",
|
||||
"@fullcalendar/interaction": "6.1.6",
|
||||
"@fullcalendar/list": "6.1.6",
|
||||
"@fullcalendar/luxon2": "6.1.6",
|
||||
"@fullcalendar/timegrid": "6.1.6",
|
||||
"@fullcalendar/vue3": "6.1.6",
|
||||
"@popperjs/core": "2.11.7",
|
||||
"@twuni/emojify": "1.0.2",
|
||||
"bootstrap": "5.2.3",
|
||||
"bootstrap-icons": "1.10.4",
|
||||
"browser-fs-access": "0.33.1",
|
||||
"caniuse-lite": "1.0.30001469",
|
||||
"caniuse-lite": "1.0.30001481",
|
||||
"d3": "7.8.4",
|
||||
"file-saver": "2.0.5",
|
||||
"highcharts": "10.3.3",
|
||||
"ical.js": "1.5.0",
|
||||
"jquery": "3.6.4",
|
||||
"js-cookie": "3.0.1",
|
||||
"js-cookie": "3.0.5",
|
||||
"list.js": "2.3.1",
|
||||
"lodash": "4.17.21",
|
||||
"lodash-es": "4.17.21",
|
||||
|
@ -59,7 +59,7 @@
|
|||
"@vitejs/plugin-vue": "4.1.0",
|
||||
"browserlist": "latest",
|
||||
"c8": "7.13.0",
|
||||
"eslint": "8.37.0",
|
||||
"eslint": "8.39.0",
|
||||
"eslint-config-standard": "17.0.0",
|
||||
"eslint-plugin-cypress": "2.13.2",
|
||||
"eslint-plugin-import": "2.27.5",
|
||||
|
|
|
@ -18,7 +18,7 @@ django-cors-headers>=3.11.0
django-debug-toolbar>=3.2.4
django-form-utils>=1.0.3        # Only one use, in the liaisons app. Last release was in 2015.
django-markup>=1.5              # Limited use - need to reconcile against direct use of markdown
django-oidc-provider>=0.7
django-oidc-provider>=0.7,<0.8  # 0.8 dropped Django 2 support
django-password-strength>=1.2.1
django-referrer-policy>=1.0
django-simple-history>=3.0.0

@ -43,7 +43,6 @@ lxml>=4.8.0,<5
markdown>=3.3.6
mock>=4.0.3                     # Used only by tests, of course
mypy>=0.782,<0.790              # Version requirements determined by django-stubs.
mysqlclient>=2.1.0
oic>=1.3                        # Used only by tests
Pillow>=9.1.0
psycopg2<2.9

159  yarn.lock
@ -272,10 +272,10 @@ __metadata:
|
|||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@eslint/js@npm:8.37.0":
|
||||
version: 8.37.0
|
||||
resolution: "@eslint/js@npm:8.37.0"
|
||||
checksum: 7a07fb085c94ce1538949012c292fd3a6cd734f149bc03af6157dfbd8a7477678899ef57b4a27e15b36470a997389ad79a0533d5880c71e67720ae1a7de7c62d
|
||||
"@eslint/js@npm:8.39.0":
|
||||
version: 8.39.0
|
||||
resolution: "@eslint/js@npm:8.39.0"
|
||||
checksum: 63fe36e2bfb5ff5705d1c1a8ccecd8eb2f81d9af239713489e767b0e398759c0177fcc75ad62581d02942f2776903a8496d5fae48dc2d883dff1b96fcb19e9e2
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
|
@ -302,25 +302,25 @@ __metadata:
languageName: node
linkType: hard

"@fullcalendar/bootstrap5@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/bootstrap5@npm:6.1.5"
"@fullcalendar/bootstrap5@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/bootstrap5@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.5
checksum: c2f886582344e268bdb342ef6ae2152a9ffe42f4ba48ab1eaa54414413cf52dbe3fb2c423efb44ca491318a53bb17bc476602b9a21672b07dd3bec48eec9345d
"@fullcalendar/core": ~6.1.6
checksum: 09f2bdf7dc6811c6069e76edf8455e0ef833da1859ab91aa4d97ca1dbd652219e81f0332fa5d311edac35231bd6fa022f1c0607b995299a1b17357b317f8c1e8
languageName: node
linkType: hard

"@fullcalendar/core@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/core@npm:6.1.5"
"@fullcalendar/core@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/core@npm:6.1.6"
dependencies:
preact: ~10.12.1
checksum: 6892c11c40899fc986edc3d36b8e764c8e78f1c53df2c6e23d418d0191fc0bd1562bd0d9cfc74fd986d02ac1e18c8a4a9d34a40b8329964cc9fb9f8ab8367635
checksum: 72ec698bd226ea76cef106b9eecd4391a0434b13f9f02f663a84720d5b1e0ca8db21f1ace72278d22814cc4d1dfb1374f17532abde0ae99210bfd5ad7468e17f
languageName: node
linkType: hard

"@fullcalendar/daygrid@npm:6.1.5, @fullcalendar/daygrid@npm:~6.1.5":
"@fullcalendar/daygrid@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/daygrid@npm:6.1.5"
peerDependencies:
@ -329,62 +329,71 @@ __metadata:
languageName: node
linkType: hard

"@fullcalendar/icalendar@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/icalendar@npm:6.1.5"
"@fullcalendar/daygrid@npm:~6.1.6":
version: 6.1.6
resolution: "@fullcalendar/daygrid@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.5
"@fullcalendar/core": ~6.1.6
checksum: e7b60e359b620091c834fe3472761b9f4ebf3cd8d65a2ae56ead10dcb65a898c4c86770046dc3addff86002df9972a58395b7916502e8a4ec8d7b83a74b150b9
languageName: node
linkType: hard

"@fullcalendar/icalendar@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/icalendar@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.6
ical.js: ^1.4.0
checksum: c7d53f932860dceac8d69ef9280c2f9990de976ff060871f13b40c3ff69b951104dfa283cecb233bb03bffd71766c4b31687f76904e9d18ff5fa28b36d04ae3b
checksum: 50d290346aa9ba0cb36e6072d05b2ffe1dd1d3b89975eaf89c6309e58adafd7e2ce1ca3e3063634218c5c29207e39c75530ebe6908f66f12bc8782eaba57d52d
languageName: node
linkType: hard

"@fullcalendar/interaction@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/interaction@npm:6.1.5"
"@fullcalendar/interaction@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/interaction@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.5
checksum: dd3d7fb8ba33b13b79ee0788f81fd6831f90d3663e04467fd4b72f1c82af6d6e7d1a91ea8ea8c2c203c39d7f6a388d1bbfd501f79f7198629da03c9c9385588d
"@fullcalendar/core": ~6.1.6
checksum: b7dea490592cdaf902788825ebea122816185657bb37f82dd78ba8129d10fff0c1c45a70d819dd44b5b7088d8b51588899e8e2332652fd939e263160912940dd
languageName: node
linkType: hard

"@fullcalendar/list@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/list@npm:6.1.5"
"@fullcalendar/list@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/list@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.5
checksum: 4879294b57445a47a15c0316b3fa79265afaf02155b4fd250429a9af9fcb90eadf0842b105fa2a984efe2f044b205dbd48dffe7237d5ec9341c549fea251ad08
"@fullcalendar/core": ~6.1.6
checksum: e5beb01c624b9994490ef0e8e3cb4e9f97efd4a0be6539191f912a27df4cd56bbf4124f558536618f18889b8133920fadf0962994a7345fb089fd4245bcbcf15
languageName: node
linkType: hard

"@fullcalendar/luxon2@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/luxon2@npm:6.1.5"
"@fullcalendar/luxon2@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/luxon2@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.5
"@fullcalendar/core": ~6.1.6
luxon: ^2.0.0
checksum: 6253f7d06695a73745c719b44d4048ec2aca1dc0dbf24c57ad7721cf540cff1672e9ccc9e595bb2ec822c5f54b066a259fb5280feb778dce779288417c668085
checksum: 48d76dc556960e3ac18fc7b0afd7da5c7fb2dd2817afaba8bdf6423999aaaa174dfe8464813e79d3e5d717b0a92f793260a227f848af2427c902ed7506a01ffa
languageName: node
linkType: hard

"@fullcalendar/timegrid@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/timegrid@npm:6.1.5"
"@fullcalendar/timegrid@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/timegrid@npm:6.1.6"
dependencies:
"@fullcalendar/daygrid": ~6.1.5
"@fullcalendar/daygrid": ~6.1.6
peerDependencies:
"@fullcalendar/core": ~6.1.5
checksum: 236aabb57a3a8e8f41c43bf3a76b074731fee8a4e815b3c32a3532a7503678d488bbc1b2c74c270580ef37fb2d9ed9edf0b4646943812a5c238cd1cd7104e5f8
"@fullcalendar/core": ~6.1.6
checksum: ef79183dad0a49f86f43b3eb58fabfd48312ec0abf03b690094be72c3d8821a488038f8308cb0e136c0f716648409dc33c8a5fc77192c7c0d93cd9962cb4c814
languageName: node
linkType: hard

"@fullcalendar/vue3@npm:6.1.5":
version: 6.1.5
resolution: "@fullcalendar/vue3@npm:6.1.5"
"@fullcalendar/vue3@npm:6.1.6":
version: 6.1.6
resolution: "@fullcalendar/vue3@npm:6.1.6"
peerDependencies:
"@fullcalendar/core": ~6.1.5
"@fullcalendar/core": ~6.1.6
vue: ^3.0.11
checksum: bf426f3668ec813174b33c7151555299ee40176f53a481e72f5352002da257a9182c6b1c6cc0083f5e10f76601ccbe25af814fe0ae741516149e078dccc7efd1
checksum: 68fe3ce2049173a46c85d60d2f344644da5f50603455cc0838917449d5daae406e32995834384a02c1d741aea0ec19446ec9feb1d9c0f19b466b83336b98c8ae
languageName: node
linkType: hard
@ -2332,10 +2341,10 @@ browserlist@latest:
languageName: node
linkType: hard

"caniuse-lite@npm:1.0.30001469":
version: 1.0.30001469
resolution: "caniuse-lite@npm:1.0.30001469"
checksum: 8e496509d7e9ff189c72205675b5db0c5f1b6a09917027441e835efae0848a468a8c4e7d2b409ffc202438fcd23ae53e017f976a03c22c04d12d3c0e1e33e5de
"caniuse-lite@npm:1.0.30001481":
version: 1.0.30001481
resolution: "caniuse-lite@npm:1.0.30001481"
checksum: 8200a043c191b4fd4fe0beda37a58fd61869c895ab93f87bdd0420e5927453f48434d716ce9da8552ff6c3ecc4dcd1366354cda3a134f3cc844af741574a7cab
languageName: node
linkType: hard
@ -3587,6 +3596,16 @@ browserlist@latest:
languageName: node
linkType: hard

"eslint-scope@npm:^7.2.0":
version: 7.2.0
resolution: "eslint-scope@npm:7.2.0"
dependencies:
esrecurse: ^4.3.0
estraverse: ^5.2.0
checksum: 64591a2d8b244ade9c690b59ef238a11d5c721a98bcee9e9f445454f442d03d3e04eda88e95a4daec558220a99fa384309d9faae3d459bd40e7a81b4063980ae
languageName: node
linkType: hard

"eslint-utils@npm:^2.0.0":
version: 2.1.0
resolution: "eslint-utils@npm:2.1.0"
@ -3635,14 +3654,14 @@ browserlist@latest:
languageName: node
linkType: hard

"eslint@npm:8.37.0":
version: 8.37.0
resolution: "eslint@npm:8.37.0"
"eslint@npm:8.39.0":
version: 8.39.0
resolution: "eslint@npm:8.39.0"
dependencies:
"@eslint-community/eslint-utils": ^4.2.0
"@eslint-community/regexpp": ^4.4.0
"@eslint/eslintrc": ^2.0.2
"@eslint/js": 8.37.0
"@eslint/js": 8.39.0
"@humanwhocodes/config-array": ^0.11.8
"@humanwhocodes/module-importer": ^1.0.1
"@nodelib/fs.walk": ^1.2.8
@ -3652,7 +3671,7 @@ browserlist@latest:
debug: ^4.3.2
doctrine: ^3.0.0
escape-string-regexp: ^4.0.0
eslint-scope: ^7.1.1
eslint-scope: ^7.2.0
eslint-visitor-keys: ^3.4.0
espree: ^9.5.1
esquery: ^1.4.2
@ -3681,7 +3700,7 @@ browserlist@latest:
text-table: ^0.2.0
bin:
eslint: bin/eslint.js
checksum: 80f3d5cdce2d671f4794e392d234a78d039c347673defb0596268bd481e8f30a53d93c01ff4f66a546c87d97ab4122c0e9cafe1371f87cb03cee6b7d5aa97595
checksum: d7a074ff326e7ea482500dc0427a7d4b0260460f0f812d19b46b1cca681806b67309f23da9d17cd3de8eb74dd3c14cb549c4d58b05b140564d14cc1a391122a0
languageName: node
linkType: hard
@ -4775,10 +4794,10 @@ browserlist@latest:
languageName: node
linkType: hard

"js-cookie@npm:3.0.1":
version: 3.0.1
resolution: "js-cookie@npm:3.0.1"
checksum: bb48de67e2a6bd1ae3dfd6b2d5a167c33dd0c5a37e909206161eb0358c98f17cb55acd55827a58e9eea3630d89444e7479f7938ef4420dda443218b8c434a4c3
"js-cookie@npm:3.0.5":
version: 3.0.5
resolution: "js-cookie@npm:3.0.5"
checksum: 2dbd2809c6180fbcf060c6957cb82dbb47edae0ead6bd71cbeedf448aa6b6923115003b995f7d3e3077bfe2cb76295ea6b584eb7196cca8ba0a09f389f64967a
languageName: node
linkType: hard
@ -6289,15 +6308,15 @@ browserlist@latest:
resolution: "root-workspace-0b6124@workspace:."
dependencies:
"@faker-js/faker": 7.6.0
"@fullcalendar/bootstrap5": 6.1.5
"@fullcalendar/core": 6.1.5
"@fullcalendar/bootstrap5": 6.1.6
"@fullcalendar/core": 6.1.6
"@fullcalendar/daygrid": 6.1.5
"@fullcalendar/icalendar": 6.1.5
"@fullcalendar/interaction": 6.1.5
"@fullcalendar/list": 6.1.5
"@fullcalendar/luxon2": 6.1.5
"@fullcalendar/timegrid": 6.1.5
"@fullcalendar/vue3": 6.1.5
"@fullcalendar/icalendar": 6.1.6
"@fullcalendar/interaction": 6.1.6
"@fullcalendar/list": 6.1.6
"@fullcalendar/luxon2": 6.1.6
"@fullcalendar/timegrid": 6.1.6
"@fullcalendar/vue3": 6.1.6
"@parcel/optimizer-data-url": 2.8.3
"@parcel/transformer-inline-string": 2.8.3
"@parcel/transformer-sass": 2.8.3
@ -6310,9 +6329,9 @@ browserlist@latest:
browser-fs-access: 0.33.1
browserlist: latest
c8: 7.13.0
caniuse-lite: 1.0.30001469
caniuse-lite: 1.0.30001481
d3: 7.8.4
eslint: 8.37.0
eslint: 8.39.0
eslint-config-standard: 17.0.0
eslint-plugin-cypress: 2.13.2
eslint-plugin-import: 2.27.5
@ -6326,7 +6345,7 @@ browserlist@latest:
ical.js: 1.5.0
jquery: 3.6.4
jquery-migrate: 3.4.1
js-cookie: 3.0.1
js-cookie: 3.0.5
list.js: 2.3.1
lodash: 4.17.21
lodash-es: 4.17.21