ci: merge main to release (#7806)
commit 162aa54ffe
@@ -81,7 +81,7 @@ Many developers are using [VS Code](https://code.visualstudio.com/) and taking a
 If VS Code is not available to you, in your clone, type `cd docker; ./run`
 
-Once the containers are started, run the tests to make sure your checkout is a good place to start from (all tests should pass - if any fail, ask for help at tools-develop@). Inside the app container's shell type:
+Once the containers are started, run the tests to make sure your checkout is a good place to start from (all tests should pass - if any fail, ask for help at tools-help@). Inside the app container's shell type:
 ```sh
 ietf/manage.py test --settings=settings_test
 ```
 
@@ -1,12 +1,13 @@
 <template lang="pug">
 n-theme
-  n-message-provider
-    component(:is='currentComponent', :component-id='props.componentId')
+  n-notification-provider
+    n-message-provider
+      component(:is='currentComponent', :component-id='props.componentId')
 </template>
 
 <script setup>
 import { defineAsyncComponent, markRaw, onMounted, ref } from 'vue'
-import { NMessageProvider } from 'naive-ui'
+import { NMessageProvider, NNotificationProvider } from 'naive-ui'
 
 import NTheme from './components/n-theme.vue'
 
@@ -15,6 +16,7 @@ import NTheme from './components/n-theme.vue'
 const availableComponents = {
   ChatLog: defineAsyncComponent(() => import('./components/ChatLog.vue')),
   Polls: defineAsyncComponent(() => import('./components/Polls.vue')),
+  Status: defineAsyncComponent(() => import('./components/Status.vue'))
 }
 
 // PROPS
 
@@ -323,7 +323,7 @@ const meetingUpdated = computed(() => {
   if (!agendaStore.meeting.updated) { return false }
 
   const updatedDatetime = DateTime.fromISO(agendaStore.meeting.updated).setZone(agendaStore.timezone)
-  if (!updatedDatetime.isValid || updatedDatetime < DateTime.fromISO('1980-01-01')) {
+  if (!updatedDatetime.isValid) {
     return false
   }
 
@@ -296,7 +296,7 @@ const meetingEvents = computed(() => {
       color: 'red'
     })
   }
-  if (agendaStore.useNotes) {
+  if (agendaStore.usesNotes) {
     links.push({
       id: `lnk-${item.id}-note`,
       label: 'Notepad for note-takers',
@@ -50,7 +50,7 @@ export const useAgendaStore = defineStore('agenda', {
     selectedCatSubs: [],
     settingsShown: false,
     timezone: DateTime.local().zoneName,
-    useNotes: false,
+    usesNotes: false,
     visibleDays: []
   }),
   getters: {
@@ -160,7 +160,7 @@ export const useAgendaStore = defineStore('agenda', {
       this.isCurrentMeeting = agendaData.isCurrentMeeting
       this.meeting = agendaData.meeting
       this.schedule = agendaData.schedule
-      this.useNotes = agendaData.useNotes
+      this.usesNotes = agendaData.usesNotes
 
       // -> Compute current info note hash
       this.infoNoteHash = murmur(agendaData.meeting.infoNote, 0).toString()
 client/components/Status.vue  (new file, 80 lines)
@@ -0,0 +1,80 @@
+<script setup>
+import { h, onMounted } from 'vue'
+import { useNotification } from 'naive-ui'
+import { localStorageWrapper } from '../shared/local-storage-wrapper'
+import { JSONWrapper } from '../shared/json-wrapper'
+import { STATUS_STORAGE_KEY, generateStatusTestId } from '../shared/status-common'
+
+const getDismissedStatuses = () => {
+  const jsonString = localStorageWrapper.getItem(STATUS_STORAGE_KEY)
+  const jsonValue = JSONWrapper.parse(jsonString, [])
+  if(Array.isArray(jsonValue)) {
+    return jsonValue
+  }
+  return []
+}
+
+const dismissStatus = (id) => {
+  const dissmissed = [id, ...getDismissedStatuses()]
+  localStorageWrapper.setItem(STATUS_STORAGE_KEY, JSONWrapper.stringify(dissmissed))
+  return true
+}
+
+let notificationInstances = {} // keyed by status.id
+let notification
+
+const pollStatusAPI = () => {
+  fetch('/status/latest.json')
+    .then(resp => resp.json())
+    .then(status => {
+      if(status === null || status.hasMessage === false) {
+        console.debug("No status message")
+        return
+      }
+      const dismissedStatuses = getDismissedStatuses()
+      if(dismissedStatuses.includes(status.id)) {
+        console.debug(`Not showing site status ${status.id} because it was already dismissed. Dismissed Ids:`, dismissedStatuses)
+        return
+      }
+
+      const isSameStatusPage = Boolean(document.querySelector(`[data-status-id="${status.id}"]`))
+
+      if(isSameStatusPage) {
+        console.debug(`Not showing site status ${status.id} because we're on the target page`)
+        return
+      }
+
+      if(notificationInstances[status.id]) {
+        console.debug(`Not showing site status ${status.id} because it's already been displayed`)
+        return
+      }
+
+      notificationInstances[status.id] = notification.create({
+        title: status.title,
+        content: status.body,
+        meta: `${status.date}`,
+        action: () =>
+          h(
+            'a',
+            {
+              'data-testid': generateStatusTestId(status.id),
+              href: status.url,
+              'aria-label': `Read more about ${status.title}`
+            },
+            "Read more"
+          ),
+        onClose: () => {
+          return dismissStatus(status.id)
+        }
+      })
+    })
+    .catch(e => {
+      console.error(e)
+    })
+}
+
+onMounted(() => {
+  notification = useNotification()
+  pollStatusAPI(notification)
+})
+</script>
@@ -12,6 +12,7 @@
     <link href="https://static.ietf.org/fonts/noto-sans-mono/import.css" rel="stylesheet">
   </head>
   <body>
+    <div class="vue-embed" data-component="Status"></div>
     <div class="pt-3 container-fluid">
       <div class="row">
         <div class="col mx-lg-3" id="content">
@@ -20,5 +21,6 @@
       </div>
     </div>
     <script type="module" src="./main.js"></script>
+    <script type="module" src="./embedded.js"></script>
   </body>
 </html>
 client/shared/json-wrapper.js  (new file, 20 lines)
@@ -0,0 +1,20 @@
+export const JSONWrapper = {
+  parse(jsonString, defaultValue) {
+    if(typeof jsonString !== "string") {
+      return defaultValue
+    }
+    try {
+      return JSON.parse(jsonString);
+    } catch (e) {
+      console.error(e);
+    }
+    return defaultValue
+  },
+  stringify(data) {
+    try {
+      return JSON.stringify(data);
+    } catch (e) {
+      console.error(e)
+    }
+  },
+}
 client/shared/local-storage-wrapper.js  (new file, 42 lines)
@@ -0,0 +1,42 @@
+
+/*
+* DEVELOPER NOTE
+*
+* Some browsers can block storage (localStorage, sessionStorage)
+* access for privacy reasons, and all browsers can have storage
+* that's full, and then they throw exceptions.
+*
+* See https://michalzalecki.com/why-using-localStorage-directly-is-a-bad-idea/
+*
+* Exceptions can even be thrown when testing if localStorage
+* even exists. This can throw:
+*
+*   if (window.localStorage)
+*
+* Also localStorage/sessionStorage can be enabled after DOMContentLoaded
+* so we handle it gracefully.
+*
+* 1) we need to wrap all usage in try/catch
+* 2) we need to defer actual usage of these until
+*    necessary,
+*
+*/
+
+export const localStorageWrapper = {
+  getItem: (key) => {
+    try {
+      return localStorage.getItem(key)
+    } catch (e) {
+      console.error(e);
+    }
+    return null;
+  },
+  setItem: (key, value) => {
+    try {
+      return localStorage.setItem(key, value)
+    } catch (e) {
+      console.error(e);
+    }
+    return;
+  },
+}
 client/shared/status-common.js  (new file, 5 lines)
@@ -0,0 +1,5 @@
+// Used in Playwright Status and components
+
+export const STATUS_STORAGE_KEY = "status-dismissed"
+
+export const generateStatusTestId = (id) => `status-${id}`
 ietf/admin/__init__.py  (new file, empty)

 ietf/admin/apps.py  (new file, 6 lines)
@@ -0,0 +1,6 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+from django.contrib.admin import apps as admin_apps
+
+
+class AdminConfig(admin_apps.AdminConfig):
+    default_site = "ietf.admin.sites.AdminSite"

 ietf/admin/sites.py  (new file, 15 lines)
@@ -0,0 +1,15 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+from django.contrib.admin import AdminSite as _AdminSite
+from django.conf import settings
+from django.utils.safestring import mark_safe
+
+
+class AdminSite(_AdminSite):
+    site_title = "Datatracker admin"
+
+    @staticmethod
+    def site_header():
+        if settings.SERVER_MODE == "production":
+            return "Datatracker administration"
+        else:
+            return mark_safe('Datatracker administration <span class="text-danger">δ</span>')
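Note (illustrative only): an `AdminConfig` like the one above only takes effect once it replaces the stock admin app in `INSTALLED_APPS`; that settings change is assumed here, not shown in this diff. A minimal sketch:

```python
# Hypothetical settings sketch (assumed, not part of this hunk).
# With AdminConfig.default_site set as above, django.contrib.admin.site becomes an
# instance of ietf.admin.sites.AdminSite, so existing admin registrations keep working.
INSTALLED_APPS = [
    # ...
    "ietf.admin.apps.AdminConfig",  # instead of the default "django.contrib.admin"
    # ...
]
```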
@@ -7,8 +7,10 @@ import re
 
 from urllib.parse import urlencode
 
-from django.conf import settings
+from django.apps import apps as django_apps
 from django.core.exceptions import ObjectDoesNotExist
+from django.utils.module_loading import autodiscover_modules
 
 
 import debug # pyflakes:ignore
 
@@ -21,38 +23,27 @@ from tastypie.fields import ApiField
 
 _api_list = []
 
-for _app in settings.INSTALLED_APPS:
+OMITTED_APPS_APIS = ["ietf.status"]
+
+
+def populate_api_list():
     _module_dict = globals()
-    if '.' in _app:
-        _root, _name = _app.split('.', 1)
-        if _root == 'ietf':
-            if not '.' in _name:
-                _api = Api(api_name=_name)
-                _module_dict[_name] = _api
-                _api_list.append((_name, _api))
+    for app_config in django_apps.get_app_configs():
+        if '.' in app_config.name and app_config.name not in OMITTED_APPS_APIS:
+            _root, _name = app_config.name.split('.', 1)
+            if _root == 'ietf':
+                if not '.' in _name:
+                    _api = Api(api_name=_name)
+                    _module_dict[_name] = _api
+                    _api_list.append((_name, _api))
+
 
 def autodiscover():
     """
     Auto-discover INSTALLED_APPS resources.py modules and fail silently when
-    not present. This forces an import on them to register any admin bits they
+    not present. This forces an import on them to register any resources they
     may want.
     """
-    from importlib import import_module
-    from django.conf import settings
-    from django.utils.module_loading import module_has_submodule
-
-    for app in settings.INSTALLED_APPS:
-        mod = import_module(app)
-        # Attempt to import the app's admin module.
-        try:
-            import_module('%s.resources' % (app, ))
-        except:
-            # Decide whether to bubble up this error. If the app just
-            # doesn't have an admin module, we can ignore the error
-            # attempting to import it, otherwise we want it to bubble up.
-            if module_has_submodule(mod, "resources"):
-                raise
+    autodiscover_modules("resources")
 
 
 class ModelResource(tastypie.resources.ModelResource):
     def generate_cache_key(self, *args, **kwargs):
@@ -30,4 +30,5 @@ class Serializer(): ...
 class ToOneField(tastypie.fields.ToOneField): ...
 class TimedeltaField(tastypie.fields.ApiField): ...
 
+def populate_api_list() -> None: ...
 def autodiscover() -> None: ...
 
 ietf/api/apps.py  (new file, 15 lines)
@@ -0,0 +1,15 @@
+from django.apps import AppConfig
+from . import populate_api_list
+
+
+class ApiConfig(AppConfig):
+    name = "ietf.api"
+
+    def ready(self):
+        """Hook to do init after the app registry is fully populated
+
+        Importing models or accessing the app registry is ok here, but do not
+        interact with the database. See
+        https://docs.djangoproject.com/en/4.2/ref/applications/#django.apps.AppConfig.ready
+        """
+        populate_api_list()
@@ -48,6 +48,7 @@ OMITTED_APPS = (
     'ietf.secr.meetings',
     'ietf.secr.proceedings',
     'ietf.ipr',
+    'ietf.status',
 )
 
 class CustomApiTests(TestCase):
@@ -11,6 +11,7 @@ from ietf.meeting import views as meeting_views
 from ietf.submit import views as submit_views
 from ietf.utils.urls import url
 
+
 api.autodiscover()
 
 urlpatterns = [
@@ -105,6 +105,8 @@ def generate_draft_bibxml_files_task(days=7, process_all=False):
     If process_all is False (the default), processes only docs with new revisions
     in the last specified number of days.
     """
+    if not process_all and days < 1:
+        raise ValueError("Must call with days >= 1 or process_all=True")
     ensure_draft_bibxml_path_exists()
     doc_events = NewRevisionDocEvent.objects.filter(
         type="new_revision",
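Note (illustrative only): the guard added above slightly changes the task's calling contract. A short sketch of the accepted and rejected argument combinations, restating what the hunk shows:

```python
# Sketch based solely on the hunk above; not part of this diff.
from ietf.doc.tasks import generate_draft_bibxml_files_task

generate_draft_bibxml_files_task()                   # default: drafts revised in the last 7 days
generate_draft_bibxml_files_task(days=30)            # longer lookback
generate_draft_bibxml_files_task(process_all=True)   # regenerate bibxml for all drafts

try:
    generate_draft_bibxml_files_task(days=0)         # now rejected by the new guard
except ValueError:
    pass
```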
@@ -22,8 +22,6 @@ from .tasks import (
 )
 
 class TaskTests(TestCase):
-    settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ["DERIVED_DIR"]
-
     @mock.patch("ietf.doc.tasks.in_draft_expire_freeze")
     @mock.patch("ietf.doc.tasks.get_expired_drafts")
     @mock.patch("ietf.doc.tasks.expirable_drafts")
@@ -63,8 +61,8 @@ class TaskTests(TestCase):
 
         # test that an exception is raised
         in_draft_expire_freeze_mock.side_effect = RuntimeError
-        with self.assertRaises(RuntimeError): (
-            expire_ids_task())
+        with self.assertRaises(RuntimeError):
+            expire_ids_task()
 
     @mock.patch("ietf.doc.tasks.send_expire_warning_for_draft")
     @mock.patch("ietf.doc.tasks.get_soon_to_expire_drafts")
@@ -98,16 +96,10 @@ class TaskTests(TestCase):
         self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
         self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
 
-    @mock.patch("ietf.doc.tasks.generate_idnits2_rfc_status")
-    def test_generate_idnits2_rfc_status_task(self, mock_generate):
-        mock_generate.return_value = "dåtå"
-        generate_idnits2_rfc_status_task()
-        self.assertEqual(mock_generate.call_count, 1)
-        self.assertEqual(
-            "dåtå".encode("utf8"),
-            (Path(settings.DERIVED_DIR) / "idnits2-rfc-status").read_bytes(),
-        )
+
+class Idnits2SupportTests(TestCase):
+    settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']
 
     @mock.patch("ietf.doc.tasks.generate_idnits2_rfcs_obsoleted")
     def test_generate_idnits2_rfcs_obsoleted_task(self, mock_generate):
         mock_generate.return_value = "dåtå"
@@ -118,17 +110,28 @@ class TaskTests(TestCase):
             (Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted").read_bytes(),
         )
 
-    @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists")
-    @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file")
-    def test_generate_draft_bibxml_files_task(self, mock_create, mock_ensure_path):
+    @mock.patch("ietf.doc.tasks.generate_idnits2_rfc_status")
+    def test_generate_idnits2_rfc_status_task(self, mock_generate):
+        mock_generate.return_value = "dåtå"
+        generate_idnits2_rfc_status_task()
+        self.assertEqual(mock_generate.call_count, 1)
+        self.assertEqual(
+            "dåtå".encode("utf8"),
+            (Path(settings.DERIVED_DIR) / "idnits2-rfc-status").read_bytes(),
+        )
+
+
+class BIBXMLSupportTests(TestCase):
+    def setUp(self):
+        super().setUp()
         now = timezone.now()
-        very_old_event = NewRevisionDocEventFactory(
+        self.very_old_event = NewRevisionDocEventFactory(
             time=now - datetime.timedelta(days=1000), rev="17"
         )
-        old_event = NewRevisionDocEventFactory(
+        self.old_event = NewRevisionDocEventFactory(
             time=now - datetime.timedelta(days=8), rev="03"
         )
-        young_event = NewRevisionDocEventFactory(
+        self.young_event = NewRevisionDocEventFactory(
             time=now - datetime.timedelta(days=6), rev="06"
         )
         # a couple that should always be ignored
@@ -141,53 +144,25 @@ class TaskTests(TestCase):
             rev="09",
             doc__type_id="rfc",
         )
 
         # Get rid of the "00" events created by the factories -- they're just noise for this test
         NewRevisionDocEvent.objects.filter(rev="00").delete()
 
-        # default args - look back 7 days
-        generate_draft_bibxml_files_task()
-        self.assertTrue(mock_ensure_path.called)
-        self.assertCountEqual(
-            mock_create.call_args_list, [mock.call(young_event.doc, young_event.rev)]
-        )
-        mock_create.reset_mock()
-        mock_ensure_path.reset_mock()
-
-        # shorter lookback
-        generate_draft_bibxml_files_task(days=5)
-        self.assertTrue(mock_ensure_path.called)
-        self.assertCountEqual(mock_create.call_args_list, [])
-        mock_create.reset_mock()
-        mock_ensure_path.reset_mock()
-
-        # longer lookback
-        generate_draft_bibxml_files_task(days=9)
-        self.assertTrue(mock_ensure_path.called)
-        self.assertCountEqual(
-            mock_create.call_args_list,
-            [
-                mock.call(young_event.doc, young_event.rev),
-                mock.call(old_event.doc, old_event.rev),
-            ],
-        )
-        mock_create.reset_mock()
-        mock_ensure_path.reset_mock()
-
-        # everything
+    @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists")
+    @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file")
+    def test_generate_bibxml_files_for_all_drafts_task(self, mock_create, mock_ensure_path):
         generate_draft_bibxml_files_task(process_all=True)
         self.assertTrue(mock_ensure_path.called)
         self.assertCountEqual(
             mock_create.call_args_list,
             [
-                mock.call(young_event.doc, young_event.rev),
-                mock.call(old_event.doc, old_event.rev),
-                mock.call(very_old_event.doc, very_old_event.rev),
+                mock.call(self.young_event.doc, self.young_event.rev),
+                mock.call(self.old_event.doc, self.old_event.rev),
+                mock.call(self.very_old_event.doc, self.very_old_event.rev),
             ],
         )
         mock_create.reset_mock()
         mock_ensure_path.reset_mock()
 
         # everything should still be tried, even if there's an exception
         mock_create.side_effect = RuntimeError
         generate_draft_bibxml_files_task(process_all=True)
@@ -195,8 +170,46 @@ class TaskTests(TestCase):
         self.assertCountEqual(
             mock_create.call_args_list,
             [
-                mock.call(young_event.doc, young_event.rev),
-                mock.call(old_event.doc, old_event.rev),
-                mock.call(very_old_event.doc, very_old_event.rev),
+                mock.call(self.young_event.doc, self.young_event.rev),
+                mock.call(self.old_event.doc, self.old_event.rev),
+                mock.call(self.very_old_event.doc, self.very_old_event.rev),
             ],
         )
+
+    @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists")
+    @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file")
+    def test_generate_bibxml_files_for_recent_drafts_task(self, mock_create, mock_ensure_path):
+        # default args - look back 7 days
+        generate_draft_bibxml_files_task()
+        self.assertTrue(mock_ensure_path.called)
+        self.assertCountEqual(
+            mock_create.call_args_list, [mock.call(self.young_event.doc, self.young_event.rev)]
+        )
+        mock_create.reset_mock()
+        mock_ensure_path.reset_mock()
+
+        # shorter lookback
+        generate_draft_bibxml_files_task(days=5)
+        self.assertTrue(mock_ensure_path.called)
+        self.assertCountEqual(mock_create.call_args_list, [])
+        mock_create.reset_mock()
+        mock_ensure_path.reset_mock()
+
+        # longer lookback
+        generate_draft_bibxml_files_task(days=9)
+        self.assertTrue(mock_ensure_path.called)
+        self.assertCountEqual(
+            mock_create.call_args_list,
+            [
+                mock.call(self.young_event.doc, self.young_event.rev),
+                mock.call(self.old_event.doc, self.old_event.rev),
+            ],
+        )
+
+    @mock.patch("ietf.doc.tasks.ensure_draft_bibxml_path_exists")
+    @mock.patch("ietf.doc.tasks.update_or_create_draft_bibxml_file")
+    def test_generate_bibxml_files_for_recent_drafts_task_with_bad_value(self, mock_create, mock_ensure_path):
+        with self.assertRaises(ValueError):
+            generate_draft_bibxml_files_task(days=0)
+        self.assertFalse(mock_create.called)
+        self.assertFalse(mock_ensure_path.called)
@@ -1,4 +1,4 @@
-# Copyright The IETF Trust 2011-2020, All Rights Reserved
+# Copyright The IETF Trust 2011-2024, All Rights Reserved
 # -*- coding: utf-8 -*-
 
 
@@ -1228,6 +1228,7 @@ def fuzzy_find_documents(name, rev=None):
     FoundDocuments = namedtuple('FoundDocuments', 'documents matched_name matched_rev')
     return FoundDocuments(docs, name, rev)
 
+
 def bibxml_for_draft(doc, rev=None):
 
     if rev is not None and rev != doc.rev:
@@ -607,12 +607,7 @@ def document_main(request, name, rev=None, document_html=False):
         additional_urls = doc.documenturl_set.exclude(tag_id='auth48')
 
         # Stream description and name passing test
-        if doc.stream != None:
-            stream_desc = doc.stream.desc
-            stream = "draft-stream-" + doc.stream.slug
-        else:
-            stream_desc = "(None)"
-            stream = "(None)"
+        stream = ("draft-stream-" + doc.stream.slug) if doc.stream != None else "(None)"
 
         html = None
         js = None
@@ -651,7 +646,6 @@ def document_main(request, name, rev=None, document_html=False):
             revisions=simple_diff_revisions if document_html else revisions,
             snapshot=snapshot,
             stream=stream,
-            stream_desc=stream_desc,
             latest_revision=latest_revision,
             latest_rev=latest_rev,
             can_edit=can_edit,
@@ -1002,7 +996,7 @@ def document_raw_id(request, name, rev=None, ext=None):
     for t in possible_types:
         if os.path.exists(base_path + t):
             found_types[t]=base_path+t
-    if ext == None:
+    if ext is None:
         ext = 'txt'
     if not ext in found_types:
         raise Http404('dont have the file for that extension')
@@ -1233,7 +1227,7 @@ def document_bibtex(request, name, rev=None):
         raise Http404()
 
     # Make sure URL_REGEXPS did not grab too much for the rev number
-    if rev != None and len(rev) != 2:
+    if rev is not None and len(rev) != 2:
         mo = re.search(r"^(?P<m>[0-9]{1,2})-(?P<n>[0-9]{2})$", rev)
         if mo:
             name = name+"-"+mo.group(1)
@@ -1256,7 +1250,7 @@ def document_bibtex(request, name, rev=None):
     replaced_by = [d.name for d in doc.related_that("replaces")]
     draft_became_rfc = doc.became_rfc()
 
-    if rev != None and rev != doc.rev:
+    if rev is not None and rev != doc.rev:
         # find the entry in the history
         for h in doc.history_set.order_by("-time"):
             if rev == h.rev:
@@ -1297,7 +1291,7 @@ def document_bibxml(request, name, rev=None):
         raise Http404()
 
     # Make sure URL_REGEXPS did not grab too much for the rev number
-    if rev != None and len(rev) != 2:
+    if rev is not None and len(rev) != 2:
         mo = re.search(r"^(?P<m>[0-9]{1,2})-(?P<n>[0-9]{2})$", rev)
         if mo:
             name = name+"-"+mo.group(1)
@@ -1445,7 +1439,7 @@ def document_referenced_by(request, name):
     if doc.type_id in ["bcp","std","fyi"]:
         for rfc in doc.contains():
             refs |= rfc.referenced_by()
-    full = ( request.GET.get('full') != None )
+    full = ( request.GET.get('full') is not None )
     numdocs = refs.count()
     if not full and numdocs>250:
         refs=refs[:250]
@@ -1465,7 +1459,7 @@ def document_ballot_content(request, doc, ballot_id, editable=True):
     augment_events_with_revision(doc, all_ballots)
 
     ballot = None
-    if ballot_id != None:
+    if ballot_id is not None:
         ballot_id = int(ballot_id)
         for b in all_ballots:
             if b.id == ballot_id:
@@ -1667,7 +1661,7 @@ def add_comment(request, name):
 
     login = request.user.person
 
-    if doc.type_id == "draft" and doc.group != None:
+    if doc.type_id == "draft" and doc.group is not None:
         can_add_comment = bool(has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "IANA", "RFC Editor")) or (
             request.user.is_authenticated and
             Role.objects.filter(name__in=("chair", "secr"),
@@ -334,35 +334,86 @@ def chartering_groups(request):
                   dict(charter_states=charter_states,
                        group_types=group_types))
 
 
 def concluded_groups(request):
     sections = OrderedDict()
 
-    sections['WGs'] = Group.objects.filter(type='wg', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['RGs'] = Group.objects.filter(type='rg', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['BOFs'] = Group.objects.filter(type='wg', state="bof-conc").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['AGs'] = Group.objects.filter(type='ag', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['RAGs'] = Group.objects.filter(type='rag', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['Directorates'] = Group.objects.filter(type='dir', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['Review teams'] = Group.objects.filter(type='review', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['Teams'] = Group.objects.filter(type='team', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
-    sections['Programs'] = Group.objects.filter(type='program', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
+    sections["WGs"] = (
+        Group.objects.filter(type="wg", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["RGs"] = (
+        Group.objects.filter(type="rg", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["BOFs"] = (
+        Group.objects.filter(type="wg", state="bof-conc")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["AGs"] = (
+        Group.objects.filter(type="ag", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["RAGs"] = (
+        Group.objects.filter(type="rag", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["Directorates"] = (
+        Group.objects.filter(type="dir", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["Review teams"] = (
+        Group.objects.filter(type="review", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["Teams"] = (
+        Group.objects.filter(type="team", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
+    sections["Programs"] = (
+        Group.objects.filter(type="program", state="conclude")
+        .select_related("state", "charter")
+        .order_by("parent__name", "acronym")
+    )
 
     for name, groups in sections.items():
 
         # add start/conclusion date
         d = dict((g.pk, g) for g in groups)
 
         for g in groups:
             g.start_date = g.conclude_date = None
 
-        for e in ChangeStateGroupEvent.objects.filter(group__in=groups, state="active").order_by("-time"):
+        # Some older BOFs were created in the "active" state, so consider both "active" and "bof"
+        # ChangeStateGroupEvents when finding the start date. A group with _both_ "active" and "bof"
+        # events should not be in the "bof-conc" state so this shouldn't cause a problem (if it does,
+        # we'll need to clean up the data)
+        for e in ChangeStateGroupEvent.objects.filter(
+            group__in=groups,
+            state__in=["active", "bof"] if name == "BOFs" else ["active"],
+        ).order_by("-time"):
             d[e.group_id].start_date = e.time
 
-        for e in ChangeStateGroupEvent.objects.filter(group__in=groups, state="conclude").order_by("time"):
+        # Similarly, some older BOFs were concluded into the "conclude" state and the event was never
+        # fixed, so consider both "conclude" and "bof-conc" ChangeStateGroupEvents when finding the
+        # concluded date. A group with _both_ "conclude" and "bof-conc" events should not be in the
+        # "bof-conc" state so this shouldn't cause a problem (if it does, we'll need to clean up the
+        # data)
+        for e in ChangeStateGroupEvent.objects.filter(
+            group__in=groups,
+            state__in=["bof-conc", "conclude"] if name == "BOFs" else ["conclude"],
+        ).order_by("time"):
             d[e.group_id].conclude_date = e.time
 
-    return render(request, 'group/concluded_groups.html',
-                  dict(sections=sections))
+    return render(request, "group/concluded_groups.html", dict(sections=sections))
 
 
 def prepare_group_documents(request, group, clist):
     found_docs, meta = prepare_document_table(request, docs_tracked_by_community_list(clist), request.GET, max_results=500)
@@ -527,6 +527,24 @@ class IetfAuthTests(TestCase):
         self.assertIn(secondary_address, to)
         self.assertNotIn(inactive_secondary_address, to)
 
+    def test_reset_password_without_user(self):
+        """Reset password using email address for person without a user account"""
+        url = urlreverse('ietf.ietfauth.views.password_reset')
+        email = EmailFactory()
+        person = email.person
+        # Remove the user object from the person to get a Email/Person without User:
+        person.user = None
+        person.save()
+        # Remove the remaining User record, since reset_password looks for that by username:
+        User.objects.filter(username__iexact=email.address).delete()
+        empty_outbox()
+        r = self.client.post(url, { 'username': email.address })
+        self.assertEqual(len(outbox), 1)
+        lastReceivedEmail = outbox[-1]
+        self.assertIn(email.address, lastReceivedEmail.get('To'))
+        self.assertTrue(lastReceivedEmail.get('Subject').startswith("Confirm password reset"))
+        self.assertContains(r, "Your password reset request has been successfully received", status_code=200)
+
     def test_review_overview(self):
         review_req = ReviewRequestFactory()
         assignment = ReviewAssignmentFactory(review_request=review_req,reviewer=EmailFactory(person__user__username='reviewer'))
@@ -491,9 +491,19 @@ def password_reset(request):
         if not user:
             # try to find user ID from the email address
             email = Email.objects.filter(address=submitted_username).first()
-            if email and email.person and email.person.user:
-                user = email.person.user
+            if email and email.person:
+                if email.person.user:
+                    user = email.person.user
+                else:
+                    # Create a User record with this (conditioned by way of Email) username
+                    # Don't bother setting the name or email fields on User - rely on the
+                    # Person pointer.
+                    user = User.objects.create(
+                        username=email.address.lower(),
+                        is_active=True,
+                    )
+                    email.person.user = user
+                    email.person.save()
         if user and user.person.email_set.filter(active=True).exists():
             data = {
                 'username': user.username,
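Note (illustrative only): the effect of this change, sketched in the style of the new test added earlier in this diff. Names such as `EmailFactory`, `urlreverse`, and the test client are assumed from that test, not redefined here:

```python
# Hypothetical sketch mirroring test_reset_password_without_user above.
email = EmailFactory()
email.person.user = None
email.person.save()
User.objects.filter(username__iexact=email.address).delete()

# POSTing the address to the password reset view now creates the missing User
# (username = lower-cased address) and sends the confirmation mail.
client.post(urlreverse("ietf.ietfauth.views.password_reset"), {"username": email.address})
assert User.objects.filter(username=email.address.lower()).exists()
```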
@@ -489,9 +489,12 @@ class UploadAgendaForm(ApplyToAllFileUploadForm):
 class UploadSlidesForm(ApplyToAllFileUploadForm):
     doc_type = 'slides'
     title = forms.CharField(max_length=255)
+    approved = forms.BooleanField(label='Auto-approve', initial=True, required=False)
 
-    def __init__(self, session, *args, **kwargs):
-        super().__init__(*args, **kwargs)
+    def __init__(self, session, show_apply_to_all_checkbox, can_manage, *args, **kwargs):
+        super().__init__(show_apply_to_all_checkbox, *args, **kwargs)
+        if not can_manage:
+            self.fields.pop('approved')
         self.session = session
 
     def clean_title(self):
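Note (illustrative only): callers of `UploadSlidesForm` now have to supply `show_apply_to_all_checkbox` and `can_manage`. A sketch of the expected call shape; the variable values are assumptions, since the corresponding view changes are not part of this excerpt:

```python
# Hypothetical construction sketch; the real view code is not shown here.
form = UploadSlidesForm(
    session,
    show_apply_to_all_checkbox=show_apply_to_all,  # assumed boolean computed by the view
    can_manage=user_can_manage_materials,          # assumed boolean computed by the view
    data=request.POST or None,
    files=request.FILES or None,
)
# When can_manage is False the "approved" (Auto-approve) field is removed, so the
# upload goes through the proposed-slides approval flow instead.
```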
@@ -369,20 +369,36 @@ class Meeting(models.Model):
 
     def updated(self):
         # should be Meeting.modified, but we don't have that
-        min_time = pytz.utc.localize(datetime.datetime(1970, 1, 1, 0, 0, 0))
-        timeslots_updated = self.timeslot_set.aggregate(Max('modified'))["modified__max"] or min_time
-        sessions_updated = self.session_set.aggregate(Max('modified'))["modified__max"] or min_time
-        assignments_updated = min_time
+        timeslots_updated = self.timeslot_set.aggregate(Max('modified'))["modified__max"]
+        sessions_updated = self.session_set.aggregate(Max('modified'))["modified__max"]
+        assignments_updated = None
         if self.schedule:
-            assignments_updated = SchedTimeSessAssignment.objects.filter(schedule__in=[self.schedule, self.schedule.base if self.schedule else None]).aggregate(Max('modified'))["modified__max"] or min_time
-        return max(timeslots_updated, sessions_updated, assignments_updated)
+            assignments_updated = SchedTimeSessAssignment.objects.filter(schedule__in=[self.schedule, self.schedule.base if self.schedule else None]).aggregate(Max('modified'))["modified__max"]
+        dts = [timeslots_updated, sessions_updated, assignments_updated]
+        valid_only = [dt for dt in dts if dt is not None]
+        return max(valid_only) if valid_only else None
 
     @memoize
     def previous_meeting(self):
         return Meeting.objects.filter(type_id=self.type_id,date__lt=self.date).order_by('-date').first()
 
     def uses_notes(self):
-        return self.date>=datetime.date(2020,7,6)
+        if self.type_id != 'ietf':
+            return True
+        num = self.get_number()
+        return num is not None and num >= 108
+
+    def has_recordings(self):
+        if self.type_id != 'ietf':
+            return True
+        num = self.get_number()
+        return num is not None and num >= 80
+
+    def has_chat_logs(self):
+        if self.type_id != 'ietf':
+            return True;
+        num = self.get_number()
+        return num is not None and num >= 60
 
     def meeting_start(self):
         """Meeting-local midnight at the start of the meeting date"""
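Note (illustrative only): `Meeting.updated()` can now return `None` instead of a 1970 sentinel, so callers must handle the missing-timestamp case themselves. A minimal caller-side sketch; the fallback string matches the `19700101T000000Z` value expected by the tests later in this diff, while `meeting` and the `datetime` import are assumed from the caller's context:

```python
# Caller-side sketch (assumed code, not part of this diff).
updated = meeting.updated()
if updated is not None:
    dtstamp = updated.astimezone(datetime.timezone.utc).strftime("%Y%m%dT%H%M%SZ")
else:
    dtstamp = "19700101T000000Z"  # nothing on the meeting carries a modified timestamp yet
```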
@@ -259,7 +259,7 @@ class MeetingTests(BaseMeetingTestCase):
             },
             "categories": rjson.get("categories"),  # Just expect the value to exist
             "isCurrentMeeting": True,
-            "useNotes": True,
+            "usesNotes": False,  # make_meeting_test_data sets number=72
             "schedule": rjson.get("schedule"),  # Just expect the value to exist
             "floors": []
         }
@@ -294,6 +294,8 @@ class MeetingTests(BaseMeetingTestCase):
             (slot.time + slot.duration).astimezone(meeting.tz()).strftime("%H%M"),
         ))
         self.assertContains(r, f"shown in the {meeting.tz()} time zone")
+        updated = meeting.updated().astimezone(meeting.tz()).strftime("%Y-%m-%d %H:%M:%S %Z")
+        self.assertContains(r, f"Updated {updated}")
 
         # text, UTC
         r = self.client.get(urlreverse(
@@ -309,6 +311,16 @@ class MeetingTests(BaseMeetingTestCase):
             (slot.time + slot.duration).astimezone(datetime.timezone.utc).strftime("%H%M"),
         ))
         self.assertContains(r, "shown in UTC")
+        updated = meeting.updated().astimezone(datetime.timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z")
+        self.assertContains(r, f"Updated {updated}")
+
+        # text, invalid updated (none)
+        with patch("ietf.meeting.models.Meeting.updated", return_value=None):
+            r = self.client.get(urlreverse(
+                "ietf.meeting.views.agenda_plain",
+                kwargs=dict(num=meeting.number, ext=".txt", utc="-utc"),
+            ))
+            self.assertNotContains(r, "Updated ")
 
         # future meeting, no agenda
         r = self.client.get(urlreverse("ietf.meeting.views.agenda_plain", kwargs=dict(num=future_meeting.number, ext=".txt")))
@@ -859,6 +871,24 @@ class MeetingTests(BaseMeetingTestCase):
         for d in meeting.importantdate_set.all():
             self.assertContains(r, d.date.isoformat())
 
+        updated = meeting.updated()
+        self.assertIsNotNone(updated)
+        expected_updated = updated.astimezone(datetime.timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+        self.assertContains(r, f"DTSTAMP:{expected_updated}")
+        dtstamps_count = r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}")
+        self.assertEqual(dtstamps_count, meeting.importantdate_set.count())
+
+        # With default cached_updated, 1970-01-01
+        with patch("ietf.meeting.models.Meeting.updated", return_value=None):
+            r = self.client.get(url)
+            for d in meeting.importantdate_set.all():
+                self.assertContains(r, d.date.isoformat())
+
+            expected_updated = "19700101T000000Z"
+            self.assertContains(r, f"DTSTAMP:{expected_updated}")
+            dtstamps_count = r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}")
+            self.assertEqual(dtstamps_count, meeting.importantdate_set.count())
+
     def test_group_ical(self):
         meeting = make_meeting_test_data()
         s1 = Session.objects.filter(meeting=meeting, group__acronym="mars").first()
@@ -4952,7 +4982,23 @@ class InterimTests(TestCase):
                                  expected_event_count=len(expected_event_summaries))
         self.assertNotContains(r, 'Remote instructions:')
 
-    def test_upcoming_ical_filter(self):
+        updated = meeting.updated()
+        self.assertIsNotNone(updated)
+        expected_updated = updated.astimezone(datetime.timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+        self.assertContains(r, f"DTSTAMP:{expected_updated}")
+
+        # With default cached_updated, 1970-01-01
+        with patch("ietf.meeting.models.Meeting.updated", return_value=None):
+            r = self.client.get(url)
+            self.assertEqual(r.status_code, 200)
+
+            self.assertEqual(meeting.type_id, "ietf")
+
+            expected_updated = "19700101T000000Z"
+            self.assertEqual(1, r.content.decode("utf-8").count(f"DTSTAMP:{expected_updated}"))
+
+    @patch("ietf.meeting.utils.preprocess_meeting_important_dates")
+    def test_upcoming_ical_filter(self, mock_preprocess_meeting_important_dates):
         # Just a quick check of functionality - details tested by test_js.InterimTests
         make_meeting_test_data(create_interims=True)
         url = urlreverse("ietf.meeting.views.upcoming_ical")
@@ -4974,6 +5020,8 @@ class InterimTests(TestCase):
             ],
             expected_event_count=2)
 
+        # Verify preprocess_meeting_important_dates isn't being called
+        mock_preprocess_meeting_important_dates.assert_not_called()
 
     def test_upcoming_json(self):
         make_meeting_test_data(create_interims=True)
@@ -6454,7 +6502,7 @@ class MaterialsTests(TestCase):
         self.assertFalse(session1.presentations.filter(document__type_id='slides'))
         test_file = BytesIO(b'this is not really a slide')
         test_file.name = 'not_really.txt'
-        r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True))
+        r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=True))
         self.assertEqual(r.status_code, 302)
         self.assertEqual(session1.presentations.count(),1)
         self.assertEqual(session2.presentations.count(),1)
@@ -6477,7 +6525,7 @@ class MaterialsTests(TestCase):
         url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id})
         test_file = BytesIO(b'some other thing still not slidelike')
         test_file.name = 'also_not_really.txt'
-        r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False))
+        r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False,approved=True))
         self.assertEqual(r.status_code, 302)
         self.assertEqual(session1.presentations.count(),1)
         self.assertEqual(session2.presentations.count(),2)
@@ -6501,7 +6549,7 @@ class MaterialsTests(TestCase):
         self.assertIn('Revise', str(q("title")))
         test_file = BytesIO(b'new content for the second slide deck')
         test_file.name = 'doesnotmatter.txt'
-        r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False))
+        r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False, approved=True))
        self.assertEqual(r.status_code, 302)
         self.assertEqual(session1.presentations.count(),1)
         self.assertEqual(session2.presentations.count(),2)
@@ -6597,7 +6645,7 @@ class MaterialsTests(TestCase):
         newperson = PersonFactory()
 
         session_overview_url = urlreverse('ietf.meeting.views.session_details',kwargs={'num':session.meeting.number,'acronym':session.group.acronym})
-        propose_url = urlreverse('ietf.meeting.views.propose_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number})
+        upload_url = urlreverse('ietf.meeting.views.upload_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number})
 
         r = self.client.get(session_overview_url)
         self.assertEqual(r.status_code,200)
@@ -6612,13 +6660,13 @@ class MaterialsTests(TestCase):
         self.assertTrue(q('.proposeslides'))
         self.client.logout()
 
-        login_testing_unauthorized(self,newperson.user.username,propose_url)
-        r = self.client.get(propose_url)
+        login_testing_unauthorized(self,newperson.user.username,upload_url)
+        r = self.client.get(upload_url)
         self.assertEqual(r.status_code,200)
         test_file = BytesIO(b'this is not really a slide')
         test_file.name = 'not_really.txt'
         empty_outbox()
-        r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True))
+        r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=False))
         self.assertEqual(r.status_code, 302)
         session = Session.objects.get(pk=session.pk)
         self.assertEqual(session.slidesubmission_set.count(),1)
@@ -6639,6 +6687,25 @@ class MaterialsTests(TestCase):
         self.assertEqual(len(q('.proposedslidelist p')), 2)
         self.client.logout()
 
+        login_testing_unauthorized(self,chair.user.username,upload_url)
+        r = self.client.get(upload_url)
+        self.assertEqual(r.status_code,200)
+        test_file = BytesIO(b'this is not really a slide either')
+        test_file.name = 'again_not_really.txt'
+        empty_outbox()
+        r = self.client.post(upload_url,dict(file=test_file,title='a selfapproved test slide file',apply_to_all=True,approved=True))
+        self.assertEqual(r.status_code, 302)
+        self.assertEqual(len(outbox),0)
+        self.assertEqual(session.slidesubmission_set.count(),2)
+        self.client.logout()
+
+        self.client.login(username=chair.user.username, password=chair.user.username+"+password")
+        r = self.client.get(session_overview_url)
+        self.assertEqual(r.status_code, 200)
+        q = PyQuery(r.content)
+        self.assertEqual(len(q('.uploadslidelist p')), 0)
+        self.client.logout()
+
     def test_disapprove_proposed_slides(self):
         submission = SlideSubmissionFactory()
         submission.session.meeting.importantdate_set.create(name_id='revsub',date=date_today() + datetime.timedelta(days=20))
@ -6759,12 +6826,12 @@ class MaterialsTests(TestCase):
|
||||||
session.meeting.importantdate_set.create(name_id='revsub',date=date_today()+datetime.timedelta(days=20))
|
session.meeting.importantdate_set.create(name_id='revsub',date=date_today()+datetime.timedelta(days=20))
|
||||||
newperson = PersonFactory()
|
newperson = PersonFactory()
|
||||||
|
|
||||||
propose_url = urlreverse('ietf.meeting.views.propose_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number})
|
upload_url = urlreverse('ietf.meeting.views.upload_session_slides', kwargs={'session_id':session.pk, 'num': session.meeting.number})
|
||||||
|
|
||||||
login_testing_unauthorized(self,newperson.user.username,propose_url)
|
login_testing_unauthorized(self,newperson.user.username,upload_url)
|
||||||
test_file = BytesIO(b'this is not really a slide')
|
test_file = BytesIO(b'this is not really a slide')
|
||||||
test_file.name = 'not_really.txt'
|
test_file.name = 'not_really.txt'
|
||||||
r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True))
|
r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True,approved=False))
|
||||||
self.assertEqual(r.status_code, 302)
|
self.assertEqual(r.status_code, 302)
|
||||||
self.client.logout()
|
self.client.logout()
|
||||||
|
|
||||||
|
@ -6787,15 +6854,15 @@ class MaterialsTests(TestCase):
|
||||||
|
|
||||||
self.assertEqual(session.presentations.first().document.rev,'00')
|
self.assertEqual(session.presentations.first().document.rev,'00')
|
||||||
|
|
||||||
login_testing_unauthorized(self,newperson.user.username,propose_url)
|
login_testing_unauthorized(self,newperson.user.username,upload_url)
|
||||||
test_file = BytesIO(b'this is not really a slide, but it is another version of it')
|
test_file = BytesIO(b'this is not really a slide, but it is another version of it')
|
||||||
test_file.name = 'not_really.txt'
|
test_file.name = 'not_really.txt'
|
||||||
r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True))
|
r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True))
|
||||||
self.assertEqual(r.status_code, 302)
|
self.assertEqual(r.status_code, 302)
|
||||||
|
|
||||||
test_file = BytesIO(b'this is not really a slide, but it is third version of it')
|
test_file = BytesIO(b'this is not really a slide, but it is third version of it')
|
||||||
test_file.name = 'not_really.txt'
|
test_file.name = 'not_really.txt'
|
||||||
r = self.client.post(propose_url,dict(file=test_file,title='a test slide file',apply_to_all=True))
|
r = self.client.post(upload_url,dict(file=test_file,title='a test slide file',apply_to_all=True))
|
||||||
self.assertEqual(r.status_code, 302)
|
self.assertEqual(r.status_code, 302)
|
||||||
self.client.logout()
|
self.client.logout()
|
||||||
|
|
||||||
|
|
|
@@ -22,7 +22,6 @@ safe_for_all_meeting_types = [
     url(r'^session/(?P<session_id>\d+)/narrativeminutes$', views.upload_session_narrativeminutes),
     url(r'^session/(?P<session_id>\d+)/agenda$', views.upload_session_agenda),
     url(r'^session/(?P<session_id>\d+)/import/minutes$', views.import_session_minutes),
-    url(r'^session/(?P<session_id>\d+)/propose_slides$', views.propose_session_slides),
     url(r'^session/(?P<session_id>\d+)/slides(?:/%(name)s)?$' % settings.URL_REGEXPS, views.upload_session_slides),
     url(r'^session/(?P<session_id>\d+)/add_to_session$', views.ajax_add_slides_to_session),
     url(r'^session/(?P<session_id>\d+)/remove_from_session$', views.ajax_remove_slides_from_session),
@@ -609,7 +609,8 @@ def bulk_create_timeslots(meeting, times, locations, other_props):

 def preprocess_meeting_important_dates(meetings):
     for m in meetings:
-        m.cached_updated = m.updated()
+        # cached_updated must be present, set it to 1970-01-01 if necessary
+        m.cached_updated = m.updated() or pytz.utc.localize(datetime.datetime(1970, 1, 1, 0, 0, 0))
         m.important_dates = m.importantdate_set.prefetch_related("name")
         for d in m.important_dates:
             d.midnight_cutoff = "UTC 23:59" in d.name.name
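The fallback matters because `Meeting.updated()` can return `None` when no revision information exists, while the templates and agenda API expect an aware datetime. A minimal standalone sketch of the same pattern (the `EPOCH` constant and helper name are illustrative, not part of the patch):

```python
import datetime
import pytz

# timezone-aware "very old" default, mirroring the 1970-01-01 fallback above
EPOCH = pytz.utc.localize(datetime.datetime(1970, 1, 1, 0, 0, 0))

def cached_updated_or_epoch(meeting):
    # meeting.updated() is assumed to return an aware datetime or None
    return meeting.updated() or EPOCH
```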
@@ -726,7 +727,7 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap

 def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=None):
     """Accept an uploaded materials file

-    This function takes a file object, a filename and a meeting object and subdir as string.
+    This function takes a _binary mode_ file object, a filename and a meeting object and subdir as string.
     It saves the file to the appropriate directory, get_materials_path() + subdir.
     If the file is a zip file, it creates a new directory in 'slides', which is the basename of the
     zip file and unzips the file in the new directory.
@@ -748,9 +749,18 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N
        pass # if the file is already gone, so be it

     with (path / filename).open('wb+') as destination:
+        # prep file for reading
+        if hasattr(file, "chunks"):
+            chunks = file.chunks()
+        else:
+            try:
+                file.seek(0)
+            except AttributeError:
+                pass
+            chunks = [file.read()] # pretend we have chunks
+
         if filename.suffix in settings.MEETING_VALID_MIME_TYPE_EXTENSIONS['text/html']:
-            file.open()
-            text = file.read()
+            text = b"".join(chunks)
             if encoding:
                 try:
                     text = text.decode(encoding)

@@ -777,11 +787,8 @@ def handle_upload_file(file, filename, meeting, subdir, request=None, encoding=N
                     f"please check the resulting content. "
                 ))
         else:
-            if hasattr(file, 'chunks'):
-                for chunk in file.chunks():
-                    destination.write(chunk)
-            else:
-                destination.write(file.read())
+            for chunk in chunks:
+                destination.write(chunk)

     # unzip zipfile
     if is_zipfile:
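The effect of the change above is that Django `UploadedFile` objects and plain binary file objects are normalized into one iterable of byte chunks before anything is written or decoded. A standalone sketch of that normalization (the helper name and chunk size are illustrative; only Django's `UploadedFile.chunks()` is assumed):

```python
def iter_chunks(file_obj, chunk_size=64 * 1024):
    """Yield bytes from either a Django UploadedFile or a plain binary file object."""
    if hasattr(file_obj, "chunks"):      # Django UploadedFile
        yield from file_obj.chunks()
    else:                                # plain file-like object
        try:
            file_obj.seek(0)             # rewind when supported
        except AttributeError:
            pass
        yield from iter(lambda: file_obj.read(chunk_size), b"")

# usage: text = b"".join(iter_chunks(uploaded)).decode("utf-8")
```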
@@ -33,6 +33,7 @@ from django.conf import settings
 from django.contrib import messages
 from django.contrib.auth.decorators import login_required
 from django.core.exceptions import ValidationError
+from django.core.files.uploadedfile import SimpleUploadedFile
 from django.core.validators import URLValidator
 from django.urls import reverse,reverse_lazy
 from django.db.models import F, Max, Q

@@ -1616,7 +1617,6 @@ def agenda_plain(request, num=None, name=None, base=None, ext=None, owner=None,
             "now": timezone.now().astimezone(meeting.tz()),
             "display_timezone": display_timezone,
             "is_current_meeting": is_current_meeting,
-            "use_notes": meeting.uses_notes(),
             "cache_time": 150 if is_current_meeting else 3600,
         },
         content_type=mimetype[ext],

@@ -1691,7 +1691,7 @@ def api_get_agenda_data (request, num=None):
         },
         "categories": filter_organizer.get_filter_categories(),
         "isCurrentMeeting": is_current_meeting,
-        "useNotes": meeting.uses_notes(),
+        "usesNotes": meeting.uses_notes(),
         "schedule": list(map(agenda_extract_schedule, filtered_assignments)),
         "floors": list(map(agenda_extract_floorplan, floors))
     })

@@ -1702,7 +1702,7 @@ def api_get_session_materials(request, session_id=None):

     minutes = session.minutes()
     slides_actions = []
-    if can_manage_session_materials(request.user, session.group, session):
+    if can_manage_session_materials(request.user, session.group, session) or not session.is_material_submission_cutoff():
         slides_actions.append(
             {
                 "label": "Upload slides",

@@ -1712,16 +1712,6 @@ def api_get_session_materials(request, session_id=None):
                 ),
             }
         )
-    elif not session.is_material_submission_cutoff():
-        slides_actions.append(
-            {
-                "label": "Propose slides",
-                "url": reverse(
-                    "ietf.meeting.views.propose_session_slides",
-                    kwargs={"num": session.meeting.number, "session_id": session.pk},
-                ),
-            }
-        )
     else:
         pass # no action available if it's past cutoff

@@ -2498,7 +2488,6 @@ def session_details(request, num, acronym):
                   'can_manage_materials' : can_manage,
                   'can_view_request': can_view_request,
                   'thisweek': datetime_today()-datetime.timedelta(days=7),
-                  'use_notes': meeting.uses_notes(),
               })

 class SessionDraftsForm(forms.Form):

@@ -2805,6 +2794,17 @@ class UploadOrEnterAgendaForm(UploadAgendaForm):
         elif submission_method == "enter":
             require_field("content")

+    def get_file(self):
+        """Get content as a file-like object"""
+        if self.cleaned_data.get("submission_method") == "upload":
+            return self.cleaned_data["file"]
+        else:
+            return SimpleUploadedFile(
+                name="uploaded.md",
+                content=self.cleaned_data["content"].encode("utf-8"),
+                content_type="text/markdown;charset=utf-8",
+            )
+
 def upload_session_agenda(request, session_id, num):
     # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure
     session = get_object_or_404(Session,pk=session_id)
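With `get_file()` in place, the view no longer needs a temp-file dance for text typed into the form: either path yields an object with the `UploadedFile` interface. A short usage sketch (the wrapper function and sample text are illustrative; `SimpleUploadedFile` is the Django class used above):

```python
from django.core.files.uploadedfile import SimpleUploadedFile

def as_uploaded_file(text: str) -> SimpleUploadedFile:
    """Wrap typed-in markdown so it looks like a normal form upload."""
    return SimpleUploadedFile(
        name="uploaded.md",
        content=text.encode("utf-8"),
        content_type="text/markdown;charset=utf-8",
    )

# as_uploaded_file("# Agenda\n1. Intro").name  -> "uploaded.md"
# The object also provides .read() and .chunks(), so handle_upload_file()
# can treat it exactly like request.FILES["file"].
```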
@@ -2825,21 +2825,8 @@ def upload_session_agenda(request, session_id, num):
     if request.method == 'POST':
         form = UploadOrEnterAgendaForm(show_apply_to_all_checkbox,request.POST,request.FILES)
         if form.is_valid():
-            submission_method = form.cleaned_data['submission_method']
-            if submission_method == "upload":
-                file = request.FILES['file']
-                _, ext = os.path.splitext(file.name)
-            else:
-                if agenda_sp:
-                    doc = agenda_sp.document
-                    _, ext = os.path.splitext(doc.uploaded_filename)
-                else:
-                    ext = ".md"
-                fd, name = tempfile.mkstemp(suffix=ext, text=True)
-                os.close(fd)
-                with open(name, "w") as file:
-                    file.write(form.cleaned_data['content'])
-                file = open(name, "rb")
+            file = form.get_file()
+            _, ext = os.path.splitext(file.name)
             apply_to_all = session.type.slug == 'regular'
             if show_apply_to_all_checkbox:
                 apply_to_all = form.cleaned_data['apply_to_all']

@@ -2920,6 +2907,7 @@ def upload_session_agenda(request, session_id, num):
     })


+@login_required
 def upload_session_slides(request, session_id, num, name=None):
     """Upload new or replacement slides for a session

@@ -2927,10 +2915,7 @@ def upload_session_slides(request, session_id, num, name=None):
     """
     # num is redundant, but we're dragging it along an artifact of where we are in the current URL structure
     session = get_object_or_404(Session, pk=session_id)
-    if not session.can_manage_materials(request.user):
-        permission_denied(
-            request, "You don't have permission to upload slides for this session."
-        )
+    can_manage = session.can_manage_materials(request.user)
     if session.is_material_submission_cutoff() and not has_role(
         request.user, "Secretariat"
     ):

@@ -2955,7 +2940,7 @@ def upload_session_slides(request, session_id, num, name=None):

     if request.method == "POST":
         form = UploadSlidesForm(
-            session, show_apply_to_all_checkbox, request.POST, request.FILES
+            session, show_apply_to_all_checkbox, can_manage, request.POST, request.FILES
         )
         if form.is_valid():
             file = request.FILES["file"]

@@ -2963,6 +2948,46 @@ def upload_session_slides(request, session_id, num, name=None):
             apply_to_all = session.type_id == "regular"
             if show_apply_to_all_checkbox:
                 apply_to_all = form.cleaned_data["apply_to_all"]
+            if can_manage:
+                approved = form.cleaned_data["approved"]
+            else:
+                approved = False
+
+            # Propose slides if not auto-approved
+            if not approved:
+                title = form.cleaned_data['title']
+                submission = SlideSubmission.objects.create(session = session, title = title, filename = '', apply_to_all = apply_to_all, submitter=request.user.person)
+
+                if session.meeting.type_id=='ietf':
+                    name = 'slides-%s-%s' % (session.meeting.number,
+                                             session.group.acronym)
+                    if not apply_to_all:
+                        name += '-%s' % (session.docname_token(),)
+                else:
+                    name = 'slides-%s-%s' % (session.meeting.number, session.docname_token())
+                name = name + '-' + slugify(title).replace('_', '-')[:128]
+                filename = '%s-ss%d%s'% (name, submission.id, ext)
+                destination = io.open(os.path.join(settings.SLIDE_STAGING_PATH, filename),'wb+')
+                for chunk in file.chunks():
+                    destination.write(chunk)
+                destination.close()
+
+                submission.filename = filename
+                submission.save()
+
+                (to, cc) = gather_address_lists('slides_proposed', group=session.group, proposer=request.user.person).as_strings()
+                msg_txt = render_to_string("meeting/slides_proposed.txt", {
+                    "to": to,
+                    "cc": cc,
+                    "submission": submission,
+                    "settings": settings,
+                })
+                msg = infer_message(msg_txt)
+                msg.by = request.user.person
+                msg.save()
+                send_mail_message(request, msg)
+                messages.success(request, 'Successfully submitted proposed slides.')
+                return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym)
+
             # Handle creation / update of the Document (but do not save yet)
             if doc is not None:
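A single view now covers both paths: users who can manage the session's materials may tick "approved" and publish directly, while everyone else always ends up in the proposal flow that mails the chairs. A tiny sketch of just that decision (the function name is illustrative, not part of the patch):

```python
def resolve_approval(can_manage: bool, approved_field: bool) -> bool:
    """Only users who can manage the session's materials may self-approve;
    everyone else's upload becomes a proposed SlideSubmission."""
    return approved_field if can_manage else False

# resolve_approval(True, True)   -> published immediately, no approval mail
# resolve_approval(False, True)  -> still only a proposal; chairs are notified
```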
@@ -3076,7 +3101,7 @@ def upload_session_slides(request, session_id, num, name=None):
         initial = {}
         if doc is not None:
             initial = {"title": doc.title}
-        form = UploadSlidesForm(session, show_apply_to_all_checkbox, initial=initial)
+        form = UploadSlidesForm(session, show_apply_to_all_checkbox, can_manage, initial=initial)

     return render(
         request,

@@ -3085,77 +3110,12 @@ def upload_session_slides(request, session_id, num, name=None):
             "session": session,
             "session_number": session_number,
             "slides_sp": session.presentations.filter(document=doc).first() if doc else None,
+            "manage": session.can_manage_materials(request.user),
             "form": form,
         },
     )


-@login_required
-def propose_session_slides(request, session_id, num):
-    session = get_object_or_404(Session,pk=session_id)
-    if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"):
-        permission_denied(request, "The materials cutoff for this session has passed. Contact the secretariat for further action.")
-
-    session_number = None
-    sessions = get_sessions(session.meeting.number,session.group.acronym)
-    show_apply_to_all_checkbox = len(sessions) > 1 if session.type_id == 'regular' else False
-    if len(sessions) > 1:
-        session_number = 1 + sessions.index(session)
-
-
-    if request.method == 'POST':
-        form = UploadSlidesForm(session, show_apply_to_all_checkbox,request.POST,request.FILES)
-        if form.is_valid():
-            file = request.FILES['file']
-            _, ext = os.path.splitext(file.name)
-            apply_to_all = session.type_id == 'regular'
-            if show_apply_to_all_checkbox:
-                apply_to_all = form.cleaned_data['apply_to_all']
-            title = form.cleaned_data['title']
-
-            submission = SlideSubmission.objects.create(session = session, title = title, filename = '', apply_to_all = apply_to_all, submitter=request.user.person)
-
-            if session.meeting.type_id=='ietf':
-                name = 'slides-%s-%s' % (session.meeting.number,
-                                         session.group.acronym)
-                if not apply_to_all:
-                    name += '-%s' % (session.docname_token(),)
-            else:
-                name = 'slides-%s-%s' % (session.meeting.number, session.docname_token())
-            name = name + '-' + slugify(title).replace('_', '-')[:128]
-            filename = '%s-ss%d%s'% (name, submission.id, ext)
-            destination = io.open(os.path.join(settings.SLIDE_STAGING_PATH, filename),'wb+')
-            for chunk in file.chunks():
-                destination.write(chunk)
-            destination.close()
-
-            submission.filename = filename
-            submission.save()
-
-            (to, cc) = gather_address_lists('slides_proposed', group=session.group, proposer=request.user.person).as_strings()
-            msg_txt = render_to_string("meeting/slides_proposed.txt", {
-                "to": to,
-                "cc": cc,
-                "submission": submission,
-                "settings": settings,
-            })
-            msg = infer_message(msg_txt)
-            msg.by = request.user.person
-            msg.save()
-            send_mail_message(request, msg)
-            messages.success(request, 'Successfully submitted proposed slides.')
-            return redirect('ietf.meeting.views.session_details',num=num,acronym=session.group.acronym)
-    else:
-        initial = {}
-        form = UploadSlidesForm(session, show_apply_to_all_checkbox, initial=initial)
-
-    return render(request, "meeting/propose_session_slides.html",
-                  {'session': session,
-                   'session_number': session_number,
-                   'form': form,
-                  })
-
-
 def remove_sessionpresentation(request, session_id, num, name):
     sp = get_object_or_404(
         SessionPresentation, session_id=session_id, document__name=name
@@ -4131,6 +4091,13 @@ def organize_proceedings_sessions(sessions):

 def proceedings(request, num=None):

+    def area_and_group_acronyms_from_session(s):
+        area = s.group_parent_at_the_time()
+        if area == None:
+            area = s.group.parent
+        group = s.group_at_the_time()
+        return (area.acronym, group.acronym)
+
     meeting = get_meeting(num)

     # Early proceedings were hosted on www.ietf.org rather than the datatracker

@@ -4181,12 +4148,11 @@ def proceedings(request, num=None):
         .exclude(current_status='notmeet')
     )

-    ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym='edu').order_by('group__parent__acronym', 'group__acronym')
+    ietf = sessions.filter(group__parent__type__slug = 'area').exclude(group__acronym__in=['edu','iepg','tools'])
+    ietf = list(ietf)
+    ietf.sort(key=lambda s: area_and_group_acronyms_from_session(s))
     ietf_areas = []
-    for area, area_sessions in itertools.groupby(
-        ietf,
-        key=lambda s: s.group.parent
-    ):
+    for area, area_sessions in itertools.groupby(ietf, key=lambda s: s.group_parent_at_the_time()):
         meeting_groups, not_meeting_groups = organize_proceedings_sessions(area_sessions)
         ietf_areas.append((area, meeting_groups, not_meeting_groups))
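The explicit sort before `itertools.groupby` is load-bearing: `groupby` only merges *adjacent* items with equal keys, so an unsorted session list would split one area into several entries. A self-contained illustration with made-up (area, group) pairs:

```python
import itertools

sessions = [("art", "core"), ("ops", "netconf"), ("art", "httpbis")]

# unsorted input: "art" shows up twice because its sessions are not adjacent
keys = [k for k, _ in itertools.groupby(sessions, key=lambda s: s[0])]
assert keys == ["art", "ops", "art"]

# sorting by (area, group) first gives one group per area, with groups ordered inside it
sessions.sort(key=lambda s: (s[0], s[1]))
keys = [k for k, _ in itertools.groupby(sessions, key=lambda s: s[0])]
assert keys == ["art", "ops"]
```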
@@ -5066,6 +5032,7 @@ def approve_proposed_slides(request, slidesubmission_id, num):
                 "cc": cc,
                 "submission": submission,
                 "settings": settings,
+                "approver": request.user.person
             })
             send_mail_text(request, to, None, subject, body, cc=cc)
             return redirect('ietf.meeting.views.session_details',num=num,acronym=acronym)

@@ -14058,7 +14058,7 @@
     },
     {
         "fields": {
-            "desc": "Legacy stream",
+            "desc": "Legacy",
             "name": "Legacy",
             "order": 6,
             "used": true

ietf/name/migrations/0014_change_legacy_stream_desc.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.db import migrations
+
+def forward(apps, schema_editor):
+    StreamName = apps.get_model("name", "StreamName")
+    StreamName.objects.filter(pk="legacy").update(desc="Legacy")
+
+def reverse(apps, schema_editor):
+    StreamName = apps.get_model("name", "StreamName")
+    StreamName.objects.filter(pk="legacy").update(desc="Legacy stream")
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("name", "0013_narrativeminutes"),
+    ]
+
+    operations = [
+        migrations.RunPython(forward, reverse)
+    ]
@@ -86,7 +86,7 @@
 <h2 id="downrefs">Downward References</h2>
 {% for ref in downrefs %}
     <p>Add {{ref.target.name}}
-       ({{ref.target.std_level}} - {{ref.target.stream.desc}})
+       ({{ref.target.std_level}} - {{ref.target.stream.desc}} stream)
       to downref registry.<br>
       {% if not ref.target.std_level %}
          +++ Warning: The standards level has not been set yet!!!<br>

@@ -436,7 +436,7 @@ STATICFILES_DIRS = (

 INSTALLED_APPS = [
     # Django apps
-    'django.contrib.admin',
+    'ietf.admin', # replaces django.contrib.admin
    'django.contrib.admindocs',
    'django.contrib.auth',
    'django.contrib.contenttypes',

@@ -479,6 +479,7 @@ INSTALLED_APPS = [
    'ietf.release',
    'ietf.review',
    'ietf.stats',
+   'ietf.status',
    'ietf.submit',
    'ietf.sync',
    'ietf.utils',

@@ -1189,6 +1189,13 @@ blockquote {
   border-left: solid 1px var(--bs-body-color);
 }

+iframe.status {
+  background-color:transparent;
+  border:none;
+  width:100%;
+  height:3.5em;
+}
+
 .overflow-shadows {
   transition: box-shadow 0.5s;
 }
ietf/status/__init__.py (new file, empty)

ietf/status/admin.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from datetime import datetime
+from django.contrib import admin
+from django.template.defaultfilters import slugify
+from .models import Status
+
+class StatusAdmin(admin.ModelAdmin):
+    list_display = ['title', 'body', 'active', 'date', 'by', 'page']
+    raw_id_fields = ['by']
+
+    def get_changeform_initial_data(self, request):
+        date = datetime.now()
+        return {
+            "slug": slugify(f"{date.year}-{date.month}-{date.day}-"),
+        }
+
+admin.site.register(Status, StatusAdmin)

ietf/status/apps.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from django.apps import AppConfig
+
+
+class StatusConfig(AppConfig):
+    default_auto_field = "django.db.models.BigAutoField"
+    name = "ietf.status"
ietf/status/migrations/0001_initial.py (new file, 75 lines)
@@ -0,0 +1,75 @@
+# Generated by Django 4.2.13 on 2024-07-21 22:47
+
+from django.db import migrations, models
+import django.db.models.deletion
+import django.utils.timezone
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+        ("person", "0002_alter_historicalperson_ascii_and_more"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="Status",
+            fields=[
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("date", models.DateTimeField(default=django.utils.timezone.now)),
+                ("slug", models.SlugField(unique=True)),
+                (
+                    "title",
+                    models.CharField(
+                        help_text="Your site status notification title.",
+                        max_length=255,
+                        verbose_name="Status title",
+                    ),
+                ),
+                (
+                    "body",
+                    models.CharField(
+                        help_text="Your site status notification body.",
+                        max_length=255,
+                        verbose_name="Status body",
+                    ),
+                ),
+                (
+                    "active",
+                    models.BooleanField(
+                        default=True,
+                        help_text="Only active messages will be shown.",
+                        verbose_name="Active?",
+                    ),
+                ),
+                (
+                    "page",
+                    models.TextField(
+                        blank=True,
+                        help_text="More detail shown after people click 'Read more'. If empty no 'read more' will be shown",
+                        null=True,
+                        verbose_name="More detail (markdown)",
+                    ),
+                ),
+                (
+                    "by",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE, to="person.person"
+                    ),
+                ),
+            ],
+            options={
+                "verbose_name_plural": "statuses",
+            },
+        ),
+    ]
ietf/status/migrations/__init__.py (new file, empty)

ietf/status/models.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from django.utils import timezone
+from django.db import models
+from django.db.models import ForeignKey
+
+import debug # pyflakes:ignore
+
+class Status(models.Model):
+    name = 'Status'
+
+    date = models.DateTimeField(default=timezone.now)
+    slug = models.SlugField(blank=False, null=False, unique=True)
+    title = models.CharField(max_length=255, verbose_name="Status title", help_text="Your site status notification title.")
+    body = models.CharField(max_length=255, verbose_name="Status body", help_text="Your site status notification body.", unique=False)
+    active = models.BooleanField(default=True, verbose_name="Active?", help_text="Only active messages will be shown.")
+    by = ForeignKey('person.Person', on_delete=models.CASCADE)
+    page = models.TextField(blank=True, null=True, verbose_name="More detail (markdown)", help_text="More detail shown after people click 'Read more'. If empty no 'read more' will be shown")
+
+    def __str__(self):
+        return "{} {} {} {}".format(self.date, self.active, self.by, self.title)
+    class Meta:
+        verbose_name_plural = "statuses"
ietf/status/tests.py
Normal file
120
ietf/status/tests.py
Normal file
|
@ -0,0 +1,120 @@
|
||||||
|
# Copyright The IETF Trust 2024, All Rights Reserved
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import debug # pyflakes:ignore
|
||||||
|
|
||||||
|
from django.urls import reverse as urlreverse
|
||||||
|
from ietf.utils.test_utils import TestCase
|
||||||
|
from ietf.person.models import Person
|
||||||
|
from ietf.status.models import Status
|
||||||
|
|
||||||
|
class StatusTests(TestCase):
|
||||||
|
def test_status_latest_html(self):
|
||||||
|
status = Status.objects.create(
|
||||||
|
title = "my title 1",
|
||||||
|
body = "my body 1",
|
||||||
|
active = True,
|
||||||
|
by = Person.objects.get(user__username='ad'),
|
||||||
|
slug = "2024-1-1-my-title-1"
|
||||||
|
)
|
||||||
|
status.save()
|
||||||
|
|
||||||
|
url = urlreverse('ietf.status.views.status_latest_html')
|
||||||
|
r = self.client.get(url)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertContains(r, 'my title 1')
|
||||||
|
self.assertContains(r, 'my body 1')
|
||||||
|
|
||||||
|
status.delete()
|
||||||
|
|
||||||
|
r = self.client.get(url)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertNotContains(r, 'my title 1')
|
||||||
|
self.assertNotContains(r, 'my body 1')
|
||||||
|
|
||||||
|
def test_status_latest_json(self):
|
||||||
|
url = urlreverse('ietf.status.views.status_latest_json')
|
||||||
|
r = self.client.get(url)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
data = r.json()
|
||||||
|
self.assertFalse(data["hasMessage"])
|
||||||
|
|
||||||
|
status = Status.objects.create(
|
||||||
|
title = "my title 1",
|
||||||
|
body = "my body 1",
|
||||||
|
active = True,
|
||||||
|
by = Person.objects.get(user__username='ad'),
|
||||||
|
slug = "2024-1-1-my-title-1"
|
||||||
|
)
|
||||||
|
status.save()
|
||||||
|
|
||||||
|
r = self.client.get(url)
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
data = r.json()
|
||||||
|
self.assertTrue(data["hasMessage"])
|
||||||
|
self.assertEqual(data["title"], "my title 1")
|
||||||
|
self.assertEqual(data["body"], "my body 1")
|
||||||
|
self.assertEqual(data["slug"], '2024-1-1-my-title-1')
|
||||||
|
self.assertEqual(data["url"], '/status/2024-1-1-my-title-1')
|
||||||
|
|
||||||
|
status.delete()
|
||||||
|
|
||||||
|
def test_status_latest_redirect(self):
|
||||||
|
url = urlreverse('ietf.status.views.status_latest_redirect')
|
||||||
|
r = self.client.get(url)
|
||||||
|
# without a Status it should return Not Found
|
||||||
|
self.assertEqual(r.status_code, 404)
|
||||||
|
|
||||||
|
status = Status.objects.create(
|
||||||
|
title = "my title 1",
|
||||||
|
body = "my body 1",
|
||||||
|
active = True,
|
||||||
|
by = Person.objects.get(user__username='ad'),
|
||||||
|
slug = "2024-1-1-my-title-1"
|
||||||
|
)
|
||||||
|
status.save()
|
||||||
|
|
||||||
|
r = self.client.get(url)
|
||||||
|
# with a Status it should redirect
|
||||||
|
self.assertEqual(r.status_code, 302)
|
||||||
|
self.assertEqual(r.headers["Location"], "/status/2024-1-1-my-title-1")
|
||||||
|
|
||||||
|
status.delete()
|
||||||
|
|
||||||
|
def test_status_page(self):
|
||||||
|
slug = "2024-1-1-my-unique-slug"
|
||||||
|
r = self.client.get(f'/status/{slug}/')
|
||||||
|
# without a Status it should return Not Found
|
||||||
|
self.assertEqual(r.status_code, 404)
|
||||||
|
|
||||||
|
# status without `page` markdown should still 200
|
||||||
|
status = Status.objects.create(
|
||||||
|
title = "my title 1",
|
||||||
|
body = "my body 1",
|
||||||
|
active = True,
|
||||||
|
by = Person.objects.get(user__username='ad'),
|
||||||
|
slug = slug
|
||||||
|
)
|
||||||
|
status.save()
|
||||||
|
|
||||||
|
r = self.client.get(f'/status/{slug}/')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
|
||||||
|
status.delete()
|
||||||
|
|
||||||
|
test_string = 'a string that'
|
||||||
|
status = Status.objects.create(
|
||||||
|
title = "my title 1",
|
||||||
|
body = "my body 1",
|
||||||
|
active = True,
|
||||||
|
by = Person.objects.get(user__username='ad'),
|
||||||
|
slug = slug,
|
||||||
|
page = f"# {test_string}"
|
||||||
|
)
|
||||||
|
status.save()
|
||||||
|
|
||||||
|
r = self.client.get(f'/status/{slug}/')
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertContains(r, test_string)
|
||||||
|
|
||||||
|
status.delete()
|
ietf/status/urls.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from ietf.status import views
+from ietf.utils.urls import url
+
+urlpatterns = [
+    url(r"^$", views.status_latest_redirect),
+    url(r"^latest$", views.status_latest_html),
+    url(r"^latest.json$", views.status_latest_json),
+    url(r"(?P<slug>.*)", views.status_page)
+]

ietf/status/views.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from django.urls import reverse as urlreverse
+from django.http import HttpResponseRedirect, HttpResponseNotFound, JsonResponse
+from ietf.utils import markdown
+from django.shortcuts import render, get_object_or_404
+from ietf.status.models import Status
+
+import debug # pyflakes:ignore
+
+def get_last_active_status():
+    status = Status.objects.filter(active=True).order_by("-date").first()
+    if status is None:
+        return { "hasMessage": False }
+
+    context = {
+        "hasMessage": True,
+        "id": status.id,
+        "slug": status.slug,
+        "title": status.title,
+        "body": status.body,
+        "url": urlreverse("ietf.status.views.status_page", kwargs={ "slug": status.slug }),
+        "date": status.date.isoformat()
+    }
+    return context
+
+def status_latest_html(request):
+    return render(request, "status/latest.html", context=get_last_active_status())
+
+def status_page(request, slug):
+    sanitised_slug = slug.rstrip("/")
+    status = get_object_or_404(Status, slug=sanitised_slug)
+    return render(request, "status/status.html", context={
+        'status': status,
+        'status_page_html': markdown.markdown(status.page or ""),
+    })
+
+def status_latest_json(request):
+    return JsonResponse(get_last_active_status())
+
+def status_latest_redirect(request):
+    context = get_last_active_status()
+    if context["hasMessage"] == True:
+        return HttpResponseRedirect(context["url"])
+    return HttpResponseNotFound()
ietf/templates/admin/base.html
Normal file
27
ietf/templates/admin/base.html
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
{% extends 'admin/base.html' %}
|
||||||
|
{% load static %}
|
||||||
|
{% block extrastyle %}{{ block.super }}
|
||||||
|
{% if server_mode and server_mode != "production" %}
|
||||||
|
<style>
|
||||||
|
{# grab colors that match bootstrap so we don't have to import the css #}
|
||||||
|
html, :root{
|
||||||
|
--bs-danger-bg-subtle: #F8D7DAFF;
|
||||||
|
--bs-danger-text-emphasis: #58151CFF;
|
||||||
|
--bs-danger: #DC3545FF;
|
||||||
|
--bs-secondary: #6C757DFF;
|
||||||
|
--bs-primary-text-emphasis: #052C65FF;
|
||||||
|
}
|
||||||
|
html[data-theme="light"], :root {
|
||||||
|
--primary: var(--bs-danger-bg-subtle);
|
||||||
|
--secondary: var(--bs-danger-bg-subtle);
|
||||||
|
--accent: var(--bs-danger-text-emphasis);
|
||||||
|
--primary-fg: var(--bs-primary-text-emphasis);
|
||||||
|
--link-fg: var(--bs-danger-text-emphasis);
|
||||||
|
--header-color: var(--bs-secondary);
|
||||||
|
--breadcrumbs-fg: var(--bs-secondary);
|
||||||
|
--breadcrumbs-link-fg: var(--link-fg);
|
||||||
|
}
|
||||||
|
span.text-danger { color: var(--bs-danger); }
|
||||||
|
</style>
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
|
@ -34,6 +34,7 @@
|
||||||
<body {% block bodyAttrs %}{% endblock %} class="navbar-offset position-relative"
|
<body {% block bodyAttrs %}{% endblock %} class="navbar-offset position-relative"
|
||||||
data-group-menu-data-url="{% url 'ietf.group.views.group_menu_data' %}">
|
data-group-menu-data-url="{% url 'ietf.group.views.group_menu_data' %}">
|
||||||
{% analytical_body_top %}
|
{% analytical_body_top %}
|
||||||
|
{% include "base/status.html" %}
|
||||||
<a class="visually-hidden visually-hidden-focusable" href="#content">Skip to main content</a>
|
<a class="visually-hidden visually-hidden-focusable" href="#content">Skip to main content</a>
|
||||||
<nav class="navbar navbar-expand-lg fixed-top {% if server_mode and server_mode != "production" %}bg-danger-subtle{% else %}bg-secondary-subtle{% endif %}">
|
<nav class="navbar navbar-expand-lg fixed-top {% if server_mode and server_mode != "production" %}bg-danger-subtle{% else %}bg-secondary-subtle{% endif %}">
|
||||||
<div class="container-fluid">
|
<div class="container-fluid">
|
||||||
|
@ -85,7 +86,7 @@
|
||||||
</div>
|
</div>
|
||||||
</nav>
|
</nav>
|
||||||
{% block precontent %}{% endblock %}
|
{% block precontent %}{% endblock %}
|
||||||
<div class="pt-3 container-fluid">
|
<main class="pt-3 container-fluid" id="main">
|
||||||
<div class="row">
|
<div class="row">
|
||||||
{% if request.COOKIES.left_menu == "on" and not hide_menu %}
|
{% if request.COOKIES.left_menu == "on" and not hide_menu %}
|
||||||
<div class="d-none d-md-block bg-light-subtle py-3 leftmenu small">
|
<div class="d-none d-md-block bg-light-subtle py-3 leftmenu small">
|
||||||
|
@ -114,7 +115,7 @@
|
||||||
{% block content_end %}{% endblock %}
|
{% block content_end %}{% endblock %}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</main>
|
||||||
{% block footer %}
|
{% block footer %}
|
||||||
<footer class="col-md-12 col-sm-12 border-top mt-5 py-5 bg-light-subtle text-center position-sticky">
|
<footer class="col-md-12 col-sm-12 border-top mt-5 py-5 bg-light-subtle text-center position-sticky">
|
||||||
<a href="https://www.ietf.org/" class="p-3">IETF</a>
|
<a href="https://www.ietf.org/" class="p-3">IETF</a>
|
||||||
|
|
2
ietf/templates/base/status.html
Normal file
2
ietf/templates/base/status.html
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
<noscript><iframe class="status" title="Site status" src="/status/latest"></iframe></noscript>
|
||||||
|
<div class="vue-embed" data-component="Status"></div>
|
|
@@ -4,6 +4,7 @@
 {% load origin %}
 {% load static %}
 {% load ietf_filters textfilters %}
+{% load django_vite %}
 {% origin %}
 <html data-bs-theme="auto" lang="en">
     <head>

@@ -28,6 +29,7 @@
         {% if html %}
             <link rel="stylesheet" href="{% static 'ietf/css/document_html_txt.css' %}">
         {% endif %}
+        {% vite_asset 'client/embedded.js' %}
         <script src="{% static 'ietf/js/document_html.js' %}"></script>
         <script src="{% static 'ietf/js/theme.js' %}"></script>
     {% endif %}

@@ -51,6 +53,7 @@
     </head>
     <body>
         {% analytical_body_top %}
+        {% include "base/status.html" %}
         <div class="btn-toolbar sidebar-toolbar position-fixed top-0 end-0 m-2 m-lg-3 d-print-none">
             <div class="dropdown">
                 <button class="btn btn-outline-secondary btn-sm me-1 dropdown-toggle d-flex align-items-center"
@@ -33,7 +33,7 @@ No IPR declarations have been submitted directly on this I-D.
 {% if downrefs %}
 The document contains these normative downward references.
 See RFC 3967 for additional information:
-{% for ref in downrefs %} {{ref.target.name}}: {{ref.target.title}} ({{ref.target.std_level}} - {{ref.target.stream.desc}})
+{% for ref in downrefs %} {{ref.target.name}}: {{ref.target.title}} ({{ref.target.std_level}} - {{ref.target.stream.desc}} stream)
 {% endfor %}{%endif%}

 {% endautoescape %}

@@ -78,9 +78,9 @@
 <div class="regular float-end">
     {# see note in the included templates re: show_agenda parameter and required JS import #}
     {% if s.meeting.type.slug == 'interim' %}
-        {% include "meeting/interim_session_buttons.html" with show_agenda=False show_empty=False session=s meeting=s.meeting use_notes=s.meeting.use_notes %}
+        {% include "meeting/interim_session_buttons.html" with show_agenda=False show_empty=False session=s meeting=s.meeting %}
     {% else %}
-        {% include "meeting/session_buttons_include.html" with show_agenda=False item=s.official_timeslotassignment session=s meeting=s.meeting use_notes=s.meeting.use_notes %}
+        {% include "meeting/session_buttons_include.html" with show_agenda=False item=s.official_timeslotassignment session=s meeting=s.meeting %}
     {% endif %}
 </div>
 {% endif %}

@@ -7,7 +7,9 @@
 {% filter center:72 %}{{ schedule.meeting.agenda_info_note|striptags|wordwrap:72|safe }}{% endfilter %}
 {% endif %}
 {% filter center:72 %}{{ schedule.meeting.date|date:"F j" }}-{% if schedule.meeting.date.month != schedule.meeting.end_date.month %}{{ schedule.meeting.end_date|date:"F " }}{% endif %}{{ schedule.meeting.end_date|date:"j, Y" }}{% endfilter %}
+{% if updated %}
 {% filter center:72 %}Updated {{ updated|date:"Y-m-d H:i:s T" }}{% endfilter %}
+{% endif %}

 {% filter center:72 %}IETF agendas are subject to change, up to and during the meeting.{% endfilter %}
 {% filter center:72 %}Times are shown in {% if display_timezone.lower == "utc" %}UTC{% else %}the {{ display_timezone }} time zone{% endif %}.{% endfilter %}

@@ -34,7 +34,7 @@
     </a>
 {% endif %}
 {# notes #}
-{% if use_notes %}
+{% if session.agenda.uses_notes %}
     <a class="btn btn-outline-primary"
        href="{{ session.notes_url }}"
        aria-label="Notepad for note-takers"
@@ -1,27 +0,0 @@
-{% extends "base.html" %}
-{# Copyright The IETF Trust 2015, All Rights Reserved #}
-{% load origin static django_bootstrap5 tz %}
-{% block title %}Propose Slides for {{ session.meeting }} : {{ session.group.acronym }}{% endblock %}
-{% block content %}
-    {% origin %}
-    <h1>
-        Propose Slides for {{ session.meeting }}
-        <br>
-        <small class="text-body-secondary">{{ session.group.acronym }}
-            {% if session.name %}: {{ session.name }}{% endif %}
-        </small>
-    </h1>
-    {% if session_number %}
-        <h2 class="mt-3">
-            Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}
-        </h2>
-    {% endif %}
-    <p class="alert alert-info my-3">
-        This form will allow you to propose a slide deck to the session chairs. After you upload your proposal, mail will be sent to the session chairs asking for their approval.
-    </p>
-    <form enctype="multipart/form-data" method="post">
-        {% csrf_token %}
-        {% bootstrap_form form %}
-        <button type="submit" class="btn btn-primary">Upload</button>
-    </form>
-{% endblock %}
@ -41,7 +41,7 @@
|
||||||
</a>
|
</a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{# Notes #}
|
{# Notes #}
|
||||||
{% if use_notes %}
|
{% if meeting.uses_notes %}
|
||||||
<a class="btn btn-outline-primary"
|
<a class="btn btn-outline-primary"
|
||||||
role="button"
|
role="button"
|
||||||
href="{{ session.notes_url }}"
|
href="{{ session.notes_url }}"
|
||||||
|
@ -126,7 +126,7 @@
|
||||||
</a>
|
</a>
|
||||||
{% else %}
|
{% else %}
|
||||||
{# chat logs #}
|
{# chat logs #}
|
||||||
{% if meeting.number|add:"0" >= 60 %}
|
{% if meeting.has_chat_logs %}
|
||||||
<a class="btn btn-outline-primary"
|
<a class="btn btn-outline-primary"
|
||||||
role="button"
|
role="button"
|
||||||
href="{{session.chat_archive_url}}"
|
href="{{session.chat_archive_url}}"
|
||||||
|
@ -136,7 +136,7 @@
|
||||||
</a>
|
</a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{# Recordings #}
|
{# Recordings #}
|
||||||
{% if meeting.number|add:"0" >= 80 %}
|
{% if meeting.has_recordings %}
|
||||||
{% with session.recordings as recordings %}
|
{% with session.recordings as recordings %}
|
||||||
{% if recordings %}
|
{% if recordings %}
|
||||||
{# There's no guaranteed order, so this is a bit messy: #}
|
{# There's no guaranteed order, so this is a bit messy: #}
|
||||||
|
@ -229,7 +229,7 @@
|
||||||
</li>
|
</li>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{# Notes #}
|
{# Notes #}
|
||||||
{% if use_notes %}
|
{% if meeting.uses_notes %}
|
||||||
<li>
|
<li>
|
||||||
<a class="dropdown-item" href="{{ session.notes_url }}">
|
<a class="dropdown-item" href="{{ session.notes_url }}">
|
||||||
<i class="bi bi-journal-text"></i> Notepad for note-takers
|
<i class="bi bi-journal-text"></i> Notepad for note-takers
|
||||||
|
@ -303,7 +303,7 @@
|
||||||
</li>
|
</li>
|
||||||
{% else %}
|
{% else %}
|
||||||
{# chat logs #}
|
{# chat logs #}
|
||||||
{% if meeting.number|add:"0" >= 60 %}
|
{% if meeting.has_chat_logs %}
|
||||||
<li>
|
<li>
|
||||||
<a class="dropdown-item"
|
<a class="dropdown-item"
|
||||||
href="session.chat_room_url">
|
href="session.chat_room_url">
|
||||||
|
@ -312,7 +312,7 @@
|
||||||
</li>
|
</li>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{# Recordings #}
|
{# Recordings #}
|
||||||
{% if meeting.number|add:"0" >= 80 %}
|
{% if meeting.has_recordings %}
|
||||||
{% with session.recordings as recordings %}
|
{% with session.recordings as recordings %}
|
||||||
{% if recordings %}
|
{% if recordings %}
|
||||||
{# There's no guaranteed order, so this is a bit messy: #}
|
{# There's no guaranteed order, so this is a bit messy: #}
|
||||||
|
|
|
@ -9,7 +9,7 @@
|
||||||
{% if meeting.type.slug == 'interim' %}
|
{% if meeting.type.slug == 'interim' %}
|
||||||
{% include "meeting/interim_session_buttons.html" with show_agenda=False show_empty=False %}
|
{% include "meeting/interim_session_buttons.html" with show_agenda=False show_empty=False %}
|
||||||
{% else %}
|
{% else %}
|
||||||
{% include "meeting/session_buttons_include.html" with show_agenda=False item=session.official_timeslotassignment use_notes=session.meeting.use_notes %}
|
{% include "meeting/session_buttons_include.html" with show_agenda=False item=session.official_timeslotassignment %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</div>
|
</div>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
@ -187,7 +187,7 @@
|
||||||
</a>
|
</a>
|
||||||
{% elif request.user.is_authenticated and not session.is_material_submission_cutoff %}
|
{% elif request.user.is_authenticated and not session.is_material_submission_cutoff %}
|
||||||
<a class="btn btn-primary proposeslides"
|
<a class="btn btn-primary proposeslides"
|
||||||
href="{% url 'ietf.meeting.views.propose_session_slides' session_id=session.pk num=session.meeting.number %}">
|
href="{% url 'ietf.meeting.views.upload_session_slides' session_id=session.pk num=session.meeting.number %}">
|
||||||
Propose slides
|
Propose slides
|
||||||
</a>
|
</a>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
@ -230,7 +230,7 @@
|
||||||
<table class="table table-sm table-striped meeting-tools"
|
<table class="table table-sm table-striped meeting-tools"
|
||||||
id="meeting_tools_{{ session.pk }}">
|
id="meeting_tools_{{ session.pk }}">
|
||||||
<tbody>
|
<tbody>
|
||||||
{% if use_notes %}
|
{% if meeting.uses_notes %}
|
||||||
<tr>
|
<tr>
|
||||||
<td>
|
<td>
|
||||||
<a href="{{ session.notes_url }}">
|
<a href="{{ session.notes_url }}">
|
||||||
|
@@ -310,7 +310,7 @@
 <table class="table table-sm table-striped meeting-tools"
 id="notes_and_recordings_{{ session.pk }}">
 <tbody>
-{% if use_notes %}
+{% if session.uses_notes %}
 <tr>
 <td>
 <a href="{{ session.notes_url }}">
@@ -320,7 +320,7 @@
 </tr>
 {% endif %}
 {# Recordings #}
-{% if meeting.number|add:"0" >= 80 %}
+{% if session.has_recordings %}
 {% with session.recordings as recordings %}
 {% if recordings %}
 {# There's no guaranteed order, so this is a bit messy: #}
@@ -370,4 +370,4 @@
 </table>
 {% endif %}
 {% endwith %}{% endwith %}
 {% endfor %}
@@ -1,4 +1,4 @@
-{% load ietf_filters %}{% autoescape off %}Your proposed slides have been approved for {{ submission.session.meeting }} : {{ submission.session.group.acronym }}{% if submission.session.name %} : {{submission.session.name}}{% endif %}
+{% load ietf_filters %}{% autoescape off %}Your proposed slides have been approved for {{ submission.session.meeting }} : {{ submission.session.group.acronym }}{% if submission.session.name %} : {{submission.session.name}}{% endif %} by {{approver}}
 
 Title: {{submission.title}}
 
@@ -89,7 +89,7 @@
 <span class="badge rounded-pill text-bg-warning">Cancelled</span>
 </td>
 {% else %}
-<td class="text-end">{% include "meeting/interim_session_buttons.html" with show_agenda=True use_notes=meeting.uses_notes %}</td>
+<td class="text-end">{% include "meeting/interim_session_buttons.html" with show_agenda=True %}</td>
 {% endif %}
 {% endwith %}
 {% else %}
@@ -17,15 +17,21 @@
 {% else %}
 Upload new
 {% endif %}
-slides for {{ session.meeting }}
-<br>
+slides for {{ session.meeting }} <br>
 <small class="text-body-secondary">
 {{ session.group.acronym }}
 {% if session.name %}: {{ session.name }}{% endif %}
 </small>
 </h1>
 {% if session_number %}
-<h2>Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}</h2>
+<h2 class="mt-3">
+Session {{ session_number }} : {{ session.official_timeslotassignment.timeslot.time|timezone:session.meeting.time_zone|date:"D M-d-Y Hi" }}
+</h2>
+{% endif %}
+{% if not manage %}
+<p class="alert alert-info my-3">
+This form will allow you to propose a slide deck to the session chairs. After you upload your proposal, mail will be sent to the session chairs asking for their approval.
+</p>
 {% endif %}
 {% if slides_sp %}<h3>{{ slides_sp.document.name }}</h3>{% endif %}
 <form class="my-3" enctype="multipart/form-data" method="post">
ietf/templates/status/latest.html (new file, 18 lines)
@@ -0,0 +1,18 @@
+{% load origin %}
+{% load ietf_filters static %}
+{% origin %}
+<meta name="color-scheme" content="light dark">
+<style type="text/css">
+{# this template doesn't inherit from base.html so it has its own styles #}
+body {background:transparent;font-family:sans-serif}
+h1{font-size:18px;display:inline}
+p{font-size:14px;display:inline}
+.unimportant{opacity:0.6}
+</style>
+<!-- This page is intended to be iframed, and is only for non-JavaScript browsers. -->
+{% if title %}
+<h1>{{ title }}</h1>
+<p>{{ body }} <a href="{{ url }}" target="_top" aria-label="read more about {{title}}">read more</a><br /><span class="unimportant">{{ date }}</span></p>
+{% else %}
+<p class="unimportant">No site status message.</p>
+{% endif %}
ietf/templates/status/status.html (new file, 15 lines)
@@ -0,0 +1,15 @@
+{% extends "base.html" %}
+{% load origin %}
+{% load ietf_filters static %}
+{% block content %}
+{% origin %}
+<h1 data-status-id="{{ status.id }}">
+{% block title %} {{ status.title }} {% endblock %}
+{% if status.active == False %}
+<span class="badge bg-secondary">inactive</span>
+{% endif %}
+</h1>
+<div>
+{{ status_page_html }}
+</div>
+{% endblock %}
@@ -20,8 +20,6 @@ from ietf.liaisons.sitemaps import LiaisonMap
 from ietf.utils.urls import url


-admin.autodiscover()
-
 # sometimes, this code gets called more than once, which is an
 # that seems impossible to work around.
 try:
@@ -63,6 +61,7 @@ urlpatterns = [
 url(r'^sitemap-(?P<section>.+).xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}),
 url(r'^sitemap.xml$', sitemap_views.index, { 'sitemaps': sitemaps}),
 url(r'^stats/', include('ietf.stats.urls')),
+url(r'^status/', include('ietf.status.urls')),
 url(r'^stream/', include(stream_urls)),
 url(r'^submit/', include('ietf.submit.urls')),
 url(r'^sync/', include('ietf.sync.urls')),
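The only change to ietf/urls.py is the new `status/` include; `ietf.status.urls` itself is not part of this excerpt. Judging from the new templates above and the Playwright spec further down, it needs to expose at least a `latest.json` endpoint, the iframe fallback, and a per-status page. A sketch under those assumptions (view names and patterns are illustrative, not the actual module):

```python
# ietf/status/urls.py -- hypothetical sketch; the real module is not shown in this diff.
from ietf.status import views  # assumed module layout
from ietf.utils.urls import url

urlpatterns = [
    url(r"^latest\.json$", views.status_latest_json),  # polled by the client-side Status component
    url(r"^latest\.html$", views.status_latest_html),  # renders templates/status/latest.html for iframes
    url(r"^(?P<slug>[^/]+)/?$", views.status_page),    # renders templates/status/status.html
]
```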
@@ -36,6 +36,7 @@ from django.urls import reverse as urlreverse

 import debug # pyflakes:ignore

+from ietf.admin.sites import AdminSite
 from ietf.person.name import name_parts, unidecode_name
 from ietf.submit.tests import submission_file
 from ietf.utils.draft import PlaintextDraft, getmeta
@@ -325,7 +326,7 @@ class AdminTestCase(TestCase):
 User.objects.create_superuser('admin', 'admin@example.org', 'admin+password')
 self.client.login(username='admin', password='admin+password')
 rtop = self.client.get("/admin/")
-self.assertContains(rtop, 'Django administration')
+self.assertContains(rtop, AdminSite.site_header())
 for name in self.apps:
 app_name = self.apps[name]
 self.assertContains(rtop, name)
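The admin test now compares against `AdminSite.site_header()` from the new `ietf.admin.sites` module, which is not included in this excerpt. All the test requires is a callable returning the expected header string; one possible shape is sketched below, with the wording of the header and the use of `SERVER_MODE` being assumptions.

```python
# Hypothetical sketch of ietf/admin/sites.py; the actual implementation is not shown here.
from django.conf import settings
from django.contrib import admin


class AdminSite(admin.AdminSite):
    @staticmethod
    def site_header():
        # Assumption: the header surfaces the server mode so non-production
        # admin instances are easy to spot.
        return f"Datatracker administration ({settings.SERVER_MODE})"
```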
@@ -23,5 +23,6 @@ server {
 proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for;
 proxy_set_header X-Real-IP $${keepempty}remote_addr;
 proxy_pass http://localhost:8000;
+client_max_body_size 0; # disable size check
 }
 }
@@ -17,6 +17,13 @@ def _multiline_to_list(s):
 # Default to "development". Production _must_ set DATATRACKER_SERVER_MODE="production" in the env!
 SERVER_MODE = os.environ.get("DATATRACKER_SERVER_MODE", "development")

+# Use X-Forwarded-Proto to determine request.is_secure(). This relies on CloudFlare overwriting the
+# value of the header if an incoming request sets it, which it does:
+# https://developers.cloudflare.com/fundamentals/reference/http-request-headers/#x-forwarded-proto
+# See also, especially the warnings:
+# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
+SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
+
 # Secrets
 _SECRET_KEY = os.environ.get("DATATRACKER_DJANGO_SECRET_KEY", None)
 if _SECRET_KEY is not None:
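With `SECURE_PROXY_SSL_HEADER` set, Django derives `request.is_secure()` from `X-Forwarded-Proto`, so the setting is only safe when the fronting proxy (CloudFlare here) always overwrites any client-supplied value, as the linked docs warn. A minimal sketch showing the effect; the test name and placement are illustrative and not part of this change:

```python
# Illustrative check of the new setting's behaviour, not code from this commit.
from django.test import RequestFactory, TestCase, override_settings


@override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
class SecureProxyHeaderTests(TestCase):
    def test_forwarded_proto_marks_request_secure(self):
        rf = RequestFactory()
        # A plain request still reports an insecure scheme...
        self.assertFalse(rf.get("/").is_secure())
        # ...but the trusted proxy header flips it to secure.
        self.assertTrue(rf.get("/", HTTP_X_FORWARDED_PROTO="https").is_secure())
```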
@@ -630,7 +630,7 @@ module.exports = {
 },
 categories,
 isCurrentMeeting: dateMode !== 'past',
-useNotes: true,
+usesNotes: true,
 schedule,
 floors
 }
playwright/tests/status/status.spec.js (new file, 61 lines)
@@ -0,0 +1,61 @@
+const {
+  test,
+  expect
+} = require('@playwright/test')
+const { STATUS_STORAGE_KEY, generateStatusTestId } = require('../../../client/shared/status-common.js')
+
+test.describe('site status', () => {
+  const noStatus = {
+    hasMessage: false
+  }
+
+  const status1 = {
+    hasMessage: true,
+    id: 1,
+    slug: '2024-7-9fdfdf-sdfsdf',
+    title: 'My status title',
+    body: 'My status body',
+    url: '/status/2024-7-9fdfdf-sdfsdf',
+    date: '2024-07-09T07:05:13+00:00',
+    by: 'Exile is a cool Amiga game'
+  }
+
+  test('Renders server status as Notification', async ({ page }) => {
+    await page.route('/status/latest.json', route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(status1)
+      })
+    })
+    await page.goto('/')
+    await expect(page.getByTestId(generateStatusTestId(status1.id)), 'should have status').toHaveCount(1)
+  })
+
+  test("Doesn't render dismissed server statuses", async ({ page }) => {
+    await page.route('/status/latest.json', route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(status1)
+      })
+    })
+    await page.goto('/')
+    await page.evaluate(({ key, value }) => localStorage.setItem(key, value), { key: STATUS_STORAGE_KEY, value: JSON.stringify([status1.id]) })
+    await expect(page.getByTestId(generateStatusTestId(status1.id)), 'should have status').toHaveCount(0)
+  })
+
+  test('Handles no server status', async ({ page }) => {
+    await page.route('/status/latest.json', route => {
+      route.fulfill({
+        status: 200,
+        contentType: 'application/json',
+        body: JSON.stringify(noStatus)
+      })
+    })
+
+    await page.goto('/')
+
+    await expect(page.getByTestId(generateStatusTestId(status1.id)), 'should have status').toHaveCount(0)
+  })
+})
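The spec stubs `/status/latest.json` with either `{ hasMessage: false }` or a full payload (`id`, `slug`, `title`, `body`, `url`, `date`, `by`). Below is a hedged sketch of a server-side view producing that shape; the real view is not part of this excerpt, and the model and field names are assumptions inferred from the test fixture and the new templates.

```python
# Hypothetical sketch of the latest.json endpoint the Playwright tests stub out.
from django.http import JsonResponse

from ietf.status.models import Status  # assumed model location


def status_latest_json(request):
    status = Status.objects.filter(active=True).order_by("-date").first()  # assumed fields
    if status is None:
        return JsonResponse({"hasMessage": False})
    return JsonResponse({
        "hasMessage": True,
        "id": status.id,
        "slug": status.slug,
        "title": status.title,
        "body": status.body,
        "url": f"/status/{status.slug}",
        "date": status.date.isoformat(),
        "by": str(status.by),  # the fixture shows a display string here
    })
```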