ci: merge main to release (#8344)
Commit 4f53cad224
@@ -284,6 +284,7 @@ const meetingEvents = computed(() => {
     const purposesWithoutLinks = ['admin', 'closed_meeting', 'officehours', 'social']
     if (item.flags.showAgenda || (typesWithLinks.includes(item.type) && !purposesWithoutLinks.includes(item.purpose))) {
       if (item.flags.agenda) {
+        // -> Meeting Materials
         links.push({
           id: `lnk-${item.id}-tar`,
           label: 'Download meeting materials as .tar archive',
@@ -305,7 +306,18 @@ const meetingEvents = computed(() => {
           color: 'red'
         })
       }
-      if (agendaStore.usesNotes) {
+      // -> Point to Wiki for Hackathon sessions, HedgeDocs otherwise
+      if (item.name.toLowerCase().includes('hackathon')) {
+        links.push({
+          id: `lnk-${item.id}-wiki`,
+          label: 'Wiki',
+          icon: 'book',
+          href: getUrl('hackathonWiki', {
+            meetingNumber: agendaStore.meeting.number
+          }),
+          color: 'blue'
+        })
+      } else if (agendaStore.usesNotes) {
         links.push({
           id: `lnk-${item.id}-note`,
           label: 'Notepad for note-takers',

@@ -1,5 +1,6 @@
 {
   "bofDefinition": "https://www.ietf.org/how/bofs/",
+  "hackathonWiki": "https://wiki.ietf.org/meeting/{meetingNumber}/hackathon",
   "meetingCalIcs": "/meeting/{meetingNumber}/agenda.ics",
   "meetingDetails": "/meeting/{meetingNumber}/session/{eventAcronym}/",
   "meetingMaterialsPdf": "/meeting/{meetingNumber}/agenda/{eventAcronym}-drafts.pdf",

dev/deploy-to-container/package-lock.json (generated, 14 lines changed)

@@ -8,7 +8,7 @@
   "dependencies": {
     "dockerode": "^4.0.2",
     "fs-extra": "^11.2.0",
-    "nanoid": "5.0.8",
+    "nanoid": "5.0.9",
    "nanoid-dictionary": "5.0.0-beta.1",
    "slugify": "1.6.6",
    "tar": "^7.4.3",
@@ -546,9 +546,9 @@
       "optional": true
     },
     "node_modules/nanoid": {
-      "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
-      "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==",
+      "version": "5.0.9",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz",
+      "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q==",
       "funding": [
         {
           "type": "github",
@@ -1346,9 +1346,9 @@
       "optional": true
     },
     "nanoid": {
-      "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz",
-      "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ=="
+      "version": "5.0.9",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.9.tgz",
+      "integrity": "sha512-Aooyr6MXU6HpvvWXKoVoXwKMs/KyVakWwg7xQfv5/S/RIgJMy0Ifa45H9qqYy7pTCszrHzP21Uk4PZq2HpEM8Q=="
     },
     "nanoid-dictionary": {
       "version": "5.0.0-beta.1",

@@ -4,7 +4,7 @@
   "dependencies": {
     "dockerode": "^4.0.2",
     "fs-extra": "^11.2.0",
-    "nanoid": "5.0.8",
+    "nanoid": "5.0.9",
    "nanoid-dictionary": "5.0.0-beta.1",
    "slugify": "1.6.6",
    "tar": "^7.4.3",

ietf/community/apps.py (new file, 12 lines)

@@ -0,0 +1,12 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.apps import AppConfig
+
+
+class CommunityConfig(AppConfig):
+    name = "ietf.community"
+
+    def ready(self):
+        """Initialize the app after the registry is populated"""
+        # implicitly connects @receiver-decorated signals
+        from . import signals  # pyflakes: ignore

@@ -1,19 +1,14 @@
 # Copyright The IETF Trust 2012-2020, All Rights Reserved
 # -*- coding: utf-8 -*-
 
-from django.conf import settings
-from django.db import models, transaction
-from django.db.models import signals
+from django.db import models
 from django.urls import reverse as urlreverse
 
-from ietf.doc.models import Document, DocEvent, State
+from ietf.doc.models import Document, State
 from ietf.group.models import Group
 from ietf.person.models import Person, Email
 from ietf.utils.models import ForeignKey
 
-from .tasks import notify_event_to_subscribers_task
-
-
 
 class CommunityList(models.Model):
     person = ForeignKey(Person, blank=True, null=True)
@@ -98,29 +93,3 @@ class EmailSubscription(models.Model):
 
     def __str__(self):
         return "%s to %s (%s changes)" % (self.email, self.community_list, self.notify_on)
-
-
-def notify_events(sender, instance, **kwargs):
-    if not isinstance(instance, DocEvent):
-        return
-
-    if not kwargs.get("created", False):
-        return  # only notify on creation
-
-    if instance.doc.type_id != 'draft':
-        return
-
-    if getattr(instance, "skip_community_list_notification", False):
-        return
-
-    # kludge alert: queuing a celery task in response to a signal can cause unexpected attempts to
-    # start a Celery task during tests. To prevent this, don't queue a celery task if we're running
-    # tests.
-    if settings.SERVER_MODE != "test":
-        # Wrap in on_commit in case a transaction is open
-        transaction.on_commit(
-            lambda: notify_event_to_subscribers_task.delay(event_id=instance.pk)
-        )
-
-
-signals.post_save.connect(notify_events)

ietf/community/signals.py (new file, 44 lines)

@@ -0,0 +1,44 @@
+# Copyright The IETF Trust 2024, All Rights Reserved
+
+from django.conf import settings
+from django.db import transaction
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+
+from ietf.doc.models import DocEvent
+from .tasks import notify_event_to_subscribers_task
+
+
+def notify_of_event(event: DocEvent):
+    """Send subscriber notification emails for a 'draft'-related DocEvent
+
+    If the event is attached to a draft of type 'doc', queues a task to send notification emails to
+    community list subscribers. No emails will be sent when SERVER_MODE is 'test'.
+    """
+    if event.doc.type_id != "draft":
+        return
+
+    if getattr(event, "skip_community_list_notification", False):
+        return
+
+    # kludge alert: queuing a celery task in response to a signal can cause unexpected attempts to
+    # start a Celery task during tests. To prevent this, don't queue a celery task if we're running
+    # tests.
+    if settings.SERVER_MODE != "test":
+        # Wrap in on_commit in case a transaction is open
+        transaction.on_commit(
+            lambda: notify_event_to_subscribers_task.delay(event_id=event.pk)
+        )
+
+
+# dispatch_uid ensures only a single signal receiver binding is made
+@receiver(post_save, dispatch_uid="notify_of_events_receiver_uid")
+def notify_of_events_receiver(sender, instance, **kwargs):
+    """Call notify_of_event after saving a new DocEvent"""
+    if not isinstance(instance, DocEvent):
+        return
+
+    if not kwargs.get("created", False):
+        return  # only notify on creation
+
+    notify_of_event(instance)

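The two new files above wire community-list notifications through Django's signal framework: CommunityConfig.ready() imports ietf.community.signals, and that import registers the @receiver-decorated handler on post_save. A minimal, self-contained sketch of the same pattern, assuming only that Django is installed; the Note class and handler name below are placeholders, not datatracker code:

# Sketch of the @receiver/post_save pattern used in ietf/community/signals.py.
# "Note" and "on_note_saved" are illustrative placeholders.
from django.db.models.signals import post_save
from django.dispatch import receiver


class Note:  # stand-in for a model class; used only as the signal sender here
    pass


# dispatch_uid guards against connecting the same handler twice if the
# defining module is imported more than once.
@receiver(post_save, sender=Note, dispatch_uid="note-created-uid")
def on_note_saved(sender, instance, created=False, **kwargs):
    if created:
        print("new Note saved:", instance)


# Django sends post_save automatically from Model.save(); fired by hand here
# so the sketch is runnable without a database.
post_save.send(sender=Note, instance=Note(), created=True)
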
@@ -1,7 +1,6 @@
 # Copyright The IETF Trust 2016-2023, All Rights Reserved
 # -*- coding: utf-8 -*-
 
-
 import mock
 from pyquery import PyQuery
 
@@ -11,6 +10,7 @@ from django.urls import reverse as urlreverse
 import debug  # pyflakes:ignore
 
 from ietf.community.models import CommunityList, SearchRule, EmailSubscription
+from ietf.community.signals import notify_of_event
 from ietf.community.utils import docs_matching_community_list_rule, community_list_rules_matching_doc
 from ietf.community.utils import reset_name_contains_index_for_rule, notify_event_to_subscribers
 from ietf.community.tasks import notify_event_to_subscribers_task
@@ -431,53 +431,58 @@ class CommunityListTests(TestCase):
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
 
-    # Mock out the on_commit call so we can tell whether the task was actually queued
-    @mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x())
-    @mock.patch("ietf.community.models.notify_event_to_subscribers_task")
-    def test_notification_signal_receiver(self, mock_notify_task, mock_on_commit):
-        """Saving a DocEvent should notify subscribers
+    @mock.patch("ietf.community.signals.notify_of_event")
+    def test_notification_signal_receiver(self, mock_notify_of_event):
+        """Saving a newly created DocEvent should notify subscribers
 
-        This implicitly tests that notify_events is hooked up to the post_save signal.
+        This implicitly tests that notify_of_event_receiver is hooked up to the post_save signal.
         """
         # Arbitrary model that's not a DocEvent
-        person = PersonFactory()
-        mock_notify_task.reset_mock()  # clear any calls that resulted from the factories
-        # be careful overriding SERVER_MODE - we do it here because the method
-        # under test does not make this call when in "test" mode
-        with override_settings(SERVER_MODE="not-test"):
-            person.save()
-        self.assertFalse(mock_notify_task.delay.called)
+        person = PersonFactory.build()  # builds but does not save...
+        mock_notify_of_event.reset_mock()  # clear any calls that resulted from the factories
+        person.save()
+        self.assertFalse(mock_notify_of_event.called)
 
         # build a DocEvent that is not yet persisted
         doc = DocumentFactory()
-        d = DocEventFactory.build(by=person, doc=doc)
-        # mock_notify_task.reset_mock()  # clear any calls that resulted from the factories
+        event = DocEventFactory.build(by=person, doc=doc)  # builds but does not save...
+        mock_notify_of_event.reset_mock()  # clear any calls that resulted from the factories
+        event.save()
+        self.assertEqual(mock_notify_of_event.call_count, 1, "notify_task should be run on creation of DocEvent")
+        self.assertEqual(mock_notify_of_event.call_args, mock.call(event))
+
+        # save the existing DocEvent and see that no notification is sent
+        mock_notify_of_event.reset_mock()
+        event.save()
+        self.assertFalse(mock_notify_of_event.called, "notify_task should not be run save of on existing DocEvent")
+
+    # Mock out the on_commit call so we can tell whether the task was actually queued
+    @mock.patch("ietf.submit.views.transaction.on_commit", side_effect=lambda x: x())
+    @mock.patch("ietf.community.signals.notify_event_to_subscribers_task")
+    def test_notify_of_event(self, mock_notify_task, mock_on_commit):
+        """The community notification task should be called as intended"""
+        person = PersonFactory()  # builds but does not save...
+        doc = DocumentFactory()
+        event = DocEventFactory(by=person, doc=doc)
         # be careful overriding SERVER_MODE - we do it here because the method
         # under test does not make this call when in "test" mode
         with override_settings(SERVER_MODE="not-test"):
-            d.save()
-        self.assertEqual(mock_notify_task.delay.call_count, 1, "notify_task should be run on creation of DocEvent")
-        self.assertEqual(mock_notify_task.delay.call_args, mock.call(event_id = d.pk))
+            notify_of_event(event)
+        self.assertTrue(mock_notify_task.delay.called, "notify_task should run for a DocEvent on a draft")
 
         mock_notify_task.reset_mock()
-        with override_settings(SERVER_MODE="not-test"):
-            d.save()
-        self.assertFalse(mock_notify_task.delay.called, "notify_task should not be run save of on existing DocEvent")
-
-        mock_notify_task.reset_mock()
-        d = DocEventFactory.build(by=person, doc=doc)
-        d.skip_community_list_notification = True
+        event.skip_community_list_notification = True
         # be careful overriding SERVER_MODE - we do it here because the method
         # under test does not make this call when in "test" mode
         with override_settings(SERVER_MODE="not-test"):
-            d.save()
+            notify_of_event(event)
         self.assertFalse(mock_notify_task.delay.called, "notify_task should not run when skip_community_list_notification is set")
 
-        d = DocEventFactory.build(by=person, doc=DocumentFactory(type_id="rfc"))
+        event = DocEventFactory.build(by=person, doc=DocumentFactory(type_id="rfc"))
         # be careful overriding SERVER_MODE - we do it here because the method
         # under test does not make this call when in "test" mode
         with override_settings(SERVER_MODE="not-test"):
-            d.save()
+            notify_of_event(event)
         self.assertFalse(mock_notify_task.delay.called, "notify_task should not run on a document with type 'rfc'")
 
     @mock.patch("ietf.utils.mail.send_mail_text")

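The rewritten tests above lean on two standard tools: mock.patch to intercept the notification call, and override_settings(SERVER_MODE=...) because the code under test deliberately skips queuing in test mode. A generic, self-contained illustration of that mock-and-guard combination, using only the standard library; maybe_queue is a placeholder, not datatracker code:

# Generic illustration of patching a task object and exercising a
# server-mode guard; names here are placeholders.
from unittest import TestCase, mock


def maybe_queue(task, server_mode):
    # mirrors the guard used by ietf.community.signals.notify_of_event
    if server_mode != "test":
        task.delay()


class MaybeQueueTests(TestCase):
    def test_skips_queuing_in_test_mode(self):
        task = mock.Mock()
        maybe_queue(task, server_mode="test")
        self.assertFalse(task.delay.called)

    def test_queues_otherwise(self):
        task = mock.Mock()
        maybe_queue(task, server_mode="not-test")
        self.assertTrue(task.delay.called)
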
@@ -3,6 +3,8 @@
 # expiry of Internet-Drafts
 
 
+import debug  # pyflakes:ignore
+
 from django.conf import settings
 from django.utils import timezone
 
@@ -11,12 +13,12 @@ from pathlib import Path
 
 from typing import List, Optional  # pyflakes:ignore
 
+from ietf.doc.utils import new_state_change_event, update_action_holders
 from ietf.utils import log
 from ietf.utils.mail import send_mail
-from ietf.doc.models import Document, DocEvent, State, IESG_SUBSTATE_TAGS
+from ietf.doc.models import Document, DocEvent, State, StateDocEvent
 from ietf.person.models import Person
 from ietf.meeting.models import Meeting
-from ietf.doc.utils import add_state_change_event, update_action_holders
 from ietf.mailtrigger.utils import gather_address_lists
 from ietf.utils.timezone import date_today, datetime_today, DEADLINE_TZINFO
 
@@ -148,10 +150,17 @@ def move_draft_files_to_archive(doc, rev):
             pass
         else:
             raise
 
+    def remove_ftp_copy(f):
+        mark = Path(settings.FTP_DIR) / "internet-drafts" / f
+        if mark.exists():
+            mark.unlink()
+
     src_dir = Path(settings.INTERNET_DRAFT_PATH)
     for file in src_dir.glob("%s-%s.*" % (doc.name, rev)):
         move_file(str(file.name))
+        remove_ftp_copy(str(file.name))
+
 
 def expire_draft(doc):
     # clean up files
@@ -161,24 +170,11 @@ def expire_draft(doc):
 
     events = []
 
-    # change the state
-    if doc.latest_event(type='started_iesg_process'):
-        new_state = State.objects.get(used=True, type="draft-iesg", slug="dead")
-        prev_state = doc.get_state(new_state.type_id)
-        prev_tags = doc.tags.filter(slug__in=IESG_SUBSTATE_TAGS)
-        if new_state != prev_state:
-            doc.set_state(new_state)
-            doc.tags.remove(*prev_tags)
-            e = add_state_change_event(doc, system, prev_state, new_state, prev_tags=prev_tags, new_tags=[])
-            if e:
-                events.append(e)
-            e = update_action_holders(doc, prev_state, new_state, prev_tags=prev_tags, new_tags=[])
-            if e:
-                events.append(e)
-
     events.append(DocEvent.objects.create(doc=doc, rev=doc.rev, by=system, type="expired_document", desc="Document has expired"))
 
+    prev_draft_state=doc.get_state("draft")
     doc.set_state(State.objects.get(used=True, type="draft", slug="expired"))
+    events.append(update_action_holders(doc, prev_draft_state, doc.get_state("draft"),[],[]))
     doc.save_with_history(events)
 
 def clean_up_draft_files():
@@ -238,3 +234,42 @@ def clean_up_draft_files():
         except Document.DoesNotExist:
             # All uses of this past 2014 seem related to major system failures.
             move_file_to("unknown_ids")
+
+
+def repair_dead_on_expire():
+    by = Person.objects.get(name="(System)")
+    id_exists = State.objects.get(type="draft-iesg", slug="idexists")
+    dead = State.objects.get(type="draft-iesg", slug="dead")
+    dead_drafts = Document.objects.filter(
+        states__type="draft-iesg", states__slug="dead", type_id="draft"
+    )
+    for d in dead_drafts:
+        dead_event = d.latest_event(
+            StateDocEvent, state_type="draft-iesg", state__slug="dead"
+        )
+        if dead_event is not None:
+            if d.docevent_set.filter(type="expired_document").exists():
+                closest_expiry = min(
+                    [
+                        abs(e.time - dead_event.time)
+                        for e in d.docevent_set.filter(type="expired_document")
+                    ]
+                )
+                if closest_expiry.total_seconds() < 60:
+                    d.set_state(id_exists)
+                    events = []
+                    e = DocEvent(
+                        doc=d,
+                        rev=d.rev,
+                        type="added_comment",
+                        by=by,
+                        desc="IESG Dead state was set due only to document expiry - changing IESG state to ID-Exists",
+                    )
+                    e.skip_community_list_notification = True
+                    e.save()
+                    events.append(e)
+                    e = new_state_change_event(d, by, dead, id_exists)
+                    e.skip_community_list_notification = True
+                    e.save()
+                    events.append(e)
+                    d.save_with_history(events)

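repair_dead_on_expire() above decides whether a draft's IESG 'dead' state came from expiry by looking for an expired_document event within 60 seconds of the dead StateDocEvent. A standalone sketch of just that timing check, with made-up timestamps for illustration:

# Sketch of the "closest expiry within 60 seconds" test used by
# repair_dead_on_expire(); the datetimes are illustrative only.
import datetime

dead_time = datetime.datetime(2024, 1, 1, 12, 0, 0)
expiry_times = [
    datetime.datetime(2024, 1, 1, 11, 59, 58),  # 2 s before the dead event
    datetime.datetime(2023, 6, 1, 0, 0, 0),     # unrelated event, months earlier
]

closest_expiry = min(abs(t - dead_time) for t in expiry_times)
if closest_expiry.total_seconds() < 60:
    print("dead state most likely came from expiry; repair to 'idexists'")
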
@@ -18,6 +18,7 @@ from .expire import (
     in_draft_expire_freeze,
     get_expired_drafts,
     expirable_drafts,
+    repair_dead_on_expire,
     send_expire_notice_for_draft,
     expire_draft,
     clean_up_draft_files,
@@ -61,6 +62,11 @@ def expire_ids_task():
         raise
 
 
+@shared_task
+def repair_dead_on_expire_task():
+    repair_dead_on_expire()
+
+
 @shared_task
 def notify_expirations_task(notify_days=14):
     for doc in get_soon_to_expire_drafts(notify_days):

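The new repair_dead_on_expire_task is a thin @shared_task wrapper; this commit does not schedule it, and how it gets run is outside this diff. Purely for illustration, a generic Celery beat entry for a task with this dotted name could look like the following; the app instance and the cadence are assumptions, not datatracker configuration:

# Illustrative only: this commit defines repair_dead_on_expire_task but does
# not add any schedule for it. Generic Celery beat configuration sketch.
from celery import Celery
from celery.schedules import crontab

app = Celery("scratch")  # placeholder app, not datatracker configuration
app.conf.beat_schedule = {
    "repair-dead-on-expire": {
        "task": "ietf.doc.tasks.repair_dead_on_expire_task",
        "schedule": crontab(hour=0, minute=30),  # assumed daily cadence
    },
}
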
@@ -19,10 +19,10 @@ from django.utils.html import escape
 
 import debug  # pyflakes:ignore
 
-from ietf.doc.expire import get_expired_drafts, send_expire_notice_for_draft, expire_draft
-from ietf.doc.factories import EditorialDraftFactory, IndividualDraftFactory, WgDraftFactory, RgDraftFactory, DocEventFactory
+from ietf.doc.expire import expirable_drafts, get_expired_drafts, repair_dead_on_expire, send_expire_notice_for_draft, expire_draft
+from ietf.doc.factories import EditorialDraftFactory, IndividualDraftFactory, StateDocEventFactory, WgDraftFactory, RgDraftFactory, DocEventFactory
 from ietf.doc.models import ( Document, DocReminder, DocEvent,
-    ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, TelechatDocEvent,
+    ConsensusDocEvent, LastCallDocEvent, RelatedDocument, State, StateDocEvent, TelechatDocEvent,
     WriteupDocEvent, DocRelationshipName, IanaExpertDocEvent )
 from ietf.doc.utils import get_tags_for_stream_id, create_ballot_if_not_open
 from ietf.doc.views_draft import AdoptDraftForm
@@ -36,7 +36,7 @@ from ietf.iesg.models import TelechatDate
 from ietf.utils.test_utils import login_testing_unauthorized
 from ietf.utils.mail import outbox, empty_outbox, get_payload_text
 from ietf.utils.test_utils import TestCase
-from ietf.utils.timezone import date_today, datetime_from_date, DEADLINE_TZINFO
+from ietf.utils.timezone import date_today, datetime_today, datetime_from_date, DEADLINE_TZINFO
 
 
 class ChangeStateTests(TestCase):
@@ -763,13 +763,16 @@ class ExpireIDsTests(DraftFileMixin, TestCase):
         txt = "%s-%s.txt" % (draft.name, draft.rev)
         self.write_draft_file(txt, 5000)
 
+        self.assertFalse(expirable_drafts(Document.objects.filter(pk=draft.pk)).exists())
+        draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="idexists"))
+        self.assertTrue(expirable_drafts(Document.objects.filter(pk=draft.pk)).exists())
         expire_draft(draft)
 
         draft = Document.objects.get(name=draft.name)
         self.assertEqual(draft.get_state_slug(), "expired")
-        self.assertEqual(draft.get_state_slug("draft-iesg"), "dead")
+        self.assertEqual(draft.get_state_slug("draft-iesg"), "idexists")
         self.assertTrue(draft.latest_event(type="expired_document"))
-        self.assertCountEqual(draft.action_holders.all(), [])
+        self.assertEqual(draft.action_holders.count(), 0)
         self.assertIn('Removed all action holders', draft.latest_event(type='changed_action_holders').desc)
         self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
         self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
@@ -842,6 +845,77 @@ class ExpireIDsTests(DraftFileMixin, TestCase):
         self.assertTrue(not os.path.exists(os.path.join(settings.INTERNET_DRAFT_PATH, txt)))
         self.assertTrue(os.path.exists(os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, txt)))
 
+    @mock.patch("ietf.community.signals.notify_of_event")
+    def test_repair_dead_on_expire(self, mock_notify):
+
+        # Create a draft in iesg idexists - ensure it doesn't get new docevents.
+        # Create a draft in iesg dead with no expires within the window - ensure it doesn't get new docevents and its state doesn't change.
+        # Create a draft in iesg dead with an expiry in the window - ensure it gets the right doc events, iesg state changes, draft state doesn't change.
+        last_year = datetime_today() - datetime.timedelta(days=365)
+
+        not_dead = WgDraftFactory(name="draft-not-dead")
+        not_dead_event_count = not_dead.docevent_set.count()
+
+        dead_not_from_expires = WgDraftFactory(name="draft-dead-not-from-expiring")
+        dead_not_from_expires.set_state(
+            State.objects.get(type="draft-iesg", slug="dead")
+        )
+        StateDocEventFactory(
+            doc=dead_not_from_expires, state=("draft-iesg", "dead"), time=last_year
+        )
+        DocEventFactory(
+            doc=dead_not_from_expires,
+            type="expired_document",
+            time=last_year + datetime.timedelta(days=1),
+        )
+        dead_not_from_expires_event_count = dead_not_from_expires.docevent_set.count()
+
+        dead_from_expires = []
+        dead_from_expires_event_count = dict()
+        for delta in [-5, 5]:
+            d = WgDraftFactory(
+                name=f"draft-dead-from-expiring-just-{'before' if delta<0 else 'after'}"
+            )
+            d.set_state(State.objects.get(type="draft-iesg", slug="dead"))
+            StateDocEventFactory(doc=d, state=("draft-iesg", "dead"), time=last_year)
+            DocEventFactory(
+                doc=d,
+                type="expired_document",
+                time=last_year + datetime.timedelta(seconds=delta),
+            )
+            dead_from_expires.append(d)
+            dead_from_expires_event_count[d] = d.docevent_set.count()
+
+        notified_during_factory_work = mock_notify.call_count
+        for call_args in mock_notify.call_args_list:
+            e = call_args.args[0]
+            self.assertTrue(isinstance(e,DocEvent))
+            self.assertFalse(hasattr(e,"skip_community_list_notification"))
+
+        repair_dead_on_expire()
+
+        self.assertEqual(not_dead.docevent_set.count(), not_dead_event_count)
+        self.assertEqual(
+            dead_not_from_expires.docevent_set.count(),
+            dead_not_from_expires_event_count,
+        )
+        for d in dead_from_expires:
+            self.assertEqual(
+                d.docevent_set.count(), dead_from_expires_event_count[d] + 2
+            )
+            self.assertIn(
+                "due only to document expiry", d.latest_event(type="added_comment").desc
+            )
+            self.assertEqual(
+                d.latest_event(StateDocEvent).desc,
+                "IESG state changed to <b>I-D Exists</b> from Dead",
+            )
+        self.assertEqual(mock_notify.call_count, 4+notified_during_factory_work)
+        for call_args in mock_notify.call_args_list[-4:]:
+            e = call_args.args[0]
+            self.assertTrue(isinstance(e,DocEvent))
+            self.assertTrue(hasattr(e,"skip_community_list_notification"))
+            self.assertTrue(e.skip_community_list_notification)
+
 class ExpireLastCallTests(TestCase):
     def test_expire_last_call(self):

@@ -1,4 +1,6 @@
 # Copyright The IETF Trust 2024, All Rights Reserved
+
+import debug  # pyflakes:ignore
 import datetime
 import mock
 
@@ -19,6 +21,7 @@ from .tasks import (
     generate_idnits2_rfcs_obsoleted_task,
     generate_idnits2_rfc_status_task,
     notify_expirations_task,
+    repair_dead_on_expire_task,
 )
 
 class TaskTests(TestCase):
@@ -96,6 +99,10 @@ class TaskTests(TestCase):
         self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
         self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
 
+    @mock.patch("ietf.doc.tasks.repair_dead_on_expire")
+    def test_repair_dead_on_expire_task(self, mock_repair):
+        repair_dead_on_expire_task()
+        self.assertEqual(mock_repair.call_count, 1)
 
 class Idnits2SupportTests(TestCase):
     settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']

@@ -491,8 +491,9 @@ def update_action_holders(doc, prev_state=None, new_state=None, prev_tags=None,
 
     Returns an event describing the change which should be passed to doc.save_with_history()
 
-    Only cares about draft-iesg state changes. Places where other state types are updated
-    may not call this method. If you add rules for updating action holders on other state
+    Only cares about draft-iesg state changes and draft expiration.
+    Places where other state types are updated may not call this method.
+    If you add rules for updating action holders on other state
     types, be sure this is called in the places that change that state.
     """
     # Should not call this with different state types
@@ -511,41 +512,50 @@
 
     # Remember original list of action holders to later check if it changed
     prev_set = list(doc.action_holders.all())
 
-    # Update the action holders. To get this right for people with more
-    # than one relationship to the document, do removals first, then adds.
-    # Remove outdated action holders
-    iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg")
-    if iesg_state_changed:
-        # Clear the action_holders list on a state change. This will reset the age of any that get added back.
-        doc.action_holders.clear()
-    if tags.removed("need-rev"):
-        # Removed the 'need-rev' tag - drop authors from the action holders list
-        DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete()
-    elif tags.added("need-rev"):
-        # Remove the AD if we're asking for a new revision
-        DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete()
+    if new_state and new_state.type_id=="draft" and new_state.slug=="expired":
+        doc.action_holders.clear()
+        return add_action_holder_change_event(
+            doc,
+            Person.objects.get(name='(System)'),
+            prev_set,
+            reason='draft expired',
+        )
+    else:
+        # Update the action holders. To get this right for people with more
+        # than one relationship to the document, do removals first, then adds.
+        # Remove outdated action holders
+        iesg_state_changed = (prev_state != new_state) and (getattr(new_state, "type_id", None) == "draft-iesg")
+        if iesg_state_changed:
+            # Clear the action_holders list on a state change. This will reset the age of any that get added back.
+            doc.action_holders.clear()
+        if tags.removed("need-rev"):
+            # Removed the 'need-rev' tag - drop authors from the action holders list
+            DocumentActionHolder.objects.filter(document=doc, person__in=doc.authors()).delete()
+        elif tags.added("need-rev"):
+            # Remove the AD if we're asking for a new revision
+            DocumentActionHolder.objects.filter(document=doc, person=doc.ad).delete()
 
     # Add new action holders
     if doc.ad:
         # AD is an action holder unless specified otherwise for the new state
         if iesg_state_changed and new_state.slug not in DocumentActionHolder.CLEAR_ACTION_HOLDERS_STATES:
             doc.action_holders.add(doc.ad)
         # If AD follow-up is needed, make sure they are an action holder
         if tags.added("ad-f-up"):
             doc.action_holders.add(doc.ad)
         # Authors get the action if a revision is needed
         if tags.added("need-rev"):
             for auth in doc.authors():
                 doc.action_holders.add(auth)
 
     # Now create an event if we changed the set
     return add_action_holder_change_event(
         doc,
         Person.objects.get(name='(System)'),
         prev_set,
         reason='IESG state changed',
     )
 
 
 def update_documentauthors(doc, new_docauthors, by=None, basis=None):

@@ -95,7 +95,8 @@ def change_state(request, name):
     and logging the change as a comment."""
     doc = get_object_or_404(Document, name=name)
 
-    if (not doc.latest_event(type="started_iesg_process")) or doc.get_state_slug() == "expired":
+    # Steer ADs towards "Begin IESG Processing"
+    if doc.get_state_slug("draft-iesg")=="idexists" and not has_role(request.user,"Secretariat"):
         raise Http404
 
     login = request.user.person

@@ -750,7 +750,6 @@ CONFLICT_REVIEW_PATH = '/a/ietfdata/doc/conflict-review'
 STATUS_CHANGE_PATH = '/a/ietfdata/doc/status-change'
 AGENDA_PATH = '/a/www/www6s/proceedings/'
 MEETINGHOST_LOGO_PATH = AGENDA_PATH # put these in the same place as other proceedings files
-IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/'
 # Move drafts to this directory when they expire
 INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/'
 # The following directory contains copies of all drafts - it used to be

@@ -236,9 +236,13 @@ th,
 
 // Helper to constrain the size of the main logo
 .ietflogo {
-  width: 75%;
+  width: 100%;
   max-width: 300px;
 }
 
+.ietflogo > img {
+  min-width: 100px;
+  width: 100%;
+}
+
 // Make revision numbers pagination items fixed-width
 .revision-list {

@@ -770,70 +770,6 @@ def save_files(form):
         log.log("saved file %s" % name)
     return file_name
 
-def get_draft_meta(form, saved_files):
-    authors = []
-    file_name = saved_files
-
-    if form.cleaned_data['xml']:
-        # Some meta-information, such as the page-count, can only
-        # be retrieved from the generated text file. Provide a
-        # parsed draft object to get at that kind of information.
-        file_name['txt'] = os.path.join(settings.IDSUBMIT_STAGING_PATH, '%s-%s.txt' % (form.filename, form.revision))
-        file_size = os.stat(file_name['txt']).st_size
-        with io.open(file_name['txt']) as txt_file:
-            form.parsed_draft = PlaintextDraft(txt_file.read(), txt_file.name)
-    else:
-        file_size = form.cleaned_data['txt'].size
-
-    if form.authors:
-        authors = form.authors
-    else:
-        # If we don't have an xml file, try to extract the
-        # relevant information from the text file
-        for author in form.parsed_draft.get_author_list():
-            full_name, first_name, middle_initial, last_name, name_suffix, email, country, company = author
-
-            name = full_name.replace("\n", "").replace("\r", "").replace("<", "").replace(">", "").strip()
-
-            if email:
-                try:
-                    validate_email(email)
-                except ValidationError:
-                    email = ""
-
-            def turn_into_unicode(s):
-                if s is None:
-                    return ""
-
-                if isinstance(s, str):
-                    return s
-                else:
-                    try:
-                        return s.decode("utf-8")
-                    except UnicodeDecodeError:
-                        try:
-                            return s.decode("latin-1")
-                        except UnicodeDecodeError:
-                            return ""
-
-            name = turn_into_unicode(name)
-            email = turn_into_unicode(email)
-            company = turn_into_unicode(company)
-
-            authors.append({
-                "name": name,
-                "email": email,
-                "affiliation": company,
-                "country": country
-            })
-
-    if form.abstract:
-        abstract = form.abstract
-    else:
-        abstract = form.parsed_draft.get_abstract()
-
-    return authors, abstract, file_name, file_size
-
-
 def get_submission(form):
     # See if there is a Submission in state waiting-for-draft
@@ -1272,8 +1208,7 @@ def process_submission_xml(filename, revision):
 def _turn_into_unicode(s: Optional[Union[str, bytes]]):
     """Decode a possibly null string-like item as a string
 
-    Copied from ietf.submit.utils.get_draft_meta(), would be nice to
-    ditch this.
+    Would be nice to ditch this.
     """
     if s is None:
         return ""
@@ -1317,7 +1252,7 @@ def process_submission_text(filename, revision):
     if title:
         title = _normalize_title(title)
 
-    # Drops \r, \n, <, >. Based on get_draft_meta() behavior
+    # Translation taable drops \r, \n, <, >.
     trans_table = str.maketrans("", "", "\r\n<>")
     authors = [
         {

@@ -101,7 +101,7 @@
   <div id="app"></div>
   <div id="app-loading">
     <div id="app-loading-footer">
-      <a class="btn btn-light text-body-secondary mb-3" href="/meeting/{{ meetingData.meetingNumber }}/agenda.txt"><small>Switch to text-only version ⮞</small></a>
+      <a class="btn btn-light text-body-secondary mb-3" href="/meeting/{{ meetingData.meetingNumber }}/agenda.txt"><small>Switch to text-only version »</small></a>
     </div>
   </div>
 {% endblock %}

@@ -23,7 +23,7 @@ import django.core.management.commands.loaddata as loaddata
 
 import debug  # pyflakes:ignore
 
-from ietf.community.models import notify_events
+from ietf.community.signals import notify_of_events_receiver
 
 class Command(loaddata.Command):
     help = ("""
@@ -62,7 +62,7 @@ class Command(loaddata.Command):
         #
         self.serialization_formats = serializers.get_public_serializer_formats()
         #
-        post_save.disconnect(notify_events)
+        post_save.disconnect(notify_of_events_receiver())
         #
         connection = connections[self.using]
         self.fixture_count = 0

@@ -286,10 +286,17 @@ test.describe('past - desktop', () => {
         // No meeting materials yet warning badge
         await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
       }
-      // Notepad button
-      const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
-      await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
-      await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+      if (event.name.toLowerCase().includes('hackathon')) {
+        // Hackathon Wiki button
+        const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki > i.bi`)).toBeVisible()
+      } else {
+        // Notepad button
+        const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+      }
       // Chat logs
       await expect(eventButtons.locator(`#btn-lnk-${event.id}-logs`)).toHaveAttribute('href', event.links.chatArchive)
      await expect(eventButtons.locator(`#btn-lnk-${event.id}-logs > i.bi`)).toBeVisible()
@@ -1162,10 +1169,17 @@ test.describe('future - desktop', () => {
         // No meeting materials yet warning badge
         await expect(eventButtons.locator('.no-meeting-materials')).toBeVisible()
       }
-      // Notepad button
-      const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
-      await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
-      await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+      if (event.name.toLowerCase().includes('hackathon')) {
+        // Hackathon Wiki button
+        const hackathonWikiLink = `https://wiki.ietf.org/meeting/${meetingData.meeting.number}/hackathon`
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki`)).toHaveAttribute('href', hackathonWikiLink)
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-wiki > i.bi`)).toBeVisible()
+      } else {
+        // Notepad button
+        const hedgeDocLink = `https://notes.ietf.org/notes-ietf-${meetingData.meeting.number}-${event.type === 'plenary' ? 'plenary' : event.acronym}`
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note`)).toHaveAttribute('href', hedgeDocLink)
+        await expect(eventButtons.locator(`#btn-lnk-${event.id}-note > i.bi`)).toBeVisible()
+      }
       // Chat room
       await expect(eventButtons.locator(`#btn-lnk-${event.id}-room`)).toHaveAttribute('href', event.links.chat)
       await expect(eventButtons.locator(`#btn-lnk-${event.id}-room > i.bi`)).toBeVisible()

@@ -5,6 +5,5 @@ SERVER_EMAIL = 'Django IETFdb Test Server<django-test@tools.ietf.org>'
 
 SERVER_MODE = 'test'
 
-IPR_DOCUMENT_PATH = '/home/ietf/adm/IPR/'
 
 SITE_ID = 1