feat: Import IESG artifacts into the datatracker (#6908)
* chore: remove unused setting
* feat: initial import of iesg minutes
* fix: let the meetings view show older iesg meetings
* feat: iesg narrative minutes
* feat: import bof coordination call minutes
* wip: import commands for iesg appeals and statements
* feat: import iesg statements.
* feat: import iesg artifacts
* feat: many fewer n+1 queries for the group meetings view
* fix: restore chain of elifs in views_doc
* fix: use self.stdout.write vs print in mgmt commands
* fix: use replace instead of astimezone when appropriate
* chore: refactor new migrations into one
* fix: transcode some old files into utf8
* fix: repair overzealous replace
* chore: black
* fix: address minor review comments
* fix: actually capture transcoding work
* fix: handle multiple iesg statements on the same day
* fix: better titles
* feat: pill badge replaced statements
* fix: consolidate source repos to one
* feat: liberal markdown for secretariat controlled content
* fix: handle (and clean) html narrative minutes
* feat: scrub harder
* fix: simplify and improve a scrubber
* chore: reorder migrations
This commit is contained in: parent 5fc0f6926b, commit 8cb7f3dcae
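Note: the change that recurs throughout this diff is the rename of the reverse accessor sessionpresentation_set to presentations. A minimal sketch of the pattern behind such a rename (the field definitions below are assumptions for illustration; the real model lives in ietf/meeting/models.py):

# Sketch only: setting related_name on SessionPresentation's foreign keys
# is what turns doc.sessionpresentation_set / session.sessionpresentation_set
# into doc.presentations / session.presentations.
from django.db import models

class SessionPresentation(models.Model):
    document = models.ForeignKey(
        "doc.Document", related_name="presentations", on_delete=models.CASCADE
    )
    session = models.ForeignKey(
        "meeting.Session", related_name="presentations", on_delete=models.CASCADE
    )
    rev = models.CharField(max_length=16, blank=True, null=True)  # assumed field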
@@ -368,7 +368,7 @@ class CustomApiTests(TestCase):
        r = self.client.post(url,{'apikey':apikey.hash(),'apidata': f'{{"session_id":{session.pk}, "{type_id}":{content}}}'})
        self.assertEqual(r.status_code, 200)

-       newdoc = session.sessionpresentation_set.get(document__type_id=type_id).document
+       newdoc = session.presentations.get(document__type_id=type_id).document
        newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename)
        self.assertEqual(json.loads(content), json.loads(newdoccontent))
@@ -454,7 +454,7 @@ class CustomApiTests(TestCase):
            'item': '1', 'bluesheet': bluesheet, })
        self.assertContains(r, "Done", status_code=200)

-       bluesheet = session.sessionpresentation_set.filter(document__type__slug='bluesheets').first().document
+       bluesheet = session.presentations.filter(document__type__slug='bluesheets').first().document
        # We've submitted an update; check that the rev is right
        self.assertEqual(bluesheet.rev, '01')
        # Check the content
@@ -569,7 +569,7 @@ class CustomApiTests(TestCase):
        self.assertContains(r, "Done", status_code=200)

        bluesheet = (
-           session.sessionpresentation_set.filter(document__type__slug="bluesheets")
+           session.presentations.filter(document__type__slug="bluesheets")
            .first()
            .document
        )
ietf/doc/migrations/0021_narrativeminutes.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# Copyright The IETF Trust 2023, All Rights Reserved

from django.db import migrations


def forward(apps, schema_editor):
    StateType = apps.get_model("doc", "StateType")
    State = apps.get_model("doc", "State")

    StateType.objects.create(
        slug="narrativeminutes",
        label="State",
    )
    for order, slug in enumerate(["active", "deleted"]):
        State.objects.create(
            slug=slug,
            type_id="narrativeminutes",
            name=slug.capitalize(),
            order=order,
            desc="",
            used=True,
        )


def reverse(apps, schema_editor):
    StateType = apps.get_model("doc", "StateType")
    State = apps.get_model("doc", "State")

    State.objects.filter(type_id="narrativeminutes").delete()
    StateType.objects.filter(slug="narrativeminutes").delete()


class Migration(migrations.Migration):
    dependencies = [
        ("doc", "0020_move_errata_tags"),
        ("name", "0013_narrativeminutes"),
    ]

    operations = [migrations.RunPython(forward, reverse)]
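Note: a short usage sketch for the states created by this migration (the document name below is hypothetical, for illustration only):

# Hypothetical usage sketch: once the migration has run, a narrative-minutes
# document can be moved through the new states like any other document type.
from ietf.doc.models import Document, State

doc = Document.objects.get(name="narrative-minutes-interim-2023-iesg-01")  # hypothetical
doc.set_state(State.objects.get(type_id="narrativeminutes", slug="active"))
assert doc.get_state_slug("narrativeminutes") == "active"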
@@ -148,7 +148,7 @@ class DocumentInfo(models.Model):
            else:
                self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR
        elif self.meeting_related() and self.type_id in (
-           "agenda", "minutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls"
+           "agenda", "minutes", "narrativeminutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls"
        ):
            meeting = self.get_related_meeting()
            if meeting is not None:
@@ -438,7 +438,7 @@ class DocumentInfo(models.Model):
        return e != None and (e.text != "")

    def meeting_related(self):
-       if self.type_id in ("agenda","minutes","bluesheets","slides","recording","procmaterials","chatlog","polls"):
+       if self.type_id in ("agenda","minutes", "narrativeminutes", "bluesheets","slides","recording","procmaterials","chatlog","polls"):
            return self.type_id != "slides" or self.get_state_slug('reuse_policy')=='single'
        return False
@@ -1028,7 +1028,7 @@ class Document(DocumentInfo):
    def future_presentations(self):
        """ returns related SessionPresentation objects for meetings that
        have not yet ended. This implementation allows for 2 week meetings """
-       candidate_presentations = self.sessionpresentation_set.filter(
+       candidate_presentations = self.presentations.filter(
            session__meeting__date__gte=date_today() - datetime.timedelta(days=15)
        )
        return sorted(
@@ -1041,11 +1041,11 @@ class Document(DocumentInfo):
        """ returns related SessionPresentation objects for the most recent meeting in the past"""
        # Assumes no two meetings have the same start date - if the assumption is violated, one will be chosen arbitrarily
        today = date_today()
-       candidate_presentations = self.sessionpresentation_set.filter(session__meeting__date__lte=today)
+       candidate_presentations = self.presentations.filter(session__meeting__date__lte=today)
        candidate_meetings = set([p.session.meeting for p in candidate_presentations if p.session.meeting.end_date()<today])
        if candidate_meetings:
            mtg = sorted(list(candidate_meetings),key=lambda x:x.date,reverse=True)[0]
-           return self.sessionpresentation_set.filter(session__meeting=mtg)
+           return self.presentations.filter(session__meeting=mtg)
        else:
            return None
@@ -2529,8 +2529,8 @@ class DocumentMeetingTests(TestCase):

    def test_view_document_meetings(self):
        doc = IndividualDraftFactory.create()
-       doc.sessionpresentation_set.create(session=self.inprog,rev=None)
-       doc.sessionpresentation_set.create(session=self.interim,rev=None)
+       doc.presentations.create(session=self.inprog,rev=None)
+       doc.presentations.create(session=self.interim,rev=None)

        url = urlreverse('ietf.doc.views_doc.all_presentations', kwargs=dict(name=doc.name))
        response = self.client.get(url)
@@ -2541,8 +2541,8 @@ class DocumentMeetingTests(TestCase):
        self.assertFalse(q('#addsessionsbutton'))
        self.assertFalse(q("a.btn:contains('Remove document')"))

-       doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None)
-       doc.sessionpresentation_set.create(session=self.past,rev=None)
+       doc.presentations.create(session=self.past_cutoff,rev=None)
+       doc.presentations.create(session=self.past,rev=None)

        self.client.login(username="secretary", password="secretary+password")
        response = self.client.get(url)
@@ -2577,7 +2577,7 @@ class DocumentMeetingTests(TestCase):

    def test_edit_document_session(self):
        doc = IndividualDraftFactory.create()
-       sp = doc.sessionpresentation_set.create(session=self.future,rev=None)
+       sp = doc.presentations.create(session=self.future,rev=None)

        url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name='no-such-doc',session_id=sp.session_id))
        response = self.client.get(url)
@@ -2604,12 +2604,12 @@ class DocumentMeetingTests(TestCase):
        self.assertEqual(1,doc.docevent_set.count())
        response = self.client.post(url,{'version':'00','save':''})
        self.assertEqual(response.status_code, 302)
-       self.assertEqual(doc.sessionpresentation_set.get(pk=sp.pk).rev,'00')
+       self.assertEqual(doc.presentations.get(pk=sp.pk).rev,'00')
        self.assertEqual(2,doc.docevent_set.count())

    def test_edit_document_session_after_proceedings_closed(self):
        doc = IndividualDraftFactory.create()
-       sp = doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None)
+       sp = doc.presentations.create(session=self.past_cutoff,rev=None)

        url = urlreverse('ietf.doc.views_doc.edit_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id))
        self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username)
@@ -2624,7 +2624,7 @@ class DocumentMeetingTests(TestCase):

    def test_remove_document_session(self):
        doc = IndividualDraftFactory.create()
-       sp = doc.sessionpresentation_set.create(session=self.future,rev=None)
+       sp = doc.presentations.create(session=self.future,rev=None)

        url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name='no-such-doc',session_id=sp.session_id))
        response = self.client.get(url)
@@ -2649,12 +2649,12 @@ class DocumentMeetingTests(TestCase):
        self.assertEqual(1,doc.docevent_set.count())
        response = self.client.post(url,{'remove_session':''})
        self.assertEqual(response.status_code, 302)
-       self.assertFalse(doc.sessionpresentation_set.filter(pk=sp.pk).exists())
+       self.assertFalse(doc.presentations.filter(pk=sp.pk).exists())
        self.assertEqual(2,doc.docevent_set.count())

    def test_remove_document_session_after_proceedings_closed(self):
        doc = IndividualDraftFactory.create()
-       sp = doc.sessionpresentation_set.create(session=self.past_cutoff,rev=None)
+       sp = doc.presentations.create(session=self.past_cutoff,rev=None)

        url = urlreverse('ietf.doc.views_doc.remove_sessionpresentation',kwargs=dict(name=doc.name,session_id=sp.session_id))
        self.client.login(username=self.group_chair.user.username,password='%s+password'%self.group_chair.user.username)
@@ -832,7 +832,7 @@ def document_main(request, name, rev=None, document_html=False):
            sorted_relations=sorted_relations,
        ))

-   elif doc.type_id in ("slides", "agenda", "minutes", "bluesheets", "procmaterials",):
+   elif doc.type_id in ("slides", "agenda", "minutes", "narrativeminutes", "bluesheets", "procmaterials",):
        can_manage_material = can_manage_materials(request.user, doc.group)
        presentations = doc.future_presentations()
        if doc.uploaded_filename:
@@ -916,9 +916,9 @@ def document_main(request, name, rev=None, document_html=False):

    elif doc.type_id in ("chatlog", "polls"):
        if isinstance(doc,DocHistory):
-           session = doc.doc.sessionpresentation_set.last().session
+           session = doc.doc.presentations.last().session
        else:
-           session = doc.sessionpresentation_set.last().session
+           session = doc.presentations.last().session
        pathname = Path(session.meeting.get_materials_path()) / doc.type_id / doc.uploaded_filename
        content = get_unicode_document_content(doc.name, str(pathname))
        return render(
@@ -943,7 +943,7 @@ def document_main(request, name, rev=None, document_html=False):
        variants = set([match.name.split(".")[1] for match in Path(doc.get_file_path()).glob(f"{basename}.*")])
        inlineable = any([ext in variants for ext in ["md", "txt"]])
        if inlineable:
-           content = markdown.markdown(doc.text_or_error())
+           content = markdown.liberal_markdown(doc.text_or_error())
        else:
            content = "No format available to display inline"
        if "pdf" in variants:
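Note: the hunk above swaps markdown.markdown for markdown.liberal_markdown (from ietf.utils.markdown, per the "liberal markdown for secretariat controlled content" commit). Its implementation is not part of this excerpt; the following is only a rough, hypothetical sketch of the idea using the python-markdown library:

# Hypothetical sketch, not the datatracker implementation: enable more
# markdown extensions and accept raw inline HTML, on the assumption that
# the input is secretariat-controlled rather than arbitrary user input.
import markdown as py_markdown

def liberal_markdown_sketch(text: str) -> str:
    return py_markdown.markdown(text, extensions=["extra", "sane_lists"])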
@@ -2057,7 +2057,7 @@ class VersionForm(forms.Form):

def edit_sessionpresentation(request,name,session_id):
    doc = get_object_or_404(Document, name=name)
-   sp = get_object_or_404(doc.sessionpresentation_set, session_id=session_id)
+   sp = get_object_or_404(doc.presentations, session_id=session_id)

    if not sp.session.can_manage_materials(request.user):
        raise Http404
@@ -2074,7 +2074,7 @@ def edit_sessionpresentation(request,name,session_id):
        if form.is_valid():
            new_selection = form.cleaned_data['version']
            if initial['version'] != new_selection:
-               doc.sessionpresentation_set.filter(pk=sp.pk).update(rev=None if new_selection=='current' else new_selection)
+               doc.presentations.filter(pk=sp.pk).update(rev=None if new_selection=='current' else new_selection)
                c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person)
                c.desc = "Revision for session %s changed to %s" % (sp.session,new_selection)
                c.save()
@@ -2086,7 +2086,7 @@ def edit_sessionpresentation(request,name,session_id):

def remove_sessionpresentation(request,name,session_id):
    doc = get_object_or_404(Document, name=name)
-   sp = get_object_or_404(doc.sessionpresentation_set, session_id=session_id)
+   sp = get_object_or_404(doc.presentations, session_id=session_id)

    if not sp.session.can_manage_materials(request.user):
        raise Http404
@@ -2095,7 +2095,7 @@ def remove_sessionpresentation(request,name,session_id):
        raise Http404

    if request.method == 'POST':
-       doc.sessionpresentation_set.filter(pk=sp.pk).delete()
+       doc.presentations.filter(pk=sp.pk).delete()
        c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person)
        c.desc = "Removed from session: %s" % (sp.session)
        c.save()
@@ -2119,7 +2119,7 @@ def add_sessionpresentation(request,name):
    version_choices.insert(0,('current','Current at the time of the session'))

    sessions = get_upcoming_manageable_sessions(request.user)
-   sessions = sort_sessions([s for s in sessions if not s.sessionpresentation_set.filter(document=doc).exists()])
+   sessions = sort_sessions([s for s in sessions if not s.presentations.filter(document=doc).exists()])
    if doc.group:
        sessions = sorted(sessions,key=lambda x:0 if x.group==doc.group else 1)
@@ -2132,7 +2132,7 @@ def add_sessionpresentation(request,name):
        session_id = session_form.cleaned_data['session']
        version = version_form.cleaned_data['version']
        rev = None if version=='current' else version
-       doc.sessionpresentation_set.create(session_id=session_id,rev=rev)
+       doc.presentations.create(session_id=session_id,rev=rev)
        c = DocEvent(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person)
        c.desc = "%s to session: %s" % ('Added -%s'%rev if rev else 'Added', Session.objects.get(pk=session_id))
        c.save()
@@ -113,6 +113,8 @@ def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
    valid_doctypes = ['procmaterials']
    if group is not None:
        valid_doctypes.extend(['minutes','agenda','bluesheets'])
+       if group.acronym=="iesg":
+           valid_doctypes.append("narrativeminutes")
        valid_doctypes.extend(group.features.material_types)

    if document_type.slug not in valid_doctypes:
@@ -94,7 +94,7 @@ class StatementUploadForm(forms.Form):
        )
        if markdown_content != "":
            try:
-               _ = markdown.markdown(markdown_content)
+               _ = markdown.liberal_markdown(markdown_content)
            except Exception as e:
                raise forms.ValidationError(f"Markdown processing failed: {e}")
ietf/group/management/commands/import_iesg_appeals.py (new file, 292 lines)
@@ -0,0 +1,292 @@
# Copyright The IETF Trust 2023, All Rights Reserved

import datetime
import re
import shutil
import subprocess
import tempfile

from pathlib import Path
import dateutil

from django.conf import settings
from django.core.management import BaseCommand

from ietf.group.models import Appeal, AppealArtifact


class Command(BaseCommand):
    help = "Performs a one-time import of IESG appeals"

    def handle(self, *args, **options):
        old_appeals_root = (
            "/a/www/www6/iesg/appeal"
            if settings.SERVER_MODE == "production"
            else "/assets/www6/iesg/appeal"
        )
        tmpdir = tempfile.mkdtemp()
        process = subprocess.Popen(
            ["git", "clone", "https://github.com/kesara/iesg-scraper.git", tmpdir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        sub_stdout, sub_stderr = process.communicate()
        if not (Path(tmpdir) / "iesg_appeals" / "anderson-2006-03-08.md").exists():
            self.stdout.write(
                "Git clone of the iesg-scraper directory did not go as expected"
            )
            self.stdout.write("stdout:", sub_stdout)
            self.stdout.write("stderr:", sub_stderr)
            self.stdout.write(f"Clean up {tmpdir} manually")
            exit(-1)
        titles = [
            "Appeal: IESG Statement on Guidance on In-Person and Online Interim Meetings (John Klensin, 2023-08-15)",
            "Appeal of current Guidance on in-Person and Online meetings (Ted Hardie, Alan Frindell, 2023-07-19)",
            "Appeal re: URI Scheme Application and draft-mcsweeney-drop-scheme (Tim McSweeney, 2020-07-08)",
            "Appeal to the IESG re WGLC of draft-ietf-spring-srv6-network-programming (Fernando Gont, Andrew Alston, and Sander Steffann, 2020-04-22)",
            "Appeal re Protocol Action: 'URI Design and Ownership' to Best \nCurrent Practice (draft-nottingham-rfc7320bis-03.txt) (John Klensin; 2020-02-04)",
            "Appeal of IESG Conflict Review process and decision on draft-mavrogiannopoulos-pkcs8-validated-parameters-02 (John Klensin; 2018-07-07)",
            "Appeal of IESG decision to defer action and request that ISE publish draft-klensin-dns-function-considerations (John Klensin; 2017-11-29)",
            'Appeal to the IESG concerning its approval of the "draft-ietf-ianaplan-icg-response" (PDF file) (JFC Morfin; 2015-03-11)',
            "Appeal re tzdist mailing list moderation (Tobias Conradi; 2014-08-28) / Withdrawn by Submitter",
            "Appeal re draft-masotta-tftpexts-windowsize-opt (Patrick Masotta; 2013-11-14)",
            "Appeal re draft-ietf-manet-nhdp-sec-threats (Abdussalam Baryun; 2013-06-19)",
            "Appeal of decision to advance RFC6376 (Douglas Otis; 2013-05-30)",
            "Appeal to the IESG in regards to RFC 6852 (PDF file) (JFC Morfin; 2013-04-05)",
            "Appeal to the IESG concerning the approbation of the IDNA2008 document set (PDF file) (JFC Morfin; 2010-03-10)",
            "Authentication-Results Header Field Appeal (Douglas Otis, David Rand; 2009-02-16) / Withdrawn by Submitter",
            "Appeal to the IAB of IESG rejection of Appeal to Last Call draft-ietf-grow-anycast (Dean Anderson; 2008-11-14)",
            "Appeal to the IESG Concerning the Way At Large Internet Lead Users Are Not Permitted To Adequately Contribute to the IETF Deliverables (JFC Morfin; 2008-09-10)",
            "Appeal over suspension of posting rights for Todd Glassey (Todd Glassey; 2008-07-28)",
            "Appeal against IESG blocking DISCUSS on draft-klensin-rfc2821bis (John C Klensin; 2008-06-13)",
            "Appeal: Continued Abuse of Process by IPR-WG Chair (Dean Anderson; 2007-12-26)",
            "Appeal to the IESG from Todd Glassey (Todd Glassey; 2007-11-26)",
            "Appeal Against the Removal of the Co-Chairs of the GEOPRIV Working Group (PDF file) (Randall Gellens, Allison Mankin, and Andrew Newton; 2007-06-22)",
            "Appeal concerning the WG-LTRU rechartering (JFC Morfin; 2006-10-24)",
            "Appeal against decision within July 10 IESG appeal dismissal (JFC Morfin; 2006-09-09)",
            "Appeal: Mandatory to implement HTTP authentication mechanism in the Atom Publishing Protocol (Robert Sayre; 2006-08-29)",
            "Appeal Against IESG Decisions Regarding the draft-ietf-ltru-matching (PDF file) (JFC Morfin; 2006-08-16)",
            "Amended Appeal Re: grow: Last Call: 'Operation of Anycast Services' to BCP (draft-ietf-grow-anycast) (Dean Anderson; 2006-06-14)",
            "Appeal Against an IESG Decision Denying Me IANA Language Registration Process by way of PR-Action (PDF file) (JFC Morfin; 2006-05-17)",
            "Appeal to the IESG of PR-Action against Dean Anderson (Dean Anderson; 2006-03-08)",
            "Appeal to IESG against AD decision: one must clear the confusion opposing the RFC 3066 Bis consensus (JFC Morfin; 2006-02-20)",
            "Appeal to the IESG of an IESG decision (JFC Morfin; 2006-02-17)",
            "Appeal to the IESG in reference to the ietf-languages@alvestrand.no mailing list (JFC Morfin; 2006-02-07)",
            "Appeal to the IESG against an IESG decision concerning RFC 3066 Bis Draft (JFC Morfin; 2006-01-14)",
            "Appeal over a key change in a poor RFC 3066 bis example (JFC Morfin; 2005-10-19)",
            "Additional appeal against publication of draft-lyon-senderid-* in regards to its recommended use of Resent- header fields in the way that is inconsistant with RFC2822(William Leibzon; 2005-08-29)",
            "Appeal: Publication of draft-lyon-senderid-core-01 in conflict with referenced draft-schlitt-spf-classic-02 (Julian Mehnle; 2005-08-25)",
            'Appeal of decision to standardize "Mapping Between the Multimedia Messaging Service (MMS) and Internet Mail" (John C Klensin; 2005-06-10)',
            "Appeal regarding IESG decision on the GROW WG (David Meyer; 2003-11-15)",
            "Appeal: Official notice of appeal on suspension rights (Todd Glassey; 2003-08-06)",
            "Appeal: AD response to Site-Local Appeal (Tony Hain; 2003-07-31)",
            "Appeal against IESG decision for draft-chiba-radius-dynamic-authorization-05.txt (Glen Zorn; 2003-01-15)",
            "Appeal Against moving draft-ietf-ipngwg-addr-arch-v3 to Draft Standard (Robert Elz; 2002-11-05)",
        ]
        date_re = re.compile(r"\d{4}-\d{2}-\d{2}")
        dates = [
            datetime.datetime.strptime(date_re.search(t).group(), "%Y-%m-%d").date()
            for t in titles
        ]

        parts = [
            ["klensin-2023-08-15.txt", "response-to-klensin-2023-08-15.txt"],
            [
                "hardie-frindell-2023-07-19.txt",
                "response-to-hardie-frindell-2023-07-19.txt",
            ],
            ["mcsweeney-2020-07-08.txt", "response-to-mcsweeney-2020-07-08.pdf"],
            ["gont-2020-04-22.txt", "response-to-gont-2020-06-02.txt"],
            ["klensin-2020-02-04.txt", "response-to-klensin-2020-02-04.txt"],
            ["klensin-2018-07-07.txt", "response-to-klensin-2018-07-07.txt"],
            ["klensin-2017-11-29.txt", "response-to-klensin-2017-11-29.md"],
            ["morfin-2015-03-11.pdf", "response-to-morfin-2015-03-11.md"],
            ["conradi-2014-08-28.txt"],
            ["masotta-2013-11-14.txt", "response-to-masotta-2013-11-14.md"],
            ["baryun-2013-06-19.txt", "response-to-baryun-2013-06-19.md"],
            ["otis-2013-05-30.txt", "response-to-otis-2013-05-30.md"],
            ["morfin-2013-04-05.pdf", "response-to-morfin-2013-04-05.md"],
            ["morfin-2010-03-10.pdf", "response-to-morfin-2010-03-10.txt"],
            ["otis-2009-02-16.txt"],
            ["anderson-2008-11-14.md", "response-to-anderson-2008-11-14.txt"],
            ["morfin-2008-09-10.txt", "response-to-morfin-2008-09-10.txt"],
            ["glassey-2008-07-28.txt", "response-to-glassey-2008-07-28.txt"],
            ["klensin-2008-06-13.txt", "response-to-klensin-2008-06-13.txt"],
            ["anderson-2007-12-26.txt", "response-to-anderson-2007-12-26.txt"],
            ["glassey-2007-11-26.txt", "response-to-glassey-2007-11-26.txt"],
            ["gellens-2007-06-22.pdf", "response-to-gellens-2007-06-22.txt"],
            ["morfin-2006-10-24.txt", "response-to-morfin-2006-10-24.txt"],
            ["morfin-2006-09-09.txt", "response-to-morfin-2006-09-09.txt"],
            ["sayre-2006-08-29.txt", "response-to-sayre-2006-08-29.txt"],
            [
                "morfin-2006-08-16.pdf",
                "response-to-morfin-2006-08-17.txt",
                "response-to-morfin-2006-08-17-part2.txt",
            ],
            ["anderson-2006-06-13.txt", "response-to-anderson-2006-06-14.txt"],
            ["morfin-2006-05-17.pdf", "response-to-morfin-2006-05-17.txt"],
            ["anderson-2006-03-08.md", "response-to-anderson-2006-03-08.txt"],
            ["morfin-2006-02-20.txt", "response-to-morfin-2006-02-20.txt"],
            ["morfin-2006-02-17.txt", "response-to-morfin-2006-02-17.txt"],
            ["morfin-2006-02-07.txt", "response-to-morfin-2006-02-07.txt"],
            ["morfin-2006-01-14.txt", "response-to-morfin-2006-01-14.txt"],
            ["morfin-2005-10-19.txt", "response-to-morfin-2005-10-19.txt"],
            ["leibzon-2005-08-29.txt", "response-to-leibzon-2005-08-29.txt"],
            ["mehnle-2005-08-25.txt", "response-to-mehnle-2005-08-25.txt"],
            ["klensin-2005-06-10.txt", "response-to-klensin-2005-06-10.txt"],
            ["meyer-2003-11-15.txt", "response-to-meyer-2003-11-15.txt"],
            ["glassey-2003-08-06.txt", "response-to-glassey-2003-08-06.txt"],
            ["hain-2003-07-31.txt", "response-to-hain-2003-07-31.txt"],
            ["zorn-2003-01-15.txt", "response-to-zorn-2003-01-15.txt"],
            ["elz-2002-11-05.txt", "response-to-elz-2002-11-05.txt"],
        ]

        assert len(titles) == len(dates)
        assert len(titles) == len(parts)

        part_times = dict()
        part_times["klensin-2023-08-15.txt"] = "2023-08-15 15:03:55 -0400"
        part_times["response-to-klensin-2023-08-15.txt"] = "2023-08-24 18:54:13 +0300"
        part_times["hardie-frindell-2023-07-19.txt"] = "2023-07-19 07:17:16PDT"
        part_times[
            "response-to-hardie-frindell-2023-07-19.txt"
        ] = "2023-08-15 11:58:26PDT"
        part_times["mcsweeney-2020-07-08.txt"] = "2020-07-08 14:45:00 -0400"
        part_times["response-to-mcsweeney-2020-07-08.pdf"] = "2020-07-28 12:54:04 -0000"
        part_times["gont-2020-04-22.txt"] = "2020-04-22 22:26:20 -0400"
        part_times["response-to-gont-2020-06-02.txt"] = "2020-06-02 20:44:29 -0400"
        part_times["klensin-2020-02-04.txt"] = "2020-02-04 13:54:46 -0500"
        # part_times["response-to-klensin-2020-02-04.txt"]="2020-03-24 11:49:31EDT"
        part_times["response-to-klensin-2020-02-04.txt"] = "2020-03-24 11:49:31 -0400"
        part_times["klensin-2018-07-07.txt"] = "2018-07-07 12:40:43PDT"
        # part_times["response-to-klensin-2018-07-07.txt"]="2018-08-16 10:46:45EDT"
        part_times["response-to-klensin-2018-07-07.txt"] = "2018-08-16 10:46:45 -0400"
        part_times["klensin-2017-11-29.txt"] = "2017-11-29 09:35:02 -0500"
        part_times["response-to-klensin-2017-11-29.md"] = "2017-11-30 11:33:04 -0500"
        part_times["morfin-2015-03-11.pdf"] = "2015-03-11 18:03:44 -0000"
        part_times["response-to-morfin-2015-03-11.md"] = "2015-04-16 15:18:09 -0000"
        part_times["conradi-2014-08-28.txt"] = "2014-08-28 22:28:06 +0300"
        part_times["masotta-2013-11-14.txt"] = "2013-11-14 15:35:19 +0200"
        part_times["response-to-masotta-2013-11-14.md"] = "2014-01-27 07:39:32 -0800"
        part_times["baryun-2013-06-19.txt"] = "2013-06-19 06:29:51PDT"
        part_times["response-to-baryun-2013-06-19.md"] = "2013-07-02 15:24:42 -0700"
        part_times["otis-2013-05-30.txt"] = "2013-05-30 19:35:18 +0000"
        part_times["response-to-otis-2013-05-30.md"] = "2013-06-27 11:56:48 -0700"
        part_times["morfin-2013-04-05.pdf"] = "2013-04-05 17:31:19 -0700"
        part_times["response-to-morfin-2013-04-05.md"] = "2013-04-17 08:17:29 -0700"
        part_times["morfin-2010-03-10.pdf"] = "2010-03-10 21:40:58 +0100"
        part_times["response-to-morfin-2010-03-10.txt"] = "2010-04-07 14:26:06 -0700"
        part_times["otis-2009-02-16.txt"] = "2009-02-16 15:47:15 -0800"
        part_times["anderson-2008-11-14.md"] = "2008-11-14 00:16:58 -0500"
        part_times["response-to-anderson-2008-11-14.txt"] = "2008-12-15 11:00:02 -0800"
        part_times["morfin-2008-09-10.txt"] = "2008-09-10 04:10:13 +0200"
        part_times["response-to-morfin-2008-09-10.txt"] = "2008-09-28 10:00:01PDT"
        part_times["glassey-2008-07-28.txt"] = "2008-07-28 08:34:52 -0700"
        part_times["response-to-glassey-2008-07-28.txt"] = "2008-09-02 11:00:01PDT"
        part_times["klensin-2008-06-13.txt"] = "2008-06-13 21:14:38 -0400"
        part_times["response-to-klensin-2008-06-13.txt"] = "2008-07-07 10:00:01 PDT"
        # part_times["anderson-2007-12-26.txt"]="2007-12-26 17:19:34EST"
        part_times["anderson-2007-12-26.txt"] = "2007-12-26 17:19:34 -0500"
        part_times["response-to-anderson-2007-12-26.txt"] = "2008-01-15 17:21:05 -0500"
        part_times["glassey-2007-11-26.txt"] = "2007-11-26 08:13:22 -0800"
        part_times["response-to-glassey-2007-11-26.txt"] = "2008-01-23 17:38:43 -0500"
        part_times["gellens-2007-06-22.pdf"] = "2007-06-22 21:45:41 -0400"
        part_times["response-to-gellens-2007-06-22.txt"] = "2007-09-20 14:01:27 -0400"
        part_times["morfin-2006-10-24.txt"] = "2006-10-24 05:03:17 +0200"
        part_times["response-to-morfin-2006-10-24.txt"] = "2006-11-07 12:56:02 -0500"
        part_times["morfin-2006-09-09.txt"] = "2006-09-09 02:54:55 +0200"
        part_times["response-to-morfin-2006-09-09.txt"] = "2006-09-15 12:56:31 -0400"
        part_times["sayre-2006-08-29.txt"] = "2006-08-29 17:05:03 -0400"
        part_times["response-to-sayre-2006-08-29.txt"] = "2006-10-16 13:07:18 -0400"
        part_times["morfin-2006-08-16.pdf"] = "2006-08-16 18:28:19 -0400"
        part_times["response-to-morfin-2006-08-17.txt"] = "2006-08-22 12:05:42 -0400"
        part_times[
            "response-to-morfin-2006-08-17-part2.txt"
        ] = "2006-11-07 13:00:58 -0500"
        # part_times["anderson-2006-06-13.txt"]="2006-06-13 21:51:18EDT"
        part_times["anderson-2006-06-13.txt"] = "2006-06-13 21:51:18 -0400"
        part_times["response-to-anderson-2006-06-14.txt"] = "2006-07-10 14:31:08 -0400"
        part_times["morfin-2006-05-17.pdf"] = "2006-05-17 06:46:18 +0200"
        part_times["response-to-morfin-2006-05-17.txt"] = "2006-07-10 14:18:10 -0400"
        part_times["anderson-2006-03-08.md"] = "2006-03-08 09:42:44 +0100"
        part_times["response-to-anderson-2006-03-08.txt"] = "2006-03-20 14:55:38 -0500"
        part_times["morfin-2006-02-20.txt"] = "2006-02-20 19:18:24 +0100"
        part_times["response-to-morfin-2006-02-20.txt"] = "2006-03-06 13:08:39 -0500"
        part_times["morfin-2006-02-17.txt"] = "2006-02-17 18:59:38 +0100"
        part_times["response-to-morfin-2006-02-17.txt"] = "2006-07-10 14:05:15 -0400"
        part_times["morfin-2006-02-07.txt"] = "2006-02-07 19:38:57 -0500"
        part_times["response-to-morfin-2006-02-07.txt"] = "2006-02-21 19:09:26 -0500"
        part_times["morfin-2006-01-14.txt"] = "2006-01-14 15:05:24 +0100"
        part_times["response-to-morfin-2006-01-14.txt"] = "2006-02-21 12:23:38 -0500"
        part_times["morfin-2005-10-19.txt"] = "2005-10-19 17:12:11 +0200"
        part_times["response-to-morfin-2005-10-19.txt"] = "2005-11-15 11:42:30 -0500"
        part_times["leibzon-2005-08-29.txt"] = "2005-08-29 08:28:52PDT"
        part_times["response-to-leibzon-2005-08-29.txt"] = "2005-12-08 14:04:47 -0500"
        part_times["mehnle-2005-08-25.txt"] = "2005-08-25 00:45:26 +0200"
        part_times["response-to-mehnle-2005-08-25.txt"] = "2005-12-08 13:37:38 -0500"
        part_times["klensin-2005-06-10.txt"] = "2005-06-10 14:49:17 -0400"
        part_times["response-to-klensin-2005-06-10.txt"] = "2005-07-22 18:14:06 -0400"
        part_times["meyer-2003-11-15.txt"] = "2003-11-15 09:47:11 -0800"
        part_times["response-to-meyer-2003-11-15.txt"] = "2003-11-25 10:56:06 -0500"
        part_times["glassey-2003-08-06.txt"] = "2003-08-06 02:14:24 +0000"
        part_times["response-to-glassey-2003-08-06.txt"] = "2003-09-24 09:54:51 -0400"
        part_times["hain-2003-07-31.txt"] = "2003-07-31 16:44:19 -0700"
        part_times["response-to-hain-2003-07-31.txt"] = "2003-09-30 14:44:30 -0400"
        part_times["zorn-2003-01-15.txt"] = "2003-01-15 01:22:28 -0800"
        part_times["elz-2002-11-05.txt"] = "2002-11-05 10:51:13 +0700"
        # No time could be found for this one:
        part_times["response-to-zorn-2003-01-15.txt"] = "2003-02-08"
        # This one was issued sometime between 2002-12-27 (when IESG minutes note that the
        # appeal response was approved) and 2003-01-04 (when the appeal was escalated to
        # the IAB) - we're using the earlier end of the window
        part_times["response-to-elz-2002-11-05.txt"] = "2002-12-27"
        for name in part_times:
            part_times[name] = dateutil.parser.parse(part_times[name]).astimezone(
                datetime.timezone.utc
            )

        redirects = []
        for index, title in enumerate(titles):
            # IESG is group 2
            appeal = Appeal.objects.create(
                name=titles[index], date=dates[index], group_id=2
            )
            for part in parts[index]:
                if part.endswith(".pdf"):
                    content_type = "application/pdf"
                else:
                    content_type = "text/markdown;charset=utf-8"
                if part.endswith(".md"):
                    source_path = Path(tmpdir) / "iesg_appeals" / part
                else:
                    source_path = Path(old_appeals_root) / part
                with source_path.open("rb") as source_file:
                    bits = source_file.read()
                if part == "morfin-2008-09-10.txt":
                    bits = bits.decode("macintosh")
                    bits = bits.replace("\r", "\n")
                    bits = bits.encode("utf8")
                elif part in ["morfin-2006-02-07.txt", "morfin-2006-01-14.txt"]:
                    bits = bits.decode("windows-1252").encode("utf8")
                artifact_type_id = (
                    "response" if part.startswith("response") else "appeal"
                )
                artifact = AppealArtifact.objects.create(
                    appeal=appeal,
                    artifact_type_id=artifact_type_id,
                    date=part_times[part].date(),
                    content_type=content_type,
                    bits=bits,
                )
                redirects.append(
                    (
                        part.replace(".md", ".html")
                        if part.endswith(".md")
                        else part,
                        artifact.pk,
                    )
                )

        shutil.rmtree(tmpdir)
        with open("iesg_appeal_redirects.txt", "w") as f:
            f.write(str(redirects))
ietf/group/management/commands/import_iesg_statements.py (new file, 189 lines)
@@ -0,0 +1,189 @@
# Copyright The IETF Trust 2024, All Rights Reserved

import debug  # pyflakes:ignore

import datetime
import os
import shutil
import subprocess
import tempfile

from collections import namedtuple, Counter
from pathlib import Path

from django.conf import settings
from django.core.management.base import BaseCommand

from ietf.doc.models import Document, DocEvent, State
from ietf.utils.text import xslugify


class Command(BaseCommand):
    help = "Performs a one-time import of IESG statements"

    def handle(self, *args, **options):
        if Document.objects.filter(type="statement", group__acronym="iesg").exists():
            self.stdout.write("IESG statement documents already exist - exiting")
            exit(-1)
        tmpdir = tempfile.mkdtemp()
        process = subprocess.Popen(
            ["git", "clone", "https://github.com/kesara/iesg-scraper.git", tmpdir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        sub_stdout, sub_stderr = process.communicate()
        if not Path(tmpdir).joinpath("iesg_statements", "2000-08-29-0.md").exists():
            self.stdout.write(
                "Git clone of the iesg-scraper directory did not go as expected"
            )
            self.stdout.write("stdout:", sub_stdout)
            self.stdout.write("stderr:", sub_stderr)
            self.stdout.write(f"Clean up {tmpdir} manually")
            exit(-1)

        for item in self.get_work_items():
            replaced = item.title.endswith(" SUPERSEDED") or item.doc_time.date() == datetime.date(2007,7,30)
            title = item.title
            if title.endswith(" - SUPERSEDED"):
                title = title[: -len(" - SUPERSEDED")]
            name = f"statement-iesg-{xslugify(title)}-{item.doc_time:%Y%m%d}"
            dest_filename = f"{name}-00.md"
            # Create Document
            doc = Document.objects.create(
                name=name,
                type_id="statement",
                title=title,
                group_id=2,  # The IESG group
                rev="00",
                uploaded_filename=dest_filename,
            )
            doc.set_state(
                State.objects.get(
                    type_id="statement",
                    slug="replaced" if replaced else "active",
                )
            )
            e1 = DocEvent.objects.create(
                time=item.doc_time,
                type="published_statement",
                doc=doc,
                rev="00",
                by_id=1,  # (System)
                desc="Statement published (note: The exact time of day is inaccurate - the actual time of day is not known)",
            )
            e2 = DocEvent.objects.create(
                type="added_comment",
                doc=doc,
                rev="00",
                by_id=1,  # (System)
                desc="Statement moved into datatracker from www.ietf.org",
            )
            doc.save_with_history([e1, e2])

            # Put file in place
            source = Path(tmpdir).joinpath("iesg_statements", item.source_filename)
            dest = Path(settings.DOCUMENT_PATH_PATTERN.format(doc=doc)).joinpath(
                dest_filename
            )
            if dest.exists():
                self.stdout.write(
                    f"WARNING: {dest} already exists - not overwriting it."
                )
            else:
                os.makedirs(dest.parent, exist_ok=True)
                shutil.copy(source, dest)

        shutil.rmtree(tmpdir)

    def get_work_items(self):
        Item = namedtuple("Item", "doc_time source_filename title")
        items = []
        dressed_rows = " ".join(
            self.cut_paste_from_www().expandtabs(1).split(" ")
        ).split("\n")
        # Rube-Goldberg-esque dance to deal with conflicting directions of the scrape and
        # what order we want the result to sort to
        dressed_rows.reverse()
        total_times_date_seen = Counter([row.split(" ")[0] for row in dressed_rows])
        count_date_seen_so_far = Counter()
        for row in dressed_rows:
            date_part = row.split(" ")[0]
            title_part = row[len(date_part) + 1 :]
            datetime_args = list(map(int, date_part.replace("-0", "-").split("-")))
            # Use the minutes in timestamps to preserve order of statements
            # on the same day as they currently appear at www.ietf.org
            datetime_args.extend([12, count_date_seen_so_far[date_part]])
            count_date_seen_so_far[date_part] += 1
            doc_time = datetime.datetime(*datetime_args, tzinfo=datetime.timezone.utc)
            items.append(
                Item(
                    doc_time,
                    f"{date_part}-{total_times_date_seen[date_part] - count_date_seen_so_far[date_part]}.md",
                    title_part,
                )
            )
        return items

    def cut_paste_from_www(self):
        return """2023-08-24 Support Documents in IETF Working Groups
2023-08-14 Guidance on In-Person and Online Interim Meetings
2023-05-01 IESG Statement on EtherTypes
2023-03-15 Second Report on the RFC 8989 Experiment
2023-01-27 Guidance on In-Person and Online Interim Meetings - SUPERSEDED
2022-10-31 Statement on Restricting Access to IETF IT Systems
2022-01-21 Handling Ballot Positions
2021-09-01 Report on the RFC 8989 experiment
2021-07-21 IESG Statement on Allocation of Email Addresses in the ietf.org Domain
2021-05-11 IESG Statement on Inclusive Language
2021-05-10 IESG Statement on Internet-Draft Authorship
2021-05-07 IESG Processing of RFC Errata for the IETF Stream
2021-04-16 Last Call Guidance to the Community
2020-07-23 IESG Statement On Oppressive or Exclusionary Language
2020-05-01 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2018-03-16 IETF Meeting Photography Policy
2018-01-11 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2017-02-09 License File for Open Source Repositories
2016-11-13 Support Documents in IETF Working Groups - SUPERSEDED
2016-02-05 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2016-01-11 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2015-08-20 IESG Statement on Maximizing Encrypted Access To IETF Information
2015-06-11 IESG Statement on Internet-Draft Authorship - SUPERSEDED
2014-07-20 IESG Statement on Designating RFCs as Historic
2014-05-07 DISCUSS Criteria in IESG Review
2014-03-02 Writable MIB Module IESG Statement
2013-11-03 IETF Anti-Harassment Policy
2012-10-25 IESG Statement on Ethertypes - SUPERSEDED
2012-10-25 IESG Statement on Removal of an Internet-Draft from the IETF Web Site
2011-10-20 IESG Statement on Designating RFCs as Historic - SUPERSEDED
2011-06-27 IESG Statement on Designating RFCs as Historic - SUPERSEDED
2011-06-13 IESG Statement on IESG Processing of RFC Errata concerning RFC Metadata
2010-10-11 IESG Statement on Document Shepherds
2010-05-24 IESG Statement on the Usage of Assignable Codepoints, Addresses and Names in Specification Examples
2010-05-24 IESG Statement on NomCom Eligibility and Day Passes
2009-09-08 IESG Statement on Copyright
2009-01-20 IESG Statement on Proposed Status for IETF Documents Reserving Resources for Example Purposes
2008-09-02 Guidance on Interim Meetings, Conference Calls and Jabber Sessions - SUPERSEDED
2008-07-30 IESG Processing of RFC Errata for the IETF Stream
2008-04-14 IESG Statement on Spam Control on IETF Mailing Lists
2008-03-03 IESG Statement on Registration Requests for URIs Containing Telephone Numbers
2008-02-27 IESG Statement on RFC3406 and URN Namespaces Registry Review
2008-01-23 Advice for WG Chairs Dealing with Off-Topic Postings
2007-10-04 On Appeals of IESG and Area Director Actions and Decisions
2007-07-05 Experimental Specification of New Congestion Control Algorithms
2007-03-20 Guidance on Area Director Sponsoring of Documents
2007-01-15 Last Call Guidance to the Community - SUPERSEDED
2006-04-19 IESG Statement: Normative and Informative References
2006-02-17 IESG Statement on Disruptive Posting
2006-01-09 Guidance for Spam Control on IETF Mailing Lists - SUPERSEDED
2006-01-05 IESG Statement on AUTH48 State
2005-05-12 Syntax for Format Definitions
2003-02-11 IESG Statement on IDN
2002-11-27 Copyright Statement in MIB and PIB Modules
2002-03-13 Guidance for Spam Control on IETF Mailing Lists - SUPERSEDED
2001-12-21 On Design Teams
2001-10-01 Guidelines for the Use of Formal Languages in IETF Specifications
2001-03-21 Establishment of Temporary Sub-IP Area
2000-12-06 Plans to Organize "Sub-IP" Technologies in the IETF
2000-11-20 A New IETF Work Area
2000-08-29 Guidance on Interim IETF Working Group Meetings and Conference Calls - SUPERSEDED
2000-08-29 IESG Guidance on the Moderation of IETF Working Group Mailing Lists"""
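Note: the naming scheme in handle() above combines xslugify(title) with the statement date. An illustrative example (expected output shown as a comment, not taken from the commit):

from ietf.utils.text import xslugify

title = "Handling Ballot Positions"
name = f"statement-iesg-{xslugify(title)}-20220121"
# e.g. "statement-iesg-handling-ballot-positions-20220121"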
@@ -43,12 +43,14 @@ import re
import json

from collections import OrderedDict, defaultdict
+import types
from simple_history.utils import update_change_reason

from django import forms
from django.conf import settings
from django.contrib.auth.decorators import login_required
-from django.db.models import Q, Count, OuterRef, Subquery
+from django.db.models import Count, F, OuterRef, Prefetch, Q, Subquery, TextField, Value
+from django.db.models.functions import Coalesce
from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.template.loader import render_to_string
@@ -83,6 +85,7 @@ from ietf.group.utils import (get_charter_text, can_manage_all_groups_of_type,
from ietf.ietfauth.utils import has_role, is_authorized_in_group
from ietf.mailtrigger.utils import gather_relevant_expansions
from ietf.meeting.helpers import get_meeting
from ietf.meeting.models import ImportantDate, SchedTimeSessAssignment, SchedulingEvent
from ietf.meeting.utils import group_sessions
from ietf.name.models import GroupTypeName, StreamName
from ietf.person.models import Email, Person
@@ -835,21 +838,70 @@ def meetings(request, acronym, group_type=None):
    four_years_ago = timezone.now() - datetime.timedelta(days=4 * 365)

-   sessions = (
-       group.session_set.with_current_status()
-       .filter(
-           meeting__date__gt=four_years_ago
-           if group.acronym != "iab"
-           else datetime.date(1970, 1, 1),
-           type__in=["regular", "plenary", "other"],
-       )
-       .filter(
-           current_status__in=["sched", "schedw", "appr", "canceled"],
+   stsas = SchedTimeSessAssignment.objects.filter(
+       session__type__in=["regular", "plenary", "other"],
+       session__group=group)
+   if group.acronym not in ["iab", "iesg"]:
+       stsas = stsas.filter(session__meeting__date__gt=four_years_ago)
+   stsas = stsas.annotate(sessionstatus=Coalesce(
+       Subquery(
+           SchedulingEvent.objects.filter(
+               session=OuterRef("session__pk")
+           ).order_by(
+               '-time', '-id'
+           ).values('status')[:1]),
+       Value(''),
+       output_field=TextField())
+   ).filter(
+       sessionstatus__in=["sched", "schedw", "appr", "canceled"],
+       session__meeting__schedule=F("schedule")
+   ).distinct().select_related(
+       "session", "session__group", "session__group__parent", "session__meeting__type", "timeslot"
+   ).prefetch_related(
+       "session__materials",
+       "session__materials__states",
+       Prefetch("session__materials",
+           queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'),
+           to_attr="prefetched_active_materials"
+       ),
+   )
+
+   stsas = list(stsas)
+
+   for stsa in stsas:
+       stsa.session._otsa = stsa
+       stsa.session.official_timeslotassignment = types.MethodType(lambda self:self._otsa, stsa.session)
+       stsa.session.current_status = stsa.sessionstatus
+
+   sessions = sorted(
+       set([stsa.session for stsa in stsas]),
+       key=lambda x: (
+           x._otsa.timeslot.time,
+           x._otsa.timeslot.type_id,
+           x._otsa.session.group.parent.name if x._otsa.session.group.parent else None,
+           x._otsa.session.name
+       )
+   )
+   sessions = list(sessions)
+
+   meeting_seen = None
+   for s in sessions:
+       if s.meeting != meeting_seen:
+           meeting_seen = s.meeting
+           order = 1
+       s._oim = order
+       s.order_in_meeting = types.MethodType(lambda self:self._oim, s)
+       order += 1

    revsub_dates_by_meeting = dict(ImportantDate.objects.filter(name_id="revsub", meeting__session__in=sessions).distinct().values_list("meeting_id","date"))

    for s in sessions:
        s.order_number = s.order_in_meeting()
        if s.meeting.pk in revsub_dates_by_meeting:
            cutoff_date = revsub_dates_by_meeting[s.meeting.pk]
        else:
            cutoff_date = s.meeting.date + datetime.timedelta(days=s.meeting.submission_correction_day_offset)
        s.cached_is_cutoff = date_today(datetime.timezone.utc) > cutoff_date

    future, in_progress, recent, past = group_sessions(sessions)
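Note: the rewrite above removes the per-session status lookups (an N+1 query pattern) by computing each session's latest SchedulingEvent status inside the same query. A distilled sketch of that Coalesce-over-Subquery annotation, applied directly to Session here for clarity:

# Distilled sketch of the annotation pattern used above: take the most recent
# SchedulingEvent's status per session, defaulting to "" for sessions with no
# events, all in a single query.
from django.db.models import OuterRef, Subquery, TextField, Value
from django.db.models.functions import Coalesce

from ietf.meeting.models import SchedulingEvent, Session

latest_status = Coalesce(
    Subquery(
        SchedulingEvent.objects.filter(session=OuterRef("pk"))
        .order_by("-time", "-id")
        .values("status")[:1]
    ),
    Value(""),
    output_field=TextField(),
)
sessions = Session.objects.annotate(current_status=latest_status)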
@@ -857,7 +909,7 @@ def meetings(request, acronym, group_type=None):
    can_always_edit = has_role(request.user, ["Secretariat", "Area Director"])

    far_past = []
-   if group.acronym == "iab":
+   if group.acronym in ["iab", "iesg"]:
        recent_past = []
        for s in past:
            if s.time >= four_years_ago:
@@ -1347,16 +1399,36 @@ def stream_edit(request, acronym):
    )


-@cache_control(public=True, max_age=30*60)
+@cache_control(public=True, max_age=30 * 60)
@cache_page(30 * 60)
def group_menu_data(request):
-   groups = Group.objects.filter(state="active", parent__state="active").filter(Q(type__features__acts_like_wg=True)|Q(type_id__in=['program','iabasg','iabworkshop'])|Q(parent__acronym='ietfadminllc')|Q(parent__acronym='rfceditor')).order_by("-type_id","acronym")
+   groups = (
+       Group.objects.filter(state="active", parent__state="active")
+       .filter(
+           Q(type__features__acts_like_wg=True)
+           | Q(type_id__in=["program", "iabasg", "iabworkshop"])
+           | Q(parent__acronym="ietfadminllc")
+           | Q(parent__acronym="rfceditor")
+       )
+       .order_by("-type_id", "acronym")
+       .select_related("type")
+   )

    groups_by_parent = defaultdict(list)
    for g in groups:
-       url = urlreverse("ietf.group.views.group_home", kwargs={ 'group_type': g.type_id, 'acronym': g.acronym })
-       # groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url })
-       groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'type': escape(g.type.verbose_name or g.type.name), 'url': url })
+       url = urlreverse(
+           "ietf.group.views.group_home",
+           kwargs={"group_type": g.type_id, "acronym": g.acronym},
+       )
+       # groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url })
+       groups_by_parent[g.parent_id].append(
+           {
+               "acronym": g.acronym,
+               "name": escape(g.name),
+               "type": escape(g.type.verbose_name or g.type.name),
+               "url": url,
+           }
+       )

    iab = Group.objects.get(acronym="iab")
    groups_by_parent[iab.pk].insert(
@@ -1365,12 +1437,15 @@ def group_menu_data(request):
            "acronym": iab.acronym,
            "name": iab.name,
            "type": "Top Level Group",
-           "url": urlreverse("ietf.group.views.group_home", kwargs={"acronym": iab.acronym})
-       }
+           "url": urlreverse(
+               "ietf.group.views.group_home", kwargs={"acronym": iab.acronym}
+           ),
+       },
    )
    return JsonResponse(groups_by_parent)


@cache_control(public=True, max_age=30 * 60)
@cache_page(30 * 60)
def group_stats_data(request, years="3", only_active=True):
@@ -2116,14 +2191,25 @@ def statements(request, acronym, group_type=None):
    if not acronym in ["iab", "iesg"]:
        raise Http404
    group = get_group_or_404(acronym, group_type)
-   statements = group.document_set.filter(type_id="statement").annotate(
-       published=Subquery(
-           DocEvent.objects.filter(
-               doc=OuterRef("pk"),
-               type="published_statement"
-           ).order_by("-time").values("time")[:1]
-       )
-   ).order_by("-published")
+   statements = (
+       group.document_set.filter(type_id="statement")
+       .annotate(
+           published=Subquery(
+               DocEvent.objects.filter(doc=OuterRef("pk"), type="published_statement")
+               .order_by("-time")
+               .values("time")[:1]
+           )
+       )
+       .annotate(
+           status=Subquery(
+               Document.states.through.objects.filter(
+                   document_id=OuterRef("pk"), state__type="statement"
+               ).values_list("state__slug", flat=True)[:1]
+           )
+       )
+       .order_by("-published")
+   )
+   debug.show("statements.first().status")
    return render(
        request,
        "group/statements.html",
@@ -341,7 +341,7 @@ class InterimSessionModelForm(forms.ModelForm):
            # FIXME: What about agendas in html or markdown format?
            uploaded_filename='{}-00.txt'.format(filename))
        doc.set_state(State.objects.get(type__slug=doc.type.slug, slug='active'))
-       self.instance.sessionpresentation_set.create(document=doc, rev=doc.rev)
+       self.instance.presentations.create(document=doc, rev=doc.rev)
        NewRevisionDocEvent.objects.create(
            type='new_revision',
            by=self.user.person,
@@ -104,7 +104,7 @@ def preprocess_assignments_for_agenda(assignments_queryset, meeting, extra_prefe
        queryset=add_event_info_to_session_qs(Session.objects.all().prefetch_related(
            'group', 'group__charter', 'group__charter__group',
            Prefetch('materials',
-               queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('sessionpresentation__order').prefetch_related('states'),
+               queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'),
                to_attr='prefetched_active_materials'
            )
        ))
@@ -890,7 +890,7 @@ def make_materials_directories(meeting):
    # was merged with the regular datatracker code; then in secr/proceedings/views.py
    # in make_directories())
    saved_umask = os.umask(0)
-   for leaf in ('slides','agenda','minutes','id','rfc','bluesheets'):
+   for leaf in ('slides','agenda','minutes', 'narrativeminutes', 'id','rfc','bluesheets'):
        target = os.path.join(path,leaf)
        if not os.path.exists(target):
            os.makedirs(target)
ietf/meeting/management/commands/import_iesg_minutes.py (new file, 343 lines; excerpt truncated below)
@ -0,0 +1,343 @@
|
|||
# Copyright The IETF Trust 2023, All Rights Reserved
|
||||
|
||||
from collections import namedtuple
|
||||
import datetime
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management import BaseCommand
|
||||
|
||||
from pathlib import Path
|
||||
from zoneinfo import ZoneInfo
|
||||
from ietf.doc.models import DocEvent, Document
|
||||
|
||||
from ietf.meeting.models import (
|
||||
Meeting,
|
||||
SchedTimeSessAssignment,
|
||||
Schedule,
|
||||
SchedulingEvent,
|
||||
Session,
|
||||
TimeSlot,
|
||||
)
|
||||
from ietf.name.models import DocTypeName
|
||||
|
||||
|
||||
def add_time_of_day(bare_datetime):
|
||||
"""Add a time for the iesg meeting based on a date and make it tzaware
|
||||
|
||||
From the secretariat - the telechats happened at these times:
|
||||
2015-04-09 to present: 0700 PT America/Los Angeles
|
||||
1993-02-01 to 2015-03-12: 1130 ET America/New York
|
||||
1991-07-30 to 1993-01-25: 1200 ET America/New York
|
||||
"""
|
||||
dt = None
|
||||
if bare_datetime.year > 2015:
|
||||
dt = bare_datetime.replace(hour=7).replace(
|
||||
tzinfo=ZoneInfo("America/Los_Angeles")
|
||||
)
|
||||
elif bare_datetime.year == 2015:
|
||||
if bare_datetime.month >= 4:
|
||||
dt = bare_datetime.replace(hour=7).replace(
|
||||
tzinfo=ZoneInfo("America/Los_Angeles")
|
||||
)
|
||||
else:
|
||||
dt = bare_datetime.replace(hour=11, minute=30).replace(
|
||||
tzinfo=ZoneInfo("America/New_York")
|
||||
)
|
||||
elif bare_datetime.year > 1993:
|
||||
dt = bare_datetime.replace(hour=11, minute=30).replace(
|
||||
tzinfo=ZoneInfo("America/New_York")
|
||||
)
|
||||
elif bare_datetime.year == 1993:
|
||||
if bare_datetime.month >= 2:
|
||||
dt = bare_datetime.replace(hour=11, minute=30).replace(
|
||||
tzinfo=ZoneInfo("America/New_York")
|
||||
)
|
||||
else:
|
||||
dt = bare_datetime.replace(hour=12).replace(
|
||||
tzinfo=ZoneInfo("America/New_York")
|
||||
)
|
||||
else:
|
||||
dt = bare_datetime.replace(hour=12).replace(tzinfo=ZoneInfo("America/New_York"))
|
||||
|
||||
return dt.astimezone(datetime.timezone.utc)
|
||||
|
||||
|

def build_bof_coord_data():
    CoordTuple = namedtuple("CoordTuple", "meeting_number source_name")

    def utc_from_la_time(time):
        return time.replace(tzinfo=ZoneInfo("America/Los_Angeles")).astimezone(
            datetime.timezone.utc
        )

    data = dict()
    data[utc_from_la_time(datetime.datetime(2016, 6, 10, 7, 0))] = CoordTuple(
        96, "2015/bof-minutes-ietf-96.txt"
    )
    data[utc_from_la_time(datetime.datetime(2016, 10, 6, 7, 0))] = CoordTuple(
        97, "2016/BoF-Minutes-2016-10-06.txt"
    )
    data[utc_from_la_time(datetime.datetime(2017, 2, 15, 8, 0))] = CoordTuple(
        98, "2017/bof-minutes-ietf-98.txt"
    )
    data[utc_from_la_time(datetime.datetime(2017, 6, 7, 8, 0))] = CoordTuple(
        99, "2017/bof-minutes-ietf-99.txt"
    )
    data[utc_from_la_time(datetime.datetime(2017, 10, 5, 7, 0))] = CoordTuple(
        100, "2017/bof-minutes-ietf-100.txt"
    )
    data[utc_from_la_time(datetime.datetime(2018, 2, 5, 11, 0))] = CoordTuple(
        101, "2018/bof-minutes-ietf-101.txt"
    )
    data[utc_from_la_time(datetime.datetime(2018, 6, 5, 8, 0))] = CoordTuple(
        102, "2018/bof-minutes-ietf-102.txt"
    )
    data[utc_from_la_time(datetime.datetime(2018, 9, 26, 7, 0))] = CoordTuple(
        103, "2018/bof-minutes-ietf-103.txt"
    )
    data[utc_from_la_time(datetime.datetime(2019, 2, 15, 9, 0))] = CoordTuple(
        104, "2019/bof-minutes-ietf-104.txt"
    )
    data[utc_from_la_time(datetime.datetime(2019, 6, 11, 7, 30))] = CoordTuple(
        105, "2019/bof-minutes-ietf-105.txt"
    )
    data[utc_from_la_time(datetime.datetime(2019, 10, 9, 6, 30))] = CoordTuple(
        106, "2019/bof-minutes-ietf-106.txt"
    )
    data[utc_from_la_time(datetime.datetime(2020, 2, 13, 8, 0))] = CoordTuple(
        107, "2020/bof-minutes-ietf-107.txt"
    )
    data[utc_from_la_time(datetime.datetime(2020, 6, 15, 8, 0))] = CoordTuple(
        108, "2020/bof-minutes-ietf-108.txt"
    )
    data[utc_from_la_time(datetime.datetime(2020, 10, 9, 7, 0))] = CoordTuple(
        109, "2020/bof-minutes-ietf-109.txt"
    )
    data[utc_from_la_time(datetime.datetime(2021, 1, 14, 13, 30))] = CoordTuple(
        110, "2021/bof-minutes-ietf-110.txt"
    )
    data[utc_from_la_time(datetime.datetime(2021, 6, 1, 8, 0))] = CoordTuple(
        111, "2021/bof-minutes-ietf-111.txt"
    )
    data[utc_from_la_time(datetime.datetime(2021, 9, 15, 9, 0))] = CoordTuple(
        112, "2021/bof-minutes-ietf-112.txt"
    )
    data[utc_from_la_time(datetime.datetime(2022, 1, 28, 7, 0))] = CoordTuple(
        113, "2022/bof-minutes-ietf-113.txt"
    )
    data[utc_from_la_time(datetime.datetime(2022, 6, 2, 10, 0))] = CoordTuple(
        114, "2022/bof-minutes-ietf-114.txt"
    )
    data[utc_from_la_time(datetime.datetime(2022, 9, 13, 9, 0))] = CoordTuple(
        115, "2022/bof-minutes-ietf-115.txt"
    )
    data[utc_from_la_time(datetime.datetime(2023, 2, 1, 9, 0))] = CoordTuple(
        116, "2023/bof-minutes-ietf-116.txt"
    )
    data[utc_from_la_time(datetime.datetime(2023, 6, 1, 7, 0))] = CoordTuple(
        117, "2023/bof-minutes-ietf-117.txt"
    )
    data[utc_from_la_time(datetime.datetime(2023, 9, 15, 8, 0))] = CoordTuple(
        118, "2023/bof-minutes-ietf-118.txt"
    )
    return data

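A small sketch of consuming this table (keys are the UTC call times, values the namedtuples defined above):

    calls = build_bof_coord_data()
    for when, info in sorted(calls.items()):
        # first line printed: "IETF 96: 2016-06-10 14:00 UTC from 2015/bof-minutes-ietf-96.txt"
        print(f"IETF {info.meeting_number}: {when:%Y-%m-%d %H:%M} UTC from {info.source_name}")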

class Command(BaseCommand):
    help = "Performs a one-time import of IESG minutes, creating Meetings to attach them to"

    def handle(self, *args, **options):
        old_minutes_root = (
            "/a/www/www6/iesg/minutes"
            if settings.SERVER_MODE == "production"
            else "/assets/www6/iesg/minutes"
        )
        minutes_dir = Path(old_minutes_root)
        date_re = re.compile(r"\d{4}-\d{2}-\d{2}")
        meeting_times = set()
        for file_prefix in ["minutes", "narrative"]:
            paths = list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.txt"))
            paths.extend(
                list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.html"))
            )
            for path in paths:
                s = date_re.search(path.name)
                if s:
                    meeting_times.add(
                        add_time_of_day(
                            datetime.datetime.strptime(s.group(), "%Y-%m-%d")
                        )
                    )
        bof_coord_data = build_bof_coord_data()
        bof_times = set(bof_coord_data.keys())
        assert len(bof_times.intersection(meeting_times)) == 0
        meeting_times.update(bof_times)
        year_seen = None
        for dt in sorted(meeting_times):
            if dt.year != year_seen:
                counter = 1
                year_seen = dt.year
            meeting_name = f"interim-{dt.year}-iesg-{counter:02d}"
            meeting = Meeting.objects.create(
                number=meeting_name,
                type_id="interim",
                date=dt.date(),
                days=1,
                time_zone=dt.tzname(),
            )
            schedule = Schedule.objects.create(
                meeting=meeting,
                owner_id=1,  # the "(System)" person
                visible=True,
                public=True,
            )
            meeting.schedule = schedule
            meeting.save()
            session = Session.objects.create(
                meeting=meeting,
                group_id=2,  # The IESG group
                type_id="regular",
                purpose_id="regular",
                name=(
                    f"IETF {bof_coord_data[dt].meeting_number} BOF Coordination Call"
                    if dt in bof_times
                    else "Formal Telechat"
                ),
            )
            SchedulingEvent.objects.create(
                session=session,
                status_id="sched",
                by_id=1,  # (System)
            )
            timeslot = TimeSlot.objects.create(
                meeting=meeting,
                type_id="regular",
                time=dt,
                duration=datetime.timedelta(seconds=2 * 60 * 60),
            )
            SchedTimeSessAssignment.objects.create(
                timeslot=timeslot, session=session, schedule=schedule
            )

            if dt in bof_times:
                source = minutes_dir / bof_coord_data[dt].source_name
                if source.exists():
                    doc_name = (
                        f"minutes-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}"
                    )
                    doc_filename = f"{doc_name}-00.txt"
                    doc = Document.objects.create(
                        name=doc_name,
                        type_id="minutes",
                        title=f"Minutes IETF {bof_coord_data[dt].meeting_number} BOF coordination {meeting_name} {dt:%Y-%m-%d %H:%M}",
                        group_id=2,  # the IESG group
                        rev="00",
                        uploaded_filename=doc_filename,
                    )
                    e = DocEvent.objects.create(
                        type="comment",
                        doc=doc,
                        rev="00",
                        by_id=1,  # "(System)"
                        desc="Minutes moved into datatracker",
                    )
                    doc.save_with_history([e])
                    session.presentations.create(document=doc, rev=doc.rev)
                    dest = (
                        Path(settings.AGENDA_PATH)
                        / meeting_name
                        / "minutes"
                        / doc_filename
                    )
                    if dest.exists():
                        self.stdout.write(
                            f"WARNING: {dest} already exists - not overwriting it."
                        )
                    else:
                        os.makedirs(dest.parent, exist_ok=True)
                        shutil.copy(source, dest)
            else:
                for type_id in ["minutes", "narrativeminutes"]:
                    source_file_prefix = (
                        "minutes" if type_id == "minutes" else "narrative-minutes"
                    )
                    txt_source = (
                        minutes_dir
                        / f"{dt.year}"
                        / f"{source_file_prefix}-{dt:%Y-%m-%d}.txt"
                    )
                    html_source = (
                        minutes_dir
                        / f"{dt.year}"
                        / f"{source_file_prefix}-{dt:%Y-%m-%d}.html"
                    )
                    if txt_source.exists() and html_source.exists():
                        self.stdout.write(
                            f"WARNING: Both {txt_source} and {html_source} exist."
                        )
                    if txt_source.exists() or html_source.exists():
                        prefix = DocTypeName.objects.get(slug=type_id).prefix
                        doc_name = f"{prefix}-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}"
                        suffix = "html" if html_source.exists() else "txt"
                        doc_filename = f"{doc_name}-00.{suffix}"
                        verbose_type = (
                            "Minutes" if type_id == "minutes" else "Narrative Minutes"
                        )
                        doc = Document.objects.create(
                            name=doc_name,
                            type_id=type_id,
                            title=f"{verbose_type} {meeting_name} {dt:%Y-%m-%d %H:%M}",
                            group_id=2,  # the IESG group
                            rev="00",
                            uploaded_filename=doc_filename,
                        )
                        e = DocEvent.objects.create(
                            type="comment",
                            doc=doc,
                            rev="00",
                            by_id=1,  # "(System)"
                            desc=f"{verbose_type} moved into datatracker",
                        )
                        doc.save_with_history([e])
                        session.presentations.create(document=doc, rev=doc.rev)
                        dest = (
                            Path(settings.AGENDA_PATH)
                            / meeting_name
                            / type_id
                            / doc_filename
                        )
                        if dest.exists():
                            self.stdout.write(
                                f"WARNING: {dest} already exists - not overwriting it."
                            )
                        else:
                            os.makedirs(dest.parent, exist_ok=True)
                            if html_source.exists():
                                html_content = html_source.read_text(encoding="utf-8")
                                html_content = html_content.replace(
                                    f'href="IESGnarrative-{dt:%Y-%m-%d}.html#',
                                    'href="#',
                                )
                                html_content = re.sub(
                                    r'<a href="file:///[^"]*"><span[^>]*>([^<]*)</span></a>',
                                    r"\1",
                                    html_content,
                                )
                                html_content = re.sub(
                                    r'<a href="file:///[^"]*">([^<]*)</a>',
                                    r"\1",
                                    html_content,
                                )
                                html_content = re.sub(
                                    '<a href="http://validator.w3.org/[^>]*> *<img[^>]*></a>',
                                    "",
                                    html_content,
                                )
                                dest.write_text(html_content, encoding="utf-8")
                            else:
                                shutil.copy(txt_source, dest)

            counter += 1
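Since Django derives the command name from the module filename, the one-time import can be driven from a shell or script; a minimal sketch:

    from django.core.management import call_command

    call_command("import_iesg_minutes")  # equivalent to: manage.py import_iesg_minutes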
@@ -0,0 +1,33 @@
# Copyright The IETF Trust 2024, All Rights Reserved

from django.db import migrations
import django.db.models.deletion
import ietf.utils.models


class Migration(migrations.Migration):
    dependencies = [
        ("doc", "0021_narrativeminutes"),
        ("meeting", "0005_alter_session_agenda_note"),
    ]

    operations = [
        migrations.AlterField(
            model_name="sessionpresentation",
            name="document",
            field=ietf.utils.models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="presentations",
                to="doc.document",
            ),
        ),
        migrations.AlterField(
            model_name="sessionpresentation",
            name="session",
            field=ietf.utils.models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="presentations",
                to="meeting.session",
            ),
        ),
    ]
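The visible effect of these AlterField operations is the reverse-accessor rename the rest of this commit relies on; a sketch (assumes at least one session with linked materials exists):

    from ietf.meeting.models import Session

    session = Session.objects.filter(presentations__isnull=False).first()
    for sp in session.presentations.all():        # formerly session.sessionpresentation_set
        print(sp.document.name, sp.rev)
        print(sp.document.presentations.count())  # the same related_name works from the Document side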
@@ -905,8 +905,8 @@ class Constraint(models.Model):


 class SessionPresentation(models.Model):
-    session = ForeignKey('Session')
-    document = ForeignKey(Document)
+    session = ForeignKey('Session', related_name="presentations")
+    document = ForeignKey(Document, related_name="presentations")
     rev = models.CharField(verbose_name="revision", max_length=16, null=True, blank=True)
     order = models.PositiveSmallIntegerField(default=0)
@@ -1045,7 +1045,7 @@ class Session(models.Model):
             for d in l:
                 d.meeting_related = lambda: True
         else:
-            l = self.materials.filter(type=material_type).exclude(states__type=material_type, states__slug='deleted').order_by('sessionpresentation__order')
+            l = self.materials.filter(type=material_type).exclude(states__type=material_type, states__slug='deleted').order_by('presentations__order')

         if only_one:
             if l:
@@ -1065,6 +1065,11 @@ class Session(models.Model):
             self._cached_minutes = self.get_material("minutes", only_one=True)
         return self._cached_minutes

+    def narrative_minutes(self):
+        if not hasattr(self, '_cached_narrative_minutes'):
+            self._cached_narrative_minutes = self.get_material("narrativeminutes", only_one=True)
+        return self._cached_narrative_minutes
+
     def recordings(self):
         return list(self.get_material("recording", only_one=False))
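Callers can then treat narrative minutes exactly like regular minutes; a sketch:

    doc = session.narrative_minutes()  # a Document, or None if the session has none
    if doc is not None:
        print(doc.get_href())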
@@ -1078,6 +1083,7 @@ class Session(models.Model):
         if not hasattr(self, "_slides_cache"):
             self._slides_cache = list(self.get_material("slides", only_one=False))
         return self._slides_cache


     def drafts(self):
         return list(self.materials.filter(type='draft'))
@@ -1141,6 +1147,7 @@ class Session(models.Model):
         return can_manage_materials(user,self.group)

     def is_material_submission_cutoff(self):
+        debug.say("is_material_submission_cutoff got called")
         return date_today(datetime.timezone.utc) > self.meeting.get_submission_correction_date()

     def joint_with_groups_acronyms(self):
@@ -1241,10 +1248,21 @@ class Session(models.Model):
         return settings.CHAT_URL_PATTERN.format(chat_room_name=self.chat_room_name())

     def chat_archive_url(self):
-        chatlog = self.sessionpresentation_set.filter(document__type__slug='chatlog').first()
-        if chatlog is not None:
-            return chatlog.document.get_href()
-        elif self.meeting.date <= datetime.date(2022, 7, 15):
+        if hasattr(self, "prefetched_active_materials"):
+            chatlog_doc = None
+            for doc in self.prefetched_active_materials:
+                if doc.type_id == "chatlog":
+                    chatlog_doc = doc
+                    break
+            if chatlog_doc is not None:
+                return chatlog_doc.get_href()
+        else:
+            chatlog = self.presentations.filter(document__type__slug='chatlog').first()
+            if chatlog is not None:
+                return chatlog.document.get_href()
+
+        if self.meeting.date <= datetime.date(2022, 7, 15):
             # datatracker 8.8.0 released on 2022 July 15; before that, fall back to old log URL
             return f'https://www.ietf.org/jabber/logs/{ self.chat_room_name() }?C=M;O=D'
         elif hasattr(settings,'CHAT_ARCHIVE_URL_PATTERN'):
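The prefetched_active_materials branch lets list views resolve chat logs without one query per session; a sketch of how a caller might populate it (only the attribute name comes from the diff, the filtering here is an assumption):

    from django.db.models import Prefetch
    from ietf.doc.models import Document
    from ietf.meeting.models import Session

    sessions = Session.objects.prefetch_related(
        Prefetch(
            "materials",
            queryset=Document.objects.filter(type_id="chatlog"),
            to_attr="prefetched_active_materials",
        )
    )
    for session in sessions:
        session.chat_archive_url()  # served from the prefetched list, no extra query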
@@ -11,7 +11,7 @@ def hack_recording_title(recording,add_timestamp=False):
     if recording.title.startswith('Audio recording for') or recording.title.startswith('Video recording for'):
         hacked_title = recording.title[:15]
         if add_timestamp:
-            hacked_title += ' '+recording.sessionpresentation_set.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M")
+            hacked_title += ' '+recording.presentations.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M")
         return hacked_title
     else:
         return recording.title
@@ -8,7 +8,7 @@ register = template.Library()

 @register.filter
 def presented_versions(session, doc):
-    sp = session.sessionpresentation_set.filter(document=doc)
+    sp = session.presentations.filter(document=doc)
     if not sp:
         return "Document not in session"
     else:
@@ -51,7 +51,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'):
     doc = DocumentFactory.create(name=name, type_id='agenda', title="Agenda",
         uploaded_filename=file, group=group, rev=rev, states=[('draft','active')])
     pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
     # minutes
     name = "minutes-%s-%s" % (meeting.number, time.strftime("%Y%m%d%H%M"))
     rev = '00'

@@ -59,7 +59,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'):
     doc = DocumentFactory.create(name=name, type_id='minutes', title="Minutes",
         uploaded_filename=file, group=group, rev=rev, states=[('draft','active')])
     pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
     # slides
     title = "Slideshow"

@@ -70,7 +70,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'):
         uploaded_filename=file, group=group, rev=rev,
         states=[('slides','active'), ('reuse_policy', 'single')])
     pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)
     #
     return meeting
@@ -198,24 +198,24 @@ def make_meeting_test_data(meeting=None, create_interims=False):
     doc = DocumentFactory.create(name='agenda-72-mars', type_id='agenda', title="Agenda",
         uploaded_filename="agenda-72-mars.txt", group=mars, rev='00', states=[('agenda','active')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres) #
+    mars_session.presentations.add(pres) #

     doc = DocumentFactory.create(name='minutes-72-mars', type_id='minutes', title="Minutes",
         uploaded_filename="minutes-72-mars.md", group=mars, rev='00', states=[('minutes','active')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres)
+    mars_session.presentations.add(pres)

     doc = DocumentFactory.create(name='slides-72-mars-1-active', type_id='slides', title="Slideshow",
         uploaded_filename="slides-72-mars.txt", group=mars, rev='00',
         states=[('slides','active'), ('reuse_policy', 'single')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres)
+    mars_session.presentations.add(pres)

     doc = DocumentFactory.create(name='slides-72-mars-2-deleted', type_id='slides',
         title="Bad Slideshow", uploaded_filename="slides-72-mars-2-deleted.txt", group=mars, rev='00',
         states=[('slides','deleted'), ('reuse_policy', 'single')])
     pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev)
-    mars_session.sessionpresentation_set.add(pres)
+    mars_session.presentations.add(pres)

     # Future Interim Meetings
     date = date_today() + datetime.timedelta(days=365)
@@ -884,9 +884,9 @@ class SlideReorderTests(IetfSeleniumTestCase):
     def setUp(self):
         super(SlideReorderTests, self).setUp()
         self.session = SessionFactory(meeting__type_id='ietf', status_id='sched')
-        self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='one'),order=1)
-        self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='two'),order=2)
-        self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='three'),order=3)
+        self.session.presentations.create(document=DocumentFactory(type_id='slides',name='one'),order=1)
+        self.session.presentations.create(document=DocumentFactory(type_id='slides',name='two'),order=2)
+        self.session.presentations.create(document=DocumentFactory(type_id='slides',name='three'),order=3)

     def secr_login(self):
         self.login('secretary')

@@ -906,7 +906,7 @@ class SlideReorderTests(IetfSeleniumTestCase):
         ActionChains(self.driver).drag_and_drop(second,third).perform()

         time.sleep(0.1) # The API that modifies the database runs async
-        names=self.session.sessionpresentation_set.values_list('document__name',flat=True)
+        names=self.session.presentations.values_list('document__name',flat=True)
         self.assertEqual(list(names),['one','three','two'])

     @ifSeleniumEnabled
@@ -468,16 +468,16 @@ class MeetingTests(BaseMeetingTestCase):
         doc = DocumentFactory.create(name='agenda-172-mars', type_id='agenda', title="Agenda",
             uploaded_filename="agenda-172-mars.txt", group=session107.group, rev='00', states=[('agenda','active')])
         pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev)
-        session107.sessionpresentation_set.add(pres) #
+        session107.presentations.add(pres) #
         doc = DocumentFactory.create(name='minutes-172-mars', type_id='minutes', title="Minutes",
             uploaded_filename="minutes-172-mars.md", group=session107.group, rev='00', states=[('minutes','active')])
         pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev)
-        session107.sessionpresentation_set.add(pres)
+        session107.presentations.add(pres)
         doc = DocumentFactory.create(name='slides-172-mars-1-active', type_id='slides', title="Slideshow",
             uploaded_filename="slides-172-mars.txt", group=session107.group, rev='00',
             states=[('slides','active'), ('reuse_policy', 'single')])
         pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev)
-        session107.sessionpresentation_set.add(pres)
+        session107.presentations.add(pres)

         for session in (
             Session.objects.filter(meeting=meeting, group__acronym="mars").first(),

@@ -548,7 +548,7 @@ class MeetingTests(BaseMeetingTestCase):
         named_row = named_label.closest('tr')
         self.assertTrue(named_row)

-        for material in (sp.document for sp in plain_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in plain_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',

@@ -559,7 +559,7 @@ class MeetingTests(BaseMeetingTestCase):
         self.assertTrue(plain_row.find(f'a[href="{expected_url}"]'))
         self.assertFalse(named_row.find(f'a[href="{expected_url}"]'))

-        for material in (sp.document for sp in named_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in named_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',
@@ -955,10 +955,10 @@ class MeetingTests(BaseMeetingTestCase):
         # but lists a different one in its agenda. The expectation is that the pdf and tgz views will return both.
         session = SessionFactory(group__type_id='wg',meeting__type_id='ietf')
         draft1 = WgDraftFactory(group=session.group)
-        session.sessionpresentation_set.create(document=draft1)
+        session.presentations.create(document=draft1)
         draft2 = WgDraftFactory(group=session.group)
         agenda = DocumentFactory(type_id='agenda',group=session.group, uploaded_filename='agenda-%s-%s' % (session.meeting.number,session.group.acronym), states=[('agenda','active')])
-        session.sessionpresentation_set.create(document=agenda)
+        session.presentations.create(document=agenda)
         self.write_materials_file(session.meeting, session.materials.get(type="agenda"),
                                   "1. WG status (15 minutes)\n\n2. Status of %s\n\n" % draft2.name)
         filenames = []
@@ -3083,18 +3083,18 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'order':1, 'name':slides.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session.sessionpresentation_set.count(),1)
+            self.assertEqual(session.presentations.count(),1)

             # Ignore a request to add slides that are already in a session
             r = self.client.post(url, {'order':1, 'name':slides.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session.sessionpresentation_set.count(),1)
+            self.assertEqual(session.presentations.count(),1)


             session2 = SessionFactory(group=session.group, meeting=session.meeting)
             SessionPresentationFactory.create_batch(3, document__type_id='slides', session=session2)
-            for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1):
+            for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1):
                 sp.order = num
                 sp.save()

@@ -3106,22 +3106,22 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'order':1, 'name':more_slides[0].name})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[0]).order,1)
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5)))
+            self.assertEqual(session2.presentations.get(document=more_slides[0]).order,1)
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5)))

             # Insert at end
             r = self.client.post(url, {'order':5, 'name':more_slides[1].name})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[1]).order,5)
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,6)))
+            self.assertEqual(session2.presentations.get(document=more_slides[1]).order,5)
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,6)))

             # Insert in middle
             r = self.client.post(url, {'order':3, 'name':more_slides[2].name})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[2]).order,3)
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,7)))
+            self.assertEqual(session2.presentations.get(document=more_slides[2]).order,3)
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,7)))

     def test_remove_slides_from_session(self):
         for type_id in ['ietf','interim']:

@@ -3172,7 +3172,7 @@ class ReorderSlidesTests(TestCase):
             self.assertEqual(r.json()['success'],False)
             self.assertIn('index is not valid',r.json()['error'])

-            session.sessionpresentation_set.create(document=slides, rev=slides.rev, order=1)
+            session.presentations.create(document=slides, rev=slides.rev, order=1)

             # Bad names
             r = self.client.post(url, {'oldIndex':1})

@@ -3193,7 +3193,7 @@ class ReorderSlidesTests(TestCase):
             self.assertEqual(r.json()['success'],False)
             self.assertIn('SessionPresentation not found',r.json()['error'])

-            session.sessionpresentation_set.create(document=slides2, rev=slides2.rev, order=2)
+            session.presentations.create(document=slides2, rev=slides2.rev, order=2)
             r = self.client.post(url, {'oldIndex':1, 'name':slides2.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],False)

@@ -3203,11 +3203,11 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'name':slides.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(session.sessionpresentation_set.count(),1)
+            self.assertEqual(session.presentations.count(),1)

             session2 = SessionFactory(group=session.group, meeting=session.meeting)
             sp_list = SessionPresentationFactory.create_batch(5, document__type_id='slides', session=session2)
-            for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1):
+            for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1):
                 sp.order = num
                 sp.save()

@@ -3217,22 +3217,22 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'name':sp_list[0].document.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[0].pk).exists())
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5)))
+            self.assertFalse(session2.presentations.filter(pk=sp_list[0].pk).exists())
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5)))

             # delete in middle of list
             r = self.client.post(url, {'oldIndex':4, 'name':sp_list[4].document.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[4].pk).exists())
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,4)))
+            self.assertFalse(session2.presentations.filter(pk=sp_list[4].pk).exists())
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,4)))

             # delete at end of list
             r = self.client.post(url, {'oldIndex':2, 'name':sp_list[2].document.name })
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[2].pk).exists())
-            self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,3)))
+            self.assertFalse(session2.presentations.filter(pk=sp_list[2].pk).exists())
+            self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,3)))
@@ -3290,45 +3290,45 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'newIndex':3})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5]))

             # Move to beginning
             r = self.client.post(url, {'oldIndex':3, 'newIndex':1})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))

             # Move from end
             r = self.client.post(url, {'oldIndex':5, 'newIndex':3})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4]))

             # Move to end
             r = self.client.post(url, {'oldIndex':3, 'newIndex':5})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))

             # Move beginning to end
             r = self.client.post(url, {'oldIndex':1, 'newIndex':5})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1]))

             # Move middle to middle
             r = self.client.post(url, {'oldIndex':3, 'newIndex':4})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1]))

             r = self.client.post(url, {'oldIndex':3, 'newIndex':2})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1]))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1]))

             # Reset for next iteration in the loop
-            session.sessionpresentation_set.update(order=F('pk'))
+            session.presentations.update(order=F('pk'))
             self.client.logout()

@@ -3345,7 +3345,7 @@ class ReorderSlidesTests(TestCase):
             except AssertionError:
                 pass

-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('order',flat=True)),list(range(1,6)))
+            self.assertEqual(list(session.presentations.order_by('order').values_list('order',flat=True)),list(range(1,6)))


 class EditTests(TestCase):
@@ -4334,7 +4334,7 @@ class SessionDetailsTests(TestCase):
         group.role_set.create(name_id='chair',person = group_chair, email = group_chair.email())
         session = SessionFactory.create(meeting__type_id='ietf',group=group, meeting__date=date_today() + datetime.timedelta(days=90))
         SessionPresentationFactory.create(session=session,document__type_id='draft',rev=None)
-        old_draft = session.sessionpresentation_set.filter(document__type='draft').first().document
+        old_draft = session.presentations.filter(document__type='draft').first().document
         new_draft = DocumentFactory(type_id='draft')

         url = urlreverse('ietf.meeting.views.add_session_drafts', kwargs=dict(num=session.meeting.number, session_id=session.pk))

@@ -4355,10 +4355,10 @@ class SessionDetailsTests(TestCase):
         q = PyQuery(r.content)
         self.assertIn("Already linked:", q('form .text-danger').text())

-        self.assertEqual(1,session.sessionpresentation_set.count())
+        self.assertEqual(1,session.presentations.count())
         r = self.client.post(url,dict(drafts=[new_draft.pk,]))
         self.assertTrue(r.status_code, 302)
-        self.assertEqual(2,session.sessionpresentation_set.count())
+        self.assertEqual(2,session.presentations.count())

         session.meeting.date -= datetime.timedelta(days=180)
         session.meeting.save()
@@ -5982,7 +5982,7 @@ class FinalizeProceedingsTests(TestCase):
     def test_finalize_proceedings(self):
         make_meeting_test_data()
         meeting = Meeting.objects.filter(type_id='ietf').order_by('id').last()
-        meeting.session_set.filter(group__acronym='mars').first().sessionpresentation_set.create(document=Document.objects.filter(type='draft').first(),rev=None)
+        meeting.session_set.filter(group__acronym='mars').first().presentations.create(document=Document.objects.filter(type='draft').first(),rev=None)

         url = urlreverse('ietf.meeting.views.finalize_proceedings',kwargs={'num':meeting.number})
         login_testing_unauthorized(self,"secretary",url)

@@ -5990,12 +5990,12 @@ class FinalizeProceedingsTests(TestCase):
         self.assertEqual(r.status_code, 200)

         self.assertEqual(meeting.proceedings_final,False)
-        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,None)
+        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,None)
         r = self.client.post(url,{'finalize':1})
         self.assertEqual(r.status_code, 302)
         meeting = Meeting.objects.get(pk=meeting.pk)
         self.assertEqual(meeting.proceedings_final,True)
-        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,'00')
+        self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,'00')

 class MaterialsTests(TestCase):
     settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [
@@ -6037,12 +6037,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
         test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test')
         test_file.name = "not_really.pdf"
         r = self.client.post(url,dict(file=test_file))
         self.assertEqual(r.status_code, 302)
-        bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document
+        bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document
         self.assertEqual(bs_doc.rev,'00')
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)

@@ -6072,12 +6072,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
         test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test')
         test_file.name = "not_really.pdf"
         r = self.client.post(url,dict(file=test_file))
         self.assertEqual(r.status_code, 302)
-        bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document
+        bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document
         self.assertEqual(bs_doc.rev,'00')

     def test_upload_bluesheets_interim_chair_access(self):
@@ -6105,7 +6105,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
         self.assertFalse(q('form input[type="checkbox"]'))

         session2 = SessionFactory(meeting=session.meeting,group=session.group)

@@ -6140,7 +6140,7 @@ class MaterialsTests(TestCase):
         test_file.name = "some.html"
         r = self.client.post(url,dict(submission_method="upload",file=test_file))
         self.assertEqual(r.status_code, 302)
-        doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document
+        doc = session.presentations.filter(document__type_id=doctype).first().document
         self.assertEqual(doc.rev,'00')
         text = doc.text()
         self.assertIn('Some text', text)

@@ -6152,9 +6152,9 @@ class MaterialsTests(TestCase):
         test_file.name = "some.txt"
         r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False))
         self.assertEqual(r.status_code, 302)
-        doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document
+        doc = session.presentations.filter(document__type_id=doctype).first().document
         self.assertEqual(doc.rev,'01')
-        self.assertFalse(session2.sessionpresentation_set.filter(document__type_id=doctype))
+        self.assertFalse(session2.presentations.filter(document__type_id=doctype))

         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)

@@ -6166,7 +6166,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 302)
         doc = Document.objects.get(pk=doc.pk)
         self.assertEqual(doc.rev,'02')
-        self.assertTrue(session2.sessionpresentation_set.filter(document__type_id=doctype))
+        self.assertTrue(session2.presentations.filter(document__type_id=doctype))

         # Test bad encoding
         test_file = BytesIO('<html><h1>Title</h1><section>Some\x93text</section></html>'.encode('latin1'))
@@ -6196,7 +6196,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())
         self.assertFalse(q('form input[type="checkbox"]'))

         test_file = BytesIO(b'this is some text for a test')

@@ -6218,12 +6218,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.filter(document__type_id=doctype))
+        self.assertFalse(session.presentations.filter(document__type_id=doctype))
         test_file = BytesIO(b'this is some text for a test')
         test_file.name = "not_really.txt"
         r = self.client.post(url,dict(submission_method="upload",file=test_file))
         self.assertEqual(r.status_code, 302)
-        doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document
+        doc = session.presentations.filter(document__type_id=doctype).first().document
         self.assertEqual(doc.rev,'00')

         # Verify that we don't have dead links

@@ -6242,12 +6242,12 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session.sessionpresentation_set.exists())
+        self.assertFalse(session.presentations.exists())

         test_text = 'Enter agenda from scratch'
         r = self.client.post(url,dict(submission_method="enter",content=test_text))
         self.assertRedirects(r, redirect_url)
-        doc = session.sessionpresentation_set.filter(document__type_id='agenda').first().document
+        doc = session.presentations.filter(document__type_id='agenda').first().document
         self.assertEqual(doc.rev,'00')

         r = self.client.get(url)
@@ -6283,14 +6283,14 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides'))
+        self.assertFalse(session1.presentations.filter(document__type_id='slides'))
         test_file = BytesIO(b'this is not really a slide')
         test_file.name = 'not_really.txt'
         r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),1)
-        sp = session2.sessionpresentation_set.first()
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),1)
+        sp = session2.presentations.first()
         self.assertEqual(sp.document.name, 'slides-%s-%s-a-test-slide-file' % (session1.meeting.number,session1.group.acronym ) )
         self.assertEqual(sp.order,1)

@@ -6299,14 +6299,14 @@ class MaterialsTests(TestCase):
         test_file.name = 'also_not_really.txt'
         r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),2)
-        sp = session2.sessionpresentation_set.get(document__name__endswith='-a-different-slide-file')
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),2)
+        sp = session2.presentations.get(document__name__endswith='-a-different-slide-file')
         self.assertEqual(sp.order,2)
         self.assertEqual(sp.rev,'00')
         self.assertEqual(sp.document.rev,'00')

-        url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.sessionpresentation_set.get(order=2).document.name})
+        url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.presentations.get(order=2).document.name})
         r = self.client.get(url)
         self.assertTrue(r.status_code, 200)
         q = PyQuery(r.content)

@@ -6315,9 +6315,9 @@ class MaterialsTests(TestCase):
         test_file.name = 'doesnotmatter.txt'
         r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),2)
-        sp = session2.sessionpresentation_set.get(order=2)
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),2)
+        sp = session2.presentations.get(order=2)
         self.assertEqual(sp.rev,'01')
         self.assertEqual(sp.document.rev,'01')

@@ -6329,7 +6329,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         self.assertIn('Upload', str(q("title")))
-        self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides'))
+        self.assertFalse(session1.presentations.filter(document__type_id='slides'))
         test_file = BytesIO(b'this is not really a slide')
         test_file.name = 'not_really.txt'
         r = self.client.post(url,dict(file=test_file,title='title with bad character \U0001fabc '))
@@ -6341,7 +6341,7 @@ class MaterialsTests(TestCase):
     def test_remove_sessionpresentation(self):
         session = SessionFactory(meeting__type_id='ietf')
         doc = DocumentFactory(type_id='slides')
-        session.sessionpresentation_set.create(document=doc)
+        session.presentations.create(document=doc)

         url = urlreverse('ietf.meeting.views.remove_sessionpresentation',kwargs={'num':session.meeting.number,'session_id':session.id,'name':'no-such-doc'})
         response = self.client.get(url)

@@ -6356,10 +6356,10 @@ class MaterialsTests(TestCase):
         response = self.client.get(url)
         self.assertEqual(response.status_code, 200)

-        self.assertEqual(1,session.sessionpresentation_set.count())
+        self.assertEqual(1,session.presentations.count())
         response = self.client.post(url,{'remove_session':''})
         self.assertEqual(response.status_code, 302)
-        self.assertEqual(0,session.sessionpresentation_set.count())
+        self.assertEqual(0,session.presentations.count())
         self.assertEqual(2,doc.docevent_set.count())

     def test_propose_session_slides(self):
@@ -6448,8 +6448,8 @@ class MaterialsTests(TestCase):
         submission = SlideSubmission.objects.get(id = submission.id)
         self.assertEqual(submission.status_id, 'approved')
         self.assertIsNotNone(submission.doc)
-        self.assertEqual(session.sessionpresentation_set.count(),1)
-        self.assertEqual(session.sessionpresentation_set.first().document.title,'different title')
+        self.assertEqual(session.presentations.count(),1)
+        self.assertEqual(session.presentations.first().document.title,'different title')
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
         self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+approved")

@@ -6471,8 +6471,8 @@ class MaterialsTests(TestCase):
         self.assertTrue(q('#id_apply_to_all'))
         r = self.client.post(url,dict(title='yet another title',approve='approve'))
         self.assertEqual(r.status_code,302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),0)
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),0)

     def test_approve_proposed_slides_multisession_apply_all(self):
         submission = SlideSubmissionFactory(session__meeting__type_id='ietf')

@@ -6486,8 +6486,8 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code,200)
         r = self.client.post(url,dict(title='yet another title',apply_to_all=1,approve='approve'))
         self.assertEqual(r.status_code,302)
-        self.assertEqual(session1.sessionpresentation_set.count(),1)
-        self.assertEqual(session2.sessionpresentation_set.count(),1)
+        self.assertEqual(session1.presentations.count(),1)
+        self.assertEqual(session2.presentations.count(),1)

     def test_submit_and_approve_multiple_versions(self):
         session = SessionFactory(meeting__type_id='ietf')

@@ -6512,7 +6512,7 @@ class MaterialsTests(TestCase):
         self.assertEqual(r.status_code,302)
         self.client.logout()

-        self.assertEqual(session.sessionpresentation_set.first().document.rev,'00')
+        self.assertEqual(session.presentations.first().document.rev,'00')

         login_testing_unauthorized(self,newperson.user.username,propose_url)
         test_file = BytesIO(b'this is not really a slide, but it is another version of it')
@@ -6540,9 +6540,9 @@ class MaterialsTests(TestCase):

         self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(),0)
         self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(),1)
-        self.assertEqual(session.sessionpresentation_set.first().document.rev,'01')
+        self.assertEqual(session.presentations.first().document.rev,'01')
         path = os.path.join(submission.session.meeting.get_materials_path(),'slides')
-        filename = os.path.join(path,session.sessionpresentation_set.first().document.name+'-01.txt')
+        filename = os.path.join(path,session.presentations.first().document.name+'-01.txt')
         self.assertTrue(os.path.exists(filename))
         fd = io.open(filename, 'r')
         contents = fd.read()

@@ -6659,7 +6659,7 @@ class ImportNotesTests(TestCase):
         self.client.login(username='secretary', password='secretary+password')
         r = self.client.post(url, {'markdown_text': 'replaced below'})  # create a rev
         with open(
-            self.session.sessionpresentation_set.filter(document__type="minutes").first().document.get_file_name(),
+            self.session.presentations.filter(document__type="minutes").first().document.get_file_name(),
             'wb'
         ) as f:
             # Replace existing content with an invalid Unicode byte string. The particular invalid

@@ -6684,7 +6684,7 @@ class ImportNotesTests(TestCase):
         self.client.login(username='secretary', password='secretary+password')
         r = self.client.post(url, {'markdown_text': 'original markdown text'})  # create a rev
         # remove the file uploaded for the first rev
-        minutes_docs = self.session.sessionpresentation_set.filter(document__type='minutes')
+        minutes_docs = self.session.presentations.filter(document__type='minutes')
         self.assertEqual(minutes_docs.count(), 1)
         Path(minutes_docs.first().document.get_file_name()).unlink()

@@ -7819,7 +7819,7 @@ class ProceedingsTests(BaseMeetingTestCase):
         named_row = named_label.closest('tr')
         self.assertTrue(named_row)

-        for material in (sp.document for sp in plain_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in plain_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',

@@ -7830,7 +7830,7 @@ class ProceedingsTests(BaseMeetingTestCase):
         self.assertTrue(plain_row.find(f'a[href="{expected_url}"]'))
         self.assertFalse(named_row.find(f'a[href="{expected_url}"]'))

-        for material in (sp.document for sp in named_session.sessionpresentation_set.all()):
+        for material in (sp.document for sp in named_session.presentations.all()):
             if material.type_id == 'draft':
                 expected_url = urlreverse(
                     'ietf.doc.views_doc.document_main',
@@ -32,7 +32,10 @@ from ietf.utils.timezone import date_today


 def session_time_for_sorting(session, use_meeting_date):
-    official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first()
+    if hasattr(session, "_otsa"):
+        official_timeslot = session._otsa.timeslot
+    else:
+        official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first()
     if official_timeslot:
         return official_timeslot.time
     elif use_meeting_date and session.meeting.date:

@@ -75,13 +78,14 @@ def group_sessions(sessions):
     in_progress = []
     recent = []
     past = []

     for s in sessions:
         today = date_today(s.meeting.tz())
         if s.meeting.date > today:
             future.append(s)
         elif s.meeting.end_date() >= today:
             in_progress.append(s)
-        elif not s.is_material_submission_cutoff():
+        elif not (s.cached_is_cutoff if hasattr(s, "cached_is_cutoff") else s.is_material_submission_cutoff()):
             recent.append(s)
         else:
             past.append(s)

@@ -91,6 +95,7 @@ def group_sessions(sessions):
     recent.reverse()
     past.reverse()


     return future, in_progress, recent, past

 def get_upcoming_manageable_sessions(user):
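The cached_is_cutoff branch expects callers to precompute the cutoff flag in bulk, avoiding a per-session query inside group_sessions(); a sketch of that annotation (the call site shown here is an assumption):

    import datetime
    from ietf.utils.timezone import date_today

    for s in sessions:
        # one cheap comparison per session, computed ahead of time
        s.cached_is_cutoff = date_today(datetime.timezone.utc) > s.meeting.get_submission_correction_date()
    future, in_progress, recent, past = group_sessions(sessions)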
@ -148,7 +153,7 @@ def finalize(meeting):
|
|||
)
|
||||
).astimezone(pytz.utc) + datetime.timedelta(days=1)
|
||||
for session in meeting.session_set.all():
|
||||
for sp in session.sessionpresentation_set.filter(document__type='draft',rev=None):
|
||||
for sp in session.presentations.filter(document__type='draft',rev=None):
|
||||
rev_before_end = [e for e in sp.document.docevent_set.filter(newrevisiondocevent__isnull=False).order_by('-time') if e.time <= end_time ]
|
||||
if rev_before_end:
|
||||
sp.rev = rev_before_end[-1].newrevisiondocevent.rev
|
||||
|
@ -180,7 +185,7 @@ def sort_accept_tuple(accept):
|
|||
return tup
|
||||
|
||||
def condition_slide_order(session):
|
||||
qs = session.sessionpresentation_set.filter(document__type_id='slides').order_by('order')
|
||||
qs = session.presentations.filter(document__type_id='slides').order_by('order')
|
||||
order_list = qs.values_list('order',flat=True)
|
||||
if list(order_list) != list(range(1,qs.count()+1)):
|
||||
for num, sp in enumerate(qs, start=1):
|
||||
|
@ -563,7 +568,7 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap
|
|||
Returns (Document, [DocEvents]), which should be passed to doc.save_with_history()
|
||||
if the file contents are stored successfully.
|
||||
"""
|
||||
minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first()
|
||||
minutes_sp = session.presentations.filter(document__type='minutes').first()
|
||||
if minutes_sp:
|
||||
doc = minutes_sp.document
|
||||
doc.rev = '%02d' % (int(doc.rev)+1)
|
||||
|
@ -597,17 +602,17 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap
|
|||
rev = '00',
|
||||
)
|
||||
doc.states.add(State.objects.get(type_id='minutes',slug='active'))
|
||||
if session.sessionpresentation_set.filter(document=doc).exists():
|
||||
sp = session.sessionpresentation_set.get(document=doc)
|
||||
if session.presentations.filter(document=doc).exists():
|
||||
sp = session.presentations.get(document=doc)
|
||||
sp.rev = doc.rev
|
||||
sp.save()
|
||||
else:
|
||||
session.sessionpresentation_set.create(document=doc,rev=doc.rev)
|
||||
session.presentations.create(document=doc,rev=doc.rev)
|
||||
if apply_to_all:
|
||||
for other_session in get_meeting_sessions(session.meeting.number, session.group.acronym):
|
||||
if other_session != session:
|
||||
other_session.sessionpresentation_set.filter(document__type='minutes').delete()
|
||||
other_session.sessionpresentation_set.create(document=doc,rev=doc.rev)
|
||||
other_session.presentations.filter(document__type='minutes').delete()
|
||||
other_session.presentations.create(document=doc,rev=doc.rev)
|
||||
filename = f'{doc.name}-{doc.rev}{ext}'
|
||||
doc.uploaded_filename = filename
|
||||
e = NewRevisionDocEvent.objects.create(
|
||||
|
@ -719,7 +724,7 @@ def new_doc_for_session(type_id, session):
|
|||
rev = '00',
|
||||
)
|
||||
doc.states.add(State.objects.get(type_id=type_id, slug='active'))
|
||||
session.sessionpresentation_set.create(document=doc,rev='00')
|
||||
session.presentations.create(document=doc,rev='00')
|
||||
return doc
|
||||
|
||||
def write_doc_for_session(session, type_id, filename, contents):
|
||||
|
@@ -760,7 +765,7 @@ def create_recording(session, url, title=None, user=None):
         desc='New revision available',
         time=doc.time)
     pres = SessionPresentation.objects.create(session=session,document=doc,rev=doc.rev)
-    session.sessionpresentation_set.add(pres)
+    session.presentations.add(pres)

     return doc

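The hunks above and below are one mechanical rename: the reverse accessor `session.sessionpresentation_set` becomes `session.presentations`. A minimal sketch of the model change this relies on, assuming only that the `SessionPresentation` foreign key gains an explicit `related_name`; the surrounding fields are illustrative, not the datatracker's actual model:

```python
# Sketch only: the shape of the rename, not datatracker code.
# An explicit related_name turns the default reverse accessor
# session.sessionpresentation_set into session.presentations.
from django.db import models


class Session(models.Model):
    pass


class SessionPresentation(models.Model):
    session = models.ForeignKey(
        Session,
        on_delete=models.CASCADE,
        related_name="presentations",  # was the default: sessionpresentation_set
    )
    rev = models.CharField(max_length=16, null=True, blank=True)
    order = models.PositiveSmallIntegerField(default=0)
```

All call sites keep the same queryset API (`filter`, `create`, `get`, `add`), so the rename can be applied hunk by hunk without behavior changes.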
@@ -2157,7 +2157,7 @@ def agenda_json(request, num=None):
     # time of the meeting
     assignments = preprocess_assignments_for_agenda(assignments, meeting, extra_prefetches=[
         "session__materials__docevent_set",
-        "session__sessionpresentation_set",
+        "session__presentations",
         "timeslot__meeting"
     ])
     for asgn in assignments:
@@ -2427,12 +2427,12 @@ def session_details(request, num, acronym):
         session.cancelled = session.current_status in Session.CANCELED_STATUSES
         session.status = status_names.get(session.current_status, session.current_status)

-        session.filtered_artifacts = list(session.sessionpresentation_set.filter(document__type__slug__in=['agenda','minutes','bluesheets']))
-        session.filtered_artifacts.sort(key=lambda d:['agenda','minutes','bluesheets'].index(d.document.type.slug))
-        session.filtered_slides = session.sessionpresentation_set.filter(document__type__slug='slides').order_by('order')
-        session.filtered_drafts = session.sessionpresentation_set.filter(document__type__slug='draft')
-        session.filtered_chatlog_and_polls = session.sessionpresentation_set.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug')
-        # TODO FIXME Deleted materials shouldn't be in the sessionpresentation_set
+        session.filtered_artifacts = list(session.presentations.filter(document__type__slug__in=['agenda','minutes','narrativeminutes', 'bluesheets']))
+        session.filtered_artifacts.sort(key=lambda d:['agenda','minutes', 'narrativeminutes', 'bluesheets'].index(d.document.type.slug))
+        session.filtered_slides = session.presentations.filter(document__type__slug='slides').order_by('order')
+        session.filtered_drafts = session.presentations.filter(document__type__slug='draft')
+        session.filtered_chatlog_and_polls = session.presentations.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug')
+        # TODO FIXME Deleted materials shouldn't be in the presentations
         for qs in [session.filtered_artifacts,session.filtered_slides,session.filtered_drafts]:
             qs = [p for p in qs if p.document.get_state_slug(p.document.type_id)!='deleted']
             session.type_counter.update([p.document.type.slug for p in qs])
@@ -2490,7 +2490,7 @@ def add_session_drafts(request, session_id, num):
    if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"):
        raise Http404

-    already_linked = [sp.document for sp in session.sessionpresentation_set.filter(document__type_id='draft')]
+    already_linked = [sp.document for sp in session.presentations.filter(document__type_id='draft')]

    session_number = None
    sessions = get_sessions(session.meeting.number,session.group.acronym)
@@ -2501,7 +2501,7 @@ def add_session_drafts(request, session_id, num):
        form = SessionDraftsForm(request.POST,already_linked=already_linked)
        if form.is_valid():
            for draft in form.cleaned_data['drafts']:
-               session.sessionpresentation_set.create(document=draft,rev=None)
+               session.presentations.create(document=draft,rev=None)
               c = DocEvent(type="added_comment", doc=draft, rev=draft.rev, by=request.user.person)
               c.desc = "Added to session: %s" % session
               c.save()
|
@ -2512,7 +2512,7 @@ def add_session_drafts(request, session_id, num):
|
|||
return render(request, "meeting/add_session_drafts.html",
|
||||
{ 'session': session,
|
||||
'session_number': session_number,
|
||||
'already_linked': session.sessionpresentation_set.filter(document__type_id='draft'),
|
||||
'already_linked': session.presentations.filter(document__type_id='draft'),
|
||||
'form': form,
|
||||
})
|
||||
|
||||
|
@@ -2554,7 +2554,7 @@ def upload_session_bluesheets(request, session_id, num):
    else:
        form = UploadBlueSheetForm()

-    bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first()
+    bluesheet_sp = session.presentations.filter(document__type='bluesheets').first()

    return render(request, "meeting/upload_session_bluesheets.html",
                  {'session': session,
@@ -2565,7 +2565,7 @@ def upload_session_bluesheets(request, session_id, num):


 def save_bluesheet(request, session, file, encoding='utf-8'):
-    bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first()
+    bluesheet_sp = session.presentations.filter(document__type='bluesheets').first()
     _, ext = os.path.splitext(file.name)

     if bluesheet_sp:
@@ -2595,7 +2595,7 @@ def save_bluesheet(request, session, file, encoding='utf-8'):
            rev = '00',
        )
        doc.states.add(State.objects.get(type_id='bluesheets',slug='active'))
-       session.sessionpresentation_set.create(document=doc,rev='00')
+       session.presentations.create(document=doc,rev='00')
    filename = '%s-%s%s'% ( doc.name, doc.rev, ext)
    doc.uploaded_filename = filename
    e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev)
@@ -2620,7 +2620,7 @@ def upload_session_minutes(request, session_id, num):
    if len(sessions) > 1:
        session_number = 1 + sessions.index(session)

-    minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first()
+    minutes_sp = session.presentations.filter(document__type='minutes').first()

    if request.method == 'POST':
        form = UploadMinutesForm(show_apply_to_all_checkbox,request.POST,request.FILES)
@@ -2712,7 +2712,7 @@ def upload_session_agenda(request, session_id, num):
    if len(sessions) > 1:
        session_number = 1 + sessions.index(session)

-    agenda_sp = session.sessionpresentation_set.filter(document__type='agenda').first()
+    agenda_sp = session.presentations.filter(document__type='agenda').first()

    if request.method == 'POST':
        form = UploadOrEnterAgendaForm(show_apply_to_all_checkbox,request.POST,request.FILES)
@@ -2771,17 +2771,17 @@ def upload_session_agenda(request, session_id, num):
            rev = '00',
        )
        doc.states.add(State.objects.get(type_id='agenda',slug='active'))
-       if session.sessionpresentation_set.filter(document=doc).exists():
-           sp = session.sessionpresentation_set.get(document=doc)
+       if session.presentations.filter(document=doc).exists():
+           sp = session.presentations.get(document=doc)
            sp.rev = doc.rev
            sp.save()
        else:
-           session.sessionpresentation_set.create(document=doc,rev=doc.rev)
+           session.presentations.create(document=doc,rev=doc.rev)
        if apply_to_all:
            for other_session in sessions:
                if other_session != session:
-                   other_session.sessionpresentation_set.filter(document__type='agenda').delete()
-                   other_session.sessionpresentation_set.create(document=doc,rev=doc.rev)
+                   other_session.presentations.filter(document__type='agenda').delete()
+                   other_session.presentations.create(document=doc,rev=doc.rev)
        filename = '%s-%s%s'% ( doc.name, doc.rev, ext)
        doc.uploaded_filename = filename
        e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev)
@@ -2832,7 +2832,7 @@ def upload_session_slides(request, session_id, num, name=None):
        slides = Document.objects.filter(name=name).first()
        if not (slides and slides.type_id=='slides'):
            raise Http404
-       slides_sp = session.sessionpresentation_set.filter(document=slides).first()
+       slides_sp = session.presentations.filter(document=slides).first()

    if request.method == 'POST':
        form = UploadSlidesForm(session, show_apply_to_all_checkbox,request.POST,request.FILES)
@@ -2872,18 +2872,18 @@ def upload_session_slides(request, session_id, num, name=None):
            )
        doc.states.add(State.objects.get(type_id='slides',slug='active'))
        doc.states.add(State.objects.get(type_id='reuse_policy',slug='single'))
-       if session.sessionpresentation_set.filter(document=doc).exists():
-           sp = session.sessionpresentation_set.get(document=doc)
+       if session.presentations.filter(document=doc).exists():
+           sp = session.presentations.get(document=doc)
            sp.rev = doc.rev
            sp.save()
        else:
-           max_order = session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-           session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+           max_order = session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+           session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
        if apply_to_all:
            for other_session in sessions:
-               if other_session != session and not other_session.sessionpresentation_set.filter(document=doc).exists():
-                   max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-                   other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+               if other_session != session and not other_session.presentations.filter(document=doc).exists():
+                   max_order = other_session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+                   other_session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
        filename = '%s-%s%s'% ( doc.name, doc.rev, ext)
        doc.uploaded_filename = filename
        e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev)
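A recurring idiom in the slides hunks above: a new presentation is appended after the current maximum `order`. Isolated as a standalone sketch (the helper name `next_slide_order` is hypothetical):

```python
# Sketch of the append-after-max idiom used above, assuming a reverse
# relation "presentations" with an integer "order" column.
from django.db.models import Max


def next_slide_order(session):
    # aggregate(Max("order")) returns {"order__max": None} on an empty
    # queryset, so the "or 0" makes the first slide land at order 1.
    max_order = (
        session.presentations.filter(document__type="slides")
        .aggregate(Max("order"))["order__max"]
        or 0
    )
    return max_order + 1
```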
@@ -2983,7 +2983,7 @@ def remove_sessionpresentation(request, session_id, num, name):
    if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"):
        permission_denied(request, "The materials cutoff for this session has passed. Contact the secretariat for further action.")
    if request.method == 'POST':
-       session.sessionpresentation_set.filter(pk=sp.pk).delete()
+       session.presentations.filter(pk=sp.pk).delete()
        c = DocEvent(type="added_comment", doc=sp.document, rev=sp.document.rev, by=request.user.person)
        c.desc = "Removed from session: %s" % (session)
        c.save()
@@ -3008,7 +3008,7 @@ def ajax_add_slides_to_session(request, session_id, num):
        order = int(order_str)
    except (ValueError, TypeError):
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json')
-   if order < 1 or order > session.sessionpresentation_set.filter(document__type_id='slides').count() + 1 :
+   if order < 1 or order > session.presentations.filter(document__type_id='slides').count() + 1 :
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json')

    name = request.POST.get('name', None)
@@ -3016,10 +3016,10 @@ def ajax_add_slides_to_session(request, session_id, num):
    if not doc:
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json')

-   if not session.sessionpresentation_set.filter(document=doc).exists():
+   if not session.presentations.filter(document=doc).exists():
        condition_slide_order(session)
-       session.sessionpresentation_set.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1)
-       session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=order)
+       session.presentations.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1)
+       session.presentations.create(document=doc,rev=doc.rev,order=order)
        DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Added to session: %s" % session)

    return HttpResponse(json.dumps({'success':True}), content_type='application/json')
@@ -3041,7 +3041,7 @@ def ajax_remove_slides_from_session(request, session_id, num):
        oldIndex = int(oldIndex_str)
    except (ValueError, TypeError):
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json')
-   if oldIndex < 1 or oldIndex > session.sessionpresentation_set.filter(document__type_id='slides').count() :
+   if oldIndex < 1 or oldIndex > session.presentations.filter(document__type_id='slides').count() :
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json')

    name = request.POST.get('name', None)
@@ -3050,11 +3050,11 @@ def ajax_remove_slides_from_session(request, session_id, num):
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json')

    condition_slide_order(session)
-   affected_presentations = session.sessionpresentation_set.filter(document=doc).first()
+   affected_presentations = session.presentations.filter(document=doc).first()
    if affected_presentations:
        if affected_presentations.order == oldIndex:
            affected_presentations.delete()
-           session.sessionpresentation_set.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1)
+           session.presentations.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1)
            DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Removed from session: %s" % session)
            return HttpResponse(json.dumps({'success':True}), content_type='application/json')
        else:
@@ -3074,7 +3074,7 @@ def ajax_reorder_slides_in_session(request, session_id, num):
    if request.method != 'POST' or not request.POST:
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'No data submitted or not POST' }),content_type='application/json')

-   num_slides_in_session = session.sessionpresentation_set.filter(document__type_id='slides').count()
+   num_slides_in_session = session.presentations.filter(document__type_id='slides').count()
    oldIndex_str = request.POST.get('oldIndex', None)
    try:
        oldIndex = int(oldIndex_str)
@@ -3095,11 +3095,11 @@ def ajax_reorder_slides_in_session(request, session_id, num):
        return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json')

    condition_slide_order(session)
-   sp = session.sessionpresentation_set.get(order=oldIndex)
+   sp = session.presentations.get(order=oldIndex)
    if oldIndex < newIndex:
-       session.sessionpresentation_set.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1)
+       session.presentations.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1)
    else:
-       session.sessionpresentation_set.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1)
+       session.presentations.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1)
    sp.order = newIndex
    sp.save()

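The two `update()` calls in the reorder hunk shift every slide strictly between the old and new positions by one, then drop the moved slide into place; `condition_slide_order` has already made the orders contiguous and 1-based. The same arithmetic on a plain Python list, as a sketch rather than the view itself:

```python
# Pure-Python model of the reindexing done by the two .update() calls above.
# Positions are 1-based and contiguous.
def reorder(slides, old_index, new_index):
    moved = slides.pop(old_index - 1)
    slides.insert(new_index - 1, moved)
    return slides


assert reorder(["a", "b", "c", "d"], 1, 3) == ["b", "c", "a", "d"]  # move right
assert reorder(["a", "b", "c", "d"], 4, 2) == ["a", "d", "b", "c"]  # move left
```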
@@ -3749,7 +3749,7 @@ def organize_proceedings_sessions(sessions):
            if s.current_status != 'canceled':
                all_canceled = False
            by_name.setdefault(s.name, [])
-           if s.current_status != 'notmeet' or s.sessionpresentation_set.exists():
+           if s.current_status != 'notmeet' or s.presentations.exists():
                by_name[s.name].append(s) # for notmeet, only include sessions with materials
    for sess_name, ss in by_name.items():
        session = ss[0] if ss else None
@@ -3781,7 +3781,7 @@ def organize_proceedings_sessions(sessions):
            'name': sess_name,
            'session': session,
            'canceled': all_canceled,
-           'has_materials': s.sessionpresentation_set.exists(),
+           'has_materials': s.presentations.exists(),
            'agendas': _format_materials((s, s.agenda()) for s in ss),
            'minutes': _format_materials((s, s.minutes()) for s in ss),
            'bluesheets': _format_materials((s, s.bluesheets()) for s in ss),
@@ -4149,7 +4149,7 @@ def api_upload_chatlog(request):
    session = Session.objects.filter(pk=session_id).first()
    if not session:
        return err(400, "Invalid session")
-   chatlog_sp = session.sessionpresentation_set.filter(document__type='chatlog').first()
+   chatlog_sp = session.presentations.filter(document__type='chatlog').first()
    if chatlog_sp:
        doc = chatlog_sp.document
        doc.rev = f"{(int(doc.rev)+1):02d}"
@@ -4189,7 +4189,7 @@ def api_upload_polls(request):
    session = Session.objects.filter(pk=session_id).first()
    if not session:
        return err(400, "Invalid session")
-   polls_sp = session.sessionpresentation_set.filter(document__type='polls').first()
+   polls_sp = session.presentations.filter(document__type='polls').first()
    if polls_sp:
        doc = polls_sp.document
        doc.rev = f"{(int(doc.rev)+1):02d}"
@@ -4606,18 +4606,18 @@ def approve_proposed_slides(request, slidesubmission_id, num):
            )
        doc.states.add(State.objects.get(type_id='slides',slug='active'))
        doc.states.add(State.objects.get(type_id='reuse_policy',slug='single'))
-       if submission.session.sessionpresentation_set.filter(document=doc).exists():
-           sp = submission.session.sessionpresentation_set.get(document=doc)
+       if submission.session.presentations.filter(document=doc).exists():
+           sp = submission.session.presentations.get(document=doc)
            sp.rev = doc.rev
            sp.save()
        else:
-           max_order = submission.session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-           submission.session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+           max_order = submission.session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+           submission.session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
        if apply_to_all:
            for other_session in sessions:
-               if other_session != submission.session and not other_session.sessionpresentation_set.filter(document=doc).exists():
-                   max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
-                   other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1)
+               if other_session != submission.session and not other_session.presentations.filter(document=doc).exists():
+                   max_order = other_session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0
+                   other_session.presentations.create(document=doc,rev=doc.rev,order=max_order+1)
        sub_name, sub_ext = os.path.splitext(submission.filename)
        target_filename = '%s-%s%s' % (sub_name[:sub_name.rfind('-ss')],doc.rev,sub_ext)
        doc.uploaded_filename = target_filename

ietf/name/migrations/0013_narrativeminutes.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+# Copyright The IETF Trust 2023, All Rights Reserved
+
+from django.db import migrations, models
+
+
+def forward(apps, schema_editor):
+    DocTypeName = apps.get_model("name", "DocTypeName")
+    DocTypeName.objects.create(
+        slug="narrativeminutes",
+        name="Narrative Minutes",
+        desc="",
+        used=True,
+        order=0,
+        prefix="narrative-minutes",
+    )
+
+
+def reverse(apps, schema_editor):
+    DocTypeName = apps.get_model("name", "DocTypeName")
+    DocTypeName.objects.filter(slug="narrativeminutes").delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("name", "0012_adjust_important_dates"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="doctypename",
+            name="prefix",
+            field=models.CharField(default="", max_length=32),
+        ),
+        migrations.RunPython(forward, reverse),
+    ]
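Because `RunPython` is given both `forward` and `reverse`, this data migration is fully reversible: unapplying runs `reverse` first (operations are undone in reverse order), then rolls the `prefix` field back to `max_length=16`. A quick sanity check after migrating, as a sketch for a Django shell; the model path is the datatracker's `ietf.name.models`:

```python
# Sketch: verify the data migration from a Django shell (manage.py shell).
from ietf.name.models import DocTypeName

nm = DocTypeName.objects.get(slug="narrativeminutes")
assert nm.name == "Narrative Minutes"
assert nm.prefix == "narrative-minutes"  # 17 chars; needs the new max_length=32
```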
@@ -43,7 +43,7 @@ class DocRelationshipName(NameModel):

 class DocTypeName(NameModel):
     """Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email,
     Review, Issue, Wiki, RFC"""
-    prefix = models.CharField(max_length=16, default="")
+    prefix = models.CharField(max_length=32, default="")

 class DocTagName(NameModel):
     """Waiting for Reference, IANA Coordination, Revised ID Needed,
     External Party, AD Followup, Point Raised - Writeup Needed, ..."""

@@ -677,7 +677,6 @@ STATUS_CHANGE_PATH = '/a/ietfdata/doc/status-change'
 AGENDA_PATH = '/a/www/www6s/proceedings/'
 MEETINGHOST_LOGO_PATH = AGENDA_PATH # put these in the same place as other proceedings files
 IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/'
-IESG_WG_EVALUATION_DIR = "/a/www/www6/iesg/evaluation"
 # Move drafts to this directory when they expire
 INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/'
 # The following directory contains linked copies of all drafts, but don't
@@ -856,6 +855,7 @@ MEETING_MATERIALS_SERVE_LOCALLY = True
 MEETING_DOC_LOCAL_HREFS = {
     "agenda": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "minutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
+    "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "slides": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "chatlog": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
     "polls": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}",
@@ -867,6 +867,7 @@ MEETING_DOC_LOCAL_HREFS = {
 MEETING_DOC_CDN_HREFS = {
     "agenda": "https://www.ietf.org/proceedings/{meeting.number}/agenda/{doc.name}-{doc.rev}",
     "minutes": "https://www.ietf.org/proceedings/{meeting.number}/minutes/{doc.name}-{doc.rev}",
+    "narrativeminutes": "https://www.ietf.org/proceedings/{meeting.number}/narrative-minutes/{doc.name}-{doc.rev}",
     "slides": "https://www.ietf.org/proceedings/{meeting.number}/slides/{doc.name}-{doc.rev}",
     "recording": "{doc.external_url}",
     "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}",
@@ -878,6 +879,7 @@ MEETING_DOC_HREFS = MEETING_DOC_LOCAL_HREFS if MEETING_MATERIALS_SERVE_LOCALLY e
 MEETING_DOC_OLD_HREFS = {
     "agenda": "/meeting/{meeting.number}/materials/{doc.name}",
     "minutes": "/meeting/{meeting.number}/materials/{doc.name}",
+    "narrativeminutes" : "/meeting/{meeting.number}/materials/{doc.name}",
     "slides": "/meeting/{meeting.number}/materials/{doc.name}",
     "recording": "{doc.external_url}",
     "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}",
@@ -887,6 +889,7 @@ MEETING_DOC_OLD_HREFS = {
 MEETING_DOC_GREFS = {
     "agenda": "/meeting/{meeting.number}/materials/{doc.name}",
     "minutes": "/meeting/{meeting.number}/materials/{doc.name}",
+    "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}",
     "slides": "/meeting/{meeting.number}/materials/{doc.name}",
     "recording": "{doc.external_url}",
     "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}",
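The href tables above are plain `str.format` templates keyed by document type slug, so resolving a URL is a single `format` call with `doc` and `meeting` in scope. A sketch of that lookup; the helper name is hypothetical, not datatracker code:

```python
# Sketch: resolving a material URL from the href tables above.
from django.conf import settings


def material_href(doc, meeting):  # hypothetical helper
    # Attribute access works inside format fields, so a template like
    # "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}" fills itself.
    template = settings.MEETING_DOC_HREFS[doc.type_id]
    return template.format(doc=doc, meeting=meeting)
```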
@@ -900,6 +903,7 @@ MEETING_MATERIALS_DEFAULT_SUBMISSION_CORRECTION_DAYS = 50
 MEETING_VALID_UPLOAD_EXTENSIONS = {
     'agenda': ['.txt','.html','.htm', '.md', ],
     'minutes': ['.txt','.html','.htm', '.md', '.pdf', ],
+    'narrativeminutes': ['.txt','.html','.htm', '.md', '.pdf', ],
     'slides': ['.doc','.docx','.pdf','.ppt','.pptx','.txt', ], # Note the removal of .zip
     'bluesheets': ['.pdf', '.txt', ],
     'procmaterials':['.pdf', ],
@@ -909,6 +913,7 @@ MEETING_VALID_UPLOAD_EXTENSIONS = {
 MEETING_VALID_UPLOAD_MIME_TYPES = {
     'agenda': ['text/plain', 'text/html', 'text/markdown', 'text/x-markdown', ],
     'minutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ],
+    'narrative-minutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ],
     'slides': [],
     'bluesheets': ['application/pdf', 'text/plain', ],
     'procmaterials':['application/pdf', ],

@@ -52,7 +52,7 @@
     <td class="edit"></td>
     <td id="statement-state">
         {% if doc.get_state %}
-            <span title="{{ doc.get_state.desc }}" class="{% if doc.get_state.name|slugify == 'active' %}text-success{% else %}text-danger{% endif %}">{{ doc.get_state.name }}</span>
+            <span title="{{ doc.get_state.desc }}" class="badge rounded-pill {% if doc.get_state.name|slugify == 'active' %}text-bg-success{% else %}text-bg-warning{% endif %}">{{ doc.get_state.name }}</span>
         {% else %}
             No document state
         {% endif %}

@@ -60,6 +60,12 @@
                {% if s.minutes %}href="{{ s.minutes.get_absolute_url }}"{% endif %}>
                Minutes
            </a>
+           {% if group.acronym == "iesg" %}
+               <a class="btn btn-sm {% if not s.narrative_minutes %}btn-secondary disabled{% else %}btn-primary{% endif %}"
+                  {% if s.narrative_minutes %}href="{{ s.narrative_minutes.get_absolute_url }}"{% endif %}>
+                   Narrative Minutes
+               </a>
+           {% endif %}
            <a class="btn btn-primary btn-sm"
               href="{% url 'ietf.meeting.views.session_details' num=s.meeting.number acronym=s.group.acronym %}">
                {% if can_always_edit or can_edit_materials %}
@@ -85,7 +85,7 @@
    </table>
{% endif %}
{# The following is a temporary performance workaround, not long term design #}
-{% if group.acronym != "iab" %}
+{% if group.acronym != "iab" and group.acronym != "iesg" %}
    <p class="alert alert-info my-3">
        This page shows meetings within the last four years. For earlier meetings, please see the
        <a href="https://www.ietf.org/how/meetings/past/">proceedings</a>.
@@ -139,6 +139,12 @@
                {% if s.minutes %}href="{{ s.minutes.get_absolute_url }}"{% endif %}>
                Minutes
            </a>
+           {% if group.acronym == "iesg" %}
+               <a class="btn btn-sm {% if not s.narrative_minutes %}btn-secondary disabled{% else %}btn-primary{% endif %}"
+                  {% if s.narrative_minutes %}href="{{ s.narrative_minutes.get_absolute_url }}"{% endif %}>
+                   Narrative Minutes
+               </a>
+           {% endif %}
            <a class="btn btn-primary btn-sm"
               href="{% url 'ietf.meeting.views.session_details' num=s.meeting.number acronym=s.group.acronym %}">
                {% if can_always_edit or can_edit_materials %}
@@ -29,7 +29,9 @@
    {% for statement in statements %}
        <tr>
            <td title="{{ statement.published|date:'Y-m-d H:i:s O' }}">{{ statement.published|date:"Y-m-d" }}</td>
-           <td><a href="{% url 'ietf.doc.views_doc.document_main' name=statement.name %}">{{statement.title}}</a></td>
+           <td><a href="{% url 'ietf.doc.views_doc.document_main' name=statement.name %}">{{statement.title}}</a>
+               {% if statement.status == "replaced" %}<span class="badge rounded-pill text-bg-warning">Replaced</span>{% endif %}
+           </td>
        </tr>
    {% endfor %}
    </tbody>

@@ -12,7 +12,7 @@ from markdown.postprocessors import Postprocessor
 from django.utils.safestring import mark_safe

 from ietf.doc.templatetags.ietf_filters import urlize_ietf_docs
-from ietf.utils.text import bleach_cleaner, bleach_linker
+from ietf.utils.text import bleach_cleaner, liberal_bleach_cleaner, bleach_linker


 class LinkifyExtension(Extension):
@@ -49,3 +49,19 @@ def markdown(text):
             )
         )
     )
+
+
+def liberal_markdown(text):
+    return mark_safe(
+        liberal_bleach_cleaner.clean(
+            python_markdown.markdown(
+                text,
+                extensions=[
+                    "extra",
+                    "nl2br",
+                    "sane_lists",
+                    "toc",
+                    LinkifyExtension(),
+                ],
+            )
+        )
+    )
@@ -46,6 +46,15 @@ bleach_cleaner = bleach.sanitizer.Cleaner(
     tags=tags, attributes=attributes, protocols=protocols, strip=True
 )

+liberal_tags = copy.copy(tags)
+liberal_attributes = copy.copy(attributes)
+liberal_tags.update(["img","figure","figcaption"])
+liberal_attributes["img"] = ["src","alt"]
+
+liberal_bleach_cleaner = bleach.sanitizer.Cleaner(
+    tags=liberal_tags, attributes=liberal_attributes, protocols=protocols, strip=True
+)
+
 validate_url = URLValidator()

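The only difference between the two cleaners is the extra `img`, `figure`, and `figcaption` tags, with `src` and `alt` allowed on `img`; this is what lets secretariat-controlled markdown keep inline images while everything else is still scrubbed. A runnable contrast with abbreviated tag sets, assuming `bleach` is installed:

```python
# Sketch: the strict/liberal split above, with abbreviated tag sets.
import bleach

tags = {"p", "a"}
liberal_tags = tags | {"img", "figure", "figcaption"}

strict = bleach.sanitizer.Cleaner(tags=tags, strip=True)
liberal = bleach.sanitizer.Cleaner(
    tags=liberal_tags, attributes={"img": ["src", "alt"]}, strip=True
)

html = '<p><img src="minutes.png" alt="scan"></p>'
print(strict.clean(html))   # <p></p>  (the img tag is stripped)
print(liberal.clean(html))  # the img survives, with src and alt intact
```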