diff --git a/ietf/api/tests.py b/ietf/api/tests.py index a495accc3..15099a723 100644 --- a/ietf/api/tests.py +++ b/ietf/api/tests.py @@ -368,7 +368,7 @@ class CustomApiTests(TestCase): r = self.client.post(url,{'apikey':apikey.hash(),'apidata': f'{{"session_id":{session.pk}, "{type_id}":{content}}}'}) self.assertEqual(r.status_code, 200) - newdoc = session.sessionpresentation_set.get(document__type_id=type_id).document + newdoc = session.presentations.get(document__type_id=type_id).document newdoccontent = get_unicode_document_content(newdoc.name, Path(session.meeting.get_materials_path()) / type_id / newdoc.uploaded_filename) self.assertEqual(json.loads(content), json.loads(newdoccontent)) @@ -454,7 +454,7 @@ class CustomApiTests(TestCase): 'item': '1', 'bluesheet': bluesheet, }) self.assertContains(r, "Done", status_code=200) - bluesheet = session.sessionpresentation_set.filter(document__type__slug='bluesheets').first().document + bluesheet = session.presentations.filter(document__type__slug='bluesheets').first().document # We've submitted an update; check that the rev is right self.assertEqual(bluesheet.rev, '01') # Check the content @@ -569,7 +569,7 @@ class CustomApiTests(TestCase): self.assertContains(r, "Done", status_code=200) bluesheet = ( - session.sessionpresentation_set.filter(document__type__slug="bluesheets") + session.presentations.filter(document__type__slug="bluesheets") .first() .document ) diff --git a/ietf/doc/migrations/0021_narrativeminutes.py b/ietf/doc/migrations/0021_narrativeminutes.py new file mode 100644 index 000000000..0f330bd05 --- /dev/null +++ b/ietf/doc/migrations/0021_narrativeminutes.py @@ -0,0 +1,39 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations + + +def forward(apps, schema_editor): + StateType = apps.get_model("doc", "StateType") + State = apps.get_model("doc", "State") + + StateType.objects.create( + slug="narrativeminutes", + label="State", + ) + for order, slug in enumerate(["active", "deleted"]): + State.objects.create( + slug=slug, + type_id="narrativeminutes", + name=slug.capitalize(), + order=order, + desc="", + used=True, + ) + + +def reverse(apps, schema_editor): + StateType = apps.get_model("doc", "StateType") + State = apps.get_model("doc", "State") + + State.objects.filter(type_id="narrativeminutes").delete() + StateType.objects.filter(slug="narrativeminutes").delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("doc", "0020_move_errata_tags"), + ("name", "0013_narrativeminutes"), + ] + + operations = [migrations.RunPython(forward, reverse)] diff --git a/ietf/doc/models.py b/ietf/doc/models.py index af4843fc4..d97e8238e 100644 --- a/ietf/doc/models.py +++ b/ietf/doc/models.py @@ -148,7 +148,7 @@ class DocumentInfo(models.Model): else: self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR elif self.meeting_related() and self.type_id in ( - "agenda", "minutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls" + "agenda", "minutes", "narrativeminutes", "slides", "bluesheets", "procmaterials", "chatlog", "polls" ): meeting = self.get_related_meeting() if meeting is not None: @@ -438,7 +438,7 @@ class DocumentInfo(models.Model): return e != None and (e.text != "") def meeting_related(self): - if self.type_id in ("agenda","minutes","bluesheets","slides","recording","procmaterials","chatlog","polls"): + if self.type_id in ("agenda","minutes", "narrativeminutes", "bluesheets","slides","recording","procmaterials","chatlog","polls"): return self.type_id != 
"slides" or self.get_state_slug('reuse_policy')=='single' return False @@ -1028,7 +1028,7 @@ class Document(DocumentInfo): def future_presentations(self): """ returns related SessionPresentation objects for meetings that have not yet ended. This implementation allows for 2 week meetings """ - candidate_presentations = self.sessionpresentation_set.filter( + candidate_presentations = self.presentations.filter( session__meeting__date__gte=date_today() - datetime.timedelta(days=15) ) return sorted( @@ -1041,11 +1041,11 @@ class Document(DocumentInfo): """ returns related SessionPresentation objects for the most recent meeting in the past""" # Assumes no two meetings have the same start date - if the assumption is violated, one will be chosen arbitrarily today = date_today() - candidate_presentations = self.sessionpresentation_set.filter(session__meeting__date__lte=today) + candidate_presentations = self.presentations.filter(session__meeting__date__lte=today) candidate_meetings = set([p.session.meeting for p in candidate_presentations if p.session.meeting.end_date() cutoff_date future, in_progress, recent, past = group_sessions(sessions) @@ -857,7 +909,7 @@ def meetings(request, acronym, group_type=None): can_always_edit = has_role(request.user, ["Secretariat", "Area Director"]) far_past = [] - if group.acronym == "iab": + if group.acronym in ["iab", "iesg"]: recent_past = [] for s in past: if s.time >= four_years_ago: @@ -1347,16 +1399,36 @@ def stream_edit(request, acronym): ) -@cache_control(public=True, max_age=30*60) +@cache_control(public=True, max_age=30 * 60) @cache_page(30 * 60) def group_menu_data(request): - groups = Group.objects.filter(state="active", parent__state="active").filter(Q(type__features__acts_like_wg=True)|Q(type_id__in=['program','iabasg','iabworkshop'])|Q(parent__acronym='ietfadminllc')|Q(parent__acronym='rfceditor')).order_by("-type_id","acronym") + groups = ( + Group.objects.filter(state="active", parent__state="active") + .filter( + Q(type__features__acts_like_wg=True) + | Q(type_id__in=["program", "iabasg", "iabworkshop"]) + | Q(parent__acronym="ietfadminllc") + | Q(parent__acronym="rfceditor") + ) + .order_by("-type_id", "acronym") + .select_related("type") + ) groups_by_parent = defaultdict(list) for g in groups: - url = urlreverse("ietf.group.views.group_home", kwargs={ 'group_type': g.type_id, 'acronym': g.acronym }) -# groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url }) - groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'type': escape(g.type.verbose_name or g.type.name), 'url': url }) + url = urlreverse( + "ietf.group.views.group_home", + kwargs={"group_type": g.type_id, "acronym": g.acronym}, + ) + # groups_by_parent[g.parent_id].append({ 'acronym': g.acronym, 'name': escape(g.name), 'url': url }) + groups_by_parent[g.parent_id].append( + { + "acronym": g.acronym, + "name": escape(g.name), + "type": escape(g.type.verbose_name or g.type.name), + "url": url, + } + ) iab = Group.objects.get(acronym="iab") groups_by_parent[iab.pk].insert( @@ -1365,12 +1437,15 @@ def group_menu_data(request): "acronym": iab.acronym, "name": iab.name, "type": "Top Level Group", - "url": urlreverse("ietf.group.views.group_home", kwargs={"acronym": iab.acronym}) - } + "url": urlreverse( + "ietf.group.views.group_home", kwargs={"acronym": iab.acronym} + ), + }, ) return JsonResponse(groups_by_parent) + @cache_control(public=True, max_age=30 * 60) @cache_page(30 * 60) def group_stats_data(request, 
years="3", only_active=True): @@ -2116,14 +2191,25 @@ def statements(request, acronym, group_type=None): if not acronym in ["iab", "iesg"]: raise Http404 group = get_group_or_404(acronym, group_type) - statements = group.document_set.filter(type_id="statement").annotate( - published=Subquery( - DocEvent.objects.filter( - doc=OuterRef("pk"), - type="published_statement" - ).order_by("-time").values("time")[:1] + statements = ( + group.document_set.filter(type_id="statement") + .annotate( + published=Subquery( + DocEvent.objects.filter(doc=OuterRef("pk"), type="published_statement") + .order_by("-time") + .values("time")[:1] + ) ) - ).order_by("-published") + .annotate( + status=Subquery( + Document.states.through.objects.filter( + document_id=OuterRef("pk"), state__type="statement" + ).values_list("state__slug", flat=True)[:1] + ) + ) + .order_by("-published") + ) + debug.show("statements.first().status") return render( request, "group/statements.html", diff --git a/ietf/meeting/forms.py b/ietf/meeting/forms.py index dfc765ad2..164f0fd3b 100644 --- a/ietf/meeting/forms.py +++ b/ietf/meeting/forms.py @@ -341,7 +341,7 @@ class InterimSessionModelForm(forms.ModelForm): # FIXME: What about agendas in html or markdown format? uploaded_filename='{}-00.txt'.format(filename)) doc.set_state(State.objects.get(type__slug=doc.type.slug, slug='active')) - self.instance.sessionpresentation_set.create(document=doc, rev=doc.rev) + self.instance.presentations.create(document=doc, rev=doc.rev) NewRevisionDocEvent.objects.create( type='new_revision', by=self.user.person, diff --git a/ietf/meeting/helpers.py b/ietf/meeting/helpers.py index 14478787f..c0e250cdc 100644 --- a/ietf/meeting/helpers.py +++ b/ietf/meeting/helpers.py @@ -104,7 +104,7 @@ def preprocess_assignments_for_agenda(assignments_queryset, meeting, extra_prefe queryset=add_event_info_to_session_qs(Session.objects.all().prefetch_related( 'group', 'group__charter', 'group__charter__group', Prefetch('materials', - queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('sessionpresentation__order').prefetch_related('states'), + queryset=Document.objects.exclude(states__type=F("type"), states__slug='deleted').order_by('presentations__order').prefetch_related('states'), to_attr='prefetched_active_materials' ) )) @@ -890,7 +890,7 @@ def make_materials_directories(meeting): # was merged with the regular datatracker code; then in secr/proceedings/views.py # in make_directories()) saved_umask = os.umask(0) - for leaf in ('slides','agenda','minutes','id','rfc','bluesheets'): + for leaf in ('slides','agenda','minutes', 'narrativeminutes', 'id','rfc','bluesheets'): target = os.path.join(path,leaf) if not os.path.exists(target): os.makedirs(target) diff --git a/ietf/meeting/management/commands/import_iesg_minutes.py b/ietf/meeting/management/commands/import_iesg_minutes.py new file mode 100644 index 000000000..92abbe92d --- /dev/null +++ b/ietf/meeting/management/commands/import_iesg_minutes.py @@ -0,0 +1,343 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from collections import namedtuple +import datetime +import os +import re +import shutil + +from django.conf import settings +from django.core.management import BaseCommand + +from pathlib import Path +from zoneinfo import ZoneInfo +from ietf.doc.models import DocEvent, Document + +from ietf.meeting.models import ( + Meeting, + SchedTimeSessAssignment, + Schedule, + SchedulingEvent, + Session, + TimeSlot, +) +from ietf.name.models import DocTypeName + + +def 
add_time_of_day(bare_datetime): + """Add a time for the iesg meeting based on a date and make it tzaware + + From the secretariat - the telechats happened at these times: + 2015-04-09 to present: 0700 PT America/Los Angeles + 1993-02-01 to 2015-03-12: 1130 ET America/New York + 1991-07-30 to 1993-01-25: 1200 ET America/New York + """ + dt = None + if bare_datetime.year > 2015: + dt = bare_datetime.replace(hour=7).replace( + tzinfo=ZoneInfo("America/Los_Angeles") + ) + elif bare_datetime.year == 2015: + if bare_datetime.month >= 4: + dt = bare_datetime.replace(hour=7).replace( + tzinfo=ZoneInfo("America/Los_Angeles") + ) + else: + dt = bare_datetime.replace(hour=11, minute=30).replace( + tzinfo=ZoneInfo("America/New_York") + ) + elif bare_datetime.year > 1993: + dt = bare_datetime.replace(hour=11, minute=30).replace( + tzinfo=ZoneInfo("America/New_York") + ) + elif bare_datetime.year == 1993: + if bare_datetime.month >= 2: + dt = bare_datetime.replace(hour=11, minute=30).replace( + tzinfo=ZoneInfo("America/New_York") + ) + else: + dt = bare_datetime.replace(hour=12).replace( + tzinfo=ZoneInfo("America/New_York") + ) + else: + dt = bare_datetime.replace(hour=12).replace(tzinfo=ZoneInfo("America/New_York")) + + return dt.astimezone(datetime.timezone.utc) + + +def build_bof_coord_data(): + CoordTuple = namedtuple("CoordTuple", "meeting_number source_name") + + def utc_from_la_time(time): + return time.replace(tzinfo=ZoneInfo("America/Los_Angeles")).astimezone( + datetime.timezone.utc + ) + + data = dict() + data[utc_from_la_time(datetime.datetime(2016, 6, 10, 7, 0))] = CoordTuple( + 96, "2015/bof-minutes-ietf-96.txt" + ) + data[utc_from_la_time(datetime.datetime(2016, 10, 6, 7, 0))] = CoordTuple( + 97, "2016/BoF-Minutes-2016-10-06.txt" + ) + data[utc_from_la_time(datetime.datetime(2017, 2, 15, 8, 0))] = CoordTuple( + 98, "2017/bof-minutes-ietf-98.txt" + ) + data[utc_from_la_time(datetime.datetime(2017, 6, 7, 8, 0))] = CoordTuple( + 99, "2017/bof-minutes-ietf-99.txt" + ) + data[utc_from_la_time(datetime.datetime(2017, 10, 5, 7, 0))] = CoordTuple( + 100, "2017/bof-minutes-ietf-100.txt" + ) + data[utc_from_la_time(datetime.datetime(2018, 2, 5, 11, 0))] = CoordTuple( + 101, "2018/bof-minutes-ietf-101.txt" + ) + data[utc_from_la_time(datetime.datetime(2018, 6, 5, 8, 0))] = CoordTuple( + 102, "2018/bof-minutes-ietf-102.txt" + ) + data[utc_from_la_time(datetime.datetime(2018, 9, 26, 7, 0))] = CoordTuple( + 103, "2018/bof-minutes-ietf-103.txt" + ) + data[utc_from_la_time(datetime.datetime(2019, 2, 15, 9, 0))] = CoordTuple( + 104, "2019/bof-minutes-ietf-104.txt" + ) + data[utc_from_la_time(datetime.datetime(2019, 6, 11, 7, 30))] = CoordTuple( + 105, "2019/bof-minutes-ietf-105.txt" + ) + data[utc_from_la_time(datetime.datetime(2019, 10, 9, 6, 30))] = CoordTuple( + 106, "2019/bof-minutes-ietf-106.txt" + ) + data[utc_from_la_time(datetime.datetime(2020, 2, 13, 8, 0))] = CoordTuple( + 107, "2020/bof-minutes-ietf-107.txt" + ) + data[utc_from_la_time(datetime.datetime(2020, 6, 15, 8, 0))] = CoordTuple( + 108, "2020/bof-minutes-ietf-108.txt" + ) + data[utc_from_la_time(datetime.datetime(2020, 10, 9, 7, 0))] = CoordTuple( + 109, "2020/bof-minutes-ietf-109.txt" + ) + data[utc_from_la_time(datetime.datetime(2021, 1, 14, 13, 30))] = CoordTuple( + 110, "2021/bof-minutes-ietf-110.txt" + ) + data[utc_from_la_time(datetime.datetime(2021, 6, 1, 8, 0))] = CoordTuple( + 111, "2021/bof-minutes-ietf-111.txt" + ) + data[utc_from_la_time(datetime.datetime(2021, 9, 15, 9, 0))] = CoordTuple( + 112, 
"2021/bof-minutes-ietf-112.txt" + ) + data[utc_from_la_time(datetime.datetime(2022, 1, 28, 7, 0))] = CoordTuple( + 113, "2022/bof-minutes-ietf-113.txt" + ) + data[utc_from_la_time(datetime.datetime(2022, 6, 2, 10, 0))] = CoordTuple( + 114, "2022/bof-minutes-ietf-114.txt" + ) + data[utc_from_la_time(datetime.datetime(2022, 9, 13, 9, 0))] = CoordTuple( + 115, "2022/bof-minutes-ietf-115.txt" + ) + data[utc_from_la_time(datetime.datetime(2023, 2, 1, 9, 0))] = CoordTuple( + 116, "2023/bof-minutes-ietf-116.txt" + ) + data[utc_from_la_time(datetime.datetime(2023, 6, 1, 7, 0))] = CoordTuple( + 117, "2023/bof-minutes-ietf-117.txt" + ) + data[utc_from_la_time(datetime.datetime(2023, 9, 15, 8, 0))] = CoordTuple( + 118, "2023/bof-minutes-ietf-118.txt" + ) + return data + + +class Command(BaseCommand): + help = "Performs a one-time import of IESG minutes, creating Meetings to attach them to" + + def handle(self, *args, **options): + old_minutes_root = ( + "/a/www/www6/iesg/minutes" + if settings.SERVER_MODE == "production" + else "/assets/www6/iesg/minutes" + ) + minutes_dir = Path(old_minutes_root) + date_re = re.compile(r"\d{4}-\d{2}-\d{2}") + meeting_times = set() + for file_prefix in ["minutes", "narrative"]: + paths = list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.txt")) + paths.extend( + list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.html")) + ) + for path in paths: + s = date_re.search(path.name) + if s: + meeting_times.add( + add_time_of_day( + datetime.datetime.strptime(s.group(), "%Y-%m-%d") + ) + ) + bof_coord_data = build_bof_coord_data() + bof_times = set(bof_coord_data.keys()) + assert len(bof_times.intersection(meeting_times)) == 0 + meeting_times.update(bof_times) + year_seen = None + for dt in sorted(meeting_times): + if dt.year != year_seen: + counter = 1 + year_seen = dt.year + meeting_name = f"interim-{dt.year}-iesg-{counter:02d}" + meeting = Meeting.objects.create( + number=meeting_name, + type_id="interim", + date=dt.date(), + days=1, + time_zone=dt.tzname(), + ) + schedule = Schedule.objects.create( + meeting=meeting, + owner_id=1, # the "(System)" person + visible=True, + public=True, + ) + meeting.schedule = schedule + meeting.save() + session = Session.objects.create( + meeting=meeting, + group_id=2, # The IESG group + type_id="regular", + purpose_id="regular", + name=( + f"IETF {bof_coord_data[dt].meeting_number} BOF Coordination Call" + if dt in bof_times + else "Formal Telechat" + ), + ) + SchedulingEvent.objects.create( + session=session, + status_id="sched", + by_id=1, # (System) + ) + timeslot = TimeSlot.objects.create( + meeting=meeting, + type_id="regular", + time=dt, + duration=datetime.timedelta(seconds=2 * 60 * 60), + ) + SchedTimeSessAssignment.objects.create( + timeslot=timeslot, session=session, schedule=schedule + ) + + if dt in bof_times: + source = minutes_dir / bof_coord_data[dt].source_name + if source.exists(): + doc_name = ( + f"minutes-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}" + ) + doc_filename = f"{doc_name}-00.txt" + doc = Document.objects.create( + name=doc_name, + type_id="minutes", + title=f"Minutes IETF {bof_coord_data[dt].meeting_number} BOF coordination {meeting_name} {dt:%Y-%m-%d %H:%M}", + group_id=2, # the IESG group + rev="00", + uploaded_filename=doc_filename, + ) + e = DocEvent.objects.create( + type="comment", + doc=doc, + rev="00", + by_id=1, # "(System)" + desc="Minutes moved into datatracker", + ) + doc.save_with_history([e]) + session.presentations.create(document=doc, rev=doc.rev) + dest = ( + 
Path(settings.AGENDA_PATH) + / meeting_name + / "minutes" + / doc_filename + ) + if dest.exists(): + self.stdout.write( + f"WARNING: {dest} already exists - not overwriting it." + ) + else: + os.makedirs(dest.parent, exist_ok=True) + shutil.copy(source, dest) + else: + for type_id in ["minutes", "narrativeminutes"]: + source_file_prefix = ( + "minutes" if type_id == "minutes" else "narrative-minutes" + ) + txt_source = ( + minutes_dir + / f"{dt.year}" + / f"{source_file_prefix}-{dt:%Y-%m-%d}.txt" + ) + html_source = ( + minutes_dir + / f"{dt.year}" + / f"{source_file_prefix}-{dt:%Y-%m-%d}.html" + ) + if txt_source.exists() and html_source.exists(): + self.stdout.write( + f"WARNING: Both {txt_source} and {html_source} exist." + ) + if txt_source.exists() or html_source.exists(): + prefix = DocTypeName.objects.get(slug=type_id).prefix + doc_name = f"{prefix}-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}" + suffix = "html" if html_source.exists() else "txt" + doc_filename = f"{doc_name}-00.{suffix}" + verbose_type = ( + "Minutes" if type_id == "minutes" else "Narrative Minutes" + ) + doc = Document.objects.create( + name=doc_name, + type_id=type_id, + title=f"{verbose_type} {meeting_name} {dt:%Y-%m-%d %H:%M}", + group_id=2, # the IESG group + rev="00", + uploaded_filename=doc_filename, + ) + e = DocEvent.objects.create( + type="comment", + doc=doc, + rev="00", + by_id=1, # "(System)" + desc=f"{verbose_type} moved into datatracker", + ) + doc.save_with_history([e]) + session.presentations.create(document=doc, rev=doc.rev) + dest = ( + Path(settings.AGENDA_PATH) + / meeting_name + / type_id + / doc_filename + ) + if dest.exists(): + self.stdout.write( + f"WARNING: {dest} already exists - not overwriting it." + ) + else: + os.makedirs(dest.parent, exist_ok=True) + if html_source.exists(): + html_content = html_source.read_text(encoding="utf-8") + html_content = html_content.replace( + f'href="IESGnarrative-{dt:%Y-%m-%d}.html#', + 'href="#', + ) + html_content = re.sub( + r']*>([^<]*)', + r"\1", + html_content, + ) + html_content = re.sub( + r'([^<]*)', + r"\1", + html_content, + ) + html_content = re.sub( + ' self.meeting.get_submission_correction_date() def joint_with_groups_acronyms(self): @@ -1241,10 +1248,21 @@ class Session(models.Model): return settings.CHAT_URL_PATTERN.format(chat_room_name=self.chat_room_name()) def chat_archive_url(self): - chatlog = self.sessionpresentation_set.filter(document__type__slug='chatlog').first() - if chatlog is not None: - return chatlog.document.get_href() - elif self.meeting.date <= datetime.date(2022, 7, 15): + + if hasattr(self,"prefetched_active_materials"): + chatlog_doc = None + for doc in self.prefetched_active_materials: + if doc.type_id=="chatlog": + chatlog_doc = doc + break + if chatlog_doc is not None: + return chatlog_doc.get_href() + else: + chatlog = self.presentations.filter(document__type__slug='chatlog').first() + if chatlog is not None: + return chatlog.document.get_href() + + if self.meeting.date <= datetime.date(2022, 7, 15): # datatracker 8.8.0 released on 2022 July 15; before that, fall back to old log URL return f'https://www.ietf.org/jabber/logs/{ self.chat_room_name() }?C=M;O=D' elif hasattr(settings,'CHAT_ARCHIVE_URL_PATTERN'): diff --git a/ietf/meeting/templatetags/proceedings_filters.py b/ietf/meeting/templatetags/proceedings_filters.py index f5fe0e1f1..a2a4932e7 100644 --- a/ietf/meeting/templatetags/proceedings_filters.py +++ b/ietf/meeting/templatetags/proceedings_filters.py @@ -11,7 +11,7 @@ def 
hack_recording_title(recording,add_timestamp=False): if recording.title.startswith('Audio recording for') or recording.title.startswith('Video recording for'): hacked_title = recording.title[:15] if add_timestamp: - hacked_title += ' '+recording.sessionpresentation_set.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M") + hacked_title += ' '+recording.presentations.first().session.official_timeslotassignment().timeslot.time.strftime("%a %H:%M") return hacked_title else: return recording.title diff --git a/ietf/meeting/templatetags/session_filters.py b/ietf/meeting/templatetags/session_filters.py index 4fe377a81..3846dab49 100644 --- a/ietf/meeting/templatetags/session_filters.py +++ b/ietf/meeting/templatetags/session_filters.py @@ -8,7 +8,7 @@ register = template.Library() @register.filter def presented_versions(session, doc): - sp = session.sessionpresentation_set.filter(document=doc) + sp = session.presentations.filter(document=doc) if not sp: return "Document not in session" else: diff --git a/ietf/meeting/test_data.py b/ietf/meeting/test_data.py index 5ecb494df..8be55b47a 100644 --- a/ietf/meeting/test_data.py +++ b/ietf/meeting/test_data.py @@ -51,7 +51,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'): doc = DocumentFactory.create(name=name, type_id='agenda', title="Agenda", uploaded_filename=file, group=group, rev=rev, states=[('draft','active')]) pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) # minutes name = "minutes-%s-%s" % (meeting.number, time.strftime("%Y%m%d%H%M")) rev = '00' @@ -59,7 +59,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'): doc = DocumentFactory.create(name=name, type_id='minutes', title="Minutes", uploaded_filename=file, group=group, rev=rev, states=[('draft','active')]) pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) # slides title = "Slideshow" @@ -70,7 +70,7 @@ def make_interim_meeting(group,date,status='sched',tz='UTC'): uploaded_filename=file, group=group, rev=rev, states=[('slides','active'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) # return meeting @@ -198,24 +198,24 @@ def make_meeting_test_data(meeting=None, create_interims=False): doc = DocumentFactory.create(name='agenda-72-mars', type_id='agenda', title="Agenda", uploaded_filename="agenda-72-mars.txt", group=mars, rev='00', states=[('agenda','active')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) # + mars_session.presentations.add(pres) # doc = DocumentFactory.create(name='minutes-72-mars', type_id='minutes', title="Minutes", uploaded_filename="minutes-72-mars.md", group=mars, rev='00', states=[('minutes','active')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) + mars_session.presentations.add(pres) doc = DocumentFactory.create(name='slides-72-mars-1-active', type_id='slides', title="Slideshow", uploaded_filename="slides-72-mars.txt", group=mars, rev='00', states=[('slides','active'), ('reuse_policy', 'single')]) pres = 
SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) + mars_session.presentations.add(pres) doc = DocumentFactory.create(name='slides-72-mars-2-deleted', type_id='slides', title="Bad Slideshow", uploaded_filename="slides-72-mars-2-deleted.txt", group=mars, rev='00', states=[('slides','deleted'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=mars_session,document=doc,rev=doc.rev) - mars_session.sessionpresentation_set.add(pres) + mars_session.presentations.add(pres) # Future Interim Meetings date = date_today() + datetime.timedelta(days=365) diff --git a/ietf/meeting/tests_js.py b/ietf/meeting/tests_js.py index 517836f87..6199ed7eb 100644 --- a/ietf/meeting/tests_js.py +++ b/ietf/meeting/tests_js.py @@ -884,9 +884,9 @@ class SlideReorderTests(IetfSeleniumTestCase): def setUp(self): super(SlideReorderTests, self).setUp() self.session = SessionFactory(meeting__type_id='ietf', status_id='sched') - self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='one'),order=1) - self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='two'),order=2) - self.session.sessionpresentation_set.create(document=DocumentFactory(type_id='slides',name='three'),order=3) + self.session.presentations.create(document=DocumentFactory(type_id='slides',name='one'),order=1) + self.session.presentations.create(document=DocumentFactory(type_id='slides',name='two'),order=2) + self.session.presentations.create(document=DocumentFactory(type_id='slides',name='three'),order=3) def secr_login(self): self.login('secretary') @@ -906,7 +906,7 @@ class SlideReorderTests(IetfSeleniumTestCase): ActionChains(self.driver).drag_and_drop(second,third).perform() time.sleep(0.1) # The API that modifies the database runs async - names=self.session.sessionpresentation_set.values_list('document__name',flat=True) + names=self.session.presentations.values_list('document__name',flat=True) self.assertEqual(list(names),['one','three','two']) @ifSeleniumEnabled diff --git a/ietf/meeting/tests_views.py b/ietf/meeting/tests_views.py index 2459eb14c..e2abcede8 100644 --- a/ietf/meeting/tests_views.py +++ b/ietf/meeting/tests_views.py @@ -468,16 +468,16 @@ class MeetingTests(BaseMeetingTestCase): doc = DocumentFactory.create(name='agenda-172-mars', type_id='agenda', title="Agenda", uploaded_filename="agenda-172-mars.txt", group=session107.group, rev='00', states=[('agenda','active')]) pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev) - session107.sessionpresentation_set.add(pres) # + session107.presentations.add(pres) # doc = DocumentFactory.create(name='minutes-172-mars', type_id='minutes', title="Minutes", uploaded_filename="minutes-172-mars.md", group=session107.group, rev='00', states=[('minutes','active')]) pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev) - session107.sessionpresentation_set.add(pres) + session107.presentations.add(pres) doc = DocumentFactory.create(name='slides-172-mars-1-active', type_id='slides', title="Slideshow", uploaded_filename="slides-172-mars.txt", group=session107.group, rev='00', states=[('slides','active'), ('reuse_policy', 'single')]) pres = SessionPresentation.objects.create(session=session107,document=doc,rev=doc.rev) - session107.sessionpresentation_set.add(pres) + session107.presentations.add(pres) for session in ( Session.objects.filter(meeting=meeting, 
group__acronym="mars").first(), @@ -548,7 +548,7 @@ class MeetingTests(BaseMeetingTestCase): named_row = named_label.closest('tr') self.assertTrue(named_row) - for material in (sp.document for sp in plain_session.sessionpresentation_set.all()): + for material in (sp.document for sp in plain_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', @@ -559,7 +559,7 @@ class MeetingTests(BaseMeetingTestCase): self.assertTrue(plain_row.find(f'a[href="{expected_url}"]')) self.assertFalse(named_row.find(f'a[href="{expected_url}"]')) - for material in (sp.document for sp in named_session.sessionpresentation_set.all()): + for material in (sp.document for sp in named_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', @@ -955,10 +955,10 @@ class MeetingTests(BaseMeetingTestCase): # but lists a different on in its agenda. The expectation is that the pdf and tgz views will return both. session = SessionFactory(group__type_id='wg',meeting__type_id='ietf') draft1 = WgDraftFactory(group=session.group) - session.sessionpresentation_set.create(document=draft1) + session.presentations.create(document=draft1) draft2 = WgDraftFactory(group=session.group) agenda = DocumentFactory(type_id='agenda',group=session.group, uploaded_filename='agenda-%s-%s' % (session.meeting.number,session.group.acronym), states=[('agenda','active')]) - session.sessionpresentation_set.create(document=agenda) + session.presentations.create(document=agenda) self.write_materials_file(session.meeting, session.materials.get(type="agenda"), "1. WG status (15 minutes)\n\n2. Status of %s\n\n" % draft2.name) filenames = [] @@ -3083,18 +3083,18 @@ class ReorderSlidesTests(TestCase): r = self.client.post(url, {'order':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session.sessionpresentation_set.count(),1) + self.assertEqual(session.presentations.count(),1) # Ignore a request to add slides that are already in a session r = self.client.post(url, {'order':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session.sessionpresentation_set.count(),1) + self.assertEqual(session.presentations.count(),1) session2 = SessionFactory(group=session.group, meeting=session.meeting) SessionPresentationFactory.create_batch(3, document__type_id='slides', session=session2) - for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1): + for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1): sp.order = num sp.save() @@ -3106,22 +3106,22 @@ class ReorderSlidesTests(TestCase): r = self.client.post(url, {'order':1, 'name':more_slides[0].name}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[0]).order,1) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5))) + self.assertEqual(session2.presentations.get(document=more_slides[0]).order,1) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5))) # Insert at end r = self.client.post(url, {'order':5, 'name':more_slides[1].name}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - 
self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[1]).order,5) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,6))) + self.assertEqual(session2.presentations.get(document=more_slides[1]).order,5) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,6))) # Insert in middle r = self.client.post(url, {'order':3, 'name':more_slides[2].name}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session2.sessionpresentation_set.get(document=more_slides[2]).order,3) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,7))) + self.assertEqual(session2.presentations.get(document=more_slides[2]).order,3) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,7))) def test_remove_slides_from_session(self): for type_id in ['ietf','interim']: @@ -3172,7 +3172,7 @@ class ReorderSlidesTests(TestCase): self.assertEqual(r.json()['success'],False) self.assertIn('index is not valid',r.json()['error']) - session.sessionpresentation_set.create(document=slides, rev=slides.rev, order=1) + session.presentations.create(document=slides, rev=slides.rev, order=1) # Bad names r = self.client.post(url, {'oldIndex':1}) @@ -3193,7 +3193,7 @@ class ReorderSlidesTests(TestCase): self.assertEqual(r.json()['success'],False) self.assertIn('SessionPresentation not found',r.json()['error']) - session.sessionpresentation_set.create(document=slides2, rev=slides2.rev, order=2) + session.presentations.create(document=slides2, rev=slides2.rev, order=2) r = self.client.post(url, {'oldIndex':1, 'name':slides2.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],False) @@ -3203,11 +3203,11 @@ class ReorderSlidesTests(TestCase): r = self.client.post(url, {'oldIndex':1, 'name':slides.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(session.sessionpresentation_set.count(),1) + self.assertEqual(session.presentations.count(),1) session2 = SessionFactory(group=session.group, meeting=session.meeting) sp_list = SessionPresentationFactory.create_batch(5, document__type_id='slides', session=session2) - for num, sp in enumerate(session2.sessionpresentation_set.filter(document__type_id='slides'),start=1): + for num, sp in enumerate(session2.presentations.filter(document__type_id='slides'),start=1): sp.order = num sp.save() @@ -3217,22 +3217,22 @@ class ReorderSlidesTests(TestCase): r = self.client.post(url, {'oldIndex':1, 'name':sp_list[0].document.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[0].pk).exists()) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,5))) + self.assertFalse(session2.presentations.filter(pk=sp_list[0].pk).exists()) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,5))) # delete in middle of list r = self.client.post(url, {'oldIndex':4, 'name':sp_list[4].document.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[4].pk).exists()) - 
self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,4))) + self.assertFalse(session2.presentations.filter(pk=sp_list[4].pk).exists()) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,4))) # delete at end of list r = self.client.post(url, {'oldIndex':2, 'name':sp_list[2].document.name }) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertFalse(session2.sessionpresentation_set.filter(pk=sp_list[2].pk).exists()) - self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,3))) + self.assertFalse(session2.presentations.filter(pk=sp_list[2].pk).exists()) + self.assertEqual(list(session2.presentations.order_by('order').values_list('order',flat=True)), list(range(1,3))) @@ -3290,45 +3290,45 @@ class ReorderSlidesTests(TestCase): r = self.client.post(url, {'oldIndex':1, 'newIndex':3}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5])) # Move to beginning r = self.client.post(url, {'oldIndex':3, 'newIndex':1}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) # Move from end r = self.client.post(url, {'oldIndex':5, 'newIndex':3}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4])) # Move to end r = self.client.post(url, {'oldIndex':3, 'newIndex':5}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5])) # Move beginning to end r = self.client.post(url, {'oldIndex':1, 'newIndex':5}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1])) # Move middle to middle r = self.client.post(url, {'oldIndex':3, 'newIndex':4}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1])) r = self.client.post(url, {'oldIndex':3, 'newIndex':2}) self.assertEqual(r.status_code, 200) self.assertEqual(r.json()['success'],True) - 
self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1])) + self.assertEqual(list(session.presentations.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1])) # Reset for next iteration in the loop - session.sessionpresentation_set.update(order=F('pk')) + session.presentations.update(order=F('pk')) self.client.logout() @@ -3345,7 +3345,7 @@ class ReorderSlidesTests(TestCase): except AssertionError: pass - self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('order',flat=True)),list(range(1,6))) + self.assertEqual(list(session.presentations.order_by('order').values_list('order',flat=True)),list(range(1,6))) class EditTests(TestCase): @@ -4334,7 +4334,7 @@ class SessionDetailsTests(TestCase): group.role_set.create(name_id='chair',person = group_chair, email = group_chair.email()) session = SessionFactory.create(meeting__type_id='ietf',group=group, meeting__date=date_today() + datetime.timedelta(days=90)) SessionPresentationFactory.create(session=session,document__type_id='draft',rev=None) - old_draft = session.sessionpresentation_set.filter(document__type='draft').first().document + old_draft = session.presentations.filter(document__type='draft').first().document new_draft = DocumentFactory(type_id='draft') url = urlreverse('ietf.meeting.views.add_session_drafts', kwargs=dict(num=session.meeting.number, session_id=session.pk)) @@ -4355,10 +4355,10 @@ class SessionDetailsTests(TestCase): q = PyQuery(r.content) self.assertIn("Already linked:", q('form .text-danger').text()) - self.assertEqual(1,session.sessionpresentation_set.count()) + self.assertEqual(1,session.presentations.count()) r = self.client.post(url,dict(drafts=[new_draft.pk,])) self.assertTrue(r.status_code, 302) - self.assertEqual(2,session.sessionpresentation_set.count()) + self.assertEqual(2,session.presentations.count()) session.meeting.date -= datetime.timedelta(days=180) session.meeting.save() @@ -5982,7 +5982,7 @@ class FinalizeProceedingsTests(TestCase): def test_finalize_proceedings(self): make_meeting_test_data() meeting = Meeting.objects.filter(type_id='ietf').order_by('id').last() - meeting.session_set.filter(group__acronym='mars').first().sessionpresentation_set.create(document=Document.objects.filter(type='draft').first(),rev=None) + meeting.session_set.filter(group__acronym='mars').first().presentations.create(document=Document.objects.filter(type='draft').first(),rev=None) url = urlreverse('ietf.meeting.views.finalize_proceedings',kwargs={'num':meeting.number}) login_testing_unauthorized(self,"secretary",url) @@ -5990,12 +5990,12 @@ class FinalizeProceedingsTests(TestCase): self.assertEqual(r.status_code, 200) self.assertEqual(meeting.proceedings_final,False) - self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,None) + self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,None) r = self.client.post(url,{'finalize':1}) self.assertEqual(r.status_code, 302) meeting = Meeting.objects.get(pk=meeting.pk) self.assertEqual(meeting.proceedings_final,True) - self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().sessionpresentation_set.filter(document__type="draft").first().rev,'00') + self.assertEqual(meeting.session_set.filter(group__acronym="mars").first().presentations.filter(document__type="draft").first().rev,'00') 
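Note: the sessionpresentation_set -> presentations rename applied throughout these hunks presumably comes from a related_name change on the SessionPresentation through-model in ietf/meeting/models.py; that hunk is not visible in this excerpt, so the sketch below is an assumption of what the field definitions look like after the change, not part of this patch.

    # Sketch (assumed): both foreign keys on the Session<->Document through-model
    # gain related_name="presentations", so session.presentations and
    # document.presentations replace the default sessionpresentation_set accessors.
    from django.db import models

    class SessionPresentation(models.Model):
        session = models.ForeignKey("meeting.Session", on_delete=models.CASCADE, related_name="presentations")
        document = models.ForeignKey("doc.Document", on_delete=models.CASCADE, related_name="presentations")
        rev = models.CharField(verbose_name="revision", max_length=16, null=True, blank=True)
        order = models.PositiveSmallIntegerField(default=0)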
class MaterialsTests(TestCase): settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [ @@ -6037,12 +6037,12 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session.sessionpresentation_set.exists()) + self.assertFalse(session.presentations.exists()) test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test') test_file.name = "not_really.pdf" r = self.client.post(url,dict(file=test_file)) self.assertEqual(r.status_code, 302) - bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document + bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document self.assertEqual(bs_doc.rev,'00') r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -6072,12 +6072,12 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session.sessionpresentation_set.exists()) + self.assertFalse(session.presentations.exists()) test_file = StringIO('%PDF-1.4\n%âãÏÓ\nthis is some text for a test') test_file.name = "not_really.pdf" r = self.client.post(url,dict(file=test_file)) self.assertEqual(r.status_code, 302) - bs_doc = session.sessionpresentation_set.filter(document__type_id='bluesheets').first().document + bs_doc = session.presentations.filter(document__type_id='bluesheets').first().document self.assertEqual(bs_doc.rev,'00') def test_upload_bluesheets_interim_chair_access(self): @@ -6105,7 +6105,7 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.sessionpresentation_set.exists()) + self.assertFalse(session.presentations.exists()) self.assertFalse(q('form input[type="checkbox"]')) session2 = SessionFactory(meeting=session.meeting,group=session.group) @@ -6140,7 +6140,7 @@ class MaterialsTests(TestCase): test_file.name = "some.html" r = self.client.post(url,dict(submission_method="upload",file=test_file)) self.assertEqual(r.status_code, 302) - doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document + doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'00') text = doc.text() self.assertIn('Some text', text) @@ -6152,9 +6152,9 @@ class MaterialsTests(TestCase): test_file.name = "some.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file,apply_to_all=False)) self.assertEqual(r.status_code, 302) - doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document + doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'01') - self.assertFalse(session2.sessionpresentation_set.filter(document__type_id=doctype)) + self.assertFalse(session2.presentations.filter(document__type_id=doctype)) r = self.client.get(url) self.assertEqual(r.status_code, 200) @@ -6166,7 +6166,7 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 302) doc = Document.objects.get(pk=doc.pk) self.assertEqual(doc.rev,'02') - self.assertTrue(session2.sessionpresentation_set.filter(document__type_id=doctype)) + self.assertTrue(session2.presentations.filter(document__type_id=doctype)) # Test bad encoding test_file = BytesIO('

<html><h1>Title</h1><section>Some\x93text</section></html>
'.encode('latin1')) @@ -6196,7 +6196,7 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.sessionpresentation_set.exists()) + self.assertFalse(session.presentations.exists()) self.assertFalse(q('form input[type="checkbox"]')) test_file = BytesIO(b'this is some text for a test') @@ -6218,12 +6218,12 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session.sessionpresentation_set.filter(document__type_id=doctype)) + self.assertFalse(session.presentations.filter(document__type_id=doctype)) test_file = BytesIO(b'this is some text for a test') test_file.name = "not_really.txt" r = self.client.post(url,dict(submission_method="upload",file=test_file)) self.assertEqual(r.status_code, 302) - doc = session.sessionpresentation_set.filter(document__type_id=doctype).first().document + doc = session.presentations.filter(document__type_id=doctype).first().document self.assertEqual(doc.rev,'00') # Verify that we don't have dead links @@ -6242,12 +6242,12 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("Title"))) - self.assertFalse(session.sessionpresentation_set.exists()) + self.assertFalse(session.presentations.exists()) test_text = 'Enter agenda from scratch' r = self.client.post(url,dict(submission_method="enter",content=test_text)) self.assertRedirects(r, redirect_url) - doc = session.sessionpresentation_set.filter(document__type_id='agenda').first().document + doc = session.presentations.filter(document__type_id='agenda').first().document self.assertEqual(doc.rev,'00') r = self.client.get(url) @@ -6283,14 +6283,14 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides')) + self.assertFalse(session1.presentations.filter(document__type_id='slides')) test_file = BytesIO(b'this is not really a slide') test_file.name = 'not_really.txt' r = self.client.post(url,dict(file=test_file,title='a test slide file',apply_to_all=True)) self.assertEqual(r.status_code, 302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),1) - sp = session2.sessionpresentation_set.first() + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),1) + sp = session2.presentations.first() self.assertEqual(sp.document.name, 'slides-%s-%s-a-test-slide-file' % (session1.meeting.number,session1.group.acronym ) ) self.assertEqual(sp.order,1) @@ -6299,14 +6299,14 @@ class MaterialsTests(TestCase): test_file.name = 'also_not_really.txt' r = self.client.post(url,dict(file=test_file,title='a different slide file',apply_to_all=False)) self.assertEqual(r.status_code, 302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),2) - sp = session2.sessionpresentation_set.get(document__name__endswith='-a-different-slide-file') + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),2) + sp = session2.presentations.get(document__name__endswith='-a-different-slide-file') self.assertEqual(sp.order,2) self.assertEqual(sp.rev,'00') self.assertEqual(sp.document.rev,'00') - url 
= urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.sessionpresentation_set.get(order=2).document.name}) + url = urlreverse('ietf.meeting.views.upload_session_slides',kwargs={'num':session2.meeting.number,'session_id':session2.id,'name':session2.presentations.get(order=2).document.name}) r = self.client.get(url) self.assertTrue(r.status_code, 200) q = PyQuery(r.content) @@ -6315,9 +6315,9 @@ class MaterialsTests(TestCase): test_file.name = 'doesnotmatter.txt' r = self.client.post(url,dict(file=test_file,title='rename the presentation',apply_to_all=False)) self.assertEqual(r.status_code, 302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),2) - sp = session2.sessionpresentation_set.get(order=2) + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),2) + sp = session2.presentations.get(order=2) self.assertEqual(sp.rev,'01') self.assertEqual(sp.document.rev,'01') @@ -6329,7 +6329,7 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code, 200) q = PyQuery(r.content) self.assertIn('Upload', str(q("title"))) - self.assertFalse(session1.sessionpresentation_set.filter(document__type_id='slides')) + self.assertFalse(session1.presentations.filter(document__type_id='slides')) test_file = BytesIO(b'this is not really a slide') test_file.name = 'not_really.txt' r = self.client.post(url,dict(file=test_file,title='title with bad character \U0001fabc ')) @@ -6341,7 +6341,7 @@ class MaterialsTests(TestCase): def test_remove_sessionpresentation(self): session = SessionFactory(meeting__type_id='ietf') doc = DocumentFactory(type_id='slides') - session.sessionpresentation_set.create(document=doc) + session.presentations.create(document=doc) url = urlreverse('ietf.meeting.views.remove_sessionpresentation',kwargs={'num':session.meeting.number,'session_id':session.id,'name':'no-such-doc'}) response = self.client.get(url) @@ -6356,10 +6356,10 @@ class MaterialsTests(TestCase): response = self.client.get(url) self.assertEqual(response.status_code, 200) - self.assertEqual(1,session.sessionpresentation_set.count()) + self.assertEqual(1,session.presentations.count()) response = self.client.post(url,{'remove_session':''}) self.assertEqual(response.status_code, 302) - self.assertEqual(0,session.sessionpresentation_set.count()) + self.assertEqual(0,session.presentations.count()) self.assertEqual(2,doc.docevent_set.count()) def test_propose_session_slides(self): @@ -6448,8 +6448,8 @@ class MaterialsTests(TestCase): submission = SlideSubmission.objects.get(id = submission.id) self.assertEqual(submission.status_id, 'approved') self.assertIsNotNone(submission.doc) - self.assertEqual(session.sessionpresentation_set.count(),1) - self.assertEqual(session.sessionpresentation_set.first().document.title,'different title') + self.assertEqual(session.presentations.count(),1) + self.assertEqual(session.presentations.first().document.title,'different title') r = self.client.get(url) self.assertEqual(r.status_code, 200) self.assertRegex(r.content.decode(), r"These\s+slides\s+have\s+already\s+been\s+approved") @@ -6471,8 +6471,8 @@ class MaterialsTests(TestCase): self.assertTrue(q('#id_apply_to_all')) r = self.client.post(url,dict(title='yet another title',approve='approve')) self.assertEqual(r.status_code,302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - 
self.assertEqual(session2.sessionpresentation_set.count(),0) + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),0) def test_approve_proposed_slides_multisession_apply_all(self): submission = SlideSubmissionFactory(session__meeting__type_id='ietf') @@ -6486,8 +6486,8 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code,200) r = self.client.post(url,dict(title='yet another title',apply_to_all=1,approve='approve')) self.assertEqual(r.status_code,302) - self.assertEqual(session1.sessionpresentation_set.count(),1) - self.assertEqual(session2.sessionpresentation_set.count(),1) + self.assertEqual(session1.presentations.count(),1) + self.assertEqual(session2.presentations.count(),1) def test_submit_and_approve_multiple_versions(self): session = SessionFactory(meeting__type_id='ietf') @@ -6512,7 +6512,7 @@ class MaterialsTests(TestCase): self.assertEqual(r.status_code,302) self.client.logout() - self.assertEqual(session.sessionpresentation_set.first().document.rev,'00') + self.assertEqual(session.presentations.first().document.rev,'00') login_testing_unauthorized(self,newperson.user.username,propose_url) test_file = BytesIO(b'this is not really a slide, but it is another version of it') @@ -6540,9 +6540,9 @@ class MaterialsTests(TestCase): self.assertEqual(SlideSubmission.objects.filter(status__slug = 'pending').count(),0) self.assertEqual(SlideSubmission.objects.filter(status__slug = 'rejected').count(),1) - self.assertEqual(session.sessionpresentation_set.first().document.rev,'01') + self.assertEqual(session.presentations.first().document.rev,'01') path = os.path.join(submission.session.meeting.get_materials_path(),'slides') - filename = os.path.join(path,session.sessionpresentation_set.first().document.name+'-01.txt') + filename = os.path.join(path,session.presentations.first().document.name+'-01.txt') self.assertTrue(os.path.exists(filename)) fd = io.open(filename, 'r') contents = fd.read() @@ -6659,7 +6659,7 @@ class ImportNotesTests(TestCase): self.client.login(username='secretary', password='secretary+password') r = self.client.post(url, {'markdown_text': 'replaced below'}) # create a rev with open( - self.session.sessionpresentation_set.filter(document__type="minutes").first().document.get_file_name(), + self.session.presentations.filter(document__type="minutes").first().document.get_file_name(), 'wb' ) as f: # Replace existing content with an invalid Unicode byte string. 
The particular invalid @@ -6684,7 +6684,7 @@ class ImportNotesTests(TestCase): self.client.login(username='secretary', password='secretary+password') r = self.client.post(url, {'markdown_text': 'original markdown text'}) # create a rev # remove the file uploaded for the first rev - minutes_docs = self.session.sessionpresentation_set.filter(document__type='minutes') + minutes_docs = self.session.presentations.filter(document__type='minutes') self.assertEqual(minutes_docs.count(), 1) Path(minutes_docs.first().document.get_file_name()).unlink() @@ -7819,7 +7819,7 @@ class ProceedingsTests(BaseMeetingTestCase): named_row = named_label.closest('tr') self.assertTrue(named_row) - for material in (sp.document for sp in plain_session.sessionpresentation_set.all()): + for material in (sp.document for sp in plain_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', @@ -7830,7 +7830,7 @@ self.assertTrue(plain_row.find(f'a[href="{expected_url}"]')) self.assertFalse(named_row.find(f'a[href="{expected_url}"]')) - for material in (sp.document for sp in named_session.sessionpresentation_set.all()): + for material in (sp.document for sp in named_session.presentations.all()): if material.type_id == 'draft': expected_url = urlreverse( 'ietf.doc.views_doc.document_main', diff --git a/ietf/meeting/utils.py b/ietf/meeting/utils.py index 416e9c61f..9fb062b02 100644 --- a/ietf/meeting/utils.py +++ b/ietf/meeting/utils.py @@ -32,7 +32,10 @@ from ietf.utils.timezone import date_today def session_time_for_sorting(session, use_meeting_date): - official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first() + if hasattr(session, "_otsa"): + official_timeslot = session._otsa.timeslot + else: + official_timeslot = TimeSlot.objects.filter(sessionassignments__session=session, sessionassignments__schedule__in=[session.meeting.schedule, session.meeting.schedule.base if session.meeting.schedule else None]).first() if official_timeslot: return official_timeslot.time elif use_meeting_date and session.meeting.date: @@ -75,13 +78,14 @@ def group_sessions(sessions): in_progress = [] recent = [] past = [] + for s in sessions: today = date_today(s.meeting.tz()) if s.meeting.date > today: future.append(s) elif s.meeting.end_date() >= today: in_progress.append(s) - elif not s.is_material_submission_cutoff(): + elif not (s.cached_is_cutoff if hasattr(s, "cached_is_cutoff") else s.is_material_submission_cutoff()): recent.append(s) else: past.append(s) @@ -91,6 +95,7 @@ recent.reverse() past.reverse() + return future, in_progress, recent, past def get_upcoming_manageable_sessions(user): @@ -148,7 +153,7 @@ def finalize(meeting): ) ).astimezone(pytz.utc) + datetime.timedelta(days=1) for session in meeting.session_set.all(): - for sp in session.sessionpresentation_set.filter(document__type='draft',rev=None): + for sp in session.presentations.filter(document__type='draft',rev=None): rev_before_end = [e for e in sp.document.docevent_set.filter(newrevisiondocevent__isnull=False).order_by('-time') if e.time <= end_time ] if rev_before_end: sp.rev = rev_before_end[-1].newrevisiondocevent.rev @@ -180,7 +185,7 @@ def sort_accept_tuple(accept): return tup def condition_slide_order(session): - qs =
session.sessionpresentation_set.filter(document__type_id='slides').order_by('order') + qs = session.presentations.filter(document__type_id='slides').order_by('order') order_list = qs.values_list('order',flat=True) if list(order_list) != list(range(1,qs.count()+1)): for num, sp in enumerate(qs, start=1): @@ -563,7 +568,7 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap Returns (Document, [DocEvents]), which should be passed to doc.save_with_history() if the file contents are stored successfully. """ - minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first() + minutes_sp = session.presentations.filter(document__type='minutes').first() if minutes_sp: doc = minutes_sp.document doc.rev = '%02d' % (int(doc.rev)+1) @@ -597,17 +602,17 @@ def save_session_minutes_revision(session, file, ext, request, encoding=None, ap rev = '00', ) doc.states.add(State.objects.get(type_id='minutes',slug='active')) - if session.sessionpresentation_set.filter(document=doc).exists(): - sp = session.sessionpresentation_set.get(document=doc) + if session.presentations.filter(document=doc).exists(): + sp = session.presentations.get(document=doc) sp.rev = doc.rev sp.save() else: - session.sessionpresentation_set.create(document=doc,rev=doc.rev) + session.presentations.create(document=doc,rev=doc.rev) if apply_to_all: for other_session in get_meeting_sessions(session.meeting.number, session.group.acronym): if other_session != session: - other_session.sessionpresentation_set.filter(document__type='minutes').delete() - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev) + other_session.presentations.filter(document__type='minutes').delete() + other_session.presentations.create(document=doc,rev=doc.rev) filename = f'{doc.name}-{doc.rev}{ext}' doc.uploaded_filename = filename e = NewRevisionDocEvent.objects.create( @@ -719,7 +724,7 @@ def new_doc_for_session(type_id, session): rev = '00', ) doc.states.add(State.objects.get(type_id=type_id, slug='active')) - session.sessionpresentation_set.create(document=doc,rev='00') + session.presentations.create(document=doc,rev='00') return doc def write_doc_for_session(session, type_id, filename, contents): @@ -760,7 +765,7 @@ def create_recording(session, url, title=None, user=None): desc='New revision available', time=doc.time) pres = SessionPresentation.objects.create(session=session,document=doc,rev=doc.rev) - session.sessionpresentation_set.add(pres) + session.presentations.add(pres) return doc diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index db4eed0eb..1171f7b0b 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -2157,7 +2157,7 @@ def agenda_json(request, num=None): # time of the meeting assignments = preprocess_assignments_for_agenda(assignments, meeting, extra_prefetches=[ "session__materials__docevent_set", - "session__sessionpresentation_set", + "session__presentations", "timeslot__meeting" ]) for asgn in assignments: @@ -2427,12 +2427,12 @@ def session_details(request, num, acronym): session.cancelled = session.current_status in Session.CANCELED_STATUSES session.status = status_names.get(session.current_status, session.current_status) - session.filtered_artifacts = list(session.sessionpresentation_set.filter(document__type__slug__in=['agenda','minutes','bluesheets'])) - session.filtered_artifacts.sort(key=lambda d:['agenda','minutes','bluesheets'].index(d.document.type.slug)) - session.filtered_slides = 
session.sessionpresentation_set.filter(document__type__slug='slides').order_by('order') - session.filtered_drafts = session.sessionpresentation_set.filter(document__type__slug='draft') - session.filtered_chatlog_and_polls = session.sessionpresentation_set.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug') - # TODO FIXME Deleted materials shouldn't be in the sessionpresentation_set + session.filtered_artifacts = list(session.presentations.filter(document__type__slug__in=['agenda','minutes','narrativeminutes', 'bluesheets'])) + session.filtered_artifacts.sort(key=lambda d:['agenda','minutes', 'narrativeminutes', 'bluesheets'].index(d.document.type.slug)) + session.filtered_slides = session.presentations.filter(document__type__slug='slides').order_by('order') + session.filtered_drafts = session.presentations.filter(document__type__slug='draft') + session.filtered_chatlog_and_polls = session.presentations.filter(document__type__slug__in=('chatlog', 'polls')).order_by('document__type__slug') + # TODO FIXME Deleted materials shouldn't be in the presentations for qs in [session.filtered_artifacts,session.filtered_slides,session.filtered_drafts]: qs = [p for p in qs if p.document.get_state_slug(p.document.type_id)!='deleted'] session.type_counter.update([p.document.type.slug for p in qs]) @@ -2490,7 +2490,7 @@ def add_session_drafts(request, session_id, num): if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): raise Http404 - already_linked = [sp.document for sp in session.sessionpresentation_set.filter(document__type_id='draft')] + already_linked = [sp.document for sp in session.presentations.filter(document__type_id='draft')] session_number = None sessions = get_sessions(session.meeting.number,session.group.acronym) @@ -2501,7 +2501,7 @@ def add_session_drafts(request, session_id, num): form = SessionDraftsForm(request.POST,already_linked=already_linked) if form.is_valid(): for draft in form.cleaned_data['drafts']: - session.sessionpresentation_set.create(document=draft,rev=None) + session.presentations.create(document=draft,rev=None) c = DocEvent(type="added_comment", doc=draft, rev=draft.rev, by=request.user.person) c.desc = "Added to session: %s" % session c.save() @@ -2512,7 +2512,7 @@ def add_session_drafts(request, session_id, num): return render(request, "meeting/add_session_drafts.html", { 'session': session, 'session_number': session_number, - 'already_linked': session.sessionpresentation_set.filter(document__type_id='draft'), + 'already_linked': session.presentations.filter(document__type_id='draft'), 'form': form, }) @@ -2554,7 +2554,7 @@ def upload_session_bluesheets(request, session_id, num): else: form = UploadBlueSheetForm() - bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first() + bluesheet_sp = session.presentations.filter(document__type='bluesheets').first() return render(request, "meeting/upload_session_bluesheets.html", {'session': session, @@ -2565,7 +2565,7 @@ def upload_session_bluesheets(request, session_id, num): def save_bluesheet(request, session, file, encoding='utf-8'): - bluesheet_sp = session.sessionpresentation_set.filter(document__type='bluesheets').first() + bluesheet_sp = session.presentations.filter(document__type='bluesheets').first() _, ext = os.path.splitext(file.name) if bluesheet_sp: @@ -2595,7 +2595,7 @@ def save_bluesheet(request, session, file, encoding='utf-8'): rev = '00', ) 
doc.states.add(State.objects.get(type_id='bluesheets',slug='active')) - session.sessionpresentation_set.create(document=doc,rev='00') + session.presentations.create(document=doc,rev='00') filename = '%s-%s%s'% ( doc.name, doc.rev, ext) doc.uploaded_filename = filename e = NewRevisionDocEvent.objects.create(doc=doc, rev=doc.rev, by=request.user.person, type='new_revision', desc='New revision available: %s'%doc.rev) @@ -2620,7 +2620,7 @@ def upload_session_minutes(request, session_id, num): if len(sessions) > 1: session_number = 1 + sessions.index(session) - minutes_sp = session.sessionpresentation_set.filter(document__type='minutes').first() + minutes_sp = session.presentations.filter(document__type='minutes').first() if request.method == 'POST': form = UploadMinutesForm(show_apply_to_all_checkbox,request.POST,request.FILES) @@ -2712,7 +2712,7 @@ def upload_session_agenda(request, session_id, num): if len(sessions) > 1: session_number = 1 + sessions.index(session) - agenda_sp = session.sessionpresentation_set.filter(document__type='agenda').first() + agenda_sp = session.presentations.filter(document__type='agenda').first() if request.method == 'POST': form = UploadOrEnterAgendaForm(show_apply_to_all_checkbox,request.POST,request.FILES) @@ -2771,17 +2771,17 @@ def upload_session_agenda(request, session_id, num): rev = '00', ) doc.states.add(State.objects.get(type_id='agenda',slug='active')) - if session.sessionpresentation_set.filter(document=doc).exists(): - sp = session.sessionpresentation_set.get(document=doc) + if session.presentations.filter(document=doc).exists(): + sp = session.presentations.get(document=doc) sp.rev = doc.rev sp.save() else: - session.sessionpresentation_set.create(document=doc,rev=doc.rev) + session.presentations.create(document=doc,rev=doc.rev) if apply_to_all: for other_session in sessions: if other_session != session: - other_session.sessionpresentation_set.filter(document__type='agenda').delete() - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev) + other_session.presentations.filter(document__type='agenda').delete() + other_session.presentations.create(document=doc,rev=doc.rev) filename = '%s-%s%s'% ( doc.name, doc.rev, ext) doc.uploaded_filename = filename e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev) @@ -2832,7 +2832,7 @@ def upload_session_slides(request, session_id, num, name=None): slides = Document.objects.filter(name=name).first() if not (slides and slides.type_id=='slides'): raise Http404 - slides_sp = session.sessionpresentation_set.filter(document=slides).first() + slides_sp = session.presentations.filter(document=slides).first() if request.method == 'POST': form = UploadSlidesForm(session, show_apply_to_all_checkbox,request.POST,request.FILES) @@ -2872,18 +2872,18 @@ def upload_session_slides(request, session_id, num, name=None): ) doc.states.add(State.objects.get(type_id='slides',slug='active')) doc.states.add(State.objects.get(type_id='reuse_policy',slug='single')) - if session.sessionpresentation_set.filter(document=doc).exists(): - sp = session.sessionpresentation_set.get(document=doc) + if session.presentations.filter(document=doc).exists(): + sp = session.presentations.get(document=doc) sp.rev = doc.rev sp.save() else: - max_order = session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 - session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1) + 
max_order = session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 + session.presentations.create(document=doc,rev=doc.rev,order=max_order+1) if apply_to_all: for other_session in sessions: - if other_session != session and not other_session.sessionpresentation_set.filter(document=doc).exists(): - max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1) + if other_session != session and not other_session.presentations.filter(document=doc).exists(): + max_order = other_session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 + other_session.presentations.create(document=doc,rev=doc.rev,order=max_order+1) filename = '%s-%s%s'% ( doc.name, doc.rev, ext) doc.uploaded_filename = filename e = NewRevisionDocEvent.objects.create(doc=doc,by=request.user.person,type='new_revision',desc='New revision available: %s'%doc.rev,rev=doc.rev) @@ -2983,7 +2983,7 @@ def remove_sessionpresentation(request, session_id, num, name): if session.is_material_submission_cutoff() and not has_role(request.user, "Secretariat"): permission_denied(request, "The materials cutoff for this session has passed. Contact the secretariat for further action.") if request.method == 'POST': - session.sessionpresentation_set.filter(pk=sp.pk).delete() + session.presentations.filter(pk=sp.pk).delete() c = DocEvent(type="added_comment", doc=sp.document, rev=sp.document.rev, by=request.user.person) c.desc = "Removed from session: %s" % (session) c.save() @@ -3008,7 +3008,7 @@ def ajax_add_slides_to_session(request, session_id, num): order = int(order_str) except (ValueError, TypeError): return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json') - if order < 1 or order > session.sessionpresentation_set.filter(document__type_id='slides').count() + 1 : + if order < 1 or order > session.presentations.filter(document__type_id='slides').count() + 1 : return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied order is not valid' }),content_type='application/json') name = request.POST.get('name', None) @@ -3016,10 +3016,10 @@ def ajax_add_slides_to_session(request, session_id, num): if not doc: return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json') - if not session.sessionpresentation_set.filter(document=doc).exists(): + if not session.presentations.filter(document=doc).exists(): condition_slide_order(session) - session.sessionpresentation_set.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1) - session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=order) + session.presentations.filter(document__type_id='slides', order__gte=order).update(order=F('order')+1) + session.presentations.create(document=doc,rev=doc.rev,order=order) DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Added to session: %s" % session) return HttpResponse(json.dumps({'success':True}), content_type='application/json') @@ -3041,7 +3041,7 @@ def ajax_remove_slides_from_session(request, session_id, num): oldIndex = int(oldIndex_str) except (ValueError, TypeError): return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') - if 
oldIndex < 1 or oldIndex > session.sessionpresentation_set.filter(document__type_id='slides').count() : + if oldIndex < 1 or oldIndex > session.presentations.filter(document__type_id='slides').count() : return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') name = request.POST.get('name', None) @@ -3050,11 +3050,11 @@ def ajax_remove_slides_from_session(request, session_id, num): return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied name is not valid' }),content_type='application/json') condition_slide_order(session) - affected_presentations = session.sessionpresentation_set.filter(document=doc).first() + affected_presentations = session.presentations.filter(document=doc).first() if affected_presentations: if affected_presentations.order == oldIndex: affected_presentations.delete() - session.sessionpresentation_set.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1) + session.presentations.filter(document__type_id='slides', order__gt=oldIndex).update(order=F('order')-1) DocEvent.objects.create(type="added_comment", doc=doc, rev=doc.rev, by=request.user.person, desc="Removed from session: %s" % session) return HttpResponse(json.dumps({'success':True}), content_type='application/json') else: @@ -3074,7 +3074,7 @@ def ajax_reorder_slides_in_session(request, session_id, num): if request.method != 'POST' or not request.POST: return HttpResponse(json.dumps({ 'success' : False, 'error' : 'No data submitted or not POST' }),content_type='application/json') - num_slides_in_session = session.sessionpresentation_set.filter(document__type_id='slides').count() + num_slides_in_session = session.presentations.filter(document__type_id='slides').count() oldIndex_str = request.POST.get('oldIndex', None) try: oldIndex = int(oldIndex_str) @@ -3095,11 +3095,11 @@ def ajax_reorder_slides_in_session(request, session_id, num): return HttpResponse(json.dumps({ 'success' : False, 'error' : 'Supplied index is not valid' }),content_type='application/json') condition_slide_order(session) - sp = session.sessionpresentation_set.get(order=oldIndex) + sp = session.presentations.get(order=oldIndex) if oldIndex < newIndex: - session.sessionpresentation_set.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1) + session.presentations.filter(order__gt=oldIndex, order__lte=newIndex).update(order=F('order')-1) else: - session.sessionpresentation_set.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1) + session.presentations.filter(order__gte=newIndex, order__lt=oldIndex).update(order=F('order')+1) sp.order = newIndex sp.save() @@ -3749,7 +3749,7 @@ def organize_proceedings_sessions(sessions): if s.current_status != 'canceled': all_canceled = False by_name.setdefault(s.name, []) - if s.current_status != 'notmeet' or s.sessionpresentation_set.exists(): + if s.current_status != 'notmeet' or s.presentations.exists(): by_name[s.name].append(s) # for notmeet, only include sessions with materials for sess_name, ss in by_name.items(): session = ss[0] if ss else None @@ -3781,7 +3781,7 @@ def organize_proceedings_sessions(sessions): 'name': sess_name, 'session': session, 'canceled': all_canceled, - 'has_materials': s.sessionpresentation_set.exists(), + 'has_materials': s.presentations.exists(), 'agendas': _format_materials((s, s.agenda()) for s in ss), 'minutes': _format_materials((s, s.minutes()) for s in ss), 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss), 
@@ -4149,7 +4149,7 @@ def api_upload_chatlog(request): session = Session.objects.filter(pk=session_id).first() if not session: return err(400, "Invalid session") - chatlog_sp = session.sessionpresentation_set.filter(document__type='chatlog').first() + chatlog_sp = session.presentations.filter(document__type='chatlog').first() if chatlog_sp: doc = chatlog_sp.document doc.rev = f"{(int(doc.rev)+1):02d}" @@ -4189,7 +4189,7 @@ def api_upload_polls(request): session = Session.objects.filter(pk=session_id).first() if not session: return err(400, "Invalid session") - polls_sp = session.sessionpresentation_set.filter(document__type='polls').first() + polls_sp = session.presentations.filter(document__type='polls').first() if polls_sp: doc = polls_sp.document doc.rev = f"{(int(doc.rev)+1):02d}" @@ -4606,18 +4606,18 @@ def approve_proposed_slides(request, slidesubmission_id, num): ) doc.states.add(State.objects.get(type_id='slides',slug='active')) doc.states.add(State.objects.get(type_id='reuse_policy',slug='single')) - if submission.session.sessionpresentation_set.filter(document=doc).exists(): - sp = submission.session.sessionpresentation_set.get(document=doc) + if submission.session.presentations.filter(document=doc).exists(): + sp = submission.session.presentations.get(document=doc) sp.rev = doc.rev sp.save() else: - max_order = submission.session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 - submission.session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1) + max_order = submission.session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 + submission.session.presentations.create(document=doc,rev=doc.rev,order=max_order+1) if apply_to_all: for other_session in sessions: - if other_session != submission.session and not other_session.sessionpresentation_set.filter(document=doc).exists(): - max_order = other_session.sessionpresentation_set.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 - other_session.sessionpresentation_set.create(document=doc,rev=doc.rev,order=max_order+1) + if other_session != submission.session and not other_session.presentations.filter(document=doc).exists(): + max_order = other_session.presentations.filter(document__type='slides').aggregate(Max('order'))['order__max'] or 0 + other_session.presentations.create(document=doc,rev=doc.rev,order=max_order+1) sub_name, sub_ext = os.path.splitext(submission.filename) target_filename = '%s-%s%s' % (sub_name[:sub_name.rfind('-ss')],doc.rev,sub_ext) doc.uploaded_filename = target_filename diff --git a/ietf/name/migrations/0013_narrativeminutes.py b/ietf/name/migrations/0013_narrativeminutes.py new file mode 100644 index 000000000..89aa75a37 --- /dev/null +++ b/ietf/name/migrations/0013_narrativeminutes.py @@ -0,0 +1,35 @@ +# Copyright The IETF Trust 2023, All Rights Reserved + +from django.db import migrations, models + + +def forward(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocTypeName.objects.create( + slug="narrativeminutes", + name="Narrative Minutes", + desc="", + used=True, + order=0, + prefix="narrative-minutes", + ) + + +def reverse(apps, schema_editor): + DocTypeName = apps.get_model("name", "DocTypeName") + DocTypeName.objects.filter(slug="narrativeminutes").delete() + + +class Migration(migrations.Migration): + dependencies = [ + ("name", "0012_adjust_important_dates"), + ] + + operations = [ + migrations.AlterField( + 
model_name="doctypename", + name="prefix", + field=models.CharField(default="", max_length=32), + ), + migrations.RunPython(forward, reverse), + ] diff --git a/ietf/name/models.py b/ietf/name/models.py index d4d53def8..8c2520a48 100644 --- a/ietf/name/models.py +++ b/ietf/name/models.py @@ -43,7 +43,7 @@ class DocRelationshipName(NameModel): class DocTypeName(NameModel): """Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email, Review, Issue, Wiki, RFC""" - prefix = models.CharField(max_length=16, default="") + prefix = models.CharField(max_length=32, default="") class DocTagName(NameModel): """Waiting for Reference, IANA Coordination, Revised ID Needed, External Party, AD Followup, Point Raised - Writeup Needed, ...""" diff --git a/ietf/settings.py b/ietf/settings.py index 7fa4ebbe6..15302fee6 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -677,7 +677,6 @@ STATUS_CHANGE_PATH = '/a/ietfdata/doc/status-change' AGENDA_PATH = '/a/www/www6s/proceedings/' MEETINGHOST_LOGO_PATH = AGENDA_PATH # put these in the same place as other proceedings files IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/' -IESG_WG_EVALUATION_DIR = "/a/www/www6/iesg/evaluation" # Move drafts to this directory when they expire INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/' # The following directory contains linked copies of all drafts, but don't @@ -856,6 +855,7 @@ MEETING_MATERIALS_SERVE_LOCALLY = True MEETING_DOC_LOCAL_HREFS = { "agenda": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "minutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", + "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "slides": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "chatlog": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", "polls": "/meeting/{meeting.number}/materials/{doc.name}-{doc.rev}", @@ -867,6 +867,7 @@ MEETING_DOC_LOCAL_HREFS = { MEETING_DOC_CDN_HREFS = { "agenda": "https://www.ietf.org/proceedings/{meeting.number}/agenda/{doc.name}-{doc.rev}", "minutes": "https://www.ietf.org/proceedings/{meeting.number}/minutes/{doc.name}-{doc.rev}", + "narrativeminutes": "https://www.ietf.org/proceedings/{meeting.number}/narrative-minutes/{doc.name}-{doc.rev}", "slides": "https://www.ietf.org/proceedings/{meeting.number}/slides/{doc.name}-{doc.rev}", "recording": "{doc.external_url}", "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}", @@ -878,6 +879,7 @@ MEETING_DOC_HREFS = MEETING_DOC_LOCAL_HREFS if MEETING_MATERIALS_SERVE_LOCALLY e MEETING_DOC_OLD_HREFS = { "agenda": "/meeting/{meeting.number}/materials/{doc.name}", "minutes": "/meeting/{meeting.number}/materials/{doc.name}", + "narrativeminutes" : "/meeting/{meeting.number}/materials/{doc.name}", "slides": "/meeting/{meeting.number}/materials/{doc.name}", "recording": "{doc.external_url}", "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}", @@ -887,6 +889,7 @@ MEETING_DOC_OLD_HREFS = { MEETING_DOC_GREFS = { "agenda": "/meeting/{meeting.number}/materials/{doc.name}", "minutes": "/meeting/{meeting.number}/materials/{doc.name}", + "narrativeminutes": "/meeting/{meeting.number}/materials/{doc.name}", "slides": "/meeting/{meeting.number}/materials/{doc.name}", "recording": "{doc.external_url}", "bluesheets": "https://www.ietf.org/proceedings/{meeting.number}/bluesheets/{doc.uploaded_filename}", @@ -900,6 +903,7 @@ MEETING_MATERIALS_DEFAULT_SUBMISSION_CORRECTION_DAYS = 50 
MEETING_VALID_UPLOAD_EXTENSIONS = { 'agenda': ['.txt','.html','.htm', '.md', ], 'minutes': ['.txt','.html','.htm', '.md', '.pdf', ], + 'narrativeminutes': ['.txt','.html','.htm', '.md', '.pdf', ], 'slides': ['.doc','.docx','.pdf','.ppt','.pptx','.txt', ], # Note the removal of .zip 'bluesheets': ['.pdf', '.txt', ], 'procmaterials':['.pdf', ], @@ -909,6 +913,7 @@ MEETING_VALID_UPLOAD_EXTENSIONS = { MEETING_VALID_UPLOAD_MIME_TYPES = { 'agenda': ['text/plain', 'text/html', 'text/markdown', 'text/x-markdown', ], 'minutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ], + 'narrativeminutes': ['text/plain', 'text/html', 'application/pdf', 'text/markdown', 'text/x-markdown', ], 'slides': [], 'bluesheets': ['application/pdf', 'text/plain', ], 'procmaterials':['application/pdf', ], diff --git a/ietf/templates/doc/document_statement.html b/ietf/templates/doc/document_statement.html index 79ea305cd..7b9759c3e 100644 --- a/ietf/templates/doc/document_statement.html +++ b/ietf/templates/doc/document_statement.html @@ -52,7 +52,7 @@ {% if doc.get_state %} - {{ doc.get_state.name }} + {{ doc.get_state.name }} {% else %} No document state {% endif %} diff --git a/ietf/templates/group/meetings-row.html b/ietf/templates/group/meetings-row.html index 65ba435ba..57c727eea 100644 --- a/ietf/templates/group/meetings-row.html +++ b/ietf/templates/group/meetings-row.html @@ -60,6 +60,12 @@ {% if s.minutes %}href="{{ s.minutes.get_absolute_url }}"{% endif %}> Minutes
+ {% if group.acronym == "iesg" %} + + Narrative Minutes + + {% endif %} {% if can_always_edit or can_edit_materials %} diff --git a/ietf/templates/group/meetings.html b/ietf/templates/group/meetings.html index 19f39d6d9..8acc688cc 100644 --- a/ietf/templates/group/meetings.html +++ b/ietf/templates/group/meetings.html @@ -85,7 +85,7 @@ {% endif %} {# The following is a temporary performance workaround, not long term design #} - {% if group.acronym != "iab" %} + {% if group.acronym != "iab" and group.acronym != "iesg" %}

This page shows meetings within the last four years. For earlier meetings, please see the proceedings. @@ -139,6 +139,12 @@ {% if s.minutes %}href="{{ s.minutes.get_absolute_url }}"{% endif %}> Minutes + {% if group.acronym == "iesg" %} + + Narrative Minutes + + {% endif %} {% if can_always_edit or can_edit_materials %} diff --git a/ietf/templates/group/statements.html b/ietf/templates/group/statements.html index 4e0fc6153..035c3bc96 100644 --- a/ietf/templates/group/statements.html +++ b/ietf/templates/group/statements.html @@ -29,7 +29,9 @@ {% for statement in statements %} {{ statement.published|date:"Y-m-d" }} - {{statement.title}} + {{statement.title}} + {% if statement.status == "replaced" %}Replaced{% endif %} + {% endfor %} diff --git a/ietf/utils/markdown.py b/ietf/utils/markdown.py index 63d1c7a70..446d34895 100644 --- a/ietf/utils/markdown.py +++ b/ietf/utils/markdown.py @@ -12,7 +12,7 @@ from markdown.postprocessors import Postprocessor from django.utils.safestring import mark_safe from ietf.doc.templatetags.ietf_filters import urlize_ietf_docs -from ietf.utils.text import bleach_cleaner, bleach_linker +from ietf.utils.text import bleach_cleaner, liberal_bleach_cleaner, bleach_linker class LinkifyExtension(Extension): @@ -49,3 +49,19 @@ def markdown(text): ) ) ) + +def liberal_markdown(text): + return mark_safe( + liberal_bleach_cleaner.clean( + python_markdown.markdown( + text, + extensions=[ + "extra", + "nl2br", + "sane_lists", + "toc", + LinkifyExtension(), + ], + ) + ) + ) diff --git a/ietf/utils/text.py b/ietf/utils/text.py index 48f5538cb..2fba113d0 100644 --- a/ietf/utils/text.py +++ b/ietf/utils/text.py @@ -46,6 +46,15 @@ bleach_cleaner = bleach.sanitizer.Cleaner( tags=tags, attributes=attributes, protocols=protocols, strip=True ) +liberal_tags = copy.copy(tags) +liberal_attributes = copy.copy(attributes) +liberal_tags.update(["img","figure","figcaption"]) +liberal_attributes["img"] = ["src","alt"] + +liberal_bleach_cleaner = bleach.sanitizer.Cleaner( + tags=liberal_tags, attributes=liberal_attributes, protocols=protocols, strip=True +) + validate_url = URLValidator()
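Usage note (illustrative, not part of the patch): the ietf/utils/text.py and ietf/utils/markdown.py hunks above introduce liberal_bleach_cleaner and liberal_markdown, which additionally allow img, figure, and figcaption so rendered pages can carry inline figures. A minimal sketch of the intended difference, assuming both helpers are importable from the module paths shown in the diff and that the existing protocols list permits https:

# Illustrative only; the text and image URL below are invented for the example.
from ietf.utils.markdown import markdown, liberal_markdown

text = "A statement with a figure:\n\n![IESG org chart](https://example.org/chart.png)"

print(markdown(text))          # the default bleach_cleaner drops the <img> tag
print(liberal_markdown(text))  # liberal_bleach_cleaner keeps <img src=... alt=...>

Copying the tag and attribute collections before widening them leaves the default bleach_cleaner untouched, so only callers that opt into liberal_markdown get the larger allow-list.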
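A second sketch, resting on an assumption about how the new settings entries are consumed: upload validation elsewhere in the tree is assumed to index MEETING_VALID_UPLOAD_EXTENSIONS and MEETING_VALID_UPLOAD_MIME_TYPES by the DocTypeName slug, as it does for 'minutes' and 'agenda', which is why both narrative-minutes entries are keyed as "narrativeminutes" rather than by the "narrative-minutes" prefix.

# Illustrative only; assumes callers look these entries up by document type slug.
from django.conf import settings

type_id = "narrativeminutes"
allowed_extensions = settings.MEETING_VALID_UPLOAD_EXTENSIONS[type_id]
allowed_mime_types = settings.MEETING_VALID_UPLOAD_MIME_TYPES[type_id]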