From de494790b643f1875e125c149459f7bbadf245a0 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Tue, 5 Nov 2024 15:03:21 +0000 Subject: [PATCH 01/32] fix: Selenium tests via scroll_and_click (#8150) * fix: selenium tests scroll_and_click * fix: reduce default timeout to 5 seconds * fix: also use scroll_and_click on test_upcoming_materials_modal * fix: remove conditional check on restoring scroll CSS * fix: restore conditional check on restoring scroll CSS * chore: code comments and adding jstest.py to coverage ignore --- ietf/meeting/tests_js.py | 17 +++++++--------- ietf/settings.py | 1 + ietf/utils/jstest.py | 44 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 10 deletions(-) diff --git a/ietf/meeting/tests_js.py b/ietf/meeting/tests_js.py index b15aa70e7..a184a7c6d 100644 --- a/ietf/meeting/tests_js.py +++ b/ietf/meeting/tests_js.py @@ -249,7 +249,9 @@ class EditMeetingScheduleTests(IetfSeleniumTestCase): self.assertTrue(s1_element.is_displayed()) # should still be displayed self.assertIn('hidden-parent', s1_element.get_attribute('class'), 'Session should be hidden when parent disabled') - s1_element.click() # try to select + + self.scroll_and_click((By.CSS_SELECTOR, '#session{}'.format(s1.pk))) + self.assertNotIn('selected', s1_element.get_attribute('class'), 'Session should not be selectable when parent disabled') @@ -299,9 +301,9 @@ class EditMeetingScheduleTests(IetfSeleniumTestCase): 'Session s1 should have moved to second meeting day') # swap timeslot column - put session in a differently-timed timeslot - self.driver.find_element(By.CSS_SELECTOR, + self.scroll_and_click((By.CSS_SELECTOR, '.day .swap-timeslot-col[data-timeslot-pk="{}"]'.format(slot1b.pk) - ).click() # open modal on the second timeslot for room1 + )) # open modal on the second timeslot for room1 self.assertTrue(self.driver.find_element(By.CSS_SELECTOR, "#swap-timeslot-col-modal").is_displayed()) self.driver.find_element(By.CSS_SELECTOR, '#swap-timeslot-col-modal input[name="target_timeslot"][value="{}"]'.format(slot4.pk) @@ -1373,13 +1375,8 @@ class InterimTests(IetfSeleniumTestCase): self.assertFalse(modal_div.is_displayed()) # Click the 'materials' button - open_modal_button = self.wait.until( - expected_conditions.element_to_be_clickable( - (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug) - ), - 'Modal open button not found or not clickable', - ) - open_modal_button.click() + open_modal_button_locator = (By.CSS_SELECTOR, '[data-bs-target="#modal-%s"]' % slug) + self.scroll_and_click(open_modal_button_locator) self.wait.until( expected_conditions.visibility_of(modal_div), 'Modal did not become visible after clicking open button', diff --git a/ietf/settings.py b/ietf/settings.py index a1a7fee10..0c57d87d1 100644 --- a/ietf/settings.py +++ b/ietf/settings.py @@ -598,6 +598,7 @@ TEST_CODE_COVERAGE_EXCLUDE_FILES = [ "ietf/review/import_from_review_tool.py", "ietf/utils/patch.py", "ietf/utils/test_data.py", + "ietf/utils/jstest.py", ] # These are code line regex patterns diff --git a/ietf/utils/jstest.py b/ietf/utils/jstest.py index 157f97912..215d78d65 100644 --- a/ietf/utils/jstest.py +++ b/ietf/utils/jstest.py @@ -12,6 +12,8 @@ try: from selenium import webdriver from selenium.webdriver.firefox.service import Service from selenium.webdriver.firefox.options import Options + from selenium.webdriver.support.ui import WebDriverWait + from selenium.webdriver.support import expected_conditions from selenium.webdriver.common.by import By except ImportError as e: skip_selenium = True 
@@ -87,6 +89,48 @@ class IetfSeleniumTestCase(IetfLiveServerTestCase):
 # actions = ActionChains(self.driver)
 # actions.move_to_element(element).perform()
+ def scroll_and_click(self, element_locator, timeout_seconds=5):
+ """
+ Selenium has restrictions around clicking elements outside the viewport, so
+ this wrapper encapsulates the boilerplate of forcing scrolling and clicking.
+
+ :param element_locator: A two-item tuple of a Selenium locator, e.g. `(By.CSS_SELECTOR, '#something')`
+ """
+
+ # so that we can restore the state of the webpage after clicking
+ original_html_scroll_behaviour_to_restore = self.driver.execute_script('return document.documentElement.style.scrollBehavior')
+ original_html_overflow_to_restore = self.driver.execute_script('return document.documentElement.style.overflow')
+
+ original_body_scroll_behaviour_to_restore = self.driver.execute_script('return document.body.style.scrollBehavior')
+ original_body_overflow_to_restore = self.driver.execute_script('return document.body.style.overflow')
+
+ self.driver.execute_script('document.documentElement.style.scrollBehavior = "auto"')
+ self.driver.execute_script('document.documentElement.style.overflow = "auto"')
+
+ self.driver.execute_script('document.body.style.scrollBehavior = "auto"')
+ self.driver.execute_script('document.body.style.overflow = "auto"')
+
+ element = self.driver.find_element(element_locator[0], element_locator[1])
+ self.scroll_to_element(element)
+
+ # Note that Selenium itself seems to have multiple definitions of 'clickable'.
+ # You might expect that the following wait for the 'element_to_be_clickable'
+ # would confirm that the subsequent .click() would succeed, but it doesn't.
+ # That's why the preceding code attempts to force scrolling to bring the
+ # element into the viewport to allow clicking.
+ WebDriverWait(self.driver, timeout_seconds).until(expected_conditions.element_to_be_clickable(element_locator)) + + element.click() + + if original_html_scroll_behaviour_to_restore: + self.driver.execute_script(f'document.documentElement.style.scrollBehavior = "{original_html_scroll_behaviour_to_restore}"') + if original_html_overflow_to_restore: + self.driver.execute_script(f'document.documentElement.style.overflow = "{original_html_overflow_to_restore}"') + + if original_body_scroll_behaviour_to_restore: + self.driver.execute_script(f'document.body.style.scrollBehavior = "{original_body_scroll_behaviour_to_restore}"') + if original_body_overflow_to_restore: + self.driver.execute_script(f'document.body.style.overflow = "{original_body_overflow_to_restore}"') class presence_of_element_child_by_css_selector: """Wait for presence of a child of a WebElement matching a CSS selector From 7b749f1623212411717de9567581b53b61c46aa3 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Wed, 6 Nov 2024 08:42:54 +0000 Subject: [PATCH 02/32] fix: Agenda mobile goto now (#8160) * fix: agenda mobile goto now * fix: manually close the dropdown --- client/agenda/AgendaMobileBar.vue | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/client/agenda/AgendaMobileBar.vue b/client/agenda/AgendaMobileBar.vue index 78af96e25..63611e21c 100644 --- a/client/agenda/AgendaMobileBar.vue +++ b/client/agenda/AgendaMobileBar.vue @@ -3,11 +3,13 @@ n-dropdown( :options='jumpToDayOptions' size='huge' + :show='isDropdownOpenRef' :show-arrow='true' trigger='click' @select='jumpToDay' + @clickoutside='handleCloseDropdown' ) - button + button(@click='handleOpenDropdown') i.bi.bi-arrow-down-circle button(@click='agendaStore.$patch({ filterShown: true })') i.bi.bi-funnel @@ -28,7 +30,7 @@ - -{% endblock %} -{% block title %}Document Statistics{% endblock %} -{% block content %} - {% origin %} -
-{% endblock %} \ No newline at end of file From b65a37b6e8f6cfe105cd89c61c7046fff9d21523 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Thu, 14 Nov 2024 18:05:38 -0400 Subject: [PATCH 14/32] feat: POST for document search requests (#8206) * refactor: doc search via POST (WIP) Changes the search view to use a POST instead of a GET. Refactors cache key computation to use cleaned data. Still todo: * refactor frontpage view to match * refactor menubar search (?) * refactor stats view that uses SearchForm * revive or drop the "backwards compatibility" branch * feat: convert GET search to POST Still todo: * refactor frontpage view to match * refactor menubar search (?) * refactor stats view that uses SearchForm * fix: revert frontpage changes, search works Still todo: * refactor stats view that uses SearchForm * fix: define vars in all branches * refactor: update stats use of SearchForm * chore: improve message * fix: remove lint * chore: comments re: QuerySetAny * test: test query string search params * style: Black * test: refactor test_search() * test: refactor test_search_became_rfc() * test: use scroll_and_click helper --- ietf/api/serializer.py | 1 + ietf/doc/tests.py | 155 +++++++++++++++------ ietf/doc/tests_js.py | 4 +- ietf/doc/utils.py | 11 +- ietf/doc/views_search.py | 107 +++++++++----- ietf/liaisons/forms.py | 1 + ietf/liaisons/widgets.py | 1 + ietf/templates/doc/search/search_form.html | 2 + 8 files changed, 196 insertions(+), 86 deletions(-) diff --git a/ietf/api/serializer.py b/ietf/api/serializer.py index 27f194c5b..ca34ea649 100644 --- a/ietf/api/serializer.py +++ b/ietf/api/serializer.py @@ -146,6 +146,7 @@ class AdminJsonSerializer(Serializer): field_value = None else: field_value = field + # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1 if isinstance(field_value, QuerySetAny) or isinstance(field_value, list): self._current[name] = dict([ (rel.pk, self.expand_related(rel, name)) for rel in field_value ]) else: diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py index a1f9f8da2..fa655cb88 100644 --- a/ietf/doc/tests.py +++ b/ietf/doc/tests.py @@ -71,96 +71,163 @@ from ietf.doc.utils_search import AD_WORKLOAD class SearchTests(TestCase): - def test_search(self): + def test_search_handles_querystring_parameters(self): + """Search parameters via querystring should not actually search""" + url = urlreverse("ietf.doc.views_search.search") + r = self.client.get(url + "?name=some-document-name&oldDrafts=on") + # Check that we got a valid response and that the warning about query string parameters is shown. 
+ self.assertContains( + r, + "Searching via the URL query string is no longer supported.", + status_code=200, + ) + # Check that the form was filled in correctly (not an exhaustive check, but different from the + # form defaults) + pq = PyQuery(r.content) + self.assertEqual( + pq("form#search_form input#id_name").attr("value"), + "some-document-name", + "The name field should be set in the SearchForm", + ) + self.assertEqual( + pq("form#search_form input#id_olddrafts").attr("checked"), + "checked", + "The old drafts checkbox should be selected in the SearchForm", + ) + self.assertIsNone( + pq("form#search_form input#id_rfcs").attr("checked"), + "The RFCs checkbox should not be selected in the SearchForm", + ) + self.assertIsNone( + pq("form#search_form input#id_activedrafts").attr("checked"), + "The active drafts checkbox should not be selected in the SearchForm", + ) - draft = WgDraftFactory(name='draft-ietf-mars-test',group=GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')),authors=[PersonFactory()],ad=PersonFactory()) + def test_search(self): + draft = WgDraftFactory( + name="draft-ietf-mars-test", + group=GroupFactory(acronym="mars", parent=Group.objects.get(acronym="farfut")), + authors=[PersonFactory()], + ad=PersonFactory(), + ) rfc = WgRfcFactory() draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req")) - old_draft = IndividualDraftFactory(name='draft-foo-mars-test',authors=[PersonFactory()],title="Optimizing Martian Network Topologies") + old_draft = IndividualDraftFactory( + name="draft-foo-mars-test", + authors=[PersonFactory()], + title="Optimizing Martian Network Topologies", + ) old_draft.set_state(State.objects.get(used=True, type="draft", slug="expired")) - - base_url = urlreverse('ietf.doc.views_search.search') - + + url = urlreverse("ietf.doc.views_search.search") + # only show form, no search yet - r = self.client.get(base_url) + r = self.client.get(url) self.assertEqual(r.status_code, 200) - + # no match - r = self.client.get(base_url + "?activedrafts=on&name=thisisnotadocumentname") + r = self.client.post(url, {"activedrafts": "on", "name": "thisisnotadocumentname"}) self.assertEqual(r.status_code, 200) self.assertContains(r, "No documents match") - - r = self.client.get(base_url + "?rfcs=on&name=xyzzy") + + r = self.client.post(url, {"rfcs": "on", "name": "xyzzy"}) self.assertEqual(r.status_code, 200) self.assertContains(r, "No documents match") - - r = self.client.get(base_url + "?olddrafts=on&name=bar") + + r = self.client.post(url, {"olddrafts": "on", "name": "bar"}) self.assertEqual(r.status_code, 200) self.assertContains(r, "No documents match") - - r = self.client.get(base_url + "?olddrafts=on&name=foo") + + r = self.client.post(url, {"olddrafts": "on", "name": "foo"}) self.assertEqual(r.status_code, 200) self.assertContains(r, "draft-foo-mars-test") - - r = self.client.get(base_url + "?olddrafts=on&name=FoO") # mixed case + + r = self.client.post(url, {"olddrafts": "on", "name": "FoO"}) # mixed case self.assertEqual(r.status_code, 200) self.assertContains(r, "draft-foo-mars-test") - + # find by RFC - r = self.client.get(base_url + "?rfcs=on&name=%s" % rfc.name) + r = self.client.post(url, {"rfcs": "on", "name": rfc.name}) self.assertEqual(r.status_code, 200) self.assertContains(r, rfc.title) - + # find by active/inactive - + draft.set_state(State.objects.get(type="draft", slug="active")) - r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.name) + r = self.client.post(url, {"activedrafts": "on", 
"name": draft.name}) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + draft.set_state(State.objects.get(type="draft", slug="expired")) - r = self.client.get(base_url + "?olddrafts=on&name=%s" % draft.name) + r = self.client.post(url, {"olddrafts": "on", "name": draft.name}) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + draft.set_state(State.objects.get(type="draft", slug="active")) - + # find by title - r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.title.split()[0]) + r = self.client.post(url, {"activedrafts": "on", "name": draft.title.split()[0]}) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + # find by author - r = self.client.get(base_url + "?activedrafts=on&by=author&author=%s" % draft.documentauthor_set.first().person.name_parts()[1]) + r = self.client.post( + url, + { + "activedrafts": "on", + "by": "author", + "author": draft.documentauthor_set.first().person.name_parts()[1], + }, + ) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + # find by group - r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym) + r = self.client.post( + url, + {"activedrafts": "on", "by": "group", "group": draft.group.acronym}, + ) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - - r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym.swapcase()) + + r = self.client.post( + url, + {"activedrafts": "on", "by": "group", "group": draft.group.acronym.swapcase()}, + ) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + # find by area - r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id) + r = self.client.post( + url, + {"activedrafts": "on", "by": "area", "area": draft.group.parent_id}, + ) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + # find by area - r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id) + r = self.client.post( + url, + {"activedrafts": "on", "by": "area", "area": draft.group.parent_id}, + ) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + # find by AD - r = self.client.get(base_url + "?activedrafts=on&by=ad&ad=%s" % draft.ad_id) + r = self.client.post(url, {"activedrafts": "on", "by": "ad", "ad": draft.ad_id}) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) - + # find by IESG state - r = self.client.get(base_url + "?activedrafts=on&by=state&state=%s&substate=" % draft.get_state("draft-iesg").pk) + r = self.client.post( + url, + { + "activedrafts": "on", + "by": "state", + "state": draft.get_state("draft-iesg").pk, + "substate": "", + }, + ) self.assertEqual(r.status_code, 200) self.assertContains(r, draft.title) @@ -169,15 +236,15 @@ class SearchTests(TestCase): rfc = WgRfcFactory() draft.set_state(State.objects.get(type="draft", slug="rfc")) draft.relateddocument_set.create(relationship_id="became_rfc", target=rfc) - base_url = urlreverse('ietf.doc.views_search.search') + url = urlreverse("ietf.doc.views_search.search") # find by RFC - r = self.client.get(base_url + f"?rfcs=on&name={rfc.name}") + r = self.client.post(url, {"rfcs": "on", "name": rfc.name}) self.assertEqual(r.status_code, 200) self.assertContains(r, rfc.title) # find by draft - r = self.client.get(base_url + f"?activedrafts=on&rfcs=on&name={draft.name}") + r = self.client.post(url, {"activedrafts": "on", "rfcs": 
"on", "name": draft.name}) self.assertEqual(r.status_code, 200) self.assertContains(r, rfc.title) diff --git a/ietf/doc/tests_js.py b/ietf/doc/tests_js.py index acd74c4a0..9a5aad13b 100644 --- a/ietf/doc/tests_js.py +++ b/ietf/doc/tests_js.py @@ -92,10 +92,8 @@ class EditAuthorsTests(IetfSeleniumTestCase): self.assertEqual(len(author_forms), 1) # get the "add author" button so we can add blank author forms - add_author_button = self.driver.find_element(By.ID, 'add-author-button') for index, auth in enumerate(authors): - self.scroll_to_element(add_author_button) # Can only click if it's in view! - add_author_button.click() # Create a new form. Automatically scrolls to it. + self.scroll_and_click((By.ID, 'add-author-button')) # Create new form. Automatically scrolls to it. author_forms = authors_list.find_elements(By.CLASS_NAME, 'author-panel') authors_added = index + 1 self.assertEqual(len(author_forms), authors_added + 1) # Started with 1 author, hence +1 diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py index 97243a20d..9b2570d8b 100644 --- a/ietf/doc/utils.py +++ b/ietf/doc/utils.py @@ -3,9 +3,7 @@ import datetime -import hashlib import io -import json import math import os import re @@ -348,6 +346,7 @@ def augment_events_with_revision(doc, events): """Take a set of events for doc and add a .rev attribute with the revision they refer to by checking NewRevisionDocEvents.""" + # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1 if isinstance(events, QuerySetAny): qs = events.filter(newrevisiondocevent__isnull=False) else: @@ -1047,12 +1046,8 @@ def make_rev_history(doc): return sorted(history, key=lambda x: x['published']) -def get_search_cache_key(params): - from ietf.doc.views_search import SearchForm - fields = set(SearchForm.base_fields) - set(['sort',]) - kwargs = dict([ (k,v) for (k,v) in list(params.items()) if k in fields ]) - key = "doc:document:search:" + hashlib.sha512(json.dumps(kwargs, sort_keys=True).encode('utf-8')).hexdigest() - return key +def get_search_cache_key(key_fragment): + return f"doc:document:search:{key_fragment}" def build_file_urls(doc: Union[Document, DocHistory]): diff --git a/ietf/doc/views_search.py b/ietf/doc/views_search.py index 4fa3b2560..7b71dd77b 100644 --- a/ietf/doc/views_search.py +++ b/ietf/doc/views_search.py @@ -37,6 +37,8 @@ import re import datetime import copy +import hashlib +import json import operator from collections import defaultdict @@ -44,16 +46,17 @@ from functools import reduce from django import forms from django.conf import settings +from django.contrib import messages from django.core.cache import cache, caches from django.urls import reverse as urlreverse from django.db.models import Q -from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect, QueryDict +from django.http import Http404, HttpResponseBadRequest, HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils import timezone from django.utils.html import strip_tags from django.utils.cache import _generate_cache_key # type: ignore from django.utils.text import slugify - +from django_stubs_ext import QuerySetAny import debug # pyflakes:ignore @@ -145,6 +148,29 @@ class SearchForm(forms.Form): q['irtfstate'] = None return q + def cache_key_fragment(self): + """Hash a bound form to get a value for use in a cache key + + Raises a ValueError if the form is not valid. 
+ """ + def _serialize_value(val): + # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1 + if isinstance(val, QuerySetAny): + return [item.pk for item in val] + else: + return getattr(val, "pk", val) # use pk if present, else value + + if not self.is_valid(): + raise ValueError(f"SearchForm invalid: {self.errors}") + contents = { + field_name: _serialize_value(field_value) + for field_name, field_value in self.cleaned_data.items() + if field_name != "sort" and field_value is not None + } + contents_json = json.dumps(contents, sort_keys=True) + return hashlib.sha512(contents_json.encode("utf-8")).hexdigest() + + def retrieve_search_results(form, all_types=False): """Takes a validated SearchForm and return the results.""" @@ -256,45 +282,64 @@ def retrieve_search_results(form, all_types=False): return docs + def search(request): - if request.GET: - # backwards compatibility - get_params = request.GET.copy() - if 'activeDrafts' in request.GET: - get_params['activedrafts'] = request.GET['activeDrafts'] - if 'oldDrafts' in request.GET: - get_params['olddrafts'] = request.GET['oldDrafts'] - if 'subState' in request.GET: - get_params['substate'] = request.GET['subState'] + """Search for a draft""" + # defaults for results / meta + results = [] + meta = {"by": None, "searching": False} - form = SearchForm(get_params) - if not form.is_valid(): - return HttpResponseBadRequest("form not valid: %s" % form.errors) - - cache_key = get_search_cache_key(get_params) - cached_val = cache.get(cache_key) - if cached_val: - [results, meta] = cached_val - else: - results = retrieve_search_results(form) - results, meta = prepare_document_table(request, results, get_params) - cache.set(cache_key, [results, meta]) # for settings.CACHE_MIDDLEWARE_SECONDS - log(f"Search results computed for {get_params}") - meta['searching'] = True + if request.method == "POST": + form = SearchForm(data=request.POST) + if form.is_valid(): + cache_key = get_search_cache_key(form.cache_key_fragment()) + cached_val = cache.get(cache_key) + if cached_val: + [results, meta] = cached_val + else: + results = retrieve_search_results(form) + results, meta = prepare_document_table( + request, results, form.cleaned_data + ) + cache.set( + cache_key, [results, meta] + ) # for settings.CACHE_MIDDLEWARE_SECONDS + log(f"Search results computed for {form.cleaned_data}") + meta["searching"] = True else: - form = SearchForm() - results = [] - meta = { 'by': None, 'searching': False } - get_params = QueryDict('') + if request.GET: + # backwards compatibility - fill in the form + get_params = request.GET.copy() + if "activeDrafts" in request.GET: + get_params["activedrafts"] = request.GET["activeDrafts"] + if "oldDrafts" in request.GET: + get_params["olddrafts"] = request.GET["oldDrafts"] + if "subState" in request.GET: + get_params["substate"] = request.GET["subState"] + form = SearchForm(data=get_params) + messages.error( + request, + ( + "Searching via the URL query string is no longer supported. " + "The form below has been filled in with the parameters from your request. " + 'To execute your search, please click "Search".' 
+ ), + ) + else: + form = SearchForm() - return render(request, 'doc/search/search.html', { - 'form':form, 'docs':results, 'meta':meta, 'queryargs':get_params.urlencode() }, + return render( + request, + "doc/search/search.html", + context={"form": form, "docs": results, "meta": meta}, ) + def frontpage(request): form = SearchForm() return render(request, 'doc/frontpage.html', {'form':form}) + def search_for_name(request, name): def find_unique(n): exact = Document.objects.filter(name__iexact=n).first() diff --git a/ietf/liaisons/forms.py b/ietf/liaisons/forms.py index 1d91041b2..a75028bf7 100644 --- a/ietf/liaisons/forms.py +++ b/ietf/liaisons/forms.py @@ -203,6 +203,7 @@ class SearchLiaisonForm(forms.Form): class CustomModelMultipleChoiceField(ModelMultipleChoiceField): '''If value is a QuerySet, return it as is (for use in widget.render)''' def prepare_value(self, value): + # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1 if isinstance(value, QuerySetAny): return value if (hasattr(value, '__iter__') and diff --git a/ietf/liaisons/widgets.py b/ietf/liaisons/widgets.py index 74368e83f..3d4f2d13a 100644 --- a/ietf/liaisons/widgets.py +++ b/ietf/liaisons/widgets.py @@ -35,6 +35,7 @@ class ShowAttachmentsWidget(Widget): html = '
' % name html += 'No files attached' html += '
' + # Need QuerySetAny instead of QuerySet until django-stubs 5.0.1 if value and isinstance(value, QuerySetAny): for attachment in value: html += '%s ' % (conditional_escape(attachment.document.get_href()), conditional_escape(attachment.document.title)) diff --git a/ietf/templates/doc/search/search_form.html b/ietf/templates/doc/search/search_form.html index d4f463ec6..6c91894c8 100644 --- a/ietf/templates/doc/search/search_form.html +++ b/ietf/templates/doc/search/search_form.html @@ -4,8 +4,10 @@ {% load widget_tweaks %} {% load ietf_filters %}
+ {% csrf_token %}
{{ form.name|add_class:"form-control"|attr:"placeholder:Document name/title/RFC number"|attr:"aria-label:Document name/title/RFC number" }} From c89646ef8d00d23cd9914f457ee5114900307a3e Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Thu, 14 Nov 2024 22:59:05 +0000 Subject: [PATCH 15/32] feat: Agenda proposed badge (#8164) --- client/agenda/AgendaScheduleList.vue | 28 ++++++++++++++++++++++++++-- ietf/meeting/views.py | 1 + 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/client/agenda/AgendaScheduleList.vue b/client/agenda/AgendaScheduleList.vue index 286999257..0cac7e39e 100644 --- a/client/agenda/AgendaScheduleList.vue +++ b/client/agenda/AgendaScheduleList.vue @@ -83,6 +83,14 @@ template(#trigger) span.badge.is-bof BoF span #[a(href='https://www.ietf.org/how/bofs/', target='_blank') Birds of a Feather] sessions (BoFs) are initial discussions about a particular topic of interest to the IETF community. + n-popover( + v-if='item.isProposed' + trigger='hover' + :width='250' + ) + template(#trigger) + span.badge.is-proposed Proposed + span #[a(href='https://www.ietf.org/process/wgs/', target='_blank') Proposed WGs] are groups in the process of being chartered. If the charter is not approved by the IESG before the IETF meeting, the session may be canceled. .agenda-table-note(v-if='item.note') i.bi.bi-arrow-return-right.me-1 span {{item.note}} @@ -468,6 +476,7 @@ const meetingEvents = computed(() => { // groupParentName: item.groupParent?.name, icon, isBoF: item.isBoF, + isProposed: item.isProposed, isSessionEvent: item.type === 'regular', links, location: item.location, @@ -1012,10 +1021,25 @@ onBeforeUnmount(() => { word-wrap: break-word; } - .badge.is-bof { - background-color: $teal-500; + .badge { margin: 0 8px; + &.is-bof { + background-color: $teal-500; + + @at-root .theme-dark & { + background-color: $teal-700; + } + } + + &.is-proposed { + background-color: $gray-500; + + @at-root .theme-dark & { + background-color: $gray-700; + } + } + @media screen and (max-width: $bs5-break-md) { width: 30px; display: block; diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index 628125776..a195e74ce 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -1786,6 +1786,7 @@ def agenda_extract_schedule (item): "type": item.session.type.slug, "purpose": item.session.purpose.slug, "isBoF": item.session.group_at_the_time().state_id == "bof", + "isProposed": item.session.group_at_the_time().state_id == "proposed", "filterKeywords": item.filter_keywords, "groupAcronym": item.session.group_at_the_time().acronym, "groupName": item.session.group_at_the_time().name, From 8bc51e3cc2db645033974b2805e268a18b098ab8 Mon Sep 17 00:00:00 2001 From: Matthew Holloway Date: Fri, 15 Nov 2024 18:16:34 +0000 Subject: [PATCH 16/32] feat: important dates page: autoid-n to IETFn (#8175) Co-authored-by: Robert Sparks --- ietf/templates/meeting/important-dates.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ietf/templates/meeting/important-dates.html b/ietf/templates/meeting/important-dates.html index 1d41b4a7f..568276f56 100644 --- a/ietf/templates/meeting/important-dates.html +++ b/ietf/templates/meeting/important-dates.html @@ -19,7 +19,7 @@ {% for meeting in meetings %} {% if meeting.show_important_dates %} -

+

IETF {{ meeting.number }}
{{ meeting.date }}, {{ meeting.city }}, {{ meeting.country }} From b07d4dbebc34e6b9abc589899a9290b02791e8aa Mon Sep 17 00:00:00 2001 From: Ryan Cross Date: Fri, 15 Nov 2024 18:18:09 +0000 Subject: [PATCH 17/32] feat: add group leadership list (#8135) * feat: add Group Leadership list * fix: only offer export to staff * fix: fix export button conditional * fix: improve tests. black format --------- Co-authored-by: Robert Sparks --- ietf/group/tests.py | 47 ++++++++++++++++++++++ ietf/group/urls.py | 4 +- ietf/group/views.py | 43 ++++++++++++++++++++ ietf/templates/group/group_leadership.html | 34 ++++++++++++++++ 4 files changed, 127 insertions(+), 1 deletion(-) create mode 100644 ietf/templates/group/group_leadership.html diff --git a/ietf/group/tests.py b/ietf/group/tests.py index 130c68b3f..31f8cc45b 100644 --- a/ietf/group/tests.py +++ b/ietf/group/tests.py @@ -65,6 +65,53 @@ class StreamTests(TestCase): self.assertTrue(Role.objects.filter(name="delegate", group__acronym=stream_acronym, email__address="ad2@ietf.org")) +class GroupLeadershipTests(TestCase): + def test_leadership_wg(self): + # setup various group states + bof_role = RoleFactory( + group__type_id="wg", group__state_id="bof", name_id="chair" + ) + proposed_role = RoleFactory( + group__type_id="wg", group__state_id="proposed", name_id="chair" + ) + active_role = RoleFactory( + group__type_id="wg", group__state_id="active", name_id="chair" + ) + conclude_role = RoleFactory( + group__type_id="wg", group__state_id="conclude", name_id="chair" + ) + url = urlreverse( + "ietf.group.views.group_leadership", kwargs={"group_type": "wg"} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "Group Leadership") + self.assertContains(r, bof_role.person.last_name()) + self.assertContains(r, proposed_role.person.last_name()) + self.assertContains(r, active_role.person.last_name()) + self.assertNotContains(r, conclude_role.person.last_name()) + + def test_leadership_wg_csv(self): + url = urlreverse( + "ietf.group.views.group_leadership_csv", kwargs={"group_type": "wg"} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertEqual(r["Content-Type"], "text/csv") + self.assertContains(r, "Chairman, Sops") + + def test_leadership_rg(self): + role = RoleFactory(group__type_id="rg", name_id="chair") + url = urlreverse( + "ietf.group.views.group_leadership", kwargs={"group_type": "rg"} + ) + r = self.client.get(url) + self.assertEqual(r.status_code, 200) + self.assertContains(r, "Group Leadership") + self.assertContains(r, role.person.last_name()) + self.assertNotContains(r, "Chairman, Sops") + + class GroupStatsTests(TestCase): def setUp(self): super().setUp() diff --git a/ietf/group/urls.py b/ietf/group/urls.py index b2af8d9e2..1824564c4 100644 --- a/ietf/group/urls.py +++ b/ietf/group/urls.py @@ -57,7 +57,9 @@ info_detail_urls = [ group_urls = [ - url(r'^$', views.active_groups), + url(r'^$', views.active_groups), + url(r'^leadership/(?P(wg|rg))/$', views.group_leadership), + url(r'^leadership/(?P(wg|rg))/csv/$', views.group_leadership_csv), url(r'^groupstats.json', views.group_stats_data, None, 'ietf.group.views.group_stats_data'), url(r'^groupmenu.json', views.group_menu_data, None, 'ietf.group.views.group_menu_data'), url(r'^chartering/$', views.chartering_groups), diff --git a/ietf/group/views.py b/ietf/group/views.py index 71986384e..f30569d23 100644 --- a/ietf/group/views.py +++ b/ietf/group/views.py @@ -35,6 +35,7 @@ import copy +import csv import datetime import 
itertools import math @@ -437,6 +438,48 @@ def prepare_group_documents(request, group, clist): return docs, meta, docs_related, meta_related + +def get_leadership(group_type): + people = Person.objects.filter( + role__name__slug="chair", + role__group__type=group_type, + role__group__state__slug__in=("active", "bof", "proposed"), + ).distinct() + leaders = [] + for person in people: + parts = person.name_parts() + groups = [ + r.group.acronym + for r in person.role_set.filter( + name__slug="chair", + group__type=group_type, + group__state__slug__in=("active", "bof", "proposed"), + ) + ] + entry = {"name": "%s, %s" % (parts[3], parts[1]), "groups": ", ".join(groups)} + leaders.append(entry) + return sorted(leaders, key=lambda a: a["name"]) + + +def group_leadership(request, group_type=None): + context = {} + context["leaders"] = get_leadership(group_type) + context["group_type"] = group_type + return render(request, "group/group_leadership.html", context) + + +def group_leadership_csv(request, group_type=None): + leaders = get_leadership(group_type) + response = HttpResponse(content_type="text/csv") + response["Content-Disposition"] = ( + f'attachment; filename="group_leadership_{group_type}.csv"' + ) + writer = csv.writer(response, dialect=csv.excel, delimiter=str(",")) + writer.writerow(["Name", "Groups"]) + for leader in leaders: + writer.writerow([leader["name"], leader["groups"]]) + return response + def group_home(request, acronym, group_type=None): group = get_group_or_404(acronym, group_type) kwargs = dict(acronym=group.acronym) diff --git a/ietf/templates/group/group_leadership.html b/ietf/templates/group/group_leadership.html new file mode 100644 index 000000000..644be3e15 --- /dev/null +++ b/ietf/templates/group/group_leadership.html @@ -0,0 +1,34 @@ +{% extends "base.html" %} +{# Copyright The IETF Trust 2024, All Rights Reserved #} +{% load origin static person_filters ietf_filters %} +{% block pagehead %} + +{% endblock %} +{% block title %}Group Leadership{% endblock %} +{% block content %} + {% origin %} +

Group Leadership ({{ group_type }})

+ {% if user|has_role:"Secretariat" %} + + {% endif %} + + + + + + + + + {% for leader in leaders %} + + + + + {% endfor %} + +
LeaderGroups
{{ leader.name }}{{ leader.groups }}
+{% endblock %} From 6608c9d530b62d10c88d637f949a00fb5fea4526 Mon Sep 17 00:00:00 2001 From: Jennifer Richards Date: Mon, 18 Nov 2024 10:53:50 -0400 Subject: [PATCH 18/32] refactor: eliminate single-use helper (#8226) --- ietf/doc/utils.py | 4 ---- ietf/doc/views_search.py | 4 ++-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/ietf/doc/utils.py b/ietf/doc/utils.py index 9b2570d8b..a30430829 100644 --- a/ietf/doc/utils.py +++ b/ietf/doc/utils.py @@ -1046,10 +1046,6 @@ def make_rev_history(doc): return sorted(history, key=lambda x: x['published']) -def get_search_cache_key(key_fragment): - return f"doc:document:search:{key_fragment}" - - def build_file_urls(doc: Union[Document, DocHistory]): if doc.type_id == "rfc": base_path = os.path.join(settings.RFC_PATH, doc.name + ".") diff --git a/ietf/doc/views_search.py b/ietf/doc/views_search.py index 7b71dd77b..0f1937efb 100644 --- a/ietf/doc/views_search.py +++ b/ietf/doc/views_search.py @@ -65,7 +65,7 @@ from ietf.doc.models import ( Document, DocHistory, State, IESG_BALLOT_ACTIVE_STATES, IESG_STATCHG_CONFLREV_ACTIVE_STATES, IESG_CHARTER_ACTIVE_STATES ) from ietf.doc.fields import select2_id_doc_name_json -from ietf.doc.utils import get_search_cache_key, augment_events_with_revision, needed_ballot_positions +from ietf.doc.utils import augment_events_with_revision, needed_ballot_positions from ietf.group.models import Group from ietf.idindex.index import active_drafts_index_by_group from ietf.name.models import DocTagName, DocTypeName, StreamName @@ -292,7 +292,7 @@ def search(request): if request.method == "POST": form = SearchForm(data=request.POST) if form.is_valid(): - cache_key = get_search_cache_key(form.cache_key_fragment()) + cache_key = f"doc:document:search:{form.cache_key_fragment()}" cached_val = cache.get(cache_key) if cached_val: [results, meta] = cached_val From 48f339194c69d8d62736fbb3d1f54316a4b2f9ff Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 18 Nov 2024 10:41:40 -0600 Subject: [PATCH 19/32] chore: remove unused merge-person-records (#8227) --- ietf/bin/merge-person-records | 65 ----------------------------------- 1 file changed, 65 deletions(-) delete mode 100755 ietf/bin/merge-person-records diff --git a/ietf/bin/merge-person-records b/ietf/bin/merge-person-records deleted file mode 100755 index 155e5755f..000000000 --- a/ietf/bin/merge-person-records +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -*- Python -*- -# -''' -This script merges two Person records into one. It determines which record is the target -based on most current User record (last_login) unless -f (force) option is used to -force SOURCE TARGET as specified on the command line. The order of operations is -important. We must complete all source.save() operations before moving the aliases to -the target, this is to avoid extra "Possible duplicate Person" emails going out, if the -Person is saved without an alias the Person.save() creates another one, which then -conflicts with the moved one. 
-''' - -# Set PYTHONPATH and load environment variables for standalone script ----------------- -import os, sys -basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path = [ basedir ] + sys.path -os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" - -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - -import django -django.setup() -# ------------------------------------------------------------------------------------- - -import argparse -from django.contrib import admin -from ietf.person.models import Person -from ietf.person.utils import (merge_persons, send_merge_notification, handle_users, - determine_merge_order) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("source_id",type=int) - parser.add_argument("target_id",type=int) - parser.add_argument('-f','--force', help='force merge order',action='store_true') - parser.add_argument('-v','--verbose', help='verbose output',action='store_true') - args = parser.parse_args() - - source = Person.objects.get(pk=args.source_id) - target = Person.objects.get(pk=args.target_id) - - # set merge order - if not args.force: - source,target = determine_merge_order(source,target) - - # confirm - print "Merging person {}({}) to {}({})".format(source.ascii,source.pk,target.ascii,target.pk) - print handle_users(source,target,check_only=True) - response = raw_input('Ok to continue y/n? ') - if response.lower() != 'y': - sys.exit() - - # perform merge - success, changes = merge_persons(source, target, verbose=args.verbose) - - # send email notification - send_merge_notification(target,changes) - -if __name__ == "__main__": - main() From 1d7be8c98ce6f6fe420440d01bbe70a843253e6d Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 18 Nov 2024 12:48:47 -0600 Subject: [PATCH 20/32] chore: remove dead activate_this branches (#8228) --- ietf/bin/expire-ids | 4 ---- ietf/bin/iana-review-email | 4 ---- ietf/bin/notify-expirations | 4 ---- 3 files changed, 12 deletions(-) diff --git a/ietf/bin/expire-ids b/ietf/bin/expire-ids index 98ee8d75f..bb0b94ee6 100755 --- a/ietf/bin/expire-ids +++ b/ietf/bin/expire-ids @@ -13,10 +13,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) import django diff --git a/ietf/bin/iana-review-email b/ietf/bin/iana-review-email index 5c7a7183b..27aee4015 100755 --- a/ietf/bin/iana-review-email +++ b/ietf/bin/iana-review-email @@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) import django diff --git a/ietf/bin/notify-expirations b/ietf/bin/notify-expirations index 0270c1376..fc2fd86a3 100755 --- a/ietf/bin/notify-expirations +++ 
b/ietf/bin/notify-expirations @@ -7,10 +7,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - import django django.setup() From fd816c4f415accd6ebb693b7e0033a2d8cf99487 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Mon, 18 Nov 2024 13:57:33 -0600 Subject: [PATCH 21/32] chore: remove unused activate_this code branch (#8230) --- ietf/bin/expire-submissions | 4 ---- 1 file changed, 4 deletions(-) diff --git a/ietf/bin/expire-submissions b/ietf/bin/expire-submissions index 22db38322..113a53ddf 100755 --- a/ietf/bin/expire-submissions +++ b/ietf/bin/expire-submissions @@ -8,10 +8,6 @@ basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) sys.path = [ basedir ] + sys.path os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings" -virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py") -if os.path.exists(virtualenv_activation): - execfile(virtualenv_activation, dict(__file__=virtualenv_activation)) - syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER) import django From 952bc90ee0e3a6a4f773dd1a6125dd7da9f97979 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 19 Nov 2024 08:07:49 -0600 Subject: [PATCH 22/32] fix: follow replaces when building list for diff control (#8234) --- ietf/doc/tests.py | 14 ++++++++++++++ ietf/doc/views_doc.py | 8 ++++---- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/ietf/doc/tests.py b/ietf/doc/tests.py index fa655cb88..abac10a5e 100644 --- a/ietf/doc/tests.py +++ b/ietf/doc/tests.py @@ -5,6 +5,7 @@ import os import datetime import io +from django.http import HttpRequest import lxml import bibtexparser import mock @@ -52,6 +53,7 @@ from ietf.doc.utils import ( generate_idnits2_rfcs_obsoleted, get_doc_email_aliases, ) +from ietf.doc.views_doc import get_diff_revisions from ietf.group.models import Group, Role from ietf.group.factories import GroupFactory, RoleFactory from ietf.ipr.factories import HolderIprDisclosureFactory @@ -1954,6 +1956,18 @@ class DocTestCase(TestCase): self.assertContains(r, notes.text) self.assertContains(r, rfced_note.text) + def test_diff_revisions(self): + ind_doc = IndividualDraftFactory(create_revisions=range(2)) + wg_doc = WgDraftFactory( + relations=[("replaces", ind_doc)], create_revisions=range(2) + ) + diff_revisions = get_diff_revisions(HttpRequest(), wg_doc.name, wg_doc) + self.assertEqual(len(diff_revisions), 4) + self.assertEqual( + [t[3] for t in diff_revisions], + [f"{n}-{v:02d}" for n in [wg_doc.name, ind_doc.name] for v in [1, 0]], + ) + def test_history(self): doc = IndividualDraftFactory() diff --git a/ietf/doc/views_doc.py b/ietf/doc/views_doc.py index 50c60aefc..9f7cf12bc 100644 --- a/ietf/doc/views_doc.py +++ b/ietf/doc/views_doc.py @@ -1133,10 +1133,10 @@ def get_diff_revisions(request, name, doc): diff_documents = [doc] diff_documents.extend( - Document.objects.filter( - relateddocument__source=doc, - relateddocument__relationship="replaces", - ) + [ + r.target + for r in RelatedDocument.objects.filter(source=doc, relationship="replaces") + ] ) if doc.came_from_draft(): diff_documents.append(doc.came_from_draft()) From 71f52bc964b2bd1e19116f1ed601871d4666a280 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 19 
Nov 2024 15:01:20 -0500 Subject: [PATCH 23/32] fix: move future meetings button in group meetings page to the left (#8239) --- ietf/templates/group/meetings.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ietf/templates/group/meetings.html b/ietf/templates/group/meetings.html index 8acc688cc..deaea1e67 100644 --- a/ietf/templates/group/meetings.html +++ b/ietf/templates/group/meetings.html @@ -40,7 +40,7 @@ {% if future %}

Future Meetings - @@ -174,4 +174,4 @@ ietf_timezone.initialize('local'); }); -{% endblock %} \ No newline at end of file +{% endblock %} From bdf503a73ba42ea1fe514ecfd8c00ad12638b120 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Tue, 19 Nov 2024 15:06:56 -0600 Subject: [PATCH 24/32] fix: show meetecho recordings for groups with more than one session (#8238) --- ietf/meeting/views.py | 1 + ietf/templates/meeting/group_proceedings.html | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/ietf/meeting/views.py b/ietf/meeting/views.py index a195e74ce..f386f8932 100644 --- a/ietf/meeting/views.py +++ b/ietf/meeting/views.py @@ -4105,6 +4105,7 @@ def organize_proceedings_sessions(sessions): 'minutes': _format_materials((s, s.minutes()) for s in ss), 'bluesheets': _format_materials((s, s.bluesheets()) for s in ss), 'recordings': _format_materials((s, s.recordings()) for s in ss), + 'meetecho_recordings': _format_materials((s, [s.session_recording_url()]) for s in ss), 'chatlogs': _format_materials((s, s.chatlogs()) for s in ss), 'slides': _format_materials((s, s.slides()) for s in ss), 'drafts': _format_materials((s, s.drafts()) for s in ss), diff --git a/ietf/templates/meeting/group_proceedings.html b/ietf/templates/meeting/group_proceedings.html index 95d6dc5da..496fa9226 100644 --- a/ietf/templates/meeting/group_proceedings.html +++ b/ietf/templates/meeting/group_proceedings.html @@ -88,12 +88,13 @@
{% endfor %} - {% if entry.session.video_stream_url %} - - Session recording - -
- {% endif %} + {% for rec in entry.meetecho_recordings %} + + Session recording + {% if rec.time %}{{ rec.time|date:"D G:i"}}{% endif %} + +
+ {% endfor%} {# slides #} From 70b8c856774c6916550455dab20ddd8162b42b18 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 19 Nov 2024 16:13:54 -0500 Subject: [PATCH 25/32] ci: Update tests.yml workflow --- .github/workflows/tests.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index bb767513c..470811987 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -73,9 +73,10 @@ jobs: path: geckodriver.log - name: Upload Coverage Results to Codecov - uses: codecov/codecov-action@v4.6.0 + uses: codecov/codecov-action@v5 with: files: coverage.xml + token: ${{ secrets.CODECOV_TOKEN }} - name: Convert Coverage Results if: ${{ always() }} From 0027db42ebf4a86487a9a4b61c8be7f5dfa8faa8 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 19 Nov 2024 16:15:38 -0500 Subject: [PATCH 26/32] ci: Update tests.yml workflow --- .github/workflows/tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 470811987..8eb6adc95 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -75,6 +75,7 @@ jobs: - name: Upload Coverage Results to Codecov uses: codecov/codecov-action@v5 with: + disable_search: true files: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} From 15b95da9a11abc52ea9c51fa8e5c5f4767b10b1d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Nov 2024 16:24:03 -0500 Subject: [PATCH 27/32] chore(deps): bump appleboy/ssh-action from 1.1.0 to 1.2.0 (#8221) Bumps [appleboy/ssh-action](https://github.com/appleboy/ssh-action) from 1.1.0 to 1.2.0. - [Release notes](https://github.com/appleboy/ssh-action/releases) - [Changelog](https://github.com/appleboy/ssh-action/blob/master/.goreleaser.yaml) - [Commits](https://github.com/appleboy/ssh-action/compare/25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1...7eaf76671a0d7eec5d98ee897acda4f968735a17) --- updated-dependencies: - dependency-name: appleboy/ssh-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/tests-az.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests-az.yml b/.github/workflows/tests-az.yml index 3f828430a..6d53a121a 100644 --- a/.github/workflows/tests-az.yml +++ b/.github/workflows/tests-az.yml @@ -38,7 +38,7 @@ jobs: ssh-keyscan -t rsa $vminfo >> ~/.ssh/known_hosts - name: Remote SSH into VM - uses: appleboy/ssh-action@25ce8cbbcb08177468c7ff7ec5cbfa236f9341e1 + uses: appleboy/ssh-action@7eaf76671a0d7eec5d98ee897acda4f968735a17 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: From c3e0d28cad7dc8d7974665880f47bcb320f3a18c Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Tue, 19 Nov 2024 20:31:02 -0500 Subject: [PATCH 28/32] ci: Update build.yml workflow --- .github/workflows/build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 67f24ba76..0dd605860 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -137,6 +137,7 @@ jobs: uses: ./.github/workflows/tests.yml if: ${{ github.event.inputs.skiptests == 'false' || github.ref_name == 'release' }} needs: [prepare] + secrets: inherit with: ignoreLowerCoverage: ${{ github.event.inputs.ignoreLowerCoverage == 'true' }} skipSelenium: true From 51a76c603d193f4b09b76e6d19b5bc2b64125690 Mon Sep 17 00:00:00 2001 From: Robert Sparks Date: Wed, 20 Nov 2024 11:40:23 -0600 Subject: [PATCH 29/32] fix: removew assumption about what pks tests milestones get (#8243) --- ietf/group/tests_info.py | 61 +++++++++++++++++++++------------------- 1 file changed, 32 insertions(+), 29 deletions(-) diff --git a/ietf/group/tests_info.py b/ietf/group/tests_info.py index 95fe9aa2e..35c8b2b0b 100644 --- a/ietf/group/tests_info.py +++ b/ietf/group/tests_info.py @@ -1431,7 +1431,7 @@ class MilestoneTests(TestCase): RoleFactory(group=group,name_id='chair',person=PersonFactory(user__username='marschairman')) draft = WgDraftFactory(group=group) - m1 = GroupMilestone.objects.create(id=1, + m1 = GroupMilestone.objects.create( group=group, desc="Test 1", due=date_today(DEADLINE_TZINFO), @@ -1439,7 +1439,7 @@ class MilestoneTests(TestCase): state_id="active") m1.docs.set([draft]) - m2 = GroupMilestone.objects.create(id=2, + m2 = GroupMilestone.objects.create( group=group, desc="Test 2", due=date_today(DEADLINE_TZINFO), @@ -1580,13 +1580,14 @@ class MilestoneTests(TestCase): events_before = group.groupevent_set.count() # add - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': m1.desc, - 'm1-due': m1.due.strftime("%B %Y"), - 'm1-resolved': m1.resolved, - 'm1-docs': pklist(m1.docs), - 'm1-review': "accept", + mstr = f"m{m1.id}" + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': m1.desc, + f'{mstr}-due': m1.due.strftime("%B %Y"), + f'{mstr}-resolved': m1.resolved, + f'{mstr}-docs': pklist(m1.docs), + f'{mstr}-review': "accept", 'action': "save", }) self.assertEqual(r.status_code, 302) @@ -1606,13 +1607,14 @@ class MilestoneTests(TestCase): events_before = group.groupevent_set.count() # delete - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': m1.desc, - 'm1-due': m1.due.strftime("%B %Y"), - 'm1-resolved': "", - 'm1-docs': pklist(m1.docs), - 'm1-delete': "checked", + mstr = f"m{m1.id}" + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': m1.desc, + f'{mstr}-due': m1.due.strftime("%B 
%Y"), + f'{mstr}-resolved': "", + f'{mstr}-docs': pklist(m1.docs), + f'{mstr}-delete': "checked", 'action': "save", }) self.assertEqual(r.status_code, 302) @@ -1635,13 +1637,14 @@ class MilestoneTests(TestCase): due = self.last_day_of_month(date_today(DEADLINE_TZINFO) + datetime.timedelta(days=365)) + mstr = f"m{m1.id}" # faulty post - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': "", # no description - 'm1-due': due.strftime("%B %Y"), - 'm1-resolved': "", - 'm1-docs': doc_pks, + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': "", # no description + f'{mstr}-due': due.strftime("%B %Y"), + f'{mstr}-resolved': "", + f'{mstr}-docs': doc_pks, 'action': "save", }) self.assertEqual(r.status_code, 200) @@ -1653,13 +1656,13 @@ class MilestoneTests(TestCase): # edit mailbox_before = len(outbox) - r = self.client.post(url, { 'prefix': "m1", - 'm1-id': m1.id, - 'm1-desc': "Test 2 - changed", - 'm1-due': due.strftime("%B %Y"), - 'm1-resolved': "Done", - 'm1-resolved_checkbox': "checked", - 'm1-docs': doc_pks, + r = self.client.post(url, { 'prefix': mstr, + f'{mstr}-id': m1.id, + f'{mstr}-desc': "Test 2 - changed", + f'{mstr}-due': due.strftime("%B %Y"), + f'{mstr}-resolved': "Done", + f'{mstr}-resolved_checkbox': "checked", + f'{mstr}-docs': doc_pks, 'action': "save", }) self.assertEqual(r.status_code, 302) From 7c025c9f2bbff6c229ec7b7befcdd2534bb84900 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Nov 2024 12:59:39 -0500 Subject: [PATCH 30/32] chore(deps): bump nanoid in /dev/deploy-to-container in the npm group (#8098) Bumps the npm group in /dev/deploy-to-container with 1 update: [nanoid](https://github.com/ai/nanoid). Updates `nanoid` from 5.0.7 to 5.0.8 - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/5.0.7...5.0.8) --- updated-dependencies: - dependency-name: nanoid dependency-type: direct:production update-type: version-update:semver-patch dependency-group: npm ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- dev/deploy-to-container/package-lock.json | 14 +++++++------- dev/deploy-to-container/package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/dev/deploy-to-container/package-lock.json b/dev/deploy-to-container/package-lock.json index f636a45f0..d787f6aa9 100644 --- a/dev/deploy-to-container/package-lock.json +++ b/dev/deploy-to-container/package-lock.json @@ -8,7 +8,7 @@ "dependencies": { "dockerode": "^4.0.2", "fs-extra": "^11.2.0", - "nanoid": "5.0.7", + "nanoid": "5.0.8", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", @@ -546,9 +546,9 @@ "optional": true }, "node_modules/nanoid": { - "version": "5.0.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz", - "integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ==", + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz", + "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==", "funding": [ { "type": "github", @@ -1346,9 +1346,9 @@ "optional": true }, "nanoid": { - "version": "5.0.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz", - "integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ==" + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.8.tgz", + "integrity": "sha512-TcJPw+9RV9dibz1hHUzlLVy8N4X9TnwirAjrU08Juo6BNKggzVfP2ZJ/3ZUSq15Xl5i85i+Z89XBO90pB2PghQ==" }, "nanoid-dictionary": { "version": "5.0.0-beta.1", diff --git a/dev/deploy-to-container/package.json b/dev/deploy-to-container/package.json index be77fa5cc..530d1f3b8 100644 --- a/dev/deploy-to-container/package.json +++ b/dev/deploy-to-container/package.json @@ -4,7 +4,7 @@ "dependencies": { "dockerode": "^4.0.2", "fs-extra": "^11.2.0", - "nanoid": "5.0.7", + "nanoid": "5.0.8", "nanoid-dictionary": "5.0.0-beta.1", "slugify": "1.6.6", "tar": "^7.4.3", From 3c42d98c5d759045fbb0535fd187969600eb4b22 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 20 Nov 2024 13:09:28 -0500 Subject: [PATCH 31/32] ci: Update build.yml workflow --- .github/workflows/build.yml | 60 ++++++++++++++----------------------- 1 file changed, 22 insertions(+), 38 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0dd605860..f1044223d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -350,50 +350,34 @@ jobs: steps: - name: Notify on Slack (Success) if: ${{ !contains(join(needs.*.result, ','), 'failure') }} - uses: slackapi/slack-github-action@v1.27.0 + uses: slackapi/slack-github-action@v2 with: - channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + token: ${{ secrets.SLACK_GH_BOT }} + method: chat.postMessage payload: | - { - "text": "Datatracker Build by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>", - "attachments": [ - { - "color": "28a745", - "fields": [ - { - "title": "Status", - "short": true, - "value": "Completed" - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }} + channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + text: "Datatracker Build by ${{ github.triggering_actor }}" + attachments: + - color: "28a745" + fields: + - title: "Status" + short: true + value: "Completed" - name: Notify on Slack (Failure) if: ${{ contains(join(needs.*.result, 
','), 'failure') }} - uses: slackapi/slack-github-action@v1.27.0 + uses: slackapi/slack-github-action@v2 with: - channel-id: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + token: ${{ secrets.SLACK_GH_BOT }} + method: chat.postMessage payload: | - { - "text": "Datatracker Build by ${{ github.triggering_actor }} - <@${{ secrets.SLACK_UID_RJSPARKS }}>", - "attachments": [ - { - "color": "a82929", - "fields": [ - { - "title": "Status", - "short": true, - "value": "Failed" - } - ] - } - ] - } - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_GH_BOT }} + channel: ${{ secrets.SLACK_GH_BUILDS_CHANNEL_ID }} + text: "Datatracker Build by ${{ github.triggering_actor }}" + attachments: + - color: "a82929" + fields: + - title: "Status" + short: true + value: "Failed" # ----------------------------------------------------------------- # SANDBOX From bdf753e05250129475c3310fa60cc31240028932 Mon Sep 17 00:00:00 2001 From: Nicolas Giard Date: Wed, 20 Nov 2024 17:48:00 -0500 Subject: [PATCH 32/32] chore: set gitattributes for normalizing line endings (#8245) * chore: add proper .gitattributes * chore: normalize file endings * chore: fix .gitattributes * chore: normalize file endings (2) --- .gitattributes | 282 ++++++++++++- dev/build/settings_local_collectstatics.py | 16 +- dev/tests/docker-compose.debug.yml | 64 +-- docker/configs/pgadmin-servers.json | 44 +- ietf/dbtemplate/fixtures/nomcom_templates.xml | 380 +++++++++--------- ietf/secr/static/js/dynamic_inlines.js | 142 +++---- k8s/README.md | 8 +- k8s/kustomization.yaml | 32 +- k8s/memcached.yaml | 160 ++++---- k8s/secrets.yaml | 164 ++++---- 10 files changed, 785 insertions(+), 507 deletions(-) diff --git a/.gitattributes b/.gitattributes index 937c0eb37..62f4aae43 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,280 @@ -/.yarn/releases/** binary -/.yarn/plugins/** binary +# Auto detect text files and perform LF normalization +* text=auto + +# --------------------------------------------------- +# Python Projects +# --------------------------------------------------- + +# Source files +*.pxd text diff=python +*.py text diff=python +*.py3 text diff=python +*.pyw text diff=python +*.pyx text diff=python +*.pyz text diff=python +*.pyi text diff=python + +# Binary files +*.db binary +*.p binary +*.pkl binary +*.pickle binary +*.pyc binary export-ignore +*.pyo binary export-ignore +*.pyd binary + +# Jupyter notebook +*.ipynb text eol=lf + +# --------------------------------------------------- +# Web Projects +# --------------------------------------------------- + +# Source code +*.bash text eol=lf +*.bat text eol=crlf +*.cmd text eol=crlf +*.coffee text +*.css text diff=css +*.htm text diff=html +*.html text diff=html +*.inc text +*.ini text +*.js text +*.mjs text +*.cjs text +*.json text +*.jsx text +*.less text +*.ls text +*.map text -diff +*.od text +*.onlydata text +*.php text diff=php +*.pl text +*.ps1 text eol=crlf +*.py text diff=python +*.rb text diff=ruby +*.sass text +*.scm text +*.scss text diff=css +*.sh text eol=lf +.husky/* text eol=lf +*.sql text +*.styl text +*.tag text +*.ts text +*.tsx text +*.xml text +*.xhtml text diff=html + +# Docker +Dockerfile text + +# Documentation +*.ipynb text eol=lf +*.markdown text diff=markdown +*.md text diff=markdown +*.mdwn text diff=markdown +*.mdown text diff=markdown +*.mkd text diff=markdown +*.mkdn text diff=markdown +*.mdtxt text +*.mdtext text +*.txt text +AUTHORS text +CHANGELOG text +CHANGES text +CONTRIBUTING text +COPYING text +copyright text +*COPYRIGHT* text +INSTALL text +license text 
+LICENSE text +NEWS text +readme text +*README* text +TODO text + +# Templates +*.dot text +*.ejs text +*.erb text +*.haml text +*.handlebars text +*.hbs text +*.hbt text +*.jade text +*.latte text +*.mustache text +*.njk text +*.phtml text +*.pug text +*.svelte text +*.tmpl text +*.tpl text +*.twig text +*.vue text + +# Configs +*.cnf text +*.conf text +*.config text +.editorconfig text +.env text +.gitattributes text +.gitconfig text +.htaccess text +*.lock text -diff +package.json text eol=lf +package-lock.json text eol=lf -diff +pnpm-lock.yaml text eol=lf -diff +.prettierrc text +yarn.lock text -diff +*.toml text +*.yaml text +*.yml text +browserslist text +Makefile text +makefile text +# Fixes syntax highlighting on GitHub to allow comments +tsconfig.json linguist-language=JSON-with-Comments + +# Heroku +Procfile text + +# Graphics +*.ai binary +*.bmp binary +*.eps binary +*.gif binary +*.gifv binary +*.ico binary +*.jng binary +*.jp2 binary +*.jpg binary +*.jpeg binary +*.jpx binary +*.jxr binary +*.pdf binary +*.png binary +*.psb binary +*.psd binary +*.svg text +*.svgz binary +*.tif binary +*.tiff binary +*.wbmp binary +*.webp binary + +# Audio +*.kar binary +*.m4a binary +*.mid binary +*.midi binary +*.mp3 binary +*.ogg binary +*.ra binary + +# Video +*.3gpp binary +*.3gp binary +*.as binary +*.asf binary +*.asx binary +*.avi binary +*.fla binary +*.flv binary +*.m4v binary +*.mng binary +*.mov binary +*.mp4 binary +*.mpeg binary +*.mpg binary +*.ogv binary +*.swc binary +*.swf binary +*.webm binary + +# Archives +*.7z binary +*.gz binary +*.jar binary +*.rar binary +*.tar binary +*.zip binary + +# Fonts +*.ttf binary +*.eot binary +*.otf binary +*.woff binary +*.woff2 binary + +# Executables +*.exe binary +*.pyc binary +# Prevents massive diffs caused by vendored, minified files +**/.yarn/releases/** binary +**/.yarn/plugins/** binary + +# RC files (like .babelrc or .eslintrc) +*.*rc text + +# Ignore files (like .npmignore or .gitignore) +*.*ignore text + +# Prevents massive diffs from built files +dist/* binary + +# --------------------------------------------------- +# Common +# --------------------------------------------------- + +# Documents +*.bibtex text diff=bibtex +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain +*.md text diff=markdown +*.mdx text diff=markdown +*.tex text diff=tex +*.adoc text +*.textile text +*.mustache text +*.csv text eol=crlf +*.tab text +*.tsv text +*.txt text +*.sql text +*.epub diff=astextplain + +# Text files where line endings should be preserved +*.patch -text + +# --------------------------------------------------- +# Vzic specific +# --------------------------------------------------- + +*.pl text diff=perl +*.pm text diff=perl + +# C/C++ +*.c text diff=cpp +*.cc text diff=cpp +*.cxx text diff=cpp +*.cpp text diff=cpp +*.cpi text diff=cpp +*.c++ text diff=cpp +*.hpp text diff=cpp +*.h text diff=cpp +*.h++ text diff=cpp +*.hh text diff=cpp \ No newline at end of file diff --git a/dev/build/settings_local_collectstatics.py b/dev/build/settings_local_collectstatics.py index 016b3f934..ccb4b3397 100644 --- a/dev/build/settings_local_collectstatics.py +++ b/dev/build/settings_local_collectstatics.py @@ -1,8 +1,8 @@ -# Copyright The IETF Trust 2007-2019, All Rights Reserved -# -*- coding: utf-8 -*- - -from ietf import __version__ -from ietf.settings import 
* # pyflakes:ignore - -STATIC_URL = "https://static.ietf.org/dt/%s/"%__version__ -STATIC_ROOT = os.path.abspath(BASE_DIR + "/../static/") +# Copyright The IETF Trust 2007-2019, All Rights Reserved +# -*- coding: utf-8 -*- + +from ietf import __version__ +from ietf.settings import * # pyflakes:ignore + +STATIC_URL = "https://static.ietf.org/dt/%s/"%__version__ +STATIC_ROOT = os.path.abspath(BASE_DIR + "/../static/") diff --git a/dev/tests/docker-compose.debug.yml b/dev/tests/docker-compose.debug.yml index 6362ef072..8d939e0ea 100644 --- a/dev/tests/docker-compose.debug.yml +++ b/dev/tests/docker-compose.debug.yml @@ -1,32 +1,32 @@ -# This docker-compose replicates the test workflow happening on GitHub during a PR / build check. -# To be used from the debug.sh script. - -version: '3.8' - -services: - app: - image: ghcr.io/ietf-tools/datatracker-app-base:latest - command: -f /dev/null - working_dir: /__w/datatracker/datatracker - entrypoint: tail - hostname: app - volumes: - - /var/run/docker.sock:/var/run/docker.sock - environment: - CI: 'true' - GITHUB_ACTIONS: 'true' - HOME: /github/home - deploy: - resources: - limits: - cpus: '2' - memory: '7GB' - - db: - image: ghcr.io/ietf-tools/datatracker-db:latest - restart: unless-stopped - volumes: - - postgresdb-data:/var/lib/postgresql/data - -volumes: - postgresdb-data: +# This docker-compose replicates the test workflow happening on GitHub during a PR / build check. +# To be used from the debug.sh script. + +version: '3.8' + +services: + app: + image: ghcr.io/ietf-tools/datatracker-app-base:latest + command: -f /dev/null + working_dir: /__w/datatracker/datatracker + entrypoint: tail + hostname: app + volumes: + - /var/run/docker.sock:/var/run/docker.sock + environment: + CI: 'true' + GITHUB_ACTIONS: 'true' + HOME: /github/home + deploy: + resources: + limits: + cpus: '2' + memory: '7GB' + + db: + image: ghcr.io/ietf-tools/datatracker-db:latest + restart: unless-stopped + volumes: + - postgresdb-data:/var/lib/postgresql/data + +volumes: + postgresdb-data: diff --git a/docker/configs/pgadmin-servers.json b/docker/configs/pgadmin-servers.json index 8b1c181d1..b4458af92 100644 --- a/docker/configs/pgadmin-servers.json +++ b/docker/configs/pgadmin-servers.json @@ -1,22 +1,22 @@ -{ - "Servers": { - "1": { - "Name": "Local Dev", - "Group": "Servers", - "Host": "db", - "Port": 5432, - "MaintenanceDB": "postgres", - "Username": "django", - "UseSSHTunnel": 0, - "TunnelPort": "22", - "TunnelAuthentication": 0, - "KerberosAuthentication": false, - "ConnectionParameters": { - "sslmode": "prefer", - "connect_timeout": 10, - "sslcert": "/.postgresql/postgresql.crt", - "sslkey": "/.postgresql/postgresql.key" - } - } - } -} +{ + "Servers": { + "1": { + "Name": "Local Dev", + "Group": "Servers", + "Host": "db", + "Port": 5432, + "MaintenanceDB": "postgres", + "Username": "django", + "UseSSHTunnel": 0, + "TunnelPort": "22", + "TunnelAuthentication": 0, + "KerberosAuthentication": false, + "ConnectionParameters": { + "sslmode": "prefer", + "connect_timeout": 10, + "sslcert": "/.postgresql/postgresql.crt", + "sslkey": "/.postgresql/postgresql.key" + } + } + } +} diff --git a/ietf/dbtemplate/fixtures/nomcom_templates.xml b/ietf/dbtemplate/fixtures/nomcom_templates.xml index abf0cb58f..e7065b84c 100644 --- a/ietf/dbtemplate/fixtures/nomcom_templates.xml +++ b/ietf/dbtemplate/fixtures/nomcom_templates.xml @@ -1,190 +1,190 @@ - - - - /nomcom/defaults/home.rst - Home page of group - - rst - Home page -========= - -This is the home page of the nomcom group. 
- - - - /nomcom/defaults/email/inexistent_person.txt - Email sent to chair of nomcom and secretariat when Email and Person are created if some of them don't exist - $email: Newly created email -$fullname: Fullname of the new person -$person_id: Id of the new Person object -$group: Name of the group - plain - Hello, - -A new person with name $fullname and email $email has been created. The new Person object has the following id: '$person_id'. - -Please, check if there is some more action nedeed. - - - - /nomcom/defaults/email/new_nominee.txt - Email sent to nominees when they are nominated - $nominee: Full name of the nominee -$position: Name of the position -$domain: Server domain -$accept_url: Url hash to accept nominations -$decline_url: Url hash to decline nominations - plain - Hi, - -You have been nominated for the position of $position. - -The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. - -You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url - -If you accept, you will need to fill out a questionnaire. You will receive the questionnaire by email. - -Best regards, - - - - - /nomcom/defaults/email/new_nomination.txt - Email sent to nominators and secretariat when the nominators make the nominations - $nominator: Full name of the nominator -$nominator_email: Email of the nominator -$nominee: Full name of the nominee -$nominee_email: Email of the nominee -$position: Nomination position - plain - A new nomination have been received. - -Nominator: $nominator ($nominator_email) -Nominee: $nominee ($nominee_email) -Position: $position - - - - /nomcom/defaults/position/questionnaire.txt - Questionnaire sent to the nomine - $position: Position - plain - Enter here the questionnaire for the position $position: - -Questionnaire - - - - /nomcom/defaults/position/requirements - Position requirements - $position: Position - rst - These are the requirements for the position $position: - -Requirements. - - - - /nomcom/defaults/position/header_questionnaire.txt - Header of the email that contains the questionnaire sent to the nomine - $nominee: Full name of the nomine -$position: Position - plain - Hi $nominee, this is the questionnaire for the position $position: - - - - - - /nomcom/defaults/email/nomination_accept_reminder.txt - Email sent to nominees asking them to accept (or decline) the nominations. - $positions: Nomination positions - plain - Hi, - -You have been nominated for the position of $position. - -The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. - -You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url - -If you accept, you will need to fill out a questionnaire. - -Best regards, - - - - /nomcom/defaults/email/nomination_receipt.txt - Email sent to nominator to get a confirmation mail containing feedback in cleartext - $nominee: Full name of the nominee -$position: Name of the position -$domain: Server domain -$accept_url: Url hash to accept nominations -$decline_url: Url hash to decline nominations - plain - Hi, - -Your nomination of $nominee for the position of -$position has been received and registered. 
- -The following comments have also been registered: - --------------------------------------------------------------------------- -$comments --------------------------------------------------------------------------- - -Thank you, - - - - /nomcom/defaults/email/feedback_receipt.txt - Email sent to feedback author to get a confirmation mail containing feedback in cleartext - $nominee: Full name of the nominee -$position: Nomination position -$comments: Comments on this candidate - plain - Hi, - -Your input regarding $about has been received and registered. - -The following comments have been registered: - --------------------------------------------------------------------------- -$comments --------------------------------------------------------------------------- - -Thank you, - - - - /nomcom/defaults/email/questionnaire_reminder.txt - Email sent to nominees reminding them to complete a questionnaire - $positions: Nomination positions - plain - -Thank you for accepting your nomination for the position of $position. - -Please remember to complete and return the questionnaire for this position at your earliest opportunity. -The questionnaire is repeated below for your convenience. - --------- - - - - /nomcom/defaults/topic/description - Description of Topic - $topic: Topic' - rst - This is a description of the topic "$topic" - -Describe the topic and add any information/instructions for the responder here. - - - - /nomcom/defaults/iesg_requirements - Generic IESG Requirements - rst - Generic IESG Requirements Yo! - - + + + + /nomcom/defaults/home.rst + Home page of group + + rst + Home page +========= + +This is the home page of the nomcom group. + + + + /nomcom/defaults/email/inexistent_person.txt + Email sent to chair of nomcom and secretariat when Email and Person are created if some of them don't exist + $email: Newly created email +$fullname: Fullname of the new person +$person_id: Id of the new Person object +$group: Name of the group + plain + Hello, + +A new person with name $fullname and email $email has been created. The new Person object has the following id: '$person_id'. + +Please, check if there is some more action nedeed. + + + + /nomcom/defaults/email/new_nominee.txt + Email sent to nominees when they are nominated + $nominee: Full name of the nominee +$position: Name of the position +$domain: Server domain +$accept_url: Url hash to accept nominations +$decline_url: Url hash to decline nominations + plain + Hi, + +You have been nominated for the position of $position. + +The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. + +You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url + +If you accept, you will need to fill out a questionnaire. You will receive the questionnaire by email. + +Best regards, + + + + + /nomcom/defaults/email/new_nomination.txt + Email sent to nominators and secretariat when the nominators make the nominations + $nominator: Full name of the nominator +$nominator_email: Email of the nominator +$nominee: Full name of the nominee +$nominee_email: Email of the nominee +$position: Nomination position + plain + A new nomination have been received. 
+ +Nominator: $nominator ($nominator_email) +Nominee: $nominee ($nominee_email) +Position: $position + + + + /nomcom/defaults/position/questionnaire.txt + Questionnaire sent to the nomine + $position: Position + plain + Enter here the questionnaire for the position $position: + +Questionnaire + + + + /nomcom/defaults/position/requirements + Position requirements + $position: Position + rst + These are the requirements for the position $position: + +Requirements. + + + + /nomcom/defaults/position/header_questionnaire.txt + Header of the email that contains the questionnaire sent to the nomine + $nominee: Full name of the nomine +$position: Position + plain + Hi $nominee, this is the questionnaire for the position $position: + + + + + + /nomcom/defaults/email/nomination_accept_reminder.txt + Email sent to nominees asking them to accept (or decline) the nominations. + $positions: Nomination positions + plain + Hi, + +You have been nominated for the position of $position. + +The NomCom would appreciate receiving an indication of whether or not you accept this nomination to stand for consideration as a candidate for this position. + +You can accept the nomination via web going to the following link https://$domain$accept_url or decline the nomination going the following link https://$domain$decline_url + +If you accept, you will need to fill out a questionnaire. + +Best regards, + + + + /nomcom/defaults/email/nomination_receipt.txt + Email sent to nominator to get a confirmation mail containing feedback in cleartext + $nominee: Full name of the nominee +$position: Name of the position +$domain: Server domain +$accept_url: Url hash to accept nominations +$decline_url: Url hash to decline nominations + plain + Hi, + +Your nomination of $nominee for the position of +$position has been received and registered. + +The following comments have also been registered: + +-------------------------------------------------------------------------- +$comments +-------------------------------------------------------------------------- + +Thank you, + + + + /nomcom/defaults/email/feedback_receipt.txt + Email sent to feedback author to get a confirmation mail containing feedback in cleartext + $nominee: Full name of the nominee +$position: Nomination position +$comments: Comments on this candidate + plain + Hi, + +Your input regarding $about has been received and registered. + +The following comments have been registered: + +-------------------------------------------------------------------------- +$comments +-------------------------------------------------------------------------- + +Thank you, + + + + /nomcom/defaults/email/questionnaire_reminder.txt + Email sent to nominees reminding them to complete a questionnaire + $positions: Nomination positions + plain + +Thank you for accepting your nomination for the position of $position. + +Please remember to complete and return the questionnaire for this position at your earliest opportunity. +The questionnaire is repeated below for your convenience. + +-------- + + + + /nomcom/defaults/topic/description + Description of Topic + $topic: Topic' + rst + This is a description of the topic "$topic" + +Describe the topic and add any information/instructions for the responder here. + + + + /nomcom/defaults/iesg_requirements + Generic IESG Requirements + rst + Generic IESG Requirements Yo! 
+ + diff --git a/ietf/secr/static/js/dynamic_inlines.js b/ietf/secr/static/js/dynamic_inlines.js index a1abc2d0e..d0753a3a6 100644 --- a/ietf/secr/static/js/dynamic_inlines.js +++ b/ietf/secr/static/js/dynamic_inlines.js @@ -1,71 +1,71 @@ -/* Following functions based off code written by Arne Brodowski -http://www.arnebrodowski.de/blog/507-Add-and-remove-Django-Admin-Inlines-with-JavaScript.html - -2012-02-01 customized for new Rolodex. Email formset doesn't have an id field, rather a "address" -field as primary key. Also for some reason the "active" boolean field doesn't get saved properly -if the checkbox input has an empty "value" argument. -*/ -import $ from 'jquery'; - -function increment_form_ids(el, to, name) { - var from = to-1 - $(':input', $(el)).each(function(i,e){ - var old_name = $(e).attr('name') - var old_id = $(e).attr('id') - $(e).attr('name', old_name.replace(from, to)) - $(e).attr('id', old_id.replace(from, to)) - if ($(e).attr('type') != 'checkbox') { - $(e).val('') - } - }) -} - -function add_inline_form(name) { - if (name=="email") { - var first = $('#id_'+name+'-0-address').parents('.inline-related') - } - else { - var first = $('#id_'+name+'-0-id').parents('.inline-related') - } - // check to see if this is a stacked or tabular inline - if (first.hasClass("tabular")) { - var field_table = first.parent().find('table > tbody') - const children = field_table.children('tr.dynamic-inline') - var count = children.length - const last = $(children[count-1]) - var copy = last.clone(true) - copy.removeClass("row1 row2") - copy.find("input[name$='address']").attr("readonly", false) - copy.addClass("row"+((count % 2) ? 2 : 1)) - copy.insertAfter(last) - increment_form_ids($(copy), count, name) - } - else { - var last = $(first).parent().children('.last-related') - var copy = $(last).clone(true) - var count = $(first).parent().children('.inline-related').length - $(last).removeClass('last-related') - var header = $('h3', copy) - header.html(header.html().replace("#"+count, "#"+(count+1))) - $(last).after(copy) - increment_form_ids($(first).parents('.inline-group').children('.last-related'), count, name) - } - $('input#id_'+name+'-TOTAL_FORMS').val(count+1) - return false; -} - -// Add all the "Add Another" links to the bottom of each inline group -$(function() { - var html_template = '' - $('.inline-group').each(function(i) { - //prefix is in the name of the input fields before the "-" - var prefix = $("input[type='hidden'][name!='csrfmiddlewaretoken']", this).attr("name").split("-")[0]; - $(this).append(html_template.replace("{{prefix}}", prefix)); - $('#addlink-' + prefix).on('click', () => add_inline_form(prefix)); - }) -}) +/* Following functions based off code written by Arne Brodowski +http://www.arnebrodowski.de/blog/507-Add-and-remove-Django-Admin-Inlines-with-JavaScript.html + +2012-02-01 customized for new Rolodex. Email formset doesn't have an id field, rather a "address" +field as primary key. Also for some reason the "active" boolean field doesn't get saved properly +if the checkbox input has an empty "value" argument. 
+*/ +import $ from 'jquery'; + +function increment_form_ids(el, to, name) { + var from = to-1 + $(':input', $(el)).each(function(i,e){ + var old_name = $(e).attr('name') + var old_id = $(e).attr('id') + $(e).attr('name', old_name.replace(from, to)) + $(e).attr('id', old_id.replace(from, to)) + if ($(e).attr('type') != 'checkbox') { + $(e).val('') + } + }) +} + +function add_inline_form(name) { + if (name=="email") { + var first = $('#id_'+name+'-0-address').parents('.inline-related') + } + else { + var first = $('#id_'+name+'-0-id').parents('.inline-related') + } + // check to see if this is a stacked or tabular inline + if (first.hasClass("tabular")) { + var field_table = first.parent().find('table > tbody') + const children = field_table.children('tr.dynamic-inline') + var count = children.length + const last = $(children[count-1]) + var copy = last.clone(true) + copy.removeClass("row1 row2") + copy.find("input[name$='address']").attr("readonly", false) + copy.addClass("row"+((count % 2) ? 2 : 1)) + copy.insertAfter(last) + increment_form_ids($(copy), count, name) + } + else { + var last = $(first).parent().children('.last-related') + var copy = $(last).clone(true) + var count = $(first).parent().children('.inline-related').length + $(last).removeClass('last-related') + var header = $('h3', copy) + header.html(header.html().replace("#"+count, "#"+(count+1))) + $(last).after(copy) + increment_form_ids($(first).parents('.inline-group').children('.last-related'), count, name) + } + $('input#id_'+name+'-TOTAL_FORMS').val(count+1) + return false; +} + +// Add all the "Add Another" links to the bottom of each inline group +$(function() { + var html_template = '' + $('.inline-group').each(function(i) { + //prefix is in the name of the input fields before the "-" + var prefix = $("input[type='hidden'][name!='csrfmiddlewaretoken']", this).attr("name").split("-")[0]; + $(this).append(html_template.replace("{{prefix}}", prefix)); + $('#addlink-' + prefix).on('click', () => add_inline_form(prefix)); + }) +}) diff --git a/k8s/README.md b/k8s/README.md index 73b597867..3966101ab 100644 --- a/k8s/README.md +++ b/k8s/README.md @@ -1,5 +1,5 @@ -# Kustomize deployment - -## Run locally - +# Kustomize deployment + +## Run locally + The `secrets.yaml` file is provided as a reference only and must be referenced manually in the `kustomization.yaml` file. 
\ No newline at end of file diff --git a/k8s/kustomization.yaml b/k8s/kustomization.yaml index 4b79f0075..2b623da2b 100644 --- a/k8s/kustomization.yaml +++ b/k8s/kustomization.yaml @@ -1,16 +1,16 @@ -namespace: datatracker -namePrefix: dt- -configMapGenerator: - - name: files-cfgmap - files: - - nginx-logging.conf - - nginx-auth.conf - - nginx-datatracker.conf - - settings_local.py -resources: - - auth.yaml - - beat.yaml - - celery.yaml - - datatracker.yaml - - memcached.yaml - - rabbitmq.yaml +namespace: datatracker +namePrefix: dt- +configMapGenerator: + - name: files-cfgmap + files: + - nginx-logging.conf + - nginx-auth.conf + - nginx-datatracker.conf + - settings_local.py +resources: + - auth.yaml + - beat.yaml + - celery.yaml + - datatracker.yaml + - memcached.yaml + - rabbitmq.yaml diff --git a/k8s/memcached.yaml b/k8s/memcached.yaml index 4b362c88c..5a4c9f0ae 100644 --- a/k8s/memcached.yaml +++ b/k8s/memcached.yaml @@ -1,80 +1,80 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: memcached -spec: - replicas: 1 - revisionHistoryLimit: 2 - selector: - matchLabels: - app: memcached - template: - metadata: - labels: - app: memcached - spec: - securityContext: - runAsNonRoot: true - containers: - # ----------------------------------------------------- - # Memcached - # ----------------------------------------------------- - - image: "memcached:1.6-alpine" - imagePullPolicy: IfNotPresent - args: ["-m", "1024"] - name: memcached - ports: - - name: memcached - containerPort: 11211 - protocol: TCP - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: true - # memcached image sets up uid/gid 11211 - runAsUser: 11211 - runAsGroup: 11211 - # ----------------------------------------------------- - # Memcached Exporter for Prometheus - # ----------------------------------------------------- - - image: "quay.io/prometheus/memcached-exporter:v0.14.3" - imagePullPolicy: IfNotPresent - name: memcached-exporter - ports: - - name: metrics - containerPort: 9150 - protocol: TCP - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: true - runAsUser: 65534 # nobody - runAsGroup: 65534 # nobody - dnsPolicy: ClusterFirst - restartPolicy: Always - terminationGracePeriodSeconds: 30 ---- -apiVersion: v1 -kind: Service -metadata: - name: memcached - annotations: - k8s.grafana.com/scrape: "true" # this is not a bool - k8s.grafana.com/metrics.portName: "metrics" -spec: - type: ClusterIP - ports: - - port: 11211 - targetPort: memcached - protocol: TCP - name: memcached - - port: 9150 - targetPort: metrics - protocol: TCP - name: metrics - selector: - app: memcached +apiVersion: apps/v1 +kind: Deployment +metadata: + name: memcached +spec: + replicas: 1 + revisionHistoryLimit: 2 + selector: + matchLabels: + app: memcached + template: + metadata: + labels: + app: memcached + spec: + securityContext: + runAsNonRoot: true + containers: + # ----------------------------------------------------- + # Memcached + # ----------------------------------------------------- + - image: "memcached:1.6-alpine" + imagePullPolicy: IfNotPresent + args: ["-m", "1024"] + name: memcached + ports: + - name: memcached + containerPort: 11211 + protocol: TCP + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + # memcached image sets up uid/gid 11211 + runAsUser: 11211 + runAsGroup: 11211 + # ----------------------------------------------------- + # 
Memcached Exporter for Prometheus + # ----------------------------------------------------- + - image: "quay.io/prometheus/memcached-exporter:v0.14.3" + imagePullPolicy: IfNotPresent + name: memcached-exporter + ports: + - name: metrics + containerPort: 9150 + protocol: TCP + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsUser: 65534 # nobody + runAsGroup: 65534 # nobody + dnsPolicy: ClusterFirst + restartPolicy: Always + terminationGracePeriodSeconds: 30 +--- +apiVersion: v1 +kind: Service +metadata: + name: memcached + annotations: + k8s.grafana.com/scrape: "true" # this is not a bool + k8s.grafana.com/metrics.portName: "metrics" +spec: + type: ClusterIP + ports: + - port: 11211 + targetPort: memcached + protocol: TCP + name: memcached + - port: 9150 + targetPort: metrics + protocol: TCP + name: metrics + selector: + app: memcached diff --git a/k8s/secrets.yaml b/k8s/secrets.yaml index 4e76a86a5..ba90af9c2 100644 --- a/k8s/secrets.yaml +++ b/k8s/secrets.yaml @@ -1,83 +1,83 @@ -apiVersion: v1 -kind: Secret -metadata: - name: secrets-env -type: Opaque -stringData: - DATATRACKER_SERVER_MODE: "development" # development for staging, production for production - DATATRACKER_ADMINS: |- - Robert Sparks - Ryan Cross - Kesara Rathnayake - Jennifer Richards - Nicolas Giard - DATATRACKER_ALLOWED_HOSTS: ".ietf.org" # newline-separated list also allowed - # DATATRACKER_DATATRACKER_DEBUG: "false" - - # DB access details - needs to be filled in - # DATATRACKER_DB_HOST: "db" - # DATATRACKER_DB_PORT: "5432" - # DATATRACKER_DB_NAME: "datatracker" - # DATATRACKER_DB_USER: "django" # secret - # DATATRACKER_DB_PASS: "RkTkDPFnKpko" # secret - # DATATRACKER_DB_CONN_MAX_AGE: "0" # connection per request if not set, no limit if set to "None" - # DATATRACKER_DB_CONN_HEALTH_CHECKS: "false" - - DATATRACKER_DJANGO_SECRET_KEY: "PDwXboUq!=hPjnrtG2=ge#N$Dwy+wn@uivrugwpic8mxyPfHk" # secret - - # Set this to point testing / staging at the production statics server until we - # sort that out - # DATATRACKER_STATIC_URL: "https://static.ietf.org/dt/12.10.0/" - - # DATATRACKER_EMAIL_DEBUG: "true" - - # Outgoing email details - # DATATRACKER_EMAIL_HOST: "localhost" # defaults to localhost - # DATATRACKER_EMAIL_PORT: "2025" # defaults to 2025 - - # The value here is the default from settings.py (i.e., not actually secret) - DATATRACKER_NOMCOM_APP_SECRET_B64: "m9pzMezVoFNJfsvU9XSZxGnXnwup6P5ZgCQeEnROOoQ=" # secret - - DATATRACKER_IANA_SYNC_PASSWORD: "this-is-the-iana-sync-password" # secret - DATATRACKER_RFC_EDITOR_SYNC_PASSWORD: "this-is-the-rfc-editor-sync-password" # secret - DATATRACKER_YOUTUBE_API_KEY: "this-is-the-youtube-api-key" # secret - DATATRACKER_GITHUB_BACKUP_API_KEY: "this-is-the-github-backup-api-key" # secret - - # API key configuration - DATATRACKER_API_KEY_TYPE: "ES265" - # secret - value here is the default from settings.py (i.e., not actually secret) - DATATRACKER_API_PUBLIC_KEY_PEM_B64: |- - Ci0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tCk1Ga3dFd1lIS29aSXpqMENBUVlJS - 29aSXpqMERBUWNEUWdBRXFWb2pzYW9mREpTY3VNSk4rdHNodW15Tk01TUUKZ2Fyel - ZQcWtWb3ZtRjZ5RTdJSi9kdjRGY1YrUUtDdEovck9TOGUzNlk4WkFFVll1dWtoZXM - weVoxdz09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo= - # secret - value here is the default from settings.py (i.e., not actually secret) - DATATRACKER_API_PRIVATE_KEY_PEM_B64: |- - Ci0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLQpNSUdIQWdFQU1CTUdCeXFHU000O - UFnRUdDQ3FHU000OUF3RUhCRzB3YXdJQkFRUWdvSTZMSmtvcEtxOFhySGk5ClFxR1 - 
F2RTRBODNURllqcUx6KzhnVUxZZWNzcWhSQU5DQUFTcFdpT3hxaDhNbEp5NHdrMzY - yeUc2Ykkwemt3U0IKcXZOVStxUldpK1lYcklUc2duOTIvZ1Z4WDVBb0swbitzNUx4 - N2ZwanhrQVJWaTY2U0Y2elRKblgKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo= - - #DATATRACKER_REGISTRATION_API_KEY: "some-key" # secret" - - # DATATRACKER_MEETECHO_API_BASE: "https://meetings.conf.meetecho.com/api/v1/" - DATATRACKER_MEETECHO_CLIENT_ID: "this-is-the-meetecho-client-id" # secret - DATATRACKER_MEETECHO_CLIENT_SECRET: "this-is-the-meetecho-client-secret" # secret - - # DATATRACKER_MATOMO_SITE_ID: "7" # must be present to enable Matomo - # DATATRACKER_MATOMO_DOMAIN_PATH: "analytics.ietf.org" - - CELERY_PASSWORD: "this-is-a-secret" # secret - - # Only one of these may be set - # DATATRACKER_APP_API_TOKENS_JSON_B64: "e30K" # secret - # DATATRACKER_APP_API_TOKENS_JSON: "{}" # secret - - # use this to override default - one entry per line - # DATATRACKER_CSRF_TRUSTED_ORIGINS: |- - # https://datatracker.staging.ietf.org - - # Scout configuration - DATATRACKER_SCOUT_KEY: "this-is-the-scout-key" +apiVersion: v1 +kind: Secret +metadata: + name: secrets-env +type: Opaque +stringData: + DATATRACKER_SERVER_MODE: "development" # development for staging, production for production + DATATRACKER_ADMINS: |- + Robert Sparks + Ryan Cross + Kesara Rathnayake + Jennifer Richards + Nicolas Giard + DATATRACKER_ALLOWED_HOSTS: ".ietf.org" # newline-separated list also allowed + # DATATRACKER_DATATRACKER_DEBUG: "false" + + # DB access details - needs to be filled in + # DATATRACKER_DB_HOST: "db" + # DATATRACKER_DB_PORT: "5432" + # DATATRACKER_DB_NAME: "datatracker" + # DATATRACKER_DB_USER: "django" # secret + # DATATRACKER_DB_PASS: "RkTkDPFnKpko" # secret + # DATATRACKER_DB_CONN_MAX_AGE: "0" # connection per request if not set, no limit if set to "None" + # DATATRACKER_DB_CONN_HEALTH_CHECKS: "false" + + DATATRACKER_DJANGO_SECRET_KEY: "PDwXboUq!=hPjnrtG2=ge#N$Dwy+wn@uivrugwpic8mxyPfHk" # secret + + # Set this to point testing / staging at the production statics server until we + # sort that out + # DATATRACKER_STATIC_URL: "https://static.ietf.org/dt/12.10.0/" + + # DATATRACKER_EMAIL_DEBUG: "true" + + # Outgoing email details + # DATATRACKER_EMAIL_HOST: "localhost" # defaults to localhost + # DATATRACKER_EMAIL_PORT: "2025" # defaults to 2025 + + # The value here is the default from settings.py (i.e., not actually secret) + DATATRACKER_NOMCOM_APP_SECRET_B64: "m9pzMezVoFNJfsvU9XSZxGnXnwup6P5ZgCQeEnROOoQ=" # secret + + DATATRACKER_IANA_SYNC_PASSWORD: "this-is-the-iana-sync-password" # secret + DATATRACKER_RFC_EDITOR_SYNC_PASSWORD: "this-is-the-rfc-editor-sync-password" # secret + DATATRACKER_YOUTUBE_API_KEY: "this-is-the-youtube-api-key" # secret + DATATRACKER_GITHUB_BACKUP_API_KEY: "this-is-the-github-backup-api-key" # secret + + # API key configuration + DATATRACKER_API_KEY_TYPE: "ES265" + # secret - value here is the default from settings.py (i.e., not actually secret) + DATATRACKER_API_PUBLIC_KEY_PEM_B64: |- + Ci0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tCk1Ga3dFd1lIS29aSXpqMENBUVlJS + 29aSXpqMERBUWNEUWdBRXFWb2pzYW9mREpTY3VNSk4rdHNodW15Tk01TUUKZ2Fyel + ZQcWtWb3ZtRjZ5RTdJSi9kdjRGY1YrUUtDdEovck9TOGUzNlk4WkFFVll1dWtoZXM + weVoxdz09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo= + # secret - value here is the default from settings.py (i.e., not actually secret) + DATATRACKER_API_PRIVATE_KEY_PEM_B64: |- + Ci0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLQpNSUdIQWdFQU1CTUdCeXFHU000O + UFnRUdDQ3FHU000OUF3RUhCRzB3YXdJQkFRUWdvSTZMSmtvcEtxOFhySGk5ClFxR1 + F2RTRBODNURllqcUx6KzhnVUxZZWNzcWhSQU5DQUFTcFdpT3hxaDhNbEp5NHdrMzY + 
yeUc2Ykkwemt3U0IKcXZOVStxUldpK1lYcklUc2duOTIvZ1Z4WDVBb0swbitzNUx4 + N2ZwanhrQVJWaTY2U0Y2elRKblgKLS0tLS1FTkQgUFJJVkFURSBLRVktLS0tLQo= + + #DATATRACKER_REGISTRATION_API_KEY: "some-key" # secret" + + # DATATRACKER_MEETECHO_API_BASE: "https://meetings.conf.meetecho.com/api/v1/" + DATATRACKER_MEETECHO_CLIENT_ID: "this-is-the-meetecho-client-id" # secret + DATATRACKER_MEETECHO_CLIENT_SECRET: "this-is-the-meetecho-client-secret" # secret + + # DATATRACKER_MATOMO_SITE_ID: "7" # must be present to enable Matomo + # DATATRACKER_MATOMO_DOMAIN_PATH: "analytics.ietf.org" + + CELERY_PASSWORD: "this-is-a-secret" # secret + + # Only one of these may be set + # DATATRACKER_APP_API_TOKENS_JSON_B64: "e30K" # secret + # DATATRACKER_APP_API_TOKENS_JSON: "{}" # secret + + # use this to override default - one entry per line + # DATATRACKER_CSRF_TRUSTED_ORIGINS: |- + # https://datatracker.staging.ietf.org + + # Scout configuration + DATATRACKER_SCOUT_KEY: "this-is-the-scout-key" DATATRACKER_SCOUT_NAME: "StagingDatatracker" \ No newline at end of file
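
Note on the k8s material in the last patch: per the updated k8s/README.md, the reference-only secrets.yaml is deliberately left out of kustomization.yaml and has to be wired in by hand for a local run, with its placeholder values replaced first. A minimal sketch of what that manual step might look like (an assumption for illustration, not part of the committed kustomization.yaml):

    namespace: datatracker
    namePrefix: dt-
    configMapGenerator:
      - name: files-cfgmap
        files:
          - nginx-logging.conf
          - nginx-auth.conf
          - nginx-datatracker.conf
          - settings_local.py
    resources:
      - auth.yaml
      - beat.yaml
      - celery.yaml
      - datatracker.yaml
      - memcached.yaml
      - rabbitmq.yaml
      - secrets.yaml   # added manually for local testing only; values above are placeholders

The result can then be rendered locally with "kubectl kustomize k8s/" (or "kustomize build k8s/") to confirm that the dt--prefixed Secret named secrets-env appears in the output alongside the other resources.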