ci: merge main to release (#7948)

Robert Sparks 2024-09-16 11:36:42 -05:00 committed by GitHub
commit 1bbe1861e6
GPG key ID: B5690EEEBB952194
38 changed files with 711 additions and 187 deletions

.gitignore

@@ -17,6 +17,7 @@ datatracker.sublime-workspace
 /docker/docker-compose.extend-custom.yml
 /env
 /ghostdriver.log
+/geckodriver.log
 /htmlcov
 /ietf/static/dist-neue
 /latest-coverage.json


@@ -71,5 +71,11 @@ SLIDE_STAGING_PATH = '/test/staging/'
 DE_GFM_BINARY = '/usr/local/bin/de-gfm'
 
+# No real secrets here, these are public testing values _only_
+APP_API_TOKENS = {
+    "ietf.api.views.ingest_email_test": ["ingestion-test-token"]
+}
+
 # OIDC configuration
 SITE_URL = 'https://__HOSTNAME__'
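
Both a bare string and a list of strings appear as APP_API_TOKENS values in this change (the test override later in this diff uses plain strings, while this file uses a one-element list), so a deployment-local override can follow the same shape. A minimal sketch, assuming a hypothetical settings_local.py and placeholder token values:

    # hypothetical settings_local.py fragment; token values are placeholders, not real secrets
    APP_API_TOKENS = {
        "ietf.api.views.ingest_email": ["some-production-ingest-token"],
        "ietf.api.views.ingest_email_test": ["ingestion-test-token"],
    }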


@@ -1,5 +1,3 @@
-version: '3.8'
-
 services:
   app:
     build:


@@ -1,7 +1,3 @@
-version: '2.4'
-# Use version 2.4 for mem_limit setting. Version 3+ uses deploy.resources.limits.memory
-# instead, but that only works for swarm with docker-compose 1.25.1.
-
 services:
   mq:
     image: rabbitmq:3-alpine


@@ -1,5 +1,3 @@
-version: '3.8'
-
 services:
   app:
     ports:


@@ -1022,7 +1022,9 @@ class CustomApiTests(TestCase):
             sorted(e.address for e in emails),
         )
 
-    @override_settings(APP_API_TOKENS={"ietf.api.views.ingest_email": "valid-token"})
+    @override_settings(
+        APP_API_TOKENS={"ietf.api.views.ingest_email": "valid-token", "ietf.api.views.ingest_email_test": "test-token"}
+    )
     @mock.patch("ietf.api.views.iana_ingest_review_email")
     @mock.patch("ietf.api.views.ipr_ingest_response_email")
     @mock.patch("ietf.api.views.nomcom_ingest_feedback_email")
@@ -1032,29 +1034,47 @@ class CustomApiTests(TestCase):
         mocks = {mock_nomcom_ingest, mock_ipr_ingest, mock_iana_ingest}
         empty_outbox()
         url = urlreverse("ietf.api.views.ingest_email")
+        test_mode_url = urlreverse("ietf.api.views.ingest_email_test")
 
         # test various bad calls
         r = self.client.get(url)
         self.assertEqual(r.status_code, 403)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.get(test_mode_url)
+        self.assertEqual(r.status_code, 403)
+        self.assertFalse(any(m.called for m in mocks))
         r = self.client.post(url)
         self.assertEqual(r.status_code, 403)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.post(test_mode_url)
+        self.assertEqual(r.status_code, 403)
+        self.assertFalse(any(m.called for m in mocks))
         r = self.client.get(url, headers={"X-Api-Key": "valid-token"})
         self.assertEqual(r.status_code, 405)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.get(test_mode_url, headers={"X-Api-Key": "test-token"})
+        self.assertEqual(r.status_code, 405)
+        self.assertFalse(any(m.called for m in mocks))
         r = self.client.post(url, headers={"X-Api-Key": "valid-token"})
        self.assertEqual(r.status_code, 415)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.post(test_mode_url, headers={"X-Api-Key": "test-token"})
+        self.assertEqual(r.status_code, 415)
+        self.assertFalse(any(m.called for m in mocks))
         r = self.client.post(
             url, content_type="application/json", headers={"X-Api-Key": "valid-token"}
         )
         self.assertEqual(r.status_code, 400)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.post(
+            test_mode_url, content_type="application/json", headers={"X-Api-Key": "test-token"}
+        )
+        self.assertEqual(r.status_code, 400)
+        self.assertFalse(any(m.called for m in mocks))
         r = self.client.post(
             url,
@@ -1064,6 +1084,14 @@ class CustomApiTests(TestCase):
         )
         self.assertEqual(r.status_code, 400)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.post(
+            test_mode_url,
+            "this is not JSON!",
+            content_type="application/json",
+            headers={"X-Api-Key": "test-token"},
+        )
+        self.assertEqual(r.status_code, 400)
+        self.assertFalse(any(m.called for m in mocks))
         r = self.client.post(
             url,
@@ -1073,6 +1101,14 @@ class CustomApiTests(TestCase):
         )
         self.assertEqual(r.status_code, 400)
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.post(
+            test_mode_url,
+            {"json": "yes", "valid_schema": False},
+            content_type="application/json",
+            headers={"X-Api-Key": "test-token"},
+        )
+        self.assertEqual(r.status_code, 400)
+        self.assertFalse(any(m.called for m in mocks))
 
         # bad destination
         message_b64 = base64.b64encode(b"This is a message").decode()
@@ -1086,6 +1122,16 @@ class CustomApiTests(TestCase):
         self.assertEqual(r.headers["Content-Type"], "application/json")
         self.assertEqual(json.loads(r.content), {"result": "bad_dest"})
         self.assertFalse(any(m.called for m in mocks))
+        r = self.client.post(
+            test_mode_url,
+            {"dest": "not-a-destination", "message": message_b64},
+            content_type="application/json",
+            headers={"X-Api-Key": "test-token"},
+        )
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.headers["Content-Type"], "application/json")
+        self.assertEqual(json.loads(r.content), {"result": "bad_dest"})
+        self.assertFalse(any(m.called for m in mocks))
 
         # test that valid requests call handlers appropriately
         r = self.client.post(
@@ -1102,6 +1148,19 @@ class CustomApiTests(TestCase):
         self.assertFalse(any(m.called for m in (mocks - {mock_iana_ingest})))
         mock_iana_ingest.reset_mock()
 
+        # the test mode endpoint should _not_ call the handler
+        r = self.client.post(
+            test_mode_url,
+            {"dest": "iana-review", "message": message_b64},
+            content_type="application/json",
+            headers={"X-Api-Key": "test-token"},
+        )
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.headers["Content-Type"], "application/json")
+        self.assertEqual(json.loads(r.content), {"result": "ok"})
+        self.assertFalse(any(m.called for m in mocks))
+        mock_iana_ingest.reset_mock()
+
         r = self.client.post(
             url,
             {"dest": "ipr-response", "message": message_b64},
@@ -1116,6 +1175,19 @@ class CustomApiTests(TestCase):
         self.assertFalse(any(m.called for m in (mocks - {mock_ipr_ingest})))
         mock_ipr_ingest.reset_mock()
 
+        # the test mode endpoint should _not_ call the handler
+        r = self.client.post(
+            test_mode_url,
+            {"dest": "ipr-response", "message": message_b64},
+            content_type="application/json",
+            headers={"X-Api-Key": "test-token"},
+        )
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.headers["Content-Type"], "application/json")
+        self.assertEqual(json.loads(r.content), {"result": "ok"})
+        self.assertFalse(any(m.called for m in mocks))
+        mock_ipr_ingest.reset_mock()
+
         # bad nomcom-feedback dest
         for bad_nomcom_dest in [
             "nomcom-feedback",  # no suffix
@@ -1133,6 +1205,16 @@ class CustomApiTests(TestCase):
             self.assertEqual(r.headers["Content-Type"], "application/json")
             self.assertEqual(json.loads(r.content), {"result": "bad_dest"})
             self.assertFalse(any(m.called for m in mocks))
+            r = self.client.post(
+                test_mode_url,
+                {"dest": bad_nomcom_dest, "message": message_b64},
+                content_type="application/json",
+                headers={"X-Api-Key": "test-token"},
+            )
+            self.assertEqual(r.status_code, 200)
+            self.assertEqual(r.headers["Content-Type"], "application/json")
+            self.assertEqual(json.loads(r.content), {"result": "bad_dest"})
+            self.assertFalse(any(m.called for m in mocks))
 
         # good nomcom-feedback dest
         random_year = randrange(100000)
@@ -1150,6 +1232,19 @@ class CustomApiTests(TestCase):
         self.assertFalse(any(m.called for m in (mocks - {mock_nomcom_ingest})))
         mock_nomcom_ingest.reset_mock()
 
+        # the test mode endpoint should _not_ call the handler
+        r = self.client.post(
+            test_mode_url,
+            {"dest": f"nomcom-feedback-{random_year}", "message": message_b64},
+            content_type="application/json",
+            headers={"X-Api-Key": "test-token"},
+        )
+        self.assertEqual(r.status_code, 200)
+        self.assertEqual(r.headers["Content-Type"], "application/json")
+        self.assertEqual(json.loads(r.content), {"result": "ok"})
+        self.assertFalse(any(m.called for m in mocks))
+        mock_nomcom_ingest.reset_mock()
+
         # test that exceptions lead to email being sent - assumes that iana-review handling is representative
         mock_iana_ingest.side_effect = EmailIngestionError("Error: don't send email")
         r = self.client.post(


@@ -27,6 +27,8 @@ urlpatterns = [
     url(r'^doc/draft-aliases/$', api_views.draft_aliases),
     # email ingestor
     url(r'email/$', api_views.ingest_email),
+    # email ingestor
+    url(r'email/test/$', api_views.ingest_email_test),
     # GDPR: export of personal information for the logged-in person
     url(r'^export/personal-information/$', api_views.PersonalInformationExportView.as_view()),
     # Email alias information for groups


@@ -614,14 +614,16 @@ class EmailIngestionError(Exception):
         return msg
 
-@requires_api_token
-@csrf_exempt
-def ingest_email(request):
-    """Ingest incoming email
+def ingest_email_handler(request, test_mode=False):
+    """Ingest incoming email - handler
 
     Returns a 4xx or 5xx status code if the HTTP request was invalid or something went
     wrong while processing it. If the request was valid, returns a 200. This may or may
     not indicate that the message was accepted.
+
+    If test_mode is true, actual processing of a valid message will be skipped. In this
+    mode, a valid request with a valid destination will be treated as accepted. The
+    "bad_dest" error may still be returned.
     """
 
     def _http_err(code, text):
@@ -657,14 +659,17 @@ def ingest_email(request):
     try:
         if dest == "iana-review":
             valid_dest = True
-            iana_ingest_review_email(message)
+            if not test_mode:
+                iana_ingest_review_email(message)
         elif dest == "ipr-response":
             valid_dest = True
-            ipr_ingest_response_email(message)
+            if not test_mode:
+                ipr_ingest_response_email(message)
         elif dest.startswith("nomcom-feedback-"):
             maybe_year = dest[len("nomcom-feedback-"):]
             if maybe_year.isdecimal():
                 valid_dest = True
-                nomcom_ingest_feedback_email(message, int(maybe_year))
+                if not test_mode:
+                    nomcom_ingest_feedback_email(message, int(maybe_year))
     except EmailIngestionError as err:
         error_email = err.as_emailmessage()
@@ -677,3 +682,25 @@ def ingest_email(request):
         return _api_response("bad_dest")
 
     return _api_response("ok")
+
+
+@requires_api_token
+@csrf_exempt
+def ingest_email(request):
+    """Ingest incoming email
+
+    Hands off to ingest_email_handler() with test_mode=False. This allows @requires_api_token to
+    give the test endpoint a distinct token from the real one.
+    """
+    return ingest_email_handler(request, test_mode=False)
+
+
+@requires_api_token
+@csrf_exempt
+def ingest_email_test(request):
+    """Ingest incoming email test endpoint
+
+    Hands off to ingest_email_handler() with test_mode=True. This allows @requires_api_token to
+    give the test endpoint a distinct token from the real one.
+    """
+    return ingest_email_handler(request, test_mode=True)
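
Combined with the URL and settings changes elsewhere in this diff, the test endpoint is called exactly like the real one, just with its own token and without triggering ingestion. A minimal client sketch, assuming the API URLs are mounted under /api/ and the service is reachable on localhost:8000 (the path prefix, host, port, and the requests dependency are assumptions, not part of this diff):

    import base64
    import requests

    message_b64 = base64.b64encode(b"This is a message").decode()
    r = requests.post(
        "http://localhost:8000/api/email/test/",  # assumed mount point for ietf.api.urls
        json={"dest": "iana-review", "message": message_b64},
        headers={"X-Api-Key": "ingestion-test-token"},
    )
    print(r.json())  # expected: {"result": "ok"}, with no ingestion handler invoked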


@@ -108,10 +108,8 @@ class CommunityListTests(TestCase):
         return [e for e in Email.objects.filter(person=person)] + \
                [a for a in Alias.objects.filter(person=person)]
 
-    def test_view_list(self):
-        person = self.complex_person(user__username='plain')
+    def do_view_list_test(self, person):
         draft = WgDraftFactory()
 
         # without list
         for id in self.email_or_name_set(person):
             url = urlreverse(ietf.community.views.view_list, kwargs={ "email_or_name": id })
@@ -134,6 +132,15 @@ class CommunityListTests(TestCase):
             self.assertEqual(r.status_code, 200, msg=f"id='{id}', url='{url}'")
             self.assertContains(r, draft.name)
 
+    def test_view_list(self):
+        person = self.complex_person(user__username='plain')
+        self.do_view_list_test(person)
+
+    def test_view_list_without_active_email(self):
+        person = self.complex_person(user__username='plain')
+        person.email_set.update(active=False)
+        self.do_view_list_test(person)
+
     def test_manage_personal_list(self):
         person = self.complex_person(user__username='plain')
         ad = Person.objects.get(user__username='ad')


@@ -68,6 +68,7 @@ def view_list(request, email_or_name=None):
         'meta': meta,
         'can_manage_list': can_manage_community_list(request.user, clist),
         'subscribed': subscribed,
+        "email_or_name": email_or_name,
     })
 
 @login_required


@@ -1046,6 +1046,8 @@ def build_file_urls(doc: Union[Document, DocHistory]):
 
         file_urls = []
         for t in found_types:
+            if t == "ps":  # Postscript might have been submitted but should not be displayed in the list of URLs
+                continue
             label = "plain text" if t == "txt" else t
             file_urls.append((label, base + doc.name + "." + t))


@@ -485,6 +485,29 @@ def ad_workload(request):
         )
         ad.buckets = copy.deepcopy(bucket_template)
 
+        # https://github.com/ietf-tools/datatracker/issues/4577
+        docs_via_group_ad = Document.objects.exclude(
+            group__acronym="none"
+        ).filter(
+            group__role__name="ad",
+            group__role__person=ad
+        ).filter(
+            states__type="draft-stream-ietf",
+            states__slug__in=["wg-doc","wg-lc","waiting-for-implementation","chair-w","writeupw"]
+        )
+
+        doc_for_ad = Document.objects.filter(ad=ad)
+
+        ad.pre_pubreq = (docs_via_group_ad | doc_for_ad).filter(
+            type="draft"
+        ).filter(
+            states__type="draft",
+            states__slug="active"
+        ).filter(
+            states__type="draft-iesg",
+            states__slug="idexists"
+        ).distinct().count()
+
         for doc in Document.objects.exclude(type_id="rfc").filter(ad=ad):
             dt = doc_type(doc)
             state = doc_state(doc)


@@ -18,7 +18,7 @@ import debug # pyflakes:ignore
 
 from ietf.doc.models import DocEvent, BallotPositionDocEvent, TelechatDocEvent
 from ietf.doc.models import Document, State, RelatedDocument
-from ietf.doc.factories import WgDraftFactory, IndividualDraftFactory, ConflictReviewFactory, BaseDocumentFactory, CharterFactory, WgRfcFactory, IndividualRfcFactory
+from ietf.doc.factories import BallotDocEventFactory, BallotPositionDocEventFactory, TelechatDocEventFactory, WgDraftFactory, IndividualDraftFactory, ConflictReviewFactory, BaseDocumentFactory, CharterFactory, WgRfcFactory, IndividualRfcFactory
 from ietf.doc.utils import create_ballot_if_not_open
 from ietf.group.factories import RoleFactory, GroupFactory, DatedGroupMilestoneFactory, DatelessGroupMilestoneFactory
 from ietf.group.models import Group, GroupMilestone, Role
@@ -30,7 +30,6 @@ from ietf.utils.test_utils import TestCase, login_testing_unauthorized, uniconte
 from ietf.iesg.factories import IESGMgmtItemFactory, TelechatAgendaContentFactory
 from ietf.utils.timezone import date_today, DEADLINE_TZINFO
 
-
 class IESGTests(TestCase):
     def test_feed(self):
         draft = WgDraftFactory(states=[('draft','active'),('draft-iesg','iesg-eva')],ad=Person.objects.get(user__username='ad'))
@@ -509,11 +508,12 @@ class IESGAgendaTests(TestCase):
     def test_agenda_documents(self):
         url = urlreverse("ietf.iesg.views.agenda_documents")
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
 
         for k, d in self.telechat_docs.items():
             self.assertContains(r, d.name, msg_prefix="%s '%s' not in response" % (k, d.name, ))
-            self.assertContains(r, d.title, msg_prefix="%s '%s' title not in response" % (k, d.title, ))
+            self.assertContains(r, d.title, msg_prefix="%s '%s' not in response" % (k, d.title, ))
 
     def test_past_documents(self):
         url = urlreverse("ietf.iesg.views.past_documents")
@@ -589,6 +589,66 @@ class IESGAgendaTests(TestCase):
         draft = Document.objects.get(name="draft-ietf-mars-test")
         self.assertEqual(draft.telechat_date(),today)
 
+
+class IESGAgendaTelechatPagesTests(TestCase):
+    def setUp(self):
+        super().setUp()
+        # make_immutable_test_data made a set of future telechats - only need one
+        # We'll take the "next" one
+        self.telechat_date = get_agenda_date()
+        # make_immutable_test_data made and area with only one ad - give it another
+        ad = Person.objects.get(user__username="ad")
+        adrole = Role.objects.get(person=ad, name="ad")
+        ad2 = RoleFactory(group=adrole.group, name_id="ad").person
+        self.ads=[ad,ad2]
+
+        # Make some drafts
+        docs = [
+            WgDraftFactory(pages=2, states=[('draft-iesg','iesg-eva'),]),
+            IndividualDraftFactory(pages=20, states=[('draft-iesg','iesg-eva'),]),
+            WgDraftFactory(pages=200, states=[('draft-iesg','iesg-eva'),]),
+        ]
+        # Put them on the telechat
+        for doc in docs:
+            TelechatDocEventFactory(doc=doc, telechat_date=self.telechat_date)
+        # Give them ballots
+        ballots = [BallotDocEventFactory(doc=doc) for doc in docs]
+        # Give the "ad" Area-Director a discuss on one
+        BallotPositionDocEventFactory(balloter=ad, doc=docs[0], pos_id="discuss", ballot=ballots[0])
+        # and a "norecord" position on another
+        BallotPositionDocEventFactory(balloter=ad, doc=docs[1], pos_id="norecord", ballot=ballots[1])
+        # Now "ad" should have 220 pages left to ballot on.
+        # Every other ad should have 222 pages left to ballot on.
+
+    def test_ad_pages_left_to_ballot_on(self):
+        url = urlreverse("ietf.iesg.views.agenda_documents")
+
+        # A non-AD user won't get "pages left"
+        response = self.client.get(url)
+        telechat = response.context["telechats"][0]
+        self.assertEqual(telechat["date"], self.telechat_date)
+        self.assertEqual(telechat["ad_pages_left_to_ballot_on"],0)
+        self.assertNotContains(response,"pages left to ballot on")
+
+        username=self.ads[0].user.username
+        self.assertTrue(self.client.login(username=username, password=f"{username}+password"))
+        response = self.client.get(url)
+        telechat = response.context["telechats"][0]
+        self.assertEqual(telechat["ad_pages_left_to_ballot_on"],220)
+        self.assertContains(response,"220 pages left to ballot on")
+
+        self.client.logout()
+        username=self.ads[1].user.username
+        self.assertTrue(self.client.login(username=username, password=f"{username}+password"))
+        response = self.client.get(url)
+        telechat = response.context["telechats"][0]
+        self.assertEqual(telechat["ad_pages_left_to_ballot_on"],222)
+
+
 class RescheduleOnAgendaTests(TestCase):
     def test_reschedule(self):
         draft = WgDraftFactory()


@@ -7,11 +7,11 @@ from ietf.doc.utils_search import fill_in_telechat_date
 from ietf.iesg.agenda import get_doc_section
 
-TelechatPageCount = namedtuple('TelechatPageCount',['for_approval','for_action','related'])
+TelechatPageCount = namedtuple('TelechatPageCount',['for_approval','for_action','related','ad_pages_left_to_ballot_on'])
 
-def telechat_page_count(date=None, docs=None):
+def telechat_page_count(date=None, docs=None, ad=None):
     if not date and not docs:
-        return TelechatPageCount(0, 0, 0)
+        return TelechatPageCount(0, 0, 0, 0)
     if not docs:
         candidates = Document.objects.filter(docevent__telechatdocevent__telechat_date=date).distinct()
@@ -24,7 +24,18 @@ def telechat_page_count(date=None, docs=None):
     drafts = [d for d in for_approval if d.type_id == 'draft']
 
-    pages_for_approval = sum([d.pages or 0 for d in drafts])
+    ad_pages_left_to_ballot_on = 0
+    pages_for_approval = 0
+    for draft in drafts:
+        pages_for_approval += draft.pages or 0
+        if ad:
+            ballot = draft.active_ballot()
+            if ballot:
+                positions = ballot.active_balloter_positions()
+                ad_position = positions[ad]
+                if ad_position is None or ad_position.pos_id == "norecord":
+                    ad_pages_left_to_ballot_on += draft.pages or 0
 
     pages_for_action = 0
     for d in for_action:
@@ -53,4 +64,5 @@ def telechat_page_count(date=None, docs=None):
 
     return TelechatPageCount(for_approval=pages_for_approval,
                              for_action=pages_for_action,
-                             related=related_pages)
+                             related=related_pages,
+                             ad_pages_left_to_ballot_on=ad_pages_left_to_ballot_on)
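
A short usage sketch of the extended helper, with illustrative variable names (docs_on_telechat and ad_person are not defined in this diff):

    counts = telechat_page_count(docs=docs_on_telechat, ad=ad_person)
    counts.for_approval                # total pages up for approval, as before
    counts.ad_pages_left_to_ballot_on  # pages this AD has no position (or only "norecord") on
    # When ad is None (e.g. the viewer is not an Area Director) the new field stays 0.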


@@ -360,6 +360,8 @@ def handle_reschedule_form(request, doc, dates, status):
     return form
 
 def agenda_documents(request):
+    ad = request.user.person if has_role(request.user, "Area Director") else None
+
     dates = list(TelechatDate.objects.active().order_by('date').values_list("date", flat=True)[:4])
     docs_by_date = dict((d, []) for d in dates)
@@ -389,11 +391,13 @@ def agenda_documents(request):
         # the search_result_row view to display them (which expects them)
         fill_in_document_table_attributes(docs_by_date[date], have_telechat_date=True)
         fill_in_agenda_docs(date, sections, docs_by_date[date])
-        pages = telechat_page_count(docs=docs_by_date[date]).for_approval
+        page_count = telechat_page_count(docs=docs_by_date[date], ad=ad)
+        pages = page_count.for_approval
 
         telechats.append({
             "date": date,
             "pages": pages,
+            "ad_pages_left_to_ballot_on": page_count.ad_pages_left_to_ballot_on,
             "sections": sorted((num, section) for num, section in sections.items()
                                if "2" <= num < "5")
         })


@@ -24,6 +24,7 @@ from ietf.doc.factories import (
     RfcFactory,
     NewRevisionDocEventFactory
 )
+from ietf.doc.utils import prettify_std_name
 from ietf.group.factories import RoleFactory
 from ietf.ipr.factories import (
     HolderIprDisclosureFactory,
@@ -192,6 +193,32 @@ class IprTests(TestCase):
         r = self.client.get(url + "?submit=rfc&rfc=321")
         self.assertContains(r, ipr.title)
 
+        rfc_new = RfcFactory(rfc_number=322)
+        rfc_new.relateddocument_set.create(relationship_id="obs", target=rfc)
+        # find RFC 322 which obsoletes RFC 321 whose draft has IPR
+        r = self.client.get(url + "?submit=rfc&rfc=322")
+        self.assertContains(r, ipr.title)
+        self.assertContains(r, "Total number of IPR disclosures found: <b>1</b>")
+        self.assertContains(r, "Total number of documents searched: <b>3</b>.")
+        self.assertContains(
+            r,
+            f'Results for <a href="/doc/{rfc_new.name}/">{prettify_std_name(rfc_new.name)}</a> ("{rfc_new.title}")',
+            html=True,
+        )
+        self.assertContains(
+            r,
+            f'Results for <a href="/doc/{rfc.name}/">{prettify_std_name(rfc.name)}</a> ("{rfc.title}"), '
+            f'which was obsoleted by <a href="/doc/{rfc_new.name}/">{prettify_std_name(rfc_new.name)}</a> ("{rfc_new.title}")',
+            html=True,
+        )
+        self.assertContains(
+            r,
+            f'Results for <a href="/doc/{draft.name}/">{prettify_std_name(draft.name)}</a> ("{draft.title}"), '
+            f'which became rfc <a href="/doc/{rfc.name}/">{prettify_std_name(rfc.name)}</a> ("{rfc.title}")',
+            html=True,
+        )
+
         # find by patent owner
         r = self.client.get(url + "?submit=holder&holder=%s" % ipr.holder_legal_name)
         self.assertContains(r, ipr.title)


@@ -689,11 +689,41 @@ def search(request):
             if len(start) == 1:
                 first = start[0]
                 doc = first
-                docs = related_docs(first)
-                iprs = iprs_from_docs(docs,states=states)
+                docs = set([first])
+                docs.update(
+                    related_docs(
+                        first, relationship=("replaces", "obs"), reverse_relationship=()
+                    )
+                )
+                docs.update(
+                    set(
+                        [
+                            draft
+                            for drafts in [
+                                related_docs(
+                                    d, relationship=(), reverse_relationship=("became_rfc",)
+                                )
+                                for d in docs
+                            ]
+                            for draft in drafts
+                        ]
+                    )
+                )
+                docs.discard(None)
+                docs = sorted(
+                    docs,
+                    key=lambda d: (
+                        d.rfc_number if d.rfc_number is not None else 0,
+                        d.became_rfc().rfc_number if d.became_rfc() else 0,
+                    ),
+                    reverse=True,
+                )
+                iprs = iprs_from_docs(docs, states=states)
                 template = "ipr/search_doc_result.html"
-                updated_docs = related_docs(first, ('updates',))
-                related_iprs = list(set(iprs_from_docs(updated_docs, states=states)) - set(iprs))
+                updated_docs = related_docs(first, ("updates",))
+                related_iprs = list(
+                    set(iprs_from_docs(updated_docs, states=states)) - set(iprs)
+                )
             # multiple matches, select just one
             elif start:
                 docs = start


@@ -138,16 +138,16 @@ class Recipient(models.Model):
     def gather_stream_managers(self, **kwargs):
         addrs = []
         manager_map = dict(
-            ise = '<rfc-ise@rfc-editor.org>',
-            irtf = '<irtf-chair@irtf.org>',
-            ietf = '<iesg@ietf.org>',
-            iab = '<iab-chair@iab.org>',
+            ise = ['<rfc-ise@rfc-editor.org>'],
+            irtf = ['<irtf-chair@irtf.org>'],
+            ietf = ['<iesg@ietf.org>'],
+            iab = ['<iab-chair@iab.org>'],
             editorial = Role.objects.filter(group__acronym="rsab",name_id="chair").values_list("email__address", flat=True),
         )
         if 'streams' in kwargs:
             for stream in kwargs['streams']:
                 if stream in manager_map:
-                    addrs.append(manager_map[stream])
+                    addrs.extend(manager_map[stream])
         return addrs
 
     def gather_doc_stream_manager(self, **kwargs):
@@ -234,7 +234,7 @@ class Recipient(models.Model):
         try:
             submitter = Alias.objects.get(name=submission.submitter).person
             if submitter and submitter.email():
-                addrs.extend(["%s <%s>" % (submitter.name, submitter.email().address)])
+                addrs.append(f"{submitter.name} <{submitter.email().address}>")
         except (Alias.DoesNotExist, Alias.MultipleObjectsReturned):
             pass
         return addrs


@@ -1205,19 +1205,30 @@ class Session(models.Model):
         else:
             return ""
 
+    @staticmethod
+    def _alpha_str(n: int):
+        """Convert integer to string of a-z characters (a, b, c, ..., aa, ab, ...)"""
+        chars = []
+        while True:
+            chars.append(string.ascii_lowercase[n % 26])
+            n //= 26
+            # for 2nd letter and beyond, 0 means end the string
+            if n == 0:
+                break
+            # beyond the first letter, no need to represent a 0, so decrement
+            n -= 1
+        return "".join(chars[::-1])
+
     def docname_token(self):
         sess_mtg = Session.objects.filter(meeting=self.meeting, group=self.group).order_by('pk')
         index = list(sess_mtg).index(self)
-        return 'sess%s' % (string.ascii_lowercase[index])
+        return f"sess{self._alpha_str(index)}"
 
     def docname_token_only_for_multiple(self):
         sess_mtg = Session.objects.filter(meeting=self.meeting, group=self.group).order_by('pk')
         if len(list(sess_mtg)) > 1:
             index = list(sess_mtg).index(self)
-            if index < 26:
-                token = 'sess%s' % (string.ascii_lowercase[index])
-            else:
-                token = 'sess%s%s' % (string.ascii_lowercase[index//26],string.ascii_lowercase[index%26])
+            token = f"sess{self._alpha_str(index)}"
             return token
         return None
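
The helper replaces the hard-coded one- and two-letter lookups, so the token sequence simply continues past "sessz". A few concrete values, matching the unit tests added later in this diff:

    Session._alpha_str(0)            # "a"  -> docname_token() == "sessa"
    Session._alpha_str(25)           # "z"
    Session._alpha_str(26)           # "aa"
    Session._alpha_str(27 * 26 - 1)  # "zz"
    Session._alpha_str(27 * 26)      # "aaa"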
@@ -1320,12 +1331,19 @@ class Session(models.Model):
                 return url.format(session=self)
         return None
 
+    def _session_recording_url_label(self):
+        if self.meeting.type.slug == "ietf" and self.has_onsite_tool:
+            session_label = f"IETF{self.meeting.number}-{self.group.acronym.upper()}-{self.official_timeslotassignment().timeslot.time.strftime('%Y%m%d-%H%M')}"
+        else:
+            session_label = f"IETF-{self.group.acronym.upper()}-{self.official_timeslotassignment().timeslot.time.strftime('%Y%m%d-%H%M')}"
+        return session_label
+
     def session_recording_url(self):
-        url = getattr(settings, "MEETECHO_SESSION_RECORDING_URL", "")
-        if self.meeting.type.slug == "ietf" and self.has_onsite_tool and url:
-            self.group.acronym_upper = self.group.acronym.upper()
-            return url.format(session=self)
-        return None
+        url_formatter = getattr(settings, "MEETECHO_SESSION_RECORDING_URL", "")
+        url = None
+        if url_formatter and self.video_stream_url:
+            url = url_formatter.format(session_label=self._session_recording_url_label())
+        return url
 
 
 class SchedulingEvent(models.Model):
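
With the new MEETECHO_SESSION_RECORDING_URL format string (see the settings change later in this diff), the player URL is built from a session label rather than from session attributes directly. A rough sketch with illustrative values (the meeting number, group acronym, and timestamp are made up):

    label = "IETF123-ACRO-20240916-1100"
    url = "https://meetecho-player.ietf.org/playout/?session={session_label}".format(
        session_label=label
    )
    # -> "https://meetecho-player.ietf.org/playout/?session=IETF123-ACRO-20240916-1100"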


@@ -8,8 +8,10 @@ from mock import patch
 from django.conf import settings
 from django.test import override_settings
 
+import ietf.meeting.models
 from ietf.group.factories import GroupFactory, GroupHistoryFactory
 from ietf.meeting.factories import MeetingFactory, SessionFactory, AttendedFactory, SessionPresentationFactory
+from ietf.meeting.models import Session
 from ietf.stats.factories import MeetingRegistrationFactory
 from ietf.utils.test_utils import TestCase
 from ietf.utils.timezone import date_today, datetime_today
@@ -146,3 +148,49 @@ class SessionTests(TestCase):
         self.assertEqual(session.chat_room_name(), 'plenary')
         session.chat_room = 'fnord'
         self.assertEqual(session.chat_room_name(), 'fnord')
+
+    def test_alpha_str(self):
+        self.assertEqual(Session._alpha_str(0), "a")
+        self.assertEqual(Session._alpha_str(1), "b")
+        self.assertEqual(Session._alpha_str(25), "z")
+        self.assertEqual(Session._alpha_str(26), "aa")
+        self.assertEqual(Session._alpha_str(27 * 26 - 1), "zz")
+        self.assertEqual(Session._alpha_str(27 * 26), "aaa")
+
+    @patch.object(ietf.meeting.models.Session, "_session_recording_url_label", return_value="LABEL")
+    def test_session_recording_url(self, mock):
+        for session_type in ["ietf", "interim"]:
+            session = SessionFactory(meeting__type_id=session_type)
+
+            with override_settings():
+                if hasattr(settings, "MEETECHO_SESSION_RECORDING_URL"):
+                    del settings.MEETECHO_SESSION_RECORDING_URL
+                self.assertIsNone(session.session_recording_url())
+
+                settings.MEETECHO_SESSION_RECORDING_URL = "http://player.example.com"
+                self.assertEqual(session.session_recording_url(), "http://player.example.com")
+
+                settings.MEETECHO_SESSION_RECORDING_URL = "http://player.example.com?{session_label}"
+                self.assertEqual(session.session_recording_url(), "http://player.example.com?LABEL")
+
+    def test_session_recording_url_label_ietf(self):
+        session = SessionFactory(
+            meeting__type_id='ietf',
+            meeting__date=date_today(),
+            meeting__number="123",
+            group__acronym="acro",
+        )
+        session_time = session.official_timeslotassignment().timeslot.time
+        self.assertEqual(
+            f"IETF123-ACRO-{session_time:%Y%m%d-%H%M}",  # n.b., time in label is UTC
+            session._session_recording_url_label())
+
+    def test_session_recording_url_label_interim(self):
+        session = SessionFactory(
+            meeting__type_id='interim',
+            meeting__date=date_today(),
+            group__acronym="acro",
+        )
+        session_time = session.official_timeslotassignment().timeslot.time
+        self.assertEqual(
+            f"IETF-ACRO-{session_time:%Y%m%d-%H%M}",  # n.b., time in label is UTC
+            session._session_recording_url_label())


@@ -17,45 +17,61 @@ def sql_log_middleware(get_response):
     def sql_log(request):
         response = get_response(request)
         for q in connection.queries:
-            if re.match('(update|insert)', q['sql'], re.IGNORECASE):
-                log(q['sql'])
+            if re.match("(update|insert)", q["sql"], re.IGNORECASE):
+                log(q["sql"])
         return response
 
     return sql_log
 
 
 class SMTPExceptionMiddleware(object):
     def __init__(self, get_response):
         self.get_response = get_response
 
     def __call__(self, request):
         return self.get_response(request)
 
     def process_exception(self, request, exception):
         if isinstance(exception, smtplib.SMTPException):
             (extype, value, tb) = log_smtp_exception(exception)
-            return render(request, 'email_failed.html',
-                          {'exception': extype, 'args': value, 'traceback': "".join(tb)} )
+            return render(
+                request,
+                "email_failed.html",
+                {"exception": extype, "args": value, "traceback": "".join(tb)},
+            )
         return None
 
 
 class Utf8ExceptionMiddleware(object):
     def __init__(self, get_response):
         self.get_response = get_response
 
     def __call__(self, request):
         return self.get_response(request)
 
     def process_exception(self, request, exception):
         if isinstance(exception, OperationalError):
             extype, e, tb = exc_parts()
             if e.args[0] == 1366:
                 log("Database 4-byte utf8 exception: %s: %s" % (extype, e))
-                return render(request, 'utf8_4byte_failed.html',
-                              {'exception': extype, 'args': e.args, 'traceback': "".join(tb)} )
+                return render(
+                    request,
+                    "utf8_4byte_failed.html",
+                    {"exception": extype, "args": e.args, "traceback": "".join(tb)},
+                )
         return None
 
 
 def redirect_trailing_period_middleware(get_response):
     def redirect_trailing_period(request):
         response = get_response(request)
         if response.status_code == 404 and request.path.endswith("."):
             return HttpResponsePermanentRedirect(request.path.rstrip("."))
         return response
 
     return redirect_trailing_period
 
 
 def unicode_nfkc_normalization_middleware(get_response):
     def unicode_nfkc_normalization(request):
         """Do Unicode NFKC normalization to turn ligatures into individual characters.
@@ -65,9 +81,21 @@ def unicode_nfkc_normalization_middleware(get_response):
         There are probably other elements of a request which may need this normalization
         too, but let's put that in as it comes up, rather than guess ahead.
         """
-        request.META["PATH_INFO"] = unicodedata.normalize('NFKC', request.META["PATH_INFO"])
-        request.path_info = unicodedata.normalize('NFKC', request.path_info)
+        request.META["PATH_INFO"] = unicodedata.normalize(
+            "NFKC", request.META["PATH_INFO"]
+        )
+        request.path_info = unicodedata.normalize("NFKC", request.path_info)
         response = get_response(request)
         return response
 
     return unicode_nfkc_normalization
+
+
+def is_authenticated_header_middleware(get_response):
+    """Middleware to add an is-authenticated header to the response"""
+    def add_header(request):
+        response = get_response(request)
+        response["X-Datatracker-Is-Authenticated"] = "yes" if request.user.is_authenticated else "no"
+        return response
+
+    return add_header
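
The new middleware only tags responses, so its effect can be observed on any request once it is enabled in MIDDLEWARE (see the settings change below). A minimal sketch using Django's test client; the login step is only indicated in a comment because the credentials would be hypothetical:

    from django.test import TestCase

    class IsAuthenticatedHeaderSketch(TestCase):
        def test_header(self):
            r = self.client.get("/")
            self.assertEqual(r["X-Datatracker-Is-Authenticated"], "no")
            # after a successful self.client.login(...), the same header reads "yes"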


@@ -401,24 +401,25 @@ if DEBUG:
 
 MIDDLEWARE = [
-    'django.middleware.csrf.CsrfViewMiddleware',
-    'corsheaders.middleware.CorsMiddleware', # see docs on CORS_REPLACE_HTTPS_REFERER before using it
-    'django.middleware.common.CommonMiddleware',
-    'django.contrib.sessions.middleware.SessionMiddleware',
-    'django.contrib.auth.middleware.AuthenticationMiddleware',
-    'django.contrib.messages.middleware.MessageMiddleware',
-    'django.middleware.http.ConditionalGetMiddleware',
-    'simple_history.middleware.HistoryRequestMiddleware',
+    "django.middleware.csrf.CsrfViewMiddleware",
+    "corsheaders.middleware.CorsMiddleware", # see docs on CORS_REPLACE_HTTPS_REFERER before using it
+    "django.middleware.common.CommonMiddleware",
+    "django.contrib.sessions.middleware.SessionMiddleware",
+    "django.contrib.auth.middleware.AuthenticationMiddleware",
+    "django.contrib.messages.middleware.MessageMiddleware",
+    "django.middleware.http.ConditionalGetMiddleware",
+    "simple_history.middleware.HistoryRequestMiddleware",
     # comment in this to get logging of SQL insert and update statements:
-    #'ietf.middleware.sql_log_middleware',
-    'ietf.middleware.SMTPExceptionMiddleware',
-    'ietf.middleware.Utf8ExceptionMiddleware',
-    'ietf.middleware.redirect_trailing_period_middleware',
-    'django_referrer_policy.middleware.ReferrerPolicyMiddleware',
-    'django.middleware.clickjacking.XFrameOptionsMiddleware',
-    'django.middleware.security.SecurityMiddleware',
-    # 'csp.middleware.CSPMiddleware',
-    'ietf.middleware.unicode_nfkc_normalization_middleware',
+    #"ietf.middleware.sql_log_middleware",
+    "ietf.middleware.SMTPExceptionMiddleware",
+    "ietf.middleware.Utf8ExceptionMiddleware",
+    "ietf.middleware.redirect_trailing_period_middleware",
+    "django_referrer_policy.middleware.ReferrerPolicyMiddleware",
+    "django.middleware.clickjacking.XFrameOptionsMiddleware",
+    "django.middleware.security.SecurityMiddleware",
+    #"csp.middleware.CSPMiddleware",
+    "ietf.middleware.unicode_nfkc_normalization_middleware",
+    "ietf.middleware.is_authenticated_header_middleware",
 ]
 
 ROOT_URLCONF = 'ietf.urls'
@@ -1174,7 +1175,7 @@ CELERY_TASK_IGNORE_RESULT = True # ignore results unless specifically enabled f
 MEETECHO_ONSITE_TOOL_URL = "https://meetings.conf.meetecho.com/onsite{session.meeting.number}/?session={session.pk}"
 MEETECHO_VIDEO_STREAM_URL = "https://meetings.conf.meetecho.com/ietf{session.meeting.number}/?session={session.pk}"
 MEETECHO_AUDIO_STREAM_URL = "https://mp3.conf.meetecho.com/ietf{session.meeting.number}/{session.pk}.m3u"
-MEETECHO_SESSION_RECORDING_URL = "https://www.meetecho.com/ietf{session.meeting.number}/recordings#{session.group.acronym_upper}"
+MEETECHO_SESSION_RECORDING_URL = "https://meetecho-player.ietf.org/playout/?session={session_label}"
 
 # Put the production SECRET_KEY in settings_local.py, and also any other
 # sensitive or site-specific changes. DO NOT commit settings_local.py to svn.


@@ -3,18 +3,18 @@
     <div class="btn-group" role="group" aria-labelledby="list-feeds">
         <a class="btn btn-primary"
            title="Feed of all changes"
-           href="{% if clist.group %}{% url "ietf.community.views.feed" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.feed" email_or_name=clist.person.email_address %}{% endif %}">
+           href="{% if clist.group %}{% url "ietf.community.views.feed" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.feed" email_or_name=email_or_name %}{% endif %}">
            <i class="bi bi-rss"></i> All changes
         </a>
         <a class="btn btn-primary"
            title="Feed of only significant state changes"
-           href="{% if clist.group %}{% url "ietf.community.views.feed" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.feed" email_or_name=clist.person.email_address %}{% endif %}?significant=1">
+           href="{% if clist.group %}{% url "ietf.community.views.feed" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.feed" email_or_name=email_or_name %}{% endif %}?significant=1">
            <i class="bi bi-rss"></i> Significant
         </a>
     </div>
     {% if clist.pk != None %}
         <a class="btn btn-primary"
-           href="{% if clist.group %}{% url "ietf.community.views.subscription" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.subscription" email_or_name=clist.person.email_address %}{% endif %}">
+           href="{% if clist.group %}{% url "ietf.community.views.subscription" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.subscription" email_or_name=email_or_name %}{% endif %}">
             <i class="bi bi-envelope"></i>
             {% if subscribed %}
                 Change subscription
@@ -24,7 +24,7 @@
         </a>
     {% endif %}
     <a class="btn btn-primary"
-       href="{% if clist.group %}{% url "ietf.community.views.export_to_csv" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.export_to_csv" email_or_name=clist.person.email_address %}{% endif %}">
+       href="{% if clist.group %}{% url "ietf.community.views.export_to_csv" acronym=clist.group.acronym %}{% else %}{% url "ietf.community.views.export_to_csv" email_or_name=email_or_name %}{% endif %}">
         <i class="bi bi-file-ruled"></i> Export as CSV
     </a>
 </div>


@@ -12,7 +12,7 @@
     {% bootstrap_messages %}
     {% if can_manage_list %}
         <a class="btn btn-primary my-3"
-           href="{% url "ietf.community.views.manage_list" email_or_name=clist.person.email_address %}">
+           href="{% url "ietf.community.views.manage_list" email_or_name=email_or_name %}">
             <i class="bi bi-gear"></i>
             Manage list
         </a>


@@ -35,9 +35,12 @@
         <thead class="wrap-anywhere">
             <tr>
                 <th scope="col" data-sort="name">Area Director</th>
+                {% if dt.type.1 == "Internet-Draft" %}
+                    <th scope="col" data-sort="pre-pubreq">Pre pubreq</th>
+                {% endif %}
                 {% for state, state_name in dt.states %}
-                    <th scope="col" class="col-1" title=""
-                        data-sort="{{ state }}-num">
+                    <th scope="col" class="col-1" data-sort="{{ state }}-num"
+                        >
                         <a href="{% url 'ietf.doc.views_help.state_help' type='draft-iesg' %}#{{ state }}">
                             {{ state_name|split:'/'|join:'/<wbr>' }}
                         </a>
@@ -51,6 +54,17 @@
                 <td>
                     <a href="{{ ad.dashboard }}">{{ ad.name }}</a>
                 </td>
+                {% if dt.type.1 == "Internet-Draft" %}
+                    <td
+                        class="col-1 align-bottom"
+                        data-sum="pre-pubreq"
+                        {% if user|has_role:"Area Director,Secretariat" %}
+                            data-series-graph
+                        {% endif %}
+                    >
+                        {{ ad.pre_pubreq }}
+                    </td>
+                {% endif %}
                 {% for state, state_name in dt.states %}
                     <td class="col-1 align-bottom"
                         id="{{ dt.type.0 }}-{{ ad|slugify }}-{{ state }}">
@@ -63,6 +77,16 @@
         <tfoot class="table-group-divider">
             <tr>
                 <th scope="row">Sum</th>
+                {% if dt.type.1 == "Internet-Draft" %}
+                    <td class="align-bottom">
+                        <div
+                            data-sum-result="pre-pubreq"
+                            {% if user|has_role:"Area Director,Secretariat" %}
+                                data-series-graph
+                            {% endif %}
+                        ></div>
+                    </td>
+                {% endif %}
                 {% for state, state_name in dt.states %}
                     <td class="align-bottom">
                         <div id="chart-{{ dt.type.0 }}-sum-{{ state }}"></div>
@@ -87,61 +111,18 @@
     </script>
     <script src="{% static "ietf/js/highcharts.js" %}"></script>
     {{ data|json_script:"data" }}
     <script>
-        const data = JSON.parse(document.getElementById("data").textContent);
-
-        Object.entries(data).forEach(([dt, ads]) => {
-            max = {};
-            Object.entries(ads).forEach(([ad, states]) => {
-                Object.entries(states).forEach(([state, buckets]) => {
-                    buckets.series = buckets.map((x) => x.length);
-                    if (ad != "sum") {
-                        max[state] = Math.max(...buckets.series,
-                            max[state] ? max[state] : 0);
-                    }
-                });
-            });
-
-            Object.entries(ads).forEach(([ad, states]) => {
-                Object.entries(states).forEach(([state, buckets]) => {
-                    const cell = `chart-${dt}-${ad}-${state}`;
-                    // if there is only a single datapoint in the
-                    // bucket, display it without a graph
-                    if (buckets.series.length == 1) {
-                        document.getElementById(cell).innerHTML =
-                            buckets.series[0];
-                        return;
-                    }
-                    // if a bucket has all zeroes, fake a Highcharts
-                    // plot with HTML, to reduce the number of plot
-                    // objects on the page
-                    if (buckets.series.every((x) => x == 0)) {
-                        // document.getElementById(cell).innerHTML = `
-                        //     <div class="position-relative">
-                        //         <div class="position-absolute bottom-0 start-0">
-                        //             <div style="font-size: .7rem;" class="ms-1">0</div>
-                        //         </div>
-                        //         <div class="position-absolute bottom-0 end-0 w-100 ps-1">
-                        //             <div class="border-bottom mb-1 ms-3">0</div>
-                        //         </div>
-                        //     </div>
-                        // `;
-                        return;
-                    }
-                    // else actually create a graph
-                    const ymax = Math.max(1, ad != "sum" ? max[state] : Math.max(...buckets.series));
-                    Highcharts.chart({
+        function highchartsConfigFactory({ element, ymax, series }){
+            return {
                 title: { text: undefined },
                 chart: {
                     type: "line",
                     animation: false,
-                    renderTo: cell,
+                    renderTo: element,
                     panning: { enabled: false },
                     spacing: [4, 0, 5, 0],
-                    height: "45%",
+                    height: "45%"
                 },
                 scrollbar: { enabled: false },
                 tooltip: { enabled: false },
@@ -153,7 +134,7 @@
                     title: { text: undefined},
                     labels: { enabled: false },
                     zoomEnabled: false,
-                    tickLength: 0,
+                    tickLength: 0
                 },
                 yAxis: {
                     title: { text: undefined},
@@ -162,7 +143,7 @@
                     labels: { x: -3 },
                     min: 0,
                     max: ymax,
-                    tickInterval: ymax,
+                    tickInterval: ymax
                 },
                 plotOptions: {
                     series: {
@@ -178,21 +159,127 @@
                             // value
                             if (this.point.index + 1 ==
                                 this.series.points.length) {
-                                return this.y;
+                                return this.y
                             }
-                            return undefined;
+                            return undefined
                         }
                     }
                 },
                 series: [{
                     name: undefined,
-                    data: buckets.series,
-                    enableMouseTracking: false,
-                }],
-                    });
-                });
-            });
-        });
+                    data: series,
+                    enableMouseTracking: false
+                }]
+            }
+        }
+    </script>
+
+    <script>
+        const GRAPH_BUFFER = 2;
+
+        function safeParseFloat(text) {
+            const trimNumber = text.trim()
+            if(!trimNumber.match(/^[0-9.]+$/)) {
+                console.warn(`Unable to parse "${trimNumber}" as a number.`)
+                return Number.NaN
+            }
+            return parseFloat(text)
+        }
+
+        Array.from(document.querySelectorAll("table"))
+            .filter(table => Boolean(table.querySelector("[data-sum]")))
+            .forEach(table => {
+                const sums = Array.from(table.querySelectorAll("[data-sum]")).reduce((
+                    sumsAccumulator,
+                    cell
+                ) => {
+                    const key = cell.dataset.sum
+                    const value = safeParseFloat(cell.textContent)
+                    if(key && !isNaN(value)) {
+                        sumsAccumulator[key] = (sumsAccumulator[key] || 0) + (value || 0)
+                    }
+                    return sumsAccumulator
+                }, {})
+
+                Array.from(table.querySelectorAll('[data-sum-result]')).forEach(result => {
+                    const key = result.dataset.sumResult
+                    const value = sums[key]
+                    if(value) {
+                        result.innerText = value
+                    }
+                })
+
+                Array.from(table.querySelectorAll('[data-series-graph]')).forEach(element => {
+                    const endValue = safeParseFloat(element.innerText)
+                    if(isNaN(endValue)) throw Error("Can't render Highcharts chart with non-numerical " + element.innerText)
+                    const ymax = Math.max(1, endValue + GRAPH_BUFFER)
+                    Highcharts.chart(
+                        highchartsConfigFactory({
+                            element,
+                            ymax,
+                            series: [endValue]
+                        })
+                    )
+                })
+            })
+    </script>
+
+    <script>
+        const data = JSON.parse(document.getElementById("data").textContent)
+
+        Object.entries(data).forEach(([dt, ads]) => {
+            max = {}
+            Object.entries(ads).forEach(([ad, states]) => {
+                Object.entries(states).forEach(([state, buckets]) => {
+                    buckets.series = buckets.map((x) => x.length)
+                    if (ad != "sum") {
+                        max[state] = Math.max(...buckets.series, max[state] ? max[state] : 0)
+                    }
+                })
+            })
+
+            Object.entries(ads).forEach(([ad, states]) => {
+                Object.entries(states).forEach(([state, buckets]) => {
+                    const cell = `chart-${dt}-${ad}-${state}`
+                    // if there is only a single datapoint in the
+                    // bucket, display it without a graph
+                    if (buckets.series.length == 1) {
+                        document.getElementById(cell).innerHTML = buckets.series[0]
+                        return
+                    }
+                    // if a bucket has all zeroes, fake a Highcharts
+                    // plot with HTML, to reduce the number of plot
+                    // objects on the page
+                    if (buckets.series.every((x) => x === 0)) {
+                        // document.getElementById(cell).innerHTML = `
+                        //     <div class="position-relative">
+                        //         <div class="position-absolute bottom-0 start-0">
+                        //             <div style="font-size: .7rem;" class="ms-1">0</div>
+                        //         </div>
+                        //         <div class="position-absolute bottom-0 end-0 w-100 ps-1">
+                        //             <div class="border-bottom mb-1 ms-3">0</div>
+                        //         </div>
+                        //     </div>
+                        // `;
                        return
+                    }
+                    // else actually create a graph
+                    const ymax = Math.max(1, ad !== "sum" ? max[state] : Math.max(...buckets.series))
+                    Highcharts.chart(
+                        highchartsConfigFactory({
+                            element: cell,
+                            ymax,
+                            series: buckets.series
+                        })
+                    )
+                })
+            })
+        })
     </script>
 {% endblock %}


@@ -1,6 +1,6 @@
-{% load ietf_filters %}{% autoescape off %}To: {{ to }} {% if cc %}
-Cc: {{ cc }}
-{% endif %}From: IESG Secretary <iesg-secretary@ietf.org>
+{% load ietf_filters %}{% autoescape off %}To: {{ to }}{% if cc %}
+Cc: {{ cc }}{% endif %}
+From: IESG Secretary <iesg-secretary@ietf.org>
 Reply-To: IESG Secretary <iesg-secretary@ietf.org>
 Subject: Evaluation: {{ doc.name }}


@@ -21,7 +21,12 @@
         <h2>
             IESG telechat {{ t.date }}
             <br>
-            <small class="text-body-secondary">{{ t.pages }} page{{ t.pages|pluralize }}</small>
+            <small class="text-body-secondary">
+                {{ t.pages }} page{{ t.pages|pluralize }}
+                {% if t.ad_pages_left_to_ballot_on %}
+                    ({{ t.ad_pages_left_to_ballot_on }} pages left to ballot on)
+                {% endif %}
+            </small>
         </h2>
         <div class="buttonlist">
             <a class="btn btn-primary" role="button" href="{% url 'ietf.iesg.views.agenda' %}">


@@ -54,16 +54,27 @@
             </tr>
         </thead>
-        {% for doc in docs %}
+        {% for d in docs %}
             <tbody>
                 <tr>
                     <th scope="col" class="table-info" colspan="3">
-                        Results for {{ doc.name|prettystdname|urlize_ietf_docs }} ("{{ doc.title }}"){% if not forloop.first %}{% if doc.related %}, which was {{ doc.relation|lower }} {{ doc.related.source|prettystdname|urlize_ietf_docs }} ("{{ doc.related.source.title }}"){% endif %}{% endif %}
+                        Results for {{ d.name|prettystdname|urlize_ietf_docs }}
+                        ("{{ d.title }}"){% if d != doc and d.related %}, which
+                        {% if d == d.related.source %}
+                            {{ d.relation|lower }}
+                            {{ d.related.target|prettystdname|urlize_ietf_docs }}
+                            ("{{ d.related.target.title }}")
+                        {% else %}
+                            was {{ d.relation|lower }}
+                            {{ d.related.source|prettystdname|urlize_ietf_docs }}
+                            ("{{ d.related.source.title }}")
+                        {% endif %}
+                        {% endif %}
                     </th>
                 </tr>
             </tbody>
             <tbody>
-                {% with doc.iprdocrel_set.all as doc_iprs %}
+                {% with d.iprdocrel_set.all as doc_iprs %}
                     {% if doc_iprs %}
                         {% for ipr in doc_iprs %}
                             {% if ipr.disclosure.state_id in states %}
@@ -81,7 +92,7 @@
                             <td></td>
                             <td></td>
                             <td>
-                                No IPR disclosures have been submitted directly on {{ doc.name|prettystdname|urlize_ietf_docs }}{% if iprs %},
+                                No IPR disclosures have been submitted directly on {{ d.name|prettystdname|urlize_ietf_docs }}{% if iprs %},
                                 but there are disclosures on {% if docs|length == 2 %}a related document{% else %}related documents{% endif %}, listed on this page{% endif %}.
                             </td>
                         </tr>

View file

@ -146,15 +146,16 @@
{% endif %} {% endif %}
{% endwith %} {% endwith %}
{% endfor %} {% endfor %}
{% elif session.video_stream_url %} {% elif show_empty %}
{# <i class="bi"></i> #}
{% endif %}
{% if session.session_recording_url %}
<a class="btn btn-outline-primary" <a class="btn btn-outline-primary"
href="{{ session.session_recording_url }}" href="{{ session.session_recording_url }}"
aria-label="Meetecho session recording" aria-label="Meetecho session recording"
title="Meetecho session recording"> title="Meetecho session recording">
<i class="bi bi-file-slides"></i> <i class="bi bi-file-slides"></i>
</a> </a>
{% elif show_empty %}
{# <i class="bi"></i> #}
{% endif %} {% endif %}
{% endwith %} {% endwith %}
{% endif %} {% endif %}

View file

@ -23,12 +23,12 @@ class GunicornRequestJsonFormatter(DatatrackerJsonFormatter):
log_record.setdefault("referer", record.args["f"]) log_record.setdefault("referer", record.args["f"])
log_record.setdefault("user_agent", record.args["a"]) log_record.setdefault("user_agent", record.args["a"])
log_record.setdefault("len_bytes", record.args["B"]) log_record.setdefault("len_bytes", record.args["B"])
log_record.setdefault("duration_ms", record.args["M"]) log_record.setdefault("duration_s", record.args["L"]) # decimal seconds
log_record.setdefault("host", record.args["{host}i"]) log_record.setdefault("host", record.args["{host}i"])
log_record.setdefault("x_request_start", record.args["{x-request-start}i"]) log_record.setdefault("x_request_start", record.args["{x-request-start}i"])
log_record.setdefault("x_real_ip", record.args["{x-real-ip}i"])
log_record.setdefault("x_forwarded_for", record.args["{x-forwarded-for}i"]) log_record.setdefault("x_forwarded_for", record.args["{x-forwarded-for}i"])
log_record.setdefault("x_forwarded_proto", record.args["{x-forwarded-proto}i"]) log_record.setdefault("x_forwarded_proto", record.args["{x-forwarded-proto}i"])
log_record.setdefault("cf_connecting_ip", record.args["{cf-connecting-ip}i"]) log_record.setdefault("cf_connecting_ip", record.args["{cf-connecting-ip}i"])
log_record.setdefault("cf_connecting_ipv6", record.args["{cf-connecting-ipv6}i"]) log_record.setdefault("cf_connecting_ipv6", record.args["{cf-connecting-ipv6}i"])
log_record.setdefault("cf_ray", record.args["{cf-ray}i"]) log_record.setdefault("cf_ray", record.args["{cf-ray}i"])
log_record.setdefault("is_authenticated", record.args["{x-datatracker-is-authenticated}i"])

View file

@ -92,7 +92,17 @@ def send_smtp(msg, bcc=None):
''' '''
mark = time.time() mark = time.time()
add_headers(msg) add_headers(msg)
(fname, frm) = parseaddr(msg.get('From')) # N.B. There is a disconnect here: most of this code assumes a From header value will only
# have one address.
# The frm computed here is used only as the envelope from.
# Previous code simply ran `parseaddr(msg.get('From'))` and got lucky when the string returned
# by the get had more than one address in it. Python 3.9.20 changed the behavior of parseaddr,
# so that erroneous use of the function no longer gets lucky.
# For the short term, to match behavior to date as closely as possible, if a message has
# multiple addresses in the From header, we use the first as the envelope from.
from_tuples = getaddresses(msg.get_all('From', [settings.DEFAULT_FROM_EMAIL]))
assertion('len(from_tuples)==1', note=f"send_smtp received multiple From addresses: {from_tuples}")
_ , frm = from_tuples[0]
addrlist = msg.get_all('To') + msg.get_all('Cc', []) addrlist = msg.get_all('To') + msg.get_all('Cc', [])
if bcc: if bcc:
addrlist += [bcc] addrlist += [bcc]
@ -446,6 +456,8 @@ def parse_preformatted(preformatted, extra=None, override=None):
values = msg.get_all(key, []) values = msg.get_all(key, [])
if values: if values:
values = getaddresses(values) values = getaddresses(values)
if key=='From':
assertion('len(values)<2', note=f'parse_preformatted is constructing a From with multiple values: {values}')
del msg[key] del msg[key]
msg[key] = ',\n '.join(formataddr(v) for v in values) msg[key] = ',\n '.join(formataddr(v) for v in values)
for key in ['Subject', ]: for key in ['Subject', ]:
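The comment in send_smtp above motivates deriving the envelope sender with getaddresses() rather than parseaddr(). A small illustration of the getaddresses() pattern (the addresses are made up; the exact parseaddr() result for a multi-address string depends on the Python patch level, which is the behavior change being worked around):

# Illustration of the getaddresses() pattern used in send_smtp above; the
# addresses are made up. getaddresses() takes a list of header values and
# returns one (display-name, address) tuple per address.
from email.utils import getaddresses

from_header = "Alice <alice@example.org>, Bob <bob@example.org>"
from_tuples = getaddresses([from_header])
# -> [('Alice', 'alice@example.org'), ('Bob', 'bob@example.org')]
_, envelope_from = from_tuples[0]  # the first address is used as the envelope from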

View file

@ -53,7 +53,7 @@ class SendingMail(TestCase):
def test_send_mail_preformatted(self): def test_send_mail_preformatted(self):
msg = """To: to1@example.com, to2@example.com msg = """To: to1@example.com, to2@example.com
From: from1@ietf.org, from2@ietf.org From: from1@ietf.org
Cc: cc1@example.com, cc2@example.com Cc: cc1@example.com, cc2@example.com
Bcc: bcc1@example.com, bcc2@example.com Bcc: bcc1@example.com, bcc2@example.com
Subject: subject Subject: subject
@ -63,7 +63,7 @@ body
send_mail_preformatted(None, msg, {}, {}) send_mail_preformatted(None, msg, {}, {})
recv = outbox[-1] recv = outbox[-1]
self.assertSameEmail(recv['To'], '<to1@example.com>, <to2@example.com>') self.assertSameEmail(recv['To'], '<to1@example.com>, <to2@example.com>')
self.assertSameEmail(recv['From'], 'from1@ietf.org, from2@ietf.org') self.assertSameEmail(recv['From'], 'from1@ietf.org')
self.assertSameEmail(recv['Cc'], 'cc1@example.com, cc2@example.com') self.assertSameEmail(recv['Cc'], 'cc1@example.com, cc2@example.com')
self.assertSameEmail(recv['Bcc'], None) self.assertSameEmail(recv['Bcc'], None)
self.assertEqual(recv['Subject'], 'subject') self.assertEqual(recv['Subject'], 'subject')
@ -71,14 +71,14 @@ body
override = { override = {
'To': 'oto1@example.net, oto2@example.net', 'To': 'oto1@example.net, oto2@example.net',
'From': 'ofrom1@ietf.org, ofrom2@ietf.org', 'From': 'ofrom1@ietf.org',
'Cc': 'occ1@example.net, occ2@example.net', 'Cc': 'occ1@example.net, occ2@example.net',
'Subject': 'osubject', 'Subject': 'osubject',
} }
send_mail_preformatted(request=None, preformatted=msg, extra={}, override=override) send_mail_preformatted(request=None, preformatted=msg, extra={}, override=override)
recv = outbox[-1] recv = outbox[-1]
self.assertSameEmail(recv['To'], '<oto1@example.net>, <oto2@example.net>') self.assertSameEmail(recv['To'], '<oto1@example.net>, <oto2@example.net>')
self.assertSameEmail(recv['From'], 'ofrom1@ietf.org, ofrom2@ietf.org') self.assertSameEmail(recv['From'], 'ofrom1@ietf.org')
self.assertSameEmail(recv['Cc'], 'occ1@example.net, occ2@example.net') self.assertSameEmail(recv['Cc'], 'occ1@example.net, occ2@example.net')
self.assertSameEmail(recv['Bcc'], None) self.assertSameEmail(recv['Bcc'], None)
self.assertEqual(recv['Subject'], 'osubject') self.assertEqual(recv['Subject'], 'osubject')
@ -86,14 +86,14 @@ body
override = { override = {
'To': ['<oto1@example.net>', 'oto2@example.net'], 'To': ['<oto1@example.net>', 'oto2@example.net'],
'From': ['<ofrom1@ietf.org>', 'ofrom2@ietf.org'], 'From': ['<ofrom1@ietf.org>'],
'Cc': ['<occ1@example.net>', 'occ2@example.net'], 'Cc': ['<occ1@example.net>', 'occ2@example.net'],
'Subject': 'osubject', 'Subject': 'osubject',
} }
send_mail_preformatted(request=None, preformatted=msg, extra={}, override=override) send_mail_preformatted(request=None, preformatted=msg, extra={}, override=override)
recv = outbox[-1] recv = outbox[-1]
self.assertSameEmail(recv['To'], '<oto1@example.net>, <oto2@example.net>') self.assertSameEmail(recv['To'], '<oto1@example.net>, <oto2@example.net>')
self.assertSameEmail(recv['From'], '<ofrom1@ietf.org>, ofrom2@ietf.org') self.assertSameEmail(recv['From'], '<ofrom1@ietf.org>')
self.assertSameEmail(recv['Cc'], '<occ1@example.net>, occ2@example.net') self.assertSameEmail(recv['Cc'], '<occ1@example.net>, occ2@example.net')
self.assertSameEmail(recv['Bcc'], None) self.assertSameEmail(recv['Bcc'], None)
self.assertEqual(recv['Subject'], 'osubject') self.assertEqual(recv['Subject'], 'osubject')

View file

@ -144,9 +144,5 @@ spec:
targetPort: http targetPort: http
protocol: TCP protocol: TCP
name: http name: http
- port: 8080
targetPort: http
protocol: TCP
name: http-old
selector: selector:
app: auth app: auth

View file

@ -32,7 +32,7 @@ server {
proxy_set_header Connection close; proxy_set_header Connection close;
proxy_set_header X-Request-Start "t=$${keepempty}msec"; proxy_set_header X-Request-Start "t=$${keepempty}msec";
proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for;
proxy_set_header X-Real-IP $${keepempty}remote_addr; proxy_hide_header X-Datatracker-Is-Authenticated; # hide this from the outside world
proxy_pass http://localhost:8000; proxy_pass http://localhost:8000;
# Set timeouts longer than Cloudflare proxy limits # Set timeouts longer than Cloudflare proxy limits
proxy_connect_timeout 60; # nginx default (Cf = 15) proxy_connect_timeout 60; # nginx default (Cf = 15)

View file

@ -21,7 +21,7 @@ server {
proxy_set_header Connection close; proxy_set_header Connection close;
proxy_set_header X-Request-Start "t=$${keepempty}msec"; proxy_set_header X-Request-Start "t=$${keepempty}msec";
proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $${keepempty}proxy_add_x_forwarded_for;
proxy_set_header X-Real-IP $${keepempty}remote_addr; proxy_hide_header X-Datatracker-Is-Authenticated; # hide this from the outside world
proxy_pass http://localhost:8000; proxy_pass http://localhost:8000;
# Set timeouts longer than Cloudflare proxy limits # Set timeouts longer than Cloudflare proxy limits
proxy_connect_timeout 60; # nginx default (Cf = 15) proxy_connect_timeout 60; # nginx default (Cf = 15)
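Both nginx templates now hide the backend's X-Datatracker-Is-Authenticated header from clients, while the gunicorn formatter above records the matching request header; the value is meant for internal logging only. A hedged sketch of how a backend might stamp such a header: an illustrative Django middleware, not the datatracker's actual code, and the exact hop that reads the header is an assumption.

# Hypothetical Django middleware (not the datatracker's actual code) that
# marks each response with the request's authentication state so logging
# infrastructure in front of the app can record it; nginx's
# proxy_hide_header keeps it from reaching external clients.
class IsAuthenticatedHeaderMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        response["X-Datatracker-Is-Authenticated"] = (
            "yes" if request.user.is_authenticated else "no"
        )
        return response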

View file

@ -9,7 +9,7 @@ log_format ietfjson escape=json
'"method":"$${keepempty}request_method",' '"method":"$${keepempty}request_method",'
'"status":"$${keepempty}status",' '"status":"$${keepempty}status",'
'"len_bytes":"$${keepempty}body_bytes_sent",' '"len_bytes":"$${keepempty}body_bytes_sent",'
'"duration_ms":"$${keepempty}request_time",' '"duration_s":"$${keepempty}request_time",'
'"referer":"$${keepempty}http_referer",' '"referer":"$${keepempty}http_referer",'
'"user_agent":"$${keepempty}http_user_agent",' '"user_agent":"$${keepempty}http_user_agent",'
'"x_forwarded_for":"$${keepempty}http_x_forwarded_for",' '"x_forwarded_for":"$${keepempty}http_x_forwarded_for",'

View file

@ -0,0 +1,26 @@
const { test, expect } = require('@playwright/test')
const viewports = require('../../helpers/viewports')
// ====================================================================
// IESG Dashboard
// ====================================================================
test.describe('/doc/ad/', () => {
test.beforeEach(async ({ page }) => {
await page.setViewportSize({
width: viewports.desktop[0],
height: viewports.desktop[1]
})
await page.goto('/doc/ad/')
})
test('Pre pubreq', async ({ page }) => {
const tablesLocator = page.locator('table')
const tablesCount = await tablesLocator.count()
expect(tablesCount).toBeGreaterThan(0)
const firstTable = tablesLocator.nth(0)
const theadTexts = await firstTable.locator('thead').allInnerTexts()
expect(theadTexts.join('')).toContain('Pre pubreq')
})
})

View file

@ -33,6 +33,7 @@ gunicorn>=20.1.0
hashids>=1.3.1 hashids>=1.3.1
html2text>=2020.1.16 # Used only to clean comment field of secr/sreq html2text>=2020.1.16 # Used only to clean comment field of secr/sreq
html5lib>=1.1 # Only used in tests html5lib>=1.1 # Only used in tests
importlib-metadata<8.5.0 # indirect req of Markdown/inflect; https://github.com/ietf-tools/datatracker/issues/7924
inflect>= 6.0.2 inflect>= 6.0.2
jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/. jsonfield>=3.1.0 # for SubmissionCheck. This is https://github.com/bradjasper/django-jsonfield/.
jsonschema[format]>=4.2.1 jsonschema[format]>=4.2.1
@ -48,6 +49,7 @@ oic>=1.3 # Used only by tests
Pillow>=9.1.0 Pillow>=9.1.0
psycopg2>=2.9.6 psycopg2>=2.9.6
pyang>=2.5.3 pyang>=2.5.3
pydyf>0.8.0,<0.10.0 # until weasyprint adjusts for 0.10.0 and later
pyflakes>=2.4.0 pyflakes>=2.4.0
pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency pyopenssl>=22.0.0 # Used by urllib3.contrib, which is used by PyQuery but not marked as a dependency
pyquery>=1.4.3 pyquery>=1.4.3