feat: API to replace email alias generation commands (#7012)
* feat: DraftAliasGenerator class. Encapsulates logic from generate_draft_aliases.py
* refactor: Avoid circular imports
* feat: Add draft_aliases API endpoint
* feat: Add @requires_api_token decorator. Stolen from feat/rpc-api
* feat: Add token auth to draft_aliases endpoint
* feat: draft-aliases-from-json.py script. Parses output from the draft_aliases API call
* chore: Remove unused cruft
* refactor: Avoid shadowing "draft" name
* fix: Suppress empty lists from DraftAliasGenerator
* refactor: Use a GET instead of POST
* feat: GroupAliasGenerator class
* feat: group aliases API view
* fix: Handle domains array correctly
* fix: Suppress empty group aliases
* refactor: Generalize aliases-from-json.py script
* refactor: Same output fmt for draft and group alias APIs
* feat: Sort addresses for stability
* fix: Add "anything" virtual alias
* test: Test requires_api_token decorator
* feat: Harden is_valid_token against misconfig
* test: Test is_valid_token
* test: Test draft_aliases view
* test: Test group_aliases view
* test: Test DraftAliasGenerator
* fix: ise group is type "ise" in test data
* test: Fix logic in testManagementCommand. The test was incorrect - and fails when fixed. :-(
* test: Test GroupAliasGenerator. Test currently fails
* fix: Suppress empty -ads alias
* test: Fix group acronym copy/paste error. I *think* this must be what had been intended. The code does not look like it ever dealt with GroupHistory, so I'm pretty sure it wasn't meant to have the same acronym used by two different Groups at different times.
* test: Check draft .notify alias generation
* test: Cover get_draft_notify_emails()
parent ae01f6fb92
commit fa56223939
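Taken together, the commit replaces the old alias-generation management commands with token-protected JSON endpoints plus a standalone formatter script. A hedged end-to-end sketch of the intended flow (hostname, token, and output directory below are placeholders, and the /api/ mount point for ietf.api.urls is assumed, not stated in this diff):

```python
import json
import subprocess
import urllib.request

# Fetch the draft alias data from the new endpoint (token and host are illustrative).
req = urllib.request.Request(
    "https://datatracker.example.org/api/doc/draft-aliases/",  # assumed /api/ mount of ietf.api.urls
    headers={"X-Api-Key": "example-secret-token"},
)
with urllib.request.urlopen(req) as resp:
    payload = resp.read()

# Envelope shape: {"aliases": [{"alias": ..., "domains": [...], "addresses": [...]}, ...]}
json.loads(payload)

# Hand the JSON to the new standalone script, which writes <prefix>-aliases / <prefix>-virtual.
subprocess.run(
    ["python", "ietf/bin/aliases-from-json.py", "--prefix", "draft", "--output-dir", "/tmp"],
    input=payload,
    check=True,
)
```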
@@ -2,14 +2,75 @@

# This is not utils.py because Tastypie implicitly consumes ietf.api.utils.
# See ietf.api.__init__.py for details.
from functools import wraps
from typing import Callable, Optional, Union

from django.conf import settings
from django.http import HttpResponseForbidden


def is_valid_token(endpoint, token):
    # This is where we would consider integration with vault
    # Settings implementation for now.
    if hasattr(settings, "APP_API_TOKENS"):
        token_store = settings.APP_API_TOKENS
        if endpoint in token_store and token in token_store[endpoint]:
            return True
        if endpoint in token_store:
            endpoint_tokens = token_store[endpoint]
            # Be sure endpoints is a list or tuple so we don't accidentally use substring matching!
            if not isinstance(endpoint_tokens, (list, tuple)):
                endpoint_tokens = [endpoint_tokens]
            if token in endpoint_tokens:
                return True
    return False


def requires_api_token(func_or_endpoint: Optional[Union[Callable, str]] = None):
    """Validate API token before executing the wrapped method

    Usage:
    * Basic: endpoint defaults to the qualified name of the wrapped method. E.g., in ietf.api.views,

        @requires_api_token
        def my_view(request):
            ...

      will require a token for "ietf.api.views.my_view"

    * Custom endpoint: specify the endpoint explicitly

        @requires_api_token("ietf.api.views.some_other_thing")
        def my_view(request):
            ...

      will require a token for "ietf.api.views.some_other_thing"
    """

    def decorate(f):
        if _endpoint is None:
            fname = getattr(f, "__qualname__", None)
            if fname is None:
                raise TypeError(
                    "Cannot automatically decorate function that does not support __qualname__. "
                    "Explicitly set the endpoint."
                )
            endpoint = "{}.{}".format(f.__module__, fname)
        else:
            endpoint = _endpoint

        @wraps(f)
        def wrapped(request, *args, **kwargs):
            authtoken = request.META.get("HTTP_X_API_KEY", None)
            if authtoken is None or not is_valid_token(endpoint, authtoken):
                return HttpResponseForbidden()
            return f(request, *args, **kwargs)

        return wrapped

    # Magic to allow decorator to be used with or without parentheses
    if callable(func_or_endpoint):
        func = func_or_endpoint
        _endpoint = None
        return decorate(func)
    else:
        _endpoint = func_or_endpoint
        return decorate

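The token check is driven entirely by the APP_API_TOKENS setting: a mapping from endpoint name to a list (or tuple) of accepted tokens, checked against the X-Api-Key request header. A minimal wiring sketch, with illustrative setting values and a hypothetical view name; the endpoint string is the one this commit actually uses for both alias views:

```python
# In settings (illustrative values, not part of this commit):
APP_API_TOKENS = {
    "ietf.api.views.email_aliases": ["example-secret-token"],  # list/tuple of accepted tokens
}

# In a views module (hypothetical view; the commit's real views are draft_aliases/group_aliases):
from ietf.api.ietf_utils import requires_api_token

@requires_api_token("ietf.api.views.email_aliases")
def my_protected_view(request):
    ...  # only reached when the X-Api-Key header carries an accepted token
```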
@@ -4,6 +4,7 @@
import datetime
import json
import html
import mock
import os
import sys

@@ -12,7 +13,8 @@ from pathlib import Path

from django.apps import apps
from django.conf import settings
from django.test import Client
from django.http import HttpResponseForbidden
from django.test import Client, RequestFactory
from django.test.utils import override_settings
from django.urls import reverse as urlreverse
from django.utils import timezone

@@ -38,6 +40,8 @@ from ietf.utils.mail import outbox, get_payload_text
from ietf.utils.models import DumpInfo
from ietf.utils.test_utils import TestCase, login_testing_unauthorized, reload_db_objects

from .ietf_utils import is_valid_token, requires_api_token

OMITTED_APPS = (
    'ietf.secr.meetings',
    'ietf.secr.proceedings',

@@ -780,7 +784,74 @@ class CustomApiTests(TestCase):
        url = urlreverse('ietf.meeting.views.api_get_session_materials', kwargs={'session_id': session.pk})
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)

    @override_settings(APP_API_TOKENS={"ietf.api.views.email_aliases": ["valid-token"]})
    @mock.patch("ietf.api.views.DraftAliasGenerator")
    def test_draft_aliases(self, mock):
        mock.return_value = (("alias1", ("a1", "a2")), ("alias2", ("a3", "a4")))
        url = urlreverse("ietf.api.views.draft_aliases")
        r = self.client.get(url, headers={"X-Api-Key": "valid-token"})
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.headers["Content-type"], "application/json")
        self.assertEqual(
            json.loads(r.content),
            {
                "aliases": [
                    {"alias": "alias1", "domains": ["ietf"], "addresses": ["a1", "a2"]},
                    {"alias": "alias2", "domains": ["ietf"], "addresses": ["a3", "a4"]},
                ]}
        )
        # some invalid cases
        self.assertEqual(
            self.client.get(url, headers={}).status_code,
            403,
        )
        self.assertEqual(
            self.client.get(url, headers={"X-Api-Key": "something-else"}).status_code,
            403,
        )
        self.assertEqual(
            self.client.post(url, headers={"X-Api-Key": "something-else"}).status_code,
            403,
        )
        self.assertEqual(
            self.client.post(url, headers={"X-Api-Key": "valid-token"}).status_code,
            405,
        )

    @override_settings(APP_API_TOKENS={"ietf.api.views.email_aliases": ["valid-token"]})
    @mock.patch("ietf.api.views.GroupAliasGenerator")
    def test_group_aliases(self, mock):
        mock.return_value = (("alias1", ("ietf",), ("a1", "a2")), ("alias2", ("ietf", "iab"), ("a3", "a4")))
        url = urlreverse("ietf.api.views.group_aliases")
        r = self.client.get(url, headers={"X-Api-Key": "valid-token"})
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.headers["Content-type"], "application/json")
        self.assertEqual(
            json.loads(r.content),
            {
                "aliases": [
                    {"alias": "alias1", "domains": ["ietf"], "addresses": ["a1", "a2"]},
                    {"alias": "alias2", "domains": ["ietf", "iab"], "addresses": ["a3", "a4"]},
                ]}
        )
        # some invalid cases
        self.assertEqual(
            self.client.get(url, headers={}).status_code,
            403,
        )
        self.assertEqual(
            self.client.get(url, headers={"X-Api-Key": "something-else"}).status_code,
            403,
        )
        self.assertEqual(
            self.client.post(url, headers={"X-Api-Key": "something-else"}).status_code,
            403,
        )
        self.assertEqual(
            self.client.post(url, headers={"X-Api-Key": "valid-token"}).status_code,
            405,
        )


class DirectAuthApiTests(TestCase):

@@ -1133,3 +1204,85 @@ class RfcdiffSupportTests(TestCase):
        url = urlreverse(self.target_view, kwargs={'name': name})
        r = self.client.get(url)
        self.assertEqual(r.status_code, 404)


class TokenTests(TestCase):
    @override_settings(APP_API_TOKENS={"known.endpoint": ["token in a list"], "oops": "token as a str"})
    def test_is_valid_token(self):
        # various invalid cases
        self.assertFalse(is_valid_token("unknown.endpoint", "token in a list"))
        self.assertFalse(is_valid_token("known.endpoint", "token"))
        self.assertFalse(is_valid_token("known.endpoint", "token as a str"))
        self.assertFalse(is_valid_token("oops", "token"))
        self.assertFalse(is_valid_token("oops", "token in a list"))
        # the only valid cases
        self.assertTrue(is_valid_token("known.endpoint", "token in a list"))
        self.assertTrue(is_valid_token("oops", "token as a str"))

    @mock.patch("ietf.api.ietf_utils.is_valid_token")
    def test_requires_api_token(self, mock_is_valid_token):
        called = False

        @requires_api_token
        def fn_to_wrap(request, *args, **kwargs):
            nonlocal called
            called = True
            return request, args, kwargs

        req_factory = RequestFactory()
        arg = object()
        kwarg = object()

        # No X-Api-Key header
        mock_is_valid_token.return_value = False
        val = fn_to_wrap(
            req_factory.get("/some/url", headers={}),
            arg,
            kwarg=kwarg,
        )
        self.assertTrue(isinstance(val, HttpResponseForbidden))
        self.assertFalse(mock_is_valid_token.called)
        self.assertFalse(called)

        # Bad X-Api-Key header (not resetting the mock, it was not used yet)
        val = fn_to_wrap(
            req_factory.get("/some/url", headers={"X-Api-Key": "some-value"}),
            arg,
            kwarg=kwarg,
        )
        self.assertTrue(isinstance(val, HttpResponseForbidden))
        self.assertTrue(mock_is_valid_token.called)
        self.assertEqual(
            mock_is_valid_token.call_args[0],
            (fn_to_wrap.__module__ + "." + fn_to_wrap.__qualname__, "some-value"),
        )
        self.assertFalse(called)

        # Valid header
        mock_is_valid_token.reset_mock()
        mock_is_valid_token.return_value = True
        request = req_factory.get("/some/url", headers={"X-Api-Key": "some-value"})
        val = fn_to_wrap(
            request,
            arg,
            kwarg=kwarg,
        )
        self.assertEqual(val, (request, (arg,), {"kwarg": kwarg}))
        self.assertTrue(mock_is_valid_token.called)
        self.assertEqual(
            mock_is_valid_token.call_args[0],
            (fn_to_wrap.__module__ + "." + fn_to_wrap.__qualname__, "some-value"),
        )
        self.assertTrue(called)

        # Test the endpoint setting
        @requires_api_token("endpoint")
        def another_fn_to_wrap(request):
            return "yep"

        val = another_fn_to_wrap(request)
        self.assertEqual(
            mock_is_valid_token.call_args[0],
            ("endpoint", "some-value"),
        )

@@ -22,8 +22,12 @@ urlpatterns = [
    url(r'^v2/person/person', api_views.ApiV2PersonExportView.as_view()),
    #
    # --- Custom API endpoints, sorted alphabetically ---
    # Email alias information for drafts
    url(r'^doc/draft-aliases/$', api_views.draft_aliases),
    # GPRD: export of personal information for the logged-in person
    url(r'^export/personal-information/$', api_views.PersonalInformationExportView.as_view()),
    # Email alias information for groups
    url(r'^group/group-aliases/$', api_views.group_aliases),
    # Let IESG members set positions programmatically
    url(r'^iesg/position', views_ballot.api_set_position),
    # Let Meetecho set session video URLs

@@ -2,42 +2,39 @@
# -*- coding: utf-8 -*-

import json
import pytz
import re

from jwcrypto.jwk import JWK

import pytz
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.http import HttpResponse, Http404
from django.http import HttpResponse, Http404, JsonResponse
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.gzip import gzip_page
from django.views.generic.detail import DetailView

from jwcrypto.jwk import JWK
from tastypie.exceptions import BadRequest
from tastypie.utils.mime import determine_format, build_content_type
from tastypie.utils import is_valid_jsonp_callback_value
from tastypie.serializers import Serializer

import debug  # pyflakes:ignore
from tastypie.utils import is_valid_jsonp_callback_value
from tastypie.utils.mime import determine_format, build_content_type

import ietf
from ietf.person.models import Person, Email
from ietf.api import _api_list
from ietf.api.ietf_utils import is_valid_token, requires_api_token
from ietf.api.serializer import JsonExportMixin
from ietf.api.ietf_utils import is_valid_token
from ietf.doc.utils import fuzzy_find_documents
from ietf.ietfauth.views import send_account_creation_email
from ietf.doc.utils import DraftAliasGenerator, fuzzy_find_documents
from ietf.group.utils import GroupAliasGenerator
from ietf.ietfauth.utils import role_required
from ietf.ietfauth.views import send_account_creation_email
from ietf.meeting.models import Meeting
from ietf.nomcom.models import Volunteer, NomCom
from ietf.person.models import Person, Email
from ietf.stats.models import MeetingRegistration
from ietf.utils import log
from ietf.utils.decorators import require_api_key

@@ -453,3 +450,41 @@ def directauth(request):

    else:
        return HttpResponse(status=405)


@requires_api_token("ietf.api.views.email_aliases")
@csrf_exempt
def draft_aliases(request):
    if request.method == "GET":
        return JsonResponse(
            {
                "aliases": [
                    {
                        "alias": alias,
                        "domains": ["ietf"],
                        "addresses": address_list,
                    }
                    for alias, address_list in DraftAliasGenerator()
                ]
            }
        )
    return HttpResponse(status=405)


@requires_api_token("ietf.api.views.email_aliases")
@csrf_exempt
def group_aliases(request):
    if request.method == "GET":
        return JsonResponse(
            {
                "aliases": [
                    {
                        "alias": alias,
                        "domains": domains,
                        "addresses": address_list,
                    }
                    for alias, domains, address_list in GroupAliasGenerator()
                ]
            }
        )
    return HttpResponse(status=405)

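Both views accept only GET (anything else is answered with 405, and a missing or wrong X-Api-Key yields 403 from the decorator) and share one response envelope. An illustrative payload with made-up values; draft_aliases always reports domains as ["ietf"], group_aliases reports whatever the GroupAliasGenerator yields:

```python
# Illustrative response body (alias names and addresses invented).
{
    "aliases": [
        {"alias": "draft-ietf-mars-test.authors", "domains": ["ietf"], "addresses": ["author@example.com"]},
        {"alias": "mars-chairs", "domains": ["ietf"], "addresses": ["chair@example.com", "secretary@example.com"]},
    ]
}
```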
ietf/bin/aliases-from-json.py (new file, 99 lines)
@@ -0,0 +1,99 @@
# Copyright The IETF Trust 2024, All Rights Reserved
#
# Uses only Python standard lib
#

import argparse
import datetime
import json
import shutil
import stat
import sys

from pathlib import Path
from tempfile import TemporaryDirectory

# Default options
POSTCONFIRM_PATH = "/a/postconfirm/wrapper"
VDOMAIN = "virtual.ietf.org"

# Map from domain label to dns domain
ADOMAINS = {
    "ietf": "ietf.org",
    "irtf": "irtf.org",
    "iab": "iab.org",
}


def generate_files(records, adest, vdest, postconfirm, vdomain):
    """Generate files from an iterable of records

    If adest or vdest exists as a file, it will be overwritten. If it is a directory, files
    with the default names (draft-aliases and draft-virtual) will be created, but existing
    files _will not_ be overwritten!
    """
    with TemporaryDirectory() as tmpdir:
        tmppath = Path(tmpdir)
        apath = tmppath / "aliases"
        vpath = tmppath / "virtual"

        with apath.open("w") as afile, vpath.open("w") as vfile:
            date = datetime.datetime.now(datetime.timezone.utc)
            signature = f"# Generated by {Path(__file__).absolute()} at {date}\n"
            afile.write(signature)
            vfile.write(signature)
            vfile.write(f"{vdomain} anything\n")

            for item in records:
                alias = item["alias"]
                domains = item["domains"]
                address_list = item["addresses"]
                filtername = f"xfilter-{alias}"
                afile.write(f'{filtername + ":":64s} "|{postconfirm} filter expand-{alias} {vdomain}"\n')
                for dom in domains:
                    vfile.write(f"{f'{alias}@{ADOMAINS[dom]}':64s} {filtername}\n")
                vfile.write(f"{f'expand-{alias}@{vdomain}':64s} {', '.join(sorted(address_list))}\n")

        perms = stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
        apath.chmod(perms)
        vpath.chmod(perms)
        shutil.move(apath, adest)
        shutil.move(vpath, vdest)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Convert a JSON stream of draft alias definitions into alias / virtual alias files."
    )
    parser.add_argument(
        "--prefix",
        required=True,
        help="Prefix for output files. Files will be named <prefix>-aliases and <prefix>-virtual."
    )
    parser.add_argument(
        "--output-dir",
        default="./",
        type=Path,
        help="Destination for output files.",
    )
    parser.add_argument(
        "--postconfirm",
        default=POSTCONFIRM_PATH,
        help=f"Full path to postconfirm executable (defaults to {POSTCONFIRM_PATH})",
    )
    parser.add_argument(
        "--vdomain",
        default=VDOMAIN,
        help=f"Virtual domain (defaults to {VDOMAIN})",
    )
    args = parser.parse_args()
    if not args.output_dir.is_dir():
        sys.stderr.write("Error: output-dir must be a directory")
    data = json.load(sys.stdin)
    generate_files(
        data["aliases"],
        adest=args.output_dir / f"{args.prefix}-aliases",
        vdest=args.output_dir / f"{args.prefix}-virtual",
        postconfirm=args.postconfirm,
        vdomain=args.vdomain,
    )

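For a sense of what the script emits, here is a small sketch: the record values are invented, and it assumes generate_files, POSTCONFIRM_PATH and VDOMAIN are in scope (e.g. when experimenting inside the script itself):

```python
from pathlib import Path

record = {"alias": "mars-chairs", "domains": ["ietf", "irtf"], "addresses": ["b@example.com", "a@example.com"]}
generate_files([record], adest=Path("./group-aliases"), vdest=Path("./group-virtual"),
               postconfirm=POSTCONFIRM_PATH, vdomain=VDOMAIN)
# ./group-aliases then contains (besides the generated-by signature line) roughly:
#   xfilter-mars-chairs:    "|/a/postconfirm/wrapper filter expand-mars-chairs virtual.ietf.org"
# ./group-virtual then contains roughly:
#   virtual.ietf.org anything
#   mars-chairs@ietf.org    xfilter-mars-chairs
#   mars-chairs@irtf.org    xfilter-mars-chairs
#   expand-mars-chairs@virtual.ietf.org    a@example.com, b@example.com
```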
@@ -45,7 +45,7 @@ from ietf.doc.factories import ( DocumentFactory, DocEventFactory, CharterFactor
    StatusChangeFactory, DocExtResourceFactory, RgDraftFactory, BcpFactory)
from ietf.doc.forms import NotifyForm
from ietf.doc.fields import SearchableDocumentsField
from ietf.doc.utils import create_ballot_if_not_open, uppercase_std_abbreviated_name
from ietf.doc.utils import create_ballot_if_not_open, uppercase_std_abbreviated_name, DraftAliasGenerator
from ietf.group.models import Group, Role
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.ipr.factories import HolderIprDisclosureFactory

@@ -2291,6 +2291,7 @@ class GenerateDraftAliasesTests(TestCase):
                "xfilter-" + doc3.name + ".ad",
                "xfilter-" + doc3.name + ".authors",
                "xfilter-" + doc3.name + ".chairs",
                "xfilter-" + doc3.name + ".all",
                "xfilter-" + doc5.name,
                "xfilter-" + doc5.name + ".authors",
                "xfilter-" + doc5.name + ".all",

@@ -2307,6 +2308,148 @@
        ]:
            self.assertNotIn(x, vcontent)

    @override_settings(TOOLS_SERVER="tools.example.org", DRAFT_ALIAS_DOMAIN="draft.example.org")
    def test_generator_class(self):
        """The DraftAliasGenerator should generate the same lists as the old mgmt cmd"""
        a_month_ago = (timezone.now() - datetime.timedelta(30)).astimezone(RPC_TZINFO)
        a_month_ago = a_month_ago.replace(hour=0, minute=0, second=0, microsecond=0)
        ad = RoleFactory(
            name_id="ad", group__type_id="area", group__state_id="active"
        ).person
        shepherd = PersonFactory()
        author1 = PersonFactory()
        author2 = PersonFactory()
        author3 = PersonFactory()
        author4 = PersonFactory()
        author5 = PersonFactory()
        author6 = PersonFactory()
        mars = GroupFactory(type_id="wg", acronym="mars")
        marschairman = PersonFactory(user__username="marschairman")
        mars.role_set.create(
            name_id="chair", person=marschairman, email=marschairman.email()
        )
        doc1 = IndividualDraftFactory(authors=[author1], shepherd=shepherd.email(), ad=ad)
        doc2 = WgDraftFactory(
            name="draft-ietf-mars-test", group__acronym="mars", authors=[author2], ad=ad
        )
        doc2.notify = f"{doc2.name}.ad@draft.example.org"
        doc2.save()
        doc3 = WgDraftFactory.create(
            name="draft-ietf-mars-finished",
            group__acronym="mars",
            authors=[author3],
            ad=ad,
            std_level_id="ps",
            states=[("draft", "rfc"), ("draft-iesg", "pub")],
            time=a_month_ago,
        )
        rfc3 = WgRfcFactory()
        DocEventFactory.create(doc=rfc3, type="published_rfc", time=a_month_ago)
        doc3.relateddocument_set.create(relationship_id="became_rfc", target=rfc3)
        doc4 = WgDraftFactory.create(
            authors=[author4, author5],
            ad=ad,
            std_level_id="ps",
            states=[("draft", "rfc"), ("draft-iesg", "pub")],
            time=datetime.datetime(2010, 10, 10, tzinfo=ZoneInfo(settings.TIME_ZONE)),
        )
        rfc4 = WgRfcFactory()
        DocEventFactory.create(
            doc=rfc4,
            type="published_rfc",
            time=datetime.datetime(2010, 10, 10, tzinfo=RPC_TZINFO),
        )
        doc4.relateddocument_set.create(relationship_id="became_rfc", target=rfc4)
        doc5 = IndividualDraftFactory(authors=[author6])

        output = [(alias, alist) for alias, alist in DraftAliasGenerator()]
        alias_dict = dict(output)
        self.assertEqual(len(alias_dict), len(output))  # no duplicate aliases
        expected_dict = {
            doc1.name: [author1.email_address()],
            doc1.name + ".ad": [ad.email_address()],
            doc1.name + ".authors": [author1.email_address()],
            doc1.name + ".shepherd": [shepherd.email_address()],
            doc1.name
            + ".all": [
                author1.email_address(),
                ad.email_address(),
                shepherd.email_address(),
            ],
            doc2.name: [author2.email_address()],
            doc2.name + ".ad": [ad.email_address()],
            doc2.name + ".authors": [author2.email_address()],
            doc2.name + ".chairs": [marschairman.email_address()],
            doc2.name + ".notify": [ad.email_address()],
            doc2.name
            + ".all": [
                author2.email_address(),
                ad.email_address(),
                marschairman.email_address(),
            ],
            doc3.name: [author3.email_address()],
            doc3.name + ".ad": [ad.email_address()],
            doc3.name + ".authors": [author3.email_address()],
            doc3.name + ".chairs": [marschairman.email_address()],
            doc3.name
            + ".all": [
                author3.email_address(),
                ad.email_address(),
                marschairman.email_address(),
            ],
            doc5.name: [author6.email_address()],
            doc5.name + ".authors": [author6.email_address()],
            doc5.name + ".all": [author6.email_address()],
        }
        # Sort lists for comparison
        self.assertEqual(
            {k: sorted(v) for k, v in alias_dict.items()},
            {k: sorted(v) for k, v in expected_dict.items()},
        )

    @override_settings(TOOLS_SERVER="tools.example.org", DRAFT_ALIAS_DOMAIN="draft.example.org")
    def test_get_draft_notify_emails(self):
        ad = PersonFactory()
        shepherd = PersonFactory()
        author = PersonFactory()
        doc = DocumentFactory(authors=[author], shepherd=shepherd.email(), ad=ad)
        generator = DraftAliasGenerator()

        doc.notify = f"{doc.name}@draft.example.org"
        doc.save()
        self.assertCountEqual(generator.get_draft_notify_emails(doc), [author.email_address()])

        doc.notify = f"{doc.name}.ad@draft.example.org"
        doc.save()
        self.assertCountEqual(generator.get_draft_notify_emails(doc), [ad.email_address()])

        doc.notify = f"{doc.name}.shepherd@draft.example.org"
        doc.save()
        self.assertCountEqual(generator.get_draft_notify_emails(doc), [shepherd.email_address()])

        doc.notify = f"{doc.name}.all@draft.example.org"
        doc.save()
        self.assertCountEqual(
            generator.get_draft_notify_emails(doc),
            [ad.email_address(), author.email_address(), shepherd.email_address()]
        )

        doc.notify = f"{doc.name}.notify@draft.example.org"
        doc.save()
        self.assertCountEqual(generator.get_draft_notify_emails(doc), [])

        doc.notify = f"{doc.name}.ad@somewhere.example.com"
        doc.save()
        self.assertCountEqual(generator.get_draft_notify_emails(doc), [f"{doc.name}.ad@somewhere.example.com"])

        doc.notify = f"somebody@example.com, nobody@example.com, {doc.name}.ad@tools.example.org"
        doc.save()
        self.assertCountEqual(
            generator.get_draft_notify_emails(doc),
            ["somebody@example.com", "nobody@example.com", ad.email_address()]
        )


class EmailAliasesTests(TestCase):

    def setUp(self):

@@ -13,7 +13,7 @@ import textwrap

from collections import defaultdict, namedtuple, Counter
from dataclasses import dataclass
from typing import Union
from typing import Iterator, Union
from zoneinfo import ZoneInfo

from django.conf import settings

@@ -41,7 +41,7 @@ from ietf.ietfauth.utils import has_role, is_authorized_in_doc_stream, is_indivi
from ietf.person.models import Person
from ietf.review.models import ReviewWish
from ietf.utils import draft, log
from ietf.utils.mail import send_mail
from ietf.utils.mail import parseaddr, send_mail
from ietf.mailtrigger.utils import gather_address_lists
from ietf.utils.timezone import date_today, datetime_from_date, datetime_today, DEADLINE_TZINFO
from ietf.utils.xmldraft import XMLDraft

@@ -1258,3 +1258,125 @@ def bibxml_for_draft(doc, rev=None):

    return render_to_string('doc/bibxml.xml', {'name':name, 'doc':doc, 'doc_bibtype':'I-D', 'settings':settings})


class DraftAliasGenerator:
    days = 2 * 365

    def get_draft_ad_emails(self, doc):
        """Get AD email addresses for the given draft, if any."""
        from ietf.group.utils import get_group_ad_emails  # avoid circular import
        ad_emails = set()
        # If working group document, return current WG ADs
        if doc.group and doc.group.acronym != "none":
            ad_emails.update(get_group_ad_emails(doc.group))
        # Document may have an explicit AD set
        if doc.ad:
            ad_emails.add(doc.ad.email_address())
        return ad_emails

    def get_draft_chair_emails(self, doc):
        """Get chair email addresses for the given draft, if any."""
        from ietf.group.utils import get_group_role_emails  # avoid circular import
        chair_emails = set()
        if doc.group:
            chair_emails.update(get_group_role_emails(doc.group, ["chair", "secr"]))
        return chair_emails

    def get_draft_shepherd_email(self, doc):
        """Get shepherd email addresses for the given draft, if any."""
        shepherd_email = set()
        if doc.shepherd:
            shepherd_email.add(doc.shepherd.email_address())
        return shepherd_email

    def get_draft_authors_emails(self, doc):
        """Get list of authors for the given draft."""
        author_emails = set()
        for author in doc.documentauthor_set.all():
            if author.email and author.email.email_address():
                author_emails.add(author.email.email_address())
        return author_emails

    def get_draft_notify_emails(self, doc):
        """Get list of email addresses to notify for the given draft."""
        ad_email_alias_regex = r"^%s.ad@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER)
        all_email_alias_regex = r"^%s.all@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER)
        author_email_alias_regex = r"^%s@(%s|%s)$" % (doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER)
        notify_email_alias_regex = r"^%s.notify@(%s|%s)$" % (
            doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER)
        shepherd_email_alias_regex = r"^%s.shepherd@(%s|%s)$" % (
            doc.name, settings.DRAFT_ALIAS_DOMAIN, settings.TOOLS_SERVER)
        notify_emails = set()
        if doc.notify:
            for e in doc.notify.split(','):
                e = e.strip()
                if re.search(ad_email_alias_regex, e):
                    notify_emails.update(self.get_draft_ad_emails(doc))
                elif re.search(author_email_alias_regex, e):
                    notify_emails.update(self.get_draft_authors_emails(doc))
                elif re.search(shepherd_email_alias_regex, e):
                    notify_emails.update(self.get_draft_shepherd_email(doc))
                elif re.search(all_email_alias_regex, e):
                    notify_emails.update(self.get_draft_ad_emails(doc))
                    notify_emails.update(self.get_draft_authors_emails(doc))
                    notify_emails.update(self.get_draft_shepherd_email(doc))
                elif re.search(notify_email_alias_regex, e):
                    pass
                else:
                    (name, email) = parseaddr(e)
                    notify_emails.add(email)
        return notify_emails

    def __iter__(self) -> Iterator[tuple[str, list[str]]]:
        # Internet-Drafts with active status or expired within self.days
        show_since = timezone.now() - datetime.timedelta(days=self.days)
        drafts = Document.objects.filter(type_id="draft")
        active_drafts = drafts.filter(states__slug='active')
        inactive_recent_drafts = drafts.exclude(states__slug='active').filter(expires__gte=show_since)
        interesting_drafts = active_drafts | inactive_recent_drafts

        for this_draft in interesting_drafts.distinct().iterator():
            # Omit drafts that became RFCs, unless they were published in the last DEFAULT_YEARS
            if this_draft.get_state_slug() == "rfc":
                rfc = this_draft.became_rfc()
                log.assertion("rfc is not None")
                if rfc.latest_event(type='published_rfc').time < show_since:
                    continue

            alias = this_draft.name
            all = set()

            # no suffix and .authors are the same list
            emails = self.get_draft_authors_emails(this_draft)
            all.update(emails)
            if emails:
                yield alias, list(emails)
                yield alias + ".authors", list(emails)

            # .chairs = group chairs
            emails = self.get_draft_chair_emails(this_draft)
            if emails:
                all.update(emails)
                yield alias + ".chairs", list(emails)

            # .ad = sponsoring AD / WG AD (WG document)
            emails = self.get_draft_ad_emails(this_draft)
            if emails:
                all.update(emails)
                yield alias + ".ad", list(emails)

            # .notify = notify email list from the Document
            emails = self.get_draft_notify_emails(this_draft)
            if emails:
                all.update(emails)
                yield alias + ".notify", list(emails)

            # .shepherd = shepherd email from the Document
            emails = self.get_draft_shepherd_email(this_draft)
            if emails:
                all.update(emails)
                yield alias + ".shepherd", list(emails)

            # .all = everything from above
            if all:
                yield alias + ".all", list(all)

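DraftAliasGenerator yields one (alias, address_list) pair per alias, producing a family of aliases per draft; a minimal consumption sketch mirroring how ietf.api.views.draft_aliases serializes it (to be run inside the datatracker environment):

```python
from ietf.doc.utils import DraftAliasGenerator

for alias, addresses in DraftAliasGenerator():
    # alias is the draft name plus an optional suffix:
    # "", ".authors", ".chairs", ".ad", ".notify", ".shepherd", ".all"
    print(f"{alias}: {', '.join(sorted(addresses))}")
```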
@@ -20,7 +20,7 @@ import debug  # pyflakes:ignore
from ietf.doc.factories import DocumentFactory, WgDraftFactory, EditorialDraftFactory
from ietf.doc.models import DocEvent, RelatedDocument, Document
from ietf.group.models import Role, Group
from ietf.group.utils import get_group_role_emails, get_child_group_role_emails, get_group_ad_emails
from ietf.group.utils import get_group_role_emails, get_child_group_role_emails, get_group_ad_emails, GroupAliasGenerator
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.person.factories import PersonFactory, EmailFactory
from ietf.person.models import Person

@@ -163,7 +163,7 @@ class GenerateGroupAliasesTests(TestCase):
        recent = GroupFactory(type_id='wg', acronym='recent', parent=area, state_id='conclude', time=a_month_ago)
        recentchair = PersonFactory(user__username='recentchair')
        recent.role_set.create(name_id='chair', person=recentchair, email=recentchair.email())
        wayold = GroupFactory(type_id='wg', acronym='recent', parent=area, state_id='conclude', time=a_decade_ago)
        wayold = GroupFactory(type_id='wg', acronym='wayold', parent=area, state_id='conclude', time=a_decade_ago)
        wayoldchair = PersonFactory(user__username='wayoldchair')
        wayold.role_set.create(name_id='chair', person=wayoldchair, email=wayoldchair.email())
        role2 = RoleFactory(name_id='ad', group__type_id='area', group__acronym='done', group__state_id='conclude')

@@ -220,7 +220,7 @@
            testrgchair.email_address(),
            testragchair.email_address(),
        ]]))
        self.assertFalse(all([x in vcontent for x in [
        self.assertFalse(any([x in vcontent for x in [
            done_ad.email_address(),
            wayoldchair.email_address(),
            individual.email_address(),

@@ -248,6 +248,64 @@
            'xfilter-' + wayold.acronym + '-chairs',
        ]]))

    def test_generator_class(self):
        """The GroupAliasGenerator should generate the same lists as the old mgmt cmd"""
        # clean out test fixture group roles we don't need for this test
        Role.objects.filter(
            group__acronym__in=["farfut", "iab", "ietf", "irtf", "ise", "ops", "rsab", "rsoc", "sops"]
        ).delete()

        a_month_ago = timezone.now() - datetime.timedelta(30)
        a_decade_ago = timezone.now() - datetime.timedelta(3650)
        role1 = RoleFactory(name_id='ad', group__type_id='area', group__acronym='myth', group__state_id='active')
        area = role1.group
        ad = role1.person
        mars = GroupFactory(type_id='wg', acronym='mars', parent=area)
        marschair = PersonFactory(user__username='marschair')
        mars.role_set.create(name_id='chair', person=marschair, email=marschair.email())
        marssecr = PersonFactory(user__username='marssecr')
        mars.role_set.create(name_id='secr', person=marssecr, email=marssecr.email())
        ames = GroupFactory(type_id='wg', acronym='ames', parent=area)
        ameschair = PersonFactory(user__username='ameschair')
        ames.role_set.create(name_id='chair', person=ameschair, email=ameschair.email())
        recent = GroupFactory(type_id='wg', acronym='recent', parent=area, state_id='conclude', time=a_month_ago)
        recentchair = PersonFactory(user__username='recentchair')
        recent.role_set.create(name_id='chair', person=recentchair, email=recentchair.email())
        wayold = GroupFactory(type_id='wg', acronym='wayold', parent=area, state_id='conclude', time=a_decade_ago)
        wayoldchair = PersonFactory(user__username='wayoldchair')
        wayold.role_set.create(name_id='chair', person=wayoldchair, email=wayoldchair.email())
        # create a "done" group that should not be included anywhere
        RoleFactory(name_id='ad', group__type_id='area', group__acronym='done', group__state_id='conclude')
        irtf = Group.objects.get(acronym='irtf')
        testrg = GroupFactory(type_id='rg', acronym='testrg', parent=irtf)
        testrgchair = PersonFactory(user__username='testrgchair')
        testrg.role_set.create(name_id='chair', person=testrgchair, email=testrgchair.email())
        testrag = GroupFactory(type_id='rg', acronym='testrag', parent=irtf)
        testragchair = PersonFactory(user__username='testragchair')
        testrag.role_set.create(name_id='chair', person=testragchair, email=testragchair.email())

        output = [(alias, (domains, alist)) for alias, domains, alist in GroupAliasGenerator()]
        alias_dict = dict(output)
        self.maxDiff = None
        self.assertEqual(len(alias_dict), len(output))  # no duplicate aliases
        expected_dict = {
            area.acronym + "-ads": (["ietf"], [ad.email_address()]),
            area.acronym + "-chairs": (["ietf"], [ad.email_address(), marschair.email_address(), marssecr.email_address(), ameschair.email_address()]),
            mars.acronym + "-ads": (["ietf"], [ad.email_address()]),
            mars.acronym + "-chairs": (["ietf"], [marschair.email_address(), marssecr.email_address()]),
            ames.acronym + "-ads": (["ietf"], [ad.email_address()]),
            ames.acronym + "-chairs": (["ietf"], [ameschair.email_address()]),
            recent.acronym + "-ads": (["ietf"], [ad.email_address()]),
            recent.acronym + "-chairs": (["ietf"], [recentchair.email_address()]),
            testrg.acronym + "-chairs": (["ietf", "irtf"], [testrgchair.email_address()]),
            testrag.acronym + "-chairs": (["ietf", "irtf"], [testragchair.email_address()]),
        }
        # Sort lists for comparison
        self.assertEqual(
            {k: (sorted(doms), sorted(addrs)) for k, (doms, addrs) in alias_dict.items()},
            {k: (sorted(doms), sorted(addrs)) for k, (doms, addrs) in expected_dict.items()},
        )


class GroupRoleEmailTests(TestCase):

@@ -1,11 +1,12 @@
# Copyright The IETF Trust 2012-2023, All Rights Reserved
# -*- coding: utf-8 -*-

import datetime

from pathlib import Path

from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.urls import reverse as urlreverse

@@ -353,3 +354,74 @@ def update_role_set(group, role_name, new_value, by):
        e.save()

    return added, removed


class GroupAliasGenerator:
    days = 5 * 365
    active_states = ["active", "bof", "proposed"]
    group_types = [
        "wg",
        "rg",
        "rag",
        "dir",
        "team",
        "review",
        "program",
        "rfcedtyp",
        "edappr",
        "edwg",
    ]  # This should become groupfeature driven...
    no_ad_group_types = ["rg", "rag", "team", "program", "rfcedtyp", "edappr", "edwg"]

    def __iter__(self):
        show_since = timezone.now() - datetime.timedelta(days=self.days)

        # Loop through each group type and build -ads and -chairs entries
        for g in self.group_types:
            domains = ["ietf"]
            if g in ("rg", "rag"):
                domains.append("irtf")
            if g == "program":
                domains.append("iab")

            entries = Group.objects.filter(type=g).all()
            active_entries = entries.filter(state__in=self.active_states)
            inactive_recent_entries = entries.exclude(
                state__in=self.active_states
            ).filter(time__gte=show_since)
            interesting_entries = active_entries | inactive_recent_entries

            for e in interesting_entries.distinct().iterator():
                name = e.acronym

                # Research groups, teams, and programs do not have -ads lists
                if not g in self.no_ad_group_types:
                    ad_emails = get_group_ad_emails(e)
                    if ad_emails:
                        yield name + "-ads", domains, list(ad_emails)
                # All group types have -chairs lists
                chair_emails = get_group_role_emails(e, ["chair", "secr"])
                if chair_emails:
                    yield name + "-chairs", domains, list(chair_emails)

        # The area lists include every chair in active working groups in the area
        areas = Group.objects.filter(type="area").all()
        active_areas = areas.filter(state__in=self.active_states)
        for area in active_areas:
            name = area.acronym
            area_ad_emails = get_group_role_emails(area, ["pre-ad", "ad", "chair"])
            if area_ad_emails:
                yield name + "-ads", ["ietf"], list(area_ad_emails)
            chair_emails = get_child_group_role_emails(area, ["chair", "secr"]) | area_ad_emails
            if chair_emails:
                yield name + "-chairs", ["ietf"], list(chair_emails)

        # Other groups with chairs that require Internet-Draft submission approval
        gtypes = GroupTypeName.objects.values_list("slug", flat=True)
        special_groups = Group.objects.filter(
            type__features__req_subm_approval=True, acronym__in=gtypes, state="active"
        )
        for group in special_groups:
            chair_emails = get_group_role_emails(group, ["chair", "delegate"])
            if chair_emails:
                yield group.acronym + "-chairs", ["ietf"], list(chair_emails)

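GroupAliasGenerator yields three-tuples, adding the domain labels ("ietf", plus "irtf" or "iab" where the group type calls for it) that aliases-from-json.py later maps to DNS domains; a brief consumption sketch (to be run inside the datatracker environment):

```python
from ietf.group.utils import GroupAliasGenerator

for alias, domains, addresses in GroupAliasGenerator():
    # e.g. ("mars-chairs", ["ietf"], ["chair@example.com", "secretary@example.com"])
    print(alias, domains, sorted(addresses))
```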
@@ -84,7 +84,7 @@ def make_immutable_base_data():
    create_person(iab, "chair")
    create_person(iab, "member")

    ise = create_group(name="Independent Submission Editor", acronym="ise", type_id="rfcedtyp")
    ise = create_group(name="Independent Submission Editor", acronym="ise", type_id="ise")
    create_person(ise, "chair")

    rsoc = create_group(name="RFC Series Oversight Committee", acronym="rsoc", type_id="rfcedtyp")