feat: celery tasks to replace ietf/bin scripts (#6971)

* refactor: Change import style for clarity

* feat: Add iana_changes_updates_task()

* chore: Squelch lint warning

My linter does not like variables defined outside
of __init__()

* feat: Add PeriodicTask for iana_changes_updates_task

* refactor: tasks instead of scripts in sync.views.notify()

* test: Test iana_changes_updates_task

* refactor: rename task for consistency

* feat: Add iana_protocols_update_task

* feat: Add PeriodicTask for iana protocols sync

* refactor: Use protocol sync task instead of script in view

* refactor: itertools.batched() not available until py312

* test: test iana_protocols_update_task

* feat: Add idindex_update_task()

Calls idindex generation functions and does the file
update dance to put them in place.

* chore: Add comments to bin/hourly

* fix: annotate types and fix bug

* feat: Create PeriodicTask for idindex_update_task

* refactor: Move helpers into a class

More testable this way

* refactor: Make TempFileManager a context mgr

* test: Test idindex_update_task

* test: Test TempFileManager

* fix: Fix bug in TempFileManager

yay testing

* feat: Add expire_ids_task()

* feat: Create PeriodicTask for expire_ids_task

* test: Test expire_ids_task

* test: Test request timeout in iana_protocols_update_task

* refactor: do not re-raise timeout exception

Not sure this is the right thing to do, but it's the
same as rfc_editor_index_update_task

* feat: Add notify_expirations_task

* feat: Add "weekly" celery beat crontab

* refactor: Reorder crontab fields

This matches the crontab file field order

* feat: Add PeriodicTask for notify_expirations

* test: Test notify_expirations_task

* test: Add annotation to satisfy mypy
Jennifer Richards, 2024-01-31 17:24:20 -04:00, committed via GitHub
commit b4cf04a09d (parent 118b00d729)
9 changed files with 526 additions and 23 deletions
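Every conversion in this commit follows the same two-part pattern: define a @shared_task, then register a disabled PeriodicTask row for celery beat to schedule. A condensed sketch of the pattern (example_task, the dotted path, and some_crontab are placeholders; the real definitions appear in the diffs below):

from celery import shared_task
from django_celery_beat.models import PeriodicTask

@shared_task
def example_task():
    ...  # placeholder body; see ietf/doc/tasks.py and friends below

# Registered disabled, so a deployment has to opt in explicitly:
PeriodicTask.objects.get_or_create(
    name="Example task",
    task="ietf.example.tasks.example_task",  # placeholder dotted path
    defaults=dict(enabled=False, crontab=some_crontab),  # some_crontab: a CrontabSchedule
)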

bin/hourly

@@ -45,6 +45,7 @@ ID=/a/ietfdata/doc/draft/repository
DERIVED=/a/ietfdata/derived
DOWNLOAD=/a/www/www6s/download
+## Start of script refactored into idindex_update_task() ===
export TMPDIR=/a/tmp
TMPFILE1=`mktemp` || exit 1
@@ -85,6 +86,8 @@ mv $TMPFILE9 $DERIVED/1id-index.txt
mv $TMPFILEA $DERIVED/1id-abstracts.txt
mv $TMPFILEB $DERIVED/all_id2.txt
+## End of script refactored into idindex_update_task() ===
$DTDIR/ietf/manage.py generate_idnits2_rfc_status
$DTDIR/ietf/manage.py generate_idnits2_rfcs_obsoleted
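The mktemp-then-mv dance above exists so readers never see a partially written index file; idindex_update_task() below reproduces it in Python. A minimal sketch of the same idiom (illustrative only, not code from this commit):

import os
import tempfile

fd, tmp_path = tempfile.mkstemp(dir="/a/tmp")
with os.fdopen(fd, "w") as f:
    f.write(new_index_text)  # new_index_text: the fully generated file contents
# rename into place; atomic only within a single filesystem
os.replace(tmp_path, "/a/ietfdata/derived/1id-index.txt")

Note that os.replace() is atomic only when source and destination share a filesystem; the task below uses shutil.move(), which falls back to copy-and-delete across filesystems, hence the "as-atomically-as-possible" comment in the diff.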

ietf/doc/tasks.py (new file, 56 lines)

@@ -0,0 +1,56 @@
# Copyright The IETF Trust 2024, All Rights Reserved
#
# Celery task definitions
#
import datetime
import debug # pyflakes:ignore
from celery import shared_task
from ietf.utils import log
from ietf.utils.timezone import datetime_today
from .expire import (
in_draft_expire_freeze,
get_expired_drafts,
expirable_drafts,
send_expire_notice_for_draft,
expire_draft,
clean_up_draft_files,
get_soon_to_expire_drafts,
send_expire_warning_for_draft,
)
from .models import Document
@shared_task
def expire_ids_task():
try:
if not in_draft_expire_freeze():
log.log("Expiring drafts ...")
for doc in get_expired_drafts():
# verify expirability -- it might have changed after get_expired_drafts() was run
# (this whole loop took about 2 minutes on 04 Jan 2018)
# N.B., re-running expirable_drafts() repeatedly is fairly expensive. Where possible,
# it's much faster to run it once on a superset query of the objects you are going
# to test and keep its results. That's not desirable here because it would defeat
# the purpose of double-checking that a document is still expirable when it is actually
# being marked as expired.
if expirable_drafts(
Document.objects.filter(pk=doc.pk)
).exists() and doc.expires < datetime_today() + datetime.timedelta(1):
send_expire_notice_for_draft(doc)
expire_draft(doc)
log.log(f" Expired draft {doc.name}-{doc.rev}")
log.log("Cleaning up draft files")
clean_up_draft_files()
except Exception as e:
log.log("Exception in expire-ids: %s" % e)
raise
@shared_task
def notify_expirations_task(notify_days=14):
for doc in get_soon_to_expire_drafts(notify_days):
send_expire_warning_for_draft(doc)
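Since these are shared tasks, they can be run inline for debugging or queued for a worker; a usage sketch, assuming a configured Celery broker and worker:

from ietf.doc.tasks import expire_ids_task, notify_expirations_task

expire_ids_task()                             # run synchronously, e.g. in a management shell
expire_ids_task.delay()                       # queue for a Celery worker
notify_expirations_task.delay(notify_days=7)  # override the 14-day default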

ietf/doc/tests_tasks.py (new file, 63 lines)

@@ -0,0 +1,63 @@
# Copyright The IETF Trust 2024, All Rights Reserved
import mock
from ietf.utils.test_utils import TestCase
from ietf.utils.timezone import datetime_today
from .factories import DocumentFactory
from .models import Document
from .tasks import expire_ids_task, notify_expirations_task
class TaskTests(TestCase):
@mock.patch("ietf.doc.tasks.in_draft_expire_freeze")
@mock.patch("ietf.doc.tasks.get_expired_drafts")
@mock.patch("ietf.doc.tasks.expirable_drafts")
@mock.patch("ietf.doc.tasks.send_expire_notice_for_draft")
@mock.patch("ietf.doc.tasks.expire_draft")
@mock.patch("ietf.doc.tasks.clean_up_draft_files")
def test_expire_ids_task(
self,
clean_up_draft_files_mock,
expire_draft_mock,
send_expire_notice_for_draft_mock,
expirable_drafts_mock,
get_expired_drafts_mock,
in_draft_expire_freeze_mock,
):
# set up mocks
in_draft_expire_freeze_mock.return_value = False
doc, other_doc = DocumentFactory.create_batch(2)
doc.expires = datetime_today()
get_expired_drafts_mock.return_value = [doc, other_doc]
expirable_drafts_mock.side_effect = [
Document.objects.filter(pk=doc.pk),
Document.objects.filter(pk=other_doc.pk),
]
# call task
expire_ids_task()
# check results
self.assertTrue(in_draft_expire_freeze_mock.called)
self.assertEqual(expirable_drafts_mock.call_count, 2)
self.assertEqual(send_expire_notice_for_draft_mock.call_count, 1)
self.assertEqual(send_expire_notice_for_draft_mock.call_args[0], (doc,))
self.assertEqual(expire_draft_mock.call_count, 1)
self.assertEqual(expire_draft_mock.call_args[0], (doc,))
self.assertTrue(clean_up_draft_files_mock.called)
# test that an exception is raised
in_draft_expire_freeze_mock.side_effect = RuntimeError
with self.assertRaises(RuntimeError):
    expire_ids_task()
@mock.patch("ietf.doc.tasks.send_expire_warning_for_draft")
@mock.patch("ietf.doc.tasks.get_soon_to_expire_drafts")
def test_notify_expirations_task(self, get_drafts_mock, send_warning_mock):
# Set up mocks
get_drafts_mock.return_value = ["sentinel"]
notify_expirations_task()
self.assertEqual(send_warning_mock.call_count, 1)
self.assertEqual(send_warning_mock.call_args[0], ("sentinel",))
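One detail worth keeping in mind when reading these tests: stacked @mock.patch decorators are applied bottom-up, so the generated mocks arrive as arguments in reverse decorator order. In miniature (the patched names here are hypothetical):

@mock.patch("pkg.outer_helper")
@mock.patch("pkg.inner_helper")  # innermost decorator is applied first...
def test_something(self, inner_mock, outer_mock):  # ...so its mock is the first argument
    ...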

ietf/idindex/tasks.py (new file, 85 lines)

@@ -0,0 +1,85 @@
# Copyright The IETF Trust 2024, All Rights Reserved
#
# Celery task definitions
#
import shutil
import debug # pyflakes:ignore
from celery import shared_task
from contextlib import AbstractContextManager
from pathlib import Path
from tempfile import NamedTemporaryFile
from .index import all_id_txt, all_id2_txt, id_index_txt
class TempFileManager(AbstractContextManager):
def __init__(self, tmpdir=None) -> None:
self.cleanup_list: set[Path] = set()
self.dir = tmpdir
def make_temp_file(self, content):
with NamedTemporaryFile(mode="wt", delete=False, dir=self.dir) as tf:
tf_path = Path(tf.name)
self.cleanup_list.add(tf_path)
tf.write(content)
return tf_path
def move_into_place(self, src_path: Path, dest_path: Path):
shutil.move(src_path, dest_path)
dest_path.chmod(0o644)
self.cleanup_list.remove(src_path)
def cleanup(self):
for tf_path in self.cleanup_list:
tf_path.unlink(missing_ok=True)
def __exit__(self, exc_type, exc_val, exc_tb):
self.cleanup()
return False # False: do not suppress the exception
@shared_task
def idindex_update_task():
"""Update I-D indexes"""
id_path = Path("/a/ietfdata/doc/draft/repository")
derived_path = Path("/a/ietfdata/derived")
download_path = Path("/a/www/www6s/download")
with TempFileManager("/a/tmp") as tmp_mgr:
# Generate copies of new contents
all_id_content = all_id_txt()
all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content)
derived_all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content)
download_all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content)
id_index_content = id_index_txt()
id_index_tmpfile = tmp_mgr.make_temp_file(id_index_content)
derived_id_index_tmpfile = tmp_mgr.make_temp_file(id_index_content)
download_id_index_tmpfile = tmp_mgr.make_temp_file(id_index_content)
id_abstracts_content = id_index_txt(with_abstracts=True)
id_abstracts_tmpfile = tmp_mgr.make_temp_file(id_abstracts_content)
derived_id_abstracts_tmpfile = tmp_mgr.make_temp_file(id_abstracts_content)
download_id_abstracts_tmpfile = tmp_mgr.make_temp_file(id_abstracts_content)
all_id2_content = all_id2_txt()
all_id2_tmpfile = tmp_mgr.make_temp_file(all_id2_content)
derived_all_id2_tmpfile = tmp_mgr.make_temp_file(all_id2_content)
# Move temp files as-atomically-as-possible into place
tmp_mgr.move_into_place(all_id_tmpfile, id_path / "all_id.txt")
tmp_mgr.move_into_place(derived_all_id_tmpfile, derived_path / "all_id.txt")
tmp_mgr.move_into_place(download_all_id_tmpfile, download_path / "id-all.txt")
tmp_mgr.move_into_place(id_index_tmpfile, id_path / "1id-index.txt")
tmp_mgr.move_into_place(derived_id_index_tmpfile, derived_path / "1id-index.txt")
tmp_mgr.move_into_place(download_id_index_tmpfile, download_path / "id-index.txt")
tmp_mgr.move_into_place(id_abstracts_tmpfile, id_path / "1id-abstracts.txt")
tmp_mgr.move_into_place(derived_id_abstracts_tmpfile, derived_path / "1id-abstracts.txt")
tmp_mgr.move_into_place(download_id_abstracts_tmpfile, download_path / "id-abstract.txt")
tmp_mgr.move_into_place(all_id2_tmpfile, id_path / "all_id2.txt")
tmp_mgr.move_into_place(derived_all_id2_tmpfile, derived_path / "all_id2.txt")
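TempFileManager's contract, in brief: make_temp_file() writes content and tracks the new path, move_into_place() un-tracks a path once it reaches its destination, and __exit__() unlinks whatever is still tracked, so a failed run leaves no stray temp files. A usage sketch (paths illustrative):

with TempFileManager("/a/tmp") as mgr:
    tmp = mgr.make_temp_file("new contents")
    mgr.move_into_place(tmp, Path("/a/ietfdata/derived/example.txt"))
    # anything not moved into place by here is unlinked on exit, even on error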

ietf/idindex/tests.py

@@ -3,8 +3,10 @@
import datetime
import mock
from pathlib import Path
from tempfile import TemporaryDirectory
from django.conf import settings
from django.utils import timezone
@@ -16,6 +18,7 @@ from ietf.doc.models import Document, RelatedDocument, State, LastCallDocEvent,
from ietf.group.factories import GroupFactory
from ietf.name.models import DocRelationshipName
from ietf.idindex.index import all_id_txt, all_id2_txt, id_index_txt
+from ietf.idindex.tasks import idindex_update_task, TempFileManager
from ietf.person.factories import PersonFactory, EmailFactory
from ietf.utils.test_utils import TestCase
@@ -151,3 +154,51 @@ class IndexTests(TestCase):
txt = id_index_txt(with_abstracts=True)
self.assertTrue(draft.abstract[:20] in txt)
class TaskTests(TestCase):
@mock.patch("ietf.idindex.tasks.all_id_txt")
@mock.patch("ietf.idindex.tasks.all_id2_txt")
@mock.patch("ietf.idindex.tasks.id_index_txt")
@mock.patch.object(TempFileManager, "__enter__")
def test_idindex_update_task(
self,
temp_file_mgr_enter_mock,
id_index_mock,
all_id2_mock,
all_id_mock,
):
# Replace TempFileManager's __enter__() method with one that returns a mock.
# Pass a spec to the mock so we validate that only actual methods are called.
mgr_mock = mock.Mock(spec=TempFileManager)
temp_file_mgr_enter_mock.return_value = mgr_mock
idindex_update_task()
self.assertEqual(all_id_mock.call_count, 1)
self.assertEqual(all_id2_mock.call_count, 1)
self.assertEqual(id_index_mock.call_count, 2)
self.assertEqual(id_index_mock.call_args_list[0], (tuple(), dict()))
self.assertEqual(
id_index_mock.call_args_list[1],
(tuple(), {"with_abstracts": True}),
)
self.assertEqual(mgr_mock.make_temp_file.call_count, 11)
self.assertEqual(mgr_mock.move_into_place.call_count, 11)
def test_temp_file_manager(self):
with TemporaryDirectory() as temp_dir:
temp_path = Path(temp_dir)
with TempFileManager(temp_path) as tfm:
path1 = tfm.make_temp_file("yay")
path2 = tfm.make_temp_file("boo") # do not keep this one
self.assertTrue(path1.exists())
self.assertTrue(path2.exists())
dest = temp_path / "yay.txt"
tfm.move_into_place(path1, dest)
# make sure things were cleaned up...
self.assertFalse(path1.exists()) # moved to dest
self.assertFalse(path2.exists()) # left behind
# check destination contents and permissions
self.assertEqual(dest.read_text(), "yay")
self.assertEqual(dest.stat().st_mode & 0o777, 0o644)

ietf/sync/tasks.py

@@ -5,11 +5,14 @@
import datetime
import io
import requests
from celery import shared_task
from django.conf import settings
from django.utils import timezone
-from ietf.sync.rfceditor import MIN_ERRATA_RESULTS, MIN_INDEX_RESULTS, parse_index, update_docs_from_rfc_index
+from ietf.sync import iana
+from ietf.sync import rfceditor
from ietf.utils import log
from ietf.utils.timezone import date_today
@@ -44,7 +47,7 @@ def rfc_editor_index_update_task(full_index=False):
log.log(f'GET request timed out retrieving RFC editor index: {exc}')
return # failed
rfc_index_xml = response.text
-index_data = parse_index(io.StringIO(rfc_index_xml))
+index_data = rfceditor.parse_index(io.StringIO(rfc_index_xml))
try:
response = requests.get(
settings.RFC_EDITOR_ERRATA_JSON_URL,
@@ -54,14 +57,98 @@ def rfc_editor_index_update_task(full_index=False):
log.log(f'GET request timed out retrieving RFC editor errata: {exc}')
return # failed
errata_data = response.json()
-if len(index_data) < MIN_INDEX_RESULTS:
+if len(index_data) < rfceditor.MIN_INDEX_RESULTS:
log.log("Not enough index entries, only %s" % len(index_data))
return # failed
-if len(errata_data) < MIN_ERRATA_RESULTS:
+if len(errata_data) < rfceditor.MIN_ERRATA_RESULTS:
log.log("Not enough errata entries, only %s" % len(errata_data))
return # failed
-for rfc_number, changes, doc, rfc_published in update_docs_from_rfc_index(
+for rfc_number, changes, doc, rfc_published in rfceditor.update_docs_from_rfc_index(
index_data, errata_data, skip_older_than_date=skip_date
):
for c in changes:
log.log("RFC%s, %s: %s" % (rfc_number, doc.name, c))
@shared_task
def iana_changes_update_task():
# compensate so that we don't ask for something that happened just now
# and then miss it because our request interval is slightly off
CLOCK_SKEW_COMPENSATION = 5 # seconds
# actually the interface accepts 24 hours, but then we get into
# trouble with daylight saving time - meh
MAX_INTERVAL_ACCEPTED_BY_IANA = datetime.timedelta(hours=23)
start = (
timezone.now()
- datetime.timedelta(hours=23)
+ datetime.timedelta(seconds=CLOCK_SKEW_COMPENSATION,)
)
end = start + datetime.timedelta(hours=23)
t = start
while t < end:
# the IANA server doesn't allow us to fetch more than a certain
# period, so loop over the requested period and make multiple
# requests if necessary
text = iana.fetch_changes_json(
settings.IANA_SYNC_CHANGES_URL, t, min(end, t + MAX_INTERVAL_ACCEPTED_BY_IANA)
)
log.log(f"Retrieved the JSON: {text}")
changes = iana.parse_changes_json(text)
added_events, warnings = iana.update_history_with_changes(
changes, send_email=True
)
for e in added_events:
log.log(
f"Added event for {e.doc_id} {e.time}: {e.desc} (parsed json: {e.json})"
)
for w in warnings:
log.log(f"WARNING: {w}")
t += MAX_INTERVAL_ACCEPTED_BY_IANA
@shared_task
def iana_protocols_update_task():
# Earliest date for which we have data suitable to update (was described as
# "this needs to be the date where this tool is first deployed" in the
# original iana-protocols-updates script)
rfc_must_published_later_than = datetime.datetime(
2012,
11,
26,
tzinfo=datetime.timezone.utc,
)
try:
response = requests.get(
settings.IANA_SYNC_PROTOCOLS_URL,
timeout=30,
)
except requests.Timeout as exc:
log.log(f'GET request timed out retrieving IANA protocols page: {exc}')
return
rfc_numbers = iana.parse_protocol_page(response.text)
def batched(l, n):
"""Split list l up in batches of max size n.
For Python 3.12 or later, replace this with itertools.batched()
"""
return (l[i:i + n] for i in range(0, len(l), n))
for batch in batched(rfc_numbers, 100):
updated = iana.update_rfc_log_from_protocol_page(
batch,
rfc_must_published_later_than,
)
for d in updated:
log.log("Added history entry for %s" % d.display_name())

ietf/sync/tests.py

@@ -19,7 +19,7 @@ from django.test.utils import override_settings
import debug # pyflakes:ignore
-from ietf.doc.factories import WgDraftFactory, RfcFactory, DocumentAuthorFactory
+from ietf.doc.factories import WgDraftFactory, RfcFactory, DocumentAuthorFactory, DocEventFactory
from ietf.doc.models import Document, DocEvent, DeletedEvent, DocTagName, RelatedDocument, State, StateDocEvent
from ietf.doc.utils import add_state_change_event
from ietf.group.factories import GroupFactory
@@ -685,8 +685,8 @@ class TaskTests(TestCase):
RFC_EDITOR_INDEX_URL="https://rfc-editor.example.com/index/",
RFC_EDITOR_ERRATA_JSON_URL="https://rfc-editor.example.com/errata/",
)
@mock.patch("ietf.sync.tasks.update_docs_from_rfc_index")
@mock.patch("ietf.sync.tasks.parse_index")
@mock.patch("ietf.sync.tasks.rfceditor.update_docs_from_rfc_index")
@mock.patch("ietf.sync.tasks.rfceditor.parse_index")
@mock.patch("ietf.sync.tasks.requests.get")
def test_rfc_editor_index_update_task(
self, requests_get_mock, parse_index_mock, update_docs_mock
@@ -804,3 +804,102 @@ class TaskTests(TestCase):
parse_index_mock.return_value = MockIndexData(length=rfceditor.MIN_INDEX_RESULTS)
tasks.rfc_editor_index_update_task(full_index=False)
self.assertFalse(update_docs_mock.called)
@override_settings(IANA_SYNC_CHANGES_URL="https://iana.example.com/sync/")
@mock.patch("ietf.sync.tasks.iana.update_history_with_changes")
@mock.patch("ietf.sync.tasks.iana.parse_changes_json")
@mock.patch("ietf.sync.tasks.iana.fetch_changes_json")
def test_iana_changes_update_task(
self,
fetch_changes_mock,
parse_changes_mock,
update_history_mock,
):
# set up mocks
fetch_return_val = object()
fetch_changes_mock.return_value = fetch_return_val
parse_return_val = object()
parse_changes_mock.return_value = parse_return_val
event_with_json = DocEventFactory()
event_with_json.json = "hi I'm json"
update_history_mock.return_value = [
[event_with_json], # events
["oh no!"], # warnings
]
tasks.iana_changes_update_task()
self.assertEqual(fetch_changes_mock.call_count, 1)
self.assertEqual(
fetch_changes_mock.call_args[0][0],
"https://iana.example.com/sync/",
)
self.assertTrue(parse_changes_mock.called)
self.assertEqual(
parse_changes_mock.call_args,
((fetch_return_val,), {}),
)
self.assertTrue(update_history_mock.called)
self.assertEqual(
update_history_mock.call_args,
((parse_return_val,), {"send_email": True}),
)
@override_settings(IANA_SYNC_PROTOCOLS_URL="https://iana.example.com/proto/")
@mock.patch("ietf.sync.tasks.iana.update_rfc_log_from_protocol_page")
@mock.patch("ietf.sync.tasks.iana.parse_protocol_page")
@mock.patch("ietf.sync.tasks.requests.get")
def test_iana_protocols_update_task(
self,
requests_get_mock,
parse_protocols_mock,
update_rfc_log_mock,
):
# set up mocks
requests_get_mock.return_value = mock.Mock(text="fetched response")
parse_protocols_mock.return_value = range(110) # larger than batch size of 100
update_rfc_log_mock.return_value = [
mock.Mock(display_name=mock.Mock(return_value="name"))
]
# call the task
tasks.iana_protocols_update_task()
# check that it did the right things
self.assertTrue(requests_get_mock.called)
self.assertEqual(
requests_get_mock.call_args[0],
("https://iana.example.com/proto/",),
)
self.assertTrue(parse_protocols_mock.called)
self.assertEqual(
parse_protocols_mock.call_args[0],
("fetched response",),
)
self.assertEqual(update_rfc_log_mock.call_count, 2)
self.assertEqual(
update_rfc_log_mock.call_args_list[0][0][0],
range(100), # first batch
)
self.assertEqual(
update_rfc_log_mock.call_args_list[1][0][0],
range(100, 110), # second batch
)
# make sure the calls use the same later_than date and that it's the expected one
published_later_than = set(
update_rfc_log_mock.call_args_list[n][0][1] for n in (0, 1)
)
self.assertEqual(
published_later_than,
{datetime.datetime(2012,11,26,tzinfo=datetime.timezone.utc)}
)
# try with an exception
requests_get_mock.reset_mock()
parse_protocols_mock.reset_mock()
update_rfc_log_mock.reset_mock()
requests_get_mock.side_effect = requests.Timeout
tasks.iana_protocols_update_task()
self.assertTrue(requests_get_mock.called)
self.assertFalse(parse_protocols_mock.called)
self.assertFalse(update_rfc_log_mock.called)
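The timeout case at the end works because assigning an exception class to a mock's side_effect makes the mock raise it when called:

m = mock.Mock(side_effect=requests.Timeout)
m()  # raises requests.Timeout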

ietf/sync/views.py

@@ -17,6 +17,7 @@ from django.views.decorators.csrf import csrf_exempt
from ietf.doc.models import DeletedEvent, StateDocEvent, DocEvent
from ietf.ietfauth.utils import role_required, has_role
+from ietf.sync import tasks
from ietf.sync.discrepancies import find_discrepancies
from ietf.utils.serialize import object_as_shallow_dict
from ietf.utils.log import log
@@ -91,19 +92,18 @@ def notify(request, org, notification):
log("Subprocess error %s when running '%s': %s %s" % (p.returncode, cmd, err, out))
raise subprocess.CalledProcessError(p.returncode, cmdstring, "\n".join([err, out]))
log("Running sync script from notify view POST")
if notification == "protocols":
runscript("iana-protocols-updates")
if notification == "changes":
runscript("iana-changes-updates")
if notification == "queue":
runscript("rfc-editor-queue-updates")
if notification == "index":
runscript("rfc-editor-index-updates")
log("Queuing RFC Editor index sync from notify view POST")
tasks.rfc_editor_index_update_task.delay()
elif notification == "changes":
log("Queuing IANA changes sync from notify view POST")
tasks.iana_changes_update_task.delay()
elif notification == "protocols":
log("Queuing IANA protocols sync from notify view POST")
tasks.iana_protocols_update_task.delay()
elif notification == "queue":
log("Running sync script from notify view POST")
runscript("rfc-editor-queue-updates")
return HttpResponse("OK", content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET)

ietf/utils/management/commands/periodic_tasks.py

@@ -5,32 +5,41 @@ from django_celery_beat.models import CrontabSchedule, PeriodicTask
from django.core.management.base import BaseCommand
CRONTAB_DEFS = {
+    # same as "@weekly" in a crontab
+    "weekly": {
+        "minute": "0",
+        "hour": "0",
+        "day_of_month": "*",
+        "month_of_year": "*",
+        "day_of_week": "0",
+    },
    "daily": {
        "minute": "5",
        "hour": "0",
-        "day_of_week": "*",
        "day_of_month": "*",
        "month_of_year": "*",
+        "day_of_week": "*",
    },
    "hourly": {
        "minute": "5",
        "hour": "*",
-        "day_of_week": "*",
        "day_of_month": "*",
        "month_of_year": "*",
+        "day_of_week": "*",
    },
    "every_15m": {
        "minute": "*/15",
        "hour": "*",
-        "day_of_week": "*",
        "day_of_month": "*",
        "month_of_year": "*",
+        "day_of_week": "*",
    },
}
class Command(BaseCommand):
"""Manage periodic tasks"""
crontabs = None
def add_arguments(self, parser):
parser.add_argument("--create-default", action="store_true")
@@ -112,6 +121,56 @@ class Command(BaseCommand):
),
)
PeriodicTask.objects.get_or_create(
name="Expire I-Ds",
task="ietf.doc.tasks.expire_ids_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["daily"],
description="Create expiration notices for expired I-Ds",
),
)
PeriodicTask.objects.get_or_create(
name="Sync with IANA changes",
task="ietf.sync.tasks.iana_changes_update_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["hourly"],
description="Fetch change list from IANA and apply to documents",
),
)
PeriodicTask.objects.get_or_create(
name="Sync with IANA protocols page",
task="ietf.sync.tasks.iana_changes_update_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["hourly"],
description="Fetch protocols page from IANA and update document event logs",
),
)
PeriodicTask.objects.get_or_create(
name="Update I-D index files",
task="ietf.idindex.tasks.idindex_update_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["hourly"],
description="Update I-D index files",
),
)
PeriodicTask.objects.get_or_create(
name="Send expiration notifications",
task="ietf.doc.tasks.notify_expirations_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["weekly"],
description="Send notifications about I-Ds that will expire in the next 14 days",
)
)
def show_tasks(self):
for label, crontab in self.crontabs.items():
tasks = PeriodicTask.objects.filter(crontab=crontab).order_by(