Merge branch 'refs/heads/main' into feat/k8s

# Conflicts:
#	ietf/settings.py
#	ietf/utils/__init__.py
#	ietf/utils/log.py
This commit is contained in:
Jennifer Richards 2024-05-15 15:35:32 -03:00
commit 2b816630ef
No known key found for this signature in database
GPG key ID: 9B2BF5C5ADDA6A6E
22 changed files with 291 additions and 364 deletions

View file

@ -40,9 +40,5 @@ $DTDIR/ietf/manage.py populate_yang_model_dirs -v0
# Re-run yang checks on active documents
$DTDIR/ietf/manage.py run_yang_model_checks -v0
# Expire last calls
# Enable when removed from /a/www/ietf-datatracker/scripts/Cron-runner:
$DTDIR/ietf/bin/expire-last-calls
# Purge older PersonApiKeyEvents
$DTDIR/ietf/manage.py purge_old_personal_api_key_events 14

View file

@ -24,9 +24,6 @@ ID=/a/ietfdata/doc/draft/repository
DERIVED=/a/ietfdata/derived
DOWNLOAD=/a/www/www6s/download
$DTDIR/ietf/manage.py generate_idnits2_rfc_status
$DTDIR/ietf/manage.py generate_idnits2_rfcs_obsoleted
CHARTER=/a/www/ietf-ftp/charter
wget -q https://datatracker.ietf.org/wg/1wg-charters-by-acronym.txt -O $CHARTER/1wg-charters-by-acronym.txt
wget -q https://datatracker.ietf.org/wg/1wg-charters.txt -O $CHARTER/1wg-charters.txt

View file

@ -1,34 +0,0 @@
#!/usr/bin/env python
# This script requires that the proper virtual python environment has been
# invoked before start
#
# Expire IETF last calls whose deadline has passed, moving each document to
# its next state and recording the outcome in syslog.

import os
import sys
import syslog

# boilerplate: make the ietf package importable and point Django at its settings
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [basedir] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

# Activate the project virtualenv if one is present alongside the checkout.
# NOTE: the original used the Python-2-only builtin execfile(); exec() of the
# compiled file contents is the Python 3 equivalent.
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
    with open(virtualenv_activation) as f:
        exec(compile(f.read(), virtualenv_activation, "exec"),
             dict(__file__=virtualenv_activation))

syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)

import django
django.setup()

# ----------------------------------------------------------------------

from ietf.doc.lastcall import get_expired_last_calls, expire_last_call

drafts = get_expired_last_calls()
for doc in drafts:
    try:
        expire_last_call(doc)
        syslog.syslog("Expired last call for %s (id=%s)" % (doc.file_tag(), doc.pk))
    except Exception as e:
        # Log the failure (including the exception, which was previously
        # dropped) and continue so one bad document does not block the rest.
        syslog.syslog(syslog.LOG_ERR, "ERROR: Failed to expire last call for %s (id=%s): %s" % (doc.file_tag(), doc.pk, e))

View file

@ -1,110 +0,0 @@
#!/usr/bin/env python
# This script requires that the proper virtual python environment has been
# invoked before start
#
# Pull the RFC index and errata from the RFC Editor and update document
# metadata accordingly.

import datetime
import io
import os
import requests
import sys
import syslog
import traceback

# boilerplate: make the ietf package importable and point Django at its settings
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [basedir] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

# Before invoking django
syslog.openlog(os.path.basename(__file__), syslog.LOG_PID, syslog.LOG_USER)

import django
django.setup()

from django.conf import settings
from optparse import OptionParser
from django.core.mail import mail_admins

from ietf.doc.utils import rebuild_reference_relations
from ietf.utils.log import log
from ietf.utils.pipe import pipe
from ietf.utils.timezone import date_today
import ietf.sync.rfceditor

parser = OptionParser()
parser.add_option("-d", dest="skip_date",
                  help="To speed up processing skip RFCs published before this date (default is one year ago)", metavar="YYYY-MM-DD")
options, args = parser.parse_args()

# By default only consider RFCs published within the last year.
skip_date = date_today() - datetime.timedelta(days=365)
if options.skip_date:
    skip_date = datetime.datetime.strptime(options.skip_date, "%Y-%m-%d").date()

log("Updating document metadata from RFC index going back to %s, from %s" % (skip_date, settings.RFC_EDITOR_INDEX_URL))

try:
    response = requests.get(
        settings.RFC_EDITOR_INDEX_URL,
        timeout=30,  # seconds
    )
except requests.Timeout as exc:
    log(f'GET request timed out retrieving RFC editor index: {exc}')
    sys.exit(1)
rfc_index_xml = response.text
index_data = ietf.sync.rfceditor.parse_index(io.StringIO(rfc_index_xml))

try:
    response = requests.get(
        settings.RFC_EDITOR_ERRATA_JSON_URL,
        timeout=30,  # seconds
    )
except requests.Timeout as exc:
    log(f'GET request timed out retrieving RFC editor errata: {exc}')
    sys.exit(1)
errata_data = response.json()

# Sanity-check result sizes before acting on them; a short result likely
# means a truncated or error response.
if len(index_data) < ietf.sync.rfceditor.MIN_INDEX_RESULTS:
    log("Not enough index entries, only %s" % len(index_data))
    sys.exit(1)

if len(errata_data) < ietf.sync.rfceditor.MIN_ERRATA_RESULTS:
    log("Not enough errata entries, only %s" % len(errata_data))
    sys.exit(1)

new_rfcs = []
for rfc_number, changes, doc, rfc_published in ietf.sync.rfceditor.update_docs_from_rfc_index(index_data, errata_data, skip_older_than_date=skip_date):
    if rfc_published:
        new_rfcs.append(doc)
    for c in changes:
        log("RFC%s, %s: %s" % (rfc_number, doc.name, c))

sys.exit(0)

# NOTE(review): everything below is unreachable because of the sys.exit(0)
# above; kept pending deliberate removal.
# This can be called while processing a notifying POST from the RFC Editor
# Spawn a child to sync the rfcs and calculate new reference relationships
# so that the POST
newpid = os.fork()
if newpid == 0:
    try:
        pipe("%s -a %s %s" % (settings.RSYNC_BINARY, settings.RFC_TEXT_RSYNC_SOURCE, settings.RFC_PATH))
        for rfc in new_rfcs:
            rebuild_reference_relations(rfc)
            log("Updated references for %s" % rfc.name)
    except:
        subject = "Exception in updating references for new rfcs: %s : %s" % (sys.exc_info()[0], sys.exc_info()[1])
        msg = "%s\n%s\n----\n%s" % (sys.exc_info()[0], sys.exc_info()[1], traceback.format_tb(sys.exc_info()[2]))
        mail_admins(subject, msg, fail_silently=True)
        log(subject)
    os._exit(0)
else:
    sys.exit(0)

View file

@ -1,44 +0,0 @@
#!/usr/bin/env python
# Fetch the RFC Editor queue and update datatracker draft states from it.

import io
import os
import requests
import sys

# boilerplate: make the ietf package importable and point Django at its settings
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [basedir] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"

import django
django.setup()

from django.conf import settings

from ietf.sync.rfceditor import parse_queue, MIN_QUEUE_RESULTS, update_drafts_from_queue
from ietf.utils.log import log

log("Updating RFC Editor queue states from %s" % settings.RFC_EDITOR_QUEUE_URL)

try:
    response = requests.get(
        settings.RFC_EDITOR_QUEUE_URL,
        timeout=30,  # seconds
    )
except requests.Timeout as exc:
    log(f'GET request timed out retrieving RFC editor queue: {exc}')
    sys.exit(1)

drafts, warnings = parse_queue(io.StringIO(response.text))
for w in warnings:
    log(u"Warning: %s" % w)

# Guard against a truncated or error response before applying updates.
if len(drafts) < MIN_QUEUE_RESULTS:
    log("Not enough results, only %s" % len(drafts))
    sys.exit(1)

changed, warnings = update_drafts_from_queue(drafts)
for w in warnings:
    log(u"Warning: %s" % w)

for c in changed:
    log(u"Updated %s" % c)

View file

@ -1,23 +0,0 @@
# Copyright The IETF Trust 2021 All Rights Reserved

import os

from django.conf import settings
from django.core.management.base import BaseCommand

from ietf.doc.utils import generate_idnits2_rfc_status
from ietf.utils.log import log


class Command(BaseCommand):
    """Write the idnits2 rfc_status blob to settings.DERIVED_DIR."""

    help = 'Generate the rfc_status blob used by idnits2'

    def handle(self, *args, **options):
        filename = os.path.join(settings.DERIVED_DIR, 'idnits2-rfc-status')
        blob = generate_idnits2_rfc_status()
        try:
            # Encode explicitly and write bytes so the on-disk encoding is fixed.
            # (renamed from `bytes`, which shadowed the builtin)
            content = blob.encode('utf-8')
            with open(filename, 'wb') as f:
                f.write(content)
        except Exception as e:
            log('failed to write idnits2-rfc-status: ' + str(e))
            # Bare `raise` re-raises the active exception with its traceback.
            raise

View file

@ -1,23 +0,0 @@
# Copyright The IETF Trust 2021 All Rights Reserved

import os

from django.conf import settings
from django.core.management.base import BaseCommand

from ietf.doc.utils import generate_idnits2_rfcs_obsoleted
from ietf.utils.log import log


class Command(BaseCommand):
    """Write the idnits2 rfcs-obsoleted file to settings.DERIVED_DIR."""

    help = 'Generate the rfcs-obsoleted file used by idnits2'

    def handle(self, *args, **options):
        filename = os.path.join(settings.DERIVED_DIR, 'idnits2-rfcs-obsoleted')
        blob = generate_idnits2_rfcs_obsoleted()
        try:
            # Encode explicitly and write bytes so the on-disk encoding is fixed.
            # (renamed from `bytes`, which shadowed the builtin)
            content = blob.encode('utf-8')
            with open(filename, 'wb') as f:
                f.write(content)
        except Exception as e:
            log('failed to write idnits2-rfcs-obsoleted: ' + str(e))
            # Bare `raise` re-raises the active exception with its traceback.
            raise

View file

@ -6,6 +6,9 @@ import datetime
import debug # pyflakes:ignore
from celery import shared_task
from pathlib import Path
from django.conf import settings
from ietf.utils import log
from ietf.utils.timezone import datetime_today
@ -20,7 +23,9 @@ from .expire import (
get_soon_to_expire_drafts,
send_expire_warning_for_draft,
)
from .lastcall import get_expired_last_calls, expire_last_call
from .models import Document
from .utils import generate_idnits2_rfc_status, generate_idnits2_rfcs_obsoleted
@shared_task
@ -54,3 +59,34 @@ def expire_ids_task():
def notify_expirations_task(notify_days=14):
for doc in get_soon_to_expire_drafts(notify_days):
send_expire_warning_for_draft(doc)
@shared_task
def expire_last_calls_task():
    """Expire any last calls whose deadline has passed.

    Failures on individual documents are logged and do not stop
    processing of the remaining documents.
    """
    for doc in get_expired_last_calls():
        try:
            expire_last_call(doc)
        except Exception:
            log.log(f"ERROR: Failed to expire last call for {doc.file_tag()} (id={doc.pk})")
        else:
            log.log(f"Expired last call for {doc.file_tag()} (id={doc.pk})")
@shared_task
def generate_idnits2_rfc_status_task():
    """Generate the idnits2 rfc-status blob and write it to DERIVED_DIR.

    A write failure is logged rather than raised so the periodic task
    does not crash the worker.
    """
    outpath = Path(settings.DERIVED_DIR) / "idnits2-rfc-status"
    blob = generate_idnits2_rfc_status()
    try:
        outpath.write_text(blob, encoding="utf8")
    except Exception as e:
        log.log(f"failed to write idnits2-rfc-status: {e}")
@shared_task
def generate_idnits2_rfcs_obsoleted_task():
    """Generate the idnits2 rfcs-obsoleted file and write it to DERIVED_DIR.

    A write failure is logged rather than raised so the periodic task
    does not crash the worker.
    """
    outpath = Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted"
    blob = generate_idnits2_rfcs_obsoleted()
    try:
        outpath.write_text(blob, encoding="utf8")
    except Exception as e:
        log.log(f"failed to write idnits2-rfcs-obsoleted: {e}")

View file

@ -20,7 +20,6 @@ from tempfile import NamedTemporaryFile
from collections import defaultdict
from zoneinfo import ZoneInfo
from django.core.management import call_command
from django.urls import reverse as urlreverse
from django.conf import settings
from django.forms import Form
@ -45,7 +44,14 @@ from ietf.doc.factories import ( DocumentFactory, DocEventFactory, CharterFactor
StatusChangeFactory, DocExtResourceFactory, RgDraftFactory, BcpFactory)
from ietf.doc.forms import NotifyForm
from ietf.doc.fields import SearchableDocumentsField
from ietf.doc.utils import create_ballot_if_not_open, investigate_fragment, uppercase_std_abbreviated_name, DraftAliasGenerator
from ietf.doc.utils import (
create_ballot_if_not_open,
investigate_fragment,
uppercase_std_abbreviated_name,
DraftAliasGenerator,
generate_idnits2_rfc_status,
generate_idnits2_rfcs_obsoleted,
)
from ietf.group.models import Group, Role
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.ipr.factories import HolderIprDisclosureFactory
@ -2831,32 +2837,40 @@ class MaterialsTests(TestCase):
class Idnits2SupportTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['DERIVED_DIR']
def test_obsoleted(self):
def test_generate_idnits2_rfcs_obsoleted(self):
rfc = WgRfcFactory(rfc_number=1001)
WgRfcFactory(rfc_number=1003,relations=[('obs',rfc)])
rfc = WgRfcFactory(rfc_number=1005)
WgRfcFactory(rfc_number=1007,relations=[('obs',rfc)])
blob = generate_idnits2_rfcs_obsoleted()
self.assertEqual(blob, b'1001 1003\n1005 1007\n'.decode("utf8"))
def test_obsoleted(self):
url = urlreverse('ietf.doc.views_doc.idnits2_rfcs_obsoleted')
r = self.client.get(url)
self.assertEqual(r.status_code, 404)
call_command('generate_idnits2_rfcs_obsoleted')
# value written is arbitrary, expect it to be passed through
(Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted").write_bytes(b'1001 1003\n1005 1007\n')
url = urlreverse('ietf.doc.views_doc.idnits2_rfcs_obsoleted')
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
self.assertEqual(r.content, b'1001 1003\n1005 1007\n')
def test_rfc_status(self):
def test_generate_idnits2_rfc_status(self):
for slug in ('bcp', 'ds', 'exp', 'hist', 'inf', 'std', 'ps', 'unkn'):
WgRfcFactory(std_level_id=slug)
blob = generate_idnits2_rfc_status().replace("\n", "")
self.assertEqual(blob[6312-1], "O")
def test_rfc_status(self):
url = urlreverse('ietf.doc.views_doc.idnits2_rfc_status')
r = self.client.get(url)
self.assertEqual(r.status_code,404)
call_command('generate_idnits2_rfc_status')
# value written is arbitrary, expect it to be passed through
(Path(settings.DERIVED_DIR) / "idnits2-rfc-status").write_bytes(b'1001 1003\n1005 1007\n')
r = self.client.get(url)
self.assertEqual(r.status_code,200)
blob = unicontent(r).replace('\n','')
self.assertEqual(blob[6312-1],'O')
self.assertEqual(r.content, b'1001 1003\n1005 1007\n')
def test_idnits2_state(self):
rfc = WgRfcFactory()

View file

@ -1,15 +1,25 @@
# Copyright The IETF Trust 2024, All Rights Reserved
import mock
from pathlib import Path
from django.conf import settings
from ietf.utils.test_utils import TestCase
from ietf.utils.timezone import datetime_today
from .factories import DocumentFactory
from .models import Document
from .tasks import expire_ids_task, notify_expirations_task
from .tasks import (
expire_ids_task,
expire_last_calls_task,
generate_idnits2_rfcs_obsoleted_task,
generate_idnits2_rfc_status_task,
notify_expirations_task,
)
class TaskTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ["DERIVED_DIR"]
@mock.patch("ietf.doc.tasks.in_draft_expire_freeze")
@mock.patch("ietf.doc.tasks.get_expired_drafts")
@ -50,7 +60,7 @@ class TaskTests(TestCase):
# test that an exception is raised
in_draft_expire_freeze_mock.side_effect = RuntimeError
with self.assertRaises(RuntimeError):(
with self.assertRaises(RuntimeError): (
expire_ids_task())
@mock.patch("ietf.doc.tasks.send_expire_warning_for_draft")
@ -61,3 +71,46 @@ class TaskTests(TestCase):
notify_expirations_task()
self.assertEqual(send_warning_mock.call_count, 1)
self.assertEqual(send_warning_mock.call_args[0], ("sentinel",))
@mock.patch("ietf.doc.tasks.expire_last_call")
@mock.patch("ietf.doc.tasks.get_expired_last_calls")
def test_expire_last_calls_task(self, mock_get_expired, mock_expire):
docs = DocumentFactory.create_batch(3)
mock_get_expired.return_value = docs
expire_last_calls_task()
self.assertTrue(mock_get_expired.called)
self.assertEqual(mock_expire.call_count, 3)
self.assertEqual(mock_expire.call_args_list[0], mock.call(docs[0]))
self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
# Check that it runs even if exceptions occur
mock_get_expired.reset_mock()
mock_expire.reset_mock()
mock_expire.side_effect = ValueError
expire_last_calls_task()
self.assertTrue(mock_get_expired.called)
self.assertEqual(mock_expire.call_count, 3)
self.assertEqual(mock_expire.call_args_list[0], mock.call(docs[0]))
self.assertEqual(mock_expire.call_args_list[1], mock.call(docs[1]))
self.assertEqual(mock_expire.call_args_list[2], mock.call(docs[2]))
@mock.patch("ietf.doc.tasks.generate_idnits2_rfc_status")
def test_generate_idnits2_rfc_status_task(self, mock_generate):
mock_generate.return_value = "dåtå"
generate_idnits2_rfc_status_task()
self.assertEqual(mock_generate.call_count, 1)
self.assertEqual(
"dåtå".encode("utf8"),
(Path(settings.DERIVED_DIR) / "idnits2-rfc-status").read_bytes(),
)
@mock.patch("ietf.doc.tasks.generate_idnits2_rfcs_obsoleted")
def test_generate_idnits2_rfcs_obsoleted_task(self, mock_generate):
mock_generate.return_value = "dåtå"
generate_idnits2_rfcs_obsoleted_task()
self.assertEqual(mock_generate.call_count, 1)
self.assertEqual(
"dåtå".encode("utf8"),
(Path(settings.DERIVED_DIR) / "idnits2-rfcs-obsoleted").read_bytes(),
)

View file

@ -1,37 +0,0 @@
# Copyright The IETF Trust 2013-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import syslog
from django.core.management.base import BaseCommand
from ietf.nomcom.models import NomCom, NomineePosition
from ietf.nomcom.utils import send_accept_reminder_to_nominee,send_questionnaire_reminder_to_nominee
from ietf.utils.timezone import date_today
def log(message):
    # Thin wrapper so reminder sending is recorded in syslog.
    syslog.syslog(message)
def is_time_to_send(nomcom, send_date, nomination_date):
    """Return True if a reminder should be sent on send_date.

    When the nomcom has a reminder_interval, remind on every interval-th
    day after the nomination date (but never on day 0); otherwise remind
    only on explicitly configured reminder dates.
    """
    if nomcom.reminder_interval:
        days_passed = (send_date - nomination_date).days
        return days_passed > 0 and days_passed % nomcom.reminder_interval == 0
    else:
        return bool(nomcom.reminderdates_set.filter(date=send_date))
class Command(BaseCommand):
    """Send acceptance and questionnaire reminders to nominees."""

    help = "Send acceptance and questionnaire reminders to nominees"

    def handle(self, *args, **options):
        for nomcom in NomCom.objects.filter(group__state__slug='active'):
            # Skip nominees that have been merged into another record.
            nps = NomineePosition.objects.filter(nominee__nomcom=nomcom, nominee__duplicated__isnull=True)
            for nominee_position in nps.pending():
                if is_time_to_send(nomcom, date_today(), nominee_position.time.date()):
                    send_accept_reminder_to_nominee(nominee_position)
                    log('Sent accept reminder to %s' % nominee_position.nominee.email.address)
            for nominee_position in nps.accepted().without_questionnaire_response():
                if is_time_to_send(nomcom, date_today(), nominee_position.time.date()):
                    send_questionnaire_reminder_to_nominee(nominee_position)
                    log('Sent questionnaire reminder to %s' % nominee_position.nominee.email.address)

10
ietf/nomcom/tasks.py Normal file
View file

@ -0,0 +1,10 @@
# Copyright The IETF Trust 2024, All Rights Reserved
from celery import shared_task
from .utils import send_reminders
@shared_task
def send_nomcom_reminders_task():
    """Celery entry point for nomcom acceptance/questionnaire reminders."""
    send_reminders()

View file

@ -40,14 +40,14 @@ from ietf.nomcom.models import NomineePosition, Position, Nominee, \
NomineePositionStateName, Feedback, FeedbackTypeName, \
Nomination, FeedbackLastSeen, TopicFeedbackLastSeen, ReminderDates, \
NomCom
from ietf.nomcom.management.commands.send_reminders import Command, is_time_to_send
from ietf.nomcom.factories import NomComFactory, FeedbackFactory, TopicFactory, \
nomcom_kwargs_for_year, provide_private_key_to_test_client, \
key
from ietf.nomcom.tasks import send_nomcom_reminders_task
from ietf.nomcom.utils import get_nomcom_by_year, make_nomineeposition, \
get_hash_nominee_position, is_eligible, list_eligible, \
get_eligibility_date, suggest_affiliation, ingest_feedback_email, \
decorate_volunteers_with_qualifications
decorate_volunteers_with_qualifications, send_reminders, _is_time_to_send_reminder
from ietf.person.factories import PersonFactory, EmailFactory
from ietf.person.models import Email, Person
from ietf.stats.models import MeetingRegistration
@ -1207,32 +1207,37 @@ class ReminderTest(TestCase):
teardown_test_public_keys_dir(self)
super().tearDown()
def test_is_time_to_send(self):
def test_is_time_to_send_reminder(self):
self.nomcom.reminder_interval = 4
today = date_today()
self.assertTrue(is_time_to_send(self.nomcom,today+datetime.timedelta(days=4),today))
self.assertTrue(
_is_time_to_send_reminder(self.nomcom, today + datetime.timedelta(days=4), today)
)
for delta in range(4):
self.assertFalse(is_time_to_send(self.nomcom,today+datetime.timedelta(days=delta),today))
self.assertFalse(
_is_time_to_send_reminder(
self.nomcom, today + datetime.timedelta(days=delta), today
)
)
self.nomcom.reminder_interval = None
self.assertFalse(is_time_to_send(self.nomcom,today,today))
self.assertFalse(_is_time_to_send_reminder(self.nomcom, today, today))
self.nomcom.reminderdates_set.create(date=today)
self.assertTrue(is_time_to_send(self.nomcom,today,today))
self.assertTrue(_is_time_to_send_reminder(self.nomcom, today, today))
def test_command(self):
c = Command()
messages_before=len(outbox)
def test_send_reminders(self):
messages_before = len(outbox)
self.nomcom.reminder_interval = 3
self.nomcom.save()
c.handle(None,None)
send_reminders()
self.assertEqual(len(outbox), messages_before + 2)
self.assertIn('nominee1@example.org', outbox[-1]['To'])
self.assertIn('please complete', outbox[-1]['Subject'])
self.assertIn('nominee1@example.org', outbox[-2]['To'])
self.assertIn('please accept', outbox[-2]['Subject'])
messages_before=len(outbox)
messages_before = len(outbox)
self.nomcom.reminder_interval = 4
self.nomcom.save()
c.handle(None,None)
send_reminders()
self.assertEqual(len(outbox), messages_before + 1)
self.assertIn('nominee2@example.org', outbox[-1]['To'])
self.assertIn('please accept', outbox[-1]['Subject'])
@ -3048,3 +3053,10 @@ class ReclassifyFeedbackTests(TestCase):
self.assertEqual(fb.type_id, 'junk')
self.assertEqual(Feedback.objects.filter(type='read').count(), 0)
self.assertEqual(Feedback.objects.filter(type='junk').count(), 1)
class TaskTests(TestCase):
    """Tests for nomcom celery tasks."""

    @mock.patch("ietf.nomcom.tasks.send_reminders")
    def test_send_nomcom_reminders_task(self, mock_send):
        # The task is a thin wrapper; it should call send_reminders once.
        send_nomcom_reminders_task()
        self.assertEqual(mock_send.call_count, 1)

View file

@ -747,3 +747,27 @@ def ingest_feedback_email(message: bytes, year: int):
email_original_message=message,
) from err
log("Received nomcom email from %s" % feedback.author)
def _is_time_to_send_reminder(nomcom, send_date, nomination_date):
if nomcom.reminder_interval:
days_passed = (send_date - nomination_date).days
return days_passed > 0 and days_passed % nomcom.reminder_interval == 0
else:
return bool(nomcom.reminderdates_set.filter(date=send_date))
def send_reminders():
    """Send acceptance and questionnaire reminders for all active nomcoms."""
    # Imported here to avoid a circular import at module load time.
    from .models import NomCom, NomineePosition

    for nomcom in NomCom.objects.filter(group__state__slug="active"):
        # Skip nominees that have been merged into another record.
        nps = NomineePosition.objects.filter(
            nominee__nomcom=nomcom, nominee__duplicated__isnull=True
        )
        for nominee_position in nps.pending():
            if _is_time_to_send_reminder(nomcom, date_today(), nominee_position.time.date()):
                send_accept_reminder_to_nominee(nominee_position)
                log(f"Sent accept reminder to {nominee_position.nominee.email.address}")
        for nominee_position in nps.accepted().without_questionnaire_response():
            if _is_time_to_send_reminder(nomcom, date_today(), nominee_position.time.date()):
                send_questionnaire_reminder_to_nominee(nominee_position)
                log(f"Sent questionnaire reminder to {nominee_position.nominee.email.address}")

View file

@ -240,7 +240,7 @@ LOGGING = {
#
'loggers': {
'django': {
'handlers': ['console', 'mail_admins',],
'handlers': ['debug_console', 'mail_admins'],
'level': 'INFO',
},
'django.request': {
@ -252,7 +252,7 @@ LOGGING = {
'level': 'INFO',
},
'django.security': {
'handlers': ['console', ],
'handlers': ['debug_console', ],
'level': 'INFO',
},
'oidc_provider': {
@ -260,13 +260,13 @@ LOGGING = {
'level': 'DEBUG',
},
'datatracker': {
'handlers': ['console', ],
'handlers': ['syslog'],
'level': 'INFO',
},
'celery': {
'handlers': ['console'],
'handlers': ['syslog'],
'level': 'INFO',
}
},
},
#
# No logger filters
@ -289,6 +289,13 @@ LOGGING = {
'class': 'logging.StreamHandler',
'formatter': 'django.server',
},
'syslog': {
'level': 'DEBUG',
'class': 'logging.handlers.SysLogHandler',
'facility': 'user',
'formatter': 'plain',
'address': '/dev/log',
},
'mail_admins': {
'level': 'ERROR',
'filters': [
@ -336,15 +343,6 @@ LOGGING = {
},
}
# This should be overridden by settings_local for any logger where debug (or
# other) custom log settings are wanted. Use "ietf/manage.py showloggers -l"
# to show registered loggers. The content here should match the levels above
# and is shown as an example:
UTILS_LOGGER_LEVELS: Dict[str, str] = {
# 'django': 'INFO',
# 'django.server': 'INFO',
}
# End logging
# ------------------------------------------------------------------------

View file

@ -13,6 +13,7 @@ from django.utils import timezone
from ietf.sync import iana
from ietf.sync import rfceditor
from ietf.sync.rfceditor import MIN_QUEUE_RESULTS, parse_queue, update_drafts_from_queue
from ietf.utils import log
from ietf.utils.timezone import date_today
@ -70,6 +71,33 @@ def rfc_editor_index_update_task(full_index=False):
log.log("RFC%s, %s: %s" % (rfc_number, doc.name, c))
@shared_task
def rfc_editor_queue_updates_task():
    """Fetch the RFC Editor queue and update draft states from it.

    Timeouts and suspiciously short (likely truncated) results are logged
    and abort the run without raising, so the periodic task does not crash
    the worker.
    """
    log.log(f"Updating RFC Editor queue states from {settings.RFC_EDITOR_QUEUE_URL}")
    try:
        response = requests.get(
            settings.RFC_EDITOR_QUEUE_URL,
            timeout=30,  # seconds
        )
    except requests.Timeout as exc:
        log.log(f"GET request timed out retrieving RFC editor queue: {exc}")
        return  # failed
    drafts, warnings = parse_queue(io.StringIO(response.text))
    for w in warnings:
        log.log(f"Warning: {w}")
    if len(drafts) < MIN_QUEUE_RESULTS:
        log.log("Not enough results, only %s" % len(drafts))
        return  # failed
    changed, warnings = update_drafts_from_queue(drafts)
    for w in warnings:
        log.log(f"Warning: {w}")
    for c in changed:
        log.log(f"Updated {c}")
@shared_task
def iana_changes_update_task():
# compensate to avoid we ask for something that happened now and then

View file

@ -886,6 +886,36 @@ class TaskTests(TestCase):
tasks.rfc_editor_index_update_task(full_index=False)
self.assertFalse(update_docs_mock.called)
@override_settings(RFC_EDITOR_QUEUE_URL="https://rfc-editor.example.com/queue/")
@mock.patch("ietf.sync.tasks.update_drafts_from_queue")
@mock.patch("ietf.sync.tasks.parse_queue")
def test_rfc_editor_queue_updates_task(self, mock_parse, mock_update):
    """Exercise timeout, short-queue, and success paths of the queue task."""
    # test a request timeout
    self.requests_mock.get("https://rfc-editor.example.com/queue/", exc=requests.exceptions.Timeout)
    tasks.rfc_editor_queue_updates_task()
    self.assertFalse(mock_parse.called)
    self.assertFalse(mock_update.called)

    # now return a value rather than an exception
    self.requests_mock.get("https://rfc-editor.example.com/queue/", text="the response")

    # mock returning < MIN_QUEUE_RESULTS values - treated as an error, so no update takes place
    mock_parse.return_value = ([n for n in range(rfceditor.MIN_QUEUE_RESULTS - 1)], ["a warning"])
    tasks.rfc_editor_queue_updates_task()
    self.assertEqual(mock_parse.call_count, 1)
    self.assertEqual(mock_parse.call_args[0][0].read(), "the response")
    self.assertFalse(mock_update.called)
    mock_parse.reset_mock()

    # mock returning >= MIN_QUEUE_RESULTS - should succeed
    mock_parse.return_value = ([n for n in range(rfceditor.MIN_QUEUE_RESULTS)], ["a warning"])
    mock_update.return_value = ([1, 2, 3], ["another warning"])
    tasks.rfc_editor_queue_updates_task()
    self.assertEqual(mock_parse.call_count, 1)
    self.assertEqual(mock_parse.call_args[0][0].read(), "the response")
    self.assertEqual(mock_update.call_count, 1)
    self.assertEqual(mock_update.call_args, mock.call([n for n in range(rfceditor.MIN_QUEUE_RESULTS)]))
@override_settings(IANA_SYNC_CHANGES_URL="https://iana.example.com/sync/")
@mock.patch("ietf.sync.tasks.iana.update_history_with_changes")
@mock.patch("ietf.sync.tasks.iana.parse_changes_json")

View file

@ -2,7 +2,6 @@
# -*- coding: utf-8 -*-
import datetime
import subprocess
import os
import json
@ -79,30 +78,18 @@ def notify(request, org, notification):
raise Http404
if request.method == "POST":
def runscript(name):
python = os.path.join(os.path.dirname(settings.BASE_DIR), "env", "bin", "python")
cmd = [python, os.path.join(SYNC_BIN_PATH, name)]
cmdstring = " ".join(cmd)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
out = out.decode('utf-8')
err = err.decode('utf-8')
if p.returncode:
log("Subprocess error %s when running '%s': %s %s" % (p.returncode, cmd, err, out))
raise subprocess.CalledProcessError(p.returncode, cmdstring, "\n".join([err, out]))
if notification == "index":
log("Queuing RFC Editor index sync from notify view POST")
tasks.rfc_editor_index_update_task.delay()
elif notification == "queue":
log("Queuing RFC Editor queue sync from notify view POST")
tasks.rfc_editor_queue_updates_task.delay()
elif notification == "changes":
log("Queuing IANA changes sync from notify view POST")
tasks.iana_changes_update_task.delay()
elif notification == "protocols":
log("Queuing IANA protocols sync from notify view POST")
tasks.iana_protocols_update_task.delay()
elif notification == "queue":
log("Running sync script from notify view POST")
runscript("rfc-editor-queue-updates")
return HttpResponse("OK", content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET)

View file

@ -9,7 +9,7 @@ class _ToolVersionManager:
"xym",
"yanglint",
]
_versions = dict()
_versions: dict[str, str] = dict()
def __getitem__(self, item):
if item not in self._known:

View file

@ -9,26 +9,10 @@ import inspect
import os.path
import traceback
from typing import Callable # pyflakes:ignore
from django.conf import settings
from pythonjsonlogger import jsonlogger
import debug # pyflakes:ignore
formatter = jsonlogger.JsonFormatter
for name, level in settings.UTILS_LOGGER_LEVELS.items():
logger = logging.getLogger(name)
if not logger.hasHandlers():
debug.say(' Adding handlers to logger %s' % logger.name)
handlers = [
logging.StreamHandler(),
]
for h in handlers:
h.setFormatter(formatter) # type: ignore
h.setLevel(level)
logger.addHandler(h)
debug.say(" Setting %s logging level to %s" % (logger.name, level))
logger.setLevel(level)
def getclass(frame):
cls = None

View file

@ -141,6 +141,16 @@ class Command(BaseCommand):
),
)
PeriodicTask.objects.get_or_create(
name="Expire Last Calls",
task="ietf.doc.tasks.expire_last_calls_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["daily"],
description="Move docs whose last call has expired to their next states",
),
)
PeriodicTask.objects.get_or_create(
name="Sync with IANA changes",
task="ietf.sync.tasks.iana_changes_update_task",
@ -181,6 +191,36 @@ class Command(BaseCommand):
)
)
PeriodicTask.objects.get_or_create(
name="Generate idnits2 rfcs-obsoleted blob",
task="ietf.doc.tasks.generate_idnits2_rfcs_obsoleted_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["hourly"],
description="Generate the rfcs-obsoleted file used by idnits",
),
)
PeriodicTask.objects.get_or_create(
name="Generate idnits2 rfc-status blob",
task="ietf.doc.tasks.generate_idnits2_rfc_status_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["hourly"],
description="Generate the rfc_status blob used by idnits",
),
)
PeriodicTask.objects.get_or_create(
name="Send NomCom reminders",
task="ietf.nomcom.tasks.send_nomcom_reminders_task",
defaults=dict(
enabled=False,
crontab=self.crontabs["daily"],
description="Send acceptance and questionnaire reminders to nominees",
),
)
def show_tasks(self):
for label, crontab in self.crontabs.items():
tasks = PeriodicTask.objects.filter(crontab=crontab).order_by(

View file

@ -11,18 +11,7 @@ from django.core.management.base import BaseCommand
import debug # pyflakes:ignore
class Command(BaseCommand):
"""
Display a list or tree representation of python loggers.
Add a UTILS_LOGGER_LEVELS setting in settings_local.py to configure
non-default logging levels for any registered logger, for instance:
UTILS_LOGGER_LEVELS = {
'oicd_provider': 'DEBUG',
'urllib3.connection': 'DEBUG',
}
"""
"""Display a list or tree representation of python loggers"""
help = dedent(__doc__).strip()