chore: remove already-used one-shot import commands (#7333)

This commit is contained in:
Robert Sparks 2024-04-18 10:25:02 -05:00 committed by GitHub
parent b458d475a5
commit 370c3b24ed
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 0 additions and 932 deletions

View file

@ -1,294 +0,0 @@
# Copyright The IETF Trust 2023, All Rights Reserved
import csv
import datetime
import re
import shutil
import subprocess
import tempfile
from pathlib import Path
import dateutil
from django.conf import settings
from django.core.management import BaseCommand
from ietf.group.models import Appeal, AppealArtifact
class Command(BaseCommand):
    help = "Performs a one-time import of IESG appeals"

    def handle(self, *args, **options):
        """Import historical IESG appeals and their response artifacts.

        Clones the iesg-scraper repository (which holds markdown conversions
        of some artifacts), reads the remaining artifacts from the legacy
        www6 appeals directory, creates Appeal/AppealArtifact rows, and
        writes ``iesg_appeal_redirects.csv`` mapping the old www6 URLs to
        the new datatracker artifact URLs.
        """
        # Bug fix: a bare "import dateutil" at module level does not
        # guarantee the parser submodule is loaded; import it explicitly
        # before it is used below.
        import dateutil.parser

        old_appeals_root = (
            "/a/www/www6/iesg/appeal"
            if settings.SERVER_MODE == "production"
            else "/assets/www6/iesg/appeal"
        )
        tmpdir = tempfile.mkdtemp()
        process = subprocess.Popen(
            ["git", "clone", "https://github.com/kesara/iesg-scraper.git", tmpdir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        sub_stdout, sub_stderr = process.communicate()
        # Spot-check one known file rather than trusting git's exit status.
        if not (Path(tmpdir) / "iesg_appeals" / "anderson-2006-03-08.md").exists():
            self.stdout.write(
                "Git clone of the iesg-scraper directory did not go as expected"
            )
            # Bug fix: OutputWrapper.write() takes a single message string;
            # the old two-argument calls passed the captured bytes as the
            # style_func argument and raised a TypeError.
            self.stdout.write(f"stdout: {sub_stdout}")
            self.stdout.write(f"stderr: {sub_stderr}")
            self.stdout.write(f"Clean up {tmpdir} manually")
            exit(-1)

        titles = [
            "Appeal: IESG Statement on Guidance on In-Person and Online Interim Meetings (John Klensin, 2023-08-15)",
            "Appeal of current Guidance on in-Person and Online meetings (Ted Hardie, Alan Frindell, 2023-07-19)",
            "Appeal re: URI Scheme Application and draft-mcsweeney-drop-scheme (Tim McSweeney, 2020-07-08)",
            "Appeal to the IESG re WGLC of draft-ietf-spring-srv6-network-programming (Fernando Gont, Andrew Alston, and Sander Steffann, 2020-04-22)",
            "Appeal re Protocol Action: 'URI Design and Ownership' to Best \nCurrent Practice (draft-nottingham-rfc7320bis-03.txt) (John Klensin; 2020-02-04)",
            "Appeal of IESG Conflict Review process and decision on draft-mavrogiannopoulos-pkcs8-validated-parameters-02 (John Klensin; 2018-07-07)",
            "Appeal of IESG decision to defer action and request that ISE publish draft-klensin-dns-function-considerations (John Klensin; 2017-11-29)",
            'Appeal to the IESG concerning its approval of the "draft-ietf-ianaplan-icg-response" (PDF file) (JFC Morfin; 2015-03-11)',
            "Appeal re tzdist mailing list moderation (Tobias Conradi; 2014-08-28) / Withdrawn by Submitter",
            "Appeal re draft-masotta-tftpexts-windowsize-opt (Patrick Masotta; 2013-11-14)",
            "Appeal re draft-ietf-manet-nhdp-sec-threats (Abdussalam Baryun; 2013-06-19)",
            "Appeal of decision to advance RFC6376 (Douglas Otis; 2013-05-30)",
            "Appeal to the IESG in regards to RFC 6852 (PDF file) (JFC Morfin; 2013-04-05)",
            "Appeal to the IESG concerning the approbation of the IDNA2008 document set (PDF file) (JFC Morfin; 2010-03-10)",
            "Authentication-Results Header Field Appeal (Douglas Otis, David Rand; 2009-02-16) / Withdrawn by Submitter",
            "Appeal to the IAB of IESG rejection of Appeal to Last Call draft-ietf-grow-anycast (Dean Anderson; 2008-11-14)",
            "Appeal to the IESG Concerning the Way At Large Internet Lead Users Are Not Permitted To Adequately Contribute to the IETF Deliverables (JFC Morfin; 2008-09-10)",
            "Appeal over suspension of posting rights for Todd Glassey (Todd Glassey; 2008-07-28)",
            "Appeal against IESG blocking DISCUSS on draft-klensin-rfc2821bis (John C Klensin; 2008-06-13)",
            "Appeal: Continued Abuse of Process by IPR-WG Chair (Dean Anderson; 2007-12-26)",
            "Appeal to the IESG from Todd Glassey (Todd Glassey; 2007-11-26)",
            "Appeal Against the Removal of the Co-Chairs of the GEOPRIV Working Group (PDF file) (Randall Gellens, Allison Mankin, and Andrew Newton; 2007-06-22)",
            "Appeal concerning the WG-LTRU rechartering (JFC Morfin; 2006-10-24)",
            "Appeal against decision within July 10 IESG appeal dismissal (JFC Morfin; 2006-09-09)",
            "Appeal: Mandatory to implement HTTP authentication mechanism in the Atom Publishing Protocol (Robert Sayre; 2006-08-29)",
            "Appeal Against IESG Decisions Regarding the draft-ietf-ltru-matching (PDF file) (JFC Morfin; 2006-08-16)",
            "Amended Appeal Re: grow: Last Call: 'Operation of Anycast Services' to BCP (draft-ietf-grow-anycast) (Dean Anderson; 2006-06-14)",
            "Appeal Against an IESG Decision Denying Me IANA Language Registration Process by way of PR-Action (PDF file) (JFC Morfin; 2006-05-17)",
            "Appeal to the IESG of PR-Action against Dean Anderson (Dean Anderson; 2006-03-08)",
            "Appeal to IESG against AD decision: one must clear the confusion opposing the RFC 3066 Bis consensus (JFC Morfin; 2006-02-20)",
            "Appeal to the IESG of an IESG decision (JFC Morfin; 2006-02-17)",
            "Appeal to the IESG in reference to the ietf-languages@alvestrand.no mailing list (JFC Morfin; 2006-02-07)",
            "Appeal to the IESG against an IESG decision concerning RFC 3066 Bis Draft (JFC Morfin; 2006-01-14)",
            "Appeal over a key change in a poor RFC 3066 bis example (JFC Morfin; 2005-10-19)",
            "Additional appeal against publication of draft-lyon-senderid-* in regards to its recommended use of Resent- header fields in the way that is inconsistant with RFC2822(William Leibzon; 2005-08-29)",
            "Appeal: Publication of draft-lyon-senderid-core-01 in conflict with referenced draft-schlitt-spf-classic-02 (Julian Mehnle; 2005-08-25)",
            'Appeal of decision to standardize "Mapping Between the Multimedia Messaging Service (MMS) and Internet Mail" (John C Klensin; 2005-06-10)',
            "Appeal regarding IESG decision on the GROW WG (David Meyer; 2003-11-15)",
            "Appeal: Official notice of appeal on suspension rights (Todd Glassey; 2003-08-06)",
            "Appeal: AD response to Site-Local Appeal (Tony Hain; 2003-07-31)",
            "Appeal against IESG decision for draft-chiba-radius-dynamic-authorization-05.txt (Glen Zorn; 2003-01-15)",
            "Appeal Against moving draft-ietf-ipngwg-addr-arch-v3 to Draft Standard (Robert Elz; 2002-11-05)",
        ]
        # Each title embeds its submission date - extract it for Appeal.date.
        date_re = re.compile(r"\d{4}-\d{2}-\d{2}")
        dates = [
            datetime.datetime.strptime(date_re.search(t).group(), "%Y-%m-%d").date()
            for t in titles
        ]

        # For each appeal (parallel to titles/dates): the appeal file,
        # followed by any response file(s). Some appeals were withdrawn and
        # have no response.
        parts = [
            ["klensin-2023-08-15.txt", "response-to-klensin-2023-08-15.txt"],
            [
                "hardie-frindell-2023-07-19.txt",
                "response-to-hardie-frindell-2023-07-19.txt",
            ],
            ["mcsweeney-2020-07-08.txt", "response-to-mcsweeney-2020-07-08.pdf"],
            ["gont-2020-04-22.txt", "response-to-gont-2020-06-02.txt"],
            ["klensin-2020-02-04.txt", "response-to-klensin-2020-02-04.txt"],
            ["klensin-2018-07-07.txt", "response-to-klensin-2018-07-07.txt"],
            ["klensin-2017-11-29.txt", "response-to-klensin-2017-11-29.md"],
            ["morfin-2015-03-11.pdf", "response-to-morfin-2015-03-11.md"],
            ["conradi-2014-08-28.txt"],
            ["masotta-2013-11-14.txt", "response-to-masotta-2013-11-14.md"],
            ["baryun-2013-06-19.txt", "response-to-baryun-2013-06-19.md"],
            ["otis-2013-05-30.txt", "response-to-otis-2013-05-30.md"],
            ["morfin-2013-04-05.pdf", "response-to-morfin-2013-04-05.md"],
            ["morfin-2010-03-10.pdf", "response-to-morfin-2010-03-10.txt"],
            ["otis-2009-02-16.txt"],
            ["anderson-2008-11-14.md", "response-to-anderson-2008-11-14.txt"],
            ["morfin-2008-09-10.txt", "response-to-morfin-2008-09-10.txt"],
            ["glassey-2008-07-28.txt", "response-to-glassey-2008-07-28.txt"],
            ["klensin-2008-06-13.txt", "response-to-klensin-2008-06-13.txt"],
            ["anderson-2007-12-26.txt", "response-to-anderson-2007-12-26.txt"],
            ["glassey-2007-11-26.txt", "response-to-glassey-2007-11-26.txt"],
            ["gellens-2007-06-22.pdf", "response-to-gellens-2007-06-22.txt"],
            ["morfin-2006-10-24.txt", "response-to-morfin-2006-10-24.txt"],
            ["morfin-2006-09-09.txt", "response-to-morfin-2006-09-09.txt"],
            ["sayre-2006-08-29.txt", "response-to-sayre-2006-08-29.txt"],
            [
                "morfin-2006-08-16.pdf",
                "response-to-morfin-2006-08-17.txt",
                "response-to-morfin-2006-08-17-part2.txt",
            ],
            ["anderson-2006-06-13.txt", "response-to-anderson-2006-06-14.txt"],
            ["morfin-2006-05-17.pdf", "response-to-morfin-2006-05-17.txt"],
            ["anderson-2006-03-08.md", "response-to-anderson-2006-03-08.txt"],
            ["morfin-2006-02-20.txt", "response-to-morfin-2006-02-20.txt"],
            ["morfin-2006-02-17.txt", "response-to-morfin-2006-02-17.txt"],
            ["morfin-2006-02-07.txt", "response-to-morfin-2006-02-07.txt"],
            ["morfin-2006-01-14.txt", "response-to-morfin-2006-01-14.txt"],
            ["morfin-2005-10-19.txt", "response-to-morfin-2005-10-19.txt"],
            ["leibzon-2005-08-29.txt", "response-to-leibzon-2005-08-29.txt"],
            ["mehnle-2005-08-25.txt", "response-to-mehnle-2005-08-25.txt"],
            ["klensin-2005-06-10.txt", "response-to-klensin-2005-06-10.txt"],
            ["meyer-2003-11-15.txt", "response-to-meyer-2003-11-15.txt"],
            ["glassey-2003-08-06.txt", "response-to-glassey-2003-08-06.txt"],
            ["hain-2003-07-31.txt", "response-to-hain-2003-07-31.txt"],
            ["zorn-2003-01-15.txt", "response-to-zorn-2003-01-15.txt"],
            ["elz-2002-11-05.txt", "response-to-elz-2002-11-05.txt"],
        ]
        assert len(titles) == len(dates)
        assert len(titles) == len(parts)

        # Timestamps (as strings, parsed below) for every artifact file.
        # Commented-out entries preserve the original EST/EDT wall-clock
        # forms that were normalized to explicit UTC offsets.
        part_times = dict()
        part_times["klensin-2023-08-15.txt"] = "2023-08-15 15:03:55 -0400"
        part_times["response-to-klensin-2023-08-15.txt"] = "2023-08-24 18:54:13 +0300"
        part_times["hardie-frindell-2023-07-19.txt"] = "2023-07-19 07:17:16PDT"
        part_times["response-to-hardie-frindell-2023-07-19.txt"] = (
            "2023-08-15 11:58:26PDT"
        )
        part_times["mcsweeney-2020-07-08.txt"] = "2020-07-08 14:45:00 -0400"
        part_times["response-to-mcsweeney-2020-07-08.pdf"] = "2020-07-28 12:54:04 -0000"
        part_times["gont-2020-04-22.txt"] = "2020-04-22 22:26:20 -0400"
        part_times["response-to-gont-2020-06-02.txt"] = "2020-06-02 20:44:29 -0400"
        part_times["klensin-2020-02-04.txt"] = "2020-02-04 13:54:46 -0500"
        # part_times["response-to-klensin-2020-02-04.txt"]="2020-03-24 11:49:31EDT"
        part_times["response-to-klensin-2020-02-04.txt"] = "2020-03-24 11:49:31 -0400"
        part_times["klensin-2018-07-07.txt"] = "2018-07-07 12:40:43PDT"
        # part_times["response-to-klensin-2018-07-07.txt"]="2018-08-16 10:46:45EDT"
        part_times["response-to-klensin-2018-07-07.txt"] = "2018-08-16 10:46:45 -0400"
        part_times["klensin-2017-11-29.txt"] = "2017-11-29 09:35:02 -0500"
        part_times["response-to-klensin-2017-11-29.md"] = "2017-11-30 11:33:04 -0500"
        part_times["morfin-2015-03-11.pdf"] = "2015-03-11 18:03:44 -0000"
        part_times["response-to-morfin-2015-03-11.md"] = "2015-04-16 15:18:09 -0000"
        part_times["conradi-2014-08-28.txt"] = "2014-08-28 22:28:06 +0300"
        part_times["masotta-2013-11-14.txt"] = "2013-11-14 15:35:19 +0200"
        part_times["response-to-masotta-2013-11-14.md"] = "2014-01-27 07:39:32 -0800"
        part_times["baryun-2013-06-19.txt"] = "2013-06-19 06:29:51PDT"
        part_times["response-to-baryun-2013-06-19.md"] = "2013-07-02 15:24:42 -0700"
        part_times["otis-2013-05-30.txt"] = "2013-05-30 19:35:18 +0000"
        part_times["response-to-otis-2013-05-30.md"] = "2013-06-27 11:56:48 -0700"
        part_times["morfin-2013-04-05.pdf"] = "2013-04-05 17:31:19 -0700"
        part_times["response-to-morfin-2013-04-05.md"] = "2013-04-17 08:17:29 -0700"
        part_times["morfin-2010-03-10.pdf"] = "2010-03-10 21:40:58 +0100"
        part_times["response-to-morfin-2010-03-10.txt"] = "2010-04-07 14:26:06 -0700"
        part_times["otis-2009-02-16.txt"] = "2009-02-16 15:47:15 -0800"
        part_times["anderson-2008-11-14.md"] = "2008-11-14 00:16:58 -0500"
        part_times["response-to-anderson-2008-11-14.txt"] = "2008-12-15 11:00:02 -0800"
        part_times["morfin-2008-09-10.txt"] = "2008-09-10 04:10:13 +0200"
        part_times["response-to-morfin-2008-09-10.txt"] = "2008-09-28 10:00:01PDT"
        part_times["glassey-2008-07-28.txt"] = "2008-07-28 08:34:52 -0700"
        part_times["response-to-glassey-2008-07-28.txt"] = "2008-09-02 11:00:01PDT"
        part_times["klensin-2008-06-13.txt"] = "2008-06-13 21:14:38 -0400"
        part_times["response-to-klensin-2008-06-13.txt"] = "2008-07-07 10:00:01 PDT"
        # part_times["anderson-2007-12-26.txt"]="2007-12-26 17:19:34EST"
        part_times["anderson-2007-12-26.txt"] = "2007-12-26 17:19:34 -0500"
        part_times["response-to-anderson-2007-12-26.txt"] = "2008-01-15 17:21:05 -0500"
        part_times["glassey-2007-11-26.txt"] = "2007-11-26 08:13:22 -0800"
        part_times["response-to-glassey-2007-11-26.txt"] = "2008-01-23 17:38:43 -0500"
        part_times["gellens-2007-06-22.pdf"] = "2007-06-22 21:45:41 -0400"
        part_times["response-to-gellens-2007-06-22.txt"] = "2007-09-20 14:01:27 -0400"
        part_times["morfin-2006-10-24.txt"] = "2006-10-24 05:03:17 +0200"
        part_times["response-to-morfin-2006-10-24.txt"] = "2006-11-07 12:56:02 -0500"
        part_times["morfin-2006-09-09.txt"] = "2006-09-09 02:54:55 +0200"
        part_times["response-to-morfin-2006-09-09.txt"] = "2006-09-15 12:56:31 -0400"
        part_times["sayre-2006-08-29.txt"] = "2006-08-29 17:05:03 -0400"
        part_times["response-to-sayre-2006-08-29.txt"] = "2006-10-16 13:07:18 -0400"
        part_times["morfin-2006-08-16.pdf"] = "2006-08-16 18:28:19 -0400"
        part_times["response-to-morfin-2006-08-17.txt"] = "2006-08-22 12:05:42 -0400"
        part_times["response-to-morfin-2006-08-17-part2.txt"] = (
            "2006-11-07 13:00:58 -0500"
        )
        # part_times["anderson-2006-06-13.txt"]="2006-06-13 21:51:18EDT"
        part_times["anderson-2006-06-13.txt"] = "2006-06-13 21:51:18 -0400"
        part_times["response-to-anderson-2006-06-14.txt"] = "2006-07-10 14:31:08 -0400"
        part_times["morfin-2006-05-17.pdf"] = "2006-05-17 06:46:18 +0200"
        part_times["response-to-morfin-2006-05-17.txt"] = "2006-07-10 14:18:10 -0400"
        part_times["anderson-2006-03-08.md"] = "2006-03-08 09:42:44 +0100"
        part_times["response-to-anderson-2006-03-08.txt"] = "2006-03-20 14:55:38 -0500"
        part_times["morfin-2006-02-20.txt"] = "2006-02-20 19:18:24 +0100"
        part_times["response-to-morfin-2006-02-20.txt"] = "2006-03-06 13:08:39 -0500"
        part_times["morfin-2006-02-17.txt"] = "2006-02-17 18:59:38 +0100"
        part_times["response-to-morfin-2006-02-17.txt"] = "2006-07-10 14:05:15 -0400"
        part_times["morfin-2006-02-07.txt"] = "2006-02-07 19:38:57 -0500"
        part_times["response-to-morfin-2006-02-07.txt"] = "2006-02-21 19:09:26 -0500"
        part_times["morfin-2006-01-14.txt"] = "2006-01-14 15:05:24 +0100"
        part_times["response-to-morfin-2006-01-14.txt"] = "2006-02-21 12:23:38 -0500"
        part_times["morfin-2005-10-19.txt"] = "2005-10-19 17:12:11 +0200"
        part_times["response-to-morfin-2005-10-19.txt"] = "2005-11-15 11:42:30 -0500"
        part_times["leibzon-2005-08-29.txt"] = "2005-08-29 08:28:52PDT"
        part_times["response-to-leibzon-2005-08-29.txt"] = "2005-12-08 14:04:47 -0500"
        part_times["mehnle-2005-08-25.txt"] = "2005-08-25 00:45:26 +0200"
        part_times["response-to-mehnle-2005-08-25.txt"] = "2005-12-08 13:37:38 -0500"
        part_times["klensin-2005-06-10.txt"] = "2005-06-10 14:49:17 -0400"
        part_times["response-to-klensin-2005-06-10.txt"] = "2005-07-22 18:14:06 -0400"
        part_times["meyer-2003-11-15.txt"] = "2003-11-15 09:47:11 -0800"
        part_times["response-to-meyer-2003-11-15.txt"] = "2003-11-25 10:56:06 -0500"
        part_times["glassey-2003-08-06.txt"] = "2003-08-06 02:14:24 +0000"
        part_times["response-to-glassey-2003-08-06.txt"] = "2003-09-24 09:54:51 -0400"
        part_times["hain-2003-07-31.txt"] = "2003-07-31 16:44:19 -0700"
        part_times["response-to-hain-2003-07-31.txt"] = "2003-09-30 14:44:30 -0400"
        part_times["zorn-2003-01-15.txt"] = "2003-01-15 01:22:28 -0800"
        part_times["elz-2002-11-05.txt"] = "2002-11-05 10:51:13 +0700"
        # No time could be found for this one:
        part_times["response-to-zorn-2003-01-15.txt"] = "2003-02-08"
        # This one was issued sometime between 2002-12-27 (when IESG minutes note that the
        # appeal response was approved) and 2003-01-04 (when the appeal was escalated to
        # the IAB) - we're using the earlier end of the window
        part_times["response-to-elz-2002-11-05.txt"] = "2002-12-27"
        for name in part_times:
            part_times[name] = dateutil.parser.parse(part_times[name]).astimezone(
                datetime.timezone.utc
            )

        redirects = []
        for index, title in enumerate(titles):
            # IESG is group 2
            appeal = Appeal.objects.create(
                name=title, date=dates[index], group_id=2
            )
            for part in parts[index]:
                if part.endswith(".pdf"):
                    content_type = "application/pdf"
                else:
                    # .txt artifacts are also stored as markdown
                    content_type = "text/markdown;charset=utf-8"
                # Markdown conversions come from the scraper clone; everything
                # else is read from the legacy www6 directory.
                if part.endswith(".md"):
                    source_path = Path(tmpdir) / "iesg_appeals" / part
                else:
                    source_path = Path(old_appeals_root) / part
                with source_path.open("rb") as source_file:
                    bits = source_file.read()
                    if part == "morfin-2008-09-10.txt":
                        # Mac-Roman encoded with CR line endings - normalize to UTF-8/LF.
                        bits = bits.decode("macintosh")
                        bits = bits.replace("\r", "\n")
                        bits = bits.encode("utf8")
                    elif part in ["morfin-2006-02-07.txt", "morfin-2006-01-14.txt"]:
                        bits = bits.decode("windows-1252").encode("utf8")
                    artifact_type_id = (
                        "response" if part.startswith("response") else "appeal"
                    )
                    artifact = AppealArtifact.objects.create(
                        appeal=appeal,
                        artifact_type_id=artifact_type_id,
                        date=part_times[part].date(),
                        content_type=content_type,
                        bits=bits,
                    )
                    redirects.append(
                        [
                            f'www6.ietf.org/iesg/appeal/{part.replace(".md", ".html") if part.endswith(".md") else part}',
                            f"https://datatracker.ietf.org/group/iesg/appeals/artifact/{artifact.pk}",
                            302,
                        ]
                    )
        shutil.rmtree(tmpdir)
        with open("iesg_appeal_redirects.csv", "w", newline="") as f:
            csvwriter = csv.writer(f)
            for row in redirects:
                csvwriter.writerow(row)

View file

@ -1,274 +0,0 @@
# Copyright The IETF Trust 2024, All Rights Reserved
import debug # pyflakes:ignore
import csv
import datetime
import os
import shutil
import subprocess
import tempfile
from collections import namedtuple, Counter
from pathlib import Path
from django.conf import settings
from django.core.management.base import BaseCommand
from ietf.doc.models import Document, DocEvent, State
from ietf.utils.text import xslugify
class Command(BaseCommand):
    help = "Performs a one-time import of IESG statements"

    def handle(self, *args, **options):
        """Import historical IESG statements as statement Documents.

        Clones the iesg-scraper repository for the markdown statement files,
        creates a Document (group 2 = IESG) with publish/import DocEvents for
        each statement, copies the file into the document store, and writes
        ``iesg_statement_redirects.csv`` mapping old www.ietf.org URLs to the
        new datatracker URLs.
        """
        # Guard against running the one-shot import twice.
        if Document.objects.filter(type="statement", group__acronym="iesg").exists():
            self.stdout.write("IESG statement documents already exist - exiting")
            exit(-1)
        tmpdir = tempfile.mkdtemp()
        process = subprocess.Popen(
            ["git", "clone", "https://github.com/kesara/iesg-scraper.git", tmpdir],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        sub_stdout, sub_stderr = process.communicate()
        # Spot-check one known file rather than trusting git's exit status.
        if not Path(tmpdir).joinpath("iesg_statements", "2000-08-29-0.md").exists():
            self.stdout.write(
                "Git clone of the iesg-scraper directory did not go as expected"
            )
            # Bug fix: OutputWrapper.write() takes a single message string;
            # the old two-argument calls passed the captured bytes as the
            # style_func argument and raised a TypeError.
            self.stdout.write(f"stdout: {sub_stdout}")
            self.stdout.write(f"stderr: {sub_stderr}")
            self.stdout.write(f"Clean up {tmpdir} manually")
            exit(-1)

        redirects = []
        for item in self.get_work_items():
            # 2007-07-30 is treated as superseded regardless of its title.
            replaced = item.title.endswith(
                " SUPERSEDED"
            ) or item.doc_time.date() == datetime.date(2007, 7, 30)
            title = item.title
            if title.endswith(" - SUPERSEDED"):
                title = title[: -len(" - SUPERSEDED")]
            name = f"statement-iesg-{xslugify(title)}-{item.doc_time:%Y%m%d}"
            dest_filename = f"{name}-00.md"
            # Create Document
            doc = Document.objects.create(
                name=name,
                type_id="statement",
                title=title,
                group_id=2,  # The IESG group
                rev="00",
                uploaded_filename=dest_filename,
            )
            doc.set_state(
                State.objects.get(
                    type_id="statement",
                    slug="replaced" if replaced else "active",
                )
            )
            e1 = DocEvent.objects.create(
                time=item.doc_time,
                type="published_statement",
                doc=doc,
                rev="00",
                by_id=1,  # (System)
                desc="Statement published (note: The exact time of day is inaccurate - the actual time of day is not known)",
            )
            e2 = DocEvent.objects.create(
                type="added_comment",
                doc=doc,
                rev="00",
                by_id=1,  # (System)
                desc="Statement moved into datatracker from www.ietf.org",
            )
            doc.save_with_history([e1, e2])
            # Put file in place
            source = Path(tmpdir).joinpath("iesg_statements", item.source_filename)
            dest = Path(settings.DOCUMENT_PATH_PATTERN.format(doc=doc)).joinpath(
                dest_filename
            )
            if dest.exists():
                self.stdout.write(
                    f"WARNING: {dest} already exists - not overwriting it."
                )
            else:
                os.makedirs(dest.parent, exist_ok=True)
                shutil.copy(source, dest)
            redirects.append(
                [
                    f"www.ietf.org/about/groups/iesg/statements/{item.slug}",
                    f"https://datatracker.ietf.org/doc/{name}",
                    302,
                ]
            )
        shutil.rmtree(tmpdir)
        with open("iesg_statement_redirects.csv", "w", newline="") as f:
            csvwriter = csv.writer(f)
            for row in redirects:
                csvwriter.writerow(row)

    def get_work_items(self):
        """Pair each scraped statement row with its old www.ietf.org slug.

        Returns a list of Item namedtuples (doc_time, source_filename,
        title, slug) in oldest-first order, synthesizing a fake minute
        value in doc_time to preserve the relative order of statements
        published on the same day.
        """
        Item = namedtuple("Item", "doc_time source_filename title slug")
        items = []
        # expandtabs(1) + split/join collapses any tab separators from the
        # scrape to single spaces so "date title" parsing below works.
        dressed_rows = " ".join(
            self.cut_paste_from_www().expandtabs(1).split(" ")
        ).split("\n")
        old_slugs = self.get_old_slugs()
        # Rube-Goldberg-esque dance to deal with conflicting directions of the scrape and
        # what order we want the result to sort to
        dressed_rows.reverse()
        old_slugs.reverse()
        total_times_date_seen = Counter([row.split(" ")[0] for row in dressed_rows])
        count_date_seen_so_far = Counter()
        for row, slug in zip(dressed_rows, old_slugs):
            date_part = row.split(" ")[0]
            title_part = row[len(date_part) + 1 :]
            datetime_args = list(map(int, date_part.replace("-0", "-").split("-")))
            # Use the minutes in timestamps to preserve order of statements
            # on the same day as they currently appear at www.ietf.org
            datetime_args.extend([12, count_date_seen_so_far[date_part]])
            count_date_seen_so_far[date_part] += 1
            doc_time = datetime.datetime(*datetime_args, tzinfo=datetime.timezone.utc)
            items.append(
                Item(
                    doc_time,
                    f"{date_part}-{total_times_date_seen[date_part] - count_date_seen_so_far[date_part]}.md",
                    title_part,
                    slug,
                )
            )
        return items

    def cut_paste_from_www(self):
        """Return the statement index scraped from www.ietf.org, one
        "YYYY-MM-DD title" row per line, newest first."""
        return """2023-08-24 Support Documents in IETF Working Groups
2023-08-14 Guidance on In-Person and Online Interim Meetings
2023-05-01 IESG Statement on EtherTypes
2023-03-15 Second Report on the RFC 8989 Experiment
2023-01-27 Guidance on In-Person and Online Interim Meetings - SUPERSEDED
2022-10-31 Statement on Restricting Access to IETF IT Systems
2022-01-21 Handling Ballot Positions
2021-09-01 Report on the RFC 8989 experiment
2021-07-21 IESG Statement on Allocation of Email Addresses in the ietf.org Domain
2021-05-11 IESG Statement on Inclusive Language
2021-05-10 IESG Statement on Internet-Draft Authorship
2021-05-07 IESG Processing of RFC Errata for the IETF Stream
2021-04-16 Last Call Guidance to the Community
2020-07-23 IESG Statement On Oppressive or Exclusionary Language
2020-05-01 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2018-03-16 IETF Meeting Photography Policy
2018-01-11 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2017-02-09 License File for Open Source Repositories
2016-11-13 Support Documents in IETF Working Groups - SUPERSEDED
2016-02-05 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2016-01-11 Guidance on Face-to-Face and Virtual Interim Meetings - SUPERSEDED
2015-08-20 IESG Statement on Maximizing Encrypted Access To IETF Information
2015-06-11 IESG Statement on Internet-Draft Authorship - SUPERSEDED
2014-07-20 IESG Statement on Designating RFCs as Historic
2014-05-07 DISCUSS Criteria in IESG Review
2014-03-02 Writable MIB Module IESG Statement
2013-11-03 IETF Anti-Harassment Policy
2012-10-25 IESG Statement on Ethertypes - SUPERSEDED
2012-10-25 IESG Statement on Removal of an Internet-Draft from the IETF Web Site
2011-10-20 IESG Statement on Designating RFCs as Historic - SUPERSEDED
2011-06-27 IESG Statement on Designating RFCs as Historic - SUPERSEDED
2011-06-13 IESG Statement on IESG Processing of RFC Errata concerning RFC Metadata
2010-10-11 IESG Statement on Document Shepherds
2010-05-24 IESG Statement on the Usage of Assignable Codepoints, Addresses and Names in Specification Examples
2010-05-24 IESG Statement on NomCom Eligibility and Day Passes
2009-09-08 IESG Statement on Copyright
2009-01-20 IESG Statement on Proposed Status for IETF Documents Reserving Resources for Example Purposes
2008-09-02 Guidance on Interim Meetings, Conference Calls and Jabber Sessions - SUPERSEDED
2008-07-30 IESG Processing of RFC Errata for the IETF Stream
2008-04-14 IESG Statement on Spam Control on IETF Mailing Lists
2008-03-03 IESG Statement on Registration Requests for URIs Containing Telephone Numbers
2008-02-27 IESG Statement on RFC3406 and URN Namespaces Registry Review
2008-01-23 Advice for WG Chairs Dealing with Off-Topic Postings
2007-10-04 On Appeals of IESG and Area Director Actions and Decisions
2007-07-05 Experimental Specification of New Congestion Control Algorithms
2007-03-20 Guidance on Area Director Sponsoring of Documents
2007-01-15 Last Call Guidance to the Community - SUPERSEDED
2006-04-19 IESG Statement: Normative and Informative References
2006-02-17 IESG Statement on Disruptive Posting
2006-01-09 Guidance for Spam Control on IETF Mailing Lists - SUPERSEDED
2006-01-05 IESG Statement on AUTH48 State
2005-05-12 Syntax for Format Definitions
2003-02-11 IESG Statement on IDN
2002-11-27 Copyright Statement in MIB and PIB Modules
2002-03-13 Guidance for Spam Control on IETF Mailing Lists - SUPERSEDED
2001-12-21 On Design Teams
2001-10-01 Guidelines for the Use of Formal Languages in IETF Specifications
2001-03-21 Establishment of Temporary Sub-IP Area
2000-12-06 Plans to Organize "Sub-IP" Technologies in the IETF
2000-11-20 A New IETF Work Area
2000-08-29 Guidance on Interim IETF Working Group Meetings and Conference Calls - SUPERSEDED
2000-08-29 IESG Guidance on the Moderation of IETF Working Group Mailing Lists"""

    def get_old_slugs(self):
        """Old www.ietf.org URL slugs, in the same (newest-first) order as
        the rows returned by cut_paste_from_www()."""
        return [
            "support-documents",
            "interim-meetings-guidance",
            "ethertypes",
            "second-report-on-the-rfc-8989-experiment",
            "interim-meetings-guidance-2023-01-27",
            "statement-on-restricting-access",
            "handling-ballot-positions",
            "report-on-rfc8989-experiment",
            "email-addresses-ietf-domain",
            "on-inclusive-language",
            "internet-draft-authorship",
            "processing-errata-ietf-stream",
            "last-call-guidance",
            "statement-on-oppressive-exclusionary-language",
            "interim-meetings-guidance-2020-05-01",
            "meeting-photography-policy",
            "interim-meetings-guidance-2018-01-11",
            "open-source-repositories-license",
            "support-documents-2016-11-13",
            "interim-meetings-guidance-2016-02-05",
            "interim-meetings-guidance-2016-01-11",
            "maximizing-encrypted-access",
            "internet-draft-authorship-2015-06-11",
            "designating-rfcs-historic",
            "iesg-discuss-criteria",
            "writable-mib-module",
            "anti-harassment-policy",
            "ethertypes-2012-10-25",
            "internet-draft-removal",
            "designating-rfcs-historic-2011-10-20",
            "designating-rfcs-historic-2011-06-27",
            "rfc-metadata-errata",
            "document-shepherds",
            "assignable-codepoints-addresses-names",
            "nomcom-eligibility-day-passes",
            "copyright-2009-09-08",
            "reserving-resources-examples",
            "interim-meetings-guidance-2008-09-02",
            "processing-rfc-errata",
            "spam-control-2008-04-14",
            "registration-requests-uris",
            "urn-namespaces-registry",
            "off-topic-postings",
            "appeals-actions-decisions",
            "experimental-congestion-control",
            "area-director-sponsoring-documents",
            "last-call-guidance-2007-01-15",
            "normative-informative-references",
            "disruptive-posting",
            "spam-control-2006-01-09",
            "auth48",
            "syntax-format-definitions",
            "idn",
            "copyright-2002-11-27",
            "spam-control-2002-03-13",
            "design-teams",
            "formal-languages-use",
            # NOTE(review): the 2001-03-21 / 2000-12-06 / 2000-11-20 rows in
            # cut_paste_from_www() pair with the three slugs below in this
            # order, which swaps the 2000-11-20 and 2000-12-06 dates -
            # presumably matching the (inconsistent) old site; verify before
            # reuse.
            "sub-ip-area-2001-03-21",
            "sub-ip-area-2000-11-20",
            "sub-ip-area-2000-12-06",
            "interim-meetings-guidance-2000-08-29",
            "mailing-lists-moderation",
        ]

View file

@ -1,364 +0,0 @@
# Copyright The IETF Trust 2023, All Rights Reserved
from collections import namedtuple
import csv
import datetime
import os
import re
import shutil
from django.conf import settings
from django.core.management import BaseCommand
from pathlib import Path
from zoneinfo import ZoneInfo
from ietf.doc.models import DocEvent, Document
from ietf.meeting.models import (
Meeting,
SchedTimeSessAssignment,
Schedule,
SchedulingEvent,
Session,
TimeSlot,
)
from ietf.name.models import DocTypeName
def add_time_of_day(bare_datetime):
    """Attach the historical IESG telechat start time and return UTC.

    From the secretariat - the telechats happened at these times:
    2015-04-09 to present: 0700 PT America/Los Angeles
    1993-02-01 to 2015-03-12: 1130 ET America/New York
    1991-07-30 to 1993-01-25: 1200 ET America/New York
    """
    pacific = ZoneInfo("America/Los_Angeles")
    eastern = ZoneInfo("America/New_York")
    year_month = (bare_datetime.year, bare_datetime.month)
    # Guard clauses over (year, month) replace the nested year/month checks;
    # the cut-over boundaries are month-granular, matching the dates above.
    if year_month >= (2015, 4):
        localized = bare_datetime.replace(hour=7, tzinfo=pacific)
    elif year_month >= (1993, 2):
        localized = bare_datetime.replace(hour=11, minute=30, tzinfo=eastern)
    else:
        localized = bare_datetime.replace(hour=12, tzinfo=eastern)
    return localized.astimezone(datetime.timezone.utc)
def build_bof_coord_data():
    """Map each BOF coordination call's UTC start time to its IETF meeting
    number and minutes filename in the scraper repository.

    Call times are given in America/Los_Angeles local time and converted
    to UTC for use as dictionary keys.
    """
    CoordTuple = namedtuple("CoordTuple", "meeting_number source_name")

    def as_utc(year, month, day, hour, minute):
        local = datetime.datetime(year, month, day, hour, minute).replace(
            tzinfo=ZoneInfo("America/Los_Angeles")
        )
        return local.astimezone(datetime.timezone.utc)

    # (meeting number, LA-local call time, minutes file) - note the IETF 96
    # minutes live under "2015/" and IETF 97's file is capitalized.
    calls = [
        (96, (2016, 6, 10, 7, 0), "2015/bof-minutes-ietf-96.txt"),
        (97, (2016, 10, 6, 7, 0), "2016/BoF-Minutes-2016-10-06.txt"),
        (98, (2017, 2, 15, 8, 0), "2017/bof-minutes-ietf-98.txt"),
        (99, (2017, 6, 7, 8, 0), "2017/bof-minutes-ietf-99.txt"),
        (100, (2017, 10, 5, 7, 0), "2017/bof-minutes-ietf-100.txt"),
        (101, (2018, 2, 5, 11, 0), "2018/bof-minutes-ietf-101.txt"),
        (102, (2018, 6, 5, 8, 0), "2018/bof-minutes-ietf-102.txt"),
        (103, (2018, 9, 26, 7, 0), "2018/bof-minutes-ietf-103.txt"),
        (104, (2019, 2, 15, 9, 0), "2019/bof-minutes-ietf-104.txt"),
        (105, (2019, 6, 11, 7, 30), "2019/bof-minutes-ietf-105.txt"),
        (106, (2019, 10, 9, 6, 30), "2019/bof-minutes-ietf-106.txt"),
        (107, (2020, 2, 13, 8, 0), "2020/bof-minutes-ietf-107.txt"),
        (108, (2020, 6, 15, 8, 0), "2020/bof-minutes-ietf-108.txt"),
        (109, (2020, 10, 9, 7, 0), "2020/bof-minutes-ietf-109.txt"),
        (110, (2021, 1, 14, 13, 30), "2021/bof-minutes-ietf-110.txt"),
        (111, (2021, 6, 1, 8, 0), "2021/bof-minutes-ietf-111.txt"),
        (112, (2021, 9, 15, 9, 0), "2021/bof-minutes-ietf-112.txt"),
        (113, (2022, 1, 28, 7, 0), "2022/bof-minutes-ietf-113.txt"),
        (114, (2022, 6, 2, 10, 0), "2022/bof-minutes-ietf-114.txt"),
        (115, (2022, 9, 13, 9, 0), "2022/bof-minutes-ietf-115.txt"),
        (116, (2023, 2, 1, 9, 0), "2023/bof-minutes-ietf-116.txt"),
        (117, (2023, 6, 1, 7, 0), "2023/bof-minutes-ietf-117.txt"),
        (118, (2023, 9, 15, 8, 0), "2023/bof-minutes-ietf-118.txt"),
    ]
    return {
        as_utc(*when): CoordTuple(number, filename)
        for number, when, filename in calls
    }
class Command(BaseCommand):
    """One-time import of legacy IESG minutes from the old www6 site.

    For each distinct meeting time found (formal telechats plus BOF
    coordination calls), creates an interim Meeting with its Schedule,
    Session, TimeSlot, and assignment, attaches the minutes files as
    Document objects and copies them under settings.AGENDA_PATH, and
    records old-URL -> new-URL redirects in iesg_minutes_redirects.csv.
    """
    help = "Performs a one-time import of IESG minutes, creating Meetings to attach them to"
    def handle(self, *args, **options):
        # The source tree lives in a different place in production than in
        # the development container.
        old_minutes_root = (
            "/a/www/www6/iesg/minutes"
            if settings.SERVER_MODE == "production"
            else "/assets/www6/iesg/minutes"
        )
        minutes_dir = Path(old_minutes_root)
        # Legacy filenames embed the telechat date as YYYY-MM-DD.
        date_re = re.compile(r"\d{4}-\d{2}-\d{2}")
        meeting_times = set()
        redirects = []  # rows for the redirect CSV written at the end
        # Collect every telechat datetime that has minutes or narrative
        # minutes on disk. The year glob matches directories named
        # 199x, 200x, 201x, and 202x.
        for file_prefix in ["minutes", "narrative"]:
            paths = list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.txt"))
            paths.extend(
                list(minutes_dir.glob(f"[12][09][0129][0-9]/{file_prefix}*.html"))
            )
            for path in paths:
                s = date_re.search(path.name)
                if s:
                    # add_time_of_day is defined elsewhere in this file;
                    # presumably it attaches the conventional telechat start
                    # time to the bare date -- TODO confirm.
                    meeting_times.add(
                        add_time_of_day(
                            datetime.datetime.strptime(s.group(), "%Y-%m-%d")
                        )
                    )
        # BOF coordination calls come from the hand-built table above. They
        # must not collide with any telechat time, or the per-year counter
        # naming below would assign the same Meeting to two events.
        bof_coord_data = build_bof_coord_data()
        bof_times = set(bof_coord_data.keys())
        assert len(bof_times.intersection(meeting_times)) == 0
        meeting_times.update(bof_times)
        year_seen = None
        for dt in sorted(meeting_times):
            # Interim meeting numbers restart at 01 each calendar year.
            if dt.year != year_seen:
                counter = 1
                year_seen = dt.year
            meeting_name = f"interim-{dt.year}-iesg-{counter:02d}"
            # NOTE(review): tzname() returns a zone *abbreviation* for the
            # aware datetime -- confirm Meeting.time_zone accepts that form.
            meeting = Meeting.objects.create(
                number=meeting_name,
                type_id="interim",
                date=dt.date(),
                days=1,
                time_zone=dt.tzname(),
            )
            schedule = Schedule.objects.create(
                meeting=meeting,
                owner_id=1, # the "(System)" person
                visible=True,
                public=True,
            )
            meeting.schedule = schedule
            meeting.save()
            session = Session.objects.create(
                meeting=meeting,
                group_id=2, # The IESG group
                type_id="regular",
                purpose_id="regular",
                name=(
                    f"IETF {bof_coord_data[dt].meeting_number} BOF Coordination Call"
                    if dt in bof_times
                    else "Formal Telechat"
                ),
            )
            SchedulingEvent.objects.create(
                session=session,
                status_id="sched",
                by_id=1, # (System)
            )
            # Each imported event is modeled as a single two-hour timeslot.
            timeslot = TimeSlot.objects.create(
                meeting=meeting,
                type_id="regular",
                time=dt,
                duration=datetime.timedelta(seconds=2 * 60 * 60),
            )
            SchedTimeSessAssignment.objects.create(
                timeslot=timeslot, session=session, schedule=schedule
            )
            if dt in bof_times:
                # BOF coordination call: one plain-text minutes file whose
                # path comes from the hand-built table.
                source = minutes_dir / bof_coord_data[dt].source_name
                if source.exists():
                    doc_name = (
                        f"minutes-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}"
                    )
                    doc_filename = f"{doc_name}-00.txt"
                    doc = Document.objects.create(
                        name=doc_name,
                        type_id="minutes",
                        title=f"Minutes IETF {bof_coord_data[dt].meeting_number} BOF coordination {meeting_name} {dt:%Y-%m-%d %H:%M}",
                        group_id=2, # the IESG group
                        rev="00",
                        uploaded_filename=doc_filename,
                    )
                    e = DocEvent.objects.create(
                        type="comment",
                        doc=doc,
                        rev="00",
                        by_id=1, # "(System)"
                        desc="Minutes moved into datatracker",
                    )
                    doc.save_with_history([e])
                    session.presentations.create(document=doc, rev=doc.rev)
                    dest = (
                        Path(settings.AGENDA_PATH)
                        / meeting_name
                        / "minutes"
                        / doc_filename
                    )
                    # Never clobber a file already in the datatracker tree.
                    if dest.exists():
                        self.stdout.write(
                            f"WARNING: {dest} already exists - not overwriting it."
                        )
                    else:
                        os.makedirs(dest.parent, exist_ok=True)
                        shutil.copy(source, dest)
                    # Map the old www6 URL to the new datatracker document.
                    redirects.append(
                        [
                            f"www6.ietf.org/iesg/minutes/{dt.year}/{bof_coord_data[dt].source_name}",
                            f"https://datatracker.ietf.org/doc/{doc_name}",
                            302,
                        ]
                    )
            else:
                # Formal telechat: import regular minutes and narrative
                # minutes independently; when both .txt and .html exist,
                # the HTML version is the one imported.
                for type_id in ["minutes", "narrativeminutes"]:
                    source_file_prefix = (
                        "minutes" if type_id == "minutes" else "narrative-minutes"
                    )
                    txt_source = (
                        minutes_dir
                        / f"{dt.year}"
                        / f"{source_file_prefix}-{dt:%Y-%m-%d}.txt"
                    )
                    html_source = (
                        minutes_dir
                        / f"{dt.year}"
                        / f"{source_file_prefix}-{dt:%Y-%m-%d}.html"
                    )
                    if txt_source.exists() and html_source.exists():
                        self.stdout.write(
                            f"WARNING: Both {txt_source} and {html_source} exist."
                        )
                    if txt_source.exists() or html_source.exists():
                        prefix = DocTypeName.objects.get(slug=type_id).prefix
                        doc_name = f"{prefix}-interim-{dt.year}-iesg-{counter:02d}-{dt:%Y%m%d%H%M}"
                        suffix = "html" if html_source.exists() else "txt"
                        doc_filename = f"{doc_name}-00.{suffix}"
                        verbose_type = (
                            "Minutes" if type_id == "minutes" else "Narrative Minutes"
                        )
                        doc = Document.objects.create(
                            name=doc_name,
                            type_id=type_id,
                            title=f"{verbose_type} {meeting_name} {dt:%Y-%m-%d %H:%M}",
                            group_id=2, # the IESG group
                            rev="00",
                            uploaded_filename=doc_filename,
                        )
                        e = DocEvent.objects.create(
                            type="comment",
                            doc=doc,
                            rev="00",
                            by_id=1, # "(System)"
                            desc=f"{verbose_type} moved into datatracker",
                        )
                        doc.save_with_history([e])
                        session.presentations.create(document=doc, rev=doc.rev)
                        dest = (
                            Path(settings.AGENDA_PATH)
                            / meeting_name
                            / type_id
                            / doc_filename
                        )
                        # Never clobber a file already in the datatracker tree.
                        if dest.exists():
                            self.stdout.write(
                                f"WARNING: {dest} already exists - not overwriting it."
                            )
                        else:
                            os.makedirs(dest.parent, exist_ok=True)
                            if html_source.exists():
                                # Clean up the legacy HTML on the way in.
                                html_content = html_source.read_text(encoding="utf-8")
                                # Turn self-referencing fragment links into
                                # plain in-page anchors.
                                html_content = html_content.replace(
                                    f'href="IESGnarrative-{dt:%Y-%m-%d}.html#',
                                    'href="#',
                                )
                                # Strip file:/// links (span-wrapped and
                                # plain), keeping their visible link text.
                                html_content = re.sub(
                                    r'<a href="file:///[^"]*"><span[^>]*>([^<]*)</span></a>',
                                    r"\1",
                                    html_content,
                                )
                                html_content = re.sub(
                                    r'<a href="file:///[^"]*">([^<]*)</a>',
                                    r"\1",
                                    html_content,
                                )
                                # Remove W3C validator badge images/links.
                                html_content = re.sub(
                                    '<a href="http://validator.w3.org/[^>]*> *<img[^>]*></a>',
                                    "",
                                    html_content,
                                )
                                dest.write_text(html_content, encoding="utf-8")
                            else:
                                shutil.copy(txt_source, dest)
                        # Map the old www6 URL to the new datatracker document.
                        redirects.append(
                            [
                                f"www6.ietf.org/iesg/minutes/{dt.year}/{txt_source.name if txt_source.exists() else html_source.name}",
                                f"https://datatracker.ietf.org/doc/{doc_name}",
                                302,
                            ]
                        )
            counter += 1
        # Emit the collected redirects for the web-server configuration.
        with open("iesg_minutes_redirects.csv", "w", newline="") as f:
            csvwriter = csv.writer(f)
            for row in redirects:
                csvwriter.writerow(row)