chore: checkpoint: docalias mostly removed

Robert Sparks 2023-08-15 16:49:17 -05:00
parent 4946430159
commit 383899c5c4
No known key found for this signature in database
GPG key ID: 6E2A6A5775F91318
24 changed files with 80 additions and 297 deletions
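The recurring change in this checkpoint is that code which used to reach documents through the DocAlias table now queries Document directly, since document names (including RFC names) live on the Document itself. A rough before/after sketch drawn from the hunks below (variable names are illustrative, taken from the individual diffs rather than any one file):

    # before: resolve a draft through its alias record
    draft = Document.objects.get(docalias__name=name)
    replaces = DocAlias.objects.filter(name__in=submission.replaces_names)

    # after: the name is a field on Document itself
    draft = Document.objects.get(name=name)
    replaces = Document.objects.filter(name__in=submission.replaces_names)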


@@ -932,7 +932,7 @@ def make_rev_history(doc):
         if predecessors is None:
             predecessors = []
         if hasattr(doc, 'relateddocument_set'):
-            for document in in doc.related_that_doc('replaces'):
+            for document in doc.related_that_doc('replaces'):
                 if document not in predecessors:
                     predecessors.append(document)
                     predecessors.extend(get_predecessors(document, predecessors))


@@ -38,7 +38,8 @@ from ietf.message.models import Message
 from ietf.message.utils import infer_message
 from ietf.name.models import IprLicenseTypeName
 from ietf.person.models import Person
-from ietf.secr.utils.document import get_rfc_num, is_draft
+from ietf.secr.utils.document import is_draft
+from ietf.utils import log
 from ietf.utils.draft_search import normalize_draftname
 from ietf.utils.mail import send_mail, send_mail_message
 from ietf.utils.response import permission_denied
@@ -71,10 +72,13 @@ def get_document_emails(ipr):
     for rel in ipr.iprdocrel_set.all():
         doc = rel.document.document
-        if is_draft(doc):
+        if doc.type_id=="draft":
             doc_info = 'Internet-Draft entitled "{}" ({})'.format(doc.title,doc.name)
+        elif doc.type_id=="rfc":
+            doc_info = 'RFC entitled "{}" (RFC{})'.format(doc.title, doc.rfc_number)
         else:
-            doc_info = 'RFC entitled "{}" (RFC{})'.format(doc.title,get_rfc_num(doc))
+            log.unreachable("2023-08-15")
+            return ""
         addrs = gather_address_lists('ipr_posted_on_doc',doc=doc).as_strings(compact=False)


@@ -287,8 +287,6 @@ class AssignmentOrderResolver:
     def _collect_context(self):
         """Collect all relevant data about this team, document and review request."""
-        self.doc_aliases = DocAlias.objects.filter(docs=self.doc).values_list("name", flat=True)
         # This data is collected as a dict, keys being person IDs, values being numbers/objects.
         self.rotation_index = {p.pk: i for i, p in enumerate(self.rotation_list)}
         self.reviewer_settings = self._reviewer_settings_for_person_ids(self.possible_person_ids)
@@ -354,8 +352,7 @@
         add_boolean_score(+1, email.person_id in self.wish_to_review, "wishes to review document")
         add_boolean_score(-1, email.person_id in self.connections,
                           self.connections.get(email.person_id))  # reviewer is somehow connected: bad
-        add_boolean_score(-1, settings.filter_re and any(
-            re.search(settings.filter_re, n) for n in self.doc_aliases), "filter regexp matches")
+        add_boolean_score(-1, settings.filter_re and re.search(settings.filter_re, self.doc.name), "filter regexp matches")
         # minimum interval between reviews
         days_needed = self.days_needed_for_reviewers.get(email.person_id, 0)


@@ -175,7 +175,7 @@ def doc_detail(request, date, name):
     This view displays the ballot information for the document, and lets the user make
     changes to ballot positions and document state.
     '''
-    doc = get_object_or_404(Document, docalias__name=name)
+    doc = get_object_or_404(Document, name=name)
    if not is_doc_on_telechat(doc, date):
         messages.warning(request, 'Dcoument: {name} is not on the Telechat agenda for {date}'.format(
             name=doc.name,
@@ -342,7 +342,7 @@ def doc_navigate(request, date, name, nav):
     nav - [next|previous] which direction the user wants to navigate in the list of docs
     The view retrieves the appropriate document and redirects to the doc view.
     '''
-    doc = get_object_or_404(Document, docalias__name=name)
+    doc = get_object_or_404(Document, name=name)
     agenda = agenda_data(date=date)
     target = name


@@ -13,15 +13,6 @@ def get_full_path(doc):
         return None
     return os.path.join(doc.get_file_path(), doc.uploaded_filename)
 
-def get_rfc_num(doc):
-    qs = doc.docalias.filter(name__startswith='rfc')
-    return qs[0].name[3:] if qs else None
-
-def is_draft(doc):
-    if doc.docalias.filter(name__startswith='rfc'):
-        return False
-    else:
-        return True
 
 def get_start_date(doc):
     '''
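With these alias-based helpers gone, callers are expected to branch on the document type directly, as the IPR hunk above now does. A minimal sketch of the equivalent checks (the label variable is illustrative, not taken from the diff):

    # formerly is_draft(doc) / get_rfc_num(doc)
    if doc.type_id == "draft":
        label = doc.name
    elif doc.type_id == "rfc":
        label = "RFC%s" % doc.rfc_number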


@@ -600,7 +600,6 @@ TEST_CODE_COVERAGE_EXCLUDE_FILES = [
     "ietf/utils/test_runner.py",
     "ietf/name/generate_fixtures.py",
     "ietf/review/import_from_review_tool.py",
-    "ietf/stats/backfill_data.py",
     "ietf/utils/patch.py",
     "ietf/utils/test_data.py",
 ]


@ -1,184 +0,0 @@
#!/usr/bin/env python
# Copyright The IETF Trust 2017-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import io
import sys
import os
import os.path
import argparse
import time
from typing import Set, Optional # pyflakes:ignore
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path = [ basedir ] + sys.path
os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
if os.path.exists(virtualenv_activation):
exec(compile(io.open(virtualenv_activation, "rb").read(), virtualenv_activation, 'exec'), dict(__file__=virtualenv_activation))
import django
django.setup()
from django.conf import settings
import debug # pyflakes:ignore
from ietf.doc.models import Document
from ietf.name.models import FormalLanguageName
from ietf.utils.draft import PlaintextDraft
parser = argparse.ArgumentParser()
parser.add_argument("--document", help="specific document name")
parser.add_argument("--words", action="store_true", help="fill in word count")
parser.add_argument("--formlang", action="store_true", help="fill in formal languages")
parser.add_argument("--authors", action="store_true", help="fill in author info")
args = parser.parse_args()
formal_language_dict = { l.pk: l for l in FormalLanguageName.objects.all() }
docs_qs = Document.objects.filter(type="draft")
if args.document:
docs_qs = docs_qs.filter(docalias__name=args.document)
ts = time.strftime("%Y-%m-%d_%H:%M%z")
logfile = io.open('backfill-authorstats-%s.log'%ts, 'w')
print("Writing log to %s" % os.path.abspath(logfile.name))
def say(msg):
msg = msg.encode('utf8')
sys.stderr.write(msg)
sys.stderr.write('\n')
logfile.write(msg)
logfile.write('\n')
def unicode(text):
if text is None:
return text
# order matters here:
for encoding in ['ascii', 'utf8', 'latin1', ]:
try:
utext = text.decode(encoding)
# if encoding == 'latin1':
# say("Warning: falling back to latin1 decoding for %s ..." % utext[:216]])
return utext
except UnicodeDecodeError:
pass
start = time.time()
say("Running query for documents to process ...")
for doc in docs_qs.prefetch_related("docalias", "formal_languages", "documentauthor_set", "documentauthor_set__person", "documentauthor_set__person__alias_set"):
canonical_name = doc.name
for n in doc.docalias.all():
if n.name.startswith("rfc"):
canonical_name = n.name
if canonical_name.startswith("rfc"):
path = os.path.join(settings.RFC_PATH, canonical_name + ".txt")
else:
path = os.path.join(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR, canonical_name + "-" + doc.rev + ".txt")
if not os.path.exists(path):
say("Skipping %s, no txt file found at %s" % (doc.name, path))
continue
with io.open(path, 'rb') as f:
say("\nProcessing %s" % doc.name)
sys.stdout.flush()
d = PlaintextDraft(unicode(f.read()), path)
updated = False
updates = {}
if args.words:
words = d.get_wordcount()
if words != doc.words:
updates["words"] = words
if args.formlang:
langs = d.get_formal_languages()
new_formal_languages = set(formal_language_dict[l] for l in langs)
old_formal_languages = set(doc.formal_languages.all())
if new_formal_languages != old_formal_languages:
for l in new_formal_languages - old_formal_languages:
doc.formal_languages.add(l)
updated = True
for l in old_formal_languages - new_formal_languages:
doc.formal_languages.remove(l)
updated = True
if args.authors:
old_authors = doc.documentauthor_set.all()
old_authors_by_name = {}
old_authors_by_email = {}
for author in old_authors:
for alias in author.person.alias_set.all():
old_authors_by_name[alias.name] = author
old_authors_by_name[author.person.plain_name()] = author
if author.email_id:
old_authors_by_email[author.email_id] = author
# the draft parser sometimes has a problem when
# affiliation isn't in the second line and it then thinks
# it's an extra author - skip those extra authors
seen = set() # type: Set[Optional[str]]
for full, _, _, _, _, email, country, company in d.get_author_list():
assert full is None or isinstance(full, str)
assert email is None or isinstance(email, str)
assert country is None or isinstance(country, str)
assert isinstance(company, str)
#full, email, country, company = [ unicode(s) for s in [full, email, country, company, ] ]
if email in seen:
continue
seen.add(email)
old_author = None
if email:
old_author = old_authors_by_email.get(email)
if not old_author:
old_author = old_authors_by_name.get(full)
if not old_author:
say("UNKNOWN AUTHOR: %s, %s, %s, %s, %s" % (doc.name, full, email, country, company))
continue
if old_author.affiliation != company:
say("new affiliation: %s [ %s <%s> ] %s -> %s" % (canonical_name, full, email, old_author.affiliation, company))
old_author.affiliation = company
old_author.save(update_fields=["affiliation"])
updated = True
if country is None:
country = ""
if old_author.country != country:
say("new country: %s [ %s <%s> ] %s -> %s" % (canonical_name , full, email, old_author.country, country))
old_author.country = country
old_author.save(update_fields=["country"])
updated = True
if updates:
Document.objects.filter(pk=doc.pk).update(**updates)
updated = True
if updated:
say("updated: %s" % canonical_name)
stop = time.time()
dur = stop-start
sec = dur%60
min = dur//60
say("Processing time %d:%02d" % (min, sec))
print("\n\nWrote log to %s" % os.path.abspath(logfile.name))
logfile.close()


@@ -11,6 +11,10 @@ from ietf.name.models import CountryName
 from ietf.person.models import Person
 from ietf.utils.models import ForeignKey
 
+### NOTE WELL: These models are expected to be removed and the stats app reimplemented.
+# A bare python file that should have been a management command was used to populate
+# these models when the app was first installed - it has been removed from main, but
+# can be seen at https://github.com/ietf-tools/datatracker/blob/f2b716fc052a0152c32b86b428ba6ebfdcdf5cd2/ietf/stats/backfill_data.py
 class AffiliationAlias(models.Model):
     """Records that alias should be treated as name for statistical


@@ -20,7 +20,7 @@ import ietf.stats.views
 from ietf.submit.models import Submission
 from ietf.doc.factories import WgDraftFactory, WgRfcFactory
-from ietf.doc.models import Document, DocAlias, State, RelatedDocument, NewRevisionDocEvent, DocumentAuthor
+from ietf.doc.models import Document, State, RelatedDocument, NewRevisionDocEvent, DocumentAuthor
 from ietf.group.factories import RoleFactory
 from ietf.meeting.factories import MeetingFactory, AttendedFactory
 from ietf.person.factories import PersonFactory
@@ -79,7 +79,6 @@ class StatisticsTests(TestCase):
             words=100
         )
         referencing_draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
-        DocAlias.objects.create(name=referencing_draft.name).docs.add(referencing_draft)
         RelatedDocument.objects.create(
             source=referencing_draft,
             target=draft,


@@ -34,7 +34,7 @@ from ietf.group.models import Role, Group
 from ietf.person.models import Person
 from ietf.name.models import ReviewResultName, CountryName, DocRelationshipName, ReviewAssignmentStateName
 from ietf.person.name import plain_name
-from ietf.doc.models import DocAlias, Document, State, DocEvent
+from ietf.doc.models import Document, State, DocEvent
 from ietf.meeting.models import Meeting
 from ietf.stats.models import MeetingRegistration, CountryAlias
 from ietf.stats.utils import get_aliased_affiliations, get_aliased_countries, compute_hirsch_index
@@ -214,13 +214,13 @@ def document_stats(request, stats_type=None):
     if any(stats_type == t[0] for t in possible_document_stats_types):
         # filter documents
-        docalias_filters = Q(docs__type="draft")
+        document_filters = Q(docs__type="draft")
 
         rfc_state = State.objects.get(type="draft", slug="rfc")
         if document_type == "rfc":
-            docalias_filters &= Q(docs__states=rfc_state)
+            document_filters &= Q(docs__states=rfc_state)
         elif document_type == "draft":
-            docalias_filters &= ~Q(docs__states=rfc_state)
+            document_filters &= ~Q(docs__states=rfc_state)
 
         if from_time:
             # this is actually faster than joining in the database,
@@ -231,9 +231,9 @@ def document_stats(request, stats_type=None):
                 docevent__type__in=["published_rfc", "new_revision"],
             ).values_list("pk"))
-            docalias_filters &= Q(docs__in=docs_within_time_constraint)
+            document_filters &= Q(docs__in=docs_within_time_constraint)
 
-        docalias_qs = DocAlias.objects.filter(docalias_filters)
+        document_qs = Document.objects.filter(document_filters)
 
         if document_type == "rfc":
             doc_label = "RFC"
@@ -242,28 +242,15 @@ def document_stats(request, stats_type=None):
         else:
            doc_label = "document"
 
-        total_docs = docalias_qs.values_list("docs__name").distinct().count()
+        total_docs = document_qs.values_list("name").distinct().count()
 
-        def generate_canonical_names(values):
-            for doc_id, ts in itertools.groupby(values.order_by("docs__name"), lambda a: a[0]):
-                chosen = None
-                for t in ts:
-                    if chosen is None:
-                        chosen = t
-                    else:
-                        if t[1].startswith("rfc"):
-                            chosen = t
-                        elif t[1].startswith("draft") and not chosen[1].startswith("rfc"):
-                            chosen = t
-                yield chosen
-
         if stats_type == "authors":
             stats_title = "Number of authors for each {}".format(doc_label)
 
             bins = defaultdict(set)
-            for name, canonical_name, author_count in generate_canonical_names(docalias_qs.values_list("docs__name", "name").annotate(Count("docs__documentauthor"))):
-                bins[author_count or 0].add(canonical_name)
+            for name, author_count in document_qs.values_list("name").annotate(Count("documentauthor")).values_list("name","documentauthor__count"):
+                bins[author_count or 0].add(name)
 
             series_data = []
             for author_count, names in sorted(bins.items(), key=lambda t: t[0]):
@@ -278,8 +265,8 @@ def document_stats(request, stats_type=None):
             bins = defaultdict(set)
-            for name, canonical_name, pages in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__pages")):
-                bins[pages or 0].add(canonical_name)
+            for name, pages in document_qs.values_list("name", "pages"):
+                bins[pages or 0].add(name)
 
             series_data = []
             for pages, names in sorted(bins.items(), key=lambda t: t[0]):
@@ -297,7 +284,7 @@ def document_stats(request, stats_type=None):
             bins = defaultdict(set)
-            for name, canonical_name, words in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__words")):
+            for name, words in document_qs.values_list("name", "words"):
                 bins[put_into_bin(words, bin_size)].add(canonical_name)
 
             series_data = []
@@ -322,7 +309,7 @@ def document_stats(request, stats_type=None):
                 submission_types[doc_name] = file_types
 
             doc_names_with_missing_types = {}
-            for doc_name, canonical_name, rev in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__rev")):
+            for doc_name, rev in document_qs.values_list("name", "rev"):
                 types = submission_types.get(doc_name)
                 if types:
                     for dot_ext in types.split(","):
@@ -367,7 +354,7 @@ def document_stats(request, stats_type=None):
             bins = defaultdict(set)
-            for name, canonical_name, formal_language_name in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__formal_languages__name")):
+            for name, formal_language_name in document_qs.values_list("name", "formal_languages__name"):
                 bins[formal_language_name or ""].add(canonical_name)
 
             series_data = []
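With each draft and RFC now being its own Document carrying its canonical name, the generate_canonical_names() disambiguation step is no longer needed and the per-document statistics bin straight off the queryset. A reduced sketch of the new pattern, using the page-count statistic as the example (the series_data line is a simplification of what the view actually builds):

    from collections import defaultdict

    bins = defaultdict(set)
    for name, pages in document_qs.values_list("name", "pages"):
        bins[pages or 0].add(name)
    series_data = [(pages, len(names)) for pages, names in sorted(bins.items())]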


@@ -108,14 +108,14 @@ class Submission(models.Model):
     @property
     def active_wg_drafts_replaced(self):
         return Document.objects.filter(
-            docalias__name__in=self.replaces.split(','),
+            name__in=self.replaces.split(','),
             group__in=Group.objects.active_wgs()
         )
 
     @property
     def closed_wg_drafts_replaced(self):
         return Document.objects.filter(
-            docalias__name__in=self.replaces.split(','),
+            name__in=self.replaces.split(','),
             group__in=Group.objects.closed_wgs()
         )


@@ -34,7 +34,7 @@ from ietf.submit.utils import (expirable_submissions, expire_submission, find_su
     process_and_validate_submission)
 from ietf.doc.factories import (DocumentFactory, WgDraftFactory, IndividualDraftFactory,
     ReviewFactory, WgRfcFactory)
-from ietf.doc.models import ( Document, DocAlias, DocEvent, State,
+from ietf.doc.models import ( Document, DocEvent, State,
     BallotPositionDocEvent, DocumentAuthor, SubmissionDocEvent )
 from ietf.doc.utils import create_ballot_if_not_open, can_edit_docextresources, update_action_holders
 from ietf.group.factories import GroupFactory, RoleFactory
@@ -358,8 +358,6 @@ class SubmitTests(BaseSubmitTestCase):
             note="",
         )
         sug_replaced_draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
-        sug_replaced_alias = DocAlias.objects.create(name=sug_replaced_draft.name)
-        sug_replaced_alias.docs.add(sug_replaced_draft)
 
         name = "draft-ietf-mars-testing-tests"
         rev = "00"
@@ -400,7 +398,7 @@ class SubmitTests(BaseSubmitTestCase):
         r = self.client.post(status_url, dict(action=action))
         self.assertEqual(r.status_code, 302)
 
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, rev)
         new_revision = draft.latest_event(type="new_revision")
         self.assertEqual(draft.group.acronym, "mars")
@@ -420,7 +418,7 @@ class SubmitTests(BaseSubmitTestCase):
         self.assertEqual(draft.relations_that_doc("replaces").count(), 1)
         self.assertTrue(draft.relations_that_doc("replaces").first().target, draft)
         self.assertEqual(draft.relations_that_doc("possibly-replaces").count(), 1)
-        self.assertTrue(draft.relations_that_doc("possibly-replaces").first().target, sug_replaced_alias)
+        self.assertTrue(draft.relations_that_doc("possibly-replaces").first().target, sug_replaced_draft)
         self.assertEqual(len(outbox), mailbox_before + 5)
         self.assertIn(("I-D Action: %s" % name), outbox[-4]["Subject"])
         self.assertIn(author.ascii, get_payload_text(outbox[-4]))
@@ -433,7 +431,7 @@ class SubmitTests(BaseSubmitTestCase):
         # Check "Review of suggested possible replacements for..." mail
         self.assertIn("review", outbox[-1]["Subject"].lower())
         self.assertIn(name, get_payload_text(outbox[-1]))
-        self.assertIn(sug_replaced_alias.name, get_payload_text(outbox[-1]))
+        self.assertIn(sug_replaced_draft.name, get_payload_text(outbox[-1]))
         self.assertIn("ames-chairs@", outbox[-1]["To"].lower())
         self.assertIn("mars-chairs@", outbox[-1]["To"].lower())
         # Check submission settings
@@ -684,7 +682,7 @@ class SubmitTests(BaseSubmitTestCase):
         self.assertTrue('New version approved' in edescs)
         self.assertTrue('Uploaded new revision' in edescs)
 
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, rev)
         self.assertEqual(draft.group.acronym, name.split("-")[2])
         #
@@ -911,7 +909,7 @@ class SubmitTests(BaseSubmitTestCase):
         r = self.client.post(confirmation_url, {'action':'confirm'})
         self.assertEqual(r.status_code, 302)
 
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, rev)
         new_revision = draft.latest_event()
         self.assertEqual(new_revision.type, "new_revision")
@@ -951,7 +949,7 @@ class SubmitTests(BaseSubmitTestCase):
         action = force_post_button.parents("form").find('input[type=hidden][name="action"]').val()
         r = self.client.post(status_url, dict(action=action))
 
-        doc = Document.objects.get(docalias__name=name)
+        doc = Document.objects.get(name=name)
         self.assertEqual(doc.documentauthor_set.count(), 1)
         docauth = doc.documentauthor_set.first()
         self.assertEqual(docauth.person, author)
@@ -1084,7 +1082,7 @@ class SubmitTests(BaseSubmitTestCase):
         self.assertIn("New Version Notification", notification_email["Subject"])
         self.assertIn(author.email().address.lower(), notification_email["To"])
 
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, rev)
         self.assertEqual(draft.docextresource_set.count(), 0)
         new_revision = draft.latest_event()
@@ -1132,7 +1130,7 @@ class SubmitTests(BaseSubmitTestCase):
         self._assert_extresources_form_not_present(r)
 
         # Check that the draft itself got the resources
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertCountEqual(
             [str(r) for r in draft.docextresource_set.all()],
             [str(r) for r in resources],
@@ -1178,7 +1176,7 @@ class SubmitTests(BaseSubmitTestCase):
         r = self.client.post(confirmation_url, {'action':'confirm'})
         self.assertEqual(r.status_code, 302)
         self.assertEqual(len(outbox), mailbox_before+3)
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, rev)
         self.assertEqual(draft.relateddocument_set.filter(relationship_id='replaces').count(), replaces_count)
         self.assertEqual(draft.docextresource_set.count(), 0)
@@ -1296,7 +1294,7 @@ class SubmitTests(BaseSubmitTestCase):
         r = self.client.post(confirmation_url, {'action':'cancel'})
         self.assertEqual(r.status_code, 302)
         self.assertEqual(len(outbox), mailbox_before)
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, old_rev)
 
     def test_submit_new_wg_with_dash(self):
@@ -1453,7 +1451,7 @@ class SubmitTests(BaseSubmitTestCase):
         r = self.client.post(status_url, dict(action=action))
         self.assertEqual(r.status_code, 302)
 
-        draft = Document.objects.get(docalias__name=name)
+        draft = Document.objects.get(name=name)
         self.assertEqual(draft.rev, rev)
         self.assertEqual(draft.docextresource_set.count(), 0)
         self.verify_bibxml_ids_creation(draft)


@@ -25,7 +25,7 @@ from django.utils import timezone
 
 import debug                            # pyflakes:ignore
 
-from ietf.doc.models import ( Document, State, DocAlias, DocEvent, SubmissionDocEvent,
+from ietf.doc.models import ( Document, State, DocEvent, SubmissionDocEvent,
     DocumentAuthor, AddedMessageEvent )
 from ietf.doc.models import NewRevisionDocEvent
 from ietf.doc.models import RelatedDocument, DocRelationshipName, DocExtResource
@@ -375,10 +375,6 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc):
         events.append(e)
     log.log(f"{submission.name}: created doc events")
 
-    # update related objects
-    alias, __ = DocAlias.objects.get_or_create(name=submission.name)
-    alias.docs.add(draft)
-
     draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
 
     update_authors(draft, submission)
@@ -1000,7 +996,7 @@ def accept_submission(submission: Submission, request: Optional[HttpRequest] = N
     docevent_from_submission(submission, desc="Uploaded new revision",
                              who=requester if requester_is_author else None)
 
-    replaces = DocAlias.objects.filter(name__in=submission.replaces_names)
+    replaces = Document.objects.filter(name__in=submission.replaces_names)
     pretty_replaces = '(none)' if not replaces else (
         ', '.join(prettify_std_name(r.name) for r in replaces)
     )


@@ -22,7 +22,7 @@ from django.views.decorators.csrf import csrf_exempt
 
 import debug                            # pyflakes:ignore
 
-from ietf.doc.models import Document, DocAlias, AddedMessageEvent
+from ietf.doc.models import Document, AddedMessageEvent
 from ietf.doc.forms import ExtResourceForm
 from ietf.group.models import Group
 from ietf.group.utils import group_features_group_filter
@@ -410,7 +410,7 @@ def submission_status(request, submission_id, access_token=None):
     )
 
     submitter_form = SubmitterForm(initial=submission.submitter_parsed(), prefix="submitter")
-    replaces_form = ReplacesForm(name=submission.name,initial=DocAlias.objects.filter(name__in=submission.replaces.split(",")))
+    replaces_form = ReplacesForm(name=submission.name,initial=Document.objects.filter(name__in=submission.replaces.split(",")))
     extresources_form = ExtResourceForm(
         initial=dict(resources=[er['res'] for er in external_resources]),
         extresource_model=SubmissionExtResource,
@@ -626,7 +626,7 @@ def edit_submission(request, submission_id, access_token=None):
     else:
         edit_form = EditSubmissionForm(instance=submission, prefix="edit")
         submitter_form = SubmitterForm(initial=submission.submitter_parsed(), prefix="submitter")
-        replaces_form = ReplacesForm(name=submission.name,initial=DocAlias.objects.filter(name__in=submission.replaces.split(",")))
+        replaces_form = ReplacesForm(name=submission.name, initial=Document.objects.filter(name__in=submission.replaces.split(",")))
         author_forms = [ AuthorForm(initial=author, prefix="authors-%s" % i)
                          for i, author in enumerate(submission.authors) ]


@@ -45,7 +45,7 @@ def update_rfc_log_from_protocol_page(rfc_names, rfc_must_published_later_than):
     updated = []
 
-    docs = Document.objects.filter(docalias__name__in=rfc_names).exclude(
+    docs = Document.objects.filter(name__in=rfc_names).exclude(
         docevent__type="rfc_in_iana_registry").filter(
         # only take those that were published after cutoff since we
         # have a big bunch of old RFCs that we unfortunately don't have data for
@@ -189,7 +189,7 @@ def update_history_with_changes(changes, send_email=True):
                 state_type=state_type, state=state)
             if not e:
                 try:
-                    doc = Document.objects.get(docalias__name=docname)
+                    doc = Document.objects.get(name=docname)
                 except Document.DoesNotExist:
                     warnings.append("Document %s not found" % docname)
                     continue


@@ -67,7 +67,7 @@
     <label class="d-none d-md-block" aria-label="Document search">
         <input class="form-control select2-field search-select"
                id="navbar-doc-search"
-               data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='docalias' doc_type='draft' %}"
+               data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='document' doc_type='draft' %}"
               type="text"
               data-placeholder="Document search">
     </label>
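The only change here is the model_name routed to the select2 search endpoint. Assuming the standard behaviour of Django's url tag, the template now resolves roughly the following URL (a sketch, not code from the commit):

    from django.urls import reverse

    url = reverse(
        "ietf.doc.views_search.ajax_select2_search_docs",
        kwargs={"model_name": "document", "doc_type": "draft"},
    )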


@@ -23,10 +23,10 @@
     <h2 class="my-3">Document history
         {% if related %}
             <div class="float-end">
-                {% for related_docalias in related %}
+                {% for related_document in related %}
                     <a class="btn btn-outline-primary btn-sm"
-                       href="{% url 'ietf.doc.views_doc.document_history' name=related_docalias.name %}">
-                        Related history for {{ related_docalias.name }}
+                       href="{% url 'ietf.doc.views_doc.document_history' name=related_document.name %}">
+                        Related history for {{ related_document.name }}
                     </a>
                 {% endfor %}
             </div>


@@ -39,7 +39,7 @@
        id="new_relation_row_rfc"
        aria-label="Enter new affected RFC"
        class="form-control select2-field"
-       data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='docalias' doc_type='draft' %}"
+       data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='document' doc_type='draft' %}"
        data-result-key="text"
        data-max-entries="1"
        data-width="resolve"


@@ -37,11 +37,11 @@ Goals and Milestones:
 {% for milestone in group.milestones %} {% if milestone.resolved %}{{ milestone.resolved }} {% else %}{{ milestone.due|date:"M Y" }}{% endif %} - {{ milestone.desc }}
 {% endfor %}
 Internet-Drafts:
-{% for alias in group.drafts %} - {{ alias.document.title }} [{{ alias.name }}-{{ alias.document.rev }}] ({{ alias.document.pages }} pages)
+{% for document in group.drafts %} - {{ document.title }} [{{ document.name }}-{{ document.rev }}] ({{ document.pages }} pages)
 {% endfor %}
 {% if group.rfcs %}Requests for Comments:
-{% for alias in group.rfcs %} {{ alias.name.upper }}: {{ alias.document.title}} ({{ alias.document.pages }} pages){% for r in alias.rel %}
-    * {{ r.action }} {{ r.target.name|upper }}{% endfor %}{% for r in alias.invrel %}
+{% for document in group.rfcs %} {{ document.name.upper }}: {{ document.title}} ({{ document.pages }} pages){% for r in document.rel %}
+    * {{ r.action }} {{ r.target.name|upper }}{% endfor %}{% for r in document.invrel %}
     * {% if r.relationsship == "obs" %}{{ r.inverse_action|upper }}{% else %}{{ r.action }}{% endif %} {{ r.source.canonical_name|upper }}{% endfor %}
 {% endfor %}
 {% else %}No Requests for Comments{% endif %}


@@ -13,9 +13,9 @@
         Please select one of following I-Ds:
     </p>
     <ul>
-        {% for docalias in docs %}
+        {% for doc in docs %}
             <li>
-                <a href="?submit=draft&amp;id={{ docalias.name }}">{{ docalias.name }}</a>
+                <a href="?submit=draft&amp;id={{ doc.name }}">{{ doc.name }}</a>
             </li>
         {% endfor %}
     </ul>


@@ -24,20 +24,20 @@
             <th scope="col" data-sort="statement">Statement</th>
         </tr>
     </thead>
-    {% for alias in docs %}
+    {% for doc in docs %}
        <tbody>
            <tr>
                <th scope="col" class="table-info" colspan="3">
-                   IPR that is related to {{ alias.name|prettystdname:""|urlize_ietf_docs }} ("{{ alias.document.title }}")
-                   {% if alias.related %}
-                       that was {{ alias.relation|lower }} {{ alias.related.source.name|prettystdname:""|urlize_ietf_docs }} ("{{ alias.related.source.title }}")
+                   IPR that is related to {{ doc.name|prettystdname:""|urlize_ietf_docs }} ("{{ doc.title }}")
+                   {% if doc.related %}
+                       that was {{ doc.relation|lower }} {{ doc.related.source.name|prettystdname:""|urlize_ietf_docs }} ("{{ doc.related.source.title }}")
                    {% endif %}
                </th>
            </tr>
        </tbody>
        <tbody>
-           {% if alias.document.ipr %}
-               {% for ipr in alias.document.ipr %}
+           {% if doc.ipr %}
+               {% for ipr in doc.ipr %}
                    <tr>
                        <td>{{ ipr.disclosure.time|date:"Y-m-d" }}</td>
                        <td>{{ ipr.disclosure.id }}</td>
@@ -58,7 +58,7 @@
                    <tr>
                        <td></td>
                        <td></td>
-                       <td>No IPR disclosures related to {{ alias.name|prettystdname|urlize_ietf_docs }} have been submitted.</td>
+                       <td>No IPR disclosures related to {{ doc.name|prettystdname|urlize_ietf_docs }} have been submitted.</td>
                    </tr>
                {% endif %}
            </tbody>


@@ -20,22 +20,22 @@
             <th scope="col" data-sort="statement">Statement</th>
         </tr>
     </thead>
-    {% for alias in docs %}
+    {% for doc in docs %}
        <tbody>
            <tr class="table-info">
                <th scope="col" colspan="3">
-                   IPR related to {{ alias.name|prettystdname|urlize_ietf_docs }} ("{{ alias.document.title }}")
-                   {% if alias.related %}
-                       that was {{ alias.relation|lower }} {{ alias.related.source|prettystdname|urlize_ietf_docs }} ("{{ alias.related.source.title|escape }}")
+                   IPR related to {{ doc.name|prettystdname|urlize_ietf_docs }} ("{{ doc.title }}")
+                   {% if doc.related %}
+                       that was {{ doc.relation|lower }} {{ doc.related.source|prettystdname|urlize_ietf_docs }} ("{{ doc.related.source.title|escape }}")
                    {% endif %}
-                   {% if alias.product_of_this_wg %}, a product of the {{ q }} WG{% endif %}
+                   {% if doc.product_of_this_wg %}, a product of the {{ q }} WG{% endif %}
                    :
                </th>
            </tr>
        </tbody>
        <tbody>
-           {% if alias.document.ipr %}
-               {% for ipr in alias.document.ipr %}
+           {% if doc.ipr %}
+               {% for ipr in doc.ipr %}
                    <tr>
                        <td>{{ ipr.disclosure.time|date:"Y-m-d" }}</td>
                        <td>{{ ipr.disclosure.id }}</td>
@@ -57,7 +57,7 @@
                    <td></td>
                    <td></td>
                    <td>
-                       No IPR disclosures related to <i>{{ alias.name|prettystdname|urlize_ietf_docs }}</i> have been submitted.
+                       No IPR disclosures related to <i>{{ doc.name|prettystdname|urlize_ietf_docs }}</i> have been submitted.
                    </td>
                </tr>
            {% endif %}


@@ -10,7 +10,7 @@ from django.core.management.base import BaseCommand
 
 import debug                            # pyflakes:ignore
 
-from ietf.doc.models import Document, State, DocAlias
+from ietf.doc.models import Document, State
 from ietf.submit.models import Submission
 from ietf.submit.checkers import DraftYangChecker
@@ -78,7 +78,7 @@ class Command(BaseCommand):
                 parts = name.rsplit('-',1)
                 if len(parts)==2 and len(parts[1])==2 and parts[1].isdigit():
                     name = parts[0]
-                draft = DocAlias.objects.get(name=name).document
+                draft = Document.objects.get(name=name)
                 self.check_yang(checker, draft, force=True)
         else:
             for draft in Document.objects.filter(states=active_state, type_id='draft'):


@@ -11,7 +11,7 @@ from django.utils.encoding import smart_str
 
 import debug                            # pyflakes:ignore
 
-from ietf.doc.models import Document, DocAlias, State, DocumentAuthor, DocEvent, RelatedDocument, NewRevisionDocEvent
+from ietf.doc.models import Document, State, DocumentAuthor, DocEvent, RelatedDocument, NewRevisionDocEvent
 from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory, StatusChangeFactory, WgDraftFactory, WgRfcFactory
 from ietf.group.models import Group, GroupHistory, Role, RoleHistory
 from ietf.iesg.models import TelechatDate
@@ -177,7 +177,6 @@ def make_test_data():
     charter.set_state(State.objects.get(used=True, slug="approved", type="charter"))
     group.charter = charter
     group.save()
-    DocAlias.objects.create(name=charter.name).docs.add(charter)
     setup_default_community_list_for_group(group)
 
     # ames WG
@@ -199,7 +198,6 @@ def make_test_data():
         rev="00",
     )
     charter.set_state(State.objects.get(used=True, slug="infrev", type="charter"))
-    DocAlias.objects.create(name=charter.name).docs.add(charter)
     group.charter = charter
     group.save()
     setup_default_community_list_for_group(group)
@@ -244,7 +242,6 @@ def make_test_data():
     #    rev="00",
     #    )
     #charter.set_state(State.objects.get(used=True, slug="infrev", type="charter"))
-    #DocAlias.objects.create(name=charter.name).docs.add(charter)
     #group.charter = charter
     #group.save()
@@ -288,8 +285,6 @@ def make_test_data():
         expires=timezone.now(),
     )
     old_draft.set_state(State.objects.get(used=True, type="draft", slug="expired"))
-    old_alias = DocAlias.objects.create(name=old_draft.name)
-    old_alias.docs.add(old_draft)
 
     # draft
     draft = Document.objects.create(
@@ -314,9 +309,6 @@ def make_test_data():
     draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req"))
     draft.set_state(State.objects.get(used=True, type="draft-stream-%s" % draft.stream_id, slug="wg-doc"))
 
-    doc_alias = DocAlias.objects.create(name=draft.name)
-    doc_alias.docs.add(draft)
-
     RelatedDocument.objects.create(source=draft, target=old_draft, relationship=DocRelationshipName.objects.get(slug='replaces'))
     old_draft.set_state(State.objects.get(type='draft', slug='repl'))
@@ -363,7 +355,7 @@ def make_test_data():
     IprDocRel.objects.create(
         disclosure=ipr,
-        document=doc_alias,
+        document=draft,
         revisions='00',
     )
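Test fixtures follow the same pattern: the separate DocAlias bookkeeping disappears and relations such as IprDocRel point at the Document itself, matching the last hunk above. A hedged sketch of the reduced fixture setup under this checkpoint's models (the field values are illustrative, not taken from the commit):

    draft = Document.objects.create(name="draft-ietf-mars-test", type_id="draft", rev="00")
    draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
    # no DocAlias.objects.create(...).docs.add(...) step is needed any more
    IprDocRel.objects.create(disclosure=ipr, document=draft, revisions="00")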