chore: checkpoint: docalias mostly removed
parent 4946430159
commit 383899c5c4
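The change throughout this commit follows one pattern: lookups that used to go through the DocAlias table (docalias__name=...) now filter on Document.name directly. A minimal sketch of the before/after lookup, assuming the datatracker's Document model; the helper name is hypothetical:

from django.shortcuts import get_object_or_404

from ietf.doc.models import Document

def get_doc_by_name(name):
    # Before: the alias table was joined in,
    #   get_object_or_404(Document, docalias__name=name)
    # After DocAlias removal the name is looked up on Document itself.
    return get_object_or_404(Document, name=name)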
@@ -932,7 +932,7 @@ def make_rev_history(doc):
 if predecessors is None:
 predecessors = []
 if hasattr(doc, 'relateddocument_set'):
-for document in in doc.related_that_doc('replaces'):
+for document in doc.related_that_doc('replaces'):
 if document not in predecessors:
 predecessors.append(document)
 predecessors.extend(get_predecessors(document, predecessors))
@@ -38,7 +38,8 @@ from ietf.message.models import Message
 from ietf.message.utils import infer_message
 from ietf.name.models import IprLicenseTypeName
 from ietf.person.models import Person
-from ietf.secr.utils.document import get_rfc_num, is_draft
+from ietf.secr.utils.document import is_draft
+from ietf.utils import log
 from ietf.utils.draft_search import normalize_draftname
 from ietf.utils.mail import send_mail, send_mail_message
 from ietf.utils.response import permission_denied
@@ -71,10 +72,13 @@ def get_document_emails(ipr):
 for rel in ipr.iprdocrel_set.all():
 doc = rel.document.document

-if is_draft(doc):
+if doc.type_id=="draft":
 doc_info = 'Internet-Draft entitled "{}" ({})'.format(doc.title,doc.name)
+elif doc.type_id=="rfc":
+doc_info = 'RFC entitled "{}" (RFC{})'.format(doc.title, doc.rfc_number)
 else:
-doc_info = 'RFC entitled "{}" (RFC{})'.format(doc.title,get_rfc_num(doc))
+log.unreachable("2023-08-15")
+return ""

 addrs = gather_address_lists('ipr_posted_on_doc',doc=doc).as_strings(compact=False)

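A sketch of the branching the new get_document_emails hunk relies on, assuming doc.type_id distinguishes drafts from RFCs and doc.rfc_number is set for rfc-type documents; describe_document is a hypothetical name, and log.unreachable marks the branch that should no longer be hit:

from ietf.utils import log

def describe_document(doc):
    # type_id replaces the removed is_draft()/get_rfc_num() helpers,
    # which walked doc.docalias name prefixes.
    if doc.type_id == "draft":
        return 'Internet-Draft entitled "{}" ({})'.format(doc.title, doc.name)
    elif doc.type_id == "rfc":
        return 'RFC entitled "{}" (RFC{})'.format(doc.title, doc.rfc_number)
    else:
        log.unreachable("2023-08-15")
        return ""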
@@ -287,8 +287,6 @@ class AssignmentOrderResolver:
 def _collect_context(self):
 """Collect all relevant data about this team, document and review request."""

-self.doc_aliases = DocAlias.objects.filter(docs=self.doc).values_list("name", flat=True)
-
 # This data is collected as a dict, keys being person IDs, values being numbers/objects.
 self.rotation_index = {p.pk: i for i, p in enumerate(self.rotation_list)}
 self.reviewer_settings = self._reviewer_settings_for_person_ids(self.possible_person_ids)
@@ -354,8 +352,7 @@ class AssignmentOrderResolver:
 add_boolean_score(+1, email.person_id in self.wish_to_review, "wishes to review document")
 add_boolean_score(-1, email.person_id in self.connections,
 self.connections.get(email.person_id)) # reviewer is somehow connected: bad
-add_boolean_score(-1, settings.filter_re and any(
-re.search(settings.filter_re, n) for n in self.doc_aliases), "filter regexp matches")
+add_boolean_score(-1, settings.filter_re and re.search(settings.filter_re, self.doc.name), "filter regexp matches")

 # minimum interval between reviews
 days_needed = self.days_needed_for_reviewers.get(email.person_id, 0)
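With self.doc_aliases gone, the reviewer filter regexp is matched against the document name alone. A small sketch of the equivalent check (filter_re_matches is a hypothetical helper; settings is the per-reviewer settings object used by the surrounding scoring code):

import re

def filter_re_matches(settings, doc):
    # Before: any(re.search(settings.filter_re, n) for n in doc_aliases)
    # Now a single match against the canonical document name suffices.
    return bool(settings.filter_re and re.search(settings.filter_re, doc.name))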
@@ -175,7 +175,7 @@ def doc_detail(request, date, name):
 This view displays the ballot information for the document, and lets the user make
 changes to ballot positions and document state.
 '''
-doc = get_object_or_404(Document, docalias__name=name)
+doc = get_object_or_404(Document, name=name)
 if not is_doc_on_telechat(doc, date):
 messages.warning(request, 'Dcoument: {name} is not on the Telechat agenda for {date}'.format(
 name=doc.name,
@@ -342,7 +342,7 @@ def doc_navigate(request, date, name, nav):
 nav - [next|previous] which direction the user wants to navigate in the list of docs
 The view retrieves the appropriate document and redirects to the doc view.
 '''
-doc = get_object_or_404(Document, docalias__name=name)
+doc = get_object_or_404(Document, name=name)
 agenda = agenda_data(date=date)
 target = name

@@ -13,15 +13,6 @@ def get_full_path(doc):
 return None
 return os.path.join(doc.get_file_path(), doc.uploaded_filename)

-def get_rfc_num(doc):
-qs = doc.docalias.filter(name__startswith='rfc')
-return qs[0].name[3:] if qs else None
-
-def is_draft(doc):
-if doc.docalias.filter(name__startswith='rfc'):
-return False
-else:
-return True

 def get_start_date(doc):
 '''
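Callers of the removed helpers are expected to inspect the document directly; a hedged sketch of equivalent checks, assuming the type_id and rfc_number fields used elsewhere in this commit (the function names here are illustrative, not part of the change):

def is_rfc(doc):
    # Replaces the old docalias name-prefix test.
    return doc.type_id == "rfc"

def rfc_num(doc):
    # Replaces get_rfc_num(); None for anything that is not an RFC.
    return doc.rfc_number if doc.type_id == "rfc" else None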
@@ -600,7 +600,6 @@ TEST_CODE_COVERAGE_EXCLUDE_FILES = [
 "ietf/utils/test_runner.py",
 "ietf/name/generate_fixtures.py",
 "ietf/review/import_from_review_tool.py",
-"ietf/stats/backfill_data.py",
 "ietf/utils/patch.py",
 "ietf/utils/test_data.py",
 ]
@@ -1,184 +0,0 @@
-#!/usr/bin/env python
-# Copyright The IETF Trust 2017-2020, All Rights Reserved
-# -*- coding: utf-8 -*-
-
-
-import io
-import sys
-import os
-import os.path
-import argparse
-import time
-
-from typing import Set, Optional # pyflakes:ignore
-
-basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
-sys.path = [ basedir ] + sys.path
-os.environ["DJANGO_SETTINGS_MODULE"] = "ietf.settings"
-
-virtualenv_activation = os.path.join(basedir, "env", "bin", "activate_this.py")
-if os.path.exists(virtualenv_activation):
-exec(compile(io.open(virtualenv_activation, "rb").read(), virtualenv_activation, 'exec'), dict(__file__=virtualenv_activation))
-
-import django
-django.setup()
-
-from django.conf import settings
-
-import debug # pyflakes:ignore
-
-from ietf.doc.models import Document
-from ietf.name.models import FormalLanguageName
-from ietf.utils.draft import PlaintextDraft
-
-parser = argparse.ArgumentParser()
-parser.add_argument("--document", help="specific document name")
-parser.add_argument("--words", action="store_true", help="fill in word count")
-parser.add_argument("--formlang", action="store_true", help="fill in formal languages")
-parser.add_argument("--authors", action="store_true", help="fill in author info")
-args = parser.parse_args()
-
-formal_language_dict = { l.pk: l for l in FormalLanguageName.objects.all() }
-
-docs_qs = Document.objects.filter(type="draft")
-
-if args.document:
-docs_qs = docs_qs.filter(docalias__name=args.document)
-
-ts = time.strftime("%Y-%m-%d_%H:%M%z")
-logfile = io.open('backfill-authorstats-%s.log'%ts, 'w')
-print("Writing log to %s" % os.path.abspath(logfile.name))
-
-def say(msg):
-msg = msg.encode('utf8')
-sys.stderr.write(msg)
-sys.stderr.write('\n')
-logfile.write(msg)
-logfile.write('\n')
-
-def unicode(text):
-if text is None:
-return text
-# order matters here:
-for encoding in ['ascii', 'utf8', 'latin1', ]:
-try:
-utext = text.decode(encoding)
-# if encoding == 'latin1':
-# say("Warning: falling back to latin1 decoding for %s ..." % utext[:216]])
-return utext
-except UnicodeDecodeError:
-pass
-
-start = time.time()
-say("Running query for documents to process ...")
-for doc in docs_qs.prefetch_related("docalias", "formal_languages", "documentauthor_set", "documentauthor_set__person", "documentauthor_set__person__alias_set"):
-canonical_name = doc.name
-for n in doc.docalias.all():
-if n.name.startswith("rfc"):
-canonical_name = n.name
-
-if canonical_name.startswith("rfc"):
-path = os.path.join(settings.RFC_PATH, canonical_name + ".txt")
-else:
-path = os.path.join(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR, canonical_name + "-" + doc.rev + ".txt")
-
-if not os.path.exists(path):
-say("Skipping %s, no txt file found at %s" % (doc.name, path))
-continue
-
-with io.open(path, 'rb') as f:
-say("\nProcessing %s" % doc.name)
-sys.stdout.flush()
-d = PlaintextDraft(unicode(f.read()), path)
-
-updated = False
-
-updates = {}
-
-if args.words:
-words = d.get_wordcount()
-if words != doc.words:
-updates["words"] = words
-
-if args.formlang:
-langs = d.get_formal_languages()
-
-new_formal_languages = set(formal_language_dict[l] for l in langs)
-old_formal_languages = set(doc.formal_languages.all())
-
-if new_formal_languages != old_formal_languages:
-for l in new_formal_languages - old_formal_languages:
-doc.formal_languages.add(l)
-updated = True
-for l in old_formal_languages - new_formal_languages:
-doc.formal_languages.remove(l)
-updated = True
-
-if args.authors:
-old_authors = doc.documentauthor_set.all()
-old_authors_by_name = {}
-old_authors_by_email = {}
-for author in old_authors:
-for alias in author.person.alias_set.all():
-old_authors_by_name[alias.name] = author
-old_authors_by_name[author.person.plain_name()] = author
-
-if author.email_id:
-old_authors_by_email[author.email_id] = author
-
-# the draft parser sometimes has a problem when
-# affiliation isn't in the second line and it then thinks
-# it's an extra author - skip those extra authors
-seen = set() # type: Set[Optional[str]]
-for full, _, _, _, _, email, country, company in d.get_author_list():
-assert full is None or isinstance(full, str)
-assert email is None or isinstance(email, str)
-assert country is None or isinstance(country, str)
-assert isinstance(company, str)
-#full, email, country, company = [ unicode(s) for s in [full, email, country, company, ] ]
-if email in seen:
-continue
-seen.add(email)
-
-old_author = None
-if email:
-old_author = old_authors_by_email.get(email)
-if not old_author:
-old_author = old_authors_by_name.get(full)
-
-if not old_author:
-say("UNKNOWN AUTHOR: %s, %s, %s, %s, %s" % (doc.name, full, email, country, company))
-continue
-
-if old_author.affiliation != company:
-say("new affiliation: %s [ %s <%s> ] %s -> %s" % (canonical_name, full, email, old_author.affiliation, company))
-old_author.affiliation = company
-old_author.save(update_fields=["affiliation"])
-updated = True
-
-if country is None:
-country = ""
-
-if old_author.country != country:
-say("new country: %s [ %s <%s> ] %s -> %s" % (canonical_name , full, email, old_author.country, country))
-old_author.country = country
-old_author.save(update_fields=["country"])
-updated = True
-
-
-if updates:
-Document.objects.filter(pk=doc.pk).update(**updates)
-updated = True
-
-if updated:
-say("updated: %s" % canonical_name)
-
-stop = time.time()
-dur = stop-start
-sec = dur%60
-min = dur//60
-say("Processing time %d:%02d" % (min, sec))
-
-print("\n\nWrote log to %s" % os.path.abspath(logfile.name))
-logfile.close()
@@ -11,6 +11,10 @@ from ietf.name.models import CountryName
 from ietf.person.models import Person
 from ietf.utils.models import ForeignKey

+### NOTE WELL: These models are expected to be removed and the stats app reimplemented.
+# A bare python file that should have been a management command was used to populate
+# these models when the app was first installed - it has been removed from main, but
+# can be seen at https://github.com/ietf-tools/datatracker/blob/f2b716fc052a0152c32b86b428ba6ebfdcdf5cd2/ietf/stats/backfill_data.py

 class AffiliationAlias(models.Model):
 """Records that alias should be treated as name for statistical
@@ -20,7 +20,7 @@ import ietf.stats.views

 from ietf.submit.models import Submission
 from ietf.doc.factories import WgDraftFactory, WgRfcFactory
-from ietf.doc.models import Document, DocAlias, State, RelatedDocument, NewRevisionDocEvent, DocumentAuthor
+from ietf.doc.models import Document, State, RelatedDocument, NewRevisionDocEvent, DocumentAuthor
 from ietf.group.factories import RoleFactory
 from ietf.meeting.factories import MeetingFactory, AttendedFactory
 from ietf.person.factories import PersonFactory
@@ -79,7 +79,6 @@ class StatisticsTests(TestCase):
 words=100
 )
 referencing_draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
-DocAlias.objects.create(name=referencing_draft.name).docs.add(referencing_draft)
 RelatedDocument.objects.create(
 source=referencing_draft,
 target=draft,
@@ -34,7 +34,7 @@ from ietf.group.models import Role, Group
 from ietf.person.models import Person
 from ietf.name.models import ReviewResultName, CountryName, DocRelationshipName, ReviewAssignmentStateName
 from ietf.person.name import plain_name
-from ietf.doc.models import DocAlias, Document, State, DocEvent
+from ietf.doc.models import Document, State, DocEvent
 from ietf.meeting.models import Meeting
 from ietf.stats.models import MeetingRegistration, CountryAlias
 from ietf.stats.utils import get_aliased_affiliations, get_aliased_countries, compute_hirsch_index
@@ -214,13 +214,13 @@ def document_stats(request, stats_type=None):

 if any(stats_type == t[0] for t in possible_document_stats_types):
 # filter documents
-docalias_filters = Q(docs__type="draft")
+document_filters = Q(docs__type="draft")

 rfc_state = State.objects.get(type="draft", slug="rfc")
 if document_type == "rfc":
-docalias_filters &= Q(docs__states=rfc_state)
+document_filters &= Q(docs__states=rfc_state)
 elif document_type == "draft":
-docalias_filters &= ~Q(docs__states=rfc_state)
+document_filters &= ~Q(docs__states=rfc_state)

 if from_time:
 # this is actually faster than joining in the database,
@@ -231,9 +231,9 @@ def document_stats(request, stats_type=None):
 docevent__type__in=["published_rfc", "new_revision"],
 ).values_list("pk"))

-docalias_filters &= Q(docs__in=docs_within_time_constraint)
+document_filters &= Q(docs__in=docs_within_time_constraint)

-docalias_qs = DocAlias.objects.filter(docalias_filters)
+document_qs = Document.objects.filter(document_filters)

 if document_type == "rfc":
 doc_label = "RFC"
@@ -242,28 +242,15 @@ def document_stats(request, stats_type=None):
 else:
 doc_label = "document"

-total_docs = docalias_qs.values_list("docs__name").distinct().count()
-
-def generate_canonical_names(values):
-for doc_id, ts in itertools.groupby(values.order_by("docs__name"), lambda a: a[0]):
-chosen = None
-for t in ts:
-if chosen is None:
-chosen = t
-else:
-if t[1].startswith("rfc"):
-chosen = t
-elif t[1].startswith("draft") and not chosen[1].startswith("rfc"):
-chosen = t
-yield chosen
+total_docs = document_qs.values_list("name").distinct().count()

 if stats_type == "authors":
 stats_title = "Number of authors for each {}".format(doc_label)

 bins = defaultdict(set)

-for name, canonical_name, author_count in generate_canonical_names(docalias_qs.values_list("docs__name", "name").annotate(Count("docs__documentauthor"))):
-bins[author_count or 0].add(canonical_name)
+for name, author_count in document_qs.values_list("name").annotate(Count("documentauthor")).values_list("name","documentauthor__count")
+bins[author_count or 0].add(name)

 series_data = []
 for author_count, names in sorted(bins.items(), key=lambda t: t[0]):
@@ -278,8 +265,8 @@ def document_stats(request, stats_type=None):

 bins = defaultdict(set)

-for name, canonical_name, pages in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__pages")):
-bins[pages or 0].add(canonical_name)
+for name, pages in document_qs.values_list("name", "pages")):
+bins[pages or 0].add(name)

 series_data = []
 for pages, names in sorted(bins.items(), key=lambda t: t[0]):
@@ -297,7 +284,7 @@ def document_stats(request, stats_type=None):

 bins = defaultdict(set)

-for name, canonical_name, words in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__words")):
+for name, words in document_qs.values_list("name", "words")):
 bins[put_into_bin(words, bin_size)].add(canonical_name)

 series_data = []
@@ -322,7 +309,7 @@ def document_stats(request, stats_type=None):
 submission_types[doc_name] = file_types

 doc_names_with_missing_types = {}
-for doc_name, canonical_name, rev in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__rev")):
+for doc_name, rev in document_qs.values_list("name", "rev"):
 types = submission_types.get(doc_name)
 if types:
 for dot_ext in types.split(","):
@@ -367,7 +354,7 @@ def document_stats(request, stats_type=None):

 bins = defaultdict(set)

-for name, canonical_name, formal_language_name in generate_canonical_names(docalias_qs.values_list("docs__name", "name", "docs__formal_languages__name")):
+for name, formal_language_name in document_qs.values_list("name", "formal_languages__name")):
 bins[formal_language_name or ""].add(canonical_name)

 series_data = []
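generate_canonical_names() disappears because the statistics no longer need to collapse draft and RFC aliases onto one canonical name; each chart now groups directly on Document.name. A sketch of the author-count binning written out as a complete loop, assuming the document_qs built above (the added line in the diff appears to have lost its trailing colon in this checkpoint):

from collections import defaultdict

from django.db.models import Count

from ietf.doc.models import Document

document_qs = Document.objects.filter(type="draft")

bins = defaultdict(set)
for name, author_count in (
    document_qs.values_list("name")
    .annotate(Count("documentauthor"))
    .values_list("name", "documentauthor__count")
):
    # Group documents by how many authors they have.
    bins[author_count or 0].add(name)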
@@ -108,14 +108,14 @@ class Submission(models.Model):
 @property
 def active_wg_drafts_replaced(self):
 return Document.objects.filter(
-docalias__name__in=self.replaces.split(','),
+name__in=self.replaces.split(','),
 group__in=Group.objects.active_wgs()
 )

 @property
 def closed_wg_drafts_replaced(self):
 return Document.objects.filter(
-docalias__name__in=self.replaces.split(','),
+name__in=self.replaces.split(','),
 group__in=Group.objects.closed_wgs()
 )

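Both properties now resolve the comma-separated replaces field straight to Document rows. A sketch of the shared lookup, assuming the Group managers used above (drafts_replaced is a hypothetical helper, not code from the commit):

from ietf.doc.models import Document
from ietf.group.models import Group

def drafts_replaced(submission, group_qs):
    # submission.replaces is a comma-separated list of draft names;
    # with DocAlias gone they are matched on Document.name.
    return Document.objects.filter(
        name__in=submission.replaces.split(','),
        group__in=group_qs,
    )

# e.g. drafts_replaced(submission, Group.objects.active_wgs())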
@@ -34,7 +34,7 @@ from ietf.submit.utils import (expirable_submissions, expire_submission, find_su
 process_and_validate_submission)
 from ietf.doc.factories import (DocumentFactory, WgDraftFactory, IndividualDraftFactory,
 ReviewFactory, WgRfcFactory)
-from ietf.doc.models import ( Document, DocAlias, DocEvent, State,
+from ietf.doc.models import ( Document, DocEvent, State,
 BallotPositionDocEvent, DocumentAuthor, SubmissionDocEvent )
 from ietf.doc.utils import create_ballot_if_not_open, can_edit_docextresources, update_action_holders
 from ietf.group.factories import GroupFactory, RoleFactory
@@ -358,8 +358,6 @@ class SubmitTests(BaseSubmitTestCase):
 note="",
 )
 sug_replaced_draft.set_state(State.objects.get(used=True, type="draft", slug="active"))
-sug_replaced_alias = DocAlias.objects.create(name=sug_replaced_draft.name)
-sug_replaced_alias.docs.add(sug_replaced_draft)

 name = "draft-ietf-mars-testing-tests"
 rev = "00"
@@ -400,7 +398,7 @@ class SubmitTests(BaseSubmitTestCase):
 r = self.client.post(status_url, dict(action=action))
 self.assertEqual(r.status_code, 302)

-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, rev)
 new_revision = draft.latest_event(type="new_revision")
 self.assertEqual(draft.group.acronym, "mars")
@@ -420,7 +418,7 @@ class SubmitTests(BaseSubmitTestCase):
 self.assertEqual(draft.relations_that_doc("replaces").count(), 1)
 self.assertTrue(draft.relations_that_doc("replaces").first().target, draft)
 self.assertEqual(draft.relations_that_doc("possibly-replaces").count(), 1)
-self.assertTrue(draft.relations_that_doc("possibly-replaces").first().target, sug_replaced_alias)
+self.assertTrue(draft.relations_that_doc("possibly-replaces").first().target, sug_replaced_draft)
 self.assertEqual(len(outbox), mailbox_before + 5)
 self.assertIn(("I-D Action: %s" % name), outbox[-4]["Subject"])
 self.assertIn(author.ascii, get_payload_text(outbox[-4]))
@@ -433,7 +431,7 @@ class SubmitTests(BaseSubmitTestCase):
 # Check "Review of suggested possible replacements for..." mail
 self.assertIn("review", outbox[-1]["Subject"].lower())
 self.assertIn(name, get_payload_text(outbox[-1]))
-self.assertIn(sug_replaced_alias.name, get_payload_text(outbox[-1]))
+self.assertIn(sug_replaced_draft.name, get_payload_text(outbox[-1]))
 self.assertIn("ames-chairs@", outbox[-1]["To"].lower())
 self.assertIn("mars-chairs@", outbox[-1]["To"].lower())
 # Check submission settings
@@ -684,7 +682,7 @@ class SubmitTests(BaseSubmitTestCase):
 self.assertTrue('New version approved' in edescs)
 self.assertTrue('Uploaded new revision' in edescs)

-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, rev)
 self.assertEqual(draft.group.acronym, name.split("-")[2])
 #
@@ -911,7 +909,7 @@ class SubmitTests(BaseSubmitTestCase):
 r = self.client.post(confirmation_url, {'action':'confirm'})
 self.assertEqual(r.status_code, 302)

-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, rev)
 new_revision = draft.latest_event()
 self.assertEqual(new_revision.type, "new_revision")
@@ -951,7 +949,7 @@ class SubmitTests(BaseSubmitTestCase):
 action = force_post_button.parents("form").find('input[type=hidden][name="action"]').val()
 r = self.client.post(status_url, dict(action=action))

-doc = Document.objects.get(docalias__name=name)
+doc = Document.objects.get(name=name)
 self.assertEqual(doc.documentauthor_set.count(), 1)
 docauth = doc.documentauthor_set.first()
 self.assertEqual(docauth.person, author)
@@ -1084,7 +1082,7 @@ class SubmitTests(BaseSubmitTestCase):
 self.assertIn("New Version Notification", notification_email["Subject"])
 self.assertIn(author.email().address.lower(), notification_email["To"])

-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, rev)
 self.assertEqual(draft.docextresource_set.count(), 0)
 new_revision = draft.latest_event()
@@ -1132,7 +1130,7 @@ class SubmitTests(BaseSubmitTestCase):
 self._assert_extresources_form_not_present(r)

 # Check that the draft itself got the resources
-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertCountEqual(
 [str(r) for r in draft.docextresource_set.all()],
 [str(r) for r in resources],
@@ -1178,7 +1176,7 @@ class SubmitTests(BaseSubmitTestCase):
 r = self.client.post(confirmation_url, {'action':'confirm'})
 self.assertEqual(r.status_code, 302)
 self.assertEqual(len(outbox), mailbox_before+3)
-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, rev)
 self.assertEqual(draft.relateddocument_set.filter(relationship_id='replaces').count(), replaces_count)
 self.assertEqual(draft.docextresource_set.count(), 0)
@@ -1296,7 +1294,7 @@ class SubmitTests(BaseSubmitTestCase):
 r = self.client.post(confirmation_url, {'action':'cancel'})
 self.assertEqual(r.status_code, 302)
 self.assertEqual(len(outbox), mailbox_before)
-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, old_rev)

 def test_submit_new_wg_with_dash(self):
@@ -1453,7 +1451,7 @@ class SubmitTests(BaseSubmitTestCase):
 r = self.client.post(status_url, dict(action=action))
 self.assertEqual(r.status_code, 302)

-draft = Document.objects.get(docalias__name=name)
+draft = Document.objects.get(name=name)
 self.assertEqual(draft.rev, rev)
 self.assertEqual(draft.docextresource_set.count(), 0)
 self.verify_bibxml_ids_creation(draft)
@@ -25,7 +25,7 @@ from django.utils import timezone

 import debug # pyflakes:ignore

-from ietf.doc.models import ( Document, State, DocAlias, DocEvent, SubmissionDocEvent,
+from ietf.doc.models import ( Document, State, DocEvent, SubmissionDocEvent,
 DocumentAuthor, AddedMessageEvent )
 from ietf.doc.models import NewRevisionDocEvent
 from ietf.doc.models import RelatedDocument, DocRelationshipName, DocExtResource
@@ -375,10 +375,6 @@ def post_submission(request, submission, approved_doc_desc, approved_subm_desc):
 events.append(e)
 log.log(f"{submission.name}: created doc events")

-# update related objects
-alias, __ = DocAlias.objects.get_or_create(name=submission.name)
-alias.docs.add(draft)
-
 draft.set_state(State.objects.get(used=True, type="draft", slug="active"))

 update_authors(draft, submission)
@@ -1000,7 +996,7 @@ def accept_submission(submission: Submission, request: Optional[HttpRequest] = N
 docevent_from_submission(submission, desc="Uploaded new revision",
 who=requester if requester_is_author else None)

-replaces = DocAlias.objects.filter(name__in=submission.replaces_names)
+replaces = Document.objects.filter(name__in=submission.replaces_names)
 pretty_replaces = '(none)' if not replaces else (
 ', '.join(prettify_std_name(r.name) for r in replaces)
 )
@@ -22,7 +22,7 @@ from django.views.decorators.csrf import csrf_exempt

 import debug # pyflakes:ignore

-from ietf.doc.models import Document, DocAlias, AddedMessageEvent
+from ietf.doc.models import Document, AddedMessageEvent
 from ietf.doc.forms import ExtResourceForm
 from ietf.group.models import Group
 from ietf.group.utils import group_features_group_filter
@@ -410,7 +410,7 @@ def submission_status(request, submission_id, access_token=None):
 )

 submitter_form = SubmitterForm(initial=submission.submitter_parsed(), prefix="submitter")
-replaces_form = ReplacesForm(name=submission.name,initial=DocAlias.objects.filter(name__in=submission.replaces.split(",")))
+replaces_form = ReplacesForm(name=submission.name,initial=Document.objects.filter(name__in=submission.replaces.split(",")))
 extresources_form = ExtResourceForm(
 initial=dict(resources=[er['res'] for er in external_resources]),
 extresource_model=SubmissionExtResource,
@@ -626,7 +626,7 @@ def edit_submission(request, submission_id, access_token=None):
 else:
 edit_form = EditSubmissionForm(instance=submission, prefix="edit")
 submitter_form = SubmitterForm(initial=submission.submitter_parsed(), prefix="submitter")
-replaces_form = ReplacesForm(name=submission.name,initial=DocAlias.objects.filter(name__in=submission.replaces.split(",")))
+replaces_form = ReplacesForm(name=submission.name, initial=Document.objects.filter(name__in=submission.replaces.split(",")))
 author_forms = [ AuthorForm(initial=author, prefix="authors-%s" % i)
 for i, author in enumerate(submission.authors) ]

@@ -45,7 +45,7 @@ def update_rfc_log_from_protocol_page(rfc_names, rfc_must_published_later_than):

 updated = []

-docs = Document.objects.filter(docalias__name__in=rfc_names).exclude(
+docs = Document.objects.filter(name__in=rfc_names).exclude(
 docevent__type="rfc_in_iana_registry").filter(
 # only take those that were published after cutoff since we
 # have a big bunch of old RFCs that we unfortunately don't have data for
@@ -189,7 +189,7 @@ def update_history_with_changes(changes, send_email=True):
 state_type=state_type, state=state)
 if not e:
 try:
-doc = Document.objects.get(docalias__name=docname)
+doc = Document.objects.get(name=docname)
 except Document.DoesNotExist:
 warnings.append("Document %s not found" % docname)
 continue
@@ -67,7 +67,7 @@
 <label class="d-none d-md-block" aria-label="Document search">
 <input class="form-control select2-field search-select"
 id="navbar-doc-search"
-data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='docalias' doc_type='draft' %}"
+data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='document' doc_type='draft' %}"
 type="text"
 data-placeholder="Document search">
 </label>
@@ -23,10 +23,10 @@
 <h2 class="my-3">Document history
 {% if related %}
 <div class="float-end">
-{% for related_docalias in related %}
+{% for related_document in related %}
 <a class="btn btn-outline-primary btn-sm"
-href="{% url 'ietf.doc.views_doc.document_history' name=related_docalias.name %}">
-Related history for {{ related_docalias.name }}
+href="{% url 'ietf.doc.views_doc.document_history' name=related_document.name %}">
+Related history for {{ related_document.name }}
 </a>
 {% endfor %}
 </div>
@@ -39,7 +39,7 @@
 id="new_relation_row_rfc"
 aria-label="Enter new affected RFC"
 class="form-control select2-field"
-data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='docalias' doc_type='draft' %}"
+data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='document' doc_type='draft' %}"
 data-result-key="text"
 data-max-entries="1"
 data-width="resolve"
@@ -37,11 +37,11 @@ Goals and Milestones:
 {% for milestone in group.milestones %} {% if milestone.resolved %}{{ milestone.resolved }} {% else %}{{ milestone.due|date:"M Y" }}{% endif %} - {{ milestone.desc }}
 {% endfor %}
 Internet-Drafts:
-{% for alias in group.drafts %} - {{ alias.document.title }} [{{ alias.name }}-{{ alias.document.rev }}] ({{ alias.document.pages }} pages)
+{% for document in group.drafts %} - {{ document.title }} [{{ document.name }}-{{ document.rev }}] ({{ document.pages }} pages)
 {% endfor %}
 {% if group.rfcs %}Requests for Comments:
-{% for alias in group.rfcs %} {{ alias.name.upper }}: {{ alias.document.title}} ({{ alias.document.pages }} pages){% for r in alias.rel %}
-* {{ r.action }} {{ r.target.name|upper }}{% endfor %}{% for r in alias.invrel %}
+{% for document in group.rfcs %} {{ document.name.upper }}: {{ document.title}} ({{ document.pages }} pages){% for r in document.rel %}
+* {{ r.action }} {{ r.target.name|upper }}{% endfor %}{% for r in document.invrel %}
 * {% if r.relationsship == "obs" %}{{ r.inverse_action|upper }}{% else %}{{ r.action }}{% endif %} {{ r.source.canonical_name|upper }}{% endfor %}
 {% endfor %}
 {% else %}No Requests for Comments{% endif %}
@@ -13,9 +13,9 @@
 Please select one of following I-Ds:
 </p>
 <ul>
-{% for docalias in docs %}
+{% for doc in docs %}
 <li>
-<a href="?submit=draft&id={{ docalias.name }}">{{ docalias.name }}</a>
+<a href="?submit=draft&id={{ doc.name }}">{{ doc.name }}</a>
 </li>
 {% endfor %}
 </ul>
@@ -24,20 +24,20 @@
 <th scope="col" data-sort="statement">Statement</th>
 </tr>
 </thead>
-{% for alias in docs %}
+{% for doc in docs %}
 <tbody>
 <tr>
 <th scope="col" class="table-info" colspan="3">
-IPR that is related to {{ alias.name|prettystdname:""|urlize_ietf_docs }} ("{{ alias.document.title }}")
-{% if alias.related %}
-that was {{ alias.relation|lower }} {{ alias.related.source.name|prettystdname:""|urlize_ietf_docs }} ("{{ alias.related.source.title }}")
+IPR that is related to {{ doc.name|prettystdname:""|urlize_ietf_docs }} ("{{ doc.title }}")
+{% if doc.related %}
+that was {{ doc.relation|lower }} {{ doc.related.source.name|prettystdname:""|urlize_ietf_docs }} ("{{ doc.related.source.title }}")
 {% endif %}
 </th>
 </tr>
 </tbody>
 <tbody>
-{% if alias.document.ipr %}
-{% for ipr in alias.document.ipr %}
+{% if doc.ipr %}
+{% for ipr in doc.ipr %}
 <tr>
 <td>{{ ipr.disclosure.time|date:"Y-m-d" }}</td>
 <td>{{ ipr.disclosure.id }}</td>
@@ -58,7 +58,7 @@
 <tr>
 <td></td>
 <td></td>
-<td>No IPR disclosures related to {{ alias.name|prettystdname|urlize_ietf_docs }} have been submitted.</td>
+<td>No IPR disclosures related to {{ doc.name|prettystdname|urlize_ietf_docs }} have been submitted.</td>
 </tr>
 {% endif %}
 </tbody>
@@ -20,22 +20,22 @@
 <th scope="col" data-sort="statement">Statement</th>
 </tr>
 </thead>
-{% for alias in docs %}
+{% for doc in docs %}
 <tbody>
 <tr class="table-info">
 <th scope="col" colspan="3">
-IPR related to {{ alias.name|prettystdname|urlize_ietf_docs }} ("{{ alias.document.title }}")
-{% if alias.related %}
-that was {{ alias.relation|lower }} {{ alias.related.source|prettystdname|urlize_ietf_docs }} ("{{ alias.related.source.title|escape }}")
+IPR related to {{ doc.name|prettystdname|urlize_ietf_docs }} ("{{ doc.title }}")
+{% if doc.related %}
+that was {{ doc.relation|lower }} {{ doc.related.source|prettystdname|urlize_ietf_docs }} ("{{ doc.related.source.title|escape }}")
 {% endif %}
-{% if alias.product_of_this_wg %}, a product of the {{ q }} WG{% endif %}
+{% if doc.product_of_this_wg %}, a product of the {{ q }} WG{% endif %}
 :
 </th>
 </tr>
 </tbody>
 <tbody>
-{% if alias.document.ipr %}
-{% for ipr in alias.document.ipr %}
+{% if doc.ipr %}
+{% for ipr in doc.ipr %}
 <tr>
 <td>{{ ipr.disclosure.time|date:"Y-m-d" }}</td>
 <td>{{ ipr.disclosure.id }}</td>
@@ -57,7 +57,7 @@
 <td></td>
 <td></td>
 <td>
-No IPR disclosures related to <i>{{ alias.name|prettystdname|urlize_ietf_docs }}</i> have been submitted.
+No IPR disclosures related to <i>{{ doc.name|prettystdname|urlize_ietf_docs }}</i> have been submitted.
 </td>
 </tr>
 {% endif %}
@@ -10,7 +10,7 @@ from django.core.management.base import BaseCommand

 import debug # pyflakes:ignore

-from ietf.doc.models import Document, State, DocAlias
+from ietf.doc.models import Document, State
 from ietf.submit.models import Submission
 from ietf.submit.checkers import DraftYangChecker

@@ -78,7 +78,7 @@ class Command(BaseCommand):
 parts = name.rsplit('-',1)
 if len(parts)==2 and len(parts[1])==2 and parts[1].isdigit():
 name = parts[0]
-draft = DocAlias.objects.get(name=name).document
+draft = Document.objects.get(name=name)
 self.check_yang(checker, draft, force=True)
 else:
 for draft in Document.objects.filter(states=active_state, type_id='draft'):
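The command still strips a trailing two-digit revision before looking the draft up; only the lookup itself changes from DocAlias to Document. A sketch of that name handling, assuming names of the form draft-...-NN (draft_for is an illustrative helper):

from ietf.doc.models import Document

def draft_for(name):
    # "draft-foo-bar-03" -> "draft-foo-bar"; names without a revision pass through.
    parts = name.rsplit('-', 1)
    if len(parts) == 2 and len(parts[1]) == 2 and parts[1].isdigit():
        name = parts[0]
    return Document.objects.get(name=name)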
@@ -11,7 +11,7 @@ from django.utils.encoding import smart_str

 import debug # pyflakes:ignore

-from ietf.doc.models import Document, DocAlias, State, DocumentAuthor, DocEvent, RelatedDocument, NewRevisionDocEvent
+from ietf.doc.models import Document, State, DocumentAuthor, DocEvent, RelatedDocument, NewRevisionDocEvent
 from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory, StatusChangeFactory, WgDraftFactory, WgRfcFactory
 from ietf.group.models import Group, GroupHistory, Role, RoleHistory
 from ietf.iesg.models import TelechatDate
@@ -177,7 +177,6 @@ def make_test_data():
 charter.set_state(State.objects.get(used=True, slug="approved", type="charter"))
 group.charter = charter
 group.save()
-DocAlias.objects.create(name=charter.name).docs.add(charter)
 setup_default_community_list_for_group(group)

 # ames WG
@@ -199,7 +198,6 @@ def make_test_data():
 rev="00",
 )
 charter.set_state(State.objects.get(used=True, slug="infrev", type="charter"))
-DocAlias.objects.create(name=charter.name).docs.add(charter)
 group.charter = charter
 group.save()
 setup_default_community_list_for_group(group)
@@ -244,7 +242,6 @@ def make_test_data():
 # rev="00",
 # )
 #charter.set_state(State.objects.get(used=True, slug="infrev", type="charter"))
-#DocAlias.objects.create(name=charter.name).docs.add(charter)
 #group.charter = charter
 #group.save()

@@ -288,8 +285,6 @@ def make_test_data():
 expires=timezone.now(),
 )
 old_draft.set_state(State.objects.get(used=True, type="draft", slug="expired"))
-old_alias = DocAlias.objects.create(name=old_draft.name)
-old_alias.docs.add(old_draft)

 # draft
 draft = Document.objects.create(
@@ -314,9 +309,6 @@ def make_test_data():
 draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub-req"))
 draft.set_state(State.objects.get(used=True, type="draft-stream-%s" % draft.stream_id, slug="wg-doc"))

-doc_alias = DocAlias.objects.create(name=draft.name)
-doc_alias.docs.add(draft)
-
 RelatedDocument.objects.create(source=draft, target=old_draft, relationship=DocRelationshipName.objects.get(slug='replaces'))
 old_draft.set_state(State.objects.get(type='draft', slug='repl'))

@@ -363,7 +355,7 @@ def make_test_data():

 IprDocRel.objects.create(
 disclosure=ipr,
-document=doc_alias,
+document=draft,
 revisions='00',
 )
