fix: preserve RelatedDocument references to subseries
commit 511cac0b4b

@@ -2,14 +2,15 @@
 from django.db import migrations
 import django.db.models.deletion
-from django.db.models import F, Subquery, OuterRef
+from django.db.models import F, Subquery, OuterRef, CharField
 import ietf.utils.models
 
 
 def forward(apps, schema_editor):
     RelatedDocument = apps.get_model("doc", "RelatedDocument")
     DocAlias = apps.get_model("doc", "DocAlias")
-    subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1])
-    RelatedDocument.objects.annotate(firstdoc=subquery).update(target=F("firstdoc"))
+    target_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1])
+    name_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("name")[:1])
+    RelatedDocument.objects.annotate(firstdoc=target_subquery).annotate(aliasname=name_subquery).update(target=F("firstdoc"), originaltargetaliasname=F("aliasname"))
 
 def reverse(apps, schema_editor):
     pass

@@ -55,6 +56,12 @@ class Migration(migrations.Migration):
             ),
             preserve_default=False,
         ),
+        migrations.AddField(
+            model_name="relateddocument",
+            name="originaltargetaliasname",
+            field=CharField(max_length=255, null=True),
+            preserve_default=True,
+        ),
         migrations.RunPython(forward, reverse),
         migrations.AlterField(
             model_name="relateddocument",

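The forward() data migration above backfills the new column in bulk with a single UPDATE: each RelatedDocument row is annotated with values looked up from the DocAlias row that its deprecated_target points at, and the annotations are then copied into the real columns. A commented sketch of that annotate-then-update pattern, using only the models and fields named in the diff (the combined annotate call is equivalent to the chained form above):

    # Sketch of the backfill pattern used in forward() above; comments are explanatory.
    from django.db.models import F, OuterRef, Subquery

    def forward(apps, schema_editor):
        RelatedDocument = apps.get_model("doc", "RelatedDocument")
        DocAlias = apps.get_model("doc", "DocAlias")
        # For each RelatedDocument row, find the first Document linked to the DocAlias
        # that deprecated_target references...
        target_subquery = Subquery(
            DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1]
        )
        # ...and that alias's own name, so a reference to e.g. a subseries alias is preserved.
        name_subquery = Subquery(
            DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("name")[:1]
        )
        RelatedDocument.objects.annotate(
            firstdoc=target_subquery, aliasname=name_subquery
        ).update(target=F("firstdoc"), originaltargetaliasname=F("aliasname"))
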
@@ -2,14 +2,15 @@
 from django.db import migrations
 import django.db.models.deletion
-from django.db.models import F, Subquery, OuterRef
+from django.db.models import F, Subquery, OuterRef, CharField
 import ietf.utils.models
 
 
 def forward(apps, schema_editor):
     RelatedDocHistory = apps.get_model("doc", "RelatedDocHistory")
     DocAlias = apps.get_model("doc", "DocAlias")
-    subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1])
-    RelatedDocHistory.objects.annotate(firstdoc=subquery).update(target=F("firstdoc"))
+    target_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("docs")[:1])
+    name_subquery = Subquery(DocAlias.objects.filter(pk=OuterRef("deprecated_target")).values("name")[:1])
+    RelatedDocHistory.objects.annotate(firstdoc=target_subquery).annotate(aliasname=name_subquery).update(target=F("firstdoc"), originaltargetaliasname=F("aliasname"))
 
 def reverse(apps, schema_editor):
     pass

@@ -57,6 +58,12 @@ class Migration(migrations.Migration):
             ),
             preserve_default=False,
         ),
+        migrations.AddField(
+            model_name="relateddochistory",
+            name="originaltargetaliasname",
+            field=CharField(max_length=255, null=True),
+            preserve_default=True,
+        ),
         migrations.RunPython(forward, reverse),
         migrations.AlterField(
             model_name="relateddochistory",

@@ -684,6 +684,7 @@ class RelatedDocument(models.Model):
     source = ForeignKey('Document')
     target = ForeignKey('Document', related_name='targets_related')
     relationship = ForeignKey(DocRelationshipName)
+    originaltargetaliasname = models.CharField(max_length=255, null=True)
     def action(self):
         return self.relationship.name
     def __str__(self):

@@ -1118,6 +1119,7 @@ class RelatedDocHistory(models.Model):
     source = ForeignKey('DocHistory')
     target = ForeignKey('Document', related_name="reversely_related_document_history_set")
     relationship = ForeignKey(DocRelationshipName)
+    originaltargetaliasname = models.CharField(max_length=255, null=True)
     def __str__(self):
         return u"%s %s %s" % (self.source.doc.name, self.relationship.name.lower(), self.target.name)

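The new column is a nullable CharField, so existing rows need no default and it stays empty for ordinary document-to-document relationships. Rows whose original target was a subseries alias can later be selected by name prefix, which is exactly what the sync code further down does. An illustrative query along those lines (the import path and prefixes come from the diff; the variable name is just for this sketch):

    from django.db.models import Q
    from ietf.doc.models import RelatedDocument

    # Relationships whose original target was a BCP/STD/FYI alias rather than an RFC.
    subseries_refs = RelatedDocument.objects.filter(
        Q(originaltargetaliasname__startswith="bcp")
        | Q(originaltargetaliasname__startswith="std")
        | Q(originaltargetaliasname__startswith="fyi")
    )
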
@@ -19,7 +19,7 @@ def reverse(apps, schema_editor):
 
 class Migration(migrations.Migration):
     dependencies = [
-        ("mailtrigger", "0002_slidesubmitter"),
+        ("mailtrigger", "0003_ballot_approved_charter"),
     ]
 
     operations = [migrations.RunPython(forward, reverse)]

@@ -12,13 +12,14 @@ from urllib.parse import urlencode
 from xml.dom import pulldom, Node
 
 from django.conf import settings
+from django.db.models import Subquery, OuterRef, F, Q
 from django.utils import timezone
 from django.utils.encoding import smart_bytes, force_str
 
 import debug  # pyflakes:ignore
 
 from ietf.doc.models import ( Document, State, StateType, DocEvent, DocRelationshipName,
-    DocTagName, RelatedDocument )
+    DocTagName, RelatedDocument, RelatedDocHistory )
 from ietf.doc.expire import move_draft_files_to_archive
 from ietf.doc.utils import add_state_change_event, prettify_std_name, update_action_holders
 from ietf.group.models import Group

@@ -327,7 +328,7 @@ def parse_index(response):
             log("node: %s" % node)
             raise
     for d in data:
-        k = "RFC%04d" % d[0]
+        k = "RFC%d" % d[0]
         if k in also_list:
             d[9].extend(also_list[k])
     return data

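The parse_index change swaps a zero-padded lookup key for an unpadded one. With a hypothetical document number the two formats produce:

    # Hypothetical value, for illustration only.
    old_key = "RFC%04d" % 26   # "RFC0026"  (old key format)
    new_key = "RFC%d" % 26     # "RFC26"    (new key format)
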
@@ -680,13 +681,17 @@ def update_docs_from_rfc_index(
                 if created:
                     if first_sync_creating_subseries:
                         subseries_doc.docevent_set.create(type=f"{subseries_slug}_history_marker", by=system, desc=f"No history of this {subseries_slug.upper()} document is currently available in the datatracker before this point")
-                        subseries_doc.docevent_set.create(type=f"{subseries_slug}_doc_created", by=system, desc=f"Created {subseries_doc_name} via sync to the rfc-index")
+                        subseries_doc.docevent_set.create(type=f"{subseries_slug}_doc_created", by=system, desc=f"Imported {subseries_doc_name} into the datatracker via sync to the rfc-index")
+                    else:
+                        subseries_doc.docevent_set.create(type=f"{subseries_slug}_doc_created", by=system, desc=f"Created {subseries_doc_name} via sync to the rfc-index")
                 _, relationship_created = subseries_doc.relateddocument_set.get_or_create(relationship_id="contains", target=doc)
                 if relationship_created:
                     subseries_doc.docevent_set.create(type="sync_from_rfc_editor", by=system, desc=f"Added {doc.name} to {subseries_doc.name}")
                     if first_sync_creating_subseries:
                         rfc_events.append(doc.docevent_set.create(type=f"{subseries_slug}_history_marker", by=system, desc=f"No history of {subseries_doc.name.upper()} is currently available in the datatracker before this point"))
-                        rfc_events.append(doc.docevent_set.create(type="sync_from_rfc_editor", by=system, desc=f"Added {doc.name} to {subseries_doc.name}"))
+                        rfc_events.append(doc.docevent_set.create(type="sync_from_rfc_editor", by=system, desc=f"Imported membership of {doc.name} in {subseries_doc.name} via sync to the rfc-index"))
+                    else:
+                        rfc_events.append(doc.docevent_set.create(type="sync_from_rfc_editor", by=system, desc=f"Added {doc.name} to {subseries_doc.name}"))
 
             for subdoc in doc.related_that("contains"):
                 if subdoc.name not in also:

@@ -735,6 +740,40 @@ def update_docs_from_rfc_index(
             )
         doc.save_with_history(rfc_events)
         yield rfc_changes, doc, rfc_published  # yield changes to the RFC
+
+    if first_sync_creating_subseries:
+        # First - create the known subseries documents that have ghosted.
+        # The RFC editor (as of 31 Oct 2023) claims these subseries docs do not exist.
+        # The datatracker, on the other hand, will say that the series doc currently contains no RFCs.
+        for name in ["fyi17", "std1", "bcp12", "bcp113", "bcp66"]:
+            # Leaving most things to the default intentionally
+            # Of note, title and stream are left to the defaults of "" and none.
+            subseries_doc, created = Document.objects.get_or_create(type_id=name[:3], name=name)
+            if not created:
+                log(f"Warning: {name} unexpectedly already exists")
+            else:
+                subseries_slug = name[:3]
+                subseries_doc.docevent_set.create(type=f"{subseries_slug}_history_marker", by=system, desc=f"No history of this {subseries_slug.upper()} document is currently available in the datatracker before this point")
+
+
+        RelatedDocument.objects.filter(
+            Q(originaltargetaliasname__startswith="bcp") |
+            Q(originaltargetaliasname__startswith="std") |
+            Q(originaltargetaliasname__startswith="fyi")
+        ).annotate(
+            subseries_target=Subquery(
+                Document.objects.filter(name=OuterRef("originaltargetaliasname")).values_list("pk", flat=True)[:1]
+            )
+        ).update(target=F("subseries_target"))
+        RelatedDocHistory.objects.filter(
+            Q(originaltargetaliasname__startswith="bcp") |
+            Q(originaltargetaliasname__startswith="std") |
+            Q(originaltargetaliasname__startswith="fyi")
+        ).annotate(
+            subseries_target=Subquery(
+                Document.objects.filter(name=OuterRef("originaltargetaliasname")).values_list("pk", flat=True)[:1]
+            )
+        ).update(target=F("subseries_target"))
 
 
 def post_approved_draft(url, name):

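After the ghosted subseries documents exist and the two bulk updates above have run, relationships that were originally recorded against a BCP/STD/FYI alias should point at the corresponding subseries Document rather than at an RFC. A hypothetical spot-check along those lines ("bcp14" is only an example name, not part of the diff):

    from ietf.doc.models import Document, RelatedDocument

    # Hypothetical check: every reference whose preserved alias name is "bcp14"
    # should now target the bcp document of that name.
    bcp14 = Document.objects.get(type_id="bcp", name="bcp14")
    stale = RelatedDocument.objects.filter(
        originaltargetaliasname="bcp14"
    ).exclude(target=bcp14)
    assert not stale.exists()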