Import new_revision events for the non-draft document types

This allows a more generic document history page; it will be important
for charters and less important for the other types, but we might as
well get it right from the beginning.
 - Legacy-Id: 3940
Ole Laursen 2012-02-21 16:50:17 +00:00
parent f2a05069ac
commit feb4f93dc6
6 changed files with 53 additions and 22 deletions
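For context: once every document type gets new_revision events, a generic
history page can be driven straight off the DocEvent table. A minimal sketch of
such a view (the view name, URL and template path are hypothetical, not part of
this commit):

    # hypothetical Django view: list all events for one document, including the
    # imported new_revision ones, in reverse chronological order
    from django.shortcuts import get_object_or_404, render_to_response

    from ietf.doc.models import Document, DocEvent

    def document_history(request, name):
        doc = get_object_or_404(Document, name=name)
        events = DocEvent.objects.filter(doc=doc).order_by("-time", "-id")
        return render_to_response("doc/history.html", {"doc": doc, "events": events})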

View file

@@ -283,7 +283,6 @@ EVENT_TYPES = [
     ("changed_document", "Changed document metadata"),
     ("added_comment", "Added comment"),
-    ("uploaded", "Uploaded document"),
     ("deleted", "Deleted document"),

     # misc draft/RFC events

View file

@@ -19,7 +19,7 @@ from ietf.doc.models import State, StateType
 from ietf.doc.utils import get_tags_for_stream_id
 from ietf.doc.models import Document
 from ietf.name.utils import name
-from redesign.importing.utils import old_person_to_person
+from redesign.importing.utils import old_person_to_person, make_revision_event
 from ietf.idtracker.models import AreaGroup, IETFWG, Area, AreaGroup, Acronym, AreaWGURL, IRTF, ChairsHistory, Role, AreaDirector
 from ietf.liaisons.models import SDOs
 from ietf.iesg.models import TelechatDates, Telechat, TelechatDate
@@ -389,7 +389,8 @@ for o in IETFWG.objects.all().order_by("pk"):
     import_date_event(group, "start", "active", "Started group")
     import_date_event(group, "concluded", "conclude", "Concluded group")
     # dormant_date is empty on all so don't bother with that

     # import charter
     charter = set_or_create_charter(group)
     if group.state_id in ("active", "conclude"):
         charter.rev = "01"
@@ -402,5 +403,8 @@ for o in IETFWG.objects.all().order_by("pk"):
     charter.time = e[0].time if e else group.time
     charter.save()
+
+    e = make_revision_event(charter, system)
+    e.save()

     # FIXME: missing fields from old: meeting_scheduled, email_keyword, meeting_scheduled_old

View file

@@ -19,7 +19,7 @@ from ietf.idtracker.models import AreaDirector, IETFWG, Acronym, IRTF, PersonOrO
 from ietf.meeting.models import *
 from ietf.person.models import *
 from ietf.doc.models import Document, DocAlias, State, DocEvent
-from redesign.importing.utils import old_person_to_person, dont_save_queries
+from redesign.importing.utils import old_person_to_person, dont_save_queries, make_revision_event
 from redesign.interim.models import *
 from ietf.name.models import *
 from ietf.name.utils import name
@@ -78,6 +78,7 @@ for o in InterimMeetings.objects.using(database).order_by("start_date"):
     session.modified = datetime.datetime.combine(m.date, datetime.time(0, 0, 0))
     session.save()

     meeting = m
     interim_meeting = o

     def import_material_kind(kind, doctype):
@@ -110,6 +111,7 @@ for o in InterimMeetings.objects.using(database).order_by("start_date"):
             d.external_url = o.filename # save filenames for now as they don't appear to be quite regular
             d.rev = "01"
             d.group = session.group
             d.time = datetime.datetime.combine(meeting.date, datetime.time(0, 0, 0)) # we may have better estimate below
             d.save()
@@ -120,6 +122,8 @@ for o in InterimMeetings.objects.using(database).order_by("start_date"):
             session.materials.add(d)

             # try to create a doc event to figure out who uploaded it
+            e = make_revision_event(d, system_person)
+
             t = d.type_id
             if d.type_id == "slides":
                 t = "slide, '%s" % d.title
@@ -127,22 +131,23 @@ for o in InterimMeetings.objects.using(database).order_by("start_date"):
                                              meeting_num=interim_meeting.meeting_num,
                                              activity__startswith=t,
                                              activity__endswith="was uploaded").using(database)[:1]
             if activities:
                 a = activities[0]
-                try:
-                    e = DocEvent.objects.get(doc=d, type="uploaded")
-                except DocEvent.DoesNotExist:
-                    e = DocEvent(doc=d, type="uploaded")
                 e.time = datetime.datetime.combine(a.act_date, a.act_time)
                 try:
                     e.by = old_person_to_person(PersonOrOrgInfo.objects.get(pk=a.act_by)) or system_person
                 except PersonOrOrgInfo.DoesNotExist:
-                    e.by = system_person
-                e.desc = u"Uploaded %s" % d.type_id
-                e.save()
+                    pass

                 d.time = e.time
                 d.save()
             else:
                 print "NO UPLOAD ACTIVITY RECORD for", d.name.encode("utf-8"), t.encode("utf-8"), interim_meeting.group_acronym_id, interim_meeting.meeting_num
+
+            e.save()

     import_material_kind(InterimAgenda, agenda_doctype)
     import_material_kind(InterimMinutes, minutes_doctype)

View file

@@ -18,7 +18,7 @@ from ietf.idtracker.models import Acronym, EmailAddress
 from ietf.liaisons.models import *
 from ietf.doc.models import Document, DocAlias
 from ietf.person.models import *
-from redesign.importing.utils import old_person_to_person
+from redesign.importing.utils import old_person_to_person, make_revision_event
 from ietf.name.models import *
 from ietf.name.utils import name
@@ -45,6 +45,7 @@ purpose_mapping[None] = purpose_mapping[0] = purpose_mapping[3] # map unknown to
 purpose_mapping[5] = purpose_mapping[3] # "Other" is mapped to "For information" as default

 system_email = Email.objects.get(person__name="(System)")
+system_person = Person.objects.get(name="(System)")

 obviously_bogus_date = datetime.date(1970, 1, 1)

 bodies = {
@@ -199,6 +200,12 @@ for o in LiaisonDetail.objects.all().order_by("pk"):
         DocAlias.objects.get_or_create(document=attachment, name=attachment.name)

+        e = make_revision_event(attachment, system_person)
+        if l.from_contact and l.from_contact.person:
+            e.by = l.from_contact.person
+            print e.by
+        e.save()
+
         l.attachments.add(attachment)

View file

@@ -20,7 +20,7 @@ from ietf.meeting.models import *
 from ietf.proceedings.models import Meeting as MeetingOld, MeetingVenue, MeetingRoom, NonSession, WgMeetingSession, WgAgenda, Minute, Slide, WgProceedingsActivities, NotMeetingGroup
 from ietf.person.models import *
 from ietf.doc.models import Document, DocAlias, State, DocEvent
-from redesign.importing.utils import old_person_to_person, dont_save_queries
+from redesign.importing.utils import old_person_to_person, dont_save_queries, make_revision_event
 from ietf.name.models import *
 from ietf.name.utils import name
@@ -261,7 +261,7 @@ def import_materials(wg_meeting_session, session):
             d.external_url = o.filename # save filenames for now as they don't appear to be quite regular
             d.rev = "01"
             d.group = session.group
             d.time = datetime.datetime.combine(session.meeting.date, datetime.time(0, 0, 0)) # we may have better estimate below
             d.save()

             d.set_state(State.objects.get(type=doctype, slug="active"))
@@ -271,6 +271,8 @@ def import_materials(wg_meeting_session, session):
             session.materials.add(d)

             # try to create a doc event to figure out who uploaded it
+            e = make_revision_event(d, system_person)
+
             t = d.type_id
             if d.type_id == "slides":
                 t = "slide, '%s" % d.title
@@ -280,20 +282,19 @@ def import_materials(wg_meeting_session, session):
                                              activity__endswith="was uploaded")[:1]
             if activities:
                 a = activities[0]
-                try:
-                    e = DocEvent.objects.get(doc=d, type="uploaded")
-                except DocEvent.DoesNotExist:
-                    e = DocEvent(doc=d, type="uploaded")
                 e.time = datetime.datetime.combine(a.act_date, datetime.time(*[int(s) for s in a.act_time.split(":")]))
                 try:
                     e.by = old_person_to_person(a.act_by) or system_person
                 except PersonOrOrgInfo.DoesNotExist:
-                    e.by = system_person
-                e.desc = u"Uploaded %s" % d.type_id
-                e.save()
+                    pass

                 d.time = e.time
                 d.save()
             else:
                 print "NO UPLOAD ACTIVITY RECORD for", d.name.encode("utf-8"), t.encode("utf-8"), wg_meeting_session.group_acronym_id, wg_meeting_session.meeting_id
+
+            e.save()

     import_material_kind(WgAgenda, agenda_doctype)
     import_material_kind(Minute, minutes_doctype)
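Condensed, both meeting importers now follow the same per-document pattern:
always record a new_revision event, then backfill its time and author from the
legacy upload-activity table when a matching record exists. A rough sketch
(import_upload_event, find_upload_activity and parse_activity_time are
hypothetical stand-ins for the inline code and queries above):

    # sketch of the shared flow; the two helper arguments stand in for the real
    # WgProceedingsActivities/InterimActivities queries and act_date/act_time parsing
    from ietf.idtracker.models import PersonOrOrgInfo
    from redesign.importing.utils import old_person_to_person, make_revision_event

    def import_upload_event(d, system_person, find_upload_activity, parse_activity_time):
        e = make_revision_event(d, system_person)  # idempotent, see utils below

        activities = find_upload_activity(d)
        if activities:
            a = activities[0]
            e.time = parse_activity_time(a)
            try:
                e.by = old_person_to_person(a.act_by) or system_person
            except PersonOrOrgInfo.DoesNotExist:
                pass

            d.time = e.time  # the document's timestamp follows the upload time
            d.save()
        else:
            print "NO UPLOAD ACTIVITY RECORD for", d.name.encode("utf-8")

        e.save()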

View file

@@ -1,7 +1,9 @@
+import datetime
+
 from ietf.utils import unaccent
 from ietf.person.models import Person, Email, Alias
+from ietf.doc.models import NewRevisionDocEvent
 from ietf.idtracker.models import EmailAddress
-import datetime

 def clean_email_address(addr):
     addr = addr.replace("!", "@").replace("(at)", "@") # some obvious @ replacements
@@ -158,6 +160,19 @@ def possibly_import_other_priority_email(email, old_email):
         Email.objects.create(address=addr, person=email.person,
                              time=calc_email_import_time(old_email.priority))

+def make_revision_event(doc, system_person):
+    try:
+        e = NewRevisionDocEvent.objects.get(doc=doc, type="new_revision")
+    except NewRevisionDocEvent.DoesNotExist:
+        e = NewRevisionDocEvent(doc=doc, type="new_revision")
+    e.rev = doc.rev
+    e.time = doc.time
+    e.by = system_person
+    e.desc = "Added new revision"
+    return e
+
 def dont_save_queries():
     # prevent memory from leaking when settings.DEBUG=True
     from django.db import connection
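The helper is deliberately written as get-or-create on (doc, type="new_revision"),
so the import scripts stay re-runnable without piling up duplicate events, and
callers can adjust the event before saving it. A sketch of the call pattern used
above (submitter is a hypothetical stand-in for values such as l.from_contact.person):

    # build or refetch the revision event, optionally override who/when
    # from legacy data, then save it
    e = make_revision_event(attachment, system_person)
    if submitter:
        e.by = submitter
    e.save()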