Merged in changes from the conversion branch
 - Legacy-Id: 3188

This commit is contained in: commit 91b3212330

  0  redesign/__init__.py      Normal file
  0  redesign/doc/__init__.py  Normal file
 47  redesign/doc/admin.py     Normal file
@@ -0,0 +1,47 @@
from django.contrib import admin
from models import *
from person.models import *

class DocumentAdmin(admin.ModelAdmin):
    list_display = ['name', 'rev', 'state', 'group', 'pages', 'intended_std_level', 'author_list', 'time']
    search_fields = ['name']
    raw_id_fields = ['authors', 'related', 'group', 'shepherd', 'ad']
admin.site.register(Document, DocumentAdmin)

class DocHistoryAdmin(admin.ModelAdmin):
    list_display = ['doc', 'rev', 'state', 'group', 'pages', 'intended_std_level', 'author_list', 'time']
    search_fields = ['doc__name']
    ordering = ['time', 'doc', 'rev']
    raw_id_fields = ['doc', 'authors', 'related', 'group', 'shepherd', 'ad']
admin.site.register(DocHistory, DocHistoryAdmin)

class DocAliasAdmin(admin.ModelAdmin):
    list_display = [ 'name', 'document_link', ]
    search_fields = [ 'name', 'document__name', ]
    raw_id_fields = ['document']
admin.site.register(DocAlias, DocAliasAdmin)


# events

class DocEventAdmin(admin.ModelAdmin):
    list_display = ["doc", "type", "by_raw", "time"]
    raw_id_fields = ["doc", "by"]

    def by_raw(self, instance):
        return instance.by_id
    by_raw.short_description = "By"

admin.site.register(DocEvent, DocEventAdmin)

admin.site.register(NewRevisionDocEvent, DocEventAdmin)
admin.site.register(WriteupDocEvent, DocEventAdmin)
admin.site.register(StatusDateDocEvent, DocEventAdmin)
admin.site.register(LastCallDocEvent, DocEventAdmin)
admin.site.register(TelechatDocEvent, DocEventAdmin)

class BallotPositionDocEventAdmin(DocEventAdmin):
    raw_id_fields = ["doc", "by", "ad"]

admin.site.register(BallotPositionDocEvent, BallotPositionDocEventAdmin)

264  redesign/doc/models.py  Normal file
@@ -0,0 +1,264 @@
# Copyright The IETF Trust 2007, All Rights Reserved

from django.db import models
from django.core.urlresolvers import reverse as urlreverse

from redesign.group.models import *
from redesign.name.models import *
from redesign.person.models import Email, Person
from redesign.util import admin_link

import datetime

class DocumentInfo(models.Model):
    """Any kind of document. Draft, RFC, Charter, IPR Statement, Liaison Statement"""
    time = models.DateTimeField(default=datetime.datetime.now) # should probably have auto_now=True
    # Document related
    type = models.ForeignKey(DocTypeName, blank=True, null=True) # Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email, Review, Issue, Wiki, External ...
    title = models.CharField(max_length=255)
    # State
    state = models.ForeignKey(DocStateName, blank=True, null=True) # Active/Expired/RFC/Replaced/Withdrawn
    tags = models.ManyToManyField(DocInfoTagName, blank=True, null=True) # Revised ID Needed, ExternalParty, AD Followup, ...
    stream = models.ForeignKey(DocStreamName, blank=True, null=True) # IETF, IAB, IRTF, Independent Submission
    group = models.ForeignKey(Group, blank=True, null=True) # WG, RG, IAB, IESG, Edu, Tools
    wg_state = models.ForeignKey(WgDocStateName, verbose_name="WG state", blank=True, null=True) # Not/Candidate/Active/Parked/LastCall/WriteUp/Submitted/Dead
    iesg_state = models.ForeignKey(IesgDocStateName, verbose_name="IESG state", blank=True, null=True) #
    iana_state = models.ForeignKey(IanaDocStateName, verbose_name="IANA state", blank=True, null=True)
    rfc_state = models.ForeignKey(RfcDocStateName, verbose_name="RFC state", blank=True, null=True)
    # Other
    abstract = models.TextField()
    rev = models.CharField(verbose_name="revision", max_length=16, blank=True)
    pages = models.IntegerField(blank=True, null=True)
    intended_std_level = models.ForeignKey(IntendedStdLevelName, blank=True, null=True)
    std_level = models.ForeignKey(StdLevelName, blank=True, null=True)
    ad = models.ForeignKey(Person, verbose_name="area director", related_name='ad_%(class)s_set', blank=True, null=True)
    shepherd = models.ForeignKey(Person, related_name='shepherd_%(class)s_set', blank=True, null=True)
    notify = models.CharField(max_length=255, blank=True)
    external_url = models.URLField(blank=True) # Should be set for documents with type 'External'.
    note = models.TextField(blank=True)
    internal_comments = models.TextField(blank=True)

    class Meta:
        abstract = True
    def author_list(self):
        return ", ".join(email.address for email in self.authors.all())

class RelatedDocument(models.Model):
    source = models.ForeignKey('Document')
    target = models.ForeignKey('DocAlias')
    relationship = models.ForeignKey(DocRelationshipName)
    def action(self):
        return self.relationship.name
    def inverse_action(self):
        infinitive = self.relationship.name[:-1]
        return u"%sd by" % infinitive
    def __unicode__(self):
        return u"%s %s %s" % (self.source.name, self.relationship.name.lower(), self.target.name)

class DocumentAuthor(models.Model):
    document = models.ForeignKey('Document')
    author = models.ForeignKey(Email, help_text="Email address used by author for submission")
    order = models.IntegerField(default=1)

    def __unicode__(self):
        return u"%s %s (%s)" % (self.document.name, self.author.get_name(), self.order)

    class Meta:
        ordering = ["document", "order"]

class Document(DocumentInfo):
    name = models.CharField(max_length=255, primary_key=True) # immutable
    related = models.ManyToManyField('DocAlias', through=RelatedDocument, blank=True, related_name="reversely_related_document_set")
    authors = models.ManyToManyField(Email, through=DocumentAuthor, blank=True)

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        name = self.name
        if self.state == "rfc":
            aliases = self.docalias_set.filter(name__startswith="rfc")
            if aliases:
                name = aliases[0].name
        return urlreverse('doc_view', kwargs={ 'name': name })

    def file_tag(self):
        return u"<%s>" % self.filename_with_rev()

    def filename_with_rev(self):
        # FIXME: compensate for tombstones?
        return u"%s-%s.txt" % (self.name, self.rev)

    def latest_event(self, *args, **filter_args):
        """Get latest event of optional Python type and with filter
        arguments, e.g. d.latest_event(type="xyz") returns a DocEvent
        while d.latest_event(WriteupDocEvent, type="xyz") returns a
        WriteupDocEvent event."""
        model = args[0] if args else DocEvent
        e = model.objects.filter(doc=self).filter(**filter_args).order_by('-time', '-id')[:1]
        return e[0] if e else None

    def canonical_name(self):
        name = self.name
        if self.type_id == "draft" and self.state_id == "rfc":
            a = self.docalias_set.filter(name__startswith="rfc")
            if a:
                name = a[0].name
        return name


class RelatedDocHistory(models.Model):
    source = models.ForeignKey('DocHistory')
    target = models.ForeignKey('DocAlias', related_name="reversely_related_document_history_set")
    relationship = models.ForeignKey(DocRelationshipName)
    def __unicode__(self):
        return u"%s %s %s" % (self.source.doc.name, self.relationship.name.lower(), self.target.name)

class DocHistoryAuthor(models.Model):
    document = models.ForeignKey('DocHistory')
    author = models.ForeignKey(Email)
    order = models.IntegerField()

    def __unicode__(self):
        return u"%s %s (%s)" % (self.document.doc.name, self.author.get_name(), self.order)

    class Meta:
        ordering = ["document", "order"]

class DocHistory(DocumentInfo):
    doc = models.ForeignKey(Document) # ID of the Document this relates to
    # Django 1.2 won't let us define these in the base class, so we have
    # to repeat them
    related = models.ManyToManyField('DocAlias', through=RelatedDocHistory, blank=True)
    authors = models.ManyToManyField(Email, through=DocHistoryAuthor, blank=True)
    def __unicode__(self):
        return unicode(self.doc.name)

def save_document_in_history(doc):
    def get_model_fields_as_dict(obj):
        return dict((field.name, getattr(obj, field.name))
                    for field in obj._meta.fields
                    if field is not obj._meta.pk)

    # copy fields
    fields = get_model_fields_as_dict(doc)
    fields["doc"] = doc

    dochist = DocHistory(**fields)
    dochist.save()

    # copy many to many
    for field in doc._meta.many_to_many:
        if not field.rel.through:
            # just add the attributes
            rel = getattr(dochist, field.name)
            for item in getattr(doc, field.name).all():
                rel.add(item)

    # copy remaining tricky many to many
    def transfer_fields(obj, HistModel):
        mfields = get_model_fields_as_dict(obj)
        # map doc -> dochist
        for k, v in mfields.iteritems():
            if v == doc:
                mfields[k] = dochist
        HistModel.objects.create(**mfields)

    for item in RelatedDocument.objects.filter(source=doc):
        transfer_fields(item, RelatedDocHistory)

    for item in DocumentAuthor.objects.filter(document=doc):
        transfer_fields(item, DocHistoryAuthor)

    return dochist

class DocAlias(models.Model):
    """This is used for documents that may appear under multiple names,
    and in particular for RFCs, which for continuity still keep the
    same immutable Document.name, in the tables, but will be referred
    to by RFC number, primarily, after achieving RFC status.
    """
    document = models.ForeignKey(Document)
    name = models.CharField(max_length=255, db_index=True)
    def __unicode__(self):
        return "%s-->%s" % (self.name, self.document.name)
    document_link = admin_link("document")
    class Meta:
        verbose_name = "document alias"
        verbose_name_plural = "document aliases"


EVENT_TYPES = [
    # core events
    ("new_revision", "Added new revision"),
    ("changed_document", "Changed document metadata"),

    # misc document events
    ("added_comment", "Added comment"),
    ("expired_document", "Expired document"),
    ("requested_resurrect", "Requested resurrect"),
    ("completed_resurrect", "Completed resurrect"),
    ("published_rfc", "Published RFC"),

    # IESG events
    ("started_iesg_process", "Started IESG process on document"),

    ("sent_ballot_announcement", "Sent ballot announcement"),
    ("changed_ballot_position", "Changed ballot position"),

    ("changed_ballot_approval_text", "Changed ballot approval text"),
    ("changed_ballot_writeup_text", "Changed ballot writeup text"),

    ("changed_last_call_text", "Changed last call text"),
    ("requested_last_call", "Requested last call"),
    ("sent_last_call", "Sent last call"),

    ("changed_status_date", "Changed status date"),

    ("scheduled_for_telechat", "Scheduled for telechat"),

    ("iesg_approved", "IESG approved document (no problem)"),
    ("iesg_disapproved", "IESG disapproved document (do not publish)"),

    ("approved_in_minute", "Approved in minute"),
    ]

class DocEvent(models.Model):
    """An occurrence for a document, used for tracking who, when and what."""
    time = models.DateTimeField(default=datetime.datetime.now, help_text="When the event happened")
    type = models.CharField(max_length=50, choices=EVENT_TYPES)
    by = models.ForeignKey(Person)
    doc = models.ForeignKey('doc.Document')
    desc = models.TextField()

    def __unicode__(self):
        return u"%s %s at %s" % (self.by.name, self.get_type_display().lower(), self.time)

    class Meta:
        ordering = ['-time', '-id']

class NewRevisionDocEvent(DocEvent):
    rev = models.CharField(max_length=16)

# IESG events
class BallotPositionDocEvent(DocEvent):
    ad = models.ForeignKey(Person)
    pos = models.ForeignKey(BallotPositionName, verbose_name="position", default="norecord")
    discuss = models.TextField(help_text="Discuss text if position is discuss", blank=True)
    discuss_time = models.DateTimeField(help_text="Time discuss text was written", blank=True, null=True)
    comment = models.TextField(help_text="Optional comment", blank=True)
    comment_time = models.DateTimeField(help_text="Time optional comment was written", blank=True, null=True)

class WriteupDocEvent(DocEvent):
    text = models.TextField(blank=True)

class StatusDateDocEvent(DocEvent):
    date = models.DateField(blank=True, null=True)

class LastCallDocEvent(DocEvent):
    expires = models.DateTimeField(blank=True, null=True)

class TelechatDocEvent(DocEvent):
    telechat_date = models.DateField(blank=True, null=True)
    returning_item = models.BooleanField(default=False)

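Editorial usage sketch (not part of the commit): the event helpers above are the intended way to read document state and to snapshot it before edits. The document name used here is hypothetical; the event type string and all functions come from the file above.

    # usage sketch only, not part of redesign/doc/models.py
    from redesign.doc.models import Document, WriteupDocEvent, save_document_in_history

    doc = Document.objects.get(name="draft-example-foo")   # hypothetical name (name is the primary key)

    # most recent event of any kind, and the latest ballot writeup event
    last = doc.latest_event()
    writeup = doc.latest_event(WriteupDocEvent, type="changed_ballot_writeup_text")

    # snapshot the current row (plus authors/related) into DocHistory before changing it
    save_document_in_history(doc)
    doc.rev = "01"
    doc.save()
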
899  redesign/doc/proxy.py  Normal file
@@ -0,0 +1,899 @@
from redesign.doc.models import *
from redesign.person.models import Email
from redesign.proxy_utils import TranslatingManager
from redesign.name.proxy import *

from django.conf import settings

import glob, os


class InternetDraft(Document):
    objects = TranslatingManager(dict(filename="name",
                                      filename__contains="name__contains",
                                      id_document_tag="pk",
                                      status=lambda v: ("state", { 1: 'active', 2: 'expired', 3: 'rfc', 4: 'auth-rm', 5: 'repl', 6: 'ietf-rm'}[v]),
                                      job_owner="ad",
                                      rfc_number=lambda v: ("docalias__name", "rfc%s" % v),
                                      cur_state="iesg_state__order",
                                      idinternal__primary_flag=None,
                                      idinternal__cur_state__state="iesg_state__name",
                                      ), always_filter=dict(type="draft"))

    DAYS_TO_EXPIRE=185

    # things from InternetDraft

    #id_document_tag = models.AutoField(primary_key=True)
    @property
    def id_document_tag(self):
        return self.name # Will only work for some use cases
    #title = models.CharField(max_length=255, db_column='id_document_name') # same name
    #id_document_key = models.CharField(max_length=255, editable=False)
    @property
    def id_document_key(self):
        return self.title.upper()
    #group = models.ForeignKey(Acronym, db_column='group_acronym_id')
    @property
    def group(self):
        from group.proxy import Acronym as AcronymProxy
        g = super(InternetDraft, self).group
        return AcronymProxy(g) if g else None
    #filename = models.CharField(max_length=255, unique=True)
    @property
    def filename(self):
        return self.name
    #revision = models.CharField(max_length=2)
    @property
    def revision(self):
        return self.rev
    #revision_date = models.DateField()
    @property
    def revision_date(self):
        if hasattr(self, "new_revision"):
            e = self.new_revision
        else:
            e = self.latest_event(type="new_revision")
        return e.time.date() if e else None
    # helper function
    def get_file_type_matches_from(self, glob_path):
        possible_types = [".txt", ".pdf", ".xml", ".ps"]
        res = []
        for m in glob.glob(glob_path):
            for t in possible_types:
                if m.endswith(t):
                    res.append(t)
        return ",".join(res)
    #file_type = models.CharField(max_length=20)
    @property
    def file_type(self):
        return self.get_file_type_matches_from(os.path.join(settings.INTERNET_DRAFT_PATH, self.name + "-" + self.rev + ".*")) or ".txt"
    #txt_page_count = models.IntegerField()
    @property
    def txt_page_count(self):
        return self.pages
    #local_path = models.CharField(max_length=255, blank=True) # unused
    #start_date = models.DateField()
    @property
    def start_date(self):
        e = NewRevisionDocEvent.objects.filter(doc=self).order_by("time")[:1]
        return e[0].time.date() if e else None
    #expiration_date = models.DateField()
    @property
    def expiration_date(self):
        e = self.latest_event(type__in=('expired_document', 'new_revision', "completed_resurrect"))
        return e.time.date() if e and e.type == "expired_document" else None
    #abstract = models.TextField() # same name
    #dunn_sent_date = models.DateField(null=True, blank=True) # unused
    #extension_date = models.DateField(null=True, blank=True) # unused
    #status = models.ForeignKey(IDStatus)
    @property
    def status(self):
        return IDStatus().from_object(self.state) if self.state else None

    @property
    def status_id(self):
        return { 'active': 1, 'repl': 5, 'expired': 2, 'rfc': 3, 'auth-rm': 4, 'ietf-rm': 6 }[self.state_id]

    #intended_status = models.ForeignKey(IDIntendedStatus)
    @property
    def intended_status(self):
        return self.intended_std_level

    #lc_sent_date = models.DateField(null=True, blank=True)
    @property
    def lc_sent_date(self):
        e = self.latest_event(type="sent_last_call")
        return e.time.date() if e else None

    #lc_changes = models.CharField(max_length=3) # used in DB, unused in Django code?

    #lc_expiration_date = models.DateField(null=True, blank=True)
    @property
    def lc_expiration_date(self):
        e = self.latest_event(LastCallDocEvent, type="sent_last_call")
        return e.expires.date() if e else None

    #b_sent_date = models.DateField(null=True, blank=True)
    @property
    def b_sent_date(self):
        e = self.latest_event(type="sent_ballot_announcement")
        return e.time.date() if e else None

    #b_discussion_date = models.DateField(null=True, blank=True) # unused

    #b_approve_date = models.DateField(null=True, blank=True)
    @property
    def b_approve_date(self):
        e = self.latest_event(type="iesg_approved")
        return e.time.date() if e else None

    #wgreturn_date = models.DateField(null=True, blank=True) # unused

    #rfc_number = models.IntegerField(null=True, blank=True, db_index=True)
    @property
    def rfc_number(self):
        n = self.canonical_name()
        return int(n[3:]) if n.startswith("rfc") else None

    #comments = models.TextField(blank=True) # unused

    #last_modified_date = models.DateField()
    @property
    def last_modified_date(self):
        return self.time.date()

    #replaced_by = models.ForeignKey('self', db_column='replaced_by', blank=True, null=True, related_name='replaces_set')
    @property
    def replaced_by(self):
        r = InternetDraft.objects.filter(relateddocument__target__document=self, relateddocument__relationship="replaces")
        return r[0] if r else None

    @property
    def replaced_by_id(self):
        r = self.replaced_by
        return r.id_document_tag if r else None

    #replaces = FKAsOneToOne('replaces', reverse=True)
    @property
    def replaces(self):
        r = self.replaces_set
        return r[0] if r else None

    @property
    def replaces_set(self):
        return InternetDraft.objects.filter(docalias__relateddocument__source=self, docalias__relateddocument__relationship="replaces")

    #review_by_rfc_editor = models.BooleanField()
    @property
    def review_by_rfc_editor(self):
        return bool(self.tags.filter(slug='rfc-rev'))

    #expired_tombstone = models.BooleanField()
    @property
    def expired_tombstone(self):
        return bool(self.tags.filter(slug='exp-tomb'))

    def calc_process_start_end(self):
        import datetime
        start, end = datetime.datetime.min, datetime.datetime.max
        e = self.latest_event(type="started_iesg_process")
        if e:
            start = e.time
            if self.state_id == "rfc" and self.name.startswith("draft") and not hasattr(self, "viewing_as_rfc"):
                previous_process = self.latest_event(type="started_iesg_process", time__lt=e.time)
                if previous_process:
                    start = previous_process.time
                    end = e.time
        self._process_start = start
        self._process_end = end

    @property
    def process_start(self):
        if not hasattr(self, "_process_start"):
            self.calc_process_start_end()
        return self._process_start

    @property
    def process_end(self):
        if not hasattr(self, "_process_end"):
            self.calc_process_start_end()
        return self._process_end

    #shepherd = BrokenForeignKey('PersonOrOrgInfo', null=True, blank=True, null_values=(0, )) # same name

    #idinternal = FKAsOneToOne('idinternal', reverse=True, query=models.Q(rfc_flag = 0))
    @property
    def idinternal(self):
        # since IDInternal is now merged into the document, we try to
        # guess here
        if hasattr(self, "changed_ballot_position"):
            e = self.changed_ballot_position
        else:
            e = self.latest_event(type="changed_ballot_position")
        return self if self.iesg_state or e else None

    # reverse relationship
    @property
    def authors(self):
        return IDAuthor.objects.filter(document=self)

    # methods from InternetDraft
    def displayname(self):
        return self.name
    def file_tag(self):
        return "<%s>" % self.filename_with_rev()
    def filename_with_rev(self):
        return "%s-%s.txt" % (self.filename, self.revision_display())
    def group_acronym(self):
        return super(Document, self).group.acronym
    def group_ml_archive(self):
        return self.group.list_archive
    def idstate(self):
        return self.docstate()
    def revision_display(self):
        r = int(self.revision)
        if self.state_id != 'active' and not self.expired_tombstone:
            r = max(r - 1, 0)
        return "%02d" % r
    def expiration(self):
        e = self.latest_event(type__in=("completed_resurrect", "new_revision"))
        return e.time.date() + datetime.timedelta(days=self.DAYS_TO_EXPIRE)
    def can_expire(self):
        # Copying the logic from expire-ids-1 without thinking
        # much about it.
        if self.review_by_rfc_editor:
            return False
        idinternal = self.idinternal
        if idinternal:
            cur_state_id = idinternal.cur_state_id
            # 42 is "AD is Watching"; this matches what's in the
            # expire-ids-1 perl script.
            # A better way might be to add a column to the table
            # saying whether or not a document is prevented from
            # expiring.
            if cur_state_id < 42:
                return False
        return True

    def clean_abstract(self):
        # Cleaning based on what "id-abstracts-text" script does
        import re
        a = self.abstract
        a = re.sub(" *\r\n *", "\n", a) # get rid of DOS line endings
        a = re.sub(" *\r *", "\n", a) # get rid of MAC line endings
        a = re.sub("(\n *){3,}", "\n\n", a) # get rid of excessive vertical whitespace
        a = re.sub("\f[\n ]*[^\n]*\n", "", a) # get rid of page headers
        # Get rid of 'key words' boilerplate and anything which follows it:
        # (No way that is part of the abstract...)
        a = re.sub("(?s)(Conventions [Uu]sed in this [Dd]ocument|Requirements [Ll]anguage)?[\n ]*The key words \"MUST\", \"MUST NOT\",.*$", "", a)
        # Get rid of status/copyright boilerplate
        a = re.sub("(?s)\nStatus of [tT]his Memo\n.*$", "", a)
        # wrap long lines without messing up formatting of Ok paragraphs:
        while re.match("([^\n]{72,}?) +", a):
            a = re.sub("([^\n]{72,}?) +([^\n ]*)(\n|$)", "\\1\n\\2 ", a)
        # Remove leading and trailing whitespace
        a = a.strip()
        return a


    # things from IDInternal

    #draft = models.ForeignKey(InternetDraft, primary_key=True, unique=True, db_column='id_document_tag')
    @property
    def draft(self):
        return self

    @property
    def draft_id(self):
        return self.name

    #rfc_flag = models.IntegerField(null=True)
    @property
    def rfc_flag(self):
        return self.state_id == "rfc"

    #ballot = models.ForeignKey(BallotInfo, related_name='drafts', db_column="ballot_id")
    @property
    def ballot(self):
        if not self.idinternal:
            raise BallotInfo.DoesNotExist()
        return self
    @property
    def ballot_id(self):
        return self.ballot.name

    #primary_flag = models.IntegerField(blank=True, null=True)
    @property
    def primary_flag(self):
        # left-over from multi-ballot documents which we don't really
        # support anymore, just pretend we're always primary
        return True

    #group_flag = models.IntegerField(blank=True, default=0) # not used anymore, contained the group acronym_id once upon a time (so it wasn't a flag)

    #token_name = models.CharField(blank=True, max_length=25)
    @property
    def token_name(self):
        return self.ad.name

    #token_email = models.CharField(blank=True, max_length=255)
    @property
    def token_email(self):
        return self.ad.email_address()

    #note = models.TextField(blank=True) # same name

    #status_date = models.DateField(blank=True,null=True)
    @property
    def status_date(self):
        e = self.latest_event(StatusDateDocEvent, type="changed_status_date")
        return e.date if e else None

    #email_display = models.CharField(blank=True, max_length=50) # unused
    #agenda = models.IntegerField(null=True, blank=True)
    @property
    def agenda(self):
        e = self.latest_event(TelechatDocEvent, type="scheduled_for_telechat")
        return bool(e and e.telechat_date)

    #cur_state = models.ForeignKey(IDState, db_column='cur_state', related_name='docs')
    @property
    def cur_state(self):
        return IDState().from_object(self.iesg_state) if self.iesg_state else None

    @property
    def cur_state_id(self):
        return self.iesg_state.order if self.iesg_state else None

    #prev_state = models.ForeignKey(IDState, db_column='prev_state', related_name='docs_prev')
    @property
    def prev_state(self):
        ds = self.dochistory_set.exclude(iesg_state=self.iesg_state).order_by('-time')[:1]
        return IDState().from_object(ds[0].iesg_state) if ds else None

    #assigned_to = models.CharField(blank=True, max_length=25) # unused

    #mark_by = models.ForeignKey(IESGLogin, db_column='mark_by', related_name='marked')
    @property
    def mark_by(self):
        e = self.latest_event()
        from person.proxy import IESGLogin as IESGLoginProxy
        return IESGLoginProxy().from_object(e.by) if e else None

    # job_owner = models.ForeignKey(IESGLogin, db_column='job_owner', related_name='documents')
    @property
    def job_owner(self):
        from person.proxy import IESGLogin as IESGLoginProxy
        return IESGLoginProxy().from_object(self.ad) if self.ad else None

    #event_date = models.DateField(null=True)
    @property
    def event_date(self):
        e = self.latest_event()
        return e.time if e else None

    #area_acronym = models.ForeignKey(Area)
    @property
    def area_acronym(self):
        from group.proxy import Area
        g = super(InternetDraft, self).group # be careful with group which is proxied
        if g and g.type_id != "individ":
            return Area().from_object(g.parent)
        elif self.ad:
            # return area for AD
            try:
                area = Group.objects.get(role__name="ad", role__email=self.ad, state="active")
                return Area().from_object(area)
            except Group.DoesNotExist:
                return None
        else:
            return None

    #cur_sub_state = BrokenForeignKey(IDSubState, related_name='docs', null=True, blank=True, null_values=(0, -1))
    @property
    def cur_sub_state(self):
        s = self.tags.filter(slug__in=['extpty', 'need-rev', 'ad-f-up', 'point'])
        return IDSubState().from_object(s[0]) if s else None
    @property
    def cur_sub_state_id(self):
        s = self.cur_sub_state
        return s.order if s else None

    #prev_sub_state = BrokenForeignKey(IDSubState, related_name='docs_prev', null=True, blank=True, null_values=(0, -1))
    @property
    def prev_sub_state(self):
        ds = self.dochistory_set.all().order_by('-time')[:1]
        substates = ds[0].tags.filter(slug__in=['extpty', 'need-rev', 'ad-f-up', 'point']) if ds else None
        return IDSubState().from_object(substates[0]) if substates else None
    @property
    def prev_sub_state_id(self):
        s = self.prev_sub_state
        return s.order if s else None

    #returning_item = models.IntegerField(null=True, blank=True)
    @property
    def returning_item(self):
        e = self.latest_event(TelechatDocEvent, type="scheduled_for_telechat")
        return e.returning_item if e else None

    #telechat_date = models.DateField(null=True, blank=True)
    @property
    def telechat_date(self):
        e = self.latest_event(TelechatDocEvent, type="scheduled_for_telechat")
        return e.telechat_date if e else None

    #via_rfc_editor = models.IntegerField(null=True, blank=True)
    @property
    def via_rfc_editor(self):
        return bool(self.tags.filter(slug='via-rfc'))

    #state_change_notice_to = models.CharField(blank=True, max_length=255)
    @property
    def state_change_notice_to(self):
        return self.notify

    #dnp = models.IntegerField(null=True, blank=True)
    @property
    def dnp(self):
        e = self.latest_event(type__in=("iesg_disapproved", "iesg_approved"))
        return e != None and e.type == "iesg_disapproved"

    #dnp_date = models.DateField(null=True, blank=True)
    @property
    def dnp_date(self):
        e = self.latest_event(type__in=("iesg_disapproved", "iesg_approved"))
        return e.time.date() if e != None and e.type == "iesg_disapproved" else None

    #noproblem = models.IntegerField(null=True, blank=True)
    @property
    def noproblem(self):
        e = self.latest_event(type__in=("iesg_disapproved", "iesg_approved"))
        return e != None and e.type == "iesg_approved"

    #resurrect_requested_by = BrokenForeignKey(IESGLogin, db_column='resurrect_requested_by', related_name='docsresurrected', null=True, blank=True)
    @property
    def resurrect_requested_by(self):
        e = self.latest_event(type__in=("requested_resurrect", "completed_resurrect"))
        from person.proxy import IESGLogin as IESGLoginProxy
        return IESGLoginProxy().from_object(e.by) if e and e.type == "requested_resurrect" else None

    #approved_in_minute = models.IntegerField(null=True, blank=True)
    @property
    def approved_in_minute(self):
        return self.latest_event(type="approved_in_minute")


    def get_absolute_url(self):
        if self.rfc_flag and self.rfc_number:
            return "/doc/rfc%d/" % self.rfc_number
        else:
            return "/doc/%s/" % self.name

    def document(self):
        return self

    def comments(self):
        return DocumentComment.objects.filter(doc=self).order_by('-time')

    def public_comments(self):
        return self.comments()

    def ballot_set(self):
        return [self]
    def ballot_primary(self):
        return [self]
    def ballot_others(self):
        return []
    def docstate(self):
        if self.iesg_state:
            return self.iesg_state.name
        else:
            return "I-D Exists"
    def change_state(self, state, sub_state):
        self.iesg_state = state


    # things from BallotInfo
    #active = models.BooleanField()
    @property
    def active(self):
        # taken from BallotWrapper
        return self.latest_event(type="sent_ballot_announcement") and self.iesg_state and self.iesg_state.name in ['In Last Call', 'Waiting for Writeup', 'Waiting for AD Go-Ahead', 'IESG Evaluation', 'IESG Evaluation - Defer'] and (self.state_id == "rfc" or self.state_id == "active")

    #an_sent = models.BooleanField()
    @property
    def an_sent(self):
        return bool(self.latest_event(type="iesg_approved"))

    #an_sent_date = models.DateField(null=True, blank=True)
    @property
    def an_sent_date(self):
        e = self.latest_event(type="iesg_approved")
        return e.time if e else None

    #an_sent_by = models.ForeignKey(IESGLogin, db_column='an_sent_by', related_name='ansent', null=True)
    @property
    def an_sent_by(self):
        e = self.latest_event(type="iesg_approved")
        from person.proxy import IESGLogin as IESGLoginProxy
        return IESGLoginProxy().from_object(e.by) if e else None

    #defer = models.BooleanField()
    @property
    def defer(self):
        # we're deferred if we're in the deferred state
        return self.iesg_state and self.iesg_state.name == "IESG Evaluation - Defer"

    #defer_by = models.ForeignKey(IESGLogin, db_column='defer_by', related_name='deferred', null=True)
    @property
    def defer_by(self):
        e = self.latest_event(type="changed_document", desc__startswith="State changed to <b>IESG Evaluation - Defer</b>")
        from person.proxy import IESGLogin as IESGLoginProxy
        return IESGLoginProxy().from_object(e.by) if e else None

    #defer_date = models.DateField(null=True, blank=True)
    @property
    def defer_date(self):
        e = self.latest_event(type="changed_document", desc__startswith="State changed to <b>IESG Evaluation - Defer</b>")
        return e.time.date() if e else None

    #approval_text = models.TextField(blank=True)
    @property
    def approval_text(self):
        e = self.latest_event(WriteupDocEvent, type="changed_ballot_approval_text")
        return e.text if e else ""

    #last_call_text = models.TextField(blank=True)
    @property
    def last_call_text(self):
        e = self.latest_event(WriteupDocEvent, type="changed_last_call_text")
        return e.text if e else ""

    #ballot_writeup = models.TextField(blank=True)
    @property
    def ballot_writeup(self):
        e = self.latest_event(WriteupDocEvent, type="changed_ballot_writeup_text")
        return e.text if e else ""

    #ballot_issued = models.IntegerField(null=True, blank=True)
    @property
    def ballot_issued(self):
        return bool(self.latest_event(type="sent_ballot_announcement"))

    # def remarks(self): # apparently not used
    #     remarks = list(self.discusses.all()) + list(self.comments.all())
    #     return remarks
    def active_positions(self):
        """Returns a list of dicts, with AD and Position tuples"""
        active_ads = Person.objects.filter(email__role__name="ad", email__role__group__state="active")
        res = []
        def add(ad, pos):
            from person.proxy import IESGLogin as IESGLoginProxy
            res.append(dict(ad=IESGLoginProxy().from_object(ad), pos=Position().from_object(pos) if pos else None))

        found = set()
        for pos in BallotPositionDocEvent.objects.filter(doc=self, type="changed_ballot_position", ad__in=active_ads).select_related('ad').order_by("-time", "-id"):
            if pos.ad not in found:
                found.add(pos.ad)
                add(pos.ad, pos)

        for ad in active_ads:
            if ad not in found:
                add(ad, None)

        res.sort(key=lambda x: x["ad"].last_name)

        return res

    def needed(self, standardsTrack=True):
        """Returns text answering the question "what does this document
        need to pass?".  The return value is only useful if the document
        is currently in IESG evaluation."""
        tmp = self.active_positions()
        positions = [x["pos"] for x in tmp if x["pos"]]
        ads = [x["ad"] for x in tmp]

        yes = noobj = discuss = recuse = 0
        for position in positions:
            p = position.pos_id
            if p == "yes":
                yes += 1
            if p == "noobj":
                noobj += 1
            if p == "discuss":
                discuss += 1
            if p == "recuse":
                recuse += 1
        answer = ''
        if yes < 1:
            answer += "Needs a YES. "
        if discuss > 0:
            if discuss == 1:
                answer += "Has a DISCUSS. "
            else:
                answer += "Has %d DISCUSSes. " % discuss
        if standardsTrack:
            # For standards-track, need positions from 2/3 of the
            # non-recused current IESG.
            needed = int((len(ads) - recuse) * 2 / 3)
        else:
            # Info and experimental only need one position.
            needed = 1
        have = yes + noobj + discuss
        if have < needed:
            more = needed - have
            if more == 1:
                answer += "Needs %d more position. " % more
            else:
                answer += "Needs %d more positions. " % more
        else:
            answer += "Has enough positions to pass"
            if discuss:
                answer += " once DISCUSSes are resolved"
            answer += ". "

        return answer.rstrip()


    # things from RfcIndex

    #rfc_number = models.IntegerField(primary_key=True) # already taken care of
    #title = models.CharField(max_length=250) # same name
    #authors = models.CharField(max_length=250) # exists already
    #rfc_published_date = models.DateField()
    @property
    def rfc_published_date(self):
        if hasattr(self, 'published_rfc'):
            e = self.published_rfc
        else:
            e = self.latest_event(type="published_rfc")
        return e.time.date() if e else datetime.date(1990,1,1)

    #current_status = models.CharField(max_length=50,null=True)
    @property
    def current_status(self):
        return self.std_level.name

    #updates = models.CharField(max_length=200,blank=True,null=True)
    @property
    def updates(self):
        return ",".join("RFC%s" % n for n in sorted(d.rfc_number for d in InternetDraft.objects.filter(docalias__relateddocument__source=self, docalias__relateddocument__relationship="updates")))

    #updated_by = models.CharField(max_length=200,blank=True,null=True)
    @property
    def updated_by(self):
        if not hasattr(self, "updated_by_list"):
            self.updated_by_list = [d.rfc_number for d in InternetDraft.objects.filter(relateddocument__target__document=self, relateddocument__relationship="updates")]
        return ",".join("RFC%s" % n for n in sorted(self.updated_by_list))

    #obsoletes = models.CharField(max_length=200,blank=True,null=True)
    @property
    def obsoletes(self):
        return ",".join("RFC%s" % n for n in sorted(d.rfc_number for d in InternetDraft.objects.filter(docalias__relateddocument__source=self, docalias__relateddocument__relationship="obs")))

    #obsoleted_by = models.CharField(max_length=200,blank=True,null=True)
    @property
    def obsoleted_by(self):
        if not hasattr(self, "obsoleted_by_list"):
            self.obsoleted_by_list = [d.rfc_number for d in InternetDraft.objects.filter(relateddocument__target__document=self, relateddocument__relationship="obs")]
        return ",".join("RFC%s" % n for n in sorted(self.obsoleted_by_list))

    #also = models.CharField(max_length=50,blank=True,null=True)
    @property
    def also(self):
        aliases = self.docalias_set.filter(models.Q(name__startswith="bcp") |
                                           models.Q(name__startswith="std") |
                                           models.Q(name__startswith="fyi")) # original filtered on "bcp" twice; the third sub-series is presumably FYI
        return aliases[0].name.upper() if aliases else None

    #draft = models.CharField(max_length=200,null=True) # have to ignore this, it's already implemented

    #has_errata = models.BooleanField()
    @property
    def has_errata(self):
        return bool(self.tags.filter(slug="errata"))

    #stream = models.CharField(max_length=15,blank=True,null=True)
    @property
    def stream(self):
        return super(InternetDraft, self).stream.name

    #wg = models.CharField(max_length=15,blank=True,null=True)
    @property
    def wg(self):
        return self.group.acronym

    #file_formats = models.CharField(max_length=20,blank=True,null=True)
    @property
    def file_formats(self):
        return self.get_file_type_matches_from(os.path.join(settings.RFC_PATH, "rfc" + str(self.rfc_number) + ".*")).replace(".", "").replace("txt", "ascii")

    @property
    def positions(self):
        res = []
        found = set()
        for pos in Position.objects.filter(doc=self, type="changed_ballot_position").select_related('ad').order_by("-time", "-id"):
            if pos.ad not in found:
                found.add(pos.ad)
                res.append(pos)

        class Dummy:
            def all(self):
                return self.res
        d = Dummy()
        d.res = res
        return d

    @property
    def ipr(self):
        from ipr.models import IprDraftProxy
        return IprDraftProxy.objects.filter(doc_alias__document=self.pk)

    class Meta:
        proxy = True

IDInternal = InternetDraft
BallotInfo = InternetDraft
RfcIndex = InternetDraft
Rfc = InternetDraft


class IDAuthor(DocumentAuthor):
    #document = models.ForeignKey(InternetDraft, db_column='id_document_tag', related_name='authors') # same name
    #person = models.ForeignKey(PersonOrOrgInfo, db_column='person_or_org_tag')
    @property
    def person(self):
        return self.author.person

    #author_order = models.IntegerField()
    @property
    def author_order(self):
        return self.order

    def email(self):
        return None if self.author.address.startswith("unknown-email") else self.author.address

    def final_author_order(self):
        return self.order

    class Meta:
        proxy = True

class DocumentComment(DocEvent):
    objects = TranslatingManager(dict(comment_text="desc",
                                      date="time"
                                      ))

    BALLOT_DISCUSS = 1
    BALLOT_COMMENT = 2
    BALLOT_CHOICES = (
        (BALLOT_DISCUSS, 'discuss'),
        (BALLOT_COMMENT, 'comment'),
    )
    #document = models.ForeignKey(IDInternal)
    @property
    def document(self):
        return self.doc
    #rfc_flag = models.IntegerField(null=True, blank=True)
    #public_flag = models.BooleanField() #unused
    #date = models.DateField(db_column='comment_date', default=datetime.date.today)
    @property
    def date(self):
        return self.time.date()
    #time = models.CharField(db_column='comment_time', max_length=20, default=lambda: datetime.datetime.now().strftime("%H:%M:%S"))
    #version = models.CharField(blank=True, max_length=3)
    @property
    def version(self):
        e = self.doc.latest_event(NewRevisionDocEvent, type="new_revision", time__lte=self.time)
        return e.rev if e else "0"
    #comment_text = models.TextField(blank=True)
    @property
    def comment_text(self):
        return self.desc
    #created_by = BrokenForeignKey(IESGLogin, db_column='created_by', null=True, null_values=(0, 999))
    #result_state = BrokenForeignKey(IDState, db_column='result_state', null=True, related_name="comments_leading_to_state", null_values=(0, 99))
    #origin_state = models.ForeignKey(IDState, db_column='origin_state', null=True, related_name="comments_coming_from_state")
    #ballot = models.IntegerField(null=True, choices=BALLOT_CHOICES)
    def get_absolute_url(self):
        return "/doc/%s/" % self.doc.name
    def get_author(self):
        return self.by.name
    def get_username(self):
        return unicode(self.by)
    def get_fullname(self):
        return self.by.name
    def datetime(self):
        return self.time
    def doc_id(self):
        return self.doc_id
    def __str__(self):
        return "\"%s...\" by %s" % (self.comment_text[:20], self.get_author())

    class Meta:
        proxy = True


class Position(BallotPositionDocEvent):
    def from_object(self, base):
        for f in base._meta.fields:
            if not f.name in ('discuss',): # don't overwrite properties
                setattr(self, f.name, getattr(base, f.name))
        return self

    #ballot = models.ForeignKey(BallotInfo, related_name='positions')
    @property
    def ballot(self):
        return self.doc # FIXME: doesn't emulate old interface

    # ad = models.ForeignKey(IESGLogin) # same name
    #yes = models.IntegerField(db_column='yes_col')
    @property
    def yes(self):
        return self.pos_id == "yes"
    #noobj = models.IntegerField(db_column='no_col')
    @property
    def noobj(self):
        return self.pos_id == "noobj"
    #abstain = models.IntegerField()
    @property
    def abstain(self):
        return self.pos_id == "abstain"
    #approve = models.IntegerField(default=0) # unused
    #discuss = models.IntegerField()
    # needs special treatment because of clash with attribute on base class
    def get_discuss(self):
        return self.pos_id == "discuss"
    def set_discuss(self, x):
        pass
    discuss = property(get_discuss, set_discuss)
    #recuse = models.IntegerField()
    @property
    def recuse(self):
        return self.pos_id == "recuse"
    def __str__(self):
        return "Position for %s on %s" % ( self.ad, self.ballot )
    def abstain_ind(self):
        if self.recuse:
            return 'R'
        if self.abstain:
            return 'X'
        else:
            return ' '
    def name(self):
        return self.pos.name if self.pos else "No Record"

    class Meta:
        proxy = True

class DraftLikeDocAlias(DocAlias):
    # this class is mostly useful for the IPR part

    def __str__(self):
        return str(unicode(self))

    def __unicode__(self):
        if self.name.startswith("rfc"):
            return "RFC%04d" % int(self.name[3:])
        else:
            return self.name

    @property
    def id_document_tag(self):
        return self.name

    @property
    def title(self):
        return self.document.title

    @property
    def filename(self):
        return self.name

    @property
    def ipr(self):
        from ipr.models import IprDraftProxy
        return IprDraftProxy.objects.filter(doc_alias=self.pk)

    class Meta:
        proxy = True

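Editorial usage sketch (not part of the commit): the proxy classes are meant to let legacy code keep issuing old-schema queries. Assuming TranslatingManager (defined in redesign/proxy_utils.py, not shown here) rewrites the lookup keys according to the dict at the top of InternetDraft, a legacy-style query would map onto the new schema like this; the draft name is hypothetical.

    # usage sketch only, not part of redesign/doc/proxy.py
    from redesign.doc.proxy import InternetDraft

    d = InternetDraft.objects.get(filename="draft-example-foo")  # "filename" is translated to "name"
    active = InternetDraft.objects.filter(status=1)              # legacy status 1 is translated to state="active"
    print d.revision, d.file_tag(), d.rfc_number                 # rfc_number is None until the draft is an RFC
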
  0  redesign/group/__init__.py  Normal file
 13  redesign/group/admin.py     Normal file
@@ -0,0 +1,13 @@
from django.contrib import admin
from models import *

class GroupAdmin(admin.ModelAdmin):
    list_display = ["acronym", "name", "type"]
    search_fields = ["name"]
    ordering = ["name"]
    raw_id_fields = ["charter"]

admin.site.register(Group, GroupAdmin)
admin.site.register(GroupHistory)

admin.site.register(Role)
100  redesign/group/models.py  Normal file
@@ -0,0 +1,100 @@
# Copyright The IETF Trust 2007, All Rights Reserved

from django.db import models
from redesign.name.models import *
from redesign.person.models import Email, Person

import datetime

class Group(models.Model):
    time = models.DateTimeField(default=datetime.datetime.now) # should probably have auto_now=True
    name = models.CharField(max_length=80)
    acronym = models.CharField(max_length=16, db_index=True)
    state = models.ForeignKey(GroupStateName, null=True)
    type = models.ForeignKey(GroupTypeName, null=True)
    charter = models.OneToOneField('doc.Document', related_name='chartered_group', blank=True, null=True)
    parent = models.ForeignKey('Group', blank=True, null=True)
    ad = models.ForeignKey(Person, blank=True, null=True)
    list_email = models.CharField(max_length=64, blank=True)
    list_subscribe = models.CharField(max_length=255, blank=True)
    list_archive = models.CharField(max_length=255, blank=True)
    comments = models.TextField(blank=True)
    def __unicode__(self):
        return self.name
    def latest_event(self, *args, **filter_args):
        """Get latest group event with filter arguments, e.g.
        g.latest_event(type="xyz")."""
        e = GroupEvent.objects.filter(group=self).filter(**filter_args).order_by('-time', '-id')[:1]
        return e[0] if e else None

class GroupURL(models.Model):
    group = models.ForeignKey(Group)
    name = models.CharField(max_length=255)
    url = models.URLField(verify_exists=False)

class GroupMilestone(models.Model):
    group = models.ForeignKey(Group)
    desc = models.TextField()
    expected_due_date = models.DateField()
    done = models.BooleanField()
    done_date = models.DateField(null=True, blank=True)
    time = models.DateTimeField(auto_now=True)
    def __unicode__(self):
        return self.desc[:20] + "..."
    class Meta:
        ordering = ['expected_due_date']

GROUP_EVENT_CHOICES = [("proposed", "Proposed group"),
                       ("started", "Started group"),
                       ("concluded", "Concluded group"),
                       ]

class GroupEvent(models.Model):
    """An occurrence for a group, used for tracking who, when and what."""
    group = models.ForeignKey(Group)
    time = models.DateTimeField(default=datetime.datetime.now, help_text="When the event happened")
    type = models.CharField(max_length=50, choices=GROUP_EVENT_CHOICES)
    by = models.ForeignKey(Person)
    desc = models.TextField()

    def __unicode__(self):
        return u"%s %s at %s" % (self.by.name, self.get_type_display().lower(), self.time)

    class Meta:
        ordering = ['-time', 'id']

# This will actually be extended from Groups, but that requires Django 1.0
# This will record the new state and the date it occurred for any changes
# to a group.  The group acronym must be unique and is the invariant used
# to select group history from this table.
# FIXME: this class needs to be updated
class GroupHistory(models.Model):
    group = models.ForeignKey('Group', related_name='group_history')
    # Event related
    time = models.DateTimeField()
    comment = models.TextField()
    who = models.ForeignKey(Email, related_name='group_changes')
    # inherited from Group:
    name = models.CharField(max_length=64)
    acronym = models.CharField(max_length=16)
    state = models.ForeignKey(GroupStateName)
    type = models.ForeignKey(GroupTypeName)
    charter = models.ForeignKey('doc.Document', related_name='chartered_group_history')
    parent = models.ForeignKey('Group')
    chairs = models.ManyToManyField(Email, related_name='chaired_groups_history')
    list_email = models.CharField(max_length=64)
    list_pages = models.CharField(max_length=64)
    comments = models.TextField(blank=True)
    def __unicode__(self):
        return self.group.name
    class Meta:
        verbose_name_plural="Group histories"

class Role(models.Model):
    name = models.ForeignKey(RoleName)
    group = models.ForeignKey(Group)
    email = models.ForeignKey(Email, help_text="Email address used by person for this role")
    auth = models.CharField(max_length=255, blank=True) # unused?
    def __unicode__(self):
        return u"%s is %s in %s" % (self.email.get_name(), self.name.name, self.group.acronym)

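Editorial usage sketch (not part of the commit): Group state, membership and history are read through Role and GroupEvent. The acronym is hypothetical; the "chair" role lookup assumes RoleName is keyed by its slug, mirroring the role__name="chair" filters used in redesign/group/proxy.py, and "started" comes from GROUP_EVENT_CHOICES above.

    # usage sketch only, not part of redesign/group/models.py
    from redesign.group.models import Group, Role

    wg = Group.objects.get(acronym="mpls")                  # hypothetical working group
    chairs = Role.objects.filter(group=wg, name="chair")    # who currently chairs it
    started = wg.latest_event(type="started")               # when the group was started, if recorded
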
199  redesign/group/proxy.py  Normal file
@@ -0,0 +1,199 @@
from redesign.proxy_utils import TranslatingManager

from models import *

class Acronym(Group):
    class LazyIndividualSubmitter(object):
        def __get__(self, obj, type=None):
            return Group.objects.get(acronym="none").id

    INDIVIDUAL_SUBMITTER = LazyIndividualSubmitter()

    def from_object(self, base):
        for f in base._meta.fields:
            setattr(self, f.name, getattr(base, f.name))
        return self

    #acronym_id = models.AutoField(primary_key=True)
    @property
    def acronym_id(self):
        raise NotImplementedError
    #acronym = models.CharField(max_length=12) # same name
    #name = models.CharField(max_length=100) # same name
    #name_key = models.CharField(max_length=50, editable=False)
    @property
    def name_key(self):
        return self.name.upper()

    def __str__(self):
        return self.acronym

    def __unicode__(self):
        return self.acronym

    class Meta:
        proxy = True

class Area(Group):
    def from_object(self, base):
        for f in base._meta.fields:
            setattr(self, f.name, getattr(base, f.name))
        return self

    ACTIVE=1
    #area_acronym = models.OneToOneField(Acronym, primary_key=True)
    @property
    def area_acronym(self):
        return Acronym().from_object(self)

    #start_date = models.DateField(auto_now_add=True)
    #concluded_date = models.DateField(null=True, blank=True)
    #status = models.ForeignKey(AreaStatus)
    @property
    def status_id(self):
        return { "active": 1, "dormant": 2, "conclude": 3 }[self.state_id]
    #comments = models.TextField(blank=True)
    #last_modified_date = models.DateField(auto_now=True)
    @property
    def last_modified_date(self):
        return self.time.date()
    #extra_email_addresses = models.TextField(blank=True,null=True)

    #def additional_urls(self):
    #    return AreaWGURL.objects.filter(name=self.area_acronym.name)
    def active_wgs(self):
        return IETFWG.objects.filter(type="wg", state="active", parent=self).select_related('type', 'state', 'parent').order_by("acronym")

    @staticmethod
    def active_areas():
        return Area.objects.filter(type="area", state="active").select_related('type', 'state', 'parent').order_by('acronym')

    def __str__(self):
        return self.acronym
    def __unicode__(self):
        return self.acronym

    class Meta:
        proxy = True

def proxied_role_emails(emails):
    for e in emails:
        e.person.email = { 1: e }
    return emails

class IETFWG(Group):
    objects = TranslatingManager(dict(group_acronym="id",
                                      group_acronym__acronym="acronym",
                                      email_archive__startswith="list_archive__startswith",
                                      group_type=lambda v: ("type", { 1: "wg" }[int(v)]),
                                      status=lambda v: ("state", { 1: "active" }[int(v)]),
                                      areagroup__area__status=lambda v: ("parent__state", { 1: "active" }[v]),
                                      start_date__isnull=lambda v: None if v else ("groupevent__type", "started")
                                      ),
                                 always_filter=dict(type__in=("wg", "individ")))

    ACTIVE=1
    #group_acronym = models.OneToOneField(Acronym, primary_key=True, editable=False)
    @property
    def group_acronym(self):
        return Acronym().from_object(self)

    #group_type = models.ForeignKey(WGType)
    #proposed_date = models.DateField(null=True, blank=True)
    #start_date = models.DateField(null=True, blank=True)
    @property
    def start_date(self):
        e = self.latest_event(type="started")
        return e.time.date() if e else None

    #dormant_date = models.DateField(null=True, blank=True)
    #concluded_date = models.DateField(null=True, blank=True)
    #status = models.ForeignKey(WGStatus)
    @property
    def status_id(self):
        return { "active": 1, "dormant": 2, "conclude": 3 }[self.state_id]
    #area_director = models.ForeignKey(AreaDirector, null=True)
    #meeting_scheduled = models.CharField(blank=True, max_length=3)
    #email_address = models.CharField(blank=True, max_length=60)
    @property
    def email_address(self):
        return self.list_email
    #email_subscribe = models.CharField(blank=True, max_length=120)
    @property
    def email_subscribe(self):
        return self.list_subscribe
    #email_keyword = models.CharField(blank=True, max_length=50)
    #email_archive = models.CharField(blank=True, max_length=95)
    @property
    def email_archive(self):
        return self.list_archive
    #comments = models.TextField(blank=True)
    #last_modified_date = models.DateField()
    @property
    def last_modified_date(self):
        return self.time.date()
    #meeting_scheduled_old = models.CharField(blank=True, max_length=3)
    #area = FKAsOneToOne('areagroup', reverse=True)
    @property
    def area(self):
        class AreaGroup: pass
        if self.parent:
            areagroup = AreaGroup()
            areagroup.area = Area().from_object(self.parent)
            return areagroup
        else:
            return None

    def __str__(self):
        return self.group_acronym.acronym

    def __unicode__(self):
        return self.group_acronym.acronym

    def active_drafts(self):
        from redesign.doc.proxy import InternetDraft
        return InternetDraft.objects.filter(group=self, state="active")
    # def choices():
    #     return [(wg.group_acronym_id, wg.group_acronym.acronym) for wg in IETFWG.objects.all().filter(group_type__type='WG').select_related().order_by('acronym.acronym')]
    # choices = staticmethod(choices)
    def area_acronym(self):
        return Area().from_object(self.parent) if self.parent else None
    def area_directors(self):
        if not self.parent:
            return None
        return proxied_role_emails(sorted(Email.objects.filter(role__group=self.parent, role__name="ad"), key=lambda e: e.person.name_parts()[3]))
    def chairs(self): # return a set of WGChair objects for this work group
        return proxied_role_emails(sorted(Email.objects.filter(role__group=self, role__name="chair"), key=lambda e: e.person.name_parts()[3]))
    # def secretaries(self): # return a set of WGSecretary objects for this group
    #     return WGSecretary.objects.filter(group_acronym__exact=self.group_acronym)
    # def milestones(self): # return a set of GoalMilestone objects for this group
    #     return GoalMilestone.objects.filter(group_acronym__exact=self.group_acronym)
    # def rfcs(self): # return a set of Rfc objects for this group
    #     return Rfc.objects.filter(group_acronym__exact=self.group_acronym)
    # def drafts(self): # return a set of Rfc objects for this group
    #     return InternetDraft.objects.filter(group__exact=self.group_acronym)
    def charter_text(self): # return string containing WG description read from file
|
||||
import os
|
||||
from django.conf import settings
|
||||
# get file path from settings. Synthesize the file name from path, acronym, and suffix
|
||||
try:
|
||||
filename = os.path.join(settings.IETFWG_DESCRIPTIONS_PATH, self.acronym) + ".desc.txt"
|
||||
desc_file = open(filename)
|
||||
desc = desc_file.read()
|
||||
except BaseException:
|
||||
desc = 'Error Loading Work Group Description'
|
||||
return desc
|
||||
|
||||
def additional_urls(self):
|
||||
return self.groupurl_set.all().order_by("name")
|
||||
def clean_email_archive(self):
|
||||
return self.list_archive
|
||||
def wgchair_set(self):
|
||||
# gross hack ...
|
||||
class Dummy: pass
|
||||
d = Dummy()
|
||||
d.all = self.chairs()
|
||||
return d
|
||||
|
||||
class Meta:
|
||||
proxy = True
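# Usage sketch (illustrative, run e.g. from a Django shell; "foobar" is a
# placeholder acronym): the proxies above let old idtracker-style call sites
# keep working against the redesigned Group table.
for area in Area.active_areas():
    print area.area_acronym.acronym, area.status_id      # Acronym facade + mapped status

wg = IETFWG.objects.get(group_acronym__acronym="foobar")  # translated to acronym="foobar"
print wg.area.area.area_acronym.acronym                   # old-style traversal via the area property
print wg.email_archive, wg.last_modified_date             # mapped from list_archive / time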
|
0
redesign/importing/__init__.py
Normal file
162
redesign/importing/import-announcements.py
Executable file
|
@ -0,0 +1,162 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import sys, os, re, datetime
|
||||
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
|
||||
from ietf import settings
|
||||
settings.USE_DB_REDESIGN_PROXY_CLASSES = False
|
||||
settings.IMPORTING_ANNOUNCEMENTS = True
|
||||
|
||||
from django.core import management
|
||||
management.setup_environ(settings)
|
||||
|
||||
from redesign.person.models import *
|
||||
from redesign.group.models import *
|
||||
from redesign.name.utils import name
|
||||
from redesign.importing.utils import old_person_to_person
|
||||
from ietf.announcements.models import Message, SendQueue
|
||||
from ietf.announcements.models import Announcement, PersonOrOrgInfo, AnnouncedTo, AnnouncedFrom, ScheduledAnnouncement
|
||||
from ietf.idtracker.models import IESGLogin
|
||||
|
||||
# assumptions:
|
||||
# - nomcom groups have been imported
|
||||
# - persons have been imported (Announcement originators and IESGLogins)
|
||||
|
||||
# imports Announcement, ScheduledAnnouncement
|
||||
|
||||
system = Person.objects.get(name="(System)")
|
||||
|
||||
# Announcement
|
||||
for o in Announcement.objects.all().select_related('announced_to', 'announced_from').order_by('announcement_id').iterator():
|
||||
print "importing Announcement", o.pk
|
||||
try:
|
||||
message = Message.objects.get(id=o.announcement_id)
|
||||
except Message.DoesNotExist:
|
||||
message = Message(id=o.announcement_id)
|
||||
|
||||
message.time = datetime.datetime.combine(o.announced_date,
|
||||
datetime.time(*(int(x) for x in o.announced_time.split(":"))))
|
||||
|
||||
try:
|
||||
x = o.announced_by
|
||||
except PersonOrOrgInfo.DoesNotExist:
|
||||
message.by = system
|
||||
else:
|
||||
if not o.announced_by.first_name and o.announced_by.last_name == 'None':
|
||||
message.by = system
|
||||
else:
|
||||
message.by = old_person_to_person(o.announced_by)
|
||||
|
||||
message.subject = o.subject.strip()
|
||||
if o.announced_from_id == 99:
|
||||
message.frm = o.other_val or ""
|
||||
elif o.announced_from_id == 18 and o.nomcom_chair_id != 0:
|
||||
message.frm = u"%s <%s>" % o.nomcom_chair.person.email()
|
||||
else:
|
||||
if '<' in o.announced_from.announced_from:
|
||||
message.frm = o.announced_from.announced_from
|
||||
else:
|
||||
message.frm = u"%s <%s>" % (o.announced_from.announced_from, o.announced_from.email)
|
||||
if o.announced_to_id == 99:
|
||||
message.to = o.other_val or ""
|
||||
else:
|
||||
try:
|
||||
message.to = u"%s <%s>" % (o.announced_to.announced_to, o.announced_to.email)
|
||||
except AnnouncedTo.DoesNotExist:
|
||||
message.to = ""
|
||||
|
||||
message.cc = o.cc or ""
|
||||
for l in (o.extra or "").strip().replace("^", "\n").replace("\r", "").split("\n"):
|
||||
l = l.strip()
|
||||
if l.lower().startswith("bcc:"):
|
||||
message.bcc = l[len("bcc:"):].strip()
|
||||
elif l.lower().startswith("reply-to:"):
|
||||
message.reply_to = l[len("reply-to:"):].strip()
|
||||
message.body = o.text
|
||||
message.save()
|
||||
|
||||
message.related_groups.clear()
|
||||
|
||||
if o.nomcom:
|
||||
nomcom = Group.objects.filter(role__name="chair",
|
||||
role__email__person__id=o.nomcom_chair.person.pk,
|
||||
acronym__startswith="nomcom").exclude(acronym="nomcom").get()
|
||||
|
||||
message.related_groups.add(nomcom)
|
||||
|
||||
|
||||
# precompute scheduled_by's to speed up the loop a bit
|
||||
scheduled_by_mapping = {}
|
||||
for by in ScheduledAnnouncement.objects.all().values_list("scheduled_by", flat=True).distinct():
|
||||
logins = IESGLogin.objects.filter(login_name=by)
|
||||
if logins:
|
||||
l = logins[0]
|
||||
person = l.person
|
||||
if not person:
|
||||
person = PersonOrOrgInfo.objects.get(first_name=l.first_name, last_name=l.last_name)
|
||||
found = old_person_to_person(person)
|
||||
else:
|
||||
found = system
|
||||
|
||||
print "mapping", by, "to", found
|
||||
scheduled_by_mapping[by] = found
|
||||
|
||||
# ScheduledAnnouncement
|
||||
for o in ScheduledAnnouncement.objects.all().order_by('id').iterator():
|
||||
print "importing ScheduledAnnouncement", o.pk
|
||||
try:
|
||||
q = SendQueue.objects.get(id=o.id)
|
||||
except SendQueue.DoesNotExist:
|
||||
q = SendQueue(id=o.id)
|
||||
# make sure there's no id overlap with ordinary already-imported announcements
|
||||
q.message = Message(id=o.id + 4000)
|
||||
|
||||
time = datetime.datetime.combine(o.scheduled_date,
|
||||
datetime.time(*(int(x) for x in o.scheduled_time.split(":"))))
|
||||
by = scheduled_by_mapping[o.scheduled_by]
|
||||
|
||||
q.message.time = time
|
||||
q.message.by = by
|
||||
|
||||
q.message.subject = (o.subject or "").strip()
|
||||
q.message.to = (o.to_val or "").strip()
|
||||
q.message.frm = (o.from_val or "").strip()
|
||||
q.message.cc = (o.cc_val or "").strip()
|
||||
q.message.bcc = (o.bcc_val or "").strip()
|
||||
q.message.reply_to = (o.replyto or "").strip()
|
||||
q.message.body = o.body or ""
|
||||
q.message.content_type = o.content_type or ""
|
||||
q.message.save()
|
||||
|
||||
q.time = time
|
||||
q.by = by
|
||||
|
||||
d = None
|
||||
if o.to_be_sent_date:
|
||||
try:
|
||||
t = datetime.time(*(int(x) for x in o.to_be_sent_time.split(":")))
|
||||
except ValueError:
|
||||
t = datetime.time(0, 0, 0)
|
||||
d = datetime.datetime.combine(o.to_be_sent_date, t)
|
||||
|
||||
q.send_at = d
|
||||
|
||||
d = None
|
||||
if o.actual_sent_date:
|
||||
try:
|
||||
t = datetime.time(*(int(x) for x in o.scheduled_time.split(":")))
|
||||
except ValueError:
|
||||
t = datetime.time(0, 0, 0)
|
||||
|
||||
d = datetime.datetime.combine(o.actual_sent_date, t)
|
||||
|
||||
q.sent_at = d
|
||||
|
||||
n = (o.note or "").strip()
|
||||
if n.startswith("<br>"):
|
||||
n = n[len("<br>"):]
|
||||
q.note = n
|
||||
|
||||
q.save()
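# The date/time stitching above always follows the same pattern: an old
# DateField plus a "HH:MM:SS" string column.  A small helper capturing it
# (sketch only; the script above deliberately inlines this logic):
def combine_date_and_time(d, time_str):
    try:
        t = datetime.time(*(int(x) for x in (time_str or "").split(":")))
    except ValueError:
        t = datetime.time(0, 0, 0)    # fall back to midnight on malformed input
    return datetime.datetime.combine(d, t)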
|
1011
redesign/importing/import-document-state.py
Executable file
File diff suppressed because it is too large
274
redesign/importing/import-groups.py
Executable file
|
@ -0,0 +1,274 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import sys, os, datetime
|
||||
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
|
||||
from ietf import settings
|
||||
settings.USE_DB_REDESIGN_PROXY_CLASSES = False
|
||||
|
||||
from django.core import management
|
||||
management.setup_environ(settings)
|
||||
|
||||
|
||||
from redesign.group.models import *
|
||||
from redesign.name.models import *
|
||||
from redesign.name.utils import name
|
||||
from redesign.importing.utils import old_person_to_person
|
||||
from ietf.idtracker.models import AreaGroup, IETFWG, Area, AreaGroup, Acronym, AreaWGURL, IRTF, ChairsHistory, Role, AreaDirector
|
||||
|
||||
# imports IETFWG, Area, AreaGroup, Acronym, IRTF, AreaWGURL
|
||||
|
||||
# also creates nomcom groups
|
||||
|
||||
# assumptions: persons have been imported
|
||||
|
||||
state_names = dict(
|
||||
bof=name(GroupStateName, slug="bof", name="BOF"),
|
||||
proposed=name(GroupStateName, slug="proposed", name="Proposed"),
|
||||
active=name(GroupStateName, slug="active", name="Active"),
|
||||
dormant=name(GroupStateName, slug="dormant", name="Dormant"),
|
||||
conclude=name(GroupStateName, slug="conclude", name="Concluded"),
|
||||
unknown=name(GroupStateName, slug="unknown", name="Unknown"),
|
||||
)
|
||||
|
||||
type_names = dict(
|
||||
ietf=name(GroupTypeName, slug="ietf", name="IETF"),
|
||||
area=name(GroupTypeName, slug="area", name="Area"),
|
||||
ag=name(GroupTypeName, slug="ag", name="AG"),
|
||||
wg=name(GroupTypeName, slug="wg", name="WG"),
|
||||
rg=name(GroupTypeName, slug="rg", name="RG"),
|
||||
team=name(GroupTypeName, slug="team", name="Team"),
|
||||
individ=name(GroupTypeName, slug="individ", name="Individual"),
|
||||
)
|
||||
|
||||
# make sure we got the IESG so we can use it as parent for areas
|
||||
iesg_group, _ = Group.objects.get_or_create(acronym="iesg")
|
||||
iesg_group.name = "IESG"
|
||||
iesg_group.state = state_names["active"]
|
||||
iesg_group.type = type_names["ietf"]
|
||||
iesg_group.save()
|
||||
|
||||
# make sure we got the IRTF as parent for RGs
|
||||
irtf_group, _ = Group.objects.get_or_create(acronym="irtf")
|
||||
irtf_group.name = "IRTF"
|
||||
irtf_group.state = state_names["active"]
|
||||
irtf_group.type = type_names["ietf"]
|
||||
irtf_group.save()
|
||||
|
||||
# create Secretariat for use with roles
|
||||
secretariat_group, _ = Group.objects.get_or_create(acronym="secretariat")
|
||||
secretariat_group.name = "IETF Secretariat"
|
||||
secretariat_group.state = state_names["active"]
|
||||
secretariat_group.type = type_names["ietf"]
|
||||
secretariat_group.save()
|
||||
|
||||
system = Person.objects.get(name="(System)")
|
||||
|
||||
|
||||
# NomCom
|
||||
for o in ChairsHistory.objects.filter(chair_type=Role.NOMCOM_CHAIR).order_by("start_year"):
|
||||
print "importing ChairsHistory/Nomcom", o.pk, "nomcom%s" % o.start_year
|
||||
group, _ = Group.objects.get_or_create(acronym="nomcom%s" % o.start_year)
|
||||
group.name = "IAB/IESG Nominating Committee %s/%s" % (o.start_year, o.end_year)
|
||||
if o.chair_type.person == o.person:
|
||||
s = state_names["active"]
|
||||
else:
|
||||
s = state_names["conclude"]
|
||||
group.state = s
|
||||
group.type = type_names["ietf"]
|
||||
group.parent = None
|
||||
group.save()
|
||||
|
||||
# we need start/end year so fudge events
|
||||
group.groupevent_set.all().delete()
|
||||
|
||||
e = GroupEvent(group=group, type="started")
|
||||
e.time = datetime.datetime(o.start_year, 5, 1, 12, 0, 0)
|
||||
e.by = system
|
||||
e.desc = e.get_type_display()
|
||||
e.save()
|
||||
|
||||
e = GroupEvent(group=group, type="concluded")
|
||||
e.time = datetime.datetime(o.end_year, 5, 1, 12, 0, 0)
|
||||
e.by = system
|
||||
e.desc = e.get_type_display()
|
||||
e.save()
|
||||
|
||||
# Area
|
||||
for o in Area.objects.all():
|
||||
print "importing Area", o.pk, o.area_acronym.acronym
|
||||
|
||||
try:
|
||||
group = Group.objects.get(acronym=o.area_acronym.acronym)
|
||||
except Group.DoesNotExist:
|
||||
group = Group(acronym=o.area_acronym.acronym)
|
||||
group.id = o.area_acronym_id # transfer id
|
||||
|
||||
if o.last_modified_date:
|
||||
group.time = datetime.datetime.combine(o.last_modified_date, datetime.time(12, 0, 0))
|
||||
group.name = o.area_acronym.name
|
||||
if o.status.status == "Active":
|
||||
s = state_names["active"]
|
||||
elif o.status.status == "Concluded":
|
||||
s = state_names["conclude"]
|
||||
elif o.status.status == "Unknown":
|
||||
s = state_names["unknown"]
|
||||
group.state = s
|
||||
group.type = type_names["area"]
|
||||
group.parent = iesg_group
|
||||
group.comments = o.comments.strip() if o.comments else ""
|
||||
|
||||
group.save()
|
||||
|
||||
for u in o.additional_urls():
|
||||
url, _ = GroupURL.objects.get_or_create(group=group, url=u.url)
|
||||
url.name = u.description.strip()
|
||||
url.save()
|
||||
|
||||
# import events
|
||||
group.groupevent_set.all().delete()
|
||||
|
||||
if o.concluded_date:
|
||||
e = GroupEvent(group=group, type="concluded")
|
||||
e.time = datetime.datetime.combine(o.concluded_date, datetime.time(12, 0, 0))
|
||||
e.by = system
|
||||
e.desc = e.get_type_display()
|
||||
e.save()
|
||||
|
||||
# FIXME: missing fields from old: extra_email_addresses
|
||||
|
||||
|
||||
# IRTF
|
||||
for o in IRTF.objects.all():
|
||||
print "importing IRTF", o.pk, o.acronym
|
||||
|
||||
try:
|
||||
group = Group.objects.get(acronym=o.acronym.lower())
|
||||
except Group.DoesNotExist:
|
||||
group = Group(acronym=o.acronym.lower())
|
||||
|
||||
group.name = o.name
|
||||
group.state = state_names["active"] # we assume all to be active
|
||||
group.type = type_names["rg"]
|
||||
group.parent = irtf_group
|
||||
|
||||
group.comments = o.charter_text or ""
|
||||
|
||||
group.save()
|
||||
|
||||
# FIXME: missing fields from old: meeting_scheduled
|
||||
|
||||
# IETFWG, AreaGroup
|
||||
for o in IETFWG.objects.all().order_by("pk"):
|
||||
print "importing IETFWG", o.pk, o.group_acronym.acronym
|
||||
|
||||
try:
|
||||
group = Group.objects.get(acronym=o.group_acronym.acronym)
|
||||
except Group.DoesNotExist:
|
||||
group = Group(acronym=o.group_acronym.acronym)
|
||||
group.id = o.group_acronym_id # transfer id
|
||||
|
||||
if o.last_modified_date:
|
||||
group.time = datetime.datetime.combine(o.last_modified_date, datetime.time(12, 0, 0))
|
||||
group.name = o.group_acronym.name
|
||||
# state
|
||||
if o.group_type.type == "BOF":
|
||||
s = state_names["bof"]
|
||||
elif o.group_type.type == "PWG":
|
||||
s = state_names["proposed"]
|
||||
elif o.status.status == "Active":
|
||||
s = state_names["active"]
|
||||
elif o.status.status == "Dormant":
|
||||
s = state_names["dormant"]
|
||||
elif o.status.status == "Concluded":
|
||||
s = state_names["conclude"]
|
||||
group.state = s
|
||||
# type
|
||||
if o.group_type.type == "TEAM":
|
||||
group.type = type_names["team"]
|
||||
elif o.group_type.type == "AG":
|
||||
if o.group_acronym.acronym == "none":
|
||||
# none means individual
|
||||
group.type = type_names["individ"]
|
||||
elif o.group_acronym.acronym == "iab":
|
||||
group.type = type_names["ietf"]
|
||||
group.parent = None
|
||||
elif o.group_acronym.acronym in ("tsvdir", "secdir", "saag", "usac"):
|
||||
group.type = type_names["team"]
|
||||
elif o.group_acronym.acronym == "iesg":
|
||||
pass # we already treated iesg
|
||||
elif o.group_acronym.acronym in ("apparea", "opsarea", "rtgarea", "usvarea", "genarea", "tsvarea", "raiarea", "apptsv"):
|
||||
group.type = type_names["ag"]
|
||||
else:
|
||||
# the remaining groups are
|
||||
# apples, null, dirdir
|
||||
# for now, we don't transfer them
|
||||
if group.id:
|
||||
group.delete()
|
||||
print "not transferring", o.group_acronym.acronym, o.group_acronym.name
|
||||
continue
|
||||
else: # PWG/BOF/WG
|
||||
# some BOFs aren't WG-forming but we currently classify all as WGs
|
||||
group.type = type_names["wg"]
|
||||
|
||||
if o.area:
|
||||
group.parent = Group.objects.get(acronym=o.area.area.area_acronym.acronym)
|
||||
elif not group.parent:
|
||||
print "no area/parent for", group.acronym, group.name, group.type, group.state
|
||||
|
||||
try:
|
||||
area_director = o.area_director
|
||||
except AreaDirector.DoesNotExist:
|
||||
area_director = None
|
||||
if area_director and not area_director.area_id:
|
||||
area_director = None # fake TBD guy
|
||||
|
||||
group.ad = old_person_to_person(area_director.person) if area_director else None
|
||||
group.list_email = o.email_address if o.email_address else ""
|
||||
group.list_subscribe = (o.email_subscribe or "").replace("//listinfo", "/listinfo").strip()
|
||||
l = o.clean_email_archive().strip() if o.email_archive else ""
|
||||
if l in ("none", "not available"):
|
||||
l = ""
|
||||
group.list_archive = l
|
||||
group.comments = o.comments.strip() if o.comments else ""
|
||||
|
||||
group.save()
|
||||
|
||||
for u in o.additional_urls():
|
||||
url, _ = GroupURL.objects.get_or_create(group=group, url=u.url)
|
||||
url.name = u.description.strip()
|
||||
url.save()
|
||||
|
||||
for m in o.milestones():
|
||||
desc = m.description.strip()
|
||||
try:
|
||||
milestone = GroupMilestone.objects.get(group=group, desc=desc)
|
||||
except GroupMilestone.DoesNotExist:
|
||||
milestone = GroupMilestone(group=group, desc=desc)
|
||||
|
||||
milestone.expected_due_date = m.expected_due_date
|
||||
milestone.done = m.done == "Done"
|
||||
milestone.done_date = m.done_date
|
||||
milestone.time = datetime.datetime.combine(m.last_modified_date, datetime.time(12, 0, 0))
|
||||
milestone.save()
|
||||
|
||||
# import events
|
||||
group.groupevent_set.all().delete()
|
||||
|
||||
def import_date_event(name, type_name):
|
||||
d = getattr(o, "%s_date" % name)
|
||||
if d:
|
||||
e = GroupEvent(group=group, type=type_name)
|
||||
e.time = datetime.datetime.combine(d, datetime.time(12, 0, 0))
|
||||
e.by = system
|
||||
e.desc = e.get_type_display()
|
||||
e.save()
|
||||
|
||||
import_date_event("proposed", "proposed")
|
||||
import_date_event("start", "started")
|
||||
import_date_event("concluded", "concluded")
|
||||
# dormant_date is empty on all so don't bother with that
|
||||
|
||||
# FIXME: missing fields from old: meeting_scheduled, email_keyword, meeting_scheduled_old
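# Quick post-run check (sketch, assuming the import completed): every WG should
# have a type/state from the name tables created at the top of this script, and
# the areas should hang off the "iesg" group created above.
for g in Group.objects.filter(type="wg").exclude(parent__type="area"):
    print "WG without an area parent:", g.acronym, g.state_id
print Group.objects.filter(type="area", parent=iesg_group).count(), "areas under the IESG"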
|
59
redesign/importing/import-ipr.py
Executable file
|
@ -0,0 +1,59 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import sys, os, re, datetime
|
||||
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
|
||||
from ietf import settings
|
||||
settings.USE_DB_REDESIGN_PROXY_CLASSES = False
|
||||
settings.IMPORTING_IPR = True
|
||||
|
||||
from django.core import management
|
||||
management.setup_environ(settings)
|
||||
|
||||
from ietf.ipr.models import IprDraftOld, IprRfcOld, IprDocAlias, IprDetail
|
||||
from redesign.doc.models import DocAlias
|
||||
|
||||
# imports IprDraft and IprRfc, converting them to IprDocAlias links to Document
|
||||
|
||||
# assumptions: documents have been imported
|
||||
|
||||
# some links are borked, only import those that reference an existing IprDetail
|
||||
ipr_ids = IprDetail.objects.all()
|
||||
|
||||
for o in IprDraftOld.objects.filter(ipr__in=ipr_ids).select_related("document").order_by("id").iterator():
|
||||
try:
|
||||
alias = DocAlias.objects.get(name=o.document.filename)
|
||||
except DocAlias.DoesNotExist:
|
||||
print "COULDN'T FIND DOCUMENT", o.document.filename
|
||||
continue
|
||||
|
||||
try:
|
||||
IprDocAlias.objects.get(ipr=o.ipr_id, doc_alias=alias)
|
||||
except IprDocAlias.DoesNotExist:
|
||||
link = IprDocAlias()
|
||||
link.ipr_id = o.ipr_id
|
||||
link.doc_alias = alias
|
||||
link.rev = o.revision or ""
|
||||
link.save()
|
||||
|
||||
print "importing IprDraft", o.pk, "linking", o.ipr_id, o.document.filename
|
||||
|
||||
for o in IprRfcOld.objects.filter(ipr__in=ipr_ids).select_related("document").order_by("id").iterator():
|
||||
try:
|
||||
alias = DocAlias.objects.get(name="rfc%s" % o.document.rfc_number)
|
||||
except DocAlias.DoesNotExist:
|
||||
print "COULDN'T FIND RFC%s", o.document.rfc_number
|
||||
continue
|
||||
|
||||
try:
|
||||
IprDocAlias.objects.get(ipr=o.ipr_id, doc_alias=alias)
|
||||
except IprDocAlias.DoesNotExist:
|
||||
link = IprDocAlias()
|
||||
link.ipr_id = o.ipr_id
|
||||
link.doc_alias = alias
|
||||
link.rev = ""
|
||||
link.save()
|
||||
|
||||
print "importing IprRfc", o.pk, "linking", o.ipr_id, o.document.rfc_number
|
54
redesign/importing/import-persons.py
Executable file
|
@ -0,0 +1,54 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import sys, os, re, datetime
|
||||
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
|
||||
from ietf import settings
|
||||
settings.USE_DB_REDESIGN_PROXY_CLASSES = False
|
||||
|
||||
from django.core import management
|
||||
management.setup_environ(settings)
|
||||
|
||||
from ietf.idtracker.models import AreaDirector, IETFWG
|
||||
from redesign.person.models import *
|
||||
from redesign.importing.utils import get_or_create_email
|
||||
|
||||
# creates system person and email
|
||||
|
||||
# imports AreaDirector persons that are connected to an IETFWG
|
||||
|
||||
# should probably also import the old person/email tables
|
||||
|
||||
print "creating (System) person and email"
|
||||
try:
|
||||
system_person = Person.objects.get(name="(System)")
|
||||
except Person.DoesNotExist:
|
||||
system_person = Person.objects.create(
|
||||
id=0, # special value
|
||||
name="(System)",
|
||||
ascii="(System)",
|
||||
address="",
|
||||
)
|
||||
|
||||
system_person = Person.objects.get(name="(System)")
|
||||
|
||||
if system_person.id != 0: # work around bug in Django
|
||||
Person.objects.filter(id=system_person.id).update(id=0)
|
||||
system_person = Person.objects.get(id=0)
|
||||
|
||||
system_alias = Alias.objects.get_or_create(
|
||||
person=system_person,
|
||||
name=system_person.name
|
||||
)
|
||||
|
||||
system_email = Email.objects.get_or_create(
|
||||
address="",
|
||||
defaults=dict(active=True, person=system_person)
|
||||
)
|
||||
|
||||
for o in AreaDirector.objects.filter(ietfwg__in=IETFWG.objects.all()).exclude(area_acronym=None).distinct().order_by("pk").iterator():
|
||||
print "importing AreaDirector (from IETFWG) persons", o.pk
|
||||
|
||||
get_or_create_email(o, create_fake=False)
|
204
redesign/importing/import-roles.py
Executable file
|
@ -0,0 +1,204 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import sys, os, re, datetime
|
||||
|
||||
basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
||||
sys.path = [ basedir ] + sys.path
|
||||
|
||||
from ietf import settings
|
||||
settings.USE_DB_REDESIGN_PROXY_CLASSES = False
|
||||
|
||||
from django.core import management
|
||||
management.setup_environ(settings)
|
||||
|
||||
from redesign.person.models import *
|
||||
from redesign.group.models import *
|
||||
from redesign.name.models import *
|
||||
from redesign.name.utils import name
|
||||
from redesign.importing.utils import old_person_to_email, clean_email_address, get_or_create_email
|
||||
|
||||
from ietf.idtracker.models import IESGLogin, AreaDirector, IDAuthor, PersonOrOrgInfo, WGChair, WGEditor, WGSecretary, WGTechAdvisor, ChairsHistory, Role as OldRole, Acronym, IRTFChair
|
||||
|
||||
|
||||
# assumptions:
|
||||
# - groups have been imported
|
||||
|
||||
# PersonOrOrgInfo/PostalAddress/EmailAddress/PhoneNumber are not
|
||||
# imported, although some information is retrieved from those
|
||||
|
||||
# imports IESGLogin, AreaDirector, WGEditor, WGChair, IRTFChair,
|
||||
# WGSecretary, WGTechAdvisor, NomCom chairs from ChairsHistory,
|
||||
#
|
||||
# also imports persons from IDAuthor, announcement originators from
|
||||
# Announcements
|
||||
|
||||
# FIXME: should probably import Role
|
||||
|
||||
area_director_role = name(RoleName, "ad", "Area Director")
|
||||
inactive_area_director_role = name(RoleName, "ex-ad", "Ex-Area Director", desc="Inactive Area Director")
|
||||
chair_role = name(RoleName, "chair", "Chair")
|
||||
editor_role = name(RoleName, "editor", "Editor")
|
||||
secretary_role = name(RoleName, "secr", "Secretary")
|
||||
techadvisor_role = name(RoleName, "techadv", "Tech Advisor")
|
||||
|
||||
|
||||
# WGEditor
|
||||
for o in WGEditor.objects.all():
|
||||
acronym = Acronym.objects.get(acronym_id=o.group_acronym_id).acronym
|
||||
print "importing WGEditor", acronym, o.person
|
||||
|
||||
email = get_or_create_email(o, create_fake=True)
|
||||
group = Group.objects.get(acronym=acronym)
|
||||
|
||||
Role.objects.get_or_create(name=editor_role, group=group, email=email)
|
||||
|
||||
# WGSecretary
|
||||
for o in WGSecretary.objects.all():
|
||||
acronym = Acronym.objects.get(acronym_id=o.group_acronym_id).acronym
|
||||
print "importing WGSecretary", acronym, o.person
|
||||
|
||||
email = get_or_create_email(o, create_fake=True)
|
||||
group = Group.objects.get(acronym=acronym)
|
||||
|
||||
Role.objects.get_or_create(name=secretary_role, group=group, email=email)
|
||||
|
||||
# WGTechAdvisor
|
||||
for o in WGTechAdvisor.objects.all():
|
||||
acronym = Acronym.objects.get(acronym_id=o.group_acronym_id).acronym
|
||||
print "importing WGTechAdvisor", acronym, o.person
|
||||
|
||||
email = get_or_create_email(o, create_fake=True)
|
||||
group = Group.objects.get(acronym=acronym)
|
||||
|
||||
Role.objects.get_or_create(name=techadvisor_role, group=group, email=email)
|
||||
|
||||
# WGChair
|
||||
for o in WGChair.objects.all():
|
||||
# there's some garbage in this table, so wear double safety belts
|
||||
try:
|
||||
acronym = Acronym.objects.get(acronym_id=o.group_acronym_id).acronym
|
||||
except Acronym.DoesNotExist:
|
||||
print "SKIPPING WGChair with unknown acronym id", o.group_acronym_id
|
||||
continue
|
||||
|
||||
try:
|
||||
person = o.person
|
||||
except PersonOrOrgInfo.DoesNotExist:
|
||||
print "SKIPPING WGChair", acronym, "with invalid person id", o.person_id
|
||||
continue
|
||||
|
||||
if acronym in ("apples", "apptsv", "usac", "null", "dirdir"):
|
||||
print "SKIPPING WGChair", acronym, o.person
|
||||
continue
|
||||
|
||||
print "importing WGChair", acronym, o.person
|
||||
|
||||
email = get_or_create_email(o, create_fake=True)
|
||||
group = Group.objects.get(acronym=acronym)
|
||||
|
||||
Role.objects.get_or_create(name=chair_role, group=group, email=email)
|
||||
|
||||
# IRTFChair
|
||||
for o in IRTFChair.objects.all():
|
||||
acronym = o.irtf.acronym.lower()
|
||||
print "importing IRTFChair", acronym, o.person
|
||||
|
||||
email = get_or_create_email(o, create_fake=True)
|
||||
group = Group.objects.get(acronym=acronym)
|
||||
|
||||
Role.objects.get_or_create(name=chair_role, group=group, email=email)
|
||||
|
||||
# NomCom chairs
|
||||
nomcom_groups = list(Group.objects.filter(acronym__startswith="nomcom").exclude(acronym="nomcom"))
|
||||
for o in ChairsHistory.objects.filter(chair_type=OldRole.NOMCOM_CHAIR):
|
||||
print "importing NOMCOM chair", o
|
||||
for g in nomcom_groups:
|
||||
if ("%s/%s" % (o.start_year, o.end_year)) in g.name:
|
||||
break
|
||||
|
||||
email = get_or_create_email(o, create_fake=False)
|
||||
|
||||
Role.objects.get_or_create(name=chair_role, group=g, email=email)
|
||||
|
||||
# IESGLogin
|
||||
for o in IESGLogin.objects.all():
|
||||
print "importing IESGLogin", o.id, o.first_name, o.last_name
|
||||
|
||||
if not o.person:
|
||||
persons = PersonOrOrgInfo.objects.filter(first_name=o.first_name, last_name=o.last_name)
|
||||
if persons:
|
||||
o.person = persons[0]
|
||||
else:
|
||||
print "NO PERSON", o.person_id
|
||||
continue
|
||||
|
||||
email = get_or_create_email(o, create_fake=False)
|
||||
if not email:
|
||||
continue
|
||||
|
||||
user, _ = User.objects.get_or_create(username=o.login_name)
|
||||
email.person.user = user
|
||||
email.person.save()
|
||||
|
||||
# current ADs are imported below
|
||||
if o.user_level == IESGLogin.SECRETARIAT_LEVEL:
|
||||
if not Role.objects.filter(name=secretary_role, email=email):
|
||||
Role.objects.create(name=secretary_role, group=Group.objects.get(acronym="secretariat"), email=email)
|
||||
elif o.user_level == IESGLogin.INACTIVE_AD_LEVEL:
|
||||
if not Role.objects.filter(name=inactive_area_director_role, email=email):
|
||||
# connect them directly to the IESG as we don't really know where they belong
|
||||
Role.objects.create(name=inactive_area_director_role, group=Group.objects.get(acronym="iesg"), email=email)
|
||||
|
||||
# AreaDirector
|
||||
for o in AreaDirector.objects.all():
|
||||
if not o.area:
|
||||
print "NO AREA", o.person, o.area_id
|
||||
continue
|
||||
|
||||
print "importing AreaDirector", o.area, o.person
|
||||
email = get_or_create_email(o, create_fake=False)
|
||||
|
||||
area = Group.objects.get(acronym=o.area.area_acronym.acronym)
|
||||
|
||||
if area.state_id == "active":
|
||||
role_type = area_director_role
|
||||
else:
|
||||
# can't be active area director in an inactive area
|
||||
role_type = inactive_area_director_role
|
||||
|
||||
r = Role.objects.filter(name__in=(area_director_role, inactive_area_director_role),
|
||||
email=email)
|
||||
if r and r[0].group == "iesg":
|
||||
r[0].group = area
|
||||
r[0].name = role_type
|
||||
r[0].save()
|
||||
else:
|
||||
Role.objects.get_or_create(name=role_type, group=area, email=email)
|
||||
|
||||
|
||||
# Announcement persons
|
||||
for o in PersonOrOrgInfo.objects.filter(announcement__announcement_id__gte=1).distinct():
|
||||
print "importing Announcement originator", o.person_or_org_tag, o.first_name.encode('utf-8'), o.last_name.encode('utf-8')
|
||||
|
||||
o.person = o # satisfy the get_or_create_email interface
|
||||
|
||||
email = get_or_create_email(o, create_fake=False)
|
||||
|
||||
# IDAuthor persons
|
||||
for o in IDAuthor.objects.all().order_by('id').select_related('person').iterator():
|
||||
print "importing IDAuthor", o.id, o.person_id, o.person.first_name.encode('utf-8'), o.person.last_name.encode('utf-8')
|
||||
email = get_or_create_email(o, create_fake=True)
|
||||
|
||||
# we may also need to import email address used specifically for
|
||||
# the document
|
||||
addr = clean_email_address(o.email() or "")
|
||||
if addr and addr.lower() != email.address.lower():
|
||||
try:
|
||||
e = Email.objects.get(address=addr)
|
||||
if e.person != email.person or e.active != False:
|
||||
e.person = email.person
|
||||
e.active = False
|
||||
e.save()
|
||||
except Email.DoesNotExist:
|
||||
Email.objects.create(address=addr, person=email.person, active=False)
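# Spot check after the run (sketch; "foobar" is a placeholder acronym): the
# chairs and ADs imported above are now reachable through the new Role table.
for r in Role.objects.filter(group__acronym="foobar", name="chair").select_related():
    print r.group.acronym, r.name_id, r.email.address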
|
||||
|
50
redesign/importing/utils.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
from redesign import unaccent
|
||||
from redesign.person.models import Person, Email, Alias
|
||||
|
||||
def clean_email_address(addr):
|
||||
addr = addr.replace("!", "@").replace("(at)", "@") # some obvious @ replacements
|
||||
addr = addr[addr.rfind('<') + 1:addr.find('>') if '>' in addr else None] # whack surrounding <...>, tolerating addresses without them
|
||||
addr = addr.strip()
|
||||
if not "@" in addr:
|
||||
return ""
|
||||
else:
|
||||
return addr
|
||||
|
||||
def old_person_to_person(person):
|
||||
return Person.objects.get(id=person.pk)
|
||||
|
||||
def old_person_to_email(person):
|
||||
hardcoded_emails = { 'Dinara Suleymanova': "dinaras@ietf.org" }
|
||||
|
||||
return clean_email_address(person.email()[1] or hardcoded_emails.get("%s %s" % (person.first_name, person.last_name)) or "")
|
||||
|
||||
def get_or_create_email(o, create_fake):
|
||||
# take person on o and get or create new Email and Person objects
|
||||
email = old_person_to_email(o.person)
|
||||
if not email:
|
||||
if create_fake:
|
||||
email = u"unknown-email-%s-%s" % (o.person.first_name, o.person.last_name)
|
||||
print ("USING FAKE EMAIL %s for %s %s %s" % (email, o.person.pk, o.person.first_name, o.person.last_name)).encode('utf-8')
|
||||
else:
|
||||
print ("NO EMAIL FOR %s %s %s %s %s" % (o.__class__, o.pk, o.person.pk, o.person.first_name, o.person.last_name)).encode('utf-8')
|
||||
return None
|
||||
|
||||
e, _ = Email.objects.select_related("person").get_or_create(address=email)
|
||||
if not e.person:
|
||||
n = u"%s %s" % (o.person.first_name, o.person.last_name)
|
||||
asciified = unaccent.asciify(n)
|
||||
aliases = Alias.objects.filter(name__in=(n, asciified))
|
||||
if aliases:
|
||||
p = aliases[0].person
|
||||
else:
|
||||
p = Person.objects.create(id=o.person.pk, name=n, ascii=asciified)
|
||||
# FIXME: fill in address?
|
||||
|
||||
Alias.objects.create(name=n, person=p)
|
||||
if asciified != n:
|
||||
Alias.objects.create(name=asciified, person=p)
|
||||
|
||||
e.person = p
|
||||
e.save()
|
||||
|
||||
return e
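# Behaviour sketch of clean_email_address() as implemented above (pure string
# handling, so it can be exercised without touching the database):
if __name__ == "__main__":
    assert clean_email_address("Jane Doe <jane(at)example.org>") == "jane@example.org"
    assert clean_email_address("not an address") == ""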
|
0
redesign/name/__init__.py
Normal file
22
redesign/name/admin.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
from django.contrib import admin
|
||||
from models import *
|
||||
|
||||
class NameAdmin(admin.ModelAdmin):
|
||||
list_display = ["slug", "name", "desc", "used"]
|
||||
prepopulated_fields = { "slug": ("name",) }
|
||||
|
||||
admin.site.register(GroupTypeName, NameAdmin)
|
||||
admin.site.register(GroupStateName, NameAdmin)
|
||||
admin.site.register(RoleName, NameAdmin)
|
||||
admin.site.register(DocStreamName, NameAdmin)
|
||||
admin.site.register(DocStateName, NameAdmin)
|
||||
admin.site.register(DocRelationshipName, NameAdmin)
|
||||
admin.site.register(WgDocStateName, NameAdmin)
|
||||
admin.site.register(IesgDocStateName, NameAdmin)
|
||||
admin.site.register(IanaDocStateName, NameAdmin)
|
||||
admin.site.register(RfcDocStateName, NameAdmin)
|
||||
admin.site.register(DocTypeName, NameAdmin)
|
||||
admin.site.register(DocInfoTagName, NameAdmin)
|
||||
admin.site.register(StdLevelName, NameAdmin)
|
||||
admin.site.register(IntendedStdLevelName, NameAdmin)
|
||||
admin.site.register(BallotPositionName, NameAdmin)
|
87
redesign/name/models.py
Normal file
|
@ -0,0 +1,87 @@
|
|||
# Copyright The IETF Trust 2007, All Rights Reserved
|
||||
|
||||
from django.db import models
|
||||
|
||||
class NameModel(models.Model):
|
||||
slug = models.CharField(max_length=8, primary_key=True)
|
||||
name = models.CharField(max_length=32)
|
||||
desc = models.TextField(blank=True)
|
||||
used = models.BooleanField(default=True)
|
||||
order = models.IntegerField(default=0)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
ordering = ['order']
|
||||
|
||||
class GroupStateName(NameModel):
|
||||
"""BOF, Proposed, Active, Dormant, Concluded"""
|
||||
class GroupTypeName(NameModel):
|
||||
"""IETF, Area, WG, RG, Team, etc."""
|
||||
class RoleName(NameModel):
|
||||
"""AD, Chair"""
|
||||
class DocStreamName(NameModel):
|
||||
"""IETF, IAB, IRTF, Independent Submission, Legacy"""
|
||||
class DocStateName(NameModel):
|
||||
"""Active, Expired, RFC, Replaced, Withdrawn"""
|
||||
class DocRelationshipName(NameModel):
|
||||
"""Updates, Replaces, Obsoletes, Reviews, ... The relationship is
|
||||
always recorded in one direction.
|
||||
"""
|
||||
class WgDocStateName(NameModel):
|
||||
"""Not, Candidate, Active, Parked, LastCall, WriteUp, Submitted,
|
||||
Dead"""
|
||||
class IesgDocStateName(NameModel):
|
||||
"""Pub Request, Ad Eval, Expert Review, Last Call Requested, In
|
||||
Last Call, Waiting for Writeup, Waiting for AD Go-Ahead, IESG
|
||||
Evaluation, Deferred, Approved, Announcement Sent, Do Not Publish,
|
||||
Ad is watching, Dead """
|
||||
class IanaDocStateName(NameModel):
|
||||
""" """
|
||||
class RfcDocStateName(NameModel):
|
||||
"""Missref, Edit, RFC-Editor, Auth48, Auth, Published; ISR,
|
||||
ISR-Auth, ISR-Timeout;"""
|
||||
class DocTypeName(NameModel):
|
||||
"""Draft, Agenda, Minutes, Charter, Discuss, Guideline, Email,
|
||||
Review, Issue, Wiki"""
|
||||
class DocInfoTagName(NameModel):
|
||||
"""Waiting for Reference, IANA Coordination, Revised ID Needed,
|
||||
External Party, AD Followup, Point Raised - Writeup Needed"""
|
||||
class StdLevelName(NameModel):
|
||||
"""Proposed Standard, Draft Standard, Standard, Experimental,
|
||||
Informational, Best Current Practice, Historic, ..."""
|
||||
class IntendedStdLevelName(NameModel):
|
||||
"""Standards Track, Experimental, Informational, Best Current
|
||||
Practice, Historic, ..."""
|
||||
class BallotPositionName(NameModel):
|
||||
""" Yes, NoObjection, Abstain, Discuss, Recuse """
|
||||
|
||||
|
||||
def get_next_iesg_states(iesg_state):
|
||||
if not iesg_state:
|
||||
return ()
|
||||
|
||||
next = {
|
||||
"pub-req": ("ad-eval", "watching", "dead"),
|
||||
"ad-eval": ("watching", "lc-req", "review-e", "iesg-eva"),
|
||||
"review-e": ("ad-eval", ),
|
||||
"lc-req": ("lc", ),
|
||||
"lc": ("writeupw", "goaheadw"),
|
||||
"writeupw": ("goaheadw", ),
|
||||
"goaheadw": ("iesg-eva", ),
|
||||
"iesg-eva": ("nopubadw", "defer", "ann"),
|
||||
"defer": ("iesg-eva", ),
|
||||
"ann": ("approved", ),
|
||||
"approved": ("rfcqueue", ),
|
||||
"rfcqueue": ("pub", ),
|
||||
"pub": ("dead", ),
|
||||
"nopubadw": ("nopubanw", ),
|
||||
"nopubanw": ("dead", ),
|
||||
"watching": ("pub-req", ),
|
||||
"dead": ("pub-req", ),
|
||||
}
|
||||
|
||||
return IesgDocStateName.objects.filter(slug__in=next.get(iesg_state.slug, ()))
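# Sketch of walking the state graph above (Django shell, assuming the
# IesgDocStateName rows with these slugs exist, e.g. created by the importers):
s = IesgDocStateName.objects.get(slug="lc-req")
print [x.slug for x in get_next_iesg_states(s)]     # -> ["lc"]
print get_next_iesg_states(None)                    # -> () (short-circuit for a missing state)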
|
||||
|
116
redesign/name/proxy.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
from redesign.proxy_utils import TranslatingManager
|
||||
from models import *
|
||||
|
||||
class IDStatus(DocStateName):
|
||||
def from_object(self, base):
|
||||
for f in base._meta.fields:
|
||||
setattr(self, f.name, getattr(base, f.name))
|
||||
return self
|
||||
|
||||
#status_id = models.AutoField(primary_key=True)
|
||||
|
||||
#status = models.CharField(max_length=25, db_column='status_value')
|
||||
@property
|
||||
def status(self):
|
||||
return self.name
|
||||
|
||||
def __unicode__(self):
|
||||
return super(self.__class__, self).__unicode__()
|
||||
|
||||
class Meta:
|
||||
proxy = True
|
||||
|
||||
class IDState(IesgDocStateName):
|
||||
PUBLICATION_REQUESTED = 10
|
||||
LAST_CALL_REQUESTED = 15
|
||||
IN_LAST_CALL = 16
|
||||
WAITING_FOR_WRITEUP = 18
|
||||
WAITING_FOR_AD_GO_AHEAD = 19
|
||||
IESG_EVALUATION = 20
|
||||
IESG_EVALUATION_DEFER = 21
|
||||
APPROVED_ANNOUNCEMENT_SENT = 30
|
||||
AD_WATCHING = 42
|
||||
DEAD = 99
|
||||
DO_NOT_PUBLISH_STATES = (33, 34)
|
||||
|
||||
objects = TranslatingManager(dict(pk="order"))
|
||||
|
||||
def from_object(self, base):
|
||||
for f in base._meta.fields:
|
||||
setattr(self, f.name, getattr(base, f.name))
|
||||
return self
|
||||
|
||||
#document_state_id = models.AutoField(primary_key=True)
|
||||
@property
|
||||
def document_state_id(self):
|
||||
return self.order
|
||||
|
||||
#state = models.CharField(max_length=50, db_column='document_state_val')
|
||||
@property
|
||||
def state(self):
|
||||
return self.name
|
||||
|
||||
#equiv_group_flag = models.IntegerField(null=True, blank=True) # unused
|
||||
#description = models.TextField(blank=True, db_column='document_desc')
|
||||
@property
|
||||
def description(self):
|
||||
return self.desc
|
||||
|
||||
@property
|
||||
def nextstate(self):
|
||||
# simulate related queryset
|
||||
from redesign.name.models import get_next_iesg_states
|
||||
return IDState.objects.filter(pk__in=[x.pk for x in get_next_iesg_states(self)])
|
||||
|
||||
@property
|
||||
def next_state(self):
|
||||
# simulate IDNextState
|
||||
return self
|
||||
|
||||
def __str__(self):
|
||||
return self.state
|
||||
|
||||
@staticmethod
|
||||
def choices():
|
||||
return [(state.slug, state.name) for state in IDState.objects.all()]
|
||||
|
||||
class Meta:
|
||||
proxy = True
|
||||
|
||||
|
||||
class IDSubStateManager(TranslatingManager):
|
||||
def __init__(self, *args):
|
||||
super(IDSubStateManager, self).__init__(*args)
|
||||
|
||||
def all(self):
|
||||
return self.filter(slug__in=['extpty', 'need-rev', 'ad-f-up', 'point'])
|
||||
|
||||
class IDSubState(DocInfoTagName):
|
||||
objects = IDSubStateManager(dict(pk="order"))
|
||||
|
||||
def from_object(self, base):
|
||||
for f in base._meta.fields:
|
||||
setattr(self, f.name, getattr(base, f.name))
|
||||
return self
|
||||
|
||||
#sub_state_id = models.AutoField(primary_key=True)
|
||||
@property
|
||||
def sub_state_id(self):
|
||||
return self.order
|
||||
|
||||
#sub_state = models.CharField(max_length=55, db_column='sub_state_val')
|
||||
@property
|
||||
def sub_state(self):
|
||||
return self.name
|
||||
|
||||
#description = models.TextField(blank=True, db_column='sub_state_desc')
|
||||
@property
|
||||
def description(self):
|
||||
return self.desc
|
||||
|
||||
def __str__(self):
|
||||
return self.sub_state
|
||||
|
||||
class Meta:
|
||||
proxy = True
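# Sketch (Django shell): the proxy keeps numeric-id lookups working, assuming
# the importers copied the old numeric state ids into the "order" column.
s = IDState.objects.get(pk=IDState.LAST_CALL_REQUESTED)   # pk is translated to order
print s.document_state_id, s.state
print [x.state for x in s.nextstate]                       # via get_next_iesg_states()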
|
||||
|
8
redesign/name/utils.py
Normal file
|
@ -0,0 +1,8 @@
|
|||
def name(name_class, slug, name, desc="", order=0):
|
||||
# create if it doesn't exist, set name and desc
|
||||
obj, _ = name_class.objects.get_or_create(slug=slug)
|
||||
obj.name = name
|
||||
obj.desc = desc
|
||||
obj.order = order
|
||||
obj.save()
|
||||
return obj
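# Typical use, as in the import scripts elsewhere in this commit (Django shell);
# repeated calls are idempotent: get_or_create plus a refresh of name/desc/order.
from redesign.name.models import GroupStateName
active = name(GroupStateName, slug="active", name="Active")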
|
0
redesign/person/__init__.py
Normal file
28
redesign/person/admin.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
from django.contrib import admin
|
||||
from models import *
|
||||
|
||||
class EmailAdmin(admin.ModelAdmin):
|
||||
list_display = ["address", "person", "time", "active", ]
|
||||
raw_id_fields = ["person", ]
|
||||
search_fields = ["address", "person__name", ]
|
||||
admin.site.register(Email, EmailAdmin)
|
||||
|
||||
class EmailInline(admin.TabularInline):
|
||||
model = Email
|
||||
|
||||
class AliasAdmin(admin.ModelAdmin):
|
||||
list_display = ["name", "person", ]
|
||||
search_fields = ["name",]
|
||||
raw_id_fields = ["person"]
|
||||
admin.site.register(Alias, AliasAdmin)
|
||||
|
||||
class AliasInline(admin.StackedInline):
|
||||
model = Alias
|
||||
|
||||
class PersonAdmin(admin.ModelAdmin):
|
||||
list_display = ["name", "short", "time", "user", ]
|
||||
search_fields = ["name", "ascii"]
|
||||
inlines = [ EmailInline, AliasInline, ]
|
||||
# actions = None
|
||||
admin.site.register(Person, PersonAdmin)
|
||||
|
103
redesign/person/models.py
Normal file
|
@ -0,0 +1,103 @@
|
|||
# Copyright The IETF Trust 2007, All Rights Reserved
|
||||
|
||||
from django.db import models
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
class Person(models.Model):
|
||||
time = models.DateTimeField(auto_now_add=True) # When this Person record entered the system
|
||||
name = models.CharField(max_length=255, db_index=True) # The normal unicode form of the name. This must be
|
||||
# set to the same value as the ascii-form if equal.
|
||||
ascii = models.CharField(max_length=255) # The normal ascii-form of the name.
|
||||
ascii_short = models.CharField(max_length=32, null=True, blank=True) # The short ascii-form of the name. Also in alias table if non-null
|
||||
address = models.TextField(max_length=255, blank=True)
|
||||
|
||||
user = models.OneToOneField(User, blank=True, null=True)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
def _parts(self, name):
|
||||
prefix, first, middle, last, suffix = "", "", "", "", ""
|
||||
parts = name.split()
|
||||
if parts[0] in ["Mr", "Mr.", "Mrs", "Mrs.", "Ms", "Ms.", "Miss", "Dr.", "Doctor", "Prof", "Prof.", "Professor", "Sir", "Lady", "Dame", ]:
|
||||
prefix = parts[0];
|
||||
parts = parts[1:]
|
||||
if len(parts) > 2:
|
||||
if parts[-1] in ["Jr", "Jr.", "II", "2nd", "III", "3rd", ]:
|
||||
suffix = parts[-1]
|
||||
parts = parts[:-1]
|
||||
if len(parts) > 2:
|
||||
first = parts[0]
|
||||
last = parts[-1]
|
||||
middle = " ".join(parts[1:-1])
|
||||
elif len(parts) == 2:
|
||||
first, last = parts
|
||||
else:
|
||||
last = parts[0]
|
||||
return prefix, first, middle, last, suffix
|
||||
def name_parts(self):
|
||||
return self._parts(self.name)
|
||||
def ascii_parts(self):
|
||||
return self._parts(self.ascii)
|
||||
def short(self):
|
||||
if self.ascii_short:
|
||||
return self.ascii_short
|
||||
else:
|
||||
prefix, first, middle, last, suffix = self.ascii_parts()
|
||||
return (first and first[0]+"." or "")+(middle or "")+" "+last+(suffix and " "+suffix or "")
|
||||
def role_email(self, role_name, group):
|
||||
e = Email.objects.filter(person=self, role__group=group, role__name=role_name)
|
||||
if e:
|
||||
return e[0]
|
||||
e = self.email_set.order_by("-active")
|
||||
if e:
|
||||
return e[0]
|
||||
return None
|
||||
def email_address(self):
|
||||
e = self.email_set.filter(active=True)
|
||||
if e:
|
||||
return e[0]
|
||||
else:
|
||||
return ""
|
||||
def formatted_email(self):
|
||||
e = self.email_set.order_by("-active")
|
||||
if e:
|
||||
return e[0].formatted_email()
|
||||
else:
|
||||
return ""
|
||||
def person(self): # little temporary wrapper to help porting
|
||||
return self
|
||||
def full_name_as_key(self):
|
||||
return self.name.lower().replace(" ", ".")
|
||||
|
||||
|
||||
class Alias(models.Model):
|
||||
"""This is used for alternative forms of a name. This is the
|
||||
primary lookup point for names, and should always contain the
|
||||
unicode form (and ascii form, if different) of a name which is
|
||||
recorded in the Person record.
|
||||
"""
|
||||
person = models.ForeignKey(Person)
|
||||
name = models.CharField(max_length=255, db_index=True)
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
class Meta:
|
||||
verbose_name_plural = "Aliases"
|
||||
|
||||
class Email(models.Model):
|
||||
address = models.CharField(max_length=64, primary_key=True)
|
||||
person = models.ForeignKey(Person, null=True)
|
||||
time = models.DateTimeField(auto_now_add=True)
|
||||
active = models.BooleanField(default=True) # Old email addresses are *not* purged, as history
|
||||
# information points to persons through these
|
||||
def __unicode__(self):
|
||||
return self.address
|
||||
|
||||
def get_name(self):
|
||||
return self.person.name if self.person else self.address
|
||||
|
||||
def formatted_email(self):
|
||||
if self.person and self.person.name:
|
||||
return u'"%s" <%s>' % (self.person.name, self.address)
|
||||
else:
|
||||
return self.address
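# Sketch of the name handling above (no database access needed):
p = Person(name="Dr. John A. Smith Jr.", ascii="Dr. John A. Smith Jr.")
print p.name_parts()   # -> ("Dr.", "John", "A.", "Smith", "Jr.")
print p.short()        # -> "J.A. Smith Jr."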
|
||||
|
57
redesign/person/proxy.py
Normal file
|
@ -0,0 +1,57 @@
|
|||
from redesign.proxy_utils import TranslatingManager
|
||||
|
||||
from models import *
|
||||
|
||||
class IESGLogin(Person):
|
||||
objects = TranslatingManager(dict(user_level__in=None,
|
||||
first_name="name"
|
||||
))
|
||||
|
||||
def from_object(self, base):
|
||||
for f in base._meta.fields:
|
||||
setattr(self, f.name, getattr(base, f.name))
|
||||
return self
|
||||
|
||||
SECRETARIAT_LEVEL = 0
|
||||
AD_LEVEL = 1
|
||||
INACTIVE_AD_LEVEL = 2
|
||||
|
||||
#login_name = models.CharField(blank=True, max_length=255)
|
||||
@property
|
||||
def login_name(self): raise NotImplementedError
|
||||
#password = models.CharField(max_length=25)
|
||||
@property
|
||||
def password(self): raise NotImplementedError
|
||||
#user_level = models.IntegerField(choices=USER_LEVEL_CHOICES)
|
||||
@property
|
||||
def user_level(self): raise NotImplementedError
|
||||
|
||||
#first_name = models.CharField(blank=True, max_length=25)
|
||||
@property
|
||||
def first_name(self):
|
||||
return self.name_parts()[1]
|
||||
|
||||
#last_name = models.CharField(blank=True, max_length=25)
|
||||
@property
|
||||
def last_name(self):
|
||||
return self.name_parts()[3]
|
||||
|
||||
# FIXME: person isn't wrapped yet
|
||||
#person = BrokenForeignKey(PersonOrOrgInfo, db_column='person_or_org_tag', unique=True, null_values=(0, 888888), null=True)
|
||||
|
||||
# apparently unused
|
||||
#pgp_id = models.CharField(blank=True, null=True, max_length=20)
|
||||
#default_search = models.NullBooleanField()
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
def is_current_ad(self):
|
||||
return self in Person.objects.filter(email__role__name="ad", email__role__group__state="active").distinct()
|
||||
@staticmethod
|
||||
def active_iesg():
|
||||
return IESGLogin.objects.filter(email__role__name="ad", email__role__group__state="active").distinct().order_by('name')
|
||||
|
||||
class Meta:
|
||||
proxy = True
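# Sketch (Django shell): legacy call sites keep working through the proxy,
# with first/last name derived on the fly from name_parts().
for ad in IESGLogin.active_iesg():
    print ad.first_name, ad.last_name, ad.is_current_ad()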
|
259
redesign/proxy_utils.py
Normal file
|
@ -0,0 +1,259 @@
|
|||
from django.db.models.manager import Manager
|
||||
from django.db.models.query import QuerySet
|
||||
|
||||
class TranslatingQuerySet(QuerySet):
|
||||
def translated_args(self, args):
|
||||
trans = self.translated_attrs
|
||||
res = []
|
||||
for a in args:
|
||||
if a.startswith("-"):
|
||||
prefix = "-"
|
||||
a = a[1:]
|
||||
else:
|
||||
prefix = ""
|
||||
|
||||
if a in trans:
|
||||
t = trans[a]
|
||||
if callable(t):
|
||||
t, _ = t(None)
|
||||
|
||||
if t:
|
||||
res.append(prefix + t)
|
||||
else:
|
||||
res.append(prefix + a)
|
||||
return res
|
||||
|
||||
def translated_kwargs(self, kwargs):
|
||||
trans = self.translated_attrs
|
||||
res = dict()
|
||||
for k, v in kwargs.iteritems():
|
||||
if k in trans:
|
||||
t = trans[k]
|
||||
if callable(t):
|
||||
t, v = t(v)
|
||||
|
||||
if t:
|
||||
res[t] = v
|
||||
else:
|
||||
res[k] = v
|
||||
return res
|
||||
|
||||
# overridden methods
|
||||
def _clone(self, *args, **kwargs):
|
||||
c = super(TranslatingQuerySet, self)._clone(*args, **kwargs)
|
||||
c.translated_attrs = self.translated_attrs
|
||||
return c
|
||||
|
||||
def dates(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).dates(*args, **kwargs)
|
||||
|
||||
def distinct(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).distinct(*args, **kwargs)
|
||||
|
||||
def extra(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).extra(*args, **kwargs)
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).get(*args, **kwargs)
|
||||
|
||||
def get_or_create(self, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).get_or_create(**kwargs)
|
||||
|
||||
def create(self, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).create(**kwargs)
|
||||
|
||||
def filter(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).filter(*args, **kwargs)
|
||||
|
||||
def aggregate(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).aggregate(*args, **kwargs)
|
||||
|
||||
def annotate(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).annotate(*args, **kwargs)
|
||||
|
||||
def complex_filter(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).complex_filter(*args, **kwargs)
|
||||
|
||||
def exclude(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).exclude(*args, **kwargs)
|
||||
|
||||
def in_bulk(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).in_bulk(*args, **kwargs)
|
||||
|
||||
def iterator(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).iterator(*args, **kwargs)
|
||||
|
||||
def latest(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).latest(*args, **kwargs)
|
||||
|
||||
def order_by(self, *args, **kwargs):
|
||||
args = self.translated_args(args)
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).order_by(*args, **kwargs)
|
||||
|
||||
def select_related(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).select_related(*args, **kwargs)
|
||||
|
||||
def values(self, *args, **kwargs):
|
||||
args = self.translated_args(args)
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).values(*args, **kwargs)
|
||||
|
||||
def values_list(self, *args, **kwargs):
|
||||
args = self.translated_args(args)
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).values_list(*args, **kwargs)
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).update(*args, **kwargs)
|
||||
|
||||
def reverse(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).reverse(*args, **kwargs)
|
||||
|
||||
def defer(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).defer(*args, **kwargs)
|
||||
|
||||
def only(self, *args, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self).only(*args, **kwargs)
|
||||
|
||||
def _insert(self, values, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self)._insert(values, **kwargs)
|
||||
|
||||
def _update(self, values, **kwargs):
|
||||
kwargs = self.translated_kwargs(kwargs)
|
||||
return super(TranslatingQuerySet, self)._update(values, **kwargs)
|
||||
|
||||
class TranslatingManager(Manager):
|
||||
"""Translates keyword arguments for the ORM, for use in proxy
|
||||
wrapping, e.g. given trans={'foo': 'bar'} it will transform a
|
||||
lookup of the field foo to a lookup on the field bar. The right
|
||||
hand side can either be a string or a function which is called
|
||||
with the right-hand side to transform it."""
|
||||
|
||||
def __init__(self, trans, always_filter=None):
|
||||
super(TranslatingManager, self).__init__()
|
||||
self.translated_attrs = trans
|
||||
self.always_filter = always_filter
|
||||
|
||||
def get_query_set(self):
|
||||
qs = TranslatingQuerySet(self.model)
|
||||
qs.translated_attrs = self.translated_attrs
|
||||
if self.always_filter:
|
||||
qs = qs.filter(**self.always_filter)
|
||||
return qs
|
||||
|
||||
# def dates(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().dates(*args, **kwargs)
|
||||
|
||||
# def distinct(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().distinct(*args, **kwargs)
|
||||
|
||||
# def extra(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().extra(*args, **kwargs)
|
||||
|
||||
# def get(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().get(*args, **kwargs)
|
||||
|
||||
# def get_or_create(self, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().get_or_create(**kwargs)
|
||||
|
||||
# def create(self, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().create(**kwargs)
|
||||
|
||||
# def filter(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().filter(*args, **kwargs)
|
||||
|
||||
# def aggregate(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().aggregate(*args, **kwargs)
|
||||
|
||||
# def annotate(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().annotate(*args, **kwargs)
|
||||
|
||||
# def complex_filter(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().complex_filter(*args, **kwargs)
|
||||
|
||||
# def exclude(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().exclude(*args, **kwargs)
|
||||
|
||||
# def in_bulk(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().in_bulk(*args, **kwargs)
|
||||
|
||||
# def iterator(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().iterator(*args, **kwargs)
|
||||
|
||||
# def latest(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().latest(*args, **kwargs)
|
||||
|
||||
# def order_by(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().order_by(*args, **kwargs)
|
||||
|
||||
# def select_related(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().select_related(*args, **kwargs)
|
||||
|
||||
# def values(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().values(*args, **kwargs)
|
||||
|
||||
# def values_list(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().values_list(*args, **kwargs)
|
||||
|
||||
# def update(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().update(*args, **kwargs)
|
||||
|
||||
# def reverse(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().reverse(*args, **kwargs)
|
||||
|
||||
# def defer(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().defer(*args, **kwargs)
|
||||
|
||||
# def only(self, *args, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set().only(*args, **kwargs)
|
||||
|
||||
# def _insert(self, values, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return insert_query(self.model, values, **kwargs)
|
||||
|
||||
# def _update(self, values, **kwargs):
|
||||
# kwargs = self.translated_kwargs(kwargs)
|
||||
# return self.get_query_set()._update(values, **kwargs)
|
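A minimal usage sketch, not part of the commit: the proxy model, its field names and the translation table below are invented for illustration, and the exact lookup-rewriting behaviour depends on translated_kwargs(), which is defined earlier in this file and not shown here.

# Hypothetical example only: "LegacyDocument" and its columns are made up;
# TranslatingManager is assumed to be importable from the module above
# (its path is not visible in this excerpt).
from django.db import models

class LegacyDocument(models.Model):
    filename = models.CharField(max_length=255)   # legacy column name
    status = models.CharField(max_length=32)      # legacy column name

    # Per the docstring above, trans={'foo': 'bar'} turns a lookup on
    # foo into a lookup on bar, so new-style names map to legacy columns.
    objects = TranslatingManager(trans={"name": "filename",
                                        "state": "status"})

# LegacyDocument.objects.filter(name="draft-foo-bar") is then expected to
# query the filename column instead of a (non-existent) name column.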
144
redesign/unaccent.py
Normal file

@@ -0,0 +1,144 @@
# -*- coding: utf-8 -*-
# use a dynamically populated translation dictionary to remove accents
# from a string
# (by Chris Mulligan, http://chmullig.com/2009/12/python-unicode-ascii-ifier/)

import unicodedata, sys

class unaccented_map(dict):
    # Translation dictionary. Translation entries are added to this dictionary as needed.
    CHAR_REPLACEMENT = {
        0xc6: u"AE", # Æ LATIN CAPITAL LETTER AE
        0xd0: u"D",  # Ð LATIN CAPITAL LETTER ETH
        0xd8: u"OE", # Ø LATIN CAPITAL LETTER O WITH STROKE
        0xde: u"Th", # Þ LATIN CAPITAL LETTER THORN
        0xc4: u'Ae', # Ä LATIN CAPITAL LETTER A WITH DIAERESIS
        0xd6: u'Oe', # Ö LATIN CAPITAL LETTER O WITH DIAERESIS
        0xdc: u'Ue', # Ü LATIN CAPITAL LETTER U WITH DIAERESIS

        0xc0: u"A",  # À LATIN CAPITAL LETTER A WITH GRAVE
        0xc1: u"A",  # Á LATIN CAPITAL LETTER A WITH ACUTE
        0xc3: u"A",  # Ã LATIN CAPITAL LETTER A WITH TILDE
        0xc7: u"C",  # Ç LATIN CAPITAL LETTER C WITH CEDILLA
        0xc8: u"E",  # È LATIN CAPITAL LETTER E WITH GRAVE
        0xc9: u"E",  # É LATIN CAPITAL LETTER E WITH ACUTE
        0xca: u"E",  # Ê LATIN CAPITAL LETTER E WITH CIRCUMFLEX
        0xcc: u"I",  # Ì LATIN CAPITAL LETTER I WITH GRAVE
        0xcd: u"I",  # Í LATIN CAPITAL LETTER I WITH ACUTE
        0xd2: u"O",  # Ò LATIN CAPITAL LETTER O WITH GRAVE
        0xd3: u"O",  # Ó LATIN CAPITAL LETTER O WITH ACUTE
        0xd5: u"O",  # Õ LATIN CAPITAL LETTER O WITH TILDE
        0xd9: u"U",  # Ù LATIN CAPITAL LETTER U WITH GRAVE
        0xda: u"U",  # Ú LATIN CAPITAL LETTER U WITH ACUTE

        0xdf: u"ss", # ß LATIN SMALL LETTER SHARP S
        0xe6: u"ae", # æ LATIN SMALL LETTER AE
        0xf0: u"d",  # ð LATIN SMALL LETTER ETH
        0xf8: u"oe", # ø LATIN SMALL LETTER O WITH STROKE
        0xfe: u"th", # þ LATIN SMALL LETTER THORN
        0xe4: u'ae', # ä LATIN SMALL LETTER A WITH DIAERESIS
        0xf6: u'oe', # ö LATIN SMALL LETTER O WITH DIAERESIS
        0xfc: u'ue', # ü LATIN SMALL LETTER U WITH DIAERESIS

        0xe0: u"a",  # à LATIN SMALL LETTER A WITH GRAVE
        0xe1: u"a",  # á LATIN SMALL LETTER A WITH ACUTE
        0xe3: u"a",  # ã LATIN SMALL LETTER A WITH TILDE
        0xe7: u"c",  # ç LATIN SMALL LETTER C WITH CEDILLA
        0xe8: u"e",  # è LATIN SMALL LETTER E WITH GRAVE
        0xe9: u"e",  # é LATIN SMALL LETTER E WITH ACUTE
        0xea: u"e",  # ê LATIN SMALL LETTER E WITH CIRCUMFLEX
        0xec: u"i",  # ì LATIN SMALL LETTER I WITH GRAVE
        0xed: u"i",  # í LATIN SMALL LETTER I WITH ACUTE
        0xf2: u"o",  # ò LATIN SMALL LETTER O WITH GRAVE
        0xf3: u"o",  # ó LATIN SMALL LETTER O WITH ACUTE
        0xf5: u"o",  # õ LATIN SMALL LETTER O WITH TILDE
        0xf9: u"u",  # ù LATIN SMALL LETTER U WITH GRAVE
        0xfa: u"u",  # ú LATIN SMALL LETTER U WITH ACUTE

        0x2018: u"'", # ‘ LEFT SINGLE QUOTATION MARK
        0x2019: u"'", # ’ RIGHT SINGLE QUOTATION MARK
        0x201c: u'"', # “ LEFT DOUBLE QUOTATION MARK
        0x201d: u'"', # ” RIGHT DOUBLE QUOTATION MARK
    }

    # Maps a unicode character code (the key) to a replacement code
    # (either a character code or a unicode string).
    def mapchar(self, key):
        ch = self.get(key)
        if ch is not None:
            return ch
        try:
            de = unicodedata.decomposition(unichr(key))
            p1, p2 = [int(x, 16) for x in de.split(None, 1)]
            if p2 == 0x308:
                ch = self.CHAR_REPLACEMENT.get(key)
            else:
                ch = int(p1)
        except (IndexError, ValueError):
            ch = self.CHAR_REPLACEMENT.get(key, key)
        self[key] = ch
        return ch

    if sys.version >= "2.5":
        # use __missing__ where available (dict lookups only call it
        # for keys that are not already in the dictionary)
        __missing__ = mapchar
    else:
        # otherwise, use the standard __getitem__ hook (this is slower,
        # since it's called for each character)
        __getitem__ = mapchar

map = unaccented_map()

def asciify(input):
    try:
        return input.encode('ascii')
    except AttributeError:
        return str(input).encode('ascii')
    except UnicodeEncodeError:
        return unicodedata.normalize('NFKD', input.translate(map)).encode('ascii', 'replace')

text = u"""

##Norwegian
"Jo, når'n da ha gått ett stôck te, så kommer'n te e å,
å i åa ä e ö."
"Vasa", sa'n.
"Å i åa ä e ö", sa ja.
"Men va i all ti ä dä ni säjer, a, o?", sa'n.
"D'ä e å, vett ja", skrek ja, för ja ble rasen, "å i åa
ä e ö, hörer han lite, d'ä e å, å i åa ä e ö."
"A, o, ö", sa'n å dämmä geck'en.
Jo, den va nôe te dum den.

(taken from the short story "Dumt fôlk" in Gustaf Fröding's
"Räggler å paschaser på våra mål tå en bonne" (1895).

##Danish

Nu bliver Mølleren sikkert sur, og dog, han er stadig den største på verdensplan.

Userneeds A/S er en dansk virksomhed, der udfører statistiske undersøgelser på internettet. Den blev etableret i 2001 som et anpartsselskab af David Jensen og Henrik Vincentz.
Frem til 2004 var det primære fokus på at forbedre hjemmesiderne for andre virksomheder. Herefter blev fokus omlagt, så man også beskæftigede sig med statistiske målinger. Ledelsen vurderede, at dette marked ville vokse betragteligt i de kommende år, hvilket man ønskede at udnytte.
Siden omlægningen er der blevet fokuseret på at etablere meget store forbrugerpaneler. Således udgjorde det danske panel i 2005 65.000 personer og omfatter per 2008 100.000 personer.
I 2007 blev Userneeds ApS konverteret til aktieselskabet Userneeds A/S
Efterhånden er aktiviteterne blevet udvidet til de nordiske lande (med undtagelse af Island) og besidder i 2009 et forbrugerpanel med i alt mere end 250.000 personer bosat i de fire store nordiske lande.
Selskabet tegnes udadtil af en direktion på tre personer, der foruden Henrik Vincentz tæller Palle Viby Morgen og Simon Andersen.
De primære konkurrenter er andre analysebureauer som AC Nielsen, Analysedanmark, Gallup, Norstat, Synnovate og Zapera.

##Finnish
Titus Aurelius Fulvus Boionius Arrius Antoninus eli Antoninus Pius (19. syyskuuta 86 – 7. maaliskuuta 161) oli Rooman keisari vuosina 138–161. Antoninus sai lisänimensä Pius (suom. velvollisuudentuntoinen) noustuaan valtaan vuonna 138. Hän kuului Nerva–Antoninusten hallitsijasukuun ja oli suosittu ja kunnioitettu keisari, joka tunnettiin lempeydestään ja oikeamielisyydestään. Hänen valtakauttaan on usein sanottu Rooman valtakunnan kultakaudeksi, jolloin talous kukoisti, poliittinen tilanne oli vakaa ja armeija vahva. Hän hallitsi pitempään kuin yksikään Rooman keisari Augustuksen jälkeen, ja hänen kautensa tunnetaan erityisen rauhallisena, joskaan ei sodattomana. Antoninus adoptoi Marcus Aureliuksen ja Lucius Veruksen vallanperijöikseen. Hän kuoli vuonna 161.

#German
So heißt ein altes Märchen: "Der Ehre Dornenpfad", und es handelt von einem Schützen mit Namen Bryde, der wohl zu großen Ehren und Würden kam, aber nicht ohne lange und vielfältige Widerwärtigkeiten und Fährnisse des Lebens durchzumachen. Manch einer von uns hat es gewiß als Kind gehört oder es vielleicht später gelesen und dabei an seinen eigenen stillen Dornenweg und die vielen Widerwärtigkeiten gedacht. Märchen und Wirklichkeit liegen einander so nahe, aber das Märchen hat seine harmonische Lösung hier auf Erden, während die Wirklichkeit sie meist aus dem Erdenleben hinaus in Zeit und Ewigkeit verlegt.

12\xbd inch
"""

if __name__ == "__main__":
    for i, line in enumerate(text.splitlines()):
        line = line.strip()
        print line
        if line and not line.startswith('#'):
            print '\tTrans: ', asciify(line).strip()
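A short, hedged usage sketch (not part of the file above): assuming redesign/unaccent.py is importable as redesign.unaccent, asciify() should flatten accented input to plain ASCII. The names and expected outputs below are illustrative only.

# -*- coding: utf-8 -*-
# Illustration only; the expected outputs assume the translation map
# behaves as described in the comments above (umlauts map to "ae"/"oe"/"ue"
# via the 0x308 branch, other accents fall back to the base letter).
from redesign.unaccent import asciify

if __name__ == "__main__":
    print asciify(u"Jörgen Fälström")   # expected: Joergen Faelstroem
    print asciify(u"René Müller")       # expected: Rene Mueller
    print asciify("plain ascii")        # plain str input passes through unchanged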
45
redesign/util.py
Normal file

@@ -0,0 +1,45 @@
def name(obj):
    if hasattr(obj, 'abbrev'):
        return obj.abbrev()
    elif hasattr(obj, 'name'):
        if callable(obj.name):
            return obj.name()
        else:
            return unicode(obj.name)
    else:
        return unicode(obj)

def admin_link(field, label=None, ordering="", display=name, suffix=""):
    if not label:
        label = field.capitalize().replace("_", " ").strip()
    if ordering == "":
        ordering = field
    def _link(self):
        obj = self
        for attr in field.split("__"):
            obj = getattr(obj, attr)
            if callable(obj):
                obj = obj()
        if hasattr(obj, "all"):
            objects = obj.all()
        elif callable(obj):
            objects = obj()
            if not hasattr(objects, "__iter__"):
                objects = [ objects ]
        elif hasattr(obj, "__iter__"):
            objects = obj
        else:
            objects = [ obj ]
        chunks = []
        for obj in objects:
            app = obj._meta.app_label
            model = obj.__class__.__name__.lower()
            id = obj.pk
            chunks += [ u'<a href="/admin/%(app)s/%(model)s/%(id)s/%(suffix)s">%(display)s</a>' %
                        {'app': app, "model": model, "id": id, "display": display(obj), "suffix": suffix, } ]
        return u", ".join(chunks)
    _link.allow_tags = True
    _link.short_description = label
    _link.admin_order_field = ordering
    return _link
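A hedged sketch of how admin_link() might be used (the models and admin class below are hypothetical, not from this commit): the returned callable is attached where Django's list_display can find it, and the allow_tags, short_description and admin_order_field attributes set above make the column render as a link, carry a label, and stay sortable.

# Hypothetical models/admin for illustration; only admin_link itself
# comes from the file above.
from django.db import models
from django.contrib import admin
from redesign.util import admin_link

class Team(models.Model):
    name = models.CharField(max_length=64)

class Task(models.Model):
    name = models.CharField(max_length=64)
    team = models.ForeignKey(Team)

    # _link(self) receives the model instance, so the generated callable
    # can live on the model and be referenced by name in list_display.
    team_link = admin_link("team")

class TaskAdmin(admin.ModelAdmin):
    # "team_link" renders as <a href="/admin/<app>/team/<pk>/">...</a>
    # in the change list and sorts on the underlying "team" field.
    list_display = ["name", "team_link"]

admin.site.register(Task, TaskAdmin)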
@@ -111,6 +111,7 @@ table.ietf-table { border-collapse:collapse; border:1px solid #7f7f7f; }
.ietf-doctable tr.header { border-top: 1px solid #7f7f7f; border-bottom: 1px solid #7f7f7f; border-left: 1px solid white; border-right:2px solid white;}
.ietf-doctable tr.header td {padding: 6px 6px; font-weight: bold; }
.ietf-doctable table { max-width: 1200px; }
.ietf-doctable th { cursor: pointer }
.ietf-doctable th.doc, .ietf-doctable td.doc { min-width:20em; max-width: 35em; }
.ietf-doctable th.title, .ietf-doctable td.title { min-width: 20em; max-width: 35em; }
.ietf-doctable th.date, .ietf-doctable td.date { white-space:nowrap; min-width: 6em;}