chore: remove unneeded logging and code marked unreachable that has not been reached. (#4305)
* chore: remove unneeded logging and code marked unreachable that has not been reached.
* fix: remove noop save override
parent c95679a03b
commit 9c236ea2a2
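The removals below all follow the same pattern: a branch or call was instrumented years ago with log.unreachable('<date>') or a timing/diagnostic log, the instrumentation never fired in production, and the guarded code is now deleted. A minimal sketch of that marker pattern, using a hypothetical helper rather than the datatracker's actual ietf.utils.log implementation:

# Hypothetical sketch of the "unreachable" marker pattern being cleaned up here;
# the datatracker's real helper lives in ietf.utils.log and differs in detail.
import inspect
import logging

logger = logging.getLogger(__name__)

def unreachable(date_marked):
    """Record that a branch believed dead since date_marked was actually reached."""
    caller = inspect.stack()[1]
    logger.warning("Code marked unreachable on %s was reached at %s:%s",
                   date_marked, caller.filename, caller.lineno)

def lookup(mapping, key):
    if key in mapping:
        return mapping[key]
    unreachable("2018-12-28")   # believed dead; if this ever logs, the belief was wrong
    return None

Once such a marker has stayed silent for years (the dates in the hunks below range from 2017 to 2021), the guarded code can be removed outright, which is what this commit does.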
@@ -36,7 +36,6 @@ from ietf.ietfauth.utils import role_required
 from ietf.meeting.models import Meeting
 from ietf.stats.models import MeetingRegistration
 from ietf.utils.decorators import require_api_key
-from ietf.utils.log import log
 from ietf.utils.models import DumpInfo
 
 
@@ -154,7 +153,6 @@ def api_new_meeting_registration(request):
             if value is None and item in required_fields:
                 missing_fields.append(item)
             data[item] = value
-        log("Meeting registration notification: %s" % json.dumps(data))
         if missing_fields:
             return err(400, "Missing parameters: %s" % ', '.join(missing_fields))
         number = data['meeting']
@@ -31,7 +31,6 @@ from django.contrib import admin
 from ietf.person.models import Person
 from ietf.person.utils import (merge_persons, send_merge_notification, handle_users,
     determine_merge_order)
-from ietf.utils.log import log
 
 
 def main():
@@ -7,7 +7,6 @@ import logging
 import io
 import os
 import rfc2html
-import time
 
 from typing import Optional, TYPE_CHECKING
 from weasyprint import HTML as wpHTML
@@ -15,7 +14,6 @@ from weasyprint import HTML as wpHTML
 from django.db import models
 from django.core import checks
 from django.core.cache import caches
-from django.core.exceptions import ValidationError
 from django.core.validators import URLValidator, RegexValidator
 from django.urls import reverse as urlreverse
 from django.contrib.contenttypes.models import ContentType
@@ -221,12 +219,8 @@ class DocumentInfo(models.Model):
         if not hasattr(self, '_cached_href'):
             validator = URLValidator()
             if self.external_url and self.external_url.split(':')[0] in validator.schemes:
-                try:
-                    validator(self.external_url)
-                    return self.external_url
-                except ValidationError:
-                    log.unreachable('2018-12-28')
-                    pass
+                validator(self.external_url)
+                return self.external_url
 
             if self.type_id in settings.DOC_HREFS and self.type_id in meeting_doc_refs:
                 if self.meeting_related():
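Context for the href() hunk above: URLValidator raises ValidationError for malformed URLs, so with the try/except removed a bad external_url would now propagate instead of being swallowed; the log.unreachable('2018-12-28') marker indicates that path never fired in practice. A standalone illustration of the Django validator behavior relied on here (not datatracker code; assumes a configured Django environment):

from django.core.exceptions import ValidationError
from django.core.validators import URLValidator

validator = URLValidator()
print(validator.schemes)              # defaults to ['http', 'https', 'ftp', 'ftps']

validator("https://www.ietf.org/")    # well-formed: returns None
try:
    validator("https://not a url")    # malformed: raises ValidationError
except ValidationError:
    print("rejected")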
@@ -582,7 +576,6 @@ class DocumentInfo(models.Model):
             try:
                 pdf = wpHTML(string=html.replace('\xad','')).write_pdf(stylesheets=[io.BytesIO(b'html { font-size: 94%;}')])
             except AssertionError:
-                log.log(f'weasyprint failed with an assert on {self.name}')
                 pdf = None
             if pdf:
                 cache.set(cache_key, pdf, settings.PDFIZER_CACHE_TIME)
@@ -819,16 +812,12 @@ class Document(DocumentInfo):
         assert events, "You must always add at least one event to describe the changes in the history log"
         self.time = max(self.time, events[0].time)
 
-        mark = time.time()
         self._has_an_event_so_saving_is_allowed = True
         self.save()
         del self._has_an_event_so_saving_is_allowed
-        log.log(f'{time.time()-mark:.3f} seconds to save {self.name} Document')
 
-        mark = time.time()
         from ietf.doc.utils import save_document_in_history
         save_document_in_history(self)
-        log.log(f'{time.time()-mark:.3f} seconds to save {self.name} DocHistory')
 
     def save(self, *args, **kwargs):
         # if there's no primary key yet, we can allow the save to go
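The save_with_history() hunk above strips ad-hoc mark = time.time() / log.log(f'... seconds ...') pairs around the two save steps. If that timing is ever wanted again, the same measurement can be made locally; a small sketch using a hypothetical context manager, not part of the datatracker:

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed(label):
    """Log how long the wrapped block took, like the removed mark/log pairs."""
    mark = time.time()
    try:
        yield
    finally:
        logger.debug("%.3f seconds to %s", time.time() - mark, label)

# usage, mirroring the removed instrumentation:
#   with timed("save Document"):
#       doc.save()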
@@ -1235,11 +1224,7 @@ class DocEvent(models.Model):
 
     def __str__(self):
         return u"%s %s by %s at %s" % (self.doc.name, self.get_type_display().lower(), self.by.plain_name(), self.time)
 
-    def save(self, *args, **kwargs):
-        super(DocEvent, self).save(*args, **kwargs)
-        log.assertion('self.rev != None')
-
     class Meta:
         ordering = ['-time', '-id']
         indexes = [
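The deleted DocEvent.save() override only called super().save() and then log.assertion('self.rev != None'); with the assertion judged unnecessary, the override adds nothing, hence "remove noop save override" in the commit message. The condition is passed as a string so the helper can evaluate it in the caller's context and report failures through the logging machinery rather than crashing; a rough sketch of that idea with a simplified, hypothetical helper (the real ietf.utils.log.assertion is more elaborate):

import inspect
import logging

logger = logging.getLogger(__name__)

def assertion(expr):
    """Evaluate expr in the caller's frame and log, rather than raise, if it is false."""
    frame = inspect.stack()[1].frame
    try:
        if not eval(expr, frame.f_globals, frame.f_locals):   # diagnostic use only
            logger.error("Assertion failed: %s", expr)
    finally:
        del frame   # break the reference cycle created by holding the frame

# usage, as in the removed override:
#   assertion('self.rev != None')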
@@ -1421,4 +1406,4 @@ class BofreqEditorDocEvent(DocEvent):
 
 class BofreqResponsibleDocEvent(DocEvent):
     """ Capture the responsible leadership (IAB and IESG members) for a BOF Request """
     responsible = models.ManyToManyField('person.Person', blank=True)
@@ -35,10 +35,9 @@ from ietf.group.models import Role, Group
 from ietf.ietfauth.utils import has_role, is_authorized_in_doc_stream, is_individual_draft_author, is_bofreq_editor
 from ietf.person.models import Person
 from ietf.review.models import ReviewWish
-from ietf.utils import draft, text
+from ietf.utils import draft, log
 from ietf.utils.mail import send_mail
 from ietf.mailtrigger.utils import gather_address_lists
-from ietf.utils import log
 from ietf.utils.xmldraft import XMLDraft
 
 
@@ -396,23 +395,6 @@ def get_unicode_document_content(key, filename, codec='utf-8', errors='ignore'):
 
     return raw_content
 
-def get_document_content(key, filename, split=True, markup=True):
-    log.unreachable("2017-12-05")
-    try:
-        with io.open(filename, 'rb') as f:
-            raw_content = f.read()
-    except IOError:
-        if settings.DEBUG:
-            error = "Error; cannot read ("+filename+")"
-        else:
-            error = "Error; cannot read ("+key+")"
-        return error
-
-#     if markup:
-#         return markup_txt.markup(raw_content, split)
-#     else:
-#         return raw_content
-    return text.decode(raw_content)
 
 def tags_suffix(tags):
     return ("::" + "::".join(t.name for t in tags)) if tags else ""
@@ -37,7 +37,6 @@ from ietf.mailtrigger.forms import CcSelectForm
 from ietf.message.utils import infer_message
 from ietf.name.models import BallotPositionName, DocTypeName
 from ietf.person.models import Person
-from ietf.utils import log
 from ietf.utils.mail import send_mail_text, send_mail_preformatted
 from ietf.utils.decorators import require_api_key
 from ietf.utils.response import permission_denied
@@ -95,17 +94,6 @@ def do_undefer_ballot(request, doc):
 
     email_ballot_undeferred(request, doc, by.plain_name(), telechat_date)
 
-def position_to_ballot_choice(position):
-    log.unreachable('2018-04-25')
-    for v, label in BALLOT_CHOICES:
-        if v and getattr(position, v):
-            return v
-    return ""
-
-def position_label(position_value):
-    log.unreachable('2018-04-25')
-    return dict(BALLOT_CHOICES).get(position_value, "")
-
 # -------------------------------------------------
 class EditPositionForm(forms.Form):
     position = forms.ModelChoiceField(queryset=BallotPositionName.objects.all(), widget=forms.RadioSelect, initial="norecord", required=True)
@@ -1972,7 +1972,7 @@ def rfcdiff_latest_json(request, name, rev=None):
         if doc.name != doc.canonical_name():
             prev_rev = doc.rev
             # not sure what to do if non-numeric values come back, so at least log it
-            log.assertion('doc.rfc_number().isdigit()')
+            log.assertion('doc.rfc_number().isdigit()') # .rfc_number() is expensive...
             log.assertion('doc.rev.isdigit()')
             if int(doc.rfc_number()) in HAS_TOMBSTONE and prev_rev != '00':
                 prev_rev = f'{(int(doc.rev)-1):02d}'
@@ -40,17 +40,13 @@ def import_mailman_listinfo(verbosity=0):
         return
     mailman_export = json.loads(result.stdout)
 
-    log("Starting import of list info from Mailman")
     names = sorted(mailman_export.keys())
-    log_time("Fetched list of mailman list names")
     addr_max_length = Subscribed._meta.get_field('email').max_length
 
     subscribed = { l.name: set(l.subscribed_set.values_list('email', flat=True)) for l in List.objects.all().prefetch_related('subscribed_set') }
-    log_time("Computed dictionary of list members")
 
     for name in names:
         note("List: %s" % mailman_export[name]['internal_name'])
-        log_time("Fetched Mailman list object for %s" % name)
 
         lists = List.objects.filter(name=mailman_export[name]['real_name'])
         if lists.count() > 1:
@@ -68,12 +64,12 @@ def import_mailman_listinfo(verbosity=0):
                 dirty = True
             if dirty:
                 mmlist.save()
-                log_time(" Updated database List object for %s" % name)
         # The following calls return lowercased addresses
         if mailman_export[name]['advertised']:
             members = set(mailman_export[name]['members'])
             if not mailman_export[name]['real_name'] in subscribed:
-                log("Note: didn't find '%s' in the dictionary of subscriptions" % mailman_export[name]['real_name'])
+                # 2022-7-29: lots of these going into the logs but being ignored...
+                # log("Note: didn't find '%s' in the dictionary of subscriptions" % mailman_export[name]['real_name'])
                 continue
             known = subscribed[mailman_export[name]['real_name']]
             log_time(" Fetched known list members from database")
|
@ -82,14 +78,10 @@ def import_mailman_listinfo(verbosity=0):
|
||||||
for addr in to_remove:
|
for addr in to_remove:
|
||||||
note(" Removing subscription: %s" % (addr))
|
note(" Removing subscription: %s" % (addr))
|
||||||
old = Subscribed.objects.get(email=addr)
|
old = Subscribed.objects.get(email=addr)
|
||||||
log_time(" Fetched subscribed object")
|
|
||||||
old.lists.remove(mmlist)
|
old.lists.remove(mmlist)
|
||||||
log_time(" Removed %s from %s" % (mmlist, old))
|
|
||||||
if old.lists.count() == 0:
|
if old.lists.count() == 0:
|
||||||
note(" Removing address with no subscriptions: %s" % (addr))
|
note(" Removing address with no subscriptions: %s" % (addr))
|
||||||
old.delete()
|
old.delete()
|
||||||
log_time(" Removed %s" % old)
|
|
||||||
log_time(" Removed addresses no longer subscribed")
|
|
||||||
if to_remove:
|
if to_remove:
|
||||||
log(" Removed %s addresses from %s" % (len(to_remove), name))
|
log(" Removed %s addresses from %s" % (len(to_remove), name))
|
||||||
for addr in to_add:
|
for addr in to_add:
|
||||||
|
@@ -103,7 +95,6 @@ def import_mailman_listinfo(verbosity=0):
                     sys.stderr.write(" ** Error handling %s in %s: %s\n" % (addr, name, e))
                     continue
                 new.lists.add(mmlist)
-            log_time(" Added new addresses")
             if to_add:
                 log(" Added %s addresses to %s" % (len(to_add), name))
     log("Completed import of list info from Mailman")
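The four mailman hunks above drop per-step log_time(...) checkpoints that timed each phase of the import. The script's real log_time is not shown in this diff; a hypothetical sketch of what such a checkpoint helper usually looks like:

import logging
import time

logger = logging.getLogger(__name__)
_last_mark = time.time()

def log_time(msg):
    """Log msg together with the time elapsed since the previous checkpoint."""
    global _last_mark
    now = time.time()
    logger.debug("%s (%.3f seconds since last checkpoint)", msg, now - _last_mark)
    _last_mark = now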
@@ -47,7 +47,6 @@ from ietf.utils.validators import (
     validate_file_extension,
 )
 from ietf.utils.fields import MissingOkImageField
-from ietf.utils.log import unreachable
 
 countries = list(pytz.country_names.items())
 countries.sort(key=lambda x: x[1])
@@ -274,16 +273,13 @@ class Meeting(models.Model):
         else:
             version = len(settings.PROCEEDINGS_VERSION_CHANGES) # start assuming latest version
             mtg_number = self.get_number()
-            if mtg_number is None:
-                unreachable('2021-08-10')
-            else:
-                # Find the index of the first entry in the version change array that
-                # is >= this meeting's number. The first entry in the array is 0, so the
-                # version is always >= 1 for positive meeting numbers.
-                for vers, threshold in enumerate(settings.PROCEEDINGS_VERSION_CHANGES):
-                    if mtg_number < threshold:
-                        version = vers
-                        break
+            # Find the index of the first entry in the version change array that
+            # is >= this meeting's number. The first entry in the array is 0, so the
+            # version is always >= 1 for positive meeting numbers.
+            for vers, threshold in enumerate(settings.PROCEEDINGS_VERSION_CHANGES):
+                if mtg_number < threshold:
+                    version = vers
+                    break
             self._proceedings_format_version = version # save this for later
         return self._proceedings_format_version
 
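With the mtg_number is None branch gone (its unreachable('2021-08-10') marker never fired), the lookup above reduces to: the version is the index of the first threshold the meeting number falls below, otherwise the latest version. A worked example with hypothetical threshold values; the real ones live in settings.PROCEEDINGS_VERSION_CHANGES:

# Hypothetical thresholds: meetings below 90 -> v1, 90-109 -> v2, 110 and up -> v3.
PROCEEDINGS_VERSION_CHANGES = [0, 90, 110]

def proceedings_format_version(mtg_number):
    version = len(PROCEEDINGS_VERSION_CHANGES)   # start assuming the latest version
    for vers, threshold in enumerate(PROCEEDINGS_VERSION_CHANGES):
        if mtg_number < threshold:
            version = vers
            break
    return version

assert proceedings_format_version(72) == 1
assert proceedings_format_version(105) == 2
assert proceedings_format_version(113) == 3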
@@ -172,25 +172,6 @@ def send_notifications(meeting, groups, person):
             by=person,desc='sent scheduled notification for %s' % meeting)
 
 
-# -------------------------------------------------
-# AJAX Functions
-# -------------------------------------------------
-# def ajax_get_times(request, meeting_id, day):
-#     '''
-#     Ajax function to get timeslot times for a given day.
-#     returns JSON format response: [{id:start_time, value:start_time-end_time},...]
-#     '''
-#     # TODO strip duplicates if there are any
-#     from ietf.utils import log
-#     log.unreachable("2017-07-08")
-#     results=[]
-#     room = Room.objects.filter(meeting__number=meeting_id)[0]
-#     slots = TimeSlot.objects.filter(meeting__number=meeting_id,time__week_day=day,location=room).order_by('time')
-#     for slot in slots:
-#         d = {'id': slot.time.strftime('%H%M'), 'value': '%s-%s' % (slot.time.strftime('%H%M'), slot.end_time().strftime('%H%M'))}
-#         results.append(d)
-#
-#     return HttpResponse(json.dumps(results), content_type='application/javascript')
 
 # --------------------------------------------------
 # STANDARD VIEW FUNCTIONS
@@ -922,4 +903,4 @@ def view(request, meeting_id):
 
     return render(request, 'meetings/view.html', {
         'meeting': meeting},
     )
@@ -38,7 +38,6 @@ from ietf.meeting.models import Meeting
 from ietf.stats.models import MeetingRegistration, CountryAlias
 from ietf.stats.utils import get_aliased_affiliations, get_aliased_countries, compute_hirsch_index
 from ietf.ietfauth.utils import has_role
-from ietf.utils.log import log
 from ietf.utils.response import permission_denied
 
 def stats_index(request):
@@ -744,7 +743,8 @@ def document_stats(request, stats_type=None):
             "eu_countries": sorted(eu_countries or [], key=lambda c: c.name),
             "content_template": "stats/document_stats_{}.html".format(template_name),
         }
-        log("Cache miss for '%s'. Data size: %sk" % (cache_key, len(str(data))/1000))
+        # Logs are full of these, but nobody is using them
+        # log("Cache miss for '%s'. Data size: %sk" % (cache_key, len(str(data))/1000))
         cache.set(cache_key, data, 24*60*60)
     return render(request, "stats/document_stats.html", data)
 
@@ -996,7 +996,8 @@ def meeting_stats(request, num=None, stats_type=None):
             "eu_countries": sorted(eu_countries or [], key=lambda c: c.name),
             "content_template": "stats/meeting_stats_{}.html".format(template_name),
         }
-        log("Cache miss for '%s'. Data size: %sk" % (cache_key, len(str(data))/1000))
+        # Logs are full of these, but nobody is using them...
+        # log("Cache miss for '%s'. Data size: %sk" % (cache_key, len(str(data))/1000))
         cache.set(cache_key, data, 24*60*60)
     #
     return render(request, "stats/meeting_stats.html", data)
@@ -288,6 +288,7 @@ def find_submission_filenames(draft):
 
 @transaction.atomic
 def post_submission(request, submission, approved_doc_desc, approved_subm_desc):
+    # This is very chatty into the logs, but these could still be useful for quick diagnostics
     log.log(f"{submission.name}: start")
     system = Person.objects.get(name="(System)")
     submitter_parsed = submission.submitter_parsed()
@@ -589,15 +590,10 @@ def ensure_person_email_info_exists(name, email, docname):
     else:
         person.name_from_draft = name
 
-    # make sure we have an email address
-    if addr and (addr.startswith('unknown-email-') or is_valid_email(addr)):
-        active = True
-        addr = addr.lower()
-    else:
-        log.unreachable('2019-10-02')
-        # we're in trouble, use a fake one
-        active = False
-        addr = "unknown-email-%s" % person.plain_ascii().replace(" ", "-")
+    active = True
+    addr = addr.lower()
 
     try:
         email = person.email_set.get(address=addr)