* feat: basic blobstore infrastructure for dev
* refactor: (broken) attempt to put minio console behind nginx
* feat: initialize blobstore with boto3
* fix: abandon attempt to proxy minio. Use docker compose instead.
* feat: beginning of blob writes
* feat: storage utilities
* feat: test buckets
* chore: black
* chore: remove unused import
* chore: avoid f string when not needed
* fix: inform all settings files about blobstores
* fix: declare types for some settings
* ci: point to new target base
* ci: adjust test workflow
* fix: give the tests debug environment a blobstore
* fix: "better" name declarations
* ci: use devblobstore container
* chore: identify places to write to blobstorage
* chore: remove unreachable code
* feat: store materials
* feat: store statements
* feat: store status changes
* feat: store liaison attachments
* feat: store agendas provided with Interim session requests
* chore: capture TODOs
* feat: store polls and chatlogs
* chore: remove unneeded TODO
* feat: store drafts on submit and post
* fix: handle storage during doc expiration and resurrection
* fix: mirror an unlink
* chore: add/refine TODOs
* feat: store slide submissions
* fix: structure slide test correctly
* fix: correct sense of existence check
* feat: store some indexes
* feat: BlobShadowFileSystemStorage
* feat: shadow floorplans / host logos to the blob
* chore: remove unused import
* feat: strip path from blob shadow names
* feat: shadow photos / thumbs
* refactor: combine photo and photothumb blob kinds
  The photos / thumbs were already dropped in the same directory, so let's not add a distinction at this point.
* style: whitespace
* refactor: use kwargs consistently
* chore: migrations
* refactor: better deconstruct(); rebuild migrations
* fix: use new class in mack patch
* chore: add TODO
* feat: store group index documents
* chore: identify more TODO
* feat: store reviews
* fix: repair merge
* chore: remove unnecessary TODO
* feat: StoredObject metadata
* fix: deburr some debugging code
* fix: only set the deleted timestamp once
* chore: correct typo
* fix: get_or_create vs get and test
* fix: avoid the questionable is_seekable helper
* chore: capture future design consideration
* chore: blob store cfg for k8s
* chore: black
* chore: copyright
* ci: bucket name prefix option + run Black
  Adds/uses DATATRACKER_BLOB_STORE_BUCKET_PREFIX option. Other changes are just Black styling.
* ci: fix typo in bucket name expression
* chore: parameters in app-configure-blobstore
  Allows use with other blob stores.
* ci: remove verify=False option
* fix: don't return value from __init__
* feat: option to log timing of S3Storage calls
* chore: units
* fix: deleted->null when storing a file
* style: Black
* feat: log as JSON; refactor to share code; handle exceptions
* ci: add ietf_log_blob_timing option for k8s
* test: --no-manage-blobstore option for running tests
* test: use blob store settings from env, if set
* test: actually set a couple more storage opts
* feat: offswitch (#8541)
  * feat: offswitch
  * fix: apply ENABLE_BLOBSTORAGE to BlobShadowFileSystemStorage behavior
* chore: log timing of blob reads
* chore: import Config from botocore.config
* chore(deps): import boto3-stubs / botocore
  botocore is implicitly imported, but make it explicit since we refer to it directly
* chore: drop type annotation that mypy loudly ignores
* refactor: add storage methods via mixin
  Shares code between Document and DocHistory without putting it in the base DocumentInfo class, which lacks the name field. Also makes mypy happy.
* feat: add timeout / retry limit to boto client
* ci: let k8s config the timeouts via env
* chore: repair merge resolution typo
* chore: tweak settings imports
* chore: simplify k8s/settings_local.py imports

---------

Co-authored-by: Jennifer Richards <jennifer@staff.ietf.org>
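Several of the commits above concern configuring the boto3/botocore S3 client: importing Config explicitly from botocore.config, adding a timeout / retry limit to the boto client, and letting k8s set the timeouts via environment variables. Below is a minimal sketch of that kind of env-driven client setup; the BLOB_STORE_* variable names and defaults are illustrative assumptions, not the option names used by the datatracker settings.

import os

import boto3
from botocore.config import Config  # imported explicitly, as in the commit log


def make_blob_client():
    """Build an S3-compatible client with explicit timeouts and a retry limit.

    All names below (BLOB_STORE_* variables, defaults) are hypothetical; they
    only illustrate configuring timeouts and retries from the environment.
    """
    return boto3.client(
        "s3",
        endpoint_url=os.environ.get("BLOB_STORE_ENDPOINT_URL", "http://blobstore:9000"),
        config=Config(
            connect_timeout=float(os.environ.get("BLOB_STORE_CONNECT_TIMEOUT", "10")),
            read_timeout=float(os.environ.get("BLOB_STORE_READ_TIMEOUT", "60")),
            retries={"total_max_attempts": int(os.environ.get("BLOB_STORE_MAX_ATTEMPTS", "4"))},
        ),
    )


if __name__ == "__main__":
    client = make_blob_client()
    # Bucket names can carry an optional prefix (cf. DATATRACKER_BLOB_STORE_BUCKET_PREFIX).
    bucket = os.environ.get("BLOB_STORE_BUCKET_PREFIX", "") + "staging"
    client.head_bucket(Bucket=bucket)  # raises if the bucket is missing or unreachable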
# Copyright The IETF Trust 2014-2020, All Rights Reserved
# -*- coding: utf-8 -*-


# views for managing group materials (slides, ...)

import os
from pathlib import Path
import re

from django import forms
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import Http404
from django.shortcuts import render, get_object_or_404, redirect
from django.utils.html import mark_safe # type:ignore
from django.urls import reverse as urlreverse

import debug # pyflakes:ignore

from ietf.doc.models import Document, DocTypeName, DocEvent, State
from ietf.doc.models import NewRevisionDocEvent
from ietf.doc.utils import add_state_change_event, check_common_doc_name_rules
from ietf.group.models import Group
from ietf.group.utils import can_manage_materials
from ietf.utils import log
from ietf.utils.decorators import ignore_view_kwargs
from ietf.utils.meetecho import MeetechoAPIError, SlidesManager
from ietf.utils.response import permission_denied

@login_required
@ignore_view_kwargs("group_type")
def choose_material_type(request, acronym):
    group = get_object_or_404(Group, acronym=acronym)
    if not group.features.has_nonsession_materials:
        raise Http404

    return render(request, 'doc/material/choose_material_type.html', {
        'group': group,
        'material_types': DocTypeName.objects.filter(slug__in=group.features.material_types),
    })

class UploadMaterialForm(forms.Form):
    title = forms.CharField(max_length=Document._meta.get_field("title").max_length)
    name = forms.CharField(max_length=Document._meta.get_field("name").max_length)
    abstract = forms.CharField(max_length=Document._meta.get_field("abstract").max_length, widget=forms.Textarea, strip=False)
    state = forms.ModelChoiceField(State.objects.all(), empty_label=None)
    material = forms.FileField(label='File')

    def __init__(self, doc_type, action, group, doc, *args, **kwargs):
        super(UploadMaterialForm, self).__init__(*args, **kwargs)

        self.fields["state"].queryset = self.fields["state"].queryset.filter(type__slug=doc_type.slug)

        self.doc_type = doc_type
        self.action = action
        self.group = group

        if action == "new":
            self.fields["state"].widget = forms.HiddenInput()
            self.fields["state"].queryset = self.fields["state"].queryset.filter(slug="active")
            self.fields["state"].initial = self.fields["state"].queryset[0].pk
            self.fields["name"].initial = self._default_name()
        else:
            del self.fields["name"]

            self.fields["title"].initial = doc.title
            self.fields["abstract"].initial = doc.abstract
            self.fields["state"].initial = doc.get_state().pk if doc.get_state() else None
            if doc.get_state_slug() == "deleted":
                self.fields["state"].help_text = "Note: If you wish to revise this document, you may wish to change the state so it's not deleted."

            if action in ["title","state","abstract"]:
                for fieldname in ["title","state","material","abstract"]:
                    if fieldname != action:
                        del self.fields[fieldname]

        if doc_type.slug == 'procmaterials' and 'abstract' in self.fields:
            del self.fields['abstract']

    def _default_name(self):
        return "%s-%s-" % (self.doc_type.slug, self.group.acronym)

    def clean_name(self):
        name = self.cleaned_data["name"].strip().rstrip("-")

        check_common_doc_name_rules(name)

        if not re.search("^%s-%s-[a-z0-9]+" % (self.doc_type.slug, self.group.acronym), name):
            raise forms.ValidationError("The name must start with %s-%s- followed by descriptive dash-separated words." % (self.doc_type.slug, self.group.acronym))

        existing = Document.objects.filter(type=self.doc_type, name=name)
        if existing:
            url = urlreverse('ietf.doc.views_material.edit_material', kwargs={ 'name': existing[0].name, 'action': 'revise' })
            raise forms.ValidationError(mark_safe("Can't upload: %s with name %s already exists. Choose another title and name for what you're uploading or <a href=\"%s\">revise the existing %s</a>." % (self.doc_type.name, name, url, name)))

        return name

@login_required
@ignore_view_kwargs("group_type")
def edit_material(request, name=None, acronym=None, action=None, doc_type=None):
    # the materials process is not very developed, so at the moment we
    # handle everything through the same view/form

    if action == "new":
        group = get_object_or_404(Group, acronym=acronym)
        if not group.features.has_nonsession_materials:
            raise Http404

        doc = None
        document_type = get_object_or_404(DocTypeName, slug=doc_type)
    else:
        doc = get_object_or_404(Document, name=name)
        group = doc.group
        document_type = doc.type

    valid_doctypes = ['procmaterials']
    if group is not None:
        valid_doctypes.extend(['minutes','agenda','bluesheets'])
        if group.acronym=="iesg":
            valid_doctypes.append("narrativeminutes")
        valid_doctypes.extend(group.features.material_types)

    if document_type.slug not in valid_doctypes:
        raise Http404

    if not can_manage_materials(request.user, group):
        permission_denied(request, "You don't have permission to access this view")

    sessions_with_slide_title_updates = set()

    if request.method == 'POST':
        form = UploadMaterialForm(document_type, action, group, doc, request.POST, request.FILES)

        if form.is_valid():
            events = []

            if action == "new":
                doc = Document.objects.create(
                    type=document_type,
                    group=group,
                    rev="00",
                    name=form.cleaned_data["name"])

                prev_rev = None
            else:
                prev_rev = doc.rev

            prev_title = doc.title
            prev_state = doc.get_state()
            prev_abstract = doc.abstract

            if "title" in form.cleaned_data:
                doc.title = form.cleaned_data["title"]

            if "abstract" in form.cleaned_data:
                doc.abstract = form.cleaned_data["abstract"]

            if "material" in form.fields:
                if action != "new":
                    doc.rev = "%02d" % (int(doc.rev) + 1)

                f = form.cleaned_data["material"]
                file_ext = os.path.splitext(f.name)[1]

                basename = f"{doc.name}-{doc.rev}{file_ext}" # Note the lack of a . before file_ext - see os.path.splitext
                filepath = Path(doc.get_file_path()) / basename
                with filepath.open('wb+') as dest:
                    for chunk in f.chunks():
                        dest.write(chunk)
                    f.seek(0)
                    doc.store_file(basename, f)

                if not doc.meeting_related():
                    log.assertion('doc.type_id == "slides"')
                    ftp_filepath = Path(settings.FTP_DIR) / doc.type_id / basename
                    try:
                        os.link(filepath, ftp_filepath) # Path.hardlink is not available until 3.10
                    except IOError as ex:
                        log.log(
                            "There was an error creating a hardlink at %s pointing to %s: %s"
                            % (ftp_filepath, filepath, ex)
                        )

            if prev_rev != doc.rev:
                e = NewRevisionDocEvent(type="new_revision", doc=doc, rev=doc.rev)
                e.by = request.user.person
                e.desc = "New version available: <b>%s-%s</b>" % (doc.name, doc.rev)
                e.save()
                events.append(e)

            if prev_title != doc.title:
                e = DocEvent(doc=doc, rev=doc.rev, by=request.user.person, type='changed_document')
                e.desc = "Changed title to <b>%s</b>" % doc.title
                if prev_title:
                    e.desc += " from %s" % prev_title
                e.save()
                events.append(e)
                if doc.type_id == "slides":
                    for sp in doc.presentations.all():
                        sessions_with_slide_title_updates.add(sp.session)

            if prev_abstract != doc.abstract:
                e = DocEvent(doc=doc, rev=doc.rev, by=request.user.person, type='changed_document')
                e.desc = "Changed abstract to <b>%s</b>" % doc.abstract
                if prev_abstract:
                    e.desc += " from %s" % prev_abstract
                e.save()
                events.append(e)

            if "state" in form.cleaned_data and form.cleaned_data["state"] != prev_state:
                doc.set_state(form.cleaned_data["state"])
                e = add_state_change_event(doc, request.user.person, prev_state, form.cleaned_data["state"])
                events.append(e)

            if events:
                doc.save_with_history(events)

            # Call Meetecho API if any session slides titles changed
            if sessions_with_slide_title_updates and hasattr(settings, "MEETECHO_API_CONFIG"):
                sm = SlidesManager(api_config=settings.MEETECHO_API_CONFIG)
                for session in sessions_with_slide_title_updates:
                    try:
                        # SessionPresentations are unique over (session, document) so there will be no duplicates
                        sm.send_update(session)
                    except MeetechoAPIError as err:
                        log.log(f"Error in SlidesManager.send_update(): {err}")

            return redirect("ietf.doc.views_doc.document_main", name=doc.name)
    else:
        form = UploadMaterialForm(document_type, action, group, doc)

    # decide where to go if upload is canceled
    if doc:
        back_href = urlreverse('ietf.doc.views_doc.document_main', kwargs={'name': doc.name})
    else:
        back_href = urlreverse('ietf.group.views.materials', kwargs={'acronym': group.acronym})

    if document_type.slug == 'procmaterials':
        name_prefix = 'proceedings-'
    else:
        name_prefix = f'{document_type.slug}-{group.acronym}-'

    return render(request, 'doc/material/edit_material.html', {
        'group': group,
        'form': form,
        'action': action,
        'material_type': document_type,
        'name_prefix': name_prefix,
        'doc': doc,
        'doc_name': doc.name if doc else "",
        'back_href': back_href,
    })