feat: obviate ghostlinkd (#7336)

* wip: identify what's needed to obviate ghostlinkd

* fix: hardlink new charter files to ftp directory

* fix: hardlink new charter files to ftp directory (continued)

* chore: bring settings comment up to date

* chore: add archive and ftp dirs to setup of various environments

* fix: test charter submits write to ftp dir

* chore: remove debug

* fix: test charter approval writes to ftp dir

* fix: link review revisions into ftp dir

* fix: link to all archive and ftp on submission post

* chore: clean comments, move action to github issue

* fix: link idindex files to all archive and ftp

* chore: deflake

* chore: remove TODO comment

* fix: use settings

* chore: rename new setting
Robert Sparks 2024-04-19 16:18:52 -05:00 committed by GitHub
parent 370c3b24ed
commit cedd58f950
19 changed files with 172 additions and 54 deletions
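Taken together, these commits replace the background hardlinking that the ghostlinkd daemon used to perform with explicit links created at the moment files are written. A minimal sketch of the recurring pattern, assuming Django settings and a hypothetical helper name (this is not code from the PR itself):

```python
import os
from pathlib import Path

from django.conf import settings


def link_into_ftp(src: Path, subdir: str) -> None:
    """Hardlink a freshly written file into the rsync-served FTP tree.

    Sketch only: uses os.link (Path.hardlink_to needs python>=3.10) and
    assumes src and settings.FTP_DIR live on the same filesystem.
    """
    dest = Path(settings.FTP_DIR) / subdir / src.name
    try:
        os.link(src, dest)
    except OSError as err:
        # The PR logs and carries on rather than failing the request.
        print(f"could not link {src} to {dest}: {err}")
```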

View file

@ -60,10 +60,11 @@ CHARTER_PATH = '/assets/ietf-ftp/charter/'
BOFREQ_PATH = '/assets/ietf-ftp/bofreq/'
CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/archive/id'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH
FTP_DIR = '/assets/ftp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = '/test/staging/'

View file

@ -57,9 +57,10 @@ CHARTER_PATH = '/assets/ietf-ftp/charter/'
BOFREQ_PATH = '/assets/ietf-ftp/bofreq/'
CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
FTP_DIR = '/assets/ftp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'

View file

@ -56,9 +56,10 @@ CHARTER_PATH = '/assets/ietf-ftp/charter/'
BOFREQ_PATH = '/assets/ietf-ftp/bofreq/'
CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/ietf-ftp/internet-drafts/'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
FTP_DIR = '/assets/ftp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'

View file

@ -46,10 +46,11 @@ CHARTER_PATH = '/assets/ietf-ftp/charter/'
BOFREQ_PATH = '/assets/ietf-ftp/bofreq/'
CONFLICT_REVIEW_PATH = '/assets/ietf-ftp/conflict-reviews/'
STATUS_CHANGE_PATH = '/assets/ietf-ftp/status-changes/'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/archive/id'
INTERNET_DRAFT_ARCHIVE_DIR = '/assets/collection/draft-archive'
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/assets/archive/id'
BIBXML_BASE_PATH = '/assets/ietfdata/derived/bibxml'
IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH
FTP_DIR = '/assets/ftp'
NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'

View file

@ -9,6 +9,8 @@ for sub in \
test/wiki/ietf \
data/nomcom_keys/public_keys \
/assets/archive/id \
/assets/collection \
/assets/collection/draft-archive \
/assets/ietf-ftp \
/assets/ietf-ftp/bofreq \
/assets/ietf-ftp/charter \
@ -33,6 +35,10 @@ for sub in \
/assets/www6/iesg \
/assets/www6/iesg/evaluation \
/assets/media/photo \
/assets/ftp \
/assets/ftp/charter \
/assets/ftp/internet-drafts \
/assets/ftp/review \
; do
if [ ! -d "$sub" ]; then
echo "Creating dir $sub"

View file

@ -139,6 +139,9 @@ def move_draft_files_to_archive(doc, rev):
if os.path.exists(src):
try:
# ghostlinkd would keep this in the combined all archive since it would
# be sourced from a different place. But when ghostlinkd is removed, nothing
# new is needed here - the file will already exist in the combined archive
shutil.move(src, dst)
except IOError as e:
if "No such file or directory" in str(e):
@ -213,6 +216,10 @@ def clean_up_draft_files():
filename, revision = match.groups()
def move_file_to(subdir):
# Similar to move_draft_files_to_archive
# ghostlinkd would keep this in the combined all archive since it would
# be sourced from a different place. But when ghostlinkd is removed, nothing
# new is needed here - the file will already exist in the combined archive
shutil.move(path,
os.path.join(settings.INTERNET_DRAFT_ARCHIVE_DIR, subdir, basename))
@ -229,4 +236,5 @@ def clean_up_draft_files():
move_file_to("")
except Document.DoesNotExist:
# All uses of this past 2014 seem related to major system failures.
move_file_to("unknown_ids")

View file

@ -142,6 +142,7 @@ class DocumentInfo(models.Model):
if self.is_dochistory():
self._cached_file_path = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR
else:
# This could be simplified since anything in INTERNET_DRAFT_PATH is also already in INTERNET_ALL_DRAFTS_ARCHIVE_DIR
draft_state = self.get_state('draft')
if draft_state and draft_state.slug == 'active':
self._cached_file_path = settings.INTERNET_DRAFT_PATH

View file

@ -87,6 +87,10 @@ class ViewCharterTests(TestCase):
class EditCharterTests(TestCase):
settings_temp_path_overrides = TestCase.settings_temp_path_overrides + ['CHARTER_PATH']
def setUp(self):
super().setUp()
(Path(settings.FTP_DIR)/"charter").mkdir()
def write_charter_file(self, charter):
(Path(settings.CHARTER_PATH) / f"{charter.name}-{charter.rev}.txt").write_text("This is a charter.")
@ -506,13 +510,16 @@ class EditCharterTests(TestCase):
self.assertEqual(charter.rev, next_revision(prev_rev))
self.assertTrue("new_revision" in charter.latest_event().type)
file_contents = (
Path(settings.CHARTER_PATH) / (charter.name + "-" + charter.rev + ".txt")
).read_text("utf-8")
charter_path = Path(settings.CHARTER_PATH) / (charter.name + "-" + charter.rev + ".txt")
file_contents = (charter_path).read_text("utf-8")
self.assertEqual(
file_contents,
"Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode("utf-8"),
)
ftp_charter_path = Path(settings.FTP_DIR) / "charter" / charter_path.name
self.assertTrue(ftp_charter_path.exists())
self.assertTrue(charter_path.samefile(ftp_charter_path))
def test_submit_initial_charter(self):
group = GroupFactory(type_id='wg',acronym='mars',list_email='mars-wg@ietf.org')
@ -808,9 +815,11 @@ class EditCharterTests(TestCase):
self.assertTrue(not charter.ballot_open("approve"))
self.assertEqual(charter.rev, "01")
self.assertTrue(
(Path(settings.CHARTER_PATH) / ("charter-ietf-%s-%s.txt" % (group.acronym, charter.rev))).exists()
)
charter_path = Path(settings.CHARTER_PATH) / ("charter-ietf-%s-%s.txt" % (group.acronym, charter.rev))
charter_ftp_path = Path(settings.FTP_DIR) / "charter" / charter_path.name
self.assertTrue(charter_path.exists())
self.assertTrue(charter_ftp_path.exists())
self.assertTrue(charter_path.samefile(charter_ftp_path))
self.assertEqual(len(outbox), 2)
#

View file

@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
from pathlib import Path
import datetime, os, shutil
import io
import tarfile, tempfile, mailbox
@ -47,6 +48,7 @@ class ReviewTests(TestCase):
self.review_dir = self.tempdir('review')
self.old_document_path_pattern = settings.DOCUMENT_PATH_PATTERN
settings.DOCUMENT_PATH_PATTERN = self.review_dir + "/{doc.type_id}/"
(Path(settings.FTP_DIR) / "review").mkdir()
self.review_subdir = os.path.join(self.review_dir, "review")
if not os.path.exists(self.review_subdir):
@ -57,6 +59,13 @@ class ReviewTests(TestCase):
settings.DOCUMENT_PATH_PATTERN = self.old_document_path_pattern
super().tearDown()
def verify_review_files_were_written(self, assignment, expected_content = "This is a review\nwith two lines"):
review_file = Path(self.review_subdir) / f"{assignment.review.name}.txt"
content = review_file.read_text()
self.assertEqual(content, expected_content)
review_ftp_file = Path(settings.FTP_DIR) / "review" / review_file.name
self.assertTrue(review_file.samefile(review_ftp_file))
def test_request_review(self):
doc = WgDraftFactory(group__acronym='mars',rev='01')
NewRevisionDocEventFactory(doc=doc,rev='01')
@ -830,8 +839,7 @@ class ReviewTests(TestCase):
self.assertTrue(assignment.review_request.team.acronym.lower() in assignment.review.name)
self.assertTrue(assignment.review_request.doc.rev in assignment.review.name)
with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
self.assertEqual(f.read(), "This is a review\nwith two lines")
self.verify_review_files_were_written(assignment)
self.assertEqual(len(outbox), 1)
self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"])
@ -885,8 +893,7 @@ class ReviewTests(TestCase):
completed_time_diff = timezone.now() - assignment.completed_on
self.assertLess(completed_time_diff, datetime.timedelta(seconds=10))
with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
self.assertEqual(f.read(), "This is a review\nwith two lines")
self.verify_review_files_were_written(assignment)
self.assertEqual(len(outbox), 1)
self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"])
@ -926,8 +933,7 @@ class ReviewTests(TestCase):
self.assertLess(event0_time_diff, datetime.timedelta(seconds=10))
self.assertEqual(events[1].time, datetime.datetime(2012, 12, 24, 12, 13, 14, tzinfo=DEADLINE_TZINFO))
with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
self.assertEqual(f.read(), "This is a review\nwith two lines")
self.verify_review_files_were_written(assignment)
self.assertEqual(len(outbox), 1)
self.assertIn(assignment.review_request.team.list_email, outbox[0]["To"])
@ -1013,8 +1019,7 @@ class ReviewTests(TestCase):
assignment = reload_db_objects(assignment)
self.assertEqual(assignment.state_id, "completed")
with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
self.assertEqual(f.read(), "This is a review\nwith two lines")
self.verify_review_files_were_written(assignment)
self.assertEqual(len(outbox), 0)
self.assertTrue("http://example.com" in assignment.review.external_url)
@ -1063,8 +1068,7 @@ class ReviewTests(TestCase):
self.assertEqual(assignment.reviewer, rev_role.person.role_email('reviewer'))
self.assertEqual(assignment.state_id, "completed")
with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
self.assertEqual(f.read(), "This is a review\nwith two lines")
self.verify_review_files_were_written(assignment)
self.assertEqual(len(outbox), 0)
self.assertTrue("http://example.com" in assignment.review.external_url)
@ -1172,8 +1176,9 @@ class ReviewTests(TestCase):
self.assertLess(event_time_diff, datetime.timedelta(seconds=10))
self.assertTrue('revised' in event1.desc.lower())
with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
self.assertEqual(f.read(), "This is a review\nwith two lines")
# See https://github.com/ietf-tools/datatracker/issues/6941
# These are _not_ getting written as a new version as intended.
self.verify_review_files_were_written(assignment)
self.assertEqual(len(outbox), 0)
@ -1200,6 +1205,8 @@ class ReviewTests(TestCase):
# Ensure that a new event was created for the new revision (#2590)
self.assertNotEqual(event1.id, event2.id)
self.verify_review_files_were_written(assignment, "This is a revised review")
self.assertEqual(len(outbox), 0)
def test_edit_comment(self):

View file

@ -92,11 +92,31 @@ def change_group_state_after_charter_approval(group, by):
def fix_charter_revision_after_approval(charter, by):
# according to spec, 00-02 becomes 01, so copy file and record new revision
try:
old = os.path.join(charter.get_file_path(), '%s-%s.txt' % (charter.name, charter.rev))
new = os.path.join(charter.get_file_path(), '%s-%s.txt' % (charter.name, next_approved_revision(charter.rev)))
old = os.path.join(
charter.get_file_path(), "%s-%s.txt" % (charter.name, charter.rev)
)
new = os.path.join(
charter.get_file_path(),
"%s-%s.txt" % (charter.name, next_approved_revision(charter.rev)),
)
shutil.copy(old, new)
except IOError:
log("There was an error copying %s to %s" % (old, new))
# Also provide a copy to the legacy ftp source directory, which is served by rsync
# This replaces the hardlink copy that ghostlinkd has made in the past
# Still using a hardlink as long as these are on the same filesystem.
# Staying with os.path vs pathlib.Path until we get to python>=3.10.
charter_dir = os.path.join(settings.FTP_DIR, "charter")
ftp_filepath = os.path.join(
charter_dir, "%s-%s.txt" % (charter.name, next_approved_revision(charter.rev))
)
try:
os.link(new, ftp_filepath)
except IOError:
log(
"There was an error creating a harlink at %s pointing to %s"
% (ftp_filepath, new)
)
events = []
e = NewRevisionDocEvent(doc=charter, by=by, type="new_revision")
@ -108,6 +128,7 @@ def fix_charter_revision_after_approval(charter, by):
charter.rev = e.rev
charter.save_with_history(events)
def historic_milestones_for_charter(charter, rev):
"""Return GroupMilestone/GroupMilestoneHistory objects for charter
document at rev by looking through the history."""
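For reference, once python>=3.10 is available the os.link calls flagged above could presumably be replaced with Path.hardlink_to; a sketch, not part of this change:

```python
from pathlib import Path


def link_into_ftp_py310(new: Path, ftp_dir: Path) -> None:
    # Path.hardlink_to(target) makes *this* path a hard link to target;
    # note the argument order is the reverse of os.link(src, dst).
    (ftp_dir / new.name).hardlink_to(new)
```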

View file

@ -4,6 +4,7 @@
import datetime
import json
import os
import textwrap
from pathlib import Path
@ -42,7 +43,7 @@ from ietf.ietfauth.utils import has_role, role_required
from ietf.name.models import GroupStateName
from ietf.person.models import Person
from ietf.utils.history import find_history_active_at
from ietf.utils.log import assertion
from ietf.utils.log import assertion, log
from ietf.utils.mail import send_mail_preformatted
from ietf.utils.textupload import get_cleaned_text_file_content
from ietf.utils.response import permission_denied
@ -443,6 +444,18 @@ def submit(request, name, option=None):
destination.write(form.cleaned_data["txt"])
else:
destination.write(form.cleaned_data["content"])
# Also provide a copy to the legacy ftp source directory, which is served by rsync
# This replaces the hardlink copy that ghostlinkd has made in the past
# Still using a hardlink as long as these are on the same filesystem.
ftp_filename = Path(settings.FTP_DIR) / "charter" / charter_filename.name
try:
os.link(charter_filename, ftp_filename) # os.link until we are on python>=3.10
except IOError:
log(
"There was an error creating a hardlink at %s pointing to %s"
% (ftp_filename, charter_filename)
)
if option in ["initcharter", "recharter"] and charter.ad == None:
charter.ad = getattr(group.ad_role(), "person", None)

View file

@ -831,6 +831,9 @@ def restore_draft_file(request, draft):
log.log("Resurrecting %s. Moving files:" % draft.name)
for file in files:
try:
# ghostlinkd would keep this in the combined all archive since it would
# be sourced from a different place. But when ghostlinkd is removed, nothing
# new is needed here - the file will already exist in the combined archive
shutil.move(file, settings.INTERNET_DRAFT_PATH)
log.log(" Moved file %s to %s" % (file, settings.INTERNET_DRAFT_PATH))
except shutil.Error as ex:

View file

@ -2,11 +2,11 @@
# -*- coding: utf-8 -*-
import io
import itertools
import json
import os
import datetime
from pathlib import Path
import requests
import email.utils
@ -803,9 +803,13 @@ def complete_review(request, name, assignment_id=None, acronym=None):
else:
content = form.cleaned_data['review_content']
filename = os.path.join(review.get_file_path(), '{}.txt'.format(review.name))
with io.open(filename, 'w', encoding='utf-8') as destination:
destination.write(content)
review_path = Path(review.get_file_path()) / f"{review.name}.txt"
review_path.write_text(content)
review_ftp_path = Path(settings.FTP_DIR) / "review" / review_path.name
# See https://github.com/ietf-tools/datatracker/issues/6941 - when that's
# addressed, making this link should not be conditional
if not review_ftp_path.exists():
os.link(review_path, review_ftp_path) # switch this to Path.hardlink when python>=3.10 is available
completion_datetime = timezone.now()
if "completion_date" in form.cleaned_data:

View file

@ -2,6 +2,7 @@
#
# Celery task definitions
#
import os
import shutil
import debug # pyflakes:ignore
@ -10,6 +11,9 @@ from celery import shared_task
from contextlib import AbstractContextManager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import List
from django.conf import settings
from .index import all_id_txt, all_id2_txt, id_index_txt
@ -26,10 +30,14 @@ class TempFileManager(AbstractContextManager):
tf.write(content)
return tf_path
def move_into_place(self, src_path: Path, dest_path: Path):
def move_into_place(self, src_path: Path, dest_path: Path, hardlink_dirs: List[Path] = []):
shutil.move(src_path, dest_path)
dest_path.chmod(0o644)
self.cleanup_list.remove(src_path)
for path in hardlink_dirs:
target = path / dest_path.name
target.unlink(missing_ok=True)
os.link(dest_path, target) # until python>=3.10
def cleanup(self):
for tf_path in self.cleanup_list:
@ -43,9 +51,11 @@ class TempFileManager(AbstractContextManager):
@shared_task
def idindex_update_task():
"""Update I-D indexes"""
id_path = Path("/a/ietfdata/doc/draft/repository")
derived_path = Path("/a/ietfdata/derived")
download_path = Path("/a/www/www6s/download")
id_path = Path(settings.INTERNET_DRAFT_PATH)
derived_path = Path(settings.DERIVED_DIR)
download_path = Path(settings.ALL_ID_DOWNLOAD_DIR)
ftp_path = Path(settings.FTP_DIR) / "internet-drafts"
all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR)
with TempFileManager("/a/tmp") as tmp_mgr:
# Generate copies of new contents
@ -69,17 +79,17 @@ def idindex_update_task():
derived_all_id2_tmpfile = tmp_mgr.make_temp_file(all_id2_content)
# Move temp files as-atomically-as-possible into place
tmp_mgr.move_into_place(all_id_tmpfile, id_path / "all_id.txt")
tmp_mgr.move_into_place(all_id_tmpfile, id_path / "all_id.txt", [ftp_path, all_archive_path])
tmp_mgr.move_into_place(derived_all_id_tmpfile, derived_path / "all_id.txt")
tmp_mgr.move_into_place(download_all_id_tmpfile, download_path / "id-all.txt")
tmp_mgr.move_into_place(id_index_tmpfile, id_path / "1id-index.txt")
tmp_mgr.move_into_place(id_index_tmpfile, id_path / "1id-index.txt", [ftp_path, all_archive_path])
tmp_mgr.move_into_place(derived_id_index_tmpfile, derived_path / "1id-index.txt")
tmp_mgr.move_into_place(download_id_index_tmpfile, download_path / "id-index.txt")
tmp_mgr.move_into_place(id_abstracts_tmpfile, id_path / "1id-abstracts.txt")
tmp_mgr.move_into_place(id_abstracts_tmpfile, id_path / "1id-abstracts.txt", [ftp_path, all_archive_path])
tmp_mgr.move_into_place(derived_id_abstracts_tmpfile, derived_path / "1id-abstracts.txt")
tmp_mgr.move_into_place(download_id_abstracts_tmpfile, download_path / "id-abstract.txt")
tmp_mgr.move_into_place(all_id2_tmpfile, id_path / "all_id2.txt")
tmp_mgr.move_into_place(all_id2_tmpfile, id_path / "all_id2.txt", [ftp_path, all_archive_path])
tmp_mgr.move_into_place(derived_all_id2_tmpfile, derived_path / "all_id2.txt")
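Because hardlink_dirs defaults to an empty list, existing callers of move_into_place keep working unchanged. A minimal usage sketch with made-up paths (and assuming the task module shown here is ietf.idindex.tasks):

```python
from pathlib import Path

from ietf.idindex.tasks import TempFileManager

# Paths are illustrative only; all directories must already exist.
with TempFileManager("/tmp") as tmp_mgr:
    tmpfile = tmp_mgr.make_temp_file("contents of all_id.txt")
    tmp_mgr.move_into_place(
        tmpfile,
        Path("/srv/id") / "all_id.txt",
        [Path("/srv/ftp/internet-drafts"), Path("/srv/archive/id")],  # also hardlinked here
    )
```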

View file

@ -188,17 +188,20 @@ class TaskTests(TestCase):
def test_temp_file_manager(self):
with TemporaryDirectory() as temp_dir:
temp_path = Path(temp_dir)
with TempFileManager(temp_path) as tfm:
path1 = tfm.make_temp_file("yay")
path2 = tfm.make_temp_file("boo") # do not keep this one
self.assertTrue(path1.exists())
self.assertTrue(path2.exists())
dest = temp_path / "yay.txt"
tfm.move_into_place(path1, dest)
# make sure things were cleaned up...
self.assertFalse(path1.exists()) # moved to dest
self.assertFalse(path2.exists()) # left behind
# check destination contents and permissions
self.assertEqual(dest.read_text(), "yay")
self.assertEqual(dest.stat().st_mode & 0o777, 0o644)
with TemporaryDirectory() as other_dir:
temp_path = Path(temp_dir)
other_path = Path(other_dir)
with TempFileManager(temp_path) as tfm:
path1 = tfm.make_temp_file("yay")
path2 = tfm.make_temp_file("boo") # do not keep this one
self.assertTrue(path1.exists())
self.assertTrue(path2.exists())
dest = temp_path / "yay.txt"
tfm.move_into_place(path1, dest, [other_path])
# make sure things were cleaned up...
self.assertFalse(path1.exists()) # moved to dest
self.assertFalse(path2.exists()) # left behind
# check destination contents and permissions
self.assertEqual(dest.read_text(), "yay")
self.assertEqual(dest.stat().st_mode & 0o777, 0o644)
self.assertTrue(dest.samefile(other_path / "yay.txt"))

View file

@ -679,11 +679,13 @@ MEETINGHOST_LOGO_PATH = AGENDA_PATH # put these in the same place as other proc
IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/'
# Move drafts to this directory when they expire
INTERNET_DRAFT_ARCHIVE_DIR = '/a/ietfdata/doc/draft/collection/draft-archive/'
# The following directory contains linked copies of all drafts, but don't
# write anything to this directory -- its content is maintained by ghostlinkd:
# The following directory contains copies of all drafts - it used to be
# a set of hardlinks maintained by ghostlinkd, but is now explicitly written to
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = '/a/ietfdata/doc/draft/archive'
MEETING_RECORDINGS_DIR = '/a/www/audio'
DERIVED_DIR = '/a/ietfdata/derived'
FTP_DIR = '/a/ftp'
ALL_ID_DOWNLOAD_DIR = '/a/www/www6s/download'
DOCUMENT_FORMAT_ALLOWLIST = ["txt", "ps", "pdf", "xml", "html", ]

View file

@ -221,6 +221,7 @@ class ManualSubmissionTests(TestCase):
class SubmitTests(BaseSubmitTestCase):
def setUp(self):
super().setUp()
(Path(settings.FTP_DIR) / "internet-drafts").mkdir()
# Submit views assume there is a "next" IETF to look for cutoff dates against
MeetingFactory(type_id='ietf', date=date_today()+datetime.timedelta(days=180))
@ -954,6 +955,24 @@ class SubmitTests(BaseSubmitTestCase):
self.assertEqual(new_revision.by.name, "Submitter Name")
self.verify_bibxml_ids_creation(draft)
repository_path = Path(draft.get_file_name())
self.assertTrue(repository_path.exists()) # Note that this doesn't check that it has the right _content_
ftp_path = Path(settings.FTP_DIR) / "internet-drafts" / repository_path.name
self.assertTrue(repository_path.samefile(ftp_path))
all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) / repository_path.name
self.assertTrue(repository_path.samefile(all_archive_path))
for ext in settings.IDSUBMIT_FILE_TYPES:
if ext == "txt":
continue
variant_path = repository_path.parent / f"{repository_path.stem}.{ext}"
if variant_path.exists():
variant_ftp_path = Path(settings.FTP_DIR) / "internet-drafts" / variant_path.name
self.assertTrue(variant_path.samefile(variant_ftp_path))
variant_all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) / variant_path.name
self.assertTrue(variant_path.samefile(variant_all_archive_path))
def test_submit_new_individual_txt(self):
self.submit_new_individual(["txt"])

View file

@ -167,7 +167,10 @@ def validate_submission_rev(name, rev):
if rev != expected:
return 'Invalid revision (revision %02d is expected)' % expected
# This is not really correct, though the edges that it doesn't cover are not likely.
# It might be better just to look in the combined archive to make sure we're not colliding with
# a thing that exists there already because it was included from an approved personal collection.
for dirname in [settings.INTERNET_DRAFT_PATH, settings.INTERNET_DRAFT_ARCHIVE_DIR, ]:
dir = pathlib.Path(dirname)
pattern = '%s-%02d.*' % (name, rev)
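A hedged sketch of the alternative the new comment suggests - checking only the combined archive, which now receives a hardlink for every accepted draft (hypothetical helper, not part of this PR):

```python
import pathlib

from django.conf import settings


def rev_exists_in_combined_archive(name: str, rev: int) -> bool:
    # Any match means this revision already exists, e.g. because it arrived
    # via an approved personal collection.
    archive = pathlib.Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR)
    return any(archive.glob('%s-%02d.*' % (name, rev)))
```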
@ -652,6 +655,10 @@ def move_files_to_repository(submission):
dest = Path(settings.IDSUBMIT_REPOSITORY_PATH) / fname
if source.exists():
move(source, dest)
all_archive_dest = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR) / dest.name
ftp_dest = Path(settings.FTP_DIR) / "internet-drafts" / dest.name
os.link(dest, all_archive_dest)
os.link(dest, ftp_dest)
elif dest.exists():
log.log("Intended to move '%s' to '%s', but found source missing while destination exists.")
elif ext in submission.file_types.split(','):

View file

@ -211,6 +211,7 @@ class TestCase(django.test.TestCase):
'INTERNET_DRAFT_ARCHIVE_DIR',
'INTERNET_DRAFT_PATH',
'BIBXML_BASE_PATH',
'FTP_DIR',
]
parser = html5lib.HTMLParser(strict=True)