ci: merge main to release (#7973)
Commit cdc1467eb8
@@ -30,17 +30,24 @@ const appContainer = ref(null)
// Set user theme
// --------------------------------------------------------------------

const desiredTheme = window.localStorage?.getItem('theme')
if (desiredTheme === 'dark') {
  siteStore.theme = 'dark'
} else if (desiredTheme === 'light') {
  siteStore.theme = 'light'
} else if (window.matchMedia("(prefers-color-scheme: dark)").matches) {
  siteStore.theme = 'dark'
} else {
  siteStore.theme = 'light'
function updateTheme() {
  const desiredTheme = window.localStorage?.getItem('theme')
  if (desiredTheme === 'dark') {
    siteStore.theme = 'dark'
  } else if (desiredTheme === 'light') {
    siteStore.theme = 'light'
  } else if (window.matchMedia("(prefers-color-scheme: dark)").matches) {
    siteStore.theme = 'dark'
  } else {
    siteStore.theme = 'light'
  }
}

updateTheme()

// this change event fires for either light or dark changes
window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateTheme)

// --------------------------------------------------------------------
// Handle browser resize
// --------------------------------------------------------------------
@@ -17,8 +17,8 @@ DATABASES = {
}

IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "test/id/"
IDSUBMIT_STAGING_PATH = "test/staging/"
IDSUBMIT_REPOSITORY_PATH = "/assets/ietfdata/doc/draft/repository"
IDSUBMIT_STAGING_PATH = "/assets/www6s/staging/"

AGENDA_PATH = '/assets/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH
@@ -8,7 +8,7 @@ ALLOWED_HOSTS = ['*']
from ietf.settings_postgresqldb import DATABASES # pyflakes:ignore

IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_STAGING_PATH = "test/staging/"
IDSUBMIT_STAGING_PATH = "/assets/www6s/staging/"

AGENDA_PATH = '/assets/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH

@@ -53,7 +53,7 @@ IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH
FTP_DIR = '/assets/ftp'

NOMCOM_PUBLIC_KEYS_DIR = 'data/nomcom_keys/public_keys/'
SLIDE_STAGING_PATH = 'test/staging/'
SLIDE_STAGING_PATH = '/assets/www6s/staging/'

DE_GFM_BINARY = '/usr/local/bin/de-gfm'
@@ -1,13 +1,6 @@
#!/bin/bash

for sub in \
    test/id \
    test/staging \
    test/archive \
    test/rfc \
    test/media \
    test/wiki/ietf \
    data/nomcom_keys/public_keys \
    /assets/archive/id \
    /assets/collection \
    /assets/collection/draft-archive \

@@ -27,6 +20,7 @@ for sub in \
    /assets/ietfdata/derived \
    /assets/ietfdata/derived/bibxml \
    /assets/ietfdata/derived/bibxml/bibxml-ids \
    /assets/ietfdata/doc/draft/repository \
    /assets/www6s \
    /assets/www6s/staging \
    /assets/www6s/wg-descriptions \
@@ -104,12 +104,15 @@ def notify_events(sender, instance, **kwargs):
    if not isinstance(instance, DocEvent):
        return

    if not kwargs.get("created", False):
        return  # only notify on creation

    if instance.doc.type_id != 'draft':
        return

    if getattr(instance, "skip_community_list_notification", False):
        return

    # kludge alert: queuing a celery task in response to a signal can cause unexpected attempts to
    # start a Celery task during tests. To prevent this, don't queue a celery task if we're running
    # tests.
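
A minimal caller-side sketch of the skip_community_list_notification flag introduced above (not part
of this commit; the event type and variable names are illustrative, and the pattern mirrors the test
code later in this diff): the flag is an ad-hoc attribute the signal handler checks, so setting it on
the instance before the first save() suppresses the subscriber notification.

    # hypothetical caller that records an event without notifying CommunityList subscribers
    event = DocEvent(doc=doc, rev=doc.rev, by=by, type="added_comment", desc="internal bookkeeping")
    event.skip_community_list_notification = True
    event.save()  # notify_events() sees the flag and returns before queuing the celery task
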
@@ -17,6 +17,7 @@ from ietf.community.tasks import notify_event_to_subscribers_task
import ietf.community.views
from ietf.group.models import Group
from ietf.group.utils import setup_default_community_list_for_group
from ietf.doc.factories import DocumentFactory
from ietf.doc.models import State
from ietf.doc.utils import add_state_change_event
from ietf.person.models import Person, Email, Alias

@@ -439,39 +440,45 @@ class CommunityListTests(TestCase):
        This implicitly tests that notify_events is hooked up to the post_save signal.
        """
        # Arbitrary model that's not a DocEvent
        p = PersonFactory()
        person = PersonFactory()
        mock_notify_task.reset_mock()  # clear any calls that resulted from the factories
        # be careful overriding SERVER_MODE - we do it here because the method
        # under test does not make this call when in "test" mode
        with override_settings(SERVER_MODE="not-test"):
            p.save()
            person.save()
        self.assertFalse(mock_notify_task.delay.called)

        d = DocEventFactory()
        mock_notify_task.reset_mock()  # clear any calls that resulted from the factories
        # build a DocEvent that is not yet persisted
        doc = DocumentFactory()
        d = DocEventFactory.build(by=person, doc=doc)
        # mock_notify_task.reset_mock()  # clear any calls that resulted from the factories
        # be careful overriding SERVER_MODE - we do it here because the method
        # under test does not make this call when in "test" mode
        with override_settings(SERVER_MODE="not-test"):
            d.save()
        self.assertEqual(mock_notify_task.delay.call_count, 1)
        self.assertEqual(mock_notify_task.delay.call_count, 1, "notify_task should be run on creation of DocEvent")
        self.assertEqual(mock_notify_task.delay.call_args, mock.call(event_id = d.pk))

        mock_notify_task.reset_mock()
        with override_settings(SERVER_MODE="not-test"):
            d.save()
        self.assertFalse(mock_notify_task.delay.called, "notify_task should not be run save of on existing DocEvent")

        mock_notify_task.reset_mock()
        d = DocEventFactory.build(by=person, doc=doc)
        d.skip_community_list_notification = True
        # be careful overriding SERVER_MODE - we do it here because the method
        # under test does not make this call when in "test" mode
        with override_settings(SERVER_MODE="not-test"):
            d.save()
        self.assertFalse(mock_notify_task.delay.called)
        self.assertFalse(mock_notify_task.delay.called, "notify_task should not run when skip_community_list_notification is set")

        del(d.skip_community_list_notification)
        d.doc.type_id="rfc" # not "draft"
        d.doc.save()
        d = DocEventFactory.build(by=person, doc=DocumentFactory(type_id="rfc"))
        # be careful overriding SERVER_MODE - we do it here because the method
        # under test does not make this call when in "test" mode
        with override_settings(SERVER_MODE="not-test"):
            d.save()
        self.assertFalse(mock_notify_task.delay.called)
        self.assertFalse(mock_notify_task.delay.called, "notify_task should not run on a document with type 'rfc'")

    @mock.patch("ietf.utils.mail.send_mail_text")
    def test_notify_event_to_subscribers(self, mock_send_mail_text):
@@ -34,8 +34,8 @@ def expirable_drafts(queryset=None):

    # Populate this first time through (but after django has been set up)
    if nonexpirable_states is None:
        # all IESG states except I-D Exists, AD Watching, and Dead block expiry
        nonexpirable_states = list(State.objects.filter(used=True, type="draft-iesg").exclude(slug__in=("idexists","watching", "dead")))
        # all IESG states except I-D Exists and Dead block expiry
        nonexpirable_states = list(State.objects.filter(used=True, type="draft-iesg").exclude(slug__in=("idexists", "dead")))
        # sent to RFC Editor and RFC Published block expiry (the latter
        # shouldn't be possible for an active draft, though)
        nonexpirable_states += list(State.objects.filter(used=True, type__in=("draft-stream-iab", "draft-stream-irtf", "draft-stream-ise"), slug__in=("rfc-edit", "pub")))
ietf/doc/migrations/0024_remove_ad_is_watching_states.py (new file, 121 lines)

@@ -0,0 +1,121 @@
# Copyright The IETF Trust 2024, All Rights Reserved

from django.db import migrations


def get_helper(DocHistory, RelatedDocument, RelatedDocHistory, DocumentAuthor, DocHistoryAuthor):
    """Dependency injection wrapper"""

    def save_document_in_history(doc):
        """Save a snapshot of document and related objects in the database.

        Local copy of ietf.doc.utils.save_document_in_history() to avoid depending on the
        code base in a migration.
        """

        def get_model_fields_as_dict(obj):
            return dict((field.name, getattr(obj, field.name))
                        for field in obj._meta.fields
                        if field is not obj._meta.pk)

        # copy fields
        fields = get_model_fields_as_dict(doc)
        fields["doc"] = doc
        fields["name"] = doc.name

        dochist = DocHistory(**fields)
        dochist.save()

        # copy many to many
        for field in doc._meta.many_to_many:
            if field.remote_field.through and field.remote_field.through._meta.auto_created:
                hist_field = getattr(dochist, field.name)
                hist_field.clear()
                hist_field.set(getattr(doc, field.name).all())

        # copy remaining tricky many to many
        def transfer_fields(obj, HistModel):
            mfields = get_model_fields_as_dict(item)
            # map doc -> dochist
            for k, v in mfields.items():
                if v == doc:
                    mfields[k] = dochist
            HistModel.objects.create(**mfields)

        for item in RelatedDocument.objects.filter(source=doc):
            transfer_fields(item, RelatedDocHistory)

        for item in DocumentAuthor.objects.filter(document=doc):
            transfer_fields(item, DocHistoryAuthor)

        return dochist

    return save_document_in_history


def forward(apps, schema_editor):
    """Mark watching draft-iesg state unused after removing it from Documents"""
    StateDocEvent = apps.get_model("doc", "StateDocEvent")
    Document = apps.get_model("doc", "Document")
    State = apps.get_model("doc", "State")
    StateType = apps.get_model("doc", "StateType")
    Person = apps.get_model("person", "Person")

    save_document_in_history = get_helper(
        DocHistory=apps.get_model("doc", "DocHistory"),
        RelatedDocument=apps.get_model("doc", "RelatedDocument"),
        RelatedDocHistory=apps.get_model("doc", "RelatedDocHistory"),
        DocumentAuthor=apps.get_model("doc", "DocumentAuthor"),
        DocHistoryAuthor=apps.get_model("doc", "DocHistoryAuthor"),
    )

    draft_iesg_state_type = StateType.objects.get(slug="draft-iesg")
    idexists_state = State.objects.get(type=draft_iesg_state_type, slug="idexists")
    watching_state = State.objects.get(type=draft_iesg_state_type, slug="watching")
    system_person = Person.objects.get(name="(System)")

    # Remove state from documents that currently have it
    for doc in Document.objects.filter(states=watching_state):
        assert doc.type_id == "draft"
        doc.states.remove(watching_state)
        doc.states.add(idexists_state)
        e = StateDocEvent.objects.create(
            type="changed_state",
            by=system_person,
            doc=doc,
            rev=doc.rev,
            desc=f"{draft_iesg_state_type.label} changed to <b>{idexists_state.name}</b> from {watching_state.name}",
            state_type=draft_iesg_state_type,
            state=idexists_state,
        )
        doc.time = e.time
        doc.save()
        save_document_in_history(doc)
    assert not Document.objects.filter(states=watching_state).exists()

    # Mark state as unused
    watching_state.used = False
    watching_state.save()


def reverse(apps, schema_editor):
    """Mark watching draft-iesg state as used

    Does not try to re-apply the state to Documents modified by the forward migration. This
    could be done in theory, but would either require dangerous history rewriting or add a
    lot of history junk.
    """
    State = apps.get_model("doc", "State")
    StateType = apps.get_model("doc", "StateType")
    State.objects.filter(
        type=StateType.objects.get(slug="draft-iesg"), slug="watching"
    ).update(used=True)


class Migration(migrations.Migration):

    dependencies = [
        ("doc", "0023_bofreqspamstate"),
    ]

    operations = [migrations.RunPython(forward, reverse)]
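
A minimal sketch of exercising this data migration locally (standard Django management commands
invoked from Python; the migration names are taken from the file name and dependency list above):

    from django.core.management import call_command

    # apply up to and including the new migration (runs forward())
    call_command("migrate", "doc", "0024_remove_ad_is_watching_states")
    # roll back to the previous migration (runs reverse(), which re-marks the state as used)
    call_command("migrate", "doc", "0023_bofreqspamstate")
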
@@ -184,7 +184,7 @@ def state_age_colored(doc):
    if not iesg_state:
        return ""

    if iesg_state in ["dead", "watching", "pub", "idexists"]:
    if iesg_state in ["dead", "pub", "idexists"]:
        return ""
    try:
        state_datetime = (
@@ -59,7 +59,7 @@ from ietf.meeting.models import Meeting, SessionPresentation, SchedulingEvent
from ietf.meeting.factories import ( MeetingFactory, SessionFactory, SessionPresentationFactory,
    ProceedingsMaterialFactory )

from ietf.name.models import SessionStatusName, BallotPositionName, DocTypeName
from ietf.name.models import SessionStatusName, BallotPositionName, DocTypeName, RoleName
from ietf.person.models import Person
from ietf.person.factories import PersonFactory, EmailFactory
from ietf.utils.mail import outbox, empty_outbox

@@ -1450,6 +1450,14 @@ Man Expires September 22, 2015 [Page 3]
        """Buttons for action holders should be shown when AD or secretary"""
        draft = WgDraftFactory()
        draft.action_holders.set([PersonFactory()])
        other_group = GroupFactory(type_id=draft.group.type_id)

        # create a test RoleName and put it in the docman_roles for the document group
        RoleName.objects.create(slug="wrangler", name="Wrangler", used=True)
        draft.group.features.docman_roles.append("wrangler")
        draft.group.features.save()
        wrangler = RoleFactory(group=draft.group, name_id="wrangler").person
        wrangler_of_other_group = RoleFactory(group=other_group, name_id="wrangler").person

        url = urlreverse('ietf.doc.views_doc.document_main', kwargs=dict(name=draft.name))
        edit_ah_url = urlreverse('ietf.doc.views_doc.edit_action_holders', kwargs=dict(name=draft.name))

@@ -1482,6 +1490,8 @@ Man Expires September 22, 2015 [Page 3]

        _run_test(None, False)
        _run_test('plain', False)
        _run_test(wrangler_of_other_group.user.username, False)
        _run_test(wrangler.user.username, True)
        _run_test('ad', True)
        _run_test('secretary', True)

@@ -1683,6 +1693,17 @@ class DocTestCase(TestCase):

        r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertNotContains(r, "The session for this document was cancelled.")

        SchedulingEvent.objects.create(
            session=session,
            status_id='canceled',
            by = Person.objects.get(user__username="marschairman"),
        )

        r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertContains(r, "The session for this document was cancelled.")

    def test_document_ballot(self):
        doc = IndividualDraftFactory()
@@ -559,7 +559,7 @@ class BallotWriteupsTests(TestCase):
        q = PyQuery(r.content)
        self.assertFalse(q('[class=text-danger]:contains("not completed IETF Last Call")'))

        for state_slug in ["lc", "watching", "ad-eval"]:
        for state_slug in ["lc", "ad-eval"]:
            draft.set_state(State.objects.get(type="draft-iesg",slug=state_slug))
            r = self.client.get(url)
            self.assertEqual(r.status_code, 200)
@@ -26,7 +26,7 @@ from ietf.doc.models import ( Document, DocReminder, DocEvent,
    WriteupDocEvent, DocRelationshipName, IanaExpertDocEvent )
from ietf.doc.utils import get_tags_for_stream_id, create_ballot_if_not_open
from ietf.doc.views_draft import AdoptDraftForm
from ietf.name.models import StreamName, DocTagName
from ietf.name.models import DocTagName, RoleName
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.group.models import Group, Role
from ietf.person.factories import PersonFactory, EmailFactory

@@ -471,69 +471,61 @@ class EditInfoTests(TestCase):
        self.assertIn("may not leave enough time", get_payload_text(outbox[-1]))

    def test_start_iesg_process_on_draft(self):

        draft = WgDraftFactory(
            name="draft-ietf-mars-test2",
            group__acronym='mars',
            group__acronym="mars",
            intended_std_level_id="ps",
            authors=[Person.objects.get(user__username='ad')],
        )

        url = urlreverse('ietf.doc.views_draft.edit_info', kwargs=dict(name=draft.name))
            authors=[Person.objects.get(user__username="ad")],
        )

        url = urlreverse("ietf.doc.views_draft.edit_info", kwargs=dict(name=draft.name))
        login_testing_unauthorized(self, "secretary", url)

        # normal get
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        q = PyQuery(r.content)
        self.assertEqual(len(q('form select[name=intended_std_level]')), 1)
        self.assertEqual("", q('form textarea[name=notify]')[0].value.strip())
        self.assertEqual(len(q("form select[name=intended_std_level]")), 1)
        self.assertEqual("", q("form textarea[name=notify]")[0].value.strip())

        # add
        events_before = draft.docevent_set.count()
        events_before = list(draft.docevent_set.values_list("id", flat=True))
        mailbox_before = len(outbox)

        ad = Person.objects.get(name="Areað Irector")

        r = self.client.post(url,
                             dict(intended_std_level=str(draft.intended_std_level_id),
                                  ad=ad.pk,
                                  create_in_state=State.objects.get(used=True, type="draft-iesg", slug="watching").pk,
                                  notify="test@example.com",
                                  telechat_date="",
                                  ))
        r = self.client.post(
            url,
            dict(
                intended_std_level=str(draft.intended_std_level_id),
                ad=ad.pk,
                notify="test@example.com",
                telechat_date="",
            ),
        )
        self.assertEqual(r.status_code, 302)

        draft = Document.objects.get(name=draft.name)
        self.assertEqual(draft.get_state_slug("draft-iesg"), "watching")
        self.assertEqual(draft.get_state_slug("draft-iesg"), "pub-req")
        self.assertEqual(draft.get_state_slug("draft-stream-ietf"), "sub-pub")
        self.assertEqual(draft.ad, ad)
        self.assertTrue(not draft.latest_event(TelechatDocEvent, type="scheduled_for_telechat"))
        self.assertEqual(draft.docevent_set.count(), events_before + 4)
        self.assertCountEqual(draft.action_holders.all(), [draft.ad])
        events = list(draft.docevent_set.order_by('time', 'id'))
        self.assertEqual(events[-4].type, "started_iesg_process")
        self.assertEqual(len(outbox), mailbox_before+1)
        self.assertTrue('IESG processing' in outbox[-1]['Subject'])
        self.assertTrue('draft-ietf-mars-test2@' in outbox[-1]['To'])

        # Redo, starting in publication requested to make sure WG state is also set
        draft.set_state(State.objects.get(type_id='draft-iesg', slug='idexists'))
        draft.set_state(State.objects.get(type='draft-stream-ietf',slug='writeupw'))
        draft.stream = StreamName.objects.get(slug='ietf')
        draft.action_holders.clear()
        draft.save_with_history([DocEvent.objects.create(doc=draft, rev=draft.rev, type="changed_stream", by=Person.objects.get(user__username="secretary"), desc="Test")])
        r = self.client.post(url,
                             dict(intended_std_level=str(draft.intended_std_level_id),
                                  ad=ad.pk,
                                  create_in_state=State.objects.get(used=True, type="draft-iesg", slug="pub-req").pk,
                                  notify="test@example.com",
                                  telechat_date="",
                                  ))
        self.assertEqual(r.status_code, 302)
        draft = Document.objects.get(name=draft.name)
        self.assertEqual(draft.get_state_slug('draft-iesg'),'pub-req')
        self.assertEqual(draft.get_state_slug('draft-stream-ietf'),'sub-pub')
        self.assertTrue(
            not draft.latest_event(TelechatDocEvent, type="scheduled_for_telechat")
        )
        # check that the expected events were created (don't insist on ordering)
        self.assertCountEqual(
            draft.docevent_set.exclude(id__in=events_before).values_list("type", flat=True),
            [
                "changed_action_holders",  # action holders set to AD
                "changed_document",  # WG state set to sub-pub
                "changed_document",  # AD set
                "changed_document",  # state change notice email set
                "started_iesg_process",  # IESG state is now pub-req
            ],
        )
        self.assertCountEqual(draft.action_holders.all(), [draft.ad])
        self.assertEqual(len(outbox), mailbox_before + 1)
        self.assertTrue("IESG processing" in outbox[-1]["Subject"])
        self.assertTrue("draft-ietf-mars-test2@" in outbox[-1]["To"])

    def test_edit_consensus(self):
        draft = WgDraftFactory()

@@ -750,10 +742,6 @@ class ExpireIDsTests(DraftFileMixin, TestCase):

        self.assertEqual(len(list(get_expired_drafts())), 1)

        draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="watching"))

        self.assertEqual(len(list(get_expired_drafts())), 1)

        draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="iesg-eva"))

        self.assertEqual(len(list(get_expired_drafts())), 0)
@@ -935,6 +923,7 @@ class IndividualInfoFormsTests(TestCase):
        super().setUp()
        doc = WgDraftFactory(group__acronym='mars',shepherd=PersonFactory(user__username='plain',name='Plain Man').email_set.first())
        self.docname = doc.name
        self.doc_group = doc.group

    def test_doc_change_stream(self):
        url = urlreverse('ietf.doc.views_draft.change_stream', kwargs=dict(name=self.docname))

@@ -1319,8 +1308,10 @@ class IndividualInfoFormsTests(TestCase):
        RoleFactory(name_id='techadv', person=PersonFactory(), group=doc.group)
        RoleFactory(name_id='editor', person=PersonFactory(), group=doc.group)
        RoleFactory(name_id='secr', person=PersonFactory(), group=doc.group)

        some_other_chair = RoleFactory(name_id="chair").person

        url = urlreverse('ietf.doc.views_doc.edit_action_holders', kwargs=dict(name=doc.name))
        login_testing_unauthorized(self, some_other_chair.user.username, url)  # other chair can't edit action holders
        login_testing_unauthorized(self, username, url)

        r = self.client.get(url)

@@ -1363,6 +1354,14 @@ class IndividualInfoFormsTests(TestCase):
        _test_changing_ah(doc.authors(), 'authors can do it, too')
        _test_changing_ah([], 'clear it back out')

    def test_doc_change_action_holders_as_doc_manager(self):
        # create a test RoleName and put it in the docman_roles for the document group
        RoleName.objects.create(slug="wrangler", name="Wrangler", used=True)
        self.doc_group.features.docman_roles.append("wrangler")
        self.doc_group.features.save()
        wrangler = RoleFactory(group=self.doc_group, name_id="wrangler").person
        self.do_doc_change_action_holders_test(wrangler.user.username)

    def test_doc_change_action_holders_as_secretary(self):
        self.do_doc_change_action_holders_test('secretary')

@@ -1372,9 +1371,11 @@ class IndividualInfoFormsTests(TestCase):
    def do_doc_remind_action_holders_test(self, username):
        doc = Document.objects.get(name=self.docname)
        doc.action_holders.set(PersonFactory.create_batch(3))

        some_other_chair = RoleFactory(name_id="chair").person

        url = urlreverse('ietf.doc.views_doc.remind_action_holders', kwargs=dict(name=doc.name))

        login_testing_unauthorized(self, some_other_chair.user.username, url)  # other chair can't send reminder
        login_testing_unauthorized(self, username, url)
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)

@@ -1401,6 +1402,14 @@ class IndividualInfoFormsTests(TestCase):
        self.client.post(url)
        self.assertEqual(len(outbox), 1)  # still 1

    def test_doc_remind_action_holders_as_doc_manager(self):
        # create a test RoleName and put it in the docman_roles for the document group
        RoleName.objects.create(slug="wrangler", name="Wrangler", used=True)
        self.doc_group.features.docman_roles.append("wrangler")
        self.doc_group.features.save()
        wrangler = RoleFactory(group=self.doc_group, name_id="wrangler").person
        self.do_doc_remind_action_holders_test(wrangler.user.username)

    def test_doc_remind_action_holders_as_ad(self):
        self.do_doc_remind_action_holders_test('ad')
@@ -398,8 +398,12 @@ def get_unicode_document_content(key, filename, codec='utf-8', errors='ignore'):
def tags_suffix(tags):
    return ("::" + "::".join(t.name for t in tags)) if tags else ""

def add_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_tags=None, timestamp=None):
    """Add doc event to explain that state change just happened."""

def new_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_tags=None, timestamp=None):
    """Create unsaved doc event to explain that state change just happened

    Returns None if no state change occurred.
    """
    if prev_state and new_state:
        assert prev_state.type_id == new_state.type_id

@@ -419,7 +423,22 @@ def add_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_t
        e.desc += " from %s" % (prev_state.name + tags_suffix(prev_tags))
    if timestamp:
        e.time = timestamp
    e.save()
    return e  # not saved!


def add_state_change_event(doc, by, prev_state, new_state, prev_tags=None, new_tags=None, timestamp=None):
    """Add doc event to explain that state change just happened.

    Returns None if no state change occurred.

    Note: Creating a state change DocEvent will trigger notifications to be sent to people subscribed
    to the doc via a CommunityList on its first save(). If you need to adjust the event (say, changing
    its desc) before that notification is sent, use new_state_change_event() instead and save the
    event after making your changes.
    """
    e = new_state_change_event(doc, by, prev_state, new_state, prev_tags, new_tags, timestamp)
    if e is not None:
        e.save()
    return e
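
A minimal sketch of the pattern the docstring above recommends (not part of this commit; the desc
tweak is purely illustrative): build the event unsaved with new_state_change_event(), adjust it, and
only then save it, so the CommunityList notification goes out with the final text.

    e = new_state_change_event(doc, by, prev_state, new_state)
    if e is not None:
        e.desc += " (hypothetical extra detail added before the first save)"
        e.save()  # notification to subscribers is triggered here, with the adjusted desc
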
@@ -641,7 +641,7 @@ def ballot_writeupnotes(request, name):
            existing.save()

        if "issue_ballot" in request.POST and not ballot_already_approved:
            if prev_state.slug in ['watching', 'writeupw', 'goaheadw']:
            if prev_state.slug in ['writeupw', 'goaheadw']:
                new_state = State.objects.get(used=True, type="draft-iesg", slug='iesg-eva')
                prev_tags = doc.tags.filter(slug__in=IESG_SUBSTATE_TAGS)
                doc.set_state(new_state)

@@ -708,7 +708,7 @@ def ballot_writeupnotes(request, name):
        back_url=doc.get_absolute_url(),
        ballot_issued=bool(doc.latest_event(type="sent_ballot_announcement")),
        warn_lc = not doc.docevent_set.filter(lastcalldocevent__expires__date__lt=date_today(DEADLINE_TZINFO)).exists(),
        warn_unexpected_state= prev_state if bool(prev_state.slug in ['watching', 'ad-eval', 'lc']) else None,
        warn_unexpected_state= prev_state if bool(prev_state.slug in ['ad-eval', 'lc']) else None,
        ballot_writeup_form=form,
        need_intended_status=need_intended_status,
    ))
@@ -42,6 +42,7 @@ import re
from pathlib import Path

from django.core.cache import caches
from django.core.exceptions import PermissionDenied
from django.db.models import Max
from django.http import HttpResponse, Http404, HttpResponseBadRequest
from django.shortcuts import render, get_object_or_404, redirect

@@ -403,6 +404,10 @@ def document_main(request, name, rev=None, document_html=False):

        can_edit_replaces = has_role(request.user, ("Area Director", "Secretariat", "IRTF Chair", "WG Chair", "RG Chair", "WG Secretary", "RG Secretary"))

        can_edit_action_holders = can_edit or (
            request.user.is_authenticated and group.has_role(request.user, group.features.docman_roles)
        )

        is_author = request.user.is_authenticated and doc.documentauthor_set.filter(person__user=request.user).exists()
        can_view_possibly_replaces = can_edit_replaces or is_author

@@ -582,7 +587,7 @@ def document_main(request, name, rev=None, document_html=False):
        if doc.get_state_slug() not in ["rfc", "expired"] and doc.stream_id in ("ietf",) and not snapshot:
            if iesg_state_slug == 'idexists' and can_edit:
                actions.append(("Begin IESG Processing", urlreverse('ietf.doc.views_draft.edit_info', kwargs=dict(name=doc.name)) + "?new=1"))
            elif can_edit_stream_info and (iesg_state_slug in ('idexists','watching')):
            elif can_edit_stream_info and (iesg_state_slug == 'idexists'):
                actions.append(("Submit to IESG for Publication", urlreverse('ietf.doc.views_draft.to_iesg', kwargs=dict(name=doc.name))))

        if request.user.is_authenticated and hasattr(request.user, "person"):

@@ -660,6 +665,7 @@ def document_main(request, name, rev=None, document_html=False):
            can_edit_iana_state=can_edit_iana_state,
            can_edit_consensus=can_edit_consensus,
            can_edit_replaces=can_edit_replaces,
            can_edit_action_holders=can_edit_action_holders,
            can_view_possibly_replaces=can_view_possibly_replaces,
            can_request_review=can_request_review,
            can_submit_unsolicited_review_for_teams=can_submit_unsolicited_review_for_teams,

@@ -870,6 +876,13 @@ def document_main(request, name, rev=None, document_html=False):
            and doc.group.features.has_nonsession_materials
            and doc.type_id in doc.group.features.material_types
        )

        session_statusid = None
        actual_doc = doc if isinstance(doc,Document) else doc.doc
        if actual_doc.session_set.count() == 1:
            if actual_doc.session_set.get().schedulingevent_set.exists():
                session_statusid = actual_doc.session_set.get().schedulingevent_set.order_by("-time").first().status_id

        return render(request, "doc/document_material.html",
                      dict(doc=doc,
                           top=top,

@@ -882,6 +895,7 @@ def document_main(request, name, rev=None, document_html=False):
                           can_upload = can_upload,
                           other_types=other_types,
                           presentations=presentations,
                           session_statusid=session_statusid,
                           ))
@@ -1863,11 +1877,21 @@ def edit_authors(request, name):
    })


@role_required('Area Director', 'Secretariat')
@login_required
def edit_action_holders(request, name):
    """Change the set of action holders for a doc"""
    doc = get_object_or_404(Document, name=name)

    can_edit = has_role(request.user, ("Area Director", "Secretariat")) or (
        doc.group and doc.group.has_role(request.user, doc.group.features.docman_roles)
    )
    if not can_edit:
        # Keep the list of roles in this message up-to-date with the can_edit logic
        message = "Restricted to roles: Area Director, Secretariat"
        if doc.group and doc.group.acronym != "none":
            message += f", and document managers for the {doc.group.acronym} group"
        raise PermissionDenied(message)

    if request.method == 'POST':
        form = ActionHoldersForm(request.POST)
        if form.is_valid():

@@ -1977,10 +2001,20 @@ class ReminderEmailForm(forms.Form):
        strip=True,
    )

@role_required('Area Director', 'Secretariat')
@login_required
def remind_action_holders(request, name):
    doc = get_object_or_404(Document, name=name)

    can_edit = has_role(request.user, ("Area Director", "Secretariat")) or (
        doc.group and doc.group.has_role(request.user, doc.group.features.docman_roles)
    )
    if not can_edit:
        # Keep the list of roles in this message up-to-date with the can_edit logic
        message = "Restricted to roles: Area Director, Secretariat"
        if doc.group and doc.group.acronym != "none":
            message += f", and document managers for the {doc.group.acronym} group"
        raise PermissionDenied(message)

    if request.method == 'POST':
        form = ReminderEmailForm(request.POST)
        if form.is_valid():
@@ -487,40 +487,6 @@ def change_intention(request, name):
        doc=doc,
    ))

class EditInfoForm(forms.Form):
    intended_std_level = forms.ModelChoiceField(IntendedStdLevelName.objects.filter(used=True), empty_label="(None)", required=True, label="Intended RFC status")
    area = forms.ModelChoiceField(Group.objects.filter(type="area", state="active"), empty_label="(None - individual submission)", required=False, label="Assigned to area")
    ad = forms.ModelChoiceField(Person.objects.filter(role__name="ad", role__group__state="active",role__group__type='area').order_by('name'), label="Responsible AD", empty_label="(None)", required=True)
    create_in_state = forms.ModelChoiceField(State.objects.filter(used=True, type="draft-iesg", slug__in=("pub-req", "watching")), empty_label=None, required=False)
    notify = forms.CharField(
        widget=forms.Textarea,
        max_length=1023,
        label="Notice emails",
        help_text="Separate email addresses with commas.",
        required=False,
    )
    telechat_date = forms.TypedChoiceField(coerce=lambda x: datetime.datetime.strptime(x, '%Y-%m-%d').date(), empty_value=None, required=False, widget=forms.Select(attrs={'onchange':'make_bold()'}))
    returning_item = forms.BooleanField(required=False)

    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        # if previous AD is now ex-AD, append that person to the list
        ad_pk = self.initial.get('ad')
        choices = self.fields['ad'].choices
        if ad_pk and ad_pk not in [pk for pk, name in choices]:
            self.fields['ad'].choices = list(choices) + [("", "-------"), (ad_pk, Person.objects.get(pk=ad_pk).plain_name())]

        # telechat choices
        dates = [d.date for d in TelechatDate.objects.active().order_by('date')]
        init = kwargs['initial']['telechat_date']
        if init and init not in dates:
            dates.insert(0, init)

        self.fields['telechat_date'].choices = [("", "(not on agenda)")] + [(d, d.strftime("%Y-%m-%d")) for d in dates]

        # returning item is rendered non-standard
        self.standard_fields = [x for x in self.visible_fields() if x.name not in ('returning_item',)]

def to_iesg(request,name):
    """ Submit an IETF stream document to the IESG for publication """
@@ -619,7 +585,71 @@ def to_iesg(request,name):
        notify=notify,
    ))

@role_required('Area Director','Secretariat')
class EditInfoForm(forms.Form):
    intended_std_level = forms.ModelChoiceField(
        IntendedStdLevelName.objects.filter(used=True),
        empty_label="(None)",
        required=True,
        label="Intended RFC status",
    )
    area = forms.ModelChoiceField(
        Group.objects.filter(type="area", state="active"),
        empty_label="(None - individual submission)",
        required=False,
        label="Assigned to area",
    )
    ad = forms.ModelChoiceField(
        Person.objects.filter(
            role__name="ad", role__group__state="active", role__group__type="area"
        ).order_by("name"),
        label="Responsible AD",
        empty_label="(None)",
        required=True,
    )
    notify = forms.CharField(
        widget=forms.Textarea,
        max_length=1023,
        label="Notice emails",
        help_text="Separate email addresses with commas.",
        required=False,
    )
    telechat_date = forms.TypedChoiceField(
        coerce=lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date(),
        empty_value=None,
        required=False,
        widget=forms.Select(attrs={"onchange": "make_bold()"}),
    )
    returning_item = forms.BooleanField(required=False)

    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        # if previous AD is now ex-AD, append that person to the list
        ad_pk = self.initial.get("ad")
        choices = self.fields["ad"].choices
        if ad_pk and ad_pk not in [pk for pk, name in choices]:
            self.fields["ad"].choices = list(choices) + [
                ("", "-------"),
                (ad_pk, Person.objects.get(pk=ad_pk).plain_name()),
            ]

        # telechat choices
        dates = [d.date for d in TelechatDate.objects.active().order_by("date")]
        init = kwargs["initial"]["telechat_date"]
        if init and init not in dates:
            dates.insert(0, init)

        self.fields["telechat_date"].choices = [("", "(not on agenda)")] + [
            (d, d.strftime("%Y-%m-%d")) for d in dates
        ]

        # returning item is rendered non-standard
        self.standard_fields = [
            x for x in self.visible_fields() if x.name not in ("returning_item",)
        ]


@role_required("Area Director", "Secretariat")
def edit_info(request, name):
    """Edit various Internet-Draft attributes, notifying parties as
    necessary and logging changes as document events."""
@@ -628,7 +658,8 @@ def edit_info(request, name):
        raise Http404

    new_document = False
    if doc.get_state_slug("draft-iesg") == "idexists": # FIXME: should probably receive "new document" as argument to view instead of this
    # FIXME: should probably receive "new document" as argument to view instead of this
    if doc.get_state_slug("draft-iesg") == "idexists":
        new_document = True
        doc.notify = get_initial_notify(doc)

@@ -636,34 +667,45 @@ def edit_info(request, name):
    initial_telechat_date = e.telechat_date if e else None
    initial_returning_item = bool(e and e.returning_item)

    if request.method == 'POST':
        form = EditInfoForm(request.POST,
                            initial=dict(ad=doc.ad_id,
                                         telechat_date=initial_telechat_date))
    if request.method == "POST":
        form = EditInfoForm(
            request.POST,
            initial=dict(ad=doc.ad_id, telechat_date=initial_telechat_date),
        )
        if form.is_valid():
            by = request.user.person
            pubreq_state = State.objects.get(type="draft-iesg", slug="pub-req")

            r = form.cleaned_data
            events = []

            if new_document:
                doc.set_state(r['create_in_state'])
                doc.set_state(pubreq_state)

                # Is setting the WG state here too much of a hidden side-effect?
                if r['create_in_state'].slug=='pub-req':
                    if doc.stream and doc.stream.slug=='ietf' and doc.group and doc.group.type_id == 'wg':
                        submitted_state = State.objects.get(type='draft-stream-ietf',slug='sub-pub')
                        doc.set_state(submitted_state)
                        e = DocEvent()
                        e.type = "changed_document"
                        e.by = by
                        e.doc = doc
                        e.rev = doc.rev
                        e.desc = "Working group state set to %s" % submitted_state.name
                        e.save()
                        events.append(e)
                if (
                    doc.stream
                    and doc.stream.slug == "ietf"
                    and doc.group
                    and doc.group.type_id == "wg"
                ):
                    submitted_state = State.objects.get(
                        type="draft-stream-ietf", slug="sub-pub"
                    )
                    doc.set_state(submitted_state)
                    e = DocEvent()
                    e.type = "changed_document"
                    e.by = by
                    e.doc = doc
                    e.rev = doc.rev
                    e.desc = "Working group state set to %s" % submitted_state.name
                    e.save()
                    events.append(e)

            replaces = Document.objects.filter(targets_related__source=doc, targets_related__relationship="replaces")
            replaces = Document.objects.filter(
                targets_related__source=doc,
                targets_related__relationship="replaces",
            )
            if replaces:
                # this should perhaps be somewhere else, e.g. the
                # place where the replace relationship is established?
@@ -672,7 +714,10 @@ def edit_info(request, name):
                e.by = Person.objects.get(name="(System)")
                e.doc = doc
                e.rev = doc.rev
                e.desc = "Earlier history may be found in the Comment Log for <a href=\"%s\">%s</a>" % (replaces[0], replaces[0].get_absolute_url())
                e.desc = (
                    'Earlier history may be found in the Comment Log for <a href="%s">%s</a>'
                    % (replaces[0], replaces[0].get_absolute_url())
                )
                e.save()
                events.append(e)

@@ -681,7 +726,10 @@ def edit_info(request, name):
                e.by = by
                e.doc = doc
                e.rev = doc.rev
                e.desc = "Document is now in IESG state <b>%s</b>" % doc.get_state("draft-iesg").name
                e.desc = (
                    "Document is now in IESG state <b>%s</b>"
                    % doc.get_state("draft-iesg").name
                )
                e.save()
                events.append(e)

@@ -691,9 +739,9 @@ def edit_info(request, name):
                entry = "%(attr)s changed to <b>%(new)s</b> from <b>%(old)s</b>"
                if new_document:
                    entry = "%(attr)s changed to <b>%(new)s</b>"

                return entry % dict(attr=attr, new=new, old=old)

            def diff(attr, name):
                v = getattr(doc, attr)
                if r[attr] != v:

@@ -701,9 +749,9 @@ def edit_info(request, name):
                    setattr(doc, attr, r[attr])

            # update the attributes, keeping track of what we're doing
            diff('intended_std_level', "Intended Status")
            diff('ad', "Responsible AD")
            diff('notify', "State Change Notice email list")
            diff("intended_std_level", "Intended Status")
            diff("ad", "Responsible AD")
            diff("notify", "State Change Notice email list")

            if doc.group.type_id in ("individ", "area"):
                if not r["area"]:

@@ -717,12 +765,16 @@ def edit_info(request, name):
                    doc.group = r["area"]

            for c in changes:
                events.append(DocEvent.objects.create(doc=doc, rev=doc.rev, by=by, desc=c, type="changed_document"))
                events.append(
                    DocEvent.objects.create(
                        doc=doc, rev=doc.rev, by=by, desc=c, type="changed_document"
                    )
                )

            # Todo - chase this
            e = update_telechat(request, doc, by,
                                r['telechat_date'], r['returning_item'])

            e = update_telechat(
                request, doc, by, r["telechat_date"], r["returning_item"]
            )
            if e:
                events.append(e)
@@ -730,40 +782,44 @@ def edit_info(request, name):

            if new_document:
                # If we created a new doc, update the action holders as though it
                # started in idexists and moved to its create_in_state. Do this
                # started in idexists and moved to pub-req. Do this
                # after the doc has been updated so, e.g., doc.ad is set.
                update_action_holders(
                    doc,
                    State.objects.get(type='draft-iesg', slug='idexists'),
                    r['create_in_state']
                    State.objects.get(type="draft-iesg", slug="idexists"),
                    pubreq_state,
                )

            if changes:
                email_iesg_processing_document(request, doc, changes)

            return HttpResponseRedirect(doc.get_absolute_url())
    else:
        init = dict(intended_std_level=doc.intended_std_level_id,
                    area=doc.group_id,
                    ad=doc.ad_id,
                    notify=doc.notify,
                    telechat_date=initial_telechat_date,
                    returning_item=initial_returning_item,
                    )
        init = dict(
            intended_std_level=doc.intended_std_level_id,
            area=doc.group_id,
            ad=doc.ad_id,
            notify=doc.notify,
            telechat_date=initial_telechat_date,
            returning_item=initial_returning_item,
        )

        form = EditInfoForm(initial=init)

    # optionally filter out some fields
    if not new_document:
        form.standard_fields = [x for x in form.standard_fields if x.name != "create_in_state"]
    if doc.group.type_id not in ("individ", "area"):
        form.standard_fields = [x for x in form.standard_fields if x.name != "area"]

    return render(request, 'doc/draft/edit_info.html',
                  dict(doc=doc,
                       form=form,
                       user=request.user,
                       ballot_issued=doc.latest_event(type="sent_ballot_announcement")))
    return render(
        request,
        "doc/draft/edit_info.html",
        dict(
            doc=doc,
            form=form,
            user=request.user,
            ballot_issued=doc.latest_event(type="sent_ballot_announcement"),
        ),
    )

@role_required('Area Director','Secretariat')
def request_resurrect(request, name):
@@ -757,7 +757,7 @@ def drafts_in_last_call(request):
    })

def drafts_in_iesg_process(request):
    states = State.objects.filter(type="draft-iesg").exclude(slug__in=('idexists', 'pub', 'dead', 'watching', 'rfcqueue'))
    states = State.objects.filter(type="draft-iesg").exclude(slug__in=('idexists', 'pub', 'dead', 'rfcqueue'))
    title = "Documents in IESG process"

    grouped_docs = []
@@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import datetime

from itertools import chain
from pathlib import Path

from django.db.models import Q

@@ -153,17 +154,23 @@ def can_manage_materials(user, group):
def can_manage_session_materials(user, group, session):
    return has_role(user, 'Secretariat') or (group.has_role(user, group.features.matman_roles) and not session.is_material_submission_cutoff())

# Maybe this should be cached...
def can_manage_some_groups(user):
    if not user.is_authenticated:
        return False
    authroles = set(
        chain.from_iterable(
            GroupFeatures.objects.values_list("groupman_authroles", flat=True)
        )
    )
    extra_role_qs = dict()
    for gf in GroupFeatures.objects.all():
        for authrole in gf.groupman_authroles:
            if has_role(user, authrole):
                return True
        if Role.objects.filter(name__in=gf.groupman_roles, group__type_id=gf.type_id, person__user=user).exists():
            return True
    return False
        extra_role_qs[f"{gf.type_id} groupman roles"] = Q(
            name__in=gf.groupman_roles,
            group__type_id=gf.type_id,
            group__state__in=["active", "bof", "proposed"],
        )
    return has_role(user, authroles, extra_role_qs=extra_role_qs)


def can_provide_status_update(user, group):
    if not group.features.acts_like_wg:
@@ -731,7 +731,7 @@ def dependencies(request, acronym, group_type=None):
            {
                "id": x.became_rfc().name if x.became_rfc() else x.name,
                "rfc": x.type_id == "rfc" or x.became_rfc() is not None,
                "post-wg": x.get_state_slug("draft-iesg") not in ["idexists", "watching", "dead"],
                "post-wg": x.get_state_slug("draft-iesg") not in ["idexists", "dead"],
                "expired": x.get_state_slug("draft") == "expired",
                "replaced": x.get_state_slug("draft") == "repl",
                "group": x.group.acronym if x.group and x.group.acronym != "none" else "",
@@ -51,7 +51,7 @@ def all_id_txt():
        res.append(f1 + "\t" + f2 + "\t" + f3 + "\t" + f4)

    inactive_states = ["idexists", "pub", "watching", "dead"]
    inactive_states = ["idexists", "pub", "dead"]

    excludes = list(State.objects.filter(type="draft", slug__in=["rfc","repl"]))
    includes = list(State.objects.filter(type="draft-iesg").exclude(slug__in=inactive_states))
@@ -38,9 +38,10 @@ def has_role(user, role_names, *args, **kwargs):
    """Determines whether user has any of the given standard roles
    given. Role names must be a list or, in case of a single value, a
    string."""
    if not isinstance(role_names, (list, tuple)):
        role_names = [ role_names ]

    extra_role_qs = kwargs.get("extra_role_qs", None)
    if not isinstance(role_names, (list, tuple, set)):
        role_names = [role_names]

    if not user or not user.is_authenticated:
        return False

@@ -48,7 +49,13 @@ def has_role(user, role_names, *args, **kwargs):
    if not hasattr(user, "roles_check_cache"):
        user.roles_check_cache = {}

    key = frozenset(role_names)
    keynames = set(role_names)
    if extra_role_qs:
        keynames.update(set(extra_role_qs.keys()))
    year = kwargs.get("year", None)
    if year is not None:
        keynames.add(f"nomcomyear{year}")
    key = frozenset(keynames)
    if key not in user.roles_check_cache:
        try:
            person = user.person
@@ -56,54 +63,119 @@ def has_role(user, role_names, *args, **kwargs):
            return False

        role_qs = {
            "Area Director": Q(person=person, name__in=("pre-ad", "ad"), group__type="area", group__state="active"),
            "Secretariat": Q(person=person, name="secr", group__acronym="secretariat"),
            "IAB" : Q(person=person, name="member", group__acronym="iab"),
            "IANA": Q(person=person, name="auth", group__acronym="iana"),
            "RFC Editor": Q(person=person, name="auth", group__acronym="rpc"),
            "ISE" : Q(person=person, name="chair", group__acronym="ise"),
            "IAD": Q(person=person, name="admdir", group__acronym="ietf"),
            "IETF Chair": Q(person=person, name="chair", group__acronym="ietf"),
            "IETF Trust Chair": Q(person=person, name="chair", group__acronym="ietf-trust"),
            "IRTF Chair": Q(person=person, name="chair", group__acronym="irtf"),
            "RSAB Chair": Q(person=person, name="chair", group__acronym="rsab"),
            "IAB Chair": Q(person=person, name="chair", group__acronym="iab"),
            "IAB Executive Director": Q(person=person, name="execdir", group__acronym="iab"),
            "IAB Group Chair": Q(person=person, name="chair", group__type="iab", group__state="active"),
            "IAOC Chair": Q(person=person, name="chair", group__acronym="iaoc"),
            "WG Chair": Q(person=person,name="chair", group__type="wg", group__state__in=["active","bof", "proposed"]),
            "WG Secretary": Q(person=person,name="secr", group__type="wg", group__state__in=["active","bof", "proposed"]),
            "RG Chair": Q(person=person,name="chair", group__type="rg", group__state__in=["active","proposed"]),
            "RG Secretary": Q(person=person,name="secr", group__type="rg", group__state__in=["active","proposed"]),
            "AG Secretary": Q(person=person,name="secr", group__type="ag", group__state__in=["active"]),
            "RAG Secretary": Q(person=person,name="secr", group__type="rag", group__state__in=["active"]),
            "Team Chair": Q(person=person,name="chair", group__type="team", group__state="active"),
            "Program Lead": Q(person=person,name="lead", group__type="program", group__state="active"),
            "Program Secretary": Q(person=person,name="secr", group__type="program", group__state="active"),
            "Program Chair": Q(person=person,name="chair", group__type="program", group__state="active"),
            "EDWG Chair": Q(person=person, name="chair", group__type="edwg", group__state="active"),
            "Nomcom Chair": Q(person=person, name="chair", group__type="nomcom", group__acronym__icontains=kwargs.get('year', '0000')),
            "Nomcom Advisor": Q(person=person, name="advisor", group__type="nomcom", group__acronym__icontains=kwargs.get('year', '0000')),
            "Nomcom": Q(person=person, group__type="nomcom", group__acronym__icontains=kwargs.get('year', '0000')),
            "Liaison Manager": Q(person=person,name="liaiman",group__type="sdo",group__state="active", ),
            "Authorized Individual": Q(person=person,name="auth",group__type="sdo",group__state="active", ),
            "Recording Manager": Q(person=person,name="recman",group__type="ietf",group__state="active", ),
            "Reviewer": Q(person=person, name="reviewer", group__state="active"),
            "Review Team Secretary": Q(person=person, name="secr", group__reviewteamsettings__isnull=False,group__state="active", ),
            "IRSG Member": (Q(person=person, name="member", group__acronym="irsg") | Q(person=person, name="chair", group__acronym="irtf") | Q(person=person, name="atlarge", group__acronym="irsg")),
            "RSAB Member": Q(person=person, name="member", group__acronym="rsab"),
            "Robot": Q(person=person, name="robot", group__acronym="secretariat"),
        }
"Area Director": Q(
|
||||
name__in=("pre-ad", "ad"), group__type="area", group__state="active"
|
||||
),
|
||||
"Secretariat": Q(name="secr", group__acronym="secretariat"),
|
||||
"IAB": Q(name="member", group__acronym="iab"),
|
||||
"IANA": Q(name="auth", group__acronym="iana"),
|
||||
"RFC Editor": Q(name="auth", group__acronym="rpc"),
|
||||
"ISE": Q(name="chair", group__acronym="ise"),
|
||||
"IAD": Q(name="admdir", group__acronym="ietf"),
|
||||
"IETF Chair": Q(name="chair", group__acronym="ietf"),
|
||||
"IETF Trust Chair": Q(name="chair", group__acronym="ietf-trust"),
|
||||
"IRTF Chair": Q(name="chair", group__acronym="irtf"),
|
||||
"RSAB Chair": Q(name="chair", group__acronym="rsab"),
|
||||
"IAB Chair": Q(name="chair", group__acronym="iab"),
|
||||
"IAB Executive Director": Q(name="execdir", group__acronym="iab"),
|
||||
"IAB Group Chair": Q(
|
||||
name="chair", group__type="iab", group__state="active"
|
||||
),
|
||||
"IAOC Chair": Q(name="chair", group__acronym="iaoc"),
|
||||
"WG Chair": Q(
|
||||
name="chair",
|
||||
group__type="wg",
|
||||
group__state__in=["active", "bof", "proposed"],
|
||||
),
|
||||
"WG Secretary": Q(
|
||||
name="secr",
|
||||
group__type="wg",
|
||||
group__state__in=["active", "bof", "proposed"],
|
||||
),
|
||||
"RG Chair": Q(
|
||||
name="chair", group__type="rg", group__state__in=["active", "proposed"]
|
||||
),
|
||||
"RG Secretary": Q(
|
||||
name="secr", group__type="rg", group__state__in=["active", "proposed"]
|
||||
),
|
||||
"AG Secretary": Q(
|
||||
name="secr", group__type="ag", group__state__in=["active"]
|
||||
),
|
||||
"RAG Secretary": Q(
|
||||
name="secr", group__type="rag", group__state__in=["active"]
|
||||
),
|
||||
"Team Chair": Q(name="chair", group__type="team", group__state="active"),
|
||||
"Program Lead": Q(
|
||||
name="lead", group__type="program", group__state="active"
|
||||
),
|
||||
"Program Secretary": Q(
|
||||
name="secr", group__type="program", group__state="active"
|
||||
),
|
||||
"Program Chair": Q(
|
||||
name="chair", group__type="program", group__state="active"
|
||||
),
|
||||
"EDWG Chair": Q(name="chair", group__type="edwg", group__state="active"),
|
||||
"Nomcom Chair": Q(
|
||||
name="chair",
|
||||
group__type="nomcom",
|
||||
group__acronym__icontains=kwargs.get("year", "0000"),
|
||||
),
|
||||
"Nomcom Advisor": Q(
|
||||
name="advisor",
|
||||
group__type="nomcom",
|
||||
group__acronym__icontains=kwargs.get("year", "0000"),
|
||||
),
|
||||
"Nomcom": Q(
|
||||
group__type="nomcom",
|
||||
group__acronym__icontains=kwargs.get("year", "0000"),
|
||||
),
|
||||
"Liaison Manager": Q(
|
||||
name="liaiman",
|
||||
group__type="sdo",
|
||||
group__state="active",
|
||||
),
|
||||
"Authorized Individual": Q(
|
||||
name="auth",
|
||||
group__type="sdo",
|
||||
group__state="active",
|
||||
),
|
||||
"Recording Manager": Q(
|
||||
name="recman",
|
||||
group__type="ietf",
|
||||
group__state="active",
|
||||
),
|
||||
"Reviewer": Q(name="reviewer", group__state="active"),
|
||||
"Review Team Secretary": Q(
|
||||
name="secr",
|
||||
group__reviewteamsettings__isnull=False,
|
||||
group__state="active",
|
||||
),
|
||||
"IRSG Member": (
|
||||
Q(name="member", group__acronym="irsg")
|
||||
| Q(name="chair", group__acronym="irtf")
|
||||
| Q(name="atlarge", group__acronym="irsg")
|
||||
),
|
||||
"RSAB Member": Q(name="member", group__acronym="rsab"),
|
||||
"Robot": Q(name="robot", group__acronym="secretariat"),
|
||||
}
|
||||
|
||||
filter_expr = Q(pk__in=[]) # ensure empty set is returned if no other terms are added
|
||||
filter_expr = Q(
|
||||
pk__in=[]
|
||||
) # ensure empty set is returned if no other terms are added
|
||||
for r in role_names:
|
||||
filter_expr |= role_qs[r]
|
||||
if extra_role_qs:
|
||||
for r in extra_role_qs:
|
||||
filter_expr |= extra_role_qs[r]
|
||||
|
||||
user.roles_check_cache[key] = bool(Role.objects.filter(filter_expr).exists())
|
||||
user.roles_check_cache[key] = bool(
|
||||
Role.objects.filter(person=person).filter(filter_expr).exists()
|
||||
)
|
||||
|
||||
return user.roles_check_cache[key]
|
||||
|
||||
|
||||
|
||||
# convenient decorator
|
||||
|
||||
def passes_test_decorator(test_func, message):
|
||||
|
|
|
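A note on the refactor above: the person constraint moves out of the individual Q() terms and is applied once to the final queryset, so the OR of role terms stays equivalent while the dict entries become person-agnostic. A minimal sketch of that shape (a standalone illustration assuming Django's Q and the datatracker's Role model, not the role-check helper itself):

from django.db.models import Q

from ietf.group.models import Role  # same import path used elsewhere in this commit


def matching_roles(person, role_qs, role_names):
    # Start from an always-false filter so unknown role names match nothing.
    filter_expr = Q(pk__in=[])
    for name in role_names:
        filter_expr |= role_qs[name]
    # One person filter over the whole OR replaces person=person repeated
    # inside every Q() entry of role_qs.
    return Role.objects.filter(person=person).filter(filter_expr)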
@@ -338,7 +338,19 @@ class IprDisclosureFormBase(forms.ModelForm):
        return cleaned_data


class HolderIprDisclosureForm(IprDisclosureFormBase):
    is_blanket_disclosure = forms.BooleanField(
        label=mark_safe(
            'This is a blanket IPR disclosure '
            '(see Section 5.4.3 of <a href="https://www.ietf.org/rfc/rfc8179.txt">RFC 8179</a>)'
        ),
        help_text="In satisfaction of its disclosure obligations, Patent Holder commits to license all of "
        "IPR (as defined in RFC 8179) that would have required disclosure under RFC 8179 on a "
        "royalty-free (and otherwise reasonable and non-discriminatory) basis. Patent Holder "
        "confirms that all other terms and conditions are described in this IPR disclosure.",
        required=False,
    )
    licensing = CustomModelChoiceField(IprLicenseTypeName.objects.all(),
        widget=forms.RadioSelect,empty_label=None)

@@ -356,6 +368,15 @@ class HolderIprDisclosureForm(IprDisclosureFormBase):
        else:
            # entering new disclosure
            self.fields['licensing'].queryset = IprLicenseTypeName.objects.exclude(slug='none-selected')

        if self.data.get("is_blanket_disclosure", False):
            # for a blanket disclosure, patent details are not required
            self.fields["patent_number"].required = False
            self.fields["patent_inventor"].required = False
            self.fields["patent_title"].required = False
            self.fields["patent_date"].required = False
            # n.b., self.fields["patent_notes"] is never required

    def clean(self):
        cleaned_data = super(HolderIprDisclosureForm, self).clean()
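The __init__ logic above relaxes the patent-detail requirements only when the submitted data marks the disclosure as blanket. A minimal sketch of the same pattern with a hypothetical form (not the datatracker's HolderIprDisclosureForm):

from django import forms


class PatentDetailsForm(forms.Form):
    # Hypothetical form illustrating the pattern: an unchecked checkbox is simply
    # absent from self.data, so .get(..., False) treats it as "not blanket".
    is_blanket = forms.BooleanField(required=False)
    patent_number = forms.CharField()
    patent_title = forms.CharField()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.data.get("is_blanket", False):
            self.fields["patent_number"].required = False
            self.fields["patent_title"].required = False

# e.g. PatentDetailsForm(data={"is_blanket": "on"}).is_valid() is True even
# though the patent fields are missing from the submitted data.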
@@ -0,0 +1,16 @@
# Copyright The IETF Trust 2024, All Rights Reserved
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("ipr", "0003_alter_iprdisclosurebase_docs"),
    ]

    operations = [
        migrations.AddField(
            model_name="holderiprdisclosure",
            name="is_blanket_disclosure",
            field=models.BooleanField(default=False),
        ),
    ]
@@ -3,6 +3,7 @@

from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.urls import reverse
from django.utils import timezone
@@ -124,17 +125,30 @@ class IprDisclosureBase(models.Model):

class HolderIprDisclosure(IprDisclosureBase):
    ietfer_name = models.CharField(max_length=255, blank=True) # "Whose Personal Belief Triggered..."
    ietfer_contact_email = models.EmailField(blank=True)
    ietfer_contact_info = models.TextField(blank=True)
    patent_info = models.TextField()
    has_patent_pending = models.BooleanField(default=False)
    holder_contact_email = models.EmailField()
    holder_contact_name = models.CharField(max_length=255)
    holder_contact_info = models.TextField(blank=True, help_text="Address, phone, etc.")
    licensing = ForeignKey(IprLicenseTypeName)
    licensing_comments = models.TextField(blank=True)
    ietfer_name = models.CharField(
        max_length=255, blank=True
    )  # "Whose Personal Belief Triggered..."
    ietfer_contact_email = models.EmailField(blank=True)
    ietfer_contact_info = models.TextField(blank=True)
    patent_info = models.TextField()
    has_patent_pending = models.BooleanField(default=False)
    holder_contact_email = models.EmailField()
    holder_contact_name = models.CharField(max_length=255)
    holder_contact_info = models.TextField(blank=True, help_text="Address, phone, etc.")
    licensing = ForeignKey(IprLicenseTypeName)
    licensing_comments = models.TextField(blank=True)
    submitter_claims_all_terms_disclosed = models.BooleanField(default=False)
    is_blanket_disclosure = models.BooleanField(default=False)

    def clean(self):
        if self.is_blanket_disclosure:
            # If the IprLicenseTypeName does not exist, we have a serious problem and a 500 response is ok,
            # so not handling failure of the `get()`
            royalty_free_licensing = IprLicenseTypeName.objects.get(slug="royalty-free")
            if self.licensing_id != royalty_free_licensing.pk:
                raise ValidationError(
                    f'Must select "{royalty_free_licensing.desc}" for a blanket IPR disclosure.')


class ThirdPartyIprDisclosure(IprDisclosureBase):
    ietfer_name = models.CharField(max_length=255) # "Whose Personal Belief Triggered..."
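Given the clean() above, a blanket disclosure that selects any licensing other than royalty-free is rejected at validation time. A hedged sketch of how that surfaces (names taken from the diff; the IprLicenseTypeName rows are assumed to exist, as they do in the datatracker's name data):

from django.core.exceptions import ValidationError

disclosure = HolderIprDisclosure(
    is_blanket_disclosure=True,
    licensing=IprLicenseTypeName.objects.get(slug="reasonable"),
)
try:
    disclosure.clean()  # clean() is also run by full_clean() during validation
except ValidationError as err:
    print(err.messages)  # e.g. 'Must select "..." for a blanket IPR disclosure.'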
@@ -33,7 +33,7 @@ from ietf.ipr.factories import (
    IprDocRelFactory,
    IprEventFactory
)
from ietf.ipr.forms import DraftForm
from ietf.ipr.forms import DraftForm, HolderIprDisclosureForm
from ietf.ipr.mail import (process_response_email, get_reply_to, get_update_submitter_emails,
    get_pseudo_submitter, get_holders, get_update_cc_addrs)
from ietf.ipr.models import (IprDisclosureBase,GenericIprDisclosure,HolderIprDisclosure,
@@ -272,16 +272,16 @@ class IprTests(TestCase):

    def test_new_generic(self):
        """Ensure new-generic redirects to new-general"""
        url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "generic" })
        url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "generic" })
        r = self.client.get(url)
        self.assertEqual(r.status_code,302)
        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.ipr.views.new", kwargs={ "type": "general"}))
        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.ipr.views.new", kwargs={ "_type": "general"}))

    def test_new_general(self):
        """Add a new general disclosure. Note: submitter does not need to be logged in.
        """
        url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "general" })
        url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "general" })

        # invalid post
        r = self.client.post(url, {

@@ -319,7 +319,7 @@ class IprTests(TestCase):
        """
        draft = WgDraftFactory()
        rfc = WgRfcFactory()
        url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" })
        url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" })

        # successful post
        empty_outbox()

@@ -375,7 +375,7 @@ class IprTests(TestCase):
    def test_new_specific_no_revision(self):
        draft = WgDraftFactory()
        rfc = WgRfcFactory()
        url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" })
        url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" })

        # successful post
        empty_outbox()

@@ -409,7 +409,7 @@ class IprTests(TestCase):
        """
        draft = WgDraftFactory()
        rfc = WgRfcFactory()
        url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "third-party" })
        url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "third-party" })

        # successful post
        empty_outbox()

@@ -456,7 +456,7 @@ class IprTests(TestCase):
        r = self.client.get(url)
        self.assertContains(r, original_ipr.holder_legal_name)

        #url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" })
        #url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" })
        # successful post
        empty_outbox()
        post_data = {

@@ -503,7 +503,7 @@ class IprTests(TestCase):
        r = self.client.get(url)
        self.assertContains(r, original_ipr.title)

        #url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" })
        #url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" })
        # successful post
        empty_outbox()
        r = self.client.post(url, {

@@ -543,7 +543,7 @@ class IprTests(TestCase):

    def test_update_bad_post(self):
        draft = WgDraftFactory()
        url = urlreverse("ietf.ipr.views.new", kwargs={ "type": "specific" })
        url = urlreverse("ietf.ipr.views.new", kwargs={ "_type": "specific" })

        empty_outbox()
        r = self.client.post(url, {
@@ -1022,3 +1022,61 @@ class DraftFormTests(TestCase):
            "revisions",
            null_char_error_msg,
        )


class HolderIprDisclosureFormTests(TestCase):
    def setUp(self):
        super().setUp()
        # Checkboxes that are False are left out of the Form data, not sent back at all. These are
        # commented out - if they were checked, their value would be "on".
        self.data = {
            "holder_legal_name": "Test Legal",
            "holder_contact_name": "Test Holder",
            "holder_contact_email": "test@holder.com",
            "holder_contact_info": "555-555-0100",
            "ietfer_name": "Test Participant",
            "ietfer_contact_info": "555-555-0101",
            "iprdocrel_set-TOTAL_FORMS": 2,
            "iprdocrel_set-INITIAL_FORMS": 0,
            "iprdocrel_set-0-document": "1234",  # fake id - validates but won't save()
            "iprdocrel_set-0-revisions": '00',
            "iprdocrel_set-1-document": "4567",  # fake id - validates but won't save()
            # "is_blanket_disclosure": "on",
            "patent_number": "SE12345678901",
            "patent_inventor": "A. Nonymous",
            "patent_title": "A method of transferring bits",
            "patent_date": "2000-01-01",
            # "has_patent_pending": "on",
            "licensing": "reasonable",
            "submitter_name": "Test Holder",
            "submitter_email": "test@holder.com",
        }

    def test_blanket_disclosure_licensing_restrictions(self):
        """when is_blanket_disclosure is True only royalty-free licensing is valid

        Most of the form functionality is tested via the views in IprTests above. More thorough testing
        of validation ought to move here so we don't have to exercise the whole Django plumbing repeatedly.
        """
        self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid())
        self.data["is_blanket_disclosure"] = "on"
        self.assertFalse(HolderIprDisclosureForm(data=self.data).is_valid())
        self.data["licensing"] = "royalty-free"
        self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid())

    def test_patent_details_required_unless_blanket(self):
        self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid())
        patent_fields = ["patent_number", "patent_inventor", "patent_title", "patent_date"]
        # any of the fields being missing should invalidate the form
        for pf in patent_fields:
            val = self.data.pop(pf)
            self.assertFalse(HolderIprDisclosureForm(data=self.data).is_valid())
            self.data[pf] = val

        # should be optional if is_blanket_disclosure is True
        self.data["is_blanket_disclosure"] = "on"
        self.data["licensing"] = "royalty-free"  # also needed for a blanket disclosure
        for pf in patent_fields:
            val = self.data.pop(pf)
            self.assertTrue(HolderIprDisclosureForm(data=self.data).is_valid())
            self.data[pf] = val
@@ -25,6 +25,6 @@ urlpatterns = [
    url(r'^(?P<id>\d+)/state/$', views.state),
    url(r'^update/$', RedirectView.as_view(url=reverse_lazy('ietf.ipr.views.showlist'), permanent=True)),
    url(r'^update/(?P<id>\d+)/$', views.update),
    url(r'^new-(?P<type>(specific|generic|general|third-party))/$', views.new),
    url(r'^new-(?P<_type>(specific|generic|general|third-party))/$', views.new),
    url(r'^search/$', views.search),
]
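The view's keyword argument is renamed from type to _type, which also renames the URL pattern's named group and every urlreverse() call; a plausible motivation is to stop shadowing Python's builtin type() inside the view. A minimal, hypothetical sketch of the same coupling (not the datatracker's urls.py):

from django.http import HttpResponse
from django.urls import re_path


def new(request, _type, updates=None):  # "_type" avoids shadowing the builtin type()
    return HttpResponse("new %s disclosure" % _type)


urlpatterns = [
    re_path(r"^new-(?P<_type>(specific|generic|general|third-party))/$", new, name="ipr-new"),
]

# reverse("ipr-new", kwargs={"_type": "specific"}) -> "/new-specific/"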
@@ -475,28 +475,34 @@ def by_draft_recursive_txt(request):
    return HttpResponse(content, content_type="text/plain; charset=%s"%settings.DEFAULT_CHARSET)


def new(request, type, updates=None):
def new(request, _type, updates=None):
    """Submit a new IPR Disclosure. If the updates field != None, this disclosure
    updates one or more other disclosures."""
    # Note that URL patterns won't ever send updates - updates is only non-null when called from code

    # This odd construct flipping generic and general allows the URLs to say 'general' while having a minimal impact on the code.
    # A cleanup to change the code to switch on type 'general' should follow.
    if type == 'generic' and updates: # Only happens when called directly from the updates view
    if (
        _type == "generic" and updates
    ): # Only happens when called directly from the updates view
        pass
    elif type == 'generic':
        return HttpResponseRedirect(urlreverse('ietf.ipr.views.new',kwargs=dict(type='general')))
    elif type == 'general':
        type = 'generic'
    elif _type == "generic":
        return HttpResponseRedirect(
            urlreverse("ietf.ipr.views.new", kwargs=dict(_type="general"))
        )
    elif _type == "general":
        _type = "generic"
    else:
        pass

    # 1 to show initially + the template
    DraftFormset = inlineformset_factory(IprDisclosureBase, IprDocRel, form=DraftForm, can_delete=False, extra=1 + 1)
    DraftFormset = inlineformset_factory(
        IprDisclosureBase, IprDocRel, form=DraftForm, can_delete=False, extra=1 + 1
    )

    if request.method == 'POST':
        form = ipr_form_mapping[type](request.POST)
        if type != 'generic':
    if request.method == "POST":
        form = ipr_form_mapping[_type](request.POST)
        if _type != "generic":
            draft_formset = DraftFormset(request.POST, instance=IprDisclosureBase())
        else:
            draft_formset = None
@@ -505,72 +511,92 @@ def new(request, type, updates=None):
            person = Person.objects.get(name="(System)")
        else:
            person = request.user.person

        # check formset validity
        if type != 'generic':
        if _type != "generic":
            valid_formsets = draft_formset.is_valid()
        else:
            valid_formsets = True

        if form.is_valid() and valid_formsets:
            if 'updates' in form.cleaned_data:
                updates = form.cleaned_data['updates']
                del form.cleaned_data['updates']
            if "updates" in form.cleaned_data:
                updates = form.cleaned_data["updates"]
                del form.cleaned_data["updates"]
            disclosure = form.save(commit=False)
            disclosure.by = person
            disclosure.state = IprDisclosureStateName.objects.get(slug='pending')
            disclosure.state = IprDisclosureStateName.objects.get(slug="pending")
            disclosure.save()

            if type != 'generic':
            if _type != "generic":
                draft_formset = DraftFormset(request.POST, instance=disclosure)
                draft_formset.save()

            set_disclosure_title(disclosure)
            disclosure.save()

            if updates:
                for ipr in updates:
                    RelatedIpr.objects.create(source=disclosure,target=ipr,relationship_id='updates')
                    RelatedIpr.objects.create(
                        source=disclosure, target=ipr, relationship_id="updates"
                    )

            # create IprEvent
            IprEvent.objects.create(
                type_id='submitted',
                type_id="submitted",
                by=person,
                disclosure=disclosure,
                desc="Disclosure Submitted")
                desc="Disclosure Submitted",
            )

            # send email notification
            (to, cc) = gather_address_lists('ipr_disclosure_submitted')
            send_mail(request, to, ('IPR Submitter App', 'ietf-ipr@ietf.org'),
                'New IPR Submission Notification',
            (to, cc) = gather_address_lists("ipr_disclosure_submitted")
            send_mail(
                request,
                to,
                ("IPR Submitter App", "ietf-ipr@ietf.org"),
                "New IPR Submission Notification",
                "ipr/new_update_email.txt",
                {"ipr": disclosure,},
                cc=cc)
                {
                    "ipr": disclosure,
                },
                cc=cc,
            )

            return render(request, "ipr/submitted.html")

    else:
        if updates:
            original = IprDisclosureBase(id=updates).get_child()
            initial = model_to_dict(original)
            initial.update({'updates':str(updates), })
            patent_info = text_to_dict(initial.get('patent_info', ''))
            initial.update(
                {
                    "updates": str(updates),
                }
            )
            patent_info = text_to_dict(initial.get("patent_info", ""))
            if list(patent_info.keys()):
                patent_dict = dict([ ('patent_'+k.lower(), v) for k,v in list(patent_info.items()) ])
                patent_dict = dict(
                    [("patent_" + k.lower(), v) for k, v in list(patent_info.items())]
                )
            else:
                patent_dict = {'patent_notes': initial.get('patent_info', '')}
                patent_dict = {"patent_notes": initial.get("patent_info", "")}
            initial.update(patent_dict)
            form = ipr_form_mapping[type](initial=initial)
            form = ipr_form_mapping[_type](initial=initial)
        else:
            form = ipr_form_mapping[type]()
            disclosure = IprDisclosureBase() # dummy disclosure for inlineformset
            form = ipr_form_mapping[_type]()
            disclosure = IprDisclosureBase()  # dummy disclosure for inlineformset
        draft_formset = DraftFormset(instance=disclosure)

    return render(request, "ipr/details_edit.html", {
        'form': form,
        'draft_formset':draft_formset,
        'type':type,
    })
    return render(
        request,
        "ipr/details_edit.html",
        {
            "form": form,
            "draft_formset": draft_formset,
            "type": _type,
        },
    )


@role_required('Secretariat',)
def notify(request, id, type):
@@ -5,7 +5,7 @@
import time
import datetime
import shutil
import os
import tempfile
import re

from django.utils import timezone

@@ -939,13 +939,8 @@ class InterimTests(IetfSeleniumTestCase):
    def tempdir(self, label):
        # Borrowed from test_utils.TestCase
        slug = slugify(self.__class__.__name__.replace('.','-'))
        dirname = "tmp-{label}-{slug}-dir".format(**locals())
        if 'VIRTUAL_ENV' in os.environ:
            dirname = os.path.join(os.environ['VIRTUAL_ENV'], dirname)
        path = os.path.abspath(dirname)
        if not os.path.exists(path):
            os.mkdir(path)
        return path
        suffix = "-{label}-{slug}-dir".format(**locals())
        return tempfile.mkdtemp(suffix=suffix)

    def displayed_interims(self, groups=None):
        sessions = add_event_info_to_session_qs(
@@ -39,7 +39,7 @@ from ietf.doc.models import Document, NewRevisionDocEvent
from ietf.group.models import Group, Role, GroupFeatures
from ietf.group.utils import can_manage_group
from ietf.person.models import Person, PersonalApiKey
from ietf.meeting.helpers import can_approve_interim_request, can_view_interim_request, preprocess_assignments_for_agenda
from ietf.meeting.helpers import can_approve_interim_request, can_request_interim_meeting, can_view_interim_request, preprocess_assignments_for_agenda
from ietf.meeting.helpers import send_interim_approval_request, AgendaKeywordTagger
from ietf.meeting.helpers import send_interim_meeting_cancellation_notice, send_interim_session_cancellation_notice
from ietf.meeting.helpers import send_interim_minutes_reminder, populate_important_dates, update_important_dates

@@ -7334,10 +7334,7 @@ class HasMeetingsTests(TestCase):
        for gf in GroupFeatures.objects.filter(has_meetings=True):
            for role_name in all_role_names - set(gf.groupman_roles):
                role = RoleFactory(group__type_id=gf.type_id,name_id=role_name)
                self.client.login(username=role.person.user.username, password=role.person.user.username+'+password')
                r = self.client.get(url)
                self.assertEqual(r.status_code, 403)
                self.client.logout()
                self.assertFalse(can_request_interim_meeting(role.person.user))

    def test_appears_on_upcoming(self):
        url = urlreverse('ietf.meeting.views.upcoming')
@@ -312,7 +312,7 @@
            "order": 42,
            "slug": "watching",
            "type": "draft-iesg",
            "used": true
            "used": false
        },
        "model": "doc.state",
        "pk": 11
@@ -8,7 +8,7 @@ import faker
import faker.config
import os
import random
import shutil
from PIL import Image

from unidecode import unidecode
from unicodedata import normalize

@@ -103,10 +103,9 @@ class PersonFactory(factory.django.DjangoModelFactory):
        media_name = "%s/%s.jpg" % (settings.PHOTOS_DIRNAME, photo_name)
        obj.photo = media_name
        obj.photo_thumb = media_name
        photosrc = os.path.join(settings.TEST_DATA_DIR, "profile-default.jpg")
        photodst = os.path.join(settings.PHOTOS_DIR, photo_name + '.jpg')
        if not os.path.exists(photodst):
            shutil.copy(photosrc, photodst)
            img = Image.new('RGB', (200, 200))
            img.save(photodst)
        def delete_file(file):
            os.unlink(file)
        atexit.register(delete_file, photodst)
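The factory change above stops copying a checked-in profile-default.jpg and instead generates a throwaway 200x200 image with Pillow, removing it when the process exits. A self-contained sketch of that pattern (hypothetical path, not the factory itself):

import atexit
import os
import tempfile

from PIL import Image

# Generate a placeholder photo once and make sure it is deleted at interpreter exit.
photo_path = os.path.join(tempfile.gettempdir(), "placeholder-photo.jpg")
if not os.path.exists(photo_path):
    Image.new("RGB", (200, 200)).save(photo_path)
    atexit.register(os.unlink, photo_path)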
@@ -9,9 +9,12 @@
#   ./manage.py test --settings=settings_test doc.ChangeStateTestCase
#

import os
import atexit
import os
import shutil
import tempfile
from ietf.settings import * # pyflakes:ignore
from ietf.settings import TEST_CODE_COVERAGE_CHECKER, BASE_DIR, PHOTOS_DIRNAME
from ietf.settings import TEST_CODE_COVERAGE_CHECKER
import debug # pyflakes:ignore
debug.debug = True

@@ -48,11 +51,20 @@ DATABASES = {
if TEST_CODE_COVERAGE_CHECKER and not TEST_CODE_COVERAGE_CHECKER._started: # pyflakes:ignore
    TEST_CODE_COVERAGE_CHECKER.start() # pyflakes:ignore

NOMCOM_PUBLIC_KEYS_DIR=os.path.abspath("tmp-nomcom-public-keys-dir")

MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'test/media/') # pyflakes:ignore
MEDIA_URL = '/test/media/'
PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME # pyflakes:ignore
def tempdir_with_cleanup(**kwargs):
    """Utility to create a temporary dir and arrange cleanup"""
    _dir = tempfile.mkdtemp(**kwargs)
    atexit.register(shutil.rmtree, _dir)
    return _dir


NOMCOM_PUBLIC_KEYS_DIR = tempdir_with_cleanup(suffix="-nomcom-public-keys-dir")

MEDIA_ROOT = tempdir_with_cleanup(suffix="-media")
PHOTOS_DIRNAME = "photo"
PHOTOS_DIR = os.path.join(MEDIA_ROOT, PHOTOS_DIRNAME)
os.mkdir(PHOTOS_DIR)

# Undo any developer-dependent middleware when running the tests
MIDDLEWARE = [ c for c in MIDDLEWARE if not c in DEV_MIDDLEWARE ] # pyflakes:ignore
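The tempdir_with_cleanup() helper above replaces fixed test/ paths with per-run temporary directories that are removed automatically when the test process exits. A short standalone sketch of that mechanism and a usage under an assumed, made-up setting name:

import atexit
import os
import shutil
import tempfile


def tempdir_with_cleanup(**kwargs):
    # Fresh directory per call; shutil.rmtree runs at interpreter exit even if tests fail.
    _dir = tempfile.mkdtemp(**kwargs)
    atexit.register(shutil.rmtree, _dir)
    return _dir


UPLOAD_STAGING_DIR = tempdir_with_cleanup(suffix="-staging")  # hypothetical setting name
assert os.path.isdir(UPLOAD_STAGING_DIR)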
@@ -69,4 +69,69 @@ $(document)
                form.find(".draft-row")
                    .each(updateRevisions);
            }, 10);
        });

        // Manage fields that depend on the Blanket IPR Disclosure choice
        const blanketCheckbox = document.getElementById('id_is_blanket_disclosure')
        if (blanketCheckbox) {
            const patentDetailInputs = [
                // The ids are from the HolderIprDisclosureForm and its base form class,
                // intentionally excluding patent_notes because it's never required
                'id_patent_number',
                'id_patent_inventor',
                'id_patent_title',
                'id_patent_date'
            ].map((id) => document.getElementById(id))
            const patentDetailRowDivs = patentDetailInputs.map(
                (elt) => elt.closest('div.row')
            )
            const royaltyFreeLicensingRadio = document.querySelector(
                '#id_licensing input[value="royalty-free"]'
            )
            let lastSelectedLicensingRadio
            const otherLicensingRadios = document.querySelectorAll(
                '#id_licensing input:not([value="royalty-free"])'
            )

            const handleBlanketCheckboxChange = () => {
                const isBlanket = blanketCheckbox.checked
                // Update required fields
                for (const elt of patentDetailInputs) {
                    // toggle whether the input is required
                    elt.required = !isBlanket
                }
                for (const elt of patentDetailRowDivs) {
                    // update the styling on the row that indicates required field
                    if (isBlanket) {
                        elt.classList.remove('required')
                    } else {
                        elt.classList.add('required')
                    }
                }
                // Update licensing selection
                if (isBlanket) {
                    lastSelectedLicensingRadio = document.querySelector(
                        '#id_licensing input:checked'
                    )
                    royaltyFreeLicensingRadio.checked = true
                    otherLicensingRadios
                        .forEach(
                            (elt) => elt.setAttribute('disabled', '')
                        )
                } else {
                    royaltyFreeLicensingRadio.checked = false
                    if (lastSelectedLicensingRadio) {
                        lastSelectedLicensingRadio.checked = true
                    }
                    otherLicensingRadios
                        .forEach(
                            (elt) => elt.removeAttribute('disabled')
                        )
                }
            }
            handleBlanketCheckboxChange()
            blanketCheckbox.addEventListener(
                'change',
                (evt) => handleBlanketCheckboxChange()
            )
        }
    });
@@ -21,7 +21,7 @@ import debug # pyflakes:ignore
from ietf.doc.models import ( Document, State, StateType, DocEvent, DocRelationshipName,
    DocTagName, RelatedDocument, RelatedDocHistory )
from ietf.doc.expire import move_draft_files_to_archive
from ietf.doc.utils import add_state_change_event, prettify_std_name, update_action_holders
from ietf.doc.utils import add_state_change_event, new_state_change_event, prettify_std_name, update_action_holders
from ietf.group.models import Group
from ietf.ipr.models import IprDocRel
from ietf.name.models import StdLevelName, StreamName

@@ -202,11 +202,14 @@ def update_drafts_from_queue(drafts):
        if prev_state != next_state:
            d.set_state(next_state)

            e = add_state_change_event(d, system, prev_state, next_state)
            e = new_state_change_event(d, system, prev_state, next_state)  # unsaved
            if e:
                if auth48:
                    e.desc = re.sub(r"(<b>.*</b>)", "<a href=\"%s\">\\1</a>" % auth48, e.desc)
                e.save()
                events.append(e)

            if auth48:
                e.desc = re.sub(r"(<b>.*</b>)", "<a href=\"%s\">\\1</a>" % auth48, e.desc)
                e.save()
            # Create or update the auth48 URL whether or not this is a state expected to have one.
            d.documenturl_set.update_or_create(
                tag_id='auth48', # look up existing based on this field

@@ -215,8 +218,6 @@ def update_drafts_from_queue(drafts):
        else:
            # Remove any existing auth48 URL when an update does not have one.
            d.documenturl_set.filter(tag_id='auth48').delete()
        if e:
            events.append(e)

        changed.add(name)
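The change above builds the state-change event unsaved (new_state_change_event) so the auth48 link can be folded into its description before the single save(), rather than saving and then editing the record as the old add_state_change_event flow did. A minimal, hypothetical sketch of that "prepare, then persist once" pattern in plain Python (not the datatracker's helpers):

from dataclasses import dataclass


@dataclass
class StateChangeLog:
    desc: str
    saved: bool = False

    def save(self):
        self.saved = True  # stand-in for a single database write


def record_state_change(prev_state, next_state, auth48_url=None):
    entry = StateChangeLog(desc=f"State changed to <b>{next_state}</b> from {prev_state}")
    if auth48_url:
        # fold the link into the description before the only save()
        entry.desc = entry.desc.replace(
            f"<b>{next_state}</b>", f'<a href="{auth48_url}"><b>{next_state}</b></a>'
        )
    entry.save()
    return entry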
@@ -304,7 +304,7 @@
        Action Holder{{ doc.documentactionholder_set.all|pluralize }}
    </th>
    <td class="edit">
        {% if can_edit %}
        {% if can_edit_action_holders %}
            <a class="btn btn-primary btn-sm"
               href="{% url 'ietf.doc.views_doc.edit_action_holders' name=doc.name %}">
                Edit

@@ -319,7 +319,7 @@
            {% person_link action_holder.person title=action_holder.role_for_doc %} {{ action_holder|action_holder_badge }}
        </div>
    {% endfor %}
    {% if can_edit %}
    {% if can_edit_action_holders %}
        <a class="btn btn-primary btn-sm mt-3"
           href="{% url "ietf.doc.views_doc.remind_action_holders" name=doc.name %}">
            <i class="bi bi-envelope">
@@ -153,6 +153,9 @@
        </a>
    {% endif %}
</p>
{% if session_statusid == "canceled" %}
    <div class="alert alert-warning">The session for this document was cancelled.</div>
{% endif %}
<div id="materials-content" class="card mt-5">
    <div class="card-header">{{ doc.name }}-{{ doc.rev }}</div>
    <div class="card-body{% if content_is_html %} text-break{% endif %}">
@@ -12,9 +12,9 @@
</h1>
{% if state.slug == "dead" %}
    <p class="alert alert-warning my-3">
        This document is in IESG state "Dead". It is unusual to change
        this to anything other than "AD is watching", and this should
        never be used as a replacement for Begin IESG Processing.
        This document is in IESG state "Dead". It is unusual to change this to
        anything other than "I-D Exists" and this should never be used as a
        replacement for Begin IESG Processing.
    </p>
{% endif %}
<a class="btn btn-info my-3"
@@ -18,7 +18,7 @@
    <div class="badge rounded-pill text-bg-secondary">{{ s.current_status_name }}</div>
    {% if s.current_status == "canceled" %}
        {% with timeslot=s.official_timeslotassignment.timeslot %}
            <span class="text-decoration-line-through text-secondary session-time date me-3" data-start-utc="{{ timeslot.time|utc|date:'Y-m-d' }}" data-end-utc="{{ timeslot.end_time|utc|date:'Y-m-d' }}"></span>
            <span class="text-decoration-line-through text-secondary session-time date me-3" data-start-utc="{{ timeslot.time|utc|date:'Y-m-d H:i' }}" data-end-utc="{{ timeslot.end_time|utc|date:'Y-m-d H:i' }}"></span>
        {% endwith %}
    {% endif %}
{% endif %}
@@ -32,7 +32,7 @@
    regarding an IETF document or contribution when the person letting the
    IETF know about the patent has no relationship with the patent owners.
    Click
    <a href="{% url 'ietf.ipr.views.new' type='specific' %}">here</a>
    <a href="{% url 'ietf.ipr.views.new' 'specific' %}">here</a>
    if you want to disclose information about patents or patent
    applications where you do have a relationship to the patent owners or
    patent applicants.
@@ -121,12 +121,11 @@
    {% endif %}
    {% if type != "generic" %}
        <h2 class="mt-4">{% cycle section %}. IETF document or other contribution to which this IPR disclosure relates</h2>
        <p class="form-text">
        <p>
            If an Internet-Draft or RFC includes multiple parts and it is not
            reasonably apparent which part of such Internet-Draft or RFC is alleged
            to be covered by the patent information disclosed in Section
            V(A) or V(B), please identify the sections of
            the Internet-Draft or RFC that are alleged to be so
            to be covered by the patent information disclosed in Section V,
            please identify the sections of the Internet-Draft or RFC that are alleged to be so
            covered.
        </p>
        {{ draft_formset.management_form }}
@@ -154,6 +153,13 @@
            <small>i.e., patents or patent applications required to be disclosed by Section 5 of RFC8179</small>
        </h2>
        {% if form.patent_number %}
            {% if form.is_blanket_disclosure %}
                <p>
                    This IPR disclosure must either identify a specific patent or patents in sections V(A) and V(B)
                    below, or be made as a blanket IPR disclosure.
                </p>
                {% bootstrap_field form.is_blanket_disclosure layout='horizontal' %}
            {% endif %}
            <p>
                A. For granted patents or published pending patent applications,
                please provide the following information:
@@ -34,7 +34,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


import os
import tempfile
import re
import email
import html5lib

@@ -239,13 +239,8 @@ class TestCase(django.test.TestCase):

    def tempdir(self, label):
        slug = slugify(self.__class__.__name__.replace('.','-'))
        dirname = "tmp-{label}-{slug}-dir".format(**locals())
        if 'VIRTUAL_ENV' in os.environ:
            dirname = os.path.join(os.environ['VIRTUAL_ENV'], dirname)
        path = os.path.abspath(dirname)
        if not os.path.exists(path):
            os.mkdir(path)
        return path
        suffix = "-{label}-{slug}-dir".format(**locals())
        return tempfile.mkdtemp(suffix=suffix)

    def assertNoFormPostErrors(self, response, error_css_selector=".is-invalid"):
        """Try to fish out form errors, if none found at least check the

@@ -306,7 +301,7 @@ class TestCase(django.test.TestCase):

        # Replace settings paths with temporary directories.
        self._ietf_temp_dirs = {}  # trashed during tearDown, DO NOT put paths you care about in this
        for setting in self.settings_temp_path_overrides:
        for setting in set(self.settings_temp_path_overrides):
            self._ietf_temp_dirs[setting] = self.tempdir(slugify(setting))
        self._ietf_saved_context = django.test.utils.override_settings(**self._ietf_temp_dirs)
        self._ietf_saved_context.enable()
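The TestCase changes above switch tempdir() to tempfile.mkdtemp() and de-duplicate settings_temp_path_overrides before creating per-test directories. A hedged usage sketch (the subclass, the extra setting name, and the list-style attribute are assumptions drawn from the hunk above, not confirmed API):

import os

from django.conf import settings

from ietf.utils.test_utils import TestCase  # import path assumed


class StagingPathTests(TestCase):
    # Name the path settings that should be redirected into throwaway directories;
    # setUp() (shown in the hunk above) creates one temp dir per unique setting
    # and applies override_settings().
    settings_temp_path_overrides = TestCase.settings_temp_path_overrides + [
        "SLIDE_STAGING_PATH",  # assumed example; any path-valued setting works
    ]

    def test_staging_path_is_temporary(self):
        self.assertTrue(os.path.isdir(settings.SLIDE_STAGING_PATH))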
1
media/.gitignore
vendored
1
media/.gitignore
vendored
|
@ -1 +0,0 @@
|
|||
/floor
|
Binary file not shown.
Before Width: | Height: | Size: 1.6 KiB |
Binary file not shown.
Before Width: | Height: | Size: 1.4 KiB |
Binary file not shown.
Before Width: | Height: | Size: 1.4 KiB |
File diff suppressed because it is too large
Load diff
|
@ -1 +0,0 @@
|
|||
{"regionCode": "US", "kind": "youtube#searchListResponse", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/eoV8llUEbIu5LXnwqBaLOkOK0Hg\"", "pageInfo": {"resultsPerPage": 1, "totalResults": 1}, "items": [{"snippet": {"thumbnails": {"default": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/default.jpg", "width": 120, "height": 90}, "high": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/hqdefault.jpg", "width": 480, "height": 360}, "medium": {"url": "https://i.ytimg.com/vi/lhYWB5FFkg4/mqdefault.jpg", "width": 320, "height": 180}}, "title": "IETF98", "channelId": "UC8dtK9njBLdFnBahHFp0eZQ", "publishedAt": "2017-03-30T12:41:04.000Z", "liveBroadcastContent": "none", "channelTitle": "IETF - Internet Engineering Task Force", "description": "Videos from the IETF 98 Meeting held in Chicago, Illinois, United States 26-31 March 2017."}, "kind": "youtube#searchResult", "etag": "\"m2yskBQFythfE4irbTIeOgYYfBU/X3dbZGRvgpvedtOP0KLGhZLg5UI\"", "id": {"kind": "youtube#playlist", "playlistId": "PLC86T-test"}}]}
|

@@ -1 +0,0 @@
|
1
test/lib/.gitignore
vendored
1
test/lib/.gitignore
vendored
|
@ -1 +0,0 @@
|
|||
/django
|
|
@ -1,9 +0,0 @@
|
|||
This directory will be used to set up packages used for testing if they need any
|
||||
special handling which should not be applied to the system-wide setup.
|
||||
|
||||
For instance, many of the tests to be run on the Django application should be run
|
||||
with a standard Django environment; but there are some Django test features which
|
||||
are broken in 0.9.6, and need patching in order to do the testing, like the ability
|
||||
to create a test database to run unit tests, according to this issue and patch:
|
||||
http://code.djangoproject.com/changeset/5106
|
||||
|

test/media/floor/.gitignore (vendored)
@@ -1 +0,0 @@
/*

test/media/photo/.gitignore (vendored)
@@ -1 +0,0 @@
/*