fix: test on postgres. POST and check intended pks. (#4949)
* fix: test on postgres. post and check intended pks.
* fix: pass and check intended pks
* fix: pass intended pk
* fix: get an actually usable group type for test
* fix: use a review assignment pk instead of the review request pk
* fix: Use pks of sessions, not schedtimesessassignments
* fix: test for actual pk and do not rely on case-insensitive LIKE
* fix: test for actual pk
* chore: spelling correction: nomine -> nominee
parent ec2b7d0d04
commit 3fb521eb53
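
The common thread in these changes is that the tests stop assuming specific primary-key values (or case-insensitive string matching) and instead derive expected values from the objects each test actually created, so the suite also passes against a PostgreSQL test database. As a minimal illustrative sketch (not part of the diff itself), the _sppk_at helper added to ReorderSlidesTests maps 1-based slide positions onto whatever pks the factories returned; the literal pk values below are invented for the example:

# Sketch of the pk-mapping pattern; the pk values are made up for illustration.
def _sppk_at(sppk, positions):
    # positions are 1-based slide positions; sppk lists the pks in creation order
    return [sppk[p - 1] for p in positions]

sppk = [41, 42, 43, 44, 45]   # whatever pks the factory actually produced
assert _sppk_at(sppk, [2, 3, 1, 4, 5]) == [42, 43, 41, 44, 45]
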
@@ -64,20 +64,20 @@ class Downref(TestCase):
         self.assertContains(r, 'Save downref')
 
         # error - already in the downref registry
-        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.doc.pk, )))
+        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.docalias.pk, )))
         self.assertContains(r, 'Downref is already in the registry')
 
         # error - source is not in an approved state
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draft.pk, )))
+        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draftalias.pk, )))
         self.assertContains(r, 'Draft is not yet approved')
 
         # error - the target is not a normative reference of the source
         self.draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub"))
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draft.pk, )))
+        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draftalias.pk, )))
         self.assertContains(r, 'There does not seem to be a normative reference to RFC')
         self.assertContains(r, 'Save downref anyway')
 
@@ -88,7 +88,7 @@ class Downref(TestCase):
 
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draft.pk, )))
+        r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draftalias.pk, )))
         self.assertEqual(r.status_code, 302)
         newurl = urlreverse('ietf.doc.views_downref.downref_registry')
         r = self.client.get(newurl)
@@ -118,4 +118,4 @@ class Downref(TestCase):
         self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
         text = q("[name=last_call_text]").text()
-        self.assertNotIn('The document contains these normative downward references', text)
+        self.assertNotIn('The document contains these normative downward references', text)
 
@@ -1890,7 +1890,7 @@ class ChangeReplacesTests(TestCase):
         RelatedDocument.objects.create(source=self.replacea, target=self.basea.docalias.first(),
                                        relationship=DocRelationshipName.objects.get(slug="possibly-replaces"))
         self.assertEqual(self.basea.get_state().slug,'active')
-        r = self.client.post(url, dict(replaces=self.basea.pk))
+        r = self.client.post(url, dict(replaces=self.basea.docalias.first().pk))
         self.assertEqual(r.status_code, 302)
         self.assertEqual(RelatedDocument.objects.filter(relationship__slug='replaces',source=self.replacea).count(),1)
         self.assertEqual(Document.objects.get(name='draft-test-base-a').get_state().slug,'repl')
@@ -1904,7 +1904,7 @@ class ChangeReplacesTests(TestCase):
         # Post that says replaceboth replaces both base a and base b
         url = urlreverse('ietf.doc.views_draft.replaces', kwargs=dict(name=self.replaceboth.name))
         self.assertEqual(self.baseb.get_state().slug,'expired')
-        r = self.client.post(url, dict(replaces=[self.basea.pk, self.baseb.pk]))
+        r = self.client.post(url, dict(replaces=[self.basea.docalias.first().pk, self.baseb.docalias.first().pk]))
         self.assertEqual(r.status_code, 302)
         self.assertEqual(Document.objects.get(name='draft-test-base-a').get_state().slug,'repl')
         self.assertEqual(Document.objects.get(name='draft-test-base-b').get_state().slug,'repl')
@@ -1963,7 +1963,7 @@ class MoreReplacesTests(TestCase):
         new_doc = IndividualDraftFactory(stream_id=stream)
 
         url = urlreverse('ietf.doc.views_draft.replaces', kwargs=dict(name=new_doc.name))
-        r = self.client.post(url, dict(replaces=old_doc.pk))
+        r = self.client.post(url, dict(replaces=old_doc.docalias.first().pk))
         self.assertEqual(r.status_code,302)
         old_doc = Document.objects.get(name=old_doc.name)
         self.assertEqual(old_doc.get_state_slug('draft'),'repl')
@@ -603,7 +603,7 @@ class ReviewTests(TestCase):
         for r in ReviewResultName.objects.filter(slug__in=("issues", "ready")):
             review_req.team.reviewteamsettings.review_results.add(r)
 
-        url = urlreverse('ietf.doc.views_review.complete_review', kwargs={ "name": doc.name, "assignment_id": review_req.pk })
+        url = urlreverse('ietf.doc.views_review.complete_review', kwargs={ "name": doc.name, "assignment_id": assignment.pk })
 
         return assignment, url
 
@@ -616,7 +616,7 @@ class GroupEditTests(TestCase):
 
     def test_create_non_chartered_includes_description(self):
         parent = GroupFactory(type_id='area')
-        group_type = GroupTypeName.objects.filter(used=True, features__has_chartering_process=False).first()
+        group_type = GroupTypeName.objects.filter(used=True, features__has_chartering_process=False, features__parent_types='area').first()
         self.assertIsNotNone(group_type)
         url = urlreverse('ietf.group.views.edit', kwargs=dict(group_type=group_type.slug, action="create"))
         login_testing_unauthorized(self, "secretary", url)
@@ -78,12 +78,13 @@ class ScheduleGeneratorTest(TestCase):
 
         self.stdout.seek(0)
         output = self.stdout.read()
-        self.assertIn('WARNING: session wg2 (pk 13) has no attendees set', output)
+        wg2_no_attendees_session_pk = [s.session_pk for s in generator.schedule.sessions if s.group == "wg2" and not s.attendees][0]
+        self.assertIn(f'WARNING: session wg2 (pk {wg2_no_attendees_session_pk}) has no attendees set', output)
         self.assertIn('scheduling 13 sessions in 20 timeslots', output)
         self.assertIn('Optimiser starting run 1', output)
         self.assertIn('Optimiser found an optimal schedule', output)
 
-        schedule = self.meeting.schedule_set.get(name__startswith='Auto-')
+        schedule = self.meeting.schedule_set.get(name__startswith='auto-')
         self.assertEqual(schedule.assignments.count(), 13)
 
     def test_unresolvable_schedule(self):
@@ -160,7 +161,8 @@ class ScheduleGeneratorTest(TestCase):
         self.assertIn('Applying schedule {} as base schedule'.format(
             generate_schedule.ScheduleId.from_schedule(base_schedule)
         ), output)
-        self.assertIn('WARNING: session wg2 (pk 13) has no attendees set', output)
+        wg2_no_attendees_session_pk = [s.session_pk for s in generator.schedule.sessions if s.group == "wg2" and not s.attendees][0]
+        self.assertIn(f'WARNING: session wg2 (pk {wg2_no_attendees_session_pk}) has no attendees set', output)
         self.assertIn('scheduling 13 sessions in 19 timeslots', output) # 19 because base is using one
         self.assertIn('Optimiser starting run 1', output)
         self.assertIn('Optimiser found an optimal schedule', output)
@@ -162,7 +162,7 @@ class AgendaApiTests(TestCase):
             meeting
         )
         AgendaKeywordTagger(assignments=processed).apply()
-        extracted = {item.pk: agenda_extract_schedule(item) for item in processed}
+        extracted = {item.session.pk: agenda_extract_schedule(item) for item in processed}
 
         hidden = extracted[hidden_sess.pk]
         self.assertIsNone(hidden['room'])
@@ -2936,10 +2936,15 @@ class ReorderSlidesTests(TestCase):
         self.assertEqual(list(session2.sessionpresentation_set.order_by('order').values_list('order',flat=True)), list(range(1,3)))
 
 
    def test_reorder_slides_in_session(self):
+        def _sppk_at(sppk, positions):
+            return [sppk[p-1] for p in positions]
+
         chair_role = RoleFactory(name_id='chair')
         session = SessionFactory(group=chair_role.group, meeting__date=date_today() - datetime.timedelta(days=90))
         sp_list = SessionPresentationFactory.create_batch(5, document__type_id='slides', session=session)
+        sppk = [o.pk for o in sp_list]
         for num, sp in enumerate(sp_list, start=1):
             sp.order = num
             sp.save()
@@ -2986,42 +2991,42 @@ class ReorderSlidesTests(TestCase):
             r = self.client.post(url, {'oldIndex':1, 'newIndex':3})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[2,3,1,4,5])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,1,4,5]))
 
             # Move to beginning
             r = self.client.post(url, {'oldIndex':3, 'newIndex':1})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[1,2,3,4,5])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
 
             # Move from end
             r = self.client.post(url, {'oldIndex':5, 'newIndex':3})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[1,2,5,3,4])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,5,3,4]))
 
             # Move to end
             r = self.client.post(url, {'oldIndex':3, 'newIndex':5})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[1,2,3,4,5])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[1,2,3,4,5]))
 
             # Move beginning to end
             r = self.client.post(url, {'oldIndex':1, 'newIndex':5})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[2,3,4,5,1])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,4,5,1]))
 
             # Move middle to middle
             r = self.client.post(url, {'oldIndex':3, 'newIndex':4})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[2,3,5,4,1])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,3,5,4,1]))
 
             r = self.client.post(url, {'oldIndex':3, 'newIndex':2})
             self.assertEqual(r.status_code, 200)
             self.assertEqual(r.json()['success'],True)
-            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),[2,5,3,4,1])
+            self.assertEqual(list(session.sessionpresentation_set.order_by('order').values_list('pk',flat=True)),_sppk_at(sppk,[2,5,3,4,1]))
 
             # Reset for next iteration in the loop
             session.sessionpresentation_set.update(order=F('pk'))
 
@@ -1035,8 +1035,8 @@ class NomineePositionStateSaveTest(TestCase):
                                                           state=NomineePositionStateName.objects.get(slug='accepted'))
         self.assertEqual(nominee_position.state.slug, 'accepted')
 
-    def test_nomine_position_unique(self):
-        """Verify nomine and position are unique together"""
+    def test_nominee_position_unique(self):
+        """Verify nominee and position are unique together"""
         position = Position.objects.get(name='OAM')
         NomineePosition.objects.create(position=position,
                                        nominee=self.nominee)
ietf/settings_postgrestest.py (new executable file, 62 lines)
@@ -0,0 +1,62 @@
+# Copyright The IETF Trust 2010-2023, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+
+# Standard settings except we use Postgres and skip migrations, this is
+# useful for speeding up tests that depend on the test database, try
+# for instance:
+#
+# ./manage.py test --settings=settings_postgrestest doc.ChangeStateTestCase
+#
+
+import os
+from ietf.settings import * # pyflakes:ignore
+from ietf.settings import TEST_CODE_COVERAGE_CHECKER, BASE_DIR, PHOTOS_DIRNAME
+import debug # pyflakes:ignore
+debug.debug = True
+
+# Use a different hostname, to catch hardcoded values
+IDTRACKER_BASE_URL = "https://postgrestest.ietf.org"
+
+# Workaround to avoid spending minutes stepping through the migrations in
+# every test run.  The result of this is to use the 'syncdb' way of creating
+# the test database instead of doing it through the migrations.  Taken from
+# https://gist.github.com/NotSqrt/5f3c76cd15e40ef62d09
+
+class DisableMigrations(object):
+
+    def __contains__(self, item):
+        return True
+
+    def __getitem__(self, item):
+        return None
+
+MIGRATION_MODULES = DisableMigrations()
+
+
+DATABASES = {
+    'default': {
+        'HOST': 'db',
+        'PORT': 5432,
+        'NAME': 'test.db',
+        'ENGINE': 'django.db.backends.postgresql_psycopg2',
+        'USER': 'django',
+        'PASSWORD': 'RkTkDPFnKpko',
+    },
+}
+
+if TEST_CODE_COVERAGE_CHECKER and not TEST_CODE_COVERAGE_CHECKER._started: # pyflakes:ignore
+    TEST_CODE_COVERAGE_CHECKER.start() # pyflakes:ignore
+
+NOMCOM_PUBLIC_KEYS_DIR=os.path.abspath("tmp-nomcom-public-keys-dir")
+
+MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'test/media/') # pyflakes:ignore
+MEDIA_URL = '/test/media/'
+PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME # pyflakes:ignore
+
+# Undo any developer-dependent middleware when running the tests
+MIDDLEWARE = [ c for c in MIDDLEWARE if not c in DEV_MIDDLEWARE ] # pyflakes:ignore
+
+TEMPLATES[0]['OPTIONS']['context_processors'] = [ p for p in TEMPLATES[0]['OPTIONS']['context_processors'] if not p in DEV_TEMPLATE_CONTEXT_PROCESSORS ] # pyflakes:ignore
+
+REQUEST_PROFILE_STORE_ANONYMOUS_SESSIONS = False