chore: Merge branch 'main' into feat/postgres
commit 502376e7b2

.github/workflows/build.yml (vendored, 10 lines changed)
@@ -100,10 +100,10 @@ jobs:
            echo "pkg_version=$GITHUB_REF_NAME" >> $GITHUB_OUTPUT
            echo "::notice::Release $GITHUB_REF_NAME created using tag $GITHUB_REF_NAME"
          else
-           echo "Using TEST mode: 8.0.0-dev.$GITHUB_RUN_NUMBER"
+           echo "Using TEST mode: 9.0.0-dev.$GITHUB_RUN_NUMBER"
            echo "should_deploy=false" >> $GITHUB_OUTPUT
-           echo "pkg_version=8.0.0-dev.$GITHUB_RUN_NUMBER" >> $GITHUB_OUTPUT
-           echo "::notice::Non-production build 8.0.0-dev.$GITHUB_RUN_NUMBER created using branch $GITHUB_REF_NAME"
+           echo "pkg_version=9.0.0-dev.$GITHUB_RUN_NUMBER" >> $GITHUB_OUTPUT
+           echo "::notice::Non-production build 9.0.0-dev.$GITHUB_RUN_NUMBER created using branch $GITHUB_REF_NAME"
          fi

          # -----------------------------------------------------------------
@@ -433,9 +433,13 @@ jobs:
        env:
          DEBIAN_FRONTEND: noninteractive
        run: |
          echo "Reset production flags in settings.py..."
          sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = True/' -e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'development'/" ietf/settings.py
+         echo "Install Deploy to Container CLI dependencies..."
+         cd dev/deploy-to-container
+         npm ci
+         cd ../..
          echo "Start Deploy..."
          node ./dev/deploy-to-container/cli.js --branch ${{ github.ref_name }} --domain neverusethis.com

      - name: Cleanup old docker resources
@@ -27,6 +27,7 @@ django.setup()
 from ietf.utils.log import logger

 try:
+    from ietf.utils.timezone import datetime_today
     from ietf.doc.expire import ( in_draft_expire_freeze, get_expired_drafts, expirable_drafts,
                                   send_expire_notice_for_draft, expire_draft, clean_up_draft_files )
     from ietf.doc.models import Document
@@ -42,7 +43,7 @@ try:
             # the purpose of double-checking that a document is still expirable when it is actually
             # being marked as expired.
             if (expirable_drafts(Document.objects.filter(pk=doc.pk)).exists()
-                and doc.expires < datetime.datetime.today() + datetime.timedelta(1)):
+                and doc.expires < datetime_today() + datetime.timedelta(1)):
                 send_expire_notice_for_draft(doc)
                 expire_draft(doc)
                 syslog.syslog(" Expired draft %s-%s" % (doc.name, doc.rev))
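Note on the hunk above: datetime.datetime.today() returns a naive value, which can no longer be compared against the now timezone-aware doc.expires. The imported ietf.utils.timezone.datetime_today helper is not shown in this diff; a minimal sketch of an equivalent, assuming it returns an aware datetime at today's midnight, might look like:

# Hypothetical sketch -- ietf.utils.timezone.datetime_today itself is not in
# this diff; assumed here to return an aware datetime at midnight in a zone.
import datetime

def datetime_today(tzinfo=datetime.timezone.utc):
    now = datetime.datetime.now(tz=tzinfo)
    return now.replace(hour=0, minute=0, second=0, microsecond=0)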
@@ -29,7 +29,7 @@ def chunks(l, n):
     syslog.syslog("Updating history log with new RFC entries from IANA protocols page %s" % settings.IANA_SYNC_PROTOCOLS_URL)

     # FIXME: this needs to be the date where this tool is first deployed
-    rfc_must_published_later_than = datetime.datetime(2012, 11, 26, 0, 0, 0)
+    rfc_must_published_later_than = datetime.datetime(2012, 11, 26, 0, 0, 0, tzinfo=datetime.timezone.utc)

     try:
         response = requests.get(
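The tzinfo argument here is what keeps the constant comparable with aware RFC timestamps; mixing naive and aware values fails at comparison time. A standalone demonstration:

import datetime

naive = datetime.datetime(2012, 11, 26, 0, 0, 0)
aware = datetime.datetime.now(tz=datetime.timezone.utc)
try:
    naive < aware
except TypeError as err:
    print(err)  # can't compare offset-naive and offset-aware datetimes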
@@ -73,10 +73,10 @@ def in_draft_expire_freeze(when=None):

     d = meeting.get_second_cut_off()
     # for some reason, the old Perl code started at 9 am
-    second_cut_off = datetime.datetime.combine(d, datetime.time(9, 0))
+    second_cut_off = d.replace(hour=9, minute=0, second=0, microsecond=0)

     d = meeting.get_ietf_monday()
-    ietf_monday = datetime.datetime.combine(d, datetime.time(0, 0))
+    ietf_monday = datetime.datetime.combine(d, datetime.time(0, 0), tzinfo=meeting.tz())

     return second_cut_off <= when < ietf_monday

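Both replacement lines build aware datetimes: replace() keeps the zone already attached to its receiver, and datetime.combine() has accepted a tzinfo keyword since Python 3.6. A sketch, with an assumed zone standing in for meeting.tz():

import datetime
from zoneinfo import ZoneInfo

tz = ZoneInfo("America/Los_Angeles")  # assumption: stand-in for meeting.tz()
d = datetime.date(2022, 11, 7)
ietf_monday = datetime.datetime.combine(d, datetime.time(0, 0), tzinfo=tz)
second_cut_off = ietf_monday.replace(hour=9, minute=0, second=0, microsecond=0)
print(ietf_monday.isoformat(), second_cut_off.isoformat())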
@@ -5,6 +5,7 @@
 import datetime
 import re
 from urllib.parse import urljoin
+from zoneinfo import ZoneInfo

 from django import template
 from django.conf import settings
@@ -316,9 +317,18 @@ def underline(string):

 @register.filter(name='timesince_days')
 def timesince_days(date):
-    """Returns the number of days since 'date' (relative to now)"""
+    """Returns the number of days since 'date' (relative to now)
+
+    >>> timesince_days(timezone.now() - datetime.timedelta(days=2))
+    2
+
+    >>> tz = ZoneInfo(settings.TIME_ZONE)
+    >>> timesince_days(timezone.now().astimezone(tz).date() - datetime.timedelta(days=2))
+    2
+
+    """
     if date.__class__ is not datetime.datetime:
-        date = datetime.datetime(date.year, date.month, date.day)
+        date = datetime.datetime(date.year, date.month, date.day, tzinfo=ZoneInfo(settings.TIME_ZONE))
     delta = timezone.now() - date
     return delta.days

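Stripped of Django, the filter's handling of plain dates can be mirrored as below; TIME_ZONE and the aware "now" are stand-ins for the settings and timezone.now(), not datatracker code:

import datetime
from zoneinfo import ZoneInfo

TIME_ZONE = "America/Los_Angeles"  # stand-in for settings.TIME_ZONE

def timesince_days(date):
    # naive dates get the site zone attached before the aware subtraction
    now = datetime.datetime.now(tz=datetime.timezone.utc)  # like timezone.now()
    if date.__class__ is not datetime.datetime:
        date = datetime.datetime(date.year, date.month, date.day, tzinfo=ZoneInfo(TIME_ZONE))
    return (now - date).days

print(timesince_days(datetime.date.today() - datetime.timedelta(days=2)))  # usually 2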
@@ -1646,7 +1646,13 @@ class DocTestCase(TestCase):
         doc.save_with_history([e])
         r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
         self.assertEqual(r.status_code, 200)
-        self.assertRegex(r.content.decode(), r'\(\s*%s\s+for\s+-%s\s*\)' % (pos.comment_time.strftime('%Y-%m-%d'), oldrev))
+        self.assertRegex(
+            r.content.decode(),
+            r'\(\s*%s\s+for\s+-%s\s*\)' % (
+                pos.comment_time.astimezone(ZoneInfo(settings.TIME_ZONE)).strftime('%Y-%m-%d'),
+                oldrev,
+            )
+        )

         # Now simulate a new ballot against the new revision and make sure the "was" position is included
         pos2 = BallotPositionDocEvent.objects.create(
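With timestamps now stored in UTC, formatting comment_time directly can yield a different calendar date than the one the page renders in the site's zone, so the expected string must be converted first. An illustration (zone assumed):

import datetime
from zoneinfo import ZoneInfo

stored = datetime.datetime(2022, 11, 7, 3, 30, tzinfo=datetime.timezone.utc)
local = stored.astimezone(ZoneInfo("America/Los_Angeles"))
print(stored.strftime("%Y-%m-%d"))  # 2022-11-07
print(local.strftime("%Y-%m-%d"))   # 2022-11-06 -- the date rolls back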
@@ -644,11 +644,19 @@ class ExpireIDsTests(DraftFileMixin, TestCase):
         second_cut_off = meeting.get_second_cut_off()
         ietf_monday = meeting.get_ietf_monday()

-        self.assertTrue(not in_draft_expire_freeze(datetime.datetime.combine(second_cut_off - datetime.timedelta(days=7), datetime.time(0, 0, 0))))
-        self.assertTrue(not in_draft_expire_freeze(datetime.datetime.combine(second_cut_off, datetime.time(0, 0, 0))))
-        self.assertTrue(in_draft_expire_freeze(datetime.datetime.combine(second_cut_off + datetime.timedelta(days=7), datetime.time(0, 0, 0))))
-        self.assertTrue(in_draft_expire_freeze(datetime.datetime.combine(ietf_monday - datetime.timedelta(days=1), datetime.time(0, 0, 0))))
-        self.assertTrue(not in_draft_expire_freeze(datetime.datetime.combine(ietf_monday, datetime.time(0, 0, 0))))
+        self.assertFalse(in_draft_expire_freeze((second_cut_off - datetime.timedelta(days=7)).replace(hour=0, minute=0, second=0)))
+        self.assertFalse(in_draft_expire_freeze(second_cut_off.replace(hour=0, minute=0, second=0)))
+        self.assertTrue(in_draft_expire_freeze((second_cut_off + datetime.timedelta(days=7)).replace(hour=0, minute=0, second=0)))
+        self.assertTrue(in_draft_expire_freeze(
+            datetime.datetime.combine(
+                ietf_monday - datetime.timedelta(days=1),
+                datetime.time(0, 0, 0),
+                tzinfo=datetime.timezone.utc,
+            )
+        ))
+        self.assertFalse(in_draft_expire_freeze(
+            datetime.datetime.combine(ietf_monday, datetime.time(0, 0, 0), tzinfo=datetime.timezone.utc)
+        ))

     def test_warn_expirable_drafts(self):
         from ietf.doc.expire import get_soon_to_expire_drafts, send_expire_warning_for_draft
@@ -270,7 +270,7 @@ def active_drafts_index_by_group(extra_values=()):
     groups = [g for g in groups_dict.values() if hasattr(g, "active_drafts")]
     groups.sort(key=lambda g: g.acronym)

-    fallback_time = datetime.datetime(1950, 1, 1)
+    fallback_time = datetime.datetime(1950, 1, 1, tzinfo=datetime.timezone.utc)
     for g in groups:
         g.active_drafts.sort(key=lambda d: d.get("initial_rev_time", fallback_time))
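The fallback needs tzinfo because it is used as a sort key alongside aware initial_rev_time values, and sorted() raises on mixed naive/aware keys:

import datetime

aware = datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc)
naive_fallback = datetime.datetime(1950, 1, 1)
try:
    sorted([aware, naive_fallback])
except TypeError as err:
    print(err)  # can't compare offset-naive and offset-aware datetimes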
@@ -578,7 +578,10 @@ I would like to revoke this declaration.
         self.assertIn('posted on '+date_today().strftime("%Y-%m-%d"), get_payload_text(outbox[len_before]).replace('\n',' '))
         self.assertTrue('draft-ietf-mars-test@ietf.org' in outbox[len_before+1]['To'])
         self.assertTrue('mars-wg@ietf.org' in outbox[len_before+1]['Cc'])
-        self.assertIn('Secretariat on '+ipr.get_latest_event_submitted().time.strftime("%Y-%m-%d"), get_payload_text(outbox[len_before+1]).replace('\n',' '))
+        self.assertIn(
+            'Secretariat on ' + ipr.get_latest_event_submitted().time.astimezone(ZoneInfo(settings.TIME_ZONE)).strftime("%Y-%m-%d"),
+            get_payload_text(outbox[len_before + 1]).replace('\n', ' ')
+        )
         self.assertIn(f'{settings.IDTRACKER_BASE_URL}{urlreverse("ietf.ipr.views.showlist")}', get_payload_text(outbox[len_before]).replace('\n',' '))
         self.assertIn(f'{settings.IDTRACKER_BASE_URL}{urlreverse("ietf.ipr.views.history",kwargs=dict(id=ipr.pk))}', get_payload_text(outbox[len_before+1]).replace('\n',' '))
@@ -19,6 +19,24 @@ function prettify_tz(x) {
     return x.text.replaceAll("_", " ").replaceAll("/", " / ");
 }

+function search_template_result(data) {
+    if (data.url == null) {
+        return data.text;
+    }
+    var $link = $("<a>" + data.text + "</a>");
+    $link.prop("href", data.url);
+    $link.on("mouseup", function (evt) {
+        // Do not propagate any events which have modifiers keys
+        // or if some other mouse button than 1 (left) was used.
+        if (evt.shiftKey || evt.ctrlKey || evt.metaKey || evt.altKey ||
+            evt.which != 1) {
+            evt.stopPropagation();
+        }
+    });
+
+    return $link;
+}
+
 // Copyright The IETF Trust 2015-2021, All Rights Reserved
 // JS for ietf.utils.fields.SearchableField subclasses
 window.setupSelect2Field = function (e) {
@@ -37,7 +55,8 @@ window.setupSelect2Field = function (e) {
         );
     }

-    template_modify = e.hasClass("tz-select") ? prettify_tz : undefined;
+    template_modify = e.hasClass("tz-select") ? prettify_tz :
+        (e.hasClass("search-select") ? search_template_result : undefined);

     // focus the search field automatically
     $(document)
@@ -69,7 +69,7 @@
         {% endif %}

         <label class="d-none d-md-block" aria-label="Document search">
-            <input class="form-control select2-field"
+            <input class="form-control select2-field search-select"
                    id="navbar-doc-search"
                    data-select2-ajax-url="{% url 'ietf.doc.views_search.ajax_select2_search_docs' model_name='docalias' doc_type='draft' %}"
                    type="text"
@@ -10,7 +10,7 @@ SUMMARY:{% if item.session.name %}{{item.session.name|ics_esc}}{% else %}{% if n
 CLASS:PUBLIC
 DTSTART{% ics_date_time item.timeslot.local_start_time schedule.meeting.time_zone %}
 DTEND{% ics_date_time item.timeslot.local_end_time schedule.meeting.time_zone %}
-DTSTAMP:{% ics_date_time item.timeslot.modified|utc 'utc' %}{% if item.session.agenda %}
+DTSTAMP{% ics_date_time item.timeslot.modified|utc 'utc' %}{% if item.session.agenda %}
 URL:{{item.session.agenda.get_versionless_href}}{% endif %}
 DESCRIPTION:{{item.timeslot.name|ics_esc}}\n{% if item.session.agenda_note %}
 Note: {{item.session.agenda_note|ics_esc}}\n{% endif %}{% if item.timeslot.location.webex_url %}
@@ -10,7 +10,7 @@ SUMMARY:{% if item.session.name %}{{item.session.group.acronym|lower}} - {{item.
 CLASS:PUBLIC
 DTSTART{% ics_date_time item.timeslot.local_start_time item.schedule.meeting.time_zone %}
 DTEND{% ics_date_time item.timeslot.local_end_time item.schedule.meeting.time_zone %}
-DTSTAMP:{% ics_date_time item.timeslot.modified|utc 'utc' %}
+DTSTAMP{% ics_date_time item.timeslot.modified|utc 'utc' %}
 {% if item.session.agenda %}URL:{{item.session.agenda.get_href}}
 DESCRIPTION:{{item.timeslot.name|ics_esc}}\n{% if item.session.agenda_note %}
 Note: {{item.session.agenda_note|ics_esc}}\n{% endif %}{% for material in item.session.materials.all %}
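The literal colon after DTSTAMP is dropped in both templates because the ics_date_time tag (not shown in this diff) evidently emits the property's parameter/colon part itself, so keeping the literal produced doubled colons. A toy illustration of the assumed behavior:

# Assumption: the tag renders something like ';TZID=...:20221107T093000'
# or ':20221107T093000Z' -- i.e. the colon is part of the tag's output.
emitted = ":20221107T093000Z"
print("DTSTAMP" + emitted)   # DTSTAMP:20221107T093000Z  -- valid
print("DTSTAMP:" + emitted)  # DTSTAMP::20221107T093000Z -- malformed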
@@ -23,6 +23,12 @@
     moments. For the nitty-gritty week-to-week code changes, please check the release
     notes or the commit log.
 </p>
+<h2>Version 9.0.0: Timezone Aware Data</h2>
+<p>All timestamps in the database are now stored as UTC. Values reported through the API for several
+    models changed, particularly Meeting based models such as TimeSlot where times had previously been
+    stored in the timezone of the meeting location. The 9.0.0 release leaves _presentation_ of the times
+    in Pacific (daylight/standard).
+</p>
 <h2>Version 8.0.0: Facelift using Bootstrap 5</h2>
 <h2>Version 7.0.0: Django 2</h2>
 <h2>Version 6.0.0: Facelift using Bootstrap 3</h2>
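The storage-versus-presentation split described in that note boils down to converting on output. A minimal sketch using zoneinfo directly (Django's USE_TZ/TIME_ZONE machinery does the equivalent):

import datetime
from zoneinfo import ZoneInfo

stored_utc = datetime.datetime(2022, 7, 25, 17, 0, tzinfo=datetime.timezone.utc)
shown = stored_utc.astimezone(ZoneInfo("America/Los_Angeles"))
print(shown.isoformat())  # 2022-07-25T10:00:00-07:00 (Pacific presentation)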
@@ -54,7 +54,6 @@ import warnings
 from urllib.parse import urlencode

 from fnmatch import fnmatch
-from pathlib import Path

 from coverage.report import Reporter
 from coverage.results import Numbers
@@ -325,13 +324,20 @@ class ValidatingTemplate(Template):
             settings.validate_html.batches[kind].append(
                 (self.origin.name, content, fingerprint)
             )
-            # FWIW, a batch size of 30 seems to result in less than 10% runtime overhead
-            if len(settings.validate_html.batches[kind]) >= 30:
-                settings.validate_html.validate(kind)

         return content


+class TemplateValidationTests(unittest.TestCase):
+    def __init__(self, test_runner, validate_html, **kwargs):
+        self.runner = test_runner
+        self.validate_html = validate_html
+        super().__init__(**kwargs)
+
+    def run_template_validation(self):
+        if self.validate_html:
+            self.validate_html.validate(self)
+
+
 class TemplateCoverageLoader(BaseLoader):
     is_usable = True
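TemplateValidationTests piggybacks on unittest's methodName constructor argument so a method without a test_ prefix can run as a suite member. A self-contained sketch of that pattern (the names here are illustrative, not from the diff):

import unittest

class DeferredCheck(unittest.TestCase):
    def __init__(self, payload, **kwargs):
        self.payload = payload
        super().__init__(**kwargs)

    def run_deferred_check(self):
        # runs once, at the point the suite reaches this "test"
        self.assertTrue(self.payload)

suite = unittest.TestSuite([DeferredCheck(payload=[1], methodName="run_deferred_check")])
unittest.TextTestRunner().run(suite)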
@@ -873,15 +879,17 @@ class IetfTestRunner(DiscoverRunner):
             config["doc"]["rules"]["require-sri"] = "off"
             # Turn "element-required-ancestor" back on
             del config["doc"]["rules"]["element-required-ancestor"]
-            # permit discontinuous heading numbering in cards, modals and dialogs:
             config["doc"]["rules"]["heading-level"] = [
                 "error",
                 {
+                    # permit discontinuous heading numbering in cards, modals and dialogs:
                     "sectioningRoots": [
                         ".card-body",
                         ".modal-content",
                         '[role="dialog"]',
-                    ]
+                    ],
+                    # permit multiple H1 elements in a single document
+                    "allowMultipleH1": True,
                 },
             ]
@@ -893,7 +901,7 @@ class IetfTestRunner(DiscoverRunner):
                 )
                 self.config_file[kind].write(json.dumps(config[kind]).encode())
                 self.config_file[kind].flush()
-                Path(self.config_file[kind].name).chmod(0o644)
+                pathlib.Path(self.config_file[kind].name).chmod(0o644)

             if not settings.validate_html_harder:
                 print("")
@@ -926,96 +934,87 @@ class IetfTestRunner(DiscoverRunner):

         if settings.validate_html:
             for kind in self.batches:
-                try:
-                    self.validate(kind)
-                except Exception:
-                    pass
                 if len(self.batches[kind]):
                     print(f" WARNING: not all templates of kind '{kind}' were validated")
                 self.config_file[kind].close()
             if self.vnu:
                 self.vnu.terminate()

         super(IetfTestRunner, self).teardown_test_environment(**kwargs)

-    def validate(self, kind):
-        if not self.batches[kind]:
-            return
-
-        testcase = TestCase()
+    def validate(self, testcase):
         cwd = pathlib.Path.cwd()
-        tmpdir = tempfile.TemporaryDirectory(prefix="html-validate-")
-        Path(tmpdir.name).chmod(0o777)
-        for (name, content, fingerprint) in self.batches[kind]:
-            path = pathlib.Path(tmpdir.name).joinpath(
-                hex(fingerprint)[2:],
-                pathlib.Path(name).relative_to(cwd)
-            )
-            pathlib.Path(path.parent).mkdir(parents=True, exist_ok=True)
-            with path.open(mode="w") as file:
-                file.write(content)
-        self.batches[kind] = []
+        errors = []
+        with tempfile.TemporaryDirectory(prefix="html-validate-") as tmpdir_name:
+            tmppath = pathlib.Path(tmpdir_name)
+            tmppath.chmod(0o777)
+            for kind in self.batches:
+                if not self.batches[kind]:
+                    return
+                for (name, content, fingerprint) in self.batches[kind]:
+                    path = tmppath.joinpath(
+                        hex(fingerprint)[2:],
+                        pathlib.Path(name).relative_to(cwd)
+                    )
+                    pathlib.Path(path.parent).mkdir(parents=True, exist_ok=True)
+                    with path.open(mode="w") as file:
+                        file.write(content)
+                self.batches[kind] = []

-        validation_results = None
-        with tempfile.NamedTemporaryFile() as stdout:
-            subprocess.run(
-                [
-                    "yarn",
-                    "html-validate",
-                    "--formatter=json",
-                    "--config=" + self.config_file[kind].name,
-                    tmpdir.name,
-                ],
-                stdout=stdout,
-                stderr=stdout,
-            )
+                validation_results = None
+                with tempfile.NamedTemporaryFile() as stdout:
+                    subprocess.run(
+                        [
+                            "yarn",
+                            "html-validate",
+                            "--formatter=json",
+                            "--config=" + self.config_file[kind].name,
+                            tmpdir_name,
+                        ],
+                        stdout=stdout,
+                        stderr=stdout,
+                    )

-            stdout.seek(0)
-            try:
-                validation_results = json.load(stdout)
-            except json.decoder.JSONDecodeError:
-                stdout.seek(0)
-                testcase.fail(stdout.read())
+                    stdout.seek(0)
+                    try:
+                        validation_results = json.load(stdout)
+                    except json.decoder.JSONDecodeError:
+                        stdout.seek(0)
+                        testcase.fail(stdout.read())

-        errors = ""
-        for result in validation_results:
-            source_lines = result["source"].splitlines(keepends=True)
-            for msg in result["messages"]:
-                line = msg["line"]
-                errors += (
-                    f'\n{result["filePath"]}:\n'
-                    + "".join(source_lines[line - 5 : line])
-                    + " " * (msg["column"] - 1)
-                    + "^" * msg["size"] + "\n"
-                    + " " * (msg["column"] - 1)
-                    + f'{msg["ruleId"]}: {msg["message"]} '
-                    + f'on line {line}:{msg["column"]}\n'
-                    + "".join(source_lines[line : line + 5])
-                    + "\n"
-                )
-        if errors:
-            testcase.fail(errors)
+                for result in validation_results:
+                    source_lines = result["source"].splitlines(keepends=True)
+                    for msg in result["messages"]:
+                        line = msg["line"]
+                        errors.append(
+                            f'\n{result["filePath"]}:\n'
+                            + "".join(source_lines[line - 5 : line])
+                            + " " * (msg["column"] - 1)
+                            + "^" * msg["size"] + "\n"
+                            + " " * (msg["column"] - 1)
+                            + f'{msg["ruleId"]}: {msg["message"]} '
+                            + f'on line {line}:{msg["column"]}\n'
+                            + "".join(source_lines[line : line + 5])
+                            + "\n"
+                        )

-        if settings.validate_html_harder:
-            if kind == "frag":
-                return
-            files = [
-                os.path.join(d, f)
-                for d, dirs, files in os.walk(tmpdir.name)
-                for f in files
-            ]
-            for file in files:
-                with open(file, "rb") as f:
-                    content = f.read()
-                result = vnu_validate(content)
-                assert result
-                for msg in json.loads(result)["messages"]:
-                    if vnu_filter_message(msg, False, True):
-                        continue
-                    errors = vnu_fmt_message(file, msg, content.decode("utf-8"))
-            if errors:
-                testcase.fail(errors)
-
-        tmpdir.cleanup()
+                if settings.validate_html_harder and kind != "frag":
+                    files = [
+                        os.path.join(d, f)
+                        for d, dirs, files in os.walk(tmppath)
+                        for f in files
+                    ]
+                    for file in files:
+                        with open(file, "rb") as f:
+                            content = f.read()
+                        result = vnu_validate(content)
+                        assert result
+                        for msg in json.loads(result)["messages"]:
+                            if vnu_filter_message(msg, False, True):
+                                continue
+                            errors.append(vnu_fmt_message(file, msg, content.decode("utf-8")))
+        if errors:
+            testcase.fail('\n'.join(errors))

     def get_test_paths(self, test_labels):
         """Find the apps and paths matching the test labels, so we later can limit
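The key structural change in the hunk above: tempfile.TemporaryDirectory is now used as a context manager, so the directory is removed even when validation fails partway through, whereas the old explicit tmpdir.cleanup() was skipped on any earlier exception. A minimal sketch of the difference:

import pathlib
import tempfile

try:
    with tempfile.TemporaryDirectory(prefix="html-validate-") as tmpdir_name:
        pathlib.Path(tmpdir_name, "probe.html").write_text("<p>hi</p>")
        raise RuntimeError("validation blew up")
except RuntimeError:
    pass  # the temporary directory is gone regardless of the exception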
@@ -1076,6 +1075,15 @@ class IetfTestRunner(DiscoverRunner):

         self.test_apps, self.test_paths = self.get_test_paths(test_labels)

+        if settings.validate_html:
+            extra_tests += [
+                TemplateValidationTests(
+                    test_runner=self,
+                    validate_html=self,
+                    methodName='run_template_validation',
+                ),
+            ]
+
         if self.check_coverage:
             template_coverage_collection = True
             code_coverage_collection = True
@@ -145,6 +145,10 @@ def assert_ical_response_is_valid(test_inst, response, expected_event_summaries=
     test_inst.assertContains(response, 'END:VEVENT', count=expected_event_count)
     test_inst.assertContains(response, 'UID', count=expected_event_count)

+    # make sure no doubled colons after timestamp properties
+    test_inst.assertNotContains(response, 'DTSTART::')
+    test_inst.assertNotContains(response, 'DTEND::')
+    test_inst.assertNotContains(response, 'DTSTAMP::')
+

 class ReverseLazyTest(django.test.TestCase):
@@ -96,7 +96,7 @@ def timezone_not_near_midnight():
     right_now = timezone.now().astimezone(ZoneInfo(tzname))
     # Avoid the remote possibility of an infinite loop (might come up
    # if there is a problem with the time zone library)
-    tries_left = 20
+    tries_left = 50
     while right_now.hour < 1 or right_now.hour >= 23:
         tzname = random.choice(timezone_options)
         right_now = right_now.astimezone(ZoneInfo(tzname))