Merge remote-tracking branch 'upstream/main' into feat/postgres

commit 3d802497f4

.github/workflows/dev-db-nightly.yml (vendored): 101 lines changed
@@ -25,8 +25,8 @@ on:
   workflow_dispatch:

 jobs:
-  build:
-    name: Build Docker Images
+  build-mariadb:
+    name: Build MariaDB Docker Images
     runs-on: ubuntu-latest
     if: ${{ github.ref == 'refs/heads/main' }}
     permissions:
@@ -41,7 +41,7 @@ jobs:
           docker: "x64"
     steps:
       - uses: actions/checkout@v3

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2

@@ -64,15 +64,13 @@ jobs:
           push: true
           tags: ghcr.io/ietf-tools/datatracker-db:latest-${{ matrix.docker }}

-  combine:
-    name: Create Docker Manifests
+  combine-mariadb:
+    name: Create MariaDB Docker Manifests
     runs-on: ubuntu-latest
-    needs: [build]
+    needs: [build-mariadb]
     permissions:
       packages: write
     steps:
       - uses: actions/checkout@v3

       - name: Get Current Date as Tag
         id: date
         run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT

@@ -92,3 +90,90 @@ jobs:
           echo "Pushing the manifests..."
           docker manifest push -p ghcr.io/ietf-tools/datatracker-db:nightly-${{ steps.date.outputs.date }}
           docker manifest push -p ghcr.io/ietf-tools/datatracker-db:latest
+
+  migrate:
+    name: Migrate MySQL to PostgreSQL DB
+    runs-on: ubuntu-latest
+    container: ghcr.io/ietf-tools/datatracker-app-base:latest
+    needs: [combine-mariadb]
+    permissions:
+      contents: read
+      packages: write
+    services:
+      db:
+        image: ghcr.io/ietf-tools/datatracker-db:latest
+        volumes:
+          - mariadb-data:/var/lib/mysql
+        env:
+          MYSQL_ROOT_PASSWORD: ietf
+          MYSQL_DATABASE: ietf_utf8
+          MYSQL_USER: django
+          MYSQL_PASSWORD: RkTkDPFnKpko
+      pgdb:
+        image: postgres:14.5
+        volumes:
+          - /pgdata:/var/lib/postgresql/data
+        env:
+          POSTGRES_PASSWORD: RkTkDPFnKpko
+          POSTGRES_USER: django
+          POSTGRES_DB: ietf
+          POSTGRES_HOST_AUTH_METHOD: trust
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: 'feat/postgres'
+
+      - name: Migrate
+        run: |
+          chmod +x ./docker/scripts/db-pg-migrate.sh
+          sh ./docker/scripts/db-pg-migrate.sh
+
+      - name: Upload DB Dump
+        uses: actions/upload-artifact@v3
+        with:
+          name: dump
+          path: ietf.dump
+
+  build:
+    name: Build PostgreSQL Docker Images
+    runs-on: ubuntu-latest
+    needs: [migrate]
+    permissions:
+      contents: read
+      packages: write
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: 'feat/postgres'
+
+      - name: Download DB Dump
+        uses: actions/download-artifact@v3.0.0
+        with:
+          name: dump
+
+      - name: Get Current Date as Tag
+        id: date
+        run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Docker Build & Push
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          file: docker/db-pg.Dockerfile
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: ghcr.io/ietf-tools/datatracker-db-pg:latest,ghcr.io/ietf-tools/datatracker-db-pg:nightly-${{ steps.date.outputs.date }}
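Editor's note: the contents of docker/scripts/db-pg-migrate.sh are not part of this diff, so the "Migrate" step above is opaque here. Purely as a hypothetical sketch of that job from the runner's point of view (the service hostnames db and pgdb and the env-var names below are assumptions, not confirmed by the script):

# Hypothetical local stand-in for the "Migrate" job above: run the
# migration script against the two service containers, then check that
# it produced the ietf.dump file the "Upload DB Dump" step expects.
# The script's real interface is not shown in this diff.
import os
import pathlib
import subprocess

env = dict(os.environ)
env.update({
    "MYSQL_HOST": "db",    # assumed: the MariaDB service hostname
    "PGHOST": "pgdb",      # assumed: the PostgreSQL service hostname
    "PGUSER": "django",
    "PGDATABASE": "ietf",
})
subprocess.run(["sh", "./docker/scripts/db-pg-migrate.sh"], env=env, check=True)
assert pathlib.Path("ietf.dump").exists(), "dump expected by the upload step"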
@@ -6,6 +6,7 @@ import datetime

 import debug #pyflakes:ignore

 from django import forms
 from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.core.validators import validate_email

 from ietf.doc.fields import SearchableDocAliasesField, SearchableDocAliasField
 from ietf.doc.models import RelatedDocument, DocExtResource
@@ -84,8 +85,42 @@ class NotifyForm(forms.Form):
     )

     def clean_notify(self):
-        addrspecs = [x.strip() for x in self.cleaned_data["notify"].split(',')]
-        return ', '.join(addrspecs)
+        # As long as the widget is a Textarea, users will separate addresses with newlines, whether that matches the instructions or not
+        # We have been allowing nameaddrs for a long time (there are many Documents with nameaddrs in their notify field)
+        # python set doesn't preserve order, so in an attempt to mostly preserve the order of what was entered, we'll use
+        # a dict (whose keys are guaranteed to be ordered) to cull out duplicates
+
+        nameaddrs=dict()
+        duplicate_addrspecs = set()
+        bad_nameaddrs = []
+        for nameaddr in self.cleaned_data["notify"].replace("\n", ",").split(","):
+            stripped = nameaddr.strip()
+            if stripped == "":
+                continue
+            if "<" in stripped:
+                if stripped[-1] != ">":
+                    bad_nameaddrs.append(nameaddr)
+                    continue
+                addrspec = stripped[stripped.find("<")+1:-1]
+            else:
+                addrspec = stripped
+            try:
+                validate_email(addrspec)
+            except ValidationError:
+                bad_nameaddrs.append(nameaddr)
+            if addrspec in nameaddrs:
+                duplicate_addrspecs.add(addrspec)
+                continue
+            else:
+                nameaddrs[addrspec] = stripped
+        error_messages = []
+        if len(duplicate_addrspecs) != 0:
+            error_messages.append(f'Duplicate addresses: {", ".join(duplicate_addrspecs)}')
+        if len(bad_nameaddrs) != 0:
+            error_messages.append(f'Invalid addresses: {", ".join(bad_nameaddrs)}')
+        if len(error_messages) != 0:
+            raise ValidationError(" and ".join(error_messages))
+        return ", ".join(nameaddrs.values())

 class ActionHoldersForm(forms.Form):
     action_holders = SearchablePersonsField(required=False)
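Editor's note: the order-preserving dedup trick the new clean_notify relies on is easy to see in isolation. A minimal standalone sketch (plain Python, illustrative names only, no Django required):

# dict keys keep insertion order (guaranteed since Python 3.7), so a
# dict can dedup while mostly preserving what the user typed, unlike set.
def dedup_addresses(raw: str) -> str:
    seen = {}
    for item in raw.replace("\n", ",").split(","):
        stripped = item.strip()
        if stripped:
            seen.setdefault(stripped, stripped)
    return ", ".join(seen)

assert dedup_addresses("a@x.com, b@x.com\na@x.com") == "a@x.com, b@x.com"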
@@ -618,7 +618,6 @@ class DocumentInfo(models.Model):
             stylesheets.append(finders.find("ietf/css/document_html_txt.css"))
         else:
             text = self.htmlized()
-            stylesheets.append(io.BytesIO(b"body { font-size: 9.2pt; }"))

         cache = caches["pdfized"]
         cache_key = name.split(".")[0]
@@ -41,6 +41,7 @@ from ietf.doc.factories import ( DocumentFactory, DocEventFactory, CharterFactory,
     IndividualRfcFactory, StateDocEventFactory, BallotPositionDocEventFactory,
     BallotDocEventFactory, DocumentAuthorFactory, NewRevisionDocEventFactory,
     StatusChangeFactory, BofreqFactory)
+from ietf.doc.forms import NotifyForm
 from ietf.doc.fields import SearchableDocumentsField
 from ietf.doc.utils import create_ballot_if_not_open, uppercase_std_abbreviated_name
 from ietf.doc.views_search import ad_dashboard_group, ad_dashboard_group_type, shorten_group_name # TODO: red flag that we're importing from views in tests. Move these to utils.
@@ -2926,3 +2927,43 @@ class PdfizedTests(TestCase):
         for ext in ('pdf','txt','html','anythingatall'):
             self.should_succeed(dict(name=rfc.name,rev=f'{r:02d}',ext=ext))
         self.should_404(dict(name=rfc.name,rev='02'))
+
+class NotifyValidationTests(TestCase):
+    def test_notify_validation(self):
+        valid_values = [
+            "foo@example.com, bar@example.com",
+            "Foo Bar <foobar@example.com>, baz@example.com",
+            "foo@example.com, ,bar@example.com,", # We're ignoring extra commas
+            "foo@example.com\nbar@example.com", # Yes, we're quietly accepting a newline as a comma
+        ]
+        bad_nameaddr_values = [
+            "@example.com",
+            "foo",
+            "foo@",
+            "foo bar foobar@example.com",
+        ]
+        duplicate_values = [
+            "foo@bar.com, bar@baz.com, foo@bar.com",
+            "Foo <foo@bar.com>, foobar <foo@bar.com>",
+        ]
+        both_duplicate_and_bad_values = [
+            "foo@example.com, bar@, Foo <foo@example.com>",
+            "Foo <@example.com>, Bar <@example.com>",
+        ]
+        for v in valid_values:
+            self.assertTrue(NotifyForm({"notify": v}).is_valid())
+        for v in bad_nameaddr_values:
+            f = NotifyForm({"notify": v})
+            self.assertFalse(f.is_valid())
+            self.assertTrue("Invalid addresses" in f.errors["notify"][0])
+            self.assertFalse("Duplicate addresses" in f.errors["notify"][0])
+        for v in duplicate_values:
+            f = NotifyForm({"notify": v})
+            self.assertFalse(f.is_valid())
+            self.assertFalse("Invalid addresses" in f.errors["notify"][0])
+            self.assertTrue("Duplicate addresses" in f.errors["notify"][0])
+        for v in both_duplicate_and_bad_values:
+            f = NotifyForm({"notify": v})
+            self.assertFalse(f.is_valid())
+            self.assertTrue("Invalid addresses" in f.errors["notify"][0])
+            self.assertTrue("Duplicate addresses" in f.errors["notify"][0])
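Editor's note: as the last test group shows, the two error families can co-occur, and clean_notify joins them with " and ". A quick sketch of what a caller sees (needs a configured Django environment, so a sketch rather than a standalone script):

# Sketch: both problem classes surface in one ValidationError message.
from ietf.doc.forms import NotifyForm

f = NotifyForm({"notify": "foo@example.com, bar@, Foo <foo@example.com>"})
assert not f.is_valid()
# f.errors["notify"][0] reads:
#   'Duplicate addresses: foo@example.com and Invalid addresses: bar@'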
@@ -5,6 +5,7 @@ import debug # pyflakes:ignore
+from unittest.mock import patch

 from django.db import IntegrityError
 from django.test.utils import override_settings
 from django.utils import timezone

 from ietf.group.factories import GroupFactory, RoleFactory
@@ -15,7 +16,7 @@ from ietf.person.models import Person
 from ietf.doc.factories import DocumentFactory, WgRfcFactory, WgDraftFactory
 from ietf.doc.models import State, DocumentActionHolder, DocumentAuthor, Document
 from ietf.doc.utils import (update_action_holders, add_state_change_event, update_documentauthors,
-    fuzzy_find_documents, rebuild_reference_relations)
+    fuzzy_find_documents, rebuild_reference_relations, build_file_urls)
 from ietf.utils.draft import Draft, PlaintextDraft
 from ietf.utils.xmldraft import XMLDraft

@@ -294,6 +295,30 @@ class MiscTests(TestCase):
         self.do_fuzzy_find_documents_rfc_test('draft-name-that-has-two-02-04')
         self.do_fuzzy_find_documents_rfc_test('draft-wild-01-numbers-0312')

+    @override_settings(RFC_FILE_TYPES=['pdf'], IDSUBMIT_FILE_TYPES=['xml'])
+    @patch('ietf.doc.utils.os.path.exists', return_value=True)
+    def test_build_file_urls(self, mocked):
+        # a cursory test only - does not check details of how URLs are constructed
+        self.assertEqual(
+            build_file_urls(DocumentFactory(type_id='statchg')), ([], []),
+            'Non-draft Document should return empty sets'
+        )
+
+        with self.assertRaises(AssertionError):
+            build_file_urls(WgDraftFactory(rev=''))
+
+        urls, types = build_file_urls(WgDraftFactory(rev='23'))
+        self.assertEqual(['xml', 'bibtex'], [t for t, _ in urls])
+        self.assertEqual(types, ['xml'])
+
+        urls, types = build_file_urls(WgRfcFactory(rev=''))
+        self.assertEqual(['pdf', 'bibtex'], [t for t, _ in urls])
+        self.assertEqual(types, ['pdf'])
+
+        urls, types = build_file_urls(WgRfcFactory(rev='23'))
+        self.assertEqual(['pdf', 'bibtex'], [t for t, _ in urls])
+        self.assertEqual(types, ['pdf'])
+

 class RebuildReferenceRelationsTests(TestCase):
     def setUp(self):
@@ -12,6 +12,7 @@ import re
 import textwrap

 from collections import defaultdict, namedtuple
+from typing import Union
 from urllib.parse import quote
 from zoneinfo import ZoneInfo

@@ -995,8 +996,11 @@ def get_search_cache_key(params):
     key = "doc:document:search:" + hashlib.sha512(json.dumps(kwargs, sort_keys=True).encode('utf-8')).hexdigest()
     return key

-def build_file_urls(doc):
-    if isinstance(doc,Document) and doc.get_state_slug() == "rfc":
+def build_file_urls(doc: Union[Document, DocHistory]):
+    if doc.type_id != 'draft':
+        return [], []
+
+    if doc.get_state_slug() == "rfc":
         name = doc.canonical_name()
         base_path = os.path.join(settings.RFC_PATH, name + ".")
         possible_types = settings.RFC_FILE_TYPES
@@ -1017,7 +1021,7 @@ def build_file_urls(doc):
         if doc.tags.filter(slug="verified-errata").exists():
             file_urls.append(("with errata", settings.RFC_EDITOR_INLINE_ERRATA_URL.format(rfc_number=doc.rfc_number())))
         file_urls.append(("bibtex", urlreverse('ietf.doc.views_doc.document_bibtex',kwargs=dict(name=name))))
-    else:
+    elif doc.rev:
         base_path = os.path.join(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR, doc.name + "-" + doc.rev + ".")
         possible_types = settings.IDSUBMIT_FILE_TYPES
         found_types = [t for t in possible_types if os.path.exists(base_path + t)]
@@ -1031,6 +1035,12 @@ def build_file_urls(doc):
         file_urls.append(("htmlized", urlreverse('ietf.doc.views_doc.document_html', kwargs=dict(name=doc.name, rev=doc.rev))))
         file_urls.append(("pdfized", urlreverse('ietf.doc.views_doc.document_pdfized', kwargs=dict(name=doc.name, rev=doc.rev))))
         file_urls.append(("bibtex", urlreverse('ietf.doc.views_doc.document_bibtex',kwargs=dict(name=doc.name,rev=doc.rev))))
+    else:
+        # As of 2022-12-14, there are 1463 Document and 3136 DocHistory records with type='draft' and rev=''.
+        # All of these are in the rfc state and are covered by the above cases.
+        log.unreachable('2022-12-14')
+        file_urls = []
+        found_types = []

     return file_urls, found_types
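Editor's note: taken together, build_file_urls now has three explicit cases instead of an ambiguous else. A toy model of the new contract, with the settings and filesystem lookups stubbed out (sketch only, not datatracker code; in production mode log.unreachable logs rather than raising):

# Toy model of the new build_file_urls contract (stubbed lookups):
def file_urls_sketch(type_id, state_slug, rev):
    if type_id != "draft":
        return [], []                      # non-drafts: nothing to list
    if state_slug == "rfc":
        return [("pdf", "<url>"), ("bibtex", "<url>")], ["pdf"]
    if rev:
        return [("xml", "<url>"), ("bibtex", "<url>")], ["xml"]
    # draft with rev='' outside the rfc state: flagged via log.unreachable,
    # which raises AssertionError outside production (as the test expects)
    raise AssertionError("unreachable: draft with empty rev")

assert file_urls_sketch("statchg", "active", "00") == ([], [])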
@@ -162,7 +162,7 @@ def document_main(request, name, rev=None, document_html=False):
     snapshot = False

     gh = None
-    if rev != None:
+    if rev:
        # find the entry in the history
        for h in doc.history_set.order_by("-time"):
            if rev == h.rev:
@@ -711,8 +711,11 @@ def complete_review(request, name, assignment_id=None, acronym=None):
         date_today().isoformat(),
     ]
     review_name = "-".join(c for c in name_components if c).lower()
-    if not Document.objects.filter(name=review_name).exists():
-        review = Document.objects.create(name=review_name,type_id='review',group=team)
+    review, created = Document.objects.get_or_create(
+        name=review_name,
+        defaults={'type_id': 'review', 'group': team},
+    )
+    if created:
         DocAlias.objects.create(name=review_name).docs.add(review)
+    else:
+        messages.warning(request, message='Attempt to save review failed: review document already exists. This most likely occurred because the review was submitted twice in quick succession. If you intended to submit a new review, rather than update an existing one, things are probably OK. Please verify that the shown review is what you expected.')
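Editor's note on the motivation, sketched: with exists()-then-create, two near-simultaneous submissions can both observe exists() == False and both try to create; get_or_create funnels that decision through one call, so the loser sees created == False and gets the warning instead. A toy illustration (plain Python, not Django; the lock stands in for the database uniqueness guarantee behind get_or_create):

# Toy illustration of the duplicate-safe pattern the change adopts.
import threading

_rows, _lock = {}, threading.Lock()

def get_or_create(name, **defaults):
    with _lock:
        if name in _rows:
            return _rows[name], False   # second submitter: no new row
        _rows[name] = dict(defaults)
        return _rows[name], True        # first submitter: row created

review, created = get_or_create("review-foo-2022", type_id="review")
again, created_again = get_or_create("review-foo-2022", type_id="review")
assert created and not created_again and review is again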
@@ -119,6 +119,11 @@ def preprocess_assignments_for_agenda(assignments_queryset, meeting, extra_prefetches
     groups = [ ]
     for a in assignments:
         if a.session:
+            # Ensure that all Sessions refer to the same Meeting instance so they can share the
+            # _groups_at_the_time() cache. The Sessions should all belong to the same meeting, but
+            # check before blindly assigning to meeting just in case.
+            if a.session.meeting.pk == meeting.pk:
+                a.session.meeting = meeting
             a.session.order_number = None

             if a.session.group and a.session.group not in groups:
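Editor's note: the inline comment gives the why; a toy model of the design choice (illustrative Python, not datatracker classes). Per-instance memoization only pays off when every Session points at literally the same Meeting object:

# Toy model: a per-instance cache is only shared if callers share the
# instance, which is why the loop above rebinds a.session.meeting.
class Meeting:
    def __init__(self, pk):
        self.pk = pk
        self._groups_cache = {}

    def groups_at(self, when):
        if when not in self._groups_cache:
            # stand-in for the expensive historical-groups query
            self._groups_cache[when] = f"groups@{when}"
        return self._groups_cache[when]

m = Meeting(pk=116)
sessions = [type("Session", (), {"meeting": Meeting(pk=116)})() for _ in range(3)]
for s in sessions:
    if s.meeting.pk == m.pk:
        s.meeting = m          # now all three share one cache
assert all(s.meeting is m for s in sessions)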
@@ -290,10 +290,14 @@ tbody.meta tr {
     margin: .75in;
 }

+* {
+    font-size: 9.75pt !important;
+    line-height: 1.25em !important;
+}
+
 body {
     margin: 0;
     padding: 0;
-    font-size: 10pt;
 }

 pre {
@@ -343,6 +343,7 @@ a.relref, a.xref.cite {
     margin-right: -1ch;
     opacity: 0.01;
     text-decoration: none;
+    user-select: none;
 }
 :hover > .pilcrow {
     opacity: 0.2;
@@ -11,22 +11,23 @@ import { populate_nav } from "./nav.js";

 const cookies = Cookies.withAttributes({ sameSite: "strict" });

+// set initial point size from cookie before DOM is ready, to avoid flickering
+const ptsize_var = "doc-ptsize-max";
+
+function change_ptsize(ptsize) {
+    document.documentElement.style.setProperty(`--${ptsize_var}`,
+        `${ptsize}pt`);
+    localStorage.setItem(ptsize_var, ptsize);
+}
+
+const ptsize = localStorage.getItem(ptsize_var);
+change_ptsize(ptsize ? Math.min(Math.max(7, ptsize), 16) : 12);
+
 document.addEventListener("DOMContentLoaded", function (event) {
     // handle point size slider
-    const cookie = "doc-ptsize-max";
-
-    function change_ptsize(ptsize) {
-        document.documentElement.style.setProperty(`--${cookie}`,
-            `${ptsize}pt`);
-        cookies.set(cookie, ptsize);
-    }
-
     document.getElementById("ptsize")
         .oninput = function () { change_ptsize(this.value) };

-    const ptsize = cookies.get(cookie);
-    change_ptsize(ptsize ? Math.min(Math.max(7, ptsize), 16) : 12);
-
     // Use the Bootstrap tooltip plugin for all elements with a title attribute
     const tt_triggers = document.querySelectorAll(
         "[title]:not([title=''])");
@@ -45,15 +46,25 @@ document.addEventListener("DOMContentLoaded", function (event) {
        #content .h1, #content .h2, #content .h3, #content .h4, #content .h5, #content .h6`,
        ["py-0"]);

-    // activate pref buttons selected by pref cookies
+    // activate pref buttons selected by pref cookies or localStorage
+    const in_localStorage = ["deftab"];
     document.querySelectorAll(".btn-check")
         .forEach(btn => {
             const id = btn.id.replace("-radio", "");
-            if (cookies.get(btn.name) == id) {
+
+            const val = in_localStorage.includes(btn.name) ?
+                localStorage.getItem(btn.name) : cookies.get(btn.name);
+            if (val == id) {
                 btn.checked = true;
             }

             btn.addEventListener("click", el => {
-                cookies.set(btn.name, id);
+                // only use cookies for things used in HTML templates
+                if (in_localStorage.includes(btn.name)) {
+                    localStorage.setItem(btn.name, id)
+                } else {
+                    cookies.set(btn.name, id);
+                }
                 window.location.reload();
             });
         });
@@ -62,7 +73,7 @@ document.addEventListener("DOMContentLoaded", function (event) {
     let defpane;
     try {
         defpane = Tab.getOrCreateInstance(
-            `#${cookies.get("deftab")}-tab`);
+            `#${localStorage.getItem("deftab")}-tab`);
     } catch (err) {
         defpane = Tab.getOrCreateInstance("#docinfo-tab");
     };
@@ -98,7 +98,7 @@
     </div>
 </nav>
 <div class="row g-0">
-    <div class="col d-flex justify-content-center lh-sm"
+    <div class="col-md-9 d-flex justify-content-center lh-sm"
         data-bs-spy="scroll"
         data-bs-target="#toc-nav"
         data-bs-smooth-scroll="true"
@@ -117,8 +117,8 @@
     </div>
 {% endif %}
 </div>
-<div class="d-none d-md-block d-print-none col-3 bg-light collapse{% if request.COOKIES.sidebar != 'off'%} show{% endif %}" id="sidebar">
-    <div class="position-fixed col-3 border-start sidebar overflow-scroll overscroll-none no-scrollbar">
+<div class="d-print-none col-md-3 bg-light collapse{% if request.COOKIES.sidebar != 'off'%} show{% endif %}" id="sidebar">
+    <div class="position-fixed border-start sidebar overflow-scroll overscroll-none no-scrollbar">
     <button class="btn btn-outline-secondary float-end m-2"
         type="button"
         id="sidebar-off"
@@ -296,6 +296,7 @@ href="{% url 'ietf.doc.views_draft.review_possibly_replaces' name=doc.name %}">Edit</a>
     {% endif %}
     </td>
 </tr>
+{% if not document_html %}
 {% for check in doc.submission.latest_checks %}
     {% if check.passed != None and check.symbol.strip %}
         <tr>
@@ -331,7 +332,6 @@ href="{% url 'ietf.doc.views_draft.review_possibly_replaces' name=doc.name %}">Edit</a>
     </tr>
     {% endif %}
 {% endfor %}
-{% if not document_html %}
 {% if review_assignments or can_request_review %}
     <tr>
         <td></td>