Merged in ^/personal/henrik/6.98.2-py3@16468, containing code converted to work with both Python 2.7 and Python 3.7.
 - Legacy-Id: 16470

Commit: fae97ed7d9
@@ -1,5 +1,5 @@
-#!/usr/bin/env python
-# -*- python -*-
+#!/usr/bin/env python3.7
+# -*- mode: python; coding: utf-8 -*-
 # Copyright The IETF Trust 2019, All Rights Reserved
 """
 NAME
@@ -10,12 +10,18 @@ SYNOPSIS

 DESCRIPTION
     Given a list of files or filename wildcard patterns, check all for
-    an IETF Trust copyright notice with the current year.
+    an IETF Trust copyright notice with the current year.  Optionally
+    generate a diff on standard out which can be used by 'patch'.
+
+    An invocation similar to the following can be particularly useful with
+    a set of changed version-controlled files, as it will fix up the
+    Copyright statements of any python files with pending changes:
+
+        $ check-copyright -p $(svn st | cut -c 9- | grep '\.py$' ) | patch -p0
+

 %(options)s

-FILES
-
 AUTHOR
     Written by Henrik Levkowetz, <henrik@tools.ietf.org>

@@ -43,10 +49,12 @@ import pytz
 import tzparse
 import debug

-version = "0.10"
+version = "1.0.0"
 program = os.path.basename(sys.argv[0])
 progdir = os.path.dirname(sys.argv[0])

+debug.debug = True
+
 # ----------------------------------------------------------------------
 # Parse options

@@ -63,8 +71,8 @@ if len(sys.argv) < 1:
     sys.exit(1)

 try:
-    opts, files = getopt.gnu_getopt(sys.argv[1:], "hvV", ["help", "version", "verbose",])
-except Exception, e:
+    opts, files = getopt.gnu_getopt(sys.argv[1:], "hC:pvV", ["help", "copyright=", "patch", "version", "verbose",])
+except Exception as e:
     print( "%s: %s" % (program, e))
     sys.exit(1)

@@ -73,16 +81,22 @@ except Exception, e:

 # set default values, if any
 opt_verbose = 0
+opt_patch = False
+opt_copyright = "Copyright The IETF Trust {years}, All Rights Reserved"

 # handle individual options
 for opt, value in opts:
     if   opt in ["-h", "--help"]: # Output this help, then exit
         print( __doc__ % locals() )
         sys.exit(1)
+    elif opt in ["-p", "--patch"]: # Generate patch output rather than error messages
+        opt_patch = True
+    elif opt in ["-C", "--copyright"]: # Copyright line pattern using {years} for years
+        opt_copyright = value
     elif opt in ["-V", "--version"]: # Output version information, then exit
         print( program, version )
         sys.exit(0)
-    elif opt in ["-v", "--verbose"]: # Output version information, then exit
+    elif opt in ["-v", "--verbose"]: # Be more verbose
         opt_verbose += 1

 # ----------------------------------------------------------------------
@@ -107,7 +121,7 @@ def pipe(cmd, inp=None):
     args = shlex.split(cmd)
     bufsize = 4096
     stdin = PIPE if inp else None
-    pipe = Popen(args, stdin=stdin, stdout=PIPE, stderr=PIPE, bufsize=bufsize)
+    pipe = Popen(args, stdin=stdin, stdout=PIPE, stderr=PIPE, bufsize=bufsize, encoding='utf-8', universal_newlines=True)
     out, err = pipe.communicate(inp)
     code = pipe.returncode
     if code != 0:
@@ -156,9 +170,6 @@ import json

 cwd = os.getcwd()

-if cwd.split(os.path.sep)[-1] != 'trunk':
-    die("Expected to run this operation in trunk, but the current\ndirectory is '%s'" % cwd)
-
 # Get current initinfo from cache and svn
 cachefn = os.path.join(os.environ.get('HOME', '.'), '.initinfo')

@@ -177,24 +188,67 @@ write_cache = False
 loginfo_format = r'^r[0-9]+ \| [^@]+@[^@]+ \| \d\d\d\d-\d\d-\d\d '

 year = time.strftime('%Y')
+copyright_re = "(?i)"+opt_copyright.format(years=r"(\d+-)?\d+")
 for path in files:
-    note("Checking path %s" % path)
-    if not path in initinfo:
-        initinfo.update(get_first_commit(path))
-        write_cache = True
-    date = initinfo[path]['date']
-    init = date[:4]
-    copyright = "(?i)Copyright The IETF Trust (%s-)?%s, All Rights Reserved" % (init, year)
-    with open(path) as file:
-        chunk = file.read(4000)
-        if os.path.basename(path) == '__init__.py' and len(chunk)==0:
-            continue
-        if not re.search(copyright, chunk):
-            sys.stdout.write("%s(1): Error: Missing or bad copyright. " % path)
-            if year == init:
-                print(" Expected: Copyright The IETF Trust %s, All Rights Reserved" % year)
-            else:
-                print(" Expected: Copyright The IETF Trust %s-%s, All Rights Reserved" % (init, year))
+    try:
+        if not os.path.exists(path):
+            note("File does not exist: %s" % path)
+            continue
+        note("Checking path %s" % path)
+        if not path in initinfo:
+            initinfo.update(get_first_commit(path))
+            write_cache = True
+        date = initinfo[path]['date']
+        init = date[:4]
+        copyright_year_re = "(?i)"+opt_copyright.format(years=r"({init}-)?{year}".format(init=init, year=year))
+        with open(path) as file:
+            try:
+                chunk = file.read(4000)
+            except UnicodeDecodeError as e:
+                sys.stderr.write(f'Error when reading {file.name}: {e}\n')
+                raise
+            if os.path.basename(path) == '__init__.py' and len(chunk)==0:
+                continue
+            if not re.search(copyright_year_re, chunk):
+                if year == init:
+                    copyright = opt_copyright.format(years=year)
+                else:
+                    copyright = opt_copyright.format(years=f"{init}-{year}")
+                if opt_patch:
+                    print(f"--- {file.name}\t(original)")
+                    print(f"+++ {file.name}\t(modified)")
+                    if not re.search(copyright_re, chunk):
+                        # Simple case, just insert copyright at the top
+                        print( "@@ -1,3 +1,4 @@")
+                        print(f"+# {copyright}")
+                        for i, line in list(enumerate(chunk.splitlines()))[:3]:
+                            print(f" {line}")
+                    else:
+                        # Find old copyright, then emit preceding lines,
+                        # change, and following lines.
+                        pos = None
+                        for i, line in enumerate(chunk.splitlines(), start=1):
+                            if re.search(copyright_re, line):
+                                pos = i
+                                break
+                        if not pos:
+                            raise RuntimeError("Unexpected state: Expected a copyright line, but found none")
+                        print(f"@@ -1,{pos+3} +1,{pos+3} @@")
+                        for i, line in list(enumerate(chunk.splitlines(), start=1))[:pos+3]:
+                            if i == pos:
+                                print(f"-{line}")
+                                print(f"+# {copyright}")
+                            else:
+                                print(f" {line}")
+                else:
+                    sys.stderr.write(f"{path}(1): Error: Missing or bad copyright. Expected: {copyright}")
+    except Exception:
+        if write_cache:
+            cache = initinfo
+            with open(cachefn, "w") as file:
+                json.dump(cache, file, indent=2, sort_keys=True)
+        raise

 if write_cache:
     cache = initinfo
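The new `-p/--patch` path above turns the `{years}` pattern into a case-insensitive regex and, when no acceptable notice is found, prints a unified-diff hunk that `patch -p0` can apply. A condensed, standalone sketch of that idea follows (the helper name and file content are hypothetical; the real script additionally caches each file's first-commit year from the svn log):

    import re
    import time

    # Pattern as used above; {years} becomes either "2019" or e.g. "2013-2019".
    OPT_COPYRIGHT = "Copyright The IETF Trust {years}, All Rights Reserved"

    def copyright_patch(path, text, init_year, year=None):
        """Return unified-diff lines adding a copyright header, or None if one is present (sketch)."""
        year = year or time.strftime('%Y')
        wanted = OPT_COPYRIGHT.format(years=year if year == init_year else "%s-%s" % (init_year, year))
        year_re = "(?i)" + OPT_COPYRIGHT.format(years=r"({}-)?{}".format(init_year, year))
        if re.search(year_re, text):
            return None                      # an up-to-date notice is already there
        lines = ["--- %s\t(original)" % path, "+++ %s\t(modified)" % path,
                 "@@ -1,3 +1,4 @@", "+# " + wanted]
        lines += [" " + l for l in text.splitlines()[:3]]
        return lines

    # Hypothetical usage:
    print("\n".join(copyright_patch("example.py", "#!/usr/bin/env python3.7\nimport os\nimport sys\n", "2013")))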
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# Copyright The IETF Trust 2013-2019, All Rights Reserved

 import os, sys, re, datetime, argparse, traceback, json, subprocess
 import html5lib
@@ -62,6 +63,7 @@ import debug    # pyflakes:ignore

 from ietf.name.models import DocTypeName
 from ietf.utils.html import unescape
+from ietf.utils.test_utils import unicontent

 # --- Constants ---

@@ -387,7 +389,7 @@ if __name__ == "__main__":
             if ctype == "text/html":
                 try:
                     if args.follow and not skip_extract_from(url):
-                        for u in extract_html_urls(r.content):
+                        for u in extract_html_urls(unicontent(r)):
                             if u not in visited and u not in urls:
                                 urls[u] = url
                                 referrers[u] = url
@@ -403,7 +405,7 @@ if __name__ == "__main__":
             elif ctype == "application/json":
                 try:
                     if args.follow:
-                        for u in extract_tastypie_urls(r.content):
+                        for u in extract_tastypie_urls(unicontent(r)):
                             if u not in visited and u not in urls:
                                 urls[u] = url
                                 referrers[u] = url
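The two hunks above swap `r.content` (which is bytes under Python 3) for `unicontent(r)` before handing the body to the HTML and Tastypie URL extractors. A minimal sketch of what such a helper needs to do (the actual `ietf.utils.test_utils.unicontent` may differ in detail):

    def unicontent(response):
        # Decode a response body to text using its declared charset,
        # falling back to UTF-8 (sketch only; assumed attributes).
        charset = getattr(response, "charset", None) or "utf-8"
        return response.content.decode(charset)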
changelog.py (13 changed lines)
@@ -1,4 +1,9 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import, print_function, unicode_literals
 import re
+import six
 from tzparse import tzparse
 from datetime import datetime as Datetime

@@ -38,12 +43,12 @@ class ChangeLogEntry:
     title = ""

 def parse(logfile):
-    ver_line = "^(\w+) \((\S+)\) (\S+;)? (?:urgency=(\S+))?$"
-    sig_line = "^ -- ([^<]+) <([^>]+)> (.*?) *$"
+    ver_line = r"^(\w+) \((\S+)\) (\S+;)? (?:urgency=(\S+))?$"
+    sig_line = r"^ -- ([^<]+) <([^>]+)> (.*?) *$"
     inf_line = r"^ \*\*(.*)\*\* *"

     entries = []
-    if type(logfile) == type(''):
+    if isinstance(logfile, six.string_types):
         logfile = open(logfile)
     entry = None
     for line in logfile:
@@ -66,5 +71,5 @@ def parse(logfile):
         elif entry:
             entry.logentry += line
         else:
-            print "Unexpected line: '%s'" % line
+            print("Unexpected line: '%s'" % line)
     return entries
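The `isinstance(logfile, six.string_types)` test replaces `type(logfile) == type('')`, which misses unicode paths on Python 2 and is unidiomatic on Python 3. The same pattern is useful wherever a function accepts either a path or an already-open file, as in this small sketch:

    import six

    def as_file(logfile):
        # Accept a path (str on py3, str/unicode on py2) or an open file object.
        if isinstance(logfile, six.string_types):
            logfile = open(logfile)
        return logfile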
debug.py (1 changed line)
@@ -1,3 +1,4 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
 import os
 import sys
 import time as timeutils
@@ -5,3 +5,4 @@ su - -c "apt-get update \
     && apt-get install -qy graphviz ghostscript apache2-utils \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*"
@ -1,12 +0,0 @@
|
||||||
from django.contrib import admin
|
|
||||||
from django import forms
|
|
||||||
|
|
||||||
from form_utils.fields import ClearableFileField
|
|
||||||
|
|
||||||
class ClearableFileFieldsAdmin(admin.ModelAdmin):
|
|
||||||
def formfield_for_dbfield(self, db_field, **kwargs):
|
|
||||||
field = super(ClearableFileFieldsAdmin, self).formfield_for_dbfield(
|
|
||||||
db_field, **kwargs)
|
|
||||||
if isinstance(field, forms.FileField):
|
|
||||||
field = ClearableFileField(field)
|
|
||||||
return field
|
|
|
@ -1,51 +0,0 @@
|
||||||
from django import forms
|
|
||||||
|
|
||||||
from form_utils.widgets import ClearableFileInput
|
|
||||||
|
|
||||||
class FakeEmptyFieldFile(object):
|
|
||||||
"""
|
|
||||||
A fake FieldFile that will convice a FileField model field to
|
|
||||||
actually replace an existing file name with an empty string.
|
|
||||||
|
|
||||||
FileField.save_form_data only overwrites its instance data if the
|
|
||||||
incoming form data evaluates to True in a boolean context (because
|
|
||||||
an empty file input is assumed to mean "no change"). We want to be
|
|
||||||
able to clear it without requiring the use of a model FileField
|
|
||||||
subclass (keeping things at the form level only). In order to do
|
|
||||||
this we need our form field to return a value that evaluates to
|
|
||||||
True in a boolean context, but to the empty string when coerced to
|
|
||||||
unicode. This object fulfills that requirement.
|
|
||||||
|
|
||||||
It also needs the _committed attribute to satisfy the test in
|
|
||||||
FileField.pre_save.
|
|
||||||
|
|
||||||
This is, of course, hacky and fragile, and depends on internal
|
|
||||||
knowledge of the FileField and FieldFile classes. But it will
|
|
||||||
serve until Django FileFields acquire a native ability to be
|
|
||||||
cleared (ticket 7048).
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __unicode__(self):
|
|
||||||
return u''
|
|
||||||
_committed = True
|
|
||||||
|
|
||||||
class ClearableFileField(forms.MultiValueField):
|
|
||||||
default_file_field_class = forms.FileField
|
|
||||||
widget = ClearableFileInput
|
|
||||||
|
|
||||||
def __init__(self, file_field=None, template=None, *args, **kwargs):
|
|
||||||
file_field = file_field or self.default_file_field_class(*args,
|
|
||||||
**kwargs)
|
|
||||||
fields = (file_field, forms.BooleanField(required=False))
|
|
||||||
kwargs['required'] = file_field.required
|
|
||||||
kwargs['widget'] = self.widget(file_widget=file_field.widget,
|
|
||||||
template=template)
|
|
||||||
super(ClearableFileField, self).__init__(fields, *args, **kwargs)
|
|
||||||
|
|
||||||
def compress(self, data_list):
|
|
||||||
if data_list[1] and not data_list[0]:
|
|
||||||
return FakeEmptyFieldFile()
|
|
||||||
return data_list[0]
|
|
||||||
|
|
||||||
class ClearableImageField(ClearableFileField):
|
|
||||||
default_file_field_class = forms.ImageField
|
|
|
@ -1,278 +0,0 @@
|
||||||
"""
|
|
||||||
forms for django-form-utils
|
|
||||||
|
|
||||||
Time-stamp: <2010-04-28 02:57:16 carljm forms.py>
|
|
||||||
|
|
||||||
"""
|
|
||||||
from copy import deepcopy
|
|
||||||
|
|
||||||
from django import forms
|
|
||||||
from django.forms.utils import flatatt, ErrorDict
|
|
||||||
from django.utils.safestring import mark_safe
|
|
||||||
|
|
||||||
class Fieldset(object):
|
|
||||||
"""
|
|
||||||
An iterable Fieldset with a legend and a set of BoundFields.
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, form, name, boundfields, legend='', classes='', description=''):
|
|
||||||
self.form = form
|
|
||||||
self.boundfields = boundfields
|
|
||||||
if legend is None: legend = name
|
|
||||||
self.legend = legend and mark_safe(legend)
|
|
||||||
self.classes = classes
|
|
||||||
self.description = mark_safe(description)
|
|
||||||
self.name = name
|
|
||||||
|
|
||||||
|
|
||||||
def _errors(self):
|
|
||||||
return ErrorDict(((k, v) for (k, v) in self.form.errors.iteritems()
|
|
||||||
if k in [f.name for f in self.boundfields]))
|
|
||||||
errors = property(_errors)
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for bf in self.boundfields:
|
|
||||||
yield _mark_row_attrs(bf, self.form)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "%s('%s', %s, legend='%s', classes='%s', description='%s')" % (
|
|
||||||
self.__class__.__name__, self.name,
|
|
||||||
[f.name for f in self.boundfields], self.legend, self.classes, self.description)
|
|
||||||
|
|
||||||
class FieldsetCollection(object):
|
|
||||||
def __init__(self, form, fieldsets):
|
|
||||||
self.form = form
|
|
||||||
self.fieldsets = fieldsets
|
|
||||||
self._cached_fieldsets = []
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
return len(self.fieldsets) or 1
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
if not self._cached_fieldsets:
|
|
||||||
self._gather_fieldsets()
|
|
||||||
for field in self._cached_fieldsets:
|
|
||||||
yield field
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
if not self._cached_fieldsets:
|
|
||||||
self._gather_fieldsets()
|
|
||||||
for field in self._cached_fieldsets:
|
|
||||||
if field.name == key:
|
|
||||||
return field
|
|
||||||
raise KeyError
|
|
||||||
|
|
||||||
def _gather_fieldsets(self):
|
|
||||||
if not self.fieldsets:
|
|
||||||
self.fieldsets = (('main', {'fields': self.form.fields.keys(),
|
|
||||||
'legend': ''}),)
|
|
||||||
for name, options in self.fieldsets:
|
|
||||||
try:
|
|
||||||
field_names = [n for n in options['fields']
|
|
||||||
if n in self.form.fields]
|
|
||||||
except KeyError:
|
|
||||||
raise ValueError("Fieldset definition must include 'fields' option." )
|
|
||||||
boundfields = [forms.forms.BoundField(self.form, self.form.fields[n], n)
|
|
||||||
for n in field_names]
|
|
||||||
self._cached_fieldsets.append(Fieldset(self.form, name,
|
|
||||||
boundfields, options.get('legend', None),
|
|
||||||
' '.join(options.get('classes', ())),
|
|
||||||
options.get('description', '')))
|
|
||||||
|
|
||||||
def _get_meta_attr(attrs, attr, default):
|
|
||||||
try:
|
|
||||||
ret = getattr(attrs['Meta'], attr)
|
|
||||||
except (KeyError, AttributeError):
|
|
||||||
ret = default
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def _set_meta_attr(attrs, attr, value):
|
|
||||||
try:
|
|
||||||
setattr(attrs['Meta'], attr, value)
|
|
||||||
return True
|
|
||||||
except KeyError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_fieldsets(bases, attrs):
|
|
||||||
"""
|
|
||||||
Get the fieldsets definition from the inner Meta class.
|
|
||||||
|
|
||||||
"""
|
|
||||||
fieldsets = _get_meta_attr(attrs, 'fieldsets', None)
|
|
||||||
if fieldsets is None:
|
|
||||||
#grab the fieldsets from the first base class that has them
|
|
||||||
for base in bases:
|
|
||||||
fieldsets = getattr(base, 'base_fieldsets', None)
|
|
||||||
if fieldsets is not None:
|
|
||||||
break
|
|
||||||
fieldsets = fieldsets or []
|
|
||||||
return fieldsets
|
|
||||||
|
|
||||||
def get_fields_from_fieldsets(fieldsets):
|
|
||||||
"""
|
|
||||||
Get a list of all fields included in a fieldsets definition.
|
|
||||||
|
|
||||||
"""
|
|
||||||
fields = []
|
|
||||||
try:
|
|
||||||
for name, options in fieldsets:
|
|
||||||
fields.extend(options['fields'])
|
|
||||||
except (TypeError, KeyError):
|
|
||||||
raise ValueError('"fieldsets" must be an iterable of two-tuples, '
|
|
||||||
'and the second tuple must be a dictionary '
|
|
||||||
'with a "fields" key')
|
|
||||||
return fields
|
|
||||||
|
|
||||||
def get_row_attrs(bases, attrs):
|
|
||||||
"""
|
|
||||||
Get the row_attrs definition from the inner Meta class.
|
|
||||||
|
|
||||||
"""
|
|
||||||
return _get_meta_attr(attrs, 'row_attrs', {})
|
|
||||||
|
|
||||||
def _mark_row_attrs(bf, form):
|
|
||||||
row_attrs = deepcopy(form._row_attrs.get(bf.name, {}))
|
|
||||||
if bf.field.required:
|
|
||||||
req_class = 'required'
|
|
||||||
else:
|
|
||||||
req_class = 'optional'
|
|
||||||
if 'class' in row_attrs:
|
|
||||||
row_attrs['class'] = row_attrs['class'] + ' ' + req_class
|
|
||||||
else:
|
|
||||||
row_attrs['class'] = req_class
|
|
||||||
bf.row_attrs = mark_safe(flatatt(row_attrs))
|
|
||||||
return bf
|
|
||||||
|
|
||||||
class BetterFormBaseMetaclass(type):
|
|
||||||
def __new__(cls, name, bases, attrs):
|
|
||||||
attrs['base_fieldsets'] = get_fieldsets(bases, attrs)
|
|
||||||
fields = get_fields_from_fieldsets(attrs['base_fieldsets'])
|
|
||||||
if (_get_meta_attr(attrs, 'fields', None) is None and
|
|
||||||
_get_meta_attr(attrs, 'exclude', None) is None):
|
|
||||||
_set_meta_attr(attrs, 'fields', fields)
|
|
||||||
attrs['base_row_attrs'] = get_row_attrs(bases, attrs)
|
|
||||||
new_class = super(BetterFormBaseMetaclass,
|
|
||||||
cls).__new__(cls, name, bases, attrs)
|
|
||||||
return new_class
|
|
||||||
|
|
||||||
class BetterFormMetaclass(BetterFormBaseMetaclass,
|
|
||||||
forms.forms.DeclarativeFieldsMetaclass):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class BetterModelFormMetaclass(BetterFormBaseMetaclass,
|
|
||||||
forms.models.ModelFormMetaclass):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class BetterBaseForm(object):
|
|
||||||
"""
|
|
||||||
``BetterForm`` and ``BetterModelForm`` are subclasses of Form
|
|
||||||
and ModelForm that allow for declarative definition of fieldsets
|
|
||||||
and row_attrs in an inner Meta class.
|
|
||||||
|
|
||||||
The row_attrs declaration is a dictionary mapping field names to
|
|
||||||
dictionaries of attribute/value pairs. The attribute/value
|
|
||||||
dictionaries will be flattened into HTML-style attribute/values
|
|
||||||
(i.e. {'style': 'display: none'} will become ``style="display:
|
|
||||||
none"``), and will be available as the ``row_attrs`` attribute of
|
|
||||||
the ``BoundField``. Also, a CSS class of "required" or "optional"
|
|
||||||
will automatically be added to the row_attrs of each
|
|
||||||
``BoundField``, depending on whether the field is required.
|
|
||||||
|
|
||||||
There is no automatic inheritance of ``row_attrs``.
|
|
||||||
|
|
||||||
The fieldsets declaration is a list of two-tuples very similar to
|
|
||||||
the ``fieldsets`` option on a ModelAdmin class in
|
|
||||||
``django.contrib.admin``.
|
|
||||||
|
|
||||||
The first item in each two-tuple is a name for the fieldset, and
|
|
||||||
the second is a dictionary of fieldset options.
|
|
||||||
|
|
||||||
Valid fieldset options in the dictionary include:
|
|
||||||
|
|
||||||
``fields`` (required): A tuple of field names to display in this
|
|
||||||
fieldset.
|
|
||||||
|
|
||||||
``classes``: A list of extra CSS classes to apply to the fieldset.
|
|
||||||
|
|
||||||
``legend``: This value, if present, will be the contents of a ``legend``
|
|
||||||
tag to open the fieldset.
|
|
||||||
|
|
||||||
``description``: A string of optional extra text to be displayed
|
|
||||||
under the ``legend`` of the fieldset.
|
|
||||||
|
|
||||||
When iterated over, the ``fieldsets`` attribute of a
|
|
||||||
``BetterForm`` (or ``BetterModelForm``) yields ``Fieldset``s.
|
|
||||||
Each ``Fieldset`` has a ``name`` attribute, a ``legend``
|
|
||||||
attribute, , a ``classes`` attribute (the ``classes`` tuple
|
|
||||||
collapsed into a space-separated string), and a description
|
|
||||||
attribute, and when iterated over yields its ``BoundField``s.
|
|
||||||
|
|
||||||
Subclasses of a ``BetterForm`` will inherit their parent's
|
|
||||||
fieldsets unless they define their own.
|
|
||||||
|
|
||||||
A ``BetterForm`` or ``BetterModelForm`` can still be iterated over
|
|
||||||
directly to yield all of its ``BoundField``s, regardless of
|
|
||||||
fieldsets.
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
self._fieldsets = deepcopy(self.base_fieldsets)
|
|
||||||
self._row_attrs = deepcopy(self.base_row_attrs)
|
|
||||||
self._fieldset_collection = None
|
|
||||||
super(BetterBaseForm, self).__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def fieldsets(self):
|
|
||||||
if not self._fieldset_collection:
|
|
||||||
self._fieldset_collection = FieldsetCollection(self,
|
|
||||||
self._fieldsets)
|
|
||||||
return self._fieldset_collection
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for bf in super(BetterBaseForm, self).__iter__():
|
|
||||||
yield _mark_row_attrs(bf, self)
|
|
||||||
|
|
||||||
def __getitem__(self, name):
|
|
||||||
bf = super(BetterBaseForm, self).__getitem__(name)
|
|
||||||
return _mark_row_attrs(bf, self)
|
|
||||||
|
|
||||||
class BetterForm(BetterBaseForm, forms.Form):
|
|
||||||
__metaclass__ = BetterFormMetaclass
|
|
||||||
__doc__ = BetterBaseForm.__doc__
|
|
||||||
|
|
||||||
class BetterModelForm(BetterBaseForm, forms.ModelForm):
|
|
||||||
__metaclass__ = BetterModelFormMetaclass
|
|
||||||
__doc__ = BetterBaseForm.__doc__
|
|
||||||
|
|
||||||
|
|
||||||
class BasePreviewForm (object):
|
|
||||||
"""
|
|
||||||
Mixin to add preview functionality to a form. If the form is submitted with
|
|
||||||
the following k/v pair in its ``data`` dictionary:
|
|
||||||
|
|
||||||
'submit': 'preview' (value string is case insensitive)
|
|
||||||
|
|
||||||
Then ``PreviewForm.preview`` will be marked ``True`` and the form will
|
|
||||||
be marked invalid (though this invalidation will not put an error in
|
|
||||||
its ``errors`` dictionary).
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
super(BasePreviewForm, self).__init__(*args, **kwargs)
|
|
||||||
self.preview = self.check_preview(kwargs.get('data', None))
|
|
||||||
|
|
||||||
def check_preview(self, data):
|
|
||||||
if data and data.get('submit', '').lower() == u'preview':
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def is_valid(self, *args, **kwargs):
|
|
||||||
if self.preview:
|
|
||||||
return False
|
|
||||||
return super(BasePreviewForm, self).is_valid()
|
|
||||||
|
|
||||||
class PreviewModelForm(BasePreviewForm, BetterModelForm):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class PreviewForm(BasePreviewForm, BetterForm):
|
|
||||||
pass
|
|
|
@ -1,3 +0,0 @@
|
||||||
$(document).ready(function() {
|
|
||||||
$('textarea.autoresize').autogrow();
|
|
||||||
});
|
|
|
@ -1,132 +0,0 @@
|
||||||
/*
|
|
||||||
* Auto Expanding Text Area (1.2.2)
|
|
||||||
* by Chrys Bader (www.chrysbader.com)
|
|
||||||
* chrysb@gmail.com
|
|
||||||
*
|
|
||||||
* Special thanks to:
|
|
||||||
* Jake Chapa - jake@hybridstudio.com
|
|
||||||
* John Resig - jeresig@gmail.com
|
|
||||||
*
|
|
||||||
* Copyright (c) 2008 Chrys Bader (www.chrysbader.com)
|
|
||||||
* Licensed under the GPL (GPL-LICENSE.txt) license.
|
|
||||||
*
|
|
||||||
*
|
|
||||||
* NOTE: This script requires jQuery to work. Download jQuery at www.jquery.com
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
|
|
||||||
(function(jQuery) {
|
|
||||||
|
|
||||||
var self = null;
|
|
||||||
|
|
||||||
jQuery.fn.autogrow = function(o)
|
|
||||||
{
|
|
||||||
return this.each(function() {
|
|
||||||
new jQuery.autogrow(this, o);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The autogrow object.
|
|
||||||
*
|
|
||||||
* @constructor
|
|
||||||
* @name jQuery.autogrow
|
|
||||||
* @param Object e The textarea to create the autogrow for.
|
|
||||||
* @param Hash o A set of key/value pairs to set as configuration properties.
|
|
||||||
* @cat Plugins/autogrow
|
|
||||||
*/
|
|
||||||
|
|
||||||
jQuery.autogrow = function (e, o)
|
|
||||||
{
|
|
||||||
this.options = o || {};
|
|
||||||
this.dummy = null;
|
|
||||||
this.interval = null;
|
|
||||||
this.line_height = this.options.lineHeight || parseInt(jQuery(e).css('line-height'));
|
|
||||||
this.min_height = this.options.minHeight || parseInt(jQuery(e).css('min-height'));
|
|
||||||
this.max_height = this.options.maxHeight || parseInt(jQuery(e).css('max-height'));;
|
|
||||||
this.textarea = jQuery(e);
|
|
||||||
|
|
||||||
if(this.line_height == NaN)
|
|
||||||
this.line_height = 0;
|
|
||||||
|
|
||||||
// Only one textarea activated at a time, the one being used
|
|
||||||
this.init();
|
|
||||||
};
|
|
||||||
|
|
||||||
jQuery.autogrow.fn = jQuery.autogrow.prototype = {
|
|
||||||
autogrow: '1.2.2'
|
|
||||||
};
|
|
||||||
|
|
||||||
jQuery.autogrow.fn.extend = jQuery.autogrow.extend = jQuery.extend;
|
|
||||||
|
|
||||||
jQuery.autogrow.fn.extend({
|
|
||||||
|
|
||||||
init: function() {
|
|
||||||
var self = this;
|
|
||||||
this.textarea.css({overflow: 'hidden', display: 'block'});
|
|
||||||
this.textarea.bind('focus', function() { self.startExpand() } ).bind('blur', function() { self.stopExpand() });
|
|
||||||
this.checkExpand();
|
|
||||||
},
|
|
||||||
|
|
||||||
startExpand: function() {
|
|
||||||
var self = this;
|
|
||||||
this.interval = window.setInterval(function() {self.checkExpand()}, 400);
|
|
||||||
},
|
|
||||||
|
|
||||||
stopExpand: function() {
|
|
||||||
clearInterval(this.interval);
|
|
||||||
},
|
|
||||||
|
|
||||||
checkExpand: function() {
|
|
||||||
|
|
||||||
if (this.dummy == null)
|
|
||||||
{
|
|
||||||
this.dummy = jQuery('<div></div>');
|
|
||||||
this.dummy.css({
|
|
||||||
'font-size' : this.textarea.css('font-size'),
|
|
||||||
'font-family': this.textarea.css('font-family'),
|
|
||||||
'width' : this.textarea.css('width'),
|
|
||||||
'padding' : this.textarea.css('padding'),
|
|
||||||
'line-height': this.line_height + 'px',
|
|
||||||
'overflow-x' : 'hidden',
|
|
||||||
'position' : 'absolute',
|
|
||||||
'top' : 0,
|
|
||||||
'left' : -9999
|
|
||||||
}).appendTo('body');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strip HTML tags
|
|
||||||
var html = this.textarea.val().replace(/(<|>)/g, '');
|
|
||||||
|
|
||||||
// IE is different, as per usual
|
|
||||||
if ($.browser.msie)
|
|
||||||
{
|
|
||||||
html = html.replace(/\n/g, '<BR>new');
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
html = html.replace(/\n/g, '<br>new');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.dummy.html() != html)
|
|
||||||
{
|
|
||||||
this.dummy.html(html);
|
|
||||||
|
|
||||||
if (this.max_height > 0 && (this.dummy.height() + this.line_height > this.max_height))
|
|
||||||
{
|
|
||||||
this.textarea.css('overflow-y', 'auto');
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
this.textarea.css('overflow-y', 'hidden');
|
|
||||||
if (this.textarea.height() < this.dummy.height() + this.line_height || (this.dummy.height() < this.textarea.height()))
|
|
||||||
{
|
|
||||||
this.textarea.animate({height: (this.dummy.height() + this.line_height) + 'px'}, 100);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
});
|
|
||||||
})(jQuery);
|
|
|
@ -1,12 +0,0 @@
|
||||||
import posixpath
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
|
|
||||||
JQUERY_URL = getattr(
|
|
||||||
settings, 'JQUERY_URL',
|
|
||||||
'http://ajax.googleapis.com/ajax/libs/jquery/1.4/jquery.min.js')
|
|
||||||
|
|
||||||
if not ((':' in JQUERY_URL) or (JQUERY_URL.startswith('/'))):
|
|
||||||
JQUERY_URL = posixpath.join(settings.MEDIA_URL, JQUERY_URL)
|
|
||||||
|
|
||||||
FORM_UTILS_MEDIA_URL = getattr(settings, 'FORM_UTILS_MEDIA_URL', settings.MEDIA_URL)
|
|
|
@ -1,16 +0,0 @@
|
||||||
{% extends "form_utils/form.html" %}
|
|
||||||
|
|
||||||
{% block fields %}
|
|
||||||
{% for fieldset in form.fieldsets %}
|
|
||||||
<fieldset class="{{ fieldset.classes }}">
|
|
||||||
{% if fieldset.legend %}
|
|
||||||
<legend>{{ fieldset.legend }}</legend>
|
|
||||||
{% endif %}
|
|
||||||
<ul>
|
|
||||||
{% with fieldset as fields %}
|
|
||||||
{% include "form_utils/fields_as_lis.html" %}
|
|
||||||
{% endwith %}
|
|
||||||
</ul>
|
|
||||||
</fieldset>
|
|
||||||
{% endfor %}
|
|
||||||
{% endblock %}
|
|
|
@ -1,11 +0,0 @@
|
||||||
{% for field in fields %}
|
|
||||||
{% if field.is_hidden %}
|
|
||||||
{{ field }}
|
|
||||||
{% else %}
|
|
||||||
<li{{ field.row_attrs }}>
|
|
||||||
{{ field.errors }}
|
|
||||||
{{ field.label_tag }}
|
|
||||||
{{ field }}
|
|
||||||
</li>
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
|
@ -1,13 +0,0 @@
|
||||||
{% block errors %}
|
|
||||||
{% if form.non_field_errors %}{{ form.non_field_errors }}{% endif %}
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block fields %}
|
|
||||||
<fieldset class="fieldset_main">
|
|
||||||
<ul>
|
|
||||||
{% with form as fields %}
|
|
||||||
{% include "form_utils/fields_as_lis.html" %}
|
|
||||||
{% endwith %}
|
|
||||||
</ul>
|
|
||||||
</fieldset>
|
|
||||||
{% endblock %}
|
|
|
@ -1,3 +0,0 @@
|
||||||
/__init__.py/1.1/Fri Jan 28 21:08:54 2011//
|
|
||||||
/form_utils_tags.py/1.1/Fri Jan 28 21:08:54 2011//
|
|
||||||
D
|
|
|
@ -1 +0,0 @@
|
||||||
ietfsec/form_utils/templatetags
|
|
|
@ -1 +0,0 @@
|
||||||
/a/cvs
|
|
|
@ -1,6 +0,0 @@
|
||||||
"""
|
|
||||||
__init__.py for django-form-utils - templatetags
|
|
||||||
|
|
||||||
Time-stamp: <2008-10-13 12:14:37 carljm __init__.py>
|
|
||||||
|
|
||||||
"""
|
|
|
@ -1,42 +0,0 @@
|
||||||
"""
|
|
||||||
templatetags for django-form-utils
|
|
||||||
|
|
||||||
Time-stamp: <2009-03-26 12:32:08 carljm form_utils_tags.py>
|
|
||||||
|
|
||||||
"""
|
|
||||||
from django import template
|
|
||||||
|
|
||||||
from form_utils.forms import BetterForm, BetterModelForm
|
|
||||||
from form_utils.utils import select_template_from_string
|
|
||||||
|
|
||||||
register = template.Library()
|
|
||||||
|
|
||||||
@register.filter
|
|
||||||
def render(form, template_name=None):
|
|
||||||
"""
|
|
||||||
Renders a ``django.forms.Form`` or
|
|
||||||
``form_utils.forms.BetterForm`` instance using a template.
|
|
||||||
|
|
||||||
The template name(s) may be passed in as the argument to the
|
|
||||||
filter (use commas to separate multiple template names for
|
|
||||||
template selection).
|
|
||||||
|
|
||||||
If not provided, the default template name is
|
|
||||||
``form_utils/form.html``.
|
|
||||||
|
|
||||||
If the form object to be rendered is an instance of
|
|
||||||
``form_utils.forms.BetterForm`` or
|
|
||||||
``form_utils.forms.BetterModelForm``, the template
|
|
||||||
``form_utils/better_form.html`` will be used instead if present.
|
|
||||||
|
|
||||||
"""
|
|
||||||
default = 'form_utils/form.html'
|
|
||||||
if isinstance(form, (BetterForm, BetterModelForm)):
|
|
||||||
default = ','.join(['form_utils/better_form.html', default])
|
|
||||||
tpl = select_template_from_string(template_name or default)
|
|
||||||
|
|
||||||
return tpl.render(template.Context({'form': form}))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,20 +0,0 @@
|
||||||
"""
|
|
||||||
utility functions for django-form-utils
|
|
||||||
|
|
||||||
Time-stamp: <2009-03-26 12:32:41 carljm utils.py>
|
|
||||||
|
|
||||||
"""
|
|
||||||
from django.template import loader
|
|
||||||
|
|
||||||
def select_template_from_string(arg):
|
|
||||||
"""
|
|
||||||
Select a template from a string, which can include multiple
|
|
||||||
template paths separated by commas.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if ',' in arg:
|
|
||||||
tpl = loader.select_template(
|
|
||||||
[tn.strip() for tn in arg.split(',')])
|
|
||||||
else:
|
|
||||||
tpl = loader.get_template(arg)
|
|
||||||
return tpl
|
|
|
@ -1,112 +0,0 @@
|
||||||
"""
|
|
||||||
widgets for django-form-utils
|
|
||||||
|
|
||||||
parts of this code taken from http://www.djangosnippets.org/snippets/934/
|
|
||||||
- thanks baumer1122
|
|
||||||
|
|
||||||
"""
|
|
||||||
import os
|
|
||||||
import posixpath
|
|
||||||
|
|
||||||
from django import forms
|
|
||||||
from django.conf import settings
|
|
||||||
from django.utils.functional import curry
|
|
||||||
from django.utils.safestring import mark_safe
|
|
||||||
from django.core.files.uploadedfile import SimpleUploadedFile as UploadedFile
|
|
||||||
|
|
||||||
from form_utils.settings import JQUERY_URL, FORM_UTILS_MEDIA_URL
|
|
||||||
|
|
||||||
try:
|
|
||||||
from sorl.thumbnail.main import DjangoThumbnail
|
|
||||||
def thumbnail(image_path, width, height):
|
|
||||||
t = DjangoThumbnail(relative_source=image_path, requested_size=(width,height))
|
|
||||||
return u'<img src="%s" alt="%s" />' % (t.absolute_url, image_path)
|
|
||||||
except ImportError:
|
|
||||||
def thumbnail(image_path, width, height):
|
|
||||||
absolute_url = posixpath.join(settings.MEDIA_URL, image_path)
|
|
||||||
return u'<img src="%s" alt="%s" />' % (absolute_url, image_path)
|
|
||||||
|
|
||||||
class ImageWidget(forms.FileInput):
|
|
||||||
template = '%(input)s<br />%(image)s'
|
|
||||||
|
|
||||||
def __init__(self, attrs=None, template=None, width=200, height=200):
|
|
||||||
if template is not None:
|
|
||||||
self.template = template
|
|
||||||
self.width = width
|
|
||||||
self.height = height
|
|
||||||
super(ImageWidget, self).__init__(attrs)
|
|
||||||
|
|
||||||
def render(self, name, value, attrs=None):
|
|
||||||
input_html = super(forms.FileInput, self).render(name, value, attrs)
|
|
||||||
if hasattr(value, 'width') and hasattr(value, 'height'):
|
|
||||||
image_html = thumbnail(value.name, self.width, self.height)
|
|
||||||
output = self.template % {'input': input_html,
|
|
||||||
'image': image_html}
|
|
||||||
else:
|
|
||||||
output = input_html
|
|
||||||
return mark_safe(output)
|
|
||||||
|
|
||||||
class ClearableFileInput(forms.MultiWidget):
|
|
||||||
default_file_widget_class = forms.FileInput
|
|
||||||
template = '%(input)s Clear: %(checkbox)s'
|
|
||||||
|
|
||||||
def __init__(self, file_widget=None,
|
|
||||||
attrs=None, template=None):
|
|
||||||
if template is not None:
|
|
||||||
self.template = template
|
|
||||||
file_widget = file_widget or self.default_file_widget_class()
|
|
||||||
super(ClearableFileInput, self).__init__(
|
|
||||||
widgets=[file_widget, forms.CheckboxInput()],
|
|
||||||
attrs=attrs)
|
|
||||||
|
|
||||||
def render(self, name, value, attrs=None):
|
|
||||||
if isinstance(value, list):
|
|
||||||
self.value = value[0]
|
|
||||||
else:
|
|
||||||
self.value = value
|
|
||||||
return super(ClearableFileInput, self).render(name, value, attrs)
|
|
||||||
|
|
||||||
def decompress(self, value):
|
|
||||||
# the clear checkbox is never initially checked
|
|
||||||
return [value, None]
|
|
||||||
|
|
||||||
def format_output(self, rendered_widgets):
|
|
||||||
if self.value:
|
|
||||||
return self.template % {'input': rendered_widgets[0],
|
|
||||||
'checkbox': rendered_widgets[1]}
|
|
||||||
return rendered_widgets[0]
|
|
||||||
|
|
||||||
root = lambda path: posixpath.join(FORM_UTILS_MEDIA_URL, path)
|
|
||||||
|
|
||||||
class AutoResizeTextarea(forms.Textarea):
|
|
||||||
"""
|
|
||||||
A Textarea widget that automatically resizes to accomodate its contents.
|
|
||||||
|
|
||||||
"""
|
|
||||||
class Media:
|
|
||||||
|
|
||||||
js = (JQUERY_URL,
|
|
||||||
root('form_utils/js/jquery.autogrow.js'),
|
|
||||||
root('form_utils/js/autoresize.js'))
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
attrs = kwargs.setdefault('attrs', {})
|
|
||||||
try:
|
|
||||||
attrs['class'] = "%s autoresize" % (attrs['class'],)
|
|
||||||
except KeyError:
|
|
||||||
attrs['class'] = 'autoresize'
|
|
||||||
attrs.setdefault('cols', 80)
|
|
||||||
attrs.setdefault('rows', 5)
|
|
||||||
super(AutoResizeTextarea, self).__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
class InlineAutoResizeTextarea(AutoResizeTextarea):
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
attrs = kwargs.setdefault('attrs', {})
|
|
||||||
try:
|
|
||||||
attrs['class'] = "%s inline" % (attrs['class'],)
|
|
||||||
except KeyError:
|
|
||||||
attrs['class'] = 'inline'
|
|
||||||
attrs.setdefault('cols', 40)
|
|
||||||
attrs.setdefault('rows', 2)
|
|
||||||
super(InlineAutoResizeTextarea, self).__init__(*args, **kwargs)
|
|
||||||
|
|
|
@@ -1,7 +1,10 @@
 # Copyright The IETF Trust 2007-2019, All Rights Reserved
 # -*- coding: utf-8 -*-

-import checks                           # pyflakes:ignore
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import checks                    # pyflakes:ignore

 # Don't add patch number here:
 __version__ = "6.98.5.dev0"
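With `absolute_import` in effect (and always on Python 3), a bare `import checks` no longer resolves against the containing package, so the explicit relative form is required. Illustrated with a hypothetical package layout:

    # pkg/__init__.py  (hypothetical layout with a sibling module pkg/checks.py)
    from __future__ import absolute_import

    # import checks        # Python 2 implicit relative import; fails under absolute_import / Python 3
    from . import checks   # works on both Python 2 and Python 3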
@@ -1,7 +1,14 @@
+# Copyright The IETF Trust 2014-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import datetime
 import re
 import six
-import datetime
-from urllib import urlencode
+from six.moves.urllib.parse import urlencode

 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist
@@ -65,7 +72,7 @@ class ModelResource(tastypie.resources.ModelResource):
         return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), smooshed)

-TIMEDELTA_REGEX = re.compile('^(?P<days>\d+d)?\s?(?P<hours>\d+h)?\s?(?P<minutes>\d+m)?\s?(?P<seconds>\d+s?)$')
+TIMEDELTA_REGEX = re.compile(r'^(?P<days>\d+d)?\s?(?P<hours>\d+h)?\s?(?P<minutes>\d+m)?\s?(?P<seconds>\d+s?)$')

 class TimedeltaField(ApiField):
     dehydrated_type = 'timedelta'
@@ -112,6 +119,8 @@ class ToOneField(tastypie.fields.ToOneField):

     def dehydrate(self, bundle, for_list=True):
         foreign_obj = None
+        previous_obj = None
+        attrib = None

         if callable(self.attribute):
             previous_obj = bundle.obj
@@ -120,6 +129,7 @@ class ToOneField(tastypie.fields.ToOneField):
             foreign_obj = bundle.obj

             for attr in self._attrs:
+                attrib = attr
                 previous_obj = foreign_obj
                 try:
                     foreign_obj = getattr(foreign_obj, attr, None)
@@ -129,9 +139,9 @@ class ToOneField(tastypie.fields.ToOneField):
         if not foreign_obj:
             if not self.null:
                 if callable(self.attribute):
-                    raise ApiFieldError(u"The related resource for resource %s could not be found." % (previous_obj))
+                    raise ApiFieldError("The related resource for resource %s could not be found." % (previous_obj))
                 else:
-                    raise ApiFieldError(u"The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (previous_obj, attr))
+                    raise ApiFieldError("The model '%r' has an empty attribute '%s' and doesn't allow a null value." % (previous_obj, attrib))
             return None

         fk_resource = self.get_related_resource(foreign_obj)
@ -1,10 +1,12 @@
|
||||||
# Copyright The IETF Trust 2014-2019, All Rights Reserved
|
# Copyright The IETF Trust 2014-2019, All Rights Reserved
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from __future__ import print_function
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import datetime
|
import datetime
|
||||||
import collections
|
import collections
|
||||||
|
import io
|
||||||
|
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
|
|
||||||
import debug # pyflakes:ignore
|
import debug # pyflakes:ignore
|
||||||
|
@ -16,7 +18,11 @@ from django.template import Template, Context
|
||||||
from tastypie.resources import ModelResource
|
from tastypie.resources import ModelResource
|
||||||
|
|
||||||
|
|
||||||
resource_head_template = """# Autogenerated by the makeresources management command {{date}}
|
resource_head_template = """# Copyright The IETF Trust {{date}}, All Rights Reserved
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Generated by the makeresources management command {{date}}
|
||||||
|
|
||||||
|
|
||||||
from tastypie.resources import ModelResource
|
from tastypie.resources import ModelResource
|
||||||
from tastypie.fields import ToManyField # pyflakes:ignore
|
from tastypie.fields import ToManyField # pyflakes:ignore
|
||||||
from tastypie.constants import ALL, ALL_WITH_RELATIONS # pyflakes:ignore
|
from tastypie.constants import ALL, ALL_WITH_RELATIONS # pyflakes:ignore
|
||||||
|
@ -79,7 +85,7 @@ class Command(AppCommand):
|
||||||
|
|
||||||
if missing_resources:
|
if missing_resources:
|
||||||
print("Updating resources.py for %s" % app.name)
|
print("Updating resources.py for %s" % app.name)
|
||||||
with open(resource_file_path, "a") as rfile:
|
with io.open(resource_file_path, "a") as rfile:
|
||||||
info = dict(
|
info = dict(
|
||||||
app=app.name,
|
app=app.name,
|
||||||
app_label=app.label,
|
app_label=app.label,
|
||||||
|
@ -164,7 +170,7 @@ class Command(AppCommand):
|
||||||
fields=model._meta.fields,
|
fields=model._meta.fields,
|
||||||
m2m_fields=model._meta.many_to_many,
|
m2m_fields=model._meta.many_to_many,
|
||||||
name=model_name,
|
name=model_name,
|
||||||
imports=[ v for k,v in imports.items() ],
|
imports=[ v for k,v in list(imports.items()) ],
|
||||||
foreign_keys=foreign_keys,
|
foreign_keys=foreign_keys,
|
||||||
m2m_keys=m2m_keys,
|
m2m_keys=m2m_keys,
|
||||||
resource_name=resource_name,
|
resource_name=resource_name,
|
||||||
|
@ -184,7 +190,7 @@ class Command(AppCommand):
|
||||||
while len(new_models) > 0:
|
while len(new_models) > 0:
|
||||||
list_len = len(new_models)
|
list_len = len(new_models)
|
||||||
#debug.show('len(new_models)')
|
#debug.show('len(new_models)')
|
||||||
keys = new_models.keys()
|
keys = list(new_models.keys())
|
||||||
for model_name in keys:
|
for model_name in keys:
|
||||||
internal_fk_count = 0
|
internal_fk_count = 0
|
||||||
for fk in new_models[model_name]["foreign_keys"]+new_models[model_name]["m2m_keys"]:
|
for fk in new_models[model_name]["foreign_keys"]+new_models[model_name]["m2m_keys"]:
|
||||||
|
@ -207,7 +213,7 @@ class Command(AppCommand):
|
||||||
internal_fk_count_limit += 1
|
internal_fk_count_limit += 1
|
||||||
else:
|
else:
|
||||||
print("Failed also with partial ordering, writing resource classes without ordering")
|
print("Failed also with partial ordering, writing resource classes without ordering")
|
||||||
new_model_list = [ v for k,v in new_models.items() ]
|
new_model_list = [ v for k,v in list(new_models.items()) ]
|
||||||
break
|
break
|
||||||
|
|
||||||
if rfile.tell() == 0:
|
if rfile.tell() == 0:
|
||||||
|
|
|
@ -1,5 +1,12 @@
|
||||||
|
# Copyright The IETF Trust 2018-2019, All Rights Reserved
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
|
||||||
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
|
import six
|
||||||
|
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.core.exceptions import ObjectDoesNotExist, FieldError
|
from django.core.exceptions import ObjectDoesNotExist, FieldError
|
||||||
|
@ -27,9 +34,9 @@ def filter_from_queryargs(request):
|
||||||
def is_ascii(s):
|
def is_ascii(s):
|
||||||
return all(ord(c) < 128 for c in s)
|
return all(ord(c) < 128 for c in s)
|
||||||
# limit parameter keys to ascii.
|
# limit parameter keys to ascii.
|
||||||
params = dict( (k,v) for (k,v) in request.GET.items() if is_ascii(k) )
|
params = dict( (k,v) for (k,v) in list(request.GET.items()) if is_ascii(k) )
|
||||||
filter = fix_ranges(dict([(k,params[k]) for k in params.keys() if not k.startswith("not__")]))
|
filter = fix_ranges(dict([(k,params[k]) for k in list(params.keys()) if not k.startswith("not__")]))
|
||||||
exclude = fix_ranges(dict([(k[5:],params[k]) for k in params.keys() if k.startswith("not__")]))
|
exclude = fix_ranges(dict([(k[5:],params[k]) for k in list(params.keys()) if k.startswith("not__")]))
|
||||||
return filter, exclude
|
return filter, exclude
|
||||||
|
|
||||||
def unique_obj_name(obj):
|
def unique_obj_name(obj):
|
||||||
|
@ -89,7 +96,7 @@ class AdminJsonSerializer(Serializer):
|
||||||
use_natural_keys = False
|
use_natural_keys = False
|
||||||
|
|
||||||
def serialize(self, queryset, **options):
|
def serialize(self, queryset, **options):
|
||||||
qi = options.get('query_info', '')
|
qi = options.get('query_info', '').encode('utf-8')
|
||||||
if len(list(queryset)) == 1:
|
if len(list(queryset)) == 1:
|
||||||
obj = queryset[0]
|
obj = queryset[0]
|
||||||
key = 'json:%s:%s' % (hashlib.md5(qi).hexdigest(), unique_obj_name(obj))
|
key = 'json:%s:%s' % (hashlib.md5(qi).hexdigest(), unique_obj_name(obj))
|
||||||
|
@ -147,7 +154,7 @@ class AdminJsonSerializer(Serializer):
|
||||||
if hasattr(field_value, "_meta"):
|
if hasattr(field_value, "_meta"):
|
||||||
self._current[name] = self.expand_related(field_value, name)
|
self._current[name] = self.expand_related(field_value, name)
|
||||||
else:
|
else:
|
||||||
self._current[name] = unicode(field_value)
|
self._current[name] = six.text_type(field_value)
|
||||||
except ObjectDoesNotExist:
|
except ObjectDoesNotExist:
|
||||||
pass
|
pass
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
|
@ -224,7 +231,7 @@ class JsonExportMixin(object):
|
||||||
|
|
||||||
def json_view(self, request, filter={}, expand=[]):
|
def json_view(self, request, filter={}, expand=[]):
|
||||||
qfilter, exclude = filter_from_queryargs(request)
|
qfilter, exclude = filter_from_queryargs(request)
|
||||||
for k in qfilter.keys():
|
for k in list(qfilter.keys()):
|
||||||
if k.startswith("_"):
|
if k.startswith("_"):
|
||||||
del qfilter[k]
|
del qfilter[k]
|
||||||
qfilter.update(filter)
|
qfilter.update(filter)
|
||||||
|
@ -244,7 +251,7 @@ class JsonExportMixin(object):
|
||||||
try:
|
try:
|
||||||
qs = self.get_queryset().filter(**filter).exclude(**exclude)
|
qs = self.get_queryset().filter(**filter).exclude(**exclude)
|
||||||
except (FieldError, ValueError) as e:
|
except (FieldError, ValueError) as e:
|
||||||
return HttpResponse(json.dumps({u"error": str(e)}, sort_keys=True, indent=3), content_type=content_type)
|
return HttpResponse(json.dumps({"error": str(e)}, sort_keys=True, indent=3), content_type=content_type)
|
||||||
try:
|
try:
|
||||||
if expand:
|
if expand:
|
||||||
qs = qs.select_related()
|
qs = qs.select_related()
|
||||||
|
@ -252,7 +259,7 @@ class JsonExportMixin(object):
|
||||||
items = [(getattr(o, key), serializer.serialize([o], expand=expand, query_info=query_info) ) for o in qs ]
|
items = [(getattr(o, key), serializer.serialize([o], expand=expand, query_info=query_info) ) for o in qs ]
|
||||||
qd = dict( ( k, json.loads(v)[0] ) for k,v in items )
|
qd = dict( ( k, json.loads(v)[0] ) for k,v in items )
|
||||||
except (FieldError, ValueError) as e:
|
except (FieldError, ValueError) as e:
|
||||||
return HttpResponse(json.dumps({u"error": str(e)}, sort_keys=True, indent=3), content_type=content_type)
|
return HttpResponse(json.dumps({"error": str(e)}, sort_keys=True, indent=3), content_type=content_type)
|
||||||
text = json.dumps({smart_text(self.model._meta): qd}, sort_keys=True, indent=3)
|
text = json.dumps({smart_text(self.model._meta): qd}, sort_keys=True, indent=3)
|
||||||
return HttpResponse(text, content_type=content_type)
|
return HttpResponse(text, content_type=content_type)
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,9 @@
|
||||||
# Copyright The IETF Trust 2015-2018, All Rights Reserved
|
# Copyright The IETF Trust 2015-2019, All Rights Reserved
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
|
||||||
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
@ -194,7 +197,7 @@ class TastypieApiTestCase(ResourceTestCaseMixin, TestCase):
|
||||||
client = Client(Accept='application/json')
|
client = Client(Accept='application/json')
|
||||||
r = client.get("/api/v1/")
|
r = client.get("/api/v1/")
|
||||||
self.assertValidJSONResponse(r)
|
self.assertValidJSONResponse(r)
|
||||||
resource_list = json.loads(r.content)
|
resource_list = r.json()
|
||||||
|
|
||||||
for name in self.apps:
|
for name in self.apps:
|
||||||
if not name in self.apps:
|
if not name in self.apps:
|
||||||
|
@ -207,19 +210,19 @@ class TastypieApiTestCase(ResourceTestCaseMixin, TestCase):
|
||||||
def test_all_model_resources_exist(self):
|
def test_all_model_resources_exist(self):
|
||||||
client = Client(Accept='application/json')
|
client = Client(Accept='application/json')
|
||||||
r = client.get("/api/v1")
|
r = client.get("/api/v1")
|
||||||
top = json.loads(r.content)
|
top = r.json()
|
||||||
for name in self.apps:
|
for name in self.apps:
|
||||||
app_name = self.apps[name]
|
app_name = self.apps[name]
|
||||||
app = import_module(app_name)
|
app = import_module(app_name)
|
||||||
self.assertEqual("/api/v1/%s/"%name, top[name]["list_endpoint"])
|
self.assertEqual("/api/v1/%s/"%name, top[name]["list_endpoint"])
|
||||||
r = client.get(top[name]["list_endpoint"])
|
r = client.get(top[name]["list_endpoint"])
|
||||||
self.assertValidJSONResponse(r)
|
self.assertValidJSONResponse(r)
|
||||||
app_resources = json.loads(r.content)
|
app_resources = r.json()
|
||||||
#
|
#
|
||||||
model_list = apps.get_app_config(name).get_models()
|
model_list = apps.get_app_config(name).get_models()
|
||||||
for model in model_list:
|
for model in model_list:
|
||||||
if not model._meta.model_name in app_resources.keys():
|
if not model._meta.model_name in list(app_resources.keys()):
|
||||||
#print("There doesn't seem to be any resource for model %s.models.%s"%(app.__name__,model.__name__,))
|
#print("There doesn't seem to be any resource for model %s.models.%s"%(app.__name__,model.__name__,))
|
||||||
self.assertIn(model._meta.model_name, app_resources.keys(),
|
self.assertIn(model._meta.model_name, list(app_resources.keys()),
|
||||||
"There doesn't seem to be any API resource for model %s.models.%s"%(app.__name__,model.__name__,))
|
"There doesn't seem to be any API resource for model %s.models.%s"%(app.__name__,model.__name__,))
|
||||||
|
|
||||||
|
|
|
@@ -1,7 +1,6 @@
-# Copyright The IETF Trust 2017, All Rights Reserved
+# Copyright The IETF Trust 2017-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import, print_function, unicode_literals
-from __future__ import unicode_literals

 from jwcrypto.jwk import JWK

@@ -1,3 +1,9 @@
+# Copyright The IETF Trust 2015-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 import os
 import patch
 import sys
@@ -367,7 +373,7 @@ def maybe_patch_library(app_configs, **kwargs):
     patch_path = os.path.join(cwd, patch_file)
     patch_set = patch.fromfile(patch_path)
     if patch_set:
-        if not patch_set.apply(root=library_path):
+        if not patch_set.apply(root=library_path.encode('utf-8')):
             errors.append(checks.Warning(
                 "Could not apply patch from file '%s'"%patch_file,
                 hint=("Make sure that the patch file contains a unified diff and has valid file paths\n\n"
@@ -1,24 +1,24 @@
 # Copyright The IETF Trust 2017-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.contrib import admin
 
 from ietf.community.models import CommunityList, SearchRule, EmailSubscription
 
 class CommunityListAdmin(admin.ModelAdmin):
-list_display = [u'id', 'user', 'group']
+list_display = ['id', 'user', 'group']
 raw_id_fields = ['user', 'group', 'added_docs']
 admin.site.register(CommunityList, CommunityListAdmin)
 
 class SearchRuleAdmin(admin.ModelAdmin):
-list_display = [u'id', 'community_list', 'rule_type', 'state', 'group', 'person', 'text']
+list_display = ['id', 'community_list', 'rule_type', 'state', 'group', 'person', 'text']
 raw_id_fields = ['community_list', 'state', 'group', 'person', 'name_contains_index']
 search_fields = ['person__name', 'group__acronym', 'text', ]
 admin.site.register(SearchRule, SearchRuleAdmin)
 
 class EmailSubscriptionAdmin(admin.ModelAdmin):
-list_display = [u'id', 'community_list', 'email', 'notify_on']
+list_display = ['id', 'community_list', 'email', 'notify_on']
 raw_id_fields = ['community_list', 'email']
 admin.site.register(EmailSubscription, EmailSubscriptionAdmin)
 
 
@@ -1,3 +1,9 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 from django import forms
 from django.db.models import Q
 
@@ -82,9 +88,9 @@ class SearchRuleForm(forms.ModelForm):
 
 if 'group' in self.fields:
 self.fields['group'].queryset = self.fields['group'].queryset.filter(state="active").order_by("acronym")
-self.fields['group'].choices = [(g.pk, u"%s - %s" % (g.acronym, g.name)) for g in self.fields['group'].queryset]
+self.fields['group'].choices = [(g.pk, "%s - %s" % (g.acronym, g.name)) for g in self.fields['group'].queryset]
 
-for name, f in self.fields.iteritems():
+for name, f in self.fields.items():
 f.required = True
 
 def clean_text(self):
 
@@ -1,6 +1,7 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.10 on 2018-02-20 10:52
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
 import django.db.models.deletion
 
@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.10 on 2018-02-20 10:52
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.conf import settings
 from django.db import migrations, models
 
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-21 14:23
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
 import django.db.models.deletion
 
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-21 14:27
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 import sys
 
@@ -21,7 +23,7 @@ def forward(apps, schema_editor):
 # Document id fixup ------------------------------------------------------------
 
 objs = Document.objects.in_bulk()
-nameid = { o.name: o.id for id, o in objs.iteritems() }
+nameid = { o.name: o.id for id, o in objs.items() }
 
 sys.stderr.write('\n')
 
 
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-22 08:15
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations
 
 
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-22 08:15
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
 
 
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-27 05:56
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations
 
 
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-30 03:06
-from __future__ import unicode_literals
 
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations
 
 
@@ -1,13 +1,21 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 from django.contrib.auth.models import User
 from django.db import models
 from django.db.models import signals
 from django.urls import reverse as urlreverse
+from django.utils.encoding import python_2_unicode_compatible
 
 from ietf.doc.models import Document, DocEvent, State
 from ietf.group.models import Group
 from ietf.person.models import Person, Email
 from ietf.utils.models import ForeignKey
 
+@python_2_unicode_compatible
 class CommunityList(models.Model):
 user = ForeignKey(User, blank=True, null=True)
 group = ForeignKey(Group, blank=True, null=True)
@@ -21,7 +29,7 @@ class CommunityList(models.Model):
 else:
 return 'ID list'
 
-def __unicode__(self):
+def __str__(self):
 return self.long_name()
 
 def get_absolute_url(self):
@@ -33,6 +41,7 @@ class CommunityList(models.Model):
 return ""
 
 
+@python_2_unicode_compatible
 class SearchRule(models.Model):
 # these types define the UI for setting up the rule, and also
 # helps when interpreting the rule and matching documents
@@ -75,9 +84,10 @@ class SearchRule(models.Model):
 # when new documents are submitted
 name_contains_index = models.ManyToManyField(Document)
 
-def __unicode__(self):
+def __str__(self):
 return "%s %s %s/%s/%s/%s" % (self.community_list, self.rule_type, self.state, self.group, self.person, self.text)
 
+@python_2_unicode_compatible
 class EmailSubscription(models.Model):
 community_list = ForeignKey(CommunityList)
 email = ForeignKey(Email)
@@ -88,8 +98,8 @@ class EmailSubscription(models.Model):
 ]
 notify_on = models.CharField(max_length=30, choices=NOTIFICATION_CHOICES, default="all")
 
-def __unicode__(self):
+def __str__(self):
-return u"%s to %s (%s changes)" % (self.email, self.community_list, self.notify_on)
+return "%s to %s (%s changes)" % (self.email, self.community_list, self.notify_on)
 
 
 def notify_events(sender, instance, **kwargs):
 
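Note on the __unicode__ -> __str__ changes above: they follow the usual Django/six compatibility pattern of defining only __str__ and letting the python_2_unicode_compatible decorator supply __unicode__ on Python 2. A minimal sketch, assuming Django (or six) is importable; Example is an illustrative class, not datatracker code:

    # -*- coding: utf-8 -*-
    from __future__ import unicode_literals

    try:
        from django.utils.encoding import python_2_unicode_compatible
    except ImportError:
        from six import python_2_unicode_compatible   # same decorator, provided by six

    @python_2_unicode_compatible
    class Example(object):
        def __str__(self):            # on Python 2 this is turned into __unicode__
            return "example object"

    print(str(Example()))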
@@ -1,5 +1,8 @@
 # Copyright The IETF Trust 2014-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
 # Autogenerated by the mkresources management command 2014-11-13 23:53
+
+
 from ietf.api import ModelResource
 from tastypie.fields import ToOneField, ToManyField
 from tastypie.constants import ALL, ALL_WITH_RELATIONS
 
@@ -1,7 +1,8 @@
 # Copyright The IETF Trust 2016-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 
-import json
+from __future__ import absolute_import, print_function, unicode_literals
 
 from pyquery import PyQuery
 
@@ -19,7 +20,7 @@ from ietf.group.utils import setup_default_community_list_for_group
 from ietf.doc.models import State
 from ietf.doc.utils import add_state_change_event
 from ietf.person.models import Person, Email
-from ietf.utils.test_utils import login_testing_unauthorized, TestCase, unicontent
+from ietf.utils.test_utils import login_testing_unauthorized, TestCase
 from ietf.utils.mail import outbox
 from ietf.doc.factories import WgDraftFactory
 from ietf.group.factories import GroupFactory, RoleFactory
@@ -97,7 +98,7 @@ class CommunityListTests(TestCase):
 )
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(draft.name in unicontent(r))
+self.assertContains(r, draft.name)
 
 def test_manage_personal_list(self):
 PersonFactory(user__username='plain')
@@ -119,7 +120,7 @@ class CommunityListTests(TestCase):
 # document shows up on GET
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(draft.name in unicontent(r))
+self.assertContains(r, draft.name)
 
 # remove document
 r = self.client.post(url, { "action": "remove_document", "document": draft.name })
@@ -226,7 +227,7 @@ class CommunityListTests(TestCase):
 # track
 r = self.client.post(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
 self.assertEqual(r.status_code, 200)
-self.assertEqual(json.loads(r.content)["success"], True)
+self.assertEqual(r.json()["success"], True)
 clist = CommunityList.objects.get(user__username="plain")
 self.assertEqual(list(clist.added_docs.all()), [draft])
 
@@ -234,7 +235,7 @@ class CommunityListTests(TestCase):
 url = urlreverse(ietf.community.views.untrack_document, kwargs={ "username": "plain", "name": draft.name })
 r = self.client.post(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
 self.assertEqual(r.status_code, 200)
-self.assertEqual(json.loads(r.content)["success"], True)
+self.assertEqual(r.json()["success"], True)
 clist = CommunityList.objects.get(user__username="plain")
 self.assertEqual(list(clist.added_docs.all()), [])
 
@@ -261,7 +262,7 @@ class CommunityListTests(TestCase):
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
 # this is a simple-minded test, we don't actually check the fields
-self.assertTrue(draft.name in unicontent(r))
+self.assertContains(r, draft.name)
 
 def test_csv_for_group(self):
 draft = WgDraftFactory()
@@ -296,12 +297,12 @@ class CommunityListTests(TestCase):
 )
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(draft.name in unicontent(r))
+self.assertContains(r, draft.name)
 
 # only significant
 r = self.client.get(url + "?significant=1")
 self.assertEqual(r.status_code, 200)
-self.assertTrue('<entry>' not in unicontent(r))
+self.assertNotContains(r, '<entry>')
 
 def test_feed_for_group(self):
 draft = WgDraftFactory()
 
@@ -1,6 +1,9 @@
 # Copyright The IETF Trust 2016-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 import re
 
 from django.db.models import Q
 
@@ -1,16 +1,22 @@
 # Copyright The IETF Trust 2012-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 import csv
-import uuid
 import datetime
 import json
+import six
+import uuid
 
 from django.http import HttpResponse, HttpResponseForbidden, HttpResponseRedirect, Http404
 from django.shortcuts import get_object_or_404, render
 from django.contrib.auth.decorators import login_required
 from django.utils.html import strip_tags
 
+import debug # pyflakes:ignore
+
 from ietf.community.models import SearchRule, EmailSubscription
 from ietf.community.forms import SearchRuleTypeForm, SearchRuleForm, AddDocumentsForm, SubscriptionForm
 from ietf.community.utils import lookup_community_list, can_manage_community_list
@@ -135,7 +141,7 @@ def track_document(request, name, username=None, acronym=None):
 clist.added_docs.add(doc)
 
 if request.is_ajax():
-return HttpResponse(json.dumps({ 'success': True }), content_type='text/plain')
+return HttpResponse(json.dumps({ 'success': True }), content_type='application/json')
 else:
 return HttpResponseRedirect(clist.get_absolute_url())
 
@@ -155,7 +161,7 @@ def untrack_document(request, name, username=None, acronym=None):
 clist.added_docs.remove(doc)
 
 if request.is_ajax():
-return HttpResponse(json.dumps({ 'success': True }), content_type='text/plain')
+return HttpResponse(json.dumps({ 'success': True }), content_type='application/json')
 else:
 return HttpResponseRedirect(clist.get_absolute_url())
 
@@ -176,7 +182,7 @@ def export_to_csv(request, username=None, acronym=None, group_type=None):
 
 response['Content-Disposition'] = 'attachment; filename=%s' % filename
 
-writer = csv.writer(response, dialect=csv.excel, delimiter=',')
+writer = csv.writer(response, dialect=csv.excel, delimiter=str(','))
 
 header = [
 "Name",
@@ -198,7 +204,7 @@ def export_to_csv(request, username=None, acronym=None, group_type=None):
 row.append(e.time.strftime("%Y-%m-%d") if e else "")
 row.append(strip_tags(doc.friendly_state()))
 row.append(doc.group.acronym if doc.group else "")
-row.append(unicode(doc.ad) if doc.ad else "")
+row.append(six.text_type(doc.ad) if doc.ad else "")
 e = doc.latest_event()
 row.append(e.time.strftime("%Y-%m-%d") if e else "")
 writer.writerow([v.encode("utf-8") for v in row])
@@ -223,8 +229,8 @@ def feed(request, username=None, acronym=None, group_type=None):
 
 host = request.get_host()
 feed_url = 'https://%s%s' % (host, request.get_full_path())
-feed_id = uuid.uuid5(uuid.NAMESPACE_URL, feed_url.encode('utf-8'))
+feed_id = uuid.uuid5(uuid.NAMESPACE_URL, str(feed_url))
-title = u'%s RSS Feed' % clist.long_name()
+title = '%s RSS Feed' % clist.long_name()
 if significant:
 subtitle = 'Significant document changes'
 else:
@@ -235,7 +241,7 @@ def feed(request, username=None, acronym=None, group_type=None):
 'entries': events[:50],
 'title': title,
 'subtitle': subtitle,
-'id': feed_id.get_urn(),
+'id': feed_id.urn,
 'updated': datetime.datetime.now(),
 }, content_type='text/xml')
 
 
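Note on the views.py hunks above: unicode() is replaced by six.text_type, and the uuid module is given a native str name; both spellings behave the same on Python 2 and 3. A small self-contained illustration (the example URL is arbitrary; assumes six is installed):

    from __future__ import unicode_literals
    import uuid
    import six

    # six.text_type is unicode on Python 2 and str on Python 3
    assert isinstance(six.text_type(42), six.text_type)

    # uuid.uuid5() wants a native str name on both interpreters,
    # hence str(feed_url) rather than feed_url.encode('utf-8')
    feed_id = uuid.uuid5(uuid.NAMESPACE_URL, str("https://example.com/feed"))
    print(feed_id.urn)    # the .urn property replaces the get_urn() call removed above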
@@ -1,6 +1,9 @@
-# Copyright The IETF Trust 2010, All Rights Reserved
+# Copyright The IETF Trust 2010-2019, All Rights Reserved
 # coding: latin-1
 
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 from types import ModuleType
 
 # These people will be sent a stack trace if there's an uncaught exception in
@@ -9,7 +12,7 @@ DEBUG_EMAILS = [
 ('Tero Kivinen', 'kivinen@iki.fi'),
 ]
 
-for k in locals().keys():
+for k in list(locals().keys()):
 m = locals()[k]
 if isinstance(m, ModuleType):
 if hasattr(m, "DEBUG_EMAILS"):
 
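Note on the list(locals().keys()) change above, and on the similar list(...) wrappers elsewhere in this commit: Python 3's keys() returns a live view rather than a list, so wrapping it in list() restores list semantics (a snapshot that is safe to iterate while the dict changes, and something assertListEqual() can compare). A minimal illustration of the difference:

    d = {"a": 1, "b": 2, "c": 3}
    for k in list(d.keys()):      # snapshot the keys before mutating the dict
        if d[k] > 1:
            del d[k]              # deleting while iterating the bare view would raise RuntimeError on Python 3
    assert d == {"a": 1}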
@@ -1,5 +1,11 @@
+# Copyright The IETF Trust 2015-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
+
+
+from __future__ import absolute_import, print_function, unicode_literals
+
 from pyquery import PyQuery
-from Cookie import SimpleCookie
+from six.moves.http_cookies import SimpleCookie
 
 from django.urls import reverse as urlreverse
 
@@ -12,7 +18,7 @@ class CookieTests(TestCase):
 def test_settings_defaults(self):
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
 
@@ -21,10 +27,10 @@ class CookieTests(TestCase):
 
 
 def test_settings_defaults_from_cookies(self):
-self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '7', 'expires_soon' : 7, 'left_menu': 'on', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '7', str('expires_soon') : 7, str('left_menu'): 'on', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/7"]').contents(), ['7 days'])
@@ -32,7 +38,7 @@ class CookieTests(TestCase):
 self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/on"]').contents(), ['On'])
 
 def test_settings_values_from_cookies_garbage(self):
-self.client.cookies = SimpleCookie({'full_draft': 'foo', 'new_enough' : 'foo', 'expires_soon' : 'foo', 'left_menu': 'foo', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'foo', str('new_enough') : 'foo', str('expires_soon') : 'foo', str('left_menu'): 'foo', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
 q = PyQuery(r.content)
@@ -42,7 +48,7 @@ class CookieTests(TestCase):
 self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
 
 def test_settings_values_from_cookies_random(self):
-self.client.cookies = SimpleCookie({'full_draft': 'zappa', 'new_enough' : '365', 'expires_soon' : '5', 'left_menu': 'zappa', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'zappa', str('new_enough') : '365', str('expires_soon') : '5', str('left_menu'): 'zappa', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
 q = PyQuery(r.content)
@@ -57,10 +63,10 @@ class CookieTests(TestCase):
 # self.assertNotRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon')
 
 def test_settings_values_from_cookies_1(self):
-self.client.cookies = SimpleCookie({'full_draft': 'on', 'new_enough' : '90', 'expires_soon' : 7, 'left_menu': 'off', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('new_enough') : '90', str('expires_soon') : 7, str('left_menu'): 'off', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/90"]').contents(), ['90 days'])
 
@@ -71,10 +77,10 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*7 days')
 
 def test_settings_values_from_cookies_2(self):
-self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '60', 'expires_soon' : 14, 'left_menu': 'on', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '60', str('expires_soon') : 14, str('left_menu'): 'on', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/60"]').contents(), ['60 days'])
@@ -85,10 +91,10 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*14 days')
 
 def test_settings_values_from_cookies_3(self):
-self.client.cookies = SimpleCookie({'full_draft': 'on', 'new_enough' : '30', 'expires_soon' : 21, 'left_menu': 'off'})
+self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('new_enough') : '30', str('expires_soon') : 21, str('left_menu'): 'off'})
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/30"]').contents(), ['30 days'])
@@ -99,10 +105,10 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*21 days')
 
 def test_settings_values_from_cookies_4(self):
-self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '21', 'expires_soon' : 30, 'left_menu': 'on', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '21', str('expires_soon') : 30, str('left_menu'): 'on', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/21"]').contents(), ['21 days'])
@@ -113,10 +119,10 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*30 days')
 
 def test_settings_values_from_cookies_5(self):
-self.client.cookies = SimpleCookie({'full_draft': 'on', 'new_enough' : '14', 'expires_soon' : 60, 'left_menu': 'off', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('new_enough') : '14', str('expires_soon') : 60, str('left_menu'): 'off', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
 
@@ -127,10 +133,10 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*60 days')
 
 def test_settings_values_from_cookies_6(self):
-self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '7', 'expires_soon' : 90, 'left_menu': 'on', })
+self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '7', str('expires_soon') : 90, str('left_menu'): 'on', })
 r = self.client.get(urlreverse("ietf.cookies.views.preferences"))
 self.assertEqual(r.status_code, 200)
-self.assertListEqual([], r.cookies.keys())
+self.assertListEqual([], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/7"]').contents(), ['7 days'])
@@ -141,11 +147,11 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*90 days')
 
 def test_full_draft(self):
-self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14})
+self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14})
 r = self.client.get(urlreverse("ietf.cookies.views.full_draft")) # no value: reset
 self.assertEqual(r.status_code, 200)
-self.assertEqual(r.cookies['full_draft'].value, '')
+self.assertEqual(r.cookies[str('full_draft')].value, '')
-self.assertListEqual(['full_draft'], r.cookies.keys())
+self.assertListEqual([str('full_draft')], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
 
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*14 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*14 days')
|
||||||
|
|
||||||
def test_full_draft_on(self):
|
def test_full_draft_on(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.full_draft", kwargs=dict(enabled="on")))
|
r = self.client.get(urlreverse("ietf.cookies.views.full_draft", kwargs=dict(enabled="on")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['full_draft'].value, 'on')
|
self.assertEqual(r.cookies[str('full_draft')].value, 'on')
|
||||||
self.assertListEqual(['full_draft'], r.cookies.keys())
|
self.assertListEqual([str('full_draft')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*full_draft.*on')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*full_draft.*on')
|
||||||
|
|
||||||
def test_full_draft_off(self):
|
def test_full_draft_off(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.full_draft", kwargs=dict(enabled="off")))
|
r = self.client.get(urlreverse("ietf.cookies.views.full_draft", kwargs=dict(enabled="off")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['full_draft'].value, 'off')
|
self.assertEqual(r.cookies[str('full_draft')].value, 'off')
|
||||||
self.assertListEqual(['full_draft'], r.cookies.keys())
|
self.assertListEqual([str('full_draft')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
# self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
# self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
||||||
|
@ -177,10 +183,10 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*full_draft.*off')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*full_draft.*off')
|
||||||
|
|
||||||
def test_full_draft_foo(self):
|
def test_full_draft_foo(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.full_draft", kwargs=dict(enabled="foo")))
|
r = self.client.get(urlreverse("ietf.cookies.views.full_draft", kwargs=dict(enabled="foo")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertListEqual([], r.cookies.keys())
|
self.assertListEqual([], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
# self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
# self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
||||||
|
@ -188,11 +194,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*full_draft.*off')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*full_draft.*off')
|
||||||
|
|
||||||
def test_left_menu(self):
|
def test_left_menu(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14, 'left_menu': 'on', })
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14, str('left_menu'): 'on', })
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.left_menu")) # no value: reset
|
r = self.client.get(urlreverse("ietf.cookies.views.left_menu")) # no value: reset
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['left_menu'].value, '')
|
self.assertEqual(r.cookies[str('left_menu')].value, '')
|
||||||
self.assertListEqual(['left_menu'], r.cookies.keys())
|
self.assertListEqual([str('left_menu')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
|
||||||
|
@ -200,37 +206,37 @@ class CookieTests(TestCase):
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/expires_soon/14"]').contents(), ['14 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/expires_soon/14"]').contents(), ['14 days'])
|
||||||
|
|
||||||
def test_left_menu_on(self):
|
def test_left_menu_on(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14, 'left_menu': 'off', })
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14, str('left_menu'): 'off', })
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.left_menu", kwargs=dict(enabled="on")))
|
r = self.client.get(urlreverse("ietf.cookies.views.left_menu", kwargs=dict(enabled="on")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['left_menu'].value, 'on')
|
self.assertEqual(r.cookies[str('left_menu')].value, 'on')
|
||||||
self.assertListEqual(['left_menu'], r.cookies.keys())
|
self.assertListEqual([str('left_menu')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/on"]').contents(), ['On'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/on"]').contents(), ['On'])
|
||||||
|
|
||||||
def test_left_menu_off(self):
|
def test_left_menu_off(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14, 'left_menu': 'off', })
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14, str('left_menu'): 'off', })
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.left_menu", kwargs=dict(enabled="off")))
|
r = self.client.get(urlreverse("ietf.cookies.views.left_menu", kwargs=dict(enabled="off")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['left_menu'].value, 'off')
|
self.assertEqual(r.cookies[str('left_menu')].value, 'off')
|
||||||
self.assertListEqual(['left_menu'], r.cookies.keys())
|
self.assertListEqual([str('left_menu')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
|
||||||
|
|
||||||
def test_left_menu_foo(self):
|
def test_left_menu_foo(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14, 'left_menu': 'off', })
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14, str('left_menu'): 'off', })
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.left_menu", kwargs=dict(enabled="foo")))
|
r = self.client.get(urlreverse("ietf.cookies.views.left_menu", kwargs=dict(enabled="foo")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertListEqual([], r.cookies.keys())
|
self.assertListEqual([], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/left_menu/off"]').contents(), ['Off'])
|
||||||
|
|
||||||
def test_new_enough(self):
|
def test_new_enough(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.new_enough")) # no value: reset
|
r = self.client.get(urlreverse("ietf.cookies.views.new_enough")) # no value: reset
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['new_enough'].value, '')
|
self.assertEqual(r.cookies[str('new_enough')].value, '')
|
||||||
self.assertListEqual(['new_enough'], r.cookies.keys())
|
self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
||||||
|
@@ -240,11 +246,11 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*14 days')
 
 def test_new_enough_7(self):
-self.client.cookies = SimpleCookie({'full_draft': 'on', 'new_enough' : '14', 'expires_soon' : 21})
+self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('new_enough') : '14', str('expires_soon') : 21})
 r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="7")))
 self.assertEqual(r.status_code, 200)
-self.assertEqual(r.cookies['new_enough'].value, '7')
+self.assertEqual(r.cookies[str('new_enough')].value, '7')
-self.assertListEqual(['new_enough'], r.cookies.keys())
+self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/7"]').contents(), ['7 days'])
@@ -254,11 +260,11 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*21 days')
 
 def test_new_enough_14(self):
-self.client.cookies = SimpleCookie({'full_draft': 'on', 'new_enough' : '7', 'expires_soon' : 99})
+self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('new_enough') : '7', str('expires_soon') : 99})
 r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="14")))
 self.assertEqual(r.status_code, 200)
-self.assertEqual(r.cookies['new_enough'].value, '14')
+self.assertEqual(r.cookies[str('new_enough')].value, '14')
-self.assertListEqual(['new_enough'], r.cookies.keys())
+self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
@@ -268,11 +274,11 @@ class CookieTests(TestCase):
 # self.assertNotRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon')
 
 def test_new_enough_21(self):
-self.client.cookies = SimpleCookie({'full_draft': 'on', 'new_enough' : '14', 'expires_soon' : 90})
+self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('new_enough') : '14', str('expires_soon') : 90})
 r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="21")))
 self.assertEqual(r.status_code, 200)
-self.assertEqual(r.cookies['new_enough'].value, '21')
+self.assertEqual(r.cookies[str('new_enough')].value, '21')
-self.assertListEqual(['new_enough'], r.cookies.keys())
+self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/21"]').contents(), ['21 days'])
@@ -282,11 +288,11 @@ class CookieTests(TestCase):
 # self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*90 days')
 
 def test_new_enough_30(self):
-self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 7})
+self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 7})
 r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="30")))
 self.assertEqual(r.status_code, 200)
-self.assertEqual(r.cookies['new_enough'].value, '30')
+self.assertEqual(r.cookies[str('new_enough')].value, '30')
-self.assertListEqual(['new_enough'], r.cookies.keys())
+self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
 q = PyQuery(r.content)
 self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
 self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/30"]').contents(), ['30 days'])
 
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*7 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*7 days')
|
||||||
|
|
||||||
def test_new_enough_60(self):
|
def test_new_enough_60(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '14', 'expires_soon' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '14', str('expires_soon') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="60")))
|
r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="60")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['new_enough'].value, '60')
|
self.assertEqual(r.cookies[str('new_enough')].value, '60')
|
||||||
self.assertListEqual(['new_enough'], r.cookies.keys())
|
self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/60"]').contents(), ['60 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/60"]').contents(), ['60 days'])
|
||||||
|
@ -310,11 +316,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*14 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*14 days')
|
||||||
|
|
||||||
def test_new_enough_90(self):
|
def test_new_enough_90(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'new_enough' : '22', 'expires_soon' : 60})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('new_enough') : '22', str('expires_soon') : 60})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="90")))
|
r = self.client.get(urlreverse("ietf.cookies.views.new_enough", kwargs=dict(days="90")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['new_enough'].value, '90')
|
self.assertEqual(r.cookies[str('new_enough')].value, '90')
|
||||||
self.assertListEqual(['new_enough'], r.cookies.keys())
|
self.assertListEqual([str('new_enough')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/90"]').contents(), ['90 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/90"]').contents(), ['90 days'])
|
||||||
|
@ -324,11 +330,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*60 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*expires_soon.*60 days')
|
||||||
|
|
||||||
def test_expires_soon(self):
|
def test_expires_soon(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'expires_soon' : '14', 'new_enough' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('expires_soon') : '14', str('new_enough') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon")) # no value: reset
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon")) # no value: reset
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
||||||
|
@ -338,11 +344,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*14 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*14 days')
|
||||||
|
|
||||||
def test_expires_soon_7(self):
|
def test_expires_soon_7(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'on', 'expires_soon' : '14', 'new_enough' : 21})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('expires_soon') : '14', str('new_enough') : 21})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="7")))
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="7")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '7')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '7')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/21"]').contents(), ['21 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/21"]').contents(), ['21 days'])
|
||||||
|
@ -352,11 +358,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*21 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*21 days')
|
||||||
|
|
||||||
def test_expires_soon_14(self):
|
def test_expires_soon_14(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'on', 'expires_soon' : '7', 'new_enough' : 99})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('expires_soon') : '7', str('new_enough') : 99})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="14")))
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="14")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '14')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '14')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
||||||
self.assertEqual(q('div a.active[href^="/accounts/settings/new_enough/"]').contents(), [])
|
self.assertEqual(q('div a.active[href^="/accounts/settings/new_enough/"]').contents(), [])
|
||||||
|
@ -366,11 +372,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertNotRegexpMatches(r.content, r'ietf-highlight-y.*new_enough')
|
# self.assertNotRegexpMatches(r.content, r'ietf-highlight-y.*new_enough')
|
||||||
|
|
||||||
def test_expires_soon_21(self):
|
def test_expires_soon_21(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'on', 'expires_soon' : '14', 'new_enough' : 90})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'on', str('expires_soon') : '14', str('new_enough') : 90})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="21")))
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="21")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '21')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '21')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/on"]').contents(), ['On'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/90"]').contents(), ['90 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/90"]').contents(), ['90 days'])
|
||||||
|
@ -380,11 +386,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*90 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*90 days')
|
||||||
|
|
||||||
def test_expires_soon_30(self):
|
def test_expires_soon_30(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'expires_soon' : '14', 'new_enough' : 7})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('expires_soon') : '14', str('new_enough') : 7})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="30")))
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="30")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '30')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '30')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/7"]').contents(), ['7 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/7"]').contents(), ['7 days'])
|
||||||
|
@ -394,11 +400,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*7 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*7 days')
|
||||||
|
|
||||||
def test_expires_soon_60(self):
|
def test_expires_soon_60(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'expires_soon' : '14', 'new_enough' : 14})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('expires_soon') : '14', str('new_enough') : 14})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="60")))
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="60")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '60')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '60')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/14"]').contents(), ['14 days'])
|
||||||
|
@ -408,11 +414,11 @@ class CookieTests(TestCase):
|
||||||
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*14 days')
|
# self.assertRegexpMatches(r.content, r'ietf-highlight-y.*new_enough.*14 days')
|
||||||
|
|
||||||
def test_expires_soon_90(self):
|
def test_expires_soon_90(self):
|
||||||
self.client.cookies = SimpleCookie({'full_draft': 'off', 'expires_soon' : '22', 'new_enough' : 60})
|
self.client.cookies = SimpleCookie({str('full_draft'): 'off', str('expires_soon') : '22', str('new_enough') : 60})
|
||||||
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="90")))
|
r = self.client.get(urlreverse("ietf.cookies.views.expires_soon", kwargs=dict(days="90")))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertEqual(r.cookies['expires_soon'].value, '90')
|
self.assertEqual(r.cookies[str('expires_soon')].value, '90')
|
||||||
self.assertListEqual(['expires_soon'], r.cookies.keys())
|
self.assertListEqual([str('expires_soon')], list(r.cookies.keys()))
|
||||||
q = PyQuery(r.content)
|
q = PyQuery(r.content)
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/full_draft/off"]').contents(), ['Off'])
|
||||||
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/60"]').contents(), ['60 days'])
|
self.assertEqual(q('div a.active[href="/accounts/settings/new_enough/60"]').contents(), ['60 days'])
|
||||||
|
|
|
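Note on the str(...) wrapping and list(...keys()) calls used throughout the cookie tests above; a minimal standalone sketch (not part of the patch, module choice illustrative):

    # With unicode_literals in force, plain literals are unicode on Python 2,
    # while the py2 cookie machinery expects native-str keys; str() keeps the
    # keys native on both interpreters and is a no-op on Python 3.  list() is
    # needed because keys() returns a view rather than a list on Python 3.
    from __future__ import unicode_literals
    from six.moves.http_cookies import SimpleCookie  # Cookie on py2, http.cookies on py3

    cookies = SimpleCookie({str('new_enough'): str('14')})
    assert list(cookies.keys()) == [str('new_enough')]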
@@ -1,4 +1,8 @@
-# Copyright The IETF Trust 2010, All Rights Reserved
+# Copyright The IETF Trust 2010-2019, All Rights Reserved
+# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

from django.shortcuts import render
from django.conf import settings

@@ -10,7 +14,7 @@ def preferences(request, **kwargs):
    new_cookies = {}
    del_cookies = []
    preferences['defaults'] = settings.USER_PREFERENCE_DEFAULTS
-    for key in settings.USER_PREFERENCE_DEFAULTS.keys():
+    for key in list(settings.USER_PREFERENCE_DEFAULTS.keys()):
        if key in kwargs:
            if kwargs[key] == None:
                del_cookies += [key]

@@ -1,3 +1,9 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
+# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

from django import forms
from django.core.exceptions import ValidationError
from django.template import Context

@@ -20,7 +26,7 @@ class DBTemplateForm(forms.ModelForm):
                PlainTemplate(content).render(Context({}))
            else:
                raise ValidationError("Unexpected DBTemplate.type.slug: %s" % self.type.slug)
-        except Exception, e:
+        except Exception as e:
            raise ValidationError(e)
        return content
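The exception-clause rewrites above are pure syntax updates; a short standalone illustration (not from the patch):

    # "except Exception, e" is Python-2-only syntax and a SyntaxError on Python 3;
    # the "as" form parses on both interpreters.
    try:
        raise ValueError("example")
    except Exception as e:
        print(e)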
@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 10:52
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 10:52
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations
import django.db.models.deletion

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-05 11:39
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-03-13 13:41
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations
@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
# Copyright The IETF Trust 2012-2019, All Rights Reserved
-from __future__ import unicode_literals, print_function
+from __future__ import absolute_import, print_function, unicode_literals


from django.db import models
from django.core.exceptions import ValidationError
from django.template import Context
+from django.utils.encoding import python_2_unicode_compatible

from ietf.group.models import Group
from ietf.name.models import DBTemplateTypeName

@@ -19,6 +19,7 @@ TEMPLATE_TYPES = (
)


+@python_2_unicode_compatible
class DBTemplate(models.Model):
    path = models.CharField( max_length=255, unique=True, blank=False, null=False, )
    title = models.CharField( max_length=255, blank=False, null=False, )

@@ -27,7 +28,7 @@ class DBTemplate(models.Model):
    content = models.TextField( blank=False, null=False, )
    group = ForeignKey( Group, blank=True, null=True, )

-    def __unicode__(self):
+    def __str__(self):
        return self.title

    def clean(self):

@@ -41,6 +42,6 @@ class DBTemplate(models.Model):
                PlainTemplate(self.content).render(Context({}))
            else:
                raise ValidationError("Unexpected DBTemplate.type.slug: %s" % self.type.slug)
-        except Exception, e:
+        except Exception as e:
            raise ValidationError(e)
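The __unicode__ to __str__ switch above leans on the decorator added to DBTemplate; a minimal, hypothetical model (not part of the patch) showing the same pattern:

    from __future__ import unicode_literals
    from django.db import models
    from django.utils.encoding import python_2_unicode_compatible

    @python_2_unicode_compatible          # adds a __unicode__ wrapper on Python 2 only
    class ExampleTemplate(models.Model):  # hypothetical model, for illustration
        title = models.CharField(max_length=255)

        def __str__(self):                # the only method that needs defining
            return self.title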
@@ -1,5 +1,8 @@
# Copyright The IETF Trust 2014-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
# Autogenerated by the mkresources management command 2014-11-13 23:53


from ietf.api import ModelResource
from tastypie.fields import ToOneField
from tastypie.constants import ALL, ALL_WITH_RELATIONS

@@ -1,3 +1,9 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
+# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

import os
import string
from docutils.core import publish_string

@@ -7,7 +13,7 @@ import debug # pyflakes:ignore
from django.template.loaders.base import Loader as BaseLoader
from django.template.base import Template as DjangoTemplate, TemplateEncodingError
from django.template.exceptions import TemplateDoesNotExist
-from django.utils.encoding import smart_unicode
+from django.utils.encoding import smart_text

from ietf.dbtemplate.models import DBTemplate

@@ -20,7 +26,7 @@ class Template(object):

    def __init__(self, template_string, origin=None, name='<Unknown Template>'):
        try:
-            template_string = smart_unicode(template_string)
+            template_string = smart_text(template_string)
        except UnicodeDecodeError:
            raise TemplateEncodingError("Templates can only be constructed from unicode or UTF-8 strings.")
        self.template_string = string.Template(template_string)

@@ -54,7 +60,7 @@ class RSTTemplate(PlainTemplate):
                'template': RST_TEMPLATE,
                'halt_level': 2,
            })
-        except SystemMessage, e:
+        except SystemMessage as e:
            e.message = e.message.replace('<string>:', 'line ')
            args = list(e.args)
            args[0] = args[0].replace('<string>:', 'line ')

@@ -1,10 +1,13 @@
# Copyright The IETF Trust 2010-2019, All Rights Reserved
# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

from django.contrib import admin
from django import forms

-from models import (StateType, State, RelatedDocument, DocumentAuthor, Document, RelatedDocHistory,
+from .models import (StateType, State, RelatedDocument, DocumentAuthor, Document, RelatedDocHistory,
    DocHistoryAuthor, DocHistory, DocAlias, DocReminder, DocEvent, NewRevisionDocEvent,
    StateDocEvent, ConsensusDocEvent, BallotType, BallotDocEvent, WriteupDocEvent, LastCallDocEvent,
    TelechatDocEvent, BallotPositionDocEvent, ReviewRequestDocEvent, InitialReviewDocEvent,

@@ -155,7 +158,7 @@ admin.site.register(EditedAuthorsDocEvent, DocEventAdmin)


class DeletedEventAdmin(admin.ModelAdmin):
-    list_display = [u'id', 'content_type', 'json', 'by', 'time']
+    list_display = ['id', 'content_type', 'json', 'by', 'time']
    list_filter = ['time']
    raw_id_fields = ['content_type', 'by']
admin.site.register(DeletedEvent, DeletedEventAdmin)

@@ -1,5 +1,10 @@
+# Copyright The IETF Trust 2010-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
# expiry of Internet Drafts


+from __future__ import absolute_import, print_function, unicode_literals

from django.conf import settings

import datetime, os, shutil, glob, re

@@ -92,7 +97,7 @@ def send_expire_warning_for_draft(doc):
    request = None
    if to or cc:
        send_mail(request, to, frm,
-                  u"Expiration impending: %s" % doc.file_tag(),
+                  "Expiration impending: %s" % doc.file_tag(),
                  "doc/draft/expire_warning_email.txt",
                  dict(doc=doc,
                       state=state,

@@ -112,7 +117,7 @@ def send_expire_notice_for_draft(doc):
    (to,cc) = gather_address_lists('doc_expired',doc=doc)
    send_mail(request, to,
              "I-D Expiring System <ietf-secretariat-reply@ietf.org>",
-              u"I-D was expired %s" % doc.file_tag(),
+              "I-D was expired %s" % doc.file_tag(),
              "doc/draft/id_expired_email.txt",
              dict(doc=doc,
                   state=state,

@@ -167,7 +172,7 @@ def clean_up_draft_files():
    cut_off = datetime.date.today()

    pattern = os.path.join(settings.INTERNET_DRAFT_PATH, "draft-*.*")
-    filename_re = re.compile('^(.*)-(\d\d)$')
+    filename_re = re.compile(r'^(.*)-(\d\d)$')

    def splitext(fn):
        """

@@ -1,6 +1,9 @@
# Copyright The IETF Trust 2016-2019, All Rights Reserved
# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

import debug # pyflakes:ignore
import factory
import datetime

@@ -1,6 +1,11 @@
-# Copyright The IETF Trust 2007, All Rights Reserved
+# Copyright The IETF Trust 2007-2019, All Rights Reserved
+# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

import datetime
+import six

from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.utils.feedgenerator import Atom1Feed, Rss201rev2Feed

@@ -22,8 +27,8 @@ class DocumentChangesFeed(Feed):
        return "Changes for %s" % obj.display_name()

    def link(self, obj):
        if obj is None:
            raise FeedDoesNotExist
        return urlreverse('ietf.doc.views_doc.document_history', kwargs=dict(name=obj.canonical_name()))

    def subtitle(self, obj):

@@ -32,19 +37,19 @@ class DocumentChangesFeed(Feed):
    def items(self, obj):
        events = obj.docevent_set.all().order_by("-time","-id")
        augment_events_with_revision(obj, events)
        return events

    def item_title(self, item):
-        return u"[%s] %s [rev. %s]" % (item.by, truncatewords(strip_tags(item.desc), 15), item.rev)
+        return "[%s] %s [rev. %s]" % (item.by, truncatewords(strip_tags(item.desc), 15), item.rev)

    def item_description(self, item):
        return truncatewords_html(format_textarea(item.desc), 20)

    def item_pubdate(self, item):
        return item.time

    def item_author_name(self, item):
-        return unicode(item.by)
+        return six.text_type(item.by)

    def item_link(self, item):
        return urlreverse('ietf.doc.views_doc.document_history', kwargs=dict(name=item.doc.canonical_name())) + "#history-%s" % item.pk

@@ -62,12 +67,12 @@ class InLastCallFeed(Feed):
            d.lc_event = d.latest_event(LastCallDocEvent, type="sent_last_call")

        docs = [d for d in docs if d.lc_event]
        docs.sort(key=lambda d: d.lc_event.expires)

        return docs

    def item_title(self, item):
-        return u"%s (%s - %s)" % (item.name,
+        return "%s (%s - %s)" % (item.name,
                                 datefilter(item.lc_event.time, "F j"),
                                 datefilter(item.lc_event.expires, "F j, Y"))
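The unicode() to six.text_type() substitution above is the usual portable way to name the text type; a standalone sketch with a hypothetical helper:

    import six

    def author_name(person):              # stand-in for item.by in the feed above
        return six.text_type(person)      # unicode on Python 2, str on Python 3

    assert isinstance(author_name("Henrik"), six.text_type)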
@@ -1,7 +1,11 @@
# Copyright The IETF Trust 2014-2019, All Rights Reserved
# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

import json
+import six

from django.utils.html import escape
from django import forms

@@ -53,9 +57,9 @@ class SearchableDocumentsField(forms.CharField):
    def prepare_value(self, value):
        if not value:
            value = ""
-        if isinstance(value, (int, long)):
+        if isinstance(value, six.integer_types):
            value = str(value)
-        if isinstance(value, basestring):
+        if isinstance(value, six.string_types):
            items = self.parse_select2_value(value)
            # accept both names and pks here
            names = [ i for i in items if not i.isdigit() ]

@@ -79,7 +83,7 @@ class SearchableDocumentsField(forms.CharField):
            "model_name": self.model.__name__.lower()
        })

-        return u",".join(unicode(o.pk) for o in value)
+        return ",".join(six.text_type(o.pk) for o in value)

    def clean(self, value):
        value = super(SearchableDocumentsField, self).clean(value)

@@ -90,10 +94,10 @@ class SearchableDocumentsField(forms.CharField):
        found_pks = [ str(o.pk) for o in objs ]
        failed_pks = [ x for x in pks if x not in found_pks ]
        if failed_pks:
-            raise forms.ValidationError(u"Could not recognize the following documents: {names}. You can only input documents already registered in the Datatracker.".format(names=", ".join(failed_pks)))
+            raise forms.ValidationError("Could not recognize the following documents: {names}. You can only input documents already registered in the Datatracker.".format(names=", ".join(failed_pks)))

        if self.max_entries != None and len(objs) > self.max_entries:
-            raise forms.ValidationError(u"You can select at most %s entries." % self.max_entries)
+            raise forms.ValidationError("You can select at most %s entries." % self.max_entries)

        return objs
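The isinstance() rewrites above swap the py2-only long and basestring names for six aliases; a standalone sketch (function name hypothetical):

    import six

    def normalize_pks(value):
        if isinstance(value, six.integer_types):   # int on py3, (int, long) on py2
            value = str(value)
        if isinstance(value, six.string_types):    # str on py3, (str, unicode) on py2
            return [v for v in value.split(",") if v]
        return value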
@@ -1,6 +1,9 @@
-# Copyright The IETF Trust 2017, All Rights Reserved
+# Copyright The IETF Trust 2013-2019, All Rights Reserved
+# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

-from __future__ import unicode_literals

import datetime
import debug #pyflakes:ignore

@@ -1,15 +1,23 @@
+# Copyright The IETF Trust 2010-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
# generation of mails

-import textwrap, datetime
+from __future__ import absolute_import, print_function, unicode_literals

+import datetime
+import six
+import textwrap

from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
from django.urls import reverse as urlreverse
+from django.utils.encoding import force_str, force_text

import debug # pyflakes:ignore

-from ietf.utils.mail import send_mail, send_mail_text
+from ietf.utils.mail import send_mail, send_mail_text, get_payload
from ietf.ipr.utils import iprs_from_docs, related_docs
from ietf.doc.models import WriteupDocEvent, LastCallDocEvent, DocAlias, ConsensusDocEvent
from ietf.doc.utils import needed_ballot_positions

@@ -32,15 +40,15 @@ def email_state_changed(request, doc, text, mailtrigger_id=None):
              cc=cc)

def email_ad_approved_doc(request, doc, text):
    to = "iesg@iesg.org"
    bcc = "iesg-secretary@ietf.org"
    frm = request.user.person.formatted_email()
    send_mail(request, to, frm,
              "Approved: %s" % doc.filename_with_rev(),
              "doc/mail/ad_approval_email.txt",
              dict(text=text,
                   docname=doc.filename_with_rev()),
              bcc=bcc)

def email_stream_changed(request, doc, old_stream, new_stream, text=""):
    """Email the change text to the notify group and to the stream chairs"""

@@ -55,7 +63,7 @@ def email_stream_changed(request, doc, old_stream, new_stream, text=""):
        return

    if not text:
-        text = u"Stream changed to <b>%s</b> from %s" % (new_stream, old_stream)
+        text = "Stream changed to <b>%s</b> from %s" % (new_stream, old_stream)
    text = strip_tags(text)

    send_mail(request, to, None,

@@ -119,8 +127,8 @@ def generate_ballot_writeup(request, doc):
    e.by = request.user.person
    e.doc = doc
    e.rev = doc.rev
-    e.desc = u"Ballot writeup was generated"
+    e.desc = "Ballot writeup was generated"
-    e.text = unicode(render_to_string("doc/mail/ballot_writeup.txt", {'iana': iana}))
+    e.text = force_text(render_to_string("doc/mail/ballot_writeup.txt", {'iana': iana}))

    # caller is responsible for saving, if necessary
    return e

@@ -131,8 +139,8 @@ def generate_ballot_rfceditornote(request, doc):
    e.by = request.user.person
    e.doc = doc
    e.rev = doc.rev
-    e.desc = u"RFC Editor Note for ballot was generated"
+    e.desc = "RFC Editor Note for ballot was generated"
-    e.text = unicode(render_to_string("doc/mail/ballot_rfceditornote.txt"))
+    e.text = force_text(render_to_string("doc/mail/ballot_rfceditornote.txt"))
    e.save()

    return e

@@ -176,8 +184,8 @@ def generate_last_call_announcement(request, doc):
    e.by = request.user.person
    e.doc = doc
    e.rev = doc.rev
-    e.desc = u"Last call announcement was generated"
+    e.desc = "Last call announcement was generated"
-    e.text = unicode(mail)
+    e.text = force_text(mail)

    # caller is responsible for saving, if necessary
    return e

@@ -196,8 +204,8 @@ def generate_approval_mail(request, doc):
    e.by = request.user.person
    e.doc = doc
    e.rev = doc.rev
-    e.desc = u"Ballot approval text was generated"
+    e.desc = "Ballot approval text was generated"
-    e.text = unicode(mail)
+    e.text = force_text(mail)

    # caller is responsible for saving, if necessary
    return e

@@ -280,7 +288,7 @@ def generate_publication_request(request, doc):
        approving_body = "IRSG"
        consensus_body = doc.group.acronym.upper()
    else:
-        approving_body = str(doc.stream)
+        approving_body = six.text_type(doc.stream)
        consensus_body = approving_body

    e = doc.latest_event(WriteupDocEvent, type="changed_rfc_editor_note_text")

@@ -374,7 +382,7 @@ def generate_issue_ballot_mail(request, doc, ballot):
            last_call_has_expired=last_call_has_expired,
            needed_ballot_positions=
                needed_ballot_positions(doc,
-                    doc.active_ballot().active_ad_positions().values()
+                    list(doc.active_ballot().active_ad_positions().values())
                ),
            )
        )

@@ -382,7 +390,7 @@
def email_iana(request, doc, to, msg, cc=None):
    # fix up message and send it with extra info on doc in headers
    import email
-    parsed_msg = email.message_from_string(msg.encode("utf-8"))
+    parsed_msg = email.message_from_string(force_str(msg))
    parsed_msg.set_charset('UTF-8')

    extra = extra_automation_headers(doc)

@@ -390,7 +398,7 @@ def email_iana(request, doc, to, msg, cc=None):

    send_mail_text(request, to,
                   parsed_msg["From"], parsed_msg["Subject"],
-                   parsed_msg.get_payload().decode(str(parsed_msg.get_charset())),
+                   get_payload(parsed_msg),
                   extra=extra,
                   cc=cc)

@@ -451,7 +459,7 @@ def email_adopted(request, doc, prev_state, new_state, by, comment=""):
    state_type = (prev_state or new_state).type

    send_mail(request, to, settings.DEFAULT_FROM_EMAIL,
-              u'The %s %s has placed %s in state "%s"' %
+              'The %s %s has placed %s in state "%s"' %
              (doc.group.acronym.upper(),doc.group.type_id.upper(), doc.name, new_state or "None"),
              'doc/mail/doc_adopted_email.txt',
              dict(doc=doc,

@@ -469,7 +477,7 @@ def email_stream_state_changed(request, doc, prev_state, new_state, by, comment=
    state_type = (prev_state or new_state).type

    send_mail(request, to, settings.DEFAULT_FROM_EMAIL,
-              u"%s changed for %s" % (state_type.label, doc.name),
+              "%s changed for %s" % (state_type.label, doc.name),
              'doc/mail/stream_state_changed_email.txt',
              dict(doc=doc,
                   url=settings.IDTRACKER_BASE_URL + doc.get_absolute_url(),

@@ -485,7 +493,7 @@ def email_stream_tags_changed(request, doc, added_tags, removed_tags, by, commen
    (to, cc) = gather_address_lists('doc_stream_state_edited',doc=doc)

    send_mail(request, to, settings.DEFAULT_FROM_EMAIL,
-              u"Tags changed for %s" % doc.name,
+              "Tags changed for %s" % doc.name,
              'doc/mail/stream_tags_changed_email.txt',
              dict(doc=doc,
                   url=settings.IDTRACKER_BASE_URL + doc.get_absolute_url(),
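A hedged sketch of the force_str()/force_text() usage above: force_text() always returns the text type, while force_str() returns the interpreter's native str, which is what email.message_from_string() wants on both versions (exact alias behaviour depends on the Django release):

    import email
    from django.utils.encoding import force_str, force_text

    def parse_message(msg):
        # msg may be a unicode string; force_str() makes it a native str first
        return email.message_from_string(force_str(msg))

    def writeup_text(rendered):
        # rendered may be lazy or bytes; force_text() coerces it to text
        return force_text(rendered)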
@@ -1,3 +1,10 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
+# -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

+import io
import sys
import os

@@ -9,19 +16,19 @@ from ietf.doc.models import Document

def write(fn, new):
    try:
-        f = open(fn)
+        f = io.open(fn)
        old = f.read().decode('utf-8')
        f.close
    except IOError:
        old = ""
    if old.strip() != new.strip():
        sys.stdout.write(os.path.basename(fn)+'\n')
-        f = open(fn, "wb")
+        f = io.open(fn, "wb")
        f.write(new.encode('utf-8'))
        f.close()

class Command(BaseCommand):
-    help = (u'Generate draft bibxml files, for xml2rfc references')
+    help = ('Generate draft bibxml files, for xml2rfc references')

    def handle(self, *args, **options):
        documents = Document.objects.filter(type__slug='draft')
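The open() to io.open() change above gives the same file-object semantics on both interpreters; a condensed standalone sketch of the helper (reworked with context managers and explicit encoding, not the patched code itself):

    import io

    def write_if_changed(fn, new):
        try:
            with io.open(fn, "rb") as f:
                old = f.read().decode("utf-8")
        except IOError:
            old = ""
        if old.strip() != new.strip():
            with io.open(fn, "wb") as f:
                f.write(new.encode("utf-8"))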
@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 10:52
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

import datetime
import django.core.validators

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 10:52
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models
import django.db.models.deletion

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-01 12:31
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -14,6 +17,6 @@ class Migration(migrations.Migration):
    operations = [
        migrations.AddIndex(
            model_name='docevent',
-            index=models.Index(fields=[b'type', b'doc'], name='doc_doceven_type_43e53e_idx'),
+            index=models.Index(fields=['type', 'doc'], name='doc_doceven_type_43e53e_idx'),
        ),
    ]

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-03 11:50
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-03 12:16
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-03 06:39
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2018-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-04 10:56
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from tqdm import tqdm

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2018-12-28 13:11
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2018-12-28 13:33
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations
from django.db.models import F

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-02-25 13:02
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-01-11 11:22
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models
import django.db.models.deletion

@@ -1,6 +1,9 @@
+# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-01 04:43
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-08 08:41
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

from django.db import migrations, models

@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-08 08:42
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

import sys

@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-08 10:29
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

import django.core.validators
from django.db import migrations, models

@@ -1,7 +1,9 @@
# Copyright The IETF Trust 2019, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-28 12:42
-from __future__ import unicode_literals

+from __future__ import absolute_import, print_function, unicode_literals

import sys, time
@ -1,8 +1,11 @@
|
||||||
# Copyright The IETF Trust 2019, All Rights Reserved
|
# Copyright The IETF Trust 2019, All Rights Reserved
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Generated by Django 1.11.20 on 2019-05-08 14:04
|
# Generated by Django 1.11.20 on 2019-05-08 14:04
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
|
|
||||||
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
|
import six
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from tqdm import tqdm
|
from tqdm import tqdm
|
||||||
|
@@ -15,7 +18,7 @@ def forward(apps, schema_editor):
             n = getattr(o, a+'_id')
             if n:
                 i = nameid[n]
-                if not isinstance(i, (int, long)):
+                if not isinstance(i, six.integer_types):
                     raise ValueError("Inappropriate value: %s: nameid[%s]: %s" % (o.__class__.__name__, n, i))
                 if getattr(o, a+'2_id') != i:
                     setattr(o, a+'2_id', i)
@@ -44,7 +47,7 @@ def forward(apps, schema_editor):
     # Document id fixup ------------------------------------------------------------
 
     objs = Document.objects.in_bulk()
-    nameid = { o.name: o.id for id, o in objs.iteritems() }
+    nameid = { o.name: o.id for id, o in objs.items() }
 
     sys.stderr.write('\n')
 
@@ -78,7 +81,7 @@ def forward(apps, schema_editor):
     sys.stderr.write('\n')
 
     objs = DocAlias.objects.in_bulk()
-    nameid = { o.name: o.id for id, o in objs.iteritems() }
+    nameid = { o.name: o.id for id, o in objs.items() }
 
     sys.stderr.write('Setting DocAlias FKs:\n')
 
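The hunks above swap Python-2-only constructs (the `long` type, `dict.iteritems()`) for forms that work on both 2.7 and 3.7. A minimal sketch of the same idioms, using made-up data rather than the datatracker models:

```python
# Illustrative data only; six.integer_types and dict.items() are the
# compatibility idioms used in the migration code above.
import six

nameid = {"draft-example-00": 1, "rfc9999": 2}

# dict.items() exists on both Python 2 and 3 (on 2 it builds a list,
# which is fine for migration-sized data); iteritems() is 2-only.
for name, pk in nameid.items():
    # six.integer_types is (int, long) on Python 2 and (int,) on Python 3,
    # so the same isinstance() check runs under both interpreters.
    if not isinstance(pk, six.integer_types):
        raise ValueError("Inappropriate value: nameid[%s]: %s" % (name, pk))
```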
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-09 05:46
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-20 09:53
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
 import django.db.models.deletion
@@ -57,7 +59,7 @@ class Migration(migrations.Migration):
         ),
         migrations.AddIndex(
             model_name='docevent',
-            index=models.Index(fields=[b'type', b'doc2'], name='doc_doceven_type_ac7748_idx'),
+            index=models.Index(fields=['type', 'doc2'], name='doc_doceven_type_ac7748_idx'),
         ),
         # The following 9 migrations are related to the m2m fields on Document
         # Remove the intermediary model field pointing to Document.name
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-21 05:31
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
@@ -63,7 +65,7 @@ class Migration(migrations.Migration):
         ),
         migrations.AddIndex(
             model_name='docevent',
-            index=models.Index(fields=[b'type', b'doc'], name='doc_doceven_type_43e53e_idx'),
+            index=models.Index(fields=['type', 'doc'], name='doc_doceven_type_43e53e_idx'),
         ),
         # Add back the m2m field we removed in 0018_...
         migrations.AddField(
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-21 05:31
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 import sys, time
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-05-30 03:36
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-06-10 03:47
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django.db import migrations, models
 import django.db.models.deletion
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.20 on 2019-06-10 04:36
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 import sys
@@ -1,8 +1,10 @@
-# Copyright The IETF Trust 2007-2019, All Rights Reserved
+# Copyright The IETF Trust 2010-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import, print_function, unicode_literals
 
 import datetime
 import logging
+import io
 import os
 import rfc2html
 import six
@@ -15,6 +17,7 @@ from django.core.validators import URLValidator, RegexValidator
 from django.urls import reverse as urlreverse
 from django.contrib.contenttypes.models import ContentType
 from django.conf import settings
+from django.utils.encoding import python_2_unicode_compatible, force_text
 from django.utils.html import mark_safe
 
 import debug # pyflakes:ignore
@@ -34,11 +37,12 @@ from ietf.utils.models import ForeignKey
 
 logger = logging.getLogger('django')
 
+@python_2_unicode_compatible
 class StateType(models.Model):
     slug = models.CharField(primary_key=True, max_length=30) # draft, draft-iesg, charter, ...
     label = models.CharField(max_length=255, help_text="Label that should be used (e.g. in admin) for state drop-down for this type of state") # State, IESG state, WG state, ...
 
-    def __unicode__(self):
+    def __str__(self):
         return self.slug
 
 @checks.register('db-consistency')
@@ -55,6 +59,7 @@ def check_statetype_slugs(app_configs, **kwargs):
             ))
     return errors
 
+@python_2_unicode_compatible
 class State(models.Model):
     type = ForeignKey(StateType)
     slug = models.SlugField()
@@ -65,7 +70,7 @@ class State(models.Model):
 
     next_states = models.ManyToManyField('State', related_name="previous_states", blank=True)
 
-    def __unicode__(self):
+    def __str__(self):
         return self.name
 
     class Meta:
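These model hunks rename `__unicode__` to `__str__` and add Django's `python_2_unicode_compatible` decorator so one method definition serves both interpreters. A standalone sketch of the pattern (the class here is invented, not a datatracker model):

```python
# ExampleThing is hypothetical; only the decorator and the __str__-only
# method mirror the change in the diff above.
from __future__ import unicode_literals

from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class ExampleThing(object):
    def __init__(self, slug):
        self.slug = slug

    # Define only __str__ returning text; on Python 2 the decorator renames
    # it to __unicode__ and installs a bytes-returning __str__, on Python 3
    # it is a no-op.
    def __str__(self):
        return self.slug
```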
@@ -372,7 +377,7 @@ class DocumentInfo(models.Model):
             return self.rfc_number()
 
     def author_list(self):
-        return u", ".join(author.email_id for author in self.documentauthor_set.all() if author.email_id)
+        return ", ".join(author.email_id for author in self.documentauthor_set.all() if author.email_id)
 
     def authors(self):
         return [ a.person for a in self.documentauthor_set.all() ]
@@ -407,7 +412,7 @@ class DocumentInfo(models.Model):
 
     def relations_that(self, relationship):
         """Return the related-document objects that describe a given relationship targeting self."""
-        if isinstance(relationship, str):
+        if isinstance(relationship, six.string_types):
             relationship = ( relationship, )
         if not isinstance(relationship, tuple):
             raise TypeError("Expected a string or tuple, received %s" % type(relationship))
@@ -482,7 +487,7 @@ class DocumentInfo(models.Model):
         if ext != '.txt' and os.path.exists(txtpath):
             path = txtpath
         try:
-            with open(path, 'rb') as file:
+            with io.open(path, 'rb') as file:
                 raw = file.read()
         except IOError:
             return None
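`io.open` behaves identically on 2.7 and 3.x (on 3 it is the built-in `open`), which gives one code path for text and binary modes. A small sketch, with a throwaway file name and content:

```python
# Illustrative file name and content only.
from __future__ import unicode_literals
import io

with io.open("example.txt", "w", encoding="utf-8") as f:
    f.write("draft-example-00\n")      # accepts text on both 2.7 and 3.x

with io.open("example.txt", "rb") as f:
    raw = f.read()                      # bytes on both interpreters

with io.open("example.txt", "r", encoding="utf-8") as f:
    text = f.read()                     # decoded text on both interpreters
```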
@@ -524,13 +529,14 @@ class DocumentInfo(models.Model):
 
 STATUSCHANGE_RELATIONS = ('tops','tois','tohist','toinf','tobcp','toexp')
 
+@python_2_unicode_compatible
 class RelatedDocument(models.Model):
     source = ForeignKey('Document')
     target = ForeignKey('DocAlias')
     relationship = ForeignKey(DocRelationshipName)
     def action(self):
         return self.relationship.name
-    def __unicode__(self):
+    def __str__(self):
         return u"%s %s %s" % (self.source.name, self.relationship.name.lower(), self.target.name)
 
     def is_downref(self):
@@ -597,10 +603,11 @@ class DocumentAuthorInfo(models.Model):
         abstract = True
         ordering = ["document", "order"]
 
+@python_2_unicode_compatible
 class DocumentAuthor(DocumentAuthorInfo):
     document = ForeignKey('Document')
 
-    def __unicode__(self):
+    def __str__(self):
         return u"%s %s (%s)" % (self.document.name, self.person, self.order)
 
 
@@ -610,10 +617,11 @@ validate_docname = RegexValidator(
     'invalid'
 )
 
+@python_2_unicode_compatible
 class Document(DocumentInfo):
     name = models.CharField(max_length=255, validators=[validate_docname,], unique=True) # immutable
 
-    def __unicode__(self):
+    def __str__(self):
         return self.name
 
     def get_absolute_url(self):
@@ -641,10 +649,10 @@ class Document(DocumentInfo):
         return self._cached_absolute_url
 
     def file_tag(self):
-        return u"<%s>" % self.filename_with_rev()
+        return "<%s>" % self.filename_with_rev()
 
     def filename_with_rev(self):
-        return u"%s-%s.txt" % (self.name, self.rev)
+        return "%s-%s.txt" % (self.name, self.rev)
 
     def latest_event(self, *args, **filter_args):
         """Get latest event of optional Python type and with filter
@@ -845,21 +853,24 @@ class DocumentURL(models.Model):
     desc = models.CharField(max_length=255, default='', blank=True)
     url = models.URLField(max_length=2083) # 2083 is the legal max for URLs
 
+@python_2_unicode_compatible
 class RelatedDocHistory(models.Model):
     source = ForeignKey('DocHistory')
     target = ForeignKey('DocAlias', related_name="reversely_related_document_history_set")
     relationship = ForeignKey(DocRelationshipName)
-    def __unicode__(self):
+    def __str__(self):
         return u"%s %s %s" % (self.source.doc.name, self.relationship.name.lower(), self.target.name)
 
+@python_2_unicode_compatible
 class DocHistoryAuthor(DocumentAuthorInfo):
     # use same naming convention as non-history version to make it a bit
     # easier to write generic code
     document = ForeignKey('DocHistory', related_name="documentauthor_set")
 
-    def __unicode__(self):
+    def __str__(self):
         return u"%s %s (%s)" % (self.document.doc.name, self.person, self.order)
 
+@python_2_unicode_compatible
 class DocHistory(DocumentInfo):
     doc = ForeignKey(Document, related_name="history_set")
     # the name here is used to capture the canonical name at the time
@@ -868,8 +879,8 @@ class DocHistory(DocumentInfo):
     # property
     name = models.CharField(max_length=255)
 
-    def __unicode__(self):
-        return unicode(self.doc.name)
+    def __str__(self):
+        return force_text(self.doc.name)
 
     def canonical_name(self):
         if hasattr(self, '_canonical_name'):
@@ -904,6 +915,7 @@ class DocHistory(DocumentInfo):
         verbose_name = "document history"
         verbose_name_plural = "document histories"
 
+@python_2_unicode_compatible
 class DocAlias(models.Model):
     """This is used for documents that may appear under multiple names,
     and in particular for RFCs, which for continuity still keep the
@@ -917,8 +929,8 @@ class DocAlias(models.Model):
     def document(self):
         return self.docs.first()
 
-    def __unicode__(self):
-        return "%s-->%s" % (self.name, ','.join([unicode(d.name) for d in self.docs.all() if isinstance(d, Document) ]))
+    def __str__(self):
+        return u"%s-->%s" % (self.name, ','.join([force_text(d.name) for d in self.docs.all() if isinstance(d, Document) ]))
     document_link = admin_link("document")
     class Meta:
         verbose_name = "document alias"
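The built-in `unicode()` does not exist on Python 3, so the hunks above replace it with Django's `force_text`, which returns text on both interpreters. A quick sketch with illustrative values:

```python
# Illustrative values; force_text is the Django 1.11 helper used above.
from __future__ import unicode_literals
from django.utils.encoding import force_text

assert force_text(b"draft-example-00") == "draft-example-00"  # bytes -> text
assert force_text("rfc9999") == "rfc9999"                      # text passes through
assert force_text(42) == "42"                                  # non-strings are stringified
```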
@@ -1007,6 +1019,7 @@ EVENT_TYPES = [
     ("downref_approved", "Downref approved"),
     ]
 
+@python_2_unicode_compatible
 class DocEvent(models.Model):
     """An occurrence for a document, used for tracking who, when and what."""
     time = models.DateTimeField(default=datetime.datetime.now, help_text="When the event happened", db_index=True)
@@ -1023,7 +1036,7 @@ class DocEvent(models.Model):
     def get_dochistory(self):
         return DocHistory.objects.filter(time__lte=self.time,doc__name=self.doc.name).order_by('-time', '-pk').first()
 
-    def __unicode__(self):
+    def __str__(self):
         return u"%s %s by %s at %s" % (self.doc.name, self.get_type_display().lower(), self.by.plain_name(), self.time)
 
     def save(self, *args, **kwargs):
@@ -1047,6 +1060,7 @@ class ConsensusDocEvent(DocEvent):
     consensus = models.NullBooleanField(default=None)
 
 # IESG events
+@python_2_unicode_compatible
 class BallotType(models.Model):
     doc_type = ForeignKey(DocTypeName, blank=True, null=True)
     slug = models.SlugField()
@@ -1056,7 +1070,7 @@ class BallotType(models.Model):
     order = models.IntegerField(default=0)
     positions = models.ManyToManyField(BallotPositionName, blank=True)
 
-    def __unicode__(self):
+    def __str__(self):
         return u"%s: %s" % (self.name, self.doc_type.name)
 
     class Meta:
@@ -1177,13 +1191,14 @@ class SubmissionDocEvent(DocEvent):
     submission = ForeignKey(ietf.submit.models.Submission)
 
 # dumping store for removed events
+@python_2_unicode_compatible
 class DeletedEvent(models.Model):
     content_type = ForeignKey(ContentType)
     json = models.TextField(help_text="Deleted object in JSON format, with attribute names chosen to be suitable for passing into the relevant create method.")
     by = ForeignKey(Person)
     time = models.DateTimeField(default=datetime.datetime.now)
 
-    def __unicode__(self):
+    def __str__(self):
         return u"%s by %s %s" % (self.content_type, self.by, self.time)
 
 class EditedAuthorsDocEvent(DocEvent):
@@ -1,6 +1,8 @@
 # Copyright The IETF Trust 2014-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 # Autogenerated by the makeresources management command 2015-10-19 12:29 PDT
+
+from __future__ import absolute_import, print_function, unicode_literals
 from ietf.api import ModelResource
 from ietf.api import ToOneField
 from tastypie.fields import ToManyField, CharField
@@ -1,10 +1,13 @@
-# Copyright The IETF Trust 2007, All Rights Reserved
+# Copyright The IETF Trust 2007-2019, All Rights Reserved
+# -*- coding: utf-8 -*-
 
 
+from __future__ import absolute_import, print_function, unicode_literals
+
 import bleach
 import datetime
 import re
-import types
+import six
 
 from email.utils import parseaddr
 
@@ -14,6 +17,7 @@ from django.utils.html import escape
 from django.template.defaultfilters import truncatewords_html, linebreaksbr, stringfilter, striptags
 from django.utils.safestring import mark_safe, SafeData
 from django.utils.html import strip_tags
+from django.utils.encoding import force_text
 
 import debug # pyflakes:ignore
 
@@ -47,24 +51,24 @@ def parse_email_list(value):
 
     Splitting a string of email addresses should return a list:
 
-    >>> unicode(parse_email_list('joe@example.org, fred@example.com'))
-    u'<a href="mailto:joe@example.org">joe@example.org</a>, <a href="mailto:fred@example.com">fred@example.com</a>'
+    >>> six.ensure_str(parse_email_list('joe@example.org, fred@example.com'))
+    '<a href="mailto:joe@example.org">joe@example.org</a>, <a href="mailto:fred@example.com">fred@example.com</a>'
 
     Parsing a non-string should return the input value, rather than fail:
 
-    >>> parse_email_list(['joe@example.org', 'fred@example.com'])
+    >>> [ six.ensure_str(e) for e in parse_email_list(['joe@example.org', 'fred@example.com']) ]
     ['joe@example.org', 'fred@example.com']
 
     Null input values should pass through silently:
 
-    >>> parse_email_list('')
+    >>> six.ensure_str(parse_email_list(''))
     ''
 
     >>> parse_email_list(None)
 
 
     """
-    if value and isinstance(value, (types.StringType,types.UnicodeType)): # testing for 'value' being true isn't necessary; it's a fast-out route
+    if value and isinstance(value, (six.binary_type, six.text_type)): # testing for 'value' being true isn't necessary; it's a fast-out route
         addrs = re.split(", ?", value)
         ret = []
         for addr in addrs:
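Wrapping the doctest calls in `six.ensure_str()` makes the expected output look the same under both interpreters, since there is no `u'...'` or `b'...'` prefix on the native string type. A minimal sketch with illustrative strings:

```python
# six.ensure_str() returns the native str type: it encodes unicode to bytes
# on Python 2 and decodes bytes to text on Python 3, so repr() shows no
# u'' / b'' prefix on either interpreter.
import six

print(repr(six.ensure_str(u"joe@example.org")))   # 'joe@example.org' on 2 and 3
print(repr(six.ensure_str(b"joe@example.org")))   # 'joe@example.org' on 2 and 3
```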
@@ -88,7 +92,7 @@ def strip_email(value):
 @register.filter(name='fix_angle_quotes')
 def fix_angle_quotes(value):
     if "<" in value:
-        value = re.sub("<([\w\-\.]+@[\w\-\.]+)>", "&lt;\1&gt;", value)
+        value = re.sub(r"<([\w\-\.]+@[\w\-\.]+)>", "&lt;\1&gt;", value)
     return value
 
 # there's an "ahref -> a href" in GEN_UTIL
@@ -98,7 +102,7 @@ def make_one_per_line(value):
     """
     Turn a comma-separated list into a carriage-return-seperated list.
 
-    >>> make_one_per_line("a, b, c")
+    >>> six.ensure_str(make_one_per_line("a, b, c"))
     'a\\nb\\nc'
 
     Pass through non-strings:
@@ -109,7 +113,7 @@ def make_one_per_line(value):
     >>> make_one_per_line(None)
 
     """
-    if value and isinstance(value, (types.StringType,types.UnicodeType)):
+    if value and isinstance(value, (six.binary_type, six.text_type)):
         return re.sub(", ?", "\n", value)
     else:
         return value
@@ -145,9 +149,9 @@ def sanitize(value):
 @register.filter(name='bracket')
 def square_brackets(value):
     """Adds square brackets around text."""
-    if isinstance(value, (types.StringType,types.UnicodeType)):
+    if isinstance(value, (six.binary_type, six.text_type)):
         if value == "":
             value = " "
         return "[ %s ]" % value
     elif value > 0:
         return "[ X ]"
@@ -195,7 +199,7 @@ def rfcnospace(string):
 @register.filter
 def prettystdname(string):
     from ietf.doc.utils import prettify_std_name
-    return prettify_std_name(unicode(string or ""))
+    return prettify_std_name(force_text(string or ""))
 
 @register.filter(name='rfcurl')
 def rfclink(string):
@@ -213,13 +217,13 @@ def urlize_ietf_docs(string, autoescape=None):
     """
     if autoescape and not isinstance(string, SafeData):
         string = escape(string)
-    string = re.sub("(?<!>)(RFC ?)0{0,3}(\d+)", "<a href=\"/doc/rfc\\2/\">\\1\\2</a>", string)
-    string = re.sub("(?<!>)(BCP ?)0{0,3}(\d+)", "<a href=\"/doc/bcp\\2/\">\\1\\2</a>", string)
-    string = re.sub("(?<!>)(STD ?)0{0,3}(\d+)", "<a href=\"/doc/std\\2/\">\\1\\2</a>", string)
-    string = re.sub("(?<!>)(FYI ?)0{0,3}(\d+)", "<a href=\"/doc/fyi\\2/\">\\1\\2</a>", string)
-    string = re.sub("(?<!>)(draft-[-0-9a-zA-Z._+]+)", "<a href=\"/doc/\\1/\">\\1</a>", string)
-    string = re.sub("(?<!>)(conflict-review-[-0-9a-zA-Z._+]+)", "<a href=\"/doc/\\1/\">\\1</a>", string)
-    string = re.sub("(?<!>)(status-change-[-0-9a-zA-Z._+]+)", "<a href=\"/doc/\\1/\">\\1</a>", string)
+    string = re.sub(r"(?<!>)(RFC ?)0{0,3}(\d+)", "<a href=\"/doc/rfc\\2/\">\\1\\2</a>", string)
+    string = re.sub(r"(?<!>)(BCP ?)0{0,3}(\d+)", "<a href=\"/doc/bcp\\2/\">\\1\\2</a>", string)
+    string = re.sub(r"(?<!>)(STD ?)0{0,3}(\d+)", "<a href=\"/doc/std\\2/\">\\1\\2</a>", string)
+    string = re.sub(r"(?<!>)(FYI ?)0{0,3}(\d+)", "<a href=\"/doc/fyi\\2/\">\\1\\2</a>", string)
+    string = re.sub(r"(?<!>)(draft-[-0-9a-zA-Z._+]+)", "<a href=\"/doc/\\1/\">\\1</a>", string)
+    string = re.sub(r"(?<!>)(conflict-review-[-0-9a-zA-Z._+]+)", "<a href=\"/doc/\\1/\">\\1</a>", string)
+    string = re.sub(r"(?<!>)(status-change-[-0-9a-zA-Z._+]+)", "<a href=\"/doc/\\1/\">\\1</a>", string)
     return mark_safe(string)
 urlize_ietf_docs = stringfilter(urlize_ietf_docs)
 
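Prefixing the patterns with `r` avoids the Python 3.6+ deprecation warnings for unrecognized escape sequences such as `\d` and `\w` in ordinary string literals, without changing what the regular expressions match. A small sketch with an illustrative input string:

```python
import re

# "\d" in a plain literal only works because Python currently leaves unknown
# escapes alone; newer Python 3 releases warn about it.  A raw string makes
# the intent explicit and keeps the pattern identical.
plain = "(RFC ?)0{0,3}(\\d+)"     # doubled backslash needed to stay clean
raw   = r"(RFC ?)0{0,3}(\d+)"     # same pattern, no escaping games

assert re.search(raw, "see RFC 2026").group(2) == "2026"
```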
@@ -338,7 +342,7 @@ def expires_soon(x,request):
 
 @register.filter(name='startswith')
 def startswith(x, y):
-    return unicode(x).startswith(y)
+    return six.text_type(x).startswith(y)
 
 @register.filter
 def has_role(user, role_names):
@@ -377,14 +381,14 @@ def format_snippet(text, trunc_words=25):
     full = keep_spacing(collapsebr(linebreaksbr(mark_safe(sanitize_fragment(text)))))
     snippet = truncatewords_html(full, trunc_words)
     if snippet != full:
-        return mark_safe(u'<div class="snippet">%s<button class="btn btn-xs btn-default show-all"><span class="fa fa-caret-down"></span></button></div><div class="hidden full">%s</div>' % (snippet, full))
+        return mark_safe('<div class="snippet">%s<button class="btn btn-xs btn-default show-all"><span class="fa fa-caret-down"></span></button></div><div class="hidden full">%s</div>' % (snippet, full))
     return full
 
 @register.simple_tag
 def doc_edit_button(url_name, *args, **kwargs):
     """Given URL name/args/kwargs, looks up the URL just like "url" tag and returns a properly formatted button for the document material tables."""
     from django.urls import reverse as urlreverse
-    return mark_safe(u'<a class="btn btn-default btn-xs" href="%s">Edit</a>' % (urlreverse(url_name, args=args, kwargs=kwargs)))
+    return mark_safe('<a class="btn btn-default btn-xs" href="%s">Edit</a>' % (urlreverse(url_name, args=args, kwargs=kwargs)))
 
 @register.filter
 def textify(text):
@@ -419,7 +423,7 @@ if __name__ == "__main__":
     _test()
 
 @register.filter
-def plural(text, seq, arg=u's'):
+def plural(text, seq, arg='s'):
     "Similar to pluralize, but looks at the text, too"
     from django.template.defaultfilters import pluralize
     if text.endswith('s'):
@@ -461,8 +465,8 @@ def capfirst_allcaps(text):
     """Like capfirst, except it doesn't lowercase words in ALL CAPS."""
     result = text
     i = False
-    for token in re.split("(\W+)", striptags(text)):
-        if not re.match("^[A-Z]+$", token):
+    for token in re.split(r"(\W+)", striptags(text)):
+        if not re.match(r"^[A-Z]+$", token):
             if not i:
                 result = result.replace(token, token.capitalize())
                 i = True
@@ -474,8 +478,8 @@ def capfirst_allcaps(text):
 def lower_allcaps(text):
     """Like lower, except it doesn't lowercase words in ALL CAPS."""
     result = text
-    for token in re.split("(\W+)", striptags(text)):
-        if not re.match("^[A-Z]+$", token):
+    for token in re.split(r"(\W+)", striptags(text)):
+        if not re.match(r"^[A-Z]+$", token):
             result = result.replace(token, token.lower())
     return result
 
@@ -505,9 +509,9 @@ def nbsp(value):
 @register.filter()
 def comma_separated_list(seq, end_word="and"):
     if len(seq) < 2:
-        return u"".join(seq)
+        return "".join(seq)
     else:
-        return u", ".join(seq[:-1]) + u" %s %s"%(end_word, seq[-1])
+        return ", ".join(seq[:-1]) + " %s %s"%(end_word, seq[-1])
 
 @register.filter()
 def zaptmp(s):
@@ -515,7 +519,7 @@ def zaptmp(s):
 
 @register.filter()
 def rfcbis(s):
-    m = re.search('^.*-rfc(\d+)-?bis(-.*)?$', s)
+    m = re.search(r'^.*-rfc(\d+)-?bis(-.*)?$', s)
     return None if m is None else 'rfc' + m.group(1)
 
 @register.filter
@@ -1,6 +1,8 @@
 # Copyright The IETF Trust 2016-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+
+
+from __future__ import absolute_import, print_function, unicode_literals
 
 from django import template
 
@@ -1,20 +1,25 @@
 # Copyright The IETF Trust 2012-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
 
 
+from __future__ import absolute_import, print_function, unicode_literals
+
 import os
 import shutil
 import datetime
-import json
+import io
 import sys
-import urlparse
 import bibtexparser
 
 if sys.version_info[0] == 2 and sys.version_info[1] < 7:
     import unittest2 as unittest
 else:
     import unittest
 
+from six.moves.http_cookies import SimpleCookie
 from pyquery import PyQuery
+from six.moves.urllib.parse import urlparse, parse_qs
 from tempfile import NamedTemporaryFile
-from Cookie import SimpleCookie
 
 from django.urls import reverse as urlreverse
 from django.conf import settings
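`urlparse` and `Cookie` are stdlib modules that were renamed in Python 3 (`urllib.parse`, `http.cookies`); importing them through `six.moves` gives one import line that resolves correctly on both interpreters. A sketch with an illustrative URL and cookie value:

```python
# Illustrative URL and cookie value only; six.moves maps these names to
# urlparse/Cookie on Python 2 and to urllib.parse/http.cookies on Python 3.
from six.moves.urllib.parse import urlparse, parse_qs
from six.moves.http_cookies import SimpleCookie

parsed = urlparse("https://datatracker.example.org/doc/search/?name=draft-foo&rfcs=on")
assert parsed.path == "/doc/search/"
assert parse_qs(parsed.query)["name"] == ["draft-foo"]

cookie = SimpleCookie()
cookie["sessionid"] = "abc123"   # made-up value
```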
@@ -56,72 +61,72 @@ class SearchTests(TestCase):
         # no match
         r = self.client.get(base_url + "?activedrafts=on&name=thisisnotadocumentname")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("no documents match" in str(r.content).lower())
+        self.assertContains(r, "No documents match")
 
         r = self.client.get(base_url + "?rfcs=on&name=xyzzy")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("no documents match" in unicontent(r).lower())
+        self.assertContains(r, "No documents match")
 
         r = self.client.get(base_url + "?olddrafts=on&name=bar")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("no documents match" in unicontent(r).lower())
+        self.assertContains(r, "No documents match")
 
         r = self.client.get(base_url + "?olddrafts=on&name=foo")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("draft-foo-mars-test" in unicontent(r).lower())
+        self.assertContains(r, "draft-foo-mars-test")
 
         # find by rfc/active/inactive
         draft.set_state(State.objects.get(type="draft", slug="rfc"))
         r = self.client.get(base_url + "?rfcs=on&name=%s" % draft.name)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         draft.set_state(State.objects.get(type="draft", slug="active"))
         r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.name)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         draft.set_state(State.objects.get(type="draft", slug="expired"))
         r = self.client.get(base_url + "?olddrafts=on&name=%s" % draft.name)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         draft.set_state(State.objects.get(type="draft", slug="active"))
 
         # find by title
         r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.title.split()[0])
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         # find by author
         r = self.client.get(base_url + "?activedrafts=on&by=author&author=%s" % draft.documentauthor_set.first().person.name_parts()[1])
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         # find by group
         r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         # find by area
         r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         # find by area
         r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         # find by AD
         r = self.client.get(base_url + "?activedrafts=on&by=ad&ad=%s" % draft.ad_id)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
         # find by IESG state
         r = self.client.get(base_url + "?activedrafts=on&by=state&state=%s&substate=" % draft.get_state("draft-iesg").pk)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
     def test_search_for_name(self):
         draft = WgDraftFactory(name='draft-ietf-mars-test',group=GroupFactory(acronym='mars',parent=Group.objects.get(acronym='farfut')),authors=[PersonFactory()],ad=PersonFactory())
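`assertContains` checks the status code and searches the decoded response body, so it replaces the `str(r.content)` / `unicontent(r)` membership checks that behaved differently for bytes and text across Python versions. A hypothetical test sketch; the URL and expected strings are made up, only the assertion style mirrors the change above:

```python
# Hypothetical test case and URL; only the assertContains/assertNotContains
# calls illustrate the idiom adopted in the diff.
from django.test import TestCase


class ExampleSearchTests(TestCase):
    def test_no_match_message(self):
        r = self.client.get("/doc/search/?activedrafts=on&name=thisisnotadocumentname")
        # Checks status_code == 200 and that the decoded body contains the
        # text, on both Python 2 and Python 3 -- no bytes/str juggling needed.
        self.assertContains(r, "No documents match")
        self.assertNotContains(r, "draft-ietf-example-00")
```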
@@ -129,9 +134,9 @@ class SearchTests(TestCase):
         CharterFactory(group=draft.group,name='charter-ietf-mars')
         DocumentFactory(type_id='conflrev',name='conflict-review-imaginary-irtf-submission')
         DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review')
-        DocumentFactory(type_id='agenda',name='agenda-42-mars')
-        DocumentFactory(type_id='minutes',name='minutes-42-mars')
-        DocumentFactory(type_id='slides',name='slides-42-mars')
+        DocumentFactory(type_id='agenda',name='agenda-72-mars')
+        DocumentFactory(type_id='minutes',name='minutes-72-mars')
+        DocumentFactory(type_id='slides',name='slides-72-mars')
 
         draft.save_with_history([DocEvent.objects.create(doc=draft, rev=draft.rev, type="changed_document", by=Person.objects.get(user__username="secretary"), desc="Test")])
 
@@ -142,76 +147,76 @@ class SearchTests(TestCase):
         # exact match
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name=draft.name)))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 
         # prefix match
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(draft.name.split("-")[:-1]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 
         # non-prefix match
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(draft.name.split("-")[1:]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 
         # other doctypes than drafts
         doc = Document.objects.get(name='charter-ietf-mars')
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name='charter-ietf-ma')))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 
         doc = Document.objects.filter(name__startswith='conflict-review-').first()
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(doc.name.split("-")[:-1]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 
         doc = Document.objects.filter(name__startswith='status-change-').first()
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(doc.name.split("-")[:-1]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 
         doc = Document.objects.filter(name__startswith='agenda-').first()
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(doc.name.split("-")[:-1]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 
         doc = Document.objects.filter(name__startswith='minutes-').first()
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(doc.name.split("-")[:-1]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 
         doc = Document.objects.filter(name__startswith='slides-').first()
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="-".join(doc.name.split("-")[:-1]))))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 
         # match with revision
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name=draft.name + "-" + prev_rev)))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name, rev=prev_rev)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name, rev=prev_rev)))
 
         # match with non-existing revision
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name=draft.name + "-09")))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 
         # match with revision and extension
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name=draft.name + "-" + prev_rev + ".txt")))
         self.assertEqual(r.status_code, 302)
-        self.assertEqual(urlparse.urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name, rev=prev_rev)))
+        self.assertEqual(urlparse(r["Location"]).path, urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name, rev=prev_rev)))
 
         # no match
         r = self.client.get(urlreverse('ietf.doc.views_search.search_for_name', kwargs=dict(name="draft-ietf-doesnotexist-42")))
         self.assertEqual(r.status_code, 302)
 
-        parsed = urlparse.urlparse(r["Location"])
+        parsed = urlparse(r["Location"])
         self.assertEqual(parsed.path, urlreverse('ietf.doc.views_search.search'))
-        self.assertEqual(urlparse.parse_qs(parsed.query)["name"][0], "draft-ietf-doesnotexist-42")
+        self.assertEqual(parse_qs(parsed.query)["name"][0], "draft-ietf-doesnotexist-42")
 
     def test_frontpage(self):
         r = self.client.get("/")
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("Document Search" in unicontent(r))
+        self.assertContains(r, "Document Search")
 
     def test_docs_for_ad(self):
         ad = PersonFactory()
@@ -229,13 +234,11 @@ class SearchTests(TestCase):
 
         r = self.client.get(urlreverse('ietf.doc.views_search.docs_for_ad', kwargs=dict(name=ad.full_name_as_key())))
         self.assertEqual(r.status_code, 200)
-        response_content = unicontent(r)
-        #debug.show('response_content')
-        self.assertTrue(draft.name in response_content)
-        self.assertTrue(rfc.canonical_name() in response_content)
-        self.assertTrue(conflrev.name in response_content)
-        self.assertTrue(statchg.name in response_content)
-        self.assertTrue(charter.name in response_content)
+        self.assertContains(r, draft.name)
+        self.assertContains(r, rfc.canonical_name())
+        self.assertContains(r, conflrev.name)
+        self.assertContains(r, statchg.name)
+        self.assertContains(r, charter.name)
 
     def test_drafts_in_last_call(self):
@@ -243,7 +246,7 @@ class SearchTests(TestCase):
         draft.set_state(State.objects.get(type="draft-iesg", slug="lc"))
         r = self.client.get(urlreverse('ietf.doc.views_search.drafts_in_last_call'))
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
     def test_in_iesg_process(self):
         doc_in_process = IndividualDraftFactory()
@@ -251,8 +254,8 @@ class SearchTests(TestCase):
         doc_not_in_process = IndividualDraftFactory()
         r = self.client.get(urlreverse('ietf.doc.views_search.drafts_in_iesg_process'))
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(doc_in_process.title in unicontent(r))
-        self.assertFalse(doc_not_in_process.title in unicontent(r))
+        self.assertContains(r, doc_in_process.title)
+        self.assertNotContains(r, doc_not_in_process.title)
 
     def test_indexes(self):
         draft = IndividualDraftFactory()
@@ -260,12 +263,12 @@ class SearchTests(TestCase):
 
         r = self.client.get(urlreverse('ietf.doc.views_search.index_all_drafts'))
         self.assertEqual(r.status_code, 200)
-        self.assertIn(draft.name, unicontent(r))
-        self.assertIn(rfc.canonical_name().upper(),unicontent(r))
+        self.assertContains(r, draft.name)
+        self.assertContains(r, rfc.canonical_name().upper())
 
         r = self.client.get(urlreverse('ietf.doc.views_search.index_active_drafts'))
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(draft.title in unicontent(r))
+        self.assertContains(r, draft.title)
 
     def test_ajax_search_docs(self):
         draft = IndividualDraftFactory()
@ -277,7 +280,7 @@ class SearchTests(TestCase):
|
||||||
})
|
})
|
||||||
r = self.client.get(url, dict(q=draft.name))
|
r = self.client.get(url, dict(q=draft.name))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
data = json.loads(r.content)
|
data = r.json()
|
||||||
self.assertEqual(data[0]["id"], draft.pk)
|
self.assertEqual(data[0]["id"], draft.pk)
|
||||||
|
|
||||||
# DocAlias
|
# DocAlias
|
||||||
|
@ -290,7 +293,7 @@ class SearchTests(TestCase):
|
||||||
|
|
||||||
r = self.client.get(url, dict(q=doc_alias.name))
|
r = self.client.get(url, dict(q=doc_alias.name))
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
data = json.loads(r.content)
|
data = r.json()
|
||||||
self.assertEqual(data[0]["id"], doc_alias.pk)
|
self.assertEqual(data[0]["id"], doc_alias.pk)
|
||||||
|
|
||||||
def test_recent_drafts(self):
|
def test_recent_drafts(self):
|
||||||
|
@ -489,7 +492,7 @@ Man Expires September 22, 2015 [Page 3]
|
||||||
settings.INTERNET_DRAFT_PATH = self.id_dir
|
settings.INTERNET_DRAFT_PATH = self.id_dir
|
||||||
self.saved_internet_all_drafts_archive_dir = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR
|
self.saved_internet_all_drafts_archive_dir = settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR
|
||||||
settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR = self.id_dir
|
settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR = self.id_dir
|
||||||
f = open(os.path.join(self.id_dir, 'draft-ietf-mars-test-01.txt'), 'w')
|
f = io.open(os.path.join(self.id_dir, 'draft-ietf-mars-test-01.txt'), 'w')
|
||||||
f.write(self.draft_text)
|
f.write(self.draft_text)
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
|
@@ -509,53 +512,53 @@ Man Expires September 22, 2015 [Page 3]

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertTrue("Show full document text" in unicontent(r))
-self.assertFalse("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertContains(r, "Show full document text")
+self.assertNotContains(r, "Deimos street")

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)) + "?include_text=0")
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertFalse("Show full document text" in unicontent(r))
-self.assertTrue("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertNotContains(r, "Show full document text")
+self.assertContains(r, "Deimos street")

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)) + "?include_text=foo")
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertFalse("Show full document text" in unicontent(r))
-self.assertTrue("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertNotContains(r, "Show full document text")
+self.assertContains(r, "Deimos street")

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)) + "?include_text=1")
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertFalse("Show full document text" in unicontent(r))
-self.assertTrue("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertNotContains(r, "Show full document text")
+self.assertContains(r, "Deimos street")

-self.client.cookies = SimpleCookie({'full_draft': 'on'})
+self.client.cookies = SimpleCookie({str('full_draft'): str('on')})
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertFalse("Show full document text" in unicontent(r))
-self.assertTrue("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertNotContains(r, "Show full document text")
+self.assertContains(r, "Deimos street")

-self.client.cookies = SimpleCookie({'full_draft': 'off'})
+self.client.cookies = SimpleCookie({str('full_draft'): str('off')})
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertTrue("Show full document text" in unicontent(r))
-self.assertFalse("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertContains(r, "Show full document text")
+self.assertNotContains(r, "Deimos street")

-self.client.cookies = SimpleCookie({'full_draft': 'foo'})
+self.client.cookies = SimpleCookie({str('full_draft'): str('foo')})
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Active Internet-Draft" in unicontent(r))
-self.assertTrue("Show full document text" in unicontent(r))
-self.assertFalse("Deimos street" in unicontent(r))
+self.assertContains(r, "Active Internet-Draft")
+self.assertContains(r, "Show full document text")
+self.assertNotContains(r, "Deimos street")

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_html", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Versions:" in unicontent(r))
-self.assertTrue("Deimos street" in unicontent(r))
+self.assertContains(r, "Versions:")
+self.assertContains(r, "Deimos street")
 q = PyQuery(r.content)
 self.assertEqual(len(q('.rfcmarkup pre')), 4)
 self.assertEqual(len(q('.rfcmarkup span.h1')), 2)
@@ -569,7 +572,7 @@ Man Expires September 22, 2015 [Page 3]

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Expired Internet-Draft" in unicontent(r))
+self.assertContains(r, "Expired Internet-Draft")

 # replaced draft
 draft.set_state(State.objects.get(type="draft", slug="repl"))
@@ -588,8 +591,8 @@ Man Expires September 22, 2015 [Page 3]

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=draft.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("Replaced Internet-Draft" in unicontent(r))
-self.assertTrue(replacement.name in unicontent(r))
+self.assertContains(r, "Replaced Internet-Draft")
+self.assertContains(r, replacement.name)
 rel.delete()

 # draft published as RFC
@@ -610,8 +613,8 @@ Man Expires September 22, 2015 [Page 3]

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc_alias.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("RFC 123456" in unicontent(r))
-self.assertTrue(draft.name in unicontent(r))
+self.assertContains(r, "RFC 123456")
+self.assertContains(r, draft.name)

 # naked RFC - also wierd that we test a PS from the ISE
 rfc = IndividualDraftFactory(
@@ -621,7 +624,7 @@ Man Expires September 22, 2015 [Page 3]
 std_level_id="ps")
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=rfc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("RFC 1234567" in unicontent(r))
+self.assertContains(r, "RFC 1234567")

 # unknown draft
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name="draft-xyz123")))
@@ -631,9 +634,9 @@ Man Expires September 22, 2015 [Page 3]
 IndividualDraftFactory(name='draft-imaginary-independent-submission')
 ConflictReviewFactory(name='conflict-review-imaginary-irtf-submission')
 CharterFactory(name='charter-ietf-mars')
-DocumentFactory(type_id='agenda',name='agenda-42-mars')
-DocumentFactory(type_id='minutes',name='minutes-42-mars')
-DocumentFactory(type_id='slides',name='slides-42-mars-1-active')
+DocumentFactory(type_id='agenda',name='agenda-72-mars')
+DocumentFactory(type_id='minutes',name='minutes-72-mars')
+DocumentFactory(type_id='slides',name='slides-72-mars-1-active')
 statchg = DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review')
 statchg.set_state(State.objects.get(type_id='statchg',slug='adrev'))

@@ -642,12 +645,12 @@ Man Expires September 22, 2015 [Page 3]
 "conflict-review-imaginary-irtf-submission",
 "status-change-imaginary-mid-review",
 "charter-ietf-mars",
-"agenda-42-mars",
-"minutes-42-mars",
-"slides-42-mars-1-active",
+"agenda-72-mars",
+"minutes-72-mars",
+"slides-72-mars-1-active",
 # TODO: add
-#"bluesheets-42-mars-1",
-#"recording-42-mars-1-00",
+#"bluesheets-72-mars-1",
+#"recording-72-mars-1-00",
 ]:
 doc = Document.objects.get(name=docname)
 # give it some history
@@ -658,14 +661,14 @@ Man Expires September 22, 2015 [Page 3]

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("%s-01"%docname in unicontent(r))
+self.assertContains(r, "%s-01"%docname)

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name,rev="01")))
 self.assertEqual(r.status_code, 302)

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name,rev="00")))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("%s-00"%docname in unicontent(r))
+self.assertContains(r, "%s-00"%docname)

 class DocTestCase(TestCase):
 def test_document_charter(self):
@@ -680,7 +683,7 @@ class DocTestCase(TestCase):
 self.assertEqual(r.status_code, 200)

 def test_document_material(self):
-MeetingFactory(type_id='ietf',number='42')
+MeetingFactory(type_id='ietf',number='72')
 mars = GroupFactory(type_id='wg',acronym='mars')
 marschairman = PersonFactory(user__username='marschairman')
 mars.role_set.create(name_id='chair',person=marschairman,email=marschairman.email())
@@ -694,8 +697,8 @@ class DocTestCase(TestCase):
 doc.set_state(State.objects.get(type="slides", slug="active"))

 session = Session.objects.create(
-name = "session-42-mars-1",
-meeting = Meeting.objects.get(number='42'),
+name = "session-72-mars-1",
+meeting = Meeting.objects.get(number='72'),
 group = Group.objects.get(acronym='mars'),
 status = SessionStatusName.objects.create(slug='scheduled', name='Scheduled'),
 modified = datetime.datetime.now(),
@@ -730,12 +733,12 @@ class DocTestCase(TestCase):

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue(pos.comment in unicontent(r))
+self.assertContains(r, pos.comment)

 # test with ballot_id
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name, ballot_id=ballot.pk)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue(pos.comment in unicontent(r))
+self.assertContains(r, pos.comment)

 # test popup too while we're at it
 r = self.client.get(urlreverse("ietf.doc.views_doc.ballot_popup", kwargs=dict(name=doc.name, ballot_id=ballot.pk)))
@@ -748,7 +751,7 @@ class DocTestCase(TestCase):
 doc.save_with_history([e])
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertTrue( '(%s for -%s)' % (pos.comment_time.strftime('%Y-%m-%d'), oldrev) in unicontent(r))
+self.assertContains(r, '(%s for -%s)' % (pos.comment_time.strftime('%Y-%m-%d'), oldrev))

 def test_document_ballot_needed_positions(self):
 # draft
@@ -758,10 +761,10 @@ class DocTestCase(TestCase):
 create_ballot_if_not_open(None, doc, ad, 'approve')

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
-self.assertTrue('more YES or NO' in unicontent(r))
+self.assertContains(r, 'more YES or NO')
 Document.objects.filter(pk=doc.pk).update(intended_std_level='inf')
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
-self.assertFalse('more YES or NO' in unicontent(r))
+self.assertNotContains(r, 'more YES or NO')

 # status change
 DocAlias.objects.create(name='rfc9998').docs.add(IndividualDraftFactory())
@@ -772,29 +775,28 @@ class DocTestCase(TestCase):
 r = self.client.post(urlreverse('ietf.doc.views_status_change.change_state',kwargs=dict(name=doc.name)),dict(new_state=iesgeval_pk))
 self.assertEqual(r.status_code, 302)
 r = self.client.get(r._headers["location"][1])
-self.assertTrue(">IESG Evaluation<" in unicontent(r))
+self.assertContains(r, ">IESG Evaluation<")

 doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'),relationship_id='tohist')
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
-self.assertFalse('Needs a YES' in unicontent(r))
-self.assertFalse('more YES or NO' in unicontent(r))
+self.assertNotContains(r, 'Needs a YES')
+self.assertNotContains(r, 'more YES or NO')

 doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'),relationship_id='tois')
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
-self.assertTrue('more YES or NO' in unicontent(r))
+self.assertContains(r, 'more YES or NO')

 def test_document_json(self):
 doc = IndividualDraftFactory()

 r = self.client.get(urlreverse("ietf.doc.views_doc.document_json", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-data = json.loads(r.content)
+data = r.json()
 self.assertEqual(doc.name, data['name'])
 self.assertEqual(doc.pages,data['pages'])

 def test_writeup(self):
-doc = IndividualDraftFactory(states = [('draft','active'),('draft-iesg','iesg-eva')],
-)
+doc = IndividualDraftFactory(states = [('draft','active'),('draft-iesg','iesg-eva')],)

 appr = WriteupDocEvent.objects.create(
 doc=doc,
@@ -823,9 +825,9 @@ class DocTestCase(TestCase):
 url = urlreverse('ietf.doc.views_doc.document_writeup', kwargs=dict(name=doc.name))
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(appr.text in unicontent(r))
-self.assertTrue(notes.text in unicontent(r))
-self.assertTrue(rfced_note.text in r.content)
+self.assertContains(r, appr.text)
+self.assertContains(r, notes.text)
+self.assertContains(r, rfced_note.text)

 def test_history(self):
 doc = IndividualDraftFactory()
@@ -840,7 +842,7 @@ class DocTestCase(TestCase):
 url = urlreverse('ietf.doc.views_doc.document_history', kwargs=dict(name=doc.name))
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(e.desc in unicontent(r))
+self.assertContains(r, e.desc)

 def test_document_feed(self):
 doc = IndividualDraftFactory()
@@ -854,7 +856,7 @@ class DocTestCase(TestCase):

 r = self.client.get("/feed/document-changes/%s/" % doc.name)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(e.desc in unicontent(r))
+self.assertContains(r, e.desc)

 def test_last_call_feed(self):
 doc = IndividualDraftFactory()
@@ -871,7 +873,7 @@ class DocTestCase(TestCase):

 r = self.client.get("/feed/last-call/")
 self.assertEqual(r.status_code, 200)
-self.assertTrue(doc.name in unicontent(r))
+self.assertContains(r, doc.name)

 def test_rfc_feed(self):
 WgRfcFactory()
@@ -884,7 +886,7 @@ class DocTestCase(TestCase):
 url = urlreverse('ietf.doc.views_help.state_help', kwargs=dict(type="draft-iesg"))
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue(State.objects.get(type="draft-iesg", slug="lc").name in unicontent(r))
+self.assertContains(r, State.objects.get(type="draft-iesg", slug="lc").name)

 def test_document_nonietf_pubreq_button(self):
 doc = IndividualDraftFactory()
@@ -892,17 +894,17 @@ class DocTestCase(TestCase):
 self.client.login(username='iab-chair', password='iab-chair+password')
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertNotIn("Request publication", unicontent(r))
+self.assertNotContains(r, "Request publication")

 Document.objects.filter(pk=doc.pk).update(stream='iab')
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertIn("Request publication", unicontent(r))
+self.assertContains(r, "Request publication")

 doc.states.add(State.objects.get(type_id='draft-stream-iab',slug='rfc-edit'))
 r = self.client.get(urlreverse("ietf.doc.views_doc.document_main", kwargs=dict(name=doc.name)))
 self.assertEqual(r.status_code, 200)
-self.assertNotIn("Request publication", unicontent(r))
+self.assertNotContains(r, "Request publication")


 def test_document_bibtex(self):
@@ -918,12 +920,12 @@ class DocTestCase(TestCase):
 #
 url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=rfc.name))
 r = self.client.get(url)
-entry = bibtexparser.loads(r.content).get_entry_dict()["rfc%s"%num]
-self.assertEqual(entry['series'], u'Request for Comments')
+entry = bibtexparser.loads(unicontent(r)).get_entry_dict()["rfc%s"%num]
+self.assertEqual(entry['series'], 'Request for Comments')
 self.assertEqual(entry['number'], num)
-self.assertEqual(entry['doi'], u'10.17487/RFC%s'%num)
-self.assertEqual(entry['year'], u'2010')
-self.assertEqual(entry['month'], u'oct')
+self.assertEqual(entry['doi'], '10.17487/RFC%s'%num)
+self.assertEqual(entry['year'], '2010')
+self.assertEqual(entry['month'], 'oct')
 #
 self.assertNotIn('day', entry)

@@ -931,28 +933,28 @@ class DocTestCase(TestCase):
 stream_id = 'rse',
 states = [('draft','rfc'),('draft-iesg','pub')],
 std_level_id = 'ind',
-time = datetime.datetime(1990,04,01),
+time = datetime.datetime(1990,0o4,0o1),
 )
 num = april1.rfc_number()
 DocEventFactory.create(doc=april1, type='published_rfc', time = '1990-04-01')
 #
 url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=april1.name))
 r = self.client.get(url)
-entry = bibtexparser.loads(r.content).get_entry_dict()['rfc%s'%num]
-self.assertEqual(entry['series'], u'Request for Comments')
+entry = bibtexparser.loads(unicontent(r)).get_entry_dict()['rfc%s'%num]
+self.assertEqual(entry['series'], 'Request for Comments')
 self.assertEqual(entry['number'], num)
-self.assertEqual(entry['doi'], u'10.17487/RFC%s'%num)
-self.assertEqual(entry['year'], u'1990')
-self.assertEqual(entry['month'], u'apr')
-self.assertEqual(entry['day'], u'1')
+self.assertEqual(entry['doi'], '10.17487/RFC%s'%num)
+self.assertEqual(entry['year'], '1990')
+self.assertEqual(entry['month'], 'apr')
+self.assertEqual(entry['day'], '1')

 draft = IndividualDraftFactory.create()
-docname = u'%s-%s' % (draft.name, draft.rev)
+docname = '%s-%s' % (draft.name, draft.rev)
 bibname = docname[6:] # drop the 'draft-' prefix
 url = urlreverse('ietf.doc.views_doc.document_bibtex', kwargs=dict(name=draft.name))
 r = self.client.get(url)
-entry = bibtexparser.loads(r.content).get_entry_dict()[bibname]
-self.assertEqual(entry['note'], u'Work in Progress')
+entry = bibtexparser.loads(unicontent(r)).get_entry_dict()[bibname]
+self.assertEqual(entry['note'], 'Work in Progress')
 self.assertEqual(entry['number'], docname)
 self.assertEqual(entry['year'], str(draft.pub_date().year))
 self.assertEqual(entry['month'], draft.pub_date().strftime('%b').lower())
@@ -969,7 +971,7 @@ class AddCommentTestCase(TestCase):
 # normal get
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-q = PyQuery(r.content)
+q = PyQuery(unicontent(r))
 self.assertEqual(len(q('form textarea[name=comment]')), 1)

 # request resurrect
@@ -983,9 +985,9 @@ class AddCommentTestCase(TestCase):
 self.assertEqual("This is a test.", draft.latest_event().desc)
 self.assertEqual("added_comment", draft.latest_event().type)
 self.assertEqual(len(outbox), mailbox_before + 1)
-self.assertTrue("Comment added" in outbox[-1]['Subject'])
-self.assertTrue(draft.name in outbox[-1]['Subject'])
-self.assertTrue('draft-ietf-mars-test@' in outbox[-1]['To'])
+self.assertIn("Comment added", outbox[-1]['Subject'])
+self.assertIn(draft.name, outbox[-1]['Subject'])
+self.assertIn('draft-ietf-mars-test@', outbox[-1]['To'])

 # Make sure we can also do it as IANA
 self.client.login(username="iana", password="iana+password")
@@ -993,7 +995,7 @@ class AddCommentTestCase(TestCase):
 # normal get
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-q = PyQuery(r.content)
+q = PyQuery(unicontent(r))
 self.assertEqual(len(q('form textarea[name=comment]')), 1)


@@ -1012,12 +1014,12 @@ class ReferencesTest(TestCase):
 RelatedDocument.objects.get_or_create(source=doc1,target=doc2,relationship=DocRelationshipName.objects.get(slug='refnorm'))
 url = urlreverse('ietf.doc.views_doc.document_references', kwargs=dict(name=doc1.name))
 r = self.client.get(url)
-self.assertEquals(r.status_code, 200)
-self.assertTrue(doc2.name in unicontent(r))
+self.assertEqual(r.status_code, 200)
+self.assertContains(r, doc2.name)
 url = urlreverse('ietf.doc.views_doc.document_referenced_by', kwargs=dict(name=doc2.name))
 r = self.client.get(url)
-self.assertEquals(r.status_code, 200)
-self.assertTrue(doc1.name in unicontent(r))
+self.assertEqual(r.status_code, 200)
+self.assertContains(r, doc1.name)


 class EmailAliasesTests(TestCase):
@@ -1025,7 +1027,7 @@ class EmailAliasesTests(TestCase):
 def setUp(self):
 WgDraftFactory(name='draft-ietf-mars-test',group__acronym='mars')
 WgDraftFactory(name='draft-ietf-ames-test',group__acronym='ames')
-self.doc_alias_file = NamedTemporaryFile(delete=False)
+self.doc_alias_file = NamedTemporaryFile(delete=False, mode='w+')
 self.doc_alias_file.write("""# Generated by hand at 2015-02-12_16:26:45
 virtual.ietf.org anything
 draft-ietf-mars-test@ietf.org xfilter-draft-ietf-mars-test
@@ -1071,8 +1073,8 @@ expand-draft-ietf-ames-test.all@virtual.ietf.org ames-author@example.ames, ames
 url = urlreverse('ietf.doc.views_doc.document_email', kwargs=dict(name="draft-ietf-mars-test"))
 r = self.client.get(url)
 self.assertEqual(r.status_code, 200)
-self.assertTrue('draft-ietf-mars-test.all@ietf.org' in unicontent(r))
-self.assertTrue('ballot_saved' in unicontent(r))
+self.assertContains(r, 'draft-ietf-mars-test.all@ietf.org')
+self.assertContains(r, 'ballot_saved')

 class DocumentMeetingTests(TestCase):

@@ -1266,17 +1268,17 @@ class ChartTests(ResourceTestCaseMixin, TestCase):
 # No qurey arguments; expect an empty json object
 r = self.client.get(conf_url)
 self.assertValidJSONResponse(r)
-self.assertEqual(r.content, '{}')
+self.assertEqual(unicontent(r), '{}')

 # No match
 r = self.client.get(conf_url + '?activedrafts=on&name=thisisnotadocumentname')
 self.assertValidJSONResponse(r)
-d = json.loads(r.content)
+d = r.json()
 self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])

 r = self.client.get(conf_url + '?activedrafts=on&name=%s'%doc.name[6:12])
 self.assertValidJSONResponse(r)
-d = json.loads(r.content)
+d = r.json()
 self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])
 self.assertEqual(len(d['series'][0]['data']), 0)

@@ -1288,17 +1290,17 @@ class ChartTests(ResourceTestCaseMixin, TestCase):
 # No qurey arguments; expect an empty json list
 r = self.client.get(data_url)
 self.assertValidJSONResponse(r)
-self.assertEqual(r.content, '[]')
+self.assertEqual(unicontent(r), '[]')

 # No match
 r = self.client.get(data_url + '?activedrafts=on&name=thisisnotadocumentname')
 self.assertValidJSONResponse(r)
-d = json.loads(r.content)
-self.assertEqual(r.content, '[]')
+d = r.json()
+self.assertEqual(unicontent(r), '[]')

 r = self.client.get(data_url + '?activedrafts=on&name=%s'%doc.name[6:12])
 self.assertValidJSONResponse(r)
-d = json.loads(r.content)
+d = r.json()
 self.assertEqual(len(d), 1)
 self.assertEqual(len(d[0]), 2)

@@ -1322,7 +1324,7 @@ class ChartTests(ResourceTestCaseMixin, TestCase):

 r = self.client.get(conf_url)
 self.assertValidJSONResponse(r)
-d = json.loads(r.content)
+d = r.json()
 self.assertEqual(d['chart']['type'], settings.CHART_TYPE_COLUMN_OPTIONS['chart']['type'])
 self.assertEqual("New draft revisions over time for %s" % person.name, d['title']['text'])

@@ -1330,7 +1332,7 @@ class ChartTests(ResourceTestCaseMixin, TestCase):

 r = self.client.get(data_url)
 self.assertValidJSONResponse(r)
-d = json.loads(r.content)
+d = r.json()
 self.assertEqual(len(d), 1)
 self.assertEqual(len(d[0]), 2)

@@ -1,6 +1,7 @@
 # Copyright The IETF Trust 2013-2019, All Rights Reserved
 # -*- coding: utf-8 -*-


 import datetime
 from pyquery import PyQuery

@@ -19,8 +20,8 @@ from ietf.name.models import BallotPositionName
 from ietf.iesg.models import TelechatDate
 from ietf.person.models import Person, PersonalApiKey
 from ietf.person.factories import PersonFactory
-from ietf.utils.test_utils import TestCase, unicontent, login_testing_unauthorized
-from ietf.utils.mail import outbox, empty_outbox
+from ietf.utils.test_utils import TestCase, login_testing_unauthorized
+from ietf.utils.mail import outbox, empty_outbox, get_payload
 from ietf.utils.text import unwrap


@@ -112,8 +113,7 @@ class EditPositionTests(TestCase):
 discuss=" This is a discussion test. \n ",
 comment=" This is a test. \n ")
 )
-self.assertEqual(r.content, "Done")
-self.assertEqual(r.status_code, 200)
+self.assertContains(r, "Done")

 pos = draft.latest_event(BallotPositionDocEvent, ad=ad)
 self.assertEqual(pos.pos.slug, "discuss")
@@ -172,7 +172,7 @@ class EditPositionTests(TestCase):
 self.assertEqual(len(outbox), mailbox_before + 1)
 m = outbox[-1]
 self.assertIn('COMMENT', m['Subject'])
-self.assertIn('New comment', m.get_payload())
+self.assertIn('New comment', get_payload(m))


 def test_edit_position_as_secretary(self):
@@ -363,7 +363,7 @@ class BallotWriteupsTests(TestCase):
 q = PyQuery(r.content)
 self.assertEqual(len(q('textarea[name=ballot_writeup]')), 1)
 self.assertTrue(q('[type=submit]:contains("Save")'))
-self.assertTrue("IANA does not" in unicontent(r))
+self.assertContains(r, "IANA does not")

 # save
 r = self.client.post(url, dict(
@@ -393,8 +393,8 @@ class BallotWriteupsTests(TestCase):
 q = PyQuery(r.content)
 self.assertEqual(len(q('textarea[name=rfc_editor_note]')), 1)
 self.assertTrue(q('[type=submit]:contains("Save")'))
-self.assertTrue("<label class=\"control-label\">RFC Editor Note</label>" in r.content)
-self.assertTrue("This is a note for the RFC Editor" in r.content)
+self.assertContains(r, "<label class=\"control-label\">RFC Editor Note</label>")
+self.assertContains(r, "This is a note for the RFC Editor")

 # save with a note
 empty_outbox()
@@ -540,8 +540,8 @@ class BallotWriteupsTests(TestCase):
 e.by = Person.objects.get(name="(System)")
 e.doc = draft
 e.rev = draft.rev
-e.desc = u"Ballot approval text was generated"
-e.text = u"Test approval text."
+e.desc = "Ballot approval text was generated"
+e.text = "Test approval text."
 e.save()
 events.append(e)

@@ -550,8 +550,8 @@ class BallotWriteupsTests(TestCase):
 e.by = Person.objects.get(name="(System)")
 e.doc = draft
 e.rev = draft.rev
-e.desc = u"Ballot writeup was generated"
-e.text = u"Test ballot writeup text."
+e.desc = "Ballot writeup was generated"
+e.text = "Test ballot writeup text."
 e.save()
 events.append(e)

@@ -560,8 +560,8 @@ class BallotWriteupsTests(TestCase):
 e.by = Person.objects.get(name="(System)")
 e.doc = draft
 e.rev = draft.rev
-e.desc = u"RFC Editor Note for ballot was generated"
-e.text = u"Test note to the RFC Editor text."
+e.desc = "RFC Editor Note for ballot was generated"
+e.text = "Test note to the RFC Editor text."
 e.save()
 events.append(e)

@@ -588,7 +588,7 @@ class BallotWriteupsTests(TestCase):

 # RFC Editor Notes for documents in the IRTF Stream
 e = DocEvent(doc=draft, rev=draft.rev, by=Person.objects.get(name="(System)"), type='changed_stream')
-e.desc = u"Changed stream to <b>%s</b>" % 'irtf'
+e.desc = "Changed stream to <b>%s</b>" % 'irtf'
 e.save()

 draft.stream_id = 'irtf'
@@ -603,7 +603,7 @@ class BallotWriteupsTests(TestCase):

 # RFC Editor Notes for documents in the IAB Stream
 e = DocEvent(doc=draft, rev=draft.rev, by=Person.objects.get(name="(System)"), type='changed_stream')
-e.desc = u"Changed stream to <b>%s</b>" % 'ise'
+e.desc = "Changed stream to <b>%s</b>" % 'ise'
 e.save()

 draft.stream_id = 'ise'
@@ -733,22 +733,19 @@ class ApproveBallotTests(TestCase):
 # Only Secretariat can use this URL
 login_testing_unauthorized(self, "ad", url)
 r = self.client.get(url)
-self.assertEqual(r.status_code, 403)
-self.assertTrue("Restricted to role Secretariat" in r.content)
+self.assertContains(r, "Restricted to role Secretariat", status_code=403)

 # There are no downrefs, the page should say so
 login_testing_unauthorized(self, "secretary", url)
 r = self.client.get(url)
-self.assertEqual(r.status_code, 200)
-self.assertTrue("No downward references for" in r.content)
+self.assertContains(r, "No downward references for")

 # Add a downref, the page should ask if it should be added to the registry
 rel = draft.relateddocument_set.create(target=rfc.docalias.get(name='rfc6666'),relationship_id='refnorm')
 d = [rdoc for rdoc in draft.relateddocument_set.all() if rel.is_approved_downref()]
 original_len = len(d)
 r = self.client.get(url)
-self.assertEqual(r.status_code, 200)
-self.assertTrue("normatively references rfc6666" in r.content)
+self.assertContains(r, "normatively references rfc6666")

 # POST with the downref checked
 r = self.client.post(url, dict(checkboxes=rel.pk))
@@ -794,7 +791,7 @@ class MakeLastCallTests(TestCase):
 self.assertTrue("ietf-announce@" in outbox[-2]['To'])
 for prefix in ['draft-ietf-mars-test','mars-chairs','aread']:
 self.assertTrue(prefix+"@" in outbox[-2]['Cc'])
-self.assertIn("The following IPR Declarations",outbox[-2].get_payload())
+self.assertIn("The following IPR Declarations", get_payload(outbox[-2]))

 self.assertTrue("Last Call" in outbox[-1]['Subject'])
 self.assertTrue("drafts-lastcall@icann.org" in outbox[-1]['To'])

@@ -1,8 +1,14 @@
 # -*- coding: utf-8 -*-
-# Copyright The IETF Trust 2011, All Rights Reserved
+# Copyright The IETF Trust 2011-2019, All Rights Reserved

+from __future__ import absolute_import, print_function, unicode_literals
+
+import datetime
+import io
+import os
+import shutil

-import os, shutil, datetime
-from StringIO import StringIO
 from pyquery import PyQuery

 from django.conf import settings
@@ -20,8 +26,8 @@ from ietf.group.factories import RoleFactory, GroupFactory
 from ietf.group.models import Group, GroupMilestone
 from ietf.iesg.models import TelechatDate
 from ietf.person.models import Person
-from ietf.utils.test_utils import TestCase, unicontent
-from ietf.utils.mail import outbox, empty_outbox
+from ietf.utils.test_utils import TestCase
+from ietf.utils.mail import outbox, empty_outbox, get_payload
 from ietf.utils.test_utils import login_testing_unauthorized

 class EditCharterTests(TestCase):
@@ -35,7 +41,7 @@ class EditCharterTests(TestCase):
 shutil.rmtree(self.charter_dir)

 def write_charter_file(self, charter):
-with open(os.path.join(self.charter_dir, "%s-%s.txt" % (charter.canonical_name(), charter.rev)), "w") as f:
+with io.open(os.path.join(self.charter_dir, "%s-%s.txt" % (charter.canonical_name(), charter.rev)), "w") as f:
 f.write("This is a charter.")

 def test_startstop_process(self):
@@ -70,8 +76,8 @@ class EditCharterTests(TestCase):

 ames = GroupFactory(acronym='ames',state_id='proposed',list_email='ames-wg@ietf.org',parent=area)
 RoleFactory(name_id='ad',group=ames,person=Person.objects.get(user__username='ad'))
-RoleFactory(name_id='chair',group=ames,person__name=u'Ames Man',person__user__email='ameschairman@example.org')
-RoleFactory(name_id='secr',group=ames,person__name=u'Secretary',person__user__email='amessecretary@example.org')
+RoleFactory(name_id='chair',group=ames,person__name='Ames Man',person__user__email='ameschairman@example.org')
+RoleFactory(name_id='secr',group=ames,person__name='Secretary',person__user__email='amessecretary@example.org')
 CharterFactory(group=ames)

 mars = GroupFactory(acronym='mars',parent=area)
@@ -128,24 +134,24 @@ class EditCharterTests(TestCase):
 self.assertIn("Internal WG Review", outbox[-3]['Subject'])
 self.assertIn("iab@", outbox[-3]['To'])
 self.assertIn("iesg@", outbox[-3]['To'])
-self.assertIn("A new IETF WG", outbox[-3].get_payload())
-body = outbox[-3].get_payload()
-for word in ["Chairs", "Ames Man <ameschairman@example.org>",
+body = get_payload(outbox[-3])
+for word in ["A new IETF WG", "Chairs", "Ames Man <ameschairman@example.org>",
 "Secretaries", "Secretary <amessecretary@example.org>",
 "Assigned Area Director", "Areað Irector <aread@example.org>",
 "Mailing list", "ames-wg@ietf.org",
 "Charter", "Milestones"]:

 self.assertIn(word, body)

 self.assertIn("state changed", outbox[-2]['Subject'].lower())
 self.assertIn("iesg-secretary@", outbox[-2]['To'])
-body = outbox[-2].get_payload()
+body = get_payload(outbox[-2])
 for word in ["WG", "Charter", ]:
 self.assertIn(word, body)

 self.assertIn("State Update Notice", outbox[-1]['Subject'])
 self.assertIn("ames-chairs@", outbox[-1]['To'])
-body = outbox[-1].get_payload()
+body = get_payload(outbox[-1])
 for word in ["State changed", "Datatracker URL", ]:
 self.assertIn(word, body)

@@ -165,7 +171,7 @@ class EditCharterTests(TestCase):
 empty_outbox()
 r = self.client.post(url, dict(charter_state=str(State.objects.get(used=True,type="charter",slug="intrev").pk), message="test"))
 self.assertEqual(r.status_code, 302)
-self.assertTrue("A new charter" in outbox[-3].get_payload())
+self.assertTrue("A new charter" in get_payload(outbox[-3]))

 def test_abandon_bof(self):
 charter = CharterFactory(group__state_id='bof',group__type_id='wg')
@@ -389,19 +395,19 @@ class EditCharterTests(TestCase):
 self.assertEqual(len(q('form input[name=txt]')), 1)

 # faulty post
-test_file = StringIO("\x10\x11\x12") # post binary file
+test_file = io.StringIO("\x10\x11\x12") # post binary file
 test_file.name = "unnamed"

 r = self.client.post(url, dict(txt=test_file))
 self.assertEqual(r.status_code, 200)
-self.assertTrue("does not appear to be a text file" in unicontent(r))
+self.assertContains(r, "does not appear to be a text file")

 # post
 prev_rev = charter.rev

-latin_1_snippet = '\xe5' * 10
-utf_8_snippet = '\xc3\xa5' * 10
-test_file = StringIO("Windows line\r\nMac line\rUnix line\n" + latin_1_snippet)
+latin_1_snippet = b'\xe5' * 10
+utf_8_snippet = b'\xc3\xa5' * 10
+test_file = io.StringIO("Windows line\r\nMac line\rUnix line\n" + latin_1_snippet.decode('latin-1'))
 test_file.name = "unnamed"

 r = self.client.post(url, dict(txt=test_file))
@@ -411,9 +417,8 @@ class EditCharterTests(TestCase):
 self.assertEqual(charter.rev, next_revision(prev_rev))
 self.assertTrue("new_revision" in charter.latest_event().type)

-with open(os.path.join(self.charter_dir, charter.canonical_name() + "-" + charter.rev + ".txt")) as f:
-self.assertEqual(f.read(),
-"Windows line\nMac line\nUnix line\n" + utf_8_snippet)
+with io.open(os.path.join(self.charter_dir, charter.canonical_name() + "-" + charter.rev + ".txt")) as f:
+self.assertEqual(f.read(), "Windows line\nMac line\nUnix line\n" + utf_8_snippet.decode('utf_8'))

 def test_submit_initial_charter(self):
 group = GroupFactory(type_id='wg',acronym='mars',list_email='mars-wg@ietf.org')
@@ -428,7 +433,7 @@ class EditCharterTests(TestCase):
 self.assertEqual(len(q('form input[name=txt]')), 1)

 # create charter
-test_file = StringIO("Simple test")
+test_file = io.StringIO("Simple test")
 test_file.name = "unnamed"

 r = self.client.post(url, dict(txt=test_file))
@@ -591,8 +596,8 @@ class EditCharterTests(TestCase):
 RoleFactory(name_id='ad',group=area,person=Person.objects.get(user__username='ad'))
 charter = CharterFactory(group__acronym='ames',group__list_email='ames-wg@ietf.org',group__parent=area,group__state_id='bof')
 group = charter.group
-RoleFactory(name_id='chair',group=group,person__name=u'Ames Man',person__user__email='ameschairman@example.org')
-RoleFactory(name_id='secr',group=group,person__name=u'Secretary',person__user__email='amessecretary@example.org')
+RoleFactory(name_id='chair',group=group,person__name='Ames Man',person__user__email='ameschairman@example.org')
+RoleFactory(name_id='secr',group=group,person__name='Secretary',person__user__email='amessecretary@example.org')

 url = urlreverse('ietf.doc.views_charter.approve', kwargs=dict(name=charter.name))
 login_testing_unauthorized(self, "secretary", url)
@@ -658,7 +663,7 @@ class EditCharterTests(TestCase):
 #
 self.assertTrue("approved" in outbox[0]['Subject'].lower())
|
||||||
self.assertTrue("iesg-secretary" in outbox[0]['To'])
|
self.assertTrue("iesg-secretary" in outbox[0]['To'])
|
||||||
body = outbox[0].get_payload()
|
body = get_payload(outbox[0])
|
||||||
for word in ["WG", "/wg/ames/about/",
|
for word in ["WG", "/wg/ames/about/",
|
||||||
"Charter", "/doc/charter-ietf-ames/", ]:
|
"Charter", "/doc/charter-ietf-ames/", ]:
|
||||||
self.assertIn(word, body)
|
self.assertIn(word, body)
|
||||||
|
@ -666,7 +671,7 @@ class EditCharterTests(TestCase):
|
||||||
self.assertTrue("WG Action" in outbox[1]['Subject'])
|
self.assertTrue("WG Action" in outbox[1]['Subject'])
|
||||||
self.assertTrue("ietf-announce" in outbox[1]['To'])
|
self.assertTrue("ietf-announce" in outbox[1]['To'])
|
||||||
self.assertTrue("ames-wg@ietf.org" in outbox[1]['Cc'])
|
self.assertTrue("ames-wg@ietf.org" in outbox[1]['Cc'])
|
||||||
body = outbox[1].get_payload()
|
body = get_payload(outbox[1])
|
||||||
for word in ["Chairs", "Ames Man <ameschairman@example.org>",
|
for word in ["Chairs", "Ames Man <ameschairman@example.org>",
|
||||||
"Secretaries", "Secretary <amessecretary@example.org>",
|
"Secretaries", "Secretary <amessecretary@example.org>",
|
||||||
"Assigned Area Director", "Areað Irector <aread@example.org>",
|
"Assigned Area Director", "Areað Irector <aread@example.org>",
|
||||||
|
@ -696,7 +701,7 @@ class EditCharterTests(TestCase):
|
||||||
url = urlreverse('ietf.doc.views_charter.charter_with_milestones_txt', kwargs=dict(name=charter.name, rev=charter.rev))
|
url = urlreverse('ietf.doc.views_charter.charter_with_milestones_txt', kwargs=dict(name=charter.name, rev=charter.rev))
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertTrue(m.desc in unicontent(r))
|
self.assertContains(r, m.desc)
|
||||||
|
|
||||||
def test_chartering_from_bof(self):
|
def test_chartering_from_bof(self):
|
||||||
ad_role = RoleFactory(group__type_id='area',name_id='ad')
|
ad_role = RoleFactory(group__type_id='area',name_id='ad')
|
||||||
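NOTE: the latin_1_snippet/utf_8_snippet change above moves the charter-submit test
fixtures to bytes literals with an explicit decode step.  A minimal stand-alone
sketch of the round-trip the test relies on (plain Python 3, not datatracker code;
the variable names simply mirror the test):

    import io

    latin_1_snippet = b'\xe5' * 10            # ten 'å' characters, Latin-1 encoded
    utf_8_snippet = b'\xc3\xa5' * 10          # the same ten characters, UTF-8 encoded

    # The upload is built as text, so the bytes must be decoded explicitly:
    test_file = io.StringIO("Windows line\r\nMac line\rUnix line\n"
                            + latin_1_snippet.decode('latin-1'))
    test_file.name = "unnamed"                # file name attribute used by the test client

    # Both snippets decode to the same text, which is what the assertion depends on:
    assert latin_1_snippet.decode('latin-1') == utf_8_snippet.decode('utf-8')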
@@ -1,15 +1,21 @@
+# Copyright The IETF Trust 2012-2019, All Rights Reserved
 # -*- coding: utf-8 -*-
-import debug                            # pyflakes:ignore

+from __future__ import absolute_import, print_function, unicode_literals

+import io
 import os
 import shutil

 from pyquery import PyQuery
-from StringIO import StringIO
 from textwrap import wrap

 from django.conf import settings
 from django.urls import reverse as urlreverse

+import debug                            # pyflakes:ignore
+
 from ietf.doc.factories import IndividualDraftFactory, ConflictReviewFactory
 from ietf.doc.models import Document, DocEvent, NewRevisionDocEvent, BallotPositionDocEvent, TelechatDocEvent, State
 from ietf.doc.utils import create_ballot_if_not_open
@@ -17,8 +23,8 @@ from ietf.doc.views_conflict_review import default_approval_text
 from ietf.group.models import Person
 from ietf.iesg.models import TelechatDate
 from ietf.name.models import StreamName
-from ietf.utils.test_utils import TestCase, unicontent
-from ietf.utils.mail import outbox, empty_outbox
+from ietf.utils.test_utils import TestCase
+from ietf.utils.mail import outbox, empty_outbox, get_payload
 from ietf.utils.test_utils import login_testing_unauthorized


@@ -63,9 +69,9 @@ class ConflictReviewTests(TestCase):
         self.assertEqual(r.status_code, 302)
         review_doc = Document.objects.get(name='conflict-review-imaginary-independent-submission')
         self.assertEqual(review_doc.get_state('conflrev').slug,'needshep')
-        self.assertEqual(review_doc.rev,u'00')
-        self.assertEqual(review_doc.ad.name,u'Areað Irector')
-        self.assertEqual(review_doc.notify,u'ipu@ietf.org')
+        self.assertEqual(review_doc.rev,'00')
+        self.assertEqual(review_doc.ad.name,'Areað Irector')
+        self.assertEqual(review_doc.notify,'ipu@ietf.org')
         doc = Document.objects.get(name='draft-imaginary-independent-submission')
         self.assertTrue(doc in [x.target.document for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')])

@@ -87,34 +93,34 @@ class ConflictReviewTests(TestCase):

         # can't start conflict reviews on documents not in a stream
         r = self.client.get(url)
-        self.assertEquals(r.status_code, 404)
+        self.assertEqual(r.status_code, 404)


         # can't start conflict reviews on documents in some other stream
         doc.stream = StreamName.objects.get(slug='irtf')
         doc.save_with_history([DocEvent.objects.create(doc=doc, rev=doc.rev, type="changed_stream", by=Person.objects.get(user__username="secretary"), desc="Test")])
         r = self.client.get(url)
-        self.assertEquals(r.status_code, 404)
+        self.assertEqual(r.status_code, 404)

         # successful get
         doc.stream = StreamName.objects.get(slug='ise')
         doc.save_with_history([DocEvent.objects.create(doc=doc, rev=doc.rev, type="changed_stream", by=Person.objects.get(user__username="secretary"), desc="Test")])
         r = self.client.get(url)
-        self.assertEquals(r.status_code, 200)
+        self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
-        self.assertEquals(len(q('form input[name=notify]')),1)
-        self.assertEquals(len(q('form select[name=ad]')),0)
+        self.assertEqual(len(q('form input[name=notify]')),1)
+        self.assertEqual(len(q('form select[name=ad]')),0)

         # successfully starts a review, and notifies the secretariat
         messages_before = len(outbox)
         r = self.client.post(url,dict(notify='ipu@ietf.org'))
-        self.assertEquals(r.status_code, 302)
+        self.assertEqual(r.status_code, 302)
         review_doc = Document.objects.get(name='conflict-review-imaginary-independent-submission')
-        self.assertEquals(review_doc.get_state('conflrev').slug,'needshep')
-        self.assertEquals(review_doc.rev,u'00')
-        self.assertEquals(review_doc.telechat_date(),None)
-        self.assertEquals(review_doc.ad.name,u'Ietf Chair')
-        self.assertEquals(review_doc.notify,u'ipu@ietf.org')
+        self.assertEqual(review_doc.get_state('conflrev').slug,'needshep')
+        self.assertEqual(review_doc.rev,'00')
+        self.assertEqual(review_doc.telechat_date(),None)
+        self.assertEqual(review_doc.ad.name,'Ietf Chair')
+        self.assertEqual(review_doc.notify,'ipu@ietf.org')
         doc = Document.objects.get(name='draft-imaginary-independent-submission')
         self.assertTrue(doc in [x.target.document for x in review_doc.relateddocument_set.filter(relationship__slug='conflrev')])

@@ -264,7 +270,7 @@ class ConflictReviewTests(TestCase):
     def approve_test_helper(self,approve_type):

         doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
-        url = urlreverse('ietf.doc.views_conflict_review.approve',kwargs=dict(name=doc.name))
+        url = urlreverse('ietf.doc.views_conflict_review.approve_conflict_review',kwargs=dict(name=doc.name))

         login_testing_unauthorized(self, "secretary", url)

@@ -278,9 +284,9 @@ class ConflictReviewTests(TestCase):
         q = PyQuery(r.content)
         self.assertEqual(len(q('[type=submit]:contains("Send announcement")')), 1)
         if approve_type == 'appr-noprob':
-            self.assertIn( 'IESG has no problem', ''.join(wrap(r.content,2**16)))
+            self.assertContains(r, 'IESG has no problem')
         else:
-            self.assertIn( 'NOT be published', ''.join(wrap(r.content,2**16)))
+            self.assertContains(r, 'NOT be published')

         # submit
         empty_outbox()
@@ -296,12 +302,13 @@ class ConflictReviewTests(TestCase):
         self.assertIn('irtf-chair', outbox[0]['To'])
         self.assertIn('ietf-announce@', outbox[0]['Cc'])
         self.assertIn('iana@', outbox[0]['Cc'])

         if approve_type == 'appr-noprob':
-            self.assertIn( 'IESG has no problem', ''.join(wrap(unicode(outbox[0]),2**16)))
+            self.assertIn( 'IESG has no problem', ''.join(wrap(get_payload(outbox[0]), 2**16)))
         else:
-            self.assertIn( 'NOT be published', ''.join(wrap(unicode(outbox[0]),2**16)))
+            self.assertIn( 'NOT be published', ''.join(wrap(get_payload(outbox[0]), 2**16)))


     def test_approve_reqnopub(self):
         self.approve_test_helper('appr-reqnopub')

@@ -330,13 +337,13 @@ class ConflictReviewSubmitTests(TestCase):

         # sane post using textbox
         path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
-        self.assertEqual(doc.rev,u'00')
+        self.assertEqual(doc.rev,'00')
         self.assertFalse(os.path.exists(path))
         r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
         self.assertEqual(r.status_code,302)
         doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
-        self.assertEqual(doc.rev,u'00')
-        with open(path) as f:
+        self.assertEqual(doc.rev,'00')
+        with io.open(path) as f:
             self.assertEqual(f.read(),"Some initial review text\n")
             f.close()
         self.assertTrue( "submission-00" in doc.latest_event(NewRevisionDocEvent).desc)
@@ -348,9 +355,9 @@ class ConflictReviewSubmitTests(TestCase):

         # A little additional setup
         # doc.rev is u'00' per the test setup - double-checking that here - if it fails, the breakage is in setUp
-        self.assertEqual(doc.rev,u'00')
+        self.assertEqual(doc.rev,'00')
         path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
-        with open(path,'w') as f:
+        with io.open(path,'w') as f:
             f.write('This is the old proposal.')
             f.close()

@@ -363,21 +370,21 @@ class ConflictReviewSubmitTests(TestCase):
         # faulty posts trying to use file upload
         # Copied from wgtracker tests - is this really testing the server code, or is it testing
         #  how client.post populates Content-Type?
-        test_file = StringIO("\x10\x11\x12") # post binary file
+        test_file = io.StringIO("\x10\x11\x12") # post binary file
         test_file.name = "unnamed"
         r = self.client.post(url, dict(txt=test_file,submit_response="1"))
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("does not appear to be a text file" in unicontent(r))
+        self.assertContains(r, "does not appear to be a text file")

         # sane post uploading a file
-        test_file = StringIO("This is a new proposal.")
+        test_file = io.StringIO("This is a new proposal.")
         test_file.name = "unnamed"
         r = self.client.post(url,dict(txt=test_file,submit_response="1"))
         self.assertEqual(r.status_code, 302)
         doc = Document.objects.get(name='conflict-review-imaginary-irtf-submission')
-        self.assertEqual(doc.rev,u'01')
+        self.assertEqual(doc.rev,'01')
         path = os.path.join(settings.CONFLICT_REVIEW_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
-        with open(path) as f:
+        with io.open(path) as f:
             self.assertEqual(f.read(),"This is a new proposal.")
             f.close()
         self.assertTrue( "submission-01" in doc.latest_event(NewRevisionDocEvent).desc)
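NOTE: get_payload(), newly imported from ietf.utils.mail above, replaces direct
msg.get_payload() calls; its implementation is not part of this diff.  A rough,
hypothetical sketch of what such a helper has to do under Python 3, where
Message.get_payload(decode=True) hands back bytes:

    import email

    def get_payload_text(msg, default_charset="utf-8"):
        """Return the body of an email.message.Message as text."""
        payload = msg.get_payload(decode=True)   # bytes, or None for multipart messages
        if payload is None:
            return msg.get_payload()
        return payload.decode(msg.get_content_charset() or default_charset)

    msg = email.message_from_string("Subject: x\n\nIESG has no problem with this document.\n")
    assert "IESG has no problem" in get_payload_text(msg)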
@@ -1,7 +1,9 @@
 # Copyright The IETF Trust 2017-2019, All Rights Reserved
 # -*- coding: utf-8 -*-

-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals


 from django.urls import reverse as urlreverse
 from pyquery import PyQuery
@@ -12,7 +14,7 @@ from ietf.doc.factories import WgDraftFactory, WgRfcFactory
 from ietf.doc.models import RelatedDocument, State
 from ietf.person.factories import PersonFactory
 from ietf.utils.test_utils import TestCase
-from ietf.utils.test_utils import login_testing_unauthorized, unicontent
+from ietf.utils.test_utils import login_testing_unauthorized

 class Downref(TestCase):

@@ -32,26 +34,20 @@ class Downref(TestCase):
         # normal - get the table without the "Add downref" button
         self.client.login(username="plain", password="plain+password")
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('<h1>Downref registry</h1>' in content)
-        self.assertFalse('Add downref' in content)
+        self.assertContains(r, '<h1>Downref registry</h1>')
+        self.assertNotContains(r, 'Add downref')

         # secretariat - get the table with the "Add downref" button
         self.client.login(username='secretary', password='secretary+password')
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('<h1>Downref registry</h1>' in content)
-        self.assertTrue('Add downref' in content)
+        self.assertContains(r, '<h1>Downref registry</h1>')
+        self.assertContains(r, 'Add downref')

         # area director - get the table with the "Add downref" button
         self.client.login(username='ad', password='ad+password')
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('<h1>Downref registry</h1>' in content)
-        self.assertTrue('Add downref' in content)
+        self.assertContains(r, '<h1>Downref registry</h1>')
+        self.assertContains(r, 'Add downref')

     def test_downref_registry_add(self):
         url = urlreverse('ietf.doc.views_downref.downref_registry_add')
@@ -60,42 +56,32 @@ class Downref(TestCase):
         # secretariat - get the form to add entries to the registry
         self.client.login(username='secretary', password='secretary+password')
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('<h1>Add entry to the downref registry</h1>' in content)
-        self.assertTrue('Save downref' in content)
+        self.assertContains(r, '<h1>Add entry to the downref registry</h1>')
+        self.assertContains(r, 'Save downref')

         # area director - get the form to add entries to the registry
         self.client.login(username='ad', password='ad+password')
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('<h1>Add entry to the downref registry</h1>' in content)
-        self.assertTrue('Save downref' in content)
+        self.assertContains(r, '<h1>Add entry to the downref registry</h1>')
+        self.assertContains(r, 'Save downref')

         # error - already in the downref registry
         r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.doc.pk, )))
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('Downref is already in the registry' in content)
+        self.assertContains(r, 'Downref is already in the registry')

         # error - source is not in an approved state
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
         r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draft.pk, )))
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('Draft is not yet approved' in content)
+        self.assertContains(r, 'Draft is not yet approved')

         # error - the target is not a normative reference of the source
         self.draft.set_state(State.objects.get(used=True, type="draft-iesg", slug="pub"))
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
         r = self.client.post(url, dict(rfc=self.rfcalias.pk, drafts=(self.draft.pk, )))
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('There does not seem to be a normative reference to RFC' in content)
-        self.assertTrue('Save downref anyway' in content)
+        self.assertContains(r, 'There does not seem to be a normative reference to RFC')
+        self.assertContains(r, 'Save downref anyway')

         # normal - approve the document so the downref is now okay
         RelatedDocument.objects.create(source=self.draft, target=self.rfcalias, relationship_id='refnorm')
@@ -108,9 +94,7 @@ class Downref(TestCase):
         self.assertEqual(r.status_code, 302)
         newurl = urlreverse('ietf.doc.views_downref.downref_registry')
         r = self.client.get(newurl)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertTrue('<a href="/doc/draft-ietf-mars-test' in content)
+        self.assertContains(r, '<a href="/doc/draft-ietf-mars-test')
         self.assertTrue(RelatedDocument.objects.filter(source=self.draft, target=self.rfcalias, relationship_id='downref-approval'))
         self.assertEqual(self.draft.docevent_set.count(), draft_de_count_before + 1)
         self.assertEqual(self.rfc.docevent_set.count(), rfc_de_count_before + 1)
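NOTE: each unicontent(r)/assertTrue pair above collapses into Django's
assertContains()/assertNotContains().  Roughly what those helpers check, as a
plain-Python sketch (the real logic lives in Django's SimpleTestCase, not here):

    def assert_contains(status_code, body, text, expected_status=200):
        """Schematic stand-in for TestCase.assertContains(response, text)."""
        assert status_code == expected_status
        assert text in body.decode('utf-8')

    # e.g. the registry page check:
    assert_contains(200, b'<html><h1>Downref registry</h1>...</html>',
                    '<h1>Downref registry</h1>')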
@@ -1,10 +1,13 @@
 # Copyright The IETF Trust 2011-2019, All Rights Reserved
 # -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

 import os
 import shutil
 import datetime
-import StringIO
+import io
 from pyquery import PyQuery
 from collections import Counter

@@ -25,8 +28,8 @@ from ietf.person.factories import PersonFactory
 from ietf.person.models import Person, Email
 from ietf.meeting.models import Meeting, MeetingTypeName
 from ietf.iesg.models import TelechatDate
-from ietf.utils.test_utils import login_testing_unauthorized, unicontent
-from ietf.utils.mail import outbox, empty_outbox
+from ietf.utils.test_utils import login_testing_unauthorized
+from ietf.utils.mail import outbox, empty_outbox, get_payload
 from ietf.utils.test_utils import TestCase


@@ -40,7 +43,7 @@ class ChangeStateTests(TestCase):

         url = urlreverse('ietf.doc.views_draft.change_state', kwargs=dict(name=draft.name))
         login_testing_unauthorized(self, "ad", url)

         # normal get
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
@@ -210,7 +213,7 @@ class ChangeStateTests(TestCase):
         self.assertTrue(not draft.latest_event(type="changed_ballot_writeup_text"))
         r = self.client.post(url, dict(state=State.objects.get(used=True, type="draft-iesg", slug="lc-req").pk))
         self.assertEqual(r.status_code,200)
-        self.assertTrue("Your request to issue" in unicontent(r))
+        self.assertContains(r, "Your request to issue")

         # last call text
         e = draft.latest_event(WriteupDocEvent, type="changed_last_call_text")
@@ -372,7 +375,7 @@ class EditInfoTests(TestCase):
         data["telechat_date"] = next_week.isoformat()
         r = self.client.post(url,data)
         self.assertEqual(r.status_code, 302)
-        self.assertTrue("may not leave enough time" in outbox[-1].get_payload())
+        self.assertIn("may not leave enough time", get_payload(outbox[-1]))

     def test_start_iesg_process_on_draft(self):

@@ -458,7 +461,7 @@ class EditInfoTests(TestCase):

         # reset
         e = DocEvent(doc=draft, rev=draft.rev, by=Person.objects.get(name="(System)"), type='changed_document')
-        e.desc = u"Intended Status changed to <b>%s</b> from %s"% (draft.intended_std_level_id, 'bcp')
+        e.desc = "Intended Status changed to <b>%s</b> from %s"% (draft.intended_std_level_id, 'bcp')
         e.save()

         draft.intended_std_level_id = 'bcp'
@@ -467,7 +470,7 @@ class EditInfoTests(TestCase):
         self.assertEqual(r.status_code, 403) # BCPs must have a consensus

         e = DocEvent(doc=draft, rev=draft.rev, by=Person.objects.get(name="(System)"), type='changed_document')
-        e.desc = u"Intended Status changed to <b>%s</b> from %s"% (draft.intended_std_level_id, 'inf')
+        e.desc = "Intended Status changed to <b>%s</b> from %s"% (draft.intended_std_level_id, 'inf')
         e.save()

         draft.intended_std_level_id = 'inf'
@@ -563,7 +566,7 @@ class ExpireIDsTests(TestCase):
         settings.INTERNET_DRAFT_ARCHIVE_DIR = self.saved_archive_dir

     def write_draft_file(self, name, size):
-        f = open(os.path.join(self.id_dir, name), 'w')
+        f = io.open(os.path.join(self.id_dir, name), 'w')
         f.write("a" * size)
         f.close()

@@ -774,7 +777,7 @@ class ExpireLastCallTests(TestCase):
 class IndividualInfoFormsTests(TestCase):

     def setUp(self):
-        doc = WgDraftFactory(group__acronym='mars',shepherd=PersonFactory(user__username='plain',name=u'Plain Man').email_set.first())
+        doc = WgDraftFactory(group__acronym='mars',shepherd=PersonFactory(user__username='plain',name='Plain Man').email_set.first())
         self.docname = doc.name

     def test_doc_change_stream(self):
@@ -1056,7 +1059,7 @@ class IndividualInfoFormsTests(TestCase):
         self.assertTrue(doc.latest_event(WriteupDocEvent,type="changed_protocol_writeup").text.startswith('here is a new writeup'))

         # file upload
-        test_file = StringIO.StringIO("This is a different writeup.")
+        test_file = io.StringIO("This is a different writeup.")
         test_file.name = "unnamed"
         r = self.client.post(url,dict(txt=test_file,submit_response="1"))
         self.assertEqual(r.status_code, 302)
@@ -1362,7 +1365,7 @@ class AdoptDraftTests(TestCase):

 class ChangeStreamStateTests(TestCase):
     def test_set_tags(self):
-        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name=u'WG Cháir Man')
+        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name='WG Cháir Man')
         RoleFactory(name_id='delegate',group=role.group,person__user__email='marsdelegate@example.org')
         draft = WgDraftFactory(group=role.group,shepherd=PersonFactory(user__username='plain',user__email='plain@example.com').email_set.first())
         draft.tags.set(DocTagName.objects.filter(slug="w-expert"))
@@ -1399,12 +1402,12 @@ class ChangeStreamStateTests(TestCase):
         self.assertEqual(draft.docevent_set.count() - events_before, 2)
         self.assertEqual(len(outbox), mailbox_before + 1)
         self.assertTrue("tags changed" in outbox[-1]["Subject"].lower())
-        self.assertTrue("mars-chairs@ietf.org" in unicode(outbox[-1]))
-        self.assertTrue("marsdelegate@example.org" in unicode(outbox[-1]))
-        self.assertTrue("plain@example.com" in unicode(outbox[-1]))
+        self.assertTrue("mars-chairs@ietf.org" in outbox[-1].as_string())
+        self.assertTrue("marsdelegate@example.org" in outbox[-1].as_string())
+        self.assertTrue("plain@example.com" in outbox[-1].as_string())

     def test_set_initial_state(self):
-        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name=u'WG Cháir Man')
+        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name='WG Cháir Man')
         RoleFactory(name_id='delegate',group=role.group,person__user__email='marsdelegate@ietf.org')
         draft = WgDraftFactory(group=role.group)
         draft.states.all().delete()
@@ -1436,11 +1439,11 @@ class ChangeStreamStateTests(TestCase):
         self.assertTrue(due - datetime.timedelta(days=1) <= reminder[0].due <= due + datetime.timedelta(days=1))
         self.assertEqual(len(outbox), 1)
         self.assertTrue("state changed" in outbox[0]["Subject"].lower())
-        self.assertTrue("mars-chairs@ietf.org" in unicode(outbox[0]))
-        self.assertTrue("marsdelegate@ietf.org" in unicode(outbox[0]))
+        self.assertTrue("mars-chairs@ietf.org" in outbox[0].as_string())
+        self.assertTrue("marsdelegate@ietf.org" in outbox[0].as_string())

     def test_set_state(self):
-        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name=u'WG Cháir Man')
+        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name='WG Cháir Man')
         RoleFactory(name_id='delegate',group=role.group,person__user__email='marsdelegate@ietf.org')
         draft = WgDraftFactory(group=role.group)

@@ -1481,11 +1484,11 @@ class ChangeStreamStateTests(TestCase):
         self.assertTrue(due - datetime.timedelta(days=1) <= reminder[0].due <= due + datetime.timedelta(days=1))
         self.assertEqual(len(outbox), 1)
         self.assertTrue("state changed" in outbox[0]["Subject"].lower())
-        self.assertTrue("mars-chairs@ietf.org" in unicode(outbox[0]))
-        self.assertTrue("marsdelegate@ietf.org" in unicode(outbox[0]))
+        self.assertTrue("mars-chairs@ietf.org" in outbox[0].as_string())
+        self.assertTrue("marsdelegate@ietf.org" in outbox[0].as_string())

     def test_pubreq_validation(self):
-        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name=u'WG Cháir Man')
+        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name='WG Cháir Man')
         RoleFactory(name_id='delegate',group=role.group,person__user__email='marsdelegate@ietf.org')
         draft = WgDraftFactory(group=role.group)

@@ -1509,7 +1512,7 @@ class ChangeStreamStateTests(TestCase):
 class ChangeReplacesTests(TestCase):
     def setUp(self):

-        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name=u'WG Cháir Man')
+        role = RoleFactory(name_id='chair',group__acronym='mars',group__list_email='mars-wg@ietf.org',person__user__username='marschairman',person__name='WG Cháir Man')
         RoleFactory(name_id='delegate',group=role.group,person__user__email='marsdelegate@ietf.org')
         #draft = WgDraftFactory(group=role.group)

@@ -1520,7 +1523,7 @@ class ChangeReplacesTests(TestCase):
             title="Base A",
             group=mars_wg,
         )
-        p = PersonFactory(name=u"basea_author")
+        p = PersonFactory(name="basea_author")
         e = Email.objects.create(address="basea_author@example.com", person=p, origin=p.user.username)
         self.basea.documentauthor_set.create(person=p, email=e, order=1)

@@ -1530,7 +1533,7 @@ class ChangeReplacesTests(TestCase):
             group=mars_wg,
             expires = datetime.datetime.now() - datetime.timedelta(days = 365 - settings.INTERNET_DRAFT_DAYS_TO_EXPIRE),
         )
-        p = PersonFactory(name=u"baseb_author")
+        p = PersonFactory(name="baseb_author")
         e = Email.objects.create(address="baseb_author@example.com", person=p, origin=p.user.username)
         self.baseb.documentauthor_set.create(person=p, email=e, order=1)

@@ -1539,7 +1542,7 @@ class ChangeReplacesTests(TestCase):
             title="Replace Base A",
             group=mars_wg,
         )
-        p = PersonFactory(name=u"replacea_author")
+        p = PersonFactory(name="replacea_author")
         e = Email.objects.create(address="replacea_author@example.com", person=p, origin=p.user.username)
         self.replacea.documentauthor_set.create(person=p, email=e, order=1)

@@ -1548,7 +1551,7 @@ class ChangeReplacesTests(TestCase):
             title="Replace Base A and Base B",
             group=mars_wg,
         )
-        p = PersonFactory(name=u"replaceboth_author")
+        p = PersonFactory(name="replaceboth_author")
         e = Email.objects.create(address="replaceboth_author@example.com", person=p, origin=p.user.username)
         self.replaceboth.documentauthor_set.create(person=p, email=e, order=1)

@@ -1627,15 +1630,15 @@ class ChangeReplacesTests(TestCase):
         login_testing_unauthorized(self, "secretary", url)

         r = self.client.get(url)
-        self.assertEquals(r.status_code, 200)
+        self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
-        self.assertEquals(len(q('form[name=review-suggested-replaces]')), 1)
+        self.assertEqual(len(q('form[name=review-suggested-replaces]')), 1)

         r = self.client.post(url, dict(replaces=[replaced.pk]))
-        self.assertEquals(r.status_code, 302)
+        self.assertEqual(r.status_code, 302)
         self.assertTrue(not self.replacea.related_that_doc("possibly-replaces"))
         self.assertEqual(len(self.replacea.related_that_doc("replaces")), 1)
-        self.assertEquals(Document.objects.get(pk=self.basea.pk).get_state().slug, 'repl')
+        self.assertEqual(Document.objects.get(pk=self.basea.pk).get_state().slug, 'repl')

 class MoreReplacesTests(TestCase):

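NOTE: unicode(outbox[i]) has no Python 3 equivalent, so the notification checks
above now search outbox[i].as_string(), which renders headers plus body.  A small
illustration with a throwaway message (not datatracker code):

    import email.message

    msg = email.message.Message()
    msg['Subject'] = 'Tags changed'
    msg['To'] = 'mars-chairs@ietf.org'
    msg.set_payload('The tags on the draft were changed.')

    # as_string() is what the tests now grep instead of unicode(msg):
    assert 'mars-chairs@ietf.org' in msg.as_string()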
@@ -1,10 +1,14 @@
-# Copyright The IETF Trust 2011-2019, All Rights Reserved
+# Copyright The IETF Trust 2014-2019, All Rights Reserved
 # -*- coding: utf-8 -*-


+from __future__ import absolute_import, print_function, unicode_literals

 import os
 import shutil
 import datetime
-from StringIO import StringIO
+import io

 from pyquery import PyQuery

 import debug                            # pyflakes:ignore
@@ -19,7 +23,7 @@ from ietf.meeting.factories import MeetingFactory
 from ietf.meeting.models import Meeting, Session, SessionPresentation
 from ietf.name.models import SessionStatusName
 from ietf.person.models import Person
-from ietf.utils.test_utils import TestCase, login_testing_unauthorized, unicontent
+from ietf.utils.test_utils import TestCase, login_testing_unauthorized


 class GroupMaterialTests(TestCase):
@@ -68,7 +72,7 @@ class GroupMaterialTests(TestCase):
         # normal get
         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("Slides" in unicontent(r))
+        self.assertContains(r, "Slides")

         url = urlreverse('ietf.doc.views_material.choose_material_type', kwargs=dict(acronym='mars'))
         r = self.client.get(url)
@@ -85,7 +89,7 @@ class GroupMaterialTests(TestCase):
         self.assertEqual(r.status_code, 200)

         content = "%PDF-1.5\n..."
-        test_file = StringIO(content)
+        test_file = io.StringIO(content)
         test_file.name = "unnamed.pdf"

         # faulty post
@@ -110,7 +114,7 @@ class GroupMaterialTests(TestCase):
         self.assertEqual(doc.title, "Test File - with fancy title")
         self.assertEqual(doc.get_state_slug(), "active")

-        with open(os.path.join(self.materials_dir, "slides", doc.name + "-" + doc.rev + ".pdf")) as f:
+        with io.open(os.path.join(self.materials_dir, "slides", doc.name + "-" + doc.rev + ".pdf")) as f:
             self.assertEqual(f.read(), content)

         # check that posting same name is prevented
@@ -165,7 +169,7 @@ class GroupMaterialTests(TestCase):
         login_testing_unauthorized(self, "secretary", url)

         content = "some text"
-        test_file = StringIO(content)
+        test_file = io.StringIO(content)
         test_file.name = "unnamed.txt"

         # post
@@ -179,6 +183,6 @@ class GroupMaterialTests(TestCase):
         self.assertEqual(doc.title, "New title")
         self.assertEqual(doc.get_state_slug(), "active")

-        with open(os.path.join(doc.get_file_path(), doc.name + "-" + doc.rev + ".txt")) as f:
+        with io.open(os.path.join(doc.get_file_path(), doc.name + "-" + doc.rev + ".txt")) as f:
             self.assertEqual(f.read(), content)

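NOTE: StringIO(...) and bare open(...) above become io.StringIO(...) and
io.open(...), which exist on both Python 2.7 and 3.x and always deal in text in
these tests.  A throwaway Python 3 sketch of the fixture pattern (the path and
file names here are made up):

    import io, os, tempfile

    # Upload fixture: a named text buffer, as handed to the Django test client.
    content = "%PDF-1.5\n..."
    test_file = io.StringIO(content)
    test_file.name = "unnamed.pdf"
    assert test_file.read() == content

    # Reading a stored copy back as text:
    path = os.path.join(tempfile.mkdtemp(), "example-slides-00.txt")
    with io.open(path, 'w') as f:
        f.write(content)
    with io.open(path) as f:
        assert f.read() == content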
@ -1,11 +1,14 @@
|
||||||
# Copyright The IETF Trust 2016-2019, All Rights Reserved
|
# Copyright The IETF Trust 2016-2019, All Rights Reserved
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import datetime, os, shutil, json
|
|
||||||
|
from __future__ import absolute_import, print_function, unicode_literals
|
||||||
|
|
||||||
|
import datetime, os, shutil
|
||||||
|
import io
|
||||||
import tarfile, tempfile, mailbox
|
import tarfile, tempfile, mailbox
|
||||||
import email.mime.multipart, email.mime.text, email.utils
|
import email.mime.multipart, email.mime.text, email.utils
|
||||||
|
|
||||||
from StringIO import StringIO
|
|
||||||
from mock import patch
|
from mock import patch
|
||||||
from requests import Response
|
from requests import Response
|
||||||
|
|
||||||
|
@ -33,7 +36,7 @@ from ietf.review.utils import reviewer_rotation_list, possibly_advance_next_revi
|
||||||
|
|
||||||
from ietf.utils.test_utils import TestCase
|
from ietf.utils.test_utils import TestCase
|
||||||
from ietf.utils.test_data import create_person
|
from ietf.utils.test_data import create_person
|
||||||
from ietf.utils.test_utils import login_testing_unauthorized, unicontent, reload_db_objects
|
from ietf.utils.test_utils import login_testing_unauthorized, reload_db_objects
|
||||||
from ietf.utils.mail import outbox, empty_outbox, parseaddr, on_behalf_of
|
from ietf.utils.mail import outbox, empty_outbox, parseaddr, on_behalf_of
|
||||||
from ietf.person.factories import PersonFactory
|
from ietf.person.factories import PersonFactory
|
||||||
|
|
||||||
|
@ -100,6 +103,8 @@ class ReviewTests(TestCase):
|
||||||
self.assertTrue('reviewteam Early' in outbox[0]['Subject'])
|
self.assertTrue('reviewteam Early' in outbox[0]['Subject'])
|
||||||
self.assertTrue('reviewsecretary@' in outbox[0]['To'])
|
self.assertTrue('reviewsecretary@' in outbox[0]['To'])
|
||||||
self.assertTrue('reviewteam3 Early' in outbox[1]['Subject'])
|
self.assertTrue('reviewteam3 Early' in outbox[1]['Subject'])
|
||||||
|
if not 'reviewsecretary3@' in outbox[1]['To']:
|
||||||
|
print(outbox[1].as_string())
|
||||||
self.assertTrue('reviewsecretary3@' in outbox[1]['To'])
|
self.assertTrue('reviewsecretary3@' in outbox[1]['To'])
|
||||||
|
|
||||||
# set the reviewteamsetting for the secretary email alias, then do the post again
|
# set the reviewteamsetting for the secretary email alias, then do the post again
|
||||||
|
@ -152,9 +157,7 @@ class ReviewTests(TestCase):
|
||||||
|
|
||||||
url = urlreverse('ietf.doc.views_doc.document_main', kwargs={ "name": doc.name })
|
url = urlreverse('ietf.doc.views_doc.document_main', kwargs={ "name": doc.name })
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertContains(r, "{} Review".format(review_req.type.name))
|
||||||
content = unicontent(r)
|
|
||||||
self.assertTrue("{} Review".format(review_req.type.name) in content)
|
|
||||||
|
|
||||||
def test_review_request(self):
|
def test_review_request(self):
|
||||||
doc = WgDraftFactory(group__acronym='mars',rev='01')
|
doc = WgDraftFactory(group__acronym='mars',rev='01')
|
||||||
|
@ -166,9 +169,8 @@ class ReviewTests(TestCase):
|
||||||
url = urlreverse('ietf.doc.views_review.review_request', kwargs={ "name": doc.name, "request_id": review_req.pk })
|
url = urlreverse('ietf.doc.views_review.review_request', kwargs={ "name": doc.name, "request_id": review_req.pk })
|
||||||
|
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertContains(r, review_req.team.acronym)
|
||||||
self.assertIn(review_req.team.acronym, unicontent(r))
|
self.assertContains(r, review_req.team.name)
|
||||||
self.assertIn(review_req.team.name, unicontent(r))
|
|
||||||
|
|
||||||
url = urlreverse('ietf.doc.views_review.review_request_forced_login', kwargs={ "name": doc.name, "request_id": review_req.pk })
|
url = urlreverse('ietf.doc.views_review.review_request_forced_login', kwargs={ "name": doc.name, "request_id": review_req.pk })
|
||||||
r = self.client.get(url)
|
r = self.client.get(url)
|
||||||
|
@ -193,7 +195,7 @@ class ReviewTests(TestCase):
|
||||||
self.client.login(username="reviewsecretary", password="reviewsecretary+password")
|
self.client.login(username="reviewsecretary", password="reviewsecretary+password")
|
||||||
r = self.client.get(req_url)
|
r = self.client.get(req_url)
|
||||||
self.assertEqual(r.status_code, 200)
|
self.assertEqual(r.status_code, 200)
|
||||||
self.assertTrue(close_url in unicontent(r))
|
self.assertContains(r, close_url)
|
||||||
self.client.logout()
|
self.client.logout()
|
||||||
|
|
||||||
# get close page
|
# get close page
|
||||||
|
@ -311,8 +313,8 @@ class ReviewTests(TestCase):
|
||||||
def test_assign_reviewer(self):
|
def test_assign_reviewer(self):
|
||||||
doc = WgDraftFactory(pages=2)
|
doc = WgDraftFactory(pages=2)
|
||||||
review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut"))
|
review_team = ReviewTeamFactory(acronym="reviewteam", name="Review Team", type_id="review", list_email="reviewteam@ietf.org", parent=Group.objects.get(acronym="farfut"))
|
||||||
rev_role = RoleFactory(group=review_team,person__user__username='reviewer',person__user__email='reviewer@example.com',person__name=u'Some Reviewer',name_id='reviewer')
|
rev_role = RoleFactory(group=review_team,person__user__username='reviewer',person__user__email='reviewer@example.com',person__name='Some Reviewer',name_id='reviewer')
|
||||||
RoleFactory(group=review_team,person__user__username='marschairman',person__name=u'WG Cháir Man',name_id='reviewer')
|
RoleFactory(group=review_team,person__user__username='marschairman',person__name='WG Cháir Man',name_id='reviewer')
|
||||||
RoleFactory(group=review_team,person__user__username='reviewsecretary',person__user__email='reviewsecretary@example.com',name_id='secr')
|
RoleFactory(group=review_team,person__user__username='reviewsecretary',person__user__email='reviewsecretary@example.com',name_id='secr')
|
||||||
ReviewerSettings.objects.create(team=review_team, person=rev_role.person, min_interval=14, skip_next=0)
|
ReviewerSettings.objects.create(team=review_team, person=rev_role.person, min_interval=14, skip_next=0)
|
||||||
|
|
||||||
@@ -353,7 +355,7 @@ class ReviewTests(TestCase):
         reviewer_settings.save()

         # Need one more person in review team one so we can test incrementing skip_count without immediately decrementing it
-        another_reviewer = PersonFactory.create(name = u"Extra TestReviewer") # needs to be lexically greater than the existing one
+        another_reviewer = PersonFactory.create(name = "Extra TestReviewer") # needs to be lexically greater than the existing one
         another_reviewer.role_set.create(name_id='reviewer', email=another_reviewer.email(), group=review_req.team)

         UnavailablePeriod.objects.create(
@@ -381,7 +383,7 @@ class ReviewTests(TestCase):
         self.client.login(username="reviewsecretary", password="reviewsecretary+password")
         r = self.client.get(req_url)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(assign_url in unicontent(r))
+        self.assertContains(r, assign_url)
         self.client.logout()

         # get assign page
@@ -455,14 +457,14 @@ class ReviewTests(TestCase):
         self.client.login(username="reviewsecretary", password="reviewsecretary+password")
         r = self.client.get(req_url)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(reject_url in unicontent(r))
+        self.assertContains(r, reject_url)
         self.client.logout()

         # get reject page
         login_testing_unauthorized(self, "reviewsecretary", reject_url)
         r = self.client.get(reject_url)
         self.assertEqual(r.status_code, 200)
-        self.assertTrue(unicode(assignment.reviewer.person) in unicontent(r))
+        self.assertContains(r, str(assignment.reviewer.person))

         # reject
         empty_outbox()
@@ -539,7 +541,7 @@ class ReviewTests(TestCase):

         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        messages = json.loads(r.content)["messages"]
+        messages = r.json()["messages"]
         self.assertEqual(len(messages), 2)

         today = datetime.date.today()
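
The json.loads(r.content) replacement above works because Django's test-client responses expose a json() helper that decodes the body (bytes under Python 3) and parses it in one step. A sketch under the assumption of a hypothetical JSON view at /api/status/:

    from django.test import SimpleTestCase

    class JsonIdiom(SimpleTestCase):
        def test_status_endpoint(self):
            r = self.client.get("/api/status/")
            # Old: json.loads(r.content) relied on json.loads() accepting
            # bytes, which is only true on Python 3.6 and later.
            # New: the response object decodes and parses for us.
            data = r.json()
            self.assertIn("messages", data)
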
@@ -560,7 +562,7 @@ class ReviewTests(TestCase):

         # Test failure to return mailarch results
         no_result_path = os.path.join(self.review_dir, "mailarch_no_result.html")
-        with open(no_result_path, "w") as f:
+        with io.open(no_result_path, "w") as f:
             f.write('Content-Type: text/html\n\n<html><body><div class="xtr"><div class="xtd no-results">No results found</div></div>')
         ietf.review.mailarch.construct_query_urls = lambda review_req, query=None: { "query_data_url": "file://" + os.path.abspath(no_result_path) }

@@ -568,7 +570,7 @@ class ReviewTests(TestCase):

         r = self.client.get(url)
         self.assertEqual(r.status_code, 200)
-        result = json.loads(r.content)
+        result = r.json()
         self.assertNotIn('messages', result)
         self.assertIn('No results found', result['error'])

@@ -617,7 +619,7 @@ class ReviewTests(TestCase):
         # complete by uploading file
         empty_outbox()

-        test_file = StringIO("This is a review\nwith two lines")
+        test_file = io.StringIO("This is a review\nwith two lines")
         test_file.name = "unnamed"

         r = self.client.post(url, data={
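
The open() → io.open() and StringIO → io.StringIO substitutions above keep Python 3's text-oriented semantics on both interpreters: io.open() is the Python-3 built-in open(), and io.StringIO only holds text. A small sketch along those lines, with a made-up file name:

    import io

    # io.open() behaves like Python 3's open() on Python 2.7 as well:
    # text mode with an explicit encoding instead of a byte stream.
    with io.open("example.txt", "w", encoding="utf-8") as f:
        f.write(u"text, not bytes\n")

    # io.StringIO is a text-only buffer on both interpreters, unlike the
    # old StringIO.StringIO, which also accepted byte strings.
    buf = io.StringIO(u"This is a review\nwith two lines")
    assert buf.read().splitlines() == [u"This is a review", u"with two lines"]
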
@@ -638,7 +640,7 @@ class ReviewTests(TestCase):
         self.assertTrue(assignment.review_request.team.acronym.lower() in assignment.review.name)
         self.assertTrue(assignment.review_request.doc.rev in assignment.review.name)

-        with open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
+        with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
             self.assertEqual(f.read(), "This is a review\nwith two lines")

         self.assertEqual(len(outbox), 1)
@@ -662,10 +664,8 @@ class ReviewTests(TestCase):
         # check the review document page
         url = urlreverse('ietf.doc.views_doc.document_main', kwargs={ "name": assignment.review.name })
         r = self.client.get(url)
-        self.assertEqual(r.status_code, 200)
-        content = unicontent(r)
-        self.assertIn("{} Review".format(assignment.review_request.type.name), content)
-        self.assertIn("This is a review", content)
+        self.assertContains(r, "{} Review".format(assignment.review_request.type.name))
+        self.assertContains(r, "This is a review")


     def test_complete_review_enter_content(self):
@@ -690,7 +690,7 @@ class ReviewTests(TestCase):
         self.assertEqual(assignment.state_id, "completed")
         self.assertNotEqual(assignment.completed_on, None)

-        with open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
+        with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
             self.assertEqual(f.read(), "This is a review\nwith two lines")

         self.assertEqual(len(outbox), 1)
@@ -753,7 +753,7 @@ class ReviewTests(TestCase):
         # Mock up the url response for the request.get() call to retrieve the mailing list url
         response = Response()
         response.status_code = 200
-        response._content = "This is a review\nwith two lines"
+        response._content = b"This is a review\nwith two lines"
         mock.return_value = response

         # Run the test
@@ -768,7 +768,7 @@ class ReviewTests(TestCase):
             "state": ReviewAssignmentStateName.objects.get(slug="completed").pk,
             "reviewed_rev": assignment.review_request.doc.rev,
             "review_submission": "link",
-            "review_content": response.content,
+            "review_content": response.content.decode(),
             "review_url": "http://example.com/testreview/",
             "review_file": "",
         })
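
The two changes above belong together: a mocked requests Response keeps its raw body in the private _content attribute, which must be bytes on Python 3, and the test then decodes it explicitly where the form payload needs str. A sketch of that split, illustrative only:

    from requests import Response

    response = Response()
    response.status_code = 200
    response._content = b"This is a review\nwith two lines"   # raw body is bytes

    assert response.content == b"This is a review\nwith two lines"
    text = response.content.decode()     # explicit decode where str is needed
    assert text.startswith("This is a review")
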
@@ -777,7 +777,7 @@ class ReviewTests(TestCase):
         assignment = reload_db_objects(assignment)
         self.assertEqual(assignment.state_id, "completed")

-        with open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
+        with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
             self.assertEqual(f.read(), "This is a review\nwith two lines")

         self.assertEqual(len(outbox), 0)
@@ -877,7 +877,7 @@ class ReviewTests(TestCase):
         event = ReviewAssignmentDocEvent.objects.get(type="closed_review_assignment", review_assignment=assignment)
         self.assertEqual(event.time, datetime.datetime(2012, 12, 24, 12, 13, 14))

-        with open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
+        with io.open(os.path.join(self.review_subdir, assignment.review.name + ".txt")) as f:
             self.assertEqual(f.read(), "This is a review\nwith two lines")

         self.assertEqual(len(outbox), 0)
@@ -1,13 +1,17 @@
 # Copyright The IETF Trust 2013-2019, All Rights Reserved
 # -*- coding: utf-8 -*-

+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import io
 import os
 import shutil

 import debug  # pyflakes:ignore

 from pyquery import PyQuery
-from StringIO import StringIO
+from io import StringIO
 from textwrap import wrap

 from django.conf import settings
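
The import hunk above is a typical header for a module meant to run under both Python 2.7 and 3.7: a __future__ import for Python-3 string, print and import semantics, plus io for text-mode file handling and the Python-3 location of StringIO. A minimal module skeleton along those lines (names are placeholders, not from the changed files):

    from __future__ import absolute_import, print_function, unicode_literals

    import io
    from io import StringIO

    def read_text(path):
        # Same text-mode, encoding-aware behaviour under 2.7 and 3.x.
        with io.open(path, encoding="utf-8") as f:
            return f.read()

    def as_buffer(text):
        return StringIO(text)    # text-only in-memory file on both interpreters
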
@@ -20,7 +24,7 @@ from ietf.doc.utils import create_ballot_if_not_open
 from ietf.doc.views_status_change import default_approval_text
 from ietf.group.models import Person
 from ietf.iesg.models import TelechatDate
-from ietf.utils.test_utils import TestCase, unicontent
+from ietf.utils.test_utils import TestCase
 from ietf.utils.mail import outbox
 from ietf.utils.test_utils import login_testing_unauthorized

@@ -73,9 +77,9 @@ class StatusChangeTests(TestCase):
         self.assertEqual(r.status_code, 302)
         status_change = Document.objects.get(name='status-change-imaginary-new')
         self.assertEqual(status_change.get_state('statchg').slug,'adrev')
-        self.assertEqual(status_change.rev,u'00')
-        self.assertEqual(status_change.ad.name,u'Areað Irector')
-        self.assertEqual(status_change.notify,u'ipu@ietf.org')
+        self.assertEqual(status_change.rev,'00')
+        self.assertEqual(status_change.ad.name,'Areað Irector')
+        self.assertEqual(status_change.notify,'ipu@ietf.org')
         self.assertTrue(status_change.relateddocument_set.filter(relationship__slug='tois',target__docs__name='draft-ietf-random-thing'))

     def test_change_state(self):
@@ -112,10 +116,10 @@ class StatusChangeTests(TestCase):
         doc.save_with_history([DocEvent.objects.create(doc=doc, rev=doc.rev, type="changed_document", by=Person.objects.get(user__username="secretary"), desc="Test")])
         lc_req_pk = str(State.objects.get(slug='lc-req',type__slug='statchg').pk)
         r = self.client.post(url,dict(new_state=lc_req_pk))
-        self.assertEquals(r.status_code, 200)
+        self.assertEqual(r.status_code, 200)
         doc = Document.objects.get(name='status-change-imaginary-mid-review')
-        self.assertEquals(doc.get_state('statchg').slug,'lc-req')
-        self.assertEquals(len(outbox), messages_before + 1)
+        self.assertEqual(doc.get_state('statchg').slug,'lc-req')
+        self.assertEqual(len(outbox), messages_before + 1)
         self.assertTrue('Last Call:' in outbox[-1]['Subject'])

         # successful change to IESG Evaluation
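
The assertEquals replacements above are not strictly a Python-3 requirement: assertEquals is a long-deprecated alias of assertEqual that emits a DeprecationWarning on current interpreters, so the conversion normalizes the spelling while it is touching these lines anyway. Illustrative only:

    import unittest

    class AliasExample(unittest.TestCase):
        def test_alias(self):
            self.assertEqual(2 + 2, 4)       # preferred spelling
            # self.assertEquals(2 + 2, 4)    # same check, but deprecated and noisy under -W error

    if __name__ == "__main__":
        unittest.main()
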
@@ -171,15 +175,15 @@ class StatusChangeTests(TestCase):

         # normal get
         r = self.client.get(url)
-        self.assertEquals(r.status_code, 200)
+        self.assertEqual(r.status_code, 200)
         q = PyQuery(r.content)
-        self.assertEquals(len(q('input[name=title]')),1)
+        self.assertEqual(len(q('input[name=title]')),1)

         # change title
         r = self.client.post(url,dict(title='New title'))
-        self.assertEquals(r.status_code,302)
+        self.assertEqual(r.status_code,302)
         doc = Document.objects.get(name='status-change-imaginary-mid-review')
-        self.assertEquals(doc.title,'New title')
+        self.assertEqual(doc.title,'New title')
         self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith('Title changed'))

     def test_edit_ad(self):
@@ -265,8 +269,8 @@ class StatusChangeTests(TestCase):
         q = PyQuery(r.content)
         self.assertEqual(len(q('form.edit-last-call-text')),1)

-        self.assertTrue( 'RFC9999 from Proposed Standard to Internet Standard' in ''.join(wrap(r.content,2**16)))
-        self.assertTrue( 'RFC9998 from Informational to Historic' in ''.join(wrap(r.content,2**16)))
+        self.assertContains(r, 'RFC9999 from Proposed Standard to Internet Standard')
+        self.assertContains(r, 'RFC9998 from Informational to Historic')

         # save
         r = self.client.post(url,dict(last_call_text="Bogus last call text",save_last_call_text="1"))
@@ -278,17 +282,17 @@ class StatusChangeTests(TestCase):
         # reset
         r = self.client.post(url,dict(regenerate_last_call_text="1"))
         self.assertEqual(r.status_code,200)
-        self.assertTrue( 'RFC9999 from Proposed Standard to Internet Standard' in ''.join(wrap(r.content,2**16)))
-        self.assertTrue( 'RFC9998 from Informational to Historic' in ''.join(wrap(r.content,2**16)))
+        self.assertContains(r, 'RFC9999 from Proposed Standard to Internet Standard')
+        self.assertContains(r, 'RFC9998 from Informational to Historic')

         # request last call
         messages_before = len(outbox)
         r = self.client.post(url,dict(last_call_text='stuff',send_last_call_request='Save+and+Request+Last+Call'))
         self.assertEqual(r.status_code,200)
-        self.assertTrue( 'Last call requested' in ''.join(wrap(r.content,2**16)))
+        self.assertContains(r, 'Last call requested')
         self.assertEqual(len(outbox), messages_before + 1)
         self.assertTrue('Last Call:' in outbox[-1]['Subject'])
-        self.assertTrue('Last Call Request has been submitted' in ''.join(wrap(unicode(outbox[-1]),2**16)))
+        self.assertTrue('Last Call Request has been submitted' in ''.join(wrap(outbox[-1].as_string(), width=2**16)))


     def test_approve(self):
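
The outbox entries checked above are email Message objects; Python 2's unicode(msg) stringified the whole message, and msg.as_string() is the spelling that works on both interpreters. A short illustrative sketch with made-up content:

    from email.message import Message

    msg = Message()
    msg["Subject"] = "Last Call: example"
    msg.set_payload("A Last Call Request has been submitted")

    text = msg.as_string()       # portable replacement for unicode(msg)
    assert "Last Call Request has been submitted" in text
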
@@ -310,8 +314,8 @@ class StatusChangeTests(TestCase):
         self.assertEqual(len(q('[type=submit]:contains("Send announcement")')), 1)
         # There should be two messages to edit
         self.assertEqual(q('input#id_form-TOTAL_FORMS').val(),'2')
-        self.assertTrue( '(rfc9999) to Internet Standard' in ''.join(wrap(r.content,2**16)))
-        self.assertTrue( '(rfc9998) to Historic' in ''.join(wrap(r.content,2**16)))
+        self.assertContains(r, '(rfc9999) to Internet Standard')
+        self.assertContains(r, '(rfc9998) to Historic')

         # submit
         messages_before = len(outbox)
@@ -328,10 +332,10 @@ class StatusChangeTests(TestCase):
         self.assertTrue('Action:' in outbox[-1]['Subject'])
         self.assertTrue('ietf-announce' in outbox[-1]['To'])
         self.assertTrue('rfc-editor' in outbox[-1]['Cc'])
-        self.assertTrue('(rfc9998) to Historic' in ''.join(wrap(unicode(outbox[-1])+unicode(outbox[-2]),2**16)))
-        self.assertTrue('(rfc9999) to Internet Standard' in ''.join(wrap(unicode(outbox[-1])+unicode(outbox[-2]),2**16)))
+        self.assertTrue('(rfc9998) to Historic' in ''.join(wrap(outbox[-1].as_string()+outbox[-2].as_string(), 2**16)))
+        self.assertTrue('(rfc9999) to Internet Standard' in ''.join(wrap(outbox[-1].as_string()+outbox[-2].as_string(),2**16)))

         self.assertTrue(doc.latest_event(DocEvent,type="added_comment").desc.startswith('The following approval message was sent'))

     def test_edit_relations(self):
         doc = Document.objects.get(name='status-change-imaginary-mid-review')
@@ -415,13 +419,13 @@ class StatusChangeSubmitTests(TestCase):

         # sane post using textbox
         path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
-        self.assertEqual(doc.rev,u'00')
+        self.assertEqual(doc.rev,'00')
         self.assertFalse(os.path.exists(path))
         r = self.client.post(url,dict(content="Some initial review text\n",submit_response="1"))
         self.assertEqual(r.status_code,302)
         doc = Document.objects.get(name='status-change-imaginary-mid-review')
-        self.assertEqual(doc.rev,u'00')
-        with open(path) as f:
+        self.assertEqual(doc.rev,'00')
+        with io.open(path) as f:
             self.assertEqual(f.read(),"Some initial review text\n")
         self.assertTrue( "mid-review-00" in doc.latest_event(NewRevisionDocEvent).desc)

@@ -432,9 +436,9 @@ class StatusChangeSubmitTests(TestCase):

         # A little additional setup
         # doc.rev is u'00' per the test setup - double-checking that here - if it fails, the breakage is in setUp
-        self.assertEqual(doc.rev,u'00')
+        self.assertEqual(doc.rev,'00')
         path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
-        with open(path,'w') as f:
+        with io.open(path,'w') as f:
             f.write('This is the old proposal.')
             f.close()
         # Put the old proposal into IESG review (exercises ballot tab when looking at an older revision below)
@@ -456,7 +460,7 @@ class StatusChangeSubmitTests(TestCase):
         test_file.name = "unnamed"
         r = self.client.post(url, dict(txt=test_file,submit_response="1"))
         self.assertEqual(r.status_code, 200)
-        self.assertTrue("does not appear to be a text file" in unicontent(r))
+        self.assertContains(r, "does not appear to be a text file")

         # sane post uploading a file
         test_file = StringIO("This is a new proposal.")
@@ -464,9 +468,9 @@ class StatusChangeSubmitTests(TestCase):
         r = self.client.post(url,dict(txt=test_file,submit_response="1"))
         self.assertEqual(r.status_code, 302)
         doc = Document.objects.get(name='status-change-imaginary-mid-review')
-        self.assertEqual(doc.rev,u'01')
+        self.assertEqual(doc.rev,'01')
         path = os.path.join(settings.STATUS_CHANGE_PATH, '%s-%s.txt' % (doc.canonical_name(), doc.rev))
-        with open(path) as f:
+        with io.open(path) as f:
             self.assertEqual(f.read(),"This is a new proposal.")
             f.close()
         self.assertTrue( "mid-review-01" in doc.latest_event(NewRevisionDocEvent).desc)
@@ -481,7 +485,7 @@ class StatusChangeSubmitTests(TestCase):
         url = urlreverse('ietf.doc.views_doc.document_main',kwargs=dict(name=doc.name,rev='00'))
         r = self.client.get(url)
         self.assertEqual(r.status_code,200)
-        self.assertTrue("This is the old proposal." in unicontent(r))
+        self.assertContains(r, "This is the old proposal.")

     def setUp(self):
         DocumentFactory(type_id='statchg',name='status-change-imaginary-mid-review',notify='notify@example.org')