Changed regex strings to raw strings.
 - Legacy-Id: 16312

parent 218f6d86a6
commit 06daf6ac65
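Why raw strings: in an ordinary string literal, escapes like \d are not recognized string escapes, and Python 3.6+ warns about them when the module is compiled (a DeprecationWarning, later a SyntaxWarning); a raw string avoids the warning and compiles to the same regular expression. A minimal sketch using the pattern from the first hunk below (the draft filename is invented, not from the datatracker):

    import re

    # Raw-string form of the pattern from the first hunk.  The non-raw
    # spelling '^(.*)-(\d\d)$' works only because '\d' falls through
    # unchanged, and Python 3.6+ emits "DeprecationWarning: invalid escape
    # sequence \d" when compiling it; the raw string is warning-free and
    # compiles to the same pattern.
    filename_re = re.compile(r'^(.*)-(\d\d)$')

    # Invented draft filename, just to show the match behaviour is unchanged.
    print(filename_re.match('draft-ietf-example-07').groups())
    # -> ('draft-ietf-example', '07')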
@@ -168,7 +168,7 @@ def clean_up_draft_files():
     cut_off = datetime.date.today()
 
     pattern = os.path.join(settings.INTERNET_DRAFT_PATH, "draft-*.*")
-    filename_re = re.compile('^(.*)-(\d\d)$')
+    filename_re = re.compile(r'^(.*)-(\d\d)$')
 
     def splitext(fn):
         """

@@ -395,7 +395,7 @@ def clean_helper(form, formtype):
     for k in sorted(form.data.keys()):
         v = form.data[k]
         if k.startswith('new_relation_row'):
-            if re.match('\d{1,4}',v):
+            if re.match(r'\d{1,4}',v):
                 v = 'rfc'+v
                 rfc_fields[k[17:]]=v
         elif k.startswith('statchg_relation_row'):

@@ -412,7 +412,7 @@ def clean_helper(form, formtype):
     errors=[]
     for key in new_relations:
 
-        if not re.match('(?i)rfc\d{1,4}',key):
+        if not re.match(r'(?i)rfc\d{1,4}',key):
            errors.append(key+" is not a valid RFC - please use the form RFCn\n")
        elif not DocAlias.objects.filter(name=key):
            errors.append(key+" does not exist\n")

@@ -201,7 +201,7 @@ def fill_in_wg_drafts(group):
 
 
 def check_group_email_aliases():
-    pattern = re.compile('expand-(.*?)(-\w+)@.*? +(.*)$')
+    pattern = re.compile(r'expand-(.*?)(-\w+)@.*? +(.*)$')
     tot_count = 0
     good_count = 0
     with open(settings.GROUP_VIRTUAL_PATH,"r") as virtual_file:

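As a note on the expand-... patterns converted in this commit: they parse lines of the group virtual-alias file. A small, hypothetical illustration of what they extract (the acronym, host, and addresses below are invented, not taken from the datatracker):

    import re

    # Hypothetical virtual-alias line in the "expand-<acronym>-<role>@host  targets"
    # shape the pattern expects; every value here is made up for this sketch.
    line = "expand-foobar-chairs@virtual.example.org  chair1@example.com, chair2@example.org"

    pattern = re.compile(r'expand-(.*?)(-\w+)@.*? +(.*)$')
    m = pattern.match(line)
    print(m.groups())
    # -> ('foobar', '-chairs', 'chair1@example.com, chair2@example.org')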
@@ -414,7 +414,7 @@ def concluded_groups(request):
     sections['Review teams'] = Group.objects.filter(type='review', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
     sections['Teams'] = Group.objects.filter(type='team', state="conclude").select_related("state", "charter").order_by("parent__name","acronym")
 
-    for name, groups in list(sections.items()):
+    for name, groups in sections.items():
 
         # add start/conclusion date
         d = dict((g.pk, g) for g in groups)

@@ -625,9 +625,9 @@ def group_about_status_edit(request, acronym, group_type=None):
 
 def get_group_email_aliases(acronym, group_type):
     if acronym:
-        pattern = re.compile('expand-(%s)(-\w+)@.*? +(.*)$'%acronym)
+        pattern = re.compile(r'expand-(%s)(-\w+)@.*? +(.*)$'%acronym)
     else:
-        pattern = re.compile('expand-(.*?)(-\w+)@.*? +(.*)$')
+        pattern = re.compile(r'expand-(.*?)(-\w+)@.*? +(.*)$')
 
     aliases = []
     with open(settings.GROUP_VIRTUAL_PATH,"r") as virtual_file:

@@ -976,7 +976,7 @@ def edit(request, group_type=None, acronym=None, action="edit", field=None):
             group.groupurl_set.all().delete()
             # Add new ones
             for u in new_urls:
-                m = re.search('(?P<url>[\w\d:#@%/;$()~_?\+-=\\\.&]+)( \((?P<name>.+)\))?', u)
+                m = re.search(r'(?P<url>[\w\d:#@%/;$()~_?\+-=\\\.&]+)( \((?P<name>.+)\))?', u)
                 if m:
                     if m.group('name'):
                         url = GroupURL(url=m.group('url'), name=m.group('name'), group=group)

@@ -238,7 +238,7 @@ def convert_draft_to_pdf(doc_name):
     for line in infile:
         line = re.sub("\r","",line)
         line = re.sub("[ \t]+$","",line)
-        if re.search("\[?[Pp]age [0-9ivx]+\]?[ \t]*$",line):
+        if re.search(r"\[?[Pp]age [0-9ivx]+\]?[ \t]*$",line):
             pageend=1
             tempfile.write(line)
             continue

@@ -34,7 +34,7 @@ def get_cleaned_text_file_content(uploaded_file):
     if not filetype.startswith("text"):
         raise ValidationError("Uploaded file does not appear to be a text file.")
 
-    match = re.search("charset=([\w-]+)", filetype)
+    match = re.search(r"charset=([\w-]+)", filetype)
     if not match:
         raise ValidationError("File has unknown encoding.")