chore: remove svn/trac infrastructure (#3649)

* chore: Remove svn/trac related infrastructure

* chore: Remove more svn/trac infrastructure

* fix: remove commented out Trac requirement
This commit is contained in:
Robert Sparks 2022-03-16 11:08:47 -05:00 committed by GitHub
parent 868afd287c
commit a60c31e45d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 0 additions and 3903 deletions

View file

@ -1,33 +0,0 @@
#!/bin/bash
# Generate a debian-changelog-style entry from the svn log of the current
# working copy, covering everything since the last release marker commit.
cur=$(svn info | awk '/^Revision:/ { print $2 }')
# List recent commits, extract revision numbers, stop at release, and grab the last revision
# - get svn log, looking backwards from HEAD
# - stop on 'Set version info ...'
# - filter out revision lines
# - grab the first field
# - grab the last line
# - remove the leading 'r'
rev=$(svn log $PWD -r ${2:-HEAD}:${1:-$((cur-100))} \
| sed -r -n -e '1,/^Set version info( and settings)?( back)? to (development|release)/p' \
| egrep '^r[0-9]+ \|' \
| cut -d ' ' -f 1 \
| tail -n 1 \
| sed 's/^r//')
# Grab commit log lines from just after the start rev, going forwards, and reformat
# - Get svn log entries, starting with the earliest
# - Filter out dividing lines and revision/committer/date lines, keeping the messages
# - Insert leading asterisks: ' * ' at the front of the first line in each text block
# - Unwrap lines that start with unindented text
# - Do line folding at column 76
# - Indent any unindented lines 4 spaces
# - Add blank lines in front of log entries
#
# FIX: the unwrap sed below used ERE grouping '([A-Za-z0-9])' without -r,
# so the parentheses were matched literally and the step never fired.
# Run it with -r (and a plain \1 backreference) so line unwrapping works.
svn log $PWD -r $((rev+2)):${2:-HEAD} \
| sed -r 's/^(----------|r[0-9]+).*$/\n/' \
| sed '1,/./s/^/ * /;/^$/,/./s/^/ * /;/^ \* $/d' \
| sed -r -e :a -e '$!N;s/\n([A-Za-z0-9])/ \1/;ta' -e 'P;D' \
| fold -sw76 \
| sed -r 's/^([^ ].*)$/ &/' \
| sed -r 's/^ \* /\n * /'
echo ""
# Trailer line in debian changelog format, pinned to UTC.
TZ=UTC date +" -- Robert Sparks <rjsparks@nostrum.com>  %d %b %Y %H:%M:%S %z"

View file

@ -1,259 +0,0 @@
#!/bin/bash
# mergesprintbranch -- merge a changeset from a sprint branch into the
# trunk working copy this script is run from, then (optionally) commit it.
version=0.20
program=${0##*/}
progdir=${0%/*}
# When $0 has no directory component, ${0%/*} equals $0; fall back to '.'.
if [ "$progdir" = "$program" ]; then progdir="."; fi
# ----------------------------------------------------------------------
# Emit manpage-style help.  The OPTIONS section is generated by grepping
# this script's own option-parsing 'case' lines and their trailing comments.
function usage() {
cat <<EOF
NAME
$program - merge and commit a sprint branch
SYNOPSIS
$program [OPTIONS] BRANCH SVNREV
DESCRIPTION
Merge and commit a sprint branch
EOF
echo -e "OPTIONS"
if [ "$(uname)" = "Linux" ]; then
egrep "^[	]+[-][A-Za-z| -]+\*?\)[	]+[A-Za-z].+#" $0 | tr -s "\t|" "\t," | sed -r -e 's/\)[ \t]+([A-Z]+)=\$2[^#]*#/=\1\t/' -e 's/\)[^#]*#/\t/'
else
egrep "^[	]+[-][A-Za-z| -]+\*?\)[	]+[A-Za-z].+#" $0 | sed 's/\|.*\$2[^#]*#/	/'| sed -E 's/\|.*\)[^#]*#/	/'
fi
cat <<EOF
FILES
AUTHOR
Written by Henrik Levkowetz, <henrik@tools.ietf.org>
COPYRIGHT
Copyright 2010 Henrik Levkowetz.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version. There is NO WARRANTY; not even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
EOF
}
# ----------------------------------------------------------------------
# Print an error message to stderr and abort the script.
function die() {
echo -e "\n$program: error: $*" > /dev/stderr
exit 1
}
# Print a message only when --verbose was given.
function note() {
if [ -n "$VERBOSE" ]; then echo -e "$*"; fi
}
# ----------------------------------------------------------------------
function version() {
echo -e "$program $version"
}
# ----------------------------------------------------------------------
# Abort on any command failure, reporting the failing line number.
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# ----------------------------------------------------------------------
# Option parsing
# Options
shortopts=cnhvV
longopts=commit,no-commit,help,verbose,version
# Default values
ARG_COMMIT=1
# GNU getopt (Linux) supports long options; BSD/Darwin getopt does not,
# so the two platforms take different parsing paths.
if [ "$(uname)" = "Linux" ]; then
args=$(getopt -o "$shortopts" --long "$longopts" -n '$program' -- $SV "$@")
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
eval set -- "$args"
sed="sed -r"
else
# Darwin, BSDs
args=$(getopt -o$shortopts $SV $*)
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
set -- $args
sed="sed -E"
fi
while true ; do
case "$1" in
-c| --commit) ARG_COMMIT=1;; # Run commit in addition to merge
-n| --no-commit) ARG_COMMIT=0;; # Don't commit after merge
-h| --help) usage; exit;; # Show this help, then exit
-v| --verbose) VERBOSE=1;; # Be more talkative
-V| --version) version; exit;; # Show program version, then exit
--) shift; break;;
*) die "Internal error, inconsistent option specification: '$1'";;
esac
shift
done
# ----------------------------------------------------------------------
# The program itself
# Argument validation
# Accept 'branch@rev' as a single argument by splitting it at the '@'.
[[ $1 =~ @ ]] && set ${1/@/ }
[ $# -ge 2 ] || die "Expected branch and repository revision on the command line"
[ ${PWD##*/} = trunk ] || die "Expected this script to be run in trunk"
# Global settings
cwd=${PWD##*/}
branch=$1
rev=$2
fix=$3
# remove leading 'r' from rev, if present
rev=${rev#r}
repo=$(echo -n "$(svn info | grep "^Repository Root: " | sed 's/Repository Root: //')")
# Determine the releaser's real name: environment override first, then
# the GECOS field of the local passwd entry.
[ -z "$by" ] && by=${RELEASER_REAL_NAME}
[ -z "$by" ] && by=$(getent passwd $(whoami) | cut -d ':' -f 5 | tr -d ',')
[ -z "$by" ] && die "Can't determine the real name of the user running this script"
python -c 'import django' || die "Can't find django - can't run tests"
note "Identify the branch:"
# Resolve the branch argument: as given, then under personal/, then under
# branch/, then directly under the repository root.
if svn info $branch > /dev/null 2>&1; then
branch="${branch#^/}"
elif svn info ${repo}/personal/$branch > /dev/null 2>&1; then
branch="personal/$branch"
elif svn info ${repo}/branch/$branch > /dev/null 2>&1; then
branch="branch/$branch"
elif svn info ${repo}/$branch > /dev/null 2>&1; then
true
else
die "Could not find a branch matching '$branch'"
fi
note "Svn update, to make sure we don't have a mixed revision working copy"
svn update -q
mergelog=$(mktemp)
svn propget svn:mergeinfo . > $mergelog
# Refuse to merge a changeset already recorded in svn:mergeinfo.
if grep "@$rev $branch" $mergelog; then die "Changeset $branch@$rev is already in the merge log. Skipping it."; exit 0; fi
note "Will attempt merge from $branch@$rev"
# "Check there's no uncommitted changes ..."
echo ""
# NOTE(review): '$do' is never assigned in this script, so it expands to
# nothing and the commands prefixed with it always run for real.
$do svn st | grep "^[AMGRD] " && {
echo ""
read -p "There are uncommitted changes.  Really do merge? [y/N] "
[ "$REPLY" = "Y" -o "$REPLY" = "y" ] || exit
}
note "Extract who and what:"
info=$(svn log ${repo}/ -r $rev --incremental)
set $(echo "$info" | tail -n +2 | head -n 1 | tr "|" "\t")
who=$2; echo -e "$who"
comment=$(echo "$info" | tail -n +3); echo -e "$comment\n"
# Strip the 'ready for merge' marker phrase from the commit message.
comment=$(echo "$comment" | sed -r -e 's/(commit )?ready (for|to) merge\.?//i' -e '/^$/d')
files=$(svn diff ${repo}/ -c $rev --summarize | awk '{$1=""; print;}' | while read file; do echo "${file/$repo\/$branch\//}"; done)
echo -e "Files: \n$files\n"
read -p "Continue with diff? [Y/n] "
[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || exit
note "Diff:"
note "svn diff -c $rev $repo/$branch"
svn diff -c $rev $repo/$branch | less
echo ""
read -p "Additional descriptive text (hit return for none): "
if [ "$REPLY" != "" ]; then
comment="$REPLY
$comment"
fi
echo ""
read -p "Continue with the merge? [Y/n] "
[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || exit
note "Do the merge:"
# A rev containing ':' is treated as a range (-r); a single rev uses -c.
if [[ $rev =~ : ]]; then
svn merge -r $rev ${repo}/$branch . || die "Merge of $branch @$rev failed. The merge command was:
svn merge -r $rev ${repo}/$branch ."
else
svn merge -c $rev ${repo}/$branch . || die "Merge of $branch @$rev failed. The merge command was:
svn merge -c $rev ${repo}/$branch ."
fi
note "Writing commit script"
# Save a ready-to-run commit command so the commit can also be done later.
echo -e "#!/bin/bash\n\nsvn commit -m \"Merged in [$rev] from $who:\n    ${comment//\"/\'} ${fix//\"/\'}\"" > ../cicmd/commit-${rev}-merge.sh
chmod +x ../cicmd/commit-${rev}-merge.sh
M=$(svn st | cut -c 1-7 | grep -oh 'M' | head -n 1)
C=$(svn st | cut -c 1-7 | grep -oh 'C' | head -n 1)
G=$(svn st | cut -c 1-7 | grep -oh 'G' | head -n 1)
##cd ../
##rsync -a $cwd/ merged@$rev/
##cp cicmd/commit-${rev}-merge.sh merged@$rev/commit
##cd -
# Potentially run flake8 at this point
# read -p "Run flake8? [y/N] "
# if [ "$REPLY" = "Y" -o "$REPLY" = "y"]; then
#     mod=$(svn st | cut -c 9- | grep '\.py$')
#     flake8 $mod | less
# fi
read -p "Continue with tests? [Y/n] "
[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || exit
echo -e "\nRunning tests"
# Swap stdout/stderr so the test runner's progress stream can be piped
# into bin/count while its report still reaches the terminal.
time { ietf/manage.py test --settings=settings_sqlitetest \
|| die "Tests failed.\nThe commit script is ../cicmd/commit-${rev}-merge.sh"; } 3>&1 1>&2 2>&3 | bin/count
echo ""
note "Sending email to changeset author: <$who>"
SEND_ARGS=""
[ "${RELEASER_EMAIL}" ] && SEND_ARGS="-r ${RELEASER_EMAIL}"
mail "${SEND_ARGS}" -s "Merged datatracker branch personal/$branch@$rev to trunk" $who -c rjsparks@nostrum.com <<-EOF
Hi,
This is an automatic merge info message.  Your code in personal/$branch@$rev
has been merged to trunk, and will be part of the next release if nothing
goes wrong during final testing.
Regards,
$by
(via the mergesprintbranch script)
EOF
echo ""
read -p "Continue with the commit? [Y/n] "
[ "$REPLY" = "Y" -o "$REPLY" = "y" -o "$REPLY" = "" ] || ARG_COMMIT=0
if [ "$ARG_COMMIT" != 0 ]; then
echo "Svn update:"
svn update -q
echo "Committing the merge:"
echo ""
svn commit -m "Merged in [$rev] from $who:
    ${comment//\"/\'} ${fix//\"/\'}"
else
echo "This merge has not been committed yet."
echo "To commit it, run this commit command: ../cicmd/commit-$rev-merge.sh"
fi
echo -e "\n------------------------------------------------------------------------\n\n"

View file

@ -1,414 +0,0 @@
#!/usr/bin/env python
# -*- python -*-
"""
NAME
%(program)s - look for SVN commits that are ready to merge
SYNOPSIS
%(program)s [OPTIONS] ARGS
DESCRIPTION
%(program)s looks in the SVN log for commits which are marked with the
phrase 'Commit ready for merge', and compares the resulting list with
the 'svn:mergeinfo' property on the current directory, in order to
work out which (if any) commits are ready to merge, but not yet
merged. The command requires (and checks) that it's running in a
directory named 'trunk', and requires that to be an SVN working copy.
The files (in the top directory of the working copy) 'ready-for-merge'
and 'hold-for-merge' are also consulted for additions and exceptions to
the merge list.
A list of commit date, committer, and branch@revision for each commit
which is marked ready for merge, but not yet merged, is then written
to standard out.
%(options)s
AUTHOR
Written by Henrik Levkowetz, <henrik@tools.ietf.org>
COPYRIGHT
Copyright 2014 Henrik Levkowetz
This program is free software; you can redistribute it and/or modify
it under the terms of the Simplified BSD license as published by the
Open Source Initiative at http://opensource.org/licenses/BSD-2-Clause.
"""
from __future__ import print_function, unicode_literals
import sys
import os
path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if not path in sys.path:
sys.path.insert(0, path)
import getopt
import re
import pytz
import tzparse
import debug
version = "0.20"
program = os.path.basename(sys.argv[0])
progdir = os.path.dirname(sys.argv[0])
# ----------------------------------------------------------------------
# Parse options
#
# Build the OPTIONS help text by scanning this script's own source for the
# 'if/elif opt in [...]: # comment' lines in the option loop below.
# NOTE(review): because the inline comments below are scraped at runtime
# for the help text, the '-v' comment incorrectly advertises "version
# information" -- it actually increments verbosity.  Left unchanged here
# since the comment text is effectively runtime data.
options = ""
for line in re.findall("\n +(if|elif) +opt in \[(.+)\]:\s+#(.+)\n", open(sys.argv[0]).read()):
    if not options:
        options += "OPTIONS\n"
    options += "   %-16s %s\n" % (line[1].replace('"', ''), line[2])
options = options.strip()

# with ' < 1:' on the next line, this is a no-op:
if len(sys.argv) < 1:
    print(__doc__ % locals())
    sys.exit(1)

try:
    opts, files = getopt.gnu_getopt(sys.argv[1:], "hvV", ["help", "version","verbose",])
except Exception as e:
    print( "%s: %s" % (program, e))
    sys.exit(1)

# ----------------------------------------------------------------------
# Handle options

# set default values, if any
opt_verbose = 0

# handle individual options
for opt, value in opts:
    if opt in ["-h", "--help"]: # Output this help, then exit
        print( __doc__ % locals() )
        sys.exit(1)
    elif opt in ["-V", "--version"]: # Output version information, then exit
        print( program, version )
        sys.exit(0)
    elif opt in ["-v", "--verbose"]: # Output version information, then exit
        opt_verbose += 1
# ----------------------------------------------------------------------
def say(s):
    """Write *s* followed by a newline to stderr, unconditionally."""
    print(s, file=sys.stderr)
# ----------------------------------------------------------------------
def note(s):
    """Write *s* to stderr, but only when verbose output is enabled
    via the module-level ``opt_verbose`` flag."""
    if opt_verbose:
        print(s, file=sys.stderr)
# ----------------------------------------------------------------------
def die(s, error=1):
    """Print an error message (prefixed with the program name) to stderr
    and terminate the process with exit status *error*."""
    msg = "\n%s: Error: %s\n\n" % (program, s)
    sys.stderr.write(msg)
    sys.exit(error)
# ----------------------------------------------------------------------
# The program itself

import os
import json

# This tool inspects an svn working copy; insist on being run in 'trunk'
# so relative paths (ready-for-merge files, svn propget .) make sense.
cwd = os.getcwd()

if cwd.split(os.path.sep)[-1] != 'trunk':
    die("Expected to run this operation in trunk, but the current\ndirectory is '%s'" % cwd)
# ----------------------------------------------------------------------
# Some utility functions
def pipe(cmd, inp=None):
    """Run *cmd* (a shell-style command string) as a subprocess, optionally
    feeding it *inp* on stdin, and return its decoded stdout.

    Raises OSError carrying the captured stderr when the command exits
    with a nonzero status.
    """
    import shlex
    from subprocess import Popen, PIPE
    proc = Popen(
        shlex.split(cmd),
        stdin=PIPE if inp else None,
        stdout=PIPE,
        stderr=PIPE,
        bufsize=4096,
    )
    out, err = proc.communicate(inp)
    if proc.returncode != 0:
        raise OSError(err)
    return out.decode('utf-8')
def split_loginfo(line):
    """Parse one svn log header line of the form
    'r1234 | user@host | 2014-01-02 03:04:05 +0000 (...)' into a
    (rev, who, when) tuple, with *when* converted to UTC.

    NOTE(review): on a malformed line the ValueError handler only prints a
    diagnostic; the final return then raises UnboundLocalError because
    rev/who/when were never assigned.  Presumably malformed lines do not
    occur for the pre-filtered input -- confirm before reuse.
    """
    try:
        parts = line.split()
        rev = parts[0][1:]          # strip the leading 'r'
        who = parts[2]
        date = parts[4]
        time = parts[5]
        tz = parts[6]
        # parts[4:7] is 'YYYY-MM-DD HH:MM:SS ZONE'
        when = tzparse.tzparse(" ".join(parts[4:7]), "%Y-%m-%d %H:%M:%S %Z")
        when = when.astimezone(pytz.utc)
    except ValueError as e:
        sys.stderr.write("Bad log line format: %s\n  %s\n" % (line, e))
    return rev, who, when
# ----------------------------------------------------------------------
# Get repository information
svn_info = {}
for line in pipe('svn info .').splitlines():
    if line:
        key, value = line.strip().split(':', 1)
        svn_info[key] = value.strip()
repo = svn_info["Repository Root"]
head = int(svn_info['Revision'])

# Get current mergeinfo from cache and svn.  The cache maps
# repo -> mergeinfo-line -> {rev: branch}, and avoids re-running
# 'svn log' over revision ranges that were already expanded once.
cachefn = os.path.join(os.environ.get('HOME', '.'), '.mergeinfo')
if os.path.exists(cachefn):
    note("Reading mergeinfo cache file %s" % cachefn)
    with open(cachefn, "r") as file:
        cache = json.load(file)
else:
    sys.stderr.write("No merge info cache file found -- will have to extract all information from SVN.\n"+
        "This may take some time.\n\n")
    opt_verbose = True
    cache = {}
mergeinfo = cache[repo] if repo in cache else {}
merged_revs = {}
write_cache = False
# Matches the 'rNNNN | user@host | YYYY-MM-DD ' svn log header lines.
loginfo_format = r'^r[0-9]+ \| [^@]+@[^@]+ \| \d\d\d\d-\d\d-\d\d '

note("Getting svn:mergeinfo for current branch")
for line in pipe('svn propget svn:mergeinfo .').splitlines():
    if opt_verbose:
        sys.stderr.write('.')
    if line in mergeinfo:
        # Cache hit: reuse the previously expanded rev->branch map.
        merged = mergeinfo[line]
    else:
        merged = {}
        branch, revs = line.strip().split(':',1)
        for part in revs.split(','):
            if '-' in part:
                # Revision range: expand it via 'svn log'.
                beg, end = part.split('-')
                try:
                    commit_log = pipe('svn log -v -r %s:%s %s%s' % (beg, end, repo, branch))
                    for logline in commit_log.splitlines():
                        if re.search(loginfo_format, logline):
                            rev, who, when = split_loginfo(logline)
                            merged[rev] = branch[1:]
                    write_cache = True
                except OSError:
                    pass
            else:
                merged[part] = branch[1:]
                write_cache = True
    mergeinfo[line] = merged
    merged_revs.update(merged)
note('')

if write_cache:
    cache[repo] = mergeinfo
    with open(cachefn, "w") as file:
        json.dump(cache, file, indent=2, sort_keys=True)
def get_changeset_list_from_file(repo, filename):
    """
    This is used to read changesets to hold or merge from the ready-for-merge
    and hold-for-merge files.

    Each non-comment line names a changeset either as 'branch@rev' or as
    'branch @ rev'; a leading '^' and/or '/' on the branch is stripped.
    Returns a list of (rev, repo, branch) tuples for changesets that are
    not already recorded in the module-level merged_revs map.
    """
    # Renamed from 'list' -- the original shadowed the builtin.
    changesets = []
    if os.path.exists(filename):
        note("Reading list from '%s'" % filename)
    else:
        note("File doesn't exist: '%s'" % filename)
        return changesets
    with open(filename) as file:
        for line in file:
            line = line.strip()
            if line.startswith('#') or line == "":
                continue
            try:
                parts = line.split()
                if len(parts) > 1 and parts[1] == '@':
                    # 'branch @ rev' form (three whitespace-separated tokens)
                    branch, rev = parts[0], parts[2]
                    changeset = "%s@%s" % (branch, rev)
                else:
                    # 'branch@rev' form
                    changeset = parts[0]
                    branch, rev = changeset.split('@')
                # Normalize '^/branch' and '/branch' to plain 'branch'.
                if branch.startswith('^'):
                    branch = branch[1:]
                if branch.startswith('/'):
                    branch = branch[1:]
                if not (rev in merged_revs and branch == merged_revs[rev]):
                    changesets.append((rev, repo, branch))
                #elif rev in merged_revs and not branch == merged_revs[rev]:
                #    sys.stderr.write('Rev %s: %s != %s' % (rev, branch, merged_revs[rev]))
                else:
                    #sys.stderr.write('Already merged: merged_revs[%s]: %s\n' % (rev, merged_revs[rev]))
                    pass
            except ValueError as e:
                sys.stderr.write("Bad changeset specification in %s: '%s': %s\n" % (file.name, changeset, e))
    return changesets
def get_ready_commits(repo, tree):
    """Scan the svn log of *tree* (from head-200 onwards) for commits whose
    message contains 'commit/branch ready for/to merge', and return a list
    of (rev, repo, branch) tuples for those not already in merged_revs.

    NOTE(review): 'rev' and 'branch' are only bound once the first log
    header line has been seen; the code relies on the svn log format always
    starting each entry with such a header -- confirm before reuse.
    """
    list = []
    note("Getting ready commits from '%s'" % tree)
    cmd = 'svn log -v -r %s:HEAD %s/%s/' % ((head-200), repo, tree)
    if opt_verbose > 1:
        note("Running '%s' ..." % cmd)
    commit_log = pipe(cmd)
    for line in commit_log.splitlines():
        if re.search(loginfo_format, line):
            # New log entry header: remember who/when and reset the branch.
            rev, who, when = split_loginfo(line)
            branch = None
            continue
        if (line.startswith('   M') or line.startswith('   A') or line.startswith('   D')) and branch == None:
            # First changed-path line of the entry determines the branch
            # (e.g. 'personal/<user>/<branch>' from the path's components).
            type, path = line[:4], line[5:]
            if ' (from ' in path:
                i = path.index(' (from ')
                path = path[:i]
            branch = '/'.join(path.split('/')[1:4])
        elif re.search("(?i)((commit|branch) ready (for|to) merge)", line):
            if not rev in merged_revs:
                note("  %s  %s: %s@%s" % (when.strftime("%Y-%m-%d %H:%MZ"), who, branch, rev))
                list += [(rev, repo, branch),]
            elif rev in merged_revs and not branch == merged_revs[rev]:
                sys.stderr.write('Rev %s: %s != %s\n' % (rev, branch, merged_revs[rev]))
            else:
                pass
        else:
            pass
    return list
# Collect candidate changesets: explicit ready/hold files (local and one
# level up) plus anything marked ready in the scanned branch trees.
ready  = get_changeset_list_from_file(repo, 'ready-for-merge')
ready += get_changeset_list_from_file(repo, '../ready-for-merge')
hold   = get_changeset_list_from_file(repo, 'hold-for-merge')
hold  += get_changeset_list_from_file(repo, '../hold-for-merge')
ready += get_ready_commits(repo, 'personal')
ready += get_ready_commits(repo, 'branch/iola')
ready += get_ready_commits(repo, 'branch/dash')

ready_commits = {}
all_commits = {}
not_passed = {}
branches = set()
for entry in ready:
    rev, repo, branch = entry
    branches.add(branch)
    # Get the time, committer, and commit message
    cmd = 'svn log -v -r %s %s/%s/' % (rev, repo, branch)
    if opt_verbose > 1:
        note("Running '%s' ..." % cmd)
    try:
        loginfo = pipe(cmd).splitlines()
    except OSError:
        # Changeset not reachable on this branch; skip it.
        continue
    try:
        rev, who, when = split_loginfo(loginfo[1])
    except IndexError:
        die("Wrong changeset version in %s@%s ?" % (branch, rev))
    # Find the first changed path of the commit; it determines merge_path.
    for line in loginfo[3:]:
        type, path = line[:4], line[5:]
        if 'M' in type or 'A' in type or 'D' in type:
            if ' (from ' in path:
                i = path.index(' (from ')
                path = path[:i]
            break
    # Get the test status
    try:
        cmd = 'svn propget --revprop -r %s "test:unittest"' % rev
        unittest = pipe(cmd).strip()
    except OSError as e:
        # E200017: no such revision property -- treat as 'not recorded'.
        if "E200017" in str(e):
            unittest = ""
            pass
        else:
            raise
    #
    # Reduce the changed path to the branch root (everything up to 'ietf',
    # or the first four components when 'ietf' is absent).
    dirs = path.split(os.path.sep)
    dirs = dirs[:dirs.index('ietf')] if 'ietf' in dirs else dirs[:4]
    merge_path = os.path.join(*dirs)
    if not (rev, repo, merge_path) in hold:
        output_line = "%s  %-24s ^/%s@%s" % (when.strftime("%Y-%m-%d_%H:%MZ"), who+":", merge_path, rev)
        all_commits[when] = (rev, repo, branch, who, merge_path)
        if unittest == 'passed':
            ready_commits[when] = output_line
        else:
            not_passed[when] = output_line
# Index the held changesets by revision for quick lookup below.
hold_revs = {}
for rev, repo, branch in hold:
    hold_revs[rev] = branch

# For every branch with a ready commit, collect its not-yet-merged,
# content-modifying commits (oldest first).
unmerged_branch_commits = {}
for branch in branches:
    try:
        cmd = 'svn ls %s/%s --depth empty' % (repo, branch)
        _ = pipe(cmd)
    except OSError:
        note("Skipping nonexistent branch %s" % branch)
        continue
    note("Fetching commit information for branch %s" % branch)
    commits = []
    cmd = 'svn log -v -r 0:HEAD --stop-on-copy %s/%s/' % (repo, branch)
    commit_log = pipe(cmd)
    rev = None
    mod = False
    for line in commit_log.splitlines():
        if re.search(loginfo_format, line):
            rev, who, when = split_loginfo(line)
        elif re.search('^   [AMD]', line):
            # A changed path that is not a pure copy marks real content.
            if not ' (from ' in line and not mod:
                mod = True
        elif re.search('^-{72}$', line) and rev and mod:
            # Entry separator: record the finished entry if unmerged/unheld.
            if not rev in merged_revs and not rev in hold_revs:
                commits.append(rev)
            rev = None
            mod = False
    commits.sort()
    unmerged_branch_commits[branch] = commits

keys = list(all_commits.keys())
keys.sort()
# Check that we don't have holes in the commit list -- commits not mentioned
# as ready for merge, and not already merged, earlier than a waiting commit.
unmerged = False
for key in keys:
    (rev, repo, branch, who, merge_path) = all_commits[key]
    try:
        i = unmerged_branch_commits[branch].index(rev)
    except:
        say("Unexpected state.  Mismatch between branch name and revision in hold-for-merge or ready-for-merge?")
        raise
    if not i == 0:
        # There are earlier unmerged commits on this branch: report them.
        unmerged = True
        sys.stderr.write("There are unmerged commits ahead of r%s on branch ^/%s:\n" % (rev, branch))
        for j in range(0,i):
            commit = unmerged_branch_commits[branch][j]
            if commit != rev:
                sys.stderr.write("  %s:\n" % commit)
                commit_comment = pipe("svn log -c %s ^/" % commit).splitlines()[3:-1]
                for l in commit_comment:
                    sys.stderr.write("    %s\n" % l)
        unmerged_branch_commits[branch] = unmerged_branch_commits[branch][i:]
        sys.stderr.write("\n")
    del unmerged_branch_commits[branch][0]

keys = list(not_passed.keys())
keys.sort()
if len(keys) > 0:
    print("")
    print("Commits marked ready which haven't passed the test suite:\n")
    for key in keys:
        print(not_passed[key])
    print('')

# Finally, the sorted list of merge-ready commits that passed the tests.
keys = list(ready_commits.keys())
keys.sort()
for key in keys:
    print(ready_commits[key])
print("\n%s pending merges" % len(keys))

View file

@ -1,282 +0,0 @@
#!/bin/bash
# mktestbranch -- create personal dev branches in the IETF datatracker svn
# repository, either for one named developer or for all sprint sign-ups.
version=0.24
program=${0##*/}
progdir=${0%/*}
svn_url_base="https://svn.ietf.org/svn/tools/ietfdb"
# When $0 has no directory component, ${0%/*} equals $0; fall back to '.'.
if [ "$progdir" = "$program" ]; then progdir="."; fi
# ----------------------------------------------------------------------
# Emit manpage-style help.  The OPTIONS section is scraped from this
# script's own option-parsing 'case' lines and their trailing comments.
# NOTE(review): '$sed' is assigned during option parsing below; usage() is
# only invoked afterwards (via -h), so the variable is set by then.
function usage() {
cat <<EOF
NAME
$program - make new dev branches for the IETF sprint
SYNOPSIS
$program [OPTIONS] [DEVELOPER [BRANCHNAME]]
DESCRIPTION
Make new dev branches for sprint participants based on the
content of the sprint registration page.  If given a specific
developer name and optionally a branch name as arguments, make a
new branch for the specified developer instead.  If run without
arguments, the script assumes that it's being run on the host that
holds the Trac wiki with the sprint signup pages.
EOF
echo -e "OPTIONS"
if [ "$(uname)" = "Linux" ]; then
# shellcheck disable=SC2086
# shellcheck disable=SC2016
grep -E "^\s+[-][A-Za-z| -]+\*?\)\s+[A-Za-z].+#" "$0" | tr -s "\t|" "\t," | $sed -e 's/\)[ \t]+([A-Z]+)=\$2[^#]*#/=\1\t/' -e 's/\)[^#]*#/\t/'
else
# shellcheck disable=SC2086
# shellcheck disable=SC2016
grep -E "^\s+[-][A-Za-z| -]+\*?\)\s+[A-Za-z].+#" "$0" | $sed 's/\|.*\$2[^#]*#/	/' | $sed 's/\|.*\)[^#]*#/	/'
fi
cat <<EOF
FILES
AUTHOR
Written by Henrik Levkowetz, <henrik@zinfandel.tools.ietf.org>
COPYRIGHT
Copyright 2016 Henrik Levkowetz.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version. There is NO WARRANTY; not even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
EOF
}
# ----------------------------------------------------------------------
# Print an error message to stderr and abort.
function die() {
echo -e "\n$program: error: $*" >/dev/stderr
exit 1
}
# Print a warning to stderr without aborting.
function warn() {
echo "$program: Warning: $*" 1>&2
}
# Print a message only when --verbose was given.
function note() {
if [ -n "$VERBOSE" ]; then echo -e "$*"; fi
}
# ----------------------------------------------------------------------
function version() {
echo -e "$program $version"
}
# ----------------------------------------------------------------------
# Set the variable named by $1 to the exit status of 'svn info' on the
# repository path $2 (0 when the path exists).
function check_svn_path_exists() {
local __resultvar=$1
local __path=$2
local myresult
svn info "${__path}" >/dev/null 2>&1
myresult=$?
# shellcheck disable=SC2086
eval $__resultvar="'$myresult'"
}
# Ensure /personal/<dir> exists in the repository, creating it if needed.
# $1 is the developer login; $2 optionally overrides the directory name.
function mksvndir() {
who=$1
if [ "$2" ]; then dir=$2; else dir=$who; fi
check_svn_path_exists exists "${svn_url_base}/personal/$dir"
# shellcheck disable=SC2154
if [ "$exists" != "0" ]; then
$do echo "Creating personal directory area for IETF datatracker coding: /personal/$dir"
$do svn mkdir "${svn_url_base}/personal/$dir" -m "Personal SVN dir for $who, for IETF datatracker code"
else
echo "Repository area personal/$dir is already in place."
fi
}
# Create personal/<who>/<target> as an svn copy of <source>; when an email
# address is supplied, notify the owner of the new branch.
function mksvntarget() {
local who=$1
local target=$2
local source=$3
local email=$4
local name=$5
check_svn_path_exists exists "${svn_url_base}/personal/$who/$target"
if [ "$exists" != "0" ]; then
$do echo "  creating $target branch for $who ($name)."
$do svn cp "${svn_url_base}/$source" \
"${svn_url_base}/personal/$who/$target/" \
-m "New branch for $target"
$do echo "New branch: ^/personal/$who/$target"
if [ -n "$email" ]; then
notify_user "$who" "$target" "$email" "$name"
fi
else
$do echo "  branch personal/$who/$target already exists."
fi
}
# Send the new-branch announcement mail to a sprint participant.
function notify_user() {
local login=$1
local target=$2
local email=$3
local name=$4
$do mail "$name <$email>" -s "A new SVN branch for you for IETF datatracker coding${rev:+, based on $rev}." -b rjsparks@nostrum.com <<-EOF
Hi,
$msg
This mail has been automatically generated by the $program script.
A new SVN branch has been set up for you for IETF datatracker coding, at
${svn_url_base}/personal/$login/$target
${rev:+This branch is based on $rev. }You can check it out by doing
svn co ${svn_url_base}/personal/$login/$target
There's also a database dump available at
https://www.ietf.org/lib/dt/sprint/ietf_utf8.sql.gz -- this dump is served
via CDN, and should hopefully be swifter to download than the alternatives.
Please read the instructions about sprint coder setup at
https://trac.ietf.org/tools/ietfdb/wiki/SprintCoderSetup
-- both the workflow description and the details of setting up your
environment.
Best regards,
The IETF tools team (via the $program script)
EOF
}
# Fetch the signup table for IETF meeting $1 from the Trac wiki, extract
# the login/email/name triples, and store them in sprint<n>.txt.
function get_sprinters_info() {
local n=$1
curl -L -s "https://trac.ietf.org/trac/ietfdb/wiki/IETF${n}SprintSignUp?format=txt" | \
grep -E "^\|\|" | \
grep -Ev "^\|\|\s+\|\|\s+" | \
tail -n +2 | \
python3 sprintcoders.py | \
update "$progdir/sprint${n}.txt"
}
# ----------------------------------------------------------------------
# Abort on any command failure, reporting the failing line number.
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] "$0" $*)"; exit 1' ERR
# ----------------------------------------------------------------------
# Option parsing
# Options
short_options=hm:M:nsvV
long_options=help,meeting=,message=,dry-run,sprint,verbose,version
# Default values
num=""
msg=""
do=""
# GNU getopt (Linux) supports long options; BSD/Darwin getopt does not.
if [ "$(uname)" = "Linux" ]; then
# shellcheck disable=SC2086
# shellcheck disable=SC2048
if ! args=$(getopt -o $short_options --long $long_options -n $program -- $SV $*); then
die "Terminating..." >&2
exit 1
fi
# shellcheck disable=SC2086
eval set -- $args
sed="sed -r"
else
# Darwin, BSDs
# shellcheck disable=SC2086
# shellcheck disable=SC2048
if ! args=$(getopt -o$short_options $SV $*); then
die "Terminating..." >&2
exit 1
fi
# shellcheck disable=SC2086
set -- $args
sed="sed -E"
fi
while true; do
case "$1" in
-h | --help)
usage
exit
;; # Show this help, then exit
-m | --meeting)
num=$2
shift
;; # Specify the IETF meeting number
-M | --message)
msg=$2
shift
;; # Specify extra message text
-n | --dry-run) do="echo -- ==>" ;; # Only show what would be done
-s | --sprint) SPRINT=1 ;; # Make branches for sprint sign-ups
-v | --verbose) VERBOSE=1 ;; # Be more talkative
-V | --version)
version
exit
;; # Show program version, then exit
--)
shift
break
;;
*) die "Internal error, inconsistent option specification: '$1'" ;;
esac
shift
done
# ----------------------------------------------------------------------
# The program itself
who=""
# Find the most recent dev tag; new branches are copied from it.
tag=$(svn log -v ${svn_url_base}/tags/dev/ --limit 1 | grep '/tags/' | awk '{print $2}')
source="${tag:1}"
target="${tag##*/}"
rev="dev tag $target"
[ "$1" ] && who="$1"
[ "$2" ] && target="${target%.dev*}-$2"
if [ -z "${who}${SPRINT}" ]; then die "Specify either individual developer name or --sprint"; fi
cd $progdir || exit
# Single-developer mode: make just that one dir and branch.
if [ "$who" ]; then
mksvndir "$who"
mksvntarget "$who" "$target" "$source"
fi
# Sprint mode: branch for every participant in the recent signup pages.
if [ "$SPRINT" ]; then
[ "$msg" ] && msg="
$msg
"
# Default to the current IETF meeting number when -m was not given.
[ "$num" ] || num=$(curl -L -s "https://tools.ietf.org/meta/current-ietf-number.txt")
for n in $(seq $((num - 3)) "$num"); do
get_sprinters_info "$n"
done
# shellcheck disable=SC2046
# shellcheck disable=SC2012
# shellcheck disable=SC2162
# sed -E (regexp extended) breaks this usage on MacOS 10.15, so back to regular sed.
cat $(ls $progdir/sprint*.txt | tail -n 2) $progdir/extras.txt | \
sed -e 's/[ \t]*$//' -e 's/[ \t]+/ /g' | \
sort | uniq | \
while read login email name; do
echo ""
echo "$login ($name <$email>):"
mksvndir "$login"
mksvntarget "$login" "$target" "$source" "$email" "$name"
done
fi

View file

@ -1,132 +0,0 @@
#!/bin/bash
# -*- indent-with-tabs: 1 -*-
# mkpatch -- create a dated patch file from svn diff output, either for a
# list of changed files or for one changeset (-c REV).
version=0.10
program=${0##*/}
progdir=${0%/*}
# When $0 has no directory component, ${0%/*} equals $0; fall back to '.'.
if [ "$progdir" = "$program" ]; then progdir="."; fi
# ----------------------------------------------------------------------
# Emit manpage-style help.  The OPTIONS section is scraped from this
# script's own option-parsing 'case' lines and their trailing comments.
function usage() {
cat <<EOF
NAME
$program - given a list of changed files, create a patch diff
SYNOPSIS
$program [OPTIONS] PATHS
DESCRIPTION
Given a list of changed file, run svn diff to create a patch
suitable for the patch command, named with the current date and
the given patch name.  Place this in the local patch directory.
EOF
echo -e "OPTIONS"
if [ "$(uname)" = "Linux" ]; then
egrep "^[	]+[-][A-Za-z| -]+\*?\)[	]+[A-Za-z].+#" $0 | tr -s "\t|" "\t," | sed -r -e 's/\)[ \t]+([A-Z]+)=\$2[^#]*#/=\1\t/' -e 's/\)[^#]*#/\t/'
else
egrep "^[	]+[-][A-Za-z| -]+\*?\)[	]+[A-Za-z].+#" $0 | sed 's/\|.*\$2[^#]*#/	/'| sed -E 's/\|.*\)[^#]*#/	/'
fi
cat <<EOF
AUTHOR
Written by Henrik Levkowetz, <henrik@tools.ietf.org>
COPYRIGHT
Copyright 2013 Henrik Levkowetz.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version. There is NO WARRANTY; not even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
EOF
}
# ----------------------------------------------------------------------
# Print an error message to stderr and abort.
function die() {
echo -e "\n$program: error: $*" > /dev/stderr
exit 1
}
# Print a message only when --verbose was given.
function note() {
if [ -n "$VERBOSE" ]; then echo -e "$*"; fi
}
# ----------------------------------------------------------------------
function version() {
echo -e "$program $version"
}
# ----------------------------------------------------------------------
# Abort on any command failure, reporting the failing line number.
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# ----------------------------------------------------------------------
# Option parsing
# Options
shortopts=c:n:or:hvV
longopts=change=,name=,overwrite,revision=,help,verbose,version
# Default values
# GNU getopt (Linux) supports long options; BSD/Darwin getopt does not.
if [ "$(uname)" = "Linux" ]; then
args=$(getopt -o "$shortopts" --long "$longopts" -n "$program" -- $SV "$@")
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
eval set -- "$args"
sed="sed -r"
date="date -d"
else
# Darwin, BSDs
args=$(getopt -o$shortopts $SV $*)
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
set -- $args
sed="sed -E"
date="date -j -f %Y-%m-%d"
fi
while true ; do
case "$1" in
-c| --change) CHG="$2"; shift;; # Use the change made by revision ARG
-n| --name) NAME="$2"; shift;; # Patch name
-o| --overwrite) OVER=1;; # Overwrite any existing patch file
-h| --help) usage; exit;; # Show this help, then exit
-v| --verbose) VERBOSE=1;; # Be more talkative
-V| --version) version; exit;; # Show program version, then exit
--) shift; break;;
*) die "Internal error, inconsistent option specification: '$1'";;
esac
shift
done
# ----------------------------------------------------------------------
# The program itself
if [ "$CHG" ]; then
if [ "$NAME" ]; then
name="${NAME//_/-}-c$CHG"
else
name=$(echo $(svn log -c $CHG | sed -r -e '/^---/d' -e '/^r[0-9]+/d' -e '/^$/d' -e 's/Merged in \[[0-9]+\] from [^:]+..//' ) | sed -r -e 's/(.*)/\L\1/' -e 's/[^[:alnum:]]/-/g' -e 's/-+/-/g' -e's/-$//' | cut -c 1-40)
name="$name-c$CHG"
fi
else
if [ "$NAME" ]; then
if [ $# -lt 1 ]; then die "Expected file list on the command line."; fi
name="${NAME//_/-}"
else
die "Please use the -n switch to provide a patch name"
fi
fi
patchfile=$progdir/../../patches/$(date +%Y-%m-%d)-$name.patch
if [ -e $patchfile -a ! -n "$OVER" ]; then die "Patchfile $patchfile already exists"; fi
svn diff ${CHG:+ -c $CHG} ${REV:+ -r $REV} "$@" > $patchfile
less $patchfile
echo ""
echo ""
echo "Patch is in $patchfile"

View file

@ -1,348 +0,0 @@
#!/bin/bash
# mkrelease -- drive the svn release procedure (tag, branch, version bumps).
version=0.10
program=${0##*/}
progdir=${0%/*}
# If $0 had no path component, ${0%/*} equals $0; fall back to ".".
if [ "$progdir" = "$program" ]; then progdir="."; fi
# Make sibling helper scripts reachable.
PATH="$PATH:$progdir"
# ----------------------------------------------------------------------
# Print the manual page.  The OPTIONS section is generated by grepping this
# script's own getopt case branches, so the help stays in sync with the code.
function usage() {
cat <<EOF
NAME
$program - Make a release
SYNOPSIS
$program [OPTIONS] VERSION
DESCRIPTION
Do the sequence of actions necessary to properly produce a release
branch. This includes updating the project version and committing that
to the repository, creating a release tag and a release branch if
needed, and updating the project version again to indicate that any
further commits are development work. Requires 1 argument: the VERSION
number (e.g., 1.23).
The script uses svn info to retrieve information about the repository
and path of the current directory, and inspects that to determine
exactly what to do. If the current path relative to the repository root
starts with 'trunk', then a new branch is created named
branch/\$VERSION. If the current path starts with something else
than 'trunk', it is assumed to be a working branch, and no new branch is
created. In either case, a copy of the current working copy is created
in tags/\$VERSION.
EOF
echo -e "OPTIONS"
# Self-grep: extract '-x| --long) ... # description' lines from this file
# and reformat them as an options table (GNU vs BSD sed syntax differs).
if [ "$(uname)" = "Linux" ]; then
egrep "^[ ]+[-][A-Za-z| -]+\*?\)[ ]+[A-Za-z].+#" $0 | tr -s "\t|" "\t," | sed -r -e 's/\)[ \t]+([A-Z]+)=\$2[^#]*#/=\\1\t/' -e 's/\)[^#]*#/\t/'
else
egrep "^[ ]+[-][A-Za-z| -]+\*?\)[ ]+[A-Za-z].+#" $0 | sed 's/\|.*\$2[^#]*#/ /'| sed -E 's/\|.*\)[^#]*#/ /'
fi
cat <<EOF
AUTHOR
Written by Henrik Levkowetz, <henrik@levkowetz.com>
COPYRIGHT
Copyright 2007 The IETF Trust.
EOF
}
# ----------------------------------------------------------------------
# Print an error message to stderr and terminate with exit status 1.
function die() {
echo -e "\n$program: error: $*" > /dev/stderr
exit 1
}
# Emit a progress message, but only when --verbose was given.
function say() {
if [ -n "$VERBOSE" ]; then echo -e "$*"; fi
}
# Like say(), but preceded by a blank line to mark a new phase.
function note() {
if [ -n "$VERBOSE" ]; then echo -e "\n$*"; fi
}
# Verify that command $1 is on PATH; die with the install hint $2 otherwise.
function check() {
[ "$(which $1)" ] || die "could not find the '$1' command. $2"
}
# ----------------------------------------------------------------------
# Print the program name and its version number.
function version() {
echo -e "$program $version"
}
# ----------------------------------------------------------------------
# Abort with context (script name, line, exit code, pid, args) on any error.
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# ----------------------------------------------------------------------
# Option parsing
# Options
shortopts=hmnipvV
longopts=help,message,dry-run,ignore-resources,permit-migr-mix,verbose,version
# Default values
MSG=""
PROJ=ietfdb
VERFILE=ietf/__init__.py
SETTINGS=ietf/settings.py
PERMIT_MIGR_MIX=""
IGNORE_RESOURCES=""
# '$do' prefixes every state-changing command; --dry-run sets it to 'echo ==>'
# so the commands are printed instead of executed.
do=""
# GNU getopt (Linux) understands long options; BSD/Darwin getopt does not.
if [ "$(uname)" = "Linux" ]; then
args=$(getopt -o "$shortopts" --long "$longopts" -n '$program' -- $SV "$@")
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
eval set -- "$args"
sed="sed -r"
else
# Darwin, BSDs
args=$(getopt -o$shortopts $SV $*)
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
set -- $args
sed="sed -E"
fi
while true ; do
case "$1" in
-h| --help) usage; exit;; # Show this help, then exit
-m| --message) MSG=$2; shift;; # Specify a commit message
-n| --dry-run) do="echo ==>";; # Show what would be done
-i| --ignore-resources) IGNORE_RESOURCES=1;; # Don't try to update resources
-p| --permit-migr-mix) PERMIT_MIGR_MIX=1;; # Permit mixed schema and data migrations
-v| --verbose) VERBOSE=1;; # Be more talkative
-V| --version) version; exit;; # Show program version, then exit
--) shift; break;;
*) die "Internal error, inconsistent option specification: '$1'";;
esac
shift
done
# ----------------------------------------------------------------------
# Check some requirements
check bower "It is required to update web resources. Install with npm."
# ----------------------------------------------------------------------
# The program itself
ARGMIN=1
if [ $# -lt $ARGMIN ]; then
usage
die "$# arguments found, $ARGMIN required"
fi
# Releaser's real name: env override, then the passwd GECOS field.
[ -z "$by" ] && by=${RELEASER_REAL_NAME}
[ -z "$by" ] && by=$(getent passwd $(whoami) | cut -d ':' -f 5 | tr -d ',')
[ -z "$by" ] && die "Can't determine the real name of the user running this script"
VER=$1
# Derive repository root, last-change date and our path within the repo
# from 'svn info' of the current directory.
REPO=$(svn info | grep "^Repository Root:" | awk '{ print $3 }')
RDATE=$(svn info | grep "^Last Changed Date:" | awk '{ print $4 "T" $5 $6 }')
RURL=$(svn info | grep "^URL:" | awk '{ print $2 }')
RDIR=${RURL#$REPO}
DIR=${RDIR#/}
if [ -z "$DIR" ]; then
die "Couldn't find anything to release here"
elif [ "${DIR%%/*}" = "trunk" ]; then
SRC="trunk"
elif [ "${DIR%%/*}" = "branch" ]; then
tmp=${DIR#*/} # get rid of 'branch/'
SRC="branch/${tmp%%/*}" # keep first subdir under branch/
fi
note "Releasing from $SRC"
note "Locating the root of the working copy ..."
# Walk up with 'cd ..' until the in-repo path equals the release source path.
while [ "${#DIR}" -gt "${#SRC}" ]; do
[ "$DIR" = "$prev" ] && die "Internal error"
cd ..
#note "   now at $PWD"
prev=$DIR
DIR=${DIR%/*}
done
if [ "$DIR" != "$SRC" ]; then
die "Couldn't find the root of your '$SRC' working copy"
fi
say " $DIR"
REPO=${REPO%/} # remove trailing slash
SRC=${SRC#/} # remove leading slash
# Normalize VERSION into MAJOR.MINOR.MAINT, and compute the follow-on
# development version MAJOR.MINOR.(MAINT+1).dev0.
MAJOR=${VER%%.*}
REST=${VER#*.}
MINOR=${REST%%.*}
MAINT=${REST#*.}
VER="$(printf %d.%d.%d $MAJOR $MINOR $MAINT)"
NEXT=$(( $MAINT + 1 ))
DEV="$(printf %d.%d.%d.dev0 $MAJOR $MINOR $NEXT)"
#cd $DIR ??
note "Checking that changelog information is available ..."
changes=$( sed -n "/^$PROJ ($VER.*)/,/^ -- /p" changelog )
[ "$changes" ] || die "No changelog information for $VER found"
#note "$changes"
note "Checking for migrations not in SVN"
$do svn st | grep "^[?] .*/migrations/[0-9].*\.py$" && die "There seems to be new migrations which aren't in SVN"
# Refuse (unless -p) to release a mix of schema and data migrations: the
# migration files added since the previous release are classified by
# grepping for RunPython (data) vs Add/Alter/...Field/Model (schema).
if [ -z "$PERMIT_MIGR_MIX" ]; then
note "Checking that we don't have both schema and data migrations ..."
cur=$(svn info | awk '/^Revision:/ { print $2 }')
migrations=$(svn log $PWD -v -r HEAD:$((cur-100)) | sed -n -e '1,/^Set version info and settings back to development mode/p' | grep '^...A /.*/migrations/0.*.py' | cut -c6- | awk '{ print $1 }' | sed -re 's|/trunk/||')
if [ -n "$migrations" ]; then
datamigr=$(for m in "$migrations"; do egrep -sl 'migrations\.RunPython' $m; done || true)
schemamigr=$(for m in "$migrations"; do egrep -sl 'migrations\.(Add|Alter|Create|Delete|Remove|Rename)(Field|Model|UniqueTogether)' $m; done || true)
if [ -n "$datamigr" -a -n "$schemamigr" ]; then
echo -e "\n   Schema migrations:"
for m in $schemamigr; do
echo "	$m"
done
echo -e "\n   Data migrations:"
for m in $datamigr; do
echo "	$m"
done
die "\n   Found both data migrations and schema migrations in this release.\n   This is likely to cause delay between schema changes and deployment,\n   which means the old code will run on the new schema longer than necessary."
fi
fi
fi
# Refresh bower-managed web assets and commit them, unless -i was given.
if [ -z "$IGNORE_RESOURCES" ]; then
note "Updating bower assets ..."
$do ietf/manage.py bower_install > .mkrelease-bower-install.log
$do rm .mkrelease-bower-install.log # This happens at once unless the previous command returns error
$do svn st ietf/externals/static | grep -v '^\?' || true
$do svn commit ietf/externals/static -m "Updated bower-managed static web assets"
# Get rid of bower-installed files which we don't use:
$do rm -rf ietf/externals/static/datatracker/
$do rm -rf ietf/externals/static/jquery.cookie/
$do rm -f $(svn st ietf/externals/ | grep '^\?' | awk '{print $2}')
fi
note "Collecting static files ..."
$do ietf/manage.py collectstatic --noinput --ignore=bower.json --ignore='README.*' --ignore=rev | grep -v "Found another file with the destination path"
#$do svn commit static/lib/ -m "Updated static files under static/lib/"
# note "Checking that there's a recent test-crawler log"
# touch -d $RDATE .svn/.latest-commit
# TCLOG=$(ls -t ../test-crawl-*.log | head -n 1)
# [ $TCLOG -nt .svn/.latest-commit ] || die "Looked for ../test-crawl-*.log, but didn't find one newer than the latest repository commit ($RDATE)"
note "Upgrading the python library modules before checking migrations and running tests ..."
$do pip install --upgrade -r requirements.txt
$do pip check
note "Checking that all model changes have been captured in migrations ..."
$do ietf/manage.py makemigrations | tee /dev/stderr | $do grep -q "^No changes detected$" || die "Model changes without migrations found."
note "Making sure all migrations have been run ..."
$do ietf/manage.py migrate
note "Running the tests suite and writing release coverage data ..."
$do ietf/manage.py test --settings=ietf.settings_releasetest --save-version-coverage=$VER ${PERMIT_MIGR_MIX:+"--permit-mixed-migrations"}
note "Committing the release coverage data ..."
$do svn commit release-coverage.json.gz -m "Code coverage data for release $VER"
if [ -d ../coverage ]; then
cp .coverage ../coverage/$VER.coverage
rsync -a static/coverage/ ../coverage/$VER/
fi
# Contributors are pulled from 'Merged in [NNN] from <who>' changelog lines.
contributors=$(echo "$changes" | gawk '/^ \* Merged in \[[0-9]+\] from [^: ]+/ {sub(":",""); print $6;}' | sort | uniq)
note "Contributors:
$contributors"
note "Setting the current time on the release notes in the changelog file ..."
$do sed -r -i -e "1,/^ -- /s/([A-Za-z-]+ <[a-z0-9.-]+@[a-z0-9.-]+>  ).*$/\1$(TZ=UTC date +'%d %b %Y %H:%M:%S %z')/" changelog
say " $(grep -m1 "^ -- " changelog)"
note "Verifying that version $VER doesn't already exist ..."
$do svn info $REPO/tags/$VER 2>&1 | $do egrep -q "(Not a valid URL|URL .* non-existent)" || die "The tag '$VER' already exists (or there was an error testing for it)."
say " Ok"
note "Committing the changelog ..."
$do svn commit changelog -m "Changelog entry for $VER"
note "Verifying there's no uncommitted changes ..."
$do svn st | grep "^[AMGRD] " && die "There seems to be uncommitted changes in this working copy"
# Stamp the release version into ietf/__init__.py ...
note "Updating the version info in $VERFILE and making sure'\$Rev\$' is Ok ..."
$do sed -i -r -e "/^__version__/s/\"[.0-9]+(dev[0-9]+)?\"/\"$VER\"/" \
-e "/^__rev__/s/\".*\"/\"\$Rev:\$\"/" \
$VERFILE
# ... and flip settings.py to production mode for the tagged copy.
note "Updating the deployment settings in settings.py"
$do sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = False/' \
-e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'production'/" \
$SETTINGS
note "Committing version information for version $VER: $MSG ..."
$do svn commit $VERFILE $SETTINGS -m "Set version info to release version $VER before branching. $MSG"
note "Creating new tag 'tags/$VER' from $SRC ..."
$do svn cp $REPO/$SRC $REPO/tags/$VER -m "Creating new tag 'tags/$VER' from $SRC"
# Switch the working source back to development mode and tag it as dev.
note "Updating version and revision info to indicate that the source and branch aren't releases ..."
$do sed -i -r -e "/^__version__/s/\"[0-9.]*\"/\"$DEV\"/" \
-e "/^__rev__/s/\"\\\$Rev: (.*) \\\$\"/\"\$Rev:\$ (dev) Latest release: Rev. \1 \"/" \
$VERFILE
note "Updating the deployment settings in settings.py to development mode ..."
$do sed -i -r -e 's/^DEBUG *= *.*$/DEBUG = True/' \
-e "s/^SERVER_MODE *= *.*\$/SERVER_MODE = 'development'/" \
$SETTINGS
note "Committing the updated version and deployment settings ..."
$do svn commit $VERFILE $SETTINGS -m "Set version info and settings back to development mode"
note "Creating new tag 'tags/dev/$DEV' from $SRC ..."
$do svn cp $REPO/$SRC $REPO/tags/dev/$DEV -m "Creating new tag 'tags/dev/$DEV' from $SRC"
$do svn update -q
# Compose and send the release announcement mail.
[ -d ~/src/db/mail ] || mkdir -p ~/src/db/mail
echo "
Hi,
This is an automatic notification about a new datatracker release,
v$VER, generated when running the mkrelease script.
Release notes:
$changes
The new version is available for installation through SVN checkout, with
'svn checkout https://svn.ietf.org/svn/tools/$PROJ/tags/$VER'
For development, copy the new development version instead:
'svn copy https://svn.ietf.org/svn/tools/$PROJ/tags/dev/$DEV' <YOURBRANCH>
Regards,
$by
(via the mkrelease script)
" > ~/src/db/mail/release-mail-v$VER.txt
SEND_ARGS=""
[ "${RELEASER_EMAIL}" ] && SEND_ARGS="-r ${RELEASER_EMAIL}"
cat ~/src/db/mail/release-mail-v$VER.txt | $do mail "${SEND_ARGS}" -s "New datatracker release: v$VER" housley@vigilsec.com rjsparks@nostrum.com krathnayake@ietf.org -c jay@ietf.org -c glen@amsl.com -c maddy@amsl.com -c cmorgan@amsl.com -c avezza@amsl.com -c amorris@amsl.com -c smccammon@amsl.com -c kmoreland@amsl.com $contributors
cat ~/src/db/mail/release-mail-v$VER.txt | $do mail "${SEND_ARGS}" -s "New datatracker release: v$VER" iesg@ietf.org wgchairs@ietf.org codesprints@ietf.org -b rjsparks@nostrum.com
# Removed at Henrik's suggestion
#$do toolsfeed control changelog /www/tools.ietf.org/tools/atomfeed.xml
#$do toolpush /www/tools.ietf.org/tools/atomfeed.xml

View file

@ -1,21 +0,0 @@
import sys, re

# Read "aliases" (one "<email> <login>" pair per line) into a dict, then
# rewrite "||"-separated committer records from stdin as TAB-separated
# "login<TAB>email<TAB>name" lines, preferring the aliased login when known.
with open("aliases") as afile:
    # Read the file exactly once; a file object can only be consumed a
    # single time, so keep the parsed pairs for the error report as well.
    # (The previous code re-read the exhausted file in the except branch,
    # yielding [], and passed a list to stderr.write(), raising TypeError
    # and masking the original error.)
    pairs = [line.strip().split(None, 1)
             for line in afile.read().splitlines() if line.strip()]
try:
    aliases = dict(pairs)
except ValueError:
    # A line without both fields yields a 1-element list and breaks dict();
    # show the parsed data (as a string) before re-raising.
    sys.stderr.write(repr(pairs) + "\n")
    raise
for line in sys.stdin:
    try:
        # Records look like "||name||email||rest"; the leading field is empty.
        blank, name, email, rest = line.strip().split("||", 3)
        email = email.strip()
    except ValueError:
        sys.stderr.write(line + "\n")
        raise
    # Default login: the address part before the first '@' or '.'.
    login, dummy = re.split("[@.]", email, maxsplit=1)
    if email in aliases:
        login = aliases[email]
    print("\t".join((login.strip().lower(), email.strip().lower(), name.strip())))

View file

@ -1,21 +0,0 @@
# Copyright The IETF Trust 2015-2020, All Rights Reserved
# -*- python; coding: utf-8 -*-
# Uncomment and set passwords below to match those set for the workers, then
# save as "buildbot_passwords.py"
# datatracker_lin_py27_1_pw = "<password>"
# datatracker_lin_py27_2_pw = "<password>"
# datatracker_lin_py27_3_pw = "<password>"
# datatracker_osx_py27_4_pw = "<password>"
# datatracker_lin_py27_5_pw = "<password>"
# datatracker_lin_py27_6_pw = "<password>"
# ietfdb_svn_hook_pw = "<password>"
#
# datatracker_lin_py36_1_pw = "<password>"
# datatracker_lin_py36_2_pw = "<password>"
# datatracker_lin_py36_3_pw = "<password>"
# datatracker_lin_py36_4_pw = "<password>"
# datatracker_lin_py36_5_pw = "<password>"
# datatracker_lin_py36_6_pw = "<password>"

View file

@ -1,183 +0,0 @@
# Copyright The IETF Trust 2015-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import re
from buildbot.plugins import steps
class TestCrawlerShellCommand(steps.WarningCountingShellCommand):
    """Buildbot step that runs ``bin/test-crawl`` and summarizes its log.

    Every log line is matched against ``warningPatterns``; the number of
    matches per key becomes a step statistic and a build property.  The
    last line matching ``logline`` supplies elapsed time and page count.
    """
    name = "testcrawl"
    haltOnFailure = 1
    flunkOnFailure = 1
    descriptionDone = ["test crawler"]
    command=["bin/test-crawl"]
    # Substring/regex patterns counted per log line.
    warningPatterns = {
        "exceptions":   "^(Traceback|  File|    |.*Error|.*Exception)",
        "failed":       " FAIL ",
        "warnings":     " WARN",
        "slow":         " SLOW",
        "invalid_html": " invalid html:",
    }
    # Crawler progress line: elapsed pages queue result runtime message.
    # Raw string: '\d' in a plain string is an invalid escape sequence
    # (SyntaxWarning on Python 3.12+).
    logline = r"^ *(?P<elapsed>\d+:\d+:\d+) +(?P<pages>\d+) +(?P<queue>\d+) +(?P<result>\d+) +(?P<runtime>\d+.\d+)s +(?P<message>.+)"
    def setTestResults(self, **kwargs):
        """
        Called by subclasses to set the relevant statistics; this actually
        adds to any statistics already present
        """
        for kw in kwargs:
            value = kwargs[kw]
            if value.isdigit():
                # Counter
                value = int(value)
                value += self.step_status.getStatistic(kw, 0)
            elif re.search(r"^[0-9]+\.[0-9]+$", value):
                # Runtime
                value = float(value)
                value += self.step_status.getStatistic(kw, 0)
            else:
                # This is a percentage, and we can't add them
                pass
            self.step_status.setStatistic(kw, value)
    def createSummary(self, log):
        """
        Match log lines against warningPattern.

        Warnings are collected into another log for this step, and the
        build-wide 'warnings-count' is updated."""
        warnings = {}
        wregex = {}
        regex_class = re.compile("").__class__
        # Compile the progress-line pattern once, lazily.
        if not isinstance(self.logline, regex_class):
            self.logline = re.compile(self.logline)
        for key in self.warningPatterns:
            warnings[key] = []
            pattern = self.warningPatterns[key]
            if not isinstance(pattern, regex_class):
                wregex[key] = re.compile(pattern)
            else:
                wregex[key] = pattern
        # Count matches to the various warning patterns
        last_line = None
        for line in log.getText().split("\n"):
            for key in wregex:
                match = re.search(wregex[key], line)
                if match:
                    warnings[key].append(line)
            if re.search(self.logline, line):
                last_line = line
        # If there were any warnings, make the log if lines with warnings
        # available
        for key in warnings:
            if len(warnings[key]) > 0:
                self.addCompleteLog("%s (%d)" % (key, len(warnings[key])),
                                    "\n".join(warnings[key]) + "\n")
                self.step_status.setStatistic(key, len(warnings[key]))
                self.setProperty(key, len(warnings[key]), "TestCrawlerShellCommand")
        if last_line:
            # Pull elapsed time and page count out of the final progress line.
            match = re.search(self.logline, last_line)
            for key in ['elapsed', 'pages']:
                info = match.group(key)
                self.step_status.setStatistic(key, info)
                self.setProperty(key, info, "TestCrawlerShellCommand")
    def describe(self, done=False):
        """Extend the base description with the collected statistics."""
        description = steps.WarningCountingShellCommand.describe(self, done)
        if done:
            description = description[:]  # make a private copy
            for name in ["time", "elapsed", "pages", "failed", "warnings", "slow", "invalid_html", ]:
                if name in self.step_status.statistics:
                    value = self.step_status.getStatistic(name)
                    displayName = name.replace('_', ' ')
                    # special case. Mph.
                    if type(value) is float: # this is run-time
                        description.append('%s: %.2fs' % (displayName, value))
                    elif type(value) is int:
                        description.append('%s: %d' % (displayName, value))
                    else:
                        description.append('%s: %s' % (displayName, value))
        return description
class DjangoTest(steps.WarningCountingShellCommand):
    """Buildbot step that runs the Django test suite and parses its output.

    ``regexPatterns`` extract counts (tests run, skipped, failed, errors),
    the run time, and the coverage percentages from the test runner's
    output; they are accumulated as step statistics via setTestResults().
    """
    name = "test"
    warnOnFailure = 1
    description = ["testing"]
    descriptionDone = ["test"]
    command = ["manage.py", "test", ]
    # Raw strings: '\d' in a plain string is an invalid escape sequence
    # (SyntaxWarning on Python 3.12+).
    regexPatterns = {
        "tests":             r"Ran (\d+) tests in [0-9.]+s",
        "time":              r"Ran \d+ tests in ([0-9.]+)s",
        "skipped":           r"(?:OK|FAILED).*skipped=(\d+)",
        "failed":            r"FAILED.*failures=(\d+)",
        "errors":            r"FAILED.*errors=(\d+)",
        "template_coverage": r" +Template coverage: +([0-9.]+%)",
        "url_coverage":      r" +Url coverage: +([0-9.]+%)",
        "code_coverage":     r" +Code coverage: +([0-9.]+%)",
    }
    def setTestResults(self, **kwargs):
        """
        Called by subclasses to set the relevant statistics; this actually
        adds to any statistics already present
        """
        for kw in kwargs:
            value = kwargs[kw]
            if value.isdigit():
                # Counter
                value = int(value)
                value += self.step_status.getStatistic(kw, 0)
            elif re.search(r"^[0-9]+\.[0-9]+$", value):
                # Runtime
                value = float(value)
                value += self.step_status.getStatistic(kw, 0)
            else:
                # This is a percentage, and we can't add them
                pass
            self.step_status.setStatistic(kw, value)
    def createSummary(self, log):
        """Scan the whole log for the patterns above and record matches."""
        info = {}
        for line in log.getText().split("\n"):
            for key in self.regexPatterns:
                regex = self.regexPatterns[key]
                match = re.search(regex, line)
                if match:
                    info[key] = match.group(1)
        self.setTestResults(**info)
    def describe(self, done=False):
        """Extend the base description with pass/fail/coverage statistics."""
        description = steps.WarningCountingShellCommand.describe(self, done)
        if done:
            description = description[:]  # make a private copy
            # 'passed' is derived from the other counters.
            self.step_status.statistics["passed"] = (
                self.step_status.getStatistic("tests",0) -
                self.step_status.getStatistic("skipped",0) -
                self.step_status.getStatistic("failed",0) -
                self.step_status.getStatistic("errors",0))
            for name in ["time", "tests", "passed", "skipped", "failed", "errors", "template_coverage", "url_coverage", "code_coverage", ]:
                if name in self.step_status.statistics:
                    value = self.step_status.getStatistic(name)
                    displayName = name.replace('_', ' ')
                    # special case. Mph.
                    if displayName == 'template coverage':
                        displayName = 'templ. coverage'
                    if type(value) is float: # this is run-time
                        description.append('%s: %.2fs' % (displayName, value))
                    elif type(value) is int:
                        description.append('%s: %d' % (displayName, value))
                    else:
                        description.append('%s: %s' % (displayName, value))
        return description

View file

@ -1,426 +0,0 @@
# -*- python -*-
# ex: set filetype=python:
from os.path import expanduser as expandtilde
from buildbot.plugins import worker, changes, schedulers, util, steps
import buildbot_passwords
import custom_steps
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
####### SETTINGS
# For miscellaneous settings, see MISC. SETTINGS at the bottom of the file
####### WORKERS
# The 'workers' list defines the set of recognized workers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
# Passwords live in the untracked buildbot_passwords.py module.
c['workers'] = [
    worker.Worker("datatracker_lin_py36_1", buildbot_passwords.datatracker_lin_py36_1_pw),
    worker.Worker("datatracker_lin_py36_2", buildbot_passwords.datatracker_lin_py36_2_pw),
    worker.Worker("datatracker_lin_py36_3", buildbot_passwords.datatracker_lin_py36_3_pw),
#    worker.Worker("datatracker_lin_py36_4", buildbot_passwords.datatracker_lin_py36_4_pw),
    worker.Worker("datatracker_lin_py36_5", buildbot_passwords.datatracker_lin_py36_5_pw),
    worker.Worker("datatracker_lin_py36_6", buildbot_passwords.datatracker_lin_py36_6_pw),
]
# 'protocols' contains information about protocols which master will use for
# communicating with workers. You must define at least 'port' option that workers
# could connect to your master with this protocol.
# 'port' must match the value configured into the workers (with their
# --master option)
c['protocols'] = {'pb': {'port': 9989}}
####### CHANGESOURCES
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot version of a python hello-world project.
# Changes are pushed to us by the ietfdb svn post-commit hook via PB.
c['change_source'] = [
    changes.PBChangeSource(user='ietfdb', passwd=buildbot_passwords.ietfdb_svn_hook_pw),
]
####### SCHEDULERS
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'runtests' build
# (OS X schedulers/builders are currently disabled — see the commented entries.)
c['schedulers'] = [
    # Branch schedulers
    schedulers.SingleBranchScheduler(name="pyflakes", treeStableTimer=10, builderNames=["Check PyFlakes"],
        change_filter=util.ChangeFilter(branch='trunk')),
    schedulers.SingleBranchScheduler(name="lin_test", treeStableTimer=60*5, builderNames=["Test Suite"],
        change_filter=util.ChangeFilter(branch='trunk')),
#    schedulers.SingleBranchScheduler(name="osx_test", treeStableTimer=60*5, builderNames=["Test Suite (OS X)"],
#        change_filter=util.ChangeFilter(branch='trunk')),
    #
    schedulers.AnyBranchScheduler(name="pyflakes_branch", treeStableTimer=10, builderNames=["[branch] Check PyFlakes"],
        change_filter=util.ChangeFilter(branch_re='branch/.*')),
#    schedulers.AnyBranchScheduler(name="lin_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite"],
#        change_filter=util.ChangeFilter(branch_re='branch/.*')),
#    schedulers.AnyBranchScheduler(name="osx_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite (OS X)"],
#        change_filter=util.ChangeFilter(branch_re='branch/.*')),
    #
    schedulers.AnyBranchScheduler(name="pyflakes_personal",treeStableTimer=10, builderNames=["[personal] Check PyFlakes"],
        change_filter=util.ChangeFilter(branch_re='personal/.*')),
    schedulers.AnyBranchScheduler(name="lin_test_personal",treeStableTimer=60*5, builderNames=["[personal] Test Suite"],
        change_filter=util.ChangeFilter(branch_re='personal/.*')),
    # Periodic Schedulers
    schedulers.Nightly(name="lin_test_old_libs", hour=16, minute=12, branch="trunk", builderNames=["Verify Minimum Libs"],),
    schedulers.Nightly(name="lin_test_libs", hour=16, minute=42, branch="trunk", builderNames=["Verify Latest Libs"],),
    schedulers.Nightly(name="crawler", hour=9, minute=00, branch="trunk", onlyIfChanged=True, builderNames=["Test-Crawler"],),
    # schedulers.Force schedulers
    schedulers.ForceScheduler(name="force_pyflakes", builderNames=["Check PyFlakes"]),
    schedulers.ForceScheduler(name="force_lin_test", builderNames=["Test Suite"]),
#    schedulers.ForceScheduler(name="force_osx_test", builderNames=["Test Suite (OS X)"]),
    schedulers.ForceScheduler(name="force_test_crawler", builderNames=["Test-Crawler"]),
    #
    schedulers.ForceScheduler(name="force_pyflakes_branch", builderNames=["[branch] Check PyFlakes"]),
    schedulers.ForceScheduler(name="force_lin_test_branch", builderNames=["[branch] Test Suite"]),
#    schedulers.ForceScheduler(name="force_osx_test_branch", builderNames=["[branch] Test Suite (OS X)"]),
    #
    schedulers.ForceScheduler(name="force_pyflakes_personal", builderNames=["[personal] Check PyFlakes"]),
    schedulers.ForceScheduler(name="force_lin_test_personal", builderNames=["[personal] Test Suite"]),
]
####### BUILDERS
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.
#### Custom subclassed builder
# NOTE(review): this hello-world factory is leftover sample code; the
# variable is reassigned below before it is ever used in a BuilderConfig.
factory = util.BuildFactory()
# check out the source
factory.addStep(steps.Git(repourl='git://github.com/buildbot/hello-world.git', mode='incremental'))
# run the tests (note that this will require that 'trial' is installed)
factory.addStep(steps.ShellCommand(command=["trial", "hello"],
                                   env={"PYTHONPATH": "."}))
c['builders'] = []
# -*- section Builder_Run_pyflakes -*-
# Checks out the branch, installs settings_local.py, runs pyflakes, and on
# success sets the 'test:pyflakes' revprop on the tested revision.
factory = util.BuildFactory()
factory.addStep(steps.SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="seting up settings_local.py",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["cp", expandtilde("~/settings_local.py"), "./ietf/"],
    ))
factory.addStep(steps.PyFlakes(
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["ietf/manage.py", "pyflakes", "--verbosity=0"],
    ))
# This should be the last action
factory.addStep(steps.ShellCommand(
    descriptionDone="mark as passed",
    workdir=util.Interpolate('build/%(src::branch)s'),
    command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive",
        "propset", "--revprop", "-r", util.Property('got_revision'), "test:pyflakes", "passed" ],
    ))
# The same factory serves trunk, branch, and personal builders on
# different workers.
c['builders'].append(util.BuilderConfig(name="Check PyFlakes", factory=factory, category="1. trunk",
    workernames=["datatracker_lin_py36_1", ]))
c['builders'].append(util.BuilderConfig(name="[branch] Check PyFlakes", factory=factory, category="2. branch",
    workernames=["datatracker_lin_py36_2", ]))
c['builders'].append(util.BuilderConfig(name="[personal] Check PyFlakes", factory=factory, category="3. personal",
    workernames=["datatracker_lin_py36_3", ]))
# -*- section Builder_TestSuite -*-
# Full test-suite builder: svn update, clean, install requirements, run the
# Django test suite (sqlite settings), then mark the revision as tested by
# setting the 'test:unittest' revprop.
factory = util.BuildFactory()
factory.addStep(steps.SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')],
    ))
factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s')))
factory.addStep(steps.ShellCommand(
    descriptionDone="remove tmp-* dirs",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    # NOTE(review): the command is given as a list (no shell), so the
    # glob 'tmp-*/' is presumably not expanded — confirm intent.
    command=["rm", "-rf", "tmp-*/"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="install requirements",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["pip", "install", "-r", "requirements.txt"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="copy settings_local.py",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["cp", expandtilde("~/settings_local.py"), "./ietf/"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="collect static files",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    command=["ietf/manage.py", "collectstatic", "--noinput", ],
    ))
factory.addStep(custom_steps.DjangoTest(
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    want_stderr=True,
    command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ],
    ))
# This should come after tests
factory.addStep(steps.ShellCommand(
    descriptionDone="mark as passed",
    workdir=util.Interpolate('build/%(src::branch)s'),
    command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive",
        "propset", "--revprop", "-r", util.Property('got_revision'), "test:unittest", "passed" ],
    ))
c['builders'].append(util.BuilderConfig(name="Test Suite", factory=factory, category="1. trunk",
    workernames=["datatracker_lin_py36_1", ]))
c['builders'].append(util.BuilderConfig(name="[branch] Test Suite", factory=factory, category="2. branch",
    workernames=["datatracker_lin_py36_2", ]))
c['builders'].append(util.BuilderConfig(name="[personal] Test Suite", factory=factory, category="3. personal",
    workernames=["datatracker_lin_py36_3", ]))
# -*- section Builder_TestCrawler -*-
# Nightly crawler builder: checks out, installs requirements and settings,
# runs migrations, runs bin/test-crawl via the custom TestCrawlerShellCommand
# step, and finally sets the 'test:crawler' revprop on the tested revision.
factory = util.BuildFactory()
factory.addStep(steps.SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')],
    ))
factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s')))
factory.addStep(steps.ShellCommand(
    descriptionDone="install requirements",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["pip", "install", "-r", "requirements.txt"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="copy settings_local.py",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["cp", expandtilde("~/settings_local.py"), "./ietf/"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="run migrations",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["ietf/manage.py", "migrate"],
    ))
factory.addStep(custom_steps.TestCrawlerShellCommand(
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    want_stderr=True,
    command=["bin/test-crawl", "--settings=ietf.settings_testcrawl"],
    ))
# This should be the last action
factory.addStep(steps.ShellCommand(
    descriptionDone="mark as passed",
    workdir=util.Interpolate('build/%(src::branch)s'),
    command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive",
        "propset", "--revprop", "-r", util.Property('got_revision'), "test:crawler", "passed" ],
    ))
c['builders'].append(util.BuilderConfig(name="Test-Crawler", factory=factory, category="1. trunk",
    workernames=["datatracker_lin_py36_6", ]))
# -*- section Builder_Verify_Minimum_Libs -*-
# This build runs pip install --upgrade, to make sure that we install the earliest version of
# all dependencies, in order to get an indication if/when an incompatibility turns up with a new
# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not
# change the external test conditions and produce spurious errors because of version changes in
# dependencies.
factory = util.BuildFactory()
# Drop the previously edited requirements.txt so svn update restores a
# pristine copy (best-effort: may not exist on the first run).
factory.addStep(steps.ShellCommand(
    descriptionDone="remove tweaked requirements",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    command=["rm", "requirements.txt"],
    ))
factory.addStep(steps.SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=util.Interpolate('build/%(src::branch)s'),
    alwaysUseLatest=True,
    haltOnFailure=True,
    repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')],
    ))
factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s')))
# Pin every '>=' requirement to '==' so pip installs the *minimum*
# supported version of each dependency.
factory.addStep(steps.ShellCommand(
    descriptionDone="edit requirements",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["sed", "-i", "-e", "s/>=/==/", "requirements.txt"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="install/upgrade requirements",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["pip", "install", "--upgrade", "-r", "requirements.txt"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="seting up settings_local.py",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["cp", expandtilde("~/settings_local.py"), "./ietf/"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="list installed pyton modules",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["pip", "freeze"],
    ))
factory.addStep(steps.ShellCommand(
    descriptionDone="collect static files",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    command=["ietf/manage.py", "collectstatic", "--noinput", ],
    ))
factory.addStep(custom_steps.DjangoTest(
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ],
    ))
c['builders'].append(util.BuilderConfig(name="Verify Minimum Libs", factory=factory, category="1. trunk",
    workernames=["datatracker_lin_py36_5", ]))
# -*- section Builder_Verify_Latest_Libs -*-
# (fix: section marker typo "Veryfy" -> "Verify")
# This build runs pip install --upgrade, to make sure that we install the latest version of all
# dependencies, in order to get an indication if/when an incompatibility turns up with a new
# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not
# change the external test conditions and produce spurious errors because of version changes in
# dependencies.
factory = util.BuildFactory()
factory.addStep(steps.SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=util.Interpolate('build/%(src::branch)s'),
    alwaysUseLatest=True,
    haltOnFailure=True,
    repourl=util.Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[util.Interpolate('%(src::branch)s %(src::revision)s')],
))
factory.addStep(steps.RemovePYCs(workdir=util.Interpolate('build/%(src::branch)s')))
factory.addStep(steps.ShellCommand(
    descriptionDone="install/upgrade requirements",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["pip", "install", "--upgrade", "-r", "requirements.txt"],
))
# Fix: step description typo ("seting" -> "setting").
factory.addStep(steps.ShellCommand(
    descriptionDone="setting up settings_local.py",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["cp", expandtilde("~/settings_local.py"), "./ietf/"],
))
# Fix: step description typo ("pyton" -> "python").
factory.addStep(steps.ShellCommand(
    descriptionDone="list installed python modules",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["pip", "freeze"],
))
factory.addStep(steps.ShellCommand(
    descriptionDone="collect static files",
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    command=["ietf/manage.py", "collectstatic", "--noinput", ],
))
factory.addStep(custom_steps.DjangoTest(
    workdir=util.Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ],
))
c['builders'].append(util.BuilderConfig(name="Verify Latest Libs", factory=factory, category="1. trunk",
    workernames=["datatracker_lin_py36_5", ]))
####### BUILDBOT SERVICES
# 'services' is a list of BuildbotService items like reporter targets. The
# status of each build will be pushed to these targets. buildbot/reporters/*.py
# has a variety to choose from, like IRC bots.
c['services'] = []
####### PROJECT IDENTITY
# the 'title' string will appear at the top of this buildbot installation's
# home pages (linked to the 'titleURL').
c['title'] = "Buildbot: IETF Datatracker"
c['titleURL'] = "https://datatracker.ietf.org/"
# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server is visible. This typically uses the port number set in
# the 'www' entry below, but with an externally-visible host name which the
# buildbot cannot figure out without some help.
c['buildbotURL'] = "http://dunkelfelder.tools.ietf.org:8010/"
# minimalistic config to activate new web UI
c['www'] = {
    'port': 8010,
    'plugins': {
        'waterfall_view': True,
        'console_view': True,
        'grid_view': True,
    },
    'default_page': 'waterfall_view',
    # NOTE(review): web-UI debug mode is on and the UI credentials are
    # hard-coded in plain text here -- confirm both are intended.
    'debug': True,
    'auth': util.UserPasswordAuth({"ietfdb": "ietfdb"}),
}
####### DB URL
c['db'] = {
    # This specifies what database buildbot uses to store its state.
    # It's easy to start with sqlite, but it's recommended to switch to a dedicated
    # database, such as PostgreSQL or MySQL, for use in production environments.
    # http://docs.buildbot.net/current/manual/configuration/global.html#database-specification
    'db_url' : "sqlite:///state.sqlite",
}
####### MISC. SETTINGS
# Opt in to sending full usage statistics to the buildbot project.
c['buildbotNetUsageData'] = 'full'

View file

@ -1,716 +0,0 @@
# Copyright The IETF Trust 2015-2020, All Rights Reserved
# -*- python; coding: utf-8 -*-
# This is a buildbot config file for buildbot 0.8.14.p1 (patched to work with
# workers of version 2.7 in addition to 0.8 workers).
import re
# Slave/hook passwords are star-imported; names like *_pw below come from here.
from buildbot_passwords import *
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# -*- section BuildSlaves -*-
# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave
c['slaves'] = [
    #
    BuildSlave("dunkelfelder_lin_py36_1", dunkelfelder_lin_py36_1_pw),
    BuildSlave("dunkelfelder_lin_py36_2", dunkelfelder_lin_py36_2_pw),
    BuildSlave("dunkelfelder_lin_py36_3", dunkelfelder_lin_py36_3_pw),
    BuildSlave("dunkelfelder_lin_py36_4", dunkelfelder_lin_py36_4_pw),
    BuildSlave("dornfelder_lin_py36_1", dornfelder_lin_py36_1_pw),
    BuildSlave("dornfelder_lin_py36_2", dornfelder_lin_py36_2_pw),
    BuildSlave("dornfelder_lin_py36_3", dornfelder_lin_py36_3_pw),
    BuildSlave("dornfelder_lin_py36_4", dornfelder_lin_py36_4_pw),
]
# 'protocols' contains information about protocols which master will use for
# communicating with slaves.
# You must define at least 'port' option that slaves could connect to your master
# with this protocol.
# 'port' must match the value configured into the buildslaves (with their
# --master option)
c['protocols'] = {'pb': {'host':'zinfandel.tools.ietf.org', 'port': 9989}}
####### CHANGESOURCES
# -*- section ChangeSources -*-
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes.
from buildbot.changes.pb import PBChangeSource
# c['change_source'] = []
# with open("users") as file:
#     userinfo = json.read(file)
#     for user in userinfo:
#         prefix = userinfo[user]["prefix"]
#         c.['change_source'].append(PBChangeSource(user=user, passwd="BRiR6XcT7x3$", prefix=prefix))
# A single PB change source; the svn post-commit hook pushes changes here.
c['change_source'] = [
    PBChangeSource(user="ietfdb", passwd=ietfdb_svn_hook_pw),
]
####### SCHEDULERS
# -*- section Schedulers -*-
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'runtests' build
from buildbot.schedulers.basic import SingleBranchScheduler, AnyBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.changes import filter
c['schedulers'] = [
    # Branch schedulers
    SingleBranchScheduler(name="pyflakes", treeStableTimer=10, builderNames=["Check PyFlakes"],
        change_filter=filter.ChangeFilter(branch='trunk')),
    SingleBranchScheduler(name="lin_test", treeStableTimer=60*5, builderNames=["Test Suite"],
        change_filter=filter.ChangeFilter(branch='trunk')),
    # SingleBranchScheduler(name="osx_test", treeStableTimer=60*5, builderNames=["Test Suite (OS X)"],
    #     change_filter=filter.ChangeFilter(branch='trunk')),
    #
    AnyBranchScheduler(name="pyflakes_branch", treeStableTimer=10, builderNames=["[branch] Check PyFlakes"],
        change_filter=filter.ChangeFilter(branch_re='branch/.*')),
    # AnyBranchScheduler(name="lin_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite"],
    #     change_filter=filter.ChangeFilter(branch_re='branch/.*')),
    # AnyBranchScheduler(name="osx_test_branch", treeStableTimer=60*5, builderNames=["[branch] Test Suite (OS X)"],
    #     change_filter=filter.ChangeFilter(branch_re='branch/.*')),
    #
    AnyBranchScheduler(name="pyflakes_personal",treeStableTimer=10, builderNames=["[personal] Check PyFlakes"],
        change_filter=filter.ChangeFilter(branch_re='personal/.*')),
    AnyBranchScheduler(name="lin_test_personal",treeStableTimer=60*5, builderNames=["[personal] Test Suite"],
        change_filter=filter.ChangeFilter(branch_re='personal/.*')),
    # Periodic Schedulers
    Nightly(name="lin_test_old_libs", hour=16, minute=12, branch="trunk", builderNames=["Verify Minimum Libs"],),
    Nightly(name="lin_test_libs", hour=16, minute=42, branch="trunk", builderNames=["Verify Latest Libs"],),
    Nightly(name="crawler", hour=23, minute=00, branch="trunk", onlyIfChanged=True, builderNames=["Test-Crawler"],),
    # Force schedulers
    ForceScheduler(name="force_pyflakes", builderNames=["Check PyFlakes"]),
    ForceScheduler(name="force_lin_test", builderNames=["Test Suite"]),
    # ForceScheduler(name="force_osx_test", builderNames=["Test Suite (OS X)"]),
    ForceScheduler(name="force_test_crawler", builderNames=["Test-Crawler"]),
    #
    ForceScheduler(name="force_pyflakes_branch", builderNames=["[branch] Check PyFlakes"]),
    ForceScheduler(name="force_lin_test_branch", builderNames=["[branch] Test Suite"]),
    # ForceScheduler(name="force_osx_test_branch", builderNames=["[branch] Test Suite (OS X)"]),
    #
    ForceScheduler(name="force_pyflakes_personal", builderNames=["[personal] Check PyFlakes"]),
    ForceScheduler(name="force_lin_test_personal", builderNames=["[personal] Test Suite"]),
]
####### BUILDERS
# -*- section Builders -*-
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.
from buildbot.process.factory import BuildFactory
from buildbot.steps.source.svn import SVN
from buildbot.steps.shell import ShellCommand, WarningCountingShellCommand
from buildbot.steps.python import PyFlakes
from buildbot.steps.python_twisted import RemovePYCs
from buildbot.steps.slave import SetPropertiesFromEnv
#
from buildbot.process.properties import Property, Interpolate
from buildbot.config import BuilderConfig
#### Custom subclassed builder
class TestCrawlerShellCommand(WarningCountingShellCommand):
    """Run bin/test-crawl and collect warning/progress statistics from its output.

    Each output line is matched against `warningPatterns`; matching lines are
    counted as build statistics and collected into per-category logs.  The
    last line matching `logline` provides the elapsed time and page count.
    """
    name = "testcrawl"
    haltOnFailure = 1
    flunkOnFailure = 1
    descriptionDone = ["test crawler"]
    command = ["bin/test-crawl"]
    warningPatterns = {
        "exceptions": "^(Traceback| File| |.*Error|.*Exception)",
        "failed": " FAIL ",
        "warnings": " WARN",
        "slow": " SLOW",
        "invalid_html": " invalid html:",
    }
    # Crawler progress line: elapsed, pages, queue, result, runtime, message.
    logline = r"^ *(?P<elapsed>\d+:\d+:\d+) +(?P<pages>\d+) +(?P<queue>\d+) +(?P<result>\d+) +(?P<runtime>\d+.\d+)s +(?P<message>.+)"

    def setTestResults(self, **kwargs):
        """
        Called by subclasses to set the relevant statistics; this actually
        adds to any statistics already present
        """
        for kw in kwargs:
            value = kwargs[kw]
            if value.isdigit():
                # Counter
                value = int(value)
                value += self.step_status.getStatistic(kw, 0)
            elif re.search(r"^[0-9]+\.[0-9]+$", value):
                # Runtime
                value = float(value)
                value += self.step_status.getStatistic(kw, 0)
            else:
                # This is a percentage, and we can't add them
                pass
            self.step_status.setStatistic(kw, value)

    def createSummary(self, log):
        """
        Match log lines against warningPattern.
        Warnings are collected into another log for this step, and the
        build-wide 'warnings-count' is updated."""
        warnings = {}
        wregex = {}
        regex_class = re.compile("").__class__
        if not isinstance(self.logline, regex_class):
            self.logline = re.compile(self.logline)
        for key in self.warningPatterns:
            warnings[key] = []
            pattern = self.warningPatterns[key]
            if not isinstance(pattern, regex_class):
                wregex[key] = re.compile(pattern)
            else:
                wregex[key] = pattern
        # Count matches to the various warning patterns
        # Fix: last_line was previously unbound (NameError) when no line
        # matched self.logline -- e.g. when the crawler died immediately.
        last_line = None
        for line in log.getText().split("\n"):
            for key in wregex:
                match = re.search(wregex[key], line)
                if match:
                    warnings[key].append(line)
            if re.search(self.logline, line):
                last_line = line
        # If there were any warnings, make the log of lines with warnings
        # available
        for key in warnings:
            if len(warnings[key]) > 0:
                self.addCompleteLog("%s (%d)" % (key, len(warnings[key])),
                                    "\n".join(warnings[key]) + "\n")
                self.step_status.setStatistic(key, len(warnings[key]))
                self.setProperty(key, len(warnings[key]), "TestCrawlerShellCommand")
        # Record elapsed time and page count from the last progress line,
        # guarding against the case where none was seen.
        if last_line is not None:
            match = re.search(self.logline, last_line)
            if match:
                for key in ['elapsed', 'pages']:
                    info = match.group(key)
                    self.step_status.setStatistic(key, info)
                    self.setProperty(key, info, "TestCrawlerShellCommand")

    def describe(self, done=False):
        """Extend the base description with the collected statistics."""
        description = WarningCountingShellCommand.describe(self, done)
        if done:
            description = description[:]  # make a private copy
            for name in ["time", "elapsed", "pages", "failed", "warnings", "slow", "invalid_html", ]:
                if name in self.step_status.statistics:
                    value = self.step_status.getStatistic(name)
                    displayName = name.replace('_', ' ')
                    # special case. Mph.
                    if type(value) is float:  # this is run-time
                        description.append('%s: %.2fs' % (displayName, value))
                    elif type(value) is int:
                        description.append('%s: %d' % (displayName, value))
                    else:
                        description.append('%s: %s' % (displayName, value))
        return description
class UnitTest(WarningCountingShellCommand):
    """Run the unit test suite and summarize test counts and coverage.

    Output lines are matched against `regexPatterns`; each captured value is
    accumulated into the step statistics, and `describe()` renders them
    (plus a derived 'passed' count) into the step description.
    """
    name = "test"
    warnOnFailure = 1
    description = ["testing"]
    descriptionDone = ["test"]
    command = ["python", "-m", "unittest", "discover"]
    regexPatterns = {
        "tests": "Ran (\d+) tests in [0-9.]+s",
        "time": "Ran \d+ tests in ([0-9.]+)s",
        "skipped": "(?:OK|FAILED).*skipped=(\d+)",
        "failed": "FAILED.*failures=(\d+)",
        "errors": "FAILED.*errors=(\d+)",
        "template_coverage":" +Template coverage: +([0-9.]+%)",
        "url_coverage": " +Url coverage: +([0-9.]+%)",
        "code_coverage": " +Code coverage: +([0-9.]+%)",
    }

    def setTestResults(self, **kwargs):
        """
        Called by subclasses to set the relevant statistics; this actually
        adds to any statistics already present
        """
        for kw, value in kwargs.items():
            if value.isdigit():
                # integer counter: accumulate
                value = int(value) + self.step_status.getStatistic(kw, 0)
            elif re.search(r"^[0-9]+\.[0-9]+$", value):
                # runtime in seconds: accumulate
                value = float(value) + self.step_status.getStatistic(kw, 0)
            # anything else (percentages) cannot be summed; store as-is
            self.step_status.setStatistic(kw, value)

    def createSummary(self, log):
        """Scan the test output and record every matched statistic."""
        found = {}
        for line in log.getText().split("\n"):
            for key, regex in self.regexPatterns.items():
                hit = re.search(regex, line)
                if hit:
                    found[key] = hit.group(1)
        self.setTestResults(**found)

    def describe(self, done=False):
        """Extend the base description with the collected statistics."""
        description = WarningCountingShellCommand.describe(self, done)
        if not done:
            return description
        description = description[:]  # private copy before appending
        stats = self.step_status
        stats.statistics["passed"] = (
            stats.getStatistic("tests", 0)
            - stats.getStatistic("skipped", 0)
            - stats.getStatistic("failed", 0)
            - stats.getStatistic("errors", 0))
        for name in ["time", "tests", "passed", "skipped", "failed", "errors",
                     "template_coverage", "url_coverage", "code_coverage", ]:
            if name not in stats.statistics:
                continue
            value = stats.getStatistic(name)
            displayName = name.replace('_', ' ')
            # special case. Mph.
            if displayName == 'template coverage':
                displayName = 'templ. coverage'
            if type(value) is float:  # this is run-time
                description.append('%s: %.2fs' % (displayName, value))
            elif type(value) is int:
                description.append('%s: %d' % (displayName, value))
            else:
                description.append('%s: %s' % (displayName, value))
        return description
## Set up builders
c['builders'] = []
# -*- section Builder_Run_pyflakes -*-
factory = BuildFactory()
factory.addStep(SetPropertiesFromEnv(variables=['HOME',]))
factory.addStep(SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')],
))
factory.addStep(ShellCommand(
    descriptionDone="install requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "install", "-r", "requirements.txt"],
))
# NOTE(review): "seting" typo in the displayed step description.
factory.addStep(ShellCommand(
    descriptionDone="seting up settings_local.py",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"],
))
factory.addStep(PyFlakes(
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["ietf/manage.py", "pyflakes", "--verbosity=0"],
))
# This should be the last action
# Record a passing result as a revision property on the repository.
factory.addStep(ShellCommand(
    descriptionDone="mark as passed",
    workdir=Interpolate('build/%(src::branch)s'),
    flunkOnFailure=False,
    usePTY=False,
    command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive",
        "propset", "--revprop", "-r", Property('got_revision'), "test:pyflakes", "passed" ],
))
c['builders'].append(BuilderConfig(name="Check PyFlakes", factory=factory, category="1. trunk",
    slavenames=["dunkelfelder_lin_py36_1", "dornfelder_lin_py36_1", ]))
c['builders'].append(BuilderConfig(name="[branch] Check PyFlakes", factory=factory, category="2. branch",
    slavenames=["dunkelfelder_lin_py36_2", "dornfelder_lin_py36_2", ]))
c['builders'].append(BuilderConfig(name="[personal] Check PyFlakes", factory=factory, category="3. personal",
    slavenames=["dunkelfelder_lin_py36_2",]))
# -*- section Builder_TestSuite -*-
factory = BuildFactory()
factory.addStep(SetPropertiesFromEnv(variables=['HOME',]))
factory.addStep(SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')],
))
factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False))
# NOTE(review): "tmp-*/" is passed to rm without a shell, so the glob is
# presumably not expanded -- confirm this step removes what is intended.
factory.addStep(ShellCommand(
    descriptionDone="remove tmp-* dirs",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["rm", "-rf", "tmp-*/"],
))
factory.addStep(ShellCommand(
    descriptionDone="install requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "install", "-r", "requirements.txt"],
))
factory.addStep(ShellCommand(
    descriptionDone="copy settings_local.py",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"],
))
factory.addStep(ShellCommand(
    descriptionDone="collect static files",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    usePTY=False,
    command=["ietf/manage.py", "collectstatic", "--noinput", ],
))
factory.addStep(UnitTest(
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ],
))
# This should come after tests
factory.addStep(ShellCommand(
    descriptionDone="mark as passed",
    workdir=Interpolate('build/%(src::branch)s'),
    flunkOnFailure=False,
    usePTY=False,
    command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive",
        "propset", "--revprop", "-r", Property('got_revision'), "test:unittest", "passed" ],
))
c['builders'].append(BuilderConfig(name="Test Suite", factory=factory, category="1. trunk",
    slavenames=["dunkelfelder_lin_py36_1", "dornfelder_lin_py36_1", ]))
c['builders'].append(BuilderConfig(name="[branch] Test Suite", factory=factory, category="2. branch",
    slavenames=["dunkelfelder_lin_py36_2", "dornfelder_lin_py36_2", ]))
c['builders'].append(BuilderConfig(name="[personal] Test Suite", factory=factory, category="3. personal",
    slavenames=["dunkelfelder_lin_py36_2", "dornfelder_lin_py36_2", ]))
# -*- section Builder_TestCrawler -*-
factory = BuildFactory()
factory.addStep(SetPropertiesFromEnv(variables=['HOME',]))
# Refresh the test database before crawling; this can take a long time.
factory.addStep(ShellCommand(
    descriptionDone="update database",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    timeout=3600, # 1 hour
    command=["docker/updatedb", "-q"],
))
factory.addStep(SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')],
))
factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False))
factory.addStep(ShellCommand(
    descriptionDone="install requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "install", "-r", "requirements.txt"],
))
factory.addStep(ShellCommand(
    descriptionDone="copy settings_local.py",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"],
))
factory.addStep(ShellCommand(
    descriptionDone="run migrations",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["ietf/manage.py", "migrate"],
))
# This will not only do a prelimnary sanity check, but also patch libs as needed:
factory.addStep(ShellCommand(
    descriptionDone="run django checks",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["ietf/manage.py", "check"],
))
factory.addStep(TestCrawlerShellCommand(
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["bin/test-crawl", "--settings=ietf.settings_testcrawl"],
))
# This should be the last action
# Record a passing crawl as a revision property on the repository.
factory.addStep(ShellCommand(
    descriptionDone="mark as passed",
    workdir=Interpolate('build/%(src::branch)s'),
    flunkOnFailure=False,
    usePTY=False,
    command=["svn", "--username=buildbot@tools.ietf.org", "--non-interactive",
        "propset", "--revprop", "-r", Property('got_revision'), "test:crawler", "passed" ],
))
c['builders'].append(BuilderConfig(name="Test-Crawler", factory=factory, category="1. trunk",
    slavenames=["dunkelfelder_lin_py36_4", ]))
# -*- section Builder_Verify_Old_Libs -*-
# This build runs pip install --upgrade, to make sure that we install the earliest version of
# all dependencies, in order to get an indication if/when an incompatibility turns up with a new
# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not
# change the external test conditions and produce spurious errors because of version changes in
# dependencies.
factory = BuildFactory()
factory.addStep(SetPropertiesFromEnv(variables=['HOME',]))
# Remove the requirements file tweaked by the "edit requirements" step of a
# previous run, so svn update restores a clean copy.
factory.addStep(ShellCommand(
    descriptionDone="remove tweaked requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    usePTY=False,
    command=["rm", "requirements.txt"],
))
factory.addStep(SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=Interpolate('build/%(src::branch)s'),
    alwaysUseLatest=True,
    haltOnFailure=True,
    usePTY=False,
    repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')],
))
factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False))
# Pin '>=' requirements to '==' so pip installs the minimum declared versions.
factory.addStep(ShellCommand(
    descriptionDone="edit requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["sed", "-i", "-e", "s/>=/==/", "requirements.txt"],
))
factory.addStep(ShellCommand(
    descriptionDone="install/upgrade requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "install", "--upgrade", "-r", "requirements.txt"],
))
# NOTE(review): "seting" typo in the displayed step description.
factory.addStep(ShellCommand(
    descriptionDone="seting up settings_local.py",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"],
))
# NOTE(review): "pyton" typo in the displayed step description.
factory.addStep(ShellCommand(
    descriptionDone="list installed pyton modules",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "freeze"],
))
factory.addStep(ShellCommand(
    descriptionDone="collect static files",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    usePTY=False,
    command=["ietf/manage.py", "collectstatic", "--noinput", ],
))
factory.addStep(UnitTest(
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ],
))
c['builders'].append(BuilderConfig(name="Verify Minimum Libs", factory=factory, category="1. trunk",
    slavenames=["dornfelder_lin_py36_3", ]))
# -*- section Verify_Latest_Libs -*-
# This build runs pip install --upgrade, to make sure that we install the latest version of all
# dependencies, in order to get an indication if/when an incompatibility turns up with a new
# version of a dependency. The other test suite builders *don't* use --upgrade, in order to not
# change the external test conditions and produce spurious errors because of version changes in
# dependencies.
factory = BuildFactory()
factory.addStep(SetPropertiesFromEnv(variables=['HOME',]))
factory.addStep(SVN(
    username='buildbot@tools.ietf.org',
    descriptionDone="svn update",
    workdir=Interpolate('build/%(src::branch)s'),
    alwaysUseLatest=True,
    haltOnFailure=True,
    usePTY=False,
    repourl=Interpolate('https://svn.tools.ietf.org/svn/tools/ietfdb/%(src::branch:~trunk)s'),
    descriptionSuffix=[Interpolate('%(src::branch)s %(src::revision)s')],
))
factory.addStep(RemovePYCs(workdir=Interpolate('build/%(src::branch)s'), usePTY=False))
factory.addStep(ShellCommand(
    descriptionDone="install/upgrade requirements",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "install", "--upgrade", "-r", "requirements.txt"],
))
# NOTE(review): "seting" typo in the displayed step description.
factory.addStep(ShellCommand(
    descriptionDone="seting up settings_local.py",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["cp", Interpolate("%(prop:HOME)s/settings_local.py"), "./ietf/"],
))
# NOTE(review): "pyton" typo in the displayed step description.
factory.addStep(ShellCommand(
    descriptionDone="list installed pyton modules",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["pip", "freeze"],
))
factory.addStep(ShellCommand(
    descriptionDone="collect static files",
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=False,
    flunkOnFailure=False,
    usePTY=False,
    command=["ietf/manage.py", "collectstatic", "--noinput", ],
))
factory.addStep(UnitTest(
    workdir=Interpolate('build/%(src::branch)s'),
    haltOnFailure=True,
    usePTY=False,
    command=["ietf/manage.py", "test", "--settings=settings_sqlitetest", "--verbosity=2", ],
))
c['builders'].append(BuilderConfig(name="Verify Latest Libs", factory=factory, category="1. trunk",
    slavenames=["dornfelder_lin_py36_3", ]))
####### STATUS TARGETS
# -*- section StatusTargets -*-
# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
c['status'] = []
from buildbot.status import html, mail
from buildbot.status.web import authz, auth
authz_cfg=authz.Authz(
    # change any of these to True to enable; see the manual for more
    # options
    auth=auth.BasicAuth([("ietfdb","ietfdb")]),
    gracefulShutdown = False,
    forceBuild = 'auth', # use this to test your slave once it is set up
    forceAllBuilds = False,
    pingBuilder = False,
    stopBuild = 'auth',
    stopAllBuilds = False,
    cancelPendingBuild = 'auth',
)
c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg))
# A second web status with slightly different rendering
from twisted.python import log
def changelinkfilter(html, project):
    # Debug helper: logs its arguments and returns the html text unchanged.
    # NOTE(review): the 'html' parameter shadows the 'html' module imported
    # above within this function's scope.
    log.msg(" * changelinkfilter(html='%s', project='%s')" % (html, project))
    return html
import jinja2, os
# Template loader pointing at the trac_view directory next to the master's cwd.
trac_template_loaders = [jinja2.FileSystemLoader(os.path.join(os.getcwd(), 'trac_view'))]
c['status'].append(html.WebStatus(http_port=8011, jinja_loaders=trac_template_loaders,
    authz=authz_cfg))
# Email notifications
from zope.interface import implements
from buildbot import interfaces
class UsernameIsEmailAddress():
    "This IEmailLookup provider assumes that the svn username is a valid email address."
    implements(interfaces.IEmailLookup)
    def getAddress(self, name):
        # The committer name is already an email address; use it verbatim.
        return name
# Always notify the admin on problems.
c['status'].append(mail.MailNotifier(
    fromaddr='buildbot@tools.ietf.org',
    sendToInterestedUsers=False,
    extraRecipients=['rjsparks@nostrum.com',],
    mode="problem",
))
# Notify the committers whose changes were in the failing build.
c['status'].append(mail.MailNotifier(
    fromaddr='buildbot@tools.ietf.org',
    lookup=UsernameIsEmailAddress(),
    mode="problem",
))
####### PROJECT IDENTITY
# -*- section Project -*-
# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
c['title'] = "Buildbot: IETF Datatracker"
c['titleURL'] = "https://datatracker.ietf.org/"
# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.
c['buildbotURL'] = "http://zinfandel.tools.ietf.org:8010/"
####### DB URL
c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

View file

@ -1,3 +0,0 @@
#!/bin/bash
# Restart every buildbot master found under masters/.
# Each master has its own virtualenv; run each restart in a subshell so the
# cd and the activated environment do not leak between iterations.
for d in masters/*; do
    (
        # Fix: quote "$d" (paths with spaces) and abort the subshell if cd
        # fails, so we never activate an env or restart in the wrong directory.
        cd "$d" || exit 1
        . env/bin/activate
        buildbot restart
    )
done

View file

@ -1,3 +0,0 @@
#!/bin/bash
# Restart buildbot workers. With no argument, restart everything under
# workers/; otherwise restart only the directories matched by $1.
# $1 is deliberately left unquoted so a caller-supplied glob expands.
for d in ${1:-workers/*}; do
    (
        # Fix: quote "$d" (paths with spaces) and abort the subshell if cd
        # fails, so we never activate an env or restart in the wrong directory.
        cd "$d" || exit 1
        . env/bin/activate
        buildbot-worker restart
    )
done

View file

@ -1,180 +0,0 @@
#!/bin/bash
# -*- indent-with-tabs: 0 -*-

version=0.10
program=${0##*/}        # basename of the invoked script
progdir=${0%/*}         # directory part of $0
# If $0 contained no slash, both expansions yield the same string; use ".".
if [ "$progdir" = "$program" ]; then progdir="."; fi
# ----------------------------------------------------------------------
# Print man-page-style help text, including the OPTIONS list which is
# extracted from the case statement in the option parser below.
function usage() {
    # Possible sections:
    # NAME
    # SYNOPSIS
    # CONFIGURATION [Normally only in Section 4]
    # DESCRIPTION
    # OPTIONS [Normally only in Sections 1, 8]
    # EXIT STATUS [Normally only in Sections 1, 8]
    # RETURN VALUE [Normally only in Sections 2, 3]
    # ERRORS [Typically only in Sections 2, 3]
    # ENVIRONMENT
    # FILES
    # VERSIONS [Normally only in Sections 2, 3]
    # CONFORMING TO
    # NOTES
    # BUGS
    # EXAMPLE
    # SEE ALSO
    cat <<EOF
NAME
	$program - set up buildbot workers
SYNOPSIS
	$program [OPTIONS]
DESCRIPTION
	Set up environment and buildbot worker files after checking
	out the buildbot directory tree from the repository.
EOF
    echo -e "OPTIONS"
    # Scrape the option-parsing case arms of this script ($0) and reformat
    # them into an option listing; GNU and BSD tools need different sed flags.
    if [ "$(uname)" = "Linux" ]; then
        egrep "^[	]+[-][A-Za-z| -]+\*?\)[	]+[A-Za-z].+#" $0 | tr -s "\t|" "\t," | sed -r -e 's/\)[ \t]+([A-Z]+)=\$2[^#]*#/=\1\t/' -e 's/\)[^#]*#/\t/'
    else
        egrep "^[	]+[-][A-Za-z| -]+\*?\)[	]+[A-Za-z].+#" $0 | sed 's/\|.*\$2[^#]*#/	/'| sed -E 's/\|.*\)[^#]*#/	/'
    fi
    cat <<EOF
AUTHOR
	Written by Henrik Levkowetz, <henrik@levkowetz.com>. Repository:
	https://svn.tools.ietf.org/svn/tools/ietfdb/trunk/buildbot
COPYRIGHT
	Copyright 2020 the IETF Trust. All rights reserved.
	Redistribution and use in source and binary forms, with or
	without modification, are permitted provided that the conditions
	laid out in the 3-clause BSD license is followed.
	License text: https://opensource.org/licenses/BSD-3-Clause
EOF
}
# ----------------------------------------------------------------------
# die MESSAGE...: print an error message to stderr and terminate the script.
function die() {
    echo -e "\n$program: error: $*" >&2
    exit 1
}

# err MESSAGE...: print a message to stderr in red.
# NOTE(review): relies on $red/$reset being set elsewhere -- confirm.
function err() {
    echo -e "${red}$*${reset}" >&2
}

# note MESSAGE...: print a message only when --verbose was given.
function note() {
    if [ -n "$VERBOSE" ]; then echo -e "\n$*"; fi
}
# ----------------------------------------------------------------------
# version: print program name and version.
function version() {
    echo -e "$program $version"
}
# ----------------------------------------------------------------------
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# ----------------------------------------------------------------------
# Option parsing
# Options
shortopts=a:hp:s:vV
longopts=admin:,help,server:,passwd:,python:,verbose,version
# Default values
read -d ' ' <<< $(who -m)
user=$REPLY
name=$(getent passwd $user | cut -d ':' -f 5 | cut -d ',' -f 1)
server='zinfandel.tools.ietf.org'
pass='' # must be set on the command line
python=python3.6
if [ "$(uname)" = "Linux" ]; then
args=$(getopt -o "$shortopts" --long "$longopts" -n '$program' -- $SV "$@")
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
files="$@"
eval set -- "$args"
sed="sed -r"
else
# Darwin, BSDs
args=$(getopt -o$shortopts $SV $*)
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
files="$@"
set -- $args
sed="sed -E"
fi
while true ; do
case "$1" in
-a| --admin) admin="$2"; shift;; # "Name <email>" of buildbot admin
-h| --help) usage; exit;; # Show this help, then exit
-p| --passwd) pass=$2; shift;; # Worker password
--python) python=$2; shift;; # Python version to use (e.g., 'python3.6')
-s| --server) server=$2; shift;; # Set the server fqdn
-v| --verbose) VERBOSE=1;; # Be more talkative
-V| --version) version; exit;; # Show program version, then exit
--) shift; break;;
*) die "Internal error, inconsistent option specification: '$1'";;
esac
shift
done
# ----------------------------------------------------------------------
# The program itself
dir=$(dirname $(realpath $0))
if [ -d "$dir/slaves" ]; then
path="$dir/slaves"
else
path="$dir/workers"
fi
for worker in $path/*; do
(
cd $worker;
pwd
if [ ! -d ./env ]; then
echo "Setting up virtual environment"
# Change python version to match deployment version
python3.6 -m venv env
fi
. env/bin/activate
pip install buildbot-worker
if [ ! -f ./buildbot.tac ]; then
pwfile=$dir/${worker##*/}_pw
echo "Looking for pwfile: $pwfile"
[ -f "$pwfile" ] && pass=$(< $pwfile)
[ -z "$pass" ] && die "Missing parameter: worker password"
buildbot-worker create-worker $PWD $server ${PWD##*/} $pass
fi
if ! grep -q "$name" ./info/admin; then
read -p "Expected $name in $PWD/info/admin, but found $(<./info/admin) -- change it [Y/n]?"
if [ "$REPLY" = "Y" -o "$REPLY" = "y" ]; then
if [ -z "$admin" ]; then
read -p "Admin (Your Name <your@email.example>): "
admin=$REPLY
fi
echo "Setting up ./info/admin"
echo "$admin" > ./info/admin
echo "Setting up ./info/host"
echo "$(uname -s -n -r) $(python --version)" > ./info/host
fi
fi
buildbot-worker stop
buildbot-worker start
)
done

View file

@ -1 +0,0 @@
Henrik Levkowetz <henrik@levkowetz.com>

View file

@ -1 +0,0 @@
Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9

View file

@ -1 +0,0 @@
Henrik Levkowetz <henrik@levkowetz.com>

View file

@ -1 +0,0 @@
Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9

View file

@ -1 +0,0 @@
Henrik Levkowetz <henrik@levkowetz.com>

View file

@ -1 +0,0 @@
Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9

View file

@ -1 +0,0 @@
Henrik Levkowetz <henrik@levkowetz.com>

View file

@ -1 +0,0 @@
Linux dunkelfelder 3.16.0-4-amd64 Python 3.6.9

View file

@ -38,9 +38,6 @@ USING_DEBUG_EMAIL_SERVER=True
EMAIL_HOST='localhost'
EMAIL_PORT=2025
TRAC_WIKI_DIR_PATTERN = "test/wiki/%s"
TRAC_SVN_DIR_PATTERN = "test/svn/%s"
MEDIA_BASE_DIR = 'test'
MEDIA_ROOT = MEDIA_BASE_DIR + '/media/'
MEDIA_URL = '/media/'

View file

@ -63,9 +63,6 @@ USING_DEBUG_EMAIL_SERVER=True
EMAIL_HOST='localhost'
EMAIL_PORT=2025
TRAC_WIKI_DIR_PATTERN = "test/wiki/%s"
TRAC_SVN_DIR_PATTERN = "test/svn/%s"
MEDIA_BASE_DIR = 'test'
MEDIA_ROOT = MEDIA_BASE_DIR + '/media/'
MEDIA_URL = '/media/'

View file

@ -1107,61 +1107,6 @@ USER_PREFERENCE_DEFAULTS = {
"left_menu" : "off",
}
TRAC_MASTER_DIR = "/a/www/trac-setup/"
TRAC_WIKI_DIR_PATTERN = "/a/www/www6s/trac/%s"
TRAC_WIKI_URL_PATTERN = "https://trac.ietf.org/trac/%s/wiki"
TRAC_ISSUE_URL_PATTERN = "https://trac.ietf.org/trac/%s/report/1"
TRAC_SVN_DIR_PATTERN = "/a/svn/group/%s"
#TRAC_SVN_URL_PATTERN = "https://svn.ietf.org/svn/group/%s/"
# The group types setting was replaced by a group feature entry 10 Jan 2019
#TRAC_CREATE_GROUP_TYPES = ['wg', 'rg', 'area', 'team', 'dir', 'review', 'ag', 'nomcom', ]
TRAC_CREATE_GROUP_STATES = ['bof', 'active', ]
TRAC_CREATE_GROUP_ACRONYMS = ['iesg', 'iaoc', 'ietf', ]
# This is overridden in production's settings-local. Make sure to update it.
TRAC_CREATE_ADHOC_WIKIS = [
# admin group acronym, name, sub-path
# A trailing fileglob wildcard is supported on group acronyms
('iesg', 'Meeting', "ietf/meeting"),
('nomcom*', 'NomCom', 'nomcom'),
]
SVN_PACKAGES = [
"/usr/lib/python/dist-packages/svn",
"/usr/lib/python3.6/dist-packages/libsvn",
]
TRAC_ENV_OPTIONS = [
('project', 'name', "{name} Wiki"),
('trac', 'database', 'sqlite:db/trac.db' ),
('trac', 'repository_type', 'svn'),
('trac', 'repository_dir', "{svn_dir}"),
('inherit', 'file', "/a/www/trac-setup/conf/trac.ini"),
('components', 'tracopt.versioncontrol.svn.*', 'enabled'),
]
TRAC_WIKI_PAGES_TEMPLATES = [
"utils/wiki/IetfSpecificFeatures",
"utils/wiki/InterMapTxt",
"utils/wiki/SvnTracHooks",
"utils/wiki/ThisTracInstallation",
"utils/wiki/TrainingMaterials",
"utils/wiki/WikiStart",
]
TRAC_ISSUE_SEVERITY_ADD = [
"-",
"Candidate WG Document",
"Active WG Document",
"Waiting for Expert Review",
"In WG Last Call",
"Waiting for Shepherd Writeup",
"Submitted WG Document",
"Dead WG Document",
]
SVN_ADMIN_COMMAND = "/usr/bin/svnadmin"
# Email addresses people attempt to set for their account will be checked
# against the following list of regex expressions with re.search(pat, addr):

View file

@ -1,38 +0,0 @@
= IETF-Specific Information =
== Editing the Wiki and Issues ==
In order to create and edit wiki pages and issues, you need to log in. Click on the
small 'Login' link above the main horizontal menubar. You log in with the same
username (your email address) and password as the datatracker. If you don't have a
login/passwd or need to reset your passwd, go to https://datatracker.ietf.org/accounts/create/.
The login and password is also used for commits to the SVN repository. See more about
the repository further down.
== IETF-Specific Features ==
This Trac installation has a few IETF-specific features which are not generally found
in Trac:
* Occurences of RFC numbers or draft names in Wiki text will generate links to the
RFC or draft in question. Unless you want to point to an RFC or draft in a
specific location which is different from the automatically generated link, you
don't need to explicitly add links for RFCs and drafts. Examples: RFC 2026,
draft-ietf-poised95-std-proc-3
* Each issue in the issue tracker can be indicated to concern a 'component'. This is
a standard Trac feature; however, the list of available components is automatically
updated to include all the active working group drafts. This makes it easier to
associate issues with drafts for the WG participants, without the Chairs needing to
go in as admin users and add a new component each time there's a new WG draft.
* Everywhere you can use wiki markup (on the wiki pages, roadmap descriptions,
etc.) you may embed a macro which shows a ticket statistics graph. Full
information about the macro is available at [http://trac-hacks.org/wiki/TicketStatsMacro].
Briefly, the macro syntax is:
{{{
[[TicketStats( height=250,daterange=12m,res_days=30)]]
}}}
which gives this result: [[TicketStats( height=250,daterange=12m,res_days=30)]]

View file

@ -1,72 +0,0 @@
= InterMapTxt =
== This is the place for defining InterWiki prefixes ==
This page was modelled after the MeatBall:InterMapTxt page.
In addition, an optional comment is allowed after the mapping.
This page is interpreted in a special way by Trac, in order to support
!InterWiki links in a flexible and dynamic way.
The code block after the first line separator in this page
will be interpreted as a list of !InterWiki specifications:
{{{
prefix <space> URL [<space> # comment]
}}}
By using `$1`, `$2`, etc. within the URL, it is possible to create
InterWiki links which support multiple arguments, e.g. Trac:ticket:40.
The URL itself can be optionally followed by a comment,
which will subsequently be used for decorating the links
using that prefix.
New !InterWiki links can be created by adding to that list, in real time.
Note however that ''deletions'' are also taken into account immediately,
so it may be better to use comments for disabling prefixes.
Also note that !InterWiki prefixes are case insensitive.
== List of Active Prefixes ==
[[InterWiki]]
----
== Prefix Definitions ==
{{{
PEP http://www.python.org/peps/pep-$1.html # Python Enhancement Proposal
Trac-ML http://thread.gmane.org/gmane.comp.version-control.subversion.trac.general/ # Message $1 in Trac Mailing List
trac-dev http://thread.gmane.org/gmane.comp.version-control.subversion.trac.devel/ # Message $1 in Trac Development Mailing List
Mercurial http://www.selenic.com/mercurial/wiki/index.cgi/ # the wiki for the Mercurial distributed SCM
RFC http://datatracker.ietf.org/doc/html/rfc$1.html # IETF's RFC $1
DataTracker https://datatracker.ietf.org/doc/
dt https://datatracker.ietf.org/doc/
#
# A arbitrary pick of InterWiki prefixes...
#
Acronym http://www.acronymfinder.com/af-query.asp?String=exact&Acronym=
C2find http://c2.com/cgi/wiki?FindPage&value=
Cache http://www.google.com/search?q=cache:
CPAN http://search.cpan.org/perldoc?
DebianBug http://bugs.debian.org/
DebianPackage http://packages.debian.org/
Dictionary http://www.dict.org/bin/Dict?Database=*&Form=Dict1&Strategy=*&Query=
Google http://www.google.com/search?q=
GoogleGroups http://groups.google.com/group/$1/msg/$2 # Message $2 in $1 Google Group
JargonFile http://downlode.org/perl/jargon-redirect.cgi?term=
MeatBall http://www.usemod.com/cgi-bin/mb.pl?
MetaWiki http://sunir.org/apps/meta.pl?
MetaWikiPedia http://meta.wikipedia.org/wiki/
MoinMoin http://moinmoin.wikiwikiweb.de/
WhoIs http://www.whois.sc/
Why http://clublet.com/c/c/why?
c2Wiki http://c2.com/cgi/wiki?
WikiPedia http://en.wikipedia.org/wiki/
}}}

View file

@ -1,73 +0,0 @@
= SVN Trac Hooks =
If the Trac Hooks for SVN has been installed for the svn repository
coupled to this Trac instance, the Key Phrases documented below may
be used in SVN commit messages to cause automatic updates and annotations
of Trac issues.
== The trac-post-commit-hook ==
This script looks at the commit message associated with an SVN commit,
and based on the presence of a number of key phrases will add annotations
to tickets and also possibly change ticket status, for instance closing
it.
=== Key Phrases ===
The key phrases available are:
{{{
Fix <ticket_spec>
Fixes <ticket_spec>
Fix for <ticket_spec>
Close <ticket_spec>
Closes <ticket_spec>
Addresses <ticket_spec>
References <ticket_spec>
Relates to <ticket_spec>
Related to <ticket_spec>
See <ticket_spec>
}}}
=== Ticket specification ===
The specification of the ticket to act on may specify one or more
tickets, using any of the following forms:
{{{
<ticket>
<ticket>, <ticket>{, <ticket>}
<ticket>, <ticket> and <ticket>
}}}
and variations thereof.
=== Ticket identification ===
The individual ticket specification
can take any of the following forms:
{{{
#<number>
ticket <number>
ticket:<number>
issue <number>
issue:<number>
bug <number>
bug:<number>
}}}
=== Examples ===
{{{
Clarify header normalization vs matching request headers (see #147)
Resolve #27: fix definition of idempotent
Note change for issue 157 (related to #157)
Define http and https URI schemes: addresses #58, #128, #159
Define http and https URI schemes: addresses #58, #128, #159;
fixes #157: removed reference to RFC1900 use of IP addresses in URI.
Resolve #140: rephrase note so that it becomes clear that the described ...
}}}

View file

@ -1,94 +0,0 @@
{{{
#!rst
Trac Installation on tools.ietf.org
===================================
Background
----------
The Track installation used on the tools.ietf.org site is different from the
installation examples provided with Trac and on http://trac.edgewall.com. The
reason is mainly that the multi-project examples all assume that Trac
constitutes the whole of the deployed environment, rather than being part of a
greater set. This means that the examples assume that accessing the
individual projects through URLs of the form "/$some_path/trac/$projname"
makes sense, while in our case, we would like the URLs to look like
"/$some_path/$projname/trac". In the multi-project configuration, this would
make Trac always believe that the project name is 'trac' - the last path
component.
Explored Alternatives
---------------------
Make Apache set ``TRAC_ENV`` dynamically
........................................
Tell Apache to dynamically set Trac's environment variable ``TRAC_ENV`` to the
particular value for the accessed project:
``/etc/apache2/sites-available/tools.ietf.org``:
::
ScriptAliasMatch "^/wg/[^/]+/trac(/.*)?" /usr/share/trac/cgi-bin/trac.cgi$1
<LocationMatch "^/wg/([^/]+)/trac">
SetEnv TRAC_ENV "/www/tools.ietf.org/tools/trac/wg/$1"
</LocationMatch>
This doesn't work because Apache doesn't support $n replacements based on
earlier LocationMatch matches.
Use .htaccess with default ScriptAlias
......................................
Maybe we could use individual .htaccess files in each WG directory to set the
``TRAC_ENV`` variable to the required value?
``/etc/apache2/sites-available/tools.ietf.org``:
::
ScriptAliasMatch "^/wg/[^/]+/trac(/.*)?" /usr/share/trac/cgi-bin/trac.cgi$1
``/www/tools.ietf.org/wg/examplewg/.htaccess``:
::
SetEnv TRAC_ENV "/www/tools.ietf.org/wg/examplewg/trac"
This doesn't work because this .htaccess isn't read when ScriptAlias points to
another directory.
Use .htaccess with a local CGI script
.....................................
Suppose we let ScriptAlias point to a script which is placed so that the
.htaccess file actually gets read?
``/etc/apache2/sites-available/tools.ietf.org``:
::
ScriptAliasMatch "^/wg/([^/]+)/trac(/.*)?" /www/tools.ietf.org/wg/$1/trac/index.cgi$2
``/www/tools.ietf.org/wg/examplewg/.htaccess``:
::
SetEnv TRAC_ENV "/www/tools.ietf.org/wg/examplewg/trac"
This *does* work, but it is not easily adapted to a Fast-CGI solution. It is
the set-up which is currently in use, but an alternative which will permit
fast-cgi usage would be preferred - the current solution is anything but
snappy...
}}}

View file

@ -1,10 +0,0 @@
= Training Materials =
WG Traingin Materials go here.
If you want to embed video, you can use the ![[Movie(<url>,width=<width>,height=<height>)]]
macro to embed moves from [http://youtube.com/ YouTube]. Suggested width and height parameters: width=640,height=385.
Example which doesn't point to an actual video:
[[Movie(http://www.youtube.com/watch?v=g_exampleid,width=640px,height=385px)]]

View file

@ -1,29 +0,0 @@
= Welcome to this IETF WG Trac installation =
Trac is a '''minimalistic''' approach to '''web-based''' project management,
suitable for software and documentation projects and similar. Its goal is to
simplify effective tracking and handling of project issues, enhancements and
overall progress.
As all Wiki pages, this page is editable, this means that you can modify the
contents of this page simply by using your web-browser. Simply click on the
"Edit this page" link at the bottom of the page. WikiFormatting will give you
a detailed description of available Wiki formatting commands.
There is nothing in this page which isn't also covered in one of the other
wiki pages, so the first adjustment you make of this Trac installation could
be to edit the content of this page, replacing this initial text with content
appropriate to your Working Group.
There are some aspects of this Trac installation which are specific to the
IETF environment. Those are described in IetfSpecificFeatures.
== Starting Points ==
* TracGuide -- Built-in Documentation
* [http://trac.edgewall.org/ The Trac project] -- Trac Open Source Project
* [http://trac.edgewall.org/wiki/TracFaq Trac FAQ] -- Frequently Asked Questions
* TracSupport -- Trac Support
For a complete list of local wiki pages, see TitleIndex.

View file

@ -1,413 +0,0 @@
# Copyright The IETF Trust 2016-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import os
import copy
import io
import pkg_resources
import syslog
from trac.core import TracError
from trac.env import Environment
from trac.perm import PermissionSystem
from trac.ticket.model import Component, Milestone, Severity
from trac.util.text import unicode_unquote
from trac.wiki.model import WikiPage
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from django.template.loader import render_to_string
import debug # pyflakes:ignore
from ietf.group.models import Group, GroupFeatures
from ietf.utils.pipe import pipe
logtag = __name__.split('.')[-1]
logname = "user.log"
syslog.openlog(str(logtag), syslog.LOG_PID, syslog.LOG_USER)
class Command(BaseCommand):
help = "Create group wikis for WGs, RGs and Areas which don't have one."
def add_arguments(self, parser):
parser.add_argument('--wiki-dir-pattern', dest='wiki_dir_pattern',
default=settings.TRAC_WIKI_DIR_PATTERN,
help='A pattern with %s placeholder for group wiki path')
parser.add_argument('--svn-dir-pattern', dest='svn_dir_pattern',
default=settings.TRAC_SVN_DIR_PATTERN,
help='A pattern with %s placeholder for group svn path')
parser.add_argument('--group-list', '-g', dest='group_list', help='Limit processing to groups with the given acronyms (a comma-separated list)')
parser.add_argument('--dummy-run', '-n', default=False, action='store_true', dest='dummy_run', help='Make no changes, just show what would be done')
secretariat = Group.objects.get(acronym='secretariat')
def note(self, msg):
if self.verbosity > 1:
self.stdout.write(msg)
def log(self, msg):
syslog.syslog(msg)
self.stdout.write(msg)
self.stderr.write(msg)
# --- svn ---
def do_cmd(self, cmd, *args):
quoted_args = [ '"%s"'%a if ' ' in a else a for a in args ]
if self.dummy_run:
self.note("Would run %s %s ..." % (os.path.basename(cmd), " ".join(quoted_args)))
else:
self.note("Running %s %s ..." % (os.path.basename(cmd), " ".join(quoted_args)))
command = [ cmd, ] + list(args)
command = ' '.join(command).encode('utf-8')
code, out, err = pipe(command)
out = out.decode('utf-8')
err = err.decode('utf-8')
msg = None
if code != 0:
msg = "Error %s: %s when executing '%s'" % (code, err, " ".join(command))
self.log(msg)
return msg, out
def svn_admin_cmd(self, *args):
return self.do_cmd(settings.SVN_ADMIN_COMMAND, *args)
def create_svn(self, svn):
if self.dummy_run:
self.note(" Would create svn repository: %s" % svn)
return "Dummy run, no svn repo created"
else:
self.note(" Creating svn repository: %s" % svn)
if not os.path.exists(os.path.dirname(svn)):
msg = "Intended to create '%s', but parent directory is missing" % svn
self.log(msg)
return msg
err, out= self.svn_admin_cmd("create", svn )
if err:
self.log(err)
return err
return ""
# --- trac ---
def remove_demo_components(self, env):
for component in Component.select(env):
if component.name.startswith('component'):
component.delete()
def remove_demo_milestones(self, env):
for milestone in Milestone.select(env):
if milestone.name.startswith('milestone'):
milestone.delete()
def symlink_to_master_assets(self, path, env):
master_dir = settings.TRAC_MASTER_DIR
master_htdocs = os.path.join(master_dir, "htdocs")
group_htdocs = os.path.join(path, "htdocs")
self.note(" Symlinking %s to %s" % (master_htdocs, group_htdocs))
os.removedirs(group_htdocs)
os.symlink(master_htdocs, group_htdocs)
def add_wg_draft_states(self, group, env):
for state in settings.TRAC_ISSUE_SEVERITY_ADD:
self.note(" Adding severity %s" % state)
severity = Severity(env)
severity.name = state
severity.insert()
def add_wiki_page(self, env, name, text):
page = WikiPage(env, name)
if page.time:
self.note(" ** Page %s already exists, not adding it." % name)
return
page.text = text
page.save(author="(System)", comment="Initial page import")
def add_default_wiki_pages(self, env):
dir = pkg_resources.resource_filename('trac.wiki', 'default-pages')
#WikiAdmin(env).load_pages(dir)
with env.db_transaction:
for name in os.listdir(dir):
filename = os.path.join(dir, name)
name = unicode_unquote(name.encode('utf-8'))
if os.path.isfile(filename):
self.note(" Adding page %s" % name)
with io.open(filename, encoding='utf-8') as file:
text = file.read()
self.add_wiki_page(env, name, text)
def add_custom_wiki_pages(self, group, env):
for templ in settings.TRAC_WIKI_PAGES_TEMPLATES:
_, name = os.path.split(templ)
text = render_to_string(templ, {"group": group})
self.note(" Adding page %s" % name)
self.add_wiki_page(env, name, text)
def sync_default_repository(self, group, env):
repository = env.get_repository('')
if repository:
self.note(" Indexing default repository")
repository.sync()
else:
self.log("Trac environment '%s' does not have any repository" % env)
def create_adhoc_trac(self, name, path):
if not os.path.exists(os.path.dirname(path)):
msg = "Intended to create '%s', but parent directory is missing" % path
self.log(msg)
return None, msg
options = copy.deepcopy(settings.TRAC_ENV_OPTIONS)
# Interpolate group field names to values in the option settings:
remove = []
for i in range(len(options)):
sect, key, val = options[i]
if key in ['repository_type', 'repository_dir', ]:
remove = [i] + remove
else:
val = val.format(name=name)
options[i] = sect, key, val
for i in remove:
del options[i]
# Try to creat ethe environment, remove unwanted defaults, and add
# custom pages and settings.
if self.dummy_run:
self.note("Would create Trac for '%s' at %s" % (name, path))
return None, "Dummy run, no trac created"
else:
try:
self.note("Creating Trac for '%s' at %s" % (name, path))
env = Environment(path, create=True, options=options)
self.remove_demo_components(env)
self.remove_demo_milestones(env)
# Use custom assets (if any) from the master setup
self.symlink_to_master_assets(path, env)
self.add_custom_wiki_pages(name, env)
self.add_default_wiki_pages(env)
# Permissions will be handled during permission update later.
return env, ""
except TracError as e:
msg = "While creating Trac instance for %s: %s" % (name, e)
self.log(msg)
return None, msg
def create_group_trac(self, group):
if not os.path.exists(os.path.dirname(group.trac_dir)):
msg = "Intended to create '%s', but parent directory is missing" % group.trac_dir
self.log(msg)
return None, msg
options = copy.deepcopy(settings.TRAC_ENV_OPTIONS)
# Interpolate group field names to values in the option settings:
for i in range(len(options)):
sect, key, val = options[i]
val = val.format(**group.__dict__)
options[i] = sect, key, val
# Try to create the environment, remove unwanted defaults, and add
# custom pages and settings.
if self.dummy_run:
self.note("Would create Trac for group '%s' at %s" % (group.acronym, group.trac_dir))
return None, "Dummy run, no trac created"
else:
try:
self.note("Creating Trac for group '%s' at %s" % (group.acronym, group.trac_dir))
env = Environment(group.trac_dir, create=True, options=options)
self.remove_demo_components(env)
self.remove_demo_milestones(env)
self.maybe_add_group_url(group, 'wiki', settings.TRAC_WIKI_URL_PATTERN % group.acronym)
self.maybe_add_group_url(group, 'tracker', settings.TRAC_ISSUE_URL_PATTERN % group.acronym)
# Use custom assets (if any) from the master setup
self.symlink_to_master_assets(group.trac_dir, env)
if group.features.acts_like_wg:
self.add_wg_draft_states(group, env)
self.add_custom_wiki_pages(group, env)
self.add_default_wiki_pages(env)
self.sync_default_repository(group, env)
# Components (i.e., drafts) will be handled during components
# update later
# Permissions will be handled during permission update later.
return env, ""
except (TracError, IOError) as e:
msg = "While creating Trac instance for %s: %s" % (group, e)
self.log(msg)
return None, msg
def update_trac_permissions(self, name, group, env):
if self.dummy_run:
self.note("Would update Trac permissions for '%s' from group %s" % (name, group.acronym))
else:
self.note("Updating Trac permissions for '%s' from group %s" % (name, group.acronym))
mgr = PermissionSystem(env)
permission_list = mgr.get_all_permissions()
permission_list = [ (u,a) for (u,a) in permission_list if not u in ['anonymous', 'authenticated']]
permissions = {}
for user, action in permission_list:
if not user in permissions:
permissions[user] = []
permissions[user].append(action)
roles = ( list( group.role_set.filter(name_id__in=set(['chair', 'secr', 'ad', 'trac-admin', ]+group.features.admin_roles)))
+ list(self.secretariat.role_set.filter(name_id__in=['trac-admin', ]) ))
users = []
for role in roles:
user = role.email.address.lower()
users.append(user)
if not user in permissions:
try:
self.note(" Granting admin permission for %s" % user)
mgr.grant_permission(user, 'TRAC_ADMIN')
if not user in permissions:
permissions[user] = []
permissions[user].append('TRAC_ADMIN')
except TracError as e:
self.log("While adding admin permission for %s: %s" (user, e))
for user in permissions:
if not user in users:
if 'TRAC_ADMIN' in permissions[user]:
try:
self.note(" Revoking admin permission for %s" % user)
mgr.revoke_permission(user, 'TRAC_ADMIN')
except TracError as e:
self.log("While revoking admin permission for %s: %s" (user, e))
def update_trac_components(self, group, env):
if self.dummy_run:
self.note("Would update Trac components for group '%s'" % group.acronym)
else:
self.note("Updating Trac components for group '%s'" % group.acronym)
components = Component.select(env)
comp_names = [ c.name for c in components ]
group_docs = group.document_set.filter(states__slug='active', type_id='draft').distinct()
group_comp = []
for doc in group_docs:
if not doc.name.startswith('draft-'):
self.log("While adding components: unexpectd %s group doc name: %s" % (group.acronym, doc.name))
continue
name = doc.name[len('draft-'):]
if name.startswith('ietf-'):
name = name[len('ietf-'):]
elif name.startswith('irtf-'):
name = name[len('ietf-'):]
if name.startswith(group.acronym+'-'):
name = name[len(group.acronym+'-'):]
group_comp.append(name)
if not name in comp_names and not doc.name in comp_names:
self.note(" Group draft: %s" % doc.name)
self.note(" Adding component %s" % name)
comp = Component(env)
comp.name = name
comp.owner = "%s@ietf.org" % doc.name
comp.insert()
def maybe_add_group_url(self, group, slug, url):
if not group.groupextresource_set.filter(name__slug=slug).exists():
self.note(" adding %s %s URL ..." % (group.acronym, slug))
group.groupextresource_set.create(name_id=slug,value=url)
def add_custom_pages(self, group, env):
for template_name in settings.TRAC_WIKI_PAGES_TEMPLATES:
pass
def add_custom_group_states(self, group, env):
for state_name in settings.TRAC_ISSUE_SEVERITY_ADD:
pass
# --------------------------------------------------------------------
def handle(self, *filenames, **options):
self.verbosity = options['verbosity']
self.errors = []
self.wiki_dir_pattern = options.get('wiki_dir_pattern', settings.TRAC_WIKI_DIR_PATTERN)
self.svn_dir_pattern = options.get('svn_dir_pattern', settings.TRAC_SVN_DIR_PATTERN)
self.group_list = options.get('group_list', None)
self.dummy_run = options.get('dummy_run', False)
self.wiki_dir_pattern = os.path.join(str(settings.BASE_DIR), str('..'), self.wiki_dir_pattern)
self.svn_dir_pattern = os.path.join(settings.BASE_DIR, '..', self.svn_dir_pattern)
if not self.group_list is None:
self.group_list = self.group_list.split('.')
if isinstance(self.verbosity, str) and self.verbosity.isdigit():
self.verbosity = int(self.verbosity)
if self.dummy_run and self.verbosity < 2:
self.verbosity = 2
if not os.path.exists(os.path.dirname(self.wiki_dir_pattern)):
raise CommandError('The Wiki base direcory specified for the wiki directories (%s) does not exist.' % os.path.dirname(self.wiki_dir_pattern))
if not os.path.exists(os.path.dirname(self.svn_dir_pattern)):
raise CommandError('The SVN base direcory specified for the SVN directories (%s) does not exist.' % os.path.dirname(self.svn_dir_pattern))
gtypes = [ f.type for f in GroupFeatures.objects.filter(create_wiki=True) ]
gfilter = Q(type__in=gtypes, state__slug__in=settings.TRAC_CREATE_GROUP_STATES)
gfilter |= Q(acronym__in=settings.TRAC_CREATE_GROUP_ACRONYMS)
groups = Group.objects.filter(gfilter).order_by('acronym')
if self.group_list:
groups = groups.filter(acronym__in=self.group_list)
for group in groups:
try:
self.note("Processing group '%s'" % group.acronym)
group.trac_dir = self.wiki_dir_pattern % group.acronym
group.svn_dir = self.svn_dir_pattern % group.acronym
if not os.path.exists(group.svn_dir):
err = self.create_svn(group.svn_dir)
if err:
self.errors.append(err)
if not os.path.exists(group.trac_dir):
trac_env, msg = self.create_group_trac(group)
if not trac_env:
self.errors.append(msg)
else:
trac_env = Environment(group.trac_dir)
if not trac_env and not self.dummy_run:
continue
self.update_trac_permissions(group.acronym, group, trac_env)
self.update_trac_components(group, trac_env)
except Exception as e:
self.errors.append(e)
self.log("While processing %s: %s" % (group.acronym, e))
raise
for acronym, name, path in settings.TRAC_CREATE_ADHOC_WIKIS:
try:
self.note("Processing wiki '%s'" % name)
if not os.path.isabs(path):
path = self.wiki_dir_pattern % path
if not os.path.exists(path):
trac_env, msg = self.create_adhoc_trac(name, path)
if not trac_env:
self.errors.append(msg)
else:
trac_env = Environment(path)
if not trac_env and not self.dummy_run:
continue
if acronym.endswith('*'):
groups = Group.objects.filter(acronym__startswith=acronym[:-1], state_id='active')
for group in groups:
self.update_trac_permissions(name, group, trac_env)
else:
group = Group.objects.get(acronym=acronym)
self.update_trac_permissions(name, group, trac_env)
except Exception as e:
self.errors.append(e)
self.log("While processing %s: %s" % (name, e))
raise
if self.errors:
raise CommandError("There were %s failures in WG Trac creation:\n %s" % (len(self.errors), "\n ".join(self.errors)))

View file

@ -15,15 +15,12 @@ from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from fnmatch import fnmatch
from importlib import import_module
from .pipe import pipe
from textwrap import dedent
from unittest import skipIf
from tempfile import mkdtemp
from django.apps import apps
from django.contrib.auth.models import User
from django.conf import settings
from django.core.management import call_command
from django.template import Context
from django.template import Template # pyflakes:ignore
from django.template.defaulttags import URLNode
@ -33,8 +30,6 @@ from django.urls import reverse as urlreverse
import debug # pyflakes:ignore
from ietf.group.factories import GroupFactory
from ietf.group.models import Group
from ietf.person.name import name_parts, unidecode_name
from ietf.submit.tests import submission_file
from ietf.utils.bower_storage import BowerStorageFinder
@ -46,22 +41,6 @@ from ietf.utils.test_utils import TestCase
from ietf.utils.text import parse_unicode
from ietf.utils.xmldraft import XMLDraft
skip_wiki_glue_testing = False
skip_message_svn = ""
skip_message_trac = ""
try:
import svn # pyflakes:ignore
except ImportError as e:
skip_wiki_glue_testing = True
skip_message_svn = "Skipping trac tests: %s" % e
print(" "+skip_message_svn)
if sys.version_info.major==3:
skip_version_trac = True
skip_message_trac = "Skipping trac tests: Trac not available for Python3 as of 14 Jul 2019, 04 Jul 2020"
print(" "+skip_message_trac)
class SendingMail(TestCase):
def test_send_mail_preformatted(self):
@ -292,67 +271,6 @@ class TemplateChecksTestCase(TestCase):
r = self.client.get(url)
self.assertTemplateUsed(r, '500.html')
@skipIf(skip_version_trac, skip_message_trac)
@skipIf(skip_wiki_glue_testing, skip_message_svn)
class TestWikiGlueManagementCommand(TestCase):
    """Exercise the create_group_wikis management command end to end.

    Temporary wiki and svn roots are created on disk and handed to the
    command through command-line switches.  This is necessary because the
    management command reads in its own copy of settings.py in its own
    python environment, so settings modified here would not be seen.
    """

    def _prepared_root(self, pattern):
        # Turn a relative %s-pattern into an absolute one and make sure
        # its parent directory exists on disk.
        full_pattern = os.path.abspath(pattern)
        parent = os.path.dirname(full_pattern)
        if not os.path.exists(parent):
            os.mkdir(parent)
        return full_pattern

    def setUp(self):
        super().setUp()
        set_coverage_checking(False)
        self.wiki_dir_pattern = self._prepared_root('tmp-wiki-dir-root/%s')
        self.svn_dir_pattern = self._prepared_root('tmp-svn-dir-root/%s')

    def tearDown(self):
        shutil.rmtree(os.path.dirname(self.wiki_dir_pattern))
        shutil.rmtree(os.path.dirname(self.svn_dir_pattern))
        set_coverage_checking(True)
        super().tearDown()

    def test_wiki_create_output(self):
        group_types = ['wg', 'rg', 'ag', 'area', 'rag']
        for type_slug in group_types:
            GroupFactory(type_id=type_slug)
        active_groups = Group.objects.filter(
            type__slug__in=group_types,
            state__slug='active',
        ).order_by('acronym')
        out = io.StringIO()
        err = io.StringIO()
        call_command('create_group_wikis', stdout=out, stderr=err, verbosity=2,
            wiki_dir_pattern=self.wiki_dir_pattern,
            svn_dir_pattern=self.svn_dir_pattern,
        )
        command_output = out.getvalue()
        self.assertEqual("", err.getvalue())
        for group in active_groups:
            self.assertIn("Processing group '%s'" % group.acronym, command_output)
            # Cross-check the generated environment with trac-admin itself
            admin_code, admin_output, admin_error = pipe(
                'trac-admin %s permission list' % (self.wiki_dir_pattern % group.acronym))
            self.assertEqual(admin_code, 0)
            for role in group.role_set.filter(name_id__in=['chair', 'secr', 'ad']):
                user = role.email.address.lower()
                self.assertIn("Granting admin permission for %s" % user, command_output)
                self.assertIn(user, admin_output)
            for doc in group.document_set.filter(states__slug='active', type_id='draft'):
                # Component names drop the draft-/stream/acronym prefixes,
                # in that order, matching what the command itself does.
                component = doc.name
                for prefix in ('draft-', doc.stream_id + '-', group.acronym + '-'):
                    component = component.replace(prefix, '')
                self.assertIn("Adding component %s" % component, command_output)
            for page in settings.TRAC_WIKI_PAGES_TEMPLATES:
                self.assertIn("Adding page %s" % os.path.basename(page), command_output)
            self.assertIn("Indexing default repository", command_output)
OMITTED_APPS = [
'ietf.secr.meetings',

View file

@@ -66,10 +66,8 @@ rfc2html>=2.0.1
scout-apm>=2.23.0
selenium>=3.141.0,<4.0
six>=1.10.0
svn>=1.0.1
tblib>=1.3.0
tqdm>=3.7.0
#Trac>=1.0.10,<1.2
Unidecode>=0.4.18,<1.2.0
#wsgiref>=0.1.2
weasyprint>=52.5,<53