ci: merge main to release (#7598)

ci: merge main to release
This commit is contained in:
Robert Sparks 2024-06-26 14:55:58 -05:00 committed by GitHub
commit 67ccfcfa09
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 183 additions and 667 deletions

View file

@ -7,21 +7,20 @@ on:
workflow_dispatch:
inputs:
summary:
description: 'Release Summary'
required: false
type: string
default: ''
deploy:
description: 'Deploy to K8S'
default: 'Skip'
required: true
type: choice
options:
- Skip
- Staging Only
- Staging + Prod
sandbox:
description: 'Deploy to Sandbox'
default: true
required: true
type: boolean
deploy:
description: 'Deploy to Staging / Prod'
default: false
required: true
type: boolean
sandboxNoDbRefresh:
description: 'Sandbox Disable Daily DB Refresh'
default: false
@ -277,7 +276,7 @@ jobs:
repoCommon: common
version: ${{needs.prepare.outputs.pkg_version}}
changelog: ${{ steps.changelog.outputs.changes }}
summary: ${{ github.event.inputs.summary }}
summary: ''
coverageResultsPath: coverage.json
histCoveragePath: historical-coverage.json
@ -417,7 +416,7 @@ jobs:
# -----------------------------------------------------------------
staging:
name: Deploy to Staging
if: ${{ !failure() && !cancelled() && github.event.inputs.deploy == 'true' }}
if: ${{ !failure() && !cancelled() && (github.event.inputs.deploy == 'Staging Only' || github.event.inputs.deploy == 'Staging + Prod') }}
needs: [prepare, release]
runs-on: ubuntu-latest
environment:
@ -427,23 +426,37 @@ jobs:
steps:
- name: Deploy to staging
run: |
curl -X POST -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${{ secrets.GH_INFRA_K8S_TOKEN }}" ${{ secrets.GHA_K8S_DEPLOY_API }} -d '{"ref":"main", "inputs": { "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}" }}'
uses: the-actions-org/workflow-dispatch@v4
with:
workflow: deploy.yml
repo: ietf-tools/infra-k8s
token: ${{ secrets.GH_INFRA_K8S_TOKEN }}
inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}" }'
wait-for-completion: true
wait-for-completion-timeout: 10m
display-workflow-run-url: false
# -----------------------------------------------------------------
# PROD
# -----------------------------------------------------------------
prod:
name: Deploy to Production
if: ${{ !failure() && !cancelled() && github.event.inputs.deploy == 'true' }}
if: ${{ !failure() && !cancelled() && github.event.inputs.deploy == 'Staging + Prod' }}
needs: [staging]
runs-on: ubuntu-latest
environment:
name: production
env:
PKG_VERSION: ${{needs.prepare.outputs.pkg_version}}
steps:
- name: Deploy to production
run: |
curl -X POST -H "Accept: application/vnd.github.v3+json" -H "Authorization: Bearer ${{ secrets.GH_INFRA_K8S_TOKEN }}" ${{ secrets.GHA_K8S_DEPLOY_API }} -d '{"ref":"main", "inputs": { "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}" }}'
uses: the-actions-org/workflow-dispatch@v4
with:
workflow: deploy.yml
repo: ietf-tools/infra-k8s
token: ${{ secrets.GH_INFRA_K8S_TOKEN }}
inputs: '{ "environment":"${{ secrets.GHA_K8S_CLUSTER }}", "app":"datatracker", "appVersion":"${{ env.PKG_VERSION }}", "remoteRef":"${{ github.sha }}" }'
wait-for-completion: true
wait-for-completion-timeout: 10m
display-workflow-run-url: false

View file

@ -1,26 +0,0 @@
#include <stdio.h>
int main( void )
{
int c;
int count = 0;
//turn off buffering
setvbuf(stdin, NULL, _IONBF, 0);
setvbuf(stdout, NULL, _IONBF, 0);
setvbuf(stderr, NULL, _IONBF, 0);
c = fgetc(stdin);
while(c != EOF)
{
if (c=='.' || c=='E' || c=='F' || c=='s') count++; else count=0;
fputc(c, stdout);
fflush(stdout);
if (count && count % 76 == 0) {
fprintf(stderr, "%4d\n", count);
fflush(stderr);
}
c = fgetc(stdin);
}
return 0;
}

View file

@ -1,27 +0,0 @@
#!/bin/bash
# Nightly datatracker jobs.
#
# This script is expected to be triggered by cron from
# /etc/cron.d/datatracker
export LANG=en_US.UTF-8
# Make sure we stop if something goes wrong:
program=${0##*/}
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# Datatracker directory
DTDIR=/a/www/ietf-datatracker/web
cd $DTDIR/
logger -p user.info -t cron "Running $DTDIR/bin/daily"
# Get IANA-registered yang models
#YANG_IANA_DIR=$(python -c 'import ietf.settings; print ietf.settings.SUBMIT_YANG_IANA_MODEL_DIR')
# Hardcode the rsync target to avoid any unwanted deletes:
# rsync -avzq --delete rsync.ietf.org::iana/yang-parameters/ /a/www/ietf-ftp/yang/ianamod/
rsync -avzq --delete /a/www/ietf-ftp/iana/yang-parameters/ /a/www/ietf-ftp/yang/ianamod/
# Get Yang models from Yangcatalog.
#rsync -avzq rsync://rsync.yangcatalog.org:10873/yangdeps /a/www/ietf-ftp/yang/catalogmod/
/a/www/ietf-datatracker/scripts/sync_to_yangcatalog

View file

@ -1,13 +0,0 @@
#!/bin/bash
# Drop tables which don't exist in the database dump.
[ -n "$1" ] || { echo -e "\nUsage: $0 DUMPFILE\n\nError: No database dump file given"; exit 1; }
zcat $1 | head | grep "Database: ietf_utf8" || { echo "Is this a database dump? Expected to see 'Database: ietf_utf8' "; exit 1; }
echo -e "\nSQL commands:\n"
diff <(zcat $1 | grep '^DROP TABLE IF EXISTS' | tr -d '`;' | field 5) <(ietf/manage.py dbshell <<< 'show tables;' | tail -n +2) | grep '^>' | awk '{print "drop table if exists", $2, ";";}' | tee /dev/stderr | ietf/manage.py dbshell
echo -e "\nDone"

View file

@ -1,16 +0,0 @@
#!/bin/bash
# This script provides a limited selected dump of database content with the
# purpose of generating a test fixture that provides the test data needed
# by the test suite.
#
# The generated data fixture is sorted and normalized in order to produce
# minimal commit diffs which reflect only actual changes in the fixture data,
# without apparent changes resulting only from ordering changes.
set -x
ietf/manage.py dumpdata --indent 1 doc.State doc.BallotType doc.StateType \
mailtrigger.MailTrigger mailtrigger.Recipient name \
group.GroupFeatures stats.CountryAlias dbtemplate.DBTemplate \
| jq --sort-keys "sort_by(.model, .pk)" \
| jq '[.[] | select(.model!="dbtemplate.dbtemplate" or .pk==354)]' > ietf/name/fixtures/names.json

View file

@ -1,50 +0,0 @@
#!/bin/bash
# assume we're in bin/, sibling to ietf/
cd ${0%/*}/../ietf || { echo "CD to ietf directory failed, bailing out"; exit; }
trap 'echo "$program($LINENO): Command failed with error code $? ($0 $*)"; exit 1' ERR
if [ "$*" ]; then apps="$@"; graph="${1%.*}"; else apps=$(ls */models.py | sed 's!/models.py!!'); graph="models"; fi
newapps="doc group meeting message person name"
legacyapps="announcements idindex idrfc idtracker iesg ietfauth ipr liaisons mailinglists proceedings redirects submit wgcharter wginfo"
proxy="$(grep ^class */proxy.py | tr '()' ' ' | awk '{printf $2 ","}')"
names="$(grep ^class name/models.py | tr '()' ' ' | awk '{printf $2 ","}')"
legacy="$(for app in $legacyapps; do grep ^class $app/models.py | tr '()' ' '; done | grep -v ' Meeting\\(' | awk '{printf $2 ","}')"
events="$(egrep '^class .+DocEvent' doc/models.py | tr '()' ' ' | awk '{printf $2 ","}')"
echo -e "proxy: $proxy\n"
echo -e "names: $names\n"
echo -e "legacy:$legacy\n"
echo -e "events:$events\n"
exclude="--exclude=$proxy,$names,$legacy"
export PYTHONPATH="$PWD/.."
echo "Validating..."
./manage.py validate
export PYTHONPATH=`dirname $PWD`
module=${PWD##*/}
export DJANGO_SETTINGS_MODULE=$module.settings
export graph
export title
echo "Generate model graph"
graph="models-with-names-and-events"
title="New IETF Database schema"
${0%/*}/../ietf/manage.py graph_models --exclude="$proxy,$legacy" --title "$title" $apps > $graph.dot && dot -Tpng $graph.dot > $graph.png
echo "Generate new model without names"
graph="models-with-names"
title="New IETF Database schema, without name tables"
modelviz.py --exclude="$proxy,$legacy,$names" --title "$title" $apps > $graph.dot && dot -Tpng $graph.dot > $graph.png
echo "Generate new model without names and subevents"
graph="models"
title="New IETF Database schema, without name tables and subevents"
modelviz.py --exclude="$proxy,$legacy,$names,$events" --title "$title" $apps > $graph.dot && dot -Tpng $graph.dot > $graph.png

View file

@ -1,2 +0,0 @@
#!/bin/bash
zcat release-coverage.json.gz | jq 'to_entries[] | [.value.time, .key, .value.code.coverage, .value.template.coverage, .value.url.coverage] ' 2>/dev/null | tr "\n][" " \n" | tr -d ' "Z' | tr ",T" " " | sort -n | cut -c 2- | sed -n '/2015-03-10/,$p'

View file

@ -1,229 +0,0 @@
#!/bin/bash
version="0.34"
program=$(basename $0)
NEW="" # If there are more than $NEW % new lines, skip update
OLD="" # If there are more than $OLD % deleted lines, skip update
FILE=""
verbose=""
silent=""
# ----------------------------------------------------------------------
function usage() {
cat <<EOF
NAME
$program - conditionally update target file.
SYNOPSIS
$program [OPTIONS] FILE
DESCRIPTION
$program reads input from a pipe or file and saves it to a target
(FILE) if there are changes. If the new content is the same as the
old, the target is left untouched. By default, the target is also
left untouched if the new content is empty. There are options to
also abstain from applying an update if the changes are too large,
and to back up the previous version.
The purpose is to handle files with dynamically generated content in
such a manner that timestamps don't change if the content doesn't change,
and mistakes in content generation doesn't unnecessarily propagate to
the target.
OPTIONS
EOF
if [ "$(uname)" = "Linux" ]; then
egrep "^[ ]+[-][A-Za-z| -]+\*?\)[ ]+[A-Za-z].+#" $0 | tr -s "\t|" "\t," | sed -r -e 's/\)[ \t]+([A-Z]+)="\$2"[^#]*#/=\1\t/' -e 's/\)[^#]*#/\t/'
else
egrep "^[ ]+[-][A-Za-z| -]+\*?\)[ ]+[A-Za-z].+#" $0 | sed 's/\|.*"\$2"[^#]*#/ /'| sed -E 's/\|.*\)[^#]*#/ /'
fi
cat <<EOF
AUTHOR
Henrik Levkowetz <henrik@levkowetz.com>
EOF
exit
}
# ----------------------------------------------------------------------
function note() {
if [ -n "$verbose" ]; then
echo -e "$program: $*"
fi
}
# ----------------------------------------------------------------------
function warn() {
[ "$QUIET" ] || echo -e "$program: $*"
}
# ----------------------------------------------------------------------
function err() {
echo -e "$program: $*" > /dev/stderr
}
# -----------------------------------------------------------------------------
function leave() {
errcode=$1; shift
if [ "$errcode" -ge "2" ]; then warn "$*"; else note "$*"; fi
if [ -f "$tempfile" ]; then rm $tempfile; fi
if [ -f "$difffile" ]; then rm $difffile; fi
if [ "$errcode" = "1" -a "$RESULT" = "0" ]; then exit 0; else exit $errcode; fi
}
# ----------------------------------------------------------------------
# Set up error trap
trap 'leave 127 "$program($LINENO): Command failed with error code $? while processing '$origfile'."' ERR
# exit with a message if a command fails
set -e
# ----------------------------------------------------------------------
# Get any options
#
# Default values
PAT="\$path\$base.%Y-%m-%d_%H%M"
RESULT="0"
QUIET=""
# Based on the sample code in /usr/share/doc/util-linux/examples/parse.bash.gz
if [ "$(uname)" = "Linux" ]; then
GETOPT_RESULT=$(getopt -o bc:ef:hn:o:p:qrvV --long backup,maxchg:,empty,file:,help,maxnew:,maxold:,prefix:,report,quiet,verbose,version -n "$program" -- "$@")
else
GETOPT_RESULT=$(getopt bc:ef:hn:o:p:qrvV "$@")
fi
if [ $? != 0 ] ; then echo "Terminating..." >&2 ; exit 1 ; fi
note "GETOPT_RESULT: $GETOPT_RESULT"
eval set -- "$GETOPT_RESULT"
while true ; do
case "$1" in
-b|--backup) backup=1; shift ;; # Back up earlier versions by creating a backup file
-c|--maxchg) CHG="$2"; shift 2 ;; # Limit on percentage of changed lines
-e|--empty) empty=1; shift ;; # Permit the update to be empty (default: discard)
-f|--file) FILE="$2"; shift 2 ;; # Read input from FILE instead of standard input
-h|--help) usage; shift ;; # Show this text and exit
-n|--maxnew) NEW="$2"; shift 2 ;; # Limit on percentage of new (added) lines
-o|--maxold) OLD="$2"; shift 2 ;; # Limit on percentage of old (deleted) lines
-p|--pat*) PAT="$2"; shift 2 ;; # Backup name base ('$path$base.%Y%m%d_%H%M')
-q|--quiet) QUIET=1; shift;; # Be less verbose
-r|--result) RESULT=1; shift ;; # Return 1 if update not done
-v|--verbose) verbose=1; shift ;; # Be more verbose about what's happening
-V|--version) echo -e "$program\t$version"; exit;; # Show version and exit
--) shift ; break ;;
*) echo "$program: Internal error, inconsistent option specification." ; exit 1 ;;
esac
done
if [ $CHG ]; then OLD=$CHG; NEW=$CHG; fi
if [ $# -lt 1 ]; then echo -e "$program: Missing output filename\n"; usage; fi
origfile=$1
tempfile=$(mktemp)
difffile=$(mktemp)
if [ -e "$origfile" ]; then
cp -p $origfile $tempfile # For ownership and permissions
cat $FILE > $tempfile
[ "$FILE" ] && touch -r $FILE $tempfile
# This won't work if we don't have sufficient privileges:
#chown --reference=$origfile $tempfile
#chmod --reference=$origfile $tempfile
else
cat $FILE > $origfile
[ "$FILE" ] && touch -r $FILE $tempfile
leave 0 "Created file '$origfile'"
fi
origlen=$(wc -c < $origfile)
newlen=$(wc -c < $tempfile)
if [ $origlen = 0 -a $newlen = 0 ]; then
rm $tempfile
leave 1 "New content is identical (and void) - not updating '$origfile'."
fi
if [ $newlen = 0 -a -z "$empty" ]; then
leave 1 "New content is void - not updating '$origfile'."
fi
diff $origfile $tempfile > $difffile || [ $? -le 1 ] && true # suppress the '1' error code on differences
difflen=$(wc -l < $difffile)
if [ $difflen = 0 ]; then
leave 1 "New content is identical - not updating '$origfile'."
fi
if [ "$OLD" -o "$NEW" ]; then
if [ "$NEW" ]; then maxnew=$(( $origlen * $NEW / 100 )); fi
if [ "$OLD" ]; then maxdel=$(( $origlen * $OLD / 100 )); fi
newcount=$(grep "^> " $difffile | wc -c)
outcount=$(grep "^< " $difffile | wc -c)
delcount=$(grep "^! " $difffile | wc -c)
delcount=$(( $outcount + $delcount ))
rm $difffile
if [ "$OLD" ]; then
if [ "$delcount" -ge "$maxdel" ]; then
cp $tempfile $origfile.update
leave 2 "New content has too many removed lines ($delcount/$origlen)\n - not updating '$origfile'.\nNew content placed in '$origfile.update' instead"
fi
fi
if [ "$NEW" ]; then
if [ "$newcount" -ge "$maxnew" ]; then
cp $tempfile $origfile.update
leave 2 "New content has too many added lines ($newcount/$origlen)\n - not updating '$origfile'.\nNew content placed in '$origfile.update' instead"
fi
fi
fi
if [ "$backup" ]; then
path=${origfile%/*}
name=${origfile##*/}
base=${name%.*}
ext=${origfile##*.}
if [ "$ext" = "$origfile" ]; then
ext=""
elif [ ! "${ext%/*}" = "$ext" ]; then
ext=""
else
ext=".$ext"
fi
if [ "$path" = "$origfile" ]; then
path=""
else
path="$path/"
fi
ver=1
backfile=$(eval date +"$PAT")
backpath="${backfile%/*}"
if [ "$backpath" = "$backfile" ]; then
backpath="."
fi
if [ ! -d $backpath ]; then
if [ -e $backpath ]; then
leave 3 "The backup path '$backpath' exists but isn't a directory"
else
mkdir -p $backpath
fi
fi
while [ -e "$backfile,$ver$ext" ]; do
ver=$(( $ver+1 ))
done
note "Saving backup: $backfile,$ver$ext"
cp -p "$origfile" "$backfile,$ver$ext"
chmod -w "$backfile,$ver$ext" || true
fi
if ! mv $tempfile $origfile; then cp -p $tempfile $origfile; fi
leave 0 "Updated file '$origfile'"

View file

@ -3,15 +3,7 @@ import os
import sys
import time as timeutils
import inspect
from typing import Callable
try:
import syslog
logger = syslog.syslog # type: Callable
except ImportError: # import syslog will fail on Windows boxes
import logging
logging.basicConfig(filename='tracker.log',level=logging.INFO)
logger = logging.info
try:
from pprint import pformat
@ -155,13 +147,6 @@ def showpos(name):
indent = ' ' * (_report_indent[0])
sys.stderr.write("%s%s:%s: %s: '%s'\n" % (indent, fn, line, name, value))
def log(name):
if debug:
frame = inspect.stack()[1][0]
value = eval(name, frame.f_globals, frame.f_locals)
indent = ' ' * (_report_indent[0])
logger("%s%s: %s" % (indent, name, value))
def pprint(name):
if debug:
frame = inspect.stack()[1][0]

View file

@ -18,6 +18,7 @@ COPY . .
COPY ./dev/build/start.sh ./start.sh
COPY ./dev/build/datatracker-start.sh ./datatracker-start.sh
COPY ./dev/build/celery-start.sh ./celery-start.sh
COPY ./dev/build/gunicorn.conf.py ./gunicorn.conf.py
RUN pip3 --disable-pip-version-check --no-cache-dir install -r requirements.txt && \
echo '# empty' > ietf/settings_local.py && \

View file

@ -0,0 +1,49 @@
# Copyright The IETF Trust 2024, All Rights Reserved
# Log as JSON on stdout (to distinguish from Django's logs on stderr)
#
# This is applied as an update to gunicorn's glogging.CONFIG_DEFAULTS.
logconfig_dict = {
"version": 1,
"disable_existing_loggers": False,
"root": {"level": "INFO", "handlers": ["console"]},
"loggers": {
"gunicorn.error": {
"level": "INFO",
"handlers": ["console"],
"propagate": False,
"qualname": "gunicorn.error"
},
"gunicorn.access": {
"level": "INFO",
"handlers": ["access_console"],
"propagate": False,
"qualname": "gunicorn.access"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "json",
"stream": "ext://sys.stdout"
},
"access_console": {
"class": "logging.StreamHandler",
"formatter": "access_json",
"stream": "ext://sys.stdout"
},
},
"formatters": {
"json": {
"class": "ietf.utils.jsonlogger.DatatrackerJsonFormatter",
"style": "{",
"format": "{asctime}{levelname}{message}{name}{process}",
},
"access_json": {
"class": "ietf.utils.jsonlogger.GunicornRequestJsonFormatter",
"style": "{",
"format": "{asctime}{levelname}{message}{name}{process}",
}
}
}

View file

@ -1,7 +1,7 @@
# =====================
# --- Builder Stage ---
# =====================
FROM postgres:14.6 AS builder
FROM postgres:16 AS builder
ENV POSTGRES_PASSWORD=hk2j22sfiv
ENV POSTGRES_USER=django
@ -19,7 +19,7 @@ RUN ["/usr/local/bin/docker-entrypoint.sh", "postgres"]
# ===================
# --- Final Image ---
# ===================
FROM postgres:14.6
FROM postgres:16
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
COPY --from=builder /data $PGDATA

View file

@ -57,7 +57,7 @@ def idindex_update_task():
ftp_path = Path(settings.FTP_DIR) / "internet-drafts"
all_archive_path = Path(settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR)
with TempFileManager("/a/tmp") as tmp_mgr:
with TempFileManager() as tmp_mgr:
# Generate copies of new contents
all_id_content = all_id_txt()
all_id_tmpfile = tmp_mgr.make_temp_file(all_id_content)

View file

@ -0,0 +1,16 @@
# Generated by Django 4.2.13 on 2024-06-21 20:40
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("iesg", "0002_telechatagendacontent"),
]
operations = [
migrations.DeleteModel(
name="Telechat",
),
]

View file

@ -59,20 +59,6 @@ class TelechatAgendaItem(models.Model):
type_name = self.TYPE_CHOICES_DICT.get(self.type, str(self.type))
return "%s: %s" % (type_name, self.title or "")
class Telechat(models.Model):
telechat_id = models.IntegerField(primary_key=True)
telechat_date = models.DateField(null=True, blank=True)
minute_approved = models.IntegerField(null=True, blank=True)
wg_news_txt = models.TextField(blank=True)
iab_news_txt = models.TextField(blank=True)
management_issue = models.TextField(blank=True)
frozen = models.IntegerField(null=True, blank=True)
mi_frozen = models.IntegerField(null=True, blank=True)
class Meta:
db_table = 'telechat'
def next_telechat_date():
dates = TelechatDate.objects.order_by("-date")
if dates:

View file

@ -9,7 +9,7 @@ from tastypie.cache import SimpleCache
from ietf import api
from ietf.iesg.models import TelechatDate, Telechat, TelechatAgendaItem, TelechatAgendaContent
from ietf.iesg.models import TelechatDate, TelechatAgendaItem, TelechatAgendaContent
class TelechatDateResource(ModelResource):
@ -17,62 +17,57 @@ class TelechatDateResource(ModelResource):
cache = SimpleCache()
queryset = TelechatDate.objects.all()
serializer = api.Serializer()
#resource_name = 'telechatdate'
ordering = ['id', ]
filtering = {
# resource_name = 'telechatdate'
ordering = [
"id",
]
filtering = {
"id": ALL,
"date": ALL,
}
api.iesg.register(TelechatDateResource())
class TelechatResource(ModelResource):
class Meta:
cache = SimpleCache()
queryset = Telechat.objects.all()
serializer = api.Serializer()
#resource_name = 'telechat'
ordering = ['tlechat_id', ]
filtering = {
"telechat_id": ALL,
"telechat_date": ALL,
"minute_approved": ALL,
"wg_news_txt": ALL,
"iab_news_txt": ALL,
"management_issue": ALL,
"frozen": ALL,
"mi_frozen": ALL,
}
api.iesg.register(TelechatResource())
class TelechatAgendaItemResource(ModelResource):
class Meta:
cache = SimpleCache()
queryset = TelechatAgendaItem.objects.all()
serializer = api.Serializer()
#resource_name = 'telechatagendaitem'
ordering = ['id', ]
filtering = {
# resource_name = 'telechatagendaitem'
ordering = [
"id",
]
filtering = {
"id": ALL,
"text": ALL,
"type": ALL,
"title": ALL,
}
api.iesg.register(TelechatAgendaItemResource())
from ietf.name.resources import TelechatAgendaSectionNameResource
class TelechatAgendaContentResource(ModelResource):
section = ToOneField(TelechatAgendaSectionNameResource, 'section')
section = ToOneField(TelechatAgendaSectionNameResource, "section")
class Meta:
queryset = TelechatAgendaContent.objects.none()
serializer = api.Serializer()
cache = SimpleCache()
#resource_name = 'telechatagendacontent'
ordering = ['id', ]
filtering = {
# resource_name = 'telechatagendacontent'
ordering = [
"id",
]
filtering = {
"id": ALL,
"text": ALL,
"section": ALL_WITH_RELATIONS,
}
api.iesg.register(TelechatAgendaContentResource())

View file

@ -1,30 +0,0 @@
# Copyright The IETF Trust 2016-2020, All Rights Reserved
# -*- coding: utf-8 -*-
import io
import subprocess, hashlib
from django.utils.encoding import force_bytes
from django.conf import settings
def update_htpasswd_file(username, password):
if getattr(settings, 'USE_PYTHON_HTDIGEST', None):
pass_file = settings.HTPASSWD_FILE
realm = settings.HTDIGEST_REALM
prefix = force_bytes('%s:%s:' % (username, realm))
key = force_bytes(hashlib.md5(prefix + force_bytes(password)).hexdigest())
f = io.open(pass_file, 'r+b')
pos = f.tell()
line = f.readline()
while line:
if line.startswith(prefix):
break
pos=f.tell()
line = f.readline()
f.seek(pos)
f.write(b'%s%s\n' % (prefix, key))
f.close()
else:
p = subprocess.Popen([settings.HTPASSWD_COMMAND, "-b", settings.HTPASSWD_FILE, username, password], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()

View file

@ -3,13 +3,10 @@
import datetime
import io
import logging # pyflakes:ignore
import os
import re
import requests
import requests_mock
import shutil
import time
import urllib
@ -21,7 +18,6 @@ from oic.oic.message import RegistrationResponse, AuthorizationResponse
from oic.utils.authn.client import CLIENT_AUTHN_METHOD
from oidc_provider.models import RSAKey
from pyquery import PyQuery
from unittest import skipIf
from urllib.parse import urlsplit
import django.core.signing
@ -35,7 +31,6 @@ import debug # pyflakes:ignore
from ietf.group.factories import GroupFactory, RoleFactory
from ietf.group.models import Group, Role, RoleName
from ietf.ietfauth.htpasswd import update_htpasswd_file
from ietf.ietfauth.utils import has_role
from ietf.meeting.factories import MeetingFactory
from ietf.nomcom.factories import NomComFactory
@ -45,41 +40,12 @@ from ietf.person.tasks import send_apikey_usage_emails_task
from ietf.review.factories import ReviewRequestFactory, ReviewAssignmentFactory
from ietf.review.models import ReviewWish, UnavailablePeriod
from ietf.stats.models import MeetingRegistration
from ietf.utils.decorators import skip_coverage
from ietf.utils.mail import outbox, empty_outbox, get_payload_text
from ietf.utils.test_utils import TestCase, login_testing_unauthorized
from ietf.utils.timezone import date_today
if os.path.exists(settings.HTPASSWD_COMMAND):
skip_htpasswd_command = False
skip_message = ""
else:
skip_htpasswd_command = True
skip_message = ("Skipping htpasswd test: The binary for htpasswd wasn't found in the\n "
"location indicated in settings.py.")
print(" "+skip_message)
class IetfAuthTests(TestCase):
def setUp(self):
super().setUp()
self.saved_use_python_htdigest = getattr(settings, "USE_PYTHON_HTDIGEST", None)
settings.USE_PYTHON_HTDIGEST = True
self.saved_htpasswd_file = settings.HTPASSWD_FILE
self.htpasswd_dir = self.tempdir('htpasswd')
settings.HTPASSWD_FILE = os.path.join(self.htpasswd_dir, "htpasswd")
io.open(settings.HTPASSWD_FILE, 'a').close() # create empty file
self.saved_htdigest_realm = getattr(settings, "HTDIGEST_REALM", None)
settings.HTDIGEST_REALM = "test-realm"
def tearDown(self):
shutil.rmtree(self.htpasswd_dir)
settings.USE_PYTHON_HTDIGEST = self.saved_use_python_htdigest
settings.HTPASSWD_FILE = self.saved_htpasswd_file
settings.HTDIGEST_REALM = self.saved_htdigest_realm
super().tearDown()
def test_index(self):
self.assertEqual(self.client.get(urlreverse("ietf.ietfauth.views.index")).status_code, 200)
@ -162,15 +128,6 @@ class IetfAuthTests(TestCase):
return confirm_url
def username_in_htpasswd_file(self, username):
with io.open(settings.HTPASSWD_FILE) as f:
for l in f:
if l.startswith(username + ":"):
return True
with io.open(settings.HTPASSWD_FILE) as f:
print(f.read())
return False
# For the lowered barrier to account creation period, we are disabling this kind of failure
# def test_create_account_failure(self):
@ -223,8 +180,6 @@ class IetfAuthTests(TestCase):
self.assertEqual(Person.objects.filter(user__username=email).count(), 1)
self.assertEqual(Email.objects.filter(person__user__username=email).count(), 1)
self.assertTrue(self.username_in_htpasswd_file(email))
# This also tests new account creation.
def test_create_existing_account(self):
@ -490,7 +445,6 @@ class IetfAuthTests(TestCase):
self.assertEqual(r.status_code, 200)
q = PyQuery(r.content)
self.assertEqual(len(q("form .is-invalid")), 0)
self.assertTrue(self.username_in_htpasswd_file(user.username))
# reuse reset url
r = self.client.get(confirm_url)
@ -614,23 +568,6 @@ class IetfAuthTests(TestCase):
self.assertEqual(r.status_code, 302)
self.assertEqual(ReviewWish.objects.filter(doc=doc, team=review_req.team).count(), 0)
def test_htpasswd_file_with_python(self):
# make sure we test both Python and call-out to binary
settings.USE_PYTHON_HTDIGEST = True
update_htpasswd_file("foo", "passwd")
self.assertTrue(self.username_in_htpasswd_file("foo"))
@skipIf(skip_htpasswd_command, skip_message)
@skip_coverage
def test_htpasswd_file_with_htpasswd_binary(self):
# make sure we test both Python and call-out to binary
settings.USE_PYTHON_HTDIGEST = False
update_htpasswd_file("foo", "passwd")
self.assertTrue(self.username_in_htpasswd_file("foo"))
def test_change_password(self):
chpw_url = urlreverse("ietf.ietfauth.views.change_password")
prof_url = urlreverse("ietf.ietfauth.views.profile")

View file

@ -65,7 +65,6 @@ from ietf.group.models import Role, Group
from ietf.ietfauth.forms import ( RegistrationForm, PasswordForm, ResetPasswordForm, TestEmailForm,
ChangePasswordForm, get_person_form, RoleEmailForm,
NewEmailForm, ChangeUsernameForm, PersonPasswordForm)
from ietf.ietfauth.htpasswd import update_htpasswd_file
from ietf.ietfauth.utils import has_role
from ietf.name.models import ExtResourceName
from ietf.nomcom.models import NomCom
@ -222,8 +221,6 @@ def confirm_account(request, auth):
user = User.objects.create(username=email, email=email)
user.set_password(password)
user.save()
# password is also stored in htpasswd file
update_htpasswd_file(email, password)
# make sure the rest of the person infrastructure is
# well-connected
@ -552,8 +549,6 @@ def confirm_password_reset(request, auth):
user.set_password(password)
user.save()
# password is also stored in htpasswd file
update_htpasswd_file(user.username, password)
success = True
else:
@ -693,8 +688,6 @@ def change_password(request):
user.set_password(new_password)
user.save()
# password is also stored in htpasswd file
update_htpasswd_file(user.username, new_password)
# keep the session
update_session_auth_hash(request, user)
@ -731,13 +724,10 @@ def change_username(request):
form = ChangeUsernameForm(user, request.POST)
if form.is_valid():
new_username = form.cleaned_data["username"]
password = form.cleaned_data["password"]
assert new_username in emails
user.username = new_username.lower()
user.save()
# password is also stored in htpasswd file
update_htpasswd_file(user.username, password)
# keep the session
update_session_auth_hash(request, user)

View file

@ -789,7 +789,9 @@ Subject: test
mock_process_response_email.side_effect = None
mock_process_response_email.return_value = None # rejected message
ingest_response_email(message) # should _not_ send an exception email on a clean rejection
with self.assertRaises(EmailIngestionError) as context:
ingest_response_email(message)
self.assertIsNone(context.exception.as_emailmessage()) # should not send an email on a clean rejection
self.assertTrue(mock_process_response_email.called)
self.assertEqual(mock_process_response_email.call_args, mock.call(message))
mock_process_response_email.reset_mock()

View file

@ -92,8 +92,10 @@ def generate_draft_recursive_txt():
def ingest_response_email(message: bytes):
from ietf.api.views import EmailIngestionError # avoid circular import
try:
process_response_email(message)
result = process_response_email(message)
except Exception as err:
# Message was rejected due to an unhandled exception. This is likely something
# the admins need to address, so send them a copy of the email.
raise EmailIngestionError(
"Datatracker IPR email ingestion error",
email_body=dedent("""\
@ -104,3 +106,8 @@ def ingest_response_email(message: bytes):
email_original_message=message,
email_attach_traceback=True,
) from err
if result is None:
# Message was rejected due to some problem the sender can fix, so bounce but don't send
# an email to the admins
raise EmailIngestionError("IPR response rejected", email_body=None)

View file

@ -3,10 +3,8 @@
import datetime
import os
import pprint
import sys
import syslog
from django.contrib import admin
from django.core.cache import cache
@ -17,14 +15,14 @@ from django.http import Http404
import debug # pyflakes:ignore
from ietf.person.models import Person, Alias, Email
from ietf.utils import log
from ietf.utils.mail import send_mail
def merge_persons(request, source, target, file=sys.stdout, verbose=False):
changes = []
# write log
syslog.openlog(str(os.path.basename(__file__)), syslog.LOG_PID, syslog.LOG_USER)
syslog.syslog("Merging person records {} => {}".format(source.pk,target.pk))
log.log(f"Merging person records {source.pk} => {target.pk}")
# handle primary emails
for email in get_extra_primary(source,target):
@ -118,7 +116,7 @@ def handle_users(source,target,check_only=False):
if source.user and target.user:
message = "DATATRACKER LOGIN ACTION: retaining login: {}, removing login: {}".format(target.user,source.user)
if not check_only:
syslog.syslog('merge-person-records: deactivating user {}'.format(source.user.username))
log.log(f"merge-person-records: deactivating user {source.user.username}")
user = source.user
source.user = None
source.save()

View file

@ -11,5 +11,4 @@ urlpatterns = [
url(r'^(?P<date>[0-9\-]+)/management/$', views.management),
url(r'^(?P<date>[0-9\-]+)/minutes/$', views.minutes),
url(r'^(?P<date>[0-9\-]+)/roll-call/$', views.roll_call),
url(r'^new/$', views.new),
]

View file

@ -17,7 +17,7 @@ from ietf.doc.utils import add_state_change_event, update_action_holders
from ietf.person.models import Person
from ietf.doc.lastcall import request_last_call
from ietf.doc.mails import email_state_changed
from ietf.iesg.models import TelechatDate, TelechatAgendaItem, Telechat
from ietf.iesg.models import TelechatDate, TelechatAgendaItem
from ietf.iesg.agenda import agenda_data, get_doc_section
from ietf.ietfauth.utils import role_required
from ietf.secr.telechat.forms import BallotForm, ChangeStateForm, DateSelectForm, TELECHAT_TAGS
@ -419,18 +419,6 @@ def minutes(request, date):
'da_docs': da_docs},
)
@role_required('Secretariat')
def new(request):
    '''
    This view creates a new telechat agenda and redirects to the default view
    '''
    if request.method == 'POST':
        # Agenda date comes straight from the submitted form.
        # NOTE(review): raises KeyError if 'date' is absent from the POST data
        # -- presumably the form always supplies it; confirm against the template.
        date = request.POST['date']
        # create legacy telechat record
        Telechat.objects.create(telechat_date=date)
        messages.success(request,'New Telechat Agenda created')
        return redirect('ietf.secr.telechat.views.doc', date=date)
    # NOTE(review): the non-POST path is not visible in this fragment; as
    # written the function returns None on GET -- confirm a form is rendered
    # elsewhere or that this view is only ever reached via POST.
@role_required('Secretariat')
def roll_call(request, date):

View file

@ -26,11 +26,6 @@ warnings.filterwarnings("ignore", message="The logout\\(\\) view is superseded b
warnings.filterwarnings("ignore", message="Report.file_reporters will no longer be available in Coverage.py 4.2", module="coverage.report")
warnings.filterwarnings("ignore", message="Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated", module="bleach")
warnings.filterwarnings("ignore", message="HTTPResponse.getheader\\(\\) is deprecated", module='selenium.webdriver')
try:
import syslog
syslog.openlog(str("datatracker"), syslog.LOG_PID, syslog.LOG_USER)
except ImportError:
pass
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.abspath(BASE_DIR + "/.."))
@ -240,11 +235,11 @@ LOGGING = {
#
'loggers': {
'django': {
'handlers': ['debug_console', 'mail_admins'],
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
},
'django.request': {
'handlers': ['debug_console'],
'handlers': ['console'],
'level': 'ERROR',
},
'django.server': {
@ -252,19 +247,19 @@ LOGGING = {
'level': 'INFO',
},
'django.security': {
'handlers': ['debug_console', ],
'handlers': ['console', ],
'level': 'INFO',
},
'oidc_provider': {
'handlers': ['debug_console', ],
'handlers': ['console', ],
'level': 'DEBUG',
},
'datatracker': {
'handlers': ['debug_console'],
'handlers': ['console'],
'level': 'INFO',
},
'celery': {
'handlers': ['debug_console'],
'handlers': ['console'],
'level': 'INFO',
},
},
@ -275,7 +270,7 @@ LOGGING = {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'json',
'formatter': 'plain',
},
'debug_console': {
# Active only when DEBUG=True
@ -331,7 +326,9 @@ LOGGING = {
'format': '{levelname}: {name}:{lineno}: {message}',
},
'json' : {
'()': 'pythonjsonlogger.jsonlogger.JsonFormatter'
"class": "ietf.utils.jsonlogger.DatatrackerJsonFormatter",
"style": "{",
"format": "{asctime}{levelname}{message}{name}{pathname}{lineno}{funcName}{process}",
}
},
}
@ -821,8 +818,6 @@ IDSUBMIT_MAX_VALIDATION_TIME = datetime.timedelta(minutes=20)
# Age at which a submission expires if not posted
IDSUBMIT_EXPIRATION_AGE = datetime.timedelta(days=14)
IDSUBMIT_MANUAL_STAGING_DIR = '/tmp/'
IDSUBMIT_FILE_TYPES = (
'txt',
'html',
@ -981,8 +976,6 @@ DE_GFM_BINARY = '/usr/bin/de-gfm.ruby2.5'
# Account settings
DAYS_TO_EXPIRE_REGISTRATION_LINK = 3
MINUTES_TO_EXPIRE_RESET_PASSWORD_LINK = 60
HTPASSWD_COMMAND = "/usr/bin/htpasswd"
HTPASSWD_FILE = "/a/www/htpasswd"
# Generation of pdf files
GHOSTSCRIPT_COMMAND = "/usr/bin/gs"

View file

@ -1,8 +1,6 @@
# Copyright The IETF Trust 2017-2019, All Rights Reserved
# Copyright 2016 IETF Trust
import syslog
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
@ -10,10 +8,8 @@ import debug # pyflakes:ignore
from ietf.meeting.models import Meeting
from ietf.stats.utils import fetch_attendance_from_meetings
from ietf.utils import log
logtag = __name__.split('.')[-1]
logname = "user.log"
syslog.openlog(str(logtag), syslog.LOG_PID, syslog.LOG_USER)
class Command(BaseCommand):
help = "Fetch meeting attendee figures from ietf.org/registration/attendees."
@ -43,4 +39,4 @@ class Command(BaseCommand):
if self.stdout.isatty():
self.stdout.write(msg+'\n') # make debugging a bit easier
else:
syslog.syslog(msg)
log.log(msg)

26
ietf/utils/jsonlogger.py Normal file
View file

@ -0,0 +1,26 @@
# Copyright The IETF Trust 2024, All Rights Reserved
from pythonjsonlogger import jsonlogger
import time
class DatatrackerJsonFormatter(jsonlogger.JsonFormatter):
    """Base JSON log formatter for the datatracker.

    Tweaks pythonjsonlogger's JsonFormatter so that timestamps are rendered
    in UTC and use a '.' as the millisecond separator (logging's default is ',').
    """
    converter = time.gmtime  # use UTC
    default_msec_format = "%s.%03d"  # '.' instead of ','
class GunicornRequestJsonFormatter(DatatrackerJsonFormatter):
    """Only works with Gunicorn's logging"""

    def add_fields(self, log_record, record, message_dict):
        # record.args is expected to be Gunicorn's access-log "atoms" dict;
        # the single-letter keys below are Gunicorn access-log format atoms.
        # NOTE(review): this raises KeyError/TypeError if a non-Gunicorn record
        # reaches this formatter -- it must only be attached to Gunicorn's
        # access logger, as the docstring says.
        super().add_fields(log_record, record, message_dict)
        log_record.setdefault("method", record.args["m"])  # request method
        log_record.setdefault("proto", record.args["H"])  # request protocol
        log_record.setdefault("remote_ip", record.args["h"])  # remote address
        path = record.args["U"]  # URL path
        if record.args["q"]:  # URL query string
            # Re-attach the query string so "path" carries the full request target.
            path = "?".join([path, record.args["q"]])
        log_record.setdefault("path", path)
        log_record.setdefault("status", record.args["s"])  # response status code
        log_record.setdefault("referer", record.args["f"])  # Referer header
        log_record.setdefault("user_agent", record.args["a"])  # User-Agent header
        log_record.setdefault("len_bytes", record.args["B"])  # response length
        log_record.setdefault("duration_ms", record.args["M"])  # request duration

View file

@ -1,63 +0,0 @@
# Copyright The IETF Trust 2014-2020, All Rights Reserved
import io
import sys
from textwrap import dedent
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
def import_htpasswd_file(filename, verbosity=1, overwrite=False):
    """Import password hashes from an htpasswd file into Django's auth_user table.

    Each line of the file must have the form 'username:hash'.  The hash is
    translated from its htpasswd scheme ({SHA}, $apr1$ MD5, or crypt) into the
    corresponding Django password-hash notation.  Only users that already exist
    (matched case-insensitively on username) are updated; unknown usernames are
    reported on stderr when verbosity > 1 and otherwise skipped.

    Args:
        filename:  path of the htpasswd file to read.
        verbosity: 0 = silent, 1 = progress dots on stderr, >1 = per-user names.
        overwrite: if True, replace existing non-empty passwords as well.

    Raises:
        ValueError: if a line lacks the ':' separator.
    """
    with io.open(filename) as file:
        for line in file:
            if ':' not in line:
                raise ValueError('Found a line without colon separator in the htpassword file %s:'
                                 ' "%s"' % (file.name, line))
            username, password = line.strip().split(':', 1)
            try:
                user = User.objects.get(username__iexact=username)
                # Don't clobber an existing password unless explicitly asked to.
                if overwrite or not user.password:
                    # Map htpasswd hash prefixes onto Django's
                    # "<algorithm>$<salt>$<hash>" storage format.
                    if password.startswith('{SHA}'):
                        user.password = "sha1$$%s" % password[len('{SHA}'):]
                    elif password.startswith('$apr1$'):
                        user.password = "md5$%s" % password[len('$apr1$'):]
                    else:  # Assume crypt
                        user.password = "crypt$$%s" % password
                    user.save()
                    if verbosity > 0:
                        sys.stderr.write('.')
                    if verbosity > 1:
                        sys.stderr.write(' %s\n' % username)
            except User.DoesNotExist:
                if verbosity > 1:
                    sys.stderr.write('\nNo such user: %s\n' % username)
class Command(BaseCommand):
    """
    Import passwords from one or more htpasswd files to Django's auth_user table.
    This command only imports passwords; it does not import usernames, as that
    would leave usernames without associated Person records in the database,
    something which is undesirable.
    By default the command won't overwrite existing password entries, but
    given the --force switch, it will overwrite existing entries too. Without
    the --force switch, the command is safe to run repeatedly.
    """
    # The class docstring doubles as the management-command help text.
    help = dedent(__doc__).strip()

    def add_arguments(self, parser):
        parser.add_argument('--force',
            action='store_true', dest='overwrite', default=False,
            help='Overwrite existing passwords in the auth_user table.')

    # NOTE(review): 'args' is the legacy (optparse-era) usage string; modern
    # Django ignores it once add_arguments() is defined -- confirm positional
    # file paths are actually delivered via *filenames on this Django version.
    args = '[path [path [...]]]'

    def handle(self, *filenames, **options):
        # One import pass per file; 'verbosity' is Django's built-in option,
        # 'overwrite' comes from --force above.
        overwrite = options.get('overwrite', False)
        verbosity = int(options.get('verbosity'))
        for fn in filenames:
            import_htpasswd_file(fn, verbosity=verbosity, overwrite=overwrite)

View file

@ -1,23 +1,17 @@
# Copyright The IETF Trust 2013-2021, All Rights Reserved
# Copyright The IETF Trust 2013-2024, All Rights Reserved
# -*- coding: utf-8 -*-
import os
import sys
import syslog
path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
syslog.openlog(str("datatracker"), syslog.LOG_PID, syslog.LOG_USER)
if not path in sys.path:
sys.path.insert(0, path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ietf.settings")
syslog.syslog("Starting datatracker wsgi instance")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
application = get_wsgi_application()

View file

@ -261,8 +261,5 @@ _csrf_trusted_origins_str = os.environ.get("DATATRACKER_CSRF_TRUSTED_ORIGINS")
if _csrf_trusted_origins_str is not None:
CSRF_TRUSTED_ORIGINS = _multiline_to_list(_csrf_trusted_origins_str)
# Send logs to console instead of debug_console when running in kubernetes
LOGGING["loggers"]["django"]["handlers"] = ["console", "mail_admins"]
LOGGING["loggers"]["django.security"]["handlers"] = ["console"]
LOGGING["loggers"]["datatracker"]["handlers"] = ["console"]
LOGGING["loggers"]["celery"]["handlers"] = ["console"]
# Console logs as JSON instead of plain when running in k8s
LOGGING["handlers"]["console"]["formatter"] = "json"