misc: import docker improvements from 7.39.1.dev2

- Legacy-Id: 19629
nick 2021-11-10 21:51:55 +00:00
parent dd3447a7ad
commit d52ad4fbc2
32 changed files with 931 additions and 782 deletions


@@ -2,11 +2,14 @@
// https://github.com/microsoft/vscode-dev-containers/tree/v0.202.5/containers/python-3
{
"name": "IETF Datatracker",
"dockerComposeFile": "docker-compose.yml",
"dockerComposeFile": ["../docker/docker-compose.yml", "docker-compose.extend.yml"],
"service": "app",
"workspaceFolder": "/usr/local/share/datatracker",
"workspaceFolder": "/root/src",
"shutdownAction": "stopCompose",
"postCreateCommand": "/docker-init.sh",
"containerEnv": {
"EDITOR_VSCODE": "true"
},
// Set *default* container specific settings.json values on container create.
"settings": {
@@ -35,6 +38,20 @@
"./ietf",
"-p",
"test*.py"
],
"sqltools.connections": [
// Default connection to dev DB container
{
"name": "Local Dev",
"server": "db",
"port": 3306,
"database": "ietf_utf8",
"username": "django",
"password": "RkTkDPFnKpko",
"driver": "MySQL",
"askForPassword": false,
"connectionTimeout": 60
}
]
// "python.envFile": "${workspaceFolder}/.devcontainer/dev.env"
},
@@ -48,10 +65,25 @@
"redhat.vscode-yaml",
"visualstudioexptteam.vscodeintellicode",
"batisteo.vscode-django",
"mutantdino.resourcemonitor",
"spmeesseman.vscode-taskexplorer",
"mtxr.sqltools",
"mtxr.sqltools-driver-mysql"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [8000, 3306]
"forwardPorts": [8000, 3306],
"portsAttributes": {
"8000": {
"label": "Datatracker",
"onAutoForward": "notify"
},
"3306": {
"label": "MariaDB",
"onAutoForward": "silent"
}
}
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "sh /docker-init.sh",


@@ -0,0 +1,8 @@
version: '3.8'
services:
app:
environment:
EDITOR_VSCODE: 1
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:db


@@ -1,164 +0,0 @@
#!/bin/bash
# A little bit of setup
export LANG=en_GB.UTF-8
WORKSPACEDIR="/usr/local/share/datatracker"
echo "Gathering info ..."
if [ ! "$USER" ]; then
echo "Environment variable USER is not set -- will set USER='django'."
USER="django"
fi
if [ ! "$UID" ]; then
echo "Environment variable UID is not set -- will set UID='1000'."
UID="1000"
fi
if [ ! "$GID" ]; then
echo "Environment variable GID is not set -- will set GID='1000'."
GID="1000"
fi
if [ ! "$TAG" ]; then
echo "Environment variable TAG is not set -- will set TAG='datatracker'."
TAG="datatracker"
fi
echo "User $USER ($UID:$GID)"
echo "Checking if syslogd is running ..."
if ! /etc/init.d/rsyslog status > /dev/null; then
echo "Starting syslogd ..."
/etc/init.d/rsyslog start
fi
echo "Waiting for DB container to come online ..."
wget -qO- https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for | sh -s -- localhost:3306 -- echo "DB ready"
echo "Checking if the IETF database exists in DB container ..."
if ! mysql --protocol tcp -h localhost -u root --password=ietf --database="ietf_utf8" --execute="SHOW TABLES;" | grep -q 'django'; then
echo "Fetching database ..."
DUMPDIR=/home/$USER/$DATADIR
wget -N --progress=bar:force:noscroll -P $DUMPDIR http://www.ietf.org/lib/dt/sprint/ietf_utf8.sql.gz
echo "Creating database ..."
mysql --protocol tcp -h localhost -u root --password=ietf --database="ietf_utf8" --execute="DROP DATABASE IF EXISTS ietf_utf8;"
mysqladmin --protocol tcp -h localhost -u root --password=ietf --default-character-set=utf8 create ietf_utf8
echo "Setting up permissions ..."
mysql --protocol tcp -h localhost -u root --password="ietf" ietf_utf8 <<< "GRANT ALL PRIVILEGES ON ietf_utf8.* TO 'django'@'%' IDENTIFIED BY 'RkTkDPFnKpko'; FLUSH PRIVILEGES;"
echo "Loading database (this may take a while)..."
gunzip < $DUMPDIR/ietf_utf8.sql.gz \
| pv --progress --bytes --rate --eta --cursor --force --size $(gzip --list --quiet $DUMPDIR/ietf_utf8.sql.gz | awk '{ print $2 }') \
| sed -e 's/ENGINE=MyISAM/ENGINE=InnoDB/' \
| mysql --protocol tcp -h localhost -u django --password=RkTkDPFnKpko -s -f ietf_utf8 \
&& rm $DUMPDIR/ietf_utf8.sql.gz
fi
if ! grep -q ":$GID:$" /etc/group ; then
echo "Creating group entry for GID '$GID' ..."
groupadd -g "$GID" "$USER"
fi
if ! id -u "$USER" &> /dev/null; then
echo "Creating user '$USER' ..."
useradd -s /bin/bash --groups staff,sudo --uid $UID --gid $GID $USER
echo "$USER:$USER" | chpasswd
fi
VIRTDIR="/opt/home/$USER/$TAG"
echo "Checking that there's a virtual environment for $TAG ..."
if [ ! -f $VIRTDIR/bin/activate ]; then
echo "Setting up python virtualenv at $VIRTDIR ..."
mkdir -p $VIRTDIR
python3.6 -m venv $VIRTDIR
echo -e "
# This is from $VIRTDIR/bin/activate, to activate the
# datatracker virtual python environment on docker container entry:
" >> /etc/bash.bashrc
cat $VIRTDIR/bin/activate >> /etc/bash.bashrc
cat /usr/local/share/datatracker/docker/setprompt >> /etc/bash.bashrc
else
echo "Using virtual environment at $VIRTDIR"
fi
echo "Activating the virtual python environment ..."
. $VIRTDIR/bin/activate
if [ ! -f "$WORKSPACEDIR/ietf/settings_local.py" ]; then
echo "Setting up a default settings_local.py ..."
cp $WORKSPACEDIR/.devcontainer/settings_local.py $WORKSPACEDIR/ietf/settings_local.py
fi
if [ ! -f "$WORKSPACEDIR/ietf/settings_local_debug.py" ]; then
echo "Setting up a default settings_local_debug.py ..."
cp $WORKSPACEDIR/.devcontainer/settings_local_debug.py $WORKSPACEDIR/ietf/settings_local_debug.py
fi
for sub in test/id/ test/staging/ test/archive/ test/rfc test/media test/wiki/ietf; do
dir="$WORKSPACEDIR/$sub"
if [ ! -d "$dir" ]; then
echo "Creating dir $dir"
mkdir -p "$dir";
fi
done
for sub in \
nomcom_keys/public_keys \
developers/ietf-ftp \
developers/ietf-ftp/bofreq \
developers/ietf-ftp/charter \
developers/ietf-ftp/conflict-reviews \
developers/ietf-ftp/internet-drafts \
developers/ietf-ftp/rfc \
developers/ietf-ftp/status-changes \
developers/ietf-ftp/yang/catalogmod \
developers/ietf-ftp/yang/draftmod \
developers/ietf-ftp/yang/ianamod \
developers/ietf-ftp/yang/invalmod \
developers/ietf-ftp/yang/rfcmod \
developers/www6s \
developers/www6s/staging \
developers/www6s/wg-descriptions \
developers/www6s/proceedings \
developers/www6/ \
developers/www6/iesg \
developers/www6/iesg/evaluation \
; do
dir="$WORKSPACEDIR/data/$sub"
if [ ! -d "$dir" ]; then
echo "Creating dir $dir"
mkdir -p "$dir";
chown "$USER" "$dir"
fi
done
if [ ! -f "$WORKSPACEDIR/test/data/draft-aliases" ]; then
echo "Generating draft aliases ..."
ietf/bin/generate-draft-aliases }
fi
if [ ! -f "$WORKSPACEDIR/test/data/group-aliases" ]; then
echo "Generating group aliases ..."
ietf/bin/generate-wg-aliases }
fi
chown -R $USER /opt/home/$USER
chmod -R g+w /usr/local/lib/ # so we can patch libs if needed
cd "$WORKSPACEDIR" || cd "/home/$USER/"
if ! echo "$LANG" | grep "UTF-8"; then
echo ""
echo "Make sure you export LANG=en_GB.UTF-8 (or another UTF-8 locale) in your .bashrc"
else
echo "LANG=$LANG"
fi
HOME=/opt/home/$USER
/usr/local/bin/python $WORKSPACEDIR/ietf/manage.py check --settings=settings_local
echo "Done!"
# su -p $USER
exec "$@"


@@ -1,65 +0,0 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
import six
if six.PY3:
from typing import Collection, Dict, List, Tuple # pyflakes:ignore
from ietf.settings import * # pyflakes:ignore
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'jzv$o93h_lzw4a0%0oz-5t5lk+ai=3f8x@uo*9ahu8w4i300o6'
DATABASES = {
'default': {
'HOST': 'db',
'PORT': 3306,
'NAME': 'ietf_utf8',
'ENGINE': 'django.db.backends.mysql',
'USER': 'django',
'PASSWORD': 'RkTkDPFnKpko',
'OPTIONS': {
'sql_mode': 'STRICT_TRANS_TABLES',
'init_command': 'SET storage_engine=InnoDB; SET names "utf8"',
},
},
} # type: Dict[str, Dict[str, Collection[str]]]
DATABASE_TEST_OPTIONS = {
'init_command': 'SET storage_engine=InnoDB',
}
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "test/id/"
IDSUBMIT_STAGING_PATH = "test/staging/"
INTERNET_DRAFT_ARCHIVE_DIR = "test/archive/"
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = "test/archive/"
RFC_PATH = "test/rfc/"
AGENDA_PATH = 'data/developers/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH
USING_DEBUG_EMAIL_SERVER=True
EMAIL_HOST='localhost'
EMAIL_PORT=2025
TRAC_WIKI_DIR_PATTERN = "test/wiki/%s"
TRAC_SVN_DIR_PATTERN = "test/svn/%s"
TRAC_CREATE_ADHOC_WIKIS = [
] # type: List[Tuple[str, str, str]]
MEDIA_BASE_DIR = 'test'
MEDIA_ROOT = MEDIA_BASE_DIR + '/media/'
MEDIA_URL = '/media/'
PHOTOS_DIRNAME = 'photo'
PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME
DOCUMENT_PATH_PATTERN = 'data/developers/ietf-ftp/{doc.type_id}/'
SUBMIT_YANG_CATALOG_MODEL_DIR = 'data/developers/ietf-ftp/yang/catalogmod/'
SUBMIT_YANG_DRAFT_MODEL_DIR = 'data/developers/ietf-ftp/yang/draftmod/'
SUBMIT_YANG_INVAL_MODEL_DIR = 'data/developers/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = 'data/developers/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = 'data/developers/ietf-ftp/yang/rfcmod/'

.vscode/settings.json vendored Normal file (45 lines changed)

@@ -0,0 +1,45 @@
{
"taskExplorer.exclude": [
"**/.vscode-test/**",
"**/bin/**",
"**/build/**",
"**/CompiledOutput/**",
"**/dist/**",
"**/doc/**",
"**/ext/**",
"**/out/**",
"**/output/**",
"**/packages/**",
"**/release/**",
"**/releases/**",
"**/samples/**",
"**/sdks/**",
"**/static/**",
"**/target/**",
"**/test/**",
"**/third_party/**",
"**/vendor/**",
"**/work/**",
"/root/src/bootstrap/nuget/MyGet.ps1"
],
"taskExplorer.enableAnt": false,
"taskExplorer.enableAppPublisher": false,
"taskExplorer.enablePipenv": false,
"taskExplorer.enableBash": false,
"taskExplorer.enableBatch": false,
"taskExplorer.enableGradle": false,
"taskExplorer.enableGrunt": false,
"taskExplorer.enableGulp": false,
"taskExplorer.enablePerl": false,
"taskExplorer.enableMake": false,
"taskExplorer.enableMaven": false,
"taskExplorer.enableNsis": false,
"taskExplorer.enableNpm": false,
"taskExplorer.enablePowershell": false,
"taskExplorer.enablePython": false,
"taskExplorer.enableRuby": false,
"taskExplorer.enableTsc": false,
"taskExplorer.enableWorkspace": true,
"taskExplorer.enableExplorerView": false,
"taskExplorer.enableSideBar": true
}

.vscode/tasks.json vendored (74 lines changed)

@@ -23,13 +23,34 @@
"problemMatcher": []
},
{
"label": "Run Tests",
"label": "Run All Tests",
"type": "shell",
"command": "/usr/local/bin/python",
"args": [
"${workspaceFolder}/ietf/manage.py",
"test",
"--settings=settings_sqlitetest"
"--settings=settings_local_sqlitetest"
],
"group": "test",
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "new",
"showReuseMessage": true,
"clear": false
},
"problemMatcher": []
},
{
"label": "Run JS Tests",
"type": "shell",
"command": "/usr/local/bin/python",
"args": [
"${workspaceFolder}/ietf/manage.py",
"test",
"--settings=settings_local_sqlitetest",
"--pattern=tests_js.py",
],
"group": "test",
"presentation": {
@@ -58,6 +79,55 @@
"clear": false
},
"problemMatcher": []
},
{
"label": "Run SMTP Debugging Server",
"type": "shell",
"command": "/usr/local/bin/python",
"args": ["-m", "smtpd", "-n", "-c", "DebuggingServer", "localhost:2025"],
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "new",
"showReuseMessage": false,
"clear": false
},
"problemMatcher": []
},
{
"label": "Fetch assets via rsync",
"type": "shell",
"command": "/bin/bash",
"args": [
"${workspaceFolder}/docker/scripts/app-rsync-extras.sh"
],
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "new",
"showReuseMessage": false,
"clear": false
},
"problemMatcher": []
},
{
"label": "Fix Windows Timezone File Linking",
"type": "shell",
"command": "/bin/bash",
"args": [
"${workspaceFolder}/docker/app-win32-timezone-fix.sh"
],
"presentation": {
"echo": true,
"reveal": "always",
"focus": true,
"panel": "new",
"showReuseMessage": false,
"clear": false
},
"problemMatcher": []
}
]
}


@@ -1,137 +0,0 @@
# This is a Dockerfile with everything in it to run the IETF datatracker.
#
# If you make changes to the datatracker that add new dependencies (python
# packages or otherwise), you need to rebuild this image to make them
# available. Do this in the top-level directory of your datatracker source
# tree:
#
# docker/build
#
# You can then execute the datatracker like this (also from the top-level
# datatracker source directory):
#
# docker/run
FROM python:3.6-bullseye
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
# Default django runserver port
EXPOSE 8000
# Default mysqld/mariadb port
EXPOSE 3306
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -y update && \
# apt-get upgrade is normally not a good idea, but this is a dev container
apt-get -y upgrade && \
# Install all dependencies that are available as packages
apt-get -y install --no-install-recommends \
apache2-utils \
apt-file \
apt-utils \
curl \
enscript \
gcc \
ghostscript \
git \
gnupg \
graphviz \
libmagic-dev \
libmariadb-dev \
locales \
mariadb-server \
npm \
pigz \
pv \
python-is-python3 \
rsyslog \
unzip \
yang-tools && \
# Since snap doesn't work in Docker containers, install chromedriver per
# https://gist.github.com/varyonic/dea40abcf3dd891d204ef235c6e8dd79#gistcomment-3160722
curl https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list && \
apt-get update -y && \
apt-get install -y --no-install-recommends google-chrome-stable && \
CHROMEVER=$(google-chrome --product-version | grep -o "[^\.]*\.[^\.]*\.[^\.]*") && \
DRIVERVER=$(curl "https://chromedriver.storage.googleapis.com/LATEST_RELEASE_$CHROMEVER") && \
curl -L -O -C - "http://chromedriver.storage.googleapis.com/$DRIVERVER/chromedriver_linux64.zip" && \
unzip chromedriver_linux64.zip -d /bin && \
rm chromedriver_linux64.zip && \
# Install some other packages that are not dependencies but make life easier
apt-get -y install --no-install-recommends \
fish \
less \
nano \
ripgrep \
subversion \
zsh && \
# Reduce image footprint (not that it matters given the size of the above)
apt-get -y clean && \
rm -rf /var/lib/apt/lists/*
# Set locale to en_US.UTF-8
RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \
echo "LANG=en_US.UTF-8" > /etc/locale.conf
RUN dpkg-reconfigure locales && \
locale-gen en_US.UTF-8 && \
update-locale LC_ALL en_US.UTF-8
ENV LC_ALL en_US.UTF-8
# Install bower
RUN npm install -g bower
# Install idnits
ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/
RUN chmod +rx /usr/local/bin/idnits
# Install current datatracker python dependencies
COPY requirements.txt /
RUN pip install -r /requirements.txt
# Turn off rsyslog kernel logging (doesn't work in Docker)
RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
# Allow access to mariadb over the network
RUN sed -i 's/127.0.0.1/0.0.0.0/' /etc/mysql/mariadb.conf.d/50-server.cnf
# Turn on mariadb performance_schema
RUN sed -i 's/\[mysqld\]/\[mysqld\]\nperformance_schema=ON/' /etc/mysql/mariadb.conf.d/50-server.cnf
# Make the mariadb sys schema available for possible installation
# We would normally use the next line, but that has a bug:
# ADD https://github.com/FromDual/mariadb-sys/archive/master.zip /
# This is the repo that has the PR:
ADD https://github.com/grooverdan/mariadb-sys/archive/refs/heads/master.zip /
RUN unzip /master.zip
RUN rm /master.zip
# Colorize the bash shell
RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc
# Make a database dump available as part of the image, for if a user doesn't
# have one installed locally yet - this saves a bunch of time then
ADD https://www.ietf.org/lib/dt/sprint/ietf_utf8.sql.gz /
RUN pigz -v -d /ietf_utf8.sql.gz && \
sed -i -e 's/ENGINE=MyISAM/ENGINE=InnoDB/' /ietf_utf8.sql
# see https://dba.stackexchange.com/a/83385
RUN sed -i 's/\[mysqld\]/\[mysqld\]\ninnodb_buffer_pool_size = 1G\ninnodb_log_buffer_size = 128M\ninnodb_log_file_size = 256M\ninnodb_write_io_threads = 8\ninnodb_flush_log_at_trx_commit = 0/' /etc/mysql/mariadb.conf.d/50-server.cnf && \
service mariadb start --innodb-doublewrite=0 && \
echo "This sequence will take a long time, please be patient" && \
mysqladmin -u root --default-character-set=utf8 create ietf_utf8 && \
bash -c "cd /mariadb-sys-master && mysql --user root < sys_10.sql" && \
bash -c "mysql --user root ietf_utf8 <<< \"GRANT ALL PRIVILEGES ON *.* TO django@localhost IDENTIFIED BY 'RkTkDPFnKpko'; FLUSH PRIVILEGES;\"" && \
bash -c "mysql --user=django --password=RkTkDPFnKpko -f ietf_utf8 < /ietf_utf8.sql" && \
service mariadb stop && \
sed -i 's/^innodb_.*//g' /etc/mysql/mariadb.conf.d/50-server.cnf && \
rm -rf /ietf_utf8.sql /mariadb-sys-master && \
mv /var/lib/mysql /
# Copy the startup file
COPY docker-init.sh /docker-init.sh
RUN chmod +x /docker-init.sh
WORKDIR /root/src
ENTRYPOINT ["/docker-init.sh"]


@@ -2,8 +2,9 @@
## Getting started
1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred
platform.
1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred platform. On Windows, it is highly recommended to use the [WSL 2 *(Windows Subsystem for Linux)*](https://docs.docker.com/desktop/windows/wsl/) backend.
2. On Linux, you must also install [Docker Compose](https://docs.docker.com/compose/install/). Docker Desktop for Mac and Windows already include Docker Compose.
2. If you have a copy of the datatracker code checked out already, simply `cd`
to the top-level directory.
@@ -11,41 +12,149 @@
If not, check out a datatracker branch as usual. We'll check out `trunk`
below, but you can use any branch:
```sh
svn co https://svn.ietf.org/svn/tools/ietfdb/trunk
cd trunk
```
4. **TEMPORARY:** Until a pre-built docker image is available for download, you will need
to build it locally:
3. Follow the instructions for your preferred editor:
- [Visual Studio Code](#using-visual-studio-code)
- [Other Editors / Generic](#using-other-editors--generic)
docker/build
## Using Visual Studio Code
This will take a while (15 to 30m), but only needs to be done once.
This project includes a devcontainer configuration which automates the setup of the development environment with all the required dependencies.
5. Use the `docker/run` script to start the datatracker container. You will be
dropped into a shell from which you can start the datatracker and execute
related commands as usual, for example
### Initial Setup
ietf/manage.py check; ietf/manage.py runserver 0.0.0.0:8000
1. Launch [VS Code](https://code.visualstudio.com/)
2. Under the **Extensions** tab, install the **Remote - Containers** ([ms-vscode-remote.remote-containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)) extension. On Windows, you also need the **Remote - WSL** ([ms-vscode-remote.remote-wsl](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl)) extension to take advantage of the native WSL 2 *(Windows Subsystem for Linux)* integration.
2. Open the top-level directory of the datatracker code you fetched above.
3. A prompt inviting you to reopen the project in containers will appear in the bottom-right corner. Click the **Reopen in Container** button. If you missed the prompt, press `F1`, start typing `reopen in container`, and launch the matching task.
4. VS Code will relaunch in the dev environment and create the containers automatically.
5. You may get several warnings prompting you to reload the window as extensions get installed for the first time. Wait for the initialization script to complete before doing so. *(Wait for the message `Done!` to appear in the terminal panel.)*
### Subsequent Launch
To return to the dev environment created above, simply open **VS Code**, select **File** > **Open Recent**, and choose the datatracker folder with the `[Dev Container]` suffix.
You can also open the datatracker project folder and click the **Reopen in Container** button when prompted. If you missed the prompt, press `F1`, start typing `reopen in container`, and launch the matching task.
### Usage
- Under the **Run and Debug** tab, you can run the server with the debugger attached using **Run Server** (F5). Once the server is ready to accept connections, you'll be prompted to open it in a browser. You can also open [http://localhost:8000](http://localhost:8000) in a browser yourself.
> An alternate profile, **Run Server with Debug Toolbar**, is also available from the dropdown menu; it overlays various debugging tools on top of the webpage. Note, however, that this configuration has a significant performance impact.
To add a **Breakpoint**, simply click in the gutter to the left of the line you wish to stop at. You can also add **Conditional Breakpoints** and **Logpoints** by right-clicking at the same location.
![](assets/vscode-debug-breakpoint.png)
While running in debug mode (`F5`), the following toolbar is shown at the top of the editor:
![](assets/vscode-debug-toolbar.png)
See this [tutorial](https://code.visualstudio.com/docs/python/tutorial-django#_explore-the-debugger) on how to use the debugging tools for Django in VS Code.
- An integrated terminal is available with various shell options *(zsh, bash, fish, etc.)*. Use the **New Terminal** button located at the right side of the Terminal panel. You can run as many terminals as needed in parallel, and use the split feature to display several at once.
![](assets/vscode-terminal-new.png)
- Under the **SQL Tools** tab, a connection **Local Dev** is preconfigured to connect to the DB container. Using this tool, you can list tables, view records and execute SQL queries directly from VS Code.
> The port `3306` is also exposed to the host automatically, should you prefer to use your own SQL tool (see the connection sketch after this list).
![](assets/vscode-sqltools.png)
- Under the **Task Explorer** tab, a list of available preconfigured tasks is displayed. *(You may need to expand the tree to `src > vscode` to see it.)* These are common scripts you can run *(e.g. run tests, fetch assets, etc.)*.
![](assets/vscode-tasks.png)
- From the command palette (`F1`), the command **Run Test Task** allows you to choose between running all tests or just the JavaScript tests.
- The **Ports** panel, found in the Terminal area, shows the ports currently mapped to your host and whether they are listening.
![](assets/vscode-ports.png)
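
Following up on the SQL Tools note above: with the dev container running and port `3306` forwarded, connecting from the host could look like this *(a minimal sketch; it assumes a MySQL-compatible client on the host and uses the same credentials as the preconfigured **Local Dev** connection)*:

```sh
mysql --protocol tcp -h 127.0.0.1 -P 3306 -u django -pRkTkDPFnKpko ietf_utf8
```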
## Using Other Editors / Generic
1. From the terminal, in the top-level directory of the datatracker project:
On Linux / macOS:
```sh
cd docker
run
```
On Windows:
```sh
cd docker
docker-compose -f docker-compose.yml -f docker-compose.extend.yml up -d
docker-compose exec app /bin/sh /docker-init.sh
```
2. Wait for the containers to initialize. Upon completion, you will be dropped into a shell from which you can start the datatracker and execute related commands as usual, for example
```
ietf/manage.py runserver 0.0.0.0:8000
```
to start the datatracker.
You can also pass additional arguments to `docker/run`, in which case they
will be executed in the container (instead of a shell being started.)
Once the datatracker has started, you should be able to open [http://localhost:8000](http://localhost:8000) in a browser and see the landing page.
If you do not already have a copy of the IETF database available in the
`data` directory, one will be downloaded and imported the first time you run
`docker/run`. This will take some time.
Note that unlike the VS Code setup, a debug SMTP server is launched automatically. Any email will be discarded and logged to the shell.
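
In the VS Code setup you can start the same debugging server yourself; the **Run SMTP Debugging Server** task defined in `.vscode/tasks.json` boils down to:

```sh
python -m smtpd -n -c DebuggingServer localhost:2025
```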
Once the datatracker has started, you should be able to open
[http://localhost:8000](http://localhost:8000) in a browser and see the
landing page.
### Exit Environment
## Troubleshooting
To exit the dev environment, simply enter the command `exit` in the shell.
- If the database fails to start, the cause is usually an incompatibility
between the database that last touched the files in `data/mysql` and the
database running inside the docker container.
The containers will automatically be shut down on Linux / macOS.
The solution is to blow away your existing database (`rm -rf data/mysql`). A
fresh copy will be retrieved and imported next time you do `docker/run`, which
should resolve this issue.
On Windows, type the command (from the `docker/` directory)
```sh
docker-compose down
```
to terminate the containers.
### Clean / Rebuild / Fetch Latest DB Image
To delete the active DB container and its volume, and to fetch the latest image / DB dump, run the following:
On Linux / macOS:
```sh
cd docker
cleandb
```
On Windows:
```sh
cd docker
docker-compose down -v
docker-compose pull db
docker-compose build --no-cache db
```
### Accessing MariaDB Port
The DB port is exposed but not mapped to `3306` on the host, to avoid potential conflicts. To find the mapped port, run this command *(from the project's `docker/` directory)*:
```sh
docker-compose port db 3306
```
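
For example *(illustrative output; the host port is assigned dynamically by Docker)*:

```sh
$ docker-compose port db 3306
0.0.0.0:49153
```

Any MySQL-compatible client on the host can then connect to that port.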
## Notes / Troubleshooting
### Windows .ics files incorrectly linked
When checking out the project on Windows, the `.ics` files are not correctly linked and will cause many tests to fail. To fix this issue, run the **Fix Windows Timezone File Linking** task in VS Code or manually run the script `docker/scripts/app-win32-timezone-fix.sh`.
The content of the source files will be copied into the target `.ics` files. Make sure not to add these modified files when committing code!
### Missing assets in the data folder
Because including all assets in the image would significantly increase its size, they are not included by default. You can, however, fetch them by running the **Fetch assets via rsync** task in VS Code or by manually running the script `docker/scripts/app-rsync-extras.sh`.
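
Outside of VS Code, the equivalent invocation *(this is what the task runs)* is:

```sh
/bin/bash docker/scripts/app-rsync-extras.sh
```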

docker/_old/README.md Normal file (56 lines changed)

@@ -0,0 +1,56 @@
# Datatracker Development in Docker
## Getting started
1. [Set up Docker](https://docs.docker.com/get-started/) on your preferred
platform.
2. If you have a copy of the datatracker code checked out already, simply `cd`
to the top-level directory.
If not, check out a datatracker branch as usual. We'll check out `trunk`
below, but you can use any branch:
svn co https://svn.ietf.org/svn/tools/ietfdb/trunk
cd trunk
3. **TEMPORARY:** Replace the contents of the `docker` directory with [Lars'
files](https://svn.ietf.org/svn/tools/ietfdb/personal/lars/7.39.1.dev0/docker/).
4. **TEMPORARY:** Until [Lars'
changes](https://svn.ietf.org/svn/tools/ietfdb/personal/lars/7.39.1.dev0/docker/)
have been merged and a docker image is available for download, you will need
to build it locally:
docker/build
This will take a while, but only needs to be done once.
5. Use the `docker/run` script to start the datatracker container. You will be
dropped into a shell from which you can start the datatracker and execute
related commands as usual, for example
ietf/manage.py runserver 0.0.0.0:8000
to start the datatracker.
You can also pass additional arguments to `docker/run`, in which case they
will be executed in the container (instead of a shell being started.)
If you do not already have a copy of the IETF database available in the
`data` directory, one will be downloaded and imported the first time you run
`docker/run`. This will take some time.
Once the datatracker has started, you should be able to open
[http://localhost:8000](http://localhost:8000) in a browser and see the
landing page.
## Troubleshooting
- If the database fails to start, the cause is usually an incompatibility
between the database that last touched the files in `data/mysql` and the
database running inside the docker container.
The solution is to blow away your existing database (`rm -rf data/mysql`). A
fresh copy will be retrieved and imported next time you do `docker/run`, which
should resolve this issue.

docker/_old/run Normal file (110 lines changed)

@@ -0,0 +1,110 @@
#!/bin/bash
version=0.20
program=${0##*/}
progdir=${0%/*}
if [ "$progdir" = "$program" ]; then progdir="."; fi
if [ "$progdir" = "." ]; then progdir="$PWD"; fi
parent=$(dirname "$progdir")
if [ "$parent" = "." ]; then parent="$PWD"; fi
if [[ $(uname) =~ CYGWIN.* ]]; then parent=$(echo "$parent" | sed -e 's/^\/cygdrive\/\(.\)/\1:/'); fi
function usage() {
cat <<EOF
NAME
$program - Run a docker datatracker container with suitable settings
SYNOPSIS
$program [OPTIONS] ARGS
DESCRIPTION
This is a wrapper which runs an Ubuntu-based docker image which
has been set up with the dependencies needed to easily run the
IETF datatracker in development mode.
MySQL database files at data/mysql will be used; if they do not exist,
a database dump will be retrieved and restored on first run.
OPTIONS
EOF
grep -E '^\s+-[a-zA-Z])' "$0" | sed -E -e 's/\)[^#]+#/ /'
cat <<EOF
AUTHOR
Written by:
Henrik Levkowetz, <henrik@levkowetz.com>
Lars Eggert, <lars@eggert.org>
COPYRIGHT
Copyright (c) 2016 IETF Trust and the persons identified as authors of
the code. All rights reserved. Redistribution and use in source and
binary forms, with or without modification, is permitted pursuant to,
and subject to the license terms contained in, the Revised BSD
License set forth in Section 4.c of the IETF Trusts Legal Provisions
Relating to IETF Documents(https://trustee.ietf.org/license-info).
EOF
}
function die() {
echo -e "\n$program: error: $*" >&2
exit 1
}
function version() {
echo -e "$program $version"
}
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# Default values
MYSQLDIR=$parent/data/mysql
PORT=8000
REPO="ietf/datatracker-environment"
CACHED=':cached'
# Option parsing
shortopts=cChp:V
args=$(getopt -o$shortopts $*)
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
set -- $args
while true ; do
case "$1" in
-c) CACHED=':cached';; # Use cached disk access to reduce system load
-C) CACHED=':consistent';; # Use fully synchronized disk access
-h) usage; exit;; # Show this help, then exit
-p) PORT=$2; shift;; # Bind the container's port 8000 to external port PORT
-V) version; exit;; # Show program version, then exit
--) shift; break;;
*) die "Internal error, inconsistent option specification: '$1'";;
esac
shift
done
if [ -z "$TAG" ]; then
TAG=$(basename "$(svn info "$parent" | grep ^URL | awk '{print $2}' | tr -d '\r')")
fi
if [[ $(uname) =~ CYGWIN.* ]]; then
echo "Running under Cygwin, replacing symlinks with file copies"
ICSFILES=$(/usr/bin/find "$parent/vzic/zoneinfo/" -name '*.ics' -print)
for ICSFILE in $ICSFILES; do
LINK=$(head -n1 "$ICSFILE" | sed -e '/link .*/!d' -e 's/link \(.*\)/\1/')
if [ "$LINK" ]; then
WDIR=$(dirname "$ICSFILE")
echo "Replacing $(basename "$ICSFILE") with $LINK"
cp -f "$WDIR/$LINK" "$ICSFILE"
fi
done
fi
echo "Starting a docker container for '$REPO:$TAG'."
mkdir -p "$MYSQLDIR"
docker run -ti -p "$PORT":8000 -p 33306:3306 \
-v "$parent:/root/src$CACHED" \
-v "$MYSQLDIR:/var/lib/mysql:delegated" \
"$REPO:$TAG" "$@"


@@ -3,71 +3,58 @@
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
ARG VARIANT="3.10-bullseye"
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
ARG NODE_VERSION="none"
RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
EXPOSE 8000
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update
# apt-get upgrade is normally not a good idea, but this is a dev container
RUN apt-get -qy upgrade
# Install the packages we need
RUN apt-get install -qy \
apache2-utils \
apt-file \
apt-utils \
bash \
build-essential \
bzip2 \
ca-certificates \
colordiff \
curl \
enscript \
fish \
gawk \
gcc \
python3-ipython \
git \
gnupg \
graphviz \
jq \
less \
libbz2-dev \
libdb5.3-dev \
libexpat1-dev \
libffi-dev \
libgdbm-dev \
libjpeg62-turbo-dev \
liblzma-dev \
libmagic1 \
libmariadb-dev-compat \
libmagic-dev \
libmariadb-dev \
libncurses5-dev \
libncursesw5-dev \
libreadline-dev \
libsqlite3-dev \
libssl-dev \
libsvn1 \
libxml2-dev \
libxslt-dev \
libyang1 \
libz-dev \
libffi-dev \
locales \
make \
man \
mariadb-client \
netcat \
openssh-client \
patch \
procps \
nano \
pigz \
pv \
python3-ipython \
ripgrep \
rsync \
rsyslog \
subversion \
sudo \
uuid-dev \
vim \
unzip \
wget \
xz-utils\
zile \
zlib1g-dev
yang-tools && \
zsh
# Install chromedriver
RUN apt-get update && \
apt-get install -y gnupg wget curl unzip --no-install-recommends && \
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list && \
apt-get update -y && \
apt-get install -y google-chrome-stable && \
@@ -81,37 +68,37 @@ RUN apt-get update && \
# Get rid of installation files we don't need in the image, to reduce size
RUN apt-get clean && rm -rf /var/lib/apt/lists/*
# Enable some common locales
RUN sed -i -e 's/^. en_US/en_US/' -e 's/^. en_GB/en_GB/' -e 's/^. en_IE/en_IE/' /etc/locale.gen && \
locale-gen
# Set locale to en_US.UTF-8
RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \
echo "LANG=en_US.UTF-8" > /etc/locale.conf && \
dpkg-reconfigure locales && \
locale-gen en_US.UTF-8 && \
update-locale LC_ALL en_US.UTF-8
ENV LC_ALL en_US.UTF-8
# Remove an rsyslog module that we don't need, which also requires extra permissions
RUN sed -i -e '/load="imklog"/d' /etc/rsyslog.conf
# Install bower
RUN npm install -g bower
# Set up root password
RUN echo "root:root" | chpasswd
# idnits and dependencies
ADD https://tools.ietf.org/tools/idnits/idnits /usr/local/bin/
# Install idnits
ADD https://raw.githubusercontent.com/ietf-tools/idnits-mirror/main/idnits /usr/local/bin/
RUN chmod +rx /usr/local/bin/idnits
# Directory for Mac certs
RUN mkdir /etc/certificates
# Setup workspace
ENV HOSTNAME="datatracker"
ENV DDIR="/usr/local/share/datatracker"
RUN mkdir -p $DDIR
WORKDIR $DDIR
# Install current datatracker python dependencies
COPY requirements.txt /tmp/pip-tmp/
RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
&& rm -rf /tmp/pip-tmp
COPY .devcontainer/init.sh /docker-init.sh
# Turn off rsyslog kernel logging (doesn't work in Docker)
RUN sed -i '/imklog/s/^/#/' /etc/rsyslog.conf
# Colorize the bash shell
RUN sed -i 's/#force_color_prompt=/force_color_prompt=/' /root/.bashrc
# Copy the startup file
COPY docker/scripts/app-init.sh /docker-init.sh
RUN sed -i 's/\r$//' /docker-init.sh && \
chmod +x /docker-init.sh
# ENTRYPOINT ["/usr/local/share/datatracker/.devcontainer/init.sh"]
CMD ["sleep", "infinity"]
WORKDIR /root/src
# ENTRYPOINT [ "/docker-init.sh" ]

[Six binary image files added (not shown): 5.9 KiB, 6 KiB, 26 KiB, 69 KiB, 26 KiB, and 23 KiB.]

docker/cleandb Normal file (8 lines changed)

@@ -0,0 +1,8 @@
#!/bin/bash
echo "Shutting down any instance still running..."
docker-compose down -v
echo "Rebuilding the DB image..."
docker-compose pull db
docker-compose build --no-cache db
echo "Done!"


@@ -1,14 +1,14 @@
# Copyright The IETF Trust 2007-2019, All Rights Reserved
# -*- coding: utf-8 -*-
import six
if six.PY3:
from typing import Collection, Dict, List, Tuple # pyflakes:ignore
from ietf.settings import * # pyflakes:ignore
SECRET_KEY = 'jzv$o93h_lzw4a0%0oz-5t5lk+ai=3f8x@uo*9ahu8w4i300o6'
ALLOWED_HOSTS = ['*']
DATABASES = {
'default': {
'HOST': 'db',
'PORT': 3306,
'NAME': 'ietf_utf8',
'ENGINE': 'django.db.backends.mysql',
'USER': 'django',
@@ -18,7 +18,7 @@ DATABASES = {
'init_command': 'SET storage_engine=InnoDB; SET names "utf8"',
},
},
} # type: Dict[str, Dict[str, Collection[str]]]
}
DATABASE_TEST_OPTIONS = {
'init_command': 'SET storage_engine=InnoDB',
@@ -57,5 +57,3 @@ SUBMIT_YANG_DRAFT_MODEL_DIR = 'data/developers/ietf-ftp/yang/draftmod/'
SUBMIT_YANG_INVAL_MODEL_DIR = 'data/developers/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = 'data/developers/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = 'data/developers/ietf-ftp/yang/rfcmod/'


@@ -0,0 +1,84 @@
# Copyright The IETF Trust 2010-2020, All Rights Reserved
# -*- coding: utf-8 -*-
# Standard settings except we use SQLite and skip migrations, this is
# useful for speeding up tests that depend on the test database, try
# for instance:
#
# ./manage.py test --settings=settings_sqlitetest doc.ChangeStateTestCase
#
import os
from ietf.settings import * # pyflakes:ignore
from ietf.settings import TEST_CODE_COVERAGE_CHECKER, BASE_DIR, PHOTOS_DIRNAME
import debug # pyflakes:ignore
debug.debug = True
# Workaround to avoid spending minutes stepping through the migrations in
# every test run. The result of this is to use the 'syncdb' way of creating
# the test database instead of doing it through the migrations. Taken from
# https://gist.github.com/NotSqrt/5f3c76cd15e40ef62d09
class DisableMigrations(object):
def __contains__(self, item):
return True
def __getitem__(self, item):
return None
MIGRATION_MODULES = DisableMigrations()
DATABASES = {
'default': {
'NAME': 'test.db',
'ENGINE': 'django.db.backends.sqlite3',
},
}
if TEST_CODE_COVERAGE_CHECKER and not TEST_CODE_COVERAGE_CHECKER._started: # pyflakes:ignore
TEST_CODE_COVERAGE_CHECKER.start() # pyflakes:ignore
NOMCOM_PUBLIC_KEYS_DIR=os.path.abspath("tmp-nomcom-public-keys-dir")
# Undo any developer-dependent middleware when running the tests
MIDDLEWARE = [ c for c in MIDDLEWARE if not c in DEV_MIDDLEWARE ] # pyflakes:ignore
TEMPLATES[0]['OPTIONS']['context_processors'] = [ p for p in TEMPLATES[0]['OPTIONS']['context_processors'] if not p in DEV_TEMPLATE_CONTEXT_PROCESSORS ] # pyflakes:ignore
REQUEST_PROFILE_STORE_ANONYMOUS_SESSIONS = False
IDSUBMIT_IDNITS_BINARY = "/usr/local/bin/idnits"
IDSUBMIT_REPOSITORY_PATH = "test/id/"
IDSUBMIT_STAGING_PATH = "test/staging/"
INTERNET_DRAFT_ARCHIVE_DIR = "test/archive/"
INTERNET_ALL_DRAFTS_ARCHIVE_DIR = "test/archive/"
RFC_PATH = "test/rfc/"
AGENDA_PATH = 'data/developers/www6s/proceedings/'
MEETINGHOST_LOGO_PATH = AGENDA_PATH
USING_DEBUG_EMAIL_SERVER=True
EMAIL_HOST='localhost'
EMAIL_PORT=2025
TRAC_WIKI_DIR_PATTERN = "test/wiki/%s"
TRAC_SVN_DIR_PATTERN = "test/svn/%s"
TRAC_CREATE_ADHOC_WIKIS = [
] # type: List[Tuple[str, str, str]]
MEDIA_BASE_DIR = 'test'
MEDIA_ROOT = MEDIA_BASE_DIR + '/media/'
MEDIA_URL = '/media/'
PHOTOS_DIRNAME = 'photo'
PHOTOS_DIR = MEDIA_ROOT + PHOTOS_DIRNAME
DOCUMENT_PATH_PATTERN = 'data/developers/ietf-ftp/{doc.type_id}/'
SUBMIT_YANG_CATALOG_MODEL_DIR = 'data/developers/ietf-ftp/yang/catalogmod/'
SUBMIT_YANG_DRAFT_MODEL_DIR = 'data/developers/ietf-ftp/yang/draftmod/'
SUBMIT_YANG_INVAL_MODEL_DIR = 'data/developers/ietf-ftp/yang/invalmod/'
SUBMIT_YANG_IANA_MODEL_DIR = 'data/developers/ietf-ftp/yang/ianamod/'
SUBMIT_YANG_RFC_MODEL_DIR = 'data/developers/ietf-ftp/yang/rfcmod/'

docker/db.Dockerfile Normal file (56 lines changed)

@@ -0,0 +1,56 @@
# ====================
# --- Import Stage ---
# ====================
FROM ubuntu:hirsute AS importStage
# Install dependencies for import
RUN DEBIAN_FRONTEND=noninteractive apt-get -y update && \
apt-get -y install --no-install-recommends \
locales \
mariadb-server \
pigz \
unzip && \
apt-get clean && rm -rf /var/lib/apt/lists/*
# Set locale to en_US.UTF-8
RUN echo "LC_ALL=en_US.UTF-8" >> /etc/environment && \
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen && \
echo "LANG=en_US.UTF-8" > /etc/locale.conf && \
dpkg-reconfigure locales && \
locale-gen en_US.UTF-8 && \
update-locale LC_ALL en_US.UTF-8
ENV LC_ALL en_US.UTF-8
# Turn on mariadb performance_schema
RUN sed -i 's/\[mysqld\]/\[mysqld\]\nperformance_schema=ON/' /etc/mysql/mariadb.conf.d/50-server.cnf
# Make the mariadb sys schema available for possible installation
# We would normally use the next line, but that has a bug:
# ADD https://github.com/FromDual/mariadb-sys/archive/master.zip /
# This is the repo that has the PR:
ADD https://github.com/grooverdan/mariadb-sys/archive/refs/heads/master.zip /
RUN unzip /master.zip
RUN rm /master.zip
# Import the latest database dump
ADD https://www.ietf.org/lib/dt/sprint/ietf_utf8.sql.gz /
RUN pigz -v -d /ietf_utf8.sql.gz && \
sed -i -e 's/ENGINE=MyISAM/ENGINE=InnoDB/' /ietf_utf8.sql
# see https://dba.stackexchange.com/a/83385
RUN sed -i 's/\[mysqld\]/\[mysqld\]\ninnodb_buffer_pool_size = 1G\ninnodb_log_buffer_size = 128M\ninnodb_log_file_size = 256M\ninnodb_write_io_threads = 8\ninnodb_flush_log_at_trx_commit = 0/' /etc/mysql/mariadb.conf.d/50-server.cnf && \
service mariadb start --innodb-doublewrite=0 && \
echo "This sequence will take a long time, please be patient" && \
mysqladmin -u root --default-character-set=utf8 create ietf_utf8 && \
bash -c "cd /mariadb-sys-master && mysql --user root < sys_10.sql" && \
bash -c "mysql --user root ietf_utf8 <<< \"GRANT ALL PRIVILEGES ON *.* TO 'django'@'%' IDENTIFIED BY 'RkTkDPFnKpko'; FLUSH PRIVILEGES;\"" && \
bash -c "mysql --user=django --password=RkTkDPFnKpko -f ietf_utf8 < /ietf_utf8.sql" && \
service mariadb stop
# ===================
# --- Final Image ---
# ===================
FROM mariadb:10
LABEL maintainer="IETF Tools Team <tools-discuss@ietf.org>"
# Copy the mysql data folder from the import stage
COPY --from=importStage /var/lib/mysql /var/lib/mysql
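
As a rough sketch *(the image tag is illustrative; `docker/build` and `docker-compose.yml` are the supported ways to build and run this image)*, the DB image can also be built and started on its own from the top-level datatracker directory:

```sh
# Build the DB image, importing the latest dump at build time
docker build -f docker/db.Dockerfile -t datatracker-db .
# Start it; the imported data is already in /var/lib/mysql
docker run --rm -p 3306:3306 datatracker-db
```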


@@ -0,0 +1,9 @@
version: '3.8'
services:
app:
ports:
- '8000:8000'
db:
ports:
- '3306'


@@ -4,31 +4,32 @@ services:
app:
build:
context: ..
dockerfile: .devcontainer/Dockerfile
dockerfile: docker/app.Dockerfile
args:
# Update 'VARIANT' to pick a version of Python: 3, 3.10, 3.9, 3.8, 3.7, 3.6
# Append -bullseye or -buster to pin to an OS version.
# Use -bullseye variants on local arm64/Apple Silicon.
VARIANT: 3.6-bullseye
NODE_VERSION: '16'
volumes:
- ..:/usr/local/share/datatracker:cached
- ..:/root/src:cached
init: true
# Overrides default command so things don't shut down after the process ends.
command: sleep infinity
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:db
# network_mode: service:db
depends_on:
- db
environment:
USER: django
UID: 1001
GID: 1001
DATADIR: data
# environment:
# USER: django
# UID: 1001
# GID: 1001
# DATADIR: data
# DJANGO_SETTINGS_MODULE: settings_sqlitetest
# Uncomment the next line to use a non-root user for all processes.
@@ -38,7 +39,9 @@ services:
# (Adding the "ports" property to this file will not forward from a Codespace.)
db:
image: mariadb:10
build:
context: ..
dockerfile: docker/db.Dockerfile
restart: unless-stopped
volumes:
- mariadb-data:/var/lib/mysql
@@ -47,8 +50,15 @@ services:
MYSQL_DATABASE: ietf_utf8
MYSQL_USER: django
MYSQL_PASSWORD: RkTkDPFnKpko
command: ['--character-set-server=utf8mb4', '--collation-server=utf8mb4_unicode_ci']
command:
- '--character-set-server=utf8'
- '--collation-server=utf8_unicode_ci'
- '--innodb-buffer-pool-size=1G'
- '--innodb-log-buffer-size=128M'
- '--innodb-log-file-size=256M'
- '--innodb-write-io-threads=8'
- '--innodb-flush-log-at-trx-commit=0'
- '--performance-schema=1'
# Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
# (Adding the "ports" property to this file will not forward from a Codespace.)


@@ -1,91 +0,0 @@
#!/bin/bash
MYSQLDIR=/var/lib/mysql
if [ ! -d "$MYSQLDIR" ]; then
echo "WARNING: Expected the directory $MYSQLDIR to exist."
exit 1
fi
service rsyslog start
if [ -z "$(ls -A $MYSQLDIR/mysql 2>/dev/null)" ]; then
can=$(date -r /mysql +%s)
now=$(date +%s)
age=$((($now - $can)/86400))
echo "NOTE: Database empty; populating it from canned snapshot ($age days old)"
echo " This will take a little while..."
cp -r /mysql/* $MYSQLDIR
fi
service mariadb start
if ! service mariadb status; then
echo "ERROR: MySQL didn't start. Here are some possible causes:"
echo "-------------------------------------------------------------------"
grep mysqld /var/log/syslog
echo "-------------------------------------------------------------------"
echo "Such errors are usually due to a corrupt or outdated database."
echo "Remove your local database and let the image install a clean copy."
exit 1
fi
if [ ! -f /root/src/ietf/settings_local.py ]; then
echo "Setting up a default settings_local.py ..."
cp /root/src/docker/settings_local.py /root/src/ietf/settings_local.py
fi
for sub in \
test/id \
test/staging \
test/archive \
test/rfc \
test/media \
test/wiki/ietf \
data/nomcom_keys/public_keys \
data/developers/ietf-ftp \
data/developers/ietf-ftp/bofreq \
data/developers/ietf-ftp/charter \
data/developers/ietf-ftp/conflict-reviews \
data/developers/ietf-ftp/internet-drafts \
data/developers/ietf-ftp/rfc \
data/developers/ietf-ftp/status-changes \
data/developers/ietf-ftp/yang/catalogmod \
data/developers/ietf-ftp/yang/draftmod \
data/developers/ietf-ftp/yang/ianamod \
data/developers/ietf-ftp/yang/invalmod \
data/developers/ietf-ftp/yang/rfcmod \
data/developers/www6s \
data/developers/www6s/staging \
data/developers/www6s/wg-descriptions \
data/developers/www6s/proceedings \
data/developers/www6/ \
data/developers/www6/iesg \
data/developers/www6/iesg/evaluation \
; do
dir="/root/src/$sub"
if [ ! -d "$dir" ]; then
echo "Creating dir $dir"
mkdir -p "$dir";
fi
done
python -m smtpd -n -c DebuggingServer localhost:2025 &
echo
if [ -z "$*" ]; then
echo "You can execute arbitrary commands now, e.g.,"
echo
echo " ietf/manage.py check && ietf/manage.py runserver 0.0.0.0:8000"
echo
echo "to start a development instance of the Datatracker."
echo
bash
else
echo "Executing \"$*\" and stopping container."
echo
bash -c "$*"
fi
service mariadb stop
service rsyslog stop

docker/build → docker/misc/build Executable file → Normal file (18 lines changed)

@@ -13,7 +13,7 @@ if [[ $(uname) =~ CYGWIN.* ]]; then parent=$(echo "$parent" | sed -e 's/^\/cygdr
function usage() {
cat <<EOF
NAME
$program - Build a datatracker docker image
$program - Build the datatracker docker images
SYNOPSIS
$program [OPTIONS]
@@ -21,8 +21,8 @@ SYNOPSIS
DESCRIPTION
This script builds a Ubuntu-based docker image that has been
set up with the dependencies needed to easily run the IETF
datatracker in development mode. It uses docker/Dockerfile;
i.e., the Dockerfile in the same directory as this script.
datatracker in development mode. It uses docker/app.Dockerfile and
db.Dockerfile; i.e., the Dockerfiles in the same directory as this script.
OPTIONS
EOF
@@ -84,9 +84,13 @@ done
# The program itself
docker rmi -f $IMAGE:trunk 2>/dev/null || true
docker build --progress plain -t "$IMAGE:$TAG" docker/
docker tag "$(docker images -q $IMAGE | head -n 1)" $IMAGE:latest
docker build --progress plain -t "$IMAGE-app:$TAG" -f docker/app.Dockerfile .
docker build --progress plain -t "$IMAGE-db:$TAG" -f docker/db.Dockerfile .
docker tag "$(docker images -q $IMAGE-app | head -n 1)" $IMAGE-app:latest
docker tag "$(docker images -q $IMAGE-db | head -n 1)" $IMAGE-db:latest
if [ -z "$LOCAL" ]; then
docker push $IMAGE:latest
docker push "$IMAGE:$TAG"
docker push $IMAGE-app:latest
docker push "$IMAGE-app:$TAG"
docker push $IMAGE-db:latest
docker push "$IMAGE-db:$TAG"
fi

docker/copydb → docker/misc/copydb Executable file → Normal file (0 lines changed)


@@ -1,110 +1,5 @@
#!/bin/bash
version=0.20
program=${0##*/}
progdir=${0%/*}
if [ "$progdir" = "$program" ]; then progdir="."; fi
if [ "$progdir" = "." ]; then progdir="$PWD"; fi
parent=$(dirname "$progdir")
if [ "$parent" = "." ]; then parent="$PWD"; fi
if [[ $(uname) =~ CYGWIN.* ]]; then parent=$(echo "$parent" | sed -e 's/^\/cygdrive\/\(.\)/\1:/'); fi
function usage() {
cat <<EOF
NAME
$program - Run a docker datatracker container with suitable settings
SYNOPSIS
$program [OPTIONS] ARGS
DESCRIPTION
This is a wrapper which runs an Ubuntu-based docker image which
has been set up with the dependencies needed to easily run the
IETF datatracker in development mode.
MySQL database files at data/mysql will be used; if they do not exist,
a database dump will be retrieved and restored on first run.
OPTIONS
EOF
grep -E '^\s+-[a-zA-Z])' "$0" | sed -E -e 's/\)[^#]+#/ /'
cat <<EOF
AUTHOR
Written by:
Henrik Levkowetz, <henrik@levkowetz.com>
Lars Eggert, <lars@eggert.org>
COPYRIGHT
Copyright (c) 2016 IETF Trust and the persons identified as authors of
the code. All rights reserved. Redistribution and use in source and
binary forms, with or without modification, is permitted pursuant to,
and subject to the license terms contained in, the Revised BSD
License set forth in Section 4.c of the IETF Trusts Legal Provisions
Relating to IETF Documents(https://trustee.ietf.org/license-info).
EOF
}
function die() {
echo -e "\n$program: error: $*" >&2
exit 1
}
function version() {
echo -e "$program $version"
}
trap 'echo "$program($LINENO): Command failed with error code $? ([$$] $0 $*)"; exit 1' ERR
# Default values
MYSQLDIR=$parent/data/mysql
PORT=8000
REPO="ietf/datatracker-environment"
CACHED=':cached'
# Option parsing
shortopts=cChp:V
args=$(getopt -o$shortopts $*)
if [ $? != 0 ] ; then die "Terminating..." >&2 ; exit 1 ; fi
set -- $args
while true ; do
case "$1" in
-c) CACHED=':cached';; # Use cached disk access to reduce system load
-C) CACHED=':consistent';; # Use fully synchronized disk access
-h) usage; exit;; # Show this help, then exit
-p) PORT=$2; shift;; # Bind the container's port 8000 to external port PORT
-V) version; exit;; # Show program version, then exit
--) shift; break;;
*) die "Internal error, inconsistent option specification: '$1'";;
esac
shift
done
if [ -z "$TAG" ]; then
TAG=$(basename "$(svn info "$parent" | grep ^URL | awk '{print $2}' | tr -d '\r')")
fi
if [[ $(uname) =~ CYGWIN.* ]]; then
echo "Running under Cygwin, replacing symlinks with file copies"
ICSFILES=$(/usr/bin/find "$parent/vzic/zoneinfo/" -name '*.ics' -print)
for ICSFILE in $ICSFILES; do
LINK=$(head -n1 "$ICSFILE" | sed -e '/link .*/!d' -e 's/link \(.*\)/\1/')
if [ "$LINK" ]; then
WDIR=$(dirname "$ICSFILE")
echo "Replacing $(basename "$ICSFILE") with $LINK"
cp -f "$WDIR/$LINK" "$ICSFILE"
fi
done
fi
echo "Starting a docker container for '$REPO:$TAG'."
mkdir -p "$MYSQLDIR"
docker run -ti -p "$PORT":8000 -p 33306:3306 \
-v "$parent:/root/src$CACHED" \
-v "$MYSQLDIR:/var/lib/mysql:delegated" \
"$REPO:$TAG" "$@"
docker-compose -f docker-compose.yml -f docker-compose.extend.yml up -d
docker-compose exec app /bin/sh /docker-init.sh
docker-compose down

docker/scripts/app-init.sh Executable file (110 lines changed)

@@ -0,0 +1,110 @@
#!/bin/bash
WORKSPACEDIR="/root/src"
service rsyslog start
# Copy config files if needed
if [ ! -f "$WORKSPACEDIR/ietf/settings_local.py" ]; then
echo "Setting up a default settings_local.py ..."
cp $WORKSPACEDIR/docker/configs/settings_local.py $WORKSPACEDIR/ietf/settings_local.py
else
echo "Using existing ietf/settings_local.py file"
if ! cmp -s $WORKSPACEDIR/docker/configs/settings_local.py $WORKSPACEDIR/ietf/settings_local.py; then
echo "NOTE: Differences detected compared to docker/configs/settings_local.py!"
echo "We'll assume you made these deliberately."
fi
fi
if [ ! -f "$WORKSPACEDIR/ietf/settings_local_debug.py" ]; then
echo "Setting up a default settings_local_debug.py ..."
cp $WORKSPACEDIR/docker/configs/settings_local_debug.py $WORKSPACEDIR/ietf/settings_local_debug.py
else
echo "Using existing ietf/settings_local_debug.py file"
if ! cmp -s $WORKSPACEDIR/docker/configs/settings_local_debug.py $WORKSPACEDIR/ietf/settings_local_debug.py; then
echo "NOTE: Differences detected compared to docker/configs/settings_local_debug.py!"
echo "We'll assume you made these deliberately."
fi
fi
if [ ! -f "$WORKSPACEDIR/ietf/settings_local_sqlitetest.py" ]; then
echo "Setting up a default settings_local_sqlitetest.py ..."
cp $WORKSPACEDIR/docker/configs/settings_local_sqlitetest.py $WORKSPACEDIR/ietf/settings_local_sqlitetest.py
else
echo "Using existing ietf/settings_local_sqlitetest.py file"
if ! cmp -s $WORKSPACEDIR/docker/configs/settings_local_sqlitetest.py $WORKSPACEDIR/ietf/settings_local_sqlitetest.py; then
echo "NOTE: Differences detected compared to docker/configs/settings_local_sqlitetest.py!"
echo "We'll assume you made these deliberately."
fi
fi
# Create assets directories
for sub in \
test/id \
test/staging \
test/archive \
test/rfc \
test/media \
test/wiki/ietf \
data/nomcom_keys/public_keys \
data/developers/ietf-ftp \
data/developers/ietf-ftp/bofreq \
data/developers/ietf-ftp/charter \
data/developers/ietf-ftp/conflict-reviews \
data/developers/ietf-ftp/internet-drafts \
data/developers/ietf-ftp/rfc \
data/developers/ietf-ftp/status-changes \
data/developers/ietf-ftp/yang/catalogmod \
data/developers/ietf-ftp/yang/draftmod \
data/developers/ietf-ftp/yang/ianamod \
data/developers/ietf-ftp/yang/invalmod \
data/developers/ietf-ftp/yang/rfcmod \
data/developers/www6s \
data/developers/www6s/staging \
data/developers/www6s/wg-descriptions \
data/developers/www6s/proceedings \
data/developers/www6/ \
data/developers/www6/iesg \
data/developers/www6/iesg/evaluation \
; do
dir="/root/src/$sub"
if [ ! -d "$dir" ]; then
echo "Creating dir $dir"
mkdir -p "$dir";
fi
done
# Wait for DB container
if [ -n "$EDITOR_VSCODE" ]; then
echo "Waiting for DB container to come online ..."
wget -qO- https://raw.githubusercontent.com/eficode/wait-for/v2.1.3/wait-for | sh -s -- localhost:3306 -- echo "DB ready"
fi
# Initial checks
echo "Running initial checks..."
/usr/local/bin/python $WORKSPACEDIR/ietf/manage.py check --settings=settings_local
# /usr/local/bin/python $WORKSPACEDIR/ietf/manage.py migrate --settings=settings_local
echo "Done!"
if [ -z "$EDITOR_VSCODE" ]; then
python -m smtpd -n -c DebuggingServer localhost:2025 &
if [ -z "$*" ]; then
echo
echo "You can execute arbitrary commands now, e.g.,"
echo
echo " ietf/manage.py check && ietf/manage.py runserver 0.0.0.0:8000"
echo
echo "to start a development instance of the Datatracker."
echo
bash
else
echo "Executing \"$*\" and stopping container."
echo
bash -c "$*"
fi
service rsyslog stop
fi


@@ -0,0 +1,15 @@
#!/bin/bash
WORKSPACEDIR="/root/src"
ICSFILES=$(/usr/bin/find $WORKSPACEDIR/vzic/zoneinfo/ -name '*.ics' -print)
for ICSFILE in $ICSFILES
do
LINK=$(head -n1 $ICSFILE | sed -e '/link .*/!d' -e 's/link \(.*\)/\1/')
if [ "$LINK" ]; then
WDIR=$(dirname $ICSFILE)
echo "Replacing $(basename $ICSFILE) with $LINK"
cp -f $WDIR/$LINK $ICSFILE
fi
done
echo "Done!"

docker/updatedb → docker/scripts/updatedb Executable file → Normal file (0 lines changed)